diff --git a/.dockerignore b/.dockerignore
index e9401f0cc9..0e3b22687d 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,2 +1,3 @@
docs
+!docs/coverage
charts
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index a30c747b13..c02551b014 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -1,8 +1,8 @@
# Each line is a file pattern followed by one or more owners.
# https://help.github.com/en/articles/about-code-owners
-* @zhilingc @pradithya @woop @davidheryanto @khorshuheng
-/core/ @zhilingc @pradithya
-/ingestion/ @zhilingc @pradithya
-/serving/ @zhilingc @pradithya
-/cli/ @zhilingc @pradithya
+* @zhilingc @woop @davidheryanto @khorshuheng @pyalex
+/core/ @zhilingc
+/ingestion/ @zhilingc
+/serving/ @zhilingc
+/cli/ @zhilingc
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index b9c8cd6dff..7a78437b5d 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -1,8 +1,8 @@
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core</artifactId>
- <version>5.3.6.Final</version>
@@ -187,7 +206,6 @@
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
- <version>2.23.0</version>
<scope>test</scope>
@@ -207,5 +225,21 @@
<artifactId>jaxb-api</artifactId>
+ <dependency>
+ <groupId>javax.validation</groupId>
+ <artifactId>validation-api</artifactId>
+ <version>2.0.0.Final</version>
+ </dependency>
+ <dependency>
+ <groupId>org.hibernate.validator</groupId>
+ <artifactId>hibernate-validator</artifactId>
+ <version>6.1.2.Final</version>
+ </dependency>
+ <dependency>
+ <groupId>org.hibernate.validator</groupId>
+ <artifactId>hibernate-validator-annotation-processor</artifactId>
+ <version>6.1.2.Final</version>
+ </dependency>
diff --git a/core/src/main/java/feast/core/config/FeastProperties.java b/core/src/main/java/feast/core/config/FeastProperties.java
index b9c787b6c7..6dad278242 100644
--- a/core/src/main/java/feast/core/config/FeastProperties.java
+++ b/core/src/main/java/feast/core/config/FeastProperties.java
@@ -16,53 +16,225 @@
*/
package feast.core.config;
-import java.util.Map;
+import feast.core.config.FeastProperties.StreamProperties.FeatureStreamOptions;
+import feast.core.validators.OneOfStrings;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.*;
+import javax.annotation.PostConstruct;
+import javax.validation.*;
+import javax.validation.constraints.NotBlank;
+import javax.validation.constraints.NotNull;
+import javax.validation.constraints.Positive;
import lombok.Getter;
import lombok.Setter;
+import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.boot.info.BuildProperties;
@Getter
@Setter
@ConfigurationProperties(prefix = "feast", ignoreInvalidFields = true)
public class FeastProperties {
- private String version;
- private JobProperties jobs;
+ /**
+ * Instantiates a new Feast properties.
+ *
+ * @param buildProperties Feast build properties
+ */
+ @Autowired
+ public FeastProperties(BuildProperties buildProperties) {
+ setVersion(buildProperties.getVersion());
+ }
+
+ /** Instantiates a new Feast properties. */
+ public FeastProperties() {}
+
+ /* Feast Core Build Version */
+ @NotBlank private String version = "unknown";
+
+ /* Population job properties */
+ @NotNull private JobProperties jobs;
+
+ @NotNull
+ /* Feast Kafka stream properties */
private StreamProperties stream;
+ /** Feast job properties. These properties are used for ingestion jobs. */
@Getter
@Setter
public static class JobProperties {
- private String runner;
- private Map<String, String> options;
+ @NotBlank
+ /* The active Apache Beam runner name. This name references one instance of the Runner class */
+ private String activeRunner;
+
+ /** List of configured job runners. */
+ private List<Runner> runners = new ArrayList<>();
+
+ /**
+ * Gets a {@link Runner} instance of the active runner
+ *
+ * @return the active runner
+ */
+ public Runner getActiveRunner() {
+ for (Runner runner : getRunners()) {
+ if (activeRunner.equals(runner.getName())) {
+ return runner;
+ }
+ }
+ throw new RuntimeException(
+ String.format(
+ "Active runner is misconfigured. Could not find runner: %s.", activeRunner));
+ }
+
+ /** Job Runner class. */
+ @Getter
+ @Setter
+ public static class Runner {
+
+ /** Job runner name. This must be unique. */
+ String name;
+
+ /** Job runner type. DirectRunner and DataflowRunner are currently supported. */
+ String type;
+
+ /**
+ * Job runner configuration options. See the following for options
+ * https://api.docs.feast.dev/grpc/feast.core.pb.html#Runner
+ */
+ Map<String, String> options = new HashMap<>();
+
+ /**
+ * Gets the job runner type as an enum.
+ *
+ * @return Returns the job runner type as {@link feast.core.job.Runner}
+ */
+ public feast.core.job.Runner getType() {
+ return feast.core.job.Runner.fromString(type);
+ }
+ }
+
+ @NotNull
+ /* Population job metric properties */
private MetricsProperties metrics;
- private JobUpdatesProperties updates;
- }
- @Getter
- @Setter
- public static class JobUpdatesProperties {
+ /* Timeout in seconds for each attempt to update or submit a new job to the runner */
+ @Positive private long jobUpdateTimeoutSeconds;
- private long timeoutSeconds;
- private long pollingIntervalMillis;
+ /* Job update polling interval in milliseconds. Determines how frequently Feast updates running jobs. */
+ @Positive private long pollingIntervalMilliseconds;
}
+ /** Properties used to configure Feast's managed Kafka feature stream. */
@Getter
@Setter
public static class StreamProperties {
+ /* Feature stream type. Only "kafka" is supported. */
+ @OneOfStrings({"kafka"})
+ @NotBlank
private String type;
- private Map<String, String> options;
+
+ /* Feature stream options */
+ @NotNull private FeatureStreamOptions options;
+
+ /** Feature stream options */
+ @Getter
+ @Setter
+ public static class FeatureStreamOptions {
+
+ /* Kafka topic to use for feature sets without source topics. */
+ @NotBlank private String topic = "feast-features";
+
+ /**
+ * Comma separated list of Kafka bootstrap servers. Used for feature sets without a defined
+ * source.
+ */
+ @NotBlank private String bootstrapServers = "localhost:9092";
+
+ /* Replication factor (number of copies) of the managed feature stream Kafka topic. */
+ @Positive private short replicationFactor = 1;
+
+ /* Number of Kafka partitions to use for the managed feature stream. */
+ @Positive private int partitions = 1;
+ }
}
+ /** Feast population job metrics */
@Getter
@Setter
public static class MetricsProperties {
+ /* Population job metrics enabled */
private boolean enabled;
+
+ /* Metric type. Possible options: statsd */
+ @OneOfStrings({"statsd"})
+ @NotBlank
private String type;
+
+ /* Host of metric sink */
private String host;
- private int port;
+
+ /* Port of metric sink */
+ @Positive private int port;
+ }
+
+ /**
+ * Validates all FeastProperties. This method runs after properties have been initialized and
+ * validates each nested properties class individually and conditionally.
+ */
+ @PostConstruct
+ public void validate() {
+ ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
+ Validator validator = factory.getValidator();
+
+ // Validate root fields in FeastProperties
+ Set<ConstraintViolation<FeastProperties>> violations = validator.validate(this);
+ if (!violations.isEmpty()) {
+ throw new ConstraintViolationException(violations);
+ }
+
+ // Validate Stream properties
+ Set<ConstraintViolation<StreamProperties>> streamPropertyViolations =
+ validator.validate(getStream());
+ if (!streamPropertyViolations.isEmpty()) {
+ throw new ConstraintViolationException(streamPropertyViolations);
+ }
+
+ // Validate Stream Options
+ Set<ConstraintViolation<FeatureStreamOptions>> featureStreamOptionsViolations =
+ validator.validate(getStream().getOptions());
+ if (!featureStreamOptionsViolations.isEmpty()) {
+ throw new ConstraintViolationException(featureStreamOptionsViolations);
+ }
+
+ // Validate JobProperties
+ Set<ConstraintViolation<JobProperties>> jobPropertiesViolations = validator.validate(getJobs());
+ if (!jobPropertiesViolations.isEmpty()) {
+ throw new ConstraintViolationException(jobPropertiesViolations);
+ }
+
+ // Validate MetricsProperties
+ if (getJobs().getMetrics().isEnabled()) {
+ Set<ConstraintViolation<MetricsProperties>> jobMetricViolations =
+ validator.validate(getJobs().getMetrics());
+ if (!jobMetricViolations.isEmpty()) {
+ throw new ConstraintViolationException(jobMetricViolations);
+ }
+ // Additional custom check for hostname value because there is no built-in Spring annotation
+ // to validate the value is a DNS resolvable hostname or an IP address.
+ try {
+ //noinspection ResultOfMethodCallIgnored
+ InetAddress.getByName(getJobs().getMetrics().getHost());
+ } catch (UnknownHostException e) {
+ throw new IllegalArgumentException(
+ "Invalid config value for feast.jobs.metrics.host: "
+ + getJobs().getMetrics().getHost()
+ + ". Make sure it is a valid IP address or DNS hostname e.g. localhost or 10.128.10.40. Error detail: "
+ + e.getMessage());
+ }
+ }
}
}
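For reference, a minimal sketch of the fail-fast Bean Validation pattern that the new @PostConstruct validate() method applies. SampleProperties and its fields are illustrative stand-ins, not part of Feast, and running it requires a Bean Validation implementation on the classpath, such as the hibernate-validator dependency added above.

import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.ConstraintViolationException;
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.Positive;

public class ValidationSketch {
  // Illustrative properties class; both default values violate their constraints.
  static class SampleProperties {
    @NotBlank String host = "";
    @Positive int port = 0;
  }

  public static void main(String[] args) {
    Validator validator = Validation.buildDefaultValidatorFactory().getValidator();
    Set<ConstraintViolation<SampleProperties>> violations =
        validator.validate(new SampleProperties());
    if (!violations.isEmpty()) {
      // Same pattern as FeastProperties.validate(): surface every violation at startup.
      throw new ConstraintViolationException(violations);
    }
  }
}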
diff --git a/core/src/main/java/feast/core/config/FeatureStreamConfig.java b/core/src/main/java/feast/core/config/FeatureStreamConfig.java
index 45de359ac7..c1982604c3 100644
--- a/core/src/main/java/feast/core/config/FeatureStreamConfig.java
+++ b/core/src/main/java/feast/core/config/FeatureStreamConfig.java
@@ -17,10 +17,10 @@
package feast.core.config;
import com.google.common.base.Strings;
-import feast.core.SourceProto.KafkaSourceConfig;
-import feast.core.SourceProto.SourceType;
import feast.core.config.FeastProperties.StreamProperties;
import feast.core.model.Source;
+import feast.proto.core.SourceProto.KafkaSourceConfig;
+import feast.proto.core.SourceProto.SourceType;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@@ -48,8 +48,8 @@ public Source getDefaultSource(FeastProperties feastProperties) {
SourceType featureStreamType = SourceType.valueOf(streamProperties.getType().toUpperCase());
switch (featureStreamType) {
case KAFKA:
- String bootstrapServers = streamProperties.getOptions().get("bootstrapServers");
- String topicName = streamProperties.getOptions().get("topic");
+ String bootstrapServers = streamProperties.getOptions().getBootstrapServers();
+ String topicName = streamProperties.getOptions().getTopic();
Map<String, Object> map = new HashMap<>();
map.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
map.put(
@@ -59,9 +59,8 @@ public Source getDefaultSource(FeastProperties feastProperties) {
NewTopic newTopic =
new NewTopic(
topicName,
- Integer.valueOf(streamProperties.getOptions().getOrDefault("numPartitions", "1")),
- Short.valueOf(
- streamProperties.getOptions().getOrDefault("replicationFactor", "1")));
+ streamProperties.getOptions().getPartitions(),
+ streamProperties.getOptions().getReplicationFactor());
CreateTopicsResult createTopicsResult =
client.createTopics(Collections.singleton(newTopic));
try {
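The topic-creation path above can be exercised in isolation. A hedged sketch, assuming a local broker: the bootstrap address, topic name, partition count, and replication factor mirror the typed defaults that FeatureStreamOptions now provides, but are not read from Feast here.

import java.util.Collections;
import java.util.Map;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;

public class TopicBootstrapSketch {
  public static void main(String[] args) throws Exception {
    // Assumed broker address; matches the FeatureStreamOptions default.
    try (AdminClient client =
        AdminClient.create(
            Map.<String, Object>of(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"))) {
      // Topic name, partitions, and replication factor now come from typed getters in Feast.
      NewTopic newTopic = new NewTopic("feast-features", 1, (short) 1);
      client.createTopics(Collections.singleton(newTopic)).all().get();
    }
  }
}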
diff --git a/core/src/main/java/feast/core/config/JobConfig.java b/core/src/main/java/feast/core/config/JobConfig.java
index 728fc0545b..30023de064 100644
--- a/core/src/main/java/feast/core/config/JobConfig.java
+++ b/core/src/main/java/feast/core/config/JobConfig.java
@@ -16,22 +16,16 @@
*/
package feast.core.config;
-import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
-import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
-import com.google.api.client.json.jackson2.JacksonFactory;
-import com.google.api.services.dataflow.Dataflow;
-import com.google.api.services.dataflow.DataflowScopes;
-import com.google.common.base.Strings;
+import com.google.gson.Gson;
+import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.util.JsonFormat;
import feast.core.config.FeastProperties.JobProperties;
-import feast.core.config.FeastProperties.JobUpdatesProperties;
import feast.core.job.JobManager;
-import feast.core.job.Runner;
import feast.core.job.dataflow.DataflowJobManager;
import feast.core.job.direct.DirectJobRegistry;
import feast.core.job.direct.DirectRunnerJobManager;
-import java.io.IOException;
-import java.security.GeneralSecurityException;
-import java.util.HashMap;
+import feast.proto.core.RunnerProto.DataflowRunnerConfigOptions;
+import feast.proto.core.RunnerProto.DirectRunnerConfigOptions;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
@@ -42,67 +36,39 @@
@Slf4j
@Configuration
public class JobConfig {
+ private final Gson gson = new Gson();
/**
- * Get a JobManager according to the runner type and dataflow configuration.
+ * Get a JobManager according to the runner type and Dataflow configuration.
*
* @param feastProperties feast config properties
*/
@Bean
@Autowired
- public JobManager getJobManager(
- FeastProperties feastProperties, DirectJobRegistry directJobRegistry) {
+ public JobManager getJobManager(FeastProperties feastProperties)
+ throws InvalidProtocolBufferException {
JobProperties jobProperties = feastProperties.getJobs();
- Runner runner = Runner.fromString(jobProperties.getRunner());
- if (jobProperties.getOptions() == null) {
- jobProperties.setOptions(new HashMap<>());
- }
- Map<String, String> jobOptions = jobProperties.getOptions();
- switch (runner) {
- case DATAFLOW:
- if (Strings.isNullOrEmpty(jobOptions.getOrDefault("region", null))
- || Strings.isNullOrEmpty(jobOptions.getOrDefault("project", null))) {
- log.error("Project and location of the Dataflow runner is not configured");
- throw new IllegalStateException(
- "Project and location of Dataflow runner must be specified for jobs to be run on Dataflow runner.");
- }
- try {
- GoogleCredential credential =
- GoogleCredential.getApplicationDefault().createScoped(DataflowScopes.all());
- Dataflow dataflow =
- new Dataflow(
- GoogleNetHttpTransport.newTrustedTransport(),
- JacksonFactory.getDefaultInstance(),
- credential);
+ FeastProperties.JobProperties.Runner runner = jobProperties.getActiveRunner();
+ Map<String, String> runnerConfigOptions = runner.getOptions();
+ String configJson = gson.toJson(runnerConfigOptions);
+
+ FeastProperties.MetricsProperties metrics = jobProperties.getMetrics();
- return new DataflowJobManager(
- dataflow, jobProperties.getOptions(), jobProperties.getMetrics());
- } catch (IOException e) {
- throw new IllegalStateException(
- "Unable to find credential required for Dataflow monitoring API", e);
- } catch (GeneralSecurityException e) {
- throw new IllegalStateException("Security exception while connecting to Dataflow API", e);
- } catch (Exception e) {
- throw new IllegalStateException("Unable to initialize DataflowJobManager", e);
- }
+ switch (runner.getType()) {
+ case DATAFLOW:
+ DataflowRunnerConfigOptions.Builder dataflowRunnerConfigOptions =
+ DataflowRunnerConfigOptions.newBuilder();
+ JsonFormat.parser().merge(configJson, dataflowRunnerConfigOptions);
+ return new DataflowJobManager(dataflowRunnerConfigOptions.build(), metrics);
case DIRECT:
+ DirectRunnerConfigOptions.Builder directRunnerConfigOptions =
+ DirectRunnerConfigOptions.newBuilder();
+ JsonFormat.parser().merge(configJson, directRunnerConfigOptions);
return new DirectRunnerJobManager(
- jobProperties.getOptions(), directJobRegistry, jobProperties.getMetrics());
+ directRunnerConfigOptions.build(), new DirectJobRegistry(), metrics);
default:
- throw new IllegalArgumentException("Unsupported runner: " + jobProperties.getRunner());
+ throw new IllegalArgumentException("Unsupported runner: " + runner);
}
}
-
- /** Get a direct job registry */
- @Bean
- public DirectJobRegistry directJobRegistry() {
- return new DirectJobRegistry();
- }
-
- /** Extracts job update options from feast core options. */
- @Bean
- public JobUpdatesProperties jobUpdatesProperties(FeastProperties feastProperties) {
- return feastProperties.getJobs().getUpdates();
- }
}
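The Gson-plus-JsonFormat round-trip above (runner options map to JSON to typed proto builder) can be tried in isolation. A sketch under assumptions: com.google.protobuf.Struct stands in for DataflowRunnerConfigOptions so the snippet is self-contained; with a concrete options proto, unknown keys would make merge() throw unless ignoringUnknownFields() were used.

import com.google.gson.Gson;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Struct;
import com.google.protobuf.util.JsonFormat;
import java.util.Map;

public class RunnerOptionsSketch {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    // Free-form runner options, as they would arrive from configuration.
    Map<String, String> options = Map.of("project", "my-gcp-project", "region", "us-central1");
    // Serialize the map to JSON, then merge the JSON into a proto builder.
    String json = new Gson().toJson(options);
    Struct.Builder builder = Struct.newBuilder(); // stand-in for DataflowRunnerConfigOptions.newBuilder()
    JsonFormat.parser().merge(json, builder);
    System.out.println(builder.build());
  }
}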
diff --git a/core/src/main/java/feast/core/dao/FeatureSetRepository.java b/core/src/main/java/feast/core/dao/FeatureSetRepository.java
index 3eba210888..b136650dfd 100644
--- a/core/src/main/java/feast/core/dao/FeatureSetRepository.java
+++ b/core/src/main/java/feast/core/dao/FeatureSetRepository.java
@@ -25,25 +25,16 @@ public interface FeatureSetRepository extends JpaRepository<FeatureSet, String> {
long count();
- // Find single feature set by project, name, and version
- FeatureSet findFeatureSetByNameAndProject_NameAndVersion(
- String name, String project, Integer version);
+ // Find single feature set by project and name
+ FeatureSet findFeatureSetByNameAndProject_Name(String name, String project);
- // Find single latest version of a feature set by project and name (LIKE)
- FeatureSet findFirstFeatureSetByNameLikeAndProject_NameOrderByVersionDesc(
- String name, String project);
+ // find all feature sets and order by name
+ List<FeatureSet> findAllByOrderByNameAsc();
- // find all feature sets and order by name and version
- List<FeatureSet> findAllByOrderByNameAscVersionAsc();
+ // find all feature sets matching the given name pattern with a specific project.
+ List<FeatureSet> findAllByNameLikeAndProject_NameOrderByNameAsc(String name, String project_name);
- // find all feature sets within a project and order by name and version
- List<FeatureSet> findAllByProject_NameOrderByNameAscVersionAsc(String project_name);
-
- // find all versions of feature sets matching the given name pattern with a specific project.
- List<FeatureSet> findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc(
- String name, String project_name);
-
- // find all versions of feature sets matching the given name pattern and project pattern
- List<FeatureSet> findAllByNameLikeAndProject_NameLikeOrderByNameAscVersionAsc(
+ // find all feature sets matching the given name pattern and project pattern
+ List<FeatureSet> findAllByNameLikeAndProject_NameLikeOrderByNameAsc(
String name, String project_name);
}
diff --git a/core/src/main/java/feast/core/dao/MetricsRepository.java b/core/src/main/java/feast/core/dao/MetricsRepository.java
deleted file mode 100644
index 7146e1e3ec..0000000000
--- a/core/src/main/java/feast/core/dao/MetricsRepository.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- * Copyright 2018-2019 The Feast Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package feast.core.dao;
-
-import feast.core.model.Metrics;
-import java.util.List;
-import org.springframework.data.jpa.repository.JpaRepository;
-import org.springframework.stereotype.Repository;
-
-@Repository
-public interface MetricsRepository extends JpaRepository<Metrics, Long> {
- List<Metrics> findByJob_Id(String id);
-}
diff --git a/core/src/main/java/feast/core/grpc/CoreServiceImpl.java b/core/src/main/java/feast/core/grpc/CoreServiceImpl.java
index 42bc0ba23d..db65328b82 100644
--- a/core/src/main/java/feast/core/grpc/CoreServiceImpl.java
+++ b/core/src/main/java/feast/core/grpc/CoreServiceImpl.java
@@ -18,37 +18,38 @@
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.protobuf.InvalidProtocolBufferException;
-import feast.core.CoreServiceGrpc.CoreServiceImplBase;
-import feast.core.CoreServiceProto.ApplyFeatureSetRequest;
-import feast.core.CoreServiceProto.ApplyFeatureSetResponse;
-import feast.core.CoreServiceProto.ArchiveProjectRequest;
-import feast.core.CoreServiceProto.ArchiveProjectResponse;
-import feast.core.CoreServiceProto.CreateProjectRequest;
-import feast.core.CoreServiceProto.CreateProjectResponse;
-import feast.core.CoreServiceProto.GetFeastCoreVersionRequest;
-import feast.core.CoreServiceProto.GetFeastCoreVersionResponse;
-import feast.core.CoreServiceProto.GetFeatureSetRequest;
-import feast.core.CoreServiceProto.GetFeatureSetResponse;
-import feast.core.CoreServiceProto.ListFeatureSetsRequest;
-import feast.core.CoreServiceProto.ListFeatureSetsResponse;
-import feast.core.CoreServiceProto.ListIngestionJobsRequest;
-import feast.core.CoreServiceProto.ListIngestionJobsResponse;
-import feast.core.CoreServiceProto.ListProjectsRequest;
-import feast.core.CoreServiceProto.ListProjectsResponse;
-import feast.core.CoreServiceProto.ListStoresRequest;
-import feast.core.CoreServiceProto.ListStoresResponse;
-import feast.core.CoreServiceProto.RestartIngestionJobRequest;
-import feast.core.CoreServiceProto.RestartIngestionJobResponse;
-import feast.core.CoreServiceProto.StopIngestionJobRequest;
-import feast.core.CoreServiceProto.StopIngestionJobResponse;
-import feast.core.CoreServiceProto.UpdateStoreRequest;
-import feast.core.CoreServiceProto.UpdateStoreResponse;
+import feast.core.config.FeastProperties;
import feast.core.exception.RetrievalException;
import feast.core.grpc.interceptors.MonitoringInterceptor;
import feast.core.model.Project;
import feast.core.service.AccessManagementService;
import feast.core.service.JobService;
import feast.core.service.SpecService;
+import feast.proto.core.CoreServiceGrpc.CoreServiceImplBase;
+import feast.proto.core.CoreServiceProto.ApplyFeatureSetRequest;
+import feast.proto.core.CoreServiceProto.ApplyFeatureSetResponse;
+import feast.proto.core.CoreServiceProto.ArchiveProjectRequest;
+import feast.proto.core.CoreServiceProto.ArchiveProjectResponse;
+import feast.proto.core.CoreServiceProto.CreateProjectRequest;
+import feast.proto.core.CoreServiceProto.CreateProjectResponse;
+import feast.proto.core.CoreServiceProto.GetFeastCoreVersionRequest;
+import feast.proto.core.CoreServiceProto.GetFeastCoreVersionResponse;
+import feast.proto.core.CoreServiceProto.GetFeatureSetRequest;
+import feast.proto.core.CoreServiceProto.GetFeatureSetResponse;
+import feast.proto.core.CoreServiceProto.ListFeatureSetsRequest;
+import feast.proto.core.CoreServiceProto.ListFeatureSetsResponse;
+import feast.proto.core.CoreServiceProto.ListIngestionJobsRequest;
+import feast.proto.core.CoreServiceProto.ListIngestionJobsResponse;
+import feast.proto.core.CoreServiceProto.ListProjectsRequest;
+import feast.proto.core.CoreServiceProto.ListProjectsResponse;
+import feast.proto.core.CoreServiceProto.ListStoresRequest;
+import feast.proto.core.CoreServiceProto.ListStoresResponse;
+import feast.proto.core.CoreServiceProto.RestartIngestionJobRequest;
+import feast.proto.core.CoreServiceProto.RestartIngestionJobResponse;
+import feast.proto.core.CoreServiceProto.StopIngestionJobRequest;
+import feast.proto.core.CoreServiceProto.StopIngestionJobResponse;
+import feast.proto.core.CoreServiceProto.UpdateStoreRequest;
+import feast.proto.core.CoreServiceProto.UpdateStoreResponse;
import io.grpc.Status;
import io.grpc.StatusRuntimeException;
import io.grpc.stub.StreamObserver;
@@ -64,6 +65,7 @@
@GRpcService(interceptors = {MonitoringInterceptor.class})
public class CoreServiceImpl extends CoreServiceImplBase {
+ private final FeastProperties feastProperties;
private SpecService specService;
private AccessManagementService accessManagementService;
private JobService jobService;
@@ -72,17 +74,28 @@ public class CoreServiceImpl extends CoreServiceImplBase {
public CoreServiceImpl(
SpecService specService,
AccessManagementService accessManagementService,
- JobService jobService) {
+ JobService jobService,
+ FeastProperties feastProperties) {
this.specService = specService;
this.accessManagementService = accessManagementService;
this.jobService = jobService;
+ this.feastProperties = feastProperties;
}
@Override
public void getFeastCoreVersion(
GetFeastCoreVersionRequest request,
StreamObserver<GetFeastCoreVersionResponse> responseObserver) {
- super.getFeastCoreVersion(request, responseObserver);
+ try {
+ GetFeastCoreVersionResponse response =
+ GetFeastCoreVersionResponse.newBuilder().setVersion(feastProperties.getVersion()).build();
+ responseObserver.onNext(response);
+ responseObserver.onCompleted();
+ } catch (RetrievalException | StatusRuntimeException e) {
+ log.error("Could not determine Feast Core version: ", e);
+ responseObserver.onError(
+ Status.INTERNAL.withDescription(e.getMessage()).withCause(e).asRuntimeException());
+ }
}
@Override
@@ -183,6 +196,17 @@ public void archiveProject(
accessManagementService.archiveProject(request.getName());
responseObserver.onNext(ArchiveProjectResponse.getDefaultInstance());
responseObserver.onCompleted();
+ } catch (IllegalArgumentException e) {
+ log.error("Recieved an invalid request on calling archiveProject method:", e);
+ responseObserver.onError(
+ Status.INVALID_ARGUMENT
+ .withDescription(e.getMessage())
+ .withCause(e)
+ .asRuntimeException());
+ } catch (UnsupportedOperationException e) {
+ log.error("Attempted to archive an unsupported project:", e);
+ responseObserver.onError(
+ Status.UNIMPLEMENTED.withDescription(e.getMessage()).withCause(e).asRuntimeException());
} catch (Exception e) {
log.error("Exception has occurred in the createProject method: ", e);
responseObserver.onError(
diff --git a/core/src/main/java/feast/core/grpc/HealthServiceImpl.java b/core/src/main/java/feast/core/grpc/HealthServiceImpl.java
new file mode 100644
index 0000000000..3bd2f8748f
--- /dev/null
+++ b/core/src/main/java/feast/core/grpc/HealthServiceImpl.java
@@ -0,0 +1,54 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.core.grpc;
+
+import feast.core.service.AccessManagementService;
+import io.grpc.Status;
+import io.grpc.health.v1.HealthGrpc.HealthImplBase;
+import io.grpc.health.v1.HealthProto.HealthCheckRequest;
+import io.grpc.health.v1.HealthProto.HealthCheckResponse;
+import io.grpc.health.v1.HealthProto.HealthCheckResponse.ServingStatus;
+import io.grpc.stub.StreamObserver;
+import lombok.extern.slf4j.Slf4j;
+import org.lognet.springboot.grpc.GRpcService;
+import org.springframework.beans.factory.annotation.Autowired;
+
+@Slf4j
+@GRpcService
+public class HealthServiceImpl extends HealthImplBase {
+ private final AccessManagementService accessManagementService;
+
+ @Autowired
+ public HealthServiceImpl(AccessManagementService accessManagementService) {
+ this.accessManagementService = accessManagementService;
+ }
+
+ @Override
+ public void check(
+ HealthCheckRequest request, StreamObserver<HealthCheckResponse> responseObserver) {
+ try {
+ accessManagementService.listProjects();
+ responseObserver.onNext(
+ HealthCheckResponse.newBuilder().setStatus(ServingStatus.SERVING).build());
+ responseObserver.onCompleted();
+ } catch (Exception e) {
+ log.error("Health Check: unable to retrieve projects.\nError: %s", e);
+ responseObserver.onError(
+ Status.INTERNAL.withDescription(e.getMessage()).withCause(e).asRuntimeException());
+ }
+ }
+}
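A hedged sketch of probing the new endpoint from a client. It assumes the standard grpc-services health classes (the server above uses Feast's own compiled HealthProto variants of the same proto) and assumes Feast Core's gRPC port is 6565; adjust for your deployment.

import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import io.grpc.health.v1.HealthCheckRequest;
import io.grpc.health.v1.HealthCheckResponse;
import io.grpc.health.v1.HealthGrpc;

public class HealthCheckClient {
  public static void main(String[] args) {
    // Host and port are assumptions; point this at your Feast Core instance.
    ManagedChannel channel =
        ManagedChannelBuilder.forAddress("localhost", 6565).usePlaintext().build();
    HealthCheckResponse response =
        HealthGrpc.newBlockingStub(channel).check(HealthCheckRequest.getDefaultInstance());
    System.out.println("Feast Core status: " + response.getStatus()); // SERVING when healthy
    channel.shutdown();
  }
}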
diff --git a/core/src/main/java/feast/core/http/HealthController.java b/core/src/main/java/feast/core/http/HealthController.java
deleted file mode 100644
index 2451ed793e..0000000000
--- a/core/src/main/java/feast/core/http/HealthController.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- * Copyright 2018-2019 The Feast Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package feast.core.http;
-
-import static org.springframework.http.HttpStatus.INTERNAL_SERVER_ERROR;
-
-import java.sql.Connection;
-import java.sql.SQLException;
-import javax.sql.DataSource;
-import lombok.extern.slf4j.Slf4j;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.ResponseEntity;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RequestMethod;
-import org.springframework.web.bind.annotation.RestController;
-
-/** Web http for pod health-check endpoints. */
-@Slf4j
-@RestController
-public class HealthController {
-
- private final DataSource db;
-
- @Autowired
- public HealthController(DataSource datasource) {
- this.db = datasource;
- }
-
- /**
- * /ping endpoint checks if the application is ready to serve traffic by checking if it is able to
- * access the metadata db.
- */
- @RequestMapping(value = "/ping", method = RequestMethod.GET)
- public ResponseEntity<String> ping() {
- return ResponseEntity.ok("pong");
- }
-
- /**
- * /healthz endpoint checks if the application is healthy by checking if the application still has
- * access to the metadata db.
- */
- @RequestMapping(value = "/healthz", method = RequestMethod.GET)
- public ResponseEntity<String> healthz() {
- try (Connection conn = db.getConnection()) {
- if (conn.isValid(10)) {
- return ResponseEntity.ok("healthy");
- }
- log.error("Unable to reach DB");
- return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
- .body("Unable to establish connection with DB");
- } catch (SQLException e) {
- log.error("Unable to reach DB: {}", e);
- return ResponseEntity.status(INTERNAL_SERVER_ERROR).body(e.getMessage());
- }
- }
-}
diff --git a/core/src/main/java/feast/core/job/JobUpdateTask.java b/core/src/main/java/feast/core/job/JobUpdateTask.java
index f3afe84df7..056da34b3a 100644
--- a/core/src/main/java/feast/core/job/JobUpdateTask.java
+++ b/core/src/main/java/feast/core/job/JobUpdateTask.java
@@ -16,9 +16,7 @@
*/
package feast.core.job;
-import feast.core.FeatureSetProto;
-import feast.core.SourceProto;
-import feast.core.StoreProto;
+import com.google.common.collect.Sets;
import feast.core.log.Action;
import feast.core.log.AuditLogger;
import feast.core.log.Resource;
@@ -27,10 +25,10 @@
import feast.core.model.JobStatus;
import feast.core.model.Source;
import feast.core.model.Store;
+import feast.proto.core.FeatureSetProto.FeatureSetStatus;
import java.time.Instant;
import java.util.List;
import java.util.Optional;
-import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
@@ -38,7 +36,6 @@
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
-import java.util.stream.Collectors;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
@@ -52,134 +49,101 @@
@Getter
public class JobUpdateTask implements Callable {
- private final List<FeatureSetProto.FeatureSet> featureSets;
- private final SourceProto.Source sourceSpec;
- private final StoreProto.Store store;
+ private final List<FeatureSet> featureSets;
+ private final Source source;
+ private final Store store;
private final Optional currentJob;
- private JobManager jobManager;
- private long jobUpdateTimeoutSeconds;
+ private final JobManager jobManager;
+ private final long jobUpdateTimeoutSeconds;
+ private final String runnerName;
public JobUpdateTask(
- List<FeatureSetProto.FeatureSet> featureSets,
- SourceProto.Source sourceSpec,
- StoreProto.Store store,
+ List<FeatureSet> featureSets,
+ Source source,
+ Store store,
Optional currentJob,
JobManager jobManager,
long jobUpdateTimeoutSeconds) {
this.featureSets = featureSets;
- this.sourceSpec = sourceSpec;
+ this.source = source;
this.store = store;
this.currentJob = currentJob;
this.jobManager = jobManager;
this.jobUpdateTimeoutSeconds = jobUpdateTimeoutSeconds;
+ this.runnerName = jobManager.getRunnerType().toString();
}
@Override
public Job call() {
ExecutorService executorService = Executors.newSingleThreadExecutor();
- Source source = Source.fromProto(sourceSpec);
Future<Job> submittedJob;
- if (currentJob.isPresent()) {
- Set<String> existingFeatureSetsPopulatedByJob =
- currentJob.get().getFeatureSets().stream()
- .map(FeatureSet::getId)
- .collect(Collectors.toSet());
- Set<String> newFeatureSetsPopulatedByJob =
- featureSets.stream()
- .map(fs -> FeatureSet.fromProto(fs).getId())
- .collect(Collectors.toSet());
- if (existingFeatureSetsPopulatedByJob.size() == newFeatureSetsPopulatedByJob.size()
- && existingFeatureSetsPopulatedByJob.containsAll(newFeatureSetsPopulatedByJob)) {
- Job job = currentJob.get();
- JobStatus newJobStatus = jobManager.getJobStatus(job);
- if (newJobStatus != job.getStatus()) {
- AuditLogger.log(
- Resource.JOB,
- job.getId(),
- Action.STATUS_CHANGE,
- "Job status updated: changed from %s to %s",
- job.getStatus(),
- newJobStatus);
- }
- job.setStatus(newJobStatus);
- return job;
+
+ if (currentJob.isEmpty()) {
+ submittedJob = executorService.submit(this::createJob);
+ } else {
+ Job job = currentJob.get();
+
+ if (requiresUpdate(job)) {
+ submittedJob = executorService.submit(() -> updateJob(job));
} else {
- submittedJob =
- executorService.submit(() -> updateJob(currentJob.get(), featureSets, store));
+ return updateStatus(job);
}
- } else {
- String jobId = createJobId(source.getId(), store.getName());
- submittedJob = executorService.submit(() -> startJob(jobId, featureSets, sourceSpec, store));
}
- Job job = null;
try {
- job = submittedJob.get(getJobUpdateTimeoutSeconds(), TimeUnit.SECONDS);
+ return submittedJob.get(getJobUpdateTimeoutSeconds(), TimeUnit.SECONDS);
} catch (InterruptedException | ExecutionException | TimeoutException e) {
log.warn("Unable to start job for source {} and sink {}: {}", source, store, e.getMessage());
+ return null;
+ } finally {
executorService.shutdownNow();
}
- return job;
+ }
+
+ boolean requiresUpdate(Job job) {
+ // If set of feature sets has changed
+ if (!Sets.newHashSet(featureSets).equals(Sets.newHashSet(job.getFeatureSets()))) {
+ return true;
+ }
+ // If any of the incoming feature sets were updated
+ for (FeatureSet featureSet : featureSets) {
+ if (featureSet.getStatus() == FeatureSetStatus.STATUS_PENDING) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ private Job createJob() {
+ String jobId = createJobId(source.getId(), store.getName());
+ return startJob(jobId);
}
/** Start or update the job to ingest data to the sink. */
- private Job startJob(
- String jobId,
- List<FeatureSetProto.FeatureSet> featureSetProtos,
- SourceProto.Source source,
- StoreProto.Store sinkSpec) {
-
- List<FeatureSet> featureSets =
- featureSetProtos.stream()
- .map(
- fsp ->
- FeatureSet.fromProto(
- FeatureSetProto.FeatureSet.newBuilder()
- .setSpec(fsp.getSpec())
- .setMeta(fsp.getMeta())
- .build()))
- .collect(Collectors.toList());
+ private Job startJob(String jobId) {
+
Job job =
new Job(
- jobId,
- "",
- jobManager.getRunnerType().name(),
- Source.fromProto(source),
- Store.fromProto(sinkSpec),
- featureSets,
- JobStatus.PENDING);
+ jobId, "", jobManager.getRunnerType(), source, store, featureSets, JobStatus.PENDING);
try {
- AuditLogger.log(
- Resource.JOB,
- jobId,
- Action.SUBMIT,
- "Building graph and submitting to %s",
- jobManager.getRunnerType().toString());
+ logAudit(Action.SUBMIT, job, "Building graph and submitting to %s", runnerName);
job = jobManager.startJob(job);
- if (job.getExtId().isEmpty()) {
+ var extId = job.getExtId();
+ if (extId.isEmpty()) {
throw new RuntimeException(
String.format("Could not submit job: \n%s", "unable to retrieve job external id"));
}
- AuditLogger.log(
- Resource.JOB,
- jobId,
- Action.STATUS_CHANGE,
- "Job submitted to runner %s with ext id %s.",
- jobManager.getRunnerType().toString(),
- job.getExtId());
+ var auditMessage = "Job submitted to runner %s with ext id %s.";
+ logAudit(Action.STATUS_CHANGE, job, auditMessage, runnerName, extId);
return job;
} catch (Exception e) {
log.error(e.getMessage());
- AuditLogger.log(
- Resource.JOB,
- jobId,
- Action.STATUS_CHANGE,
- "Job failed to be submitted to runner %s. Job status changed to ERROR.",
- jobManager.getRunnerType().toString());
+ var auditMessage = "Job failed to be submitted to runner %s. Job status changed to ERROR.";
+ logAudit(Action.STATUS_CHANGE, job, auditMessage, runnerName);
job.setStatus(JobStatus.ERROR);
return job;
@@ -187,33 +151,33 @@ private Job startJob(
}
/** Update the given job */
- private Job updateJob(
- Job job, List<FeatureSetProto.FeatureSet> featureSets, StoreProto.Store store) {
- job.setFeatureSets(
- featureSets.stream()
- .map(
- fs ->
- FeatureSet.fromProto(
- FeatureSetProto.FeatureSet.newBuilder()
- .setSpec(fs.getSpec())
- .setMeta(fs.getMeta())
- .build()))
- .collect(Collectors.toList()));
- job.setStore(feast.core.model.Store.fromProto(store));
- AuditLogger.log(
- Resource.JOB,
- job.getId(),
- Action.UPDATE,
- "Updating job %s for runner %s",
- job.getId(),
- jobManager.getRunnerType().toString());
+ private Job updateJob(Job job) {
+ job.setFeatureSets(featureSets);
+ job.setStore(store);
+ logAudit(Action.UPDATE, job, "Updating job %s for runner %s", job.getId(), runnerName);
return jobManager.updateJob(job);
}
+ private Job updateStatus(Job job) {
+ JobStatus currentStatus = job.getStatus();
+ JobStatus newStatus = jobManager.getJobStatus(job);
+ if (newStatus != currentStatus) {
+ var auditMessage = "Job status updated: changed from %s to %s";
+ logAudit(Action.STATUS_CHANGE, job, auditMessage, currentStatus, newStatus);
+ }
+
+ job.setStatus(newStatus);
+ return job;
+ }
+
String createJobId(String sourceId, String storeName) {
String dateSuffix = String.valueOf(Instant.now().toEpochMilli());
String sourceIdTrunc = sourceId.split("/")[0].toLowerCase();
String jobId = String.format("%s-to-%s", sourceIdTrunc, storeName) + dateSuffix;
return jobId.replaceAll("_", "-");
}
+
+ private void logAudit(Action action, Job job, String detail, Object... args) {
+ AuditLogger.log(Resource.JOB, job.getId(), action, detail, args);
+ }
}
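The refactored call() reduces to a submit-with-timeout pattern; here is a minimal sketch of just that pattern, with a placeholder callable standing in for createJob()/updateJob().

import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public class TimeoutTaskSketch {
  public static void main(String[] args) {
    ExecutorService executorService = Executors.newSingleThreadExecutor();
    // Placeholder for createJob()/updateJob(); returns a fake external job id.
    Future<String> submittedJob = executorService.submit(() -> "external-job-id");
    try {
      System.out.println(submittedJob.get(60, TimeUnit.SECONDS));
    } catch (InterruptedException | ExecutionException | TimeoutException e) {
      // Mirrors JobUpdateTask: a timed-out or failed submission yields no job.
      System.err.println("Job submission failed or timed out: " + e.getMessage());
    } finally {
      // The finally block guarantees executor teardown on every path.
      executorService.shutdownNow();
    }
  }
}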
diff --git a/core/src/main/java/feast/core/job/Runner.java b/core/src/main/java/feast/core/job/Runner.java
index 4e2033fed6..acccb70c8b 100644
--- a/core/src/main/java/feast/core/job/Runner.java
+++ b/core/src/main/java/feast/core/job/Runner.java
@@ -16,33 +16,37 @@
*/
package feast.core.job;
+import java.util.NoSuchElementException;
+
+/**
+ * An Apache Beam Runner, for which Feast Core supports managing ingestion jobs.
+ *
+ * @see <a href="https://beam.apache.org/documentation/runners/capability-matrix/">Beam Runners</a>
+ */
public enum Runner {
DATAFLOW("DataflowRunner"),
FLINK("FlinkRunner"),
DIRECT("DirectRunner");
- private final String name;
+ private final String humanName;
- Runner(String name) {
- this.name = name;
+ Runner(String humanName) {
+ this.humanName = humanName;
}
- /**
- * Get the human readable name of this runner. Returns a human readable name of the runner that
- * can be used for logging/config files/etc.
- */
+ /** Returns the human readable name of this runner, usable in logging, config files, etc. */
@Override
public String toString() {
- return name;
+ return humanName;
}
/** Parses a runner from its human readable name. */
- public static Runner fromString(String runner) {
+ public static Runner fromString(String humanName) {
for (Runner r : Runner.values()) {
- if (r.toString().equals(runner)) {
+ if (r.toString().equals(humanName)) {
return r;
}
}
- throw new IllegalArgumentException("Unknown value: " + runner);
+ throw new NoSuchElementException("Unknown Runner value: " + humanName);
}
}
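A usage sketch for the revised enum, assuming feast.core.job.Runner is on the classpath: parsing goes by human-readable name, and an unknown name now raises NoSuchElementException rather than IllegalArgumentException.

import java.util.NoSuchElementException;

public class RunnerSketch {
  public static void main(String[] args) {
    // Round-trip between the enum constant and its human-readable name.
    Runner runner = Runner.fromString("DataflowRunner");
    System.out.println(runner == Runner.DATAFLOW); // true
    System.out.println(runner);                    // DataflowRunner
    try {
      Runner.fromString("SparkRunner"); // not a runner Feast Core manages
    } catch (NoSuchElementException e) {
      System.out.println(e.getMessage()); // Unknown Runner value: SparkRunner
    }
  }
}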
diff --git a/core/src/main/java/feast/core/job/dataflow/DataflowJobConfig.java b/core/src/main/java/feast/core/job/dataflow/DataflowJobConfig.java
deleted file mode 100644
index a9bbf345d1..0000000000
--- a/core/src/main/java/feast/core/job/dataflow/DataflowJobConfig.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- * Copyright 2018-2019 The Feast Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package feast.core.job.dataflow;
-
-import lombok.Value;
-
-@Value
-public class DataflowJobConfig {
- private String projectId;
- private String location;
-}
diff --git a/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java b/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java
index c2313d75ec..2c3da255f5 100644
--- a/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java
+++ b/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java
@@ -18,30 +18,34 @@
import static feast.core.util.PipelineUtil.detectClassPathResourcesToStage;
+import com.google.api.client.auth.oauth2.Credential;
+import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
+import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
+import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.services.dataflow.Dataflow;
+import com.google.api.services.dataflow.DataflowScopes;
import com.google.common.base.Strings;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.util.JsonFormat;
-import feast.core.FeatureSetProto;
-import feast.core.SourceProto;
-import feast.core.StoreProto;
import feast.core.config.FeastProperties.MetricsProperties;
import feast.core.exception.JobExecutionException;
import feast.core.job.JobManager;
import feast.core.job.Runner;
import feast.core.job.option.FeatureSetJsonByteConverter;
import feast.core.model.*;
-import feast.core.util.TypeConversion;
import feast.ingestion.ImportJob;
import feast.ingestion.options.BZip2Compressor;
import feast.ingestion.options.ImportOptions;
import feast.ingestion.options.OptionCompressor;
+import feast.proto.core.FeatureSetProto;
+import feast.proto.core.RunnerProto.DataflowRunnerConfigOptions;
+import feast.proto.core.SourceProto;
+import feast.proto.core.StoreProto;
import java.io.IOException;
+import java.security.GeneralSecurityException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.beam.runners.dataflow.DataflowPipelineJob;
import org.apache.beam.runners.dataflow.DataflowRunner;
@@ -56,16 +60,48 @@ public class DataflowJobManager implements JobManager {
private final String projectId;
private final String location;
private final Dataflow dataflow;
- private final Map<String, String> defaultOptions;
+ private final DataflowRunnerConfig defaultOptions;
private final MetricsProperties metrics;
public DataflowJobManager(
- Dataflow dataflow, Map<String, String> defaultOptions, MetricsProperties metricsProperties) {
- this.defaultOptions = defaultOptions;
+ DataflowRunnerConfigOptions runnerConfigOptions, MetricsProperties metricsProperties) {
+ this(runnerConfigOptions, metricsProperties, getGoogleCredential());
+ }
+
+ public DataflowJobManager(
+ DataflowRunnerConfigOptions runnerConfigOptions,
+ MetricsProperties metricsProperties,
+ Credential credential) {
+
+ defaultOptions = new DataflowRunnerConfig(runnerConfigOptions);
+ Dataflow dataflow = null;
+ try {
+ dataflow =
+ new Dataflow(
+ GoogleNetHttpTransport.newTrustedTransport(),
+ JacksonFactory.getDefaultInstance(),
+ credential);
+ } catch (GeneralSecurityException e) {
+ throw new IllegalStateException("Security exception while connecting to Dataflow API", e);
+ } catch (IOException e) {
+ throw new IllegalStateException("Unable to initialize DataflowJobManager", e);
+ }
+
this.dataflow = dataflow;
this.metrics = metricsProperties;
- this.projectId = defaultOptions.get("project");
- this.location = defaultOptions.get("region");
+ this.projectId = defaultOptions.getProject();
+ this.location = defaultOptions.getRegion();
+ }
+
+ private static Credential getGoogleCredential() {
+ GoogleCredential credential = null;
+ try {
+ credential = GoogleCredential.getApplicationDefault().createScoped(DataflowScopes.all());
+ } catch (IOException e) {
+ throw new IllegalStateException(
+ "Unable to find credential required for Dataflow monitoring API", e);
+ }
+ return credential;
}
@Override
@@ -80,12 +116,15 @@ public Job startJob(Job job) {
for (FeatureSet featureSet : job.getFeatureSets()) {
featureSetProtos.add(featureSet.toProto());
}
- return submitDataflowJob(
- job.getId(),
- featureSetProtos,
- job.getSource().toProto(),
- job.getStore().toProto(),
- false);
+ String extId =
+ submitDataflowJob(
+ job.getId(),
+ featureSetProtos,
+ job.getSource().toProto(),
+ job.getStore().toProto(),
+ false);
+ job.setExtId(extId);
+ return job;
} catch (InvalidProtocolBufferException e) {
log.error(e.getMessage());
@@ -110,8 +149,18 @@ public Job updateJob(Job job) {
for (FeatureSet featureSet : job.getFeatureSets()) {
featureSetProtos.add(featureSet.toProto());
}
- return submitDataflowJob(
- job.getId(), featureSetProtos, job.getSource().toProto(), job.getStore().toProto(), true);
+
+ String extId =
+ submitDataflowJob(
+ job.getId(),
+ featureSetProtos,
+ job.getSource().toProto(),
+ job.getStore().toProto(),
+ true);
+
+ job.setExtId(extId);
+ job.setStatus(JobStatus.PENDING);
+ return job;
} catch (InvalidProtocolBufferException e) {
log.error(e.getMessage());
throw new IllegalArgumentException(
@@ -153,16 +202,15 @@ public void abortJob(String dataflowJobId) {
}
/**
- * Restart a restart dataflow job. Dataflow should ensure continuity between during the restart,
- * so no data should be lost during the restart operation.
+ * Restart a Dataflow job. Dataflow should ensure continuity such that no data should be lost
+ * during the restart operation.
*
* @param job job to restart
* @return the restarted job
*/
@Override
public Job restartJob(Job job) {
- JobStatus status = job.getStatus();
- if (JobStatus.getTerminalState().contains(status)) {
+ if (job.getStatus().isTerminal()) {
// job yet not running: just start job
return this.startJob(job);
} else {
@@ -180,7 +228,7 @@ public Job restartJob(Job job) {
*/
@Override
public JobStatus getJobStatus(Job job) {
- if (!Runner.DATAFLOW.name().equals(job.getRunner())) {
+ if (job.getRunner() != RUNNER_TYPE) {
return job.getStatus();
}
@@ -197,7 +245,7 @@ public JobStatus getJobStatus(Job job) {
return JobStatus.UNKNOWN;
}
- private Job submitDataflowJob(
+ private String submitDataflowJob(
String jobName,
List<FeatureSetProto.FeatureSet> featureSetProtos,
SourceProto.Source source,
@@ -206,17 +254,8 @@ private Job submitDataflowJob(
try {
ImportOptions pipelineOptions = getPipelineOptions(jobName, featureSetProtos, sink, update);
DataflowPipelineJob pipelineResult = runPipeline(pipelineOptions);
- List<FeatureSet> featureSets =
- featureSetProtos.stream().map(FeatureSet::fromProto).collect(Collectors.toList());
String jobId = waitForJobToRun(pipelineResult);
- return new Job(
- jobName,
- jobId,
- getRunnerType().name(),
- Source.fromProto(source),
- Store.fromProto(sink),
- featureSets,
- JobStatus.PENDING);
+ return jobId;
} catch (Exception e) {
log.error("Error submitting job", e);
throw new JobExecutionException(String.format("Error running ingestion job: %s", e), e);
@@ -228,9 +267,9 @@ private ImportOptions getPipelineOptions(
List<FeatureSetProto.FeatureSet> featureSets,
StoreProto.Store sink,
boolean update)
- throws IOException {
- String[] args = TypeConversion.convertMapToArgs(defaultOptions);
- ImportOptions pipelineOptions = PipelineOptionsFactory.fromArgs(args).as(ImportOptions.class);
+ throws IOException, IllegalAccessException {
+ ImportOptions pipelineOptions =
+ PipelineOptionsFactory.fromArgs(defaultOptions.toArgs()).as(ImportOptions.class);
OptionCompressor<List<FeatureSetProto.FeatureSet>> featureSetJsonCompressor =
new BZip2Compressor<>(new FeatureSetJsonByteConverter());
@@ -238,6 +277,7 @@ private ImportOptions getPipelineOptions(
pipelineOptions.setFeatureSetJson(featureSetJsonCompressor.compress(featureSets));
pipelineOptions.setStoreJson(Collections.singletonList(JsonFormat.printer().print(sink)));
pipelineOptions.setProject(projectId);
+ pipelineOptions.setDefaultFeastProject(Project.DEFAULT_NAME);
pipelineOptions.setUpdate(update);
pipelineOptions.setRunner(DataflowRunner.class);
pipelineOptions.setJobName(jobName);
diff --git a/core/src/main/java/feast/core/job/dataflow/DataflowRunnerConfig.java b/core/src/main/java/feast/core/job/dataflow/DataflowRunnerConfig.java
new file mode 100644
index 0000000000..85628d2cd0
--- /dev/null
+++ b/core/src/main/java/feast/core/job/dataflow/DataflowRunnerConfig.java
@@ -0,0 +1,101 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2019 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.core.job.dataflow;
+
+import feast.core.job.option.RunnerConfig;
+import feast.proto.core.RunnerProto.DataflowRunnerConfigOptions;
+import java.util.*;
+import javax.validation.*;
+import javax.validation.constraints.NotBlank;
+import lombok.Getter;
+import lombok.Setter;
+
+/** DataflowRunnerConfig contains configuration fields for the Dataflow job runner. */
+@Getter
+@Setter
+public class DataflowRunnerConfig extends RunnerConfig {
+
+ public DataflowRunnerConfig(DataflowRunnerConfigOptions runnerConfigOptions) {
+ this.project = runnerConfigOptions.getProject();
+ this.region = runnerConfigOptions.getRegion();
+ this.zone = runnerConfigOptions.getZone();
+ this.serviceAccount = runnerConfigOptions.getServiceAccount();
+ this.network = runnerConfigOptions.getNetwork();
+ this.subnetwork = runnerConfigOptions.getSubnetwork();
+ this.workerMachineType = runnerConfigOptions.getWorkerMachineType();
+ this.autoscalingAlgorithm = runnerConfigOptions.getAutoscalingAlgorithm();
+ this.usePublicIps = runnerConfigOptions.getUsePublicIps();
+ this.tempLocation = runnerConfigOptions.getTempLocation();
+ this.maxNumWorkers = runnerConfigOptions.getMaxNumWorkers();
+ this.deadLetterTableSpec = runnerConfigOptions.getDeadLetterTableSpec();
+ this.labels = runnerConfigOptions.getLabelsMap();
+ validate();
+ }
+
+ /* Project id to use when launching jobs. */
+ @NotBlank String project;
+
+ /* The Google Compute Engine region for creating Dataflow jobs. */
+ @NotBlank String region;
+
+ /* GCP availability zone for operations. */
+ @NotBlank String zone;
+
+ /* Run the job as a specific service account, instead of the default GCE robot. */
+ String serviceAccount;
+
+ /* GCE network for launching workers. */
+ @NotBlank String network;
+
+ /* GCE subnetwork for launching workers. */
+ @NotBlank String subnetwork;
+
+ /* Machine type to create Dataflow worker VMs as. */
+ String workerMachineType;
+
+ /* The autoscaling algorithm to use for the workerpool. */
+ String autoscalingAlgorithm;
+
+ /* Specifies whether worker pools should be started with public IP addresses. */
+ Boolean usePublicIps;
+
+ /**
+ * A pipeline-level default location for storing temporary files. Supports Google Cloud Storage
+ * locations, e.g. gs://bucket/object
+ */
+ @NotBlank String tempLocation;
+
+ /* The maximum number of workers to use for the workerpool. */
+ Integer maxNumWorkers;
+
+ /* BigQuery table specification, e.g. PROJECT_ID:DATASET_ID.TABLE_ID */
+ String deadLetterTableSpec;
+
+ Map<String, String> labels;
+
+ /** Validates Dataflow runner configuration options */
+ public void validate() {
+ ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
+ Validator validator = factory.getValidator();
+
+ Set<ConstraintViolation<DataflowRunnerConfig>> dataflowRunnerConfigViolation =
+ validator.validate(this);
+ if (!dataflowRunnerConfigViolation.isEmpty()) {
+ throw new ConstraintViolationException(dataflowRunnerConfigViolation);
+ }
+ }
+}
diff --git a/core/src/main/java/feast/core/job/direct/DirectRunnerConfig.java b/core/src/main/java/feast/core/job/direct/DirectRunnerConfig.java
new file mode 100644
index 0000000000..ebd327f2f7
--- /dev/null
+++ b/core/src/main/java/feast/core/job/direct/DirectRunnerConfig.java
@@ -0,0 +1,36 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.core.job.direct;
+
+import feast.core.job.option.RunnerConfig;
+import feast.proto.core.RunnerProto.DirectRunnerConfigOptions;
+
+public class DirectRunnerConfig extends RunnerConfig {
+ /**
+ * Controls the amount of target parallelism the DirectRunner will use. Defaults to the greater of
+ * the number of available processors and 3. Must be a value greater than zero.
+ */
+ Integer targetParallelism;
+
+ /* BigQuery table specification, e.g. PROJECT_ID:DATASET_ID.TABLE_ID */
+ String deadletterTableSpec;
+
+ public DirectRunnerConfig(DirectRunnerConfigOptions runnerConfigOptions) {
+ this.deadletterTableSpec = runnerConfigOptions.getDeadLetterTableSpec();
+ this.targetParallelism = runnerConfigOptions.getTargetParallelism();
+ }
+}
diff --git a/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java b/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java
index 9b3a8473e4..715adbdd43 100644
--- a/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java
+++ b/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java
@@ -18,8 +18,6 @@
import com.google.common.base.Strings;
import com.google.protobuf.util.JsonFormat;
-import feast.core.FeatureSetProto;
-import feast.core.StoreProto;
import feast.core.config.FeastProperties.MetricsProperties;
import feast.core.exception.JobExecutionException;
import feast.core.job.JobManager;
@@ -28,16 +26,18 @@
import feast.core.model.FeatureSet;
import feast.core.model.Job;
import feast.core.model.JobStatus;
-import feast.core.util.TypeConversion;
+import feast.core.model.Project;
import feast.ingestion.ImportJob;
import feast.ingestion.options.BZip2Compressor;
import feast.ingestion.options.ImportOptions;
import feast.ingestion.options.OptionCompressor;
+import feast.proto.core.FeatureSetProto;
+import feast.proto.core.RunnerProto.DirectRunnerConfigOptions;
+import feast.proto.core.StoreProto;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.apache.beam.runners.direct.DirectRunner;
import org.apache.beam.sdk.PipelineResult;
@@ -48,15 +48,15 @@ public class DirectRunnerJobManager implements JobManager {
private final Runner RUNNER_TYPE = Runner.DIRECT;
- protected Map<String, String> defaultOptions;
+ private DirectRunnerConfig defaultOptions;
private final DirectJobRegistry jobs;
private MetricsProperties metrics;
public DirectRunnerJobManager(
- Map<String, String> defaultOptions,
+ DirectRunnerConfigOptions directRunnerConfigOptions,
DirectJobRegistry jobs,
MetricsProperties metricsProperties) {
- this.defaultOptions = defaultOptions;
+ this.defaultOptions = new DirectRunnerConfig(directRunnerConfigOptions);
this.jobs = jobs;
this.metrics = metricsProperties;
}
@@ -79,7 +79,7 @@ public Job startJob(Job job) {
featureSetProtos.add(featureSet.toProto());
}
ImportOptions pipelineOptions =
- getPipelineOptions(featureSetProtos, job.getStore().toProto());
+ getPipelineOptions(job.getId(), featureSetProtos, job.getStore().toProto());
PipelineResult pipelineResult = runPipeline(pipelineOptions);
DirectJob directJob = new DirectJob(job.getId(), pipelineResult);
jobs.add(directJob);
@@ -93,16 +93,19 @@ public Job startJob(Job job) {
}
private ImportOptions getPipelineOptions(
- List<FeatureSetProto.FeatureSet> featureSets, StoreProto.Store sink) throws IOException {
- String[] args = TypeConversion.convertMapToArgs(defaultOptions);
- ImportOptions pipelineOptions = PipelineOptionsFactory.fromArgs(args).as(ImportOptions.class);
+ String jobName, List<FeatureSetProto.FeatureSet> featureSets, StoreProto.Store sink)
+ throws IOException, IllegalAccessException {
+ ImportOptions pipelineOptions =
+ PipelineOptionsFactory.fromArgs(defaultOptions.toArgs()).as(ImportOptions.class);
 OptionCompressor<List<FeatureSetProto.FeatureSet>> featureSetJsonCompressor =
new BZip2Compressor<>(new FeatureSetJsonByteConverter());
pipelineOptions.setFeatureSetJson(featureSetJsonCompressor.compress(featureSets));
+ pipelineOptions.setJobName(jobName);
pipelineOptions.setStoreJson(Collections.singletonList(JsonFormat.printer().print(sink)));
pipelineOptions.setRunner(DirectRunner.class);
+ pipelineOptions.setDefaultFeastProject(Project.DEFAULT_NAME);
pipelineOptions.setProject(""); // set to default value to satisfy validation
if (metrics.isEnabled()) {
pipelineOptions.setMetricsExporterType(metrics.getType());
@@ -166,8 +169,7 @@ public PipelineResult runPipeline(ImportOptions pipelineOptions) throws IOExcept
*/
@Override
public Job restartJob(Job job) {
- JobStatus status = job.getStatus();
- if (JobStatus.getTerminalState().contains(status)) {
+ if (job.getStatus().isTerminal()) {
 // job not yet running: just start the job
return this.startJob(job);
} else {
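The options-seeding pattern in `getPipelineOptions` above, in isolation. A sketch assuming only the `ImportOptions` setters that appear in this diff:

```java
import feast.ingestion.options.ImportOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;

class OptionsSeedingSketch {
  // Application defaults arrive as --key=value args; job-specific values are
  // then set directly on the typed options object.
  static ImportOptions seed(String[] defaultArgs, String jobName) {
    ImportOptions opts = PipelineOptionsFactory.fromArgs(defaultArgs).as(ImportOptions.class);
    opts.setJobName(jobName);
    return opts;
  }
}
```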
diff --git a/core/src/main/java/feast/core/job/option/FeatureSetJsonByteConverter.java b/core/src/main/java/feast/core/job/option/FeatureSetJsonByteConverter.java
index dbd04d668f..2f6b37df1b 100644
--- a/core/src/main/java/feast/core/job/option/FeatureSetJsonByteConverter.java
+++ b/core/src/main/java/feast/core/job/option/FeatureSetJsonByteConverter.java
@@ -18,8 +18,8 @@
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.util.JsonFormat;
-import feast.core.FeatureSetProto;
import feast.ingestion.options.OptionByteConverter;
+import feast.proto.core.FeatureSetProto;
import java.util.ArrayList;
import java.util.List;
diff --git a/core/src/main/java/feast/core/job/option/RunnerConfig.java b/core/src/main/java/feast/core/job/option/RunnerConfig.java
new file mode 100644
index 0000000000..4b937074a3
--- /dev/null
+++ b/core/src/main/java/feast/core/job/option/RunnerConfig.java
@@ -0,0 +1,75 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.core.job.option;
+
+import feast.core.util.TypeConversion;
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Value class containing the application-default configuration for a runner. When a job is started
+ * by core, all fields in the object will be converted into --key=value args to seed the beam
+ * pipeline options.
+ */
+public abstract class RunnerConfig {
+
+ /**
+ * Converts the fields in this class to a list of --key=value args to be passed to a {@link
+ * org.apache.beam.sdk.options.PipelineOptionsFactory}.
+ *
+   * <p>Ignores values that are proto-default (e.g. empty string, 0).
+ *
+ * @return Array of string args in the format --key=value.
+   * @throws IllegalAccessException if a field of this config cannot be read via reflection
+ */
+ public String[] toArgs() throws IllegalAccessException {
+    List<String> args = new ArrayList<>();
+ for (Field field : this.getClass().getFields()) {
+ if (field.get(this) == null) {
+ continue;
+ }
+      Class<?> type = field.getType();
+ if (Map.class.equals(type)) {
+ String jsonString =
+            TypeConversion.convertMapToJsonString((Map<String, String>) field.get(this));
+ args.add(String.format("--%s=%s", field.getName(), jsonString));
+ continue;
+ }
+
+ if (String.class.equals(type)) {
+ String val = (String) field.get(this);
+ if (!val.equals("")) {
+ args.add(String.format("--%s=%s", field.getName(), val));
+ }
+ continue;
+ }
+
+ if (Integer.class.equals(type)) {
+ Integer val = (Integer) field.get(this);
+ if (val != 0) {
+ args.add(String.format("--%s=%d", field.getName(), val));
+ }
+ continue;
+ }
+
+ args.add(String.format("--%s=%s", field.getName(), field.get(this)));
+ }
+ return args.toArray(String[]::new);
+ }
+}
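A hypothetical subclass illustrating the reflection contract of `toArgs()`; the field names here are illustrative. Because the loop uses `getClass().getFields()`, only public fields participate:

```java
import java.util.Map;

class ExampleRunnerConfig extends RunnerConfig {
  public Integer workers = 4;                                  // emitted: --workers=4
  public String region = "";                                   // proto-default: skipped
  public Map<String, String> labels = Map.of("team", "feast"); // emitted as JSON
}

// new ExampleRunnerConfig().toArgs() would yield, in field order:
//   --workers=4
//   --labels={"team":"feast"}
```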
diff --git a/core/src/main/java/feast/core/model/Entity.java b/core/src/main/java/feast/core/model/Entity.java
new file mode 100644
index 0000000000..6133d492fc
--- /dev/null
+++ b/core/src/main/java/feast/core/model/Entity.java
@@ -0,0 +1,77 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2019 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.core.model;
+
+import feast.proto.core.FeatureSetProto.EntitySpec;
+import feast.proto.types.ValueProto.ValueType;
+import java.util.Objects;
+import javax.persistence.*;
+import lombok.Getter;
+import lombok.Setter;
+
+/** Feast entity object. Contains name and type of the entity. */
+@Getter
+@Setter
+@javax.persistence.Entity
+@Table(
+ name = "entities",
+ uniqueConstraints = @UniqueConstraint(columnNames = {"name", "feature_set_id"}))
+public class Entity {
+
+ @Id @GeneratedValue private Long id;
+
+ private String name;
+
+ @ManyToOne(fetch = FetchType.LAZY)
+ private FeatureSet featureSet;
+
+  /** Data type of the entity. String representation of {@link ValueType}. */
+ private String type;
+
+ public Entity() {}
+
+ private Entity(String name, ValueType.Enum type) {
+ this.setName(name);
+ this.setType(type.toString());
+ }
+
+ public static Entity fromProto(EntitySpec entitySpec) {
+ Entity entity = new Entity(entitySpec.getName(), entitySpec.getValueType());
+ return entity;
+ }
+
+ public EntitySpec toProto() {
+ return EntitySpec.newBuilder().setName(name).setValueType(ValueType.Enum.valueOf(type)).build();
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ Entity entity = (Entity) o;
+ return getName().equals(entity.getName()) && getType().equals(entity.getType());
+ }
+
+ @Override
+ public int hashCode() {
+    return Objects.hash(getName(), getType());
+ }
+}
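A round-trip sketch for the entity model above; the entity name and type are illustrative:

```java
import feast.core.model.Entity;
import feast.proto.core.FeatureSetProto.EntitySpec;
import feast.proto.types.ValueProto.ValueType;

class EntityRoundTripSketch {
  public static void main(String[] args) {
    EntitySpec spec =
        EntitySpec.newBuilder().setName("driver_id").setValueType(ValueType.Enum.INT64).build();
    // fromProto keeps only name and value type, so the round trip is lossless
    // for a spec that sets nothing else.
    assert Entity.fromProto(spec).toProto().equals(spec);
  }
}
```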
diff --git a/core/src/main/java/feast/core/model/Feature.java b/core/src/main/java/feast/core/model/Feature.java
new file mode 100644
index 0000000000..0b45749379
--- /dev/null
+++ b/core/src/main/java/feast/core/model/Feature.java
@@ -0,0 +1,280 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2019 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.core.model;
+
+import com.google.protobuf.InvalidProtocolBufferException;
+import feast.core.util.TypeConversion;
+import feast.proto.core.FeatureSetProto.FeatureSpec;
+import feast.proto.core.FeatureSetProto.FeatureSpec.Builder;
+import feast.proto.types.ValueProto.ValueType;
+import java.util.Arrays;
+import java.util.Map;
+import java.util.Objects;
+import javax.persistence.*;
+import javax.persistence.Entity;
+import lombok.Getter;
+import lombok.Setter;
+import org.tensorflow.metadata.v0.*;
+
+/**
+ * Feature belonging to a feature set. Contains the feature's name and type, as well as domain
+ * metadata about the feature.
+ */
+@Getter
+@Setter
+@Entity
+@Table(
+ name = "features",
+ uniqueConstraints = @UniqueConstraint(columnNames = {"name", "feature_set_id"}))
+public class Feature {
+
+ @Id @GeneratedValue private Long id;
+
+ private String name;
+
+ @ManyToOne(fetch = FetchType.LAZY)
+ private FeatureSet featureSet;
+
+  /** Data type of the feature. String representation of {@link ValueType}. */
+ private String type;
+
+ // Labels for this feature
+ @Column(name = "labels", columnDefinition = "text")
+ private String labels;
+
+ // Presence constraints (refer to proto feast.core.FeatureSet.FeatureSpec)
+ // Only one of them can be set.
+ private byte[] presence;
+ private byte[] groupPresence;
+
+ // Shape type (refer to proto feast.core.FeatureSet.FeatureSpec)
+ // Only one of them can be set.
+ private byte[] shape;
+ private byte[] valueCount;
+
+ // Domain info for the values (refer to proto feast.core.FeatureSet.FeatureSpec)
+ // Only one of them can be set.
+ private String domain;
+ private byte[] intDomain;
+ private byte[] floatDomain;
+ private byte[] stringDomain;
+ private byte[] boolDomain;
+ private byte[] structDomain;
+ private byte[] naturalLanguageDomain;
+ private byte[] imageDomain;
+ private byte[] midDomain;
+ private byte[] urlDomain;
+ private byte[] timeDomain;
+ private byte[] timeOfDayDomain;
+
+ public Feature() {}
+  // Whether this feature has been archived. An archived feature can no longer
+  // be retrieved or written to.
+ private boolean archived = false;
+
+ private Feature(String name, ValueType.Enum type) {
+ this.setName(name);
+ this.setType(type.toString());
+ }
+
+ public static Feature fromProto(FeatureSpec featureSpec) {
+ Feature feature = new Feature(featureSpec.getName(), featureSpec.getValueType());
+ feature.labels = TypeConversion.convertMapToJsonString(featureSpec.getLabelsMap());
+ feature.updateSchema(featureSpec);
+ return feature;
+ }
+
+ public FeatureSpec toProto() throws InvalidProtocolBufferException {
+ Builder featureSpecBuilder =
+ FeatureSpec.newBuilder().setName(getName()).setValueType(ValueType.Enum.valueOf(getType()));
+
+ if (getPresence() != null) {
+ featureSpecBuilder.setPresence(FeaturePresence.parseFrom(getPresence()));
+ } else if (getGroupPresence() != null) {
+ featureSpecBuilder.setGroupPresence(FeaturePresenceWithinGroup.parseFrom(getGroupPresence()));
+ }
+
+ if (getShape() != null) {
+ featureSpecBuilder.setShape(FixedShape.parseFrom(getShape()));
+ } else if (getValueCount() != null) {
+ featureSpecBuilder.setValueCount(ValueCount.parseFrom(getValueCount()));
+ }
+
+ if (getDomain() != null) {
+ featureSpecBuilder.setDomain(getDomain());
+ } else if (getIntDomain() != null) {
+ featureSpecBuilder.setIntDomain(IntDomain.parseFrom(getIntDomain()));
+ } else if (getFloatDomain() != null) {
+ featureSpecBuilder.setFloatDomain(FloatDomain.parseFrom(getFloatDomain()));
+ } else if (getStringDomain() != null) {
+ featureSpecBuilder.setStringDomain(StringDomain.parseFrom(getStringDomain()));
+ } else if (getBoolDomain() != null) {
+ featureSpecBuilder.setBoolDomain(BoolDomain.parseFrom(getBoolDomain()));
+ } else if (getStructDomain() != null) {
+ featureSpecBuilder.setStructDomain(StructDomain.parseFrom(getStructDomain()));
+ } else if (getNaturalLanguageDomain() != null) {
+ featureSpecBuilder.setNaturalLanguageDomain(
+ NaturalLanguageDomain.parseFrom(getNaturalLanguageDomain()));
+ } else if (getImageDomain() != null) {
+ featureSpecBuilder.setImageDomain(ImageDomain.parseFrom(getImageDomain()));
+ } else if (getMidDomain() != null) {
+ featureSpecBuilder.setMidDomain(MIDDomain.parseFrom(getMidDomain()));
+ } else if (getUrlDomain() != null) {
+ featureSpecBuilder.setUrlDomain(URLDomain.parseFrom(getUrlDomain()));
+ } else if (getTimeDomain() != null) {
+ featureSpecBuilder.setTimeDomain(TimeDomain.parseFrom(getTimeDomain()));
+ } else if (getTimeOfDayDomain() != null) {
+ featureSpecBuilder.setTimeOfDayDomain(TimeOfDayDomain.parseFrom(getTimeOfDayDomain()));
+ }
+
+ if (getLabels() != null) {
+ featureSpecBuilder.putAllLabels(getLabels());
+ }
+ return featureSpecBuilder.build();
+ }
+
+ private void updateSchema(FeatureSpec featureSpec) {
+ switch (featureSpec.getPresenceConstraintsCase()) {
+ case PRESENCE:
+ setPresence(featureSpec.getPresence().toByteArray());
+ break;
+ case GROUP_PRESENCE:
+ setGroupPresence(featureSpec.getGroupPresence().toByteArray());
+ break;
+ case PRESENCECONSTRAINTS_NOT_SET:
+ break;
+ }
+
+ switch (featureSpec.getShapeTypeCase()) {
+ case SHAPE:
+ setShape(featureSpec.getShape().toByteArray());
+ break;
+ case VALUE_COUNT:
+ setValueCount(featureSpec.getValueCount().toByteArray());
+ break;
+ case SHAPETYPE_NOT_SET:
+ break;
+ }
+
+ switch (featureSpec.getDomainInfoCase()) {
+ case DOMAIN:
+ setDomain(featureSpec.getDomain());
+ break;
+ case INT_DOMAIN:
+ setIntDomain(featureSpec.getIntDomain().toByteArray());
+ break;
+ case FLOAT_DOMAIN:
+ setFloatDomain(featureSpec.getFloatDomain().toByteArray());
+ break;
+ case STRING_DOMAIN:
+ setStringDomain(featureSpec.getStringDomain().toByteArray());
+ break;
+ case BOOL_DOMAIN:
+ setBoolDomain(featureSpec.getBoolDomain().toByteArray());
+ break;
+ case STRUCT_DOMAIN:
+ setStructDomain(featureSpec.getStructDomain().toByteArray());
+ break;
+ case NATURAL_LANGUAGE_DOMAIN:
+ setNaturalLanguageDomain(featureSpec.getNaturalLanguageDomain().toByteArray());
+ break;
+ case IMAGE_DOMAIN:
+ setImageDomain(featureSpec.getImageDomain().toByteArray());
+ break;
+ case MID_DOMAIN:
+ setMidDomain(featureSpec.getMidDomain().toByteArray());
+ break;
+ case URL_DOMAIN:
+ setUrlDomain(featureSpec.getUrlDomain().toByteArray());
+ break;
+ case TIME_DOMAIN:
+ setTimeDomain(featureSpec.getTimeDomain().toByteArray());
+ break;
+ case TIME_OF_DAY_DOMAIN:
+ setTimeOfDayDomain(featureSpec.getTimeOfDayDomain().toByteArray());
+ break;
+ case DOMAININFO_NOT_SET:
+ break;
+ }
+ }
+
+ /** Archive this feature. */
+ public void archive() {
+ this.archived = true;
+ }
+
+ /**
+ * Update the feature object with a valid feature spec. Only schema changes are allowed.
+ *
+ * @param featureSpec {@link FeatureSpec} containing schema changes.
+ */
+ public void updateFromProto(FeatureSpec featureSpec) {
+ if (isArchived()) {
+ throw new IllegalArgumentException(
+ String.format(
+ "You are attempting to create a feature %s that was previously archived. This isn't allowed. Please create a new feature with a different name.",
+ featureSpec.getName()));
+ }
+ if (ValueType.Enum.valueOf(type) != featureSpec.getValueType()) {
+ throw new IllegalArgumentException(
+ String.format(
+ "You are attempting to change the type of feature %s from %s to %s. This isn't allowed. Please create a new feature.",
+ featureSpec.getName(), type, featureSpec.getValueType()));
+ }
+ updateSchema(featureSpec);
+ }
+
+  public Map<String, String> getLabels() {
+ return TypeConversion.convertJsonStringToMap(this.labels);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ Feature feature = (Feature) o;
+ return getName().equals(feature.getName())
+ && getType().equals(feature.getType())
+ && isArchived() == (feature.isArchived())
+ && Objects.equals(getLabels(), feature.getLabels())
+ && Arrays.equals(getPresence(), feature.getPresence())
+ && Arrays.equals(getGroupPresence(), feature.getGroupPresence())
+ && Arrays.equals(getShape(), feature.getShape())
+ && Arrays.equals(getValueCount(), feature.getValueCount())
+ && Objects.equals(getDomain(), feature.getDomain())
+ && Arrays.equals(getIntDomain(), feature.getIntDomain())
+ && Arrays.equals(getFloatDomain(), feature.getFloatDomain())
+ && Arrays.equals(getStringDomain(), feature.getStringDomain())
+ && Arrays.equals(getBoolDomain(), feature.getBoolDomain())
+ && Arrays.equals(getStructDomain(), feature.getStructDomain())
+ && Arrays.equals(getNaturalLanguageDomain(), feature.getNaturalLanguageDomain())
+ && Arrays.equals(getImageDomain(), feature.getImageDomain())
+ && Arrays.equals(getMidDomain(), feature.getMidDomain())
+ && Arrays.equals(getUrlDomain(), feature.getUrlDomain())
+ && Arrays.equals(getTimeDomain(), feature.getTimeDomain())
+        && Arrays.equals(getTimeOfDayDomain(), feature.getTimeOfDayDomain());
+ }
+
+ @Override
+ public int hashCode() {
+    return Objects.hash(getName(), getType(), getLabels());
+ }
+}
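A sketch of the update rules enforced above (the feature name is illustrative): schema fields may change, but a value-type change is rejected.

```java
import feast.core.model.Feature;
import feast.proto.core.FeatureSetProto.FeatureSpec;
import feast.proto.types.ValueProto.ValueType;

class FeatureUpdateSketch {
  public static void main(String[] args) {
    FeatureSpec original =
        FeatureSpec.newBuilder().setName("trips_today").setValueType(ValueType.Enum.INT32).build();
    Feature feature = Feature.fromProto(original);

    FeatureSpec typeChanged = original.toBuilder().setValueType(ValueType.Enum.FLOAT).build();
    feature.updateFromProto(typeChanged); // throws IllegalArgumentException
  }
}
```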
diff --git a/core/src/main/java/feast/core/model/FeatureSet.java b/core/src/main/java/feast/core/model/FeatureSet.java
index 232a5f67d1..f7b2dc7cd4 100644
--- a/core/src/main/java/feast/core/model/FeatureSet.java
+++ b/core/src/main/java/feast/core/model/FeatureSet.java
@@ -16,73 +16,35 @@
*/
package feast.core.model;
+import com.google.common.collect.Sets;
import com.google.protobuf.Duration;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Timestamp;
-import feast.core.FeatureSetProto;
-import feast.core.FeatureSetProto.EntitySpec;
-import feast.core.FeatureSetProto.FeatureSetMeta;
-import feast.core.FeatureSetProto.FeatureSetSpec;
-import feast.core.FeatureSetProto.FeatureSetStatus;
-import feast.core.FeatureSetProto.FeatureSpec;
-import feast.types.ValueProto.ValueType.Enum;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import javax.persistence.CascadeType;
-import javax.persistence.CollectionTable;
-import javax.persistence.Column;
-import javax.persistence.ElementCollection;
-import javax.persistence.Entity;
-import javax.persistence.FetchType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
-import javax.persistence.UniqueConstraint;
+import feast.core.util.TypeConversion;
+import feast.proto.core.FeatureSetProto;
+import feast.proto.core.FeatureSetProto.*;
+import java.util.*;
+import java.util.stream.Collectors;
+import javax.persistence.*;
import lombok.Getter;
import lombok.Setter;
import org.apache.commons.lang3.builder.HashCodeBuilder;
-import org.hibernate.annotations.Fetch;
-import org.hibernate.annotations.FetchMode;
-import org.tensorflow.metadata.v0.BoolDomain;
-import org.tensorflow.metadata.v0.FeaturePresence;
-import org.tensorflow.metadata.v0.FeaturePresenceWithinGroup;
-import org.tensorflow.metadata.v0.FixedShape;
-import org.tensorflow.metadata.v0.FloatDomain;
-import org.tensorflow.metadata.v0.ImageDomain;
-import org.tensorflow.metadata.v0.IntDomain;
-import org.tensorflow.metadata.v0.MIDDomain;
-import org.tensorflow.metadata.v0.NaturalLanguageDomain;
-import org.tensorflow.metadata.v0.StringDomain;
-import org.tensorflow.metadata.v0.StructDomain;
-import org.tensorflow.metadata.v0.TimeDomain;
-import org.tensorflow.metadata.v0.TimeOfDayDomain;
-import org.tensorflow.metadata.v0.URLDomain;
-import org.tensorflow.metadata.v0.ValueCount;
+import org.tensorflow.metadata.v0.*;
@Getter
@Setter
-@Entity
-@Table(name = "feature_sets")
-public class FeatureSet extends AbstractTimestampEntity implements Comparable<FeatureSet> {
+@javax.persistence.Entity
+@Table(
+ name = "feature_sets",
+ uniqueConstraints = @UniqueConstraint(columnNames = {"name", "project_name"}))
+public class FeatureSet extends AbstractTimestampEntity {
- // Id of the featureSet, defined as project/feature_set_name:feature_set_version
- @Id
- @Column(name = "id", nullable = false, unique = true)
- private String id;
+ @Id @GeneratedValue private long id;
// Name of the featureSet
@Column(name = "name", nullable = false)
private String name;
- // Version of the featureSet
- @Column(name = "version")
- private int version;
-
// Project that this featureSet belongs to
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "project_name")
@@ -93,19 +55,20 @@ public class FeatureSet extends AbstractTimestampEntity implements Comparable<FeatureSet> {
-  private Set<Field> entities;
+ @OneToMany(
+ mappedBy = "featureSet",
+ cascade = CascadeType.ALL,
+ fetch = FetchType.EAGER,
+ orphanRemoval = true)
+  private Set<Entity> entities;
// Feature fields inside this feature set
- @ElementCollection(fetch = FetchType.EAGER)
- @CollectionTable(
- name = "features",
- joinColumns = @JoinColumn(name = "feature_set_id"),
- uniqueConstraints = @UniqueConstraint(columnNames = {"name", "project", "version"}))
- @Fetch(FetchMode.SUBSELECT)
-  private Set<Field> features;
+ @OneToMany(
+ mappedBy = "featureSet",
+ cascade = CascadeType.ALL,
+ fetch = FetchType.EAGER,
+ orphanRemoval = true)
+  private Set<Feature> features;
// Source on which feature rows can be found
@ManyToOne(cascade = CascadeType.ALL, fetch = FetchType.EAGER)
@@ -113,8 +76,13 @@ public class FeatureSet extends AbstractTimestampEntity implements Comparable<FeatureSet> {
-      List<Field> entities,
-      List<Field> features,
+      List<Entity> entities,
+      List<Feature> features,
Source source,
+ Map labels,
FeatureSetStatus status) {
this.maxAgeSeconds = maxAgeSeconds;
this.source = source;
- this.status = status.toString();
+ this.status = status;
this.entities = new HashSet<>();
this.features = new HashSet<>();
this.name = name;
this.project = new Project(project);
- this.version = version;
- this.setId(project, name, version);
+ this.labels = TypeConversion.convertMapToJsonString(labels);
addEntities(entities);
addFeatures(features);
}
- private void setId(String project, String name, int version) {
- this.id = project + "/" + name + ":" + version;
- }
-
- public void setVersion(int version) {
- this.version = version;
- this.setId(getProjectName(), getName(), version);
- }
-
public void setName(String name) {
this.name = name;
- this.setId(getProjectName(), name, getVersion());
}
private String getProjectName() {
@@ -166,202 +123,168 @@ private String getProjectName() {
public void setProject(Project project) {
this.project = project;
- this.setId(project.getName(), getName(), getVersion());
}
public static FeatureSet fromProto(FeatureSetProto.FeatureSet featureSetProto) {
FeatureSetSpec featureSetSpec = featureSetProto.getSpec();
Source source = Source.fromProto(featureSetSpec.getSource());
-    List<Field> featureSpecs = new ArrayList<>();
+    List<Feature> featureSpecs = new ArrayList<>();
for (FeatureSpec featureSpec : featureSetSpec.getFeaturesList()) {
- featureSpecs.add(new Field(featureSpec));
+ featureSpecs.add(Feature.fromProto(featureSpec));
}
-    List<Field> entitySpecs = new ArrayList<>();
+    List<Entity> entitySpecs = new ArrayList<>();
for (EntitySpec entitySpec : featureSetSpec.getEntitiesList()) {
- entitySpecs.add(new Field(entitySpec));
+ entitySpecs.add(Entity.fromProto(entitySpec));
}
return new FeatureSet(
featureSetProto.getSpec().getName(),
featureSetProto.getSpec().getProject(),
- featureSetProto.getSpec().getVersion(),
featureSetSpec.getMaxAge().getSeconds(),
entitySpecs,
featureSpecs,
source,
+ featureSetProto.getSpec().getLabelsMap(),
featureSetProto.getMeta().getStatus());
}
-  public void addEntities(List<Field> fields) {
- for (Field field : fields) {
- addEntity(field);
+ // Updates the existing feature set from a proto.
+ public void updateFromProto(FeatureSetProto.FeatureSet featureSetProto)
+ throws InvalidProtocolBufferException {
+ FeatureSetSpec spec = featureSetProto.getSpec();
+ if (this.toProto().getSpec().equals(spec)) {
+ return;
+ }
+
+ // 1. validate
+ // 1a. check no change to identifiers
+ if (!name.equals(spec.getName())) {
+ throw new IllegalArgumentException(
+ String.format("Given feature set name %s does not match name %s.", spec.getName(), name));
+ }
+ if (!project.getName().equals(spec.getProject())) {
+ throw new IllegalArgumentException(
+ String.format(
+ "You are attempting to change the project of feature set %s from %s to %s. This isn't allowed. Please create a new feature set under the desired project.",
+ spec.getName(), project, spec.getProject()));
+ }
+
+    Set<EntitySpec> existingEntities =
+ entities.stream().map(Entity::toProto).collect(Collectors.toSet());
+
+ // 1b. check no change to entities
+ if (!Sets.newHashSet(spec.getEntitiesList()).equals(existingEntities)) {
+ throw new IllegalArgumentException(
+ String.format(
+ "You are attempting to change the entities of this feature set: Given set of entities \n{%s}\n does not match existing set of entities\n {%s}. This isn't allowed. Please create a new feature set. ",
+ spec.getEntitiesList(), existingEntities));
+ }
+
+    // 2. Update max age and source.
+ maxAgeSeconds = spec.getMaxAge().getSeconds();
+ source = Source.fromProto(spec.getSource());
+
+    Map<String, FeatureSpec> updatedFeatures =
+ spec.getFeaturesList().stream().collect(Collectors.toMap(FeatureSpec::getName, fs -> fs));
+
+ // 3. Tombstone features that are gone, update features that have changed
+ for (Feature existingFeature : features) {
+ String existingFeatureName = existingFeature.getName();
+ FeatureSpec updatedFeatureSpec = updatedFeatures.get(existingFeatureName);
+ if (updatedFeatureSpec == null) {
+ existingFeature.archive();
+ } else {
+ existingFeature.updateFromProto(updatedFeatureSpec);
+ updatedFeatures.remove(existingFeatureName);
+ }
+ }
+
+ // 4. Add new features
+ for (FeatureSpec featureSpec : updatedFeatures.values()) {
+ Feature newFeature = Feature.fromProto(featureSpec);
+ addFeature(newFeature);
}
}
- public void addEntity(Field field) {
- field.setProject(this.project.getName());
- field.setVersion(this.getVersion());
- entities.add(field);
+  public void addEntities(List<Entity> entities) {
+ for (Entity entity : entities) {
+ addEntity(entity);
+ }
+ }
+
+ public void addEntity(Entity entity) {
+ entity.setFeatureSet(this);
+ entities.add(entity);
}
-  public void addFeatures(List<Field> fields) {
- for (Field field : fields) {
- addFeature(field);
+  public void addFeatures(List<Feature> features) {
+ for (Feature feature : features) {
+ addFeature(feature);
}
}
- public void addFeature(Field field) {
- field.setProject(this.project.getName());
- field.setVersion(this.getVersion());
- features.add(field);
+ public void addFeature(Feature feature) {
+ feature.setFeatureSet(this);
+ features.add(feature);
}
public FeatureSetProto.FeatureSet toProto() throws InvalidProtocolBufferException {
    List<EntitySpec> entitySpecs = new ArrayList<>();
- for (Field entityField : entities) {
- EntitySpec.Builder entitySpecBuilder = EntitySpec.newBuilder();
- setEntitySpecFields(entitySpecBuilder, entityField);
- entitySpecs.add(entitySpecBuilder.build());
+ for (Entity entityField : entities) {
+ entitySpecs.add(entityField.toProto());
}
    List<FeatureSpec> featureSpecs = new ArrayList<>();
- for (Field featureField : features) {
- FeatureSpec.Builder featureSpecBuilder = FeatureSpec.newBuilder();
- setFeatureSpecFields(featureSpecBuilder, featureField);
- featureSpecs.add(featureSpecBuilder.build());
+ for (Feature featureField : features) {
+ if (!featureField.isArchived()) {
+ featureSpecs.add(featureField.toProto());
+ }
}
FeatureSetMeta.Builder meta =
FeatureSetMeta.newBuilder()
.setCreatedTimestamp(
Timestamp.newBuilder().setSeconds(super.getCreated().getTime() / 1000L))
- .setStatus(FeatureSetStatus.valueOf(status));
+ .setStatus(status);
FeatureSetSpec.Builder spec =
FeatureSetSpec.newBuilder()
.setName(getName())
- .setVersion(getVersion())
.setProject(project.getName())
.setMaxAge(Duration.newBuilder().setSeconds(maxAgeSeconds))
.addAllEntities(entitySpecs)
.addAllFeatures(featureSpecs)
+ .putAllLabels(TypeConversion.convertJsonStringToMap(labels))
.setSource(source.toProto());
return FeatureSetProto.FeatureSet.newBuilder().setMeta(meta).setSpec(spec).build();
}
- // setEntitySpecFields and setFeatureSpecFields methods contain duplicated code because
- // Feast internally treat EntitySpec and FeatureSpec as Field class. However, the proto message
- // builder for EntitySpec and FeatureSpec are of different class.
- @SuppressWarnings("DuplicatedCode")
- private void setEntitySpecFields(EntitySpec.Builder entitySpecBuilder, Field entityField)
- throws InvalidProtocolBufferException {
- entitySpecBuilder
- .setName(entityField.getName())
- .setValueType(Enum.valueOf(entityField.getType()));
-
- if (entityField.getPresence() != null) {
- entitySpecBuilder.setPresence(FeaturePresence.parseFrom(entityField.getPresence()));
- } else if (entityField.getGroupPresence() != null) {
- entitySpecBuilder.setGroupPresence(
- FeaturePresenceWithinGroup.parseFrom(entityField.getGroupPresence()));
- }
-
- if (entityField.getShape() != null) {
- entitySpecBuilder.setShape(FixedShape.parseFrom(entityField.getShape()));
- } else if (entityField.getValueCount() != null) {
- entitySpecBuilder.setValueCount(ValueCount.parseFrom(entityField.getValueCount()));
- }
-
- if (entityField.getDomain() != null) {
- entitySpecBuilder.setDomain(entityField.getDomain());
- } else if (entityField.getIntDomain() != null) {
- entitySpecBuilder.setIntDomain(IntDomain.parseFrom(entityField.getIntDomain()));
- } else if (entityField.getFloatDomain() != null) {
- entitySpecBuilder.setFloatDomain(FloatDomain.parseFrom(entityField.getFloatDomain()));
- } else if (entityField.getStringDomain() != null) {
- entitySpecBuilder.setStringDomain(StringDomain.parseFrom(entityField.getStringDomain()));
- } else if (entityField.getBoolDomain() != null) {
- entitySpecBuilder.setBoolDomain(BoolDomain.parseFrom(entityField.getBoolDomain()));
- } else if (entityField.getStructDomain() != null) {
- entitySpecBuilder.setStructDomain(StructDomain.parseFrom(entityField.getStructDomain()));
- } else if (entityField.getNaturalLanguageDomain() != null) {
- entitySpecBuilder.setNaturalLanguageDomain(
- NaturalLanguageDomain.parseFrom(entityField.getNaturalLanguageDomain()));
- } else if (entityField.getImageDomain() != null) {
- entitySpecBuilder.setImageDomain(ImageDomain.parseFrom(entityField.getImageDomain()));
- } else if (entityField.getMidDomain() != null) {
- entitySpecBuilder.setIntDomain(IntDomain.parseFrom(entityField.getIntDomain()));
- } else if (entityField.getUrlDomain() != null) {
- entitySpecBuilder.setUrlDomain(URLDomain.parseFrom(entityField.getUrlDomain()));
- } else if (entityField.getTimeDomain() != null) {
- entitySpecBuilder.setTimeDomain(TimeDomain.parseFrom(entityField.getTimeDomain()));
- } else if (entityField.getTimeOfDayDomain() != null) {
- entitySpecBuilder.setTimeOfDayDomain(
- TimeOfDayDomain.parseFrom(entityField.getTimeOfDayDomain()));
- }
+ @Override
+ public int hashCode() {
+ HashCodeBuilder hcb = new HashCodeBuilder();
+ hcb.append(project.getName());
+ hcb.append(getName());
+ return hcb.toHashCode();
}
- // Refer to setEntitySpecFields method for the reason for code duplication.
- @SuppressWarnings("DuplicatedCode")
- private void setFeatureSpecFields(FeatureSpec.Builder featureSpecBuilder, Field featureField)
- throws InvalidProtocolBufferException {
- featureSpecBuilder
- .setName(featureField.getName())
- .setValueType(Enum.valueOf(featureField.getType()));
-
- if (featureField.getPresence() != null) {
- featureSpecBuilder.setPresence(FeaturePresence.parseFrom(featureField.getPresence()));
- } else if (featureField.getGroupPresence() != null) {
- featureSpecBuilder.setGroupPresence(
- FeaturePresenceWithinGroup.parseFrom(featureField.getGroupPresence()));
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
}
-
- if (featureField.getShape() != null) {
- featureSpecBuilder.setShape(FixedShape.parseFrom(featureField.getShape()));
- } else if (featureField.getValueCount() != null) {
- featureSpecBuilder.setValueCount(ValueCount.parseFrom(featureField.getValueCount()));
+ if (!(obj instanceof FeatureSet)) {
+ return false;
}
- if (featureField.getDomain() != null) {
- featureSpecBuilder.setDomain(featureField.getDomain());
- } else if (featureField.getIntDomain() != null) {
- featureSpecBuilder.setIntDomain(IntDomain.parseFrom(featureField.getIntDomain()));
- } else if (featureField.getFloatDomain() != null) {
- featureSpecBuilder.setFloatDomain(FloatDomain.parseFrom(featureField.getFloatDomain()));
- } else if (featureField.getStringDomain() != null) {
- featureSpecBuilder.setStringDomain(StringDomain.parseFrom(featureField.getStringDomain()));
- } else if (featureField.getBoolDomain() != null) {
- featureSpecBuilder.setBoolDomain(BoolDomain.parseFrom(featureField.getBoolDomain()));
- } else if (featureField.getStructDomain() != null) {
- featureSpecBuilder.setStructDomain(StructDomain.parseFrom(featureField.getStructDomain()));
- } else if (featureField.getNaturalLanguageDomain() != null) {
- featureSpecBuilder.setNaturalLanguageDomain(
- NaturalLanguageDomain.parseFrom(featureField.getNaturalLanguageDomain()));
- } else if (featureField.getImageDomain() != null) {
- featureSpecBuilder.setImageDomain(ImageDomain.parseFrom(featureField.getImageDomain()));
- } else if (featureField.getMidDomain() != null) {
- featureSpecBuilder.setMidDomain(MIDDomain.parseFrom(featureField.getMidDomain()));
- } else if (featureField.getUrlDomain() != null) {
- featureSpecBuilder.setUrlDomain(URLDomain.parseFrom(featureField.getUrlDomain()));
- } else if (featureField.getTimeDomain() != null) {
- featureSpecBuilder.setTimeDomain(TimeDomain.parseFrom(featureField.getTimeDomain()));
- } else if (featureField.getTimeOfDayDomain() != null) {
- featureSpecBuilder.setTimeOfDayDomain(
- TimeOfDayDomain.parseFrom(featureField.getTimeOfDayDomain()));
+ FeatureSet other = (FeatureSet) obj;
+ if (!getName().equals(other.getName())) {
+ return false;
}
- }
- /**
- * Checks if the given featureSet's schema and source has is different from this one.
- *
- * @param other FeatureSet to compare to
- * @return boolean denoting if the source or schema have changed.
- */
- public boolean equalTo(FeatureSet other) {
- if (!getName().equals(other.getName())) {
+ if (!getLabels().equals(other.getLabels())) {
return false;
}
@@ -378,65 +301,44 @@ public boolean equalTo(FeatureSet other) {
}
// Create a map of all fields in this feature set
-    Map<String, Field> fields = new HashMap<>();
+    Map<String, Entity> entitiesMap = new HashMap<>();
+    Map<String, Feature> featuresMap = new HashMap<>();
- for (Field e : entities) {
- fields.putIfAbsent(e.getName(), e);
+ for (Entity e : entities) {
+ entitiesMap.putIfAbsent(e.getName(), e);
}
- for (Field f : features) {
- fields.putIfAbsent(f.getName(), f);
+ for (Feature f : features) {
+ featuresMap.putIfAbsent(f.getName(), f);
}
// Ensure map size is consistent with existing fields
- if (fields.size() != other.getFeatures().size() + other.getEntities().size()) {
+ if (entitiesMap.size() != other.getEntities().size()) {
+ return false;
+ }
+ if (featuresMap.size() != other.getFeatures().size()) {
return false;
}
// Ensure the other entities and features exist in the field map
- for (Field e : other.getEntities()) {
- if (!fields.containsKey(e.getName())) {
+ for (Entity e : other.getEntities()) {
+ if (!entitiesMap.containsKey(e.getName())) {
return false;
}
- if (!e.equals(fields.get(e.getName()))) {
+ if (!e.equals(entitiesMap.get(e.getName()))) {
return false;
}
}
- for (Field f : other.getFeatures()) {
- if (!fields.containsKey(f.getName())) {
+ for (Feature f : other.getFeatures()) {
+ if (!featuresMap.containsKey(f.getName())) {
return false;
}
- if (!f.equals(fields.get(f.getName()))) {
+ if (!f.equals(featuresMap.get(f.getName()))) {
return false;
}
}
return true;
}
-
- @Override
- public int hashCode() {
- HashCodeBuilder hcb = new HashCodeBuilder();
- hcb.append(project.getName());
- hcb.append(getName());
- hcb.append(getVersion());
- return hcb.toHashCode();
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj) {
- return true;
- }
- if (!(obj instanceof FeatureSet)) {
- return false;
- }
- return this.equalTo(((FeatureSet) obj));
- }
-
- @Override
- public int compareTo(FeatureSet o) {
- return Integer.compare(getVersion(), o.getVersion());
- }
}
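The new `updateFromProto` semantics, as a sketch. It assumes an existing model instance `featureSet` whose current proto spec is `spec`; both names are placeholders:

```java
import com.google.protobuf.Duration;
import feast.proto.core.FeatureSetProto;
import feast.proto.core.FeatureSetProto.FeatureSetSpec;

// Name, project and entities are immutable. Features dropped from the spec are
// archived, new ones are added, and max age/source are updated in place.
FeatureSetSpec updatedSpec =
    spec.toBuilder().setMaxAge(Duration.newBuilder().setSeconds(3600)).build();
featureSet.updateFromProto(FeatureSetProto.FeatureSet.newBuilder().setSpec(updatedSpec).build());
```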
diff --git a/core/src/main/java/feast/core/model/Field.java b/core/src/main/java/feast/core/model/Field.java
deleted file mode 100644
index cb23e4eceb..0000000000
--- a/core/src/main/java/feast/core/model/Field.java
+++ /dev/null
@@ -1,252 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- * Copyright 2018-2019 The Feast Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package feast.core.model;
-
-import feast.core.FeatureSetProto.EntitySpec;
-import feast.core.FeatureSetProto.FeatureSpec;
-import feast.types.ValueProto.ValueType;
-import java.util.Arrays;
-import java.util.Objects;
-import javax.persistence.Column;
-import javax.persistence.Embeddable;
-import lombok.Getter;
-import lombok.Setter;
-
-@Getter
-@Setter
-@Embeddable
-public class Field {
-
- // Name of the feature
- @Column(name = "name", nullable = false)
- private String name;
-
- // Type of the feature, should correspond with feast.types.ValueType
- @Column(name = "type", nullable = false)
- private String type;
-
- // Version of the field
- @Column(name = "version")
- private int version;
-
- // Project that this field belongs to
- @Column(name = "project")
- private String project;
-
- // Presence constraints (refer to proto feast.core.FeatureSet.FeatureSpec)
- // Only one of them can be set.
- private byte[] presence;
- private byte[] groupPresence;
-
- // Shape type (refer to proto feast.core.FeatureSet.FeatureSpec)
- // Only one of them can be set.
- private byte[] shape;
- private byte[] valueCount;
-
- // Domain info for the values (refer to proto feast.core.FeatureSet.FeatureSpec)
- // Only one of them can be set.
- private String domain;
- private byte[] intDomain;
- private byte[] floatDomain;
- private byte[] stringDomain;
- private byte[] boolDomain;
- private byte[] structDomain;
- private byte[] naturalLanguageDomain;
- private byte[] imageDomain;
- private byte[] midDomain;
- private byte[] urlDomain;
- private byte[] timeDomain;
- private byte[] timeOfDayDomain;
-
- public Field() {}
-
- public Field(String name, ValueType.Enum type) {
- this.name = name;
- this.type = type.toString();
- }
-
- public Field(FeatureSpec featureSpec) {
- this.name = featureSpec.getName();
- this.type = featureSpec.getValueType().toString();
-
- switch (featureSpec.getPresenceConstraintsCase()) {
- case PRESENCE:
- this.presence = featureSpec.getPresence().toByteArray();
- break;
- case GROUP_PRESENCE:
- this.groupPresence = featureSpec.getGroupPresence().toByteArray();
- break;
- case PRESENCECONSTRAINTS_NOT_SET:
- break;
- }
-
- switch (featureSpec.getShapeTypeCase()) {
- case SHAPE:
- this.shape = featureSpec.getShape().toByteArray();
- break;
- case VALUE_COUNT:
- this.valueCount = featureSpec.getValueCount().toByteArray();
- break;
- case SHAPETYPE_NOT_SET:
- break;
- }
-
- switch (featureSpec.getDomainInfoCase()) {
- case DOMAIN:
- this.domain = featureSpec.getDomain();
- break;
- case INT_DOMAIN:
- this.intDomain = featureSpec.getIntDomain().toByteArray();
- break;
- case FLOAT_DOMAIN:
- this.floatDomain = featureSpec.getFloatDomain().toByteArray();
- break;
- case STRING_DOMAIN:
- this.stringDomain = featureSpec.getStringDomain().toByteArray();
- break;
- case BOOL_DOMAIN:
- this.boolDomain = featureSpec.getBoolDomain().toByteArray();
- break;
- case STRUCT_DOMAIN:
- this.structDomain = featureSpec.getStructDomain().toByteArray();
- break;
- case NATURAL_LANGUAGE_DOMAIN:
- this.naturalLanguageDomain = featureSpec.getNaturalLanguageDomain().toByteArray();
- break;
- case IMAGE_DOMAIN:
- this.imageDomain = featureSpec.getImageDomain().toByteArray();
- break;
- case MID_DOMAIN:
- this.midDomain = featureSpec.getMidDomain().toByteArray();
- break;
- case URL_DOMAIN:
- this.urlDomain = featureSpec.getUrlDomain().toByteArray();
- break;
- case TIME_DOMAIN:
- this.timeDomain = featureSpec.getTimeDomain().toByteArray();
- break;
- case TIME_OF_DAY_DOMAIN:
- this.timeOfDayDomain = featureSpec.getTimeOfDayDomain().toByteArray();
- break;
- case DOMAININFO_NOT_SET:
- break;
- }
- }
-
- public Field(EntitySpec entitySpec) {
- this.name = entitySpec.getName();
- this.type = entitySpec.getValueType().toString();
-
- switch (entitySpec.getPresenceConstraintsCase()) {
- case PRESENCE:
- this.presence = entitySpec.getPresence().toByteArray();
- break;
- case GROUP_PRESENCE:
- this.groupPresence = entitySpec.getGroupPresence().toByteArray();
- break;
- case PRESENCECONSTRAINTS_NOT_SET:
- break;
- }
-
- switch (entitySpec.getShapeTypeCase()) {
- case SHAPE:
- this.shape = entitySpec.getShape().toByteArray();
- break;
- case VALUE_COUNT:
- this.valueCount = entitySpec.getValueCount().toByteArray();
- break;
- case SHAPETYPE_NOT_SET:
- break;
- }
-
- switch (entitySpec.getDomainInfoCase()) {
- case DOMAIN:
- this.domain = entitySpec.getDomain();
- break;
- case INT_DOMAIN:
- this.intDomain = entitySpec.getIntDomain().toByteArray();
- break;
- case FLOAT_DOMAIN:
- this.floatDomain = entitySpec.getFloatDomain().toByteArray();
- break;
- case STRING_DOMAIN:
- this.stringDomain = entitySpec.getStringDomain().toByteArray();
- break;
- case BOOL_DOMAIN:
- this.boolDomain = entitySpec.getBoolDomain().toByteArray();
- break;
- case STRUCT_DOMAIN:
- this.structDomain = entitySpec.getStructDomain().toByteArray();
- break;
- case NATURAL_LANGUAGE_DOMAIN:
- this.naturalLanguageDomain = entitySpec.getNaturalLanguageDomain().toByteArray();
- break;
- case IMAGE_DOMAIN:
- this.imageDomain = entitySpec.getImageDomain().toByteArray();
- break;
- case MID_DOMAIN:
- this.midDomain = entitySpec.getMidDomain().toByteArray();
- break;
- case URL_DOMAIN:
- this.urlDomain = entitySpec.getUrlDomain().toByteArray();
- break;
- case TIME_DOMAIN:
- this.timeDomain = entitySpec.getTimeDomain().toByteArray();
- break;
- case TIME_OF_DAY_DOMAIN:
- this.timeOfDayDomain = entitySpec.getTimeOfDayDomain().toByteArray();
- break;
- case DOMAININFO_NOT_SET:
- break;
- }
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) {
- return true;
- }
- if (o == null || getClass() != o.getClass()) {
- return false;
- }
- Field field = (Field) o;
- return Objects.equals(name, field.name)
- && Objects.equals(type, field.type)
- && Objects.equals(project, field.project)
- && Arrays.equals(presence, field.presence)
- && Arrays.equals(groupPresence, field.groupPresence)
- && Arrays.equals(shape, field.shape)
- && Arrays.equals(valueCount, field.valueCount)
- && Objects.equals(domain, field.domain)
- && Arrays.equals(intDomain, field.intDomain)
- && Arrays.equals(floatDomain, field.floatDomain)
- && Arrays.equals(stringDomain, field.stringDomain)
- && Arrays.equals(boolDomain, field.boolDomain)
- && Arrays.equals(structDomain, field.structDomain)
- && Arrays.equals(naturalLanguageDomain, field.naturalLanguageDomain)
- && Arrays.equals(imageDomain, field.imageDomain)
- && Arrays.equals(midDomain, field.midDomain)
- && Arrays.equals(urlDomain, field.urlDomain)
- && Arrays.equals(timeDomain, field.timeDomain)
- && Arrays.equals(timeOfDayDomain, field.timeOfDayDomain);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(super.hashCode(), name, type);
- }
-}
diff --git a/core/src/main/java/feast/core/model/Job.java b/core/src/main/java/feast/core/model/Job.java
index 738a16db2d..5fce3dffbe 100644
--- a/core/src/main/java/feast/core/model/Job.java
+++ b/core/src/main/java/feast/core/model/Job.java
@@ -17,23 +17,13 @@
package feast.core.model;
import com.google.protobuf.InvalidProtocolBufferException;
-import feast.core.FeatureSetProto;
-import feast.core.IngestionJobProto;
+import feast.core.job.Runner;
+import feast.proto.core.FeatureSetProto;
+import feast.proto.core.IngestionJobProto;
import java.util.ArrayList;
import java.util.List;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
+import javax.persistence.*;
import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.JoinTable;
-import javax.persistence.ManyToMany;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToMany;
-import javax.persistence.Table;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.Setter;
@@ -55,9 +45,9 @@ public class Job extends AbstractTimestampEntity {
private String extId;
// Runner type
- // Use Runner.name() when converting a Runner to string to assign to this property.
+ @Enumerated(EnumType.STRING)
@Column(name = "runner")
- private String runner;
+ private Runner runner;
// Source id
@ManyToOne
@@ -70,21 +60,17 @@ public class Job extends AbstractTimestampEntity {
private Store store;
// FeatureSets populated by the job
- @ManyToMany
+ @ManyToMany(cascade = CascadeType.ALL)
@JoinTable(
name = "jobs_feature_sets",
- joinColumns = @JoinColumn(name = "feature_sets_id"),
- inverseJoinColumns = @JoinColumn(name = "job_id"),
+ joinColumns = @JoinColumn(name = "job_id"),
+ inverseJoinColumns = @JoinColumn(name = "feature_sets_id"),
indexes = {
@Index(name = "idx_jobs_feature_sets_job_id", columnList = "job_id"),
@Index(name = "idx_jobs_feature_sets_feature_sets_id", columnList = "feature_sets_id")
})
 private List<FeatureSet> featureSets;
- // Job Metrics
- @OneToMany(mappedBy = "job", cascade = CascadeType.ALL)
-  private List<Metrics> metrics;
-
@Enumerated(EnumType.STRING)
@Column(name = "status", length = 16)
private JobStatus status;
@@ -93,26 +79,12 @@ public Job() {
super();
}
- public Job(
- String id,
- String extId,
- String runner,
- Source source,
- Store sink,
-      List<FeatureSet> featureSets,
- JobStatus jobStatus) {
- this.id = id;
- this.extId = extId;
- this.source = source;
- this.runner = runner;
- this.store = sink;
- this.featureSets = featureSets;
- this.status = jobStatus;
+ public boolean hasTerminated() {
+ return getStatus().isTerminal();
}
-  public void updateMetrics(List<Metrics> newMetrics) {
- metrics.clear();
- metrics.addAll(newMetrics);
+ public boolean isRunning() {
+ return getStatus() == JobStatus.RUNNING;
}
public String getSinkName() {
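A small sketch of the new status helpers on `Job`; the method here is illustrative:

```java
import feast.core.model.Job;

class JobStatusSketch {
  static String describe(Job job) {
    if (job.isRunning()) {
      return "running";
    }
    // hasTerminated() is true for COMPLETED, ABORTED and ERROR.
    return job.hasTerminated() ? "terminal" : "transitioning or unknown";
  }
}
```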
diff --git a/core/src/main/java/feast/core/model/JobStatus.java b/core/src/main/java/feast/core/model/JobStatus.java
index 86aa512933..6bafc06ec9 100644
--- a/core/src/main/java/feast/core/model/JobStatus.java
+++ b/core/src/main/java/feast/core/model/JobStatus.java
@@ -16,11 +16,9 @@
*/
package feast.core.model;
-import feast.core.IngestionJobProto.IngestionJobStatus;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
+import feast.proto.core.IngestionJobProto.IngestionJobStatus;
import java.util.Map;
+import java.util.Set;
public enum JobStatus {
/** Job status is not known. */
@@ -53,33 +51,41 @@ public enum JobStatus {
/** job has been suspended */
SUSPENDED;
-  private static final Collection<JobStatus> TERMINAL_STATE =
-      Collections.unmodifiableList(Arrays.asList(COMPLETED, ABORTED, ERROR));
+  private static final Set<JobStatus> TERMINAL_STATES = Set.of(COMPLETED, ABORTED, ERROR);
/**
- * Get a collection of terminal job state.
+ * Get the set of terminal job states.
*
-   * <p>Terminal job state is final and will not change to any other state.
+   * <p>A terminal job state is final and will not change to any other state.
*
- * @return collection of terminal job state.
+ * @return set of terminal job states.
*/
-  public static Collection<JobStatus> getTerminalState() {
-    return TERMINAL_STATE;
+  public static Set<JobStatus> getTerminalStates() {
+ return TERMINAL_STATES;
}
-  private static final Collection<JobStatus> TRANSITIONAL_STATES =
-      Collections.unmodifiableList(Arrays.asList(PENDING, ABORTING, SUSPENDING));
+  private static final Set<JobStatus> TRANSITIONAL_STATES = Set.of(PENDING, ABORTING, SUSPENDING);
/**
- * Get Transitional Job Status states. Transitionals states are assigned to jobs that
+ * Get Transitional Job Status states. Transitional states are assigned to jobs that are
* transitioning to a more stable state (ie SUSPENDED, ABORTED etc.)
*
- * @return Collection of transitional Job Status states.
+ * @return set of transitional Job Status states.
*/
-  public static final Collection<JobStatus> getTransitionalStates() {
+  public static Set<JobStatus> getTransitionalStates() {
return TRANSITIONAL_STATES;
}
+ /** @return true if this {@code JobStatus} is a terminal state. */
+ public boolean isTerminal() {
+ return getTerminalStates().contains(this);
+ }
+
+ /** @return true if this {@code JobStatus} is a transitional state. */
+ public boolean isTransitional() {
+ return getTransitionalStates().contains(this);
+ }
+
 private static final Map<JobStatus, IngestionJobStatus> INGESTION_JOB_STATUS_MAP =
Map.of(
JobStatus.UNKNOWN, IngestionJobStatus.UNKNOWN,
@@ -95,7 +101,7 @@ public static final Collection<JobStatus> getTransitionalStates() {
/**
* Convert a Job Status to Ingestion Job Status proto
*
- * @return IngestionJobStatus proto derieved from this job status
+ * @return IngestionJobStatus proto derived from this job status
*/
public IngestionJobStatus toProto() {
// maps job models job status to ingestion job status
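With the rename and the new instance helper, terminal-state checks reduce to a one-liner; a sketch:

```java
import feast.core.model.JobStatus;

class TerminalCheckSketch {
  static boolean isDone(JobStatus status) {
    // Equivalent to JobStatus.getTerminalStates().contains(status).
    return status.isTerminal();
  }
}
```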
diff --git a/core/src/main/java/feast/core/model/Metrics.java b/core/src/main/java/feast/core/model/Metrics.java
deleted file mode 100644
index 0b7514816f..0000000000
--- a/core/src/main/java/feast/core/model/Metrics.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- * Copyright 2018-2019 The Feast Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package feast.core.model;
-
-import javax.persistence.Entity;
-import javax.persistence.FetchType;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
-import lombok.Getter;
-import lombok.NoArgsConstructor;
-import lombok.Setter;
-
-@NoArgsConstructor
-@Getter
-@Setter
-@Entity
-@Table(name = "metrics")
-public class Metrics extends AbstractTimestampEntity {
-
- @Id
- @GeneratedValue(strategy = GenerationType.AUTO)
- private long id;
-
- @ManyToOne(fetch = FetchType.LAZY)
- @JoinColumn(name = "job_id")
- private Job job;
-
- /** Metrics name */
- private String name;
-
- /** Metrics value */
- private double value;
-
- /**
- * Create a metrics owned by a {@code job}.
- *
- * @param job owner of this metrics.
- * @param metricsName metrics name.
- * @param value metrics value.
- */
- public Metrics(Job job, String metricsName, double value) {
- this.job = job;
- this.name = metricsName;
- this.value = value;
- }
-}
diff --git a/core/src/main/java/feast/core/model/Project.java b/core/src/main/java/feast/core/model/Project.java
index d6e6149394..c55830c824 100644
--- a/core/src/main/java/feast/core/model/Project.java
+++ b/core/src/main/java/feast/core/model/Project.java
@@ -34,6 +34,7 @@
@Entity
@Table(name = "projects")
public class Project {
+ public static final String DEFAULT_NAME = "default";
// Name of the project
@Id
diff --git a/core/src/main/java/feast/core/model/Source.java b/core/src/main/java/feast/core/model/Source.java
index 28db1e9a5b..d199a1b676 100644
--- a/core/src/main/java/feast/core/model/Source.java
+++ b/core/src/main/java/feast/core/model/Source.java
@@ -18,10 +18,10 @@
import com.google.common.collect.Sets;
import com.google.protobuf.Message;
-import feast.core.SourceProto;
-import feast.core.SourceProto.KafkaSourceConfig;
-import feast.core.SourceProto.Source.Builder;
-import feast.core.SourceProto.SourceType;
+import feast.proto.core.SourceProto;
+import feast.proto.core.SourceProto.KafkaSourceConfig;
+import feast.proto.core.SourceProto.Source.Builder;
+import feast.proto.core.SourceProto.SourceType;
import io.grpc.Status;
import java.util.Objects;
import java.util.Set;
diff --git a/core/src/main/java/feast/core/model/Store.java b/core/src/main/java/feast/core/model/Store.java
index 9dc44bdc73..1f7c373bdc 100644
--- a/core/src/main/java/feast/core/model/Store.java
+++ b/core/src/main/java/feast/core/model/Store.java
@@ -17,13 +17,14 @@
package feast.core.model;
import com.google.protobuf.InvalidProtocolBufferException;
-import feast.core.StoreProto;
-import feast.core.StoreProto.Store.BigQueryConfig;
-import feast.core.StoreProto.Store.Builder;
-import feast.core.StoreProto.Store.CassandraConfig;
-import feast.core.StoreProto.Store.RedisConfig;
-import feast.core.StoreProto.Store.StoreType;
-import feast.core.StoreProto.Store.Subscription;
+import feast.proto.core.StoreProto;
+import feast.proto.core.StoreProto.Store.BigQueryConfig;
+import feast.proto.core.StoreProto.Store.Builder;
+import feast.proto.core.StoreProto.Store.CassandraConfig;
+import feast.proto.core.StoreProto.Store.RedisClusterConfig;
+import feast.proto.core.StoreProto.Store.RedisConfig;
+import feast.proto.core.StoreProto.Store.StoreType;
+import feast.proto.core.StoreProto.Store.Subscription;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -82,6 +83,9 @@ public static Store fromProto(StoreProto.Store storeProto) throws IllegalArgumen
case CASSANDRA:
config = storeProto.getCassandraConfig().toByteArray();
break;
+ case REDIS_CLUSTER:
+ config = storeProto.getRedisClusterConfig().toByteArray();
+ break;
default:
throw new IllegalArgumentException("Invalid store provided");
}
@@ -106,6 +110,9 @@ public StoreProto.Store toProto() throws InvalidProtocolBufferException {
case CASSANDRA:
CassandraConfig cassConfig = CassandraConfig.parseFrom(config);
return storeProtoBuilder.setCassandraConfig(cassConfig).build();
+ case REDIS_CLUSTER:
+ RedisClusterConfig redisClusterConfig = RedisClusterConfig.parseFrom(config);
+ return storeProtoBuilder.setRedisClusterConfig(redisClusterConfig).build();
default:
throw new InvalidProtocolBufferException("Invalid store set");
}
@@ -118,22 +125,18 @@ public List<Subscription> getSubscriptions() {
}
private static String convertSubscriptionToString(Subscription sub) {
- if (sub.getVersion().isEmpty() || sub.getName().isEmpty() || sub.getProject().isEmpty()) {
+ if (sub.getName().isEmpty() || sub.getProject().isEmpty()) {
throw new IllegalArgumentException(
String.format("Missing arguments in subscription string: %s", sub.toString()));
}
- return String.format("%s:%s:%s", sub.getProject(), sub.getName(), sub.getVersion());
+ return String.format("%s:%s", sub.getProject(), sub.getName());
}
private Subscription convertStringToSubscription(String sub) {
if (sub.equals("")) {
return Subscription.newBuilder().build();
}
- String[] split = sub.split(":", 3);
- return Subscription.newBuilder()
- .setProject(split[0])
- .setName(split[1])
- .setVersion(split[2])
- .build();
+ String[] split = sub.split(":", 2);
+ return Subscription.newBuilder().setProject(split[0]).setName(split[1]).build();
}
}
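The subscription string format drops its version component in this change; a sketch of the new round trip (the project and name values are illustrative):

```java
import feast.proto.core.StoreProto.Store.Subscription;

// Old format: "project:name:version"  e.g. "default:driver_features:1"
// New format: "project:name"          e.g. "default:driver_features"
Subscription sub =
    Subscription.newBuilder().setProject("default").setName("driver_features").build();
// convertSubscriptionToString(sub) now yields "default:driver_features".
```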
diff --git a/core/src/main/java/feast/core/service/AccessManagementService.java b/core/src/main/java/feast/core/service/AccessManagementService.java
index df92750e94..5b02d6f3c4 100644
--- a/core/src/main/java/feast/core/service/AccessManagementService.java
+++ b/core/src/main/java/feast/core/service/AccessManagementService.java
@@ -28,12 +28,15 @@
@Slf4j
@Service
public class AccessManagementService {
-
private ProjectRepository projectRepository;
@Autowired
public AccessManagementService(ProjectRepository projectRepository) {
this.projectRepository = projectRepository;
+ // create default project if it does not yet exist.
+ if (!projectRepository.existsById(Project.DEFAULT_NAME)) {
+ this.createProject(Project.DEFAULT_NAME);
+ }
}
/**
@@ -61,6 +64,9 @@ public void archiveProject(String name) {
if (!project.isPresent()) {
throw new IllegalArgumentException(String.format("Could not find project: \"%s\"", name));
}
+ if (name.equals(Project.DEFAULT_NAME)) {
+ throw new UnsupportedOperationException("Archiving the default project is not allowed.");
+ }
Project p = project.get();
p.setArchived(true);
projectRepository.saveAndFlush(p);
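The resulting behaviour, sketched; `service` is assumed to be a wired `AccessManagementService`:

```java
import feast.core.model.Project;

// The default project is created eagerly in the constructor, so it always
// exists; archiving it is rejected.
service.archiveProject(Project.DEFAULT_NAME);
// -> UnsupportedOperationException: "Archiving the default project is not allowed."
```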
diff --git a/core/src/main/java/feast/core/service/JobCoordinatorService.java b/core/src/main/java/feast/core/service/JobCoordinatorService.java
index b66d181022..90ee54ca16 100644
--- a/core/src/main/java/feast/core/service/JobCoordinatorService.java
+++ b/core/src/main/java/feast/core/service/JobCoordinatorService.java
@@ -17,23 +17,21 @@
package feast.core.service;
import com.google.protobuf.InvalidProtocolBufferException;
-import feast.core.CoreServiceProto.ListFeatureSetsRequest;
-import feast.core.CoreServiceProto.ListStoresRequest.Filter;
-import feast.core.CoreServiceProto.ListStoresResponse;
-import feast.core.FeatureSetProto;
-import feast.core.FeatureSetProto.FeatureSetStatus;
-import feast.core.StoreProto;
-import feast.core.StoreProto.Store.Subscription;
-import feast.core.config.FeastProperties.JobUpdatesProperties;
+import feast.core.config.FeastProperties;
+import feast.core.config.FeastProperties.JobProperties;
import feast.core.dao.FeatureSetRepository;
import feast.core.dao.JobRepository;
import feast.core.job.JobManager;
import feast.core.job.JobUpdateTask;
import feast.core.model.FeatureSet;
import feast.core.model.Job;
-import feast.core.model.JobStatus;
import feast.core.model.Source;
import feast.core.model.Store;
+import feast.proto.core.CoreServiceProto.ListStoresRequest.Filter;
+import feast.proto.core.CoreServiceProto.ListStoresResponse;
+import feast.proto.core.FeatureSetProto.FeatureSetStatus;
+import feast.proto.core.StoreProto;
+import feast.proto.core.StoreProto.Store.Subscription;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
@@ -44,6 +42,7 @@
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.stream.Collectors;
+import javax.validation.constraints.Positive;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
@@ -54,11 +53,11 @@
@Service
public class JobCoordinatorService {
- private JobRepository jobRepository;
- private FeatureSetRepository featureSetRepository;
- private SpecService specService;
- private JobManager jobManager;
- private JobUpdatesProperties jobUpdatesProperties;
+ private final JobRepository jobRepository;
+ private final FeatureSetRepository featureSetRepository;
+ private final SpecService specService;
+ private final JobManager jobManager;
+ private final JobProperties jobProperties;
@Autowired
public JobCoordinatorService(
@@ -66,12 +65,12 @@ public JobCoordinatorService(
FeatureSetRepository featureSetRepository,
SpecService specService,
JobManager jobManager,
- JobUpdatesProperties jobUpdatesProperties) {
+ FeastProperties feastProperties) {
this.jobRepository = jobRepository;
this.featureSetRepository = featureSetRepository;
this.specService = specService;
this.jobManager = jobManager;
- this.jobUpdatesProperties = jobUpdatesProperties;
+ this.jobProperties = feastProperties.getJobs();
}
/**
@@ -86,98 +85,93 @@ public JobCoordinatorService(
* <p>4) Updates Feature set statuses
*/
@Transactional
- @Scheduled(fixedDelayString = "${feast.jobs.updates.pollingIntervalMillis}")
+ @Scheduled(fixedDelayString = "${feast.jobs.polling_interval_milliseconds}")
public void Poll() throws InvalidProtocolBufferException {
log.info("Polling for new jobs...");
+ @Positive long updateTimeout = jobProperties.getJobUpdateTimeoutSeconds();
List<JobUpdateTask> jobUpdateTasks = new ArrayList<>();
ListStoresResponse listStoresResponse = specService.listStores(Filter.newBuilder().build());
- for (StoreProto.Store store : listStoresResponse.getStoreList()) {
- Set<FeatureSetProto.FeatureSet> featureSets = new HashSet<>();
- for (Subscription subscription : store.getSubscriptionsList()) {
- featureSets.addAll(
- new ArrayList<>(
- specService
- .listFeatureSets(
- ListFeatureSetsRequest.Filter.newBuilder()
- .setFeatureSetName(subscription.getName())
- .setFeatureSetVersion(subscription.getVersion())
- .setProject(subscription.getProject())
- .build())
- .getFeatureSetsList()));
- }
- if (!featureSets.isEmpty()) {
- featureSets.stream()
- .collect(Collectors.groupingBy(fs -> fs.getSpec().getSource()))
- .entrySet()
- .stream()
- .forEach(
- kv -> {
- Optional<Job> originalJob =
- getJob(Source.fromProto(kv.getKey()), Store.fromProto(store));
- jobUpdateTasks.add(
- new JobUpdateTask(
- kv.getValue(),
- kv.getKey(),
- store,
- originalJob,
- jobManager,
- jobUpdatesProperties.getTimeoutSeconds()));
- });
+
+ for (StoreProto.Store storeSpec : listStoresResponse.getStoreList()) {
+ Set<FeatureSet> featureSets = new HashSet<>();
+ Store store = Store.fromProto(storeSpec);
+
+ for (Subscription subscription : store.getSubscriptions()) {
+ List<FeatureSet> featureSetsForSub =
+ featureSetRepository.findAllByNameLikeAndProject_NameLikeOrderByNameAsc(
+ subscription.getName().replace('*', '%'),
+ subscription.getProject().replace('*', '%'));
+ featureSets.addAll(featureSetsForSub);
}
+
+ featureSets.stream()
+ .collect(Collectors.groupingBy(FeatureSet::getSource))
+ .forEach(
+ (source, setsForSource) -> {
+ Optional<Job> originalJob = getJob(source, store);
+ jobUpdateTasks.add(
+ new JobUpdateTask(
+ setsForSource, source, store, originalJob, jobManager, updateTimeout));
+ });
}
- if (jobUpdateTasks.size() == 0) {
+ if (jobUpdateTasks.isEmpty()) {
log.info("No jobs found.");
return;
}
log.info("Creating/Updating {} jobs...", jobUpdateTasks.size());
- ExecutorService executorService = Executors.newFixedThreadPool(jobUpdateTasks.size());
+ startOrUpdateJobs(jobUpdateTasks);
+
+ log.info("Updating feature set status");
+ updateFeatureSetStatuses(jobUpdateTasks);
+ }
+
+ void startOrUpdateJobs(List<JobUpdateTask> tasks) {
+ ExecutorService executorService = Executors.newFixedThreadPool(tasks.size());
ExecutorCompletionService<Job> ecs = new ExecutorCompletionService<>(executorService);
- jobUpdateTasks.forEach(ecs::submit);
+ tasks.forEach(ecs::submit);
int completedTasks = 0;
- while (completedTasks < jobUpdateTasks.size()) {
+ List<Job> startedJobs = new ArrayList<>();
+ while (completedTasks < tasks.size()) {
try {
Job job = ecs.take().get();
if (job != null) {
- jobRepository.saveAndFlush(job);
+ startedJobs.add(job);
}
} catch (ExecutionException | InterruptedException e) {
log.warn("Unable to start or update job: {}", e.getMessage());
}
completedTasks++;
}
-
- log.info("Updating feature set status");
- updateFeatureSetStatuses(jobUpdateTasks);
+ jobRepository.saveAll(startedJobs);
+ executorService.shutdown();
}
// TODO: make this more efficient
private void updateFeatureSetStatuses(List<JobUpdateTask> jobUpdateTasks) {
Set<FeatureSet> ready = new HashSet<>();
Set<FeatureSet> pending = new HashSet<>();
- for (JobUpdateTask jobUpdateTask : jobUpdateTasks) {
- Optional<Job> job =
- getJob(
- Source.fromProto(jobUpdateTask.getSourceSpec()),
- Store.fromProto(jobUpdateTask.getStore()));
- if (job.isPresent()) {
- if (job.get().getStatus() == JobStatus.RUNNING) {
- ready.addAll(job.get().getFeatureSets());
- } else {
- pending.addAll(job.get().getFeatureSets());
- }
- }
+ for (JobUpdateTask task : jobUpdateTasks) {
+ getJob(task.getSource(), task.getStore())
+ .ifPresent(
+ job -> {
+ if (job.isRunning()) {
+ ready.addAll(job.getFeatureSets());
+ } else {
+ pending.addAll(job.getFeatureSets());
+ }
+ });
}
ready.removeAll(pending);
ready.forEach(
fs -> {
- fs.setStatus(FeatureSetStatus.STATUS_READY.toString());
+ fs.setStatus(FeatureSetStatus.STATUS_READY);
featureSetRepository.save(fs);
});
pending.forEach(
fs -> {
- fs.setStatus(FeatureSetStatus.STATUS_PENDING.toString());
+ fs.setStatus(FeatureSetStatus.STATUS_JOB_STARTING);
featureSetRepository.save(fs);
});
featureSetRepository.flush();
@@ -188,11 +182,8 @@ public Optional<Job> getJob(Source source, Store store) {
List<Job> jobs =
jobRepository.findBySourceIdAndStoreNameOrderByLastUpdatedDesc(
source.getId(), store.getName());
- jobs =
- jobs.stream()
- .filter(job -> !JobStatus.getTerminalState().contains(job.getStatus()))
- .collect(Collectors.toList());
- if (jobs.size() == 0) {
+ jobs = jobs.stream().filter(job -> !job.hasTerminated()).collect(Collectors.toList());
+ if (jobs.isEmpty()) {
return Optional.empty();
}
// return the latest
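
The rewritten Poll expands each store's subscriptions into matching feature sets and then groups them by source, so every (source, store) pair produces exactly one JobUpdateTask. A standalone sketch of that grouping step, with plain strings standing in for the Source, Store and FeatureSet model classes:

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

// Sketch: one update task per distinct source within a store's subscriptions.
public class GroupBySourceSketch {

  static final class Fs {
    final String name;
    final String source;

    Fs(String name, String source) {
      this.name = name;
      this.source = source;
    }
  }

  public static void main(String[] args) {
    List<Fs> featureSets =
        List.of(new Fs("fs1", "kafka-a"), new Fs("fs2", "kafka-a"), new Fs("fs3", "kafka-b"));

    // Mirrors featureSets.stream().collect(Collectors.groupingBy(FeatureSet::getSource)).
    Map<String, List<Fs>> bySource =
        featureSets.stream().collect(Collectors.groupingBy(fs -> fs.source));

    // Each entry would become one JobUpdateTask(setsForSource, source, store, ...).
    bySource.forEach(
        (source, sets) -> System.out.println("task for " + source + ": " + sets.size() + " sets"));
  }
}
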
diff --git a/core/src/main/java/feast/core/service/JobService.java b/core/src/main/java/feast/core/service/JobService.java
index bf74b90e80..cc125305ec 100644
--- a/core/src/main/java/feast/core/service/JobService.java
+++ b/core/src/main/java/feast/core/service/JobService.java
@@ -17,24 +17,25 @@
package feast.core.service;
import com.google.protobuf.InvalidProtocolBufferException;
-import feast.core.CoreServiceProto.ListFeatureSetsRequest;
-import feast.core.CoreServiceProto.ListFeatureSetsResponse;
-import feast.core.CoreServiceProto.ListIngestionJobsRequest;
-import feast.core.CoreServiceProto.ListIngestionJobsResponse;
-import feast.core.CoreServiceProto.RestartIngestionJobRequest;
-import feast.core.CoreServiceProto.RestartIngestionJobResponse;
-import feast.core.CoreServiceProto.StopIngestionJobRequest;
-import feast.core.CoreServiceProto.StopIngestionJobResponse;
-import feast.core.FeatureSetReferenceProto.FeatureSetReference;
-import feast.core.IngestionJobProto;
import feast.core.dao.JobRepository;
import feast.core.job.JobManager;
+import feast.core.job.Runner;
import feast.core.log.Action;
import feast.core.log.AuditLogger;
import feast.core.log.Resource;
import feast.core.model.FeatureSet;
import feast.core.model.Job;
import feast.core.model.JobStatus;
+import feast.proto.core.CoreServiceProto.ListFeatureSetsRequest;
+import feast.proto.core.CoreServiceProto.ListFeatureSetsResponse;
+import feast.proto.core.CoreServiceProto.ListIngestionJobsRequest;
+import feast.proto.core.CoreServiceProto.ListIngestionJobsResponse;
+import feast.proto.core.CoreServiceProto.RestartIngestionJobRequest;
+import feast.proto.core.CoreServiceProto.RestartIngestionJobResponse;
+import feast.proto.core.CoreServiceProto.StopIngestionJobRequest;
+import feast.proto.core.CoreServiceProto.StopIngestionJobResponse;
+import feast.proto.core.FeatureSetReferenceProto.FeatureSetReference;
+import feast.proto.core.IngestionJobProto;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
@@ -50,13 +51,13 @@
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
-/** Defines a Job Managemenent Service that allows users to manage feast ingestion jobs. */
+/** A Job Management Service that allows users to manage Feast ingestion jobs. */
@Slf4j
@Service
public class JobService {
- private JobRepository jobRepository;
- private SpecService specService;
- private Map<String, JobManager> jobManagers;
+ private final JobRepository jobRepository;
+ private final SpecService specService;
+ private final Map<Runner, JobManager> jobManagers;
@Autowired
public JobService(
@@ -66,13 +67,13 @@ public JobService(
this.jobManagers = new HashMap<>();
for (JobManager manager : jobManagerList) {
- this.jobManagers.put(manager.getRunnerType().name(), manager);
+ this.jobManagers.put(manager.getRunnerType(), manager);
}
}
/* Job Service API */
/**
- * List Ingestion Jobs in feast matching the given request. See CoreService protobuf documentation
+ * List Ingestion Jobs in Feast matching the given request. See CoreService protobuf documentation
* for more detailed documentation.
*
* @param request list ingestion jobs request specifying which jobs to include
@@ -158,6 +159,7 @@ public RestartIngestionJobResponse restartJob(RestartIngestionJobRequest request
// check job exists
Optional<Job> getJob = this.jobRepository.findById(request.getId());
if (getJob.isEmpty()) {
+ // FIXME: if getJob.isEmpty then constructing this error message will always throw an error...
throw new NoSuchElementException(
"Attempted to stop nonexistent job with id: " + getJob.get().getId());
}
@@ -165,9 +167,7 @@ public RestartIngestionJobResponse restartJob(RestartIngestionJobRequest request
// check job status is valid for restarting
Job job = getJob.get();
JobStatus status = job.getStatus();
- if (JobStatus.getTransitionalStates().contains(status)
- || JobStatus.getTerminalState().contains(status)
- || status.equals(JobStatus.UNKNOWN)) {
+ if (status.isTransitional() || status.isTerminal() || status == JobStatus.UNKNOWN) {
throw new UnsupportedOperationException(
"Restarting a job with a transitional, terminal or unknown status is unsupported");
}
@@ -208,11 +208,10 @@ public StopIngestionJobResponse stopJob(StopIngestionJobRequest request)
// check job status is valid for stopping
Job job = getJob.get();
JobStatus status = job.getStatus();
- if (JobStatus.getTerminalState().contains(status)) {
+ if (status.isTerminal()) {
// do nothing - job is already stopped
return StopIngestionJobResponse.newBuilder().build();
- } else if (JobStatus.getTransitionalStates().contains(status)
- || status.equals(JobStatus.UNKNOWN)) {
+ } else if (status.isTransitional() || status == JobStatus.UNKNOWN) {
throw new UnsupportedOperationException(
"Stopping a job with a transitional or unknown status is unsupported");
}
@@ -249,7 +248,6 @@ private ListFeatureSetsRequest.Filter toListFeatureSetFilter(FeatureSetReference
// match featuresets using contents of featureset reference
String fsName = fsReference.getName();
String fsProject = fsReference.getProject();
- Integer fsVersion = fsReference.getVersion();
// construct list featureset request filter using feature set reference
// for proto3, default value for missing values:
@@ -259,7 +257,6 @@ private ListFeatureSetsRequest.Filter toListFeatureSetFilter(FeatureSetReference
ListFeatureSetsRequest.Filter.newBuilder()
.setFeatureSetName((fsName != "") ? fsName : "*")
.setProject((fsProject != "") ? fsProject : "*")
- .setFeatureSetVersion((fsVersion != 0) ? fsVersion.toString() : "*")
.build();
return filter;
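
JobService now asks the status itself (status.isTerminal(), status.isTransitional()) instead of probing static sets. A hedged sketch of what such an enum might look like; the exact category membership below is an assumption for illustration, not Feast's actual definition:

import java.util.EnumSet;
import java.util.Set;

// Sketch: status predicates as instance methods; membership below is illustrative.
public enum JobStatusSketch {
  UNKNOWN, PENDING, RUNNING, ABORTING, ABORTED, COMPLETED, ERROR;

  private static final Set<JobStatusSketch> TERMINAL = EnumSet.of(ABORTED, COMPLETED, ERROR);
  private static final Set<JobStatusSketch> TRANSITIONAL = EnumSet.of(PENDING, ABORTING);

  public boolean isTerminal() {
    return TERMINAL.contains(this);
  }

  public boolean isTransitional() {
    return TRANSITIONAL.contains(this);
  }

  public static void main(String[] args) {
    // Call sites then read as in the diff: status.isTransitional() || status.isTerminal()
    System.out.println(ERROR.isTerminal()); // true
    System.out.println(PENDING.isTransitional()); // true
  }
}
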
diff --git a/core/src/main/java/feast/core/service/SpecService.java b/core/src/main/java/feast/core/service/SpecService.java
index 8fec6ac511..01cd264c76 100644
--- a/core/src/main/java/feast/core/service/SpecService.java
+++ b/core/src/main/java/feast/core/service/SpecService.java
@@ -19,23 +19,7 @@
import static feast.core.validators.Matchers.checkValidCharacters;
import static feast.core.validators.Matchers.checkValidCharactersAllowAsterisk;
-import com.google.common.collect.Ordering;
import com.google.protobuf.InvalidProtocolBufferException;
-import feast.core.CoreServiceProto.ApplyFeatureSetResponse;
-import feast.core.CoreServiceProto.ApplyFeatureSetResponse.Status;
-import feast.core.CoreServiceProto.GetFeatureSetRequest;
-import feast.core.CoreServiceProto.GetFeatureSetResponse;
-import feast.core.CoreServiceProto.ListFeatureSetsRequest;
-import feast.core.CoreServiceProto.ListFeatureSetsResponse;
-import feast.core.CoreServiceProto.ListStoresRequest;
-import feast.core.CoreServiceProto.ListStoresResponse;
-import feast.core.CoreServiceProto.ListStoresResponse.Builder;
-import feast.core.CoreServiceProto.UpdateStoreRequest;
-import feast.core.CoreServiceProto.UpdateStoreResponse;
-import feast.core.FeatureSetProto;
-import feast.core.SourceProto;
-import feast.core.StoreProto;
-import feast.core.StoreProto.Store.Subscription;
import feast.core.dao.FeatureSetRepository;
import feast.core.dao.ProjectRepository;
import feast.core.dao.StoreRepository;
@@ -45,10 +29,25 @@
import feast.core.model.Source;
import feast.core.model.Store;
import feast.core.validators.FeatureSetValidator;
+import feast.proto.core.CoreServiceProto.ApplyFeatureSetResponse;
+import feast.proto.core.CoreServiceProto.ApplyFeatureSetResponse.Status;
+import feast.proto.core.CoreServiceProto.GetFeatureSetRequest;
+import feast.proto.core.CoreServiceProto.GetFeatureSetResponse;
+import feast.proto.core.CoreServiceProto.ListFeatureSetsRequest;
+import feast.proto.core.CoreServiceProto.ListFeatureSetsResponse;
+import feast.proto.core.CoreServiceProto.ListStoresRequest;
+import feast.proto.core.CoreServiceProto.ListStoresResponse;
+import feast.proto.core.CoreServiceProto.ListStoresResponse.Builder;
+import feast.proto.core.CoreServiceProto.UpdateStoreRequest;
+import feast.proto.core.CoreServiceProto.UpdateStoreResponse;
+import feast.proto.core.FeatureSetProto;
+import feast.proto.core.FeatureSetProto.FeatureSetStatus;
+import feast.proto.core.SourceProto;
+import feast.proto.core.StoreProto;
+import feast.proto.core.StoreProto.Store.Subscription;
import java.util.ArrayList;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@@ -81,7 +80,8 @@ public SpecService(
/**
* Get a feature set matching the feature name and version and project. The feature set name and
* project are required, but version can be omitted by providing 0 for its value. If the version
- * is omitted, the latest feature set will be provided.
+ * is omitted, the latest feature set will be provided. If the project is omitted, the default
+ * project will be used.
*
* @param request: GetFeatureSetRequest Request containing filter parameters.
* @return Returns a GetFeatureSetResponse containing a feature set.
@@ -95,130 +95,88 @@ public GetFeatureSetResponse getFeatureSet(GetFeatureSetRequest request)
if (request.getName().isEmpty()) {
throw new IllegalArgumentException("No feature set name provided");
}
+ // Autofill default project if project is not specified
if (request.getProject().isEmpty()) {
- throw new IllegalArgumentException("No project provided");
- }
- if (request.getVersion() < 0) {
- throw new IllegalArgumentException("Version number cannot be less than 0");
+ request = request.toBuilder().setProject(Project.DEFAULT_NAME).build();
}
FeatureSet featureSet;
- // Filter the list based on version
- if (request.getVersion() == 0) {
- featureSet =
- featureSetRepository.findFirstFeatureSetByNameLikeAndProject_NameOrderByVersionDesc(
- request.getName(), request.getProject());
-
- if (featureSet == null) {
- throw new RetrievalException(
- String.format("Feature set with name \"%s\" could not be found.", request.getName()));
- }
- } else {
- featureSet =
- featureSetRepository.findFeatureSetByNameAndProject_NameAndVersion(
- request.getName(), request.getProject(), request.getVersion());
+ featureSet =
+ featureSetRepository.findFeatureSetByNameAndProject_Name(
+ request.getName(), request.getProject());
- if (featureSet == null) {
- throw new RetrievalException(
- String.format(
- "Feature set with name \"%s\" and version \"%s\" could " + "not be found.",
- request.getName(), request.getVersion()));
- }
+ if (featureSet == null) {
+ throw new RetrievalException(
+ String.format("Feature set with name \"%s\" could not be found.", request.getName()));
}
-
- // Only a single item in list, return successfully
return GetFeatureSetResponse.newBuilder().setFeatureSet(featureSet.toProto()).build();
}
/**
- * Return a list of feature sets matching the feature set name, version, and project provided in
- * the filter. All fields are requried. Use '*' for all three arguments in order to return all
- * feature sets and versions in all projects.
+ * Return a list of feature sets matching the feature set name and project provided in the filter.
+ * All fields are required. Use '*' for all arguments in order to return all feature sets in all
+ * projects.
*
* <p>Project name can be explicitly provided, or an asterisk can be provided to match all
- * projects. It is not possible to provide a combination of asterisks/wildcards and text.
+ * projects. It is not possible to provide a combination of asterisks/wildcards and text. If the
+ * project name is omitted, the default project will be used.
*
* <p>The feature set name in the filter accepts an asterisk as a wildcard. All matching feature
* sets will be returned. Regex is not supported. Explicitly defining a feature set name is not
* possible if a project name is not set explicitly
*
- * <p>The version field can be one of - '*' - This will match all versions - 'latest' - This will
- * match the latest feature set version - '<number>' - This will match a specific feature
- * set version. This property can only be set if both the feature set name and project name are
- * explicitly set.
- *
- * @param filter filter containing the desired featureSet name and version filter
+ * @param filter filter containing the desired featureSet name
* @return ListFeatureSetsResponse with list of featureSets found matching the filter
*/
public ListFeatureSetsResponse listFeatureSets(ListFeatureSetsRequest.Filter filter)
throws InvalidProtocolBufferException {
String name = filter.getFeatureSetName();
String project = filter.getProject();
- String version = filter.getFeatureSetVersion();
- if (project.isEmpty() || name.isEmpty() || version.isEmpty()) {
+ if (name.isEmpty()) {
throw new IllegalArgumentException(
- String.format(
- "Invalid listFeatureSetRequest, missing arguments. Must provide project, feature set name, and version.",
- filter.toString()));
+ "Invalid listFeatureSetRequest, missing arguments. Must provide feature set name:");
}
checkValidCharactersAllowAsterisk(name, "featureSetName");
checkValidCharactersAllowAsterisk(project, "projectName");
- List<FeatureSet> featureSets = new ArrayList<FeatureSet>() {};
+ // Autofill default project if project not specified
+ if (project.isEmpty()) {
+ project = Project.DEFAULT_NAME;
+ }
- if (project.equals("*")) {
- // Matching all projects
+ List<FeatureSet> featureSets = new ArrayList<FeatureSet>() {};
- if (name.equals("*") && version.equals("*")) {
+ if (project.contains("*")) {
+ // Matching a wildcard project
+ if (name.contains("*")) {
featureSets =
- featureSetRepository.findAllByNameLikeAndProject_NameLikeOrderByNameAscVersionAsc(
+ featureSetRepository.findAllByNameLikeAndProject_NameLikeOrderByNameAsc(
name.replace('*', '%'), project.replace('*', '%'));
} else {
throw new IllegalArgumentException(
String.format(
- "Invalid listFeatureSetRequest. Version and feature set name must be set to "
+ "Invalid listFeatureSetRequest. Feature set name must be set to "
+ "\"*\" if the project name and feature set name aren't set explicitly: \n%s",
filter.toString()));
}
} else if (!project.contains("*")) {
// Matching a specific project
-
- if (name.contains("*") && version.equals("*")) {
- // Find all feature sets matching a pattern and versions in a specific project
- featureSets =
- featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc(
- name.replace('*', '%'), project);
-
- } else if (!name.contains("*") && version.equals("*")) {
- // Find all versions of a specific feature set in a specific project
+ if (name.contains("*")) {
+ // Find all feature sets matching a pattern in a specific project
featureSets =
- featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc(
- name, project);
-
- } else if (version.equals("latest")) {
- // Find the latest version of a feature set matching a specific pattern in a specific
- // project
- FeatureSet latestFeatureSet =
- featureSetRepository.findFirstFeatureSetByNameLikeAndProject_NameOrderByVersionDesc(
+ featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAsc(
name.replace('*', '%'), project);
- featureSets.add(latestFeatureSet);
-
- } else if (!name.contains("*") && StringUtils.isNumeric(version)) {
- // Find a specific version of a feature set matching a specific name in a specific project
- FeatureSet specificFeatureSet =
- featureSetRepository.findFeatureSetByNameAndProject_NameAndVersion(
- name, project, Integer.parseInt(version));
- featureSets.add(specificFeatureSet);
- } else {
- throw new IllegalArgumentException(
- String.format(
- "Invalid listFeatureSetRequest. Version must be set to \"*\" if the project "
- + "name and feature set name aren't set explicitly: \n%s",
- filter.toString()));
+ } else if (!name.contains("*")) {
+ // Find a specific feature set in a specific project
+ FeatureSet featureSet =
+ featureSetRepository.findFeatureSetByNameAndProject_Name(name, project);
+ if (featureSet != null) {
+ featureSets.add(featureSet);
+ }
}
} else {
throw new IllegalArgumentException(
@@ -273,17 +231,25 @@ public ListStoresResponse listStores(ListStoresRequest.Filter filter) {
}
/**
- * Creates or updates a feature set in the repository. If there is a change in the feature set
- * schema, then the feature set version will be incremented.
+ * Creates or updates a feature set in the repository.
*
* <p>This function is idempotent. If no changes are detected in the incoming featureSet's schema,
* this method will update the incoming featureSet spec with the latest version stored in the
- * repository, and return that.
+ * repository, and return that. If the project is not specified in the given featureSet, the
+ * featureSet will be assigned to the 'default' project.
*
* @param newFeatureSet Feature set that will be created or updated.
*/
public ApplyFeatureSetResponse applyFeatureSet(FeatureSetProto.FeatureSet newFeatureSet)
throws InvalidProtocolBufferException {
+ // Autofill default project if not specified
+ if (newFeatureSet.getSpec().getProject().isEmpty()) {
+ newFeatureSet =
+ newFeatureSet
+ .toBuilder()
+ .setSpec(newFeatureSet.getSpec().toBuilder().setProject(Project.DEFAULT_NAME).build())
+ .build();
+ }
// Validate incoming feature set
FeatureSetValidator.validateSpec(newFeatureSet);
@@ -300,53 +266,48 @@ public ApplyFeatureSetResponse applyFeatureSet(FeatureSetProto.FeatureSet newFea
throw new IllegalArgumentException(String.format("Project is archived: %s", project_name));
}
- // Retrieve all existing FeatureSet objects
- List<FeatureSet> existingFeatureSets =
- featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc(
- newFeatureSet.getSpec().getName(), project_name);
-
- if (existingFeatureSets.size() == 0) {
- // Create new feature set since it doesn't exist
+ // Set source to default if not set in proto
+ if (newFeatureSet.getSpec().getSource() == SourceProto.Source.getDefaultInstance()) {
newFeatureSet =
newFeatureSet
.toBuilder()
- .setSpec(newFeatureSet.getSpec().toBuilder().setVersion(1))
+ .setSpec(
+ newFeatureSet.getSpec().toBuilder().setSource(defaultSource.toProto()).build())
.build();
- } else {
- // Retrieve the latest feature set if the name does exist
- existingFeatureSets = Ordering.natural().reverse().sortedCopy(existingFeatureSets);
- FeatureSet latest = existingFeatureSets.get(0);
- FeatureSet featureSet = FeatureSet.fromProto(newFeatureSet);
+ }
+ // Retrieve existing FeatureSet
+ FeatureSet featureSet =
+ featureSetRepository.findFeatureSetByNameAndProject_Name(
+ newFeatureSet.getSpec().getName(), project_name);
+
+ Status status;
+ if (featureSet == null) {
+ // Create new feature set since it doesn't exist
+ newFeatureSet = newFeatureSet.toBuilder().setSpec(newFeatureSet.getSpec()).build();
+ featureSet = FeatureSet.fromProto(newFeatureSet);
+ status = Status.CREATED;
+ } else {
// If the featureSet remains unchanged, we do nothing.
- if (featureSet.equalTo(latest)) {
+ if (featureSet.toProto().getSpec().equals(newFeatureSet.getSpec())) {
return ApplyFeatureSetResponse.newBuilder()
- .setFeatureSet(latest.toProto())
+ .setFeatureSet(featureSet.toProto())
.setStatus(Status.NO_CHANGE)
.build();
}
- // TODO: There is a race condition here with incrementing the version
- newFeatureSet =
- newFeatureSet
- .toBuilder()
- .setSpec(newFeatureSet.getSpec().toBuilder().setVersion(latest.getVersion() + 1))
- .build();
- }
-
- // Build a new FeatureSet object which includes the new properties
- FeatureSet featureSet = FeatureSet.fromProto(newFeatureSet);
- if (newFeatureSet.getSpec().getSource() == SourceProto.Source.getDefaultInstance()) {
- featureSet.setSource(defaultSource);
+ featureSet.updateFromProto(newFeatureSet);
+ status = Status.UPDATED;
}
// Persist the FeatureSet object
+ featureSet.setStatus(FeatureSetStatus.STATUS_PENDING);
project.addFeatureSet(featureSet);
projectRepository.saveAndFlush(project);
// Build ApplyFeatureSetResponse
return ApplyFeatureSetResponse.newBuilder()
.setFeatureSet(featureSet.toProto())
- .setStatus(Status.CREATED)
+ .setStatus(status)
.build();
}
@@ -364,7 +325,7 @@ public UpdateStoreResponse updateStore(UpdateStoreRequest updateStoreRequest)
List<Subscription> subs = newStoreProto.getSubscriptionsList();
for (Subscription sub : subs) {
// Ensure that all fields in a subscription contain values
- if ((sub.getVersion().isEmpty() || sub.getName().isEmpty()) || sub.getProject().isEmpty()) {
+ if ((sub.getName().isEmpty()) || sub.getProject().isEmpty()) {
throw new IllegalArgumentException(
String.format("Missing parameter in subscription: %s", sub));
}
diff --git a/core/src/main/java/feast/core/util/TypeConversion.java b/core/src/main/java/feast/core/util/TypeConversion.java
index fd582929c2..8b58eaec4c 100644
--- a/core/src/main/java/feast/core/util/TypeConversion.java
+++ b/core/src/main/java/feast/core/util/TypeConversion.java
@@ -16,12 +16,10 @@
*/
package feast.core.util;
-import com.google.common.base.Strings;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.lang.reflect.Type;
import java.util.*;
-import java.util.Map.Entry;
public class TypeConversion {
private static Gson gson = new Gson();
@@ -73,23 +71,6 @@ public static Map convertJsonStringToMap(String jsonString) {
* @return json string corresponding to given map
*/
public static String convertMapToJsonString(Map<String, String> map) {
- if (map.isEmpty()) {
- return "{}";
- }
return gson.toJson(map);
}
-
- /**
- * Convert a map of key value pairs to a array of java arguments in format --key=value
- *
- * @param map
- * @return array of string arguments
- */
- public static String[] convertMapToArgs(Map<String, String> map) {
- List<String> args = new ArrayList<>();
- for (Entry<String, String> arg : map.entrySet()) {
- args.add(Strings.lenientFormat("--%s=%s", arg.getKey(), arg.getValue()));
- }
- return args.toArray(new String[] {});
- }
}
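
The removed empty-map special case in convertMapToJsonString was redundant: Gson already serializes an empty map to "{}". A quick check, assuming Gson is on the classpath:

import com.google.gson.Gson;
import java.util.Map;

// Quick check: Gson emits "{}" for an empty map, so the hand-rolled branch was unnecessary.
public class GsonEmptyMapCheck {
  public static void main(String[] args) {
    Gson gson = new Gson();
    System.out.println(gson.toJson(Map.of())); // {}
    System.out.println(gson.toJson(Map.of("region", "asia-east1"))); // {"region":"asia-east1"}
  }
}
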
diff --git a/core/src/main/java/feast/core/validators/FeatureSetValidator.java b/core/src/main/java/feast/core/validators/FeatureSetValidator.java
index 213e3898d5..9cfd136b84 100644
--- a/core/src/main/java/feast/core/validators/FeatureSetValidator.java
+++ b/core/src/main/java/feast/core/validators/FeatureSetValidator.java
@@ -19,14 +19,15 @@
import static feast.core.validators.Matchers.checkValidCharacters;
import com.google.common.collect.Sets;
-import feast.core.FeatureSetProto.EntitySpec;
-import feast.core.FeatureSetProto.FeatureSet;
-import feast.core.FeatureSetProto.FeatureSpec;
+import feast.proto.core.FeatureSetProto.EntitySpec;
+import feast.proto.core.FeatureSetProto.FeatureSet;
+import feast.proto.core.FeatureSetProto.FeatureSpec;
import java.util.HashSet;
import java.util.List;
import java.util.stream.Collectors;
public class FeatureSetValidator {
+
public static void validateSpec(FeatureSet featureSet) {
if (featureSet.getSpec().getProject().isEmpty()) {
throw new IllegalArgumentException("Project name must be provided");
@@ -34,6 +35,9 @@ public static void validateSpec(FeatureSet featureSet) {
if (featureSet.getSpec().getName().isEmpty()) {
throw new IllegalArgumentException("Feature set name must be provided");
}
+ if (featureSet.getSpec().getLabelsMap().containsKey("")) {
+ throw new IllegalArgumentException("Feature set label keys must not be empty");
+ }
checkValidCharacters(featureSet.getSpec().getProject(), "project");
checkValidCharacters(featureSet.getSpec().getName(), "name");
@@ -44,6 +48,9 @@ public static void validateSpec(FeatureSet featureSet) {
}
for (FeatureSpec featureSpec : featureSet.getSpec().getFeaturesList()) {
checkValidCharacters(featureSpec.getName(), "features::name");
+ if (featureSpec.getLabelsMap().containsKey("")) {
+ throw new IllegalArgumentException("Feature label keys must not be empty");
+ }
}
}
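
The two new checks reject empty label keys on both the feature set and its features. The rule in isolation, on a plain map:

import java.util.Map;

// Sketch: the new label-key rule; an empty key is rejected wherever labels appear.
public class LabelKeyCheckSketch {

  static void checkLabels(Map<String, String> labels, String subject) {
    if (labels.containsKey("")) {
      throw new IllegalArgumentException(subject + " label keys must not be empty");
    }
  }

  public static void main(String[] args) {
    checkLabels(Map.of("team", "data-platform"), "Feature set"); // passes
    try {
      checkLabels(Map.of("", "oops"), "Feature");
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage()); // Feature label keys must not be empty
    }
  }
}
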
diff --git a/core/src/main/java/feast/core/validators/OneOfStringValidator.java b/core/src/main/java/feast/core/validators/OneOfStringValidator.java
new file mode 100644
index 0000000000..6b84e44b01
--- /dev/null
+++ b/core/src/main/java/feast/core/validators/OneOfStringValidator.java
@@ -0,0 +1,51 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.core.validators;
+
+import java.util.Arrays;
+import javax.validation.ConstraintValidator;
+import javax.validation.ConstraintValidatorContext;
+
+/** Validates whether a string value is found within a collection. */
+public class OneOfStringValidator implements ConstraintValidator<OneOfStrings, String> {
+
+ /** Values that are permitted for a specific instance of this validator */
+ String[] allowedValues;
+
+ /**
+ * Initialize the OneOfStringValidator with a collection of allowed String values.
+ *
+ * @param constraintAnnotation the OneOfStrings annotation providing the allowed values
+ */
+ @Override
+ public void initialize(OneOfStrings constraintAnnotation) {
+ allowedValues = constraintAnnotation.value();
+ }
+
+ /**
+ * Validates whether a string value is found within the collection defined in the annotation.
+ *
+ * @param value String value that should be validated
+ * @param context Provides contextual data and operation when applying a given constraint
+ * validator
+ * @return Boolean value indicating whether the string is found within the allowed values.
+ */
+ @Override
+ public boolean isValid(String value, ConstraintValidatorContext context) {
+ return Arrays.asList(allowedValues).contains(value);
+ }
+}
diff --git a/core/src/main/java/feast/core/validators/OneOfStrings.java b/core/src/main/java/feast/core/validators/OneOfStrings.java
new file mode 100644
index 0000000000..dba290438c
--- /dev/null
+++ b/core/src/main/java/feast/core/validators/OneOfStrings.java
@@ -0,0 +1,49 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.core.validators;
+
+import java.lang.annotation.*;
+import javax.validation.Constraint;
+import javax.validation.Payload;
+
+/**
+ * Annotation for String "one of" validation. Allows for the definition of a collection through an
+ * annotation. The collection is used to test values defined in the object.
+ */
+@Target({
+ ElementType.METHOD,
+ ElementType.FIELD,
+ ElementType.ANNOTATION_TYPE,
+ ElementType.CONSTRUCTOR,
+ ElementType.PARAMETER
+})
+@Retention(RetentionPolicy.RUNTIME)
+@Documented
+@Constraint(validatedBy = OneOfStringValidator.class)
+public @interface OneOfStrings {
+ /** @return Default error message that is returned if the incorrect value is set */
+ String message() default "Field value must be one of the following: {value}";
+
+ /** Allows for the specification of validation groups to which this constraint belongs. */
+ Class<?>[] groups() default {};
+
+ /** An attribute payload that can be used to assign custom payload objects to a constraint. */
+ Class<? extends Payload>[] payload() default {};
+
+ /** @return Default value that is returned if no allowed values are configured */
+ String[] value() default {};
+}
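
Together, the annotation and validator gate a string field against a fixed set. A usage sketch under the standard Bean Validation bootstrap; it assumes hibernate-validator is on the classpath and that OneOfStrings is imported from feast.core.validators, and the bean and its values are illustrative:

import javax.validation.Validation;
import javax.validation.Validator;

// Usage sketch: reject any value outside the declared set.
public class OneOfStringsUsageSketch {

  static class JobConfig {
    @OneOfStrings({"DirectRunner", "DataflowRunner"})
    String runnerType = "SparkRunner"; // not in the allowed set -> one violation
  }

  public static void main(String[] args) {
    Validator validator = Validation.buildDefaultValidatorFactory().getValidator();
    validator
        .validate(new JobConfig())
        .forEach(v -> System.out.println(v.getPropertyPath() + ": " + v.getMessage()));
  }
}
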
diff --git a/core/src/main/resources/application.yml b/core/src/main/resources/application.yml
index ee060fffc9..51395cf644 100644
--- a/core/src/main/resources/application.yml
+++ b/core/src/main/resources/application.yml
@@ -23,18 +23,39 @@ grpc:
enable-reflection: true
feast:
-# version: @project.version@
jobs:
- # Runner type for feature population jobs. Currently supported runner types are
- # DirectRunner and DataflowRunner.
- runner: DirectRunner
- # Key-value dict of job options to be passed to the population jobs.
- options: {}
- updates:
- # Job update polling interval in milliseconds: how often Feast checks if new jobs should be sent to the runner.
- pollingIntervalMillis: 60000
- # Timeout in seconds for each attempt to update or submit a new job to the runner.
- timeoutSeconds: 240
+ # Job update polling interval in milliseconds: how often Feast checks if new jobs should be sent to the runner.
+ polling_interval_milliseconds: 60000
+
+ # Timeout in seconds for each attempt to update or submit a new job to the runner.
+ job_update_timeout_seconds: 240
+
+ # Name of the active runner in "runners" that should be used. Only a single runner can be active at one time.
+ active_runner: direct
+
+ # List of runner configurations. Please see protos/feast/core/Runner.proto for more details
+ # Alternatively see the following for options https://api.docs.feast.dev/grpc/feast.core.pb.html#Runner
+ runners:
+ - name: direct
+ type: DirectRunner
+ options: {}
+
+ - name: dataflow
+ type: DataflowRunner
+ options:
+ project: my_gcp_project
+ region: asia-east1
+ zone: asia-east1-a
+ tempLocation: gs://bucket/tempLocation
+ network: default
+ subnetwork: regions/asia-east1/subnetworks/mysubnetwork
+ maxNumWorkers: 1
+ autoscalingAlgorithm: THROUGHPUT_BASED
+ usePublicIps: false
+ workerMachineType: n1-standard-1
+ deadLetterTableSpec: project_id:dataset_id.table_id
+
+ # Configuration options for metric collection for all ingestion jobs
metrics:
# Enable metrics pushing for all ingestion jobs.
enabled: false
@@ -49,9 +70,10 @@ feast:
# Feature stream type. Only kafka is supported.
type: kafka
# Feature stream options.
+ # See the following for options https://api.docs.feast.dev/grpc/feast.core.pb.html#KafkaSourceConfig
options:
topic: feast-features
- bootstrapServers: kafka:9092
+ bootstrapServers: localhost:9092
replicationFactor: 1
partitions: 1
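
The new config keys replace a single runner with a named list plus an active_runner pointer. A hedged sketch of resolving that pointer at startup; RunnerConfig is a hypothetical stand-in for the bound properties class:

import java.util.List;
import java.util.NoSuchElementException;

// Sketch: pick the runner whose name matches feast.jobs.active_runner.
public class ActiveRunnerLookupSketch {

  static final class RunnerConfig {
    final String name;
    final String type;

    RunnerConfig(String name, String type) {
      this.name = name;
      this.type = type;
    }
  }

  static RunnerConfig resolve(List<RunnerConfig> runners, String activeName) {
    return runners.stream()
        .filter(r -> r.name.equals(activeName))
        .findFirst()
        .orElseThrow(() -> new NoSuchElementException("No runner named: " + activeName));
  }

  public static void main(String[] args) {
    List<RunnerConfig> runners =
        List.of(
            new RunnerConfig("direct", "DirectRunner"),
            new RunnerConfig("dataflow", "DataflowRunner"));
    System.out.println(resolve(runners, "direct").type); // DirectRunner
  }
}
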
diff --git a/core/src/test/java/feast/core/http/HealthControllerTest.java b/core/src/test/java/feast/core/http/HealthControllerTest.java
deleted file mode 100644
index 2fcd622f34..0000000000
--- a/core/src/test/java/feast/core/http/HealthControllerTest.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- * Copyright 2018-2019 The Feast Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package feast.core.http;
-
-import static org.junit.Assert.assertEquals;
-import static org.mockito.Mockito.*;
-
-import java.sql.Connection;
-import java.sql.SQLException;
-import javax.sql.DataSource;
-import org.junit.Test;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.ResponseEntity;
-
-public class HealthControllerTest {
- @Test
- public void ping() {
- HealthController healthController = new HealthController(null);
- assertEquals(ResponseEntity.ok("pong"), healthController.ping());
- }
-
- @Test
- public void healthz() {
- assertEquals(ResponseEntity.ok("healthy"), mockHealthyController().healthz());
- assertEquals(
- ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
- .body("Unable to establish connection with DB"),
- mockUnhealthyControllerBecauseInvalidConn().healthz());
- assertEquals(
- ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body("mocked sqlexception"),
- mockUnhealthyControllerBecauseSQLException().healthz());
- }
-
- private HealthController mockHealthyController() {
- DataSource mockDataSource = mock(DataSource.class);
- Connection mockConnection = mock(Connection.class);
- try {
- when(mockConnection.isValid(any(int.class))).thenReturn(Boolean.TRUE);
- when(mockDataSource.getConnection()).thenReturn(mockConnection);
- } catch (Exception e) {
- e.printStackTrace();
- }
- return new HealthController(mockDataSource);
- }
-
- private HealthController mockUnhealthyControllerBecauseInvalidConn() {
- DataSource mockDataSource = mock(DataSource.class);
- Connection mockConnection = mock(Connection.class);
- try {
- when(mockConnection.isValid(any(int.class))).thenReturn(Boolean.FALSE);
- when(mockDataSource.getConnection()).thenReturn(mockConnection);
- } catch (Exception ignored) {
- }
- return new HealthController(mockDataSource);
- }
-
- private HealthController mockUnhealthyControllerBecauseSQLException() {
- DataSource mockDataSource = mock(DataSource.class);
- Connection mockConnection = mock(Connection.class);
- try {
- when(mockDataSource.getConnection()).thenThrow(new SQLException("mocked sqlexception"));
- } catch (SQLException ignored) {
- }
- return new HealthController(mockDataSource);
- }
-}
diff --git a/core/src/test/java/feast/core/job/JobUpdateTaskTest.java b/core/src/test/java/feast/core/job/JobUpdateTaskTest.java
index 2a1e80994a..d182673801 100644
--- a/core/src/test/java/feast/core/job/JobUpdateTaskTest.java
+++ b/core/src/test/java/feast/core/job/JobUpdateTaskTest.java
@@ -24,22 +24,24 @@
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
-import feast.core.FeatureSetProto;
-import feast.core.FeatureSetProto.FeatureSetMeta;
-import feast.core.FeatureSetProto.FeatureSetSpec;
-import feast.core.SourceProto;
-import feast.core.SourceProto.KafkaSourceConfig;
-import feast.core.SourceProto.SourceType;
-import feast.core.StoreProto;
-import feast.core.StoreProto.Store.RedisConfig;
-import feast.core.StoreProto.Store.StoreType;
-import feast.core.StoreProto.Store.Subscription;
import feast.core.model.FeatureSet;
import feast.core.model.Job;
import feast.core.model.JobStatus;
import feast.core.model.Source;
import feast.core.model.Store;
+import feast.proto.core.FeatureSetProto;
+import feast.proto.core.FeatureSetProto.FeatureSetMeta;
+import feast.proto.core.FeatureSetProto.FeatureSetSpec;
+import feast.proto.core.SourceProto;
+import feast.proto.core.SourceProto.KafkaSourceConfig;
+import feast.proto.core.SourceProto.SourceType;
+import feast.proto.core.StoreProto;
+import feast.proto.core.StoreProto.Store.RedisConfig;
+import feast.proto.core.StoreProto.Store.StoreType;
+import feast.proto.core.StoreProto.Store.Subscription;
import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
import java.util.Optional;
import org.hamcrest.core.IsNull;
import org.junit.Before;
@@ -47,95 +49,69 @@
import org.mockito.Mock;
public class JobUpdateTaskTest {
+ private static final Runner RUNNER = Runner.DATAFLOW;
+
+ private static final FeatureSetProto.FeatureSet.Builder fsBuilder =
+ FeatureSetProto.FeatureSet.newBuilder().setMeta(FeatureSetMeta.newBuilder());
+ private static final FeatureSetSpec.Builder specBuilder = FeatureSetSpec.newBuilder();
@Mock private JobManager jobManager;
- private StoreProto.Store store;
- private SourceProto.Source source;
+ private Store store;
+ private Source source;
+ private FeatureSet featureSet1;
@Before
public void setUp() {
initMocks(this);
+ when(jobManager.getRunnerType()).thenReturn(RUNNER);
+
store =
- StoreProto.Store.newBuilder()
- .setName("test")
- .setType(StoreType.REDIS)
- .setRedisConfig(RedisConfig.newBuilder().build())
- .addSubscriptions(
- Subscription.newBuilder().setProject("*").setName("*").setVersion("*").build())
- .build();
+ Store.fromProto(
+ StoreProto.Store.newBuilder()
+ .setName("test")
+ .setType(StoreType.REDIS)
+ .setRedisConfig(RedisConfig.newBuilder().build())
+ .addSubscriptions(Subscription.newBuilder().setProject("*").setName("*").build())
+ .build());
source =
- SourceProto.Source.newBuilder()
- .setType(SourceType.KAFKA)
- .setKafkaSourceConfig(
- KafkaSourceConfig.newBuilder()
- .setTopic("topic")
- .setBootstrapServers("servers:9092")
- .build())
- .build();
+ Source.fromProto(
+ SourceProto.Source.newBuilder()
+ .setType(SourceType.KAFKA)
+ .setKafkaSourceConfig(
+ KafkaSourceConfig.newBuilder()
+ .setTopic("topic")
+ .setBootstrapServers("servers:9092")
+ .build())
+ .build());
+
+ featureSet1 =
+ FeatureSet.fromProto(fsBuilder.setSpec(specBuilder.setName("featureSet1")).build());
+ featureSet1.setSource(source);
+ }
+
+ Job makeJob(String extId, List<FeatureSet> featureSets, JobStatus status) {
+ return new Job("job", extId, RUNNER, source, store, featureSets, status);
+ }
+
+ JobUpdateTask makeTask(List<FeatureSet> featureSets, Optional<Job> currentJob) {
+ return new JobUpdateTask(featureSets, source, store, currentJob, jobManager, 100L);
}
@Test
public void shouldUpdateJobIfPresent() {
- FeatureSetProto.FeatureSet featureSet1 =
- FeatureSetProto.FeatureSet.newBuilder()
- .setSpec(
- FeatureSetSpec.newBuilder()
- .setSource(source)
- .setProject("project1")
- .setName("featureSet1")
- .setVersion(1))
- .setMeta(FeatureSetMeta.newBuilder())
- .build();
- FeatureSetProto.FeatureSet featureSet2 =
- FeatureSetProto.FeatureSet.newBuilder()
- .setSpec(
- FeatureSetSpec.newBuilder()
- .setSource(source)
- .setProject("project1")
- .setName("featureSet2")
- .setVersion(1))
- .setMeta(FeatureSetMeta.newBuilder())
- .build();
- Job originalJob =
- new Job(
- "job",
- "old_ext",
- Runner.DATAFLOW.name(),
- feast.core.model.Source.fromProto(source),
- feast.core.model.Store.fromProto(store),
- Arrays.asList(FeatureSet.fromProto(featureSet1)),
- JobStatus.RUNNING);
- JobUpdateTask jobUpdateTask =
- new JobUpdateTask(
- Arrays.asList(featureSet1, featureSet2),
- source,
- store,
- Optional.of(originalJob),
- jobManager,
- 100L);
- Job submittedJob =
- new Job(
- "job",
- "old_ext",
- Runner.DATAFLOW.name(),
- feast.core.model.Source.fromProto(source),
- feast.core.model.Store.fromProto(store),
- Arrays.asList(FeatureSet.fromProto(featureSet1), FeatureSet.fromProto(featureSet2)),
- JobStatus.RUNNING);
+ FeatureSet featureSet2 =
+ FeatureSet.fromProto(fsBuilder.setSpec(specBuilder.setName("featureSet2")).build());
+ List<FeatureSet> existingFeatureSetsPopulatedByJob = Collections.singletonList(featureSet1);
+ List<FeatureSet> newFeatureSetsPopulatedByJob = Arrays.asList(featureSet1, featureSet2);
+
+ Job originalJob = makeJob("old_ext", existingFeatureSetsPopulatedByJob, JobStatus.RUNNING);
+ JobUpdateTask jobUpdateTask = makeTask(newFeatureSetsPopulatedByJob, Optional.of(originalJob));
+ Job submittedJob = makeJob("old_ext", newFeatureSetsPopulatedByJob, JobStatus.RUNNING);
- Job expected =
- new Job(
- "job",
- "new_ext",
- Runner.DATAFLOW.name(),
- Source.fromProto(source),
- Store.fromProto(store),
- Arrays.asList(FeatureSet.fromProto(featureSet1), FeatureSet.fromProto(featureSet2)),
- JobStatus.PENDING);
+ Job expected = makeJob("new_ext", newFeatureSetsPopulatedByJob, JobStatus.PENDING);
when(jobManager.updateJob(submittedJob)).thenReturn(expected);
- when(jobManager.getRunnerType()).thenReturn(Runner.DATAFLOW);
Job actual = jobUpdateTask.call();
assertThat(actual, equalTo(expected));
@@ -143,43 +119,13 @@ public void shouldUpdateJobIfPresent() {
@Test
public void shouldCreateJobIfNotPresent() {
- FeatureSetProto.FeatureSet featureSet1 =
- FeatureSetProto.FeatureSet.newBuilder()
- .setSpec(
- FeatureSetSpec.newBuilder()
- .setSource(source)
- .setProject("project1")
- .setName("featureSet1")
- .setVersion(1))
- .setMeta(FeatureSetMeta.newBuilder())
- .build();
- JobUpdateTask jobUpdateTask =
- spy(
- new JobUpdateTask(
- Arrays.asList(featureSet1), source, store, Optional.empty(), jobManager, 100L));
+ var featureSets = Collections.singletonList(featureSet1);
+ JobUpdateTask jobUpdateTask = spy(makeTask(featureSets, Optional.empty()));
doReturn("job").when(jobUpdateTask).createJobId("KAFKA/servers:9092/topic", "test");
- Job expectedInput =
- new Job(
- "job",
- "",
- Runner.DATAFLOW.name(),
- feast.core.model.Source.fromProto(source),
- feast.core.model.Store.fromProto(store),
- Arrays.asList(FeatureSet.fromProto(featureSet1)),
- JobStatus.PENDING);
+ Job expectedInput = makeJob("", featureSets, JobStatus.PENDING);
+ Job expected = makeJob("ext", featureSets, JobStatus.PENDING);
- Job expected =
- new Job(
- "job",
- "ext",
- Runner.DATAFLOW.name(),
- feast.core.model.Source.fromProto(source),
- feast.core.model.Store.fromProto(store),
- Arrays.asList(FeatureSet.fromProto(featureSet1)),
- JobStatus.RUNNING);
-
- when(jobManager.getRunnerType()).thenReturn(Runner.DATAFLOW);
when(jobManager.startJob(expectedInput)).thenReturn(expected);
Job actual = jobUpdateTask.call();
@@ -188,83 +134,25 @@ public void shouldCreateJobIfNotPresent() {
@Test
public void shouldUpdateJobStatusIfNotCreateOrUpdate() {
- FeatureSetProto.FeatureSet featureSet1 =
- FeatureSetProto.FeatureSet.newBuilder()
- .setSpec(
- FeatureSetSpec.newBuilder()
- .setSource(source)
- .setProject("project1")
- .setName("featureSet1")
- .setVersion(1))
- .setMeta(FeatureSetMeta.newBuilder())
- .build();
- Job originalJob =
- new Job(
- "job",
- "ext",
- Runner.DATAFLOW.name(),
- feast.core.model.Source.fromProto(source),
- feast.core.model.Store.fromProto(store),
- Arrays.asList(FeatureSet.fromProto(featureSet1)),
- JobStatus.RUNNING);
- JobUpdateTask jobUpdateTask =
- new JobUpdateTask(
- Arrays.asList(featureSet1), source, store, Optional.of(originalJob), jobManager, 100L);
+ var featureSets = Collections.singletonList(featureSet1);
+ Job originalJob = makeJob("ext", featureSets, JobStatus.RUNNING);
+ JobUpdateTask jobUpdateTask = makeTask(featureSets, Optional.of(originalJob));
when(jobManager.getJobStatus(originalJob)).thenReturn(JobStatus.ABORTING);
- Job expected =
- new Job(
- "job",
- "ext",
- Runner.DATAFLOW.name(),
- Source.fromProto(source),
- Store.fromProto(store),
- Arrays.asList(FeatureSet.fromProto(featureSet1)),
- JobStatus.ABORTING);
- Job actual = jobUpdateTask.call();
+ Job updated = jobUpdateTask.call();
- assertThat(actual, equalTo(expected));
+ assertThat(updated.getStatus(), equalTo(JobStatus.ABORTING));
}
@Test
public void shouldReturnJobWithErrorStatusIfFailedToSubmit() {
- FeatureSetProto.FeatureSet featureSet1 =
- FeatureSetProto.FeatureSet.newBuilder()
- .setSpec(
- FeatureSetSpec.newBuilder()
- .setSource(source)
- .setProject("project1")
- .setName("featureSet1")
- .setVersion(1))
- .setMeta(FeatureSetMeta.newBuilder())
- .build();
- JobUpdateTask jobUpdateTask =
- spy(
- new JobUpdateTask(
- Arrays.asList(featureSet1), source, store, Optional.empty(), jobManager, 100L));
+ var featureSets = Collections.singletonList(featureSet1);
+ JobUpdateTask jobUpdateTask = spy(makeTask(featureSets, Optional.empty()));
doReturn("job").when(jobUpdateTask).createJobId("KAFKA/servers:9092/topic", "test");
- Job expectedInput =
- new Job(
- "job",
- "",
- Runner.DATAFLOW.name(),
- feast.core.model.Source.fromProto(source),
- feast.core.model.Store.fromProto(store),
- Arrays.asList(FeatureSet.fromProto(featureSet1)),
- JobStatus.PENDING);
-
- Job expected =
- new Job(
- "job",
- "",
- Runner.DATAFLOW.name(),
- feast.core.model.Source.fromProto(source),
- feast.core.model.Store.fromProto(store),
- Arrays.asList(FeatureSet.fromProto(featureSet1)),
- JobStatus.ERROR);
+ Job expectedInput = makeJob("", featureSets, JobStatus.PENDING);
+ Job expected = makeJob("", featureSets, JobStatus.ERROR);
- when(jobManager.getRunnerType()).thenReturn(Runner.DATAFLOW);
when(jobManager.startJob(expectedInput))
.thenThrow(new RuntimeException("Something went wrong"));
@@ -274,21 +162,13 @@ public void shouldReturnJobWithErrorStatusIfFailedToSubmit() {
@Test
public void shouldTimeout() {
- FeatureSetProto.FeatureSet featureSet1 =
- FeatureSetProto.FeatureSet.newBuilder()
- .setSpec(
- FeatureSetSpec.newBuilder()
- .setSource(source)
- .setProject("project1")
- .setName("featureSet1")
- .setVersion(1))
- .setMeta(FeatureSetMeta.newBuilder())
- .build();
-
+ var featureSets = Collections.singletonList(featureSet1);
+ var timeoutSeconds = 0L;
JobUpdateTask jobUpdateTask =
spy(
new JobUpdateTask(
- Arrays.asList(featureSet1), source, store, Optional.empty(), jobManager, 0L));
+ featureSets, source, store, Optional.empty(), jobManager, timeoutSeconds));
+
Job actual = jobUpdateTask.call();
assertThat(actual, is(IsNull.nullValue()));
}
diff --git a/core/src/test/java/feast/core/job/RunnerTest.java b/core/src/test/java/feast/core/job/RunnerTest.java
new file mode 100644
index 0000000000..ce1700acbe
--- /dev/null
+++ b/core/src/test/java/feast/core/job/RunnerTest.java
@@ -0,0 +1,42 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.core.job;
+
+import static org.hamcrest.Matchers.is;
+import static org.junit.Assert.assertThat;
+
+import java.util.NoSuchElementException;
+import org.junit.Test;
+
+public class RunnerTest {
+
+ @Test
+ public void toStringReturnsHumanReadableName() {
+ assertThat(Runner.DATAFLOW.toString(), is("DataflowRunner"));
+ }
+
+ @Test
+ public void fromStringLoadsValueFromHumanReadableName() {
+ var humanName = Runner.DATAFLOW.toString();
+ assertThat(Runner.fromString(humanName), is(Runner.DATAFLOW));
+ }
+
+ @Test(expected = NoSuchElementException.class)
+ public void fromStringThrowsNoSuchElementExceptionForUnknownValue() {
+ Runner.fromString("this is not a valid Runner");
+ }
+}
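
RunnerTest pins down the contract: toString() yields the human-readable runner name and fromString() inverts it, throwing NoSuchElementException for unknown input. A sketch of an enum meeting that contract, with the names assumed from the test:

import java.util.Arrays;
import java.util.NoSuchElementException;

// Sketch: enum whose toString/fromString pair satisfies RunnerTest.
public enum RunnerSketch {
  DIRECT("DirectRunner"),
  DATAFLOW("DataflowRunner");

  private final String humanName;

  RunnerSketch(String humanName) {
    this.humanName = humanName;
  }

  @Override
  public String toString() {
    return humanName;
  }

  public static RunnerSketch fromString(String name) {
    return Arrays.stream(values())
        .filter(r -> r.toString().equals(name))
        .findFirst()
        .orElseThrow(() -> new NoSuchElementException("Unknown runner: " + name));
  }

  public static void main(String[] args) {
    System.out.println(fromString("DataflowRunner")); // DataflowRunner
  }
}
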
diff --git a/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java b/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java
index 2d562d38df..ea9caa91ff 100644
--- a/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java
+++ b/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java
@@ -22,21 +22,13 @@
import static org.mockito.Mockito.*;
import static org.mockito.MockitoAnnotations.initMocks;
+import com.google.api.client.auth.oauth2.Credential;
+import com.google.api.client.googleapis.testing.auth.oauth2.MockGoogleCredential;
import com.google.api.services.dataflow.Dataflow;
import com.google.common.collect.Lists;
import com.google.protobuf.Duration;
import com.google.protobuf.util.JsonFormat;
import com.google.protobuf.util.JsonFormat.Printer;
-import feast.core.FeatureSetProto;
-import feast.core.FeatureSetProto.FeatureSetMeta;
-import feast.core.FeatureSetProto.FeatureSetSpec;
-import feast.core.SourceProto;
-import feast.core.SourceProto.KafkaSourceConfig;
-import feast.core.SourceProto.SourceType;
-import feast.core.StoreProto;
-import feast.core.StoreProto.Store.RedisConfig;
-import feast.core.StoreProto.Store.StoreType;
-import feast.core.StoreProto.Store.Subscription;
import feast.core.config.FeastProperties.MetricsProperties;
import feast.core.exception.JobExecutionException;
import feast.core.job.Runner;
@@ -45,11 +37,21 @@
import feast.ingestion.options.BZip2Compressor;
import feast.ingestion.options.ImportOptions;
import feast.ingestion.options.OptionCompressor;
+import feast.proto.core.FeatureSetProto;
+import feast.proto.core.FeatureSetProto.FeatureSetMeta;
+import feast.proto.core.FeatureSetProto.FeatureSetSpec;
+import feast.proto.core.RunnerProto.DataflowRunnerConfigOptions;
+import feast.proto.core.RunnerProto.DataflowRunnerConfigOptions.Builder;
+import feast.proto.core.SourceProto;
+import feast.proto.core.SourceProto.KafkaSourceConfig;
+import feast.proto.core.SourceProto.SourceType;
+import feast.proto.core.StoreProto;
+import feast.proto.core.StoreProto.Store.RedisConfig;
+import feast.proto.core.StoreProto.Store.StoreType;
+import feast.proto.core.StoreProto.Store.Subscription;
import java.io.IOException;
import java.util.Collections;
-import java.util.HashMap;
import java.util.List;
-import java.util.Map;
import org.apache.beam.runners.dataflow.DataflowPipelineJob;
import org.apache.beam.runners.dataflow.DataflowRunner;
import org.apache.beam.sdk.PipelineResult.State;
@@ -68,18 +70,31 @@ public class DataflowJobManagerTest {
@Mock private Dataflow dataflow;
- private Map<String, String> defaults;
+ private DataflowRunnerConfigOptions defaults;
private DataflowJobManager dfJobManager;
@Before
public void setUp() {
initMocks(this);
- defaults = new HashMap<>();
- defaults.put("project", "project");
- defaults.put("region", "region");
+ Builder optionsBuilder = DataflowRunnerConfigOptions.newBuilder();
+ optionsBuilder.setProject("project");
+ optionsBuilder.setRegion("region");
+ optionsBuilder.setZone("zone");
+ optionsBuilder.setTempLocation("tempLocation");
+ optionsBuilder.setNetwork("network");
+ optionsBuilder.setSubnetwork("subnetwork");
+ optionsBuilder.putLabels("orchestrator", "feast");
+ defaults = optionsBuilder.build();
MetricsProperties metricsProperties = new MetricsProperties();
metricsProperties.setEnabled(false);
- dfJobManager = new DataflowJobManager(dataflow, defaults, metricsProperties);
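+ // A MockGoogleCredential stands in for real application-default credentials in tests.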
+ Credential credential = null;
+ try {
+ credential = MockGoogleCredential.getApplicationDefault();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+
+ dfJobManager = new DataflowJobManager(defaults, metricsProperties, credential);
dfJobManager = spy(dfJobManager);
}
@@ -90,8 +105,7 @@ public void shouldStartJobWithCorrectPipelineOptions() throws IOException {
.setName("SERVING")
.setType(StoreType.REDIS)
.setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379).build())
- .addSubscriptions(
- Subscription.newBuilder().setProject("*").setName("*").setVersion("*").build())
+ .addSubscriptions(Subscription.newBuilder().setProject("*").setName("*").build())
.build();
SourceProto.Source source =
@@ -111,7 +125,6 @@ public void shouldStartJobWithCorrectPipelineOptions() throws IOException {
FeatureSetSpec.newBuilder()
.setSource(source)
.setName("featureSet")
- .setVersion(1)
.setMaxAge(Duration.newBuilder().build()))
.build();
@@ -126,6 +139,7 @@ public void shouldStartJobWithCorrectPipelineOptions() throws IOException {
expectedPipelineOptions.setRegion("region");
expectedPipelineOptions.setUpdate(false);
expectedPipelineOptions.setAppName("DataflowJobManager");
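+ // Labels set on the runner defaults should propagate onto the pipeline options.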
+ expectedPipelineOptions.setLabels(defaults.getLabelsMap());
expectedPipelineOptions.setJobName(jobName);
expectedPipelineOptions.setStoreJson(Lists.newArrayList(printer.print(store)));
@@ -145,7 +159,7 @@ public void shouldStartJobWithCorrectPipelineOptions() throws IOException {
new Job(
jobName,
"",
- Runner.DATAFLOW.name(),
+ Runner.DATAFLOW,
Source.fromProto(source),
Store.fromProto(store),
Lists.newArrayList(FeatureSet.fromProto(featureSet)),
@@ -159,7 +173,7 @@ public void shouldStartJobWithCorrectPipelineOptions() throws IOException {
actualPipelineOptions.getOptionsId()); // avoid comparing this value
// We only check that we are calling getFilesToStage() manually, because the automatic approach
- // throws an error: https://github.com/gojek/feast/pull/291 i.e. do not check for the actual
+ // throws an error: https://github.com/feast-dev/feast/pull/291 i.e. do not check for the actual
// files that are staged
assertThat(
"filesToStage in pipelineOptions should not be null, job manager should set it.",
@@ -207,12 +221,7 @@ public void shouldThrowExceptionWhenJobStateTerminal() throws IOException {
FeatureSetProto.FeatureSet featureSet =
FeatureSetProto.FeatureSet.newBuilder()
- .setSpec(
- FeatureSetSpec.newBuilder()
- .setName("featureSet")
- .setVersion(1)
- .setSource(source)
- .build())
+ .setSpec(FeatureSetSpec.newBuilder().setName("featureSet").setSource(source).build())
.build();
dfJobManager = Mockito.spy(dfJobManager);
@@ -226,7 +235,7 @@ public void shouldThrowExceptionWhenJobStateTerminal() throws IOException {
new Job(
"job",
"",
- Runner.DATAFLOW.name(),
+ Runner.DATAFLOW,
Source.fromProto(source),
Store.fromProto(store),
Lists.newArrayList(FeatureSet.fromProto(featureSet)),
diff --git a/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java b/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java
index 76530d9f40..0128f5aa0b 100644
--- a/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java
+++ b/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java
@@ -29,15 +29,6 @@
import com.google.protobuf.Duration;
import com.google.protobuf.util.JsonFormat;
import com.google.protobuf.util.JsonFormat.Printer;
-import feast.core.FeatureSetProto;
-import feast.core.FeatureSetProto.FeatureSetSpec;
-import feast.core.SourceProto;
-import feast.core.SourceProto.KafkaSourceConfig;
-import feast.core.SourceProto.SourceType;
-import feast.core.StoreProto;
-import feast.core.StoreProto.Store.RedisConfig;
-import feast.core.StoreProto.Store.StoreType;
-import feast.core.StoreProto.Store.Subscription;
import feast.core.config.FeastProperties.MetricsProperties;
import feast.core.job.Runner;
import feast.core.job.option.FeatureSetJsonByteConverter;
@@ -49,11 +40,19 @@
import feast.ingestion.options.BZip2Compressor;
import feast.ingestion.options.ImportOptions;
import feast.ingestion.options.OptionCompressor;
+import feast.proto.core.FeatureSetProto;
+import feast.proto.core.FeatureSetProto.FeatureSetSpec;
+import feast.proto.core.RunnerProto.DirectRunnerConfigOptions;
+import feast.proto.core.SourceProto;
+import feast.proto.core.SourceProto.KafkaSourceConfig;
+import feast.proto.core.SourceProto.SourceType;
+import feast.proto.core.StoreProto;
+import feast.proto.core.StoreProto.Store.RedisConfig;
+import feast.proto.core.StoreProto.Store.StoreType;
+import feast.proto.core.StoreProto.Store.Subscription;
import java.io.IOException;
import java.util.Collections;
-import java.util.HashMap;
import java.util.List;
-import java.util.Map;
import org.apache.beam.runners.direct.DirectRunner;
import org.apache.beam.sdk.PipelineResult;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
@@ -71,12 +70,12 @@ public class DirectRunnerJobManagerTest {
@Mock private DirectJobRegistry directJobRegistry;
private DirectRunnerJobManager drJobManager;
- private Map<String, String> defaults;
+ private DirectRunnerConfigOptions defaults;
@Before
public void setUp() {
initMocks(this);
- defaults = new HashMap<>();
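+ // Runner defaults are now a typed DirectRunnerConfigOptions proto instead of an untyped map.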
+ defaults = DirectRunnerConfigOptions.newBuilder().setTargetParallelism(1).build();
MetricsProperties metricsProperties = new MetricsProperties();
metricsProperties.setEnabled(false);
@@ -91,8 +90,7 @@ public void shouldStartDirectJobAndRegisterPipelineResult() throws IOException {
.setName("SERVING")
.setType(StoreType.REDIS)
.setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379).build())
- .addSubscriptions(
- Subscription.newBuilder().setProject("*").setName("*").setVersion("*").build())
+ .addSubscriptions(Subscription.newBuilder().setProject("*").setName("*").build())
.build();
SourceProto.Source source =
@@ -110,7 +108,6 @@ public void shouldStartDirectJobAndRegisterPipelineResult() throws IOException {
.setSpec(
FeatureSetSpec.newBuilder()
.setName("featureSet")
- .setVersion(1)
.setMaxAge(Duration.newBuilder())
.setSource(source)
.build())
@@ -118,12 +115,14 @@ public void shouldStartDirectJobAndRegisterPipelineResult() throws IOException {
Printer printer = JsonFormat.printer();
+ String expectedJobId = "feast-job-0";
ImportOptions expectedPipelineOptions =
PipelineOptionsFactory.fromArgs("").as(ImportOptions.class);
+ expectedPipelineOptions.setJobName(expectedJobId);
expectedPipelineOptions.setAppName("DirectRunnerJobManager");
expectedPipelineOptions.setRunner(DirectRunner.class);
expectedPipelineOptions.setBlockOnRun(false);
- expectedPipelineOptions.setProject("");
+ expectedPipelineOptions.setTargetParallelism(1);
expectedPipelineOptions.setStoreJson(Lists.newArrayList(printer.print(store)));
expectedPipelineOptions.setProject("");
@@ -132,7 +131,6 @@ public void shouldStartDirectJobAndRegisterPipelineResult() throws IOException {
expectedPipelineOptions.setFeatureSetJson(
featureSetJsonCompressor.compress(Collections.singletonList(featureSet)));
- String expectedJobId = "feast-job-0";
ArgumentCaptor<ImportOptions> pipelineOptionsCaptor =
ArgumentCaptor.forClass(ImportOptions.class);
ArgumentCaptor<DirectJob> directJobCaptor = ArgumentCaptor.forClass(DirectJob.class);
@@ -144,7 +142,7 @@ public void shouldStartDirectJobAndRegisterPipelineResult() throws IOException {
new Job(
expectedJobId,
"",
- Runner.DIRECT.name(),
+ Runner.DIRECT,
Source.fromProto(source),
Store.fromProto(store),
Lists.newArrayList(FeatureSet.fromProto(featureSet)),
diff --git a/core/src/test/java/feast/core/job/option/FeatureSetJsonByteConverterTest.java b/core/src/test/java/feast/core/job/option/FeatureSetJsonByteConverterTest.java
index 2dfeef1d96..a12452b593 100644
--- a/core/src/test/java/feast/core/job/option/FeatureSetJsonByteConverterTest.java
+++ b/core/src/test/java/feast/core/job/option/FeatureSetJsonByteConverterTest.java
@@ -19,9 +19,9 @@
import static org.junit.Assert.*;
import com.google.protobuf.InvalidProtocolBufferException;
-import feast.core.FeatureSetProto;
-import feast.core.SourceProto;
-import feast.types.ValueProto;
+import feast.proto.core.FeatureSetProto;
+import feast.proto.core.SourceProto;
+import feast.proto.types.ValueProto;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
@@ -29,7 +29,7 @@
public class FeatureSetJsonByteConverterTest {
- private FeatureSetProto.FeatureSet newFeatureSet(Integer version, Integer numberOfFeatures) {
+ private FeatureSetProto.FeatureSet newFeatureSet(Integer numberOfFeatures) {
List<FeatureSetProto.FeatureSpec> features =
IntStream.range(1, numberOfFeatures + 1)
.mapToObj(
@@ -51,7 +51,6 @@ private FeatureSetProto.FeatureSet newFeatureSet(Integer version, Integer number
.setBootstrapServers("somebrokers:9092")
.setTopic("sometopic")))
.addAllFeatures(features)
- .setVersion(version)
.addEntities(
FeatureSetProto.EntitySpec.newBuilder()
.setName("entity")
@@ -65,12 +64,11 @@ public void shouldConvertFeatureSetsAsJsonStringBytes() throws InvalidProtocolBu
int nrOfFeatures = 1;
List<FeatureSetProto.FeatureSet> featureSets =
IntStream.range(1, nrOfFeatureSet + 1)
- .mapToObj(i -> newFeatureSet(i, nrOfFeatures))
+ .mapToObj(i -> newFeatureSet(nrOfFeatures))
.collect(Collectors.toList());
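+ // Feature set versions were removed, so the expected JSON below carries no "version" field.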
String expectedOutputString =
- "{\"version\":1,"
- + "\"entities\":[{\"name\":\"entity\",\"valueType\":2}],"
+ "{\"entities\":[{\"name\":\"entity\",\"valueType\":2}],"
+ "\"features\":[{\"name\":\"feature1\",\"valueType\":6}],"
+ "\"source\":{"
+ "\"type\":1,"
diff --git a/core/src/test/java/feast/core/model/FeatureSetTest.java b/core/src/test/java/feast/core/model/FeatureSetTest.java
new file mode 100644
index 0000000000..270dc3f3bc
--- /dev/null
+++ b/core/src/test/java/feast/core/model/FeatureSetTest.java
@@ -0,0 +1,205 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.core.model;
+
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.junit.Assert.assertThat;
+
+import com.google.protobuf.Duration;
+import com.google.protobuf.InvalidProtocolBufferException;
+import feast.proto.core.FeatureSetProto;
+import feast.proto.core.FeatureSetProto.EntitySpec;
+import feast.proto.core.FeatureSetProto.FeatureSetSpec;
+import feast.proto.core.FeatureSetProto.FeatureSetStatus;
+import feast.proto.core.FeatureSetProto.FeatureSpec;
+import feast.proto.core.SourceProto;
+import feast.proto.core.SourceProto.KafkaSourceConfig;
+import feast.proto.core.SourceProto.SourceType;
+import feast.proto.types.ValueProto.ValueType.Enum;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.tensorflow.metadata.v0.IntDomain;
+
+public class FeatureSetTest {
+ @Rule public final ExpectedException expectedException = ExpectedException.none();
+
+ private FeatureSetProto.FeatureSet oldFeatureSetProto;
+
+ @Before
+ public void setUp() {
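+ // Build a baseline "old" feature set proto that each test mutates into an updated variant.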
+ SourceProto.Source oldSource =
+ SourceProto.Source.newBuilder()
+ .setType(SourceType.KAFKA)
+ .setKafkaSourceConfig(
+ KafkaSourceConfig.newBuilder()
+ .setBootstrapServers("kafka:9092")
+ .setTopic("mytopic"))
+ .build();
+
+ oldFeatureSetProto =
+ FeatureSetProto.FeatureSet.newBuilder()
+ .setSpec(
+ FeatureSetSpec.newBuilder()
+ .setName("featureSet")
+ .setProject("project")
+ .setMaxAge(Duration.newBuilder().setSeconds(100))
+ .setSource(oldSource)
+ .addFeatures(
+ FeatureSpec.newBuilder().setName("feature1").setValueType(Enum.INT64))
+ .addFeatures(
+ FeatureSpec.newBuilder().setName("feature2").setValueType(Enum.STRING))
+ .addEntities(
+ EntitySpec.newBuilder().setName("entity").setValueType(Enum.STRING))
+ .build())
+ .build();
+ }
+
+ @Test
+ public void shouldUpdateFromProto() throws InvalidProtocolBufferException {
+ SourceProto.Source newSource =
+ SourceProto.Source.newBuilder()
+ .setType(SourceType.KAFKA)
+ .setKafkaSourceConfig(
+ KafkaSourceConfig.newBuilder()
+ .setBootstrapServers("kafka:9092")
+ .setTopic("mytopic-changed"))
+ .build();
+
+ FeatureSetProto.FeatureSet newFeatureSetProto =
+ FeatureSetProto.FeatureSet.newBuilder()
+ .setSpec(
+ FeatureSetSpec.newBuilder()
+ .setName("featureSet")
+ .setProject("project")
+ .setMaxAge(Duration.newBuilder().setSeconds(101))
+ .setSource(newSource)
+ .addFeatures(
+ FeatureSpec.newBuilder()
+ .setName("feature1")
+ .setValueType(Enum.INT64)
+ .setIntDomain(IntDomain.newBuilder().setMax(10).setMin(0)))
+ .addFeatures(
+ FeatureSpec.newBuilder().setName("feature3").setValueType(Enum.STRING))
+ .addEntities(
+ EntitySpec.newBuilder().setName("entity").setValueType(Enum.STRING))
+ .build())
+ .build();
+
+ FeatureSet actual = FeatureSet.fromProto(oldFeatureSetProto);
+ actual.updateFromProto(newFeatureSetProto);
+
+ FeatureSet expected = FeatureSet.fromProto(newFeatureSetProto);
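+ // "feature2" is absent from the new spec, so the update should archive it rather than drop it.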
+ Feature archivedFeature =
+ Feature.fromProto(
+ FeatureSpec.newBuilder().setName("feature2").setValueType(Enum.STRING).build());
+ archivedFeature.setArchived(true);
+ expected.addFeature(archivedFeature);
+ assertThat(actual, equalTo(expected));
+ }
+
+ @Test
+ public void shouldNotUpdateIfNoChange() throws InvalidProtocolBufferException {
+ FeatureSet actual = FeatureSet.fromProto(oldFeatureSetProto);
+ actual.setStatus(FeatureSetStatus.STATUS_READY);
+ actual.updateFromProto(oldFeatureSetProto);
+
+ FeatureSet expected = FeatureSet.fromProto(oldFeatureSetProto);
+ expected.setStatus(FeatureSetStatus.STATUS_READY);
+
+ assertThat(actual, equalTo(expected));
+ }
+
+ @Test
+ public void shouldThrowExceptionIfUpdateWithEntitiesChanged()
+ throws InvalidProtocolBufferException {
+ SourceProto.Source newSource =
+ SourceProto.Source.newBuilder()
+ .setType(SourceType.KAFKA)
+ .setKafkaSourceConfig(
+ KafkaSourceConfig.newBuilder()
+ .setBootstrapServers("kafka:9092")
+ .setTopic("mytopic-changed"))
+ .build();
+
+ FeatureSetProto.FeatureSet newFeatureSetProto =
+ FeatureSetProto.FeatureSet.newBuilder()
+ .setSpec(
+ FeatureSetSpec.newBuilder()
+ .setName("featureSet")
+ .setProject("project")
+ .setMaxAge(Duration.newBuilder().setSeconds(101))
+ .setSource(newSource)
+ .addFeatures(
+ FeatureSpec.newBuilder()
+ .setName("feature1")
+ .setValueType(Enum.INT64)
+ .setIntDomain(IntDomain.newBuilder().setMax(10).setMin(0)))
+ .addFeatures(
+ FeatureSpec.newBuilder().setName("feature3").setValueType(Enum.STRING))
+ .addEntities(EntitySpec.newBuilder().setName("entity").setValueType(Enum.FLOAT))
+ .build())
+ .build();
+
+ expectedException.expect(IllegalArgumentException.class);
+ expectedException.expectMessage(containsString("does not match existing set of entities"));
+ FeatureSet existingFeatureSet = FeatureSet.fromProto(oldFeatureSetProto);
+ existingFeatureSet.updateFromProto(newFeatureSetProto);
+ }
+
+ @Test
+ public void shouldThrowExceptionIfUpdateWithFeatureTypesChanged()
+ throws InvalidProtocolBufferException {
+ SourceProto.Source newSource =
+ SourceProto.Source.newBuilder()
+ .setType(SourceType.KAFKA)
+ .setKafkaSourceConfig(
+ KafkaSourceConfig.newBuilder()
+ .setBootstrapServers("kafka:9092")
+ .setTopic("mytopic-changed"))
+ .build();
+
+ FeatureSetProto.FeatureSet newFeatureSetProto =
+ FeatureSetProto.FeatureSet.newBuilder()
+ .setSpec(
+ FeatureSetSpec.newBuilder()
+ .setName("featureSet")
+ .setProject("project")
+ .setMaxAge(Duration.newBuilder().setSeconds(101))
+ .setSource(newSource)
+ .addFeatures(
+ FeatureSpec.newBuilder()
+ .setName("feature1")
+ .setValueType(Enum.INT64)
+ .setIntDomain(IntDomain.newBuilder().setMax(10).setMin(0)))
+ .addFeatures(
+ FeatureSpec.newBuilder().setName("feature2").setValueType(Enum.FLOAT))
+ .addEntities(
+ EntitySpec.newBuilder().setName("entity").setValueType(Enum.STRING))
+ .build())
+ .build();
+
+ expectedException.expect(IllegalArgumentException.class);
+ expectedException.expectMessage(
+ containsString(
+ "You are attempting to change the type of feature feature2 from STRING to FLOAT."));
+ FeatureSet existingFeatureSet = FeatureSet.fromProto(oldFeatureSetProto);
+ existingFeatureSet.updateFromProto(newFeatureSetProto);
+ }
+}
diff --git a/core/src/test/java/feast/core/model/JobStatusTest.java b/core/src/test/java/feast/core/model/JobStatusTest.java
new file mode 100644
index 0000000000..f5c8839386
--- /dev/null
+++ b/core/src/test/java/feast/core/model/JobStatusTest.java
@@ -0,0 +1,45 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.core.model;
+
+import static org.hamcrest.Matchers.is;
+import static org.junit.Assert.assertThat;
+
+import org.junit.Test;
+
+public class JobStatusTest {
+
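+ // Terminal and transitional states are disjoint sets; each test asserts both directions.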
+ @Test
+ public void isTerminalReturnsTrueForJobStatusWithTerminalState() {
+ JobStatus.getTerminalStates()
+ .forEach(
+ status -> {
+ assertThat(status.isTerminal(), is(true));
+ assertThat(status.isTransitional(), is(false));
+ });
+ }
+
+ @Test
+ public void isTransitionalReturnsTrueForJobStatusWithTransitionalState() {
+ JobStatus.getTransitionalStates()
+ .forEach(
+ status -> {
+ assertThat(status.isTransitional(), is(true));
+ assertThat(status.isTerminal(), is(false));
+ });
+ }
+}
diff --git a/core/src/test/java/feast/core/service/AccessManagementServiceTest.java b/core/src/test/java/feast/core/service/AccessManagementServiceTest.java
new file mode 100644
index 0000000000..15be203709
--- /dev/null
+++ b/core/src/test/java/feast/core/service/AccessManagementServiceTest.java
@@ -0,0 +1,74 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2019 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.core.service;
+
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+import static org.mockito.MockitoAnnotations.initMocks;
+
+import feast.core.dao.ProjectRepository;
+import feast.core.model.Project;
+import java.util.Optional;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.mockito.Mock;
+
+public class AccessManagementServiceTest {
+ @Rule public ExpectedException expectedException = ExpectedException.none();
+ // mocks
+ @Mock private ProjectRepository projectRepository;
+ // dummy models
+ private Project defaultProject;
+ private Project testProject;
+
+ // test target
+ private AccessManagementService accessService;
+
+ @Before
+ public void setup() {
+ initMocks(this);
+ // setup dummy models for testing
+ this.defaultProject = new Project(Project.DEFAULT_NAME);
+ this.testProject = new Project("project");
+ // setup test target
+ when(this.projectRepository.existsById(Project.DEFAULT_NAME)).thenReturn(false);
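+ // Reporting the default project as absent makes the constructor create it (verified below).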
+ this.accessService = new AccessManagementService(this.projectRepository);
+ }
+
+ @Test
+ public void testDefaultProjectCreateInConstructor() {
+ verify(this.projectRepository).saveAndFlush(this.defaultProject);
+ }
+
+ @Test
+ public void testArchiveProject() {
+ when(this.projectRepository.findById("project")).thenReturn(Optional.of(this.testProject));
+ this.accessService.archiveProject("project");
+ this.testProject.setArchived(true);
+ verify(this.projectRepository).saveAndFlush(this.testProject);
+ // reset archived flag
+ this.testProject.setArchived(false);
+ }
+
+ @Test
+ public void shouldNotArchiveDefaultProject() {
+ expectedException.expect(IllegalArgumentException.class);
+ this.accessService.archiveProject(Project.DEFAULT_NAME);
+ }
+}
diff --git a/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java b/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java
index aa71f201dd..8386efb28f 100644
--- a/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java
+++ b/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java
@@ -25,21 +25,10 @@
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
+import com.google.common.collect.Lists;
import com.google.protobuf.InvalidProtocolBufferException;
-import feast.core.CoreServiceProto.ListFeatureSetsRequest.Filter;
-import feast.core.CoreServiceProto.ListFeatureSetsResponse;
-import feast.core.CoreServiceProto.ListStoresResponse;
-import feast.core.FeatureSetProto;
-import feast.core.FeatureSetProto.FeatureSetMeta;
-import feast.core.FeatureSetProto.FeatureSetSpec;
-import feast.core.SourceProto.KafkaSourceConfig;
-import feast.core.SourceProto.Source;
-import feast.core.SourceProto.SourceType;
-import feast.core.StoreProto;
-import feast.core.StoreProto.Store.RedisConfig;
-import feast.core.StoreProto.Store.StoreType;
-import feast.core.StoreProto.Store.Subscription;
-import feast.core.config.FeastProperties.JobUpdatesProperties;
+import feast.core.config.FeastProperties;
+import feast.core.config.FeastProperties.JobProperties;
import feast.core.dao.FeatureSetRepository;
import feast.core.dao.JobRepository;
import feast.core.job.JobManager;
@@ -48,7 +37,21 @@
import feast.core.model.FeatureSet;
import feast.core.model.Job;
import feast.core.model.JobStatus;
+import feast.proto.core.CoreServiceProto.ListFeatureSetsRequest.Filter;
+import feast.proto.core.CoreServiceProto.ListFeatureSetsResponse;
+import feast.proto.core.CoreServiceProto.ListStoresResponse;
+import feast.proto.core.FeatureSetProto;
+import feast.proto.core.FeatureSetProto.FeatureSetMeta;
+import feast.proto.core.FeatureSetProto.FeatureSetSpec;
+import feast.proto.core.SourceProto.KafkaSourceConfig;
+import feast.proto.core.SourceProto.Source;
+import feast.proto.core.SourceProto.SourceType;
+import feast.proto.core.StoreProto;
+import feast.proto.core.StoreProto.Store.RedisConfig;
+import feast.proto.core.StoreProto.Store.StoreType;
+import feast.proto.core.StoreProto.Store.Subscription;
import java.util.Arrays;
+import java.util.Collections;
import java.util.List;
import org.junit.Before;
import org.junit.Rule;
@@ -65,13 +68,15 @@ public class JobCoordinatorServiceTest {
@Mock SpecService specService;
@Mock FeatureSetRepository featureSetRepository;
- private JobUpdatesProperties jobUpdatesProperties;
+ private FeastProperties feastProperties;
@Before
public void setUp() {
initMocks(this);
- jobUpdatesProperties = new JobUpdatesProperties();
- jobUpdatesProperties.setTimeoutSeconds(5);
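+ // The update timeout now lives on FeastProperties.JobProperties rather than JobUpdatesProperties.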
+ feastProperties = new FeastProperties();
+ JobProperties jobProperties = new JobProperties();
+ jobProperties.setJobUpdateTimeoutSeconds(5);
+ feastProperties.setJobs(jobProperties);
}
@Test
@@ -79,7 +84,7 @@ public void shouldDoNothingIfNoStoresFound() throws InvalidProtocolBufferExcepti
when(specService.listStores(any())).thenReturn(ListStoresResponse.newBuilder().build());
JobCoordinatorService jcs =
new JobCoordinatorService(
- jobRepository, featureSetRepository, specService, jobManager, jobUpdatesProperties);
+ jobRepository, featureSetRepository, specService, jobManager, feastProperties);
jcs.Poll();
verify(jobRepository, times(0)).saveAndFlush(any());
}
@@ -91,21 +96,16 @@ public void shouldDoNothingIfNoMatchingFeatureSetsFound() throws InvalidProtocol
.setName("test")
.setType(StoreType.REDIS)
.setRedisConfig(RedisConfig.newBuilder().build())
- .addSubscriptions(
- Subscription.newBuilder().setProject("*").setName("*").setVersion("*").build())
+ .addSubscriptions(Subscription.newBuilder().setProject("*").setName("*").build())
.build();
when(specService.listStores(any()))
.thenReturn(ListStoresResponse.newBuilder().addStore(store).build());
when(specService.listFeatureSets(
- Filter.newBuilder()
- .setProject("*")
- .setFeatureSetName("*")
- .setFeatureSetVersion("*")
- .build()))
+ Filter.newBuilder().setProject("*").setFeatureSetName("*").build()))
.thenReturn(ListFeatureSetsResponse.newBuilder().build());
JobCoordinatorService jcs =
new JobCoordinatorService(
- jobRepository, featureSetRepository, specService, jobManager, jobUpdatesProperties);
+ jobRepository, featureSetRepository, specService, jobManager, feastProperties);
jcs.Poll();
verify(jobRepository, times(0)).saveAndFlush(any());
}
@@ -117,12 +117,7 @@ public void shouldGenerateAndSubmitJobsIfAny() throws InvalidProtocolBufferExcep
.setName("test")
.setType(StoreType.REDIS)
.setRedisConfig(RedisConfig.newBuilder().build())
- .addSubscriptions(
- Subscription.newBuilder()
- .setProject("project1")
- .setName("features")
- .setVersion("*")
- .build())
+ .addSubscriptions(Subscription.newBuilder().setProject("project1").setName("*").build())
.build();
Source source =
Source.newBuilder()
@@ -134,60 +129,51 @@ public void shouldGenerateAndSubmitJobsIfAny() throws InvalidProtocolBufferExcep
.build())
.build();
- FeatureSetProto.FeatureSet featureSet1 =
+ FeatureSetProto.FeatureSet featureSetProto1 =
FeatureSetProto.FeatureSet.newBuilder()
.setSpec(
FeatureSetSpec.newBuilder()
.setSource(source)
.setProject("project1")
- .setName("features")
- .setVersion(1))
+ .setName("features1"))
.setMeta(FeatureSetMeta.newBuilder())
.build();
- FeatureSetProto.FeatureSet featureSet2 =
+ FeatureSet featureSet1 = FeatureSet.fromProto(featureSetProto1);
+ FeatureSetProto.FeatureSet featureSetProto2 =
FeatureSetProto.FeatureSet.newBuilder()
.setSpec(
FeatureSetSpec.newBuilder()
.setSource(source)
.setProject("project1")
- .setName("features")
- .setVersion(2))
+ .setName("features2"))
.setMeta(FeatureSetMeta.newBuilder())
.build();
+ FeatureSet featureSet2 = FeatureSet.fromProto(featureSetProto2);
String extId = "ext";
- ArgumentCaptor<Job> jobArgCaptor = ArgumentCaptor.forClass(Job.class);
+ ArgumentCaptor<List<Job>> jobArgCaptor = ArgumentCaptor.forClass(List.class);
Job expectedInput =
new Job(
"",
"",
- Runner.DATAFLOW.name(),
+ Runner.DATAFLOW,
feast.core.model.Source.fromProto(source),
feast.core.model.Store.fromProto(store),
- Arrays.asList(FeatureSet.fromProto(featureSet1), FeatureSet.fromProto(featureSet2)),
+ Arrays.asList(featureSet1, featureSet2),
JobStatus.PENDING);
Job expected =
new Job(
"some_id",
extId,
- Runner.DATAFLOW.name(),
+ Runner.DATAFLOW,
feast.core.model.Source.fromProto(source),
feast.core.model.Store.fromProto(store),
- Arrays.asList(FeatureSet.fromProto(featureSet1), FeatureSet.fromProto(featureSet2)),
+ Arrays.asList(featureSet1, featureSet2),
JobStatus.RUNNING);
- when(specService.listFeatureSets(
- Filter.newBuilder()
- .setProject("project1")
- .setFeatureSetName("features")
- .setFeatureSetVersion("*")
- .build()))
- .thenReturn(
- ListFeatureSetsResponse.newBuilder()
- .addFeatureSets(featureSet1)
- .addFeatureSets(featureSet2)
- .build());
+ when(featureSetRepository.findAllByNameLikeAndProject_NameLikeOrderByNameAsc("%", "project1"))
+ .thenReturn(Lists.newArrayList(featureSet1, featureSet2));
when(specService.listStores(any()))
.thenReturn(ListStoresResponse.newBuilder().addStore(store).build());
@@ -196,11 +182,11 @@ public void shouldGenerateAndSubmitJobsIfAny() throws InvalidProtocolBufferExcep
JobCoordinatorService jcs =
new JobCoordinatorService(
- jobRepository, featureSetRepository, specService, jobManager, jobUpdatesProperties);
+ jobRepository, featureSetRepository, specService, jobManager, feastProperties);
jcs.Poll();
- verify(jobRepository, times(1)).saveAndFlush(jobArgCaptor.capture());
- Job actual = jobArgCaptor.getValue();
- assertThat(actual, equalTo(expected));
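+ // A single saveAll batch replaces per-job saveAndFlush, so the captor now yields a list.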
+ verify(jobRepository, times(1)).saveAll(jobArgCaptor.capture());
+ List<Job> actual = jobArgCaptor.getValue();
+ assertThat(actual, equalTo(Collections.singletonList(expected)));
}
@Test
@@ -210,12 +196,7 @@ public void shouldGroupJobsBySource() throws InvalidProtocolBufferException {
.setName("test")
.setType(StoreType.REDIS)
.setRedisConfig(RedisConfig.newBuilder().build())
- .addSubscriptions(
- Subscription.newBuilder()
- .setProject("project1")
- .setName("features")
- .setVersion("*")
- .build())
+ .addSubscriptions(Subscription.newBuilder().setProject("project1").setName("*").build())
.build();
Source source1 =
Source.newBuilder()
@@ -236,79 +217,72 @@ public void shouldGroupJobsBySource() throws InvalidProtocolBufferException {
.build())
.build();
- FeatureSetProto.FeatureSet featureSet1 =
+ FeatureSetProto.FeatureSet featureSetProto1 =
FeatureSetProto.FeatureSet.newBuilder()
.setSpec(
FeatureSetSpec.newBuilder()
.setSource(source1)
.setProject("project1")
- .setName("features")
- .setVersion(1))
+ .setName("features1"))
.setMeta(FeatureSetMeta.newBuilder())
.build();
- FeatureSetProto.FeatureSet featureSet2 =
+ FeatureSet featureSet1 = FeatureSet.fromProto(featureSetProto1);
+
+ FeatureSetProto.FeatureSet featureSetProto2 =
FeatureSetProto.FeatureSet.newBuilder()
.setSpec(
FeatureSetSpec.newBuilder()
.setSource(source2)
.setProject("project1")
- .setName("features")
- .setVersion(2))
+ .setName("features2"))
.setMeta(FeatureSetMeta.newBuilder())
.build();
+ FeatureSet featureSet2 = FeatureSet.fromProto(featureSetProto2);
Job expectedInput1 =
new Job(
"name1",
"",
- Runner.DATAFLOW.name(),
+ Runner.DATAFLOW,
feast.core.model.Source.fromProto(source1),
feast.core.model.Store.fromProto(store),
- Arrays.asList(FeatureSet.fromProto(featureSet1)),
+ Arrays.asList(featureSet1),
JobStatus.PENDING);
Job expected1 =
new Job(
"name1",
"extId1",
- Runner.DATAFLOW.name(),
+ Runner.DATAFLOW,
feast.core.model.Source.fromProto(source1),
feast.core.model.Store.fromProto(store),
- Arrays.asList(FeatureSet.fromProto(featureSet1)),
+ Arrays.asList(featureSet1),
JobStatus.RUNNING);
Job expectedInput2 =
new Job(
"",
"extId2",
- Runner.DATAFLOW.name(),
+ Runner.DATAFLOW,
feast.core.model.Source.fromProto(source2),
feast.core.model.Store.fromProto(store),
- Arrays.asList(FeatureSet.fromProto(featureSet2)),
+ Arrays.asList(featureSet2),
JobStatus.PENDING);
Job expected2 =
new Job(
"name2",
"extId2",
- Runner.DATAFLOW.name(),
+ Runner.DATAFLOW,
feast.core.model.Source.fromProto(source2),
feast.core.model.Store.fromProto(store),
- Arrays.asList(FeatureSet.fromProto(featureSet2)),
+ Arrays.asList(featureSet2),
JobStatus.RUNNING);
- ArgumentCaptor<Job> jobArgCaptor = ArgumentCaptor.forClass(Job.class);
+ ArgumentCaptor<List<Job>> jobArgCaptor = ArgumentCaptor.forClass(List.class);
+
+ when(featureSetRepository.findAllByNameLikeAndProject_NameLikeOrderByNameAsc("%", "project1"))
+ .thenReturn(Lists.newArrayList(featureSet1, featureSet2));
- when(specService.listFeatureSets(
- Filter.newBuilder()
- .setProject("project1")
- .setFeatureSetName("features")
- .setFeatureSetVersion("*")
- .build()))
- .thenReturn(
- ListFeatureSetsResponse.newBuilder()
- .addFeatureSets(featureSet1)
- .addFeatureSets(featureSet2)
- .build());
when(specService.listStores(any()))
.thenReturn(ListStoresResponse.newBuilder().addStore(store).build());
@@ -318,11 +292,11 @@ public void shouldGroupJobsBySource() throws InvalidProtocolBufferException {
JobCoordinatorService jcs =
new JobCoordinatorService(
- jobRepository, featureSetRepository, specService, jobManager, jobUpdatesProperties);
+ jobRepository, featureSetRepository, specService, jobManager, feastProperties);
jcs.Poll();
- verify(jobRepository, times(2)).saveAndFlush(jobArgCaptor.capture());
- List<Job> actual = jobArgCaptor.getAllValues();
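+ // Jobs are now persisted in one saveAll batch, so a single captured list holds both jobs in order.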
+ verify(jobRepository, times(1)).saveAll(jobArgCaptor.capture());
+ List<Job> actual = jobArgCaptor.getValue();
assertThat(actual.get(0), equalTo(expected1));
assertThat(actual.get(1), equalTo(expected2));
diff --git a/core/src/test/java/feast/core/service/JobServiceTest.java b/core/src/test/java/feast/core/service/JobServiceTest.java
index c0e90ca43f..ff056287f9 100644
--- a/core/src/test/java/feast/core/service/JobServiceTest.java
+++ b/core/src/test/java/feast/core/service/JobServiceTest.java
@@ -26,31 +26,23 @@
import static org.mockito.MockitoAnnotations.initMocks;
import com.google.protobuf.InvalidProtocolBufferException;
-import feast.core.CoreServiceProto.ListFeatureSetsRequest;
-import feast.core.CoreServiceProto.ListFeatureSetsResponse;
-import feast.core.CoreServiceProto.ListIngestionJobsRequest;
-import feast.core.CoreServiceProto.ListIngestionJobsResponse;
-import feast.core.CoreServiceProto.RestartIngestionJobRequest;
-import feast.core.CoreServiceProto.RestartIngestionJobResponse;
-import feast.core.CoreServiceProto.StopIngestionJobRequest;
-import feast.core.CoreServiceProto.StopIngestionJobResponse;
-import feast.core.FeatureSetProto.FeatureSetStatus;
-import feast.core.FeatureSetReferenceProto.FeatureSetReference;
-import feast.core.IngestionJobProto.IngestionJob;
-import feast.core.SourceProto.KafkaSourceConfig;
-import feast.core.SourceProto.SourceType;
-import feast.core.StoreProto.Store.RedisConfig;
-import feast.core.StoreProto.Store.StoreType;
import feast.core.dao.JobRepository;
import feast.core.job.JobManager;
import feast.core.job.Runner;
-import feast.core.model.FeatureSet;
-import feast.core.model.Field;
-import feast.core.model.Job;
-import feast.core.model.JobStatus;
-import feast.core.model.Source;
-import feast.core.model.Store;
-import feast.types.ValueProto.ValueType.Enum;
+import feast.core.model.*;
+import feast.proto.core.CoreServiceProto.ListFeatureSetsRequest;
+import feast.proto.core.CoreServiceProto.ListFeatureSetsResponse;
+import feast.proto.core.CoreServiceProto.ListIngestionJobsRequest;
+import feast.proto.core.CoreServiceProto.ListIngestionJobsResponse;
+import feast.proto.core.CoreServiceProto.RestartIngestionJobRequest;
+import feast.proto.core.CoreServiceProto.RestartIngestionJobResponse;
+import feast.proto.core.CoreServiceProto.StopIngestionJobRequest;
+import feast.proto.core.CoreServiceProto.StopIngestionJobResponse;
+import feast.proto.core.FeatureSetReferenceProto.FeatureSetReference;
+import feast.proto.core.IngestionJobProto.IngestionJob;
+import feast.proto.core.StoreProto.Store.RedisConfig;
+import feast.proto.core.StoreProto.Store.StoreType;
+import feast.proto.types.ValueProto.ValueType.Enum;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
@@ -77,21 +69,13 @@ public class JobServiceTest {
// test target
public JobService jobService;
- /* unit test setup */
@Before
public void setup() {
initMocks(this);
// create mock objects for testing
// fake data source
- this.dataSource =
- new Source(
- SourceType.KAFKA,
- KafkaSourceConfig.newBuilder()
- .setBootstrapServers("kafka:9092")
- .setTopic("my-topic")
- .build(),
- true);
+ this.dataSource = TestObjectFactory.defaultSource;
// fake data store
this.dataStore =
new Store(
@@ -122,7 +106,6 @@ public void setup() {
new JobService(this.jobRepository, this.specService, Arrays.asList(this.jobManager));
}
- // setup fake spec service
public void setupSpecService() {
try {
ListFeatureSetsResponse response =
@@ -139,7 +122,6 @@ public void setupSpecService() {
}
}
- // setup fake job repository
public void setupJobRepository() {
when(this.jobRepository.findById(this.job.getId())).thenReturn(Optional.of(this.job));
when(this.jobRepository.findByStoreName(this.dataStore.getName()))
@@ -149,28 +131,19 @@ public void setupJobRepository() {
when(this.jobRepository.findAll()).thenReturn(Arrays.asList(this.job));
}
- // TODO: setup fake job manager
public void setupJobManager() {
when(this.jobManager.getRunnerType()).thenReturn(Runner.DATAFLOW);
when(this.jobManager.restartJob(this.job))
.thenReturn(this.newDummyJob(this.job.getId(), this.job.getExtId(), JobStatus.PENDING));
}
- // dummy model constructorss
private FeatureSet newDummyFeatureSet(String name, int version, String project) {
- Field feature = new Field(name + "_feature", Enum.INT64);
- Field entity = new Field(name + "_entity", Enum.STRING);
+ Feature feature = TestObjectFactory.CreateFeature(name + "_feature", Enum.INT64);
+ Entity entity = TestObjectFactory.CreateEntity(name + "_entity", Enum.STRING);
FeatureSet fs =
- new FeatureSet(
- name,
- project,
- version,
- 100L,
- Arrays.asList(entity),
- Arrays.asList(feature),
- this.dataSource,
- FeatureSetStatus.STATUS_READY);
+ TestObjectFactory.CreateFeatureSet(
+ name, project, Arrays.asList(entity), Arrays.asList(feature));
fs.setCreated(Date.from(Instant.ofEpochSecond(10L)));
return fs;
}
@@ -179,7 +152,7 @@ private Job newDummyJob(String id, String extId, JobStatus status) {
return new Job(
id,
extId,
- Runner.DATAFLOW.name(),
+ Runner.DATAFLOW,
this.dataSource,
this.dataStore,
Arrays.asList(this.featureSet),
@@ -190,7 +163,6 @@ private List<FeatureSetReference> newDummyFeatureSetReferences() {
return Arrays.asList(
// all provided: name, version and project
FeatureSetReference.newBuilder()
- .setVersion(this.featureSet.getVersion())
.setName(this.featureSet.getName())
.setProject(this.featureSet.getProject().toString())
.build(),
@@ -202,10 +174,7 @@ private List newDummyFeatureSetReferences() {
.build(),
// name and version
- FeatureSetReference.newBuilder()
- .setName(this.featureSet.getName())
- .setVersion(this.featureSet.getVersion())
- .build());
+ FeatureSetReference.newBuilder().setName(this.featureSet.getName()).build());
}
private List<ListFeatureSetsRequest.Filter> newDummyListRequestFilters() {
@@ -214,25 +183,21 @@ private List<ListFeatureSetsRequest.Filter> newDummyListRequestFilters() {
ListFeatureSetsRequest.Filter.newBuilder()
.setFeatureSetName(this.featureSet.getName())
.setProject(this.featureSet.getProject().toString())
- .setFeatureSetVersion(String.valueOf(this.featureSet.getVersion()))
.build(),
// name and project
ListFeatureSetsRequest.Filter.newBuilder()
.setFeatureSetName(this.featureSet.getName())
.setProject(this.featureSet.getProject().toString())
- .setFeatureSetVersion("*")
.build(),
// name and project
ListFeatureSetsRequest.Filter.newBuilder()
.setFeatureSetName(this.featureSet.getName())
.setProject("*")
- .setFeatureSetVersion(String.valueOf(this.featureSet.getVersion()))
.build());
}
- /* unit tests */
private ListIngestionJobsResponse tryListJobs(ListIngestionJobsRequest request) {
ListIngestionJobsResponse response = null;
try {
@@ -245,7 +210,6 @@ private ListIngestionJobsResponse tryListJobs(ListIngestionJobsRequest request)
return response;
}
- // list jobs
@Test
public void testListJobsById() {
ListIngestionJobsRequest.Filter filter =
@@ -304,7 +268,6 @@ public void testListIngestionJobByFeatureSetReference() {
assertThat(this.tryListJobs(request).getJobs(0), equalTo(this.ingestionJob));
}
- // stop jobs
private StopIngestionJobResponse tryStopJob(
StopIngestionJobRequest request, boolean expectError) {
StopIngestionJobResponse response = null;
@@ -341,10 +304,9 @@ public void testStopJobForId() {
}
@Test
- public void testStopAlreadyStop() {
+ public void testStopAlreadyStopped() {
// check that stop job does not try to stop jobs that are already stopped
- List<JobStatus> doNothingStatuses = new ArrayList<>();
- doNothingStatuses.addAll(JobStatus.getTerminalState());
+ List<JobStatus> doNothingStatuses = new ArrayList<>(JobStatus.getTerminalStates());
JobStatus prevStatus = this.job.getStatus();
for (JobStatus status : doNothingStatuses) {
diff --git a/core/src/test/java/feast/core/service/SpecServiceTest.java b/core/src/test/java/feast/core/service/SpecServiceTest.java
index 43a66135dc..e584ee71e0 100644
--- a/core/src/test/java/feast/core/service/SpecServiceTest.java
+++ b/core/src/test/java/feast/core/service/SpecServiceTest.java
@@ -1,6 +1,6 @@
/*
* SPDX-License-Identifier: Apache-2.0
- * Copyright 2018-2019 The Feast Authors
+ * Copyright 2018-2020 The Feast Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -26,48 +26,34 @@
import com.google.api.client.util.Lists;
import com.google.protobuf.InvalidProtocolBufferException;
-import feast.core.CoreServiceProto.ApplyFeatureSetResponse;
-import feast.core.CoreServiceProto.ApplyFeatureSetResponse.Status;
-import feast.core.CoreServiceProto.GetFeatureSetRequest;
-import feast.core.CoreServiceProto.GetFeatureSetResponse;
-import feast.core.CoreServiceProto.ListFeatureSetsRequest.Filter;
-import feast.core.CoreServiceProto.ListFeatureSetsResponse;
-import feast.core.CoreServiceProto.ListStoresRequest;
-import feast.core.CoreServiceProto.ListStoresResponse;
-import feast.core.CoreServiceProto.UpdateStoreRequest;
-import feast.core.CoreServiceProto.UpdateStoreResponse;
-import feast.core.FeatureSetProto;
-import feast.core.FeatureSetProto.EntitySpec;
-import feast.core.FeatureSetProto.FeatureSetSpec;
-import feast.core.FeatureSetProto.FeatureSetStatus;
-import feast.core.FeatureSetProto.FeatureSpec;
-import feast.core.SourceProto.KafkaSourceConfig;
-import feast.core.SourceProto.SourceType;
-import feast.core.StoreProto;
-import feast.core.StoreProto.Store.RedisConfig;
-import feast.core.StoreProto.Store.StoreType;
-import feast.core.StoreProto.Store.Subscription;
import feast.core.dao.FeatureSetRepository;
import feast.core.dao.ProjectRepository;
import feast.core.dao.StoreRepository;
import feast.core.exception.RetrievalException;
-import feast.core.model.FeatureSet;
-import feast.core.model.Field;
-import feast.core.model.Project;
-import feast.core.model.Source;
-import feast.core.model.Store;
-import feast.types.ValueProto.ValueType.Enum;
+import feast.core.model.*;
+import feast.proto.core.CoreServiceProto.ApplyFeatureSetResponse;
+import feast.proto.core.CoreServiceProto.ApplyFeatureSetResponse.Status;
+import feast.proto.core.CoreServiceProto.GetFeatureSetRequest;
+import feast.proto.core.CoreServiceProto.GetFeatureSetResponse;
+import feast.proto.core.CoreServiceProto.ListFeatureSetsRequest.Filter;
+import feast.proto.core.CoreServiceProto.ListFeatureSetsResponse;
+import feast.proto.core.CoreServiceProto.ListStoresRequest;
+import feast.proto.core.CoreServiceProto.ListStoresResponse;
+import feast.proto.core.CoreServiceProto.UpdateStoreRequest;
+import feast.proto.core.CoreServiceProto.UpdateStoreResponse;
+import feast.proto.core.FeatureSetProto;
+import feast.proto.core.FeatureSetProto.EntitySpec;
+import feast.proto.core.FeatureSetProto.FeatureSetSpec;
+import feast.proto.core.FeatureSetProto.FeatureSpec;
+import feast.proto.core.StoreProto;
+import feast.proto.core.StoreProto.Store.RedisConfig;
+import feast.proto.core.StoreProto.Store.StoreType;
+import feast.proto.core.StoreProto.Store.Subscription;
+import feast.proto.types.ValueProto.ValueType.Enum;
import java.sql.Date;
import java.time.Instant;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
import java.util.Map.Entry;
-import java.util.Optional;
import java.util.stream.Collectors;
import org.junit.Before;
import org.junit.Rule;
@@ -107,64 +93,39 @@ public class SpecServiceTest {
private List<Store> stores;
private Source defaultSource;
+ // TODO: updateFromProto mutates features in place, so tests that run in the wrong order can
+ // break one another. Consider refactoring.
@Before
public void setUp() {
initMocks(this);
- defaultSource =
- new Source(
- SourceType.KAFKA,
- KafkaSourceConfig.newBuilder()
- .setBootstrapServers("kafka:9092")
- .setTopic("my-topic")
- .build(),
- true);
-
- FeatureSet featureSet1v1 = newDummyFeatureSet("f1", 1, "project1");
- FeatureSet featureSet1v2 = newDummyFeatureSet("f1", 2, "project1");
- FeatureSet featureSet1v3 = newDummyFeatureSet("f1", 3, "project1");
- FeatureSet featureSet2v1 = newDummyFeatureSet("f2", 1, "project1");
-
- Field f3f1 = new Field("f3f1", Enum.INT64);
- Field f3f2 = new Field("f3f2", Enum.INT64);
- Field f3e1 = new Field("f3e1", Enum.STRING);
- FeatureSet featureSet3v1 =
- new FeatureSet(
- "f3",
- "project1",
- 1,
- 100L,
- Arrays.asList(f3e1),
- Arrays.asList(f3f2, f3f1),
- defaultSource,
- FeatureSetStatus.STATUS_READY);
-
- featureSets =
- Arrays.asList(featureSet1v1, featureSet1v2, featureSet1v3, featureSet2v1, featureSet3v1);
- when(featureSetRepository.findAll()).thenReturn(featureSets);
- when(featureSetRepository.findAllByOrderByNameAscVersionAsc()).thenReturn(featureSets);
+ defaultSource = TestObjectFactory.defaultSource;
+
+ FeatureSet featureSet1 = newDummyFeatureSet("f1", "project1");
+ FeatureSet featureSet2 = newDummyFeatureSet("f2", "project1");
+
+ Feature f3f1 = TestObjectFactory.CreateFeature("f3f1", Enum.INT64);
+ Feature f3f2 = TestObjectFactory.CreateFeature("f3f2", Enum.INT64);
+ Entity f3e1 = TestObjectFactory.CreateEntity("f3e1", Enum.STRING);
+ FeatureSet featureSet3 =
+ TestObjectFactory.CreateFeatureSet(
+ "f3", "project1", Arrays.asList(f3e1), Arrays.asList(f3f2, f3f1));
- when(featureSetRepository.findFeatureSetByNameAndProject_NameAndVersion("f1", "project1", 1))
+ FeatureSet featureSet4 = newDummyFeatureSet("f4", Project.DEFAULT_NAME);
+ featureSets = Arrays.asList(featureSet1, featureSet2, featureSet3, featureSet4);
+
+ when(featureSetRepository.findAll()).thenReturn(featureSets);
+ when(featureSetRepository.findAllByOrderByNameAsc()).thenReturn(featureSets);
+ when(featureSetRepository.findFeatureSetByNameAndProject_Name("f1", "project1"))
.thenReturn(featureSets.get(0));
- when(featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc(
- "f1", "project1"))
- .thenReturn(featureSets.subList(0, 3));
- when(featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc(
- "f3", "project1"))
- .thenReturn(featureSets.subList(4, 5));
- when(featureSetRepository.findFirstFeatureSetByNameLikeAndProject_NameOrderByVersionDesc(
- "f1", "project1"))
- .thenReturn(featureSet1v3);
- when(featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc(
- "f1", "project1"))
- .thenReturn(featureSets.subList(0, 3));
- when(featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc(
- "asd", "project1"))
+ when(featureSetRepository.findFeatureSetByNameAndProject_Name("f2", "project1"))
+ .thenReturn(featureSets.get(1));
+ when(featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAsc("f1", "project1"))
+ .thenReturn(featureSets.subList(0, 1));
+ when(featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAsc("asd", "project1"))
.thenReturn(Lists.newArrayList());
- when(featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc(
- "f%", "project1"))
+ when(featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAsc("f%", "project1"))
.thenReturn(featureSets);
- when(featureSetRepository.findAllByNameLikeAndProject_NameLikeOrderByNameAscVersionAsc(
- "%", "%"))
+ when(featureSetRepository.findAllByNameLikeAndProject_NameLikeOrderByNameAsc("%", "%"))
.thenReturn(featureSets);
when(projectRepository.findAllByArchivedIsFalse())
@@ -191,11 +152,7 @@ public void shouldGetAllFeatureSetsIfOnlyWildcardsProvided()
throws InvalidProtocolBufferException {
ListFeatureSetsResponse actual =
specService.listFeatureSets(
- Filter.newBuilder()
- .setFeatureSetName("*")
- .setProject("*")
- .setFeatureSetVersion("*")
- .build());
+ Filter.newBuilder().setFeatureSetName("*").setProject("*").build());
List<FeatureSetProto.FeatureSet> list = new ArrayList<>();
for (FeatureSet featureSet : featureSets) {
FeatureSetProto.FeatureSet toProto = featureSet.toProto();
@@ -206,48 +163,12 @@ public void shouldGetAllFeatureSetsIfOnlyWildcardsProvided()
assertThat(actual, equalTo(expected));
}
- @Test
- public void listFeatureSetShouldFailIfFeatureSetProvidedWithoutProject()
- throws InvalidProtocolBufferException {
- expectedException.expect(IllegalArgumentException.class);
- expectedException.expectMessage(
- "Invalid listFeatureSetRequest, missing arguments. Must provide project, feature set name, and version.");
- specService.listFeatureSets(
- Filter.newBuilder().setFeatureSetName("f1").setFeatureSetVersion("1").build());
- }
-
- @Test
- public void shouldGetAllFeatureSetsMatchingNameIfWildcardVersionProvided()
- throws InvalidProtocolBufferException {
- ListFeatureSetsResponse actual =
- specService.listFeatureSets(
- Filter.newBuilder()
- .setProject("project1")
- .setFeatureSetName("f1")
- .setFeatureSetVersion("*")
- .build());
- List<FeatureSet> expectedFeatureSets =
- featureSets.stream().filter(fs -> fs.getName().equals("f1")).collect(Collectors.toList());
- List<FeatureSetProto.FeatureSet> list = new ArrayList<>();
- for (FeatureSet expectedFeatureSet : expectedFeatureSets) {
- FeatureSetProto.FeatureSet toProto = expectedFeatureSet.toProto();
- list.add(toProto);
- }
- ListFeatureSetsResponse expected =
- ListFeatureSetsResponse.newBuilder().addAllFeatureSets(list).build();
- assertThat(actual, equalTo(expected));
- }
-
@Test
public void shouldGetAllFeatureSetsMatchingNameWithWildcardSearch()
throws InvalidProtocolBufferException {
ListFeatureSetsResponse actual =
specService.listFeatureSets(
- Filter.newBuilder()
- .setProject("project1")
- .setFeatureSetName("f*")
- .setFeatureSetVersion("*")
- .build());
+ Filter.newBuilder().setProject("project1").setFeatureSetName("f*").build());
List<FeatureSet> expectedFeatureSets =
featureSets.stream()
.filter(fs -> fs.getName().startsWith("f"))
@@ -263,20 +184,12 @@ public void shouldGetAllFeatureSetsMatchingNameWithWildcardSearch()
}
@Test
- public void shouldGetAllFeatureSetsMatchingVersionIfNoComparator()
- throws InvalidProtocolBufferException {
+ public void shouldGetFeatureSetsByNameAndProject() throws InvalidProtocolBufferException {
ListFeatureSetsResponse actual =
specService.listFeatureSets(
- Filter.newBuilder()
- .setProject("project1")
- .setFeatureSetName("f1")
- .setFeatureSetVersion("1")
- .build());
+ Filter.newBuilder().setProject("project1").setFeatureSetName("f1").build());
List<FeatureSet> expectedFeatureSets =
- featureSets.stream()
- .filter(fs -> fs.getName().equals("f1"))
- .filter(fs -> fs.getVersion() == 1)
- .collect(Collectors.toList());
+ featureSets.stream().filter(fs -> fs.getName().equals("f1")).collect(Collectors.toList());
List<FeatureSetProto.FeatureSet> list = new ArrayList<>();
for (FeatureSet expectedFeatureSet : expectedFeatureSets) {
FeatureSetProto.FeatureSet toProto = expectedFeatureSet.toProto();
@@ -287,80 +200,20 @@ public void shouldGetAllFeatureSetsMatchingVersionIfNoComparator()
assertThat(actual, equalTo(expected));
}
- @Test
- public void shouldThrowExceptionIfGetAllFeatureSetsGivenVersionWithComparator()
- throws InvalidProtocolBufferException {
- expectedException.expect(IllegalArgumentException.class);
- specService.listFeatureSets(
- Filter.newBuilder()
- .setProject("project1")
- .setFeatureSetName("f1")
- .setFeatureSetVersion(">1")
- .build());
- }
-
- @Test
- public void shouldGetLatestFeatureSetGivenMissingVersionFilter()
- throws InvalidProtocolBufferException {
- GetFeatureSetResponse actual =
- specService.getFeatureSet(
- GetFeatureSetRequest.newBuilder().setName("f1").setProject("project1").build());
- FeatureSet expected = featureSets.get(2);
- assertThat(actual.getFeatureSet(), equalTo(expected.toProto()));
- }
-
- @Test
- public void shouldGetSpecificFeatureSetGivenSpecificVersionFilter()
- throws InvalidProtocolBufferException {
- when(featureSetRepository.findFeatureSetByNameAndProject_NameAndVersion("f1", "project1", 2))
- .thenReturn(featureSets.get(1));
- GetFeatureSetResponse actual =
- specService.getFeatureSet(
- GetFeatureSetRequest.newBuilder()
- .setProject("project1")
- .setName("f1")
- .setVersion(2)
- .build());
- FeatureSet expected = featureSets.get(1);
- assertThat(actual.getFeatureSet(), equalTo(expected.toProto()));
- }
-
@Test
public void shouldThrowExceptionGivenMissingFeatureSetName()
throws InvalidProtocolBufferException {
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("No feature set name provided");
- specService.getFeatureSet(GetFeatureSetRequest.newBuilder().setVersion(2).build());
+ specService.getFeatureSet(GetFeatureSetRequest.newBuilder().build());
}
@Test
public void shouldThrowExceptionGivenMissingFeatureSet() throws InvalidProtocolBufferException {
expectedException.expect(RetrievalException.class);
- expectedException.expectMessage(
- "Feature set with name \"f1000\" and version \"2\" could not be found.");
+ expectedException.expectMessage("Feature set with name \"f1000\" could not be found.");
specService.getFeatureSet(
- GetFeatureSetRequest.newBuilder()
- .setName("f1000")
- .setProject("project1")
- .setVersion(2)
- .build());
- }
-
- @Test
- public void shouldThrowRetrievalExceptionGivenInvalidFeatureSetVersionComparator()
- throws InvalidProtocolBufferException {
- expectedException.expect(IllegalArgumentException.class);
- expectedException.expectMessage(
- "Invalid listFeatureSetRequest. Version must be set to \"*\" if the project name and feature set name aren't set explicitly: \n"
- + "feature_set_name: \"f1\"\n"
- + "feature_set_version: \">1\"\n"
- + "project: \"project1\"");
- specService.listFeatureSets(
- Filter.newBuilder()
- .setProject("project1")
- .setFeatureSetName("f1")
- .setFeatureSetVersion(">1")
- .build());
+ GetFeatureSetRequest.newBuilder().setName("f1000").setProject("project1").build());
}
@Test
@@ -395,10 +248,10 @@ public void shouldThrowRetrievalExceptionIfNoStoresFoundWithName() {
}
@Test
- public void applyFeatureSetShouldReturnFeatureSetWithLatestVersionIfFeatureSetHasNotChanged()
+ public void applyFeatureSetShouldReturnFeatureSetIfFeatureSetHasNotChanged()
throws InvalidProtocolBufferException {
FeatureSetSpec incomingFeatureSetSpec =
- featureSets.get(2).toProto().getSpec().toBuilder().clearVersion().build();
+ featureSets.get(0).toProto().getSpec().toBuilder().build();
ApplyFeatureSetResponse applyFeatureSetResponse =
specService.applyFeatureSet(
@@ -406,21 +259,19 @@ public void applyFeatureSetShouldReturnFeatureSetWithLatestVersionIfFeatureSetHa
verify(featureSetRepository, times(0)).save(ArgumentMatchers.any(FeatureSet.class));
assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.NO_CHANGE));
- assertThat(applyFeatureSetResponse.getFeatureSet(), equalTo(featureSets.get(2).toProto()));
+ assertThat(applyFeatureSetResponse.getFeatureSet(), equalTo(featureSets.get(0).toProto()));
}
@Test
- public void applyFeatureSetShouldApplyFeatureSetWithInitVersionIfNotExists()
+ public void applyFeatureSetShouldApplyFeatureSetIfNotExists()
throws InvalidProtocolBufferException {
- when(featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc(
- "f2", "project1"))
- .thenReturn(Lists.newArrayList());
+ when(featureSetRepository.findFeatureSetByNameAndProject_Name("f2", "project1"))
+ .thenReturn(null);
- FeatureSetProto.FeatureSet incomingFeatureSet =
- newDummyFeatureSet("f2", 1, "project1").toProto();
+ FeatureSetProto.FeatureSet incomingFeatureSet = newDummyFeatureSet("f2", "project1").toProto();
FeatureSetProto.FeatureSetSpec incomingFeatureSetSpec =
- incomingFeatureSet.getSpec().toBuilder().clearVersion().build();
+ incomingFeatureSet.getSpec().toBuilder().build();
ApplyFeatureSetResponse applyFeatureSetResponse =
specService.applyFeatureSet(
@@ -429,24 +280,16 @@ public void applyFeatureSetShouldApplyFeatureSetWithInitVersionIfNotExists()
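+ // The created feature set should match the incoming spec, with the registry's default source filled in.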
FeatureSetProto.FeatureSet expected =
FeatureSetProto.FeatureSet.newBuilder()
- .setSpec(
- incomingFeatureSetSpec
- .toBuilder()
- .setVersion(1)
- .setSource(defaultSource.toProto())
- .build())
+ .setSpec(incomingFeatureSetSpec.toBuilder().setSource(defaultSource.toProto()).build())
.build();
assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.CREATED));
assertThat(applyFeatureSetResponse.getFeatureSet().getSpec(), equalTo(expected.getSpec()));
- assertThat(
- applyFeatureSetResponse.getFeatureSet().getSpec().getVersion(),
- equalTo(expected.getSpec().getVersion()));
}
@Test
- public void applyFeatureSetShouldIncrementFeatureSetVersionIfAlreadyExists()
+ public void applyFeatureSetShouldUpdateAndSaveFeatureSetIfAlreadyExists()
throws InvalidProtocolBufferException {
- FeatureSetProto.FeatureSet incomingFeatureSet = featureSets.get(2).toProto();
+ FeatureSetProto.FeatureSet incomingFeatureSet = featureSets.get(0).toProto();
incomingFeatureSet =
incomingFeatureSet
.toBuilder()
@@ -455,7 +298,6 @@ public void applyFeatureSetShouldIncrementFeatureSetVersionIfAlreadyExists()
incomingFeatureSet
.getSpec()
.toBuilder()
- .clearVersion()
.addFeatures(
FeatureSpec.newBuilder().setName("feature2").setValueType(Enum.STRING))
.build())
@@ -466,44 +308,27 @@ public void applyFeatureSetShouldIncrementFeatureSetVersionIfAlreadyExists()
.toBuilder()
.setMeta(incomingFeatureSet.getMeta().toBuilder().build())
.setSpec(
- incomingFeatureSet
- .getSpec()
- .toBuilder()
- .setVersion(4)
- .setSource(defaultSource.toProto())
- .build())
+ incomingFeatureSet.getSpec().toBuilder().setSource(defaultSource.toProto()).build())
.build();
ApplyFeatureSetResponse applyFeatureSetResponse =
specService.applyFeatureSet(incomingFeatureSet);
verify(projectRepository).saveAndFlush(ArgumentMatchers.any(Project.class));
- assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.CREATED));
+ assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.UPDATED));
assertEquals(
FeatureSet.fromProto(applyFeatureSetResponse.getFeatureSet()),
FeatureSet.fromProto(expected));
- assertThat(
- applyFeatureSetResponse.getFeatureSet().getSpec().getVersion(),
- equalTo(expected.getSpec().getVersion()));
}
@Test
public void applyFeatureSetShouldNotCreateFeatureSetIfFieldsUnordered()
throws InvalidProtocolBufferException {
- Field f3f1 = new Field("f3f1", Enum.INT64);
- Field f3f2 = new Field("f3f2", Enum.INT64);
- Field f3e1 = new Field("f3e1", Enum.STRING);
- FeatureSetProto.FeatureSet incomingFeatureSet =
- (new FeatureSet(
- "f3",
- "project1",
- 5,
- 100L,
- Arrays.asList(f3e1),
- Arrays.asList(f3f2, f3f1),
- defaultSource,
- FeatureSetStatus.STATUS_READY))
- .toProto();
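+ // Present the same features in a shuffled order; an order-insensitive comparison must detect no change.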
+ FeatureSet featureSet = featureSets.get(1);
+ List<Feature> features = Lists.newArrayList(featureSet.getFeatures());
+ Collections.shuffle(features);
+ featureSet.setFeatures(Set.copyOf(features));
+ FeatureSetProto.FeatureSet incomingFeatureSet = featureSet.toProto();
ApplyFeatureSetResponse applyFeatureSetResponse =
specService.applyFeatureSet(incomingFeatureSet);
@@ -523,46 +348,11 @@ public void applyFeatureSetShouldNotCreateFeatureSetIfFieldsUnordered()
public void applyFeatureSetShouldAcceptPresenceShapeAndDomainConstraints()
throws InvalidProtocolBufferException {
List<EntitySpec> entitySpecs = new ArrayList<>();
- entitySpecs.add(
- EntitySpec.newBuilder()
- .setName("entity1")
- .setValueType(Enum.INT64)
- .setPresence(FeaturePresence.getDefaultInstance())
- .setShape(FixedShape.getDefaultInstance())
- .setDomain("mydomain")
- .build());
- entitySpecs.add(
- EntitySpec.newBuilder()
- .setName("entity2")
- .setValueType(Enum.INT64)
- .setGroupPresence(FeaturePresenceWithinGroup.getDefaultInstance())
- .setValueCount(ValueCount.getDefaultInstance())
- .setIntDomain(IntDomain.getDefaultInstance())
- .build());
- entitySpecs.add(
- EntitySpec.newBuilder()
- .setName("entity3")
- .setValueType(Enum.FLOAT)
- .setPresence(FeaturePresence.getDefaultInstance())
- .setValueCount(ValueCount.getDefaultInstance())
- .setFloatDomain(FloatDomain.getDefaultInstance())
- .build());
- entitySpecs.add(
- EntitySpec.newBuilder()
- .setName("entity4")
- .setValueType(Enum.STRING)
- .setPresence(FeaturePresence.getDefaultInstance())
- .setValueCount(ValueCount.getDefaultInstance())
- .setStringDomain(StringDomain.getDefaultInstance())
- .build());
- entitySpecs.add(
- EntitySpec.newBuilder()
- .setName("entity5")
- .setValueType(Enum.BOOL)
- .setPresence(FeaturePresence.getDefaultInstance())
- .setValueCount(ValueCount.getDefaultInstance())
- .setBoolDomain(BoolDomain.getDefaultInstance())
- .build());
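+ // Entity specs here carry no inline constraints; the presence/shape/domain fields under test are set on the feature specs below.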
+ entitySpecs.add(EntitySpec.newBuilder().setName("entity1").setValueType(Enum.INT64).build());
+ entitySpecs.add(EntitySpec.newBuilder().setName("entity2").setValueType(Enum.INT64).build());
+ entitySpecs.add(EntitySpec.newBuilder().setName("entity3").setValueType(Enum.FLOAT).build());
+ entitySpecs.add(EntitySpec.newBuilder().setName("entity4").setValueType(Enum.STRING).build());
+ entitySpecs.add(EntitySpec.newBuilder().setName("entity5").setValueType(Enum.BOOL).build());
List<FeatureSpec> featureSpecs = new ArrayList<>();
featureSpecs.add(
@@ -630,27 +420,13 @@ public void applyFeatureSetShouldAcceptPresenceShapeAndDomainConstraints()
new ArrayList<>(appliedFeatureSetSpec.getFeaturesList());
appliedFeatureSpecs.sort(Comparator.comparing(FeatureSpec::getName));
- assertEquals(appliedEntitySpecs.size(), entitySpecs.size());
- assertEquals(appliedFeatureSpecs.size(), featureSpecs.size());
-
- for (int i = 0; i < appliedEntitySpecs.size(); i++) {
- assertEquals(entitySpecs.get(i), appliedEntitySpecs.get(i));
- }
-
- for (int i = 0; i < appliedFeatureSpecs.size(); i++) {
- assertEquals(featureSpecs.get(i), appliedFeatureSpecs.get(i));
- }
+ assertEquals(appliedEntitySpecs, entitySpecs);
+ assertEquals(appliedFeatureSpecs, featureSpecs);
}
@Test
public void applyFeatureSetShouldUpdateFeatureSetWhenConstraintsAreUpdated()
throws InvalidProtocolBufferException {
- FeatureSetProto.FeatureSet existingFeatureSet = featureSets.get(2).toProto();
- assertThat(
- "Existing feature set has version 3", existingFeatureSet.getSpec().getVersion() == 3);
- assertThat(
- "Existing feature set has at least 1 feature",
- existingFeatureSet.getSpec().getFeaturesList().size() > 0);
// Map of constraint field name -> value, e.g. "shape" -> FixedShape object.
// If any of these fields are updated, SpecService should update the FeatureSet.
@@ -675,6 +451,10 @@ public void applyFeatureSetShouldUpdateFeatureSetWhenConstraintsAreUpdated()
contraintUpdates.put("time_of_day_domain", TimeOfDayDomain.getDefaultInstance());
for (Entry<String, Object> constraint : contraintUpdates.entrySet()) {
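+ // Stub a fresh stored feature set per iteration so constraint updates do not accumulate across fields.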
+ FeatureSet featureSet = newDummyFeatureSet("constraints", "project1");
+ FeatureSetProto.FeatureSet existingFeatureSet = featureSet.toProto();
+ when(featureSetRepository.findFeatureSetByNameAndProject_Name("constraints", "project1"))
+ .thenReturn(featureSet);
String name = constraint.getKey();
Object value = constraint.getValue();
FeatureSpec newFeatureSpec =
@@ -693,12 +473,8 @@ public void applyFeatureSetShouldUpdateFeatureSetWhenConstraintsAreUpdated()
assertEquals(
"Response should have CREATED status when field '" + name + "' is updated",
- Status.CREATED,
+ Status.UPDATED,
response.getStatus());
- assertEquals(
- "FeatureSet should have new version when field '" + name + "' is updated",
- existingFeatureSet.getSpec().getVersion() + 1,
- response.getFeatureSet().getSpec().getVersion());
assertEquals(
"Feature should have field '" + name + "' set correctly",
constraint.getValue(),
@@ -713,19 +489,12 @@ public void applyFeatureSetShouldUpdateFeatureSetWhenConstraintsAreUpdated()
@Test
public void applyFeatureSetShouldCreateProjectWhenNotAlreadyExists()
throws InvalidProtocolBufferException {
- Field f3f1 = new Field("f3f1", Enum.INT64);
- Field f3f2 = new Field("f3f2", Enum.INT64);
- Field f3e1 = new Field("f3e1", Enum.STRING);
+ Feature f3f1 = TestObjectFactory.CreateFeature("f3f1", Enum.INT64);
+ Feature f3f2 = TestObjectFactory.CreateFeature("f3f2", Enum.INT64);
+ Entity f3e1 = TestObjectFactory.CreateEntity("f3e1", Enum.STRING);
FeatureSetProto.FeatureSet incomingFeatureSet =
- (new FeatureSet(
- "f3",
- "newproject",
- 5,
- 100L,
- Arrays.asList(f3e1),
- Arrays.asList(f3f2, f3f1),
- defaultSource,
- FeatureSetStatus.STATUS_READY))
+ TestObjectFactory.CreateFeatureSet(
+ "f3", "project", Arrays.asList(f3e1), Arrays.asList(f3f2, f3f1))
.toProto();
ApplyFeatureSetResponse applyFeatureSetResponse =
@@ -736,22 +505,35 @@ public void applyFeatureSetShouldCreateProjectWhenNotAlreadyExists()
equalTo(incomingFeatureSet.getSpec().getProject()));
}
+ @Test
+ public void applyFeatureSetShouldUseDefaultProjectIfUnspecified()
+ throws InvalidProtocolBufferException {
+ Feature f3f1 = TestObjectFactory.CreateFeature("f3f1", Enum.INT64);
+ Feature f3f2 = TestObjectFactory.CreateFeature("f3f2", Enum.INT64);
+ Entity f3e1 = TestObjectFactory.CreateEntity("f3e1", Enum.STRING);
+
+ // In proto3, an unspecified project defaults to ""
+ FeatureSetProto.FeatureSet incomingFeatureSet =
+ TestObjectFactory.CreateFeatureSet("f3", "", Arrays.asList(f3e1), Arrays.asList(f3f2, f3f1))
+ .toProto();
+ ApplyFeatureSetResponse applyFeatureSetResponse =
+ specService.applyFeatureSet(incomingFeatureSet);
+ assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.CREATED));
+
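+ // An empty project name should be resolved to the default project.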
+ assertThat(
+ applyFeatureSetResponse.getFeatureSet().getSpec().getProject(),
+ equalTo(Project.DEFAULT_NAME));
+ }
+
@Test
public void applyFeatureSetShouldFailWhenProjectIsArchived()
throws InvalidProtocolBufferException {
- Field f3f1 = new Field("f3f1", Enum.INT64);
- Field f3f2 = new Field("f3f2", Enum.INT64);
- Field f3e1 = new Field("f3e1", Enum.STRING);
+ Feature f3f1 = TestObjectFactory.CreateFeature("f3f1", Enum.INT64);
+ Feature f3f2 = TestObjectFactory.CreateFeature("f3f2", Enum.INT64);
+ Entity f3e1 = TestObjectFactory.CreateEntity("f3e1", Enum.STRING);
FeatureSetProto.FeatureSet incomingFeatureSet =
- (new FeatureSet(
- "f3",
- "archivedproject",
- 5,
- 100L,
- Arrays.asList(f3e1),
- Arrays.asList(f3f2, f3f1),
- defaultSource,
- FeatureSetStatus.STATUS_READY))
+ TestObjectFactory.CreateFeatureSet(
+ "f3", "archivedproject", Arrays.asList(f3e1), Arrays.asList(f3f2, f3f1))
.toProto();
expectedException.expect(IllegalArgumentException.class);
@@ -759,6 +541,101 @@ public void applyFeatureSetShouldFailWhenProjectIsArchived()
specService.applyFeatureSet(incomingFeatureSet);
}
+ @Test
+ public void applyFeatureSetShouldAcceptFeatureLabels() throws InvalidProtocolBufferException {
+ List<EntitySpec> entitySpecs = new ArrayList<>();
+ entitySpecs.add(EntitySpec.newBuilder().setName("entity1").setValueType(Enum.INT64).build());
+
+ Map<String, String> featureLabels0 =
+ new HashMap<>() {
+ {
+ put("label1", "feast1");
+ }
+ };
+
+ Map<String, String> featureLabels1 =
+ new HashMap<>() {
+ {
+ put("label1", "feast1");
+ put("label2", "feast2");
+ }
+ };
+
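+ // The feature specs defined next should carry these label maps through applyFeatureSet unchanged.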
+ List