diff --git a/.dockerignore b/.dockerignore
index e9401f0cc9..0e3b22687d 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,2 +1,3 @@
 docs
+!docs/coverage
 charts
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index a30c747b13..c02551b014 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -1,8 +1,8 @@
 # Each line is a file pattern followed by one or more owners.
 # https://help.github.com/en/articles/about-code-owners
 
-* @zhilingc @pradithya @woop @davidheryanto @khorshuheng
-/core/ @zhilingc @pradithya
-/ingestion/ @zhilingc @pradithya
-/serving/ @zhilingc @pradithya
-/cli/ @zhilingc @pradithya
+* @zhilingc @woop @davidheryanto @khorshuheng @pyalex
+/core/ @zhilingc
+/ingestion/ @zhilingc
+/serving/ @zhilingc
+/cli/ @zhilingc
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index b9c8cd6dff..7a78437b5d 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -1,8 +1,8 @@
     <dependency>
       <groupId>org.hibernate</groupId>
       <artifactId>hibernate-core</artifactId>
-      <version>5.3.6.Final</version>
     </dependency>
@@ -187,7 +206,6 @@
     <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-core</artifactId>
-      <version>2.23.0</version>
       <scope>test</scope>
     </dependency>
@@ -207,5 +225,21 @@
       <artifactId>jaxb-api</artifactId>
     </dependency>
+
+    <dependency>
+      <groupId>javax.validation</groupId>
+      <artifactId>validation-api</artifactId>
+      <version>2.0.0.Final</version>
+    </dependency>
+    <dependency>
+      <groupId>org.hibernate.validator</groupId>
+      <artifactId>hibernate-validator</artifactId>
+      <version>6.1.2.Final</version>
+    </dependency>
+    <dependency>
+      <groupId>org.hibernate.validator</groupId>
+      <artifactId>hibernate-validator-annotation-processor</artifactId>
+      <version>6.1.2.Final</version>
+    </dependency>
+
diff --git a/core/src/main/java/feast/core/config/FeastProperties.java b/core/src/main/java/feast/core/config/FeastProperties.java
index b9c787b6c7..6dad278242 100644
--- a/core/src/main/java/feast/core/config/FeastProperties.java
+++ b/core/src/main/java/feast/core/config/FeastProperties.java
@@ -16,53 +16,225 @@
  */
 package feast.core.config;
 
-import java.util.Map;
+import feast.core.config.FeastProperties.StreamProperties.FeatureStreamOptions;
+import feast.core.validators.OneOfStrings;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.*;
+import javax.annotation.PostConstruct;
+import javax.validation.*;
+import javax.validation.constraints.NotBlank;
+import javax.validation.constraints.NotNull;
+import javax.validation.constraints.Positive;
 import lombok.Getter;
 import lombok.Setter;
+import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.boot.info.BuildProperties;
 
 @Getter
 @Setter
 @ConfigurationProperties(prefix = "feast", ignoreInvalidFields = true)
 public class FeastProperties {
 
-  private String version;
-  private JobProperties jobs;
+  /**
+   * Instantiates a new Feast properties.
+   *
+   * @param buildProperties Feast build properties
+   */
+  @Autowired
+  public FeastProperties(BuildProperties buildProperties) {
+    setVersion(buildProperties.getVersion());
+  }
+
+  /** Instantiates a new Feast properties. */
+  public FeastProperties() {}
+
+  /* Feast Core Build Version */
+  @NotBlank private String version = "unknown";
+
+  /* Population job properties */
+  @NotNull private JobProperties jobs;
+
+  @NotNull
+  /* Feast Kafka stream properties */
   private StreamProperties stream;
 
+  /** Feast job properties. These properties are used for ingestion jobs. */
   @Getter
   @Setter
   public static class JobProperties {
 
-    private String runner;
-    private Map<String, String> options;
+    @NotBlank
+    /* The active Apache Beam runner name. This name references one instance of the Runner class. */
+    private String activeRunner;
+
+    /** List of configured job runners. */
+    private List<Runner> runners = new ArrayList<>();
+
+    /**
+     * Gets a {@link Runner} instance of the active runner
+     *
+     * @return the active runner
+     */
+    public Runner getActiveRunner() {
+      for (Runner runner : getRunners()) {
+        if (activeRunner.equals(runner.getName())) {
+          return runner;
+        }
+      }
+      throw new RuntimeException(
+          String.format(
+              "Active runner is misconfigured. Could not find runner: %s.", activeRunner));
+    }
+
+    /** Job Runner class. */
+    @Getter
+    @Setter
+    public static class Runner {
+
+      /** Job runner name. This must be unique. */
+      String name;
+
+      /** Job runner type. DirectRunner and DataflowRunner are currently supported. */
+      String type;
+
+      /**
+       * Job runner configuration options. See the following for options
+       * https://api.docs.feast.dev/grpc/feast.core.pb.html#Runner
+       */
+      Map<String, String> options = new HashMap<>();
+
+      /**
+       * Gets the job runner type as an enum.
+       *
+       * @return Returns the job runner type as {@link feast.core.job.Runner}
+       */
+      public feast.core.job.Runner getType() {
+        return feast.core.job.Runner.fromString(type);
+      }
+    }
+
+    @NotNull
+    /* Population job metric properties */
     private MetricsProperties metrics;
-    private JobUpdatesProperties updates;
-  }
 
-  @Getter
-  @Setter
-  public static class JobUpdatesProperties {
+    /* Timeout in seconds for each attempt to update or submit a new job to the runner */
+    @Positive private long jobUpdateTimeoutSeconds;
 
-    private long timeoutSeconds;
-    private long pollingIntervalMillis;
+    /* Job update polling interval in milliseconds. How frequently Feast will update running jobs. */
+    @Positive private long pollingIntervalMilliseconds;
   }
 
+  /** Properties used to configure Feast's managed Kafka feature stream. */
   @Getter
   @Setter
   public static class StreamProperties {
 
+    /* Feature stream type. Only "kafka" is supported. */
+    @OneOfStrings({"kafka"})
+    @NotBlank
     private String type;
-    private Map<String, String> options;
+
+    /* Feature stream options */
+    @NotNull private FeatureStreamOptions options;
+
+    /** Feature stream options */
+    @Getter
+    @Setter
+    public static class FeatureStreamOptions {
+
+      /* Kafka topic to use for feature sets without source topics. */
+      @NotBlank private String topic = "feast-features";
+
+      /**
+       * Comma separated list of Kafka bootstrap servers. Used for feature sets without a defined
+       * source.
+       */
+      @NotBlank private String bootstrapServers = "localhost:9092";
+
+      /* Defines the number of copies of the managed feature stream Kafka topic. */
+      @Positive private short replicationFactor = 1;
+
+      /* Number of Kafka partitions to use for the managed feature stream. */
+      @Positive private int partitions = 1;
+    }
   }
 
+  /** Feast population job metrics */
   @Getter
   @Setter
   public static class MetricsProperties {
 
+    /* Population job metrics enabled */
     private boolean enabled;
+
+    /* Metric type. Possible options: statsd */
+    @OneOfStrings({"statsd"})
+    @NotBlank
     private String type;
+
+    /* Host of metric sink */
     private String host;
-    private int port;
+
+    /* Port of metric sink */
+    @Positive private int port;
   }
 
+  /**
+   * Validates all FeastProperties. This method runs after properties have been initialized, and
+   * validates each configuration class individually and conditionally.
+   */
+  @PostConstruct
+  public void validate() {
+    ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
+    Validator validator = factory.getValidator();
+
+    // Validate root fields in FeastProperties
+    Set<ConstraintViolation<FeastProperties>> violations = validator.validate(this);
+    if (!violations.isEmpty()) {
+      throw new ConstraintViolationException(violations);
+    }
+
+    // Validate Stream properties
+    Set<ConstraintViolation<StreamProperties>> streamPropertyViolations =
+        validator.validate(getStream());
+    if (!streamPropertyViolations.isEmpty()) {
+      throw new ConstraintViolationException(streamPropertyViolations);
+    }
+
+    // Validate Stream Options
+    Set<ConstraintViolation<FeatureStreamOptions>> featureStreamOptionsViolations =
+        validator.validate(getStream().getOptions());
+    if (!featureStreamOptionsViolations.isEmpty()) {
+      throw new ConstraintViolationException(featureStreamOptionsViolations);
+    }
+
+    // Validate JobProperties
+    Set<ConstraintViolation<JobProperties>> jobPropertiesViolations = validator.validate(getJobs());
+    if (!jobPropertiesViolations.isEmpty()) {
+      throw new ConstraintViolationException(jobPropertiesViolations);
+    }
+
+    // Validate MetricsProperties
+    if (getJobs().getMetrics().isEnabled()) {
+      Set<ConstraintViolation<MetricsProperties>> jobMetricViolations =
+          validator.validate(getJobs().getMetrics());
+      if (!jobMetricViolations.isEmpty()) {
+        throw new ConstraintViolationException(jobMetricViolations);
+      }
+      // Additional custom check for hostname value because there is no built-in Spring annotation
+      // to validate that the value is a DNS-resolvable hostname or an IP address.
+      try {
+        //noinspection ResultOfMethodCallIgnored
+        InetAddress.getByName(getJobs().getMetrics().getHost());
+      } catch (UnknownHostException e) {
+        throw new IllegalArgumentException(
+            "Invalid config value for feast.jobs.metrics.host: "
+                + getJobs().getMetrics().getHost()
+                + ". Make sure it is a valid IP address or DNS hostname e.g. localhost or 10.128.10.40. Error detail: "
+                + e.getMessage());
+      }
+    }
+  }
 }
diff --git a/core/src/main/java/feast/core/config/FeatureStreamConfig.java b/core/src/main/java/feast/core/config/FeatureStreamConfig.java
index 45de359ac7..c1982604c3 100644
--- a/core/src/main/java/feast/core/config/FeatureStreamConfig.java
+++ b/core/src/main/java/feast/core/config/FeatureStreamConfig.java
@@ -17,10 +17,10 @@
 package feast.core.config;
 
 import com.google.common.base.Strings;
-import feast.core.SourceProto.KafkaSourceConfig;
-import feast.core.SourceProto.SourceType;
 import feast.core.config.FeastProperties.StreamProperties;
 import feast.core.model.Source;
+import feast.proto.core.SourceProto.KafkaSourceConfig;
+import feast.proto.core.SourceProto.SourceType;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
@@ -48,8 +48,8 @@ public Source getDefaultSource(FeastProperties feastProperties) {
     SourceType featureStreamType = SourceType.valueOf(streamProperties.getType().toUpperCase());
     switch (featureStreamType) {
       case KAFKA:
-        String bootstrapServers = streamProperties.getOptions().get("bootstrapServers");
-        String topicName = streamProperties.getOptions().get("topic");
+        String bootstrapServers = streamProperties.getOptions().getBootstrapServers();
+        String topicName = streamProperties.getOptions().getTopic();
         Map<String, Object> map = new HashMap<>();
         map.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
         map.put(
@@ -59,9 +59,8 @@ public Source getDefaultSource(FeastProperties feastProperties) {
         NewTopic newTopic =
             new NewTopic(
                 topicName,
-                Integer.valueOf(streamProperties.getOptions().getOrDefault("numPartitions", "1")),
-                Short.valueOf(
-                    streamProperties.getOptions().getOrDefault("replicationFactor", "1")));
+                streamProperties.getOptions().getPartitions(),
+                streamProperties.getOptions().getReplicationFactor());
         CreateTopicsResult createTopicsResult =
             client.createTopics(Collections.singleton(newTopic));
         try {
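A minimal sketch of how the new runner configuration is expected to resolve, using only the accessors introduced above (names and option values are illustrative, not defaults):

// Hypothetical wiring; values are illustrative.
FeastProperties.JobProperties.Runner runner = new FeastProperties.JobProperties.Runner();
runner.setName("dataflow-prod");
runner.setType("DataflowRunner");
runner.setOptions(Map.of("project", "my-gcp-project", "region", "us-central1"));

FeastProperties.JobProperties jobs = new FeastProperties.JobProperties();
jobs.setRunners(List.of(runner));
jobs.setActiveRunner("dataflow-prod");

// Resolves by name; an unknown name raises "Active runner is misconfigured".
assert jobs.getActiveRunner().getType() == feast.core.job.Runner.DATAFLOW;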
diff --git a/core/src/main/java/feast/core/config/JobConfig.java b/core/src/main/java/feast/core/config/JobConfig.java
index 728fc0545b..30023de064 100644
--- a/core/src/main/java/feast/core/config/JobConfig.java
+++ b/core/src/main/java/feast/core/config/JobConfig.java
@@ -16,22 +16,16 @@
  */
 package feast.core.config;
 
-import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
-import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
-import com.google.api.client.json.jackson2.JacksonFactory;
-import com.google.api.services.dataflow.Dataflow;
-import com.google.api.services.dataflow.DataflowScopes;
-import com.google.common.base.Strings;
+import com.google.gson.Gson;
+import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.util.JsonFormat;
 import feast.core.config.FeastProperties.JobProperties;
-import feast.core.config.FeastProperties.JobUpdatesProperties;
 import feast.core.job.JobManager;
-import feast.core.job.Runner;
 import feast.core.job.dataflow.DataflowJobManager;
 import feast.core.job.direct.DirectJobRegistry;
 import feast.core.job.direct.DirectRunnerJobManager;
-import java.io.IOException;
-import java.security.GeneralSecurityException;
-import java.util.HashMap;
+import feast.proto.core.RunnerProto.DataflowRunnerConfigOptions;
+import feast.proto.core.RunnerProto.DirectRunnerConfigOptions;
 import java.util.Map;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -42,67 +36,39 @@
 @Slf4j
 @Configuration
 public class JobConfig {
+  private final Gson gson = new Gson();
 
   /**
-   * Get a JobManager according to the runner type and dataflow configuration.
+   * Get a JobManager according to the runner type and Dataflow configuration.
    *
    * @param feastProperties feast config properties
    */
   @Bean
   @Autowired
-  public JobManager getJobManager(
-      FeastProperties feastProperties, DirectJobRegistry directJobRegistry) {
+  public JobManager getJobManager(FeastProperties feastProperties)
+      throws InvalidProtocolBufferException {
 
     JobProperties jobProperties = feastProperties.getJobs();
-    Runner runner = Runner.fromString(jobProperties.getRunner());
-    if (jobProperties.getOptions() == null) {
-      jobProperties.setOptions(new HashMap<>());
-    }
-    Map<String, String> jobOptions = jobProperties.getOptions();
-    switch (runner) {
-      case DATAFLOW:
-        if (Strings.isNullOrEmpty(jobOptions.getOrDefault("region", null))
-            || Strings.isNullOrEmpty(jobOptions.getOrDefault("project", null))) {
-          log.error("Project and location of the Dataflow runner is not configured");
-          throw new IllegalStateException(
-              "Project and location of Dataflow runner must be specified for jobs to be run on Dataflow runner.");
-        }
-        try {
-          GoogleCredential credential =
-              GoogleCredential.getApplicationDefault().createScoped(DataflowScopes.all());
-          Dataflow dataflow =
-              new Dataflow(
-                  GoogleNetHttpTransport.newTrustedTransport(),
-                  JacksonFactory.getDefaultInstance(),
-                  credential);
+    FeastProperties.JobProperties.Runner runner = jobProperties.getActiveRunner();
+    Map<String, String> runnerConfigOptions = runner.getOptions();
+    String configJson = gson.toJson(runnerConfigOptions);
+
+    FeastProperties.MetricsProperties metrics = jobProperties.getMetrics();
 
-          return new DataflowJobManager(
-              dataflow, jobProperties.getOptions(), jobProperties.getMetrics());
-        } catch (IOException e) {
-          throw new IllegalStateException(
-              "Unable to find credential required for Dataflow monitoring API", e);
-        } catch (GeneralSecurityException e) {
-          throw new IllegalStateException("Security exception while connecting to Dataflow API", e);
-        } catch (Exception e) {
-          throw new IllegalStateException("Unable to initialize DataflowJobManager", e);
-        }
+    switch (runner.getType()) {
+      case DATAFLOW:
+        DataflowRunnerConfigOptions.Builder dataflowRunnerConfigOptions =
+            DataflowRunnerConfigOptions.newBuilder();
+        JsonFormat.parser().merge(configJson, dataflowRunnerConfigOptions);
+        return new DataflowJobManager(dataflowRunnerConfigOptions.build(), metrics);
       case DIRECT:
+        DirectRunnerConfigOptions.Builder directRunnerConfigOptions =
+            DirectRunnerConfigOptions.newBuilder();
+        JsonFormat.parser().merge(configJson, directRunnerConfigOptions);
         return new DirectRunnerJobManager(
-            jobProperties.getOptions(), directJobRegistry, jobProperties.getMetrics());
+            directRunnerConfigOptions.build(), new DirectJobRegistry(), metrics);
       default:
-        throw new IllegalArgumentException("Unsupported runner: " + jobProperties.getRunner());
+        throw new IllegalArgumentException("Unsupported runner: " + runner);
     }
   }
-
-  /** Get a direct job registry */
-  @Bean
-  public DirectJobRegistry directJobRegistry() {
-    return new DirectJobRegistry();
-  }
-
-  /** Extracts job update options from feast core options. */
-  @Bean
-  public JobUpdatesProperties jobUpdatesProperties(FeastProperties feastProperties) {
-    return feastProperties.getJobs().getUpdates();
-  }
 }
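The runner options arrive as a flat string map from configuration and are re-encoded as JSON so that JsonFormat can populate the runner's proto config. A condensed sketch of that round trip in isolation (keys and values are illustrative):

Map<String, String> options = Map.of("project", "my-gcp-project", "region", "us-central1");
String configJson = new Gson().toJson(options);

DataflowRunnerConfigOptions.Builder builder = DataflowRunnerConfigOptions.newBuilder();
// Throws InvalidProtocolBufferException if a key does not match a proto field,
// surfacing configuration typos early.
JsonFormat.parser().merge(configJson, builder);
assert builder.getProject().equals("my-gcp-project");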
diff --git a/core/src/main/java/feast/core/dao/FeatureSetRepository.java b/core/src/main/java/feast/core/dao/FeatureSetRepository.java
index 3eba210888..b136650dfd 100644
--- a/core/src/main/java/feast/core/dao/FeatureSetRepository.java
+++ b/core/src/main/java/feast/core/dao/FeatureSetRepository.java
@@ -25,25 +25,16 @@ public interface FeatureSetRepository extends JpaRepository<FeatureSet, Long> {
   long count();
 
-  // Find single feature set by project, name, and version
-  FeatureSet findFeatureSetByNameAndProject_NameAndVersion(
-      String name, String project, Integer version);
+  // Find single feature set by project and name
+  FeatureSet findFeatureSetByNameAndProject_Name(String name, String project);
 
-  // Find single latest version of a feature set by project and name (LIKE)
-  FeatureSet findFirstFeatureSetByNameLikeAndProject_NameOrderByVersionDesc(
-      String name, String project);
+  // find all feature sets and order by name
+  List<FeatureSet> findAllByOrderByNameAsc();
 
-  // find all feature sets and order by name and version
-  List<FeatureSet> findAllByOrderByNameAscVersionAsc();
+  // find all feature sets matching the given name pattern with a specific project.
+  List<FeatureSet> findAllByNameLikeAndProject_NameOrderByNameAsc(String name, String project_name);
 
-  // find all feature sets within a project and order by name and version
-  List<FeatureSet> findAllByProject_NameOrderByNameAscVersionAsc(String project_name);
-
-  // find all versions of feature sets matching the given name pattern with a specific project.
-  List<FeatureSet> findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc(
-      String name, String project_name);
-
-  // find all versions of feature sets matching the given name pattern and project pattern
-  List<FeatureSet> findAllByNameLikeAndProject_NameLikeOrderByNameAscVersionAsc(
+  // find all feature sets matching the given name pattern and project pattern
+  List<FeatureSet> findAllByNameLikeAndProject_NameLikeOrderByNameAsc(
       String name, String project_name);
 }
diff --git a/core/src/main/java/feast/core/dao/MetricsRepository.java b/core/src/main/java/feast/core/dao/MetricsRepository.java
deleted file mode 100644
index 7146e1e3ec..0000000000
--- a/core/src/main/java/feast/core/dao/MetricsRepository.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- * Copyright 2018-2019 The Feast Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package feast.core.dao;
-
-import feast.core.model.Metrics;
-import java.util.List;
-import org.springframework.data.jpa.repository.JpaRepository;
-import org.springframework.stereotype.Repository;
-
-@Repository
-public interface MetricsRepository extends JpaRepository<Metrics, Long> {
-  List<Metrics> findByJob_Id(String id);
-}
diff --git a/core/src/main/java/feast/core/grpc/CoreServiceImpl.java b/core/src/main/java/feast/core/grpc/CoreServiceImpl.java
index 42bc0ba23d..db65328b82 100644
--- a/core/src/main/java/feast/core/grpc/CoreServiceImpl.java
+++ b/core/src/main/java/feast/core/grpc/CoreServiceImpl.java
@@ -18,37 +18,38 @@
 
 import com.google.api.gax.rpc.InvalidArgumentException;
 import com.google.protobuf.InvalidProtocolBufferException;
-import feast.core.CoreServiceGrpc.CoreServiceImplBase;
-import feast.core.CoreServiceProto.ApplyFeatureSetRequest;
-import feast.core.CoreServiceProto.ApplyFeatureSetResponse;
-import feast.core.CoreServiceProto.ArchiveProjectRequest;
-import feast.core.CoreServiceProto.ArchiveProjectResponse;
-import feast.core.CoreServiceProto.CreateProjectRequest;
-import feast.core.CoreServiceProto.CreateProjectResponse;
-import feast.core.CoreServiceProto.GetFeastCoreVersionRequest;
-import feast.core.CoreServiceProto.GetFeastCoreVersionResponse;
-import feast.core.CoreServiceProto.GetFeatureSetRequest;
-import feast.core.CoreServiceProto.GetFeatureSetResponse;
-import feast.core.CoreServiceProto.ListFeatureSetsRequest;
-import feast.core.CoreServiceProto.ListFeatureSetsResponse;
-import feast.core.CoreServiceProto.ListIngestionJobsRequest;
-import feast.core.CoreServiceProto.ListIngestionJobsResponse;
-import feast.core.CoreServiceProto.ListProjectsRequest;
-import feast.core.CoreServiceProto.ListProjectsResponse;
-import feast.core.CoreServiceProto.ListStoresRequest;
-import feast.core.CoreServiceProto.ListStoresResponse;
-import feast.core.CoreServiceProto.RestartIngestionJobRequest;
-import feast.core.CoreServiceProto.RestartIngestionJobResponse;
-import feast.core.CoreServiceProto.StopIngestionJobRequest;
-import feast.core.CoreServiceProto.StopIngestionJobResponse;
-import feast.core.CoreServiceProto.UpdateStoreRequest;
-import feast.core.CoreServiceProto.UpdateStoreResponse;
+import feast.core.config.FeastProperties;
 import feast.core.exception.RetrievalException;
 import feast.core.grpc.interceptors.MonitoringInterceptor;
 import feast.core.model.Project;
 import feast.core.service.AccessManagementService;
 import feast.core.service.JobService;
 import feast.core.service.SpecService;
+import feast.proto.core.CoreServiceGrpc.CoreServiceImplBase;
+import feast.proto.core.CoreServiceProto.ApplyFeatureSetRequest;
+import feast.proto.core.CoreServiceProto.ApplyFeatureSetResponse;
+import feast.proto.core.CoreServiceProto.ArchiveProjectRequest;
+import feast.proto.core.CoreServiceProto.ArchiveProjectResponse;
+import feast.proto.core.CoreServiceProto.CreateProjectRequest;
+import feast.proto.core.CoreServiceProto.CreateProjectResponse;
+import feast.proto.core.CoreServiceProto.GetFeastCoreVersionRequest;
+import feast.proto.core.CoreServiceProto.GetFeastCoreVersionResponse;
+import feast.proto.core.CoreServiceProto.GetFeatureSetRequest;
+import feast.proto.core.CoreServiceProto.GetFeatureSetResponse;
+import feast.proto.core.CoreServiceProto.ListFeatureSetsRequest;
+import feast.proto.core.CoreServiceProto.ListFeatureSetsResponse;
+import feast.proto.core.CoreServiceProto.ListIngestionJobsRequest;
+import feast.proto.core.CoreServiceProto.ListIngestionJobsResponse;
+import feast.proto.core.CoreServiceProto.ListProjectsRequest;
+import feast.proto.core.CoreServiceProto.ListProjectsResponse;
+import feast.proto.core.CoreServiceProto.ListStoresRequest;
+import feast.proto.core.CoreServiceProto.ListStoresResponse;
+import feast.proto.core.CoreServiceProto.RestartIngestionJobRequest;
+import feast.proto.core.CoreServiceProto.RestartIngestionJobResponse;
+import feast.proto.core.CoreServiceProto.StopIngestionJobRequest;
+import feast.proto.core.CoreServiceProto.StopIngestionJobResponse;
+import feast.proto.core.CoreServiceProto.UpdateStoreRequest;
+import feast.proto.core.CoreServiceProto.UpdateStoreResponse;
 import io.grpc.Status;
 import io.grpc.StatusRuntimeException;
 import io.grpc.stub.StreamObserver;
@@ -64,6 +65,7 @@
 @GRpcService(interceptors = {MonitoringInterceptor.class})
 public class CoreServiceImpl extends CoreServiceImplBase {
 
+  private final FeastProperties feastProperties;
   private SpecService specService;
   private AccessManagementService accessManagementService;
   private JobService jobService;
@@ -72,17 +74,28 @@ public class CoreServiceImpl extends CoreServiceImplBase {
   public CoreServiceImpl(
       SpecService specService,
       AccessManagementService accessManagementService,
-      JobService jobService) {
+      JobService jobService,
+      FeastProperties feastProperties) {
     this.specService = specService;
     this.accessManagementService = accessManagementService;
     this.jobService = jobService;
+    this.feastProperties = feastProperties;
   }
 
   @Override
   public void getFeastCoreVersion(
       GetFeastCoreVersionRequest request,
       StreamObserver<GetFeastCoreVersionResponse> responseObserver) {
-    super.getFeastCoreVersion(request, responseObserver);
+    try {
+      GetFeastCoreVersionResponse response =
+          GetFeastCoreVersionResponse.newBuilder().setVersion(feastProperties.getVersion()).build();
+      responseObserver.onNext(response);
+      responseObserver.onCompleted();
+    } catch (RetrievalException | StatusRuntimeException e) {
+      log.error("Could not determine Feast Core version: ", e);
+      responseObserver.onError(
+          Status.INTERNAL.withDescription(e.getMessage()).withCause(e).asRuntimeException());
+    }
   }
 
   @Override
@@ -183,6 +196,17 @@ public void archiveProject(
       accessManagementService.archiveProject(request.getName());
       responseObserver.onNext(ArchiveProjectResponse.getDefaultInstance());
       responseObserver.onCompleted();
+    } catch (IllegalArgumentException e) {
+      log.error("Received an invalid request on calling archiveProject method:", e);
+      responseObserver.onError(
+          Status.INVALID_ARGUMENT
+              .withDescription(e.getMessage())
+              .withCause(e)
+              .asRuntimeException());
+    } catch (UnsupportedOperationException e) {
+      log.error("Attempted to archive an unsupported project:", e);
+      responseObserver.onError(
+          Status.UNIMPLEMENTED.withDescription(e.getMessage()).withCause(e).asRuntimeException());
     } catch (Exception e) {
       log.error("Exception has occurred in the createProject method: ", e);
       responseObserver.onError(
diff --git a/core/src/main/java/feast/core/grpc/HealthServiceImpl.java b/core/src/main/java/feast/core/grpc/HealthServiceImpl.java
new file mode 100644
index 0000000000..3bd2f8748f
--- /dev/null
+++ b/core/src/main/java/feast/core/grpc/HealthServiceImpl.java
@@ -0,0 +1,54 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.core.grpc;
+
+import feast.core.service.AccessManagementService;
+import io.grpc.Status;
+import io.grpc.health.v1.HealthGrpc.HealthImplBase;
+import io.grpc.health.v1.HealthProto.HealthCheckRequest;
+import io.grpc.health.v1.HealthProto.HealthCheckResponse;
+import io.grpc.health.v1.HealthProto.HealthCheckResponse.ServingStatus;
+import io.grpc.stub.StreamObserver;
+import lombok.extern.slf4j.Slf4j;
+import org.lognet.springboot.grpc.GRpcService;
+import org.springframework.beans.factory.annotation.Autowired;
+
+@Slf4j
+@GRpcService
+public class HealthServiceImpl extends HealthImplBase {
+  private final AccessManagementService accessManagementService;
+
+  @Autowired
+  public HealthServiceImpl(AccessManagementService accessManagementService) {
+    this.accessManagementService = accessManagementService;
+  }
+
+  @Override
+  public void check(
+      HealthCheckRequest request, StreamObserver<HealthCheckResponse> responseObserver) {
+    try {
+      accessManagementService.listProjects();
+      responseObserver.onNext(
+          HealthCheckResponse.newBuilder().setStatus(ServingStatus.SERVING).build());
+      responseObserver.onCompleted();
+    } catch (Exception e) {
+      log.error("Health Check: unable to retrieve projects. Error: {}", e.getMessage(), e);
+      responseObserver.onError(
+          Status.INTERNAL.withDescription(e.getMessage()).withCause(e).asRuntimeException());
+    }
+  }
+}
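The new service implements the standard grpc.health.v1 protocol, so any stock gRPC health client can probe Core. A minimal sketch of such a probe (host and port are illustrative):

ManagedChannel channel =
    ManagedChannelBuilder.forAddress("localhost", 6565).usePlaintext().build();
HealthCheckResponse response =
    HealthGrpc.newBlockingStub(channel).check(HealthCheckRequest.getDefaultInstance());
// Reports SERVING only while Core can list projects, i.e. its database is reachable.
System.out.println(response.getStatus());
channel.shutdown();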
diff --git a/core/src/main/java/feast/core/http/HealthController.java b/core/src/main/java/feast/core/http/HealthController.java
deleted file mode 100644
index 2451ed793e..0000000000
--- a/core/src/main/java/feast/core/http/HealthController.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- * Copyright 2018-2019 The Feast Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package feast.core.http;
-
-import static org.springframework.http.HttpStatus.INTERNAL_SERVER_ERROR;
-
-import java.sql.Connection;
-import java.sql.SQLException;
-import javax.sql.DataSource;
-import lombok.extern.slf4j.Slf4j;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.ResponseEntity;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RequestMethod;
-import org.springframework.web.bind.annotation.RestController;
-
-/** Web http for pod health-check endpoints. */
-@Slf4j
-@RestController
-public class HealthController {
-
-  private final DataSource db;
-
-  @Autowired
-  public HealthController(DataSource datasource) {
-    this.db = datasource;
-  }
-
-  /**
-   * /ping endpoint checks if the application is ready to serve traffic by checking if it is able to
-   * access the metadata db.
-   */
-  @RequestMapping(value = "/ping", method = RequestMethod.GET)
-  public ResponseEntity<String> ping() {
-    return ResponseEntity.ok("pong");
-  }
-
-  /**
-   * /healthz endpoint checks if the application is healthy by checking if the application still has
-   * access to the metadata db.
-   */
-  @RequestMapping(value = "/healthz", method = RequestMethod.GET)
-  public ResponseEntity<String> healthz() {
-    try (Connection conn = db.getConnection()) {
-      if (conn.isValid(10)) {
-        return ResponseEntity.ok("healthy");
-      }
-      log.error("Unable to reach DB");
-      return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
-          .body("Unable to establish connection with DB");
-    } catch (SQLException e) {
-      log.error("Unable to reach DB: {}", e);
-      return ResponseEntity.status(INTERNAL_SERVER_ERROR).body(e.getMessage());
-    }
-  }
-}
diff --git a/core/src/main/java/feast/core/job/JobUpdateTask.java b/core/src/main/java/feast/core/job/JobUpdateTask.java
index f3afe84df7..056da34b3a 100644
--- a/core/src/main/java/feast/core/job/JobUpdateTask.java
+++ b/core/src/main/java/feast/core/job/JobUpdateTask.java
@@ -16,9 +16,7 @@
  */
 package feast.core.job;
 
-import feast.core.FeatureSetProto;
-import feast.core.SourceProto;
-import feast.core.StoreProto;
+import com.google.common.collect.Sets;
 import feast.core.log.Action;
 import feast.core.log.AuditLogger;
 import feast.core.log.Resource;
@@ -27,10 +25,10 @@
 import feast.core.model.JobStatus;
 import feast.core.model.Source;
 import feast.core.model.Store;
+import feast.proto.core.FeatureSetProto.FeatureSetStatus;
 import java.time.Instant;
 import java.util.List;
 import java.util.Optional;
-import java.util.Set;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
@@ -38,7 +36,6 @@
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
-import java.util.stream.Collectors;
 import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
 
@@ -52,134 +49,101 @@
 @Getter
 public class JobUpdateTask implements Callable<Job> {
 
-  private final List<FeatureSetProto.FeatureSet> featureSets;
-  private final SourceProto.Source sourceSpec;
-  private final StoreProto.Store store;
+  private final List<FeatureSet> featureSets;
+  private final Source source;
+  private final Store store;
   private final Optional<Job> currentJob;
-  private JobManager jobManager;
-  private long jobUpdateTimeoutSeconds;
+  private final JobManager jobManager;
+  private final long jobUpdateTimeoutSeconds;
+  private final String runnerName;
 
   public JobUpdateTask(
-      List<FeatureSetProto.FeatureSet> featureSets,
-      SourceProto.Source sourceSpec,
-      StoreProto.Store store,
+      List<FeatureSet> featureSets,
+      Source source,
+      Store store,
       Optional<Job> currentJob,
       JobManager jobManager,
       long jobUpdateTimeoutSeconds) {
 
     this.featureSets = featureSets;
-    this.sourceSpec = sourceSpec;
+    this.source = source;
     this.store = store;
     this.currentJob = currentJob;
     this.jobManager = jobManager;
    this.jobUpdateTimeoutSeconds = jobUpdateTimeoutSeconds;
+    this.runnerName = jobManager.getRunnerType().toString();
   }
 
   @Override
   public Job call() {
     ExecutorService executorService = Executors.newSingleThreadExecutor();
-    Source source = Source.fromProto(sourceSpec);
     Future<Job> submittedJob;
-    if (currentJob.isPresent()) {
-      Set<String> existingFeatureSetsPopulatedByJob =
-          currentJob.get().getFeatureSets().stream()
-              .map(FeatureSet::getId)
-              .collect(Collectors.toSet());
-      Set<String> newFeatureSetsPopulatedByJob =
-          featureSets.stream()
-              .map(fs -> FeatureSet.fromProto(fs).getId())
-              .collect(Collectors.toSet());
-      if (existingFeatureSetsPopulatedByJob.size() == newFeatureSetsPopulatedByJob.size()
-          && existingFeatureSetsPopulatedByJob.containsAll(newFeatureSetsPopulatedByJob)) {
-        Job job = currentJob.get();
-        JobStatus newJobStatus = jobManager.getJobStatus(job);
-        if (newJobStatus != job.getStatus()) {
-          AuditLogger.log(
-              Resource.JOB,
-              job.getId(),
-              Action.STATUS_CHANGE,
-              "Job status updated: changed from %s to %s",
-              job.getStatus(),
-              newJobStatus);
-        }
-        job.setStatus(newJobStatus);
-        return job;
+
+    if (currentJob.isEmpty()) {
+      submittedJob = executorService.submit(this::createJob);
+    } else {
+      Job job = currentJob.get();
+
+      if (requiresUpdate(job)) {
+        submittedJob = executorService.submit(() -> updateJob(job));
       } else {
-        submittedJob =
-            executorService.submit(() -> updateJob(currentJob.get(), featureSets, store));
+        return updateStatus(job);
       }
-    } else {
-      String jobId = createJobId(source.getId(), store.getName());
-      submittedJob = executorService.submit(() -> startJob(jobId, featureSets, sourceSpec, store));
     }
 
-    Job job = null;
     try {
-      job = submittedJob.get(getJobUpdateTimeoutSeconds(), TimeUnit.SECONDS);
+      return submittedJob.get(getJobUpdateTimeoutSeconds(), TimeUnit.SECONDS);
     } catch (InterruptedException | ExecutionException | TimeoutException e) {
       log.warn("Unable to start job for source {} and sink {}: {}", source, store, e.getMessage());
+      return null;
+    } finally {
       executorService.shutdownNow();
     }
-    return job;
+  }
+
+  boolean requiresUpdate(Job job) {
+    // If the set of feature sets has changed
+    if (!Sets.newHashSet(featureSets).equals(Sets.newHashSet(job.getFeatureSets()))) {
+      return true;
+    }
+    // If any of the incoming feature sets were updated
+    for (FeatureSet featureSet : featureSets) {
+      if (featureSet.getStatus() == FeatureSetStatus.STATUS_PENDING) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  private Job createJob() {
+    String jobId = createJobId(source.getId(), store.getName());
+    return startJob(jobId);
   }
 
   /** Start or update the job to ingest data to the sink. */
-  private Job startJob(
-      String jobId,
-      List<FeatureSetProto.FeatureSet> featureSetProtos,
-      SourceProto.Source source,
-      StoreProto.Store sinkSpec) {
-
-    List<FeatureSet> featureSets =
-        featureSetProtos.stream()
-            .map(
-                fsp ->
-                    FeatureSet.fromProto(
-                        FeatureSetProto.FeatureSet.newBuilder()
-                            .setSpec(fsp.getSpec())
-                            .setMeta(fsp.getMeta())
-                            .build()))
-            .collect(Collectors.toList());
+  private Job startJob(String jobId) {
+
     Job job =
         new Job(
-            jobId,
-            "",
-            jobManager.getRunnerType().name(),
-            Source.fromProto(source),
-            Store.fromProto(sinkSpec),
-            featureSets,
-            JobStatus.PENDING);
+            jobId, "", jobManager.getRunnerType(), source, store, featureSets, JobStatus.PENDING);
     try {
-      AuditLogger.log(
-          Resource.JOB,
-          jobId,
-          Action.SUBMIT,
-          "Building graph and submitting to %s",
-          jobManager.getRunnerType().toString());
+      logAudit(Action.SUBMIT, job, "Building graph and submitting to %s", runnerName);
 
       job = jobManager.startJob(job);
-      if (job.getExtId().isEmpty()) {
+      var extId = job.getExtId();
+      if (extId.isEmpty()) {
         throw new RuntimeException(
             String.format("Could not submit job: \n%s", "unable to retrieve job external id"));
       }
 
-      AuditLogger.log(
-          Resource.JOB,
-          jobId,
-          Action.STATUS_CHANGE,
-          "Job submitted to runner %s with ext id %s.",
-          jobManager.getRunnerType().toString(),
-          job.getExtId());
+      var auditMessage = "Job submitted to runner %s with ext id %s.";
+      logAudit(Action.STATUS_CHANGE, job, auditMessage, runnerName, extId);
 
       return job;
     } catch (Exception e) {
       log.error(e.getMessage());
-      AuditLogger.log(
-          Resource.JOB,
-          jobId,
-          Action.STATUS_CHANGE,
-          "Job failed to be submitted to runner %s. Job status changed to ERROR.",
-          jobManager.getRunnerType().toString());
+      var auditMessage = "Job failed to be submitted to runner %s. Job status changed to ERROR.";
+      logAudit(Action.STATUS_CHANGE, job, auditMessage, runnerName);
 
       job.setStatus(JobStatus.ERROR);
       return job;
@@ -187,33 +151,33 @@
   }
 
   /** Update the given job */
-  private Job updateJob(
-      Job job, List<FeatureSetProto.FeatureSet> featureSets, StoreProto.Store store) {
-    job.setFeatureSets(
-        featureSets.stream()
-            .map(
-                fs ->
-                    FeatureSet.fromProto(
-                        FeatureSetProto.FeatureSet.newBuilder()
-                            .setSpec(fs.getSpec())
-                            .setMeta(fs.getMeta())
-                            .build()))
-            .collect(Collectors.toList()));
-    job.setStore(feast.core.model.Store.fromProto(store));
-    AuditLogger.log(
-        Resource.JOB,
-        job.getId(),
-        Action.UPDATE,
-        "Updating job %s for runner %s",
-        job.getId(),
-        jobManager.getRunnerType().toString());
+  private Job updateJob(Job job) {
+    job.setFeatureSets(featureSets);
+    job.setStore(store);
+    logAudit(Action.UPDATE, job, "Updating job %s for runner %s", job.getId(), runnerName);
     return jobManager.updateJob(job);
   }
 
+  private Job updateStatus(Job job) {
+    JobStatus currentStatus = job.getStatus();
+    JobStatus newStatus = jobManager.getJobStatus(job);
+    if (newStatus != currentStatus) {
+      var auditMessage = "Job status updated: changed from %s to %s";
+      logAudit(Action.STATUS_CHANGE, job, auditMessage, currentStatus, newStatus);
+    }
+
+    job.setStatus(newStatus);
+    return job;
+  }
+
   String createJobId(String sourceId, String storeName) {
     String dateSuffix = String.valueOf(Instant.now().toEpochMilli());
     String sourceIdTrunc = sourceId.split("/")[0].toLowerCase();
     String jobId = String.format("%s-to-%s", sourceIdTrunc, storeName) + dateSuffix;
     return jobId.replaceAll("_", "-");
   }
+
+  private void logAudit(Action action, Job job, String detail, Object... args) {
+    AuditLogger.log(Resource.JOB, job.getId(), action, detail, args);
+  }
 }
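requiresUpdate compares the job's feature sets as unordered sets and also forces an update while any incoming feature set is still pending. A condensed restatement of the same condition (not part of the change):

boolean needsUpdate =
    !Sets.newHashSet(featureSets).equals(Sets.newHashSet(job.getFeatureSets()))
        || featureSets.stream()
            .anyMatch(fs -> fs.getStatus() == FeatureSetStatus.STATUS_PENDING);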
diff --git a/core/src/main/java/feast/core/job/Runner.java b/core/src/main/java/feast/core/job/Runner.java
index 4e2033fed6..acccb70c8b 100644
--- a/core/src/main/java/feast/core/job/Runner.java
+++ b/core/src/main/java/feast/core/job/Runner.java
@@ -16,33 +16,37 @@
  */
 package feast.core.job;
 
+import java.util.NoSuchElementException;
+
+/**
+ * An Apache Beam Runner, for which Feast Core supports managing ingestion jobs.
+ *
+ * @see Beam Runners
+ */
 public enum Runner {
   DATAFLOW("DataflowRunner"),
   FLINK("FlinkRunner"),
   DIRECT("DirectRunner");
 
-  private final String name;
+  private final String humanName;
 
-  Runner(String name) {
-    this.name = name;
+  Runner(String humanName) {
+    this.humanName = humanName;
   }
 
-  /**
-   * Get the human readable name of this runner. Returns a human readable name of the runner that
-   * can be used for logging/config files/etc.
-   */
+  /** Returns the human readable name of this runner, usable in logging, config files, etc. */
   @Override
   public String toString() {
-    return name;
+    return humanName;
   }
 
   /** Parses a runner from its human readable name. */
-  public static Runner fromString(String runner) {
+  public static Runner fromString(String humanName) {
     for (Runner r : Runner.values()) {
-      if (r.toString().equals(runner)) {
+      if (r.toString().equals(humanName)) {
         return r;
       }
     }
-    throw new IllegalArgumentException("Unknown value: " + runner);
+    throw new NoSuchElementException("Unknown Runner value: " + humanName);
   }
 }
diff --git a/core/src/main/java/feast/core/job/dataflow/DataflowJobConfig.java b/core/src/main/java/feast/core/job/dataflow/DataflowJobConfig.java
deleted file mode 100644
index a9bbf345d1..0000000000
--- a/core/src/main/java/feast/core/job/dataflow/DataflowJobConfig.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- * Copyright 2018-2019 The Feast Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package feast.core.job.dataflow;
-
-import lombok.Value;
-
-@Value
-public class DataflowJobConfig {
-  private String projectId;
-  private String location;
-}
diff --git a/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java b/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java
index c2313d75ec..2c3da255f5 100644
--- a/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java
+++ b/core/src/main/java/feast/core/job/dataflow/DataflowJobManager.java
@@ -18,30 +18,34 @@
 
 import static feast.core.util.PipelineUtil.detectClassPathResourcesToStage;
 
+import com.google.api.client.auth.oauth2.Credential;
+import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
+import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
+import com.google.api.client.json.jackson2.JacksonFactory;
 import com.google.api.services.dataflow.Dataflow;
+import com.google.api.services.dataflow.DataflowScopes;
 import com.google.common.base.Strings;
 import com.google.protobuf.InvalidProtocolBufferException;
 import com.google.protobuf.util.JsonFormat;
-import feast.core.FeatureSetProto;
-import feast.core.SourceProto;
-import feast.core.StoreProto;
 import feast.core.config.FeastProperties.MetricsProperties;
 import feast.core.exception.JobExecutionException;
 import feast.core.job.JobManager;
 import feast.core.job.Runner;
 import feast.core.job.option.FeatureSetJsonByteConverter;
 import feast.core.model.*;
-import feast.core.util.TypeConversion;
 import feast.ingestion.ImportJob;
 import feast.ingestion.options.BZip2Compressor;
 import feast.ingestion.options.ImportOptions;
 import feast.ingestion.options.OptionCompressor;
+import feast.proto.core.FeatureSetProto;
+import feast.proto.core.RunnerProto.DataflowRunnerConfigOptions;
+import feast.proto.core.SourceProto;
+import feast.proto.core.StoreProto;
 import java.io.IOException;
+import java.security.GeneralSecurityException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.beam.runners.dataflow.DataflowPipelineJob;
 import org.apache.beam.runners.dataflow.DataflowRunner;
@@ -56,16 +60,48 @@
 
   private final String projectId;
   private final String location;
   private final Dataflow dataflow;
-  private final Map<String, String> defaultOptions;
+  private final DataflowRunnerConfig defaultOptions;
   private final MetricsProperties metrics;
 
   public DataflowJobManager(
-      Dataflow dataflow, Map<String, String> defaultOptions, MetricsProperties metricsProperties) {
-    this.defaultOptions = defaultOptions;
+      DataflowRunnerConfigOptions runnerConfigOptions, MetricsProperties metricsProperties) {
+    this(runnerConfigOptions, metricsProperties, getGoogleCredential());
+  }
+
+  public DataflowJobManager(
+      DataflowRunnerConfigOptions runnerConfigOptions,
+      MetricsProperties metricsProperties,
+      Credential credential) {
+
+    defaultOptions = new DataflowRunnerConfig(runnerConfigOptions);
+    Dataflow dataflow = null;
+    try {
+      dataflow =
+          new Dataflow(
+              GoogleNetHttpTransport.newTrustedTransport(),
+              JacksonFactory.getDefaultInstance(),
+              credential);
+    } catch (GeneralSecurityException e) {
+      throw new IllegalStateException("Security exception while connecting to Dataflow API", e);
+    } catch (IOException e) {
+      throw new IllegalStateException("Unable to initialize DataflowJobManager", e);
+    }
+
+    this.dataflow = dataflow;
     this.metrics = metricsProperties;
-    this.projectId = defaultOptions.get("project");
-    this.location = defaultOptions.get("region");
+    this.projectId = defaultOptions.getProject();
+    this.location = defaultOptions.getRegion();
+  }
+
+  private static Credential getGoogleCredential() {
+    GoogleCredential credential = null;
+    try {
+      credential = GoogleCredential.getApplicationDefault().createScoped(DataflowScopes.all());
+    } catch (IOException e) {
+      throw new IllegalStateException(
+          "Unable to find credential required for Dataflow monitoring API", e);
+    }
+    return credential;
   }
 
   @Override
@@ -80,12 +116,15 @@ public Job startJob(Job job) {
       for (FeatureSet featureSet : job.getFeatureSets()) {
         featureSetProtos.add(featureSet.toProto());
       }
-      return submitDataflowJob(
-          job.getId(),
-          featureSetProtos,
-          job.getSource().toProto(),
-          job.getStore().toProto(),
-          false);
+      String extId =
+          submitDataflowJob(
+              job.getId(),
+              featureSetProtos,
+              job.getSource().toProto(),
+              job.getStore().toProto(),
+              false);
+      job.setExtId(extId);
+      return job;
 
     } catch (InvalidProtocolBufferException e) {
       log.error(e.getMessage());
@@ -110,8 +149,18 @@ public Job updateJob(Job job) {
       for (FeatureSet featureSet : job.getFeatureSets()) {
         featureSetProtos.add(featureSet.toProto());
       }
-      return submitDataflowJob(
-          job.getId(), featureSetProtos, job.getSource().toProto(), job.getStore().toProto(), true);
+
+      String extId =
+          submitDataflowJob(
+              job.getId(),
+              featureSetProtos,
+              job.getSource().toProto(),
+              job.getStore().toProto(),
+              true);
+
+      job.setExtId(extId);
+      job.setStatus(JobStatus.PENDING);
+      return job;
     } catch (InvalidProtocolBufferException e) {
       log.error(e.getMessage());
       throw new IllegalArgumentException(
@@ -153,16 +202,15 @@ public void abortJob(String dataflowJobId) {
   }
 
   /**
-   * Restart a restart dataflow job. Dataflow should ensure continuity between during the restart,
-   * so no data should be lost during the restart operation.
+   * Restart a Dataflow job. Dataflow should ensure continuity such that no data should be lost
+   * during the restart operation.
    *
    * @param job job to restart
    * @return the restarted job
    */
   @Override
   public Job restartJob(Job job) {
-    JobStatus status = job.getStatus();
-    if (JobStatus.getTerminalState().contains(status)) {
+    if (job.getStatus().isTerminal()) {
       // job yet not running: just start job
       return this.startJob(job);
     } else {
@@ -180,7 +228,7 @@
    */
   @Override
   public JobStatus getJobStatus(Job job) {
-    if (!Runner.DATAFLOW.name().equals(job.getRunner())) {
+    if (job.getRunner() != RUNNER_TYPE) {
       return job.getStatus();
     }
 
@@ -197,7 +245,7 @@
     return JobStatus.UNKNOWN;
   }
 
-  private Job submitDataflowJob(
+  private String submitDataflowJob(
       String jobName,
       List<FeatureSetProto.FeatureSet> featureSetProtos,
       SourceProto.Source source,
@@ -206,17 +254,8 @@
     try {
       ImportOptions pipelineOptions = getPipelineOptions(jobName, featureSetProtos, sink, update);
       DataflowPipelineJob pipelineResult = runPipeline(pipelineOptions);
-      List<FeatureSet> featureSets =
-          featureSetProtos.stream().map(FeatureSet::fromProto).collect(Collectors.toList());
       String jobId = waitForJobToRun(pipelineResult);
-      return new Job(
-          jobName,
-          jobId,
-          getRunnerType().name(),
-          Source.fromProto(source),
-          Store.fromProto(sink),
-          featureSets,
-          JobStatus.PENDING);
+      return jobId;
     } catch (Exception e) {
       log.error("Error submitting job", e);
       throw new JobExecutionException(String.format("Error running ingestion job: %s", e), e);
@@ -228,9 +267,9 @@
       List<FeatureSetProto.FeatureSet> featureSets,
       StoreProto.Store sink,
       boolean update)
-      throws IOException {
-    String[] args = TypeConversion.convertMapToArgs(defaultOptions);
-    ImportOptions pipelineOptions = PipelineOptionsFactory.fromArgs(args).as(ImportOptions.class);
+      throws IOException, IllegalAccessException {
+    ImportOptions pipelineOptions =
+        PipelineOptionsFactory.fromArgs(defaultOptions.toArgs()).as(ImportOptions.class);
 
     OptionCompressor<List<FeatureSetProto.FeatureSet>> featureSetJsonCompressor =
         new BZip2Compressor<>(new FeatureSetJsonByteConverter());
@@ -238,6 +277,7 @@
     pipelineOptions.setFeatureSetJson(featureSetJsonCompressor.compress(featureSets));
     pipelineOptions.setStoreJson(Collections.singletonList(JsonFormat.printer().print(sink)));
     pipelineOptions.setProject(projectId);
+    pipelineOptions.setDefaultFeastProject(Project.DEFAULT_NAME);
     pipelineOptions.setUpdate(update);
     pipelineOptions.setRunner(DataflowRunner.class);
     pipelineOptions.setJobName(jobName);
diff --git a/core/src/main/java/feast/core/job/dataflow/DataflowRunnerConfig.java b/core/src/main/java/feast/core/job/dataflow/DataflowRunnerConfig.java
new file mode 100644
index 0000000000..85628d2cd0
--- /dev/null
+++ b/core/src/main/java/feast/core/job/dataflow/DataflowRunnerConfig.java
@@ -0,0 +1,101 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2019 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.core.job.dataflow;
+
+import feast.core.job.option.RunnerConfig;
+import feast.proto.core.RunnerProto.DataflowRunnerConfigOptions;
+import java.util.*;
+import javax.validation.*;
+import javax.validation.constraints.NotBlank;
+import lombok.Getter;
+import lombok.Setter;
+
+/** DataflowRunnerConfig contains configuration fields for the Dataflow job runner. */
+@Getter
+@Setter
+public class DataflowRunnerConfig extends RunnerConfig {
+
+  public DataflowRunnerConfig(DataflowRunnerConfigOptions runnerConfigOptions) {
+    this.project = runnerConfigOptions.getProject();
+    this.region = runnerConfigOptions.getRegion();
+    this.zone = runnerConfigOptions.getZone();
+    this.serviceAccount = runnerConfigOptions.getServiceAccount();
+    this.network = runnerConfigOptions.getNetwork();
+    this.subnetwork = runnerConfigOptions.getSubnetwork();
+    this.workerMachineType = runnerConfigOptions.getWorkerMachineType();
+    this.autoscalingAlgorithm = runnerConfigOptions.getAutoscalingAlgorithm();
+    this.usePublicIps = runnerConfigOptions.getUsePublicIps();
+    this.tempLocation = runnerConfigOptions.getTempLocation();
+    this.maxNumWorkers = runnerConfigOptions.getMaxNumWorkers();
+    this.deadLetterTableSpec = runnerConfigOptions.getDeadLetterTableSpec();
+    this.labels = runnerConfigOptions.getLabelsMap();
+    validate();
+  }
+
+  /* Project id to use when launching jobs. */
+  @NotBlank String project;
+
+  /* The Google Compute Engine region for creating Dataflow jobs. */
+  @NotBlank String region;
+
+  /* GCP availability zone for operations. */
+  @NotBlank String zone;
+
+  /* Run the job as a specific service account, instead of the default GCE robot. */
+  String serviceAccount;
+
+  /* GCE network for launching workers. */
+  @NotBlank String network;
+
+  /* GCE subnetwork for launching workers. */
+  @NotBlank String subnetwork;
+
+  /* Machine type to create Dataflow worker VMs as. */
+  String workerMachineType;
+
+  /* The autoscaling algorithm to use for the workerpool. */
+  String autoscalingAlgorithm;
+
+  /* Specifies whether worker pools should be started with public IP addresses. */
+  Boolean usePublicIps;
+
+  /**
+   * A pipeline level default location for storing temporary files. Supports Google Cloud Storage
+   * locations, e.g. gs://bucket/object
+   */
+  @NotBlank String tempLocation;
+
+  /* The maximum number of workers to use for the workerpool. */
+  Integer maxNumWorkers;
+
+  /* BigQuery table specification, e.g. PROJECT_ID:DATASET_ID.TABLE_ID */
+  String deadLetterTableSpec;
+
+  Map<String, String> labels;
+
+  /** Validates Dataflow runner configuration options */
+  public void validate() {
+    ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
+    Validator validator = factory.getValidator();
+
+    Set<ConstraintViolation<DataflowRunnerConfig>> dataflowRunnerConfigViolation =
+        validator.validate(this);
+    if (!dataflowRunnerConfigViolation.isEmpty()) {
+      throw new ConstraintViolationException(dataflowRunnerConfigViolation);
+    }
+  }
+}
diff --git a/core/src/main/java/feast/core/job/direct/DirectRunnerConfig.java b/core/src/main/java/feast/core/job/direct/DirectRunnerConfig.java
new file mode 100644
index 0000000000..ebd327f2f7
--- /dev/null
+++ b/core/src/main/java/feast/core/job/direct/DirectRunnerConfig.java
@@ -0,0 +1,36 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ * Copyright 2018-2020 The Feast Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package feast.core.job.direct;
+
+import feast.core.job.option.RunnerConfig;
+import feast.proto.core.RunnerProto.DirectRunnerConfigOptions;
+
+public class DirectRunnerConfig extends RunnerConfig {
+  /**
+   * Controls the amount of target parallelism the DirectRunner will use. Defaults to the greater of
+   * the number of available processors and 3. Must be a value greater than zero.
+   */
+  Integer targetParallelism;
+
+  /* BigQuery table specification, e.g. PROJECT_ID:DATASET_ID.TABLE_ID */
+  String deadletterTableSpec;
+
+  public DirectRunnerConfig(DirectRunnerConfigOptions runnerConfigOptions) {
+    this.deadletterTableSpec = runnerConfigOptions.getDeadLetterTableSpec();
+    this.targetParallelism = runnerConfigOptions.getTargetParallelism();
+  }
+}
diff --git a/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java b/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java
index 9b3a8473e4..715adbdd43 100644
--- a/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java
+++ b/core/src/main/java/feast/core/job/direct/DirectRunnerJobManager.java
@@ -18,8 +18,6 @@
 
 import com.google.common.base.Strings;
 import com.google.protobuf.util.JsonFormat;
-import feast.core.FeatureSetProto;
-import feast.core.StoreProto;
 import feast.core.config.FeastProperties.MetricsProperties;
 import feast.core.exception.JobExecutionException;
 import feast.core.job.JobManager;
@@ -28,16 +26,18 @@
 import feast.core.model.FeatureSet;
 import feast.core.model.Job;
 import feast.core.model.JobStatus;
-import feast.core.util.TypeConversion;
+import feast.core.model.Project;
 import feast.ingestion.ImportJob;
 import feast.ingestion.options.BZip2Compressor;
 import feast.ingestion.options.ImportOptions;
 import feast.ingestion.options.OptionCompressor;
+import feast.proto.core.FeatureSetProto;
+import feast.proto.core.RunnerProto.DirectRunnerConfigOptions;
+import feast.proto.core.StoreProto;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import java.util.Map;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.beam.runners.direct.DirectRunner;
 import org.apache.beam.sdk.PipelineResult;
@@ -48,15 +48,15 @@
 
   private final Runner RUNNER_TYPE = Runner.DIRECT;
 
-  protected Map<String, String> defaultOptions;
+  private DirectRunnerConfig defaultOptions;
   private final DirectJobRegistry jobs;
   private MetricsProperties metrics;
 
   public DirectRunnerJobManager(
-      Map<String, String> defaultOptions,
+      DirectRunnerConfigOptions directRunnerConfigOptions,
       DirectJobRegistry jobs,
       MetricsProperties metricsProperties) {
-    this.defaultOptions = defaultOptions;
+    this.defaultOptions = new DirectRunnerConfig(directRunnerConfigOptions);
     this.jobs = jobs;
     this.metrics = metricsProperties;
   }
@@ -79,7 +79,7 @@ public Job startJob(Job job) {
         featureSetProtos.add(featureSet.toProto());
       }
       ImportOptions pipelineOptions =
-          getPipelineOptions(featureSetProtos, job.getStore().toProto());
+          getPipelineOptions(job.getId(), featureSetProtos, job.getStore().toProto());
       PipelineResult pipelineResult = runPipeline(pipelineOptions);
       DirectJob directJob = new DirectJob(job.getId(), pipelineResult);
       jobs.add(directJob);
@@ -93,16 +93,19 @@
   }
 
   private ImportOptions getPipelineOptions(
-      List<FeatureSetProto.FeatureSet> featureSets, StoreProto.Store sink) throws IOException {
-    String[] args = TypeConversion.convertMapToArgs(defaultOptions);
-    ImportOptions pipelineOptions = PipelineOptionsFactory.fromArgs(args).as(ImportOptions.class);
+      String jobName, List<FeatureSetProto.FeatureSet> featureSets, StoreProto.Store sink)
+      throws IOException, IllegalAccessException {
+    ImportOptions pipelineOptions =
+        PipelineOptionsFactory.fromArgs(defaultOptions.toArgs()).as(ImportOptions.class);
 
     OptionCompressor<List<FeatureSetProto.FeatureSet>> featureSetJsonCompressor =
         new BZip2Compressor<>(new FeatureSetJsonByteConverter());
 
     pipelineOptions.setFeatureSetJson(featureSetJsonCompressor.compress(featureSets));
+    pipelineOptions.setJobName(jobName);
     pipelineOptions.setStoreJson(Collections.singletonList(JsonFormat.printer().print(sink)));
     pipelineOptions.setRunner(DirectRunner.class);
+    pipelineOptions.setDefaultFeastProject(Project.DEFAULT_NAME);
     pipelineOptions.setProject(""); // set to default value to satisfy validation
     if (metrics.isEnabled()) {
       pipelineOptions.setMetricsExporterType(metrics.getType());
@@ -166,8 +169,7 @@ public PipelineResult runPipeline(ImportOptions pipelineOptions) throws IOExcept
    */
   @Override
   public Job restartJob(Job job) {
-    JobStatus status = job.getStatus();
-    if (JobStatus.getTerminalState().contains(status)) {
+    if (job.getStatus().isTerminal()) {
       // job yet not running: just start job
       return this.startJob(job);
     } else {
diff --git a/core/src/main/java/feast/core/job/option/FeatureSetJsonByteConverter.java b/core/src/main/java/feast/core/job/option/FeatureSetJsonByteConverter.java
index dbd04d668f..2f6b37df1b 100644
--- a/core/src/main/java/feast/core/job/option/FeatureSetJsonByteConverter.java
+++ b/core/src/main/java/feast/core/job/option/FeatureSetJsonByteConverter.java
@@ -18,8 +18,8 @@
 
 import com.google.protobuf.InvalidProtocolBufferException;
 import com.google.protobuf.util.JsonFormat;
-import feast.core.FeatureSetProto;
 import feast.ingestion.options.OptionByteConverter;
+import feast.proto.core.FeatureSetProto;
 import java.util.ArrayList;
 import java.util.List;
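Both job managers ship feature set specs to the ingestion job as BZip2-compressed JSON, with the compressor wrapping the byte converter above. A sketch of the pair in isolation (assuming the generic signatures implied by the call sites):

OptionCompressor<List<FeatureSetProto.FeatureSet>> compressor =
    new BZip2Compressor<>(new FeatureSetJsonByteConverter());
byte[] payload = compressor.compress(featureSetProtos); // set on ImportOptions as featureSetJson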
When a job is started + * by core, all fields in the object will be converted into --key=value args to seed the beam + * pipeline options. + */ +public abstract class RunnerConfig { + + /** + * Converts the fields in this class to a list of --key=value args to be passed to a {@link + * org.apache.beam.sdk.options.PipelineOptionsFactory}. + * + * <p>
Ignores values that are proto-default (e.g. empty string, 0). + * + * @return Array of string args in the format --key=value. + * @throws IllegalAccessException + */ + public String[] toArgs() throws IllegalAccessException { + List args = new ArrayList<>(); + for (Field field : this.getClass().getFields()) { + if (field.get(this) == null) { + continue; + } + Class type = field.getType(); + if (Map.class.equals(type)) { + String jsonString = + TypeConversion.convertMapToJsonString((Map) field.get(this)); + args.add(String.format("--%s=%s", field.getName(), jsonString)); + continue; + } + + if (String.class.equals(type)) { + String val = (String) field.get(this); + if (!val.equals("")) { + args.add(String.format("--%s=%s", field.getName(), val)); + } + continue; + } + + if (Integer.class.equals(type)) { + Integer val = (Integer) field.get(this); + if (val != 0) { + args.add(String.format("--%s=%d", field.getName(), val)); + } + continue; + } + + args.add(String.format("--%s=%s", field.getName(), field.get(this))); + } + return args.toArray(String[]::new); + } +} diff --git a/core/src/main/java/feast/core/model/Entity.java b/core/src/main/java/feast/core/model/Entity.java new file mode 100644 index 0000000000..6133d492fc --- /dev/null +++ b/core/src/main/java/feast/core/model/Entity.java @@ -0,0 +1,77 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.core.model; + +import feast.proto.core.FeatureSetProto.EntitySpec; +import feast.proto.types.ValueProto.ValueType; +import java.util.Objects; +import javax.persistence.*; +import lombok.Getter; +import lombok.Setter; + +/** Feast entity object. Contains name and type of the entity. */ +@Getter +@Setter +@javax.persistence.Entity +@Table( + name = "entities", + uniqueConstraints = @UniqueConstraint(columnNames = {"name", "feature_set_id"})) +public class Entity { + + @Id @GeneratedValue private Long id; + + private String name; + + @ManyToOne(fetch = FetchType.LAZY) + private FeatureSet featureSet; + + /** Data type of the entity. 
String representation of {@link ValueType} */ + private String type; + + public Entity() {} + + private Entity(String name, ValueType.Enum type) { + this.setName(name); + this.setType(type.toString()); + } + + public static Entity fromProto(EntitySpec entitySpec) { + Entity entity = new Entity(entitySpec.getName(), entitySpec.getValueType()); + return entity; + } + + public EntitySpec toProto() { + return EntitySpec.newBuilder().setName(name).setValueType(ValueType.Enum.valueOf(type)).build(); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Entity entity = (Entity) o; + return getName().equals(entity.getName()) && getType().equals(entity.getType()); + } + + @Override + public int hashCode() { + return Objects.hash(getName(), getType()); + } +} diff --git a/core/src/main/java/feast/core/model/Feature.java b/core/src/main/java/feast/core/model/Feature.java new file mode 100644 index 0000000000..0b45749379 --- /dev/null +++ b/core/src/main/java/feast/core/model/Feature.java @@ -0,0 +1,280 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.core.model; + +import com.google.protobuf.InvalidProtocolBufferException; +import feast.core.util.TypeConversion; +import feast.proto.core.FeatureSetProto.FeatureSpec; +import feast.proto.core.FeatureSetProto.FeatureSpec.Builder; +import feast.proto.types.ValueProto.ValueType; +import java.util.Arrays; +import java.util.Map; +import java.util.Objects; +import javax.persistence.*; +import javax.persistence.Entity; +import lombok.Getter; +import lombok.Setter; +import org.tensorflow.metadata.v0.*; + +/** + * Feature belonging to a feature set. Contains its name and type, as well as domain metadata + * about the feature. + */ +@Getter +@Setter +@Entity +@Table( + name = "features", + uniqueConstraints = @UniqueConstraint(columnNames = {"name", "feature_set_id"})) +public class Feature { + + @Id @GeneratedValue private Long id; + + private String name; + + @ManyToOne(fetch = FetchType.LAZY) + private FeatureSet featureSet; + + /** Data type of the feature. String representation of {@link ValueType} */ + private String type; + + // Labels for this feature + @Column(name = "labels", columnDefinition = "text") + private String labels; + + // Presence constraints (refer to proto feast.core.FeatureSet.FeatureSpec) + // Only one of them can be set. + private byte[] presence; + private byte[] groupPresence; + + // Shape type (refer to proto feast.core.FeatureSet.FeatureSpec) + // Only one of them can be set. + private byte[] shape; + private byte[] valueCount; + + // Domain info for the values (refer to proto feast.core.FeatureSet.FeatureSpec) + // Only one of them can be set. 
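These byte[] columns each persist exactly one serialized TensorFlow Metadata proto from the corresponding oneof group in FeatureSpec, which is why at most one column per group is non-null and why toProto() below probes them with an if/else-if chain. A minimal sketch of that round trip (illustrative only, not code from this PR; assumes the org.tensorflow.metadata.v0 protos are on the classpath):

import com.google.protobuf.InvalidProtocolBufferException;
import org.tensorflow.metadata.v0.IntDomain;

class DomainRoundTripSketch {
  // Serialize a TFDV domain proto the way updateSchema(...) stores it.
  static byte[] store() {
    IntDomain domain = IntDomain.newBuilder().setMin(0).setMax(120).build();
    return domain.toByteArray(); // value persisted in the intDomain column
  }

  // Rehydrate on read, mirroring toProto()'s IntDomain.parseFrom(...).
  static IntDomain load(byte[] stored) throws InvalidProtocolBufferException {
    return IntDomain.parseFrom(stored);
  }
}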
+ private String domain; + private byte[] intDomain; + private byte[] floatDomain; + private byte[] stringDomain; + private byte[] boolDomain; + private byte[] structDomain; + private byte[] naturalLanguageDomain; + private byte[] imageDomain; + private byte[] midDomain; + private byte[] urlDomain; + private byte[] timeDomain; + private byte[] timeOfDayDomain; + + public Feature() {} + // Whether this feature has been archived. An archived feature cannot be + // retrieved or written to. + private boolean archived = false; + + private Feature(String name, ValueType.Enum type) { + this.setName(name); + this.setType(type.toString()); + } + + public static Feature fromProto(FeatureSpec featureSpec) { + Feature feature = new Feature(featureSpec.getName(), featureSpec.getValueType()); + feature.labels = TypeConversion.convertMapToJsonString(featureSpec.getLabelsMap()); + feature.updateSchema(featureSpec); + return feature; + } + + public FeatureSpec toProto() throws InvalidProtocolBufferException { + Builder featureSpecBuilder = + FeatureSpec.newBuilder().setName(getName()).setValueType(ValueType.Enum.valueOf(getType())); + + if (getPresence() != null) { + featureSpecBuilder.setPresence(FeaturePresence.parseFrom(getPresence())); + } else if (getGroupPresence() != null) { + featureSpecBuilder.setGroupPresence(FeaturePresenceWithinGroup.parseFrom(getGroupPresence())); + } + + if (getShape() != null) { + featureSpecBuilder.setShape(FixedShape.parseFrom(getShape())); + } else if (getValueCount() != null) { + featureSpecBuilder.setValueCount(ValueCount.parseFrom(getValueCount())); + } + + if (getDomain() != null) { + featureSpecBuilder.setDomain(getDomain()); + } else if (getIntDomain() != null) { + featureSpecBuilder.setIntDomain(IntDomain.parseFrom(getIntDomain())); + } else if (getFloatDomain() != null) { + featureSpecBuilder.setFloatDomain(FloatDomain.parseFrom(getFloatDomain())); + } else if (getStringDomain() != null) { + featureSpecBuilder.setStringDomain(StringDomain.parseFrom(getStringDomain())); + } else if (getBoolDomain() != null) { + featureSpecBuilder.setBoolDomain(BoolDomain.parseFrom(getBoolDomain())); + } else if (getStructDomain() != null) { + featureSpecBuilder.setStructDomain(StructDomain.parseFrom(getStructDomain())); + } else if (getNaturalLanguageDomain() != null) { + featureSpecBuilder.setNaturalLanguageDomain( + NaturalLanguageDomain.parseFrom(getNaturalLanguageDomain())); + } else if (getImageDomain() != null) { + featureSpecBuilder.setImageDomain(ImageDomain.parseFrom(getImageDomain())); + } else if (getMidDomain() != null) { + featureSpecBuilder.setMidDomain(MIDDomain.parseFrom(getMidDomain())); + } else if (getUrlDomain() != null) { + featureSpecBuilder.setUrlDomain(URLDomain.parseFrom(getUrlDomain())); + } else if (getTimeDomain() != null) { + featureSpecBuilder.setTimeDomain(TimeDomain.parseFrom(getTimeDomain())); + } else if (getTimeOfDayDomain() != null) { + featureSpecBuilder.setTimeOfDayDomain(TimeOfDayDomain.parseFrom(getTimeOfDayDomain())); + } + + if (getLabels() != null) { + featureSpecBuilder.putAllLabels(getLabels()); + } + return featureSpecBuilder.build(); + } + + private void updateSchema(FeatureSpec featureSpec) { + switch (featureSpec.getPresenceConstraintsCase()) { + case PRESENCE: + setPresence(featureSpec.getPresence().toByteArray()); + break; + case GROUP_PRESENCE: + setGroupPresence(featureSpec.getGroupPresence().toByteArray()); + break; + case PRESENCECONSTRAINTS_NOT_SET: + break; + } + + switch (featureSpec.getShapeTypeCase()) { + case SHAPE: + 
setShape(featureSpec.getShape().toByteArray()); + break; + case VALUE_COUNT: + setValueCount(featureSpec.getValueCount().toByteArray()); + break; + case SHAPETYPE_NOT_SET: + break; + } + + switch (featureSpec.getDomainInfoCase()) { + case DOMAIN: + setDomain(featureSpec.getDomain()); + break; + case INT_DOMAIN: + setIntDomain(featureSpec.getIntDomain().toByteArray()); + break; + case FLOAT_DOMAIN: + setFloatDomain(featureSpec.getFloatDomain().toByteArray()); + break; + case STRING_DOMAIN: + setStringDomain(featureSpec.getStringDomain().toByteArray()); + break; + case BOOL_DOMAIN: + setBoolDomain(featureSpec.getBoolDomain().toByteArray()); + break; + case STRUCT_DOMAIN: + setStructDomain(featureSpec.getStructDomain().toByteArray()); + break; + case NATURAL_LANGUAGE_DOMAIN: + setNaturalLanguageDomain(featureSpec.getNaturalLanguageDomain().toByteArray()); + break; + case IMAGE_DOMAIN: + setImageDomain(featureSpec.getImageDomain().toByteArray()); + break; + case MID_DOMAIN: + setMidDomain(featureSpec.getMidDomain().toByteArray()); + break; + case URL_DOMAIN: + setUrlDomain(featureSpec.getUrlDomain().toByteArray()); + break; + case TIME_DOMAIN: + setTimeDomain(featureSpec.getTimeDomain().toByteArray()); + break; + case TIME_OF_DAY_DOMAIN: + setTimeOfDayDomain(featureSpec.getTimeOfDayDomain().toByteArray()); + break; + case DOMAININFO_NOT_SET: + break; + } + } + + /** Archive this feature. */ + public void archive() { + this.archived = true; + } + + /** + * Update the feature object with a valid feature spec. Only schema changes are allowed. + * + * @param featureSpec {@link FeatureSpec} containing schema changes. + */ + public void updateFromProto(FeatureSpec featureSpec) { + if (isArchived()) { + throw new IllegalArgumentException( + String.format( + "You are attempting to create a feature %s that was previously archived. This isn't allowed. Please create a new feature with a different name.", + featureSpec.getName())); + } + if (ValueType.Enum.valueOf(type) != featureSpec.getValueType()) { + throw new IllegalArgumentException( + String.format( + "You are attempting to change the type of feature %s from %s to %s. This isn't allowed. 
Please create a new feature.", + featureSpec.getName(), type, featureSpec.getValueType())); + } + updateSchema(featureSpec); + } + + public Map getLabels() { + return TypeConversion.convertJsonStringToMap(this.labels); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Feature feature = (Feature) o; + return getName().equals(feature.getName()) + && getType().equals(feature.getType()) + && isArchived() == (feature.isArchived()) + && Objects.equals(getLabels(), feature.getLabels()) + && Arrays.equals(getPresence(), feature.getPresence()) + && Arrays.equals(getGroupPresence(), feature.getGroupPresence()) + && Arrays.equals(getShape(), feature.getShape()) + && Arrays.equals(getValueCount(), feature.getValueCount()) + && Objects.equals(getDomain(), feature.getDomain()) + && Arrays.equals(getIntDomain(), feature.getIntDomain()) + && Arrays.equals(getFloatDomain(), feature.getFloatDomain()) + && Arrays.equals(getStringDomain(), feature.getStringDomain()) + && Arrays.equals(getBoolDomain(), feature.getBoolDomain()) + && Arrays.equals(getStructDomain(), feature.getStructDomain()) + && Arrays.equals(getNaturalLanguageDomain(), feature.getNaturalLanguageDomain()) + && Arrays.equals(getImageDomain(), feature.getImageDomain()) + && Arrays.equals(getMidDomain(), feature.getMidDomain()) + && Arrays.equals(getUrlDomain(), feature.getUrlDomain()) + && Arrays.equals(getTimeDomain(), feature.getTimeDomain()) + && Arrays.equals(getTimeDomain(), feature.getTimeOfDayDomain()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), getName(), getType(), getLabels()); + } +} diff --git a/core/src/main/java/feast/core/model/FeatureSet.java b/core/src/main/java/feast/core/model/FeatureSet.java index 232a5f67d1..f7b2dc7cd4 100644 --- a/core/src/main/java/feast/core/model/FeatureSet.java +++ b/core/src/main/java/feast/core/model/FeatureSet.java @@ -16,73 +16,35 @@ */ package feast.core.model; +import com.google.common.collect.Sets; import com.google.protobuf.Duration; import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.Timestamp; -import feast.core.FeatureSetProto; -import feast.core.FeatureSetProto.EntitySpec; -import feast.core.FeatureSetProto.FeatureSetMeta; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.FeatureSetProto.FeatureSetStatus; -import feast.core.FeatureSetProto.FeatureSpec; -import feast.types.ValueProto.ValueType.Enum; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import javax.persistence.CascadeType; -import javax.persistence.CollectionTable; -import javax.persistence.Column; -import javax.persistence.ElementCollection; -import javax.persistence.Entity; -import javax.persistence.FetchType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Table; -import javax.persistence.UniqueConstraint; +import feast.core.util.TypeConversion; +import feast.proto.core.FeatureSetProto; +import feast.proto.core.FeatureSetProto.*; +import java.util.*; +import java.util.stream.Collectors; +import javax.persistence.*; import lombok.Getter; import lombok.Setter; import org.apache.commons.lang3.builder.HashCodeBuilder; -import org.hibernate.annotations.Fetch; -import org.hibernate.annotations.FetchMode; -import 
org.tensorflow.metadata.v0.BoolDomain; -import org.tensorflow.metadata.v0.FeaturePresence; -import org.tensorflow.metadata.v0.FeaturePresenceWithinGroup; -import org.tensorflow.metadata.v0.FixedShape; -import org.tensorflow.metadata.v0.FloatDomain; -import org.tensorflow.metadata.v0.ImageDomain; -import org.tensorflow.metadata.v0.IntDomain; -import org.tensorflow.metadata.v0.MIDDomain; -import org.tensorflow.metadata.v0.NaturalLanguageDomain; -import org.tensorflow.metadata.v0.StringDomain; -import org.tensorflow.metadata.v0.StructDomain; -import org.tensorflow.metadata.v0.TimeDomain; -import org.tensorflow.metadata.v0.TimeOfDayDomain; -import org.tensorflow.metadata.v0.URLDomain; -import org.tensorflow.metadata.v0.ValueCount; +import org.tensorflow.metadata.v0.*; @Getter @Setter -@Entity -@Table(name = "feature_sets") -public class FeatureSet extends AbstractTimestampEntity implements Comparable { +@javax.persistence.Entity +@Table( + name = "feature_sets", + uniqueConstraints = @UniqueConstraint(columnNames = {"name", "project_name"})) +public class FeatureSet extends AbstractTimestampEntity { - // Id of the featureSet, defined as project/feature_set_name:feature_set_version - @Id - @Column(name = "id", nullable = false, unique = true) - private String id; + @Id @GeneratedValue private long id; // Name of the featureSet @Column(name = "name", nullable = false) private String name; - // Version of the featureSet - @Column(name = "version") - private int version; - // Project that this featureSet belongs to @ManyToOne(fetch = FetchType.LAZY) @JoinColumn(name = "project_name") @@ -93,19 +55,20 @@ public class FeatureSet extends AbstractTimestampEntity implements Comparable entities; + @OneToMany( + mappedBy = "featureSet", + cascade = CascadeType.ALL, + fetch = FetchType.EAGER, + orphanRemoval = true) + private Set entities; // Feature fields inside this feature set - @ElementCollection(fetch = FetchType.EAGER) - @CollectionTable( - name = "features", - joinColumns = @JoinColumn(name = "feature_set_id"), - uniqueConstraints = @UniqueConstraint(columnNames = {"name", "project", "version"})) - @Fetch(FetchMode.SUBSELECT) - private Set features; + @OneToMany( + mappedBy = "featureSet", + cascade = CascadeType.ALL, + fetch = FetchType.EAGER, + orphanRemoval = true) + private Set features; // Source on which feature rows can be found @ManyToOne(cascade = CascadeType.ALL, fetch = FetchType.EAGER) @@ -113,8 +76,13 @@ public class FeatureSet extends AbstractTimestampEntity implements Comparable entities, - List features, + List entities, + List features, Source source, + Map labels, FeatureSetStatus status) { this.maxAgeSeconds = maxAgeSeconds; this.source = source; - this.status = status.toString(); + this.status = status; this.entities = new HashSet<>(); this.features = new HashSet<>(); this.name = name; this.project = new Project(project); - this.version = version; - this.setId(project, name, version); + this.labels = TypeConversion.convertMapToJsonString(labels); addEntities(entities); addFeatures(features); } - private void setId(String project, String name, int version) { - this.id = project + "/" + name + ":" + version; - } - - public void setVersion(int version) { - this.version = version; - this.setId(getProjectName(), getName(), version); - } - public void setName(String name) { this.name = name; - this.setId(getProjectName(), name, getVersion()); } private String getProjectName() { @@ -166,202 +123,168 @@ private String getProjectName() { public void setProject(Project project) { 
this.project = project; - this.setId(project.getName(), getName(), getVersion()); } public static FeatureSet fromProto(FeatureSetProto.FeatureSet featureSetProto) { FeatureSetSpec featureSetSpec = featureSetProto.getSpec(); Source source = Source.fromProto(featureSetSpec.getSource()); - List featureSpecs = new ArrayList<>(); + List featureSpecs = new ArrayList<>(); for (FeatureSpec featureSpec : featureSetSpec.getFeaturesList()) { - featureSpecs.add(new Field(featureSpec)); + featureSpecs.add(Feature.fromProto(featureSpec)); } - List entitySpecs = new ArrayList<>(); + List entitySpecs = new ArrayList<>(); for (EntitySpec entitySpec : featureSetSpec.getEntitiesList()) { - entitySpecs.add(new Field(entitySpec)); + entitySpecs.add(Entity.fromProto(entitySpec)); } return new FeatureSet( featureSetProto.getSpec().getName(), featureSetProto.getSpec().getProject(), - featureSetProto.getSpec().getVersion(), featureSetSpec.getMaxAge().getSeconds(), entitySpecs, featureSpecs, source, + featureSetProto.getSpec().getLabelsMap(), featureSetProto.getMeta().getStatus()); } - public void addEntities(List fields) { - for (Field field : fields) { - addEntity(field); + // Updates the existing feature set from a proto. + public void updateFromProto(FeatureSetProto.FeatureSet featureSetProto) + throws InvalidProtocolBufferException { + FeatureSetSpec spec = featureSetProto.getSpec(); + if (this.toProto().getSpec().equals(spec)) { + return; + } + + // 1. validate + // 1a. check no change to identifiers + if (!name.equals(spec.getName())) { + throw new IllegalArgumentException( + String.format("Given feature set name %s does not match name %s.", spec.getName(), name)); + } + if (!project.getName().equals(spec.getProject())) { + throw new IllegalArgumentException( + String.format( + "You are attempting to change the project of feature set %s from %s to %s. This isn't allowed. Please create a new feature set under the desired project.", + spec.getName(), project, spec.getProject())); + } + + Set existingEntities = + entities.stream().map(Entity::toProto).collect(Collectors.toSet()); + + // 1b. check no change to entities + if (!Sets.newHashSet(spec.getEntitiesList()).equals(existingEntities)) { + throw new IllegalArgumentException( + String.format( + "You are attempting to change the entities of this feature set: Given set of entities \n{%s}\n does not match existing set of entities\n {%s}. This isn't allowed. Please create a new feature set. ", + spec.getEntitiesList(), existingEntities)); + } + + // 2. Update max age and source. + maxAgeSeconds = spec.getMaxAge().getSeconds(); + source = Source.fromProto(spec.getSource()); + + Map updatedFeatures = + spec.getFeaturesList().stream().collect(Collectors.toMap(FeatureSpec::getName, fs -> fs)); + + // 3. Tombstone features that are gone, update features that have changed + for (Feature existingFeature : features) { + String existingFeatureName = existingFeature.getName(); + FeatureSpec updatedFeatureSpec = updatedFeatures.get(existingFeatureName); + if (updatedFeatureSpec == null) { + existingFeature.archive(); + } else { + existingFeature.updateFromProto(updatedFeatureSpec); + updatedFeatures.remove(existingFeatureName); + } + } + + // 4. 
Add new features + for (FeatureSpec featureSpec : updatedFeatures.values()) { + Feature newFeature = Feature.fromProto(featureSpec); + addFeature(newFeature); } } - public void addEntity(Field field) { - field.setProject(this.project.getName()); - field.setVersion(this.getVersion()); - entities.add(field); + public void addEntities(List entities) { + for (Entity entity : entities) { + addEntity(entity); + } + } + + public void addEntity(Entity entity) { + entity.setFeatureSet(this); + entities.add(entity); } - public void addFeatures(List fields) { - for (Field field : fields) { - addFeature(field); + public void addFeatures(List features) { + for (Feature feature : features) { + addFeature(feature); } } - public void addFeature(Field field) { - field.setProject(this.project.getName()); - field.setVersion(this.getVersion()); - features.add(field); + public void addFeature(Feature feature) { + feature.setFeatureSet(this); + features.add(feature); } public FeatureSetProto.FeatureSet toProto() throws InvalidProtocolBufferException { List entitySpecs = new ArrayList<>(); - for (Field entityField : entities) { - EntitySpec.Builder entitySpecBuilder = EntitySpec.newBuilder(); - setEntitySpecFields(entitySpecBuilder, entityField); - entitySpecs.add(entitySpecBuilder.build()); + for (Entity entityField : entities) { + entitySpecs.add(entityField.toProto()); } List featureSpecs = new ArrayList<>(); - for (Field featureField : features) { - FeatureSpec.Builder featureSpecBuilder = FeatureSpec.newBuilder(); - setFeatureSpecFields(featureSpecBuilder, featureField); - featureSpecs.add(featureSpecBuilder.build()); + for (Feature featureField : features) { + if (!featureField.isArchived()) { + featureSpecs.add(featureField.toProto()); + } } FeatureSetMeta.Builder meta = FeatureSetMeta.newBuilder() .setCreatedTimestamp( Timestamp.newBuilder().setSeconds(super.getCreated().getTime() / 1000L)) - .setStatus(FeatureSetStatus.valueOf(status)); + .setStatus(status); FeatureSetSpec.Builder spec = FeatureSetSpec.newBuilder() .setName(getName()) - .setVersion(getVersion()) .setProject(project.getName()) .setMaxAge(Duration.newBuilder().setSeconds(maxAgeSeconds)) .addAllEntities(entitySpecs) .addAllFeatures(featureSpecs) + .putAllLabels(TypeConversion.convertJsonStringToMap(labels)) .setSource(source.toProto()); return FeatureSetProto.FeatureSet.newBuilder().setMeta(meta).setSpec(spec).build(); } - // setEntitySpecFields and setFeatureSpecFields methods contain duplicated code because - // Feast internally treat EntitySpec and FeatureSpec as Field class. However, the proto message - // builder for EntitySpec and FeatureSpec are of different class. 
- @SuppressWarnings("DuplicatedCode") - private void setEntitySpecFields(EntitySpec.Builder entitySpecBuilder, Field entityField) - throws InvalidProtocolBufferException { - entitySpecBuilder - .setName(entityField.getName()) - .setValueType(Enum.valueOf(entityField.getType())); - - if (entityField.getPresence() != null) { - entitySpecBuilder.setPresence(FeaturePresence.parseFrom(entityField.getPresence())); - } else if (entityField.getGroupPresence() != null) { - entitySpecBuilder.setGroupPresence( - FeaturePresenceWithinGroup.parseFrom(entityField.getGroupPresence())); - } - - if (entityField.getShape() != null) { - entitySpecBuilder.setShape(FixedShape.parseFrom(entityField.getShape())); - } else if (entityField.getValueCount() != null) { - entitySpecBuilder.setValueCount(ValueCount.parseFrom(entityField.getValueCount())); - } - - if (entityField.getDomain() != null) { - entitySpecBuilder.setDomain(entityField.getDomain()); - } else if (entityField.getIntDomain() != null) { - entitySpecBuilder.setIntDomain(IntDomain.parseFrom(entityField.getIntDomain())); - } else if (entityField.getFloatDomain() != null) { - entitySpecBuilder.setFloatDomain(FloatDomain.parseFrom(entityField.getFloatDomain())); - } else if (entityField.getStringDomain() != null) { - entitySpecBuilder.setStringDomain(StringDomain.parseFrom(entityField.getStringDomain())); - } else if (entityField.getBoolDomain() != null) { - entitySpecBuilder.setBoolDomain(BoolDomain.parseFrom(entityField.getBoolDomain())); - } else if (entityField.getStructDomain() != null) { - entitySpecBuilder.setStructDomain(StructDomain.parseFrom(entityField.getStructDomain())); - } else if (entityField.getNaturalLanguageDomain() != null) { - entitySpecBuilder.setNaturalLanguageDomain( - NaturalLanguageDomain.parseFrom(entityField.getNaturalLanguageDomain())); - } else if (entityField.getImageDomain() != null) { - entitySpecBuilder.setImageDomain(ImageDomain.parseFrom(entityField.getImageDomain())); - } else if (entityField.getMidDomain() != null) { - entitySpecBuilder.setIntDomain(IntDomain.parseFrom(entityField.getIntDomain())); - } else if (entityField.getUrlDomain() != null) { - entitySpecBuilder.setUrlDomain(URLDomain.parseFrom(entityField.getUrlDomain())); - } else if (entityField.getTimeDomain() != null) { - entitySpecBuilder.setTimeDomain(TimeDomain.parseFrom(entityField.getTimeDomain())); - } else if (entityField.getTimeOfDayDomain() != null) { - entitySpecBuilder.setTimeOfDayDomain( - TimeOfDayDomain.parseFrom(entityField.getTimeOfDayDomain())); - } + @Override + public int hashCode() { + HashCodeBuilder hcb = new HashCodeBuilder(); + hcb.append(project.getName()); + hcb.append(getName()); + return hcb.toHashCode(); } - // Refer to setEntitySpecFields method for the reason for code duplication. 
- @SuppressWarnings("DuplicatedCode") - private void setFeatureSpecFields(FeatureSpec.Builder featureSpecBuilder, Field featureField) - throws InvalidProtocolBufferException { - featureSpecBuilder - .setName(featureField.getName()) - .setValueType(Enum.valueOf(featureField.getType())); - - if (featureField.getPresence() != null) { - featureSpecBuilder.setPresence(FeaturePresence.parseFrom(featureField.getPresence())); - } else if (featureField.getGroupPresence() != null) { - featureSpecBuilder.setGroupPresence( - FeaturePresenceWithinGroup.parseFrom(featureField.getGroupPresence())); + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; } - - if (featureField.getShape() != null) { - featureSpecBuilder.setShape(FixedShape.parseFrom(featureField.getShape())); - } else if (featureField.getValueCount() != null) { - featureSpecBuilder.setValueCount(ValueCount.parseFrom(featureField.getValueCount())); + if (!(obj instanceof FeatureSet)) { + return false; } - if (featureField.getDomain() != null) { - featureSpecBuilder.setDomain(featureField.getDomain()); - } else if (featureField.getIntDomain() != null) { - featureSpecBuilder.setIntDomain(IntDomain.parseFrom(featureField.getIntDomain())); - } else if (featureField.getFloatDomain() != null) { - featureSpecBuilder.setFloatDomain(FloatDomain.parseFrom(featureField.getFloatDomain())); - } else if (featureField.getStringDomain() != null) { - featureSpecBuilder.setStringDomain(StringDomain.parseFrom(featureField.getStringDomain())); - } else if (featureField.getBoolDomain() != null) { - featureSpecBuilder.setBoolDomain(BoolDomain.parseFrom(featureField.getBoolDomain())); - } else if (featureField.getStructDomain() != null) { - featureSpecBuilder.setStructDomain(StructDomain.parseFrom(featureField.getStructDomain())); - } else if (featureField.getNaturalLanguageDomain() != null) { - featureSpecBuilder.setNaturalLanguageDomain( - NaturalLanguageDomain.parseFrom(featureField.getNaturalLanguageDomain())); - } else if (featureField.getImageDomain() != null) { - featureSpecBuilder.setImageDomain(ImageDomain.parseFrom(featureField.getImageDomain())); - } else if (featureField.getMidDomain() != null) { - featureSpecBuilder.setMidDomain(MIDDomain.parseFrom(featureField.getMidDomain())); - } else if (featureField.getUrlDomain() != null) { - featureSpecBuilder.setUrlDomain(URLDomain.parseFrom(featureField.getUrlDomain())); - } else if (featureField.getTimeDomain() != null) { - featureSpecBuilder.setTimeDomain(TimeDomain.parseFrom(featureField.getTimeDomain())); - } else if (featureField.getTimeOfDayDomain() != null) { - featureSpecBuilder.setTimeOfDayDomain( - TimeOfDayDomain.parseFrom(featureField.getTimeOfDayDomain())); + FeatureSet other = (FeatureSet) obj; + if (!getName().equals(other.getName())) { + return false; } - } - /** - * Checks if the given featureSet's schema and source has is different from this one. - * - * @param other FeatureSet to compare to - * @return boolean denoting if the source or schema have changed. 
- */ - public boolean equalTo(FeatureSet other) { - if (!getName().equals(other.getName())) { + if (!getLabels().equals(other.getLabels())) { return false; } @@ -378,65 +301,44 @@ public boolean equalTo(FeatureSet other) { } // Create a map of all fields in this feature set - Map fields = new HashMap<>(); + Map entitiesMap = new HashMap<>(); + Map featuresMap = new HashMap<>(); - for (Field e : entities) { - fields.putIfAbsent(e.getName(), e); + for (Entity e : entities) { + entitiesMap.putIfAbsent(e.getName(), e); } - for (Field f : features) { - fields.putIfAbsent(f.getName(), f); + for (Feature f : features) { + featuresMap.putIfAbsent(f.getName(), f); } // Ensure map size is consistent with existing fields - if (fields.size() != other.getFeatures().size() + other.getEntities().size()) { + if (entitiesMap.size() != other.getEntities().size()) { + return false; + } + if (featuresMap.size() != other.getFeatures().size()) { return false; } // Ensure the other entities and features exist in the field map - for (Field e : other.getEntities()) { - if (!fields.containsKey(e.getName())) { + for (Entity e : other.getEntities()) { + if (!entitiesMap.containsKey(e.getName())) { return false; } - if (!e.equals(fields.get(e.getName()))) { + if (!e.equals(entitiesMap.get(e.getName()))) { return false; } } - for (Field f : other.getFeatures()) { - if (!fields.containsKey(f.getName())) { + for (Feature f : other.getFeatures()) { + if (!featuresMap.containsKey(f.getName())) { return false; } - if (!f.equals(fields.get(f.getName()))) { + if (!f.equals(featuresMap.get(f.getName()))) { return false; } } return true; } - - @Override - public int hashCode() { - HashCodeBuilder hcb = new HashCodeBuilder(); - hcb.append(project.getName()); - hcb.append(getName()); - hcb.append(getVersion()); - return hcb.toHashCode(); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (!(obj instanceof FeatureSet)) { - return false; - } - return this.equalTo(((FeatureSet) obj)); - } - - @Override - public int compareTo(FeatureSet o) { - return Integer.compare(getVersion(), o.getVersion()); - } } diff --git a/core/src/main/java/feast/core/model/Field.java b/core/src/main/java/feast/core/model/Field.java deleted file mode 100644 index cb23e4eceb..0000000000 --- a/core/src/main/java/feast/core/model/Field.java +++ /dev/null @@ -1,252 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * Copyright 2018-2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package feast.core.model; - -import feast.core.FeatureSetProto.EntitySpec; -import feast.core.FeatureSetProto.FeatureSpec; -import feast.types.ValueProto.ValueType; -import java.util.Arrays; -import java.util.Objects; -import javax.persistence.Column; -import javax.persistence.Embeddable; -import lombok.Getter; -import lombok.Setter; - -@Getter -@Setter -@Embeddable -public class Field { - - // Name of the feature - @Column(name = "name", nullable = false) - private String name; - - // Type of the feature, should correspond with feast.types.ValueType - @Column(name = "type", nullable = false) - private String type; - - // Version of the field - @Column(name = "version") - private int version; - - // Project that this field belongs to - @Column(name = "project") - private String project; - - // Presence constraints (refer to proto feast.core.FeatureSet.FeatureSpec) - // Only one of them can be set. - private byte[] presence; - private byte[] groupPresence; - - // Shape type (refer to proto feast.core.FeatureSet.FeatureSpec) - // Only one of them can be set. - private byte[] shape; - private byte[] valueCount; - - // Domain info for the values (refer to proto feast.core.FeatureSet.FeatureSpec) - // Only one of them can be set. - private String domain; - private byte[] intDomain; - private byte[] floatDomain; - private byte[] stringDomain; - private byte[] boolDomain; - private byte[] structDomain; - private byte[] naturalLanguageDomain; - private byte[] imageDomain; - private byte[] midDomain; - private byte[] urlDomain; - private byte[] timeDomain; - private byte[] timeOfDayDomain; - - public Field() {} - - public Field(String name, ValueType.Enum type) { - this.name = name; - this.type = type.toString(); - } - - public Field(FeatureSpec featureSpec) { - this.name = featureSpec.getName(); - this.type = featureSpec.getValueType().toString(); - - switch (featureSpec.getPresenceConstraintsCase()) { - case PRESENCE: - this.presence = featureSpec.getPresence().toByteArray(); - break; - case GROUP_PRESENCE: - this.groupPresence = featureSpec.getGroupPresence().toByteArray(); - break; - case PRESENCECONSTRAINTS_NOT_SET: - break; - } - - switch (featureSpec.getShapeTypeCase()) { - case SHAPE: - this.shape = featureSpec.getShape().toByteArray(); - break; - case VALUE_COUNT: - this.valueCount = featureSpec.getValueCount().toByteArray(); - break; - case SHAPETYPE_NOT_SET: - break; - } - - switch (featureSpec.getDomainInfoCase()) { - case DOMAIN: - this.domain = featureSpec.getDomain(); - break; - case INT_DOMAIN: - this.intDomain = featureSpec.getIntDomain().toByteArray(); - break; - case FLOAT_DOMAIN: - this.floatDomain = featureSpec.getFloatDomain().toByteArray(); - break; - case STRING_DOMAIN: - this.stringDomain = featureSpec.getStringDomain().toByteArray(); - break; - case BOOL_DOMAIN: - this.boolDomain = featureSpec.getBoolDomain().toByteArray(); - break; - case STRUCT_DOMAIN: - this.structDomain = featureSpec.getStructDomain().toByteArray(); - break; - case NATURAL_LANGUAGE_DOMAIN: - this.naturalLanguageDomain = featureSpec.getNaturalLanguageDomain().toByteArray(); - break; - case IMAGE_DOMAIN: - this.imageDomain = featureSpec.getImageDomain().toByteArray(); - break; - case MID_DOMAIN: - this.midDomain = featureSpec.getMidDomain().toByteArray(); - break; - case URL_DOMAIN: - this.urlDomain = featureSpec.getUrlDomain().toByteArray(); - break; - case TIME_DOMAIN: - this.timeDomain = featureSpec.getTimeDomain().toByteArray(); - break; - case TIME_OF_DAY_DOMAIN: - this.timeOfDayDomain = 
featureSpec.getTimeOfDayDomain().toByteArray(); - break; - case DOMAININFO_NOT_SET: - break; - } - } - - public Field(EntitySpec entitySpec) { - this.name = entitySpec.getName(); - this.type = entitySpec.getValueType().toString(); - - switch (entitySpec.getPresenceConstraintsCase()) { - case PRESENCE: - this.presence = entitySpec.getPresence().toByteArray(); - break; - case GROUP_PRESENCE: - this.groupPresence = entitySpec.getGroupPresence().toByteArray(); - break; - case PRESENCECONSTRAINTS_NOT_SET: - break; - } - - switch (entitySpec.getShapeTypeCase()) { - case SHAPE: - this.shape = entitySpec.getShape().toByteArray(); - break; - case VALUE_COUNT: - this.valueCount = entitySpec.getValueCount().toByteArray(); - break; - case SHAPETYPE_NOT_SET: - break; - } - - switch (entitySpec.getDomainInfoCase()) { - case DOMAIN: - this.domain = entitySpec.getDomain(); - break; - case INT_DOMAIN: - this.intDomain = entitySpec.getIntDomain().toByteArray(); - break; - case FLOAT_DOMAIN: - this.floatDomain = entitySpec.getFloatDomain().toByteArray(); - break; - case STRING_DOMAIN: - this.stringDomain = entitySpec.getStringDomain().toByteArray(); - break; - case BOOL_DOMAIN: - this.boolDomain = entitySpec.getBoolDomain().toByteArray(); - break; - case STRUCT_DOMAIN: - this.structDomain = entitySpec.getStructDomain().toByteArray(); - break; - case NATURAL_LANGUAGE_DOMAIN: - this.naturalLanguageDomain = entitySpec.getNaturalLanguageDomain().toByteArray(); - break; - case IMAGE_DOMAIN: - this.imageDomain = entitySpec.getImageDomain().toByteArray(); - break; - case MID_DOMAIN: - this.midDomain = entitySpec.getMidDomain().toByteArray(); - break; - case URL_DOMAIN: - this.urlDomain = entitySpec.getUrlDomain().toByteArray(); - break; - case TIME_DOMAIN: - this.timeDomain = entitySpec.getTimeDomain().toByteArray(); - break; - case TIME_OF_DAY_DOMAIN: - this.timeOfDayDomain = entitySpec.getTimeOfDayDomain().toByteArray(); - break; - case DOMAININFO_NOT_SET: - break; - } - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - Field field = (Field) o; - return Objects.equals(name, field.name) - && Objects.equals(type, field.type) - && Objects.equals(project, field.project) - && Arrays.equals(presence, field.presence) - && Arrays.equals(groupPresence, field.groupPresence) - && Arrays.equals(shape, field.shape) - && Arrays.equals(valueCount, field.valueCount) - && Objects.equals(domain, field.domain) - && Arrays.equals(intDomain, field.intDomain) - && Arrays.equals(floatDomain, field.floatDomain) - && Arrays.equals(stringDomain, field.stringDomain) - && Arrays.equals(boolDomain, field.boolDomain) - && Arrays.equals(structDomain, field.structDomain) - && Arrays.equals(naturalLanguageDomain, field.naturalLanguageDomain) - && Arrays.equals(imageDomain, field.imageDomain) - && Arrays.equals(midDomain, field.midDomain) - && Arrays.equals(urlDomain, field.urlDomain) - && Arrays.equals(timeDomain, field.timeDomain) - && Arrays.equals(timeOfDayDomain, field.timeOfDayDomain); - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), name, type); - } -} diff --git a/core/src/main/java/feast/core/model/Job.java b/core/src/main/java/feast/core/model/Job.java index 738a16db2d..5fce3dffbe 100644 --- a/core/src/main/java/feast/core/model/Job.java +++ b/core/src/main/java/feast/core/model/Job.java @@ -17,23 +17,13 @@ package feast.core.model; import com.google.protobuf.InvalidProtocolBufferException; 
-import feast.core.FeatureSetProto; -import feast.core.IngestionJobProto; +import feast.core.job.Runner; +import feast.proto.core.FeatureSetProto; +import feast.proto.core.IngestionJobProto; import java.util.ArrayList; import java.util.List; -import javax.persistence.CascadeType; -import javax.persistence.Column; +import javax.persistence.*; import javax.persistence.Entity; -import javax.persistence.EnumType; -import javax.persistence.Enumerated; -import javax.persistence.Id; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.JoinTable; -import javax.persistence.ManyToMany; -import javax.persistence.ManyToOne; -import javax.persistence.OneToMany; -import javax.persistence.Table; import lombok.AllArgsConstructor; import lombok.Getter; import lombok.Setter; @@ -55,9 +45,9 @@ public class Job extends AbstractTimestampEntity { private String extId; // Runner type - // Use Runner.name() when converting a Runner to string to assign to this property. + @Enumerated(EnumType.STRING) @Column(name = "runner") - private String runner; + private Runner runner; // Source id @ManyToOne @@ -70,21 +60,17 @@ public class Job extends AbstractTimestampEntity { private Store store; // FeatureSets populated by the job - @ManyToMany + @ManyToMany(cascade = CascadeType.ALL) @JoinTable( name = "jobs_feature_sets", - joinColumns = @JoinColumn(name = "feature_sets_id"), - inverseJoinColumns = @JoinColumn(name = "job_id"), + joinColumns = @JoinColumn(name = "job_id"), + inverseJoinColumns = @JoinColumn(name = "feature_sets_id"), indexes = { @Index(name = "idx_jobs_feature_sets_job_id", columnList = "job_id"), @Index(name = "idx_jobs_feature_sets_feature_sets_id", columnList = "feature_sets_id") }) private List featureSets; - // Job Metrics - @OneToMany(mappedBy = "job", cascade = CascadeType.ALL) - private List metrics; - @Enumerated(EnumType.STRING) @Column(name = "status", length = 16) private JobStatus status; @@ -93,26 +79,12 @@ public Job() { super(); } - public Job( - String id, - String extId, - String runner, - Source source, - Store sink, - List featureSets, - JobStatus jobStatus) { - this.id = id; - this.extId = extId; - this.source = source; - this.runner = runner; - this.store = sink; - this.featureSets = featureSets; - this.status = jobStatus; + public boolean hasTerminated() { + return getStatus().isTerminal(); } - public void updateMetrics(List newMetrics) { - metrics.clear(); - metrics.addAll(newMetrics); + public boolean isRunning() { + return getStatus() == JobStatus.RUNNING; } public String getSinkName() { diff --git a/core/src/main/java/feast/core/model/JobStatus.java b/core/src/main/java/feast/core/model/JobStatus.java index 86aa512933..6bafc06ec9 100644 --- a/core/src/main/java/feast/core/model/JobStatus.java +++ b/core/src/main/java/feast/core/model/JobStatus.java @@ -16,11 +16,9 @@ */ package feast.core.model; -import feast.core.IngestionJobProto.IngestionJobStatus; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; +import feast.proto.core.IngestionJobProto.IngestionJobStatus; import java.util.Map; +import java.util.Set; public enum JobStatus { /** Job status is not known. */ @@ -53,33 +51,41 @@ public enum JobStatus { /** job has been suspended */ SUSPENDED; - private static final Collection TERMINAL_STATE = - Collections.unmodifiableList(Arrays.asList(COMPLETED, ABORTED, ERROR)); + private static final Set TERMINAL_STATES = Set.of(COMPLETED, ABORTED, ERROR); /** - * Get a collection of terminal job state. 
+ * Get the set of terminal job states. * - * <p>Terminal job state is final and will not change to any other state. + * <p>
A terminal job state is final and will not change to any other state. * - * @return collection of terminal job state. + * @return set of terminal job states. */ - public static Collection getTerminalState() { - return TERMINAL_STATE; + public static Set getTerminalStates() { + return TERMINAL_STATES; } - private static final Collection TRANSITIONAL_STATES = - Collections.unmodifiableList(Arrays.asList(PENDING, ABORTING, SUSPENDING)); + private static final Set TRANSITIONAL_STATES = Set.of(PENDING, ABORTING, SUSPENDING); /** - * Get Transitional Job Status states. Transitionals states are assigned to jobs that + * Get Transitional Job Status states. Transitional states are assigned to jobs that are * transitioning to a more stable state (ie SUSPENDED, ABORTED etc.) * - * @return Collection of transitional Job Status states. + * @return set of transitional Job Status states. */ - public static final Collection getTransitionalStates() { + public static Set getTransitionalStates() { return TRANSITIONAL_STATES; } + /** @return true if this {@code JobStatus} is a terminal state. */ + public boolean isTerminal() { + return getTerminalStates().contains(this); + } + + /** @return true if this {@code JobStatus} is a transitional state. */ + public boolean isTransitional() { + return getTransitionalStates().contains(this); + } + private static final Map INGESTION_JOB_STATUS_MAP = Map.of( JobStatus.UNKNOWN, IngestionJobStatus.UNKNOWN, @@ -95,7 +101,7 @@ public static final Collection getTransitionalStates() { /** * Convert a Job Status to Ingestion Job Status proto * - * @return IngestionJobStatus proto derieved from this job status + * @return IngestionJobStatus proto derived from this job status */ public IngestionJobStatus toProto() { // maps job models job status to ingestion job status diff --git a/core/src/main/java/feast/core/model/Metrics.java b/core/src/main/java/feast/core/model/Metrics.java deleted file mode 100644 index 0b7514816f..0000000000 --- a/core/src/main/java/feast/core/model/Metrics.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * Copyright 2018-2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package feast.core.model; - -import javax.persistence.Entity; -import javax.persistence.FetchType; -import javax.persistence.GeneratedValue; -import javax.persistence.GenerationType; -import javax.persistence.Id; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.Table; -import lombok.Getter; -import lombok.NoArgsConstructor; -import lombok.Setter; - -@NoArgsConstructor -@Getter -@Setter -@Entity -@Table(name = "metrics") -public class Metrics extends AbstractTimestampEntity { - - @Id - @GeneratedValue(strategy = GenerationType.AUTO) - private long id; - - @ManyToOne(fetch = FetchType.LAZY) - @JoinColumn(name = "job_id") - private Job job; - - /** Metrics name */ - private String name; - - /** Metrics value */ - private double value; - - /** - * Create a metrics owned by a {@code job}. - * - * @param job owner of this metrics. - * @param metricsName metrics name. - * @param value metrics value. - */ - public Metrics(Job job, String metricsName, double value) { - this.job = job; - this.name = metricsName; - this.value = value; - } -} diff --git a/core/src/main/java/feast/core/model/Project.java b/core/src/main/java/feast/core/model/Project.java index d6e6149394..c55830c824 100644 --- a/core/src/main/java/feast/core/model/Project.java +++ b/core/src/main/java/feast/core/model/Project.java @@ -34,6 +34,7 @@ @Entity @Table(name = "projects") public class Project { + public static final String DEFAULT_NAME = "default"; // Name of the project @Id diff --git a/core/src/main/java/feast/core/model/Source.java b/core/src/main/java/feast/core/model/Source.java index 28db1e9a5b..d199a1b676 100644 --- a/core/src/main/java/feast/core/model/Source.java +++ b/core/src/main/java/feast/core/model/Source.java @@ -18,10 +18,10 @@ import com.google.common.collect.Sets; import com.google.protobuf.Message; -import feast.core.SourceProto; -import feast.core.SourceProto.KafkaSourceConfig; -import feast.core.SourceProto.Source.Builder; -import feast.core.SourceProto.SourceType; +import feast.proto.core.SourceProto; +import feast.proto.core.SourceProto.KafkaSourceConfig; +import feast.proto.core.SourceProto.Source.Builder; +import feast.proto.core.SourceProto.SourceType; import io.grpc.Status; import java.util.Objects; import java.util.Set; diff --git a/core/src/main/java/feast/core/model/Store.java b/core/src/main/java/feast/core/model/Store.java index 9dc44bdc73..1f7c373bdc 100644 --- a/core/src/main/java/feast/core/model/Store.java +++ b/core/src/main/java/feast/core/model/Store.java @@ -17,13 +17,14 @@ package feast.core.model; import com.google.protobuf.InvalidProtocolBufferException; -import feast.core.StoreProto; -import feast.core.StoreProto.Store.BigQueryConfig; -import feast.core.StoreProto.Store.Builder; -import feast.core.StoreProto.Store.CassandraConfig; -import feast.core.StoreProto.Store.RedisConfig; -import feast.core.StoreProto.Store.StoreType; -import feast.core.StoreProto.Store.Subscription; +import feast.proto.core.StoreProto; +import feast.proto.core.StoreProto.Store.BigQueryConfig; +import feast.proto.core.StoreProto.Store.Builder; +import feast.proto.core.StoreProto.Store.CassandraConfig; +import feast.proto.core.StoreProto.Store.RedisClusterConfig; +import feast.proto.core.StoreProto.Store.RedisConfig; +import feast.proto.core.StoreProto.Store.StoreType; +import feast.proto.core.StoreProto.Store.Subscription; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -82,6 +83,9 @@ public static Store 
fromProto(StoreProto.Store storeProto) throws IllegalArgumen case CASSANDRA: config = storeProto.getCassandraConfig().toByteArray(); break; + case REDIS_CLUSTER: + config = storeProto.getRedisClusterConfig().toByteArray(); + break; default: throw new IllegalArgumentException("Invalid store provided"); } @@ -106,6 +110,9 @@ public StoreProto.Store toProto() throws InvalidProtocolBufferException { case CASSANDRA: CassandraConfig cassConfig = CassandraConfig.parseFrom(config); return storeProtoBuilder.setCassandraConfig(cassConfig).build(); + case REDIS_CLUSTER: + RedisClusterConfig redisClusterConfig = RedisClusterConfig.parseFrom(config); + return storeProtoBuilder.setRedisClusterConfig(redisClusterConfig).build(); default: throw new InvalidProtocolBufferException("Invalid store set"); } @@ -118,22 +125,18 @@ public List getSubscriptions() { } private static String convertSubscriptionToString(Subscription sub) { - if (sub.getVersion().isEmpty() || sub.getName().isEmpty() || sub.getProject().isEmpty()) { + if (sub.getName().isEmpty() || sub.getProject().isEmpty()) { throw new IllegalArgumentException( String.format("Missing arguments in subscription string: %s", sub.toString())); } - return String.format("%s:%s:%s", sub.getProject(), sub.getName(), sub.getVersion()); + return String.format("%s:%s", sub.getProject(), sub.getName()); } private Subscription convertStringToSubscription(String sub) { if (sub.equals("")) { return Subscription.newBuilder().build(); } - String[] split = sub.split(":", 3); - return Subscription.newBuilder() - .setProject(split[0]) - .setName(split[1]) - .setVersion(split[2]) - .build(); + String[] split = sub.split(":", 2); + return Subscription.newBuilder().setProject(split[0]).setName(split[1]).build(); } } diff --git a/core/src/main/java/feast/core/service/AccessManagementService.java b/core/src/main/java/feast/core/service/AccessManagementService.java index df92750e94..5b02d6f3c4 100644 --- a/core/src/main/java/feast/core/service/AccessManagementService.java +++ b/core/src/main/java/feast/core/service/AccessManagementService.java @@ -28,12 +28,15 @@ @Slf4j @Service public class AccessManagementService { - private ProjectRepository projectRepository; @Autowired public AccessManagementService(ProjectRepository projectRepository) { this.projectRepository = projectRepository; + // create default project if it does not yet exist. 
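Together with the archiveProject() guard added just below, this bootstrap means the reserved "default" project always exists and can never be archived. A hedged usage sketch of the resulting behavior (hypothetical test-style snippet, not code from this PR; assumes a wired ProjectRepository):

// Hypothetical check; projectRepository is an injected ProjectRepository.
void defaultProjectCannotBeArchived(ProjectRepository projectRepository) {
  // Constructing the service seeds the reserved project if it is missing.
  AccessManagementService service = new AccessManagementService(projectRepository);
  try {
    service.archiveProject(Project.DEFAULT_NAME); // the reserved "default" project
    throw new AssertionError("expected UnsupportedOperationException");
  } catch (UnsupportedOperationException expected) {
    // Guard added in this PR: archiving the default project is not allowed.
  }
}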
+ if (!projectRepository.existsById(Project.DEFAULT_NAME)) { + this.createProject(Project.DEFAULT_NAME); + } } /** @@ -61,6 +64,9 @@ public void archiveProject(String name) { if (!project.isPresent()) { throw new IllegalArgumentException(String.format("Could not find project: \"%s\"", name)); } + if (name.equals(Project.DEFAULT_NAME)) { + throw new UnsupportedOperationException("Archiving the default project is not allowed."); + } Project p = project.get(); p.setArchived(true); projectRepository.saveAndFlush(p); diff --git a/core/src/main/java/feast/core/service/JobCoordinatorService.java b/core/src/main/java/feast/core/service/JobCoordinatorService.java index b66d181022..90ee54ca16 100644 --- a/core/src/main/java/feast/core/service/JobCoordinatorService.java +++ b/core/src/main/java/feast/core/service/JobCoordinatorService.java @@ -17,23 +17,21 @@ package feast.core.service; import com.google.protobuf.InvalidProtocolBufferException; -import feast.core.CoreServiceProto.ListFeatureSetsRequest; -import feast.core.CoreServiceProto.ListStoresRequest.Filter; -import feast.core.CoreServiceProto.ListStoresResponse; -import feast.core.FeatureSetProto; -import feast.core.FeatureSetProto.FeatureSetStatus; -import feast.core.StoreProto; -import feast.core.StoreProto.Store.Subscription; -import feast.core.config.FeastProperties.JobUpdatesProperties; +import feast.core.config.FeastProperties; +import feast.core.config.FeastProperties.JobProperties; import feast.core.dao.FeatureSetRepository; import feast.core.dao.JobRepository; import feast.core.job.JobManager; import feast.core.job.JobUpdateTask; import feast.core.model.FeatureSet; import feast.core.model.Job; -import feast.core.model.JobStatus; import feast.core.model.Source; import feast.core.model.Store; +import feast.proto.core.CoreServiceProto.ListStoresRequest.Filter; +import feast.proto.core.CoreServiceProto.ListStoresResponse; +import feast.proto.core.FeatureSetProto.FeatureSetStatus; +import feast.proto.core.StoreProto; +import feast.proto.core.StoreProto.Store.Subscription; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -44,6 +42,7 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.stream.Collectors; +import javax.validation.constraints.Positive; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.scheduling.annotation.Scheduled; @@ -54,11 +53,11 @@ @Service public class JobCoordinatorService { - private JobRepository jobRepository; - private FeatureSetRepository featureSetRepository; - private SpecService specService; - private JobManager jobManager; - private JobUpdatesProperties jobUpdatesProperties; + private final JobRepository jobRepository; + private final FeatureSetRepository featureSetRepository; + private final SpecService specService; + private final JobManager jobManager; + private final JobProperties jobProperties; @Autowired public JobCoordinatorService( @@ -66,12 +65,12 @@ public JobCoordinatorService( FeatureSetRepository featureSetRepository, SpecService specService, JobManager jobManager, - JobUpdatesProperties jobUpdatesProperties) { + FeastProperties feastProperties) { this.jobRepository = jobRepository; this.featureSetRepository = featureSetRepository; this.specService = specService; this.jobManager = jobManager; - this.jobUpdatesProperties = jobUpdatesProperties; + this.jobProperties = feastProperties.getJobs(); } /** @@ -86,98 +85,93 @@ public 
JobCoordinatorService( *

4) Updates Feature set statuses */ @Transactional - @Scheduled(fixedDelayString = "${feast.jobs.updates.pollingIntervalMillis}") + @Scheduled(fixedDelayString = "${feast.jobs.polling_interval_milliseconds}") public void Poll() throws InvalidProtocolBufferException { log.info("Polling for new jobs..."); + @Positive long updateTimeout = jobProperties.getJobUpdateTimeoutSeconds(); List jobUpdateTasks = new ArrayList<>(); ListStoresResponse listStoresResponse = specService.listStores(Filter.newBuilder().build()); - for (StoreProto.Store store : listStoresResponse.getStoreList()) { - Set featureSets = new HashSet<>(); - for (Subscription subscription : store.getSubscriptionsList()) { - featureSets.addAll( - new ArrayList<>( - specService - .listFeatureSets( - ListFeatureSetsRequest.Filter.newBuilder() - .setFeatureSetName(subscription.getName()) - .setFeatureSetVersion(subscription.getVersion()) - .setProject(subscription.getProject()) - .build()) - .getFeatureSetsList())); - } - if (!featureSets.isEmpty()) { - featureSets.stream() - .collect(Collectors.groupingBy(fs -> fs.getSpec().getSource())) - .entrySet() - .stream() - .forEach( - kv -> { - Optional originalJob = - getJob(Source.fromProto(kv.getKey()), Store.fromProto(store)); - jobUpdateTasks.add( - new JobUpdateTask( - kv.getValue(), - kv.getKey(), - store, - originalJob, - jobManager, - jobUpdatesProperties.getTimeoutSeconds())); - }); + + for (StoreProto.Store storeSpec : listStoresResponse.getStoreList()) { + Set featureSets = new HashSet<>(); + Store store = Store.fromProto(storeSpec); + + for (Subscription subscription : store.getSubscriptions()) { + List featureSetsForSub = + featureSetRepository.findAllByNameLikeAndProject_NameLikeOrderByNameAsc( + subscription.getName().replace('*', '%'), + subscription.getProject().replace('*', '%')); + featureSets.addAll(featureSetsForSub); } + + featureSets.stream() + .collect(Collectors.groupingBy(FeatureSet::getSource)) + .forEach( + (source, setsForSource) -> { + Optional originalJob = getJob(source, store); + jobUpdateTasks.add( + new JobUpdateTask( + setsForSource, source, store, originalJob, jobManager, updateTimeout)); + }); } - if (jobUpdateTasks.size() == 0) { + if (jobUpdateTasks.isEmpty()) { log.info("No jobs found."); return; } log.info("Creating/Updating {} jobs...", jobUpdateTasks.size()); - ExecutorService executorService = Executors.newFixedThreadPool(jobUpdateTasks.size()); + startOrUpdateJobs(jobUpdateTasks); + + log.info("Updating feature set status"); + updateFeatureSetStatuses(jobUpdateTasks); + } + + void startOrUpdateJobs(List tasks) { + ExecutorService executorService = Executors.newFixedThreadPool(tasks.size()); ExecutorCompletionService ecs = new ExecutorCompletionService<>(executorService); - jobUpdateTasks.forEach(ecs::submit); + tasks.forEach(ecs::submit); int completedTasks = 0; - while (completedTasks < jobUpdateTasks.size()) { + List startedJobs = new ArrayList<>(); + while (completedTasks < tasks.size()) { try { Job job = ecs.take().get(); if (job != null) { - jobRepository.saveAndFlush(job); + startedJobs.add(job); } } catch (ExecutionException | InterruptedException e) { log.warn("Unable to start or update job: {}", e.getMessage()); } completedTasks++; } - - log.info("Updating feature set status"); - updateFeatureSetStatuses(jobUpdateTasks); + jobRepository.saveAll(startedJobs); + executorService.shutdown(); } // TODO: make this more efficient private void updateFeatureSetStatuses(List jobUpdateTasks) { Set ready = new HashSet<>(); Set pending = new 
HashSet<>(); - for (JobUpdateTask jobUpdateTask : jobUpdateTasks) { - Optional job = - getJob( - Source.fromProto(jobUpdateTask.getSourceSpec()), - Store.fromProto(jobUpdateTask.getStore())); - if (job.isPresent()) { - if (job.get().getStatus() == JobStatus.RUNNING) { - ready.addAll(job.get().getFeatureSets()); - } else { - pending.addAll(job.get().getFeatureSets()); - } - } + for (JobUpdateTask task : jobUpdateTasks) { + getJob(task.getSource(), task.getStore()) + .ifPresent( + job -> { + if (job.isRunning()) { + ready.addAll(job.getFeatureSets()); + } else { + pending.addAll(job.getFeatureSets()); + } + }); } ready.removeAll(pending); ready.forEach( fs -> { - fs.setStatus(FeatureSetStatus.STATUS_READY.toString()); + fs.setStatus(FeatureSetStatus.STATUS_READY); featureSetRepository.save(fs); }); pending.forEach( fs -> { - fs.setStatus(FeatureSetStatus.STATUS_PENDING.toString()); + fs.setStatus(FeatureSetStatus.STATUS_JOB_STARTING); featureSetRepository.save(fs); }); featureSetRepository.flush(); @@ -188,11 +182,8 @@ public Optional getJob(Source source, Store store) { List jobs = jobRepository.findBySourceIdAndStoreNameOrderByLastUpdatedDesc( source.getId(), store.getName()); - jobs = - jobs.stream() - .filter(job -> !JobStatus.getTerminalState().contains(job.getStatus())) - .collect(Collectors.toList()); - if (jobs.size() == 0) { + jobs = jobs.stream().filter(job -> !job.hasTerminated()).collect(Collectors.toList()); + if (jobs.isEmpty()) { return Optional.empty(); } // return the latest diff --git a/core/src/main/java/feast/core/service/JobService.java b/core/src/main/java/feast/core/service/JobService.java index bf74b90e80..cc125305ec 100644 --- a/core/src/main/java/feast/core/service/JobService.java +++ b/core/src/main/java/feast/core/service/JobService.java @@ -17,24 +17,25 @@ package feast.core.service; import com.google.protobuf.InvalidProtocolBufferException; -import feast.core.CoreServiceProto.ListFeatureSetsRequest; -import feast.core.CoreServiceProto.ListFeatureSetsResponse; -import feast.core.CoreServiceProto.ListIngestionJobsRequest; -import feast.core.CoreServiceProto.ListIngestionJobsResponse; -import feast.core.CoreServiceProto.RestartIngestionJobRequest; -import feast.core.CoreServiceProto.RestartIngestionJobResponse; -import feast.core.CoreServiceProto.StopIngestionJobRequest; -import feast.core.CoreServiceProto.StopIngestionJobResponse; -import feast.core.FeatureSetReferenceProto.FeatureSetReference; -import feast.core.IngestionJobProto; import feast.core.dao.JobRepository; import feast.core.job.JobManager; +import feast.core.job.Runner; import feast.core.log.Action; import feast.core.log.AuditLogger; import feast.core.log.Resource; import feast.core.model.FeatureSet; import feast.core.model.Job; import feast.core.model.JobStatus; +import feast.proto.core.CoreServiceProto.ListFeatureSetsRequest; +import feast.proto.core.CoreServiceProto.ListFeatureSetsResponse; +import feast.proto.core.CoreServiceProto.ListIngestionJobsRequest; +import feast.proto.core.CoreServiceProto.ListIngestionJobsResponse; +import feast.proto.core.CoreServiceProto.RestartIngestionJobRequest; +import feast.proto.core.CoreServiceProto.RestartIngestionJobResponse; +import feast.proto.core.CoreServiceProto.StopIngestionJobRequest; +import feast.proto.core.CoreServiceProto.StopIngestionJobResponse; +import feast.proto.core.FeatureSetReferenceProto.FeatureSetReference; +import feast.proto.core.IngestionJobProto; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; @@ -50,13 
+51,13 @@ import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; -/** Defines a Job Managemenent Service that allows users to manage feast ingestion jobs. */ +/** A Job Management Service that allows users to manage Feast ingestion jobs. */ @Slf4j @Service public class JobService { - private JobRepository jobRepository; - private SpecService specService; - private Map jobManagers; + private final JobRepository jobRepository; + private final SpecService specService; + private final Map jobManagers; @Autowired public JobService( @@ -66,13 +67,13 @@ public JobService( this.jobManagers = new HashMap<>(); for (JobManager manager : jobManagerList) { - this.jobManagers.put(manager.getRunnerType().name(), manager); + this.jobManagers.put(manager.getRunnerType(), manager); } } /* Job Service API */ /** - * List Ingestion Jobs in feast matching the given request. See CoreService protobuf documentation + * List Ingestion Jobs in Feast matching the given request. See CoreService protobuf documentation * for more detailed documentation. * * @param request list ingestion jobs request specifying which jobs to include @@ -158,6 +159,7 @@ public RestartIngestionJobResponse restartJob(RestartIngestionJobRequest request // check job exists Optional getJob = this.jobRepository.findById(request.getId()); if (getJob.isEmpty()) { + // FIXME: if getJob.isEmpty then constructing this error message will always throw an error... throw new NoSuchElementException( "Attempted to stop nonexistent job with id: " + getJob.get().getId()); } @@ -165,9 +167,7 @@ public RestartIngestionJobResponse restartJob(RestartIngestionJobRequest request // check job status is valid for restarting Job job = getJob.get(); JobStatus status = job.getStatus(); - if (JobStatus.getTransitionalStates().contains(status) - || JobStatus.getTerminalState().contains(status) - || status.equals(JobStatus.UNKNOWN)) { + if (status.isTransitional() || status.isTerminal() || status == JobStatus.UNKNOWN) { throw new UnsupportedOperationException( "Restarting a job with a transitional, terminal or unknown status is unsupported"); } @@ -208,11 +208,10 @@ public StopIngestionJobResponse stopJob(StopIngestionJobRequest request) // check job status is valid for stopping Job job = getJob.get(); JobStatus status = job.getStatus(); - if (JobStatus.getTerminalState().contains(status)) { + if (status.isTerminal()) { // do nothing - job is already stopped return StopIngestionJobResponse.newBuilder().build(); - } else if (JobStatus.getTransitionalStates().contains(status) - || status.equals(JobStatus.UNKNOWN)) { + } else if (status.isTransitional() || status == JobStatus.UNKNOWN) { throw new UnsupportedOperationException( "Stopping a job with a transitional or unknown status is unsupported"); } @@ -249,7 +248,6 @@ private ListFeatureSetsRequest.Filter toListFeatureSetFilter(FeatureSetReference // match featuresets using contents of featureset reference String fsName = fsReference.getName(); String fsProject = fsReference.getProject(); - Integer fsVersion = fsReference.getVersion(); // construct list featureset request filter using feature set reference // for proto3, default value for missing values: @@ -259,7 +257,6 @@ private ListFeatureSetsRequest.Filter toListFeatureSetFilter(FeatureSetReference ListFeatureSetsRequest.Filter.newBuilder() .setFeatureSetName((fsName != "") ? fsName : "*") .setProject((fsProject != "") ? fsProject : "*") - .setFeatureSetVersion((fsVersion != 0) ? 
fsVersion.toString() : "*") .build(); return filter; diff --git a/core/src/main/java/feast/core/service/SpecService.java b/core/src/main/java/feast/core/service/SpecService.java index 8fec6ac511..01cd264c76 100644 --- a/core/src/main/java/feast/core/service/SpecService.java +++ b/core/src/main/java/feast/core/service/SpecService.java @@ -19,23 +19,7 @@ import static feast.core.validators.Matchers.checkValidCharacters; import static feast.core.validators.Matchers.checkValidCharactersAllowAsterisk; -import com.google.common.collect.Ordering; import com.google.protobuf.InvalidProtocolBufferException; -import feast.core.CoreServiceProto.ApplyFeatureSetResponse; -import feast.core.CoreServiceProto.ApplyFeatureSetResponse.Status; -import feast.core.CoreServiceProto.GetFeatureSetRequest; -import feast.core.CoreServiceProto.GetFeatureSetResponse; -import feast.core.CoreServiceProto.ListFeatureSetsRequest; -import feast.core.CoreServiceProto.ListFeatureSetsResponse; -import feast.core.CoreServiceProto.ListStoresRequest; -import feast.core.CoreServiceProto.ListStoresResponse; -import feast.core.CoreServiceProto.ListStoresResponse.Builder; -import feast.core.CoreServiceProto.UpdateStoreRequest; -import feast.core.CoreServiceProto.UpdateStoreResponse; -import feast.core.FeatureSetProto; -import feast.core.SourceProto; -import feast.core.StoreProto; -import feast.core.StoreProto.Store.Subscription; import feast.core.dao.FeatureSetRepository; import feast.core.dao.ProjectRepository; import feast.core.dao.StoreRepository; @@ -45,10 +29,25 @@ import feast.core.model.Source; import feast.core.model.Store; import feast.core.validators.FeatureSetValidator; +import feast.proto.core.CoreServiceProto.ApplyFeatureSetResponse; +import feast.proto.core.CoreServiceProto.ApplyFeatureSetResponse.Status; +import feast.proto.core.CoreServiceProto.GetFeatureSetRequest; +import feast.proto.core.CoreServiceProto.GetFeatureSetResponse; +import feast.proto.core.CoreServiceProto.ListFeatureSetsRequest; +import feast.proto.core.CoreServiceProto.ListFeatureSetsResponse; +import feast.proto.core.CoreServiceProto.ListStoresRequest; +import feast.proto.core.CoreServiceProto.ListStoresResponse; +import feast.proto.core.CoreServiceProto.ListStoresResponse.Builder; +import feast.proto.core.CoreServiceProto.UpdateStoreRequest; +import feast.proto.core.CoreServiceProto.UpdateStoreResponse; +import feast.proto.core.FeatureSetProto; +import feast.proto.core.FeatureSetProto.FeatureSetStatus; +import feast.proto.core.SourceProto; +import feast.proto.core.StoreProto; +import feast.proto.core.StoreProto.Store.Subscription; import java.util.ArrayList; import java.util.List; import lombok.extern.slf4j.Slf4j; -import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; @@ -81,7 +80,8 @@ public SpecService( /** * Get a feature set matching the feature name and version and project. The feature set name and * project are required, but version can be omitted by providing 0 for its value. If the version - * is omitted, the latest feature set will be provided. + * is omitted, the latest feature set will be provided. If the project is omitted, the default + * would be used. * * @param request: GetFeatureSetRequest Request containing filter parameters. * @return Returns a GetFeatureSetResponse containing a feature set.. 
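Both JobCoordinatorService.Poll() above and the listFeatureSets() changes below match '*' globs against feature sets by translating them into SQL LIKE patterns before hitting the repository. A small sketch of that translation, using the repository method named in this diff (the wrapper class itself is illustrative):

import feast.core.dao.FeatureSetRepository;
import feast.core.model.FeatureSet;
import java.util.List;

class SubscriptionMatcher {
  private final FeatureSetRepository featureSetRepository;

  SubscriptionMatcher(FeatureSetRepository featureSetRepository) {
    this.featureSetRepository = featureSetRepository;
  }

  // '*' is the only wildcard allowed in a subscription; '%' is its SQL LIKE
  // spelling, so "driver_*" in project "*" becomes LIKE "driver_%" / LIKE "%".
  List<FeatureSet> match(String nameGlob, String projectGlob) {
    return featureSetRepository.findAllByNameLikeAndProject_NameLikeOrderByNameAsc(
        nameGlob.replace('*', '%'), projectGlob.replace('*', '%'));
  }
}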
@@ -95,130 +95,88 @@ public GetFeatureSetResponse getFeatureSet(GetFeatureSetRequest request) if (request.getName().isEmpty()) { throw new IllegalArgumentException("No feature set name provided"); } + // Autofill default project if project is not specified if (request.getProject().isEmpty()) { - throw new IllegalArgumentException("No project provided"); - } - if (request.getVersion() < 0) { - throw new IllegalArgumentException("Version number cannot be less than 0"); + request = request.toBuilder().setProject(Project.DEFAULT_NAME).build(); } FeatureSet featureSet; - // Filter the list based on version - if (request.getVersion() == 0) { - featureSet = - featureSetRepository.findFirstFeatureSetByNameLikeAndProject_NameOrderByVersionDesc( - request.getName(), request.getProject()); - - if (featureSet == null) { - throw new RetrievalException( - String.format("Feature set with name \"%s\" could not be found.", request.getName())); - } - } else { - featureSet = - featureSetRepository.findFeatureSetByNameAndProject_NameAndVersion( - request.getName(), request.getProject(), request.getVersion()); + featureSet = + featureSetRepository.findFeatureSetByNameAndProject_Name( + request.getName(), request.getProject()); - if (featureSet == null) { - throw new RetrievalException( - String.format( - "Feature set with name \"%s\" and version \"%s\" could " + "not be found.", - request.getName(), request.getVersion())); - } + if (featureSet == null) { + throw new RetrievalException( + String.format("Feature set with name \"%s\" could not be found.", request.getName())); } - - // Only a single item in list, return successfully return GetFeatureSetResponse.newBuilder().setFeatureSet(featureSet.toProto()).build(); } /** - * Return a list of feature sets matching the feature set name, version, and project provided in - * the filter. All fields are requried. Use '*' for all three arguments in order to return all - * feature sets and versions in all projects. + * Return a list of feature sets matching the feature set name and project provided in the filter. + * All fields are required. Use '*' for all arguments in order to return all feature sets in all + * projects. * *

Project name can be explicitly provided, or an asterisk can be provided to match all - * projects. It is not possible to provide a combination of asterisks/wildcards and text. + * projects. It is not possible to provide a combination of asterisks/wildcards and text. If the + * project name is omitted, the default project would be used. * *

The feature set name in the filter accepts an asterisk as a wildcard. All matching feature * sets will be returned. Regex is not supported. Explicitly defining a feature set name is not * possible if a project name is not set explicitly * - *

The version field can be one of - '*' - This will match all versions - 'latest' - This will - * match the latest feature set version - '<number>' - This will match a specific feature - * set version. This property can only be set if both the feature set name and project name are - * explicitly set. - * - * @param filter filter containing the desired featureSet name and version filter + * @param filter filter containing the desired featureSet name * @return ListFeatureSetsResponse with list of featureSets found matching the filter */ public ListFeatureSetsResponse listFeatureSets(ListFeatureSetsRequest.Filter filter) throws InvalidProtocolBufferException { String name = filter.getFeatureSetName(); String project = filter.getProject(); - String version = filter.getFeatureSetVersion(); - if (project.isEmpty() || name.isEmpty() || version.isEmpty()) { + if (name.isEmpty()) { throw new IllegalArgumentException( - String.format( - "Invalid listFeatureSetRequest, missing arguments. Must provide project, feature set name, and version.", - filter.toString())); + "Invalid listFeatureSetRequest, missing arguments. Must provide feature set name:"); } checkValidCharactersAllowAsterisk(name, "featureSetName"); checkValidCharactersAllowAsterisk(project, "projectName"); - List featureSets = new ArrayList() {}; + // Autofill default project if project not specified + if (project.isEmpty()) { + project = Project.DEFAULT_NAME; + } - if (project.equals("*")) { - // Matching all projects + List featureSets = new ArrayList() {}; - if (name.equals("*") && version.equals("*")) { + if (project.contains("*")) { + // Matching a wildcard project + if (name.contains("*")) { featureSets = - featureSetRepository.findAllByNameLikeAndProject_NameLikeOrderByNameAscVersionAsc( + featureSetRepository.findAllByNameLikeAndProject_NameLikeOrderByNameAsc( name.replace('*', '%'), project.replace('*', '%')); } else { throw new IllegalArgumentException( String.format( - "Invalid listFeatureSetRequest. Version and feature set name must be set to " + "Invalid listFeatureSetRequest. 
Feature set name must be set to " + "\"*\" if the project name and feature set name aren't set explicitly: \n%s", filter.toString())); } } else if (!project.contains("*")) { // Matching a specific project - - if (name.contains("*") && version.equals("*")) { - // Find all feature sets matching a pattern and versions in a specific project - featureSets = - featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc( - name.replace('*', '%'), project); - - } else if (!name.contains("*") && version.equals("*")) { - // Find all versions of a specific feature set in a specific project + if (name.contains("*")) { + // Find all feature sets matching a pattern in a specific project featureSets = - featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc( - name, project); - - } else if (version.equals("latest")) { - // Find the latest version of a feature set matching a specific pattern in a specific - // project - FeatureSet latestFeatureSet = - featureSetRepository.findFirstFeatureSetByNameLikeAndProject_NameOrderByVersionDesc( + featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAsc( name.replace('*', '%'), project); - featureSets.add(latestFeatureSet); - - } else if (!name.contains("*") && StringUtils.isNumeric(version)) { - // Find a specific version of a feature set matching a specific name in a specific project - FeatureSet specificFeatureSet = - featureSetRepository.findFeatureSetByNameAndProject_NameAndVersion( - name, project, Integer.parseInt(version)); - featureSets.add(specificFeatureSet); - } else { - throw new IllegalArgumentException( - String.format( - "Invalid listFeatureSetRequest. Version must be set to \"*\" if the project " - + "name and feature set name aren't set explicitly: \n%s", - filter.toString())); + } else if (!name.contains("*")) { + // Find a specific feature set in a specific project + FeatureSet featureSet = + featureSetRepository.findFeatureSetByNameAndProject_Name(name, project); + if (featureSet != null) { + featureSets.add(featureSet); + } } } else { throw new IllegalArgumentException( @@ -273,17 +231,25 @@ public ListStoresResponse listStores(ListStoresRequest.Filter filter) { } /** - * Creates or updates a feature set in the repository. If there is a change in the feature set - * schema, then the feature set version will be incremented. + * Creates or updates a feature set in the repository. * *

This function is idempotent. If no changes are detected in the incoming featureSet's schema, * this method will update the incoming featureSet spec with the latest version stored in the - * repository, and return that. + * repository, and return that. If project is not specified in the given featureSet, Feast will + * assign the featureSet to the 'default' project. * * @param newFeatureSet Feature set that will be created or updated. */ public ApplyFeatureSetResponse applyFeatureSet(FeatureSetProto.FeatureSet newFeatureSet) throws InvalidProtocolBufferException { + // Autofill default project if not specified + if (newFeatureSet.getSpec().getProject().isEmpty()) { + newFeatureSet = + newFeatureSet + .toBuilder() + .setSpec(newFeatureSet.getSpec().toBuilder().setProject(Project.DEFAULT_NAME).build()) + .build(); + } // Validate incoming feature set FeatureSetValidator.validateSpec(newFeatureSet); @@ -300,53 +266,48 @@ public ApplyFeatureSetResponse applyFeatureSet(FeatureSetProto.FeatureSet newFea throw new IllegalArgumentException(String.format("Project is archived: %s", project_name)); } - // Retrieve all existing FeatureSet objects - List existingFeatureSets = - featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc( - newFeatureSet.getSpec().getName(), project_name); - - if (existingFeatureSets.size() == 0) { - // Create new feature set since it doesn't exist + // Set source to default if not set in proto + if (newFeatureSet.getSpec().getSource() == SourceProto.Source.getDefaultInstance()) { newFeatureSet = newFeatureSet .toBuilder() - .setSpec(newFeatureSet.getSpec().toBuilder().setVersion(1)) + .setSpec( + newFeatureSet.getSpec().toBuilder().setSource(defaultSource.toProto()).build()) .build(); - } else { - // Retrieve the latest feature set if the name does exist - existingFeatureSets = Ordering.natural().reverse().sortedCopy(existingFeatureSets); - FeatureSet latest = existingFeatureSets.get(0); - FeatureSet featureSet = FeatureSet.fromProto(newFeatureSet); + } + // Retrieve existing FeatureSet + FeatureSet featureSet = + featureSetRepository.findFeatureSetByNameAndProject_Name( + newFeatureSet.getSpec().getName(), project_name); + + Status status; + if (featureSet == null) { + // Create new feature set since it doesn't exist + newFeatureSet = newFeatureSet.toBuilder().setSpec(newFeatureSet.getSpec()).build(); + featureSet = FeatureSet.fromProto(newFeatureSet); + status = Status.CREATED; + } else { // If the featureSet remains unchanged, we do nothing.
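The no-change branch below leans on protobuf value equality: generated messages implement equals() field by field, so comparing the stored spec with the incoming one is already a deep comparison, and the old hand-written equalTo() can be dropped. A tiny illustration, with setter names taken from elsewhere in this diff:

import feast.proto.core.FeatureSetProto.FeatureSetSpec;

class SpecEqualitySketch {
  static void demo() {
    FeatureSetSpec a = FeatureSetSpec.newBuilder().setProject("default").setName("driver").build();
    FeatureSetSpec b = FeatureSetSpec.newBuilder().setProject("default").setName("driver").build();

    // Deep, field-by-field equality provided by the protobuf runtime.
    assert a.equals(b);

    // Any differing field breaks equality, which applyFeatureSet() reports as Status.UPDATED.
    assert !a.equals(a.toBuilder().setName("rider").build());
  }
}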
- if (featureSet.equalTo(latest)) { + if (featureSet.toProto().getSpec().equals(newFeatureSet.getSpec())) { return ApplyFeatureSetResponse.newBuilder() - .setFeatureSet(latest.toProto()) + .setFeatureSet(featureSet.toProto()) .setStatus(Status.NO_CHANGE) .build(); } - // TODO: There is a race condition here with incrementing the version - newFeatureSet = - newFeatureSet - .toBuilder() - .setSpec(newFeatureSet.getSpec().toBuilder().setVersion(latest.getVersion() + 1)) - .build(); - } - - // Build a new FeatureSet object which includes the new properties - FeatureSet featureSet = FeatureSet.fromProto(newFeatureSet); - if (newFeatureSet.getSpec().getSource() == SourceProto.Source.getDefaultInstance()) { - featureSet.setSource(defaultSource); + featureSet.updateFromProto(newFeatureSet); + status = Status.UPDATED; } // Persist the FeatureSet object + featureSet.setStatus(FeatureSetStatus.STATUS_PENDING); project.addFeatureSet(featureSet); projectRepository.saveAndFlush(project); // Build ApplyFeatureSetResponse return ApplyFeatureSetResponse.newBuilder() .setFeatureSet(featureSet.toProto()) - .setStatus(Status.CREATED) + .setStatus(status) .build(); } @@ -364,7 +325,7 @@ public UpdateStoreResponse updateStore(UpdateStoreRequest updateStoreRequest) List subs = newStoreProto.getSubscriptionsList(); for (Subscription sub : subs) { // Ensure that all fields in a subscription contain values - if ((sub.getVersion().isEmpty() || sub.getName().isEmpty()) || sub.getProject().isEmpty()) { + if ((sub.getName().isEmpty()) || sub.getProject().isEmpty()) { throw new IllegalArgumentException( String.format("Missing parameter in subscription: %s", sub)); } diff --git a/core/src/main/java/feast/core/util/TypeConversion.java b/core/src/main/java/feast/core/util/TypeConversion.java index fd582929c2..8b58eaec4c 100644 --- a/core/src/main/java/feast/core/util/TypeConversion.java +++ b/core/src/main/java/feast/core/util/TypeConversion.java @@ -16,12 +16,10 @@ */ package feast.core.util; -import com.google.common.base.Strings; import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; import java.lang.reflect.Type; import java.util.*; -import java.util.Map.Entry; public class TypeConversion { private static Gson gson = new Gson(); @@ -73,23 +71,6 @@ public static Map convertJsonStringToMap(String jsonString) { * @return json string corresponding to given map */ public static String convertMapToJsonString(Map map) { - if (map.isEmpty()) { - return "{}"; - } return gson.toJson(map); } - - /** - * Convert a map of key value pairs to a array of java arguments in format --key=value - * - * @param map - * @return array of string arguments - */ - public static String[] convertMapToArgs(Map map) { - List args = new ArrayList<>(); - for (Entry arg : map.entrySet()) { - args.add(Strings.lenientFormat("--%s=%s", arg.getKey(), arg.getValue())); - } - return args.toArray(new String[] {}); - } } diff --git a/core/src/main/java/feast/core/validators/FeatureSetValidator.java b/core/src/main/java/feast/core/validators/FeatureSetValidator.java index 213e3898d5..9cfd136b84 100644 --- a/core/src/main/java/feast/core/validators/FeatureSetValidator.java +++ b/core/src/main/java/feast/core/validators/FeatureSetValidator.java @@ -19,14 +19,15 @@ import static feast.core.validators.Matchers.checkValidCharacters; import com.google.common.collect.Sets; -import feast.core.FeatureSetProto.EntitySpec; -import feast.core.FeatureSetProto.FeatureSet; -import feast.core.FeatureSetProto.FeatureSpec; +import 
feast.proto.core.FeatureSetProto.EntitySpec; +import feast.proto.core.FeatureSetProto.FeatureSet; +import feast.proto.core.FeatureSetProto.FeatureSpec; import java.util.HashSet; import java.util.List; import java.util.stream.Collectors; public class FeatureSetValidator { + public static void validateSpec(FeatureSet featureSet) { if (featureSet.getSpec().getProject().isEmpty()) { throw new IllegalArgumentException("Project name must be provided"); @@ -34,6 +35,9 @@ public static void validateSpec(FeatureSet featureSet) { if (featureSet.getSpec().getName().isEmpty()) { throw new IllegalArgumentException("Feature set name must be provided"); } + if (featureSet.getSpec().getLabelsMap().containsKey("")) { + throw new IllegalArgumentException("Feature set label keys must not be empty"); + } checkValidCharacters(featureSet.getSpec().getProject(), "project"); checkValidCharacters(featureSet.getSpec().getName(), "name"); @@ -44,6 +48,9 @@ public static void validateSpec(FeatureSet featureSet) { } for (FeatureSpec featureSpec : featureSet.getSpec().getFeaturesList()) { checkValidCharacters(featureSpec.getName(), "features::name"); + if (featureSpec.getLabelsMap().containsKey("")) { + throw new IllegalArgumentException("Feature label keys must not be empty"); + } } } diff --git a/core/src/main/java/feast/core/validators/OneOfStringValidator.java b/core/src/main/java/feast/core/validators/OneOfStringValidator.java new file mode 100644 index 0000000000..6b84e44b01 --- /dev/null +++ b/core/src/main/java/feast/core/validators/OneOfStringValidator.java @@ -0,0 +1,51 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.core.validators; + +import java.util.Arrays; +import javax.validation.ConstraintValidator; +import javax.validation.ConstraintValidatorContext; + +/** Validates whether a string value is found within a collection. */ +public class OneOfStringValidator implements ConstraintValidator { + + /** Values that are permitted for a specific instance of this validator */ + String[] allowedValues; + + /** + * Initialize the OneOfStringValidator with a collection of allowed String values. + * + * @param constraintAnnotation + */ + @Override + public void initialize(OneOfStrings constraintAnnotation) { + allowedValues = constraintAnnotation.value(); + } + + /** + * Validates whether a string value is found within the collection defined in the annotation. + * + * @param value String value that should be validated + * @param context Provides contextual data and operation when applying a given constraint + * validator + * @return Boolean value indicating whether the string is found within the allowed values. 
+ */ + @Override + public boolean isValid(String value, ConstraintValidatorContext context) { + return Arrays.asList(allowedValues).contains(value); + } +} diff --git a/core/src/main/java/feast/core/validators/OneOfStrings.java b/core/src/main/java/feast/core/validators/OneOfStrings.java new file mode 100644 index 0000000000..dba290438c --- /dev/null +++ b/core/src/main/java/feast/core/validators/OneOfStrings.java @@ -0,0 +1,49 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.core.validators; + +import java.lang.annotation.*; +import javax.validation.Constraint; +import javax.validation.Payload; + +/** + * Annotation for String "one of" validation. Allows for the definition of a collection through an + * annotation. The collection is used to test values defined in the object. + */ +@Target({ + ElementType.METHOD, + ElementType.FIELD, + ElementType.ANNOTATION_TYPE, + ElementType.CONSTRUCTOR, + ElementType.PARAMETER +}) +@Retention(RetentionPolicy.RUNTIME) +@Documented +@Constraint(validatedBy = OneOfStringValidator.class) +public @interface OneOfStrings { + /** @return Default error message that is returned if the incorrect value is set */ + String message() default "Field value must be one of the following: {value}"; + + /** Allows for the specification of validation groups to which this constraint belongs. */ + Class[] groups() default {}; + + /** An attribute payload that can be used to assign custom payload objects to a constraint. */ + Class[] payload() default {}; + + /** @return Default value that is returned if no allowed values are configured */ + String[] value() default {}; +} diff --git a/core/src/main/resources/application.yml b/core/src/main/resources/application.yml index ee060fffc9..51395cf644 100644 --- a/core/src/main/resources/application.yml +++ b/core/src/main/resources/application.yml @@ -23,18 +23,39 @@ grpc: enable-reflection: true feast: -# version: @project.version@ jobs: - # Runner type for feature population jobs. Currently supported runner types are - # DirectRunner and DataflowRunner. - runner: DirectRunner - # Key-value dict of job options to be passed to the population jobs. - options: {} - updates: - # Job update polling interval in milliseconds: how often Feast checks if new jobs should be sent to the runner. - pollingIntervalMillis: 60000 - # Timeout in seconds for each attempt to update or submit a new job to the runner. - timeoutSeconds: 240 + # Job update polling interval in milliseconds: how often Feast checks if new jobs should be sent to the runner. + polling_interval_milliseconds: 60000 + + # Timeout in seconds for each attempt to update or submit a new job to the runner. + job_update_timeout_seconds: 240 + + # Name of the active runner in "runners" that should be used. Only a single runner can be active at one time. + active_runner: direct + + # List of runner configurations. 
Please see protos/feast/core/Runner.proto for more details + # Alternatively see the following for options https://api.docs.feast.dev/grpc/feast.core.pb.html#Runner + runners: + - name: direct + type: DirectRunner + options: {} + + - name: dataflow + type: DataflowRunner + options: + project: my_gcp_project + region: asia-east1 + zone: asia-east1-a + tempLocation: gs://bucket/tempLocation + network: default + subnetwork: regions/asia-east1/subnetworks/mysubnetwork + maxNumWorkers: 1 + autoscalingAlgorithm: THROUGHPUT_BASED + usePublicIps: false + workerMachineType: n1-standard-1 + deadLetterTableSpec: project_id:dataset_id.table_id + + # Configuration options for metric collection for all ingestion jobs metrics: # Enable metrics pushing for all ingestion jobs. enabled: false @@ -49,9 +70,10 @@ feast: # Feature stream type. Only kafka is supported. type: kafka # Feature stream options. + # See the following for options https://api.docs.feast.dev/grpc/feast.core.pb.html#KafkaSourceConfig options: topic: feast-features - bootstrapServers: kafka:9092 + bootstrapServers: localhost:9092 replicationFactor: 1 partitions: 1 diff --git a/core/src/test/java/feast/core/http/HealthControllerTest.java b/core/src/test/java/feast/core/http/HealthControllerTest.java deleted file mode 100644 index 2fcd622f34..0000000000 --- a/core/src/test/java/feast/core/http/HealthControllerTest.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * Copyright 2018-2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package feast.core.http; - -import static org.junit.Assert.assertEquals; -import static org.mockito.Mockito.*; - -import java.sql.Connection; -import java.sql.SQLException; -import javax.sql.DataSource; -import org.junit.Test; -import org.springframework.http.HttpStatus; -import org.springframework.http.ResponseEntity; - -public class HealthControllerTest { - @Test - public void ping() { - HealthController healthController = new HealthController(null); - assertEquals(ResponseEntity.ok("pong"), healthController.ping()); - } - - @Test - public void healthz() { - assertEquals(ResponseEntity.ok("healthy"), mockHealthyController().healthz()); - assertEquals( - ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) - .body("Unable to establish connection with DB"), - mockUnhealthyControllerBecauseInvalidConn().healthz()); - assertEquals( - ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body("mocked sqlexception"), - mockUnhealthyControllerBecauseSQLException().healthz()); - } - - private HealthController mockHealthyController() { - DataSource mockDataSource = mock(DataSource.class); - Connection mockConnection = mock(Connection.class); - try { - when(mockConnection.isValid(any(int.class))).thenReturn(Boolean.TRUE); - when(mockDataSource.getConnection()).thenReturn(mockConnection); - } catch (Exception e) { - e.printStackTrace(); - } - return new HealthController(mockDataSource); - } - - private HealthController mockUnhealthyControllerBecauseInvalidConn() { - DataSource mockDataSource = mock(DataSource.class); - Connection mockConnection = mock(Connection.class); - try { - when(mockConnection.isValid(any(int.class))).thenReturn(Boolean.FALSE); - when(mockDataSource.getConnection()).thenReturn(mockConnection); - } catch (Exception ignored) { - } - return new HealthController(mockDataSource); - } - - private HealthController mockUnhealthyControllerBecauseSQLException() { - DataSource mockDataSource = mock(DataSource.class); - Connection mockConnection = mock(Connection.class); - try { - when(mockDataSource.getConnection()).thenThrow(new SQLException("mocked sqlexception")); - } catch (SQLException ignored) { - } - return new HealthController(mockDataSource); - } -} diff --git a/core/src/test/java/feast/core/job/JobUpdateTaskTest.java b/core/src/test/java/feast/core/job/JobUpdateTaskTest.java index 2a1e80994a..d182673801 100644 --- a/core/src/test/java/feast/core/job/JobUpdateTaskTest.java +++ b/core/src/test/java/feast/core/job/JobUpdateTaskTest.java @@ -24,22 +24,24 @@ import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; -import feast.core.FeatureSetProto; -import feast.core.FeatureSetProto.FeatureSetMeta; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.SourceProto; -import feast.core.SourceProto.KafkaSourceConfig; -import feast.core.SourceProto.SourceType; -import feast.core.StoreProto; -import feast.core.StoreProto.Store.RedisConfig; -import feast.core.StoreProto.Store.StoreType; -import feast.core.StoreProto.Store.Subscription; import feast.core.model.FeatureSet; import feast.core.model.Job; import feast.core.model.JobStatus; import feast.core.model.Source; import feast.core.model.Store; +import feast.proto.core.FeatureSetProto; +import feast.proto.core.FeatureSetProto.FeatureSetMeta; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.SourceProto; +import feast.proto.core.SourceProto.KafkaSourceConfig; +import feast.proto.core.SourceProto.SourceType; +import 
feast.proto.core.StoreProto; +import feast.proto.core.StoreProto.Store.RedisConfig; +import feast.proto.core.StoreProto.Store.StoreType; +import feast.proto.core.StoreProto.Store.Subscription; import java.util.Arrays; +import java.util.Collections; +import java.util.List; import java.util.Optional; import org.hamcrest.core.IsNull; import org.junit.Before; @@ -47,95 +49,69 @@ import org.mockito.Mock; public class JobUpdateTaskTest { + private static final Runner RUNNER = Runner.DATAFLOW; + + private static final FeatureSetProto.FeatureSet.Builder fsBuilder = + FeatureSetProto.FeatureSet.newBuilder().setMeta(FeatureSetMeta.newBuilder()); + private static final FeatureSetSpec.Builder specBuilder = FeatureSetSpec.newBuilder(); @Mock private JobManager jobManager; - private StoreProto.Store store; - private SourceProto.Source source; + private Store store; + private Source source; + private FeatureSet featureSet1; @Before public void setUp() { initMocks(this); + when(jobManager.getRunnerType()).thenReturn(RUNNER); + store = - StoreProto.Store.newBuilder() - .setName("test") - .setType(StoreType.REDIS) - .setRedisConfig(RedisConfig.newBuilder().build()) - .addSubscriptions( - Subscription.newBuilder().setProject("*").setName("*").setVersion("*").build()) - .build(); + Store.fromProto( + StoreProto.Store.newBuilder() + .setName("test") + .setType(StoreType.REDIS) + .setRedisConfig(RedisConfig.newBuilder().build()) + .addSubscriptions(Subscription.newBuilder().setProject("*").setName("*").build()) + .build()); source = - SourceProto.Source.newBuilder() - .setType(SourceType.KAFKA) - .setKafkaSourceConfig( - KafkaSourceConfig.newBuilder() - .setTopic("topic") - .setBootstrapServers("servers:9092") - .build()) - .build(); + Source.fromProto( + SourceProto.Source.newBuilder() + .setType(SourceType.KAFKA) + .setKafkaSourceConfig( + KafkaSourceConfig.newBuilder() + .setTopic("topic") + .setBootstrapServers("servers:9092") + .build()) + .build()); + + featureSet1 = + FeatureSet.fromProto(fsBuilder.setSpec(specBuilder.setName("featureSet1")).build()); + featureSet1.setSource(source); + } + + Job makeJob(String extId, List featureSets, JobStatus status) { + return new Job("job", extId, RUNNER, source, store, featureSets, status); + } + + JobUpdateTask makeTask(List featureSets, Optional currentJob) { + return new JobUpdateTask(featureSets, source, store, currentJob, jobManager, 100L); } @Test public void shouldUpdateJobIfPresent() { - FeatureSetProto.FeatureSet featureSet1 = - FeatureSetProto.FeatureSet.newBuilder() - .setSpec( - FeatureSetSpec.newBuilder() - .setSource(source) - .setProject("project1") - .setName("featureSet1") - .setVersion(1)) - .setMeta(FeatureSetMeta.newBuilder()) - .build(); - FeatureSetProto.FeatureSet featureSet2 = - FeatureSetProto.FeatureSet.newBuilder() - .setSpec( - FeatureSetSpec.newBuilder() - .setSource(source) - .setProject("project1") - .setName("featureSet2") - .setVersion(1)) - .setMeta(FeatureSetMeta.newBuilder()) - .build(); - Job originalJob = - new Job( - "job", - "old_ext", - Runner.DATAFLOW.name(), - feast.core.model.Source.fromProto(source), - feast.core.model.Store.fromProto(store), - Arrays.asList(FeatureSet.fromProto(featureSet1)), - JobStatus.RUNNING); - JobUpdateTask jobUpdateTask = - new JobUpdateTask( - Arrays.asList(featureSet1, featureSet2), - source, - store, - Optional.of(originalJob), - jobManager, - 100L); - Job submittedJob = - new Job( - "job", - "old_ext", - Runner.DATAFLOW.name(), - feast.core.model.Source.fromProto(source), - 
feast.core.model.Store.fromProto(store), - Arrays.asList(FeatureSet.fromProto(featureSet1), FeatureSet.fromProto(featureSet2)), - JobStatus.RUNNING); + FeatureSet featureSet2 = + FeatureSet.fromProto(fsBuilder.setSpec(specBuilder.setName("featureSet2")).build()); + List existingFeatureSetsPopulatedByJob = Collections.singletonList(featureSet1); + List newFeatureSetsPopulatedByJob = Arrays.asList(featureSet1, featureSet2); + + Job originalJob = makeJob("old_ext", existingFeatureSetsPopulatedByJob, JobStatus.RUNNING); + JobUpdateTask jobUpdateTask = makeTask(newFeatureSetsPopulatedByJob, Optional.of(originalJob)); + Job submittedJob = makeJob("old_ext", newFeatureSetsPopulatedByJob, JobStatus.RUNNING); - Job expected = - new Job( - "job", - "new_ext", - Runner.DATAFLOW.name(), - Source.fromProto(source), - Store.fromProto(store), - Arrays.asList(FeatureSet.fromProto(featureSet1), FeatureSet.fromProto(featureSet2)), - JobStatus.PENDING); + Job expected = makeJob("new_ext", newFeatureSetsPopulatedByJob, JobStatus.PENDING); when(jobManager.updateJob(submittedJob)).thenReturn(expected); - when(jobManager.getRunnerType()).thenReturn(Runner.DATAFLOW); Job actual = jobUpdateTask.call(); assertThat(actual, equalTo(expected)); @@ -143,43 +119,13 @@ public void shouldUpdateJobIfPresent() { @Test public void shouldCreateJobIfNotPresent() { - FeatureSetProto.FeatureSet featureSet1 = - FeatureSetProto.FeatureSet.newBuilder() - .setSpec( - FeatureSetSpec.newBuilder() - .setSource(source) - .setProject("project1") - .setName("featureSet1") - .setVersion(1)) - .setMeta(FeatureSetMeta.newBuilder()) - .build(); - JobUpdateTask jobUpdateTask = - spy( - new JobUpdateTask( - Arrays.asList(featureSet1), source, store, Optional.empty(), jobManager, 100L)); + var featureSets = Collections.singletonList(featureSet1); + JobUpdateTask jobUpdateTask = spy(makeTask(featureSets, Optional.empty())); doReturn("job").when(jobUpdateTask).createJobId("KAFKA/servers:9092/topic", "test"); - Job expectedInput = - new Job( - "job", - "", - Runner.DATAFLOW.name(), - feast.core.model.Source.fromProto(source), - feast.core.model.Store.fromProto(store), - Arrays.asList(FeatureSet.fromProto(featureSet1)), - JobStatus.PENDING); + Job expectedInput = makeJob("", featureSets, JobStatus.PENDING); + Job expected = makeJob("ext", featureSets, JobStatus.PENDING); - Job expected = - new Job( - "job", - "ext", - Runner.DATAFLOW.name(), - feast.core.model.Source.fromProto(source), - feast.core.model.Store.fromProto(store), - Arrays.asList(FeatureSet.fromProto(featureSet1)), - JobStatus.RUNNING); - - when(jobManager.getRunnerType()).thenReturn(Runner.DATAFLOW); when(jobManager.startJob(expectedInput)).thenReturn(expected); Job actual = jobUpdateTask.call(); @@ -188,83 +134,25 @@ public void shouldCreateJobIfNotPresent() { @Test public void shouldUpdateJobStatusIfNotCreateOrUpdate() { - FeatureSetProto.FeatureSet featureSet1 = - FeatureSetProto.FeatureSet.newBuilder() - .setSpec( - FeatureSetSpec.newBuilder() - .setSource(source) - .setProject("project1") - .setName("featureSet1") - .setVersion(1)) - .setMeta(FeatureSetMeta.newBuilder()) - .build(); - Job originalJob = - new Job( - "job", - "ext", - Runner.DATAFLOW.name(), - feast.core.model.Source.fromProto(source), - feast.core.model.Store.fromProto(store), - Arrays.asList(FeatureSet.fromProto(featureSet1)), - JobStatus.RUNNING); - JobUpdateTask jobUpdateTask = - new JobUpdateTask( - Arrays.asList(featureSet1), source, store, Optional.of(originalJob), jobManager, 100L); + var featureSets = 
Collections.singletonList(featureSet1); + Job originalJob = makeJob("ext", featureSets, JobStatus.RUNNING); + JobUpdateTask jobUpdateTask = makeTask(featureSets, Optional.of(originalJob)); when(jobManager.getJobStatus(originalJob)).thenReturn(JobStatus.ABORTING); - Job expected = - new Job( - "job", - "ext", - Runner.DATAFLOW.name(), - Source.fromProto(source), - Store.fromProto(store), - Arrays.asList(FeatureSet.fromProto(featureSet1)), - JobStatus.ABORTING); - Job actual = jobUpdateTask.call(); + Job updated = jobUpdateTask.call(); - assertThat(actual, equalTo(expected)); + assertThat(updated.getStatus(), equalTo(JobStatus.ABORTING)); } @Test public void shouldReturnJobWithErrorStatusIfFailedToSubmit() { - FeatureSetProto.FeatureSet featureSet1 = - FeatureSetProto.FeatureSet.newBuilder() - .setSpec( - FeatureSetSpec.newBuilder() - .setSource(source) - .setProject("project1") - .setName("featureSet1") - .setVersion(1)) - .setMeta(FeatureSetMeta.newBuilder()) - .build(); - JobUpdateTask jobUpdateTask = - spy( - new JobUpdateTask( - Arrays.asList(featureSet1), source, store, Optional.empty(), jobManager, 100L)); + var featureSets = Collections.singletonList(featureSet1); + JobUpdateTask jobUpdateTask = spy(makeTask(featureSets, Optional.empty())); doReturn("job").when(jobUpdateTask).createJobId("KAFKA/servers:9092/topic", "test"); - Job expectedInput = - new Job( - "job", - "", - Runner.DATAFLOW.name(), - feast.core.model.Source.fromProto(source), - feast.core.model.Store.fromProto(store), - Arrays.asList(FeatureSet.fromProto(featureSet1)), - JobStatus.PENDING); - - Job expected = - new Job( - "job", - "", - Runner.DATAFLOW.name(), - feast.core.model.Source.fromProto(source), - feast.core.model.Store.fromProto(store), - Arrays.asList(FeatureSet.fromProto(featureSet1)), - JobStatus.ERROR); + Job expectedInput = makeJob("", featureSets, JobStatus.PENDING); + Job expected = makeJob("", featureSets, JobStatus.ERROR); - when(jobManager.getRunnerType()).thenReturn(Runner.DATAFLOW); when(jobManager.startJob(expectedInput)) .thenThrow(new RuntimeException("Something went wrong")); @@ -274,21 +162,13 @@ public void shouldReturnJobWithErrorStatusIfFailedToSubmit() { @Test public void shouldTimeout() { - FeatureSetProto.FeatureSet featureSet1 = - FeatureSetProto.FeatureSet.newBuilder() - .setSpec( - FeatureSetSpec.newBuilder() - .setSource(source) - .setProject("project1") - .setName("featureSet1") - .setVersion(1)) - .setMeta(FeatureSetMeta.newBuilder()) - .build(); - + var featureSets = Collections.singletonList(featureSet1); + var timeoutSeconds = 0L; JobUpdateTask jobUpdateTask = spy( new JobUpdateTask( - Arrays.asList(featureSet1), source, store, Optional.empty(), jobManager, 0L)); + featureSets, source, store, Optional.empty(), jobManager, timeoutSeconds)); + Job actual = jobUpdateTask.call(); assertThat(actual, is(IsNull.nullValue())); } diff --git a/core/src/test/java/feast/core/job/RunnerTest.java b/core/src/test/java/feast/core/job/RunnerTest.java new file mode 100644 index 0000000000..ce1700acbe --- /dev/null +++ b/core/src/test/java/feast/core/job/RunnerTest.java @@ -0,0 +1,42 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.core.job; + +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThat; + +import java.util.NoSuchElementException; +import org.junit.Test; + +public class RunnerTest { + + @Test + public void toStringReturnsHumanReadableName() { + assertThat(Runner.DATAFLOW.toString(), is("DataflowRunner")); + } + + @Test + public void fromStringLoadsValueFromHumanReadableName() { + var humanName = Runner.DATAFLOW.toString(); + assertThat(Runner.fromString(humanName), is(Runner.DATAFLOW)); + } + + @Test(expected = NoSuchElementException.class) + public void fromStringThrowsNoSuchElementExceptionForUnknownValue() { + Runner.fromString("this is not a valid Runner"); + } +} diff --git a/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java b/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java index 2d562d38df..ea9caa91ff 100644 --- a/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java +++ b/core/src/test/java/feast/core/job/dataflow/DataflowJobManagerTest.java @@ -22,21 +22,13 @@ import static org.mockito.Mockito.*; import static org.mockito.MockitoAnnotations.initMocks; +import com.google.api.client.auth.oauth2.Credential; +import com.google.api.client.googleapis.testing.auth.oauth2.MockGoogleCredential; import com.google.api.services.dataflow.Dataflow; import com.google.common.collect.Lists; import com.google.protobuf.Duration; import com.google.protobuf.util.JsonFormat; import com.google.protobuf.util.JsonFormat.Printer; -import feast.core.FeatureSetProto; -import feast.core.FeatureSetProto.FeatureSetMeta; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.SourceProto; -import feast.core.SourceProto.KafkaSourceConfig; -import feast.core.SourceProto.SourceType; -import feast.core.StoreProto; -import feast.core.StoreProto.Store.RedisConfig; -import feast.core.StoreProto.Store.StoreType; -import feast.core.StoreProto.Store.Subscription; import feast.core.config.FeastProperties.MetricsProperties; import feast.core.exception.JobExecutionException; import feast.core.job.Runner; @@ -45,11 +37,21 @@ import feast.ingestion.options.BZip2Compressor; import feast.ingestion.options.ImportOptions; import feast.ingestion.options.OptionCompressor; +import feast.proto.core.FeatureSetProto; +import feast.proto.core.FeatureSetProto.FeatureSetMeta; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.RunnerProto.DataflowRunnerConfigOptions; +import feast.proto.core.RunnerProto.DataflowRunnerConfigOptions.Builder; +import feast.proto.core.SourceProto; +import feast.proto.core.SourceProto.KafkaSourceConfig; +import feast.proto.core.SourceProto.SourceType; +import feast.proto.core.StoreProto; +import feast.proto.core.StoreProto.Store.RedisConfig; +import feast.proto.core.StoreProto.Store.StoreType; +import feast.proto.core.StoreProto.Store.Subscription; import java.io.IOException; import java.util.Collections; -import java.util.HashMap; import java.util.List; -import java.util.Map; import org.apache.beam.runners.dataflow.DataflowPipelineJob; import 
org.apache.beam.runners.dataflow.DataflowRunner; import org.apache.beam.sdk.PipelineResult.State; @@ -68,18 +70,31 @@ public class DataflowJobManagerTest { @Mock private Dataflow dataflow; - private Map defaults; + private DataflowRunnerConfigOptions defaults; private DataflowJobManager dfJobManager; @Before public void setUp() { initMocks(this); - defaults = new HashMap<>(); - defaults.put("project", "project"); - defaults.put("region", "region"); + Builder optionsBuilder = DataflowRunnerConfigOptions.newBuilder(); + optionsBuilder.setProject("project"); + optionsBuilder.setRegion("region"); + optionsBuilder.setZone("zone"); + optionsBuilder.setTempLocation("tempLocation"); + optionsBuilder.setNetwork("network"); + optionsBuilder.setSubnetwork("subnetwork"); + optionsBuilder.putLabels("orchestrator", "feast"); + defaults = optionsBuilder.build(); MetricsProperties metricsProperties = new MetricsProperties(); metricsProperties.setEnabled(false); - dfJobManager = new DataflowJobManager(dataflow, defaults, metricsProperties); + Credential credential = null; + try { + credential = MockGoogleCredential.getApplicationDefault(); + } catch (IOException e) { + e.printStackTrace(); + } + + dfJobManager = new DataflowJobManager(defaults, metricsProperties, credential); dfJobManager = spy(dfJobManager); } @@ -90,8 +105,7 @@ public void shouldStartJobWithCorrectPipelineOptions() throws IOException { .setName("SERVING") .setType(StoreType.REDIS) .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379).build()) - .addSubscriptions( - Subscription.newBuilder().setProject("*").setName("*").setVersion("*").build()) + .addSubscriptions(Subscription.newBuilder().setProject("*").setName("*").build()) .build(); SourceProto.Source source = @@ -111,7 +125,6 @@ public void shouldStartJobWithCorrectPipelineOptions() throws IOException { FeatureSetSpec.newBuilder() .setSource(source) .setName("featureSet") - .setVersion(1) .setMaxAge(Duration.newBuilder().build())) .build(); @@ -126,6 +139,7 @@ public void shouldStartJobWithCorrectPipelineOptions() throws IOException { expectedPipelineOptions.setRegion("region"); expectedPipelineOptions.setUpdate(false); expectedPipelineOptions.setAppName("DataflowJobManager"); + expectedPipelineOptions.setLabels(defaults.getLabelsMap()); expectedPipelineOptions.setJobName(jobName); expectedPipelineOptions.setStoreJson(Lists.newArrayList(printer.print(store))); @@ -145,7 +159,7 @@ public void shouldStartJobWithCorrectPipelineOptions() throws IOException { new Job( jobName, "", - Runner.DATAFLOW.name(), + Runner.DATAFLOW, Source.fromProto(source), Store.fromProto(store), Lists.newArrayList(FeatureSet.fromProto(featureSet)), @@ -159,7 +173,7 @@ public void shouldStartJobWithCorrectPipelineOptions() throws IOException { actualPipelineOptions.getOptionsId()); // avoid comparing this value // We only check that we are calling getFilesToStage() manually, because the automatic approach - // throws an error: https://github.com/gojek/feast/pull/291 i.e. do not check for the actual + // throws an error: https://github.com/feast-dev/feast/pull/291 i.e. 
do not check for the actual // files that are staged assertThat( "filesToStage in pipelineOptions should not be null, job manager should set it.", @@ -207,12 +221,7 @@ public void shouldThrowExceptionWhenJobStateTerminal() throws IOException { FeatureSetProto.FeatureSet featureSet = FeatureSetProto.FeatureSet.newBuilder() - .setSpec( - FeatureSetSpec.newBuilder() - .setName("featureSet") - .setVersion(1) - .setSource(source) - .build()) + .setSpec(FeatureSetSpec.newBuilder().setName("featureSet").setSource(source).build()) .build(); dfJobManager = Mockito.spy(dfJobManager); @@ -226,7 +235,7 @@ public void shouldThrowExceptionWhenJobStateTerminal() throws IOException { new Job( "job", "", - Runner.DATAFLOW.name(), + Runner.DATAFLOW, Source.fromProto(source), Store.fromProto(store), Lists.newArrayList(FeatureSet.fromProto(featureSet)), diff --git a/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java b/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java index 76530d9f40..0128f5aa0b 100644 --- a/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java +++ b/core/src/test/java/feast/core/job/direct/DirectRunnerJobManagerTest.java @@ -29,15 +29,6 @@ import com.google.protobuf.Duration; import com.google.protobuf.util.JsonFormat; import com.google.protobuf.util.JsonFormat.Printer; -import feast.core.FeatureSetProto; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.SourceProto; -import feast.core.SourceProto.KafkaSourceConfig; -import feast.core.SourceProto.SourceType; -import feast.core.StoreProto; -import feast.core.StoreProto.Store.RedisConfig; -import feast.core.StoreProto.Store.StoreType; -import feast.core.StoreProto.Store.Subscription; import feast.core.config.FeastProperties.MetricsProperties; import feast.core.job.Runner; import feast.core.job.option.FeatureSetJsonByteConverter; @@ -49,11 +40,19 @@ import feast.ingestion.options.BZip2Compressor; import feast.ingestion.options.ImportOptions; import feast.ingestion.options.OptionCompressor; +import feast.proto.core.FeatureSetProto; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.RunnerProto.DirectRunnerConfigOptions; +import feast.proto.core.SourceProto; +import feast.proto.core.SourceProto.KafkaSourceConfig; +import feast.proto.core.SourceProto.SourceType; +import feast.proto.core.StoreProto; +import feast.proto.core.StoreProto.Store.RedisConfig; +import feast.proto.core.StoreProto.Store.StoreType; +import feast.proto.core.StoreProto.Store.Subscription; import java.io.IOException; import java.util.Collections; -import java.util.HashMap; import java.util.List; -import java.util.Map; import org.apache.beam.runners.direct.DirectRunner; import org.apache.beam.sdk.PipelineResult; import org.apache.beam.sdk.options.PipelineOptionsFactory; @@ -71,12 +70,12 @@ public class DirectRunnerJobManagerTest { @Mock private DirectJobRegistry directJobRegistry; private DirectRunnerJobManager drJobManager; - private Map defaults; + private DirectRunnerConfigOptions defaults; @Before public void setUp() { initMocks(this); - defaults = new HashMap<>(); + defaults = DirectRunnerConfigOptions.newBuilder().setTargetParallelism(1).build(); MetricsProperties metricsProperties = new MetricsProperties(); metricsProperties.setEnabled(false); @@ -91,8 +90,7 @@ public void shouldStartDirectJobAndRegisterPipelineResult() throws IOException { .setName("SERVING") .setType(StoreType.REDIS) 
.setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379).build()) - .addSubscriptions( - Subscription.newBuilder().setProject("*").setName("*").setVersion("*").build()) + .addSubscriptions(Subscription.newBuilder().setProject("*").setName("*").build()) .build(); SourceProto.Source source = @@ -110,7 +108,6 @@ public void shouldStartDirectJobAndRegisterPipelineResult() throws IOException { .setSpec( FeatureSetSpec.newBuilder() .setName("featureSet") - .setVersion(1) .setMaxAge(Duration.newBuilder()) .setSource(source) .build()) @@ -118,12 +115,14 @@ public void shouldStartDirectJobAndRegisterPipelineResult() throws IOException { Printer printer = JsonFormat.printer(); + String expectedJobId = "feast-job-0"; ImportOptions expectedPipelineOptions = PipelineOptionsFactory.fromArgs("").as(ImportOptions.class); + expectedPipelineOptions.setJobName(expectedJobId); expectedPipelineOptions.setAppName("DirectRunnerJobManager"); expectedPipelineOptions.setRunner(DirectRunner.class); expectedPipelineOptions.setBlockOnRun(false); - expectedPipelineOptions.setProject(""); + expectedPipelineOptions.setTargetParallelism(1); expectedPipelineOptions.setStoreJson(Lists.newArrayList(printer.print(store))); expectedPipelineOptions.setProject(""); @@ -132,7 +131,6 @@ public void shouldStartDirectJobAndRegisterPipelineResult() throws IOException { expectedPipelineOptions.setFeatureSetJson( featureSetJsonCompressor.compress(Collections.singletonList(featureSet))); - String expectedJobId = "feast-job-0"; ArgumentCaptor pipelineOptionsCaptor = ArgumentCaptor.forClass(ImportOptions.class); ArgumentCaptor directJobCaptor = ArgumentCaptor.forClass(DirectJob.class); @@ -144,7 +142,7 @@ public void shouldStartDirectJobAndRegisterPipelineResult() throws IOException { new Job( expectedJobId, "", - Runner.DIRECT.name(), + Runner.DIRECT, Source.fromProto(source), Store.fromProto(store), Lists.newArrayList(FeatureSet.fromProto(featureSet)), diff --git a/core/src/test/java/feast/core/job/option/FeatureSetJsonByteConverterTest.java b/core/src/test/java/feast/core/job/option/FeatureSetJsonByteConverterTest.java index 2dfeef1d96..a12452b593 100644 --- a/core/src/test/java/feast/core/job/option/FeatureSetJsonByteConverterTest.java +++ b/core/src/test/java/feast/core/job/option/FeatureSetJsonByteConverterTest.java @@ -19,9 +19,9 @@ import static org.junit.Assert.*; import com.google.protobuf.InvalidProtocolBufferException; -import feast.core.FeatureSetProto; -import feast.core.SourceProto; -import feast.types.ValueProto; +import feast.proto.core.FeatureSetProto; +import feast.proto.core.SourceProto; +import feast.proto.types.ValueProto; import java.util.List; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -29,7 +29,7 @@ public class FeatureSetJsonByteConverterTest { - private FeatureSetProto.FeatureSet newFeatureSet(Integer version, Integer numberOfFeatures) { + private FeatureSetProto.FeatureSet newFeatureSet(Integer numberOfFeatures) { List features = IntStream.range(1, numberOfFeatures + 1) .mapToObj( @@ -51,7 +51,6 @@ private FeatureSetProto.FeatureSet newFeatureSet(Integer version, Integer number .setBootstrapServers("somebrokers:9092") .setTopic("sometopic"))) .addAllFeatures(features) - .setVersion(version) .addEntities( FeatureSetProto.EntitySpec.newBuilder() .setName("entity") @@ -65,12 +64,11 @@ public void shouldConvertFeatureSetsAsJsonStringBytes() throws InvalidProtocolBu int nrOfFeatures = 1; List featureSets = IntStream.range(1, nrOfFeatureSet + 1) - .mapToObj(i -> 
newFeatureSet(i, nrOfFeatures)) + .mapToObj(i -> newFeatureSet(nrOfFeatures)) .collect(Collectors.toList()); String expectedOutputString = - "{\"version\":1," - + "\"entities\":[{\"name\":\"entity\",\"valueType\":2}]," + "{\"entities\":[{\"name\":\"entity\",\"valueType\":2}]," + "\"features\":[{\"name\":\"feature1\",\"valueType\":6}]," + "\"source\":{" + "\"type\":1," diff --git a/core/src/test/java/feast/core/model/FeatureSetTest.java b/core/src/test/java/feast/core/model/FeatureSetTest.java new file mode 100644 index 0000000000..270dc3f3bc --- /dev/null +++ b/core/src/test/java/feast/core/model/FeatureSetTest.java @@ -0,0 +1,205 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.core.model; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.junit.Assert.assertThat; + +import com.google.protobuf.Duration; +import com.google.protobuf.InvalidProtocolBufferException; +import feast.proto.core.FeatureSetProto; +import feast.proto.core.FeatureSetProto.EntitySpec; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.FeatureSetProto.FeatureSetStatus; +import feast.proto.core.FeatureSetProto.FeatureSpec; +import feast.proto.core.SourceProto; +import feast.proto.core.SourceProto.KafkaSourceConfig; +import feast.proto.core.SourceProto.SourceType; +import feast.proto.types.ValueProto.ValueType.Enum; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.tensorflow.metadata.v0.IntDomain; + +public class FeatureSetTest { + @Rule public final ExpectedException expectedException = ExpectedException.none(); + + private FeatureSetProto.FeatureSet oldFeatureSetProto; + + @Before + public void setUp() { + SourceProto.Source oldSource = + SourceProto.Source.newBuilder() + .setType(SourceType.KAFKA) + .setKafkaSourceConfig( + KafkaSourceConfig.newBuilder() + .setBootstrapServers("kafka:9092") + .setTopic("mytopic")) + .build(); + + oldFeatureSetProto = + FeatureSetProto.FeatureSet.newBuilder() + .setSpec( + FeatureSetSpec.newBuilder() + .setName("featureSet") + .setProject("project") + .setMaxAge(Duration.newBuilder().setSeconds(100)) + .setSource(oldSource) + .addFeatures( + FeatureSpec.newBuilder().setName("feature1").setValueType(Enum.INT64)) + .addFeatures( + FeatureSpec.newBuilder().setName("feature2").setValueType(Enum.STRING)) + .addEntities( + EntitySpec.newBuilder().setName("entity").setValueType(Enum.STRING)) + .build()) + .build(); + } + + @Test + public void shouldUpdateFromProto() throws InvalidProtocolBufferException { + SourceProto.Source newSource = + SourceProto.Source.newBuilder() + .setType(SourceType.KAFKA) + .setKafkaSourceConfig( + KafkaSourceConfig.newBuilder() + .setBootstrapServers("kafka:9092") + .setTopic("mytopic-changed")) + .build(); + + FeatureSetProto.FeatureSet newFeatureSetProto = + 
FeatureSetProto.FeatureSet.newBuilder() + .setSpec( + FeatureSetSpec.newBuilder() + .setName("featureSet") + .setProject("project") + .setMaxAge(Duration.newBuilder().setSeconds(101)) + .setSource(newSource) + .addFeatures( + FeatureSpec.newBuilder() + .setName("feature1") + .setValueType(Enum.INT64) + .setIntDomain(IntDomain.newBuilder().setMax(10).setMin(0))) + .addFeatures( + FeatureSpec.newBuilder().setName("feature3").setValueType(Enum.STRING)) + .addEntities( + EntitySpec.newBuilder().setName("entity").setValueType(Enum.STRING)) + .build()) + .build(); + + FeatureSet actual = FeatureSet.fromProto(oldFeatureSetProto); + actual.updateFromProto(newFeatureSetProto); + + FeatureSet expected = FeatureSet.fromProto(newFeatureSetProto); + Feature archivedFeature = + Feature.fromProto( + FeatureSpec.newBuilder().setName("feature2").setValueType(Enum.STRING).build()); + archivedFeature.setArchived(true); + expected.addFeature(archivedFeature); + assertThat(actual, equalTo(expected)); + } + + @Test + public void shouldNotUpdateIfNoChange() throws InvalidProtocolBufferException { + FeatureSet actual = FeatureSet.fromProto(oldFeatureSetProto); + actual.setStatus(FeatureSetStatus.STATUS_READY); + actual.updateFromProto(oldFeatureSetProto); + + FeatureSet expected = FeatureSet.fromProto(oldFeatureSetProto); + expected.setStatus(FeatureSetStatus.STATUS_READY); + + assertThat(actual, equalTo(expected)); + } + + @Test + public void shouldThrowExceptionIfUpdateWithEntitiesChanged() + throws InvalidProtocolBufferException { + SourceProto.Source newSource = + SourceProto.Source.newBuilder() + .setType(SourceType.KAFKA) + .setKafkaSourceConfig( + KafkaSourceConfig.newBuilder() + .setBootstrapServers("kafka:9092") + .setTopic("mytopic-changed")) + .build(); + + FeatureSetProto.FeatureSet newFeatureSetProto = + FeatureSetProto.FeatureSet.newBuilder() + .setSpec( + FeatureSetSpec.newBuilder() + .setName("featureSet") + .setProject("project") + .setMaxAge(Duration.newBuilder().setSeconds(101)) + .setSource(newSource) + .addFeatures( + FeatureSpec.newBuilder() + .setName("feature1") + .setValueType(Enum.INT64) + .setIntDomain(IntDomain.newBuilder().setMax(10).setMin(0))) + .addFeatures( + FeatureSpec.newBuilder().setName("feature3").setValueType(Enum.STRING)) + .addEntities(EntitySpec.newBuilder().setName("entity").setValueType(Enum.FLOAT)) + .build()) + .build(); + + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage(containsString("does not match existing set of entities")); + FeatureSet existingFeatureSet = FeatureSet.fromProto(oldFeatureSetProto); + existingFeatureSet.updateFromProto(newFeatureSetProto); + } + + @Test + public void shouldThrowExceptionIfUpdateWithFeatureTypesChanged() + throws InvalidProtocolBufferException { + SourceProto.Source newSource = + SourceProto.Source.newBuilder() + .setType(SourceType.KAFKA) + .setKafkaSourceConfig( + KafkaSourceConfig.newBuilder() + .setBootstrapServers("kafka:9092") + .setTopic("mytopic-changed")) + .build(); + + FeatureSetProto.FeatureSet newFeatureSetProto = + FeatureSetProto.FeatureSet.newBuilder() + .setSpec( + FeatureSetSpec.newBuilder() + .setName("featureSet") + .setProject("project") + .setMaxAge(Duration.newBuilder().setSeconds(101)) + .setSource(newSource) + .addFeatures( + FeatureSpec.newBuilder() + .setName("feature1") + .setValueType(Enum.INT64) + .setIntDomain(IntDomain.newBuilder().setMax(10).setMin(0))) + .addFeatures( + FeatureSpec.newBuilder().setName("feature2").setValueType(Enum.FLOAT)) + 
.addEntities( + EntitySpec.newBuilder().setName("entity").setValueType(Enum.STRING)) + .build()) + .build(); + + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage( + containsString( + "You are attempting to change the type of feature feature2 from STRING to FLOAT.")); + FeatureSet existingFeatureSet = FeatureSet.fromProto(oldFeatureSetProto); + existingFeatureSet.updateFromProto(newFeatureSetProto); + } +} diff --git a/core/src/test/java/feast/core/model/JobStatusTest.java b/core/src/test/java/feast/core/model/JobStatusTest.java new file mode 100644 index 0000000000..f5c8839386 --- /dev/null +++ b/core/src/test/java/feast/core/model/JobStatusTest.java @@ -0,0 +1,45 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.core.model; + +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertThat; + +import org.junit.Test; + +public class JobStatusTest { + + @Test + public void isTerminalReturnsTrueForJobStatusWithTerminalState() { + JobStatus.getTerminalStates() + .forEach( + status -> { + assertThat(status.isTerminal(), is(true)); + assertThat(status.isTransitional(), is(false)); + }); + } + + @Test + public void isTransitionalReturnsTrueForJobStatusWithTransitionalState() { + JobStatus.getTransitionalStates() + .forEach( + status -> { + assertThat(status.isTransitional(), is(true)); + assertThat(status.isTerminal(), is(false)); + }); + } +} diff --git a/core/src/test/java/feast/core/service/AccessManagementServiceTest.java b/core/src/test/java/feast/core/service/AccessManagementServiceTest.java new file mode 100644 index 0000000000..15be203709 --- /dev/null +++ b/core/src/test/java/feast/core/service/AccessManagementServiceTest.java @@ -0,0 +1,74 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.core.service; + +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.mockito.MockitoAnnotations.initMocks; + +import feast.core.dao.ProjectRepository; +import feast.core.model.Project; +import java.util.Optional; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.mockito.Mock; + +public class AccessManagementServiceTest { + @Rule public ExpectedException expectedException = ExpectedException.none(); + // mocks + @Mock private ProjectRepository projectRepository; + // dummy models + private Project defaultProject; + private Project testProject; + + // test target + private AccessManagementService accessService; + + @Before + public void setup() { + initMocks(this); + // setup dummy models for testing + this.defaultProject = new Project(Project.DEFAULT_NAME); + this.testProject = new Project("project"); + // setup test target + when(this.projectRepository.existsById(Project.DEFAULT_NAME)).thenReturn(false); + this.accessService = new AccessManagementService(this.projectRepository); + } + + @Test + public void testDefaultProjectCreateInConstructor() { + verify(this.projectRepository).saveAndFlush(this.defaultProject); + } + + @Test + public void testArchiveProject() { + when(this.projectRepository.findById("project")).thenReturn(Optional.of(this.testProject)); + this.accessService.archiveProject("project"); + this.testProject.setArchived(true); + verify(this.projectRepository).saveAndFlush(this.testProject); + // reset archived flag + this.testProject.setArchived(false); + } + + @Test + public void shouldNotArchiveDefaultProject() { + expectedException.expect(IllegalArgumentException.class); + this.accessService.archiveProject(Project.DEFAULT_NAME); + } +} diff --git a/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java b/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java index aa71f201dd..8386efb28f 100644 --- a/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java +++ b/core/src/test/java/feast/core/service/JobCoordinatorServiceTest.java @@ -25,21 +25,10 @@ import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; +import com.google.common.collect.Lists; import com.google.protobuf.InvalidProtocolBufferException; -import feast.core.CoreServiceProto.ListFeatureSetsRequest.Filter; -import feast.core.CoreServiceProto.ListFeatureSetsResponse; -import feast.core.CoreServiceProto.ListStoresResponse; -import feast.core.FeatureSetProto; -import feast.core.FeatureSetProto.FeatureSetMeta; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.SourceProto.KafkaSourceConfig; -import feast.core.SourceProto.Source; -import feast.core.SourceProto.SourceType; -import feast.core.StoreProto; -import feast.core.StoreProto.Store.RedisConfig; -import feast.core.StoreProto.Store.StoreType; -import feast.core.StoreProto.Store.Subscription; -import feast.core.config.FeastProperties.JobUpdatesProperties; +import feast.core.config.FeastProperties; +import feast.core.config.FeastProperties.JobProperties; import feast.core.dao.FeatureSetRepository; import feast.core.dao.JobRepository; import feast.core.job.JobManager; @@ -48,7 +37,21 @@ import feast.core.model.FeatureSet; import feast.core.model.Job; import feast.core.model.JobStatus; +import feast.proto.core.CoreServiceProto.ListFeatureSetsRequest.Filter; +import feast.proto.core.CoreServiceProto.ListFeatureSetsResponse; 
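With feature set versioning removed in this change, store subscriptions and list-request filters collapse to project/name pairs throughout these tests. A minimal sketch of the new shapes, using the Subscription and Filter builders imported here (the wildcard values are illustrative):

    // Subscription no longer carries a version field.
    Subscription subscription =
        Subscription.newBuilder().setProject("*").setName("*").build();

    // ListFeatureSetsRequest.Filter likewise drops setFeatureSetVersion.
    Filter filter =
        Filter.newBuilder().setProject("*").setFeatureSetName("*").build();
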
+import feast.proto.core.CoreServiceProto.ListStoresResponse; +import feast.proto.core.FeatureSetProto; +import feast.proto.core.FeatureSetProto.FeatureSetMeta; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.SourceProto.KafkaSourceConfig; +import feast.proto.core.SourceProto.Source; +import feast.proto.core.SourceProto.SourceType; +import feast.proto.core.StoreProto; +import feast.proto.core.StoreProto.Store.RedisConfig; +import feast.proto.core.StoreProto.Store.StoreType; +import feast.proto.core.StoreProto.Store.Subscription; import java.util.Arrays; +import java.util.Collections; import java.util.List; import org.junit.Before; import org.junit.Rule; @@ -65,13 +68,15 @@ public class JobCoordinatorServiceTest { @Mock SpecService specService; @Mock FeatureSetRepository featureSetRepository; - private JobUpdatesProperties jobUpdatesProperties; + private FeastProperties feastProperties; @Before public void setUp() { initMocks(this); - jobUpdatesProperties = new JobUpdatesProperties(); - jobUpdatesProperties.setTimeoutSeconds(5); + feastProperties = new FeastProperties(); + JobProperties jobProperties = new JobProperties(); + jobProperties.setJobUpdateTimeoutSeconds(5); + feastProperties.setJobs(jobProperties); } @Test @@ -79,7 +84,7 @@ public void shouldDoNothingIfNoStoresFound() throws InvalidProtocolBufferExcepti when(specService.listStores(any())).thenReturn(ListStoresResponse.newBuilder().build()); JobCoordinatorService jcs = new JobCoordinatorService( - jobRepository, featureSetRepository, specService, jobManager, jobUpdatesProperties); + jobRepository, featureSetRepository, specService, jobManager, feastProperties); jcs.Poll(); verify(jobRepository, times(0)).saveAndFlush(any()); } @@ -91,21 +96,16 @@ public void shouldDoNothingIfNoMatchingFeatureSetsFound() throws InvalidProtocol .setName("test") .setType(StoreType.REDIS) .setRedisConfig(RedisConfig.newBuilder().build()) - .addSubscriptions( - Subscription.newBuilder().setProject("*").setName("*").setVersion("*").build()) + .addSubscriptions(Subscription.newBuilder().setProject("*").setName("*").build()) .build(); when(specService.listStores(any())) .thenReturn(ListStoresResponse.newBuilder().addStore(store).build()); when(specService.listFeatureSets( - Filter.newBuilder() - .setProject("*") - .setFeatureSetName("*") - .setFeatureSetVersion("*") - .build())) + Filter.newBuilder().setProject("*").setFeatureSetName("*").build())) .thenReturn(ListFeatureSetsResponse.newBuilder().build()); JobCoordinatorService jcs = new JobCoordinatorService( - jobRepository, featureSetRepository, specService, jobManager, jobUpdatesProperties); + jobRepository, featureSetRepository, specService, jobManager, feastProperties); jcs.Poll(); verify(jobRepository, times(0)).saveAndFlush(any()); } @@ -117,12 +117,7 @@ public void shouldGenerateAndSubmitJobsIfAny() throws InvalidProtocolBufferExcep .setName("test") .setType(StoreType.REDIS) .setRedisConfig(RedisConfig.newBuilder().build()) - .addSubscriptions( - Subscription.newBuilder() - .setProject("project1") - .setName("features") - .setVersion("*") - .build()) + .addSubscriptions(Subscription.newBuilder().setProject("project1").setName("*").build()) .build(); Source source = Source.newBuilder() @@ -134,60 +129,51 @@ public void shouldGenerateAndSubmitJobsIfAny() throws InvalidProtocolBufferExcep .build()) .build(); - FeatureSetProto.FeatureSet featureSet1 = + FeatureSetProto.FeatureSet featureSetProto1 = FeatureSetProto.FeatureSet.newBuilder() .setSpec( 
FeatureSetSpec.newBuilder() .setSource(source) .setProject("project1") - .setName("features") - .setVersion(1)) + .setName("features1")) .setMeta(FeatureSetMeta.newBuilder()) .build(); - FeatureSetProto.FeatureSet featureSet2 = + FeatureSet featureSet1 = FeatureSet.fromProto(featureSetProto1); + FeatureSetProto.FeatureSet featureSetProto2 = FeatureSetProto.FeatureSet.newBuilder() .setSpec( FeatureSetSpec.newBuilder() .setSource(source) .setProject("project1") - .setName("features") - .setVersion(2)) + .setName("features2")) .setMeta(FeatureSetMeta.newBuilder()) .build(); + FeatureSet featureSet2 = FeatureSet.fromProto(featureSetProto2); String extId = "ext"; - ArgumentCaptor jobArgCaptor = ArgumentCaptor.forClass(Job.class); + ArgumentCaptor> jobArgCaptor = ArgumentCaptor.forClass(List.class); Job expectedInput = new Job( "", "", - Runner.DATAFLOW.name(), + Runner.DATAFLOW, feast.core.model.Source.fromProto(source), feast.core.model.Store.fromProto(store), - Arrays.asList(FeatureSet.fromProto(featureSet1), FeatureSet.fromProto(featureSet2)), + Arrays.asList(featureSet1, featureSet2), JobStatus.PENDING); Job expected = new Job( "some_id", extId, - Runner.DATAFLOW.name(), + Runner.DATAFLOW, feast.core.model.Source.fromProto(source), feast.core.model.Store.fromProto(store), - Arrays.asList(FeatureSet.fromProto(featureSet1), FeatureSet.fromProto(featureSet2)), + Arrays.asList(featureSet1, featureSet2), JobStatus.RUNNING); - when(specService.listFeatureSets( - Filter.newBuilder() - .setProject("project1") - .setFeatureSetName("features") - .setFeatureSetVersion("*") - .build())) - .thenReturn( - ListFeatureSetsResponse.newBuilder() - .addFeatureSets(featureSet1) - .addFeatureSets(featureSet2) - .build()); + when(featureSetRepository.findAllByNameLikeAndProject_NameLikeOrderByNameAsc("%", "project1")) + .thenReturn(Lists.newArrayList(featureSet1, featureSet2)); when(specService.listStores(any())) .thenReturn(ListStoresResponse.newBuilder().addStore(store).build()); @@ -196,11 +182,11 @@ public void shouldGenerateAndSubmitJobsIfAny() throws InvalidProtocolBufferExcep JobCoordinatorService jcs = new JobCoordinatorService( - jobRepository, featureSetRepository, specService, jobManager, jobUpdatesProperties); + jobRepository, featureSetRepository, specService, jobManager, feastProperties); jcs.Poll(); - verify(jobRepository, times(1)).saveAndFlush(jobArgCaptor.capture()); - Job actual = jobArgCaptor.getValue(); - assertThat(actual, equalTo(expected)); + verify(jobRepository, times(1)).saveAll(jobArgCaptor.capture()); + List actual = jobArgCaptor.getValue(); + assertThat(actual, equalTo(Collections.singletonList(expected))); } @Test @@ -210,12 +196,7 @@ public void shouldGroupJobsBySource() throws InvalidProtocolBufferException { .setName("test") .setType(StoreType.REDIS) .setRedisConfig(RedisConfig.newBuilder().build()) - .addSubscriptions( - Subscription.newBuilder() - .setProject("project1") - .setName("features") - .setVersion("*") - .build()) + .addSubscriptions(Subscription.newBuilder().setProject("project1").setName("*").build()) .build(); Source source1 = Source.newBuilder() @@ -236,79 +217,72 @@ public void shouldGroupJobsBySource() throws InvalidProtocolBufferException { .build()) .build(); - FeatureSetProto.FeatureSet featureSet1 = + FeatureSetProto.FeatureSet featureSetProto1 = FeatureSetProto.FeatureSet.newBuilder() .setSpec( FeatureSetSpec.newBuilder() .setSource(source1) .setProject("project1") - .setName("features") - .setVersion(1)) + .setName("features1")) 
.setMeta(FeatureSetMeta.newBuilder()) .build(); - FeatureSetProto.FeatureSet featureSet2 = + FeatureSet featureSet1 = FeatureSet.fromProto(featureSetProto1); + + FeatureSetProto.FeatureSet featureSetProto2 = FeatureSetProto.FeatureSet.newBuilder() .setSpec( FeatureSetSpec.newBuilder() .setSource(source2) .setProject("project1") - .setName("features") - .setVersion(2)) + .setName("features2")) .setMeta(FeatureSetMeta.newBuilder()) .build(); + FeatureSet featureSet2 = FeatureSet.fromProto(featureSetProto2); Job expectedInput1 = new Job( "name1", "", - Runner.DATAFLOW.name(), + Runner.DATAFLOW, feast.core.model.Source.fromProto(source1), feast.core.model.Store.fromProto(store), - Arrays.asList(FeatureSet.fromProto(featureSet1)), + Arrays.asList(featureSet1), JobStatus.PENDING); Job expected1 = new Job( "name1", "extId1", - Runner.DATAFLOW.name(), + Runner.DATAFLOW, feast.core.model.Source.fromProto(source1), feast.core.model.Store.fromProto(store), - Arrays.asList(FeatureSet.fromProto(featureSet1)), + Arrays.asList(featureSet1), JobStatus.RUNNING); Job expectedInput2 = new Job( "", "extId2", - Runner.DATAFLOW.name(), + Runner.DATAFLOW, feast.core.model.Source.fromProto(source2), feast.core.model.Store.fromProto(store), - Arrays.asList(FeatureSet.fromProto(featureSet2)), + Arrays.asList(featureSet2), JobStatus.PENDING); Job expected2 = new Job( "name2", "extId2", - Runner.DATAFLOW.name(), + Runner.DATAFLOW, feast.core.model.Source.fromProto(source2), feast.core.model.Store.fromProto(store), - Arrays.asList(FeatureSet.fromProto(featureSet2)), + Arrays.asList(featureSet2), JobStatus.RUNNING); - ArgumentCaptor jobArgCaptor = ArgumentCaptor.forClass(Job.class); + ArgumentCaptor> jobArgCaptor = ArgumentCaptor.forClass(List.class); + + when(featureSetRepository.findAllByNameLikeAndProject_NameLikeOrderByNameAsc("%", "project1")) + .thenReturn(Lists.newArrayList(featureSet1, featureSet2)); - when(specService.listFeatureSets( - Filter.newBuilder() - .setProject("project1") - .setFeatureSetName("features") - .setFeatureSetVersion("*") - .build())) - .thenReturn( - ListFeatureSetsResponse.newBuilder() - .addFeatureSets(featureSet1) - .addFeatureSets(featureSet2) - .build()); when(specService.listStores(any())) .thenReturn(ListStoresResponse.newBuilder().addStore(store).build()); @@ -318,11 +292,11 @@ public void shouldGroupJobsBySource() throws InvalidProtocolBufferException { JobCoordinatorService jcs = new JobCoordinatorService( - jobRepository, featureSetRepository, specService, jobManager, jobUpdatesProperties); + jobRepository, featureSetRepository, specService, jobManager, feastProperties); jcs.Poll(); - verify(jobRepository, times(2)).saveAndFlush(jobArgCaptor.capture()); - List actual = jobArgCaptor.getAllValues(); + verify(jobRepository, times(1)).saveAll(jobArgCaptor.capture()); + List actual = jobArgCaptor.getValue(); assertThat(actual.get(0), equalTo(expected1)); assertThat(actual.get(1), equalTo(expected2)); diff --git a/core/src/test/java/feast/core/service/JobServiceTest.java b/core/src/test/java/feast/core/service/JobServiceTest.java index c0e90ca43f..ff056287f9 100644 --- a/core/src/test/java/feast/core/service/JobServiceTest.java +++ b/core/src/test/java/feast/core/service/JobServiceTest.java @@ -26,31 +26,23 @@ import static org.mockito.MockitoAnnotations.initMocks; import com.google.protobuf.InvalidProtocolBufferException; -import feast.core.CoreServiceProto.ListFeatureSetsRequest; -import feast.core.CoreServiceProto.ListFeatureSetsResponse; -import 
feast.core.CoreServiceProto.ListIngestionJobsRequest; -import feast.core.CoreServiceProto.ListIngestionJobsResponse; -import feast.core.CoreServiceProto.RestartIngestionJobRequest; -import feast.core.CoreServiceProto.RestartIngestionJobResponse; -import feast.core.CoreServiceProto.StopIngestionJobRequest; -import feast.core.CoreServiceProto.StopIngestionJobResponse; -import feast.core.FeatureSetProto.FeatureSetStatus; -import feast.core.FeatureSetReferenceProto.FeatureSetReference; -import feast.core.IngestionJobProto.IngestionJob; -import feast.core.SourceProto.KafkaSourceConfig; -import feast.core.SourceProto.SourceType; -import feast.core.StoreProto.Store.RedisConfig; -import feast.core.StoreProto.Store.StoreType; import feast.core.dao.JobRepository; import feast.core.job.JobManager; import feast.core.job.Runner; -import feast.core.model.FeatureSet; -import feast.core.model.Field; -import feast.core.model.Job; -import feast.core.model.JobStatus; -import feast.core.model.Source; -import feast.core.model.Store; -import feast.types.ValueProto.ValueType.Enum; +import feast.core.model.*; +import feast.proto.core.CoreServiceProto.ListFeatureSetsRequest; +import feast.proto.core.CoreServiceProto.ListFeatureSetsResponse; +import feast.proto.core.CoreServiceProto.ListIngestionJobsRequest; +import feast.proto.core.CoreServiceProto.ListIngestionJobsResponse; +import feast.proto.core.CoreServiceProto.RestartIngestionJobRequest; +import feast.proto.core.CoreServiceProto.RestartIngestionJobResponse; +import feast.proto.core.CoreServiceProto.StopIngestionJobRequest; +import feast.proto.core.CoreServiceProto.StopIngestionJobResponse; +import feast.proto.core.FeatureSetReferenceProto.FeatureSetReference; +import feast.proto.core.IngestionJobProto.IngestionJob; +import feast.proto.core.StoreProto.Store.RedisConfig; +import feast.proto.core.StoreProto.Store.StoreType; +import feast.proto.types.ValueProto.ValueType.Enum; import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; @@ -77,21 +69,13 @@ public class JobServiceTest { // test target public JobService jobService; - /* unit test setup */ @Before public void setup() { initMocks(this); // create mock objects for testing // fake data source - this.dataSource = - new Source( - SourceType.KAFKA, - KafkaSourceConfig.newBuilder() - .setBootstrapServers("kafka:9092") - .setTopic("my-topic") - .build(), - true); + this.dataSource = TestObjectFactory.defaultSource; // fake data store this.dataStore = new Store( @@ -122,7 +106,6 @@ public void setup() { new JobService(this.jobRepository, this.specService, Arrays.asList(this.jobManager)); } - // setup fake spec service public void setupSpecService() { try { ListFeatureSetsResponse response = @@ -139,7 +122,6 @@ public void setupSpecService() { } } - // setup fake job repository public void setupJobRepository() { when(this.jobRepository.findById(this.job.getId())).thenReturn(Optional.of(this.job)); when(this.jobRepository.findByStoreName(this.dataStore.getName())) @@ -149,28 +131,19 @@ public void setupJobRepository() { when(this.jobRepository.findAll()).thenReturn(Arrays.asList(this.job)); } - // TODO: setup fake job manager public void setupJobManager() { when(this.jobManager.getRunnerType()).thenReturn(Runner.DATAFLOW); when(this.jobManager.restartJob(this.job)) .thenReturn(this.newDummyJob(this.job.getId(), this.job.getExtId(), JobStatus.PENDING)); } - // dummy model constructorss private FeatureSet newDummyFeatureSet(String name, int version, String project) { - Field feature = new 
Field(name + "_feature", Enum.INT64); - Field entity = new Field(name + "_entity", Enum.STRING); + Feature feature = TestObjectFactory.CreateFeature(name + "_feature", Enum.INT64); + Entity entity = TestObjectFactory.CreateEntity(name + "_entity", Enum.STRING); FeatureSet fs = - new FeatureSet( - name, - project, - version, - 100L, - Arrays.asList(entity), - Arrays.asList(feature), - this.dataSource, - FeatureSetStatus.STATUS_READY); + TestObjectFactory.CreateFeatureSet( + name, project, Arrays.asList(entity), Arrays.asList(feature)); fs.setCreated(Date.from(Instant.ofEpochSecond(10L))); return fs; } @@ -179,7 +152,7 @@ private Job newDummyJob(String id, String extId, JobStatus status) { return new Job( id, extId, - Runner.DATAFLOW.name(), + Runner.DATAFLOW, this.dataSource, this.dataStore, Arrays.asList(this.featureSet), @@ -190,7 +163,6 @@ private List newDummyFeatureSetReferences() { return Arrays.asList( // all provided: name, version and project FeatureSetReference.newBuilder() - .setVersion(this.featureSet.getVersion()) .setName(this.featureSet.getName()) .setProject(this.featureSet.getProject().toString()) .build(), @@ -202,10 +174,7 @@ private List newDummyFeatureSetReferences() { .build(), // name and version - FeatureSetReference.newBuilder() - .setName(this.featureSet.getName()) - .setVersion(this.featureSet.getVersion()) - .build()); + FeatureSetReference.newBuilder().setName(this.featureSet.getName()).build()); } private List newDummyListRequestFilters() { @@ -214,25 +183,21 @@ private List newDummyListRequestFilters() { ListFeatureSetsRequest.Filter.newBuilder() .setFeatureSetName(this.featureSet.getName()) .setProject(this.featureSet.getProject().toString()) - .setFeatureSetVersion(String.valueOf(this.featureSet.getVersion())) .build(), // name and project ListFeatureSetsRequest.Filter.newBuilder() .setFeatureSetName(this.featureSet.getName()) .setProject(this.featureSet.getProject().toString()) - .setFeatureSetVersion("*") .build(), // name and project ListFeatureSetsRequest.Filter.newBuilder() .setFeatureSetName(this.featureSet.getName()) .setProject("*") - .setFeatureSetVersion(String.valueOf(this.featureSet.getVersion())) .build()); } - /* unit tests */ private ListIngestionJobsResponse tryListJobs(ListIngestionJobsRequest request) { ListIngestionJobsResponse response = null; try { @@ -245,7 +210,6 @@ private ListIngestionJobsResponse tryListJobs(ListIngestionJobsRequest request) return response; } - // list jobs @Test public void testListJobsById() { ListIngestionJobsRequest.Filter filter = @@ -304,7 +268,6 @@ public void testListIngestionJobByFeatureSetReference() { assertThat(this.tryListJobs(request).getJobs(0), equalTo(this.ingestionJob)); } - // stop jobs private StopIngestionJobResponse tryStopJob( StopIngestionJobRequest request, boolean expectError) { StopIngestionJobResponse response = null; @@ -341,10 +304,9 @@ public void testStopJobForId() { } @Test - public void testStopAlreadyStop() { + public void testStopAlreadyStopped() { // check that stop jobs does not trying to stop jobs that are not already stopped - List doNothingStatuses = new ArrayList<>(); - doNothingStatuses.addAll(JobStatus.getTerminalState()); + List doNothingStatuses = new ArrayList<>(JobStatus.getTerminalStates()); JobStatus prevStatus = this.job.getStatus(); for (JobStatus status : doNothingStatuses) { diff --git a/core/src/test/java/feast/core/service/SpecServiceTest.java b/core/src/test/java/feast/core/service/SpecServiceTest.java index 43a66135dc..e584ee71e0 100644 --- 
a/core/src/test/java/feast/core/service/SpecServiceTest.java +++ b/core/src/test/java/feast/core/service/SpecServiceTest.java @@ -1,6 +1,6 @@ /* * SPDX-License-Identifier: Apache-2.0 - * Copyright 2018-2019 The Feast Authors + * Copyright 2018-2020 The Feast Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -26,48 +26,34 @@ import com.google.api.client.util.Lists; import com.google.protobuf.InvalidProtocolBufferException; -import feast.core.CoreServiceProto.ApplyFeatureSetResponse; -import feast.core.CoreServiceProto.ApplyFeatureSetResponse.Status; -import feast.core.CoreServiceProto.GetFeatureSetRequest; -import feast.core.CoreServiceProto.GetFeatureSetResponse; -import feast.core.CoreServiceProto.ListFeatureSetsRequest.Filter; -import feast.core.CoreServiceProto.ListFeatureSetsResponse; -import feast.core.CoreServiceProto.ListStoresRequest; -import feast.core.CoreServiceProto.ListStoresResponse; -import feast.core.CoreServiceProto.UpdateStoreRequest; -import feast.core.CoreServiceProto.UpdateStoreResponse; -import feast.core.FeatureSetProto; -import feast.core.FeatureSetProto.EntitySpec; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.FeatureSetProto.FeatureSetStatus; -import feast.core.FeatureSetProto.FeatureSpec; -import feast.core.SourceProto.KafkaSourceConfig; -import feast.core.SourceProto.SourceType; -import feast.core.StoreProto; -import feast.core.StoreProto.Store.RedisConfig; -import feast.core.StoreProto.Store.StoreType; -import feast.core.StoreProto.Store.Subscription; import feast.core.dao.FeatureSetRepository; import feast.core.dao.ProjectRepository; import feast.core.dao.StoreRepository; import feast.core.exception.RetrievalException; -import feast.core.model.FeatureSet; -import feast.core.model.Field; -import feast.core.model.Project; -import feast.core.model.Source; -import feast.core.model.Store; -import feast.types.ValueProto.ValueType.Enum; +import feast.core.model.*; +import feast.proto.core.CoreServiceProto.ApplyFeatureSetResponse; +import feast.proto.core.CoreServiceProto.ApplyFeatureSetResponse.Status; +import feast.proto.core.CoreServiceProto.GetFeatureSetRequest; +import feast.proto.core.CoreServiceProto.GetFeatureSetResponse; +import feast.proto.core.CoreServiceProto.ListFeatureSetsRequest.Filter; +import feast.proto.core.CoreServiceProto.ListFeatureSetsResponse; +import feast.proto.core.CoreServiceProto.ListStoresRequest; +import feast.proto.core.CoreServiceProto.ListStoresResponse; +import feast.proto.core.CoreServiceProto.UpdateStoreRequest; +import feast.proto.core.CoreServiceProto.UpdateStoreResponse; +import feast.proto.core.FeatureSetProto; +import feast.proto.core.FeatureSetProto.EntitySpec; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.FeatureSetProto.FeatureSpec; +import feast.proto.core.StoreProto; +import feast.proto.core.StoreProto.Store.RedisConfig; +import feast.proto.core.StoreProto.Store.StoreType; +import feast.proto.core.StoreProto.Store.Subscription; +import feast.proto.types.ValueProto.ValueType.Enum; import java.sql.Date; import java.time.Instant; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.Map.Entry; -import java.util.Optional; import java.util.stream.Collectors; import org.junit.Before; import 
org.junit.Rule; @@ -107,64 +93,39 @@ public class SpecServiceTest { private List stores; private Source defaultSource; + // TODO: Updates update features in place, so if tests follow the wrong order they might break. + // Refactor this maybe? @Before public void setUp() { initMocks(this); - defaultSource = - new Source( - SourceType.KAFKA, - KafkaSourceConfig.newBuilder() - .setBootstrapServers("kafka:9092") - .setTopic("my-topic") - .build(), - true); - - FeatureSet featureSet1v1 = newDummyFeatureSet("f1", 1, "project1"); - FeatureSet featureSet1v2 = newDummyFeatureSet("f1", 2, "project1"); - FeatureSet featureSet1v3 = newDummyFeatureSet("f1", 3, "project1"); - FeatureSet featureSet2v1 = newDummyFeatureSet("f2", 1, "project1"); - - Field f3f1 = new Field("f3f1", Enum.INT64); - Field f3f2 = new Field("f3f2", Enum.INT64); - Field f3e1 = new Field("f3e1", Enum.STRING); - FeatureSet featureSet3v1 = - new FeatureSet( - "f3", - "project1", - 1, - 100L, - Arrays.asList(f3e1), - Arrays.asList(f3f2, f3f1), - defaultSource, - FeatureSetStatus.STATUS_READY); - - featureSets = - Arrays.asList(featureSet1v1, featureSet1v2, featureSet1v3, featureSet2v1, featureSet3v1); - when(featureSetRepository.findAll()).thenReturn(featureSets); - when(featureSetRepository.findAllByOrderByNameAscVersionAsc()).thenReturn(featureSets); + defaultSource = TestObjectFactory.defaultSource; + + FeatureSet featureSet1 = newDummyFeatureSet("f1", "project1"); + FeatureSet featureSet2 = newDummyFeatureSet("f2", "project1"); + + Feature f3f1 = TestObjectFactory.CreateFeature("f3f1", Enum.INT64); + Feature f3f2 = TestObjectFactory.CreateFeature("f3f2", Enum.INT64); + Entity f3e1 = TestObjectFactory.CreateEntity("f3e1", Enum.STRING); + FeatureSet featureSet3 = + TestObjectFactory.CreateFeatureSet( + "f3", "project1", Arrays.asList(f3e1), Arrays.asList(f3f2, f3f1)); - when(featureSetRepository.findFeatureSetByNameAndProject_NameAndVersion("f1", "project1", 1)) + FeatureSet featureSet4 = newDummyFeatureSet("f4", Project.DEFAULT_NAME); + featureSets = Arrays.asList(featureSet1, featureSet2, featureSet3, featureSet4); + + when(featureSetRepository.findAll()).thenReturn(featureSets); + when(featureSetRepository.findAllByOrderByNameAsc()).thenReturn(featureSets); + when(featureSetRepository.findFeatureSetByNameAndProject_Name("f1", "project1")) .thenReturn(featureSets.get(0)); - when(featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc( - "f1", "project1")) - .thenReturn(featureSets.subList(0, 3)); - when(featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc( - "f3", "project1")) - .thenReturn(featureSets.subList(4, 5)); - when(featureSetRepository.findFirstFeatureSetByNameLikeAndProject_NameOrderByVersionDesc( - "f1", "project1")) - .thenReturn(featureSet1v3); - when(featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc( - "f1", "project1")) - .thenReturn(featureSets.subList(0, 3)); - when(featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc( - "asd", "project1")) + when(featureSetRepository.findFeatureSetByNameAndProject_Name("f2", "project1")) + .thenReturn(featureSets.get(1)); + when(featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAsc("f1", "project1")) + .thenReturn(featureSets.subList(0, 1)); + when(featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAsc("asd", "project1")) .thenReturn(Lists.newArrayList()); - when(featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc( - "f%", 
"project1")) + when(featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAsc("f%", "project1")) .thenReturn(featureSets); - when(featureSetRepository.findAllByNameLikeAndProject_NameLikeOrderByNameAscVersionAsc( - "%", "%")) + when(featureSetRepository.findAllByNameLikeAndProject_NameLikeOrderByNameAsc("%", "%")) .thenReturn(featureSets); when(projectRepository.findAllByArchivedIsFalse()) @@ -191,11 +152,7 @@ public void shouldGetAllFeatureSetsIfOnlyWildcardsProvided() throws InvalidProtocolBufferException { ListFeatureSetsResponse actual = specService.listFeatureSets( - Filter.newBuilder() - .setFeatureSetName("*") - .setProject("*") - .setFeatureSetVersion("*") - .build()); + Filter.newBuilder().setFeatureSetName("*").setProject("*").build()); List list = new ArrayList<>(); for (FeatureSet featureSet : featureSets) { FeatureSetProto.FeatureSet toProto = featureSet.toProto(); @@ -206,48 +163,12 @@ public void shouldGetAllFeatureSetsIfOnlyWildcardsProvided() assertThat(actual, equalTo(expected)); } - @Test - public void listFeatureSetShouldFailIfFeatureSetProvidedWithoutProject() - throws InvalidProtocolBufferException { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage( - "Invalid listFeatureSetRequest, missing arguments. Must provide project, feature set name, and version."); - specService.listFeatureSets( - Filter.newBuilder().setFeatureSetName("f1").setFeatureSetVersion("1").build()); - } - - @Test - public void shouldGetAllFeatureSetsMatchingNameIfWildcardVersionProvided() - throws InvalidProtocolBufferException { - ListFeatureSetsResponse actual = - specService.listFeatureSets( - Filter.newBuilder() - .setProject("project1") - .setFeatureSetName("f1") - .setFeatureSetVersion("*") - .build()); - List expectedFeatureSets = - featureSets.stream().filter(fs -> fs.getName().equals("f1")).collect(Collectors.toList()); - List list = new ArrayList<>(); - for (FeatureSet expectedFeatureSet : expectedFeatureSets) { - FeatureSetProto.FeatureSet toProto = expectedFeatureSet.toProto(); - list.add(toProto); - } - ListFeatureSetsResponse expected = - ListFeatureSetsResponse.newBuilder().addAllFeatureSets(list).build(); - assertThat(actual, equalTo(expected)); - } - @Test public void shouldGetAllFeatureSetsMatchingNameWithWildcardSearch() throws InvalidProtocolBufferException { ListFeatureSetsResponse actual = specService.listFeatureSets( - Filter.newBuilder() - .setProject("project1") - .setFeatureSetName("f*") - .setFeatureSetVersion("*") - .build()); + Filter.newBuilder().setProject("project1").setFeatureSetName("f*").build()); List expectedFeatureSets = featureSets.stream() .filter(fs -> fs.getName().startsWith("f")) @@ -263,20 +184,12 @@ public void shouldGetAllFeatureSetsMatchingNameWithWildcardSearch() } @Test - public void shouldGetAllFeatureSetsMatchingVersionIfNoComparator() - throws InvalidProtocolBufferException { + public void shouldGetFeatureSetsByNameAndProject() throws InvalidProtocolBufferException { ListFeatureSetsResponse actual = specService.listFeatureSets( - Filter.newBuilder() - .setProject("project1") - .setFeatureSetName("f1") - .setFeatureSetVersion("1") - .build()); + Filter.newBuilder().setProject("project1").setFeatureSetName("f1").build()); List expectedFeatureSets = - featureSets.stream() - .filter(fs -> fs.getName().equals("f1")) - .filter(fs -> fs.getVersion() == 1) - .collect(Collectors.toList()); + featureSets.stream().filter(fs -> fs.getName().equals("f1")).collect(Collectors.toList()); List list = new 
ArrayList<>(); for (FeatureSet expectedFeatureSet : expectedFeatureSets) { FeatureSetProto.FeatureSet toProto = expectedFeatureSet.toProto(); @@ -287,80 +200,20 @@ public void shouldGetAllFeatureSetsMatchingVersionIfNoComparator() assertThat(actual, equalTo(expected)); } - @Test - public void shouldThrowExceptionIfGetAllFeatureSetsGivenVersionWithComparator() - throws InvalidProtocolBufferException { - expectedException.expect(IllegalArgumentException.class); - specService.listFeatureSets( - Filter.newBuilder() - .setProject("project1") - .setFeatureSetName("f1") - .setFeatureSetVersion(">1") - .build()); - } - - @Test - public void shouldGetLatestFeatureSetGivenMissingVersionFilter() - throws InvalidProtocolBufferException { - GetFeatureSetResponse actual = - specService.getFeatureSet( - GetFeatureSetRequest.newBuilder().setName("f1").setProject("project1").build()); - FeatureSet expected = featureSets.get(2); - assertThat(actual.getFeatureSet(), equalTo(expected.toProto())); - } - - @Test - public void shouldGetSpecificFeatureSetGivenSpecificVersionFilter() - throws InvalidProtocolBufferException { - when(featureSetRepository.findFeatureSetByNameAndProject_NameAndVersion("f1", "project1", 2)) - .thenReturn(featureSets.get(1)); - GetFeatureSetResponse actual = - specService.getFeatureSet( - GetFeatureSetRequest.newBuilder() - .setProject("project1") - .setName("f1") - .setVersion(2) - .build()); - FeatureSet expected = featureSets.get(1); - assertThat(actual.getFeatureSet(), equalTo(expected.toProto())); - } - @Test public void shouldThrowExceptionGivenMissingFeatureSetName() throws InvalidProtocolBufferException { expectedException.expect(IllegalArgumentException.class); expectedException.expectMessage("No feature set name provided"); - specService.getFeatureSet(GetFeatureSetRequest.newBuilder().setVersion(2).build()); + specService.getFeatureSet(GetFeatureSetRequest.newBuilder().build()); } @Test public void shouldThrowExceptionGivenMissingFeatureSet() throws InvalidProtocolBufferException { expectedException.expect(RetrievalException.class); - expectedException.expectMessage( - "Feature set with name \"f1000\" and version \"2\" could not be found."); + expectedException.expectMessage("Feature set with name \"f1000\" could not be found."); specService.getFeatureSet( - GetFeatureSetRequest.newBuilder() - .setName("f1000") - .setProject("project1") - .setVersion(2) - .build()); - } - - @Test - public void shouldThrowRetrievalExceptionGivenInvalidFeatureSetVersionComparator() - throws InvalidProtocolBufferException { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage( - "Invalid listFeatureSetRequest. 
Version must be set to \"*\" if the project name and feature set name aren't set explicitly: \n" - + "feature_set_name: \"f1\"\n" - + "feature_set_version: \">1\"\n" - + "project: \"project1\""); - specService.listFeatureSets( - Filter.newBuilder() - .setProject("project1") - .setFeatureSetName("f1") - .setFeatureSetVersion(">1") - .build()); + GetFeatureSetRequest.newBuilder().setName("f1000").setProject("project1").build()); } @Test @@ -395,10 +248,10 @@ public void shouldThrowRetrievalExceptionIfNoStoresFoundWithName() { } @Test - public void applyFeatureSetShouldReturnFeatureSetWithLatestVersionIfFeatureSetHasNotChanged() + public void applyFeatureSetShouldReturnFeatureSetIfFeatureSetHasNotChanged() throws InvalidProtocolBufferException { FeatureSetSpec incomingFeatureSetSpec = - featureSets.get(2).toProto().getSpec().toBuilder().clearVersion().build(); + featureSets.get(0).toProto().getSpec().toBuilder().build(); ApplyFeatureSetResponse applyFeatureSetResponse = specService.applyFeatureSet( @@ -406,21 +259,19 @@ public void applyFeatureSetShouldReturnFeatureSetWithLatestVersionIfFeatureSetHa verify(featureSetRepository, times(0)).save(ArgumentMatchers.any(FeatureSet.class)); assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.NO_CHANGE)); - assertThat(applyFeatureSetResponse.getFeatureSet(), equalTo(featureSets.get(2).toProto())); + assertThat(applyFeatureSetResponse.getFeatureSet(), equalTo(featureSets.get(0).toProto())); } @Test - public void applyFeatureSetShouldApplyFeatureSetWithInitVersionIfNotExists() + public void applyFeatureSetShouldApplyFeatureSetIfNotExists() throws InvalidProtocolBufferException { - when(featureSetRepository.findAllByNameLikeAndProject_NameOrderByNameAscVersionAsc( - "f2", "project1")) - .thenReturn(Lists.newArrayList()); + when(featureSetRepository.findFeatureSetByNameAndProject_Name("f2", "project1")) + .thenReturn(null); - FeatureSetProto.FeatureSet incomingFeatureSet = - newDummyFeatureSet("f2", 1, "project1").toProto(); + FeatureSetProto.FeatureSet incomingFeatureSet = newDummyFeatureSet("f2", "project1").toProto(); FeatureSetProto.FeatureSetSpec incomingFeatureSetSpec = - incomingFeatureSet.getSpec().toBuilder().clearVersion().build(); + incomingFeatureSet.getSpec().toBuilder().build(); ApplyFeatureSetResponse applyFeatureSetResponse = specService.applyFeatureSet( @@ -429,24 +280,16 @@ public void applyFeatureSetShouldApplyFeatureSetWithInitVersionIfNotExists() FeatureSetProto.FeatureSet expected = FeatureSetProto.FeatureSet.newBuilder() - .setSpec( - incomingFeatureSetSpec - .toBuilder() - .setVersion(1) - .setSource(defaultSource.toProto()) - .build()) + .setSpec(incomingFeatureSetSpec.toBuilder().setSource(defaultSource.toProto()).build()) .build(); assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.CREATED)); assertThat(applyFeatureSetResponse.getFeatureSet().getSpec(), equalTo(expected.getSpec())); - assertThat( - applyFeatureSetResponse.getFeatureSet().getSpec().getVersion(), - equalTo(expected.getSpec().getVersion())); } @Test - public void applyFeatureSetShouldIncrementFeatureSetVersionIfAlreadyExists() + public void applyFeatureSetShouldUpdateAndSaveFeatureSetIfAlreadyExists() throws InvalidProtocolBufferException { - FeatureSetProto.FeatureSet incomingFeatureSet = featureSets.get(2).toProto(); + FeatureSetProto.FeatureSet incomingFeatureSet = featureSets.get(0).toProto(); incomingFeatureSet = incomingFeatureSet .toBuilder() @@ -455,7 +298,6 @@ public void 
applyFeatureSetShouldIncrementFeatureSetVersionIfAlreadyExists() incomingFeatureSet .getSpec() .toBuilder() - .clearVersion() .addFeatures( FeatureSpec.newBuilder().setName("feature2").setValueType(Enum.STRING)) .build()) @@ -466,44 +308,27 @@ public void applyFeatureSetShouldIncrementFeatureSetVersionIfAlreadyExists() .toBuilder() .setMeta(incomingFeatureSet.getMeta().toBuilder().build()) .setSpec( - incomingFeatureSet - .getSpec() - .toBuilder() - .setVersion(4) - .setSource(defaultSource.toProto()) - .build()) + incomingFeatureSet.getSpec().toBuilder().setSource(defaultSource.toProto()).build()) .build(); ApplyFeatureSetResponse applyFeatureSetResponse = specService.applyFeatureSet(incomingFeatureSet); verify(projectRepository).saveAndFlush(ArgumentMatchers.any(Project.class)); - assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.CREATED)); + assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.UPDATED)); assertEquals( FeatureSet.fromProto(applyFeatureSetResponse.getFeatureSet()), FeatureSet.fromProto(expected)); - assertThat( - applyFeatureSetResponse.getFeatureSet().getSpec().getVersion(), - equalTo(expected.getSpec().getVersion())); } @Test public void applyFeatureSetShouldNotCreateFeatureSetIfFieldsUnordered() throws InvalidProtocolBufferException { - Field f3f1 = new Field("f3f1", Enum.INT64); - Field f3f2 = new Field("f3f2", Enum.INT64); - Field f3e1 = new Field("f3e1", Enum.STRING); - FeatureSetProto.FeatureSet incomingFeatureSet = - (new FeatureSet( - "f3", - "project1", - 5, - 100L, - Arrays.asList(f3e1), - Arrays.asList(f3f2, f3f1), - defaultSource, - FeatureSetStatus.STATUS_READY)) - .toProto(); + FeatureSet featureSet = featureSets.get(1); + List features = Lists.newArrayList(featureSet.getFeatures()); + Collections.shuffle(features); + featureSet.setFeatures(Set.copyOf(features)); + FeatureSetProto.FeatureSet incomingFeatureSet = featureSet.toProto(); ApplyFeatureSetResponse applyFeatureSetResponse = specService.applyFeatureSet(incomingFeatureSet); @@ -523,46 +348,11 @@ public void applyFeatureSetShouldNotCreateFeatureSetIfFieldsUnordered() public void applyFeatureSetShouldAcceptPresenceShapeAndDomainConstraints() throws InvalidProtocolBufferException { List entitySpecs = new ArrayList<>(); - entitySpecs.add( - EntitySpec.newBuilder() - .setName("entity1") - .setValueType(Enum.INT64) - .setPresence(FeaturePresence.getDefaultInstance()) - .setShape(FixedShape.getDefaultInstance()) - .setDomain("mydomain") - .build()); - entitySpecs.add( - EntitySpec.newBuilder() - .setName("entity2") - .setValueType(Enum.INT64) - .setGroupPresence(FeaturePresenceWithinGroup.getDefaultInstance()) - .setValueCount(ValueCount.getDefaultInstance()) - .setIntDomain(IntDomain.getDefaultInstance()) - .build()); - entitySpecs.add( - EntitySpec.newBuilder() - .setName("entity3") - .setValueType(Enum.FLOAT) - .setPresence(FeaturePresence.getDefaultInstance()) - .setValueCount(ValueCount.getDefaultInstance()) - .setFloatDomain(FloatDomain.getDefaultInstance()) - .build()); - entitySpecs.add( - EntitySpec.newBuilder() - .setName("entity4") - .setValueType(Enum.STRING) - .setPresence(FeaturePresence.getDefaultInstance()) - .setValueCount(ValueCount.getDefaultInstance()) - .setStringDomain(StringDomain.getDefaultInstance()) - .build()); - entitySpecs.add( - EntitySpec.newBuilder() - .setName("entity5") - .setValueType(Enum.BOOL) - .setPresence(FeaturePresence.getDefaultInstance()) - .setValueCount(ValueCount.getDefaultInstance()) - .setBoolDomain(BoolDomain.getDefaultInstance()) - 
.build()); + entitySpecs.add(EntitySpec.newBuilder().setName("entity1").setValueType(Enum.INT64).build()); + entitySpecs.add(EntitySpec.newBuilder().setName("entity2").setValueType(Enum.INT64).build()); + entitySpecs.add(EntitySpec.newBuilder().setName("entity3").setValueType(Enum.FLOAT).build()); + entitySpecs.add(EntitySpec.newBuilder().setName("entity4").setValueType(Enum.STRING).build()); + entitySpecs.add(EntitySpec.newBuilder().setName("entity5").setValueType(Enum.BOOL).build()); List featureSpecs = new ArrayList<>(); featureSpecs.add( @@ -630,27 +420,13 @@ public void applyFeatureSetShouldAcceptPresenceShapeAndDomainConstraints() new ArrayList<>(appliedFeatureSetSpec.getFeaturesList()); appliedFeatureSpecs.sort(Comparator.comparing(FeatureSpec::getName)); - assertEquals(appliedEntitySpecs.size(), entitySpecs.size()); - assertEquals(appliedFeatureSpecs.size(), featureSpecs.size()); - - for (int i = 0; i < appliedEntitySpecs.size(); i++) { - assertEquals(entitySpecs.get(i), appliedEntitySpecs.get(i)); - } - - for (int i = 0; i < appliedFeatureSpecs.size(); i++) { - assertEquals(featureSpecs.get(i), appliedFeatureSpecs.get(i)); - } + assertEquals(appliedEntitySpecs, entitySpecs); + assertEquals(appliedFeatureSpecs, featureSpecs); } @Test public void applyFeatureSetShouldUpdateFeatureSetWhenConstraintsAreUpdated() throws InvalidProtocolBufferException { - FeatureSetProto.FeatureSet existingFeatureSet = featureSets.get(2).toProto(); - assertThat( - "Existing feature set has version 3", existingFeatureSet.getSpec().getVersion() == 3); - assertThat( - "Existing feature set has at least 1 feature", - existingFeatureSet.getSpec().getFeaturesList().size() > 0); // Map of constraint field name -> value, e.g. "shape" -> FixedShape object. // If any of these fields are updated, SpecService should update the FeatureSet. 
@@ -675,6 +451,10 @@ public void applyFeatureSetShouldUpdateFeatureSetWhenConstraintsAreUpdated() contraintUpdates.put("time_of_day_domain", TimeOfDayDomain.getDefaultInstance()); for (Entry constraint : contraintUpdates.entrySet()) { + FeatureSet featureSet = newDummyFeatureSet("constraints", "project1"); + FeatureSetProto.FeatureSet existingFeatureSet = featureSet.toProto(); + when(featureSetRepository.findFeatureSetByNameAndProject_Name("constraints", "project1")) + .thenReturn(featureSet); String name = constraint.getKey(); Object value = constraint.getValue(); FeatureSpec newFeatureSpec = @@ -693,12 +473,8 @@ public void applyFeatureSetShouldUpdateFeatureSetWhenConstraintsAreUpdated() assertEquals( - "Response should have CREATED status when field '" + name + "' is updated", - Status.CREATED, + "Response should have UPDATED status when field '" + name + "' is updated", + Status.UPDATED, response.getStatus()); - assertEquals( - "FeatureSet should have new version when field '" + name + "' is updated", - existingFeatureSet.getSpec().getVersion() + 1, - response.getFeatureSet().getSpec().getVersion()); assertEquals( "Feature should have field '" + name + "' set correctly", constraint.getValue(), @@ -713,19 +489,12 @@ public void applyFeatureSetShouldUpdateFeatureSetWhenConstraintsAreUpdated() @Test public void applyFeatureSetShouldCreateProjectWhenNotAlreadyExists() throws InvalidProtocolBufferException { - Field f3f1 = new Field("f3f1", Enum.INT64); - Field f3f2 = new Field("f3f2", Enum.INT64); - Field f3e1 = new Field("f3e1", Enum.STRING); + Feature f3f1 = TestObjectFactory.CreateFeature("f3f1", Enum.INT64); + Feature f3f2 = TestObjectFactory.CreateFeature("f3f2", Enum.INT64); + Entity f3e1 = TestObjectFactory.CreateEntity("f3e1", Enum.STRING); FeatureSetProto.FeatureSet incomingFeatureSet = - (new FeatureSet( - "f3", - "newproject", - 5, - 100L, - Arrays.asList(f3e1), - Arrays.asList(f3f2, f3f1), - defaultSource, - FeatureSetStatus.STATUS_READY)) + TestObjectFactory.CreateFeatureSet( + "f3", "project", Arrays.asList(f3e1), Arrays.asList(f3f2, f3f1)) .toProto(); ApplyFeatureSetResponse applyFeatureSetResponse = @@ -736,22 +505,35 @@ public void applyFeatureSetShouldCreateProjectWhenNotAlreadyExists() equalTo(incomingFeatureSet.getSpec().getProject())); } + @Test + public void applyFeatureSetShouldUseDefaultProjectIfUnspecified() + throws InvalidProtocolBufferException { + Feature f3f1 = TestObjectFactory.CreateFeature("f3f1", Enum.INT64); + Feature f3f2 = TestObjectFactory.CreateFeature("f3f2", Enum.INT64); + Entity f3e1 = TestObjectFactory.CreateEntity("f3e1", Enum.STRING); + + // In proto3, an unspecified project defaults to "" + FeatureSetProto.FeatureSet incomingFeatureSet = + TestObjectFactory.CreateFeatureSet("f3", "", Arrays.asList(f3e1), Arrays.asList(f3f2, f3f1)) + .toProto(); + ApplyFeatureSetResponse applyFeatureSetResponse = + specService.applyFeatureSet(incomingFeatureSet); + assertThat(applyFeatureSetResponse.getStatus(), equalTo(Status.CREATED)); + + assertThat( + applyFeatureSetResponse.getFeatureSet().getSpec().getProject(), + equalTo(Project.DEFAULT_NAME)); + } + @Test public void applyFeatureSetShouldFailWhenProjectIsArchived() throws InvalidProtocolBufferException { - Field f3f1 = new Field("f3f1", Enum.INT64); - Field f3f2 = new Field("f3f2", Enum.INT64); - Field f3e1 = new Field("f3e1", Enum.STRING); + Feature f3f1 = TestObjectFactory.CreateFeature("f3f1", Enum.INT64); + Feature f3f2 = TestObjectFactory.CreateFeature("f3f2", Enum.INT64); + Entity f3e1 = TestObjectFactory.CreateEntity("f3e1", Enum.STRING); FeatureSetProto.FeatureSet
incomingFeatureSet = - (new FeatureSet( - "f3", - "archivedproject", - 5, - 100L, - Arrays.asList(f3e1), - Arrays.asList(f3f2, f3f1), - defaultSource, - FeatureSetStatus.STATUS_READY)) + TestObjectFactory.CreateFeatureSet( + "f3", "archivedproject", Arrays.asList(f3e1), Arrays.asList(f3f2, f3f1)) .toProto(); expectedException.expect(IllegalArgumentException.class); @@ -759,6 +541,101 @@ public void applyFeatureSetShouldFailWhenProjectIsArchived() specService.applyFeatureSet(incomingFeatureSet); } + @Test + public void applyFeatureSetShouldAcceptFeatureLabels() throws InvalidProtocolBufferException { + List entitySpecs = new ArrayList<>(); + entitySpecs.add(EntitySpec.newBuilder().setName("entity1").setValueType(Enum.INT64).build()); + + Map featureLabels0 = + new HashMap<>() { + { + put("label1", "feast1"); + } + }; + + Map featureLabels1 = + new HashMap<>() { + { + put("label1", "feast1"); + put("label2", "feast2"); + } + }; + + List> featureLabels = new ArrayList<>(); + featureLabels.add(featureLabels0); + featureLabels.add(featureLabels1); + + List featureSpecs = new ArrayList<>(); + featureSpecs.add( + FeatureSpec.newBuilder() + .setName("feature1") + .setValueType(Enum.INT64) + .putAllLabels(featureLabels.get(0)) + .build()); + featureSpecs.add( + FeatureSpec.newBuilder() + .setName("feature2") + .setValueType(Enum.INT64) + .putAllLabels(featureLabels.get(1)) + .build()); + + FeatureSetSpec featureSetSpec = + FeatureSetSpec.newBuilder() + .setProject("project1") + .setName("featureSetWithConstraints") + .addAllEntities(entitySpecs) + .addAllFeatures(featureSpecs) + .build(); + FeatureSetProto.FeatureSet featureSet = + FeatureSetProto.FeatureSet.newBuilder().setSpec(featureSetSpec).build(); + + ApplyFeatureSetResponse applyFeatureSetResponse = specService.applyFeatureSet(featureSet); + FeatureSetSpec appliedFeatureSetSpec = applyFeatureSetResponse.getFeatureSet().getSpec(); + + // appliedEntitySpecs needs to be sorted because the list returned by specService may not + // follow the order in the request + List appliedEntitySpecs = new ArrayList<>(appliedFeatureSetSpec.getEntitiesList()); + appliedEntitySpecs.sort(Comparator.comparing(EntitySpec::getName)); + + // appliedFeatureSpecs needs to be sorted because the list returned by specService may not + // follow the order in the request + List appliedFeatureSpecs = + new ArrayList<>(appliedFeatureSetSpec.getFeaturesList()); + appliedFeatureSpecs.sort(Comparator.comparing(FeatureSpec::getName)); + + var featureSpecsLabels = + featureSpecs.stream().map(e -> e.getLabelsMap()).collect(Collectors.toList()); + assertEquals(appliedEntitySpecs, entitySpecs); + assertEquals(appliedFeatureSpecs, featureSpecs); + assertEquals(featureSpecsLabels, featureLabels); + } + + @Test + public void applyFeatureSetShouldAcceptFeatureSetLabels() throws InvalidProtocolBufferException { + Map featureSetLabels = + new HashMap<>() { + { + put("description", "My precious feature set"); + } + }; + + FeatureSetSpec featureSetSpec = + FeatureSetSpec.newBuilder() + .setProject("project1") + .setName("preciousFeatureSet") + .putAllLabels(featureSetLabels) + .build(); + FeatureSetProto.FeatureSet featureSet = + FeatureSetProto.FeatureSet.newBuilder().setSpec(featureSetSpec).build(); + + ApplyFeatureSetResponse applyFeatureSetResponse = specService.applyFeatureSet(featureSet); + FeatureSetSpec appliedFeatureSetSpec = applyFeatureSetResponse.getFeatureSet().getSpec(); + + var appliedLabels = appliedFeatureSetSpec.getLabelsMap(); + + assertEquals(featureSetLabels, 
appliedLabels); + } + @Test public void shouldUpdateStoreIfConfigChanges() throws InvalidProtocolBufferException { when(storeRepository.findById("SERVING")).thenReturn(Optional.of(stores.get(0))); @@ -767,8 +644,7 @@ public void shouldUpdateStoreIfConfigChanges() throws InvalidProtocolBufferExcep .setName("SERVING") .setType(StoreType.REDIS) .setRedisConfig(RedisConfig.newBuilder()) - .addSubscriptions( - Subscription.newBuilder().setProject("project1").setName("a").setVersion(">1")) + .addSubscriptions(Subscription.newBuilder().setProject("project1").setName("a")) .build(); UpdateStoreResponse actual = specService.updateStore(UpdateStoreRequest.newBuilder().setStore(newStore).build()); @@ -799,26 +675,35 @@ public void shouldDoNothingIfNoChange() throws InvalidProtocolBufferException { } @Test - public void shouldFailIfGetFeatureSetWithoutProject() throws InvalidProtocolBufferException { - expectedException.expect(IllegalArgumentException.class); - expectedException.expectMessage("No project provided"); - specService.getFeatureSet(GetFeatureSetRequest.newBuilder().setName("f1").build()); + public void getOrListFeatureSetShouldUseDefaultProjectIfProjectUnspecified() + throws InvalidProtocolBufferException { + when(featureSetRepository.findFeatureSetByNameAndProject_Name("f4", Project.DEFAULT_NAME)) + .thenReturn(featureSets.get(3)); + FeatureSet expected = featureSets.get(3); + // check getFeatureSet() + GetFeatureSetResponse getResponse = + specService.getFeatureSet(GetFeatureSetRequest.newBuilder().setName("f4").build()); + assertThat(getResponse.getFeatureSet(), equalTo(expected.toProto())); + + // check listFeatureSets() + ListFeatureSetsResponse listResponse = + specService.listFeatureSets(Filter.newBuilder().setFeatureSetName("f4").build()); + assertThat(listResponse.getFeatureSetsList(), equalTo(Arrays.asList(expected.toProto()))); } - private FeatureSet newDummyFeatureSet(String name, int version, String project) { - Field feature = new Field("feature", Enum.INT64); - Field entity = new Field("entity", Enum.STRING); + private FeatureSet newDummyFeatureSet(String name, String project) { + FeatureSpec f1 = + FeatureSpec.newBuilder() + .setName("feature") + .setValueType(Enum.STRING) + .putLabels("key", "value") + .build(); + Feature feature = Feature.fromProto(f1); + Entity entity = TestObjectFactory.CreateEntity("entity", Enum.STRING); FeatureSet fs = - new FeatureSet( - name, - project, - version, - 100L, - Arrays.asList(entity), - Arrays.asList(feature), - defaultSource, - FeatureSetStatus.STATUS_READY); + TestObjectFactory.CreateFeatureSet( + name, project, Arrays.asList(entity), Arrays.asList(feature)); fs.setCreated(Date.from(Instant.ofEpochSecond(10L))); return fs; } @@ -828,7 +713,7 @@ private Store newDummyStore(String name) { Store store = new Store(); store.setName(name); store.setType(StoreType.REDIS.toString()); - store.setSubscriptions("*:*:*"); + store.setSubscriptions("*:*"); store.setConfig(RedisConfig.newBuilder().setPort(6379).build().toByteArray()); return store; } diff --git a/core/src/test/java/feast/core/service/TestObjectFactory.java b/core/src/test/java/feast/core/service/TestObjectFactory.java new file mode 100644 index 0000000000..40c379d3cf --- /dev/null +++ b/core/src/test/java/feast/core/service/TestObjectFactory.java @@ -0,0 +1,62 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the 
License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.core.service; + +import feast.core.model.Entity; +import feast.core.model.Feature; +import feast.core.model.FeatureSet; +import feast.core.model.Source; +import feast.proto.core.FeatureSetProto; +import feast.proto.core.SourceProto; +import feast.proto.types.ValueProto; +import java.util.HashMap; +import java.util.List; + +public class TestObjectFactory { + + public static Source defaultSource = + new Source( + SourceProto.SourceType.KAFKA, + SourceProto.KafkaSourceConfig.newBuilder() + .setBootstrapServers("kafka:9092") + .setTopic("my-topic") + .build(), + true); + + public static FeatureSet CreateFeatureSet( + String name, String project, List entities, List features) { + return new FeatureSet( + name, + project, + 100L, + entities, + features, + defaultSource, + new HashMap<>(), + FeatureSetProto.FeatureSetStatus.STATUS_READY); + } + + public static Feature CreateFeature(String name, ValueProto.ValueType.Enum valueType) { + return Feature.fromProto( + FeatureSetProto.FeatureSpec.newBuilder().setName(name).setValueType(valueType).build()); + } + + public static Entity CreateEntity(String name, ValueProto.ValueType.Enum valueType) { + return Entity.fromProto( + FeatureSetProto.EntitySpec.newBuilder().setName(name).setValueType(valueType).build()); + } +} diff --git a/core/src/test/java/feast/core/util/TypeConversionTest.java b/core/src/test/java/feast/core/util/TypeConversionTest.java index c84a252cd0..dd858082ba 100644 --- a/core/src/test/java/feast/core/util/TypeConversionTest.java +++ b/core/src/test/java/feast/core/util/TypeConversionTest.java @@ -19,16 +19,10 @@ import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; +import static org.junit.Assert.*; import com.google.protobuf.Timestamp; -import java.util.Arrays; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import org.junit.Test; public class TypeConversionTest { @@ -84,14 +78,8 @@ public void convertMapToJsonStringShouldReturnJsonStringForGivenMap() { } @Test - public void convertJsonStringToArgsShouldReturnCorrectListOfArgs() { + public void convertMapToJsonStringShouldReturnEmptyJsonForAnEmptyMap() { Map input = new HashMap<>(); - input.put("key", "value"); - input.put("key2", "value2"); - - String[] expected = new String[] {"--key=value", "--key2=value2"}; - String[] actual = TypeConversion.convertMapToArgs(input); - assertThat(actual.length, equalTo(expected.length)); - assertTrue(Arrays.asList(actual).containsAll(Arrays.asList(expected))); + assertThat(TypeConversion.convertMapToJsonString(input), equalTo("{}")); } } diff --git a/core/src/test/java/feast/core/validators/FeatureSetValidatorTest.java b/core/src/test/java/feast/core/validators/FeatureSetValidatorTest.java new file mode 100644 index 0000000000..155a52d100 --- /dev/null +++ 
b/core/src/test/java/feast/core/validators/FeatureSetValidatorTest.java @@ -0,0 +1,87 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.core.validators; + +import feast.proto.core.FeatureSetProto; +import feast.proto.types.ValueProto; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +public class FeatureSetValidatorTest { + + @Rule public final ExpectedException expectedException = ExpectedException.none(); + + @Test + public void shouldThrowExceptionForFeatureLabelsWithAnEmptyKey() { + Map featureLabels = + new HashMap<>() { + { + put("", "empty_key"); + } + }; + + List featureSpecs = new ArrayList<>(); + featureSpecs.add( + FeatureSetProto.FeatureSpec.newBuilder() + .setName("feature1") + .setValueType(ValueProto.ValueType.Enum.INT64) + .putAllLabels(featureLabels) + .build()); + + FeatureSetProto.FeatureSetSpec featureSetSpec = + FeatureSetProto.FeatureSetSpec.newBuilder() + .setProject("project1") + .setName("featureSetWithConstraints") + .addAllFeatures(featureSpecs) + .build(); + FeatureSetProto.FeatureSet featureSet = + FeatureSetProto.FeatureSet.newBuilder().setSpec(featureSetSpec).build(); + + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage("Feature label keys must not be empty"); + FeatureSetValidator.validateSpec(featureSet); + } + + @Test + public void shouldThrowExceptionForFeatureSetLabelsWithAnEmptyKey() { + + Map featureSetLabels = + new HashMap<>() { + { + put("", "empty_key"); + } + }; + + FeatureSetProto.FeatureSetSpec featureSetSpec = + FeatureSetProto.FeatureSetSpec.newBuilder() + .setProject("project1") + .setName("featureSetWithConstraints") + .putAllLabels(featureSetLabels) + .build(); + FeatureSetProto.FeatureSet featureSet = + FeatureSetProto.FeatureSet.newBuilder().setSpec(featureSetSpec).build(); + + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage("Feature set label keys must not be empty"); + FeatureSetValidator.validateSpec(featureSet); + } +} diff --git a/datatypes/java/README.md b/datatypes/java/README.md index 535fac73d2..f87a50f1bd 100644 --- a/datatypes/java/README.md +++ b/datatypes/java/README.md @@ -52,4 +52,4 @@ For developers, the publishing process is automated along with the Java SDK by it works. Artifacts are staged to Sonatype where a maintainer needs to take a release action for them to go live on Maven Central. 
-[prow task]: https://github.com/gojek/feast/blob/17e7dca8238aae4dcbf0ff9f0db5d80ef8e035cf/.prow/config.yaml#L166-L192 +[prow task]: https://github.com/feast-dev/feast/blob/17e7dca8238aae4dcbf0ff9f0db5d80ef8e035cf/.prow/config.yaml#L166-L192 diff --git a/datatypes/java/pom.xml b/datatypes/java/pom.xml index a127853258..5810a6db96 100644 --- a/datatypes/java/pom.xml +++ b/datatypes/java/pom.xml @@ -37,15 +37,6 @@ - - org.apache.maven.plugins - maven-compiler-plugin - - - 8 - - - org.apache.maven.plugins maven-dependency-plugin diff --git a/docs/.gitbook/assets/architecture.png b/docs/.gitbook/assets/architecture.png new file mode 100644 index 0000000000..bc655b60f3 Binary files /dev/null and b/docs/.gitbook/assets/architecture.png differ diff --git a/docs/.gitbook/assets/basic-architecture-diagram (2).svg b/docs/.gitbook/assets/basic-architecture-diagram (2).svg new file mode 100644 index 0000000000..b707f49046 --- /dev/null +++ b/docs/.gitbook/assets/basic-architecture-diagram (2).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/basic-architecture-diagram (3).svg b/docs/.gitbook/assets/basic-architecture-diagram (3).svg new file mode 100644 index 0000000000..b707f49046 --- /dev/null +++ b/docs/.gitbook/assets/basic-architecture-diagram (3).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/blank-diagram-4 (1).svg b/docs/.gitbook/assets/blank-diagram-4 (1).svg new file mode 100644 index 0000000000..fb5e0659e5 --- /dev/null +++ b/docs/.gitbook/assets/blank-diagram-4 (1).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/blank-diagram-4.svg b/docs/.gitbook/assets/blank-diagram-4.svg new file mode 100644 index 0000000000..fb5e0659e5 --- /dev/null +++ b/docs/.gitbook/assets/blank-diagram-4.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-docs-overview-diagram-2 (2).svg b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (2).svg new file mode 100644 index 0000000000..7f30963ec7 --- /dev/null +++ b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (2).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-docs-overview-diagram-2 (3) (1).svg b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (3) (1).svg new file mode 100644 index 0000000000..7f30963ec7 --- /dev/null +++ b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (3) (1).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/feast-docs-overview-diagram-2 (3).svg b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (3).svg new file mode 100644 index 0000000000..7f30963ec7 --- /dev/null +++ b/docs/.gitbook/assets/feast-docs-overview-diagram-2 (3).svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/.gitbook/assets/image (2) (1).png b/docs/.gitbook/assets/image (2) (1).png new file mode 100644 index 0000000000..d3b359a598 Binary files /dev/null and b/docs/.gitbook/assets/image (2) (1).png differ diff --git a/docs/.gitbook/assets/image (2).png b/docs/.gitbook/assets/image (2).png new file mode 100644 index 0000000000..d3b359a598 Binary files /dev/null and b/docs/.gitbook/assets/image (2).png differ diff --git a/docs/.gitbook/assets/image (3) (1).png b/docs/.gitbook/assets/image (3) (1).png new file mode 100644 index 0000000000..2442410112 Binary files /dev/null and b/docs/.gitbook/assets/image (3) (1).png differ diff --git a/docs/.gitbook/assets/image (3).png b/docs/.gitbook/assets/image (3).png new file mode 100644 index 
0000000000..2442410112 Binary files /dev/null and b/docs/.gitbook/assets/image (3).png differ diff --git a/docs/README.md b/docs/README.md index 9593e9f26e..aa0be6090d 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1,4 +1,4 @@ -# What is Feast? +# Introduction Feast \(**Fea**ture **St**ore\) is a tool for managing and serving machine learning features. @@ -13,7 +13,7 @@ Feast aims to: ![](.gitbook/assets/feast-docs-overview-diagram-2.svg) -**TL;DR:** Feast decouples feature engineering from feature usage. Features that are added to Feast become available immediately for training and serving. Models can retrieve the same features used in training from a low latency online store in production. +Feast decouples feature engineering from feature usage. Features that are added to Feast become available immediately for training and serving. Models can retrieve the same features used in training from a low latency online store in production. This means that new ML projects start with a process of feature selection from a catalog instead of having to do feature engineering from scratch. diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index f74a691970..aaea553e95 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -1,28 +1,34 @@ # Table of contents -* [What is Feast?](README.md) - -## Introduction - -* [Why Feast?](introduction/why-feast.md) -* [Getting Help](introduction/getting-help.md) -* [Roadmap](introduction/roadmap.md) -* [Changelog](https://github.com/gojek/feast/blob/master/CHANGELOG.md) - -## Concepts - -* [Concepts](concepts/concepts.md) - -## Installation - -* [Overview](installation/overview.md) -* [Docker Compose](installation/docker-compose.md) -* [Google Kubernetes Engine \(GKE\)](installation/gke.md) +* [Introduction](README.md) +* [Why Feast?](why-feast.md) +* [Getting Started](getting-started/README.md) + * [Deploying Feast](getting-started/deploying-feast/README.md) + * [Docker Compose](getting-started/deploying-feast/docker-compose.md) + * [Kubernetes \(GKE\)](getting-started/deploying-feast/kubernetes.md) + * [Connecting to Feast](getting-started/connecting-to-feast-1/README.md) + * [Python SDK](getting-started/connecting-to-feast-1/python-sdk.md) + * [Feast CLI](getting-started/connecting-to-feast-1/connecting-to-feast.md) +* [Roadmap](roadmap.md) +* [Changelog](https://github.com/feast-dev/feast/blob/master/CHANGELOG.md) +* [Community](getting-help.md) + +## User Guide + +* [Concepts](user-guide/overview.md) +* [Architecture](user-guide/architecture.md) +* [Feature Sets](user-guide/feature-sets.md) +* [Entities](user-guide/entities.md) +* [Features](user-guide/features.md) +* [Sources](user-guide/sources.md) +* [Data ingestion](user-guide/data-ingestion.md) +* [Stores](user-guide/stores.md) +* [Feature retrieval](user-guide/feature-retrieval.md) ## Tutorials -* [Basic](https://github.com/gojek/feast/blob/master/examples/basic/basic.ipynb) -* [Churn Prediction \(XGBoost\)](https://github.com/gojek/feast/blob/master/examples/feast-xgboost-churn-prediction-tutorial/Telecom%20Customer%20Churn%20Prediction%20%28with%20Feast%20and%20XGBoost%29.ipynb) +* [Basic Tutorial](https://github.com/feast-dev/feast/blob/master/examples/basic/basic.ipynb) +* [Churn Prediction \(XGBoost\)](https://github.com/feast-dev/feast/blob/master/examples/feast-xgboost-churn-prediction-tutorial/Telecom%20Customer%20Churn%20Prediction%20%28with%20Feast%20and%20XGBoost%29.ipynb) ## Administration @@ -30,11 +36,14 @@ ## Reference -* [Python SDK](https://api.docs.feast.dev/python/) -* 
[Go SDK](https://godoc.org/github.com/gojek/feast/sdk/go) -* [gRPC Types](https://api.docs.feast.dev/grpc/feast.types.pb.html) -* [Core gRPC API](https://api.docs.feast.dev/grpc/feast.core.pb.html) -* [Serving gRPC API](https://api.docs.feast.dev/grpc/feast.serving.pb.html) +* [Configuration Reference](reference/configuration-reference.md) +* [API](reference/api/README.md) + * [Core gRPC API](https://api.docs.feast.dev/grpc/feast.core.pb.html) + * [Serving gRPC API](https://api.docs.feast.dev/grpc/feast.serving.pb.html) + * [gRPC Types](https://api.docs.feast.dev/grpc/feast.types.pb.html) + * [Go SDK](https://godoc.org/github.com/feast-dev/feast/sdk/go) + * [Java SDK](https://javadoc.io/doc/dev.feast/feast-sdk) + * [Python SDK](https://api.docs.feast.dev/python/) ## Contributing @@ -42,4 +51,5 @@ * [Development Guide](contributing/development-guide.md) * [Style Guide](contributing/style-guide.md) * [Release Process](contributing/release-process.md) +* [Adding a New Store](contributing/adding-a-new-store.md) diff --git a/docs/administration/troubleshooting.md b/docs/administration/troubleshooting.md index a16a74d5d9..e293945acc 100644 --- a/docs/administration/troubleshooting.md +++ b/docs/administration/troubleshooting.md @@ -1,6 +1,6 @@ # Troubleshooting -If at any point in time you cannot resolve a problem, please see the [Getting Help](https://github.com/gojek/feast/tree/75f3b783e5a7c5e0217a3020422548fb0d0ce0bf/docs/getting-help.md) section for reaching out to the Feast community. +If at any point in time you cannot resolve a problem, please see the [Community](../getting-help.md) section for reaching out to the Feast community. ## How can I verify that all services are operational? diff --git a/docs/assets/feast_logo.png b/docs/assets/feast_logo.png new file mode 100644 index 0000000000..cdd97770ed Binary files /dev/null and b/docs/assets/feast_logo.png differ diff --git a/docs/contributing/adding-a-new-store-1.md b/docs/contributing/adding-a-new-store-1.md new file mode 100644 index 0000000000..56c06b9b4b --- /dev/null +++ b/docs/contributing/adding-a-new-store-1.md @@ -0,0 +1,87 @@ +# Adding a New Store + +The following guide explains the process of adding a new store through the introduction of a storage connector. + +## 1. Storage API + +Feast has an external module where storage interfaces are defined: [Storage API](https://github.com/gojek/feast/tree/master/storage/api/src/main/java/feast/storage/api) + +Feast interacts with a store at three points: + +1. **During initialization:** Store configuration is loaded into memory by Feast Serving and synchronized with Feast Core. +2. **During ingestion of feature data:** [Writer interfaces](https://github.com/gojek/feast/tree/master/storage/api/src/main/java/feast/storage/api/writer) are used by the Apache Beam ingestion jobs in order to populate stores \(historical or online\). +3. **During retrieval of feature data:** [Retrieval interfaces](https://github.com/gojek/feast/tree/master/storage/api/src/main/java/feast/storage/api/retriever) are used by Feast Serving to read data from stores, either to create training datasets or to serve online data. + +All three of these components should be implemented in order to have a complete storage connector. + +## 2. Adding a Storage Connector + +### 2.1 Initialization and configuration + +Stores are configured in Feast Serving. Feast Serving publishes its store configuration to Feast Core, after which Feast Core can start ingestion/population jobs to populate it. + +Store configuration is always in the form of a map<String, String>. The keys and configuration options for stores are defined in [protos](https://github.com/gojek/feast/blob/master/protos/feast/core/Store.proto); a new store must first be defined there.
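+
+To make the shape of this configuration concrete, here is a small, hedged sketch. The keys shown \(`host`, `port`\) are illustrative only and are not taken from `Store.proto`; use whatever keys your new store's config message defines.
+
+```java
+import java.util.Map;
+
+public class MyStoreConfigExample {
+  public static void main(String[] args) {
+    // Illustrative only: a store's configuration is just string keys mapped
+    // to string values, with the valid keys defined by its proto config message.
+    Map<String, String> myStoreConfig = Map.of(
+        "host", "localhost",
+        "port", "6379");
+    myStoreConfig.forEach((key, value) -> System.out.println(key + "=" + value));
+  }
+}
+```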
+ +Then the store must be configured to be loaded through Feast Serving. The above configuration is loaded through [FeastProperties.java](https://github.com/gojek/feast/blob/a1937c374a4e39b7a75d828e7b7c3b87a64d9d6e/serving/src/main/java/feast/serving/config/FeastProperties.java#L175). + +Once configuration is loaded, the store will then be instantiated. + +* Feast Core: The [StoreUtil.java](https://github.com/gojek/feast/blob/master/ingestion/src/main/java/feast/ingestion/utils/StoreUtil.java#L85) instantiates new stores for the purposes of feature ingestion. +* Feast Serving: The [ServingServiceConfig](https://github.com/gojek/feast/blob/a1937c374a4e39b7a75d828e7b7c3b87a64d9d6e/serving/src/main/java/feast/serving/config/ServingServiceConfig.java#L56) instantiates new stores for the purposes of retrieval. + +{% hint style="info" %} +In the future we plan to provide a plugin interface for adding stores. +{% endhint %} + +### 2.2 Feature Ingestion \(Writer\) + +Feast creates and manages ingestion/population jobs that stream in data from upstream data sources. Currently Feast only supports Kafka as a data source, meaning these jobs are all long running. Batch ingestion \(from users\) results in data being pushed to Kafka topics after which they are picked up by these "population" jobs and written to stores. + +In order for ingestion to succeed, the destination store must be writable. This means that Feast must be able to create the appropriate tables/schemas in the store and also write data from the population job into the store. + +Currently Feast Core starts and manages these population jobs that ingest data into stores \(although we are planning to move this responsibility to the serving layer\). Feast Core starts an [Apache Beam](https://beam.apache.org/) job which synchronously runs migrations on the destination store and subsequently starts consuming [FeatureRows](https://github.com/gojek/feast/blob/master/protos/feast/types/FeatureRow.proto) from Kafka and writing them into stores using a [writer](https://github.com/gojek/feast/tree/master/storage/api/src/main/java/feast/storage/api/writer). + +Below is a "happy path" of a batch ingestion process which includes a blocking step at the Python SDK. + +![](https://user-images.githubusercontent.com/6728866/74807906-91e73c00-5324-11ea-8ba5-2b43c7c5282b.png) + + + +The complete ingestion flow is executed by a [FeatureSink](https://github.com/gojek/feast/blob/master/storage/api/src/main/java/feast/storage/api/writer/FeatureSink.java). Two methods should be implemented: + +* [prepareWrite\(\)](https://github.com/gojek/feast/blob/a1937c374a4e39b7a75d828e7b7c3b87a64d9d6e/storage/api/src/main/java/feast/storage/api/writer/FeatureSink.java#L45): Sets up storage backend for writing/ingestion. This method will be called once during pipeline initialisation. Typically this is used to apply schemas. +* [writer\(\)](https://github.com/gojek/feast/blob/a1937c374a4e39b7a75d828e7b7c3b87a64d9d6e/storage/api/src/main/java/feast/storage/api/writer/FeatureSink.java#L53): Retrieves an Apache Beam PTransform that is used to write data to this store.
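+
+To make this contract concrete, below is a minimal, hedged sketch of a custom sink. `MyStoreFeatureSink` is hypothetical, and the method signatures are paraphrased from the linked FeatureSink interface, so they may differ slightly from the version of the Storage API you are building against.
+
+```java
+import feast.proto.core.FeatureSetProto;
+import feast.proto.types.FeatureRowProto.FeatureRow;
+import feast.storage.api.writer.FeatureSink;
+import feast.storage.api.writer.WriteResult;
+import org.apache.beam.sdk.transforms.PTransform;
+import org.apache.beam.sdk.values.PCollection;
+
+public class MyStoreFeatureSink implements FeatureSink {
+
+  @Override
+  public void prepareWrite(FeatureSetProto.FeatureSet featureSet) {
+    // Called once during pipeline initialisation: create tables or apply
+    // the schema for this feature set in the destination store.
+  }
+
+  @Override
+  public PTransform<PCollection<FeatureRow>, WriteResult> writer() {
+    // The returned Beam transform writes incoming FeatureRows to the store
+    // and reports successful/failed inserts through WriteResult.
+    return new PTransform<>() {
+      @Override
+      public WriteResult expand(PCollection<FeatureRow> input) {
+        throw new UnsupportedOperationException("store-specific write logic");
+      }
+    };
+  }
+}
+```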
+ +### 2.3 Feature Serving \(Retriever\) + +Feast Serving can serve both historical/batch features and online features. Depending on the store that is being added, you should implement either a historical/batch store or an online store. + +#### 2.3.1 Historical Serving + +The historical serving interface is defined through the [HistoricalRetriever](https://github.com/gojek/feast/blob/master/storage/api/src/main/java/feast/storage/api/retriever/HistoricalRetriever.java) interface. Historical retrieval is an asynchronous process. The client submits a request for a dataset to be produced, and polls until it is ready. + +![High-level flow for batch retrieval](https://user-images.githubusercontent.com/6728866/74797157-702a8c80-5305-11ea-8901-bf6f4eb075f9.png) + +The current implementation of batch retrieval starts and ends with a file \(dataset\) in a Google Cloud Storage bucket. The user ingests an entity dataset. This dataset is loaded into a store \(BigQuery\), joined to features in a point-in-time correct way, then exported again to the bucket. + +Additionally, we have also implemented a [batch retrieval method](https://github.com/gojek/feast/blob/a1937c374a4e39b7a75d828e7b7c3b87a64d9d6e/sdk/python/feast/client.py#L509) in the Python SDK. Depending on the means through which this new store will export data, this client may have to change. At the very least it would change if Google Cloud Storage isn't used as the staging bucket. + +The means through which you implement the export/import of data into the store will depend on your store. + +#### 2.3.2 Online Serving + +In the case of online serving it is necessary to implement an [OnlineRetriever](https://github.com/gojek/feast/blob/master/storage/api/src/main/java/feast/storage/api/retriever/OnlineRetriever.java). This online retriever reads rows directly and synchronously from an online database. The exact encoding strategy used to store your data is defined in the FeatureSink; the OnlineRetriever is expected to read and decode those rows, as sketched below.
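+
+The sketch below illustrates that read-and-decode responsibility. It is a simplified stand-in, not the real OnlineRetriever interface: the actual method takes Feast request types rather than plain string keys, and the `Map`-backed client here is a placeholder for your store's client library.
+
+```java
+import com.google.protobuf.InvalidProtocolBufferException;
+import feast.proto.types.FeatureRowProto.FeatureRow;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.stream.Collectors;
+
+public class MyStoreOnlineRetriever {
+
+  // Placeholder for a real online database client (e.g. a Redis client).
+  private final Map<String, byte[]> client;
+
+  public MyStoreOnlineRetriever(Map<String, byte[]> client) {
+    this.client = client;
+  }
+
+  /** Returns one FeatureRow per key, empty when the key is absent. */
+  public List<Optional<FeatureRow>> getOnlineFeatures(List<String> keys) {
+    // Key construction must mirror the encoding the FeatureSink used on write.
+    return keys.stream()
+        .map(key -> Optional.ofNullable(client.get(key)).map(this::decode))
+        .collect(Collectors.toList());
+  }
+
+  private FeatureRow decode(byte[] raw) {
+    try {
+      // Assumes rows were written as serialized FeatureRow protos.
+      return FeatureRow.parseFrom(raw);
+    } catch (InvalidProtocolBufferException e) {
+      throw new IllegalStateException("Corrupt row in online store", e);
+    }
+  }
+}
+```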
+ +## 3. Storage Connectors Examples + +Feast currently provides support for the following storage types: + +Historical storage + +* [BigQuery](https://github.com/gojek/feast/tree/master/storage/connectors/bigquery) + +Online storage + +* [Redis](https://github.com/gojek/feast/tree/master/storage/connectors/redis) +* [Redis Cluster](https://github.com/gojek/feast/tree/master/storage/connectors/rediscluster) + diff --git a/docs/contributing/adding-a-new-store.md b/docs/contributing/adding-a-new-store.md new file mode 100644 index 0000000000..56c06b9b4b --- /dev/null +++ b/docs/contributing/adding-a-new-store.md @@ -0,0 +1,87 @@ +# Adding a New Store + +The following guide explains the process of adding a new store through the introduction of a storage connector. + +## 1. Storage API + +Feast has an external module where storage interfaces are defined: [Storage API](https://github.com/gojek/feast/tree/master/storage/api/src/main/java/feast/storage/api) + +Feast interacts with a store at three points: + +1. **During initialization:** Store configuration is loaded into memory by Feast Serving and synchronized with Feast Core. +2. **During ingestion of feature data:** [Writer interfaces](https://github.com/gojek/feast/tree/master/storage/api/src/main/java/feast/storage/api/writer) are used by the Apache Beam ingestion jobs in order to populate stores \(historical or online\). +3. **During retrieval of feature data:** [Retrieval interfaces](https://github.com/gojek/feast/tree/master/storage/api/src/main/java/feast/storage/api/retriever) are used by Feast Serving to read data from stores, either to create training datasets or to serve online data. + +All three of these components should be implemented in order to have a complete storage connector. + +## 2. Adding a Storage Connector + +### 2.1 Initialization and configuration + +Stores are configured in Feast Serving. Feast Serving publishes its store configuration to Feast Core, after which Feast Core can start ingestion/population jobs to populate it. + +Store configuration is always in the form of a map<String, String>. The keys and configuration options for stores are defined in [protos](https://github.com/gojek/feast/blob/master/protos/feast/core/Store.proto); a new store must first be defined there. + +Then the store must be configured to be loaded through Feast Serving. The above configuration is loaded through [FeastProperties.java](https://github.com/gojek/feast/blob/a1937c374a4e39b7a75d828e7b7c3b87a64d9d6e/serving/src/main/java/feast/serving/config/FeastProperties.java#L175). + +Once configuration is loaded, the store will then be instantiated. + +* Feast Core: The [StoreUtil.java](https://github.com/gojek/feast/blob/master/ingestion/src/main/java/feast/ingestion/utils/StoreUtil.java#L85) instantiates new stores for the purposes of feature ingestion. +* Feast Serving: The [ServingServiceConfig](https://github.com/gojek/feast/blob/a1937c374a4e39b7a75d828e7b7c3b87a64d9d6e/serving/src/main/java/feast/serving/config/ServingServiceConfig.java#L56) instantiates new stores for the purposes of retrieval. + +{% hint style="info" %} +In the future we plan to provide a plugin interface for adding stores. +{% endhint %} + +### 2.2 Feature Ingestion \(Writer\) + +Feast creates and manages ingestion/population jobs that stream in data from upstream data sources. Currently Feast only supports Kafka as a data source, meaning these jobs are all long running. Batch ingestion \(from users\) results in data being pushed to Kafka topics after which they are picked up by these "population" jobs and written to stores. + +In order for ingestion to succeed, the destination store must be writable. This means that Feast must be able to create the appropriate tables/schemas in the store and also write data from the population job into the store. + +Currently Feast Core starts and manages these population jobs that ingest data into stores \(although we are planning to move this responsibility to the serving layer\). Feast Core starts an [Apache Beam](https://beam.apache.org/) job which synchronously runs migrations on the destination store and subsequently starts consuming [FeatureRows](https://github.com/gojek/feast/blob/master/protos/feast/types/FeatureRow.proto) from Kafka and writing them into stores using a [writer](https://github.com/gojek/feast/tree/master/storage/api/src/main/java/feast/storage/api/writer). + +Below is a "happy path" of a batch ingestion process which includes a blocking step at the Python SDK. + +![](https://user-images.githubusercontent.com/6728866/74807906-91e73c00-5324-11ea-8ba5-2b43c7c5282b.png) + + + +The complete ingestion flow is executed by a [FeatureSink](https://github.com/gojek/feast/blob/master/storage/api/src/main/java/feast/storage/api/writer/FeatureSink.java).
Two methods should be implemented: + +* [prepareWrite\(\)](https://github.com/gojek/feast/blob/a1937c374a4e39b7a75d828e7b7c3b87a64d9d6e/storage/api/src/main/java/feast/storage/api/writer/FeatureSink.java#L45): Sets up storage backend for writing/ingestion. This method will be called once during pipeline initialisation. Typically this is used to apply schemas. +* [writer\(\)](https://github.com/gojek/feast/blob/a1937c374a4e39b7a75d828e7b7c3b87a64d9d6e/storage/api/src/main/java/feast/storage/api/writer/FeatureSink.java#L53): Retrieves an Apache Beam PTransform that is used to write data to this store. + +### 2.3 Feature Serving \(Retriever\) + +Feast Serving can serve both historical/batch features and online features. Depending on the store that is being added, you should implement either a historical/batch store or an online store. + +#### 2.3.1 Historical Serving + +The historical serving interface is defined through the [HistoricalRetriever](https://github.com/gojek/feast/blob/master/storage/api/src/main/java/feast/storage/api/retriever/HistoricalRetriever.java) interface. Historical retrieval is an asynchronous process. The client submits a request for a dataset to be produced, and polls until it is ready. + +![High-level flow for batch retrieval](https://user-images.githubusercontent.com/6728866/74797157-702a8c80-5305-11ea-8901-bf6f4eb075f9.png) + +The current implementation of batch retrieval starts and ends with a file \(dataset\) in a Google Cloud Storage bucket. The user ingests an entity dataset. This dataset is loaded into a store \(BigQuery\), joined to features in a point-in-time correct way, then exported again to the bucket. + +Additionally, we have also implemented a [batch retrieval method](https://github.com/gojek/feast/blob/a1937c374a4e39b7a75d828e7b7c3b87a64d9d6e/sdk/python/feast/client.py#L509) in the Python SDK. Depending on the means through which this new store will export data, this client may have to change. At the very least it would change if Google Cloud Storage isn't used as the staging bucket. + +The means through which you implement the export/import of data into the store will depend on your store. + +#### 2.3.2 Online Serving + +In the case of online serving it is necessary to implement an [OnlineRetriever](https://github.com/gojek/feast/blob/master/storage/api/src/main/java/feast/storage/api/retriever/OnlineRetriever.java). This online retriever reads rows directly and synchronously from an online database. The exact encoding strategy used to store your data is defined in the FeatureSink; the OnlineRetriever is expected to read and decode those rows. + +## 3. Storage Connectors Examples + +Feast currently provides support for the following storage types: + +Historical storage + +* [BigQuery](https://github.com/gojek/feast/tree/master/storage/connectors/bigquery) + +Online storage + +* [Redis](https://github.com/gojek/feast/tree/master/storage/connectors/redis) +* [Redis Cluster](https://github.com/gojek/feast/tree/master/storage/connectors/rediscluster) + diff --git a/docs/contributing/contributing.md b/docs/contributing/contributing.md index 0a32ae284a..2be097d6ff 100644 --- a/docs/contributing/contributing.md +++ b/docs/contributing/contributing.md @@ -1,12 +1,10 @@ # Contribution Process -## 1. Contribution process +We use [RFCs](https://en.wikipedia.org/wiki/Request_for_Comments) and [GitHub issues](https://github.com/gojek/feast/issues) to communicate development ideas.
The simplest way to contribute to Feast is to leave comments in our [RFCs](https://drive.google.com/drive/u/0/folders/1Lj1nIeRB868oZvKTPLYqAvKQ4O0BksjY) in the [Feast Google Drive](https://drive.google.com/drive/u/0/folders/0AAe8j7ZK3sxSUk9PVA) or our GitHub issues. You will need to join our [Google Group](../getting-help.md) in order to get access. -We use [RFCs](https://en.wikipedia.org/wiki/Request_for_Comments) and [GitHub issues](https://github.com/gojek/feast/issues) to communicate development ideas. The simplest way to contribute to Feast is to leave comments in our [RFCs](https://drive.google.com/drive/u/0/folders/1Lj1nIeRB868oZvKTPLYqAvKQ4O0BksjY) in the [Feast Google Drive](https://drive.google.com/drive/u/0/folders/0AAe8j7ZK3sxSUk9PVA) or our GitHub issues. +We follow a process of [lazy consensus](http://community.apache.org/committers/lazyConsensus.html). If you believe you know what the project needs then just start development. If you are unsure about which direction to take with development then please communicate your ideas through a GitHub issue or through our [Slack Channel](../getting-help.md) before starting development. -Please communicate your ideas through a GitHub issue or through our Slack Channel before starting development. - -Please [submit a PR ](https://github.com/gojek/feast/pulls)to the master branch of the Feast repository once you are ready to submit your contribution. Code submission to Feast \(including submission from project maintainers\) require review and approval from maintainers or code owners. +Please [submit a PR](https://github.com/gojek/feast/pulls) to the master branch of the Feast repository once you are ready to submit your contribution. Code submission to Feast \(including submission from project maintainers\) requires review and approval from maintainers or code owners. PRs that are submitted by the general public need to be identified as `ok-to-test`. Once enabled, [Prow](https://github.com/kubernetes/test-infra/tree/master/prow) will run a range of tests to verify the submission, after which community members will help to review the pull request. @@ -14,529 +12,3 @@ PRs that are submitted by the general public need to be identified as `ok-to-tes Please sign the [Google CLA](https://cla.developers.google.com/) in order to have your code merged into the Feast repository. {% endhint %} -## 2. Development guide - -### 2.1 Overview - -The following guide will help you quickly run Feast in your local machine. - -The main components of Feast are: - -* **Feast Core:** Handles feature registration, starts and manages ingestion jobs and ensures that Feast internal metadata is consistent. -* **Feast Ingestion Jobs:** Subscribes to streams of FeatureRows and writes these as feature - - values to registered databases \(online, historical\) that can be read by Feast Serving. - -* **Feast Serving:** Service that handles requests for features values, either online or batch. - -### 2.**2 Requirements** - -#### 2.**2.1 Development environment** - -The following software is required for Feast development - -* Java SE Development Kit 11 -* Python version 3.6 \(or above\) and pip -* [Maven ](https://maven.apache.org/install.html)version 3.6.x - -Additionally, [grpc\_cli](https://github.com/grpc/grpc/blob/master/doc/command_line_tool.md) is useful for debugging and quick testing of gRPC endpoints.
- -#### 2.**2.2 Services** - -The following components/services are required to develop Feast: - -* **Feast Core:** Requires PostgreSQL \(version 11 and above\) to store state, and requires a Kafka \(tested on version 2.x\) setup to allow for ingestion of FeatureRows. -* **Feast Serving:** Requires Redis \(tested on version 5.x\). - -These services should be running before starting development. The following snippet will start the services using Docker. - -```bash -# Start Postgres -docker run --name postgres --rm -it -d --net host -e POSTGRES_DB=postgres -e POSTGRES_USER=postgres \ --e POSTGRES_PASSWORD=password postgres:12-alpine - -# Start Redis -docker run --name redis --rm -it --net host -d redis:5-alpine - -# Start Zookeeper (needed by Kafka) -docker run --rm \ - --net=host \ - --name=zookeeper \ - --env=ZOOKEEPER_CLIENT_PORT=2181 \ - --detach confluentinc/cp-zookeeper:5.2.1 - -# Start Kafka -docker run --rm \ - --net=host \ - --name=kafka \ - --env=KAFKA_ZOOKEEPER_CONNECT=localhost:2181 \ - --env=KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://localhost:9092 \ - --env=KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 \ - --detach confluentinc/cp-kafka:5.2.1 -``` - -### 2.3 Testing and development - -#### 2.3.1 Running unit tests - -```text -$ mvn test -``` - -#### 2.3.2 Running integration tests - -_Note: integration suite isn't yet separated from unit._ - -```text -$ mvn verify -``` - -#### 2.3.3 Running components locally - -The `core` and `serving` modules are Spring Boot applications. These may be run as usual for [the Spring Boot Maven plugin](https://docs.spring.io/spring-boot/docs/current/maven-plugin/index.html): - -```text -$ mvn --projects core spring-boot:run - -# Or for short: -$ mvn -pl core spring-boot:run -``` - -Note that you should execute `mvn` from the Feast repository root directory, as there are intermodule dependencies that Maven will not resolve if you `cd` to subdirectories to run. - -#### 2.3.4 Running from IntelliJ - -Compiling and running tests in IntelliJ should work as usual. - -Running the Spring Boot apps may work out of the box in IDEA Ultimate, which has built-in support for Spring Boot projects, but the Community Edition needs a bit of help: - -The Spring Boot Maven plugin automatically puts dependencies with `provided` scope on the runtime classpath when using `spring-boot:run`, such as its embedded Tomcat server. The "Play" buttons in the gutter or right-click menu of a `main()` method [do not do this](https://stackoverflow.com/questions/30237768/run-spring-boots-main-using-ide). - -A solution to this is: - -1. Open `View > Tool Windows > Maven` -2. Drill down to e.g. `Feast Core > Plugins > spring-boot:run`, right-click and `Create 'feast-core [spring-boot'…` -3. In the dialog that pops up, check the `Resolve Workspace artifacts` box -4. Recommended: add `-Dspring-boot.run.fork=false` to the `Command line` field to get Debug working too -5. Click `OK`. You should now be able to select this run configuration for the Play button in the main toolbar, keyboard shortcuts, etc. - -It is recommend to have IntelliJ delegate building to Maven, if this is not enabled out of the box when you import the project, for greater assurance that build behavior is consistent with CI / production builds. This is set in Preferences at `Build, Execution, Deployment > Build Tools > Maven > Runner > Delegate IDE build/run actions to Maven`. 
- -### 2.**4** Validating your setup - -The following section is a quick walk-through to test whether your local Feast deployment is functional for development purposes. - -**2.4.1 Assumptions** - -* PostgreSQL is running in `localhost:5432` and has a database called `postgres` which - - can be accessed with credentials user `postgres` and password `password`. Different database configurations can be supplied here \(`/core/src/main/resources/application.yml`\) - -* Redis is running locally and accessible from `localhost:6379` -* \(optional\) The local environment has been authentication with Google Cloud Platform and has full access to BigQuery. This is only necessary for BigQuery testing/development. - -#### 2.4.2 Clone Feast - -```bash -git clone https://github.com/gojek/feast.git && cd feast && \ -export FEAST_HOME_DIR=$(pwd) -``` - -#### 2.4.3 Starting Feast Core - -To run Feast Core locally using Maven: - -```bash -# Feast Core can be configured from the following .yml file -# $FEAST_HOME_DIR/core/src/main/resources/application.yml -mvn --projects core spring-boot:run -``` - -Test whether Feast Core is running - -```text -grpc_cli call localhost:6565 ListStores '' -``` - -The output should list **no** stores since no Feast Serving has registered its stores to Feast Core: - -```text -connecting to localhost:6565 - -Rpc succeeded with OK status -``` - -#### 2.4.4 Starting Feast Serving - -Feast Serving is configured through the `$FEAST_HOME_DIR/serving/src/main/resources/application.yml`. Each Serving deployment must be configured with a store. The default store is Redis \(used for online serving\). - -The configuration for this default store is located in a separate `.yml` file. The default location is `$FEAST_HOME_DIR/serving/sample_redis_config.yml`: - -```text -name: serving -type: REDIS -redis_config: - host: localhost - port: 6379 -subscriptions: - - name: "*" - project: "*" - version: "*" -``` - -Once Feast Serving is started, it will register its store with Feast Core \(by name\) and start to subscribe to a feature sets based on its subscription. - -Start Feast Serving GRPC server on localhost:6566 with store name `serving` - -```text -mvn --projects serving spring-boot:run -``` - -Test connectivity to Feast Serving - -```text -grpc_cli call localhost:6566 GetFeastServingInfo '' -``` - -```text -connecting to localhost:6566 -version: "0.4.2-SNAPSHOT" -type: FEAST_SERVING_TYPE_ONLINE - -Rpc succeeded with OK status -``` - -Test Feast Core to see whether it is aware of the Feast Serving deployment - -```text -grpc_cli call localhost:6565 ListStores '' -``` - -```text -connecting to localhost:6565 -store { - name: "serving" - type: REDIS - subscriptions { - name: "*" - version: "*" - project: "*" - } - redis_config { - host: "localhost" - port: 6379 - } -} - -Rpc succeeded with OK status -``` - -In order to use BigQuery as a historical store, it is necessary to start Feast Serving with a different store type. - -Copy `$FEAST_HOME_DIR/serving/sample_redis_config.yml` to the following location `$FEAST_HOME_DIR/serving/my_bigquery_config.yml` and update the configuration as below: - -```text -name: bigquery -type: BIGQUERY -bigquery_config: - project_id: YOUR_GCP_PROJECT_ID - dataset_id: YOUR_GCP_DATASET -subscriptions: - - name: "*" - version: "*" - project: "*" -``` - -Then inside `serving/src/main/resources/application.yml` modify the following key `feast.store.config-path` to point to the new store configuration. 
- -After making these changes, restart Feast Serving: - -```text -mvn --projects serving spring-boot:run -``` - -You should see two stores registered: - -```text -store { - name: "serving" - type: REDIS - subscriptions { - name: "*" - version: "*" - project: "*" - } - redis_config { - host: "localhost" - port: 6379 - } -} -store { - name: "bigquery" - type: BIGQUERY - subscriptions { - name: "*" - version: "*" - project: "*" - } - bigquery_config { - project_id: "my_project" - dataset_id: "my_bq_dataset" - } -} -``` - -#### 2.4.5 Registering a FeatureSet - -Before registering a new FeatureSet, a project is required. - -```text -grpc_cli call localhost:6565 CreateProject ' - name: "your_project_name" -' -``` - -When a feature set is successfully registered, Feast Core will start an **ingestion** job that listens for new features in the feature set. - -{% hint style="info" %} -Note that Feast currently only supports source of type `KAFKA`, so you must have access to a running Kafka broker to register a FeatureSet successfully. It is possible to omit the `source` from a Feature Set, but Feast Core will still use Kafka behind the scenes, it is simply abstracted away from the user. -{% endhint %} - -Create a new FeatureSet in Feast by sending a request to Feast Core: - -```text -# Example of registering a new driver feature set -# Note the source value, it assumes that you have access to a Kafka broker -# running on localhost:9092 - -grpc_cli call localhost:6565 ApplyFeatureSet ' -feature_set { - spec { - project: "your_project_name" - name: "driver" - version: 1 - - entities { - name: "driver_id" - value_type: INT64 - } - - features { - name: "city" - value_type: STRING - } - - source { - type: KAFKA - kafka_source_config { - bootstrap_servers: "localhost:9092" - topic: "your-kafka-topic" - } - } - } -} -' -``` - -Verify that the FeatureSet has been registered correctly. - -```text -# To check that the FeatureSet has been registered correctly. -# You should also see logs from Feast Core of the ingestion job being started -grpc_cli call localhost:6565 GetFeatureSet ' - project: "your_project_name" - name: "driver" -' -``` - -Or alternatively, list all feature sets - -```text -grpc_cli call localhost:6565 ListFeatureSets ' - filter { - project: "your_project_name" - feature_set_name: "driver" - feature_set_version: "1" - } -' -``` - -#### 2.4.6 Ingestion and Population of Feature Values - -```text -# Produce FeatureRow messages to Kafka so it will be ingested by Feast -# and written to the registered stores. -# Make sure the value here is the topic assigned to the feature set -# ... producer.send("feast-driver-features" ...) -# -# Install Python SDK to help writing FeatureRow messages to Kafka -cd $FEAST_HOMEDIR/sdk/python -pip3 install -e . -pip3 install pendulum - -# Produce FeatureRow messages to Kafka so it will be ingested by Feast -# and written to the corresponding store. -# Make sure the value here is the topic assigned to the feature set -# ... producer.send("feast-test_feature_set-features" ...) -python3 - <` property, and commit. +4. Push. For a new release branch, open a PR against master. +5. When CI passes, merge. \(Remember _not_ to delete the new release branch\). +6. Tag the merge commit with the release version, using a `v` prefix. Push the tag. +7. Bump to the next working version and append `-SNAPSHOT` in `pom.xml`. +8. Commit the POM and open a PR. +9. 
Create a [GitHub release](https://github.com/feast-dev/feast/releases) which includes a summary of important changes as well as any artifacts associated with the release. Make sure to include the same change log as added in [CHANGELOG.md](https://github.com/feast-dev/feast/blob/master/CHANGELOG.md). Use `Feast vX.Y.Z` as the title.
+10. Create one final PR to the master branch and also update its [CHANGELOG.md](https://github.com/feast-dev/feast/blob/master/CHANGELOG.md).
+
+When a tag that matches a Semantic Version string is pushed, CI will automatically build and push the relevant artifacts to their repositories or package managers \(docker images, Python wheels, etc\). JVM artifacts are promoted from Sonatype OSSRH to Maven Central, but it can take some time for them to become available.
+
+### Creating a change log
+
+We use an [open source change log generator](https://hub.docker.com/r/ferrarimarco/github-changelog-generator/) to generate change logs. The process still requires a little bit of manual effort.
+
+1. Create a GitHub token as [per these instructions](https://github.com/github-changelog-generator/github-changelog-generator#github-token). The token is used as an input argument \(`-t`\) to the changelog generator.
+2. The change log generator configuration below will look for unreleased changes on a specific branch. The branch will be `master` for a major/minor release, or a release branch \(`v0.4-branch`\) for a patch release. You will need to set the branch using the `--release-branch` argument.
+3. You should also set the `--future-release` argument. This is the version you are releasing. The version can still be changed at a later date.
+4. Update the arguments below and run the command to generate the change log to the console.
+
+```text
+docker run -it --rm ferrarimarco/github-changelog-generator \
+--user feast-dev \
+--project feast \
+--release-branch <release-branch> \
+--future-release <future-release-version> \
+--unreleased-only \
+--no-issues \
+--bug-labels kind/bug \
+--enhancement-labels kind/feature \
+--breaking-labels compat/breaking \
+-t <github-token> \
+--max-issues 1 \
+-o <output-file>
+```
+
+5. Review each change log item.
+   * Make sure that sentences are grammatically correct and well formatted \(although we will try to enforce this at the PR review stage\).
+   * Make sure that each item is categorized correctly. You will see the following categories: `Breaking changes`, `Implemented enhancements`, `Fixed bugs`, and `Merged pull requests`. Any unlabeled PRs will be found in `Merged pull requests`. It's important to make sure that any `breaking changes`, `enhancements`, or `bug fixes` are pulled up out of `merged pull requests` into the correct category. Housekeeping, tech debt clearing, infra changes, or refactoring do not count as `enhancements`. Only enhancements a user benefits from should be listed in that category.
+   * Make sure that the "Full Changelog" link is actually comparing the correct tags \(normally your released version against the previous version\).
+   * Make sure that release notes and breaking changes are present.
diff --git a/docs/contributing/style-guide.md b/docs/contributing/style-guide.md
index 322f1e3d03..1cf1c69671 100644
--- a/docs/contributing/style-guide.md
+++ b/docs/contributing/style-guide.md
@@ -1,6 +1,8 @@
 # Style Guide
 
-## 1. Java
+## 1. Language Specific Style Guides
+
+### 1.1 Java
 
 We conform to the [Google Java Style Guide](https://google.github.io/styleguide/javaguide.html).
Maven can helpfully take care of that for you before you commit:
@@ -17,11 +19,25 @@ $ mvn verify -Dspotless.check.skip
 
 If you're using IntelliJ, you can import [these code style settings](https://github.com/google/styleguide/blob/gh-pages/intellij-java-google-style.xml) if you'd like to use the IDE's reformat function as you develop.
 
-## 2. Go
+### 1.2 Go
 
 Make sure you apply `go fmt`.
 
-## 3. Python
+### 1.3 Python
 
 We use [Python Black](https://github.com/psf/black) to format our Python code prior to submission.
+
+## 2. Formatting and Linting
+
+Code can be formatted automatically by running the following command from the project root directory:
+
+```text
+make format
+```
+
+Code that is submitted through a PR or direct push will be validated with the following command:
+
+```text
+make lint
+```
+
diff --git a/docs/coverage/java/pom.xml b/docs/coverage/java/pom.xml
new file mode 100644
index 0000000000..9666b7092c
--- /dev/null
+++ b/docs/coverage/java/pom.xml
@@ -0,0 +1,105 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>dev.feast</groupId>
+        <artifactId>feast-parent</artifactId>
+        <version>${revision}</version>
+        <relativePath>../../..</relativePath>
+    </parent>
+
+    <name>Feast Coverage Java</name>
+    <artifactId>feast-coverage</artifactId>
+
+    <properties>
+        <maven.deploy.skip>true</maven.deploy.skip>
+    </properties>
+
+    <dependencies>
+
+        <dependency>
+            <groupId>dev.feast</groupId>
+            <artifactId>feast-storage-api</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>dev.feast</groupId>
+            <artifactId>feast-storage-connector-bigquery</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>dev.feast</groupId>
+            <artifactId>feast-storage-connector-redis</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>dev.feast</groupId>
+            <artifactId>feast-ingestion</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>dev.feast</groupId>
+            <artifactId>feast-core</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>dev.feast</groupId>
+            <artifactId>feast-serving</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>dev.feast</groupId>
+            <artifactId>feast-sdk</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.jacoco</groupId>
+                <artifactId>jacoco-maven-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>report-aggregate</id>
+                        <phase>prepare-package</phase>
+                        <goals>
+                            <goal>report-aggregate</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>
diff --git a/docs/getting-help.md b/docs/getting-help.md
new file mode 100644
index 0000000000..32b21461b1
--- /dev/null
+++ b/docs/getting-help.md
@@ -0,0 +1,36 @@
+# Community
+
+## Chat
+
+* Come and say hello in [\#Feast](https://join.slack.com/t/kubeflow/shared_invite/zt-cpr020z4-PfcAue_2nw67~iIDy7maAQ) over in the Kubeflow Slack.
+
+## GitHub
+
+* Feast's GitHub repo can be [found here](https://github.com/feast-dev/feast/).
+* Found a bug or need a feature? [Create an issue on GitHub](https://github.com/feast-dev/feast/issues/new).
+
+## Community Call
+
+We have a community call every 2 weeks, alternating between two times:
+
+* 11 am \(UTC + 8\)
+* 5 pm \(UTC + 8\)
+
+Please join the [feast-dev](getting-help.md#feast-development) mailing list to receive the calendar invitation.
+
+## Mailing list
+
+### Feast discussion
+
+* Google Group: [https://groups.google.com/d/forum/feast-discuss](https://groups.google.com/d/forum/feast-discuss)
+* Mailing List: [feast-discuss@googlegroups.com](mailto:feast-discuss@googlegroups.com)
+
+### Feast development
+
+* Google Group: [https://groups.google.com/d/forum/feast-dev](https://groups.google.com/d/forum/feast-dev)
+* Mailing List: [feast-dev@googlegroups.com](mailto:feast-dev@googlegroups.com)
+
+## Google Drive
+
+The Feast community also maintains a [Google Drive](https://drive.google.com/drive/u/0/folders/0AAe8j7ZK3sxSUk9PVA) with documents like RFCs, meeting notes, or roadmaps. Please join one of the above mailing lists \(feast-dev or feast-discuss\) to gain access to the drive.
+
diff --git a/docs/getting-started/README.md b/docs/getting-started/README.md
new file mode 100644
index 0000000000..9945ab197d
--- /dev/null
+++ b/docs/getting-started/README.md
@@ -0,0 +1,14 @@
+# Getting Started
+
+If you want to learn more about Feast concepts and its architecture, see the user guide.
+
+If you would like to connect to an existing Feast deployment, then click on `Connecting to Feast`:
+
+{% page-ref page="connecting-to-feast-1/" %}
+
+If you would like to deploy a new installation of Feast, then click on `Deploying Feast`:
+
+{% page-ref page="deploying-feast/" %}
+
+If you are connected to a running Feast deployment, then have a look at our [example tutorials](https://github.com/gojek/feast/tree/master/examples).
+
diff --git a/docs/getting-started/connecting-to-feast-1/README.md b/docs/getting-started/connecting-to-feast-1/README.md
new file mode 100644
index 0000000000..fd8fe8e498
--- /dev/null
+++ b/docs/getting-started/connecting-to-feast-1/README.md
@@ -0,0 +1,27 @@
+# Connecting to Feast
+
+## Python SDK
+
+* Define, register, and manage entities and features
+* Ingest data into Feast
+* Build and retrieve training datasets
+* Retrieve online features
+
+{% page-ref page="python-sdk.md" %}
+
+## Feast CLI
+
+* Define, register, and manage entities and features from the terminal
+* Ingest data into Feast
+* Manage ingestion jobs
+
+{% page-ref page="connecting-to-feast.md" %}
+
+## Online Serving Clients
+
+Feast provides three ways to retrieve data in production for online serving:
+
+* [Python SDK](https://api.docs.feast.dev/python/) \(as above\)
+* [Go Client library](https://godoc.org/github.com/feast-dev/feast/sdk/go)
+* [Java Client library](https://javadoc.io/doc/dev.feast/feast-sdk)
+
diff --git a/docs/getting-started/connecting-to-feast-1/connecting-to-feast.md b/docs/getting-started/connecting-to-feast-1/connecting-to-feast.md
new file mode 100644
index 0000000000..a2cafda0db
--- /dev/null
+++ b/docs/getting-started/connecting-to-feast-1/connecting-to-feast.md
@@ -0,0 +1,38 @@
+# Feast CLI
+
+The Feast CLI is installed through pip:
+
+```bash
+pip install feast
+```
+
+Configure the CLI to connect to your Feast Core deployment:
+
+```text
+feast config set core_url your.feast.deployment
+```
+
+{% hint style="info" %}
+By default, all configuration is stored in `~/.feast/config`.
+{% endhint %}
+
+The CLI is a wrapper around the [Feast Python SDK](python-sdk.md):
+
+```aspnet
+$ feast
+
+Usage: feast [OPTIONS] COMMAND [ARGS]...
+
+Options:
+  --help  Show this message and exit.
+
+Commands:
+  config        View and edit Feast properties
+  feature-sets  Create and manage feature sets
+  ingest        Ingest feature data into a feature set
+  projects      Create and manage projects
+  version       Displays version and connectivity information
+```
+
+
+
diff --git a/docs/getting-started/connecting-to-feast-1/python-sdk.md b/docs/getting-started/connecting-to-feast-1/python-sdk.md
new file mode 100644
index 0000000000..2fea0c31af
--- /dev/null
+++ b/docs/getting-started/connecting-to-feast-1/python-sdk.md
@@ -0,0 +1,20 @@
+# Python SDK
+
+The Feast SDK can be installed directly using pip:
+
+```bash
+pip install feast
+```
+
+Users should then be able to connect to a Feast deployment as follows:
+
+```python
+from feast import Client
+
+# Connect to an existing Feast Core deployment
+client = Client(core_url='feast.example.com:6565')
+
+# Ensure that your client is connected by printing out some feature sets
+client.list_feature_sets()
+```
+
diff --git a/docs/getting-started/deploying-feast/README.md b/docs/getting-started/deploying-feast/README.md
new file mode 100644
index 0000000000..4a9aac11ba
--- /dev/null
+++ b/docs/getting-started/deploying-feast/README.md
@@ -0,0 +1,16 @@
+# Deploying Feast
+
+## Docker Compose
+
+* Fastest way to get Feast up and running.
+* Provides a pre-installed Jupyter Notebook with sample code.
+
+{% page-ref page="docker-compose.md" %}
+
+## Kubernetes \(GKE\)
+
+* Recommended way to install Feast for production use.
+* The guide depends on BigQuery and Google Cloud Storage.
+
+{% page-ref page="kubernetes.md" %}
+
diff --git a/docs/getting-started/deploying-feast/docker-compose.md b/docs/getting-started/deploying-feast/docker-compose.md
new file mode 100644
index 0000000000..a8895a0252
--- /dev/null
+++ b/docs/getting-started/deploying-feast/docker-compose.md
@@ -0,0 +1,112 @@
+# Docker Compose
+
+### Overview
+
+This guide will bring Feast up using Docker Compose. This will allow you to:
+
+* Create, register, and manage feature sets
+* Ingest feature data into Feast
+* Retrieve features for online serving
+* Retrieve features for batch serving \(only if using Google Cloud Platform\)
+
+This guide is split into three parts:
+
+1. Setting up your environment
+2. Starting Feast with **online serving support only** \(does not require GCP\)
+3. Starting Feast with support for **both online and batch** serving \(requires GCP\)
+
+{% hint style="info" %}
+The docker compose setup uses the Direct Runner for the Apache Beam jobs that populate data stores. Running Beam with the Direct Runner means it does not need a dedicated runner cluster like Flink or Dataflow, but this comes at the cost of performance. We recommend the use of a dedicated runner when running Feast with very large workloads.
+{% endhint %}
+
+### 0. Requirements
+
+* [Docker compose](https://docs.docker.com/compose/install/) must be installed.
+* The following list of TCP ports must be free:
+  * 6565, 6566, 8888, and 9094.
+  * Alternatively it is possible to modify port mappings in `/docker-compose/docker-compose.yml`.
+* \(for batch serving only\) A [GCP service account key](https://cloud.google.com/iam/docs/creating-managing-service-account-keys) that has access to [Google Cloud Storage](https://cloud.google.com/storage) and [BigQuery](https://cloud.google.com/bigquery).
+* \(for batch serving only\) [Google Cloud SDK](https://cloud.google.com/sdk/install) installed, authenticated, and configured to the project you will use.
+
+## 1. 
Set up environment
+
+Clone the [Feast repository](https://github.com/feast-dev/feast/) and navigate to the `docker-compose` sub-directory:
+
+```bash
+git clone https://github.com/feast-dev/feast.git && \
+cd feast && export FEAST_HOME_DIR=$(pwd) && \
+cd infra/docker-compose
+```
+
+Make a copy of the `.env.sample` file:
+
+```bash
+cp .env.sample .env
+```
+
+## 2. Docker Compose for Online Serving Only
+
+### 2.1 Start Feast \(without batch retrieval support\)
+
+If you do not require batch serving, then it's possible to simply bring up Feast:
+
+```bash
+docker-compose up -d
+```
+
+A Jupyter Notebook environment is now available to use Feast:
+
+[http://localhost:8888/tree/feast/examples](http://localhost:8888/tree/feast/examples)
+
+## 3. Docker Compose for Online and Batch Serving
+
+{% hint style="info" %}
+Batch serving requires Google Cloud Platform to function, specifically Google Cloud Storage \(GCS\) and BigQuery.
+{% endhint %}
+
+### 3.1 Set up Google Cloud Platform
+
+Create a [service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts) from the GCP console and copy it to the `infra/docker-compose/gcp-service-accounts` folder:
+
+```bash
+cp my-service-account.json ${FEAST_HOME_DIR}/infra/docker-compose/gcp-service-accounts
+```
+
+Create a Google Cloud Storage bucket. Make sure that your service account above has read/write permissions to this bucket:
+
+```bash
+gsutil mb gs://my-feast-staging-bucket
+```
+
+### 3.2 Configure .env
+
+Configure the `.env` file based on your environment. At the very least you have to modify:
+
+| Parameter | Description |
+| :--- | :--- |
+| FEAST\_CORE\_GCP\_SERVICE\_ACCOUNT\_KEY | This should be your service account file name, for example `key.json`. |
+| FEAST\_BATCH\_SERVING\_GCP\_SERVICE\_ACCOUNT\_KEY | This should be your service account file name, for example `key.json`. |
+| FEAST\_JUPYTER\_GCP\_SERVICE\_ACCOUNT\_KEY | This should be your service account file name, for example `key.json`. |
+| FEAST\_JOB\_STAGING\_LOCATION | Google Cloud Storage bucket that Feast will use to stage data exports and batch retrieval requests, for example `gs://your-gcs-bucket/staging`. |
+
+### 3.3 Configure .bq-store.yml
+
+We will also need to edit the `bq-store.yml` file inside `infra/docker-compose/serving/` to set the BigQuery storage configuration, as well as the feature sets that the store subscribes to. At a minimum you will need to set:
+
+| Parameter | Description |
+| :--- | :--- |
+| bigquery\_config.project\_id | This is your [GCP project Id](https://cloud.google.com/resource-manager/docs/creating-managing-projects). |
+| bigquery\_config.dataset\_id | This is the name of the BigQuery dataset that tables will be created in. Each feature set will have one table in BigQuery. |
+
+### 3.4 Start Feast \(with batch retrieval support\)
+
+Start Feast:
+
+```bash
+docker-compose up -d
+```
+
+A Jupyter Notebook environment is now available to use Feast:
+
+[http://localhost:8888/tree/feast/examples](http://localhost:8888/tree/feast/examples)
+
diff --git a/docs/getting-started/deploying-feast/kubernetes.md b/docs/getting-started/deploying-feast/kubernetes.md
new file mode 100644
index 0000000000..a7432836f3
--- /dev/null
+++ b/docs/getting-started/deploying-feast/kubernetes.md
@@ -0,0 +1,211 @@
+# Kubernetes \(GKE\)
+
+### Overview
+
+This guide will install Feast into a Kubernetes cluster on GCP. It assumes that all of your services will run within a single Kubernetes cluster.
Once Feast is installed you will be able to: + +* Define and register features. +* Load feature data from both batch and streaming sources. +* Retrieve features for model training. +* Retrieve features for online serving. + +{% hint style="info" %} +This guide requires [Google Cloud Platform](https://cloud.google.com/) for installation. + +* [BigQuery](https://cloud.google.com/bigquery/) is used for storing historical features. +* [Google Cloud Storage](https://cloud.google.com/storage/) is used for intermediate data storage. +{% endhint %} + +## 0. Requirements + +1. [Google Cloud SDK ](https://cloud.google.com/sdk/install)installed, authenticated, and configured to the project you will use. +2. [Kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) installed. +3. [Helm](https://helm.sh/3) \(2.16.0 or greater\) installed on your local machine with Tiller installed in your cluster. Helm 3 has not been tested yet. + +## 1. Set up GCP + +First define the environmental variables that we will use throughout this installation. Please customize these to reflect your environment. + +```bash +export FEAST_GCP_PROJECT_ID=my-gcp-project +export FEAST_GCP_REGION=us-central1 +export FEAST_GCP_ZONE=us-central1-a +export FEAST_BIGQUERY_DATASET_ID=feast +export FEAST_GCS_BUCKET=${FEAST_GCP_PROJECT_ID}_feast_bucket +export FEAST_GKE_CLUSTER_NAME=feast +export FEAST_SERVICE_ACCOUNT_NAME=feast-sa +``` + +Create a Google Cloud Storage bucket for Feast to stage batch data exports: + +```bash +gsutil mb gs://${FEAST_GCS_BUCKET} +``` + +Create the service account that Feast will run as: + +```bash +gcloud iam service-accounts create ${FEAST_SERVICE_ACCOUNT_NAME} + +gcloud projects add-iam-policy-binding ${FEAST_GCP_PROJECT_ID} \ + --member serviceAccount:${FEAST_SERVICE_ACCOUNT_NAME}@${FEAST_GCP_PROJECT_ID}.iam.gserviceaccount.com \ + --role roles/editor + +gcloud iam service-accounts keys create key.json --iam-account \ +${FEAST_SERVICE_ACCOUNT_NAME}@${FEAST_GCP_PROJECT_ID}.iam.gserviceaccount.com +``` + +## 2. Set up a Kubernetes \(GKE\) cluster + +{% hint style="warning" %} +Provisioning a GKE cluster can expose your services publicly. This guide does not cover securing access to the cluster. +{% endhint %} + +Create a GKE cluster: + +```bash +gcloud container clusters create ${FEAST_GKE_CLUSTER_NAME} \ + --machine-type n1-standard-4 +``` + +Create a secret in the GKE cluster based on your local key `key.json`: + +```bash +kubectl create secret generic feast-gcp-service-account --from-file=key.json +``` + +For this guide we will use `NodePort` for exposing Feast services. In order to do so, we must find an External IP of at least one GKE node. This should be a public IP. + +```bash +export FEAST_IP=$(kubectl describe nodes | grep ExternalIP | awk '{print $2}' | head -n 1) +export FEAST_CORE_URL=${FEAST_IP}:32090 +export FEAST_ONLINE_SERVING_URL=${FEAST_IP}:32091 +export FEAST_BATCH_SERVING_URL=${FEAST_IP}:32092 +``` + +Add firewall rules to open up ports on your Google Cloud Platform project: + +```bash +gcloud compute firewall-rules create feast-core-port --allow tcp:32090 +gcloud compute firewall-rules create feast-online-port --allow tcp:32091 +gcloud compute firewall-rules create feast-batch-port --allow tcp:32092 +gcloud compute firewall-rules create feast-redis-port --allow tcp:32101 +gcloud compute firewall-rules create feast-kafka-ports --allow tcp:31090-31095 +``` + +## 3. 
Set up Helm
+
+Run the following command to provide Tiller with authorization to install Feast:
+
+```bash
+kubectl apply -f - <
+```
+
+| Option | Description | Default |
+| :--- | :--- | :--- |
+| FEAST\_CONFIG | Location of Feast configuration file | /.feast/config |
+| CONFIG\_FEAST\_ENV\_VAR\_PREFIX | Default prefix to Feast environmental variable options. Does not apply to FEAST\_CONFIG | FEAST\_ |
+| PROJECT | Default Feast project to use | default |
+| CORE\_URL | URL used to connect to Feast Core | localhost:6565 |
+| CORE\_ENABLE\_SSL | Enables TLS/SSL on connections to Feast Core | False |
+| CORE\_AUTH\_ENABLED | Enable user authentication when connecting to a Feast Core instance | False |
+| CORE\_AUTH\_TOKEN | Provide a static JWT token to authenticate with Feast Core | Null |
+| CORE\_SERVER\_SSL\_CERT | Path to certificate\(s\) used by Feast Client to authenticate TLS connection to Feast Core \(not to authenticate you as a client\). | Null |
+| SERVING\_URL | URL used to connect to Feast Serving | localhost:6566 |
+| SERVING\_ENABLE\_SSL | Enables TLS/SSL on connections to Feast Serving | False |
+| SERVING\_SERVER\_SSL\_CERT | Path to certificate\(s\) used by Feast Client to authenticate TLS connection to Feast Serving \(not to authenticate you as a client\). | None |
+| GRPC\_CONNECTION\_TIMEOUT\_DEFAULT | Default gRPC connection timeout to both Feast Serving and Feast Core \(in seconds\) | 3 |
+| GRPC\_CONNECTION\_TIMEOUT\_APPLY | Default gRPC connection timeout when sending an ApplyFeatureSet command to Feast Core \(in seconds\) | 600 |
+| BATCH\_FEATURE\_REQUEST\_WAIT\_S | Time to wait for batch feature requests before timing out \(in seconds\) | 600 |
+
+### Usage
+
+#### Configuration File
+
+Feast Configuration File \(`~/.feast/config`\)
+
+```text
+[general]
+project = default
+core_url = localhost:6565
+```
+
+#### Environmental Variables
+
+```bash
+FEAST_CORE_URL=my_feast:6565 FEAST_PROJECT=default feast projects list
+```
+
+#### Feast SDK
+
+```python
+client = Client(
+    core_url="localhost:6565",
+    project="default"
+)
+```
+
diff --git a/docs/roadmap.md b/docs/roadmap.md
new file mode 100644
index 0000000000..a047cf4125
--- /dev/null
+++ b/docs/roadmap.md
@@ -0,0 +1,31 @@
+# Roadmap
+
+## Feast 0.5 \(Technical Release\)
+
+[Discussion](https://github.com/gojek/feast/issues/527)
+
+### New functionality
+
+1. Streaming statistics and validation \(M1 from [Feature Validation RFC](https://docs.google.com/document/d/1TPmd7r4mniL9Y-V_glZaWNo5LMXLshEAUpYsohojZ-8/edit)\)
+2. Support for Redis Clusters \([\#478](https://github.com/gojek/feast/issues/478), [\#502](https://github.com/gojek/feast/issues/502)\)
+3. Add feature and feature set labels, i.e. key/value registry metadata \([\#463](https://github.com/gojek/feast/issues/463)\)
+4. Job management API \([\#302](https://github.com/gojek/feast/issues/302)\)
+
+### Technical debt, refactoring, or housekeeping
+
+1. Clean up and document all configuration options \([\#525](https://github.com/gojek/feast/issues/525)\)
+2. Externalize storage interfaces \([\#402](https://github.com/gojek/feast/issues/402)\)
+3. Reduce memory usage in Redis \([\#515](https://github.com/gojek/feast/issues/515)\)
+4. Support for handling out of order ingestion \([\#273](https://github.com/gojek/feast/issues/273)\)
+5. Remove feature versions and enable automatic data migration \([\#386](https://github.com/gojek/feast/issues/386)\) \([\#462](https://github.com/gojek/feast/issues/462)\)
+6. Tracking of batch ingestion with dataset\_id/job\_id \([\#461](https://github.com/gojek/feast/issues/461)\)
+7. Write Beam metrics after ingestion to store \(not prior\) \([\#489](https://github.com/gojek/feast/issues/489)\)
+
+## Feast 0.6 \(Feature Release\)
+
+### New functionality
+
+1. 
User authentication & authorization \([\#504](https://github.com/gojek/feast/issues/504)\)
+2. Batch statistics and validation \(M2 from [Feature Validation RFC](https://docs.google.com/document/d/1TPmd7r4mniL9Y-V_glZaWNo5LMXLshEAUpYsohojZ-8/edit)\)
+3. Online feature/entity status metadata \([\#658](https://github.com/gojek/feast/pull/658)\)
+
diff --git a/docs/user-guide/architecture.md b/docs/user-guide/architecture.md
new file mode 100644
index 0000000000..5de6fe66eb
--- /dev/null
+++ b/docs/user-guide/architecture.md
@@ -0,0 +1,41 @@
+# Architecture
+
+![Feast high-level flow](../.gitbook/assets/blank-diagram-4.svg)
+
+### **Feast Core**
+
+Feast Core is the central management service of a Feast deployment. Its role is to:
+
+* Allow users to create [entities](entities.md) and [features](features.md) through the creation and management of [feature sets](feature-sets.md).
+* Start and manage [ingestion jobs](data-ingestion.md). These jobs populate [stores](stores.md) from [sources](sources.md) based on the feature sets that are defined and the subscription\(s\) that a [store](stores.md) has.
+
+{% hint style="info" %}
+Job management may move out of Feast Core to Feast Serving in the future.
+{% endhint %}
+
+### **Feast Ingestion**
+
+Before a user ingests data into Feast, they should register one or more feature sets. These [feature sets](feature-sets.md) tell Feast where to find their data, how to ingest it, and also describe the characteristics of the data for validation purposes. Once a feature set is registered, Feast will start an Apache Beam job in order to populate a store with data from a source.
+
+In order for stores to be populated with data, users must publish the data to a [source](sources.md). Currently Feast only supports Apache Kafka as a source. Feast users \(or pipelines\) ingest batch data through the [Feast SDK](../getting-started/connecting-to-feast-1/connecting-to-feast.md) using its `ingest()` method. The SDK publishes the data straight to Kafka.
+
+Streaming systems can also ingest data into Feast. This is done by publishing to the correct Kafka topic in the expected format. Feast expects data to be in [FeatureRow.proto](https://api.docs.feast.dev/grpc/feast.types.pb.html#FeatureRow) format. The topic and brokers can be found on the feature set schema using the [Python SDK](../getting-started/connecting-to-feast-1/python-sdk.md).
+
+### **Stores**
+
+Stores are nothing more than databases used to store feature data. Feast loads data into stores through an ingestion process, after which the data can be served through the Feast Serving API. Stores are documented in the following section.
+
+{% page-ref page="stores.md" %}
+
+### **Feast Serving**
+
+ `Feast Serving` is the data access layer through which end users and production systems retrieve feature data. Each `Serving` instance is backed by a [store](stores.md).
+
+Since Feast supports multiple store types \(online, historical\) it is common to have two instances of Feast Serving deployed, one for online serving and one for historical serving. However, Feast allows for any number of `Feast Serving` deployments, meaning it is possible to deploy a `Feast Serving` deployment per production system, with its own stores and population jobs.
+
+`Serving` deployments can subscribe to a subset of feature data, meaning they do not have to consume all features known to a `Feast Core` deployment.
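As a sketch, a store configuration with a narrowed subscription might look like the following. The store name, project, and pattern here are hypothetical, and the exact subscription fields can vary between Feast versions; the format follows the store `.yml` files shown in the deployment guides:

```yaml
name: online
type: REDIS
redis_config:
  host: localhost
  port: 6379
subscriptions:
  # Subscribe only to feature sets in one project, rather than to
  # everything registered in Feast Core
  - name: "customer_*"
    project: "my_project"
```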
+
+Feature retrieval \(and feature references\) are documented in more detail in subsequent sections.
+
+{% page-ref page="feature-retrieval.md" %}
+
diff --git a/docs/user-guide/data-ingestion.md b/docs/user-guide/data-ingestion.md
new file mode 100644
index 0000000000..464a51bbfd
--- /dev/null
+++ b/docs/user-guide/data-ingestion.md
@@ -0,0 +1,126 @@
+# Data ingestion
+
+Users do not necessarily have to provide an external data source in order to load data into Feast. Feast also allows users to exclude the source when registering a feature set. The following is a valid feature set specification:
+
+```python
+feature_set = FeatureSet(
+    name="stream_feature",
+    entities=[Entity("entity_id", ValueType.INT64)],
+    features=[Feature("feature_value1", ValueType.STRING)],
+)
+```
+
+If a user does not provide a source of data, then they are required to publish data to Feast. This process is called ingestion.
+
+### Ingesting data
+
+The following example demonstrates how data ingestion works. For a full tutorial please see the [Telco Customer Churn Prediction Notebook](https://github.com/gojek/feast/blob/master/examples/feast-xgboost-churn-prediction-tutorial/Telecom%20Customer%20Churn%20Prediction%20%28with%20Feast%20and%20XGBoost%29.ipynb).
+
+1. Connect to Feast Core and load in a Pandas DataFrame.
+
+```python
+from feast import FeatureSet, Client, Entity, ValueType
+import pandas as pd
+
+# Connect to Feast core
+client = Client(core_url="feast-core.example.com")
+
+# Load in customer data
+df = pd.read_csv("customer.csv")
+```
+
+2. Create, infer, and register a feature set from the DataFrame. This is a one-off step that is required to initially register a feature set with Feast.
+
+```python
+# Create an empty feature set
+customer_churn_fs = FeatureSet("customer_churn")
+
+# Infer the schema of the feature set from the Pandas DataFrame
+customer_churn_fs.infer_fields_from_df(
+    df,
+    entities=[Entity(name='customer_id', dtype=ValueType.STRING)]
+)
+
+# Register the feature set with Feast
+client.apply(customer_churn_fs)
+```
+
+3. We can also test that the feature set was correctly registered with Feast by retrieving it again and printing it out.
+
+```python
+customer_churn_fs = client.get_feature_set('customer_churn')
+print(client.get_feature_set('customer_churn'))
+```
+
+```json
+{
+  "spec": {
+    "name": "customer_churn",
+    "entities": [
+      {
+        "name": "customer_id",
+        "valueType": "STRING"
+      }
+    ],
+    "features": [
+      {
+        "name": "churn",
+        "valueType": "INT64"
+      },
+      {
+        "name": "contract_month_to_month",
+        "valueType": "INT64"
+      },
+      {
+        "name": "streamingmovies",
+        "valueType": "INT64"
+      },
+      {
+        "name": "paperlessbilling",
+        "valueType": "INT64"
+      },
+      {
+        "name": "contract_two_year",
+        "valueType": "INT64"
+      },
+      {
+        "name": "partner",
+        "valueType": "INT64"
+      }
+    ],
+    "maxAge": "0s",
+    "source": {
+      "type": "KAFKA",
+      "kafkaSourceConfig": {
+        "bootstrapServers": "10.202.250.99:31190",
+        "topic": "feast"
+      }
+    },
+    "project": "default"
+  },
+  "meta": {
+    "createdTimestamp": "2020-03-15T07:47:52Z",
+    "status": "STATUS_READY"
+  }
+}
+```
+
+Once we are happy that the schema is correct, we can start to ingest the DataFrame into Feast.
+
+```python
+client.ingest(customer_churn_fs, df)
+```
+
+```text
+100%|██████████| 7032/7032 [00:02<00:00, 2771.19rows/s]
+Ingestion complete!
+
+Ingestion statistics:
+Success: 7032/7032 rows ingested
+```
+
+{% hint style="warning" %}
+Feast ingestion maintains the order of data that is ingested.
This means that data written later will replace data written earlier in stores. This is important to note when ingesting data that will end up in a production system.
+{% endhint %}
+
diff --git a/docs/user-guide/entities.md b/docs/user-guide/entities.md
new file mode 100644
index 0000000000..f2383e440c
--- /dev/null
+++ b/docs/user-guide/entities.md
@@ -0,0 +1,43 @@
+# Entities
+
+An entity is any domain object that can be modeled and about which information can be stored. Entities are usually recognizable concepts, either concrete or abstract, such as persons, places, things, or events which have relevance to the modeled system.
+
+More formally, an entity is an instance of an entity type. An entity type is the class of entities where entities are the instances.
+
+* Examples of entity types in the context of ride-hailing and food delivery: `customer`, `order`, `driver`, `restaurant`, `dish`, `area`.
+* A specific driver, for example a driver with ID `D011234`, would be an entity of the entity type `driver`.
+
+An entity is the object on which features are observed. For example, we could have a feature `total_trips_24h` on the driver `D011234` with a feature value of `11`.
+
+In the context of Feast, entities are important because they are used as keys when looking up feature values. Entities are also used when joining feature values between different feature sets in order to build one large data set to train a model, or to serve a model.
+
+Example entity properties:
+
+{% code title="customer\_id.yaml" %}
+```yaml
+# Entity name
+name: customer_id

+# Entity value type
+value_type: INT64
+```
+{% endcode %}
+
+Entities can be created through the [Feast SDK](../getting-started/connecting-to-feast-1/connecting-to-feast.md) as follows:
+
+```python
+from feast import Entity, ValueType, FeatureSet
+
+# Create a customer entity
+customer = Entity("customer_id", ValueType.INT64)
+
+# Create a feature set with only a single entity
+customer_feature_set = FeatureSet("customer_fs", entities=[customer])
+
+# Register the feature set with Feast
+client.apply(customer_feature_set)
+```
+
+Please see the [EntitySpec](https://api.docs.feast.dev/grpc/feast.core.pb.html#EntitySpec) for the entity specification API.
+
diff --git a/docs/user-guide/feature-retrieval.md b/docs/user-guide/feature-retrieval.md
new file mode 100644
index 0000000000..0ebf79ca88
--- /dev/null
+++ b/docs/user-guide/feature-retrieval.md
@@ -0,0 +1,128 @@
+# Feature retrieval
+
+## 1. Overview
+
+Feature retrieval \(or serving\) is the process of retrieving either historical features or online features from Feast, for the purposes of training or serving a model.
+
+Feast attempts to unify the process of retrieving features in both the historical and online case. It does this through the creation of feature references. One of the major advantages of using Feast is that you have a single semantic reference to a feature. These feature references can then be stored alongside your model and loaded into a serving layer where they can be used for online feature retrieval.
+
+## 2. Feature references
+
+In Feast, each feature can be uniquely addressed through a feature reference. A feature reference is composed of the following components:
+
+* Feature Set
+* Feature
+
+These components can be used to create a string based feature reference as follows:
+
+`<feature-set>:<feature>`
+
+Feast will attempt to infer the `feature-set` name if it is not provided, but a feature reference must always provide a `feature` name.
+
+```python
+# Feature references
+features = [
+    'partner',
+    'daily_transactions',
+    'customer_feature_set:dependents',
+    'customer_feature_set:has_phone_service',
+]
+
+target = 'churn'
+```
+
+{% hint style="info" %}
+Where Features from different Feature Sets use the same name, the Feature Set name `feature-set` is required to disambiguate which feature is specified.
+{% endhint %}
+
+Feature references only apply to a single `project`. Features cannot be retrieved across projects in a single request.
+
+## 3. Historical feature retrieval
+
+Historical feature retrieval can be done through either the Feast SDK or directly through the Feast Serving gRPC API. Below is an example of historical retrieval from the [Churn Prediction Notebook](https://github.com/gojek/feast/blob/master/examples/feast-xgboost-churn-prediction-tutorial/Telecom%20Customer%20Churn%20Prediction%20%28with%20Feast%20and%20XGBoost%29.ipynb).
+
+```python
+# Add the target variable to our feature list
+features = self._features + [self._target]
+
+# Retrieve training dataset from Feast. The "entity_df" is a dataframe that contains
+# timestamps and entity keys. In this case, it is a dataframe with two columns:
+# one timestamp column, and one customer id column.
+dataset = client.get_batch_features(
+    feature_refs=features,
+    entity_rows=entity_df
+)
+
+# Materialize the dataset object to a Pandas DataFrame.
+# Alternatively it is possible to use a file reference if the data is too large.
+df = dataset.to_dataframe()
+```
+
+{% hint style="info" %}
+When no project is specified when retrieving features with `get_batch_features()`, Feast infers that the features specified belong to the `default` project. To retrieve from another project, specify the `project` parameter when retrieving features.
+{% endhint %}
+
+In the above example, Feast does a point-in-time correct query from a single feature set. For each timestamp and entity key combination that is provided by `entity_df`, Feast determines the values of all the features in the `features` list at that respective point in time, joins those feature values to that specific entity value and timestamp, and repeats this process for all timestamps.
+
+This is called a point-in-time correct join.
+
+Feast allows users to retrieve features from any feature sets and join them together in a single response dataset. The only requirement is that the user provides the correct entities in order to look up the features.
+
+### **Point-in-time-correct Join**
+
+Below is another example of how a point-in-time-correct join works. We have two dataframes. The first is the `entity dataframe` that contains timestamps, entities, and labels. The user would like to have driver features joined onto this `entity dataframe` from the `driver dataframe` to produce an `output dataframe` that contains both labels and features. They would then like to train their model on this output.
+
+![Input 1: Entity DataFrame](https://lh3.googleusercontent.com/ecS5sqj3FHLFSm06XF11NmTQSru-bQ4Az3Kuko_vg5YlBxXjHadlsGwmo7d7wUx4fA1ssdZvxrESDKfkGWjj3HNJg_jIqXY0avz2JzCcEOXLBLmtXNEY8k2u3f4QusHdDWdqRARQHYE)
+
+![Input 2: Driver DataFrame](https://lh3.googleusercontent.com/LRtCOzmcfhLWzpyndbRKZSVPanLLzfULoHx2YxY6N3i1gQd2Eh6MS1igahOe8ydA7zQulIFJEaQ0IXFXOsdkKRobOC6ThSOnT4hACbCl1jeM4O2JDVC_kvw8lwTCezVUD3d6ZUYj31Q)
+
+Typically the `input 1` DataFrame would be provided by the user, and the `input 2` DataFrame would already be ingested into Feast.
To join these two, the user would call Feast as follows:
+
+```python
+# Feature references
+features = [
+    'conv_rate',
+    'acc_rate',
+    'avg_daily_trips',
+    'trip_completed'
+]
+
+dataset = client.get_batch_features(
+    feature_refs=features,  # this is a list of feature references
+    entity_rows=entity_df   # this is the entity dataframe above
+)
+
+# This prints out the dataframe below
+print(dataset.to_dataframe())
+```
+
+![Output: Joined DataFrame](https://lh5.googleusercontent.com/Gm-4Ru68KyIQ2tQtaVTDFngqO7pMtlMP1YAQO-bqln6_Mo2XAPdbij6w5ACnHAmQ053XUPu6G-c2aYRVJxPqPTMN_BcH6PY0-E1kCwXQAdW1CcQo5tc0g5ilcuVAtqsHcJB1R5mBdLo)
+
+Feast is able to intelligently join feature data with different timestamps to a single basis table in a point-in-time-correct way. This allows users to join daily batch data with high-frequency event data transparently. They simply need to know the feature names.
+
+Point-in-time-correct joins also prevent feature leakage by accurately reconstructing the state of the world at a single point in time, instead of just joining features based on the nearest timestamps.
+
+## Online feature retrieval
+
+Online feature retrieval works in much the same way as batch retrieval, with one important distinction: online stores only maintain the current state of features. No historical data is served.
+
+```python
+features = [
+    'conv_rate',
+    'acc_rate',
+    'avg_daily_trips',
+]
+
+data = client.get_online_features(
+    feature_refs=features,    # contains only feature references
+    entity_rows=entity_rows,  # contains only entities (driver ids)
+)
+```
+
+{% hint style="info" %}
+When no project is specified when retrieving features with `get_online_features()`, Feast infers that the features specified belong to the `default` project. To retrieve from another project, specify the `project` parameter when retrieving features.
+{% endhint %}
+
+Online serving with Feast is built to be very low latency. Feast Serving provides a [gRPC API](https://api.docs.feast.dev/grpc/feast.serving.pb.html) that is backed by [Redis](https://redis.io/). We also provide support for [Python](https://api.docs.feast.dev/python/), [Go](https://godoc.org/github.com/gojek/feast/sdk/go), and Java clients.
+
diff --git a/docs/user-guide/feature-sets.md b/docs/user-guide/feature-sets.md
new file mode 100644
index 0000000000..fb03adad34
--- /dev/null
+++ b/docs/user-guide/feature-sets.md
@@ -0,0 +1,91 @@
+# Feature Sets
+
+Feature sets are both a schema and a means of identifying data sources for features.
+
+Data typically comes in the form of flat files, dataframes, tables in a database, or events on a stream. Thus the data occurs as multiple columns/fields across multiple rows/events.
+
+Feature sets are a way of defining the unique properties of these data sources, how Feast should interpret them, and how Feast should source them. Feature sets allow for groups of fields in these data sources to be [ingested](data-ingestion.md) and [stored](stores.md) together. Feature sets allow for efficient storage and logical namespacing of data within [stores](stores.md).
+
+{% hint style="info" %}
+Feature sets are a grouping of features based on how they are loaded into Feast. They ensure that data is efficiently stored during ingestion. Feature sets are not a grouping of features for retrieval of features. During retrieval it is possible to retrieve feature values from any number of feature sets.
+{% endhint %}
+
+### Customer Transactions Example
+
+Below is an example specification of a basic `customer transactions` feature set that has been exported to YAML:
+
+{% tabs %}
+{% tab title="customer\_transactions\_feature\_set.yaml" %}
+```yaml
+name: customer_transactions
+entities:
+- name: customer_id
+  valueType: INT64
+features:
+- name: daily_transactions
+  valueType: FLOAT
+- name: total_transactions
+  valueType: FLOAT
+```
+{% endtab %}
+{% endtabs %}
+
+The dataframe below \(`customer_data.csv`\) contains the features and entities of the above feature set.
+
+| datetime | customer\_id | daily\_transactions | total\_transactions |
+| :--- | :--- | :--- | :--- |
+| 2019-01-01 01:00:00 | 20001 | 5.0 | 14.0 |
+| 2019-01-01 01:00:00 | 20002 | 2.6 | 43.0 |
+| 2019-01-01 01:00:00 | 20003 | 4.1 | 154.0 |
+| 2019-01-01 01:00:00 | 20004 | 3.4 | 74.0 |
+
+In order to ingest feature data into Feast for this specific feature set:
+
+```python
+# Load dataframe
+customer_df = pd.read_csv("customer_data.csv")
+
+# Create feature set from YAML (using YAML is optional)
+cust_trans_fs = FeatureSet.from_yaml("customer_transactions_feature_set.yaml")
+
+# Apply new feature set
+client.apply(cust_trans_fs)
+
+# Load feature data into Feast for this specific feature set
+client.ingest(cust_trans_fs, customer_df)
+```
+
+{% hint style="info" %}
+When applying a Feature Set without specifying a project in its specification, Feast creates/updates the Feature Set in the `default` project. To create a Feature Set in another project, specify the project of choice in the Feature Set specification's project field.
+{% endhint %}
+
+### **Making changes to Feature Sets**
+
+To accommodate the need for feature set definitions to change over time, a limited set of changes can be made to existing feature sets.
+
+To apply changes to a feature set:
+
+```python
+# With existing feature set
+cust_trans_fs = FeatureSet.from_yaml("customer_transactions_feature_set.yaml")
+
+# Add new feature, avg_basket_size
+cust_trans_fs.add(Feature(name="avg_basket_size", dtype=ValueType.INT32))
+
+# Apply changed feature set
+client.apply(cust_trans_fs)
+```
+
+Permitted changes include:
+
+* Adding new features
+* Deleting existing features \(note that features are tombstoned and remain on record, rather than removed completely; as a result, new features will not be able to take the names of these deleted features\)
+* Changing features' TFX schemas
+* Changing the feature set's source and max age
+
+Note that the following are **not** allowed:
+
+* Changes to the project or name of the feature set.
+* Changes to entities.
+* Changes to names and types of existing features.
+
diff --git a/docs/user-guide/features.md b/docs/user-guide/features.md
new file mode 100644
index 0000000000..32a7b57b09
--- /dev/null
+++ b/docs/user-guide/features.md
@@ -0,0 +1,38 @@
+# Features
+
+A feature is an individual measurable property or characteristic of a phenomenon being observed. Features are the most important concepts within a feature store. Feature data is used both as input to models during training and when models are served in production.
+
+In the context of Feast, features are values that are associated with one or more entities over time. In Feast, these values are either primitives or lists of primitives. Each feature can also have additional information attached to it.
+
+The following is a YAML representation of a feature specification.
This specification would form part of a larger specification within a [feature set](feature-sets.md).
+
+{% code title="total\_trips\_feature.yaml" %}
+```yaml
+# Feature name
+name: total_trips_24h
+
+# Feature value type
+value_type: INT64
+```
+{% endcode %}
+
+Features can be created through the [Feast SDK](../getting-started/connecting-to-feast-1/connecting-to-feast.md) as follows:
+
+```python
+from feast import Entity, Feature, ValueType, FeatureSet
+
+# Create a driver entity
+driver = Entity("driver_id", ValueType.INT64)
+
+# Create a total trips 24h feature
+total_trips_24h = Feature("total_trips_24h", ValueType.INT64)
+
+# Create a feature set with a single entity and a single feature
+driver_fs = FeatureSet("driver_fs", entities=[driver], features=[total_trips_24h])
+
+# Register the feature set with Feast
+client.apply(driver_fs)
+```
+
+Please see the [FeatureSpec](https://api.docs.feast.dev/grpc/feast.core.pb.html#FeatureSpec) for the complete feature specification API.
+
diff --git a/docs/user-guide/overview.md b/docs/user-guide/overview.md
new file mode 100644
index 0000000000..0d45825368
--- /dev/null
+++ b/docs/user-guide/overview.md
@@ -0,0 +1,58 @@
+# Concepts
+
+## Using Feast
+
+Feast acts as the interface between ML models and data. Feast enables your team to:
+
+1. Create feature specifications to manage features and load in data that should be managed
+2. Retrieve historical features for training models
+3. Retrieve online features for serving models
+
+{% hint style="info" %}
+Feast currently does not apply feature transformations to data.
+{% endhint %}
+
+### 1. Creating and managing features
+
+Feature creators model the data within their organization into Feast through the creation of [feature sets](feature-sets.md).
+
+Feature sets are specifications that contain both schema and data source information. They allow Feast to know how to interpret your data, and optionally where to find it. Feature sets allow users to define domain [entities](entities.md) along with the [features](features.md) that are available on these entities. Feature sets also allow users to define schemas that describe the properties of the data, which in turn can be used for validation purposes.
+
+Once a feature set has been registered, Feast will create the relevant schemas to store feature data within its feature [stores](stores.md). These stores are then automatically populated by jobs that ingest data from data [sources](sources.md), making it possible for Feast to provide access to features for training and serving. It is also possible for users to [ingest](data-ingestion.md) data into Feast instead of using an external source.
+
+Read more about [feature sets](feature-sets.md).
+
+### 2. Retrieving historical features during training
+
+Both online and historical retrieval are executed through an API call to `Feast Serving` using [feature references](feature-retrieval.md). In the case of historical serving it is necessary to provide Feast with the entities and timestamps that feature data will be joined to. Feast eagerly produces a point-in-time correct dataset based on the features that have been requested. These features can come from any number of feature sets.
+
+Stores supported: [BigQuery](https://cloud.google.com/bigquery)
+
+### 3. Retrieving online features during serving
+
+Feast also allows users to call `Feast Serving` for online feature data.
Feast only stores the latest values during online serving for each feature, as opposed to historical serving, where all historical values are stored. Online serving allows for very low latency requests to feature data at very high throughput.
+
+Stores supported: [Redis](https://redis.io/), [Redis Cluster](https://redis.io/topics/cluster-tutorial)
+
+## Concept Hierarchy
+
+![](../.gitbook/assets/image%20%283%29.png)
+
+Feast resources are arranged in the above hierarchy, with projects grouping one or more [feature sets](feature-sets.md), which in turn group multiple [features](features.md) or [entities](entities.md).
+
+The logical grouping of these resources is important for namespacing as well as retrieval. At retrieval time it is necessary to reference individual features through feature references. These references uniquely identify a feature or entity within a Feast deployment.
+
+## Concepts
+
+[Entities](entities.md) are objects in an organization that model a specific construct. Examples of these include customers, transactions, and drivers.
+
+[Features](features.md) are measurable properties that are observed on entities. Features are used as inputs to models.
+
+[Feature Sets](feature-sets.md) are schemas that define logical groupings of entities, features, data sources, and other related metadata.
+
+[Stores](stores.md) are databases that maintain feature data that gets served to models during training or inference.
+
+[Sources](sources.md) are either internal or external data sources where feature data can be found.
+
+[Ingestion](data-ingestion.md) is the process of loading data into Feast.
+
diff --git a/docs/user-guide/sources.md b/docs/user-guide/sources.md
new file mode 100644
index 0000000000..35fcb41a11
--- /dev/null
+++ b/docs/user-guide/sources.md
@@ -0,0 +1,32 @@
+# Sources
+
+A `source` is a data source that can be used to find feature data. Users define sources as part of [feature sets](feature-sets.md). Once a feature set is registered with a source, Feast will automatically start to populate its stores with data from this source.
+
+{% hint style="info" %}
+Feast currently only supports [Kafka](https://kafka.apache.org/) as a source.
+{% endhint %}
+
+An example of a user-provided source can be seen in the following code snippet:
+
+```python
+feature_set = FeatureSet(
+    name="stream_feature",
+    entities=[
+        Entity("entity_id", ValueType.INT64)
+    ],
+    features=[
+        Feature("feature_value1", ValueType.STRING)
+    ],
+    source=KafkaSource(
+        brokers="mybroker:9092",
+        topic="my_feature_topic"
+    )
+)
+```
+
+Once this feature set is registered, Feast will start an ingestion job that retrieves data from this source and starts to populate all [stores](stores.md) that subscribe to it.
+
+In most cases a feature set \(and by extension its source\) will be used to populate both an online store and a historical store. This allows users to both train and serve their model with the same feature data.
+
+Feast will ensure that the source complies with the schema of the feature set. The event data has to be [Protobuf](https://developers.google.com/protocol-buffers) encoded and must contain the necessary [FeatureRow](https://api.docs.feast.dev/grpc/feast.types.pb.html#FeatureRow) structure.
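To make the expected format concrete, here is a minimal sketch of publishing a single `FeatureRow` to the feature stream for the `stream_feature` feature set above. It assumes the `kafka-python` package and the Feast Python SDK \(which ships the generated protos\) are installed; the proto module paths and the `project/feature-set` reference format follow the 0.x SDK layout and should be treated as assumptions, as should the broker and topic reused from the snippet above.

```python
from datetime import datetime

from kafka import KafkaProducer
from feast.types.FeatureRow_pb2 import FeatureRow
from feast.types.Field_pb2 import Field
from feast.types.Value_pb2 import Value

# Reference the registered feature set (assumed "project/name" format)
row = FeatureRow(feature_set="default/stream_feature")
row.event_timestamp.FromDatetime(datetime.utcnow())

# One field per entity and feature, matching the feature set's schema
row.fields.extend([
    Field(name="entity_id", value=Value(int64_val=1001)),
    Field(name="feature_value1", value=Value(string_val="some_value")),
])

# Publish the Protobuf-encoded row to the source's topic
producer = KafkaProducer(bootstrap_servers="mybroker:9092")
producer.send("my_feature_topic", row.SerializeToString())
producer.flush()
```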
+
diff --git a/docs/user-guide/stores.md b/docs/user-guide/stores.md
new file mode 100644
index 0000000000..c15cbb3db0
--- /dev/null
+++ b/docs/user-guide/stores.md
@@ -0,0 +1,37 @@
+# Stores
+
+In Feast, a store describes a database that is populated with feature data in order to be served to models.
+
+Feast supports two classes of stores:
+
+* Historical stores
+* Online stores
+
+In order to populate these stores, Feast Core creates a long running ingestion job that streams in data from all feature sources to all stores that subscribe to those feature sets.
+
+![](../.gitbook/assets/image%20%282%29.png)
+
+## Historical Stores
+
+Historical stores maintain a complete history of feature data for the feature sets they are subscribed to.
+
+Feast currently only supports [Google BigQuery](https://cloud.google.com/bigquery) as a historical store, but we have [developed a storage API](https://github.com/gojek/feast/issues/482) that makes adding a new store possible.
+
+Each historical store models its data differently, but in the case of a relational store \(like BigQuery\), each feature set maps directly to a table. Each feature and entity within a feature set maps directly to a column within a table.
+
+Data from historical stores can be used to train a model. In order to retrieve data from a historical store it is necessary to connect to a Feast Serving deployment and request historical features. Please see feature retrieval for more details.
+
+{% hint style="danger" %}
+Data is persisted in historical stores like BigQuery in log format. Repeated ingestions will duplicate the data that is persisted in the store. Feast will automatically deduplicate data during retrieval, but it doesn't currently remove data from the stores themselves.
+{% endhint %}
+
+## Online Stores
+
+Online stores maintain only the latest values for a specific feature. Feast currently supports Redis as an online store. Online stores are meant for very high throughput writes from ingestion jobs and very low latency access to features during online serving.
+
+Please continue to the [feature retrieval](feature-retrieval.md) section for more details on retrieving data from online storage.
+
+## Subscriptions
+
+Stores are populated by ingestion jobs \(Apache Beam\) that retrieve feature data from sources based on subscriptions. These subscriptions are typically defined by the administrators of the Feast deployment. In most cases a store would simply subscribe to all features, but in some cases it may subscribe to a subset in order to improve performance or efficiency.
+
diff --git a/docs/why-feast.md b/docs/why-feast.md
new file mode 100644
index 0000000000..75048ace2c
--- /dev/null
+++ b/docs/why-feast.md
@@ -0,0 +1,32 @@
+# Why Feast?
+
+## Lack of feature reuse
+
+**Problem:** The process of engineering features is one of the most time consuming activities in building an end-to-end ML system. Despite this, many teams continue to redevelop the same features from scratch for every new project. Often these features never leave the notebooks or pipelines they are built in.
+
+**Solution:** A centralized feature store allows organizations to build up a foundation of features that can be reused across projects. Teams are then able to utilize features developed by other teams, and as more features are added to the store it becomes easier and cheaper to build models.
+
+## Serving features is hard
+
+**Problem:** Serving up-to-date features at scale is hard.
Raw data can come from a variety of sources, from data lakes and data warehouses to streams and flat files. Data scientists need the ability to produce massive datasets of features from this data in order to train their models offline. These models then need access to real-time feature data at low latency and high throughput when they are served in production.
+
+**Solution:** Feast is built to be able to ingest data from a variety of sources, supporting both streaming and batch sources. Once data is loaded into Feast as features, it becomes available through both a batch serving API as well as a real-time \(online serving\) API. These APIs allow data scientists and ML engineers to easily retrieve feature data during development, training, and production. Feast also comes with a Java, Go, and Python SDK to make this experience easy.
+
+## **Models need point-in-time correctness**
+
+**Problem:** Most data sources are not built with ML use cases in mind and by extension don't provide point-in-time correct lookups of feature data. One of the reasons why features are often re-engineered is because ML practitioners need to ensure that their models are trained on a dataset that accurately models the state of the world when the model runs in production.
+
+**Solution:** Feast allows end users to create point-in-time correct datasets across multiple entities. Feast ensures that there is no data leakage, that cross feature set joins are valid, and that models are not fed expired data.
+
+## Definitions of features vary
+
+**Problem:** Teams define features differently and there is no easy access to the documentation of a feature.
+
+**Solution:** Feast becomes the single source of truth for all feature data for all models within an organization. Teams are able to capture documentation, metadata, and metrics about features. This allows teams to communicate clearly about features, test feature data, and determine if a feature is useful for a particular model.
+
+## **Inconsistency between training and serving**
+
+**Problem:** Training requires access to historical data, whereas models that serve predictions need the latest values. Inconsistencies arise when data is siloed into many independent systems requiring separate tooling. Often teams use Python to create batch features offline, but these features are redeveloped with different libraries and languages when moving to serving or streaming systems.
+
+**Solution:** Feast provides consistency by managing and unifying the ingestion of data from batch and streaming sources into both the feature warehouse and feature serving stores. Feast becomes the bridge between your model and your data, both for training and serving. This ensures consistency in the feature data that your model receives.
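As a rough sketch of what this consistency looks like in practice, the same feature references drive both training and online retrieval. The deployment URLs below are hypothetical, and `entity_rows` stands in for the online entity-row structure described in the feature retrieval guide:

```python
from datetime import datetime

import pandas as pd
from feast import Client

# Hypothetical Feast Core and Serving endpoints
client = Client(core_url="feast.example.com:6565", serving_url="feast.example.com:6566")

# One set of feature references, shared by training and serving
features = ["daily_transactions", "total_transactions"]

# Training: a point-in-time correct historical dataset
entity_df = pd.DataFrame({
    "datetime": [datetime(2019, 1, 1, 1)],
    "customer_id": [20001],
})
training_df = client.get_batch_features(
    feature_refs=features,
    entity_rows=entity_df,
).to_dataframe()

# Serving: the latest values for the same feature references
# (entity_rows: online entity rows for the same customer ids,
# constructed as shown in the feature retrieval guide)
online_data = client.get_online_features(
    feature_refs=features,
    entity_rows=entity_rows,
)
```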
+ diff --git a/examples/basic/basic.ipynb b/examples/basic/basic.ipynb index a6feb0ef13..a56121328c 100644 --- a/examples/basic/basic.ipynb +++ b/examples/basic/basic.ipynb @@ -28,23 +28,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "import os\n", "\n", "# Feast Core acts as the central feature registry\n", - "FEAST_CORE_URL = os.getenv('FEAST_CORE_URL', 'core:6565')\n", + "FEAST_CORE_URL = os.getenv('FEAST_CORE_URL', 'localhost:6565')\n", "\n", "# Feast Online Serving allows for the retrieval of real-time feature data\n", - "FEAST_ONLINE_SERVING_URL = os.getenv('FEAST_ONLINE_SERVING_URL', 'online-serving:6566')\n", + "FEAST_ONLINE_SERVING_URL = os.getenv('FEAST_ONLINE_SERVING_URL', 'localhost:6566')\n", "\n", "# Feast Batch Serving allows for the retrieval of historical feature data\n", - "FEAST_BATCH_SERVING_URL = os.getenv('FEAST_BATCH_SERVING_URL', 'batch-serving:6567')\n", - "\n", - "# PYTHON_REPOSITORY_PATH is the path to the Python SDK inside the Feast Git Repo\n", - "PYTHON_REPOSITORY_PATH = os.getenv('PYTHON_REPOSITORY_PATH', '../../')" + "FEAST_BATCH_SERVING_URL = os.getenv('FEAST_BATCH_SERVING_URL', 'localhost:6567')" ] }, { @@ -63,48 +60,57 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "!python -m pip install --ignore-installed --upgrade feast" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "(Alternative) Install from local repository" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "import sys\n", - "os.environ['PYTHON_SDK_PATH'] = os.path.join(PYTHON_REPOSITORY_PATH, 'sdk/python')\n", - "sys.path.append(os.environ['PYTHON_SDK_PATH'])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "!echo $PYTHON_SDK_PATH" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "!python -m pip install --ignore-installed --upgrade -e ${PYTHON_SDK_PATH}" + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: feast in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (0.5.0.post0)\n", + "Requirement already satisfied: google in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (2.0.3)\n", + "Requirement already satisfied: tabulate==0.8.* in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (0.8.7)\n", + "Requirement already satisfied: pandavro==1.5.* in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (1.5.1)\n", + "Requirement already satisfied: pandas==0.* in /home/zzy/.local/lib/python3.7/site-packages (from feast) (0.25.0)\n", + "Requirement already satisfied: google-cloud-core==1.0.* in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (1.0.3)\n", + "Requirement already satisfied: grpcio==1.* in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (1.29.0)\n", + "Requirement already satisfied: fastavro<0.23,>=0.22.11 in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (0.22.13)\n", + "Requirement already satisfied: PyYAML==5.1.* in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (5.1.2)\n", + "Requirement already satisfied: googleapis-common-protos==1.* in 
/home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (1.51.0)\n", + "Requirement already satisfied: tqdm==4.* in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (4.46.0)\n", + "Requirement already satisfied: numpy in /home/zzy/.local/lib/python3.7/site-packages (from feast) (1.17.4)\n", + "Requirement already satisfied: confluent-kafka in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (1.4.2)\n", + "Requirement already satisfied: google-cloud-bigquery-storage==0.7.* in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (0.7.0)\n", + "Requirement already satisfied: toml==0.10.* in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (0.10.1)\n", + "Requirement already satisfied: protobuf>=3.10 in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (3.12.1)\n", + "Requirement already satisfied: google-cloud-bigquery==1.18.* in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (1.18.1)\n", + "Requirement already satisfied: pyarrow>=0.15.1 in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (0.17.1)\n", + "Requirement already satisfied: google-api-core==1.14.* in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (1.14.3)\n", + "Requirement already satisfied: google-cloud-storage==1.20.* in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (1.20.0)\n", + "Requirement already satisfied: google-auth==1.6.* in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (1.6.3)\n", + "Requirement already satisfied: kafka-python==1.* in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (1.4.7)\n", + "Requirement already satisfied: Click==7.* in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from feast) (7.1.2)\n", + "Requirement already satisfied: beautifulsoup4 in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from google->feast) (4.9.1)\n", + "Requirement already satisfied: six>=1.9 in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from pandavro==1.5.*->feast) (1.14.0)\n", + "Requirement already satisfied: python-dateutil>=2.6.1 in /home/zzy/.local/lib/python3.7/site-packages (from pandas==0.*->feast) (2.8.0)\n", + "Requirement already satisfied: pytz>=2017.2 in /home/zzy/.local/lib/python3.7/site-packages (from pandas==0.*->feast) (2019.1)\n", + "Requirement already satisfied: setuptools in /home/zzy/.local/lib/python3.7/site-packages (from protobuf>=3.10->feast) (41.6.0)\n", + "Requirement already satisfied: google-resumable-media<0.5.0dev,>=0.3.1 in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from google-cloud-bigquery==1.18.*->feast) (0.4.1)\n", + "Requirement already satisfied: requests<3.0.0dev,>=2.18.0 in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from google-api-core==1.14.*->feast) (2.23.0)\n", + "Requirement already satisfied: rsa>=3.1.4 in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from google-auth==1.6.*->feast) (4.0)\n", + "Requirement already satisfied: cachetools>=2.0.0 in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from google-auth==1.6.*->feast) (4.1.0)\n", + "Requirement already satisfied: pyasn1-modules>=0.2.1 in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from google-auth==1.6.*->feast) (0.2.8)\n", + "Requirement already satisfied: soupsieve>1.2 in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from 
beautifulsoup4->google->feast) (2.0.1)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from requests<3.0.0dev,>=2.18.0->google-api-core==1.14.*->feast) (3.0.4)\n", + "Requirement already satisfied: idna<3,>=2.5 in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from requests<3.0.0dev,>=2.18.0->google-api-core==1.14.*->feast) (2.9)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from requests<3.0.0dev,>=2.18.0->google-api-core==1.14.*->feast) (1.25.8)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /home/zzy/.local/lib/python3.7/site-packages (from requests<3.0.0dev,>=2.18.0->google-api-core==1.14.*->feast) (2019.6.16)\n", + "Requirement already satisfied: pyasn1>=0.1.3 in /home/zzy/.conda/envs/feast-ml/lib/python3.7/site-packages (from rsa>=3.1.4->google-auth==1.6.*->feast) (0.4.8)\n" + ] + } + ], + "source": [ + "!pip install feast" ] }, { @@ -116,7 +122,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, "outputs": [], "source": [ @@ -143,45 +149,13 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ "client = Client(core_url=FEAST_CORE_URL, serving_url=FEAST_ONLINE_SERVING_URL)" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Create a project workspace" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "client.create_project('customer_project')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Set the active project" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "client.set_project('customer_project')" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -198,7 +172,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "metadata": {}, "outputs": [], "source": [ @@ -210,9 +184,142 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
datetimecustomer_iddaily_transactionstotal_transactions
02020-05-25 00:00:00+00:0010018.70480277
12020-05-25 00:00:00+00:0010027.16388731
22020-05-25 00:00:00+00:0010039.93597668
32020-05-25 00:00:00+00:0010041.10798078
42020-05-25 00:00:00+00:0010058.30738136
52020-05-26 00:00:00+00:0010012.41681114
62020-05-26 00:00:00+00:0010024.8177359
72020-05-26 00:00:00+00:0010034.40971495
82020-05-26 00:00:00+00:0010046.6173176
92020-05-26 00:00:00+00:0010051.03252586
\n", + "
" + ], + "text/plain": [ + " datetime customer_id daily_transactions \\\n", + "0 2020-05-25 00:00:00+00:00 1001 8.704802 \n", + "1 2020-05-25 00:00:00+00:00 1002 7.163887 \n", + "2 2020-05-25 00:00:00+00:00 1003 9.935976 \n", + "3 2020-05-25 00:00:00+00:00 1004 1.107980 \n", + "4 2020-05-25 00:00:00+00:00 1005 8.307381 \n", + "5 2020-05-26 00:00:00+00:00 1001 2.416811 \n", + "6 2020-05-26 00:00:00+00:00 1002 4.817735 \n", + "7 2020-05-26 00:00:00+00:00 1003 4.409714 \n", + "8 2020-05-26 00:00:00+00:00 1004 6.617317 \n", + "9 2020-05-26 00:00:00+00:00 1005 1.032525 \n", + "\n", + " total_transactions \n", + "0 77 \n", + "1 31 \n", + "2 68 \n", + "3 78 \n", + "4 36 \n", + "5 14 \n", + "6 9 \n", + "7 95 \n", + "8 6 \n", + "9 86 " + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "customer_features = pd.DataFrame(\n", " {\n", @@ -223,7 +330,7 @@ " }\n", ")\n", "\n", - "print(customer_features.head(500))" + "customer_features.head(10)" ] }, { @@ -243,7 +350,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "metadata": {}, "outputs": [], "source": [ @@ -263,9 +370,19 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Feature daily_transactions (ValueType.DOUBLE) added from dataframe.\n", + "Feature total_transactions (ValueType.INT64) added from dataframe.\n", + "\n" + ] + } + ], "source": [ "customer_fs.infer_fields_from_df(customer_features, replace_existing_features=True)" ] @@ -286,26 +403,53 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "client.apply(customer_fs)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We test the retrieval of this feature set object (not its data), to ensure that we have the latest version" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Feature set created: \"customer_transactions\"\n", + "{\n", + " \"spec\": {\n", + " \"name\": \"customer_transactions\",\n", + " \"entities\": [\n", + " {\n", + " \"name\": \"customer_id\",\n", + " \"valueType\": \"INT64\"\n", + " }\n", + " ],\n", + " \"features\": [\n", + " {\n", + " \"name\": \"daily_transactions\",\n", + " \"valueType\": \"DOUBLE\"\n", + " },\n", + " {\n", + " \"name\": \"total_transactions\",\n", + " \"valueType\": \"INT64\"\n", + " }\n", + " ],\n", + " \"maxAge\": \"432000s\",\n", + " \"source\": {\n", + " \"type\": \"KAFKA\",\n", + " \"kafkaSourceConfig\": {\n", + " \"bootstrapServers\": \"kafka:9092,localhost:9094\",\n", + " \"topic\": \"feast-features\"\n", + " }\n", + " },\n", + " \"project\": \"default\"\n", + " },\n", + " \"meta\": {\n", + " \"createdTimestamp\": \"2020-05-27T03:58:07Z\",\n", + " \"status\": \"STATUS_PENDING\"\n", + " }\n", + "}\n" + ] + } + ], + "source": [ + "client.apply(customer_fs)\n", "customer_fs = client.get_feature_set(\"customer_transactions\")\n", "print(customer_fs)" ] @@ -319,9 +463,52 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Waiting for feature set to be ready for ingestion...\n" + ] + 
}, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|██████████| 15/15 [00:01<00:00, 13.99rows/s]" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Ingestion complete!\n", + "\n", + "Ingestion statistics:\n", + "Success: 15/15\n", + "Removing temporary file(s)...\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\n" + ] + }, + { + "data": { + "text/plain": [ + "'3b988d56-6885-36c6-804e-73ea76b7eae6'" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "client.ingest(\"customer_transactions\", customer_features)" ] @@ -349,9 +536,37 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "field_values {\n", + " fields {\n", + " key: \"customer_id\"\n", + " value {\n", + " int64_val: 1001\n", + " }\n", + " }\n", + " fields {\n", + " key: \"daily_transactions\"\n", + " value {\n", + " double_val: 2.460333315469021\n", + " }\n", + " }\n", + " fields {\n", + " key: \"total_transactions\"\n", + " value {\n", + " int64_val: 11\n", + " }\n", + " }\n", + "}\n", + "\n" + ] + } + ], "source": [ "online_features = client.get_online_features(\n", " feature_refs=[\n", @@ -402,20 +617,117 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
datetimecustomer_id
02020-05-25 00:00:00+00:001001
12020-05-25 00:00:00+00:001002
22020-05-25 00:00:00+00:001003
32020-05-25 00:00:00+00:001004
42020-05-25 00:00:00+00:001005
52020-05-26 00:00:00+00:001001
62020-05-26 00:00:00+00:001002
72020-05-26 00:00:00+00:001003
82020-05-26 00:00:00+00:001004
92020-05-26 00:00:00+00:001005
\n", + "
" + ], + "text/plain": [ + " datetime customer_id\n", + "0 2020-05-25 00:00:00+00:00 1001\n", + "1 2020-05-25 00:00:00+00:00 1002\n", + "2 2020-05-25 00:00:00+00:00 1003\n", + "3 2020-05-25 00:00:00+00:00 1004\n", + "4 2020-05-25 00:00:00+00:00 1005\n", + "5 2020-05-26 00:00:00+00:00 1001\n", + "6 2020-05-26 00:00:00+00:00 1002\n", + "7 2020-05-26 00:00:00+00:00 1003\n", + "8 2020-05-26 00:00:00+00:00 1004\n", + "9 2020-05-26 00:00:00+00:00 1005" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "event_timestamps = [datetime.utcnow().replace(tzinfo=utc) - timedelta(days=randrange(15), hours=randrange(24), minutes=randrange(60)) for day in range(30)]\n", - "\n", "entity_rows = pd.DataFrame(\n", " {\n", - " \"datetime\": event_timestamps,\n", - " \"customer_id\": [customers[idx % len(customers)] for idx in range(len(event_timestamps))],\n", + " \"datetime\": [day for day in days for customer in customers],\n", + " \"customer_id\": [customer for day in days for customer in customers],\n", " }\n", ")\n", "\n", - "print(entity_rows.head(10))" + "entity_rows.head(10)" ] }, { @@ -434,12 +746,11 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 20, "metadata": {}, "outputs": [], "source": [ - "batch_client = Client(core_url=FEAST_CORE_URL, serving_url=FEAST_BATCH_SERVING_URL)\n", - "batch_client.set_project(\"customer_project\")" + "batch_client = Client(core_url=FEAST_CORE_URL, serving_url=FEAST_BATCH_SERVING_URL)" ] }, { @@ -451,7 +762,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 21, "metadata": { "scrolled": true }, @@ -459,8 +770,8 @@ "source": [ "job = batch_client.get_batch_features(\n", " feature_refs=[\n", - " f\"customer_project/daily_transactions\", \n", - " f\"customer_project/total_transactions\", \n", + " f\"daily_transactions\", \n", + " f\"total_transactions\", \n", " ],\n", " entity_rows=entity_rows\n", " )" @@ -475,20 +786,108 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
event_timestampcustomer_iddaily_transactionstotal_transactions
02020-05-26 00:00:00+00:0010012.41681114
12020-05-26 00:00:00+00:0010046.6173176
22020-05-26 00:00:00+00:0010034.40971495
32020-05-26 00:00:00+00:0010051.03252586
42020-05-26 00:00:00+00:0010024.8177359
\n", + "
" + ], + "text/plain": [ + " event_timestamp customer_id daily_transactions \\\n", + "0 2020-05-26 00:00:00+00:00 1001 2.416811 \n", + "1 2020-05-26 00:00:00+00:00 1004 6.617317 \n", + "2 2020-05-26 00:00:00+00:00 1003 4.409714 \n", + "3 2020-05-26 00:00:00+00:00 1005 1.032525 \n", + "4 2020-05-26 00:00:00+00:00 1002 4.817735 \n", + "\n", + " total_transactions \n", + "0 14 \n", + "1 6 \n", + "2 95 \n", + "3 86 \n", + "4 9 " + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "df = job.to_dataframe()\n", - "print(df.head(10))" + "df.head()" ] } ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "feast-ml-py374", "language": "python", - "name": "python3" + "name": "feast-ml-py374" }, "language_info": { "codemirror_mode": { @@ -501,17 +900,8 @@ "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.7.4" - }, - "pycharm": { - "stem_cell": { - "cell_type": "raw", - "metadata": { - "collapsed": false - }, - "source": [] - } } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git a/examples/feast-xgboost-churn-prediction-tutorial/Telecom Customer Churn Prediction (with Feast and XGBoost).ipynb b/examples/feast-xgboost-churn-prediction-tutorial/Telecom Customer Churn Prediction (with Feast and XGBoost).ipynb index e88fe970d5..c29c01efff 100644 --- a/examples/feast-xgboost-churn-prediction-tutorial/Telecom Customer Churn Prediction (with Feast and XGBoost).ipynb +++ b/examples/feast-xgboost-churn-prediction-tutorial/Telecom Customer Churn Prediction (with Feast and XGBoost).ipynb @@ -6176,8 +6176,7 @@ "source": [ "os.environ['FEAST_CORE_URL'] = 'localhost:6565'\n", "os.environ['FEAST_ONLINE_URL'] = 'localhost:6566'\n", - "os.environ['FEAST_BATCH_URL'] = 'localhost:6567'\n", - "os.environ['FEAST_PROJECT'] = 'default'" + "os.environ['FEAST_BATCH_URL'] = 'localhost:6567'" ] }, { @@ -6195,8 +6194,7 @@ "metadata": {}, "outputs": [], "source": [ - "client = Client(core_url=os.environ['FEAST_CORE_URL'])\n", - "client.set_project(os.environ['FEAST_PROJECT'])" + "client = Client(core_url=os.environ['FEAST_CORE_URL'])" ] }, { @@ -6490,7 +6488,6 @@ "name": "stderr", "output_type": "stream", "text": [ - "\r", " 0%| | 0/7032 [00:00 with the desired value +kubectl create secret generic feast-postgresql \ + --from-literal=postgresql-password= + +# Install Feast with Online Serving and Beam DirectRunner +helm install --name myrelease feast-charts/feast \ + --set feast-core.postgresql.existingSecret=feast-postgresql \ + --set postgresql.existingSecret=feast-postgresql ``` -feast // top level feast chart -│ -├── feast-core // feast-core subchart -│ ├── postgresql // Postgresql dependency for feast-core (Feast database) -│ └── kafka // Kafka dependency for feast-core (default stream source) -│ -├── feast-serving-online // feast-serving subchart -│ └── redis // Redis dependency for installation of store together with feast-serving -│ -└── feast-serving-batch // feast-serving subchart -``` + +## Introduction +This chart install Feast deployment on a Kubernetes cluster using the [Helm](https://v2.helm.sh/docs/using_helm/#installing-helm) package manager. 
## Prerequisites -- Kubernetes 1.13 or newer cluster -- Helm 2.15.2 or newer +- Kubernetes 1.12+ +- Helm 2.15+ (not tested with Helm 3) +- Persistent Volume support on the underlying infrastructure + +## Chart Requirements + +| Repository | Name | Version | +|------------|------|---------| +| | feast-core | 0.5.0-alpha.1 | +| | feast-serving | 0.5.0-alpha.1 | +| | feast-serving | 0.5.0-alpha.1 | +| | prometheus-statsd-exporter | 0.1.2 | +| https://kubernetes-charts-incubator.storage.googleapis.com/ | kafka | 0.20.8 | +| https://kubernetes-charts.storage.googleapis.com/ | grafana | 5.0.5 | +| https://kubernetes-charts.storage.googleapis.com/ | postgresql | 8.6.1 | +| https://kubernetes-charts.storage.googleapis.com/ | prometheus | 11.0.2 | +| https://kubernetes-charts.storage.googleapis.com/ | redis | 10.5.6 | -## Resources Required -The chart deploys pods that consume minimum resources as specified in the resources configuration parameter. -## Installing the Chart ## Chart Values | Key | Type | Default | Description | |-----|------|---------|-------------| | feast-batch-serving.enabled | bool | `false` | Flag to install Feast Batch Serving | | feast-core.enabled | bool | `true` | Flag to install Feast Core | | feast-online-serving.enabled | bool | `true` | Flag to install Feast Online Serving | | grafana.enabled | bool | `true` | Flag to install Grafana | | kafka.enabled | bool | `true` | Flag to install Kafka | | postgresql.enabled | bool | `true` | Flag to install Postgresql | | prometheus-statsd-exporter.enabled | bool | `true` | Flag to install StatsD to Prometheus Exporter | | prometheus.enabled | bool | `true` | Flag to install Prometheus | | redis.enabled | bool | `true` | Flag to install Redis | + +## Configuration and installation details + +The default configuration will install Feast with Online Serving. Ingestion +of features will use Beam [DirectRunner](https://beam.apache.org/documentation/runners/direct/) +that runs on the same container where Feast Core is running. -Add repository for Feast chart: ```bash -helm repo add feast-charts https://feast-charts.storage.googleapis.com -helm repo update +# Create secret for Feast database, replace accordingly +kubectl create secret generic feast-postgresql \ + --from-literal=postgresql-password= + +# Install Feast with Online Serving and Beam DirectRunner +helm install --name myrelease feast-charts/feast \ + --set feast-core.postgresql.existingSecret=feast-postgresql \ + --set postgresql.existingSecret=feast-postgresql +``` + +In order to test that the installation is successful: +```bash +helm test myrelease + +# If the installation is successful, the following should be printed +RUNNING: myrelease-feast-online-serving-test +PASSED: myrelease-feast-online-serving-test +RUNNING: myrelease-grafana-test +PASSED: myrelease-grafana-test +RUNNING: myrelease-test-topic-create-consume-produce +PASSED: myrelease-test-topic-create-consume-produce + +# Once the test completes, to check the logs +kubectl logs myrelease-feast-online-serving-test +``` + +> The test pods can be safely deleted after the test finishes. +> Check the yaml files in `templates/tests/` folder to see the processes +> the test pods execute. + +### Feast metrics + +Feast default installation includes Grafana, StatsD exporter and Prometheus. Request +metrics from Feast Core and Feast Serving, as well as ingestion statistics from +Feast Ingestion, are accessible from Prometheus and the Grafana dashboard. The following +shows a quick example of how to access the metrics.
+ +``` +# Forwards local port 9090 to the Prometheus server pod +kubectl port-forward svc/myrelease-prometheus-server 9090:80 +``` + +Visit http://localhost:9090 to access the Prometheus server: + +![Prometheus Server](files/img/prometheus-server.png?raw=true) + +### Enable Batch Serving + +To install Feast Batch Serving for retrieval of historical features in offline +training, access to BigQuery is required. First, create a [service account](https://cloud.google.com/iam/docs/creating-managing-service-account-keys) key that +will provide the credentials to access BigQuery. Grant the service account `editor` +role so it has write permissions to BigQuery and Cloud Storage. + +> In production, it is advised to give only the required [permissions](foo-feast-batch-serving-test) +> to the service account, rather than the very permissive `editor` role. + +Create a Kubernetes secret for the service account JSON file: +```bash +# By default Feast expects the secret to be named "feast-gcp-service-account" +# and the JSON file to be named "credentials.json" +kubectl create secret generic feast-gcp-service-account --from-file=credentials.json +``` + +Create a new Cloud Storage bucket (if one does not exist) and make sure the service +account has write access to the bucket: +```bash +gsutil mb ``` -Install Feast release with minimal features, without batch serving and persistence: +Use the following Helm values to enable Batch Serving: +```yaml +# values-batch-serving.yaml +feast-core: + gcpServiceAccount: + enabled: true + postgresql: + existingSecret: feast-postgresql + +feast-batch-serving: + enabled: true + gcpServiceAccount: + enabled: true + application-override.yaml: + feast: + active_store: historical + stores: + - name: historical + type: BIGQUERY + config: + project_id: + dataset_id: + staging_location: gs:///feast-staging-location + initial_retry_delay_seconds: 3 + total_timeout_seconds: 21600 + subscriptions: + - name: "*" + project: "*" + version: "*" + +postgresql: + existingSecret: feast-postgresql +``` + +> To delete the previous release, run `helm delete --purge myrelease` +> Note this will not delete the persistent volume that has been claimed (PVC). +> In a test cluster, run `kubectl delete pvc --all` to delete all claimed PVCs. + ```bash -RELEASE_NAME=demo -helm install feast-charts/feast --name $RELEASE_NAME -f values-demo.yaml +# Install a new release +helm install --name myrelease -f values-batch-serving.yaml feast-charts/feast + +# Wait until all pods are created and running/completed (can take about 5m) +kubectl get pods + +# Batch Serving is installed so `helm test` will also test for batch retrieval +helm test myrelease ``` -Install Feast release for typical use cases, with batch and online serving: +### Use DataflowRunner for ingestion + +Apache Beam [DirectRunner](https://beam.apache.org/documentation/runners/direct/) +is not suitable for production use cases because it is not easy to scale the +number of workers and there is no convenient API to monitor and manage the +workers. Feast supports [DataflowRunner](https://beam.apache.org/documentation/runners/dataflow/), which is a managed service on Google Cloud. + +> Make sure the `feast-gcp-service-account` Kubernetes secret containing the +> service account has been created and the service account has permissions +> to manage Dataflow jobs. One possible way to set this up is sketched below.
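+ +As a rough sketch only — the project and service account names below are hypothetical placeholders, not values defined by this chart — the account, key, and secret might be prepared along these lines: + +```bash +# Create a dedicated service account (all names here are illustrative) +gcloud iam service-accounts create feast-ingestion --project my-gcp-project + +# Allow it to manage Dataflow jobs +gcloud projects add-iam-policy-binding my-gcp-project \ +  --member "serviceAccount:feast-ingestion@my-gcp-project.iam.gserviceaccount.com" \ +  --role roles/dataflow.admin + +# Download a JSON key and store it in the secret name Feast expects +gcloud iam service-accounts keys create credentials.json \ +  --iam-account feast-ingestion@my-gcp-project.iam.gserviceaccount.com +kubectl create secret generic feast-gcp-service-account \ +  --from-file=credentials.json +```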
+ +Since Dataflow workers run outside the Kubernetes cluster and need to interact +with the Kafka brokers, Redis stores and StatsD server installed in the cluster, +these services need to be exposed for access outside the cluster by setting +`service.type: LoadBalancer`. + +In a typical use case, 5 `LoadBalancer` (internal) IP addresses are required by +Feast when running with `DataflowRunner`. In Google Cloud, these (internal) IP +addresses should be reserved first: ```bash +# Check with your network configuration which IP addresses are available for use +gcloud compute addresses create \ + feast-kafka-1 feast-kafka-2 feast-kafka-3 feast-redis feast-statsd \ + --region --subnet \ + --addresses 10.128.0.11,10.128.0.12,10.128.0.13,10.128.0.14,10.128.0.15 ``` -## Parameters - -The following table lists the configurable parameters of the Feast chart and their default values.
- -| Parameter | Description | Default -| --------- | ----------- | ------- -| `feast-core.enabled` | Flag to install Feast Core | `true` -| `feast-core.postgresql.enabled` | Flag to install Postgresql as Feast database | `true` -| `feast-core.postgresql.postgresqlDatabase` | Name of the database used by Feast Core | `feast` -| `feast-core.postgresql.postgresqlUsername` | Username to authenticate to Feast database | `postgres` -| `feast-core.postgresql.postgresqlPassword` | Passsword to authenticate to Feast database | `password` -| `feast-core.kafka.enabled` | Flag to install Kafka as the default source for Feast | `true` -| `feast-core.kafka.topics[0].name` | Default topic name in Kafka| `feast` -| `feast-core.kafka.topics[0].replicationFactor` | No of replication factor for the topic| `1` -| `feast-core.kafka.topics[0].partitions` | No of partitions for the topic | `1` -| `feast-core.prometheus-statsd-exporter.enabled` | Flag to install Prometheus StatsD Exporter | `false` -| `feast-core.prometheus-statsd-exporter.*` | Refer to this [link](charts/feast-core/charts/prometheus-statsd-exporter/values.yaml | -| `feast-core.replicaCount` | No of pods to create | `1` -| `feast-core.image.repository` | Repository for Feast Core Docker image | `gcr.io/kf-feast/feast-core` -| `feast-core.image.tag` | Tag for Feast Core Docker image | `0.4.4` -| `feast-core.image.pullPolicy` | Image pull policy for Feast Core Docker image | `IfNotPresent` -| `feast-core.prometheus.enabled` | Add annotations to enable Prometheus scraping | `false` -| `feast-core.application.yaml` | Configuration for Feast Core application | Refer to this [link](charts/feast-core/values.yaml) -| `feast-core.springConfigMountPath` | Directory to mount application.yaml | `/etc/feast/feast-core` -| `feast-core.gcpServiceAccount.useExistingSecret` | Flag to use existing secret for GCP service account | `false` -| `feast-core.gcpServiceAccount.existingSecret.name` | Secret name for the service account | `feast-gcp-service-account` -| `feast-core.gcpServiceAccount.existingSecret.key` | Secret key for the service account | `key.json` -| `feast-core.gcpServiceAccount.mountPath` | Directory to mount the JSON key file | `/etc/gcloud/service-accounts` -| `feast-core.gcpProjectId` | Project ID to set `GOOGLE_CLOUD_PROJECT` to change default project used by SDKs | `""` -| `feast-core.jarPath` | Path to Jar file in the Docker image | `/opt/feast/feast-core.jar` -| `feast-core.jvmOptions` | Options for the JVM | `[]` -| `feast-core.logLevel` | Application logging level | `warn` -| `feast-core.logType` | Application logging type (`JSON` or `Console`) | `JSON` -| `feast-core.springConfigProfiles` | Map of profile name to file content for additional Spring profiles | `{}` -| `feast-core.springSecretProfiles` | Map of profile name to file content for additional Spring profiles. Use this instead of springConfigProfiles if the content contains secrets. 
| `{}` -| `feast-core.springConfigProfilesActive` | CSV of profiles to enable from `springConfigProfiles` | `""` -| `feast-core.springSecretProfilesActive` | CSV of profiles to enable from `springSecretProfiles` | `""` -| `feast-core.livenessProbe.enabled` | Flag to enable liveness probe | `true` -| `feast-core.livenessProbe.initialDelaySeconds` | Delay before liveness probe is initiated | `60` -| `feast-core.livenessProbe.periodSeconds` | How often to perform the probe | `10` -| `feast-core.livenessProbe.timeoutSeconds` | Timeout duration for the probe | `5` -| `feast-core.livenessProbe.successThreshold` | Minimum no of consecutive successes for the probe to be considered successful | `1` -| `feast-core.livenessProbe.failureThreshold` | Minimum no of consecutive failures for the probe to be considered failed | `5` -| `feast-core.readinessProbe.enabled` | Flag to enable readiness probe | `true` -| `feast-core.readinessProbe.initialDelaySeconds` | Delay before readiness probe is initiated | `30` -| `feast-core.readinessProbe.periodSeconds` | How often to perform the probe | `10` -| `feast-core.readinessProbe.timeoutSeconds` | Timeout duration for the probe | `10` -| `feast-core.readinessProbe.successThreshold` | Minimum no of consecutive successes for the probe to be considered successful | `1` -| `feast-core.service.type` | Kubernetes Service Type | `ClusterIP` -| `feast-core.http.port` | Kubernetes Service port for HTTP request| `80` -| `feast-core.http.targetPort` | Container port for HTTP request | `8080` -| `feast-core.grpc.port` | Kubernetes Service port for GRPC request| `6565` -| `feast-core.grpc.targetPort` | Container port for GRPC request| `6565` -| `feast-core.resources` | CPU and memory allocation for the pod | `{}` -| `feast-core.ingress` | See *Ingress Parameters* [below](#ingress-parameters) | `{}` -| `feast-serving-online.enabled` | Flag to install Feast Online Serving | `true` -| `feast-serving-online.redis.enabled` | Flag to install Redis in Feast Serving | `false` -| `feast-serving-online.redis.usePassword` | Flag to use password to access Redis | `false` -| `feast-serving-online.redis.cluster.enabled` | Flag to enable Redis cluster | `false` -| `feast-serving-online.core.enabled` | Flag for Feast Serving to use Feast Core in the same Helm release | `true` -| `feast-serving-online.replicaCount` | No of pods to create | `1` -| `feast-serving-online.image.repository` | Repository for Feast Serving Docker image | `gcr.io/kf-feast/feast-serving` -| `feast-serving-online.image.tag` | Tag for Feast Serving Docker image | `0.4.4` -| `feast-serving-online.image.pullPolicy` | Image pull policy for Feast Serving Docker image | `IfNotPresent` -| `feast-serving-online.prometheus.enabled` | Add annotations to enable Prometheus scraping | `true` -| `feast-serving-online.application.yaml` | Application configuration for Feast Serving | Refer to this [link](charts/feast-serving/values.yaml) -| `feast-serving-online.store.yaml` | Store configuration for Feast Serving | Refer to this [link](charts/feast-serving/values.yaml) -| `feast-serving-online.springConfigMountPath` | Directory to mount application.yaml and store.yaml | `/etc/feast/feast-serving` -| `feast-serving-online.gcpServiceAccount.useExistingSecret` | Flag to use existing secret for GCP service account | `false` -| `feast-serving-online.gcpServiceAccount.existingSecret.name` | Secret name for the service account | `feast-gcp-service-account` -| `feast-serving-online.gcpServiceAccount.existingSecret.key` | Secret key for the 
service account | `key.json` -| `feast-serving-online.gcpServiceAccount.mountPath` | Directory to mount the JSON key file | `/etc/gcloud/service-accounts` -| `feast-serving-online.gcpProjectId` | Project ID to set `GOOGLE_CLOUD_PROJECT` to change default project used by SDKs | `""` -| `feast-serving-online.jarPath` | Path to Jar file in the Docker image | `/opt/feast/feast-serving.jar` -| `feast-serving-online.jvmOptions` | Options for the JVM | `[]` -| `feast-serving-online.logLevel` | Application logging level | `warn` -| `feast-serving-online.logType` | Application logging type (`JSON` or `Console`) | `JSON` -| `feast-serving-online.springConfigProfiles` | Map of profile name to file content for additional Spring profiles | `{}` -| `feast-serving-online.springSecretProfiles` | Map of profile name to file content for additional Spring profiles. Use this instead of springConfigProfiles if the content contains secrets. | `{}` -| `feast-serving-online.springConfigProfilesActive` | CSV of profiles to enable from `springConfigProfiles` | `""` -| `feast-serving-online.springSecretProfilesActive` | CSV of profiles to enable from `springSecretProfiles` | `""` -| `feast-serving-online.livenessProbe.enabled` | Flag to enable liveness probe | `true` -| `feast-serving-online.livenessProbe.initialDelaySeconds` | Delay before liveness probe is initiated | `60` -| `feast-serving-online.livenessProbe.periodSeconds` | How often to perform the probe | `10` -| `feast-serving-online.livenessProbe.timeoutSeconds` | Timeout duration for the probe | `5` -| `feast-serving-online.livenessProbe.successThreshold` | Minimum no of consecutive successes for the probe to be considered successful | `1` -| `feast-serving-online.livenessProbe.failureThreshold` | Minimum no of consecutive failures for the probe to be considered failed | `5` -| `feast-serving-online.readinessProbe.enabled` | Flag to enable readiness probe | `true` -| `feast-serving-online.readinessProbe.initialDelaySeconds` | Delay before readiness probe is initiated | `30` -| `feast-serving-online.readinessProbe.periodSeconds` | How often to perform the probe | `10` -| `feast-serving-online.readinessProbe.timeoutSeconds` | Timeout duration for the probe | `10` -| `feast-serving-online.readinessProbe.successThreshold` | Minimum no of consecutive successes for the probe to be considered successful | `1` -| `feast-serving-online.service.type` | Kubernetes Service Type | `ClusterIP` -| `feast-serving-online.http.port` | Kubernetes Service port for HTTP request| `80` -| `feast-serving-online.http.targetPort` | Container port for HTTP request | `8080` -| `feast-serving-online.grpc.port` | Kubernetes Service port for GRPC request| `6566` -| `feast-serving-online.grpc.targetPort` | Container port for GRPC request| `6566` -| `feast-serving-online.resources` | CPU and memory allocation for the pod | `{}` -| `feast-serving-online.ingress` | See *Ingress Parameters* [below](#ingress-parameters) | `{}` -| `feast-serving-batch.enabled` | Flag to install Feast Batch Serving | `true` -| `feast-serving-batch.redis.enabled` | Flag to install Redis in Feast Serving | `false` -| `feast-serving-batch.redis.usePassword` | Flag to use password to access Redis | `false` -| `feast-serving-batch.redis.cluster.enabled` | Flag to enable Redis cluster | `false` -| `feast-serving-batch.core.enabled` | Flag for Feast Serving to use Feast Core in the same Helm release | `true` -| `feast-serving-batch.replicaCount` | No of pods to create | `1` -| `feast-serving-batch.image.repository` | 
Repository for Feast Serving Docker image | `gcr.io/kf-feast/feast-serving` -| `feast-serving-batch.image.tag` | Tag for Feast Serving Docker image | `0.4.4` -| `feast-serving-batch.image.pullPolicy` | Image pull policy for Feast Serving Docker image | `IfNotPresent` -| `feast-serving-batch.prometheus.enabled` | Add annotations to enable Prometheus scraping | `true` -| `feast-serving-batch.application.yaml` | Application configuration for Feast Serving | Refer to this [link](charts/feast-serving/values.yaml) -| `feast-serving-batch.store.yaml` | Store configuration for Feast Serving | Refer to this [link](charts/feast-serving/values.yaml) -| `feast-serving-batch.springConfigMountPath` | Directory to mount application.yaml and store.yaml | `/etc/feast/feast-serving` -| `feast-serving-batch.gcpServiceAccount.useExistingSecret` | Flag to use existing secret for GCP service account | `false` -| `feast-serving-batch.gcpServiceAccount.existingSecret.name` | Secret name for the service account | `feast-gcp-service-account` -| `feast-serving-batch.gcpServiceAccount.existingSecret.key` | Secret key for the service account | `key.json` -| `feast-serving-batch.gcpServiceAccount.mountPath` | Directory to mount the JSON key file | `/etc/gcloud/service-accounts` -| `feast-serving-batch.gcpProjectId` | Project ID to set `GOOGLE_CLOUD_PROJECT` to change default project used by SDKs | `""` -| `feast-serving-batch.jarPath` | Path to Jar file in the Docker image | `/opt/feast/feast-serving.jar` -| `feast-serving-batch.jvmOptions` | Options for the JVM | `[]` -| `feast-serving-batch.logLevel` | Application logging level | `warn` -| `feast-serving-batch.logType` | Application logging type (`JSON` or `Console`) | `JSON` -| `feast-serving-batch.springConfigProfiles` | Map of profile name to file content for additional Spring profiles | `{}` -| `feast-serving-batch.springSecretProfiles` | Map of profile name to file content for additional Spring profiles. Use this instead of springConfigProfiles if the content contains secrets. 
| `{}` -| `feast-serving-batch.springConfigProfilesActive` | CSV of profiles to enable from `springConfigProfiles` | `""` -| `feast-serving-batch.springSecretProfilesActive` | CSV of profiles to enable from `springSecretProfiles` | `""` -| `feast-serving-batch.livenessProbe.enabled` | Flag to enable liveness probe | `true` -| `feast-serving-batch.livenessProbe.initialDelaySeconds` | Delay before liveness probe is initiated | `60` -| `feast-serving-batch.livenessProbe.periodSeconds` | How often to perform the probe | `10` -| `feast-serving-batch.livenessProbe.timeoutSeconds` | Timeout duration for the probe | `5` -| `feast-serving-batch.livenessProbe.successThreshold` | Minimum no of consecutive successes for the probe to be considered successful | `1` -| `feast-serving-batch.livenessProbe.failureThreshold` | Minimum no of consecutive failures for the probe to be considered failed | `5` -| `feast-serving-batch.readinessProbe.enabled` | Flag to enable readiness probe | `true` -| `feast-serving-batch.readinessProbe.initialDelaySeconds` | Delay before readiness probe is initiated | `30` -| `feast-serving-batch.readinessProbe.periodSeconds` | How often to perform the probe | `10` -| `feast-serving-batch.readinessProbe.timeoutSeconds` | Timeout duration for the probe | `10` -| `feast-serving-batch.readinessProbe.successThreshold` | Minimum no of consecutive successes for the probe to be considered successful | `1` -| `feast-serving-batch.service.type` | Kubernetes Service Type | `ClusterIP` -| `feast-serving-batch.http.port` | Kubernetes Service port for HTTP request| `80` -| `feast-serving-batch.http.targetPort` | Container port for HTTP request | `8080` -| `feast-serving-batch.grpc.port` | Kubernetes Service port for GRPC request| `6566` -| `feast-serving-batch.grpc.targetPort` | Container port for GRPC request| `6566` -| `feast-serving-batch.resources` | CPU and memory allocation for the pod | `{}` -| `feast-serving-batch.ingress` | See *Ingress Parameters* [below](#ingress-parameters) | `{}` - -## Ingress Parameters - -The following table lists the configurable parameters of the ingress section for each Feast module. - -Note, there are two ingresses available for each module - `grpc` and `http`. - -| Parameter | Description | Default -| ----------------------------- | ----------- | ------- -| `ingress.grcp.enabled` | Enables an ingress (endpoint) for the gRPC server | `false` -| `ingress.grcp.*` | See below | -| `ingress.http.enabled` | Enables an ingress (endpoint) for the HTTP server | `false` -| `ingress.http.*` | See below | -| `ingress.*.class` | Value for `kubernetes.io/ingress.class` | `nginx` -| `ingress.*.hosts` | List of host-names for the ingress | `[]` -| `ingress.*.annotations` | Additional ingress annotations | `{}` -| `ingress.*.https.enabled` | Add a tls section to the ingress | `true` -| `ingress.*.https.secretNames` | Map of hostname to TLS secret name | `{}` If not specified, defaults to `domain-tld-tls` e.g. `feast.example.com` uses secret `example-com-tls` -| `ingress.*.auth.enabled` | Enable auth on the ingress (only applicable for `nginx` type | `false` -| `ingress.*.auth.signinHost` | External hostname of the OAuth2 proxy to use | First item in `ingress.hosts`, replacing the sub-domain with 'auth' e.g. `feast.example.com` uses `auth.example.com` -| `ingress.*.auth.authUrl` | Internal URI to internal auth endpoint | `http://auth-server.auth-ns.svc.cluster.local/auth` -| `ingress.*.whitelist` | Subnet masks to whitelist (i.e. 
value for `nginx.ingress.kubernetes.io/whitelist-source-range`) | `"""` - -To enable all the ingresses will a config like the following (while also adding the hosts etc): +Use the following Helm values to enable DataflowRunner (and Batch Serving), +replacing the `<*load_balancer_ip*>` tags with the IP addresses reserved above: ```yaml +# values-dataflow-runner.yaml feast-core: - ingress: - grpc: - enabled: true - http: - enabled: true -feast-serving-online: - ingress: - grpc: - enabled: true - http: - enabled: true -feast-serving-batch: - ingress: - grpc: - enabled: true - http: - enabled: true + gcpServiceAccount: + enabled: true + postgresql: + existingSecret: feast-postgresql + application-override.yaml: + feast: + stream: + options: + bootstrapServers: + jobs: + active_runner: dataflow + metrics: + host: + runners: + - name: dataflow + type: DataflowRunner + options: + project: + region: + zone: + tempLocation: + network: + subnetwork: + maxNumWorkers: 1 + autoscalingAlgorithm: THROUGHPUT_BASED + usePublicIps: false + workerMachineType: n1-standard-1 + deadLetterTableSpec: + +feast-online-serving: + application-override.yaml: + feast: + stores: + - name: online + type: REDIS + config: + host: + port: 6379 + subscriptions: + - name: "*" + project: "*" + version: "*" + +feast-batch-serving: + enabled: true + gcpServiceAccount: + enabled: true + application-override.yaml: + feast: + active_store: historical + stores: + - name: historical + type: BIGQUERY + config: + project_id: + dataset_id: + staging_location: gs:///feast-staging-location + initial_retry_delay_seconds: 3 + total_timeout_seconds: 21600 + subscriptions: + - name: "*" + project: "*" + version: "*" + +postgresql: + existingSecret: feast-postgresql + +kafka: + external: + enabled: true + type: LoadBalancer + annotations: + cloud.google.com/load-balancer-type: Internal + loadBalancerSourceRanges: + - 10.0.0.0/8 + - 172.16.0.0/12 + - 192.168.0.0/16 + firstListenerPort: 31090 + loadBalancerIP: + - + - + - + configurationOverrides: + "advertised.listeners": |- + EXTERNAL://${LOAD_BALANCER_IP}:31090 + "listener.security.protocol.map": |- + PLAINTEXT:PLAINTEXT,EXTERNAL:PLAINTEXT + "log.retention.hours": 1 + +redis: + master: + service: + type: LoadBalancer + loadBalancerIP: + annotations: + cloud.google.com/load-balancer-type: Internal + loadBalancerSourceRanges: + - 10.0.0.0/8 + - 172.16.0.0/12 + - 192.168.0.0/16 + +prometheus-statsd-exporter: + service: + type: LoadBalancer + annotations: + cloud.google.com/load-balancer-type: Internal + loadBalancerSourceRanges: + - 10.0.0.0/8 + - 172.16.0.0/12 + - 192.168.0.0/16 + loadBalancerIP: +``` + +```bash +# Install a new release +helm install --name myrelease -f values-dataflow-runner.yaml feast-charts/feast + +# Wait until all pods are created and running/completed (can take about 5m) +kubectl get pods + +# Test the installation +helm test myrelease ``` +If the tests are successful, Dataflow jobs should appear in the Google Cloud console +running feature ingestion: https://console.cloud.google.com/dataflow + +![Dataflow Jobs](files/img/dataflow-jobs.png) + +### Production configuration + +#### Resource requests + +The `resources` field in the deployment spec is left empty in the examples. In +production these should be set according to the load each service is expected +to handle and the service level objectives (SLO). Also, Feast Core and Feast Serving +are Java applications, and it is [good practice](https://stackoverflow.com/a/6916718/3949303) +to set the minimum and maximum heap.
Here is an example of reasonable values to set for Feast Serving: + +```yaml +feast-online-serving: + javaOpts: "-Xms2048m -Xmx2048m" + resources: + limits: + memory: "2048Mi" + requests: + memory: "2048Mi" + cpu: "1" +``` + +#### High availability + +The default Feast installation configures only a single Redis server +instance. If Redis goes down due to a network failure or an out-of-memory error, +Feast Serving will fail to respond to requests. Soon, Feast will support +highly available Redis via [Redis cluster](https://redis.io/topics/cluster-tutorial), +sentinel or additional proxies. + +### Documentation development + +This `README.md` is generated using [helm-docs](https://github.com/norwoodj/helm-docs/). +Please run `helm-docs` to regenerate the `README.md` every time `README.md.gotmpl` +or `values.yaml` is updated. diff --git a/infra/charts/feast/README.md.gotmpl b/infra/charts/feast/README.md.gotmpl new file mode 100644 index 0000000000..69d40fbb25 --- /dev/null +++ b/infra/charts/feast/README.md.gotmpl @@ -0,0 +1,354 @@ +{{ template "chart.header" . }} + +{{ template "chart.description" . }} {{ template "chart.versionLine" . }} + +## TL;DR; + +```bash +# Add Feast Helm chart +helm repo add feast-charts https://feast-charts.storage.googleapis.com +helm repo update + +# Create secret for Feast database, replace with the desired value +kubectl create secret generic feast-postgresql \ + --from-literal=postgresql-password= + +# Install Feast with Online Serving and Beam DirectRunner +helm install --name myrelease feast-charts/feast \ + --set feast-core.postgresql.existingSecret=feast-postgresql \ + --set postgresql.existingSecret=feast-postgresql +``` + +## Introduction +This chart installs Feast on a Kubernetes cluster using the [Helm](https://v2.helm.sh/docs/using_helm/#installing-helm) package manager. + +## Prerequisites +- Kubernetes 1.12+ +- Helm 2.15+ (not tested with Helm 3) +- Persistent Volume support on the underlying infrastructure + +{{ template "chart.requirementsSection" . }} + +{{ template "chart.valuesSection" . }} + +## Configuration and installation details + +The default configuration will install Feast with Online Serving. Ingestion +of features will use Beam [DirectRunner](https://beam.apache.org/documentation/runners/direct/) +that runs on the same container where Feast Core is running. + +```bash +# Create secret for Feast database, replace accordingly +kubectl create secret generic feast-postgresql \ + --from-literal=postgresql-password= + +# Install Feast with Online Serving and Beam DirectRunner +helm install --name myrelease feast-charts/feast \ + --set feast-core.postgresql.existingSecret=feast-postgresql \ + --set postgresql.existingSecret=feast-postgresql +``` + +In order to test that the installation is successful: +```bash +helm test myrelease + +# If the installation is successful, the following should be printed +RUNNING: myrelease-feast-online-serving-test +PASSED: myrelease-feast-online-serving-test +RUNNING: myrelease-grafana-test +PASSED: myrelease-grafana-test +RUNNING: myrelease-test-topic-create-consume-produce +PASSED: myrelease-test-topic-create-consume-produce + +# Once the test completes, to check the logs +kubectl logs myrelease-feast-online-serving-test +``` + +> The test pods can be safely deleted after the test finishes. +> Check the yaml files in `templates/tests/` folder to see the processes +> the test pods execute. + +### Feast metrics + +The default Feast installation includes Grafana, StatsD exporter and Prometheus.
Request +metrics from Feast Core and Feast Serving, as well as ingestion statistics from +Feast Ingestion, are accessible from Prometheus and the Grafana dashboard. The following +shows a quick example of how to access the metrics. + +``` +# Forwards local port 9090 to the Prometheus server pod +kubectl port-forward svc/myrelease-prometheus-server 9090:80 +``` + +Visit http://localhost:9090 to access the Prometheus server: + +![Prometheus Server](files/img/prometheus-server.png?raw=true) + +### Enable Batch Serving + +To install Feast Batch Serving for retrieval of historical features in offline +training, access to BigQuery is required. First, create a [service account](https://cloud.google.com/iam/docs/creating-managing-service-account-keys) key that +will provide the credentials to access BigQuery. Grant the service account `editor` +role so it has write permissions to BigQuery and Cloud Storage. + +> In production, it is advised to give only the required [permissions](foo-feast-batch-serving-test) +> to the service account, rather than the very permissive `editor` role. + +Create a Kubernetes secret for the service account JSON file: +```bash +# By default Feast expects the secret to be named "feast-gcp-service-account" +# and the JSON file to be named "credentials.json" +kubectl create secret generic feast-gcp-service-account --from-file=credentials.json +``` + +Create a new Cloud Storage bucket (if one does not exist) and make sure the service +account has write access to the bucket: +```bash +gsutil mb +``` + +Use the following Helm values to enable Batch Serving: +```yaml +# values-batch-serving.yaml +feast-core: + gcpServiceAccount: + enabled: true + postgresql: + existingSecret: feast-postgresql + +feast-batch-serving: + enabled: true + gcpServiceAccount: + enabled: true + application-override.yaml: + feast: + active_store: historical + stores: + - name: historical + type: BIGQUERY + config: + project_id: + dataset_id: + staging_location: gs:///feast-staging-location + initial_retry_delay_seconds: 3 + total_timeout_seconds: 21600 + subscriptions: + - name: "*" + project: "*" + version: "*" + +postgresql: + existingSecret: feast-postgresql +``` + +> To delete the previous release, run `helm delete --purge myrelease` +> Note this will not delete the persistent volume that has been claimed (PVC). +> In a test cluster, run `kubectl delete pvc --all` to delete all claimed PVCs. + +```bash +# Install a new release +helm install --name myrelease -f values-batch-serving.yaml feast-charts/feast + +# Wait until all pods are created and running/completed (can take about 5m) +kubectl get pods + +# Batch Serving is installed so `helm test` will also test for batch retrieval +helm test myrelease +``` + +### Use DataflowRunner for ingestion + +Apache Beam [DirectRunner](https://beam.apache.org/documentation/runners/direct/) +is not suitable for production use cases because it is not easy to scale the +number of workers and there is no convenient API to monitor and manage the +workers. Feast supports [DataflowRunner](https://beam.apache.org/documentation/runners/dataflow/), which is a managed service on Google Cloud. + +> Make sure the `feast-gcp-service-account` Kubernetes secret containing the +> service account has been created and the service account has permissions +> to manage Dataflow jobs. One possible way to set this up is sketched below.
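+ +As a rough sketch only — the project and service account names below are hypothetical placeholders, not values defined by this chart — the account, key, and secret might be prepared along these lines: + +```bash +# Create a dedicated service account (all names here are illustrative) +gcloud iam service-accounts create feast-ingestion --project my-gcp-project + +# Allow it to manage Dataflow jobs +gcloud projects add-iam-policy-binding my-gcp-project \ +  --member "serviceAccount:feast-ingestion@my-gcp-project.iam.gserviceaccount.com" \ +  --role roles/dataflow.admin + +# Download a JSON key and store it in the secret name Feast expects +gcloud iam service-accounts keys create credentials.json \ +  --iam-account feast-ingestion@my-gcp-project.iam.gserviceaccount.com +kubectl create secret generic feast-gcp-service-account \ +  --from-file=credentials.json +```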
+
+Use the following Helm values to enable Batch Serving:
+```yaml
+# values-batch-serving.yaml
+feast-core:
+  gcpServiceAccount:
+    enabled: true
+  postgresql:
+    existingSecret: feast-postgresql
+
+feast-batch-serving:
+  enabled: true
+  gcpServiceAccount:
+    enabled: true
+  application-override.yaml:
+    feast:
+      active_store: historical
+      stores:
+        - name: historical
+          type: BIGQUERY
+          config:
+            project_id: 
+            dataset_id: 
+            staging_location: gs:///feast-staging-location
+            initial_retry_delay_seconds: 3
+            total_timeout_seconds: 21600
+          subscriptions:
+            - name: "*"
+              project: "*"
+              version: "*"
+
+postgresql:
+  existingSecret: feast-postgresql
+```
+
+> To delete the previous release, run `helm delete --purge myrelease`.
+> Note that this will not delete the persistent volumes that have been claimed (PVCs).
+> In a test cluster, run `kubectl delete pvc --all` to delete all claimed PVCs.
+
+```bash
+# Install a new release
+helm install --name myrelease -f values-batch-serving.yaml feast-charts/feast
+
+# Wait until all pods are created and running/completed (can take about 5m)
+kubectl get pods
+
+# Batch Serving is installed so `helm test` will also test batch retrieval
+helm test myrelease
+```
+
+### Use DataflowRunner for ingestion
+
+Apache Beam [DirectRunner](https://beam.apache.org/documentation/runners/direct/)
+is not suitable for production use because it is not easy to scale the number
+of workers and there is no convenient API to monitor and manage them. Feast
+supports [DataflowRunner](https://beam.apache.org/documentation/runners/dataflow/),
+a managed service on Google Cloud.
+
+> Make sure the `feast-gcp-service-account` Kubernetes secret containing the
+> service account has been created and that the service account has permissions
+> to manage Dataflow jobs.
+
+Since Dataflow workers run outside the Kubernetes cluster and need to interact
+with the Kafka brokers, Redis stores and StatsD server installed in the cluster,
+these services must be exposed for access from outside the cluster by setting
+`service.type: LoadBalancer`.
+
+In a typical use case, 5 `LoadBalancer` (internal) IP addresses are required by
+Feast when running with `DataflowRunner`. In Google Cloud, these (internal) IP
+addresses should be reserved first:
+```bash
+# Check with your network configuration which IP addresses are available for use
+gcloud compute addresses create \
+  feast-kafka-1 feast-kafka-2 feast-kafka-3 feast-redis feast-statsd \
+  --region --subnet \
+  --addresses 10.128.0.11,10.128.0.12,10.128.0.13,10.128.0.14,10.128.0.15
+```
+
+Use the following Helm values to enable DataflowRunner (and Batch Serving),
+replacing the `<*load_balancer_ip*>` tags with the IP addresses reserved above:
+
+```yaml
+# values-dataflow-runner.yaml
+feast-core:
+  gcpServiceAccount:
+    enabled: true
+  postgresql:
+    existingSecret: feast-postgresql
+  application-override.yaml:
+    feast:
+      stream:
+        options:
+          bootstrapServers: 
+      jobs:
+        active_runner: dataflow
+        metrics:
+          host: 
+        runners:
+          - name: dataflow
+            type: DataflowRunner
+            options:
+              project: 
+              region: 
+              zone: 
+              tempLocation: 
+              network: 
+              subnetwork: 
+              maxNumWorkers: 1
+              autoscalingAlgorithm: THROUGHPUT_BASED
+              usePublicIps: false
+              workerMachineType: n1-standard-1
+              deadLetterTableSpec: 
+
+feast-online-serving:
+  application-override.yaml:
+    feast:
+      stores:
+        - name: online
+          type: REDIS
+          config:
+            host: 
+            port: 6379
+          subscriptions:
+            - name: "*"
+              project: "*"
+              version: "*"
+
+feast-batch-serving:
+  enabled: true
+  gcpServiceAccount:
+    enabled: true
+  application-override.yaml:
+    feast:
+      active_store: historical
+      stores:
+        - name: historical
+          type: BIGQUERY
+          config:
+            project_id: 
+            dataset_id: 
+            staging_location: gs:///feast-staging-location
+            initial_retry_delay_seconds: 3
+            total_timeout_seconds: 21600
+          subscriptions:
+            - name: "*"
+              project: "*"
+              version: "*"
+
+postgresql:
+  existingSecret: feast-postgresql
+
+kafka:
+  external:
+    enabled: true
+    type: LoadBalancer
+    annotations:
+      cloud.google.com/load-balancer-type: Internal
+    loadBalancerSourceRanges:
+      - 10.0.0.0/8
+      - 172.16.0.0/12
+      - 192.168.0.0/16
+    firstListenerPort: 31090
+    loadBalancerIP:
+      - 
+      - 
+      - 
+  configurationOverrides:
+    "advertised.listeners": |-
+      EXTERNAL://${LOAD_BALANCER_IP}:31090
+    "listener.security.protocol.map": |-
+      PLAINTEXT:PLAINTEXT,EXTERNAL:PLAINTEXT
+    "log.retention.hours": 1
+
+redis:
+  master:
+    service:
+      type: LoadBalancer
+      loadBalancerIP: 
+      annotations:
+        cloud.google.com/load-balancer-type: Internal
+      loadBalancerSourceRanges:
+        - 10.0.0.0/8
+        - 172.16.0.0/12
+        - 192.168.0.0/16
+
+prometheus-statsd-exporter:
+  service:
+    type: LoadBalancer
+    annotations:
+      cloud.google.com/load-balancer-type: Internal
+    loadBalancerSourceRanges:
+      - 10.0.0.0/8
+      - 172.16.0.0/12
+      - 192.168.0.0/16
+    loadBalancerIP: 
+```
+
+```bash
+# Install a new release
+helm install --name myrelease -f values-dataflow-runner.yaml feast-charts/feast
+
+# Wait until all pods are created and running/completed (can take about 5m)
+kubectl get pods
+
+# Test the installation
+helm test myrelease
+```
+
+If the tests are successful, Dataflow jobs running feature ingestion should
+appear in the Google Cloud console: https://console.cloud.google.com/dataflow
+
+![Dataflow Jobs](files/img/dataflow-jobs.png)
+
+### Production configuration
+
+#### Resource requests
+
+The `resources` field in the deployment spec is left empty in the examples. In
+production these should be set according to the load each service is expected
+to handle and the service level objectives (SLO). Feast Core and Feast Serving
+are also Java applications, and it is [good practice](https://stackoverflow.com/a/6916718/3949303)
+to set the minimum and maximum heap. Here is an example of a reasonable value
+to set for Feast Serving:
+
+```yaml
+feast-online-serving:
+  javaOpts: "-Xms2048m -Xmx2048m"
+  resources:
+    limits:
+      memory: "2048Mi"
+    requests:
+      memory: "2048Mi"
+      cpu: "1"
+```
+
+#### High availability
+
+The default Feast installation configures only a single Redis server
+instance. If Redis goes down, whether due to network failures or
+out-of-memory errors, Feast Serving will fail to respond to requests. Soon,
+Feast will support highly available Redis via [Redis Cluster](https://redis.io/topics/cluster-tutorial),
+Sentinel or additional proxies.
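+
+Until then, one option is to point the online store at an externally managed,
+highly available Redis rather than the bundled single-instance chart. A
+minimal sketch, assuming a reachable Redis endpoint at `redis.example.internal`
+(a placeholder) and mirroring the store configuration shown earlier:
+
+```yaml
+# values-external-redis.yaml (hypothetical file name)
+redis:
+  # Assumes the parent chart exposes this condition to skip the bundled Redis
+  enabled: false
+
+feast-online-serving:
+  application-override.yaml:
+    feast:
+      stores:
+        - name: online
+          type: REDIS
+          config:
+            host: redis.example.internal
+            port: 6379
+          subscriptions:
+            - name: "*"
+              project: "*"
+              version: "*"
+```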
+
+### Documentation development
+
+This `README.md` is generated using [helm-docs](https://github.com/norwoodj/helm-docs/).
+Please run `helm-docs` to regenerate the `README.md` every time `README.md.gotmpl`
+or `values.yaml` is updated.
diff --git a/infra/charts/feast/charts/feast-core/.helmignore b/infra/charts/feast/charts/feast-core/.helmignore
deleted file mode 100644
index 50af031725..0000000000
--- a/infra/charts/feast/charts/feast-core/.helmignore
+++ /dev/null
@@ -1,22 +0,0 @@
-# Patterns to ignore when building packages.
-# This supports shell glob matching, relative path matching, and
-# negation (prefixed with !). Only one pattern per line.
-.DS_Store
-# Common VCS dirs
-.git/
-.gitignore
-.bzr/
-.bzrignore
-.hg/
-.hgignore
-.svn/
-# Common backup files
-*.swp
-*.bak
-*.tmp
-*~
-# Various IDEs
-.project
-.idea/
-*.tmproj
-.vscode/
diff --git a/infra/charts/feast/charts/feast-core/Chart.yaml b/infra/charts/feast/charts/feast-core/Chart.yaml
index 86d0699b9a..5b832943cf 100644
--- a/infra/charts/feast/charts/feast-core/Chart.yaml
+++ b/infra/charts/feast/charts/feast-core/Chart.yaml
@@ -1,4 +1,4 @@
 apiVersion: v1
-description: A Helm chart for core component of Feast
+description: Feast Core registers feature specifications and manages ingestion jobs.
 name: feast-core
-version: 0.4.4
+version: 0.5.0-alpha.1
diff --git a/infra/charts/feast/charts/feast-core/README.md b/infra/charts/feast/charts/feast-core/README.md
new file mode 100644
index 0000000000..40d6bd9112
--- /dev/null
+++ b/infra/charts/feast/charts/feast-core/README.md
@@ -0,0 +1,70 @@
+feast-core
+==========
+Feast Core registers feature specifications and manages ingestion jobs.
+
+Current chart version is `0.5.0-alpha.1`
+
+
+
+
+
+## Chart Values
+
+| Key | Type | Default | Description |
+|-----|------|---------|-------------|
+| "application-generated.yaml".enabled | bool | `true` | Flag to include Helm generated configuration for Feast database URL, Kafka bootstrap servers and jobs metrics host. This is useful for deployment that uses default configuration for Kafka, Postgres and StatsD exporter. Please set `application-override.yaml` to override this configuration. |
+| "application-override.yaml" | object | `{"enabled":true}` | Configuration to override the default [application.yaml](https://github.com/feast-dev/feast/blob/master/core/src/main/resources/application.yml). Will be created as a ConfigMap. 
`application-override.yaml` has a higher precedence than `application-secret.yaml` | +| "application-secret.yaml" | object | `{"enabled":true}` | Configuration to override the default [application.yaml](https://github.com/feast-dev/feast/blob/master/core/src/main/resources/application.yml). Will be created as a Secret. `application-override.yaml` has a higher precedence than `application-secret.yaml`. It is recommended to either set `application-override.yaml` or `application-secret.yaml` only to simplify config management. | +| "application.yaml".enabled | bool | `true` | Flag to include the default [configuration](https://github.com/feast-dev/feast/blob/master/core/src/main/resources/application.yml). Please set `application-override.yaml` to override this configuration. | +| envOverrides | object | `{}` | Extra environment variables to set | +| gcpProjectId | string | `""` | Project ID to use when using Google Cloud services such as BigQuery, Cloud Storage and Dataflow | +| gcpServiceAccount.enabled | bool | `false` | Flag to use [service account](https://cloud.google.com/iam/docs/creating-managing-service-account-keys) JSON key | +| gcpServiceAccount.existingSecret.key | string | `"credentials.json"` | Key in the secret data (file name of the service account) | +| gcpServiceAccount.existingSecret.name | string | `"feast-gcp-service-account"` | Name of the existing secret containing the service account | +| image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | +| image.repository | string | `"gcr.io/kf-feast/feast-core"` | Docker image repository | +| image.tag | string | `"dev"` | Image tag | +| ingress.grpc.annotations | object | `{}` | Extra annotations for the ingress | +| ingress.grpc.auth.enabled | bool | `false` | Flag to enable auth | +| ingress.grpc.class | string | `"nginx"` | Which ingress controller to use | +| ingress.grpc.enabled | bool | `false` | Flag to create an ingress resource for the service | +| ingress.grpc.hosts | list | `[]` | List of hostnames to match when routing requests | +| ingress.grpc.https.enabled | bool | `true` | Flag to enable HTTPS | +| ingress.grpc.https.secretNames | object | `{}` | Map of hostname to TLS secret name | +| ingress.grpc.whitelist | string | `""` | Allowed client IP source ranges | +| ingress.http.annotations | object | `{}` | Extra annotations for the ingress | +| ingress.http.auth.authUrl | string | `"http://auth-server.auth-ns.svc.cluster.local/auth"` | URL to an existing authentication service | +| ingress.http.auth.enabled | bool | `false` | Flag to enable auth | +| ingress.http.class | string | `"nginx"` | Which ingress controller to use | +| ingress.http.enabled | bool | `false` | Flag to create an ingress resource for the service | +| ingress.http.hosts | list | `[]` | List of hostnames to match when routing requests | +| ingress.http.https.enabled | bool | `true` | Flag to enable HTTPS | +| ingress.http.https.secretNames | object | `{}` | Map of hostname to TLS secret name | +| ingress.http.whitelist | string | `""` | Allowed client IP source ranges | +| javaOpts | string | `nil` | [JVM options](https://docs.oracle.com/cd/E22289_01/html/821-1274/configuring-the-default-jvm-and-java-arguments.html). For better performance, it is advised to set the min and max heap:
`-Xms2048m -Xmx2048m` | +| livenessProbe.enabled | bool | `true` | Flag to enabled the probe | +| livenessProbe.failureThreshold | int | `5` | Min consecutive failures for the probe to be considered failed | +| livenessProbe.initialDelaySeconds | int | `60` | Delay before the probe is initiated | +| livenessProbe.periodSeconds | int | `10` | How often to perform the probe | +| livenessProbe.successThreshold | int | `1` | Min consecutive success for the probe to be considered successful | +| livenessProbe.timeoutSeconds | int | `5` | When the probe times out | +| logLevel | string | `"WARN"` | Default log level, use either one of `DEBUG`, `INFO`, `WARN` or `ERROR` | +| logType | string | `"Console"` | Log format, either `JSON` or `Console` | +| nodeSelector | object | `{}` | Node labels for pod assignment | +| postgresql.existingSecret | string | `""` | Existing secret to use for authenticating to Postgres | +| prometheus.enabled | bool | `true` | Flag to enable scraping of Feast Core metrics | +| readinessProbe.enabled | bool | `true` | Flag to enabled the probe | +| readinessProbe.failureThreshold | int | `5` | Min consecutive failures for the probe to be considered failed | +| readinessProbe.initialDelaySeconds | int | `20` | Delay before the probe is initiated | +| readinessProbe.periodSeconds | int | `10` | How often to perform the probe | +| readinessProbe.successThreshold | int | `1` | Min consecutive success for the probe to be considered successful | +| readinessProbe.timeoutSeconds | int | `10` | When the probe times out | +| replicaCount | int | `1` | Number of pods that will be created | +| resources | object | `{}` | CPU/memory [resource requests/limit](https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/#resource-requests-and-limits-of-pod-and-container) | +| service.grpc.nodePort | string | `nil` | Port number that each cluster node will listen to | +| service.grpc.port | int | `6565` | Service port for GRPC requests | +| service.grpc.targetPort | int | `6565` | Container port serving GRPC requests | +| service.http.nodePort | string | `nil` | Port number that each cluster node will listen to | +| service.http.port | int | `80` | Service port for HTTP requests | +| service.http.targetPort | int | `8080` | Container port serving HTTP requests and Prometheus metrics | +| service.type | string | `"ClusterIP"` | Kubernetes service type | diff --git a/infra/charts/feast/charts/feast-core/charts/kafka-0.20.1.tgz b/infra/charts/feast/charts/feast-core/charts/kafka-0.20.1.tgz deleted file mode 100644 index 76a2247577..0000000000 Binary files a/infra/charts/feast/charts/feast-core/charts/kafka-0.20.1.tgz and /dev/null differ diff --git a/infra/charts/feast/charts/feast-core/charts/postgresql-6.5.5.tgz b/infra/charts/feast/charts/feast-core/charts/postgresql-6.5.5.tgz deleted file mode 100644 index f61421514d..0000000000 Binary files a/infra/charts/feast/charts/feast-core/charts/postgresql-6.5.5.tgz and /dev/null differ diff --git a/infra/charts/feast/charts/feast-core/requirements.yaml b/infra/charts/feast/charts/feast-core/requirements.yaml deleted file mode 100644 index ef1e39a7d0..0000000000 --- a/infra/charts/feast/charts/feast-core/requirements.yaml +++ /dev/null @@ -1,15 +0,0 @@ -dependencies: -- name: postgresql - version: 6.5.5 - repository: "@stable" - condition: postgresql.enabled -- name: kafka - version: 0.20.1 - repository: "@incubator" - condition: kafka.enabled -- name: common - version: 0.0.5 - repository: "@incubator" -- name: 
prometheus-statsd-exporter - version: 0.1.2 - condition: prometheus-statsd-exporter.enabled \ No newline at end of file diff --git a/infra/charts/feast/charts/feast-core/templates/configmap.yaml b/infra/charts/feast/charts/feast-core/templates/configmap.yaml index da45cad5bd..b48e15cc98 100644 --- a/infra/charts/feast/charts/feast-core/templates/configmap.yaml +++ b/infra/charts/feast/charts/feast-core/templates/configmap.yaml @@ -10,44 +10,29 @@ metadata: release: {{ .Release.Name }} heritage: {{ .Release.Service }} data: - application.yaml: | -{{- toYaml (index .Values "application.yaml") | nindent 4 }} - -{{- if .Values.postgresql.enabled }} - application-bundled-postgresql.yaml: | + application-generated.yaml: | +{{- if index .Values "application-generated.yaml" "enabled" }} spring: datasource: - url: {{ printf "jdbc:postgresql://%s:%s/%s" (printf "%s-postgresql" .Release.Name) (.Values.postgresql.service.port | toString) (.Values.postgresql.postgresqlDatabase) }} - driverClassName: org.postgresql.Driver -{{- end }} - -{{ if .Values.kafka.enabled }} - {{- $topic := index .Values.kafka.topics 0 }} - application-bundled-kafka.yaml: | + url: jdbc:postgresql://{{ .Release.Name }}-postgresql:5432/postgres feast: stream: type: kafka - options: - topic: {{ $topic.name | quote }} - replicationFactor: {{ $topic.replicationFactor }} - partitions: {{ $topic.partitions }} - {{- if not .Values.kafka.external.enabled }} - bootstrapServers: {{ printf "%s:9092" (printf "%s-kafka" .Release.Name) }} - {{- end }} -{{- end }} - -{{- if (index .Values "prometheus-statsd-exporter" "enabled" )}} - application-bundled-statsd.yaml: | - feast: + options: + bootstrapServers: {{ .Release.Name }}-kafka:9092 + topic: feast jobs: metrics: - enabled: true + enabled: true type: statsd - host: prometheus-statsd-exporter + host: {{ .Release.Name }}-prometheus-statsd-exporter-udp port: 9125 -{{- end }} -{{- range $name, $content := .Values.springConfigProfiles }} - application-{{ $name }}.yaml: | -{{- toYaml $content | nindent 4 }} + server: + port: {{ .Values.service.http.targetPort }} {{- end }} + + application-override.yaml: | +{{- if index .Values "application-override.yaml" "enabled" }} +{{- toYaml (index .Values "application-override.yaml") | nindent 4 }} +{{- end }} \ No newline at end of file diff --git a/infra/charts/feast/charts/feast-core/templates/deployment.yaml b/infra/charts/feast/charts/feast-core/templates/deployment.yaml index daa69f4782..6b75b5524c 100644 --- a/infra/charts/feast/charts/feast-core/templates/deployment.yaml +++ b/infra/charts/feast/charts/feast-core/templates/deployment.yaml @@ -19,14 +19,11 @@ spec: template: metadata: annotations: - checksum/config: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }} - {{- if .Values.springSecretProfiles }} + checksum/configmap: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }} checksum/secret: {{ include (print $.Template.BasePath "/secret.yaml") . | sha256sum }} - {{- end }} {{- if .Values.prometheus.enabled }} - {{ $config := index .Values "application.yaml" }} prometheus.io/path: /metrics - prometheus.io/port: "{{ $config.server.port }}" + prometheus.io/port: "{{ .Values.service.http.targetPort }}" prometheus.io/scrape: "true" {{- end }} labels: @@ -41,31 +38,31 @@ spec: volumes: - name: {{ template "feast-core.fullname" . }}-config - projected: - sources: - - configMap: - name: {{ template "feast-core.fullname" . 
}} - {{- if .Values.springSecretProfiles }} - - secret: - name: {{ template "feast-core.fullname" . }} - {{- end }} - {{- if .Values.gcpServiceAccount.useExistingSecret }} - - name: {{ template "feast-core.fullname" . }}-gcpserviceaccount + configMap: + name: {{ template "feast-core.fullname" . }} + - name: {{ template "feast-core.fullname" . }}-secret + secret: + secretName: {{ template "feast-core.fullname" . }} + {{- if .Values.gcpServiceAccount.enabled }} + - name: {{ template "feast-core.fullname" . }}-gcp-service-account secret: secretName: {{ .Values.gcpServiceAccount.existingSecret.name }} {{- end }} containers: - name: {{ .Chart.Name }} - image: '{{ .Values.image.repository }}:{{ required "No .image.tag found. This must be provided as input." .Values.image.tag }}' + image: {{ .Values.image.repository }}:{{ .Values.image.tag }} imagePullPolicy: {{ .Values.image.pullPolicy }} volumeMounts: - name: {{ template "feast-core.fullname" . }}-config - mountPath: "{{ .Values.springConfigMountPath }}" - {{- if .Values.gcpServiceAccount.useExistingSecret }} - - name: {{ template "feast-core.fullname" . }}-gcpserviceaccount - mountPath: {{ .Values.gcpServiceAccount.mountPath }} + mountPath: /etc/feast + - name: {{ template "feast-core.fullname" . }}-secret + mountPath: /etc/secrets/feast + readOnly: true + {{- if .Values.gcpServiceAccount.enabled }} + - name: {{ template "feast-core.fullname" . }}-gcp-service-account + mountPath: /etc/secrets/google readOnly: true {{- end }} @@ -75,40 +72,51 @@ spec: - name: LOG_LEVEL value: {{ .Values.logLevel | quote }} - {{- if .Values.postgresql.enabled }} - - name: SPRING_DATASOURCE_USERNAME - value: {{ .Values.postgresql.postgresqlUsername | quote }} + {{- if .Values.postgresql.existingSecret }} - name: SPRING_DATASOURCE_PASSWORD - value: {{ .Values.postgresql.postgresqlPassword | quote }} + valueFrom: + secretKeyRef: + name: {{ .Values.postgresql.existingSecret }} + key: postgresql-password {{- end }} - {{- if .Values.gcpServiceAccount.useExistingSecret }} + {{- if .Values.gcpServiceAccount.enabled }} - name: GOOGLE_APPLICATION_CREDENTIALS - value: {{ .Values.gcpServiceAccount.mountPath }}/{{ .Values.gcpServiceAccount.existingSecret.key }} + value: /etc/secrets/google/{{ .Values.gcpServiceAccount.existingSecret.key }} {{- end }} + {{- if .Values.gcpProjectId }} - name: GOOGLE_CLOUD_PROJECT value: {{ .Values.gcpProjectId | quote }} {{- end }} - command: - - java - {{- range .Values.jvmOptions }} - - {{ . | quote }} + {{- if .Values.javaOpts }} + - name: JAVA_TOOL_OPTIONS + value: {{ .Values.javaOpts }} {{- end }} - - -jar - - {{ .Values.jarPath | quote }} - - "--spring.config.location=file:{{ .Values.springConfigMountPath }}/" - {{- $combinedProfiles := printf "%s,%s" .Values.springConfigProfilesActive .Values.springSecretProfilesActive -}} - {{- $profilesArray := splitList "," $combinedProfiles -}} - {{- $profilesArray = append $profilesArray (.Values.postgresql.enabled | ternary "bundled-postgresql" "") -}} - {{- $profilesArray = append $profilesArray (.Values.kafka.enabled | ternary "bundled-kafka" "") -}} - {{- $profilesArray = append $profilesArray (index .Values "prometheus-statsd-exporter" "enabled" | ternary "bundled-statsd" "") -}} - {{- $profilesArray = compact $profilesArray -}} - {{- if $profilesArray }} - - "--spring.profiles.active={{ join "," $profilesArray }}" + + {{- range $key, $value := .Values.envOverrides }} + - name: {{ printf "%s" $key | replace "." 
"_" | upper | quote }} + value: {{ $value | quote }} {{- end }} + command: + - java + - -jar + - /opt/feast/feast-core.jar + - --spring.config.location= + {{- if index .Values "application.yaml" "enabled" -}} + classpath:/application.yml + {{- end }} + {{- if index .Values "application-generated.yaml" "enabled" -}} + ,file:/etc/feast/application-generated.yaml + {{- end }} + {{- if index .Values "application-secret.yaml" "enabled" -}} + ,file:/etc/secrets/feast/application-secret.yaml + {{- end }} + {{- if index .Values "application-override.yaml" "enabled" -}} + ,file:/etc/feast/application-override.yaml + {{- end }} ports: - name: http containerPort: {{ .Values.service.http.targetPort }} @@ -117,9 +125,8 @@ spec: {{- if .Values.livenessProbe.enabled }} livenessProbe: - httpGet: - path: /healthz - port: {{ .Values.service.http.targetPort }} + exec: + command: ["/usr/bin/grpc-health-probe", "-addr=:{{ .Values.service.grpc.targetPort }}"] initialDelaySeconds: {{ .Values.livenessProbe.initialDelaySeconds }} periodSeconds: {{ .Values.livenessProbe.periodSeconds }} successThreshold: {{ .Values.livenessProbe.successThreshold }} @@ -129,9 +136,8 @@ spec: {{- if .Values.readinessProbe.enabled }} readinessProbe: - httpGet: - path: /healthz - port: {{ .Values.service.http.targetPort }} + exec: + command: ["/usr/bin/grpc-health-probe", "-addr=:{{ .Values.service.grpc.targetPort }}"] initialDelaySeconds: {{ .Values.readinessProbe.initialDelaySeconds }} periodSeconds: {{ .Values.readinessProbe.periodSeconds }} successThreshold: {{ .Values.readinessProbe.successThreshold }} diff --git a/infra/charts/feast/charts/feast-core/templates/secret.yaml b/infra/charts/feast/charts/feast-core/templates/secret.yaml index dc4883dd4d..dd33e2dd48 100644 --- a/infra/charts/feast/charts/feast-core/templates/secret.yaml +++ b/infra/charts/feast/charts/feast-core/templates/secret.yaml @@ -1,4 +1,3 @@ -{{- if .Values.springSecretProfiles -}} apiVersion: v1 kind: Secret metadata: @@ -11,9 +10,6 @@ metadata: release: {{ .Release.Name }} heritage: {{ .Release.Service }} type: Opaque -data: -{{- range $name, $content := .Values.springSecretProfiles }} - application-{{ $name }}.yaml: | -{{- toYaml $content | b64enc | nindent 4 }} -{{- end }} -{{- end -}} \ No newline at end of file +stringData: + application-secret.yaml: | +{{- toYaml (index .Values "application-secret.yaml") | nindent 4 }} diff --git a/infra/charts/feast/charts/feast-core/values.yaml b/infra/charts/feast/charts/feast-core/values.yaml index 2224a591a9..a0a7823e52 100644 --- a/infra/charts/feast/charts/feast-core/values.yaml +++ b/infra/charts/feast/charts/feast-core/values.yaml @@ -1,248 +1,153 @@ -# ============================================================ -# Bundled PostgreSQL -# ============================================================ - -# Refer to https://github.com/helm/charts/tree/c42002a21abf8eff839ff1d2382152bde2bbe596/stable/postgresql -# for additional configuration. -postgresql: - # enabled specifies whether Postgresql should be installed as part of Feast Core. - # - # Feast Core requires a database to store data such as the created FeatureSets - # and job statuses. If enabled, the database and service port specified below - # will override "spring.datasource.url" value in application.yaml. The - # username and password will also be set as environment variables that will - # override "spring.datasource.username/password" in application.yaml. - enabled: true - # postgresqlDatabase is the name of the database used by Feast Core. 
- postgresqlDatabase: feast - # postgresqlUsername is the username to authenticate to the database. - postgresqlUsername: postgres - # postgresqlPassword is the password to authenticate to the database. - postgresqlPassword: password - service: - # port is the TCP port that Postgresql will listen to - port: 5432 - -# ============================================================ -# Bundled Kafka -# ============================================================ - -# Refer to https://github.com/helm/charts/tree/c42002a21abf8eff839ff1d2382152bde2bbe596/incubator/kafka -# for additional configuration. -kafka: - # enabled specifies whether Kafka should be installed as part of Feast Core. - # - # Feast Core requires a Kafka instance to be set as the default source for - # FeatureRows. If enabled, "feast.stream" option in application.yaml will - # be overridden by this installed Kafka configuration. - enabled: true - topics: - # topic that will be used as default in Feast Core for the default Kafka source. - - name: feast - replicationFactor: 1 - partitions: 1 - - -# ============================================================ -# Bundled Prometheus StatsD Exporter -# ============================================================ - -prometheus-statsd-exporter: - enabled: false - -# ============================================================ -# Feast Core -# ============================================================ - -# replicaCount is the number of pods that will be created. +# replicaCount -- Number of pods that will be created replicaCount: 1 -# image configures the Docker image for Feast Core image: + # image.repository -- Docker image repository repository: gcr.io/kf-feast/feast-core + # image.tag -- Image tag + tag: dev + # image.pullPolicy -- Image pull policy pullPolicy: IfNotPresent -# Add prometheus scraping annotations to the Pod metadata. -# If enabled, you must also ensure server.port is specified under application.yaml -prometheus: - enabled: false - -# application.yaml is the main configuration for Feast Core application. -# -# Feast Core is a Spring Boot app which uses this yaml configuration file. -# Refer to https://github.com/gojek/feast/blob/79eb4ab5fa3d37102c1dca9968162a98690526ba/core/src/main/resources/application.yml -# for a complete list and description of the configuration. -# -# Note that some properties defined in application.yaml may be overriden by -# Helm under certain conditions. For example, if postgresql and kafka dependencies -# are enabled. 
application.yaml: - grpc: - port: 6565 - enable-reflection: true - feast: - jobs: - runner: DirectRunner - options: {} - updates: - timeoutSeconds: 240 - metrics: - enabled: false - type: statsd - host: localhost - port: 9125 - stream: - type: kafka - options: - topic: TOPIC - bootstrapServers: HOST:PORT - replicationFactor: 1 - partitions: 1 - spring: - jpa: - properties.hibernate.format_sql: true - properties.hibernate.event.merge.entity_copy_observer: allow - hibernate.naming.physical-strategy=org.hibernate.boot.model.naming: PhysicalNamingStrategyStandardImpl - hibernate.ddl-auto: update - datasource: - driverClassName: org.postgresql.Driver - url: jdbc:postgresql://HOST:PORT/DATABASE - username: USERNAME - password: PASSWORD - management: - metrics: - export: - simple: - enabled: false - statsd: - enabled: false - host: localhost - port: 8125 - -springConfigProfiles: {} -springSecretProfiles: {} -# db: | -# spring: -# datasource: -# driverClassName: org.postgresql.Driver -# url: jdbc:postgresql://${DB_HOST:127.0.0.1}:${DB_PORT:5432}/${DB_DATABASE:postgres} -springConfigProfilesActive: "" -springSecretProfilesActive: "" -# springConfigMountPath is the directory path where application.yaml will be -# mounted in the container. -springConfigMountPath: /etc/feast/feast-core - -# gcpServiceAccount is the service account that Feast Core will use. + # "application.yaml".enabled -- Flag to include the default [configuration](https://github.com/feast-dev/feast/blob/master/core/src/main/resources/application.yml). Please set `application-override.yaml` to override this configuration. + enabled: true + +application-generated.yaml: + # "application-generated.yaml".enabled -- Flag to include Helm generated configuration for http port, Feast database URL, Kafka bootstrap servers and jobs metrics host. This is useful for deployment that uses default configuration for Kafka, Postgres and StatsD exporter. Please set `application-override.yaml` to override this configuration. + enabled: true + +# "application-secret.yaml" -- Configuration to override the default [application.yaml](https://github.com/feast-dev/feast/blob/master/core/src/main/resources/application.yml). Will be created as a Secret. `application-override.yaml` has a higher precedence than `application-secret.yaml`. It is recommended to either set `application-override.yaml` or `application-secret.yaml` only to simplify config management. +application-secret.yaml: + enabled: true + +# "application-override.yaml" -- Configuration to override the default [application.yaml](https://github.com/feast-dev/feast/blob/master/core/src/main/resources/application.yml). Will be created as a ConfigMap. `application-override.yaml` has a higher precedence than `application-secret.yaml` +application-override.yaml: + enabled: true + gcpServiceAccount: - # useExistingSecret specifies Feast to use an existing secret containing Google - # Cloud service account JSON key file. - useExistingSecret: false + # gcpServiceAccount.enabled -- Flag to use [service account](https://cloud.google.com/iam/docs/creating-managing-service-account-keys) JSON key + enabled: false existingSecret: - # name is the secret name of the existing secret for the service account. + # gcpServiceAccount.existingSecret.name -- Name of the existing secret containing the service account name: feast-gcp-service-account - # key is the secret key of the existing secret for the service account. - # key is normally derived from the file name of the JSON key file. 
- key: key.json - # mountPath is the directory path where the JSON key file will be mounted. - # the value of "existingSecret.key" is file name of the service account file. - mountPath: /etc/gcloud/service-accounts - -# Project ID picked up by the Cloud SDK (e.g. BigQuery run against this project) + # gcpServiceAccount.existingSecret.key -- Key in the secret data (file name of the service account) + key: credentials.json + +postgresql: + # postgresql.existingSecret -- Existing secret to use for authenticating to Postgres + existingSecret: "" + +# gcpProjectId -- Project ID to use when using Google Cloud services such as BigQuery, Cloud Storage and Dataflow gcpProjectId: "" -# Path to Jar file in the Docker image. -# If you are using gcr.io/kf-feast/feast-core this should not need to be changed -jarPath: /opt/feast/feast-core.jar - -# jvmOptions are options that will be passed to the Java Virtual Machine (JVM) -# running Feast Core. -# -# For example, it is good practice to set min and max heap size in JVM. -# https://stackoverflow.com/questions/6902135/side-effect-for-increasing-maxpermsize-and-max-heap-size -# -# Refer to https://docs.oracle.com/cd/E22289_01/html/821-1274/configuring-the-default-jvm-and-java-arguments.html -# to see other JVM options that can be set. -# -jvmOptions: [] -# - -Xms1024m -# - -Xmx1024m - -logType: JSON -logLevel: warn +# javaOpts -- [JVM options](https://docs.oracle.com/cd/E22289_01/html/821-1274/configuring-the-default-jvm-and-java-arguments.html). For better performance, it is advised to set the min and max heap:
`-Xms2048m -Xmx2048m` +javaOpts: -livenessProbe: +# logType -- Log format, either `JSON` or `Console` +logType: Console +# logLevel -- Default log level, use either one of `DEBUG`, `INFO`, `WARN` or `ERROR` +logLevel: WARN + +prometheus: + # prometheus.enabled -- Flag to enable scraping of Feast Core metrics enabled: true + +# By default we disable the liveness probe, since if the DB fails restarting core will not result +# in application healing. +livenessProbe: + # livenessProbe.enabled -- Flag to enabled the probe + enabled: false + # livenessProbe.initialDelaySeconds -- Delay before the probe is initiated initialDelaySeconds: 60 + # livenessProbe.periodSeconds -- How often to perform the probe periodSeconds: 10 + # livenessProbe.timeoutSeconds -- When the probe times out timeoutSeconds: 5 + # livenessProbe.successThreshold -- Min consecutive success for the probe to be considered successful successThreshold: 1 + # livenessProbe.failureThreshold -- Min consecutive failures for the probe to be considered failed failureThreshold: 5 readinessProbe: + # readinessProbe.enabled -- Flag to enabled the probe enabled: true - initialDelaySeconds: 15 + # readinessProbe.initialDelaySeconds -- Delay before the probe is initiated + initialDelaySeconds: 20 + # readinessProbe.periodSeconds -- How often to perform the probe periodSeconds: 10 + # readinessProbe.timeoutSeconds -- When the probe times out timeoutSeconds: 10 + # readinessProbe.successThreshold -- Min consecutive success for the probe to be considered successful successThreshold: 1 + # readinessProbe.failureThreshold -- Min consecutive failures for the probe to be considered failed failureThreshold: 5 service: + # service.type -- Kubernetes service type type: ClusterIP http: + # service.http.port -- Service port for HTTP requests port: 80 + # service.http.targetPort -- Container port serving HTTP requests and Prometheus metrics targetPort: 8080 - # nodePort is the port number that each cluster node will listen to - # https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport - # - # nodePort: + # service.http.nodePort -- Port number that each cluster node will listen to + nodePort: grpc: + # service.grpc.port -- Service port for GRPC requests port: 6565 + # service.grpc.targetPort -- Container port serving GRPC requests targetPort: 6565 - # nodePort is the port number that each cluster node will listen to - # https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport - # - # nodePort: + # service.grpc.nodePort -- Port number that each cluster node will listen to + nodePort: ingress: grpc: + # ingress.grpc.enabled -- Flag to create an ingress resource for the service enabled: false + # ingress.grpc.class -- Which ingress controller to use class: nginx + # ingress.grpc.hosts -- List of hostnames to match when routing requests hosts: [] + # ingress.grpc.annotations -- Extra annotations for the ingress annotations: {} https: + # ingress.grpc.https.enabled -- Flag to enable HTTPS enabled: true + # ingress.grpc.https.secretNames -- Map of hostname to TLS secret name secretNames: {} + # ingress.grpc.whitelist -- Allowed client IP source ranges whitelist: "" auth: + # ingress.grpc.auth.enabled -- Flag to enable auth enabled: false http: + # ingress.http.enabled -- Flag to create an ingress resource for the service enabled: false + # ingress.http.class -- Which ingress controller to use class: nginx + # ingress.http.hosts -- List of hostnames to match when routing requests hosts: [] + # ingress.http.annotations 
-- Extra annotations for the ingress annotations: {} https: + # ingress.http.https.enabled -- Flag to enable HTTPS enabled: true + # ingress.http.https.secretNames -- Map of hostname to TLS secret name secretNames: {} + # ingress.http.whitelist -- Allowed client IP source ranges whitelist: "" auth: + # ingress.http.auth.enabled -- Flag to enable auth enabled: false + # ingress.http.auth.authUrl -- URL to an existing authentication service authUrl: http://auth-server.auth-ns.svc.cluster.local/auth +# resources -- CPU/memory [resource requests/limit](https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/#resource-requests-and-limits-of-pod-and-container) resources: {} - # We usually recommend not to specify default resources and to leave this as a conscious - # choice for the user. This also increases chances charts run on environments with little - # resources, such as Minikube. If you do want to specify resources, uncomment the following - # lines, adjust them as necessary, and remove the curly braces after 'resources:'. - # - # limits: - # cpu: 100m - # memory: 128Mi - # requests: - # cpu: 100m - # memory: 128Mi +# nodeSelector -- Node labels for pod assignment nodeSelector: {} -tolerations: [] - -affinity: {} +# envOverrides -- Extra environment variables to set +envOverrides: {} \ No newline at end of file diff --git a/infra/charts/feast/charts/feast-serving/.helmignore b/infra/charts/feast/charts/feast-serving/.helmignore deleted file mode 100644 index 50af031725..0000000000 --- a/infra/charts/feast/charts/feast-serving/.helmignore +++ /dev/null @@ -1,22 +0,0 @@ -# Patterns to ignore when building packages. -# This supports shell glob matching, relative path matching, and -# negation (prefixed with !). Only one pattern per line. -.DS_Store -# Common VCS dirs -.git/ -.gitignore -.bzr/ -.bzrignore -.hg/ -.hgignore -.svn/ -# Common backup files -*.swp -*.bak -*.tmp -*~ -# Various IDEs -.project -.idea/ -*.tmproj -.vscode/ diff --git a/infra/charts/feast/charts/feast-serving/Chart.yaml b/infra/charts/feast/charts/feast-serving/Chart.yaml index 2e9cf89243..7c8e6131cf 100644 --- a/infra/charts/feast/charts/feast-serving/Chart.yaml +++ b/infra/charts/feast/charts/feast-serving/Chart.yaml @@ -1,4 +1,4 @@ apiVersion: v1 -description: A Helm chart for serving component of Feast +description: Feast Serving serves low-latency latest features and historical batch features. name: feast-serving -version: 0.4.4 +version: 0.5.0-alpha.1 diff --git a/infra/charts/feast/charts/feast-serving/README.md b/infra/charts/feast/charts/feast-serving/README.md new file mode 100644 index 0000000000..d30da2eb3a --- /dev/null +++ b/infra/charts/feast/charts/feast-serving/README.md @@ -0,0 +1,69 @@ +feast-serving +============= +Feast Serving serves low-latency latest features and historical batch features. + +Current chart version is `0.5.0-alpha.1` + + + + + +## Chart Values + +| Key | Type | Default | Description | +|-----|------|---------|-------------| +| "application-generated.yaml".enabled | bool | `true` | Flag to include Helm generated configuration for Feast Core host, Redis store and job store. This is useful for deployment that uses default configuration for Redis. Please set `application-override.yaml` to override this configuration. | +| "application-override.yaml" | object | `{"enabled":true}` | Configuration to override the default [application.yaml](https://github.com/feast-dev/feast/blob/master/serving/src/main/resources/application.yml). 
Will be created as a ConfigMap. `application-override.yaml` has a higher precedence than `application-secret.yaml` | +| "application-secret.yaml" | object | `{"enabled":true}` | Configuration to override the default [application.yaml](https://github.com/feast-dev/feast/blob/master/serving/src/main/resources/application.yml). Will be created as a Secret. `application-override.yaml` has a higher precedence than `application-secret.yaml`. It is recommended to either set `application-override.yaml` or `application-secret.yaml` only to simplify config management. | +| "application.yaml".enabled | bool | `true` | Flag to include the default [configuration](https://github.com/feast-dev/feast/blob/master/serving/src/main/resources/application.yml). Please set `application-override.yaml` to override this configuration. | +| envOverrides | object | `{}` | Extra environment variables to set | +| gcpProjectId | string | `""` | Project ID to use when using Google Cloud services such as BigQuery, Cloud Storage and Dataflow | +| gcpServiceAccount.enabled | bool | `false` | Flag to use [service account](https://cloud.google.com/iam/docs/creating-managing-service-account-keys) JSON key | +| gcpServiceAccount.existingSecret.key | string | `"credentials.json"` | Key in the secret data (file name of the service account) | +| gcpServiceAccount.existingSecret.name | string | `"feast-gcp-service-account"` | Name of the existing secret containing the service account | +| image.pullPolicy | string | `"IfNotPresent"` | Image pull policy | +| image.repository | string | `"gcr.io/kf-feast/feast-serving"` | Docker image repository | +| image.tag | string | `"dev"` | Image tag | +| ingress.grpc.annotations | object | `{}` | Extra annotations for the ingress | +| ingress.grpc.auth.enabled | bool | `false` | Flag to enable auth | +| ingress.grpc.class | string | `"nginx"` | Which ingress controller to use | +| ingress.grpc.enabled | bool | `false` | Flag to create an ingress resource for the service | +| ingress.grpc.hosts | list | `[]` | List of hostnames to match when routing requests | +| ingress.grpc.https.enabled | bool | `true` | Flag to enable HTTPS | +| ingress.grpc.https.secretNames | object | `{}` | Map of hostname to TLS secret name | +| ingress.grpc.whitelist | string | `""` | Allowed client IP source ranges | +| ingress.http.annotations | object | `{}` | Extra annotations for the ingress | +| ingress.http.auth.authUrl | string | `"http://auth-server.auth-ns.svc.cluster.local/auth"` | URL to an existing authentication service | +| ingress.http.auth.enabled | bool | `false` | Flag to enable auth | +| ingress.http.class | string | `"nginx"` | Which ingress controller to use | +| ingress.http.enabled | bool | `false` | Flag to create an ingress resource for the service | +| ingress.http.hosts | list | `[]` | List of hostnames to match when routing requests | +| ingress.http.https.enabled | bool | `true` | Flag to enable HTTPS | +| ingress.http.https.secretNames | object | `{}` | Map of hostname to TLS secret name | +| ingress.http.whitelist | string | `""` | Allowed client IP source ranges | +| javaOpts | string | `nil` | [JVM options](https://docs.oracle.com/cd/E22289_01/html/821-1274/configuring-the-default-jvm-and-java-arguments.html). For better performance, it is advised to set the min and max heap:
`-Xms2048m -Xmx2048m` | +| livenessProbe.enabled | bool | `true` | Flag to enabled the probe | +| livenessProbe.failureThreshold | int | `5` | Min consecutive failures for the probe to be considered failed | +| livenessProbe.initialDelaySeconds | int | `60` | Delay before the probe is initiated | +| livenessProbe.periodSeconds | int | `10` | How often to perform the probe | +| livenessProbe.successThreshold | int | `1` | Min consecutive success for the probe to be considered successful | +| livenessProbe.timeoutSeconds | int | `5` | When the probe times out | +| logLevel | string | `"WARN"` | Default log level, use either one of `DEBUG`, `INFO`, `WARN` or `ERROR` | +| logType | string | `"Console"` | Log format, either `JSON` or `Console` | +| nodeSelector | object | `{}` | Node labels for pod assignment | +| prometheus.enabled | bool | `true` | Flag to enable scraping of Feast Core metrics | +| readinessProbe.enabled | bool | `true` | Flag to enabled the probe | +| readinessProbe.failureThreshold | int | `5` | Min consecutive failures for the probe to be considered failed | +| readinessProbe.initialDelaySeconds | int | `15` | Delay before the probe is initiated | +| readinessProbe.periodSeconds | int | `10` | How often to perform the probe | +| readinessProbe.successThreshold | int | `1` | Min consecutive success for the probe to be considered successful | +| readinessProbe.timeoutSeconds | int | `10` | When the probe times out | +| replicaCount | int | `1` | Number of pods that will be created | +| resources | object | `{}` | CPU/memory [resource requests/limit](https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/#resource-requests-and-limits-of-pod-and-container) | +| service.grpc.nodePort | string | `nil` | Port number that each cluster node will listen to | +| service.grpc.port | int | `6566` | Service port for GRPC requests | +| service.grpc.targetPort | int | `6566` | Container port serving GRPC requests | +| service.http.nodePort | string | `nil` | Port number that each cluster node will listen to | +| service.http.port | int | `80` | Service port for HTTP requests | +| service.http.targetPort | int | `8080` | Container port serving HTTP requests | +| service.type | string | `"ClusterIP"` | Kubernetes service type | diff --git a/infra/charts/feast/charts/feast-serving/charts/redis-9.5.0.tgz b/infra/charts/feast/charts/feast-serving/charts/redis-9.5.0.tgz deleted file mode 100644 index 962893a825..0000000000 Binary files a/infra/charts/feast/charts/feast-serving/charts/redis-9.5.0.tgz and /dev/null differ diff --git a/infra/charts/feast/charts/feast-serving/requirements.yaml b/infra/charts/feast/charts/feast-serving/requirements.yaml deleted file mode 100644 index 2cee3f8149..0000000000 --- a/infra/charts/feast/charts/feast-serving/requirements.yaml +++ /dev/null @@ -1,8 +0,0 @@ -dependencies: -- name: redis - version: 9.5.0 - repository: "@stable" - condition: redis.enabled -- name: common - version: 0.0.5 - repository: "@incubator" diff --git a/infra/charts/feast/charts/feast-serving/templates/_helpers.tpl b/infra/charts/feast/charts/feast-serving/templates/_helpers.tpl index ab670cc8cc..49abb6b8e5 100644 --- a/infra/charts/feast/charts/feast-serving/templates/_helpers.tpl +++ b/infra/charts/feast/charts/feast-serving/templates/_helpers.tpl @@ -43,10 +43,3 @@ app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} {{- end }} app.kubernetes.io/managed-by: {{ .Release.Service }} {{- end -}} - -{{/* -Helpers -*/}} -{{- define 
"bq_store_and_no_job_options" -}} -{{ and (eq (index .Values "store.yaml" "type") "BIGQUERY") (empty (index .Values "application.yaml" "feast" "jobs" "store-options")) }} -{{- end -}} diff --git a/infra/charts/feast/charts/feast-serving/templates/configmap.yaml b/infra/charts/feast/charts/feast-serving/templates/configmap.yaml index 3775d9f562..579fe18fd4 100644 --- a/infra/charts/feast/charts/feast-serving/templates/configmap.yaml +++ b/infra/charts/feast/charts/feast-serving/templates/configmap.yaml @@ -10,25 +10,31 @@ metadata: release: {{ .Release.Name }} heritage: {{ .Release.Service }} data: - application.yaml: | -{{- toYaml (index .Values "application.yaml") | nindent 4 }} - -{{- if .Values.core.enabled }} - application-bundled-core.yaml: | + application-generated.yaml: | +{{- if index .Values "application-generated.yaml" "enabled" }} feast: - core-host: {{ printf "%s-feast-core" .Release.Name }} -{{- end }} + core-host: {{ .Release.Name }}-feast-core -{{- if eq (include "bq_store_and_no_job_options" .) "true" }} - application-bundled-redis.yaml: | - feast: - jobs: - store-options: - host: {{ printf "%s-redis-headless" .Release.Name }} + stores: + - name: online + type: REDIS + config: + host: {{ .Release.Name }}-redis-master port: 6379 + subscriptions: + - name: "*" + project: "*" + version: "*" + + job_store: + redis_host: {{ .Release.Name }}-redis-master + redis_port: 6379 + + server: + port: {{ .Values.service.http.targetPort }} {{- end }} -{{- range $name, $content := .Values.springConfigProfiles }} - application-{{ $name }}.yaml: | -{{- toYaml $content | nindent 4 }} + application-override.yaml: | +{{- if index .Values "application-override.yaml" "enabled" }} +{{- toYaml (index .Values "application-override.yaml") | nindent 4 }} {{- end }} diff --git a/infra/charts/feast/charts/feast-serving/templates/deployment.yaml b/infra/charts/feast/charts/feast-serving/templates/deployment.yaml index 1a2e672758..bb8fdc55ae 100644 --- a/infra/charts/feast/charts/feast-serving/templates/deployment.yaml +++ b/infra/charts/feast/charts/feast-serving/templates/deployment.yaml @@ -19,14 +19,11 @@ spec: template: metadata: annotations: - checksum/config: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }} - {{- if .Values.springSecretProfiles }} + checksum/configmap: {{ include (print $.Template.BasePath "/configmap.yaml") . | sha256sum }} checksum/secret: {{ include (print $.Template.BasePath "/secret.yaml") . | sha256sum }} - {{- end }} {{- if .Values.prometheus.enabled }} - {{ $config := index .Values "application.yaml" }} prometheus.io/path: /metrics - prometheus.io/port: "{{ $config.server.port }}" + prometheus.io/port: "{{ .Values.service.http.targetPort }}" prometheus.io/scrape: "true" {{- end }} labels: @@ -41,33 +38,31 @@ spec: volumes: - name: {{ template "feast-serving.fullname" . }}-config - projected: - sources: - - configMap: - name: {{ template "feast-serving.fullname" . }} - - configMap: - name: {{ template "feast-serving.fullname" . }}-store - {{- if .Values.springSecretProfiles }} - - secret: - name: {{ template "feast-serving.fullname" . }} - {{- end }} - {{- if .Values.gcpServiceAccount.useExistingSecret }} - - name: {{ template "feast-serving.fullname" . }}-gcpserviceaccount + configMap: + name: {{ template "feast-serving.fullname" . }} + - name: {{ template "feast-serving.fullname" . }}-secret + secret: + secretName: {{ template "feast-serving.fullname" . }} + {{- if .Values.gcpServiceAccount.enabled }} + - name: {{ template "feast-serving.fullname" . 
}}-gcp-service-account secret: secretName: {{ .Values.gcpServiceAccount.existingSecret.name }} {{- end }} containers: - name: {{ .Chart.Name }} - image: '{{ .Values.image.repository }}:{{ required "No .image.tag found. This must be provided as input." .Values.image.tag }}' + image: {{ .Values.image.repository }}:{{ .Values.image.tag }} imagePullPolicy: {{ .Values.image.pullPolicy }} volumeMounts: - name: {{ template "feast-serving.fullname" . }}-config - mountPath: "{{ .Values.springConfigMountPath }}" - {{- if .Values.gcpServiceAccount.useExistingSecret }} - - name: {{ template "feast-serving.fullname" . }}-gcpserviceaccount - mountPath: {{ .Values.gcpServiceAccount.mountPath }} + mountPath: /etc/feast + - name: {{ template "feast-serving.fullname" . }}-secret + mountPath: /etc/secrets/feast + readOnly: true + {{- if .Values.gcpServiceAccount.enabled }} + - name: {{ template "feast-serving.fullname" . }}-gcp-service-account + mountPath: /etc/secrets/google readOnly: true {{- end }} @@ -77,31 +72,43 @@ spec: - name: LOG_LEVEL value: {{ .Values.logLevel | quote }} - {{- if .Values.gcpServiceAccount.useExistingSecret }} + {{- if .Values.gcpServiceAccount.enabled }} - name: GOOGLE_APPLICATION_CREDENTIALS - value: {{ .Values.gcpServiceAccount.mountPath }}/{{ .Values.gcpServiceAccount.existingSecret.key }} + value: /etc/secrets/google/{{ .Values.gcpServiceAccount.existingSecret.key }} {{- end }} + {{- if .Values.gcpProjectId }} - name: GOOGLE_CLOUD_PROJECT value: {{ .Values.gcpProjectId | quote }} {{- end }} + {{- if .Values.javaOpts }} + - name: JAVA_TOOL_OPTIONS + value: {{ .Values.javaOpts }} + {{- end }} + + {{- range $key, $value := .Values.envOverrides }} + - name: {{ printf "%s" $key | replace "." "_" | upper | quote }} + value: {{ $value | quote }} + {{- end }} + command: - java - {{- range .Values.jvmOptions }} - - {{ . | quote }} - {{- end }} - -jar - - {{ .Values.jarPath | quote }} - - "--spring.config.location=file:{{ .Values.springConfigMountPath }}/" - {{- $combinedProfiles := printf "%s,%s" .Values.springConfigProfilesActive .Values.springSecretProfilesActive -}} - {{- $profilesArray := splitList "," $combinedProfiles -}} - {{- $profilesArray = append $profilesArray (.Values.core.enabled | ternary "bundled-core" "") -}} - {{- $profilesArray = append $profilesArray (eq (include "bq_store_and_no_job_options" .) 
"true" | ternary "bundled-redis" "") -}} - {{- $profilesArray = compact $profilesArray -}} - {{- if $profilesArray }} - - "--spring.profiles.active={{ join "," $profilesArray }}" - {{- end }} + - /opt/feast/feast-serving.jar + - --spring.config.location= + {{- if index .Values "application.yaml" "enabled" -}} + classpath:/application.yml + {{- end }} + {{- if index .Values "application-generated.yaml" "enabled" -}} + ,file:/etc/feast/application-generated.yaml + {{- end }} + {{- if index .Values "application-secret.yaml" "enabled" -}} + ,file:/etc/secrets/feast/application-secret.yaml + {{- end }} + {{- if index .Values "application-override.yaml" "enabled" -}} + ,file:/etc/feast/application-override.yaml + {{- end }} ports: - name: http diff --git a/infra/charts/feast/charts/feast-serving/templates/secret.yaml b/infra/charts/feast/charts/feast-serving/templates/secret.yaml index 941101ee4b..2ccbccfcf7 100644 --- a/infra/charts/feast/charts/feast-serving/templates/secret.yaml +++ b/infra/charts/feast/charts/feast-serving/templates/secret.yaml @@ -1,4 +1,3 @@ -{{- if .Values.springSecretProfiles -}} apiVersion: v1 kind: Secret metadata: @@ -11,9 +10,6 @@ metadata: release: {{ .Release.Name }} heritage: {{ .Release.Service }} type: Opaque -data: -{{- range $name, $content := .Values.springSecretProfiles }} - application-{{ $name }}.yaml: | -{{- toYaml $content | b64enc | nindent 4 }} -{{- end }} -{{- end -}} \ No newline at end of file +stringData: + application-secret.yaml: | +{{- toYaml (index .Values "application-secret.yaml") | nindent 4 }} diff --git a/infra/charts/feast/charts/feast-serving/values.yaml b/infra/charts/feast/charts/feast-serving/values.yaml index fc06c1ae57..c94c720526 100644 --- a/infra/charts/feast/charts/feast-serving/values.yaml +++ b/infra/charts/feast/charts/feast-serving/values.yaml @@ -1,236 +1,148 @@ -# redis configures Redis that is installed as part of Feast Serving. -# Refer to https://github.com/helm/charts/tree/99430c4afdc88213c1ca08f40eeb03868ffcc9d7/stable/redis -# for additional configuration -redis: - # enabled specifies whether Redis should be installed as part of Feast Serving. - # - # If enabled, "redis_config" in store.yaml will be overwritten by Helm - # to the configuration in this Redis installation. - enabled: false - # usePassword specifies if password is required to access Redis. Note that - # Feast 0.3 does not support Redis with password. - usePassword: false - # cluster configuration for Redis. - cluster: - # enabled specifies if Redis should be installed in cluster mode. - enabled: false - -# core configures Feast Core in the same parent feast chart that this Feast -# Serving connects to. -core: - # enabled specifies that Feast Serving will use Feast Core installed - # in the same parent feast chart. If enabled, Helm will overwrite - # "feast.core-host" in application.yaml with the correct value. - enabled: true - -# replicaCount is the number of pods that will be created. +# replicaCount -- Number of pods that will be created replicaCount: 1 -# image configures the Docker image for Feast Serving image: + # image.repository -- Docker image repository repository: gcr.io/kf-feast/feast-serving + # image.tag -- Image tag + tag: dev + # image.pullPolicy -- Image pull policy pullPolicy: IfNotPresent -# application.yaml is the main configuration for Feast Serving application. -# -# Feast Core is a Spring Boot app which uses this yaml configuration file. 
-# Refer to https://github.com/gojek/feast/blob/79eb4ab5fa3d37102c1dca9968162a98690526ba/serving/src/main/resources/application.yml -# for a complete list and description of the configuration. -# -# Note that some properties defined in application.yaml may be overridden by -# Helm under certain conditions. For example, if core is enabled, then -# "feast.core-host" will be overridden. Also, if "type: BIGQUERY" is specified -# in store.yaml, "feast.jobs.store-options" will be overridden as well with -# the default option supported in Feast 0.3. application.yaml: - feast: - version: 0.3 - core-host: localhost - core-grpc-port: 6565 - tracing: - enabled: false - tracer-name: jaeger - service-name: feast-serving - store: - config-path: /etc/feast/feast-serving/store.yaml - redis-pool-max-size: 128 - redis-pool-max-idle: 64 - jobs: - staging-location: "" - store-type: "" - store-options: {} - grpc: - port: 6566 - enable-reflection: true - server: - port: 8080 - -# store.yaml is the configuration for Feast Store. -# -# Refer to this link for description: -# https://github.com/gojek/feast/blob/79eb4ab5fa3d37102c1dca9968162a98690526ba/protos/feast/core/Store.proto -# -# Use the correct store configuration depending on whether the installed -# Feast Serving is "online" or "batch", by uncommenting the correct store.yaml. -# -# Note that if "redis.enabled: true" and "type: REDIS" in store.yaml, -# Helm will override "redis_config" with configuration of Redis installed -# in this chart. -# -# Note that if "type: BIGQUERY" in store.yaml, Helm assumes Feast Online serving -# is also installed with Redis store. Helm will then override "feast.jobs.store-options" -# in application.yaml with the installed Redis store configuration. This is -# because in Feast 0.3, Redis job store is required. -# -# store.yaml: -# name: online -# type: REDIS -# redis_config: -# host: localhost -# port: 6379 -# subscriptions: -# - project: "*" -# name: "*" -# version: "*" -# -# store.yaml: -# name: bigquery -# type: BIGQUERY -# bigquery_config: -# project_id: PROJECT_ID -# dataset_id: DATASET_ID -# subscriptions: -# - project: "*" -# name: "*" -# version: "*" - -springConfigProfiles: {} -springSecretProfiles: {} -# db: | -# spring: -# datasource: -# driverClassName: org.postgresql.Driver -# url: jdbc:postgresql://${DB_HOST:127.0.0.1}:${DB_PORT:5432}/${DB_DATABASE:postgres} -springConfigProfilesActive: "" -springSecretProfilesActive: "" -# springConfigMountPath is the directory path where application.yaml and -# store.yaml will be mounted in the container. -springConfigMountPath: /etc/feast/feast-serving - -# gcpServiceAccount is the service account that Feast Serving will use. + # "application.yaml".enabled -- Flag to include the default [configuration](https://github.com/feast-dev/feast/blob/master/serving/src/main/resources/application.yml). Please set `application-override.yaml` to override this configuration. + enabled: true + +application-generated.yaml: + # "application-generated.yaml".enabled -- Flag to include Helm generated configuration for http port, Feast Core host, Redis store and job store. This is useful for deployment that uses default configuration for Redis. Please set `application-override.yaml` to override this configuration. + enabled: true + +# "application-secret.yaml" -- Configuration to override the default [application.yaml](https://github.com/feast-dev/feast/blob/master/serving/src/main/resources/application.yml). Will be created as a Secret. 
`application-override.yaml` has a higher precedence than `application-secret.yaml`. It is recommended to either set `application-override.yaml` or `application-secret.yaml` only to simplify config management. +application-secret.yaml: + enabled: true + +# "application-override.yaml" -- Configuration to override the default [application.yaml](https://github.com/feast-dev/feast/blob/master/serving/src/main/resources/application.yml). Will be created as a ConfigMap. `application-override.yaml` has a higher precedence than `application-secret.yaml` +application-override.yaml: + enabled: true + gcpServiceAccount: - # useExistingSecret specifies Feast to use an existing secret containing Google + # gcpServiceAccount.enabled -- Flag to use [service account](https://cloud.google.com/iam/docs/creating-managing-service-account-keys) JSON key # Cloud service account JSON key file. - useExistingSecret: false + enabled: false existingSecret: - # name is the secret name of the existing secret for the service account. + # gcpServiceAccount.existingSecret.name -- Name of the existing secret containing the service account name: feast-gcp-service-account - # key is the secret key of the existing secret for the service account. - # key is normally derived from the file name of the JSON key file. - key: key.json - # mountPath is the directory path where the JSON key file will be mounted. - # the value of "existingSecret.key" is file name of the service account file. - mountPath: /etc/gcloud/service-accounts - -# Project ID picked up by the Cloud SDK (e.g. BigQuery run against this project) + # gcpServiceAccount.existingSecret.key -- Key in the secret data (file name of the service account) + key: credentials.json + +# gcpProjectId -- Project ID to use when using Google Cloud services such as BigQuery, Cloud Storage and Dataflow gcpProjectId: "" -# Path to Jar file in the Docker image. -# If using gcr.io/kf-feast/feast-serving this should not need to be changed. -jarPath: /opt/feast/feast-serving.jar - -# jvmOptions are options that will be passed to the Java Virtual Machine (JVM) -# running Feast Core. -# -# For example, it is good practice to set min and max heap size in JVM. -# https://stackoverflow.com/questions/6902135/side-effect-for-increasing-maxpermsize-and-max-heap-size -# -# Refer to https://docs.oracle.com/cd/E22289_01/html/821-1274/configuring-the-default-jvm-and-java-arguments.html -# to see other JVM options that can be set. -# -jvmOptions: [] -# - -Xms768m -# - -Xmx768m - -logType: JSON -logLevel: warn +# javaOpts -- [JVM options](https://docs.oracle.com/cd/E22289_01/html/821-1274/configuring-the-default-jvm-and-java-arguments.html). For better performance, it is advised to set the min and max heap:
`-Xms2048m -Xmx2048m`
+javaOpts:
+
+# logType -- Log format, either `JSON` or `Console`
+logType: Console
+# logLevel -- Default log level, use either one of `DEBUG`, `INFO`, `WARN` or `ERROR`
+logLevel: WARN
+
+prometheus:
+  # prometheus.enabled -- Flag to enable scraping of Feast Core metrics
+  enabled: true
 
 livenessProbe:
-  enabled: false
+  # livenessProbe.enabled -- Flag to enable the probe
+  enabled: true
+  # livenessProbe.initialDelaySeconds -- Delay before the probe is initiated
   initialDelaySeconds: 60
+  # livenessProbe.periodSeconds -- How often to perform the probe
   periodSeconds: 10
+  # livenessProbe.timeoutSeconds -- When the probe times out
   timeoutSeconds: 5
+  # livenessProbe.successThreshold -- Min consecutive successes for the probe to be considered successful
   successThreshold: 1
+  # livenessProbe.failureThreshold -- Min consecutive failures for the probe to be considered failed
   failureThreshold: 5
 
 readinessProbe:
-  enabled: false
+  # readinessProbe.enabled -- Flag to enable the probe
+  enabled: true
+  # readinessProbe.initialDelaySeconds -- Delay before the probe is initiated
   initialDelaySeconds: 15
+  # readinessProbe.periodSeconds -- How often to perform the probe
   periodSeconds: 10
+  # readinessProbe.timeoutSeconds -- When the probe times out
   timeoutSeconds: 10
+  # readinessProbe.successThreshold -- Min consecutive successes for the probe to be considered successful
   successThreshold: 1
+  # readinessProbe.failureThreshold -- Min consecutive failures for the probe to be considered failed
   failureThreshold: 5
 
 service:
+  # service.type -- Kubernetes service type
   type: ClusterIP
   http:
+    # service.http.port -- Service port for HTTP requests
     port: 80
+    # service.http.targetPort -- Container port serving HTTP requests and Prometheus metrics
     targetPort: 8080
-    # nodePort is the port number that each cluster node will listen to
-    # https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport
-    #
-    # nodePort:
+    # service.http.nodePort -- Port number that each cluster node will listen to
+    nodePort:
   grpc:
+    # service.grpc.port -- Service port for GRPC requests
     port: 6566
+    # service.grpc.targetPort -- Container port serving GRPC requests
     targetPort: 6566
-    # nodePort is the port number that each cluster node will listen to
-    # https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport
-    #
-    # nodePort:
+    # service.grpc.nodePort -- Port number that each cluster node will listen to
+    nodePort:
 
 ingress:
   grpc:
+    # ingress.grpc.enabled -- Flag to create an ingress resource for the service
     enabled: false
+    # ingress.grpc.class -- Which ingress controller to use
     class: nginx
+    # ingress.grpc.hosts -- List of hostnames to match when routing requests
     hosts: []
+    # ingress.grpc.annotations -- Extra annotations for the ingress
     annotations: {}
     https:
+      # ingress.grpc.https.enabled -- Flag to enable HTTPS
       enabled: true
+      # ingress.grpc.https.secretNames -- Map of hostname to TLS secret name
       secretNames: {}
+    # ingress.grpc.whitelist -- Allowed client IP source ranges
     whitelist: ""
     auth:
+      # ingress.grpc.auth.enabled -- Flag to enable auth
       enabled: false
   http:
+    # ingress.http.enabled -- Flag to create an ingress resource for the service
     enabled: false
+    # ingress.http.class -- Which ingress controller to use
     class: nginx
+    # ingress.http.hosts -- List of hostnames to match when routing requests
     hosts: []
+    # ingress.http.annotations -- Extra annotations for the ingress
     annotations: {}
     https:
+      # ingress.http.https.enabled -- Flag to enable HTTPS
       enabled: true
+      #
ingress.http.https.secretNames -- Map of hostname to TLS secret name secretNames: {} + # ingress.http.whitelist -- Allowed client IP source ranges whitelist: "" auth: + # ingress.http.auth.enabled -- Flag to enable auth enabled: false + # ingress.http.auth.authUrl -- URL to an existing authentication service authUrl: http://auth-server.auth-ns.svc.cluster.local/auth -prometheus: - enabled: true - +# resources -- CPU/memory [resource requests/limit](https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/#resource-requests-and-limits-of-pod-and-container) resources: {} - # We usually recommend not to specify default resources and to leave this as a conscious - # choice for the user. This also increases chances charts run on environments with little - # resources, such as Minikube. If you do want to specify resources, uncomment the following - # lines, adjust them as necessary, and remove the curly braces after 'resources:'. - # - # limits: - # cpu: 100m - # memory: 128Mi - # requests: - # cpu: 100m - # memory: 128Mi +# nodeSelector -- Node labels for pod assignment nodeSelector: {} -tolerations: [] - -affinity: {} +# envOverrides -- Extra environment variables to set +envOverrides: {} \ No newline at end of file diff --git a/infra/charts/feast/charts/grafana-5.0.5.tgz b/infra/charts/feast/charts/grafana-5.0.5.tgz new file mode 100644 index 0000000000..06eb83a5e5 Binary files /dev/null and b/infra/charts/feast/charts/grafana-5.0.5.tgz differ diff --git a/infra/charts/feast/charts/kafka-0.20.8.tgz b/infra/charts/feast/charts/kafka-0.20.8.tgz new file mode 100644 index 0000000000..f61be294ae Binary files /dev/null and b/infra/charts/feast/charts/kafka-0.20.8.tgz differ diff --git a/infra/charts/feast/charts/postgresql-8.6.1.tgz b/infra/charts/feast/charts/postgresql-8.6.1.tgz new file mode 100644 index 0000000000..c1ee74e8e8 Binary files /dev/null and b/infra/charts/feast/charts/postgresql-8.6.1.tgz differ diff --git a/infra/charts/feast/charts/prometheus-11.0.2.tgz b/infra/charts/feast/charts/prometheus-11.0.2.tgz new file mode 100644 index 0000000000..32b3abbe4e Binary files /dev/null and b/infra/charts/feast/charts/prometheus-11.0.2.tgz differ diff --git a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/.helmignore b/infra/charts/feast/charts/prometheus-statsd-exporter/.helmignore similarity index 100% rename from infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/.helmignore rename to infra/charts/feast/charts/prometheus-statsd-exporter/.helmignore diff --git a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/Chart.yaml b/infra/charts/feast/charts/prometheus-statsd-exporter/Chart.yaml similarity index 100% rename from infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/Chart.yaml rename to infra/charts/feast/charts/prometheus-statsd-exporter/Chart.yaml diff --git a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/README.md b/infra/charts/feast/charts/prometheus-statsd-exporter/README.md similarity index 91% rename from infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/README.md rename to infra/charts/feast/charts/prometheus-statsd-exporter/README.md index 69eb33039b..8a6739f393 100644 --- a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/README.md +++ b/infra/charts/feast/charts/prometheus-statsd-exporter/README.md @@ -33,7 +33,8 @@ $ helm delete my-release ## Configuration - |Parameter | Description | Default | 
+|Parameter | Description | Default | +|----------------------------|------------------------------------------------------|----------------------------------------| |`extraArgs` | key:value list of extra arguments to give the binary | `{}` | |`image.pullPolicy` | Image pull policy | `IfNotPresent` | |`image.repository` | Image repository | `prom/statsd-exporter` | diff --git a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/NOTES.txt b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/NOTES.txt similarity index 100% rename from infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/NOTES.txt rename to infra/charts/feast/charts/prometheus-statsd-exporter/templates/NOTES.txt diff --git a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/_helpers.tpl b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/_helpers.tpl similarity index 100% rename from infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/_helpers.tpl rename to infra/charts/feast/charts/prometheus-statsd-exporter/templates/_helpers.tpl diff --git a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/config.yaml b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/config.yaml similarity index 100% rename from infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/config.yaml rename to infra/charts/feast/charts/prometheus-statsd-exporter/templates/config.yaml diff --git a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/deployment.yaml b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/deployment.yaml similarity index 96% rename from infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/deployment.yaml rename to infra/charts/feast/charts/prometheus-statsd-exporter/templates/deployment.yaml index 47308ef89b..44fac913b6 100644 --- a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/deployment.yaml +++ b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/deployment.yaml @@ -1,4 +1,4 @@ -apiVersion: extensions/v1beta1 +apiVersion: apps/v1 kind: Deployment metadata: name: {{ template "prometheus-statsd-exporter.fullname" . }} @@ -16,7 +16,7 @@ spec: template: metadata: annotations: -{{ toYaml .Values.statsdexporter.annotations | indent 8 }} +{{ toYaml .Values.statsdexporter.podAnnotations | indent 8 }} labels: app: {{ template "prometheus-statsd-exporter.name" . 
}} release: {{ .Release.Name }} diff --git a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/pvc.yaml b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/pvc.yaml similarity index 100% rename from infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/pvc.yaml rename to infra/charts/feast/charts/prometheus-statsd-exporter/templates/pvc.yaml diff --git a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/service.yaml b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/service.yaml similarity index 69% rename from infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/service.yaml rename to infra/charts/feast/charts/prometheus-statsd-exporter/templates/service.yaml index 88d01b24a6..1690ba7fe0 100644 --- a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/service.yaml +++ b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/service.yaml @@ -15,6 +15,38 @@ metadata: {{ toYaml .Values.service.labels | indent 4 }} {{- end }} name: {{ template "prometheus-statsd-exporter.fullname" . }} +spec: + ports: + - name: metrics + port: {{ .Values.service.metricsPort }} + protocol: TCP + targetPort: 9102 + - name: statsd-tcp + port: {{ .Values.service.statsdPort }} + protocol: TCP + targetPort: 9125 + selector: + app: {{ template "prometheus-statsd-exporter.name" . }} + release: {{ .Release.Name }} + type: ClusterIP +--- +apiVersion: v1 +kind: Service +metadata: +{{- if .Values.service.annotations }} + annotations: +{{ toYaml .Values.service.annotations | indent 4 }} +{{- end }} + labels: + app: {{ template "prometheus-statsd-exporter.fullname" . }} + chart: {{ .Chart.Name }}-{{ .Chart.Version }} + component: "{{ .Chart.Name }}" + heritage: {{ .Release.Service }} + release: {{ .Release.Name }} +{{- if .Values.service.labels }} +{{ toYaml .Values.service.labels | indent 4 }} +{{- end }} + name: {{ template "prometheus-statsd-exporter.fullname" . 
}}-udp spec: {{- if .Values.service.clusterIP }} clusterIP: {{ .Values.service.clusterIP }} @@ -33,14 +65,6 @@ spec: {{- end }} {{- end }} ports: - - name: metrics - port: {{ .Values.service.metricsPort }} - protocol: TCP - targetPort: 9102 - - name: statsd-tcp - port: {{ .Values.service.statsdPort }} - protocol: TCP - targetPort: 9125 - name: statsd-udp port: {{ .Values.service.statsdPort }} protocol: UDP diff --git a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/serviceaccount.yaml b/infra/charts/feast/charts/prometheus-statsd-exporter/templates/serviceaccount.yaml similarity index 100% rename from infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/templates/serviceaccount.yaml rename to infra/charts/feast/charts/prometheus-statsd-exporter/templates/serviceaccount.yaml diff --git a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/values.yaml b/infra/charts/feast/charts/prometheus-statsd-exporter/values.yaml similarity index 96% rename from infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/values.yaml rename to infra/charts/feast/charts/prometheus-statsd-exporter/values.yaml index f2d523771e..b3f70ee264 100644 --- a/infra/charts/feast/charts/feast-core/charts/prometheus-statsd-exporter/values.yaml +++ b/infra/charts/feast/charts/prometheus-statsd-exporter/values.yaml @@ -20,6 +20,9 @@ service: statsdexporter: podAnnotations: + prometheus.io/path: /metrics + prometheus.io/port: "9102" + prometheus.io/scrape: "true" extraArgs: {} # - --persistence.file=data-perst diff --git a/infra/charts/feast/charts/redis-10.5.6.tgz b/infra/charts/feast/charts/redis-10.5.6.tgz new file mode 100644 index 0000000000..f1e4ec4105 Binary files /dev/null and b/infra/charts/feast/charts/redis-10.5.6.tgz differ diff --git a/infra/charts/feast/files/img/dataflow-jobs.png b/infra/charts/feast/files/img/dataflow-jobs.png new file mode 100644 index 0000000000..2acf48f19e Binary files /dev/null and b/infra/charts/feast/files/img/dataflow-jobs.png differ diff --git a/infra/charts/feast/files/img/prometheus-server.png b/infra/charts/feast/files/img/prometheus-server.png new file mode 100644 index 0000000000..efe31dc9e1 Binary files /dev/null and b/infra/charts/feast/files/img/prometheus-server.png differ diff --git a/infra/charts/feast/requirements.lock b/infra/charts/feast/requirements.lock index d9aeab1651..c6ff995bac 100644 --- a/infra/charts/feast/requirements.lock +++ b/infra/charts/feast/requirements.lock @@ -1,12 +1,30 @@ dependencies: - name: feast-core repository: "" - version: 0.4.4 + version: 0.5.0-alpha.1 - name: feast-serving repository: "" - version: 0.4.4 + version: 0.5.0-alpha.1 - name: feast-serving repository: "" - version: 0.4.4 -digest: sha256:1f812168f656d4725f80f52fcf2f919603ca4c69a9b48d62d55ea9271bf4755b -generated: "2020-02-25T14:15:39.256627Z" + version: 0.5.0-alpha.1 +- name: postgresql + repository: https://kubernetes-charts.storage.googleapis.com/ + version: 8.6.1 +- name: kafka + repository: https://kubernetes-charts-incubator.storage.googleapis.com/ + version: 0.20.8 +- name: redis + repository: https://kubernetes-charts.storage.googleapis.com/ + version: 10.5.6 +- name: prometheus-statsd-exporter + repository: "" + version: 0.1.2 +- name: prometheus + repository: https://kubernetes-charts.storage.googleapis.com/ + version: 11.0.2 +- name: grafana + repository: https://kubernetes-charts.storage.googleapis.com/ + version: 5.0.5 +digest: 
sha256:e325439384ef9b45428fbeafe8f1e230b331d4b5482c3f26f07b71cecae06c22 +generated: "2020-05-02T15:00:45.4365217+08:00" diff --git a/infra/charts/feast/requirements.yaml b/infra/charts/feast/requirements.yaml index 1fa1826965..7de7870575 100644 --- a/infra/charts/feast/requirements.yaml +++ b/infra/charts/feast/requirements.yaml @@ -1,12 +1,35 @@ dependencies: - name: feast-core - version: 0.4.4 + version: 0.5.0-alpha.1 condition: feast-core.enabled - name: feast-serving - alias: feast-serving-batch - version: 0.4.4 - condition: feast-serving-batch.enabled + alias: feast-online-serving + version: 0.5.0-alpha.1 + condition: feast-online-serving.enabled - name: feast-serving - alias: feast-serving-online - version: 0.4.4 - condition: feast-serving-online.enabled \ No newline at end of file + alias: feast-batch-serving + version: 0.5.0-alpha.1 + condition: feast-batch-serving.enabled +- name: postgresql + version: 8.6.1 + repository: https://kubernetes-charts.storage.googleapis.com/ + condition: postgresql.enabled +- name: kafka + version: 0.20.8 + repository: https://kubernetes-charts-incubator.storage.googleapis.com/ + condition: kafka.enabled +- name: redis + version: 10.5.6 + repository: https://kubernetes-charts.storage.googleapis.com/ + condition: redis.enabled +- name: prometheus-statsd-exporter + version: 0.1.2 + condition: prometheus-statsd-exporter.enabled +- name: prometheus + version: 11.0.2 + repository: https://kubernetes-charts.storage.googleapis.com/ + condition: prometheus.enabled +- name: grafana + version: 5.0.5 + repository: https://kubernetes-charts.storage.googleapis.com/ + condition: grafana.enabled diff --git a/infra/charts/feast/templates/tests/test-feast-batch-serving.yaml b/infra/charts/feast/templates/tests/test-feast-batch-serving.yaml new file mode 100644 index 0000000000..54173021d3 --- /dev/null +++ b/infra/charts/feast/templates/tests/test-feast-batch-serving.yaml @@ -0,0 +1,116 @@ +{{- if and (index .Values "feast-core" "enabled") (index .Values "feast-batch-serving" "enabled") }} + +apiVersion: v1 +kind: Pod +metadata: + name: "{{ .Release.Name }}-feast-batch-serving-test" + annotations: + "helm.sh/hook": test-success + namespace: {{ .Release.Namespace }} +spec: + containers: + - name: main + image: python:3.7 + command: + - bash + - -c + - | + pip install -U feast==0.4.* + + cat < featureset.yaml + kind: feature_set + spec: + name: customer_transactions + entities: + - name: customer_id + valueType: INT64 + features: + - name: daily_transactions + valueType: FLOAT + - name: total_transactions + valueType: FLOAT + maxAge: 3600s + EOF + + python < featureset.yaml + kind: feature_set + spec: + name: customer_transactions + entities: + - name: customer_id + valueType: INT64 + features: + - name: daily_transactions + valueType: FLOAT + - name: total_transactions + valueType: FLOAT + maxAge: 3600s + EOF + + python < + dataset_id: + staging_location: gs:///feast-staging-location + initial_retry_delay_seconds: 3 + total_timeout_seconds: 21600 + subscriptions: + - name: "*" + project: "*" + version: "*" + +postgresql: + existingSecret: feast-postgresql diff --git a/infra/charts/feast/values-dataflow-runner.yaml b/infra/charts/feast/values-dataflow-runner.yaml new file mode 100644 index 0000000000..0469a6349e --- /dev/null +++ b/infra/charts/feast/values-dataflow-runner.yaml @@ -0,0 +1,113 @@ +# values-dataflow-runner.yaml +feast-core: + gcpServiceAccount: + enabled: true + postgresql: + existingSecret: feast-postgresql + application-override.yaml: + feast: + stream: 
+ options: + bootstrapServers: + jobs: + active_runner: dataflow + metrics: + host: + runners: + - name: dataflow + type: DataflowRunner + options: + project: + region: + zone: + tempLocation: + network: + subnetwork: + maxNumWorkers: 1 + autoscalingAlgorithm: THROUGHPUT_BASED + usePublicIps: false + workerMachineType: n1-standard-1 + deadLetterTableSpec: + +feast-online-serving: + application-override.yaml: + feast: + stores: + - name: online + type: REDIS + config: + host: + port: 6379 + subscriptions: + - name: "*" + project: "*" + version: "*" + +feast-batch-serving: + enabled: true + gcpServiceAccount: + enabled: true + application-override.yaml: + feast: + active_store: historical + stores: + - name: historical + type: BIGQUERY + config: + project_id: + dataset_id: + staging_location: gs:///feast-staging-location + initial_retry_delay_seconds: 3 + total_timeout_seconds: 21600 + subscriptions: + - name: "*" + project: "*" + version: "*" + +postgresql: + existingSecret: feast-postgresql + +kafka: + external: + enabled: true + type: LoadBalancer + annotations: + cloud.google.com/load-balancer-type: Internal + loadBalancerSourceRanges: + - 10.0.0.0/8 + - 172.16.0.0/12 + - 192.168.0.0/16 + firstListenerPort: 31090 + loadBalancerIP: + - + - + - + configurationOverrides: + "advertised.listeners": |- + EXTERNAL://${LOAD_BALANCER_IP}:31090 + "listener.security.protocol.map": |- + PLAINTEXT:PLAINTEXT,EXTERNAL:PLAINTEXT + "log.retention.hours": 1 + +redis: + master: + service: + type: LoadBalancer + loadBalancerIP: + annotations: + cloud.google.com/load-balancer-type: Internal + loadBalancerSourceRanges: + - 10.0.0.0/8 + - 172.16.0.0/12 + - 192.168.0.0/16 + +prometheus-statsd-exporter: + service: + type: LoadBalancer + annotations: + cloud.google.com/load-balancer-type: Internal + loadBalancerSourceRanges: + - 10.0.0.0/8 + - 172.16.0.0/12 + - 192.168.0.0/16 + loadBalancerIP: diff --git a/infra/charts/feast/values-demo.yaml b/infra/charts/feast/values-demo.yaml deleted file mode 100644 index 2cb5ccbe74..0000000000 --- a/infra/charts/feast/values-demo.yaml +++ /dev/null @@ -1,84 +0,0 @@ -# The following are values for installing Feast for demonstration purpose: -# - Persistence is disabled since for demo purpose data is not expected -# to be durable -# - Only online serving (no batch serving) is installed to remove dependency -# on Google Cloud services. Batch serving requires BigQuery dependency. 
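
A usage sketch for the Dataflow profile above, once the blank fields (project, region, IPs, and so on) are filled in. The release name is hypothetical, and the feast-postgresql secret matches the one the e2e scripts later in this diff create:

    # run from infra/charts/feast
    kubectl create secret generic feast-postgresql \
      --from-literal=postgresql-password=password
    helm install feast . --values values-dataflow-runner.yaml
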
-# - Replace all occurrences of "feast.example.com" with the domain name or -# external IP pointing to your cluster -# - -feast-core: - enabled: true - - gcpServiceAccount: - useExistingSecret: false - - service: - type: NodePort - grpc: - nodePort: 32090 - - - resources: - requests: - cpu: 250m - memory: 256Mi - - postgresql: - persistence: - enabled: false - - - kafka: - enabled: true - persistence: - enabled: false - external: - enabled: true - type: NodePort - domain: feast.example.com - configurationOverrides: - "advertised.listeners": |- - EXTERNAL://feast.example.com:$((31090 + ${KAFKA_BROKER_ID})) - "listener.security.protocol.map": |- - PLAINTEXT:PLAINTEXT,EXTERNAL:PLAINTEXT - - application.yaml: - feast: - stream: - options: - bootstrapServers: feast.example.com:31090 - -feast-serving-online: - enabled: true - redis: - enabled: true - - service: - type: NodePort - grpc: - nodePort: 32091 - - store.yaml: - name: redis - type: REDIS - subscriptions: - - name: "*" - project: "*" - version: "*" - -feast-serving-batch: -# enabled: false - enabled: true - store.yaml: - name: bigquery - type: BIGQUERY - bigquery_config: - project_id: PROJECT_ID - dataset_id: DATASET_ID - subscriptions: - - project: "*" - name: "*" - version: "*" - redis: - enabled: false \ No newline at end of file diff --git a/infra/charts/feast/values-external-store.yaml b/infra/charts/feast/values-external-store.yaml deleted file mode 100644 index d012bcec56..0000000000 --- a/infra/charts/feast/values-external-store.yaml +++ /dev/null @@ -1,5 +0,0 @@ -# TODO @dheryanto -# -# The following are sample values for installing Feast without setting up -# Kafka and Redis stores. In other words, using Feast with external stream -# source and stores. diff --git a/infra/charts/feast/values-production.yaml b/infra/charts/feast/values-production.yaml deleted file mode 100644 index 6b53dc19ea..0000000000 --- a/infra/charts/feast/values-production.yaml +++ /dev/null @@ -1,4 +0,0 @@ -# TODO @dheryanto -# -# The following are sample values for installing Feast for typical production -# environment. diff --git a/infra/charts/feast/values.yaml b/infra/charts/feast/values.yaml index fde03f9ad7..20ee2ab029 100644 --- a/infra/charts/feast/values.yaml +++ b/infra/charts/feast/values.yaml @@ -1,262 +1,35 @@ -# Feast deployment installs the following components: -# - Feast Core -# - Feast Serving Online -# - Feast Serving Batch -# - Prometheus StatsD Exporter -# -# The configuration for different components can be referenced from: -# - charts/feast-core/values.yaml -# - charts/feast-serving/values.yaml -# - charts/prometheus-statsd-exporter/values.yaml -# -# Note that "feast-serving-online" and "feast-serving-batch" are -# aliases to "feast-serving" chart since in typical scenario two instances -# of Feast Serving: online and batch will be deployed. Both described -# using the same chart "feast-serving". -# -# Note that the import job by default uses DirectRunner -# https://beam.apache.org/documentation/runners/direct/ -# in this configuration since it allows Feast to run in more environments -# (unlike DataflowRunner which requires Google Cloud services). -# -# A secret containing Google Cloud service account JSON key is required -# in this configuration. 
-# https://cloud.google.com/iam/docs/creating-managing-service-accounts -# -# The Google Cloud service account must have the following roles: -# - bigquery.dataEditor -# - bigquery.jobUser -# -# Assuming a service account JSON key file has been downloaded to -# (please name the file key.json): -# /home/user/key.json -# -# Run the following command to create the secret in your Kubernetes cluster: -# -# kubectl create secret generic feast-gcp-service-account \ -# --from-file=/home/user/key.json -# -# Replace every instance of EXTERNAL_IP with the external IP of your GKE cluster - -# ============================================================ -# Feast Core -# ============================================================ - feast-core: - # If enabled specifies whether to install Feast Core component. - # - # Normally, this is set to "false" when Feast users need access to low latency - # Feast Serving, by deploying multiple instances of Feast Serving closest - # to the client. These instances of Feast Serving however can still use - # the same shared Feast Core. + # feast-core.enabled -- Flag to install Feast Core enabled: true - # Specify which image tag to use. Keep this consistent for all components - image: - tag: "0.4.4" +feast-online-serving: + # feast-online-serving.enabled -- Flag to install Feast Online Serving + enabled: true - # jvmOptions are options that will be passed to the Java Virtual Machine (JVM) - # running Feast Core. - # - # For example, it is good practice to set min and max heap size in JVM. - # https://stackoverflow.com/questions/6902135/side-effect-for-increasing-maxpermsize-and-max-heap-size - jvmOptions: - - -Xms1024m - - -Xmx1024m +feast-batch-serving: + # feast-batch-serving.enabled -- Flag to install Feast Batch Serving + enabled: false - # resources that should be allocated to Feast Core. - resources: - requests: - cpu: 1000m - memory: 1024Mi - limits: - memory: 2048Mi +postgresql: + # postgresql.enabled -- Flag to install Postgresql + enabled: true - # gcpServiceAccount is the Google service account that Feast Core will use. - gcpServiceAccount: - # useExistingSecret specifies Feast to use an existing secret containing - # Google Cloud service account JSON key file. - # - # This is the only supported option for now to use a service account JSON. - # Feast admin is expected to create this secret before deploying Feast. - useExistingSecret: true - existingSecret: - # name is the secret name of the existing secret for the service account. - name: feast-gcp-service-account - # key is the secret key of the existing secret for the service account. - # key is normally derived from the file name of the JSON key file. 
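
Note that the replacement charts expect a different key name: the old default above is key.json, while the new charts default to credentials.json (see gcpServiceAccount.existingSecret.key earlier in this diff). A sketch of creating the secret accordingly, assuming the key file sits at /home/user/key.json:

    kubectl create secret generic feast-gcp-service-account \
      --from-file=credentials.json=/home/user/key.json
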
- key: key.json - # Setting service.type to NodePort exposes feast-core service at a static port - service: - type: NodePort - grpc: - # this is the port that is exposed outside of the cluster - nodePort: 32090 - # Make kafka externally accessible using NodePort - # Please set EXTERNAL_IP to your cluster's external IP - kafka: - external: - enabled: true - type: NodePort - domain: EXTERNAL_IP - configurationOverrides: - "advertised.listeners": |- - EXTERNAL://EXTERNAL_IP:$((31090 + ${KAFKA_BROKER_ID})) - "listener.security.protocol.map": |- - PLAINTEXT:PLAINTEXT,EXTERNAL:PLAINTEXT - application.yaml: - feast: - stream: - options: - # Point to one of your Kafka brokers - # Please set EXTERNAL_IP to your cluster's external IP - bootstrapServers: EXTERNAL_IP:31090 +kafka: + # kafka.enabled -- Flag to install Kafka + enabled: true -# ============================================================ -# Feast Serving Online -# ============================================================ +redis: + # redis.enabled -- Flag to install Redis + enabled: true -feast-serving-online: - # enabled specifies whether to install Feast Serving Online component. +prometheus-statsd-exporter: + # prometheus-statsd-exporter.enabled -- Flag to install StatsD to Prometheus Exporter enabled: true - # Specify what image tag to use. Keep this consistent for all components - image: - tag: "0.4.4" - # redis.enabled specifies whether Redis should be installed as part of Feast Serving. - # - # If enabled is set to "false", Feast admin has to ensure there is an - # existing Redis running outside Feast, that Feast Serving can connect to. - # master.service.type set to NodePort exposes Redis to outside of the cluster - redis: - enabled: true - master: - service: - nodePort: 32101 - type: NodePort - # jvmOptions are options that will be passed to the Feast Serving JVM. - jvmOptions: - - -Xms1024m - - -Xmx1024m - # resources that should be allocated to Feast Serving. - resources: - requests: - cpu: 500m - memory: 1024Mi - limits: - memory: 2048Mi - # Make service accessible to outside of cluster using NodePort - service: - type: NodePort - grpc: - nodePort: 32091 - # store.yaml is the configuration for Feast Store. - # - # Refer to this link for more description: - # https://github.com/gojek/feast/blob/79eb4ab5fa3d37102c1dca9968162a98690526ba/protos/feast/core/Store.proto - store.yaml: - name: redis - type: REDIS - redis_config: - # If redis.enabled is set to false, Feast admin should uncomment and - # set the host value to an "existing" Redis instance Feast will use as - # online Store. Also use the correct port for that existing instance. - # - # Else, if redis.enabled is set to true, replace EXTERNAL_IP with your - # cluster's external IP. - # host: redis-host - host: EXTERNAL_IP - port: 32101 - subscriptions: - - name: "*" - project: "*" - version: "*" -# ============================================================ -# Feast Serving Batch -# ============================================================ +prometheus: + # prometheus.enabled -- Flag to install Prometheus + enabled: true -feast-serving-batch: - # enabled specifies whether to install Feast Serving Batch component. +grafana: + # grafana.enabled -- Flag to install Grafana enabled: true - # Specify what image tag to use. Keep this consistent for all components - image: - tag: "0.4.4" - # redis.enabled specifies whether Redis should be installed as part of Feast Serving. 
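
For clarity on the advertised.listeners override above: the shell arithmetic gives each broker its own static port, so broker IDs 0, 1 and 2 advertise ports 31090, 31091 and 31092. A sketch of the expansion:

    for KAFKA_BROKER_ID in 0 1 2; do
      echo "EXTERNAL://EXTERNAL_IP:$((31090 + KAFKA_BROKER_ID))"
    done
    # EXTERNAL://EXTERNAL_IP:31090
    # EXTERNAL://EXTERNAL_IP:31091
    # EXTERNAL://EXTERNAL_IP:31092
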
- # - # This is usually set to "false" for Feast Serving Batch because the default - # store is BigQuery. - redis: - enabled: false - # jvmOptions are options that will be passed to the Feast Serving JVM. - jvmOptions: - - -Xms1024m - - -Xmx1024m - # resources that should be allocated to Feast Serving. - resources: - requests: - cpu: 500m - memory: 1024Mi - limits: - memory: 2048Mi - # Make service accessible to outside of cluster using NodePort - service: - type: NodePort - grpc: - nodePort: 32092 - # gcpServiceAccount is the service account that Feast Serving will use. - gcpServiceAccount: - # useExistingSecret specifies Feast to use an existing secret containing - # Google Cloud service account JSON key file. - # - # This is the only supported option for now to use a service account JSON. - # Feast admin is expected to create this secret before deploying Feast. - useExistingSecret: true - existingSecret: - # name is the secret name of the existing secret for the service account. - name: feast-gcp-service-account - # key is the secret key of the existing secret for the service account. - # key is normally derived from the file name of the JSON key file. - key: key.json - # application.yaml is the main configuration for Feast Serving application. - # - # Feast Core is a Spring Boot app which uses this yaml configuration file. - # Refer to https://github.com/gojek/feast/blob/79eb4ab5fa3d37102c1dca9968162a98690526ba/serving/src/main/resources/application.yml - # for a complete list and description of the configuration. - application.yaml: - feast: - jobs: - # staging-location specifies the URI to store intermediate files for - # batch serving (required if using BigQuery as Store). - # - # Please set the value to an "existing" Google Cloud Storage URI that - # Feast serving has write access to. - staging-location: gs://YOUR_BUCKET_NAME/serving/batch - # Type of store to store job metadata. - # - # This default configuration assumes that Feast Serving Online is - # enabled as well. So Feast Serving Batch will share the same - # Redis instance to store job statuses. - store-type: REDIS - # Default to use the internal hostname of the redis instance deployed by Online service, - # otherwise use externally exposed by setting EXTERNAL_IP to your cluster's external IP - # store-options: - # host: EXTERNAL_IP - # port: 32101 - # store.yaml is the configuration for Feast Store. - # - # Refer to this link for more description: - # https://github.com/gojek/feast/blob/79eb4ab5fa3d37102c1dca9968162a98690526ba/protos/feast/core/Store.proto - store.yaml: - name: bigquery - type: BIGQUERY - bigquery_config: - # project_id specifies the Google Cloud Project. Please set this to the - # project id you are using BigQuery in. - project_id: PROJECT_ID - # dataset_id specifies an "existing" BigQuery dataset Feast Serving Batch - # will use. Please ensure this dataset is created beforehand. 
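
One way to create the dataset up front is with the Cloud SDK's bq CLI (a sketch; PROJECT_ID and DATASET_ID are placeholders to substitute):

    bq mk --dataset PROJECT_ID:DATASET_ID
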
- dataset_id: DATASET_ID - subscriptions: - - name: "*" - project: "*" - version: "*" diff --git a/infra/docker-compose/.env.sample b/infra/docker-compose/.env.sample index c8652e8fe0..be071be994 100644 --- a/infra/docker-compose/.env.sample +++ b/infra/docker-compose/.env.sample @@ -1,21 +1,20 @@ # General COMPOSE_PROJECT_NAME=feast FEAST_VERSION=latest +FEAST_REPOSITORY_VERSION=v0.5-branch # Feast Core FEAST_CORE_IMAGE=gcr.io/kf-feast/feast-core -FEAST_CORE_CONFIG=direct-runner.yml +FEAST_CORE_CONFIG=core.yml FEAST_CORE_GCP_SERVICE_ACCOUNT_KEY=placeholder.json # Feast Serving FEAST_SERVING_IMAGE=gcr.io/kf-feast/feast-serving -FEAST_ONLINE_SERVING_CONFIG=online-serving.yml -FEAST_ONLINE_STORE_CONFIG=redis-store.yml +# Feast Serving - Batch (BigQuery) FEAST_BATCH_SERVING_CONFIG=batch-serving.yml -FEAST_BATCH_STORE_CONFIG=bq-store.yml FEAST_BATCH_SERVING_GCP_SERVICE_ACCOUNT_KEY=placeholder.json -FEAST_JOB_STAGING_LOCATION=gs://your-gcs-bucket/staging +# Feast Serving - Online (Redis) +FEAST_ONLINE_SERVING_CONFIG=online-serving.yml # Jupyter FEAST_JUPYTER_GCP_SERVICE_ACCOUNT_KEY=placeholder.json - diff --git a/infra/docker-compose/core/core.yml b/infra/docker-compose/core/core.yml new file mode 100644 index 0000000000..f54d05a36b --- /dev/null +++ b/infra/docker-compose/core/core.yml @@ -0,0 +1,14 @@ +feast: + jobs: + polling_interval_milliseconds: 30000 + job_update_timeout_seconds: 240 + active_runner: direct + runners: + - name: direct + type: DirectRunner + options: {} + stream: + type: kafka + options: + topic: feast-features + bootstrapServers: "kafka:9092,localhost:9094" diff --git a/infra/docker-compose/core/direct-runner.yml b/infra/docker-compose/core/direct-runner.yml deleted file mode 100644 index f9123480bb..0000000000 --- a/infra/docker-compose/core/direct-runner.yml +++ /dev/null @@ -1,3 +0,0 @@ -feast: - jobs: - runner: DirectRunner diff --git a/infra/docker-compose/docker-compose.batch.yml b/infra/docker-compose/docker-compose.batch.yml new file mode 100644 index 0000000000..71f61d634a --- /dev/null +++ b/infra/docker-compose/docker-compose.batch.yml @@ -0,0 +1,28 @@ +version: "3.7" + +services: + batch-serving: + image: ${FEAST_SERVING_IMAGE}:${FEAST_VERSION} + volumes: + - ./serving/${FEAST_BATCH_SERVING_CONFIG}:/etc/feast/application.yml + - ./gcp-service-accounts/${FEAST_BATCH_SERVING_GCP_SERVICE_ACCOUNT_KEY}:/etc/gcloud/service-accounts/key.json + depends_on: + - redis + ports: + - 6567:6567 + restart: on-failure + environment: + GOOGLE_APPLICATION_CREDENTIALS: /etc/gcloud/service-accounts/key.json + FEAST_JOB_STAGING_LOCATION: ${FEAST_BATCH_JOB_STAGING_LOCATION} + command: + - "java" + - "-Xms1024m" + - "-Xmx1024m" + - "-jar" + - "/opt/feast/feast-serving.jar" + - "--spring.config.location=classpath:/application.yml,file:/etc/feast/application.yml" + + redis: + image: redis:5-alpine + ports: + - "6379:6379" diff --git a/infra/docker-compose/docker-compose.dev.yml b/infra/docker-compose/docker-compose.dev.yml new file mode 100644 index 0000000000..840c6dd267 --- /dev/null +++ b/infra/docker-compose/docker-compose.dev.yml @@ -0,0 +1,70 @@ +version: "3.7" + +services: + core: + image: maven:3.6-openjdk-11 + volumes: + - ${HOME}/.m2:/root/.m2:delegated + - ../../.:/code:cached + environment: + DB_HOST: db + FEAST_STREAM_OPTIONS_BOOTSTRAPSERVERS: kafka:9092 + GOOGLE_APPLICATION_CREDENTIALS: /etc/gcloud/service-accounts/key.json + restart: on-failure + depends_on: + - db + - kafka + ports: + - 6565:6565 + + working_dir: /code + command: + - mvn + - -pl + - core + - 
spring-boot:run + + jupyter: + image: jupyter/minimal-notebook:619e9cc2fc07 + volumes: + - ./gcp-service-accounts/${FEAST_JUPYTER_GCP_SERVICE_ACCOUNT_KEY}:/etc/gcloud/service-accounts/key.json + - ./jupyter/startup.sh:/etc/startup.sh + depends_on: + - core + environment: + FEAST_CORE_URL: core:6565 + FEAST_ONLINE_SERVING_URL: online-serving:6566 + FEAST_BATCH_SERVING_URL: batch-serving:6567 + GOOGLE_APPLICATION_CREDENTIALS: /etc/gcloud/service-accounts/key.json + FEAST_REPOSITORY_VERSION: ${FEAST_REPOSITORY_VERSION} + ports: + - 8888:8888 + command: ["/etc/startup.sh"] + + kafka: + image: confluentinc/cp-kafka:5.2.1 + environment: + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + KAFKA_ADVERTISED_LISTENERS: INSIDE://kafka:9092,OUTSIDE://localhost:9094 + KAFKA_LISTENERS: INSIDE://:9092,OUTSIDE://:9094 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT + KAFKA_INTER_BROKER_LISTENER_NAME: INSIDE + ports: + - "9092:9092" + - "9094:9094" + + depends_on: + - zookeeper + + zookeeper: + image: confluentinc/cp-zookeeper:5.2.1 + environment: + ZOOKEEPER_CLIENT_PORT: 2181 + + db: + image: postgres:12-alpine + environment: + POSTGRES_PASSWORD: password + ports: + - "5432:5432" diff --git a/infra/docker-compose/docker-compose.online.yml b/infra/docker-compose/docker-compose.online.yml new file mode 100644 index 0000000000..97e8e26e94 --- /dev/null +++ b/infra/docker-compose/docker-compose.online.yml @@ -0,0 +1,25 @@ +version: "3.7" + +services: + online-serving: + build: + context: ../.. + dockerfile: infra/docker/serving/Dockerfile + image: ${FEAST_SERVING_IMAGE}:${FEAST_VERSION} + volumes: + - ./serving/${FEAST_ONLINE_SERVING_CONFIG}:/etc/feast/application.yml + depends_on: + - redis + ports: + - 6566:6566 + restart: on-failure + command: + - java + - -jar + - /opt/feast/feast-serving.jar + - --spring.config.location=classpath:/application.yml,file:/etc/feast/application.yml + + redis: + image: redis:5-alpine + ports: + - "6379:6379" diff --git a/infra/docker-compose/docker-compose.yml b/infra/docker-compose/docker-compose.yml index 2fb8a45866..53afff4def 100644 --- a/infra/docker-compose/docker-compose.yml +++ b/infra/docker-compose/docker-compose.yml @@ -22,81 +22,24 @@ services: - java - -jar - /opt/feast/feast-core.jar - - --spring.config.location=classpath:/application.yml,file:/etc/feast/application.yaml - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:8080/healthz"] - interval: 30s - timeout: 10s - retries: 5 - - online-serving: - build: - context: ../.. - dockerfile: infra/docker/serving/Dockerfile - image: ${FEAST_SERVING_IMAGE}:${FEAST_VERSION} - volumes: - - ./serving/${FEAST_ONLINE_SERVING_CONFIG}:/etc/feast/application.yml - - ./serving/${FEAST_ONLINE_STORE_CONFIG}:/etc/feast/store.yml - depends_on: - - core - - redis - ports: - - 6566:6566 - restart: on-failure - command: - - java - - -jar - - /opt/feast/feast-serving.jar - --spring.config.location=classpath:/application.yml,file:/etc/feast/application.yml - batch-serving: - build: - context: ../.. 
- dockerfile: infra/docker/serving/Dockerfile - image: ${FEAST_SERVING_IMAGE}:${FEAST_VERSION} - volumes: - - ./serving/${FEAST_BATCH_SERVING_CONFIG}:/etc/feast/application.yml - - ./serving/${FEAST_BATCH_STORE_CONFIG}:/etc/feast/store.yml - - ./gcp-service-accounts/${FEAST_BATCH_SERVING_GCP_SERVICE_ACCOUNT_KEY}:/etc/gcloud/service-accounts/key.json - depends_on: - - core - - redis - ports: - - 6567:6567 - restart: on-failure - environment: - GOOGLE_APPLICATION_CREDENTIALS: /etc/gcloud/service-accounts/key.json - FEAST_JOB_STAGING_LOCATION: ${FEAST_JOB_STAGING_LOCATION} - command: - - "java" - - "-Xms1024m" - - "-Xmx1024m" - - "-jar" - - "/opt/feast/feast-serving.jar" - - "--spring.config.location=classpath:/application.yml,file:/etc/feast/application.yml" - jupyter: - image: jupyter/datascience-notebook:63d0df23b673 + image: jupyter/minimal-notebook:619e9cc2fc07 volumes: - - ../../:/home/jovyan/feast - ./gcp-service-accounts/${FEAST_JUPYTER_GCP_SERVICE_ACCOUNT_KEY}:/etc/gcloud/service-accounts/key.json + - ./jupyter/startup.sh:/etc/startup.sh depends_on: - core - - online-serving environment: FEAST_CORE_URL: core:6565 - FEAST_SERVING_URL: online-serving:6566 + FEAST_ONLINE_SERVING_URL: online-serving:6566 + FEAST_BATCH_SERVING_URL: batch-serving:6567 GOOGLE_APPLICATION_CREDENTIALS: /etc/gcloud/service-accounts/key.json + FEAST_REPOSITORY_VERSION: ${FEAST_REPOSITORY_VERSION} ports: - 8888:8888 - command: - - start-notebook.sh - - --NotebookApp.token='' - - redis: - image: redis:5-alpine - ports: - - "6379:6379" + command: ["/etc/startup.sh"] kafka: image: confluentinc/cp-kafka:5.2.1 @@ -124,38 +67,4 @@ services: environment: POSTGRES_PASSWORD: password ports: - - "5432:5342" - - end-to-end-tests: - build: - context: ../.. - dockerfile: tests/e2e/Dockerfile - args: - FEAST_CI_IMAGE: ${FEAST_CI_IMAGE}:${FEAST_VERSION} - volumes: - - ../..:/feast - depends_on: - - core - - online-serving - command: - - bash - - -c - - | - set -ex - cd /feast - - make compile-protos-python - - # Ensure Feast Core is running and has initialized - for i in {1..60}; do curl -sf http://core:8080/healthz && break; echo "Waiting for Core..."; sleep 1; done - curl -f http://core:8080/healthz - - # Run single end-to-end test - cd tests/e2e - pytest basic-ingest-redis-serving.py \ - --core_url core:6565 \ - --serving_url online-serving:6566 \ - -k test_basic_register_feature_set_success \ - -sv \ - --junitxml=build/junit.xml \ - -o junit_family=xunit2 + - "5432:5432" diff --git a/infra/docker-compose/gcp-service-accounts/placeholder.json b/infra/docker-compose/gcp-service-accounts/placeholder.json index 9e26dfeeb6..5609d6923c 100644 --- a/infra/docker-compose/gcp-service-accounts/placeholder.json +++ b/infra/docker-compose/gcp-service-accounts/placeholder.json @@ -1 +1,4 @@ -{} \ No newline at end of file +{ + "type": "service_account", + "project_id": "just-some-project" +} \ No newline at end of file diff --git a/infra/docker-compose/jupyter/startup.sh b/infra/docker-compose/jupyter/startup.sh new file mode 100755 index 0000000000..5279323cf8 --- /dev/null +++ b/infra/docker-compose/jupyter/startup.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +set -ex + +# Clone Feast repository into Jupyter container +git clone -b ${FEAST_REPOSITORY_VERSION} --single-branch https://github.com/feast-dev/feast.git || true + +# Install Python dependencies +make -C feast/ compile-protos-python + +# Install CI requirements (only needed for running tests) +pip install -r feast/sdk/python/requirements-ci.txt + +# Install Feast SDK +pip 
install -e feast/sdk/python -U + +# Start Jupyter Notebook +start-notebook.sh --NotebookApp.token='' \ No newline at end of file diff --git a/infra/docker-compose/serving/batch-serving.yml b/infra/docker-compose/serving/batch-serving.yml index 0b1c3fbf96..c34aba277c 100644 --- a/infra/docker-compose/serving/batch-serving.yml +++ b/infra/docker-compose/serving/batch-serving.yml @@ -1,14 +1,23 @@ feast: core-host: core - core-grpc-port: 6565 - store: - config-path: /etc/feast/store.yml - - jobs: - store-type: REDIS - store-options: - host: redis - port: 6379 - + active-store: historical + stores: + - name: historical + type: BIGQUERY + # Changes required for batch serving to work + # Please see https://api.docs.feast.dev/grpc/feast.core.pb.html#Store for configuration options + config: + project_id: project + dataset_id: dataset + staging_location: gs://gcs_bucket/prefix + initial_retry_delay_seconds: 1 + total_timeout_seconds: 21600 + subscriptions: + - name: "*" + project: "*" + job_store: + redis_host: redis + redis_port: 6379 + grpc: port: 6567 diff --git a/infra/docker-compose/serving/bq-store.yml b/infra/docker-compose/serving/bq-store.yml deleted file mode 100644 index cdebee3497..0000000000 --- a/infra/docker-compose/serving/bq-store.yml +++ /dev/null @@ -1,9 +0,0 @@ -name: warehouse -type: BIGQUERY -bigquery_config: - project_id: - dataset_id: -subscriptions: -- name: "*" - project: "*" - version: "*" \ No newline at end of file diff --git a/infra/docker-compose/serving/online-serving.yml b/infra/docker-compose/serving/online-serving.yml index 089af32dd4..7ca1e7113f 100644 --- a/infra/docker-compose/serving/online-serving.yml +++ b/infra/docker-compose/serving/online-serving.yml @@ -1,13 +1,12 @@ feast: core-host: core - core-grpc-port: 6565 - store: - config-path: /etc/feast/store.yml - jobs: - store-type: REDIS - store-options: - host: redis - port: 6379 - -grpc: - port: 6566 \ No newline at end of file + active_store: online + stores: + - name: online + type: REDIS + config: + host: redis + port: 6379 + subscriptions: + - name: "*" + project: "*" \ No newline at end of file diff --git a/infra/docker-compose/serving/redis-store.yml b/infra/docker-compose/serving/redis-store.yml deleted file mode 100644 index 16870be94d..0000000000 --- a/infra/docker-compose/serving/redis-store.yml +++ /dev/null @@ -1,9 +0,0 @@ -name: serving -type: REDIS -redis_config: - host: redis - port: 6379 -subscriptions: -- name: "*" - project: "*" - version: "*" diff --git a/infra/docker/ci/Dockerfile b/infra/docker/ci/Dockerfile index 47ad1ad7f6..08da02ae20 100644 --- a/infra/docker/ci/Dockerfile +++ b/infra/docker/ci/Dockerfile @@ -41,11 +41,4 @@ RUN PROTOC_ZIP=protoc-${PROTOC_VERSION}-linux-x86_64.zip && \ go get gopkg.in/russross/blackfriday.v2 && \ git clone https://github.com/istio/tools/ && \ cd tools/cmd/protoc-gen-docs && \ - go build && mkdir -p $HOME/bin && cp protoc-gen-docs $HOME/bin - -COPY Makefile /build/Makefile -COPY sdk/python/requirements-*.txt /build/sdk/python/ -RUN cd /build && \ - make install-python-ci-dependencies install-go-ci-dependencies && \ - pip install -r sdk/python/requirements-dev.txt && \ - rm -rf /build + go build && mkdir -p $HOME/bin && cp protoc-gen-docs $HOME/bin \ No newline at end of file diff --git a/infra/docker/core/Dockerfile b/infra/docker/core/Dockerfile index 86bd4f6018..476dfaef4f 100644 --- a/infra/docker/core/Dockerfile +++ b/infra/docker/core/Dockerfile @@ -2,14 +2,30 @@ # Build stage 1: Builder # 
============================================================ -FROM maven:3.6-jdk-11-slim as builder +FROM maven:3.6-jdk-11 as builder ARG REVISION=dev + +# +# Download unarchiver +# +RUN apt-get -qq update && apt-get -y install unar + +# +# Download grpc_health_probe to run health check for Feast Serving +# https://kubernetes.io/blog/2018/10/01/health-checking-grpc-servers-on-kubernetes/ +# +RUN wget -q https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/v0.3.1/grpc_health_probe-linux-amd64 \ + -O /usr/bin/grpc-health-probe && \ + chmod +x /usr/bin/grpc-health-probe + + WORKDIR /build COPY datatypes datatypes COPY storage storage COPY sdk/java sdk/java +COPY docs/coverage/java docs/coverage/java COPY core core COPY ingestion ingestion COPY protos protos @@ -35,9 +51,8 @@ RUN mvn --also-make --projects core,ingestion -Drevision=$REVISION \ # The final size of the production image will be bigger but it seems # a good tradeoff between speed and size. # -# https://github.com/gojek/feast/pull/291 -RUN apt-get -qq update && apt-get -y install unar && \ - unar /build/core/target/feast-core-$REVISION.jar -o /build/core/target/ +# https://github.com/feast-dev/feast/pull/291 +RUN unar /build/core/target/feast-core-$REVISION.jar -o /build/core/target/ # ============================================================ # Build stage 2: Production @@ -45,11 +60,14 @@ RUN apt-get -qq update && apt-get -y install unar && \ FROM openjdk:11-jre as production ARG REVISION=dev -COPY --from=builder /build/core/target/feast-core-$REVISION.jar /opt/feast/feast-core.jar + # Required for staging jar dependencies when submitting Dataflow jobs. COPY --from=builder /build/core/target/feast-core-$REVISION /opt/feast/feast-core +COPY --from=builder /build/core/target/feast-core-$REVISION.jar /opt/feast/feast-core.jar +COPY --from=builder /usr/bin/grpc-health-probe /usr/bin/grpc-health-probe + CMD ["java",\ "-Xms2048m",\ "-Xmx2048m",\ "-jar",\ - "/opt/feast/feast-core.jar"] \ No newline at end of file + "/opt/feast/feast-core.jar"] diff --git a/infra/docker/core/Dockerfile.debug b/infra/docker/core/Dockerfile.debug new file mode 100644 index 0000000000..e69de29bb2 diff --git a/infra/docker/serving/Dockerfile b/infra/docker/serving/Dockerfile index cf7d520d09..786452e6da 100644 --- a/infra/docker/serving/Dockerfile +++ b/infra/docker/serving/Dockerfile @@ -5,11 +5,21 @@ FROM maven:3.6-jdk-11 as builder ARG REVISION=dev +# +# Download grpc_health_probe to run health check for Feast Serving +# https://kubernetes.io/blog/2018/10/01/health-checking-grpc-servers-on-kubernetes/ +# +RUN wget -q https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/v0.3.1/grpc_health_probe-linux-amd64 \ + -O /usr/bin/grpc-health-probe && \ + chmod +x /usr/bin/grpc-health-probe + + WORKDIR /build COPY datatypes datatypes COPY storage storage COPY sdk/java sdk/java +COPY docs/coverage/java docs/coverage/java COPY core core COPY ingestion ingestion COPY protos protos @@ -28,13 +38,6 @@ COPY LICENSE .m[2] .m2/ ENV MAVEN_OPTS="-Dmaven.repo.local=/build/.m2/repository -DdependencyLocationsEnabled=false" RUN mvn --also-make --projects serving -Drevision=$REVISION \ -DskipTests=true --batch-mode clean package -# -# Download grpc_health_probe to run health check for Feast Serving -# https://kubernetes.io/blog/2018/10/01/health-checking-grpc-servers-on-kubernetes/ -# -RUN wget -q https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/v0.3.1/grpc_health_probe-linux-amd64 \ - -O 
/usr/bin/grpc-health-probe && \
-  chmod +x /usr/bin/grpc-health-probe
 
 # ============================================================
 # Build stage 2: Production
@@ -42,10 +45,10 @@ RUN wget -q https://github.com/grpc-ecosystem/grpc-health-probe/releases/downloa
 FROM openjdk:11-jre-slim as production
 ARG REVISION=dev
 
-COPY --from=builder /build/serving/target/feast-serving-$REVISION.jar /opt/feast/feast-serving.jar
 COPY --from=builder /usr/bin/grpc-health-probe /usr/bin/grpc-health-probe
+COPY --from=builder /build/serving/target/feast-serving-$REVISION.jar /opt/feast/feast-serving.jar
 
 CMD ["java",\
      "-Xms1024m",\
      "-Xmx1024m",\
      "-jar",\
-     "/opt/feast/feast-serving.jar"]
\ No newline at end of file
+     "/opt/feast/feast-serving.jar"]
diff --git a/infra/scripts/setup-common-functions.sh b/infra/scripts/setup-common-functions.sh
new file mode 100755
index 0000000000..e158e0f567
--- /dev/null
+++ b/infra/scripts/setup-common-functions.sh
@@ -0,0 +1,132 @@
+#!/usr/bin/env bash
+
+# Get Feast project repository root and scripts directory
+export PROJECT_ROOT_DIR=$(git rev-parse --show-toplevel)
+export SCRIPTS_DIR=${PROJECT_ROOT_DIR}/infra/scripts
+
+install_test_tools() {
+  apt-get -qq update
+  apt-get -y install wget netcat kafkacat build-essential
+}
+
+install_gcloud_sdk() {
+  print_banner "Installing Google Cloud SDK"
+  if [[ ! $(command -v gsutil) ]]; then
+    CURRENT_DIR=$(dirname "$BASH_SOURCE")
+    . "${CURRENT_DIR}"/install-google-cloud-sdk.sh
+  fi
+
+  export GOOGLE_APPLICATION_CREDENTIALS
+  gcloud auth activate-service-account --key-file ${GOOGLE_APPLICATION_CREDENTIALS}
+}
+
+install_and_start_local_redis() {
+  print_banner "Installing and starting Redis at localhost:6379"
+  # Allow starting serving in this Maven Docker image. Default set to not allowed.
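  # Background on the policy-rc.d trick used below: Debian's invoke-rc.d consults
  # /usr/sbin/policy-rc.d before starting a service during package installation;
  # an exit status of 101 forbids the start, while exit 0 permits it.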
+  echo "exit 0" >/usr/sbin/policy-rc.d
+  apt-get -y install redis-server >/var/log/redis.install.log
+  redis-server --daemonize yes
+  redis-cli ping
+}
+
+install_and_start_local_redis_cluster() {
+  print_banner "Installing Redis at localhost:6379"
+  echo "exit 0" >/usr/sbin/policy-rc.d
+  ${SCRIPTS_DIR}/setup-redis-cluster.sh
+  redis-cli -c -p 7000 ping
+}
+
+install_and_start_local_postgres() {
+  print_banner "Installing and starting Postgres at localhost:5432"
+  apt-get -y install postgresql >/var/log/postgresql.install.log
+  service postgresql start
+  # Initialize with database: 'postgres', user: 'postgres', password: 'password'
+  cat <<EOF >/tmp/update-postgres-role.sh
+psql -c "ALTER USER postgres PASSWORD 'password';"
+EOF
+  chmod +x /tmp/update-postgres-role.sh
+  su -s /bin/bash -c /tmp/update-postgres-role.sh postgres
+  export PGPASSWORD=password
+  pg_isready
+}
+
+install_and_start_local_zookeeper_and_kafka() {
+  print_banner "Installing and starting Zookeeper at localhost:2181 and Kafka at localhost:9092"
+  wget -qO- https://www-eu.apache.org/dist/kafka/2.3.0/kafka_2.12-2.3.0.tgz | tar xz
+  mv kafka_2.12-2.3.0/ /tmp/kafka
+
+  nohup /tmp/kafka/bin/zookeeper-server-start.sh /tmp/kafka/config/zookeeper.properties &>/var/log/zookeeper.log 2>&1 &
+  ${SCRIPTS_DIR}/wait-for-it.sh localhost:2181 --timeout=20
+  tail -n10 /var/log/zookeeper.log
+
+  nohup /tmp/kafka/bin/kafka-server-start.sh /tmp/kafka/config/server.properties &>/var/log/kafka.log 2>&1 &
+  ${SCRIPTS_DIR}/wait-for-it.sh localhost:9092 --timeout=40
+  tail -n10 /var/log/kafka.log
+  kafkacat -b localhost:9092 -L
+}
+
+build_feast_core_and_serving() {
+  print_banner "Building Feast Core and Feast Serving"
+  infra/scripts/download-maven-cache.sh \
+    --archive-uri gs://feast-templocation-kf-feast/.m2.2019-10-24.tar \
+    --output-dir /root/
+
+  # Build jars for Feast
+  mvn --quiet --batch-mode --define skipTests=true clean package
+
+  ls -lh core/target/*jar
+  ls -lh serving/target/*jar
+}
+
+start_feast_core() {
+  print_banner "Starting Feast Core"
+
+  if [ -n "$1" ]; then
+    echo "Custom Spring application.yml location provided: $1"
+    export CONFIG_ARG="--spring.config.location=file://$1"
+  fi
+
+  nohup java -jar core/target/feast-core-$FEAST_BUILD_VERSION.jar $CONFIG_ARG &>/var/log/feast-core.log &
+  ${SCRIPTS_DIR}/wait-for-it.sh localhost:6565 --timeout=90
+
+  tail -n10 /var/log/feast-core.log
+  nc -w2 localhost 6565 </dev/null
+}
+
+start_feast_serving() {
+  print_banner "Starting Feast Online Serving"
+
+  if [ -n "$1" ]; then
+    echo "Custom Spring application.yml location provided: $1"
+    export CONFIG_ARG="--spring.config.location=file://$1"
+  fi
+
+  nohup java -jar serving/target/feast-serving-$FEAST_BUILD_VERSION.jar $CONFIG_ARG &>/var/log/feast-serving-online.log &
+  ${SCRIPTS_DIR}/wait-for-it.sh localhost:6566 --timeout=60
+
+  tail -n100 /var/log/feast-serving-online.log
+  nc -w2 localhost 6566 </dev/null
+}
+
+print_banner() {
+  echo "
+============================================================
+$1
+============================================================
+"
+}
diff --git a/infra/scripts/setup-redis-cluster.sh b/infra/scripts/setup-redis-cluster.sh
new file mode 100755
--- /dev/null
+++ b/infra/scripts/setup-redis-cluster.sh
+#!/usr/bin/env bash
+
+apt-get -y install redis-server >/var/log/redis.install.log
+
+mkdir 7000 7001 7002 7003 7004 7005
+for i in {0..5} ; do
+echo "port 700$i
+cluster-enabled yes
+cluster-config-file nodes-$i.conf
+cluster-node-timeout 5000
+appendonly yes" > 700$i/redis.conf
+redis-server 700$i/redis.conf --daemonize yes
+done
+echo yes | redis-cli --cluster create 127.0.0.1:7000 127.0.0.1:7001 \
+127.0.0.1:7002 127.0.0.1:7003 127.0.0.1:7004 127.0.0.1:7005 \
+--cluster-replicas 1
diff --git a/infra/scripts/test-docker-compose.sh b/infra/scripts/test-docker-compose.sh
new file mode 100755
index 0000000000..884c299ff1
--- /dev/null
+++ b/infra/scripts/test-docker-compose.sh
@@ -0,0 +1,45 @@
+#!/usr/bin/env bash
+
+set -e
+
+echo "
+============================================================
+Running Docker Compose tests with pytest at 'tests/e2e'
+============================================================
+"
+
+clean_up () {
+  ARG=$?
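  # $? at this point holds the exit status of the command that triggered the EXIT
  # trap; saving it in ARG lets the cleanup steps run and still propagate the
  # original status through `exit $ARG` below.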
+ + # Shut down docker-compose images + docker-compose -f docker-compose.yml -f docker-compose.online.yml down + + # Remove configuration file + rm .env + + exit $ARG +} + +trap clean_up EXIT + +export PROJECT_ROOT_DIR=$(git rev-parse --show-toplevel) +export COMPOSE_INTERACTIVE_NO_CLI=1 + +# Create Docker Compose configuration file +cd ${PROJECT_ROOT_DIR}/infra/docker-compose/ +cp .env.sample .env + +# Start Docker Compose containers +docker-compose -f docker-compose.yml -f docker-compose.online.yml up -d + +# Get Jupyter container IP address +export JUPYTER_DOCKER_CONTAINER_IP_ADDRESS=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' feast_jupyter_1) + +# Print Jupyter container information +docker logs feast_jupyter_1 + +# Wait for Jupyter Notebook Container to come online +${PROJECT_ROOT_DIR}/infra/scripts/wait-for-it.sh ${JUPYTER_DOCKER_CONTAINER_IP_ADDRESS}:8888 --timeout=300 + +# Run e2e tests for Redis +docker exec feast_jupyter_1 bash -c 'cd feast/tests/e2e/ && pytest -s basic-ingest-redis-serving.py --core_url core:6565 --serving_url=online-serving:6566' diff --git a/infra/scripts/test-end-to-end-batch-dataflow.sh b/infra/scripts/test-end-to-end-batch-dataflow.sh new file mode 100755 index 0000000000..e8fb3b9565 --- /dev/null +++ b/infra/scripts/test-end-to-end-batch-dataflow.sh @@ -0,0 +1,290 @@ +#!/usr/bin/env bash +echo "Preparing environment variables..." + +set -e +set -o pipefail + +test -z ${GOOGLE_APPLICATION_CREDENTIALS} && GOOGLE_APPLICATION_CREDENTIALS="/etc/service-account-df/service-account-df.json" +test -z ${GCLOUD_PROJECT} && GCLOUD_PROJECT="kf-feast" +test -z ${GCLOUD_REGION} && GCLOUD_REGION="us-central1" +test -z ${GCLOUD_NETWORK} && GCLOUD_NETWORK="default" +test -z ${GCLOUD_SUBNET} && GCLOUD_SUBNET="default" +test -z ${TEMP_BUCKET} && TEMP_BUCKET="feast-templocation-kf-feast" +test -z ${K8_CLUSTER_NAME} && K8_CLUSTER_NAME="feast-e2e-dataflow" +test -z ${HELM_RELEASE_NAME} && HELM_RELEASE_NAME="pr-$PULL_NUMBER" +test -z ${HELM_COMMON_NAME} && HELM_COMMON_NAME="deps" +test -z ${DATASET_NAME} && DATASET_NAME=feast_e2e_$(date +%s) + +feast_kafka_1_ip_name="feast-kafka-1" +feast_kafka_2_ip_name="feast-kafka-2" +feast_kafka_3_ip_name="feast-kafka-3" +feast_redis_ip_name="feast-redis" +feast_statsd_ip_name="feast-statsd" + +echo " +This script will run end-to-end tests for Feast Core and Batch Serving using Dataflow Runner. + +1. Setup K8s cluster (optional, if it was not created before) +2. Reuse existing IP addresses or generate new ones for stateful services +3. Install stateful services (kafka, redis, postgres, etc) (optional) +4. Build core & serving docker images (optional) +5. Create temporary BQ table for Feast Serving. +6. Rollout target images to cluster via helm in dedicated namespace (pr-{number}) +7. Install Python 3.7.4, Feast Python SDK and run end-to-end tests from + tests/e2e via pytest. +8. Tear down feast services, keep stateful services. 
+" + +ORIGINAL_DIR=$(pwd) +echo $ORIGINAL_DIR + +echo "Environment:" +printenv + +export GOOGLE_APPLICATION_CREDENTIALS +gcloud auth activate-service-account --key-file ${GOOGLE_APPLICATION_CREDENTIALS} +gcloud -q auth configure-docker + +gcloud config set project ${GCLOUD_PROJECT} +gcloud config set compute/region ${GCLOUD_REGION} +gcloud config list + +apt-get -qq update +apt-get -y install wget build-essential gettext-base curl + +curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3 +chmod 700 $ORIGINAL_DIR/get_helm.sh +$ORIGINAL_DIR/get_helm.sh + + +function getPublicAddresses() { + existing_addresses=$(gcloud compute addresses list --filter="region:($GCLOUD_REGION) name:kafka" --format "list(name)") + if [[ -z "$existing_addresses" ]]; then + echo " +============================================================ +Reserving IP addresses for Feast dependencies +============================================================ +" + + gcloud compute addresses create \ + $feast_kafka_1_ip_name $feast_kafka_2_ip_name $feast_kafka_3_ip_name $feast_redis_ip_name $feast_statsd_ip_name \ + --region ${GCLOUD_REGION} --subnet ${GCLOUD_SUBNET} + fi + + + export feast_kafka_1_ip=$(gcloud compute addresses describe $feast_kafka_1_ip_name --region=${GCLOUD_REGION} --format "value(address)") + export feast_kafka_2_ip=$(gcloud compute addresses describe $feast_kafka_2_ip_name --region=${GCLOUD_REGION} --format "value(address)") + export feast_kafka_3_ip=$(gcloud compute addresses describe $feast_kafka_3_ip_name --region=${GCLOUD_REGION} --format "value(address)") + export feast_redis_ip=$(gcloud compute addresses describe $feast_redis_ip_name --region=${GCLOUD_REGION} --format "value(address)") + export feast_statsd_ip=$(gcloud compute addresses describe $feast_statsd_ip_name --region=${GCLOUD_REGION} --format "value(address)") +} + +function createKubeCluster() { + echo " +============================================================ +Creating GKE nodepool for Feast e2e test with DataflowRunner +============================================================ +" + gcloud container clusters create ${K8_CLUSTER_NAME} --region ${GCLOUD_REGION} \ + --enable-cloud-logging \ + --enable-cloud-monitoring \ + --network ${GCLOUD_NETWORK} \ + --subnetwork ${GCLOUD_SUBNET} \ + --scopes https://www.googleapis.com/auth/devstorage.read_only,https://www.googleapis.com/auth/logging.write,\ +https://www.googleapis.com/auth/monitoring,https://www.googleapis.com/auth/service.management.readonly,\ +https://www.googleapis.com/auth/servicecontrol,https://www.googleapis.com/auth/trace.append,\ +https://www.googleapis.com/auth/bigquery \ + --machine-type n1-standard-2 + + echo " +============================================================ +Create feast-postgres-database Secret in GKE nodepool +============================================================ +" + kubectl create secret generic feast-postgresql --from-literal=postgresql-password=password + + echo " +============================================================ +Create feast-gcp-service-account Secret in GKE nodepool +============================================================ +" + cd $ORIGINAL_DIR/infra/scripts + kubectl create secret generic feast-gcp-service-account --from-file=credentials.json=${GOOGLE_APPLICATION_CREDENTIALS} +} + +function installDependencies() { + echo " +============================================================ +Helm install common parts (kafka, redis, etc) 
+============================================================ +" + cd $ORIGINAL_DIR/infra/charts/feast + + helm install --wait --debug --values="values-end-to-end-batch-dataflow-updated.yaml" \ + --set "feast-core.enabled=false" \ + --set "feast-online-serving.enabled=false" \ + --set "feast-batch-serving.enabled=false" \ + "$HELM_COMMON_NAME" . + +} + +function buildAndPushImage() +{ + echo docker build -t $1:$2 --build-arg REVISION=$2 -f $3 $ORIGINAL_DIR + docker build -t $1:$2 --build-arg REVISION=$2 -f $3 $ORIGINAL_DIR + docker push $1:$2 +} + +function buildTarget() { + buildAndPushImage "gcr.io/kf-feast/feast-core" "$PULL_NUMBER" "$ORIGINAL_DIR/infra/docker/core/Dockerfile" + buildAndPushImage "gcr.io/kf-feast/feast-serving" "$PULL_NUMBER" "$ORIGINAL_DIR/infra/docker/serving/Dockerfile" +} + +function installTarget() { + echo " +============================================================ +Helm install feast +============================================================ +" + cd $ORIGINAL_DIR/infra/charts/feast + + helm install --wait --timeout 300s --debug --values="values-end-to-end-batch-dataflow-updated.yaml" \ + --set "postgresql.enabled=false" \ + --set "kafka.enabled=false" \ + --set "redis.enabled=false" \ + --set "prometheus-statsd-exporter.enabled=false" \ + --set "prometheus.enabled=false" \ + "$HELM_RELEASE_NAME" . + +} + +function clean() { + echo " + ============================================================ + Cleaning up + ============================================================ + " + cd $ORIGINAL_DIR/tests/e2e + + # Remove BQ Dataset + bq rm -r -f ${GCLOUD_PROJECT}:${DATASET_NAME} + + # Uninstall helm release before clearing PVCs + helm uninstall ${HELM_RELEASE_NAME} + + # Stop Dataflow jobs from retrieved Dataflow job ids in ingesting_jobs.txt + if [ -f ingesting_jobs.txt ]; then + while read line + do + echo $line + gcloud dataflow jobs cancel $line --region=${GCLOUD_REGION} + done < ingesting_jobs.txt + fi +} + +# 1. +existing_cluster=$(gcloud container clusters list --format "list(name)" --filter "name:$K8_CLUSTER_NAME") +if [[ -z $existing_cluster ]]; then + createKubeCluster "$@" +else + gcloud container clusters get-credentials $K8_CLUSTER_NAME --region $GCLOUD_REGION --project $GCLOUD_PROJECT +fi + +# 2. +getPublicAddresses "$@" + +echo " +============================================================ +Export required environment variables +============================================================ +" + +export TEMP_BUCKET=$TEMP_BUCKET +export DATASET_NAME=$DATASET_NAME +export GCLOUD_PROJECT=$GCLOUD_PROJECT +export GCLOUD_NETWORK=$GCLOUD_NETWORK +export GCLOUD_SUBNET=$GCLOUD_SUBNET +export GCLOUD_REGION=$GCLOUD_REGION +export HELM_COMMON_NAME=$HELM_COMMON_NAME +export IMAGE_TAG=${PULL_PULL_SHA:1} + +envsubst $'$TEMP_BUCKET $DATASET_NAME $GCLOUD_PROJECT $GCLOUD_NETWORK \ + $GCLOUD_SUBNET $GCLOUD_REGION $IMAGE_TAG $HELM_COMMON_NAME $feast_kafka_1_ip + $feast_kafka_2_ip $feast_kafka_3_ip $feast_redis_ip $feast_statsd_ip' < $ORIGINAL_DIR/infra/scripts/test-templates/values-end-to-end-batch-dataflow.yaml > $ORIGINAL_DIR/infra/charts/feast/values-end-to-end-batch-dataflow-updated.yaml + + +# 3. +existing_deps=$(helm list --filter deps -q) +if [[ -z $existing_deps ]]; then + installDependencies "$@" +fi + +# 4. +# buildTarget "$@" + +# 5. 
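+# The dataset below is created with --default_table_expiration 86400, i.e.
+# every table the test writes expires automatically after 24 hours.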
+echo "
+============================================================
+Creating temp BQ table for Feast Serving
+============================================================
+"
+
+bq --location=US --project_id=${GCLOUD_PROJECT} mk \
+  --dataset \
+  --default_table_expiration 86400 \
+  ${GCLOUD_PROJECT}:${DATASET_NAME}
+
+
+# 6.
+
+set +e
+installTarget "$@"
+
+# 7.
+echo "
+============================================================
+Installing Python 3.7 with Miniconda and Feast SDK
+============================================================
+"
+cd $ORIGINAL_DIR
+# Install Python 3.7 with Miniconda
+wget -q https://repo.continuum.io/miniconda/Miniconda3-4.7.12-Linux-x86_64.sh \
+  -O /tmp/miniconda.sh
+bash /tmp/miniconda.sh -b -p /root/miniconda -f
+/root/miniconda/bin/conda init
+source ~/.bashrc
+
+# Install Feast Python SDK and test requirements
+cd $ORIGINAL_DIR
+make compile-protos-python
+pip install -qe sdk/python
+pip install -qr tests/e2e/requirements.txt
+
+echo "
+============================================================
+Running end-to-end tests with pytest at 'tests/e2e'
+============================================================
+"
+# Default artifact location setting in Prow jobs
+LOGS_ARTIFACT_PATH=/logs/artifacts
+
+cd $ORIGINAL_DIR/tests/e2e
+
+core_ip=$(kubectl get -o jsonpath="{.spec.clusterIP}" service ${HELM_RELEASE_NAME}-feast-core)
+serving_ip=$(kubectl get -o jsonpath="{.spec.clusterIP}" service ${HELM_RELEASE_NAME}-feast-batch-serving)
+
+set +e
+pytest -v bq-batch-retrieval.py -m dataflow_runner --core_url "$core_ip:6565" --serving_url "$serving_ip:6566" --gcs_path "gs://${TEMP_BUCKET}/" --junitxml=${LOGS_ARTIFACT_PATH}/python-sdk-test-report.xml
+TEST_EXIT_CODE=$?
+
+if [[ ${TEST_EXIT_CODE} != 0 ]]; then
+  echo "[DEBUG] Printing logs"
+  ls -ltrh /var/log/feast*
+  cat /var/log/feast-serving-warehouse.log /var/log/feast-core.log
+
+  echo "[DEBUG] Printing Python packages list"
+  pip list
+fi
+
+clean "$@"
+exit ${TEST_EXIT_CODE}
diff --git a/infra/scripts/test-end-to-end-batch.sh b/infra/scripts/test-end-to-end-batch.sh
index 35553a9281..f03f7a10e0 100755
--- a/infra/scripts/test-end-to-end-batch.sh
+++ b/infra/scripts/test-end-to-end-batch.sh
@@ -3,12 +3,32 @@
 set -e
 set -o pipefail

+PYTEST_MARK='direct_runner' #default
+
+print_usage() {
+  printf "Usage: ./test-end-to-end-batch -m pytest_mark"
+}
+
+while getopts 'm:' flag; do
+  case "${flag}" in
+    m) PYTEST_MARK="${OPTARG}" ;;
+    *) print_usage
+       exit 1 ;;
+  esac
+done
+
 test -z ${GOOGLE_APPLICATION_CREDENTIALS} && GOOGLE_APPLICATION_CREDENTIALS="/etc/service-account/service-account.json"
 test -z ${SKIP_BUILD_JARS} && SKIP_BUILD_JARS="false"
 test -z ${GOOGLE_CLOUD_PROJECT} && GOOGLE_CLOUD_PROJECT="kf-feast"
 test -z ${TEMP_BUCKET} && TEMP_BUCKET="feast-templocation-kf-feast"
 test -z ${JOBS_STAGING_LOCATION} && JOBS_STAGING_LOCATION="gs://${TEMP_BUCKET}/staging-location"
-test -z ${JAR_VERSION_SUFFIX} && JAR_VERSION_SUFFIX="-SNAPSHOT"
+test -z ${FEAST_BUILD_VERSION} && FEAST_BUILD_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
+
+echo "Testing version: $FEAST_BUILD_VERSION"
+
+# Get Feast project repository root and scripts directory
+export PROJECT_ROOT_DIR=$(git rev-parse --show-toplevel)
+export SCRIPTS_DIR=${PROJECT_ROOT_DIR}/infra/scripts

 echo "
 This script will run end-to-end tests for Feast Core and Batch Serving.
@@ -21,228 +41,77 @@ This script will run end-to-end tests for Feast Core and Batch Serving.
 tests/e2e via pytest.
 "

-apt-get -qq update
-apt-get -y install wget netcat kafkacat build-essential
-
-
-echo "
-============================================================
-Installing gcloud SDK
-============================================================
-"
-if [[ ! $(command -v gsutil) ]]; then
-  CURRENT_DIR=$(dirname "$BASH_SOURCE")
-  . "${CURRENT_DIR}"/install-google-cloud-sdk.sh
-fi
-
-export GOOGLE_APPLICATION_CREDENTIALS
-gcloud auth activate-service-account --key-file ${GOOGLE_APPLICATION_CREDENTIALS}
-
-
-
-echo "
-============================================================
-Installing Redis at localhost:6379
-============================================================
-"
-# Allow starting serving in this Maven Docker image. Default set to not allowed.
-echo "exit 0" > /usr/sbin/policy-rc.d
-apt-get -y install redis-server > /var/log/redis.install.log
-redis-server --daemonize yes
-redis-cli ping
-
-echo "
-============================================================
-Installing Postgres at localhost:5432
-============================================================
-"
-apt-get -y install postgresql > /var/log/postgresql.install.log
-service postgresql start
-# Initialize with database: 'postgres', user: 'postgres', password: 'password'
-cat <<EOF > /tmp/update-postgres-role.sh
-psql -c "ALTER USER postgres PASSWORD 'password';"
-EOF
-chmod +x /tmp/update-postgres-role.sh
-su -s /bin/bash -c /tmp/update-postgres-role.sh postgres
-export PGPASSWORD=password
-pg_isready
+source ${SCRIPTS_DIR}/setup-common-functions.sh

-echo "
-============================================================
-Installing Zookeeper at localhost:2181
-Installing Kafka at localhost:9092
-============================================================
-"
-wget -qO- https://www-eu.apache.org/dist/kafka/2.3.0/kafka_2.12-2.3.0.tgz | tar xz
-mv kafka_2.12-2.3.0/ /tmp/kafka
-nohup /tmp/kafka/bin/zookeeper-server-start.sh /tmp/kafka/config/zookeeper.properties &> /var/log/zookeeper.log 2>&1 &
-sleep 5
-tail -n10 /var/log/zookeeper.log
-nohup /tmp/kafka/bin/kafka-server-start.sh /tmp/kafka/config/server.properties &> /var/log/kafka.log 2>&1 &
-sleep 20
-tail -n10 /var/log/kafka.log
-kafkacat -b localhost:9092 -L
+install_test_tools
+install_gcloud_sdk
+install_and_start_local_redis
+install_and_start_local_postgres
+install_and_start_local_zookeeper_and_kafka

 if [[ ${SKIP_BUILD_JARS} != "true" ]]; then
-  echo "
-  ============================================================
-  Building jars for Feast
-  ============================================================
-  "
-
-  infra/scripts/download-maven-cache.sh \
-    --archive-uri gs://feast-templocation-kf-feast/.m2.2019-10-24.tar \
-    --output-dir /root/
-
-  # Build jars for Feast
-  mvn --quiet --batch-mode --define skipTests=true clean package
-
-  ls -lh core/target/*jar
-  ls -lh serving/target/*jar
+  build_feast_core_and_serving
 else
   echo "[DEBUG] Skipping building jars"
 fi

-echo "
-============================================================
-Starting Feast Core
-============================================================
-"
-# Start Feast Core in background
-cat <<EOF > /tmp/core.application.yml
-grpc:
-  port: 6565
-  enable-reflection: true
-
-feast:
-  version: 0.3
-  jobs:
-    runner: DirectRunner
-    options: {}
-    updates:
-      pollingIntervalMillis: 30000
-      timeoutSeconds: 240
-    metrics:
-      enabled: false
-
-  stream:
-    type: kafka
-    options:
-      topic: feast-features
-      bootstrapServers: localhost:9092
-      replicationFactor: 1
-      partitions: 1
-
-spring:
-  jpa:
-    properties.hibernate:
-      format_sql: true
-      event.merge.entity_copy_observer: allow
-      hibernate.naming.physical-strategy=org.hibernate.boot.model.naming: PhysicalNamingStrategyStandardImpl
-    hibernate.ddl-auto: update
-  datasource:
-    url: jdbc:postgresql://localhost:5432/postgres
-    username: postgres
-    password: password
-
-management:
-  metrics:
-    export:
-      simple:
-        enabled: false
-      statsd:
-        enabled: false
-EOF
-
-nohup java -jar core/target/feast-core-*${JAR_VERSION_SUFFIX}.jar \
-  --spring.config.location=file:///tmp/core.application.yml \
-  &> /var/log/feast-core.log &
-sleep 35
-tail -n10 /var/log/feast-core.log
-nc -w2 localhost 6565 < /dev/null
-
-echo "
-============================================================
-Starting Feast Warehouse Serving
-============================================================
-"
+export FEAST_JOBS_POLLING_INTERVAL_MILLISECONDS=10000
+start_feast_core

 DATASET_NAME=feast_$(date +%s)
-
 bq --location=US --project_id=${GOOGLE_CLOUD_PROJECT} mk \
   --dataset \
   --default_table_expiration 86400 \
   ${GOOGLE_CLOUD_PROJECT}:${DATASET_NAME}

 # Start Feast Online Serving in background
-cat <<EOF > /tmp/serving.store.bigquery.yml
-name: warehouse
-type: BIGQUERY
-bigquery_config:
-  projectId: ${GOOGLE_CLOUD_PROJECT}
-  datasetId: ${DATASET_NAME}
-subscriptions:
-  - name: "*"
-    version: "*"
-    project: "*"
-EOF
-
 cat <<EOF > /tmp/serving.warehouse.application.yml
 feast:
-  version: 0.3
+  # GRPC service address for Feast Core
+  # Feast Serving requires connection to Feast Core to retrieve and reload Feast metadata (e.g. FeatureSpecs, Store information)
   core-host: localhost
   core-grpc-port: 6565
+
+  # Indicates the active store. Only a single store in the list can be active at one time. In the future this key
+  # will be deprecated in order to allow multiple stores to be served from a single serving instance
+  active_store: historical
+
+  # List of store configurations
+  stores:
+    - name: historical
+      type: BIGQUERY
+      config:
+        project_id: ${GOOGLE_CLOUD_PROJECT}
+        dataset_id: ${DATASET_NAME}
+        staging_location: ${JOBS_STAGING_LOCATION}
+        initial_retry_delay_seconds: 1
+        total_timeout_seconds: 21600
+      subscriptions:
+        - name: "*"
+          project: "*"
+          version: "*"
+
+  job_store:
+    redis_host: localhost
+    redis_port: 6379
+
   tracing:
     enabled: false
-  store:
-    config-path: /tmp/serving.store.bigquery.yml
-  jobs:
-    staging-location: ${JOBS_STAGING_LOCATION}
-    store-type: REDIS
-    bigquery-initial-retry-delay-secs: 1
-    bigquery-total-timeout-secs: 900
-    store-options:
-      host: localhost
-      port: 6379
+
 grpc:
   port: 6566
   enable-reflection: true

-spring:
-  main:
-    web-environment: false
+server:
+  port: 8081
 EOF

-nohup java -jar serving/target/feast-serving-*${JAR_VERSION_SUFFIX}.jar \
-  --spring.config.location=file:///tmp/serving.warehouse.application.yml \
-  &> /var/log/feast-serving-warehouse.log &
-sleep 15
-tail -n100 /var/log/feast-serving-warehouse.log
-nc -w2 localhost 6566 < /dev/null
+start_feast_serving /tmp/serving.warehouse.application.yml

-echo "
-============================================================
-Installing Python 3.7 with Miniconda and Feast SDK
-============================================================
-"
-# Install Python 3.7 with Miniconda
-wget -q https://repo.continuum.io/miniconda/Miniconda3-4.7.12-Linux-x86_64.sh \
-  -O /tmp/miniconda.sh
-bash /tmp/miniconda.sh -b -p /root/miniconda -f
-/root/miniconda/bin/conda init
-source ~/.bashrc
-
-# Install Feast Python SDK and test requirements
-make compile-protos-python
-pip install -qe sdk/python
-pip install -qr tests/e2e/requirements.txt
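+# Helper from setup-common-functions.sh: installs Python 3.7 via Miniconda,
+# the Feast Python SDK, and the e2e test requirements.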
+install_python_with_miniconda_and_feast_sdk

-echo "
-============================================================
-Running end-to-end tests with pytest at 'tests/e2e'
-============================================================
-"
+print_banner "Running end-to-end tests with pytest at 'tests/e2e'"

 # Default artifact location setting in Prow jobs
 LOGS_ARTIFACT_PATH=/logs/artifacts
@@ -250,7 +119,7 @@ ORIGINAL_DIR=$(pwd)
 cd tests/e2e

 set +e
-pytest bq-batch-retrieval.py --gcs_path "gs://${TEMP_BUCKET}/" --junitxml=${LOGS_ARTIFACT_PATH}/python-sdk-test-report.xml
+pytest bq-batch-retrieval.py -m ${PYTEST_MARK} --gcs_path "gs://${TEMP_BUCKET}/" --junitxml=${LOGS_ARTIFACT_PATH}/python-sdk-test-report.xml
 TEST_EXIT_CODE=$?

@@ -263,12 +132,8 @@ if [[ ${TEST_EXIT_CODE} != 0 ]]; then
 fi

 cd ${ORIGINAL_DIR}
-exit ${TEST_EXIT_CODE}

-echo "
-============================================================
-Cleaning up
-============================================================
-"
+print_banner "Cleaning up"

 bq rm -r -f ${GOOGLE_CLOUD_PROJECT}:${DATASET_NAME}
+exit ${TEST_EXIT_CODE}
\ No newline at end of file
diff --git a/infra/scripts/test-end-to-end-redis-cluster.sh b/infra/scripts/test-end-to-end-redis-cluster.sh
new file mode 100755
index 0000000000..a11aebf019
--- /dev/null
+++ b/infra/scripts/test-end-to-end-redis-cluster.sh
@@ -0,0 +1,105 @@
+#!/usr/bin/env bash
+
+set -e
+set -o pipefail
+
+test -z ${GOOGLE_APPLICATION_CREDENTIALS} && GOOGLE_APPLICATION_CREDENTIALS="/etc/service-account/service-account.json"
+test -z ${SKIP_BUILD_JARS} && SKIP_BUILD_JARS="false"
+test -z ${GOOGLE_CLOUD_PROJECT} && GOOGLE_CLOUD_PROJECT="kf-feast"
+test -z ${TEMP_BUCKET} && TEMP_BUCKET="feast-templocation-kf-feast"
+test -z ${JOBS_STAGING_LOCATION} && JOBS_STAGING_LOCATION="gs://${TEMP_BUCKET}/staging-location"
+
+# Get the current build version using maven (and pom.xml)
+export FEAST_BUILD_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
+echo Building version: $FEAST_BUILD_VERSION
+
+# Get Feast project repository root and scripts directory
+export PROJECT_ROOT_DIR=$(git rev-parse --show-toplevel)
+export SCRIPTS_DIR=${PROJECT_ROOT_DIR}/infra/scripts
+
+echo "
+This script will run end-to-end tests for Feast Core and Online Serving.
+
+1. Install Redis Cluster as the store for Feast Online Serving.
+2. Install Postgres for persisting Feast metadata.
+3. Install Kafka and Zookeeper as the Source in Feast.
+4. Install Python 3.7.4, Feast Python SDK and run end-to-end tests from
+   tests/e2e via pytest.
+"
+
+source ${SCRIPTS_DIR}/setup-common-functions.sh
+
+install_test_tools
+install_and_start_local_redis_cluster
+install_and_start_local_postgres
+install_and_start_local_zookeeper_and_kafka
+
+if [[ ${SKIP_BUILD_JARS} != "true" ]]; then
+  build_feast_core_and_serving
+else
+  echo "[DEBUG] Skipping building jars"
+fi
+
+start_feast_core
+
+cat <<EOF > /tmp/serving.online.application.yml
+feast:
+  core-host: localhost
+  core-grpc-port: 6565
+
+  active_store: online
+
+  # List of store configurations
+  stores:
+    - name: online # Name of the store (referenced by active_store)
+      type: REDIS_CLUSTER # Type of the store. REDIS, REDIS_CLUSTER, BIGQUERY are available options
+      config:
+        # Connection string specifies the IP and ports of Redis instances in Redis cluster
+        connection_string: "localhost:7000,localhost:7001,localhost:7002,localhost:7003,localhost:7004,localhost:7005"
+      # Subscriptions indicate which feature sets need to be retrieved and used to populate this store
+      subscriptions:
+        # Wildcards match all options. No filtering is done.
+        - name: "*"
+          project: "*"
+          version: "*"
+
+  tracing:
+    enabled: false
+
+grpc:
+  port: 6566
+  enable-reflection: true
+
+spring:
+  main:
+    web-environment: false
+
+EOF
+
+start_feast_serving /tmp/serving.online.application.yml
+
+install_python_with_miniconda_and_feast_sdk
+
+print_banner "Running end-to-end tests with pytest at 'tests/e2e'"
+
+# Default artifact location setting in Prow jobs
+LOGS_ARTIFACT_PATH=/logs/artifacts
+
+ORIGINAL_DIR=$(pwd)
+cd tests/e2e
+
+set +e
+pytest basic-ingest-redis-serving.py --junitxml=${LOGS_ARTIFACT_PATH}/python-sdk-test-report.xml
+TEST_EXIT_CODE=$?
+
+if [[ ${TEST_EXIT_CODE} != 0 ]]; then
+  echo "[DEBUG] Printing logs"
+  ls -ltrh /var/log/feast*
+  cat /var/log/feast-serving-online.log /var/log/feast-core.log
+
+  echo "[DEBUG] Printing Python packages list"
+  pip list
+fi
+
+cd ${ORIGINAL_DIR}
+exit ${TEST_EXIT_CODE}
diff --git a/infra/scripts/test-end-to-end.sh b/infra/scripts/test-end-to-end.sh
index ee7304f208..2f0179ff90 100755
--- a/infra/scripts/test-end-to-end.sh
+++ b/infra/scripts/test-end-to-end.sh
@@ -8,7 +8,13 @@ test -z ${SKIP_BUILD_JARS} && SKIP_BUILD_JARS="false"
 test -z ${GOOGLE_CLOUD_PROJECT} && GOOGLE_CLOUD_PROJECT="kf-feast"
 test -z ${TEMP_BUCKET} && TEMP_BUCKET="feast-templocation-kf-feast"
 test -z ${JOBS_STAGING_LOCATION} && JOBS_STAGING_LOCATION="gs://${TEMP_BUCKET}/staging-location"
-test -z ${JAR_VERSION_SUFFIX} && JAR_VERSION_SUFFIX="-SNAPSHOT"
+test -z ${FEAST_BUILD_VERSION} && FEAST_BUILD_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
+
+echo "Testing version: $FEAST_BUILD_VERSION"
+
+# Get Feast project repository root and scripts directory
+export PROJECT_ROOT_DIR=$(git rev-parse --show-toplevel)
+export SCRIPTS_DIR=${PROJECT_ROOT_DIR}/infra/scripts

 echo "
 This script will run end-to-end tests for Feast Core and Online Serving.
@@ -20,203 +26,25 @@ This script will run end-to-end tests for Feast Core and Online Serving.
 tests/e2e via pytest.
 "

-apt-get -qq update
-apt-get -y install wget netcat kafkacat build-essential
-
-echo "
-============================================================
-Installing Redis at localhost:6379
-============================================================
-"
-# Allow starting serving in this Maven Docker image. Default set to not allowed.
-echo "exit 0" > /usr/sbin/policy-rc.d
-apt-get -y install redis-server > /var/log/redis.install.log
-redis-server --daemonize yes
-redis-cli ping
-
-echo "
-============================================================
-Installing Postgres at localhost:5432
-============================================================
-"
-apt-get -y install postgresql > /var/log/postgresql.install.log
-service postgresql start
-# Initialize with database: 'postgres', user: 'postgres', password: 'password'
-cat <<EOF > /tmp/update-postgres-role.sh
-psql -c "ALTER USER postgres PASSWORD 'password';"
-EOF
-chmod +x /tmp/update-postgres-role.sh
-su -s /bin/bash -c /tmp/update-postgres-role.sh postgres
-export PGPASSWORD=password
-pg_isready
+source ${SCRIPTS_DIR}/setup-common-functions.sh

-echo "
-============================================================
-Installing Zookeeper at localhost:2181
-Installing Kafka at localhost:9092
-============================================================
-"
-wget -qO- https://www-eu.apache.org/dist/kafka/2.3.0/kafka_2.12-2.3.0.tgz | tar xz
-mv kafka_2.12-2.3.0/ /tmp/kafka
-nohup /tmp/kafka/bin/zookeeper-server-start.sh /tmp/kafka/config/zookeeper.properties &> /var/log/zookeeper.log 2>&1 &
-sleep 5
-tail -n10 /var/log/zookeeper.log
-nohup /tmp/kafka/bin/kafka-server-start.sh /tmp/kafka/config/server.properties &> /var/log/kafka.log 2>&1 &
-sleep 20
-tail -n10 /var/log/kafka.log
-kafkacat -b localhost:9092 -L
+install_test_tools
+install_and_start_local_redis
+install_and_start_local_postgres
+install_and_start_local_zookeeper_and_kafka

 if [[ ${SKIP_BUILD_JARS} != "true" ]]; then
-echo "
-============================================================
-Building jars for Feast
-============================================================
-"
-
-infra/scripts/download-maven-cache.sh \
-  --archive-uri gs://feast-templocation-kf-feast/.m2.2019-10-24.tar \
-  --output-dir /root/
-
-# Build jars for Feast
-mvn --quiet --batch-mode --define skipTests=true clean package
-
-ls -lh core/target/*jar
-ls -lh serving/target/*jar
+  build_feast_core_and_serving
 else
   echo "[DEBUG] Skipping building jars"
 fi

-echo "
-============================================================
-Starting Feast Core
-============================================================
-"
-# Start Feast Core in background
-cat <<EOF > /tmp/core.application.yml
-grpc:
-  port: 6565
-  enable-reflection: true
-
-feast:
-  version: 0.3
-  jobs:
-    runner: DirectRunner
-    options: {}
-    updates:
-      pollingIntervalMillis: 30000
-      timeoutSeconds: 240
-    metrics:
-      enabled: false
-
-  stream:
-    type: kafka
-    options:
-      topic: feast-features
-      bootstrapServers: localhost:9092
-      replicationFactor: 1
-      partitions: 1
-
-spring:
-  jpa:
-    properties.hibernate:
-      format_sql: true
-      event.merge.entity_copy_observer: allow
-      hibernate.naming.physical-strategy=org.hibernate.boot.model.naming: PhysicalNamingStrategyStandardImpl
-    hibernate.ddl-auto: update
-  datasource:
-    url: jdbc:postgresql://localhost:5432/postgres
-    username: postgres
-    password: password
-
-management:
-  metrics:
-    export:
-      simple:
-        enabled: false
-      statsd:
-        enabled: false
-EOF
-
-nohup java -jar core/target/feast-core-*${JAR_VERSION_SUFFIX}.jar \
-  --spring.config.location=file:///tmp/core.application.yml \
-  &> /var/log/feast-core.log &
-sleep 35
-tail -n10 /var/log/feast-core.log
-nc -w2 localhost 6565 < /dev/null
-
-echo "
-============================================================
-Starting Feast Online Serving
-============================================================
-"
-# Start Feast Online Serving in background
-cat <<EOF > /tmp/serving.store.redis.yml
-name: serving
-type: REDIS
-redis_config:
-  host: localhost
-  port: 6379
-subscriptions:
-  - name: "*"
-    version: "*"
-    project: "*"
-EOF
-
-cat <<EOF > /tmp/serving.online.application.yml
-feast:
-  version: 0.3
-  core-host: localhost
-  core-grpc-port: 6565
-  tracing:
-    enabled: false
-  store:
-    config-path: /tmp/serving.store.redis.yml
-    redis-pool-max-size: 128
-    redis-pool-max-idle: 16
-  jobs:
-    staging-location: ${JOBS_STAGING_LOCATION}
-    store-type:
-    store-options: {}
-
-grpc:
-  port: 6566
-  enable-reflection: true
-
-spring:
-  main:
-    web-environment: false
-
-EOF
-
-nohup java -jar serving/target/feast-serving-*${JAR_VERSION_SUFFIX}.jar \
-  --spring.config.location=file:///tmp/serving.online.application.yml \
-  &> /var/log/feast-serving-online.log &
-sleep 15
-tail -n100 /var/log/feast-serving-online.log
-nc -w2 localhost 6566 < /dev/null
+start_feast_core
+start_feast_serving
+install_python_with_miniconda_and_feast_sdk

-echo "
-============================================================
-Installing Python 3.7 with Miniconda and Feast SDK
-============================================================
-"
-# Install Python 3.7 with Miniconda
-wget -q https://repo.continuum.io/miniconda/Miniconda3-4.7.12-Linux-x86_64.sh \
-  -O /tmp/miniconda.sh
-bash /tmp/miniconda.sh -b -p /root/miniconda -f
-/root/miniconda/bin/conda init
-source ~/.bashrc
-
-# Install Feast Python SDK and test requirements
-make compile-protos-python
-pip install -qe sdk/python
-pip install -qr tests/e2e/requirements.txt
+print_banner "Running end-to-end tests with pytest at 'tests/e2e'"

-echo "
-============================================================
-Running end-to-end tests with pytest at 'tests/e2e'
-============================================================
-"
 # Default artifact location setting in Prow jobs
 LOGS_ARTIFACT_PATH=/logs/artifacts
diff --git a/infra/scripts/test-templates/values-end-to-end-batch-dataflow.yaml b/infra/scripts/test-templates/values-end-to-end-batch-dataflow.yaml
new file mode 100644
index 0000000000..44a377db5f
--- /dev/null
+++ b/infra/scripts/test-templates/values-end-to-end-batch-dataflow.yaml
@@ -0,0 +1,145 @@
+feast-core:
+  # feast-core.enabled -- Flag to install Feast Core
+  enabled: true
+  gcpServiceAccount:
+    enabled: true
+  postgresql:
+    existingSecret: feast-postgresql
+  image:
+    tag: $IMAGE_TAG
+  application-override.yaml:
+    spring:
+      datasource:
+        url: jdbc:postgresql://$HELM_COMMON_NAME-postgresql:5432/postgres
+    feast:
+      stream:
+        options:
+          bootstrapServers: $feast_kafka_1_ip:31090
+      jobs:
+        active_runner: dataflow
+
+        runners:
+          - name: dataflow
+            type: DataflowRunner
+            options:
+              project: $GCLOUD_PROJECT
+              region: $GCLOUD_REGION
+              zone: $GCLOUD_REGION-a
+              tempLocation: gs://$TEMP_BUCKET/tempLocation
+              network: $GCLOUD_NETWORK
+              subnetwork: regions/$GCLOUD_REGION/subnetworks/$GCLOUD_SUBNET
+              maxNumWorkers: 1
+              autoscalingAlgorithm: THROUGHPUT_BASED
+              usePublicIps: false
+              workerMachineType: n1-standard-1
+              deadLetterTableSpec: $GCLOUD_PROJECT:$DATASET_NAME.deadletter
+
+        metrics:
+          enabled: true
+          host: $feast_statsd_ip
+
+feast-online-serving:
+  # feast-online-serving.enabled -- Flag to install Feast Online Serving
+  enabled: true
+  image:
+    tag: $IMAGE_TAG
+  application-override.yaml:
+    feast:
+      active_store: online
+
+      # List of store configurations
+      stores:
+        - name: online
+          type: REDIS
+          config:
+            host: $feast_redis_ip
+            port: 6379
+          subscriptions:
+            - name: "*"
+              project: "*"
+
version: "*" + +feast-batch-serving: + # feast-batch-serving.enabled -- Flag to install Feast Batch Serving + enabled: true + image: + tag: $IMAGE_TAG + gcpServiceAccount: + enabled: true + application-override.yaml: + feast: + active_store: historical + + # List of store configurations + stores: + - name: historical + type: BIGQUERY + config: + project_id: $GCLOUD_PROJECT + dataset_id: $DATASET_NAME + staging_location: gs://$TEMP_BUCKET/stagingLocation + initial_retry_delay_seconds: 3 + total_timeout_seconds: 21600 + subscriptions: + - name: "*" + project: "*" + version: "*" + job_store: + redis_host: $HELM_COMMON_NAME-redis-master + +postgresql: + # postgresql.enabled -- Flag to install Postgresql + enabled: true + existingSecret: feast-postgresql + +kafka: + # kafka.enabled -- Flag to install Kafka + enabled: true + external: + enabled: true + type: LoadBalancer + annotations: + cloud.google.com/load-balancer-type: Internal + loadBalancerSourceRanges: + - 10.0.0.0/8 + - 172.16.0.0/12 + - 192.168.0.0/16 + firstListenerPort: 31090 + loadBalancerIP: + - $feast_kafka_1_ip + - $feast_kafka_2_ip + - $feast_kafka_3_ip + configurationOverrides: + "advertised.listeners": |- + EXTERNAL://${LOAD_BALANCER_IP}:31090 + "listener.security.protocol.map": |- + PLAINTEXT:PLAINTEXT,EXTERNAL:PLAINTEXT + "log.retention.hours": 1 + +redis: + # redis.enabled -- Flag to install Redis + enabled: true + usePassword: false + master: + service: + type: LoadBalancer + loadBalancerIP: $feast_redis_ip + annotations: + cloud.google.com/load-balancer-type: Internal + loadBalancerSourceRanges: + - 10.0.0.0/8 + - 172.16.0.0/12 + - 192.168.0.0/16 + +prometheus-statsd-exporter: + # prometheus-statsd-exporter.enabled -- Flag to install StatsD to Prometheus Exporter + enabled: true + service: + type: LoadBalancer + annotations: + cloud.google.com/load-balancer-type: Internal + loadBalancerSourceRanges: + - 10.0.0.0/8 + - 172.16.0.0/12 + - 192.168.0.0/16 + loadBalancerIP: $feast_statsd_ip diff --git a/infra/scripts/wait-for-it.sh b/infra/scripts/wait-for-it.sh new file mode 100755 index 0000000000..51942ce6dc --- /dev/null +++ b/infra/scripts/wait-for-it.sh @@ -0,0 +1,183 @@ +#!/usr/bin/env bash +# Use this script to test if a given TCP host/port are available +# Source: https://github.com/vishnubob/wait-for-it + +WAITFORIT_cmdname=${0##*/} + +echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } + +usage() +{ + cat << USAGE >&2 +Usage: + $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args] + -h HOST | --host=HOST Host or IP under test + -p PORT | --port=PORT TCP port under test + Alternatively, you specify the host and port as host:port + -s | --strict Only execute subcommand if the test succeeds + -q | --quiet Don't output any status messages + -t TIMEOUT | --timeout=TIMEOUT + Timeout in seconds, zero for no timeout + -- COMMAND ARGS Execute command with args after the test finishes +USAGE + exit 1 +} + +wait_for() +{ + if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then + echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT" + else + echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout" + fi + WAITFORIT_start_ts=$(date +%s) + while : + do + if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then + nc -z $WAITFORIT_HOST $WAITFORIT_PORT + WAITFORIT_result=$? + else + (echo > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1 + WAITFORIT_result=$? 
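+            # /dev/tcp/HOST/PORT is a bash builtin pseudo-device: the redirect
+            # above succeeds only if a TCP connection to HOST:PORT can be opened.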
+ fi + if [[ $WAITFORIT_result -eq 0 ]]; then + WAITFORIT_end_ts=$(date +%s) + echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds" + break + fi + sleep 1 + done + return $WAITFORIT_result +} + +wait_for_wrapper() +{ + # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692 + if [[ $WAITFORIT_QUIET -eq 1 ]]; then + timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT & + else + timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT & + fi + WAITFORIT_PID=$! + trap "kill -INT -$WAITFORIT_PID" INT + wait $WAITFORIT_PID + WAITFORIT_RESULT=$? + if [[ $WAITFORIT_RESULT -ne 0 ]]; then + echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT" + fi + return $WAITFORIT_RESULT +} + +# process arguments +while [[ $# -gt 0 ]] +do + case "$1" in + *:* ) + WAITFORIT_hostport=(${1//:/ }) + WAITFORIT_HOST=${WAITFORIT_hostport[0]} + WAITFORIT_PORT=${WAITFORIT_hostport[1]} + shift 1 + ;; + --child) + WAITFORIT_CHILD=1 + shift 1 + ;; + -q | --quiet) + WAITFORIT_QUIET=1 + shift 1 + ;; + -s | --strict) + WAITFORIT_STRICT=1 + shift 1 + ;; + -h) + WAITFORIT_HOST="$2" + if [[ $WAITFORIT_HOST == "" ]]; then break; fi + shift 2 + ;; + --host=*) + WAITFORIT_HOST="${1#*=}" + shift 1 + ;; + -p) + WAITFORIT_PORT="$2" + if [[ $WAITFORIT_PORT == "" ]]; then break; fi + shift 2 + ;; + --port=*) + WAITFORIT_PORT="${1#*=}" + shift 1 + ;; + -t) + WAITFORIT_TIMEOUT="$2" + if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi + shift 2 + ;; + --timeout=*) + WAITFORIT_TIMEOUT="${1#*=}" + shift 1 + ;; + --) + shift + WAITFORIT_CLI=("$@") + break + ;; + --help) + usage + ;; + *) + echoerr "Unknown argument: $1" + usage + ;; + esac +done + +if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then + echoerr "Error: you need to provide a host and port to test." + usage +fi + +WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15} +WAITFORIT_STRICT=${WAITFORIT_STRICT:-0} +WAITFORIT_CHILD=${WAITFORIT_CHILD:-0} +WAITFORIT_QUIET=${WAITFORIT_QUIET:-0} + +# Check to see if timeout is from busybox? +WAITFORIT_TIMEOUT_PATH=$(type -p timeout) +WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH) + +WAITFORIT_BUSYTIMEFLAG="" +if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then + WAITFORIT_ISBUSY=1 + # Check if busybox timeout uses -t flag + # (recent Alpine versions don't support -t anymore) + if timeout &>/dev/stdout | grep -q -e '-t '; then + WAITFORIT_BUSYTIMEFLAG="-t" + fi +else + WAITFORIT_ISBUSY=0 +fi + +if [[ $WAITFORIT_CHILD -gt 0 ]]; then + wait_for + WAITFORIT_RESULT=$? + exit $WAITFORIT_RESULT +else + if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then + wait_for_wrapper + WAITFORIT_RESULT=$? + else + wait_for + WAITFORIT_RESULT=$? 
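+        # With --timeout=0 wait_for runs unwrapped and blocks until the
+        # host:port becomes reachable.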
+ fi +fi + +if [[ $WAITFORIT_CLI != "" ]]; then + if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then + echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess" + exit $WAITFORIT_RESULT + fi + exec "${WAITFORIT_CLI[@]}" +else + exit $WAITFORIT_RESULT +fi \ No newline at end of file diff --git a/ingestion/pom.xml b/ingestion/pom.xml index 9386d066bf..36d5cd6269 100644 --- a/ingestion/pom.xml +++ b/ingestion/pom.xml @@ -31,15 +31,6 @@ - - org.apache.maven.plugins - maven-compiler-plugin - - - 8 - - - org.apache.maven.plugins maven-shade-plugin @@ -70,18 +61,10 @@ org.springframework org.springframework.vendor - - io.grpc - io.grpc.vendor - io.opencensus io.opencensus.vendor - - feast.core - feast.core.vendor - com.google.cloud.bigquery com.google.cloud.bigquery.vendor @@ -91,6 +74,36 @@ + + + org.apache.maven.plugins + maven-enforcer-plugin + + + + enforce-bytecode-version + + enforce + + + + + 1.8 + + + + + + + + + org.jacoco + jacoco-maven-plugin + diff --git a/ingestion/src/main/java/feast/ingestion/ImportJob.java b/ingestion/src/main/java/feast/ingestion/ImportJob.java index ef6039e553..16efe11f55 100644 --- a/ingestion/src/main/java/feast/ingestion/ImportJob.java +++ b/ingestion/src/main/java/feast/ingestion/ImportJob.java @@ -20,24 +20,24 @@ import static feast.ingestion.utils.StoreUtil.getFeatureSink; import com.google.protobuf.InvalidProtocolBufferException; -import feast.core.FeatureSetProto.FeatureSet; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.SourceProto.Source; -import feast.core.StoreProto.Store; import feast.ingestion.options.BZip2Decompressor; import feast.ingestion.options.ImportOptions; import feast.ingestion.options.StringListStreamConverter; +import feast.ingestion.transform.ProcessAndValidateFeatureRows; import feast.ingestion.transform.ReadFromSource; -import feast.ingestion.transform.ValidateFeatureRows; import feast.ingestion.transform.metrics.WriteFailureMetricsTransform; import feast.ingestion.transform.metrics.WriteSuccessMetricsTransform; import feast.ingestion.utils.SpecUtil; +import feast.proto.core.FeatureSetProto.FeatureSet; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.SourceProto.Source; +import feast.proto.core.StoreProto.Store; +import feast.proto.types.FeatureRowProto.FeatureRow; import feast.storage.api.writer.DeadletterSink; import feast.storage.api.writer.FailedElement; import feast.storage.api.writer.FeatureSink; import feast.storage.api.writer.WriteResult; import feast.storage.connectors.bigquery.writer.BigQueryDeadletterSink; -import feast.types.FeatureRowProto.FeatureRow; import java.io.IOException; import java.util.HashMap; import java.util.List; @@ -124,12 +124,13 @@ public static PipelineResult runPipeline(ImportOptions options) throws IOExcepti .setFailureTag(DEADLETTER_OUT) .build()); - // Step 2. Validate incoming FeatureRows + // Step 2. 
Process and validate incoming FeatureRows PCollectionTuple validatedRows = convertedFeatureRows .get(FEATURE_ROW_OUT) .apply( - ValidateFeatureRows.newBuilder() + ProcessAndValidateFeatureRows.newBuilder() + .setDefaultProject(options.getDefaultFeastProject()) .setFeatureSetSpecs(featureSetSpecsByKey) .setSuccessTag(FEATURE_ROW_OUT) .setFailureTag(DEADLETTER_OUT) diff --git a/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java b/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java index 1fa127d662..e3a1b841c4 100644 --- a/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java +++ b/ingestion/src/main/java/feast/ingestion/options/ImportOptions.java @@ -27,6 +27,13 @@ /** Options passed to Beam to influence the job's execution environment */ public interface ImportOptions extends PipelineOptions, DataflowPipelineOptions, DirectOptions { + @Required + @Description( + "Default feast project to apply to incoming rows that do not specify project in its feature set reference.") + String getDefaultFeastProject(); + + void setDefaultFeastProject(String defaultProject); + @Required @Description( "JSON string representation of the FeatureSet that the import job will process, in BZip2 binary format." diff --git a/ingestion/src/main/java/feast/ingestion/transform/ValidateFeatureRows.java b/ingestion/src/main/java/feast/ingestion/transform/ProcessAndValidateFeatureRows.java similarity index 69% rename from ingestion/src/main/java/feast/ingestion/transform/ValidateFeatureRows.java rename to ingestion/src/main/java/feast/ingestion/transform/ProcessAndValidateFeatureRows.java index 06df06c074..2ce8918ccf 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/ValidateFeatureRows.java +++ b/ingestion/src/main/java/feast/ingestion/transform/ProcessAndValidateFeatureRows.java @@ -17,11 +17,12 @@ package feast.ingestion.transform; import com.google.auto.value.AutoValue; -import feast.core.FeatureSetProto; +import feast.ingestion.transform.fn.ProcessFeatureRowDoFn; import feast.ingestion.transform.fn.ValidateFeatureRowDoFn; import feast.ingestion.values.FeatureSet; +import feast.proto.core.FeatureSetProto; +import feast.proto.types.FeatureRowProto.FeatureRow; import feast.storage.api.writer.FailedElement; -import feast.types.FeatureRowProto.FeatureRow; import java.util.Map; import java.util.stream.Collectors; import org.apache.beam.sdk.transforms.PTransform; @@ -33,17 +34,19 @@ import org.apache.commons.lang3.tuple.Pair; @AutoValue -public abstract class ValidateFeatureRows +public abstract class ProcessAndValidateFeatureRows extends PTransform, PCollectionTuple> { public abstract Map getFeatureSetSpecs(); + public abstract String getDefaultProject(); + public abstract TupleTag getSuccessTag(); public abstract TupleTag getFailureTag(); public static Builder newBuilder() { - return new AutoValue_ValidateFeatureRows.Builder(); + return new AutoValue_ProcessAndValidateFeatureRows.Builder(); } @AutoValue.Builder @@ -52,11 +55,13 @@ public abstract static class Builder { public abstract Builder setFeatureSetSpecs( Map featureSets); + public abstract Builder setDefaultProject(String defaultProject); + public abstract Builder setSuccessTag(TupleTag successTag); public abstract Builder setFailureTag(TupleTag failureTag); - public abstract ValidateFeatureRows build(); + public abstract ProcessAndValidateFeatureRows build(); } @Override @@ -67,14 +72,16 @@ public PCollectionTuple expand(PCollection input) { .map(e -> Pair.of(e.getKey(), new FeatureSet(e.getValue()))) 
.collect(Collectors.toMap(Pair::getLeft, Pair::getRight)); - return input.apply( - "ValidateFeatureRows", - ParDo.of( - ValidateFeatureRowDoFn.newBuilder() - .setFeatureSets(featureSets) - .setSuccessTag(getSuccessTag()) - .setFailureTag(getFailureTag()) - .build()) - .withOutputTags(getSuccessTag(), TupleTagList.of(getFailureTag()))); + return input + .apply("ProcessFeatureRows", ParDo.of(new ProcessFeatureRowDoFn(getDefaultProject()))) + .apply( + "ValidateFeatureRows", + ParDo.of( + ValidateFeatureRowDoFn.newBuilder() + .setFeatureSets(featureSets) + .setSuccessTag(getSuccessTag()) + .setFailureTag(getFailureTag()) + .build()) + .withOutputTags(getSuccessTag(), TupleTagList.of(getFailureTag()))); } } diff --git a/ingestion/src/main/java/feast/ingestion/transform/ReadFromSource.java b/ingestion/src/main/java/feast/ingestion/transform/ReadFromSource.java index fb013d0375..4525373514 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/ReadFromSource.java +++ b/ingestion/src/main/java/feast/ingestion/transform/ReadFromSource.java @@ -18,11 +18,11 @@ import com.google.auto.value.AutoValue; import com.google.common.base.Preconditions; -import feast.core.SourceProto.Source; -import feast.core.SourceProto.SourceType; import feast.ingestion.transform.fn.KafkaRecordToFeatureRowDoFn; +import feast.proto.core.SourceProto.Source; +import feast.proto.core.SourceProto.SourceType; +import feast.proto.types.FeatureRowProto.FeatureRow; import feast.storage.api.writer.FailedElement; -import feast.types.FeatureRowProto.FeatureRow; import org.apache.beam.sdk.io.kafka.KafkaIO; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.ParDo; @@ -61,7 +61,7 @@ public ReadFromSource build() { Source source = read.getSource(); Preconditions.checkState( source.getType().equals(SourceType.KAFKA), - "Source type must be KAFKA. Please raise an issue in https://github.com/gojek/feast/issues to request additional source types."); + "Source type must be KAFKA. 
Please raise an issue in https://github.com/feast-dev/feast/issues to request additional source types."); Preconditions.checkState( !source.getKafkaSourceConfig().getBootstrapServers().isEmpty(), "bootstrap_servers cannot be empty."); diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/KafkaRecordToFeatureRowDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/KafkaRecordToFeatureRowDoFn.java index b332c0ca09..7654c1e2fa 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/fn/KafkaRecordToFeatureRowDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/fn/KafkaRecordToFeatureRowDoFn.java @@ -18,8 +18,8 @@ import com.google.auto.value.AutoValue; import com.google.protobuf.InvalidProtocolBufferException; +import feast.proto.types.FeatureRowProto.FeatureRow; import feast.storage.api.writer.FailedElement; -import feast.types.FeatureRowProto.FeatureRow; import java.util.Base64; import org.apache.beam.sdk.io.kafka.KafkaRecord; import org.apache.beam.sdk.transforms.DoFn; diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/ProcessFeatureRowDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/ProcessFeatureRowDoFn.java new file mode 100644 index 0000000000..3680348cf0 --- /dev/null +++ b/ingestion/src/main/java/feast/ingestion/transform/fn/ProcessFeatureRowDoFn.java @@ -0,0 +1,52 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.ingestion.transform.fn; + +import feast.proto.types.FeatureRowProto.FeatureRow; +import org.apache.beam.sdk.transforms.DoFn; + +public class ProcessFeatureRowDoFn extends DoFn { + + private String defaultProject; + + public ProcessFeatureRowDoFn(String defaultProject) { + this.defaultProject = defaultProject; + } + + @ProcessElement + public void processElement(ProcessContext context) { + FeatureRow featureRow = context.element(); + String featureSetId = stripVersion(featureRow.getFeatureSet()); + featureSetId = applyDefaultProject(featureSetId); + featureRow = featureRow.toBuilder().setFeatureSet(featureSetId).build(); + context.output(featureRow); + } + + // For backward compatibility. Will be deprecated eventually. 
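+  // e.g. "myproject/driver_features:1" -> "myproject/driver_features";
+  // references without a version suffix pass through unchanged.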
+ private String stripVersion(String featureSetId) { + String[] split = featureSetId.split(":"); + return split[0]; + } + + private String applyDefaultProject(String featureSetId) { + String[] split = featureSetId.split("/"); + if (split.length == 1) { + return defaultProject + "/" + featureSetId; + } + return featureSetId; + } +} diff --git a/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowDoFn.java index 85ac3c86fa..856d94828a 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/fn/ValidateFeatureRowDoFn.java @@ -19,10 +19,10 @@ import com.google.auto.value.AutoValue; import feast.ingestion.values.FeatureSet; import feast.ingestion.values.Field; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FieldProto; +import feast.proto.types.ValueProto.Value.ValCase; import feast.storage.api.writer.FailedElement; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.FieldProto; -import feast.types.ValueProto.Value.ValCase; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -58,14 +58,14 @@ public abstract static class Builder { public void processElement(ProcessContext context) { String error = null; FeatureRow featureRow = context.element(); - FeatureSet featureSet = getFeatureSets().getOrDefault(featureRow.getFeatureSet(), null); + FeatureSet featureSet = getFeatureSets().get(featureRow.getFeatureSet()); List fields = new ArrayList<>(); if (featureSet != null) { for (FieldProto.Field field : featureRow.getFieldsList()) { Field fieldSpec = featureSet.getField(field.getName()); if (fieldSpec == null) { // skip - break; + continue; } // If value is set in the FeatureRow, make sure the value type matches // that defined in FeatureSetSpec @@ -99,13 +99,8 @@ public void processElement(ProcessContext context) { .setPayload(featureRow.toString()) .setErrorMessage(error); if (featureSet != null) { - String[] split = featureSet.getReference().split(":"); - String[] nameSplit = split[0].split("/"); - failedElement = - failedElement - .setProjectName(nameSplit[0]) - .setFeatureSetName(nameSplit[1]) - .setFeatureSetVersion(split[1]); + String[] split = featureSet.getReference().split("/"); + failedElement = failedElement.setProjectName(split[0]).setFeatureSetName(split[1]); } context.output(getFailureTag(), failedElement.build()); } else { diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteDeadletterRowMetricsDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteDeadletterRowMetricsDoFn.java index b4338cda09..828fed6ced 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteDeadletterRowMetricsDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteDeadletterRowMetricsDoFn.java @@ -76,7 +76,6 @@ public void processElement(ProcessContext c) { STORE_TAG_KEY + ":" + getStoreName(), PROJECT_TAG_KEY + ":" + ignored.getProjectName(), FEATURE_SET_NAME_TAG_KEY + ":" + ignored.getFeatureSetName(), - FEATURE_SET_VERSION_TAG_KEY + ":" + ignored.getFeatureSetVersion(), INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName()); } catch (StatsDClientException e) { log.warn("Unable to push metrics to server", e); diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteFailureMetricsTransform.java 
b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteFailureMetricsTransform.java index 65a27fa8bf..778515ae8a 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteFailureMetricsTransform.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteFailureMetricsTransform.java @@ -19,6 +19,7 @@ import com.google.auto.value.AutoValue; import feast.ingestion.options.ImportOptions; import feast.storage.api.writer.FailedElement; +import org.apache.beam.sdk.transforms.DoFn; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.values.PCollection; @@ -46,6 +47,14 @@ public PDone expand(PCollection input) { .setStatsdPort(options.getStatsdPort()) .setStoreName(getStoreName()) .build())); + } else { + input.apply( + "Noop", + ParDo.of( + new DoFn() { + @ProcessElement + public void processElement(ProcessContext c) {} + })); } return PDone.in(input.getPipeline()); } diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteFeatureValueMetricsDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteFeatureValueMetricsDoFn.java index cfecb858dc..809687a6b2 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteFeatureValueMetricsDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteFeatureValueMetricsDoFn.java @@ -18,7 +18,6 @@ import static feast.ingestion.transform.metrics.WriteRowMetricsDoFn.FEATURE_SET_NAME_TAG_KEY; import static feast.ingestion.transform.metrics.WriteRowMetricsDoFn.FEATURE_SET_PROJECT_TAG_KEY; -import static feast.ingestion.transform.metrics.WriteRowMetricsDoFn.FEATURE_SET_VERSION_TAG_KEY; import static feast.ingestion.transform.metrics.WriteRowMetricsDoFn.FEATURE_TAG_KEY; import static feast.ingestion.transform.metrics.WriteRowMetricsDoFn.INGESTION_JOB_NAME_KEY; import static feast.ingestion.transform.metrics.WriteRowMetricsDoFn.METRIC_PREFIX; @@ -27,9 +26,9 @@ import com.google.auto.value.AutoValue; import com.timgroup.statsd.NonBlockingStatsDClient; import com.timgroup.statsd.StatsDClient; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.FieldProto.Field; -import feast.types.ValueProto.Value; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FieldProto.Field; +import feast.proto.types.ValueProto.Value; import java.util.ArrayList; import java.util.DoubleSummaryStatistics; import java.util.HashMap; @@ -131,25 +130,17 @@ public void processElement( "Feature set reference in the feature row is null. 
Please check the input feature rows from previous steps"); return; } - String[] colonSplits = featureSetRef.split(":"); - if (colonSplits.length != 2) { - log.error( - "Skip writing feature value metrics because the feature set reference '{}' does not" - + "follow the required format /:", - featureSetRef); - return; - } - String[] slashSplits = colonSplits[0].split("/"); + + String[] slashSplits = featureSetRef.split("/"); if (slashSplits.length != 2) { log.error( "Skip writing feature value metrics because the feature set reference '{}' does not" - + "follow the required format /:", + + "follow the required format /", featureSetRef); return; } String projectName = slashSplits[0]; String featureSetName = slashSplits[1]; - String version = colonSplits[1]; Map featureNameToStats = new HashMap<>(); Map> featureNameToValues = new HashMap<>(); @@ -166,7 +157,6 @@ public void processElement( STORE_TAG_KEY + ":" + getStoreName(), FEATURE_SET_PROJECT_TAG_KEY + ":" + projectName, FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, - FEATURE_SET_VERSION_TAG_KEY + ":" + version, FEATURE_TAG_KEY + ":" + featureName, INGESTION_JOB_NAME_KEY + ":" + context.getPipelineOptions().getJobName() }; diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java index 2fe1f2e7f0..3c7f04a98e 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFn.java @@ -20,10 +20,10 @@ import com.google.protobuf.util.Timestamps; import com.timgroup.statsd.NonBlockingStatsDClient; import com.timgroup.statsd.StatsDClient; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.FieldProto.Field; -import feast.types.ValueProto.Value; -import feast.types.ValueProto.Value.ValCase; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FieldProto.Field; +import feast.proto.types.ValueProto.Value; +import feast.proto.types.ValueProto.Value.ValCase; import java.time.Clock; import java.util.HashMap; import java.util.Map; @@ -142,15 +142,7 @@ public void processElement( "Feature set reference in the feature row is null. 
Please check the input feature rows from previous steps"); return; } - String[] colonSplits = featureSetRef.split(":"); - if (colonSplits.length != 2) { - log.error( - "Skip writing feature row metrics because the feature set reference '{}' does not" - + "follow the required format /:", - featureSetRef); - return; - } - String[] slashSplits = colonSplits[0].split("/"); + String[] slashSplits = featureSetRef.split("/"); if (slashSplits.length != 2) { log.error( "Skip writing feature row metrics because the feature set reference '{}' does not" @@ -161,7 +153,6 @@ public void processElement( String featureSetProject = slashSplits[0]; String featureSetName = slashSplits[1]; - String featureSetVersion = colonSplits[1]; // featureRowLagStats is stats for feature row lag for feature set "featureSetName" DescriptiveStatistics featureRowLagStats = new DescriptiveStatistics(); @@ -201,7 +192,6 @@ public void processElement( STORE_TAG_KEY + ":" + getStoreName(), FEATURE_SET_PROJECT_TAG_KEY + ":" + featureSetProject, FEATURE_SET_NAME_TAG_KEY + ":" + featureSetName, - FEATURE_SET_VERSION_TAG_KEY + ":" + featureSetVersion, INGESTION_JOB_NAME_KEY + ":" + c.getPipelineOptions().getJobName(), }; diff --git a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteSuccessMetricsTransform.java b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteSuccessMetricsTransform.java index 37eed7455a..e75debf4a9 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteSuccessMetricsTransform.java +++ b/ingestion/src/main/java/feast/ingestion/transform/metrics/WriteSuccessMetricsTransform.java @@ -18,7 +18,7 @@ import com.google.auto.value.AutoValue; import feast.ingestion.options.ImportOptions; -import feast.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FeatureRowProto.FeatureRow; import org.apache.beam.sdk.metrics.Counter; import org.apache.beam.sdk.metrics.Metrics; import org.apache.beam.sdk.transforms.*; diff --git a/ingestion/src/main/java/feast/ingestion/utils/SpecUtil.java b/ingestion/src/main/java/feast/ingestion/utils/SpecUtil.java index f28dfc9ee3..a1356584b3 100644 --- a/ingestion/src/main/java/feast/ingestion/utils/SpecUtil.java +++ b/ingestion/src/main/java/feast/ingestion/utils/SpecUtil.java @@ -18,13 +18,13 @@ import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.util.JsonFormat; -import feast.core.FeatureSetProto.EntitySpec; -import feast.core.FeatureSetProto.FeatureSet; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.FeatureSetProto.FeatureSpec; -import feast.core.StoreProto.Store; -import feast.core.StoreProto.Store.Subscription; import feast.ingestion.values.Field; +import feast.proto.core.FeatureSetProto.EntitySpec; +import feast.proto.core.FeatureSetProto.FeatureSet; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.FeatureSetProto.FeatureSpec; +import feast.proto.core.StoreProto.Store; +import feast.proto.core.StoreProto.Store.Subscription; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -34,9 +34,7 @@ public class SpecUtil { public static String getFeatureSetReference(FeatureSetSpec featureSetSpec) { - return String.format( - "%s/%s:%d", - featureSetSpec.getProject(), featureSetSpec.getName(), featureSetSpec.getVersion()); + return String.format("%s/%s", featureSetSpec.getProject(), featureSetSpec.getName()); } /** Get only feature set specs that matches the subscription */ @@ -46,28 +44,17 @@ public static List 
getSubscribedFeatureSets( for (FeatureSet featureSet : featureSets) { for (Subscription sub : subscriptions) { // If configuration missing, fail - if (sub.getProject().isEmpty() || sub.getName().isEmpty() || sub.getVersion().isEmpty()) { + if (sub.getProject().isEmpty() || sub.getName().isEmpty()) { throw new IllegalArgumentException( String.format("Subscription is missing arguments: %s", sub.toString())); } // If all wildcards, subscribe to everything - if (sub.getProject().equals("*") - || sub.getName().equals("*") - || sub.getVersion().equals("*")) { + if (sub.getProject().equals("*") || sub.getName().equals("*")) { subscribed.add(featureSet); break; } - // If all wildcards, subscribe to everything - if (sub.getProject().equals("*") - && (!sub.getName().equals("*") || !sub.getVersion().equals("*"))) { - throw new IllegalArgumentException( - String.format( - "Subscription cannot have feature set name and/or version set if project is not defined: %s", - sub.toString())); - } - // Match project name if (!featureSet.getSpec().getProject().equals(sub.getProject())) { continue; @@ -84,26 +71,7 @@ public static List getSubscribedFeatureSets( if (!pattern.matcher(featureSet.getSpec().getName()).matches()) { continue; } - - // If version is '*', match all - if (sub.getVersion().equals("*")) { - subscribed.add(featureSet); - break; - } else if (sub.getVersion().equals("latest")) { - // if version is "latest" - throw new RuntimeException( - String.format( - "Support for latest feature set subscription has not been implemented yet: %s", - sub.toString())); - - } else { - // If a specific version, match that version alone - int version = Integer.parseInt(sub.getVersion()); - if (featureSet.getSpec().getVersion() == version) { - subscribed.add(featureSet); - break; - } - } + subscribed.add(featureSet); } } return subscribed; diff --git a/ingestion/src/main/java/feast/ingestion/utils/StoreUtil.java b/ingestion/src/main/java/feast/ingestion/utils/StoreUtil.java index 1b88433381..4efec994f7 100644 --- a/ingestion/src/main/java/feast/ingestion/utils/StoreUtil.java +++ b/ingestion/src/main/java/feast/ingestion/utils/StoreUtil.java @@ -16,16 +16,16 @@ */ package feast.ingestion.utils; -import static feast.types.ValueProto.ValueType; +import static feast.proto.types.ValueProto.ValueType; import com.google.cloud.bigquery.StandardSQLTypeName; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.StoreProto.Store; -import feast.core.StoreProto.Store.StoreType; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.StoreProto.Store; +import feast.proto.core.StoreProto.Store.StoreType; +import feast.proto.types.ValueProto.ValueType.Enum; import feast.storage.api.writer.FeatureSink; import feast.storage.connectors.bigquery.writer.BigQueryFeatureSink; import feast.storage.connectors.redis.writer.RedisFeatureSink; -import feast.types.ValueProto.ValueType.Enum; import java.util.HashMap; import java.util.Map; import org.slf4j.Logger; @@ -82,15 +82,14 @@ public static FeatureSink getFeatureSink( Store store, Map featureSetSpecs) { StoreType storeType = store.getType(); switch (storeType) { + case REDIS_CLUSTER: + return RedisFeatureSink.fromConfig(store.getRedisClusterConfig(), featureSetSpecs); case REDIS: - return RedisFeatureSink.builder() - .setRedisConfig(store.getRedisConfig()) - .setFeatureSetSpecs(featureSetSpecs) - .build(); + return RedisFeatureSink.fromConfig(store.getRedisConfig(), featureSetSpecs); case BIGQUERY: - return 
BigQueryFeatureSink.fromConfig(store.getBigqueryConfig()); + return BigQueryFeatureSink.fromConfig(store.getBigqueryConfig(), featureSetSpecs); default: - throw new RuntimeException(String.format("Store type '{}' is unsupported", storeType)); + throw new RuntimeException(String.format("Store type '%s' is unsupported", storeType)); } } } diff --git a/ingestion/src/main/java/feast/ingestion/values/FailedElement.java b/ingestion/src/main/java/feast/ingestion/values/FailedElement.java deleted file mode 100644 index 9606c27d19..0000000000 --- a/ingestion/src/main/java/feast/ingestion/values/FailedElement.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * Copyright 2018-2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package feast.ingestion.values; - -import com.google.auto.value.AutoValue; -import javax.annotation.Nullable; -import org.apache.beam.sdk.schemas.AutoValueSchema; -import org.apache.beam.sdk.schemas.annotations.DefaultSchema; -import org.joda.time.Instant; - -@AutoValue -// Use DefaultSchema annotation so this AutoValue class can be serialized by Beam -// https://issues.apache.org/jira/browse/BEAM-1891 -// https://github.com/apache/beam/pull/7334 -@DefaultSchema(AutoValueSchema.class) -public abstract class FailedElement { - public abstract Instant getTimestamp(); - - @Nullable - public abstract String getJobName(); - - @Nullable - public abstract String getProjectName(); - - @Nullable - public abstract String getFeatureSetName(); - - @Nullable - public abstract String getFeatureSetVersion(); - - @Nullable - public abstract String getTransformName(); - - @Nullable - public abstract String getPayload(); - - @Nullable - public abstract String getErrorMessage(); - - @Nullable - public abstract String getStackTrace(); - - public static Builder newBuilder() { - return new AutoValue_FailedElement.Builder().setTimestamp(Instant.now()); - } - - @AutoValue.Builder - public abstract static class Builder { - public abstract Builder setTimestamp(Instant timestamp); - - public abstract Builder setProjectName(String projectName); - - public abstract Builder setFeatureSetName(String featureSetName); - - public abstract Builder setFeatureSetVersion(String featureSetVersion); - - public abstract Builder setJobName(String jobName); - - public abstract Builder setTransformName(String transformName); - - public abstract Builder setPayload(String payload); - - public abstract Builder setErrorMessage(String errorMessage); - - public abstract Builder setStackTrace(String stackTrace); - - public abstract FailedElement build(); - } -} diff --git a/ingestion/src/main/java/feast/ingestion/values/FeatureSet.java b/ingestion/src/main/java/feast/ingestion/values/FeatureSet.java index 758fbd0ba3..f49ab2f98a 100644 --- a/ingestion/src/main/java/feast/ingestion/values/FeatureSet.java +++ b/ingestion/src/main/java/feast/ingestion/values/FeatureSet.java @@ -19,12 +19,12 @@ import static feast.ingestion.utils.SpecUtil.getFeatureSetReference; 
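Taken together, the reference changes above reduce to one normalization rule: strip any legacy ":version" suffix and, when the project segment is absent, prepend the pipeline's default project, so every feature set reference becomes "project/name". A minimal self-contained sketch of that rule, mirroring the stripVersion and applyDefaultProject helpers shown earlier (the class name and DEFAULT_PROJECT constant are illustrative stand-ins for the transform's configurable defaultProject):

public class FeatureSetRefs {

  // Illustrative stand-in for the pipeline's configurable default project option.
  private static final String DEFAULT_PROJECT = "default";

  // Drops a legacy ":version" suffix: "myproject/feature_set:1" -> "myproject/feature_set".
  static String stripVersion(String featureSetId) {
    return featureSetId.split(":")[0];
  }

  // Prepends the default project when no "project/" part is present:
  // "feature_set" -> "default/feature_set".
  static String applyDefaultProject(String featureSetId) {
    String[] split = featureSetId.split("/");
    return split.length == 1 ? DEFAULT_PROJECT + "/" + featureSetId : featureSetId;
  }

  public static void main(String[] args) {
    String ref = applyDefaultProject(stripVersion("feature_set:1"));
    System.out.println(ref); // default/feature_set
    // Downstream code can now rely on exactly two "/"-separated parts.
    String[] parts = ref.split("/");
    System.out.println(parts[0] + " | " + parts[1]); // default | feature_set
  }
}

With references normalized this way, the validation and metrics DoFns above can split on "/" alone and tag only the project and feature set name.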
import static feast.ingestion.utils.SpecUtil.getFieldsByName; -import feast.core.FeatureSetProto; +import feast.proto.core.FeatureSetProto; import java.io.Serializable; import java.util.Map; /** - * This class represents {@link feast.core.FeatureSetProto.FeatureSetSpec} but contains fields + * This class represents {@link feast.proto.core.FeatureSetProto.FeatureSetSpec} but contains fields * directly accessible by name for feature validation purposes. * * <p>
The use for this class is mainly for validating the Fields in FeatureRow. diff --git a/ingestion/src/main/java/feast/ingestion/values/Field.java b/ingestion/src/main/java/feast/ingestion/values/Field.java index 9b4f570d7e..79cb83e76a 100644 --- a/ingestion/src/main/java/feast/ingestion/values/Field.java +++ b/ingestion/src/main/java/feast/ingestion/values/Field.java @@ -16,13 +16,13 @@ */ package feast.ingestion.values; -import feast.types.ValueProto.ValueType; +import feast.proto.types.ValueProto.ValueType; import java.io.Serializable; import org.apache.beam.sdk.coders.AvroCoder; import org.apache.beam.sdk.coders.DefaultCoder; /** - * Field class represents {@link feast.types.FieldProto.Field} but without value. + * Field class represents {@link feast.proto.types.FieldProto.Field} but without value. * * <p>
The use for this class is mainly for validating the Fields in FeatureRow. */ diff --git a/ingestion/src/test/java/feast/ToOrderedFeatureRows.java b/ingestion/src/test/java/feast/ToOrderedFeatureRows.java index db552693f9..35453f0754 100644 --- a/ingestion/src/test/java/feast/ToOrderedFeatureRows.java +++ b/ingestion/src/test/java/feast/ToOrderedFeatureRows.java @@ -18,9 +18,9 @@ import com.google.common.collect.Lists; import com.google.common.primitives.UnsignedBytes; -import feast.types.FeatureRowExtendedProto.FeatureRowExtended; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.FieldProto.Field; +import feast.proto.types.FeatureRowExtendedProto.FeatureRowExtended; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FieldProto.Field; import java.util.List; import org.apache.beam.sdk.transforms.MapElements; import org.apache.beam.sdk.transforms.PTransform; diff --git a/ingestion/src/test/java/feast/ingestion/ImportJobTest.java b/ingestion/src/test/java/feast/ingestion/ImportJobTest.java index 13df73e96a..39e4296378 100644 --- a/ingestion/src/test/java/feast/ingestion/ImportJobTest.java +++ b/ingestion/src/test/java/feast/ingestion/ImportJobTest.java @@ -19,26 +19,26 @@ import com.google.common.io.Files; import com.google.protobuf.InvalidProtocolBufferException; import com.google.protobuf.util.JsonFormat; -import feast.core.FeatureSetProto.EntitySpec; -import feast.core.FeatureSetProto.FeatureSet; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.FeatureSetProto.FeatureSpec; -import feast.core.SourceProto.KafkaSourceConfig; -import feast.core.SourceProto.Source; -import feast.core.SourceProto.SourceType; -import feast.core.StoreProto.Store; -import feast.core.StoreProto.Store.RedisConfig; -import feast.core.StoreProto.Store.StoreType; -import feast.core.StoreProto.Store.Subscription; import feast.ingestion.options.BZip2Compressor; import feast.ingestion.options.ImportOptions; -import feast.storage.RedisProto.RedisKey; +import feast.proto.core.FeatureSetProto.EntitySpec; +import feast.proto.core.FeatureSetProto.FeatureSet; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.FeatureSetProto.FeatureSpec; +import feast.proto.core.SourceProto.KafkaSourceConfig; +import feast.proto.core.SourceProto.Source; +import feast.proto.core.SourceProto.SourceType; +import feast.proto.core.StoreProto.Store; +import feast.proto.core.StoreProto.Store.RedisConfig; +import feast.proto.core.StoreProto.Store.StoreType; +import feast.proto.core.StoreProto.Store.Subscription; +import feast.proto.storage.RedisProto.RedisKey; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FieldProto; +import feast.proto.types.ValueProto.ValueType.Enum; import feast.test.TestUtil; import feast.test.TestUtil.LocalKafka; import feast.test.TestUtil.LocalRedis; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.FieldProto; -import feast.types.ValueProto.ValueType.Enum; import io.lettuce.core.RedisClient; import io.lettuce.core.RedisURI; import io.lettuce.core.api.StatefulRedisConnection; @@ -120,7 +120,6 @@ public void runPipeline_ShouldWriteToRedisCorrectlyGivenValidSpecAndFeatureRow() FeatureSetSpec spec = FeatureSetSpec.newBuilder() .setName("feature_set") - .setVersion(3) .setProject("myproject") .addEntities( EntitySpec.newBuilder() @@ -164,7 +163,6 @@ public void runPipeline_ShouldWriteToRedisCorrectlyGivenValidSpecAndFeatureRow() Subscription.newBuilder() .setProject(spec.getProject()) 
.setName(spec.getName()) - .setVersion(String.valueOf(spec.getVersion())) .build()) .build(); @@ -178,6 +176,7 @@ public void runPipeline_ShouldWriteToRedisCorrectlyGivenValidSpecAndFeatureRow() }); options.setFeatureSetJson(compressor.compress(spec)); options.setStoreJson(Collections.singletonList(JsonFormat.printer().print(redis))); + options.setDefaultFeastProject("myproject"); options.setProject(""); options.setBlockOnRun(false); diff --git a/ingestion/src/test/java/feast/ingestion/transform/ValidateFeatureRowsTest.java b/ingestion/src/test/java/feast/ingestion/transform/ProcessAndValidateFeatureRowsTest.java similarity index 57% rename from ingestion/src/test/java/feast/ingestion/transform/ValidateFeatureRowsTest.java rename to ingestion/src/test/java/feast/ingestion/transform/ProcessAndValidateFeatureRowsTest.java index 3737a73616..8c5d7bd8ed 100644 --- a/ingestion/src/test/java/feast/ingestion/transform/ValidateFeatureRowsTest.java +++ b/ingestion/src/test/java/feast/ingestion/transform/ProcessAndValidateFeatureRowsTest.java @@ -16,15 +16,15 @@ */ package feast.ingestion.transform; -import feast.core.FeatureSetProto.EntitySpec; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.FeatureSetProto.FeatureSpec; +import feast.proto.core.FeatureSetProto.EntitySpec; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.FeatureSetProto.FeatureSpec; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FieldProto.Field; +import feast.proto.types.ValueProto.Value; +import feast.proto.types.ValueProto.ValueType.Enum; import feast.storage.api.writer.FailedElement; import feast.test.TestUtil; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.FieldProto.Field; -import feast.types.ValueProto.Value; -import feast.types.ValueProto.ValueType.Enum; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -39,7 +39,7 @@ import org.junit.Rule; import org.junit.Test; -public class ValidateFeatureRowsTest { +public class ProcessAndValidateFeatureRowsTest { @Rule public transient TestPipeline p = TestPipeline.create(); @@ -52,7 +52,6 @@ public void shouldWriteSuccessAndFailureTagsCorrectly() { FeatureSetSpec fs1 = FeatureSetSpec.newBuilder() .setName("feature_set") - .setVersion(1) .setProject("myproject") .addEntities( EntitySpec.newBuilder() @@ -72,8 +71,7 @@ public void shouldWriteSuccessAndFailureTagsCorrectly() { FeatureSetSpec fs2 = FeatureSetSpec.newBuilder() - .setName("feature_set") - .setVersion(2) + .setName("feature_set_2") .setProject("myproject") .addEntities( EntitySpec.newBuilder() @@ -92,8 +90,8 @@ public void shouldWriteSuccessAndFailureTagsCorrectly() { .build(); Map featureSetSpecs = new HashMap<>(); - featureSetSpecs.put("myproject/feature_set:1", fs1); - featureSetSpecs.put("myproject/feature_set:2", fs2); + featureSetSpecs.put("myproject/feature_set", fs1); + featureSetSpecs.put("myproject/feature_set_2", fs2); List input = new ArrayList<>(); List expected = new ArrayList<>(); @@ -110,7 +108,8 @@ public void shouldWriteSuccessAndFailureTagsCorrectly() { p.apply(Create.of(input)) .setCoder(ProtoCoder.of(FeatureRow.class)) .apply( - ValidateFeatureRows.newBuilder() + ProcessAndValidateFeatureRows.newBuilder() + .setDefaultProject("myproject") .setFailureTag(FAILURE_TAG) .setSuccessTag(SUCCESS_TAG) .setFeatureSetSpecs(featureSetSpecs) @@ -122,12 +121,109 @@ public void shouldWriteSuccessAndFailureTagsCorrectly() { p.run(); } + @Test + public void shouldStripVersions() { + 
FeatureSetSpec fs1 = + FeatureSetSpec.newBuilder() + .setName("feature_set") + .setProject("myproject") + .addEntities( + EntitySpec.newBuilder() + .setName("entity_id_primary") + .setValueType(Enum.INT32) + .build()) + .addEntities( + EntitySpec.newBuilder() + .setName("entity_id_secondary") + .setValueType(Enum.STRING) + .build()) + .addFeatures( + FeatureSpec.newBuilder().setName("feature_1").setValueType(Enum.STRING).build()) + .addFeatures( + FeatureSpec.newBuilder().setName("feature_2").setValueType(Enum.INT64).build()) + .build(); + + Map featureSetSpecs = new HashMap<>(); + featureSetSpecs.put("myproject/feature_set", fs1); + + List input = new ArrayList<>(); + List expected = new ArrayList<>(); + + FeatureRow randomRow = TestUtil.createRandomFeatureRow(fs1); + expected.add(randomRow); + randomRow = randomRow.toBuilder().setFeatureSet("myproject/feature_set:1").build(); + input.add(randomRow); + + PCollectionTuple output = + p.apply(Create.of(input)) + .setCoder(ProtoCoder.of(FeatureRow.class)) + .apply( + ProcessAndValidateFeatureRows.newBuilder() + .setDefaultProject("myproject") + .setFailureTag(FAILURE_TAG) + .setSuccessTag(SUCCESS_TAG) + .setFeatureSetSpecs(featureSetSpecs) + .build()); + + PAssert.that(output.get(SUCCESS_TAG)).containsInAnyOrder(expected); + + p.run(); + } + + @Test + public void shouldApplyDefaultProject() { + FeatureSetSpec fs1 = + FeatureSetSpec.newBuilder() + .setName("feature_set") + .setProject("myproject") + .addEntities( + EntitySpec.newBuilder() + .setName("entity_id_primary") + .setValueType(Enum.INT32) + .build()) + .addEntities( + EntitySpec.newBuilder() + .setName("entity_id_secondary") + .setValueType(Enum.STRING) + .build()) + .addFeatures( + FeatureSpec.newBuilder().setName("feature_1").setValueType(Enum.STRING).build()) + .addFeatures( + FeatureSpec.newBuilder().setName("feature_2").setValueType(Enum.INT64).build()) + .build(); + + Map featureSetSpecs = new HashMap<>(); + featureSetSpecs.put("myproject/feature_set", fs1); + + List input = new ArrayList<>(); + List expected = new ArrayList<>(); + + FeatureRow randomRow = TestUtil.createRandomFeatureRow(fs1); + expected.add(randomRow); + randomRow = randomRow.toBuilder().setFeatureSet("feature_set").build(); + input.add(randomRow); + + PCollectionTuple output = + p.apply(Create.of(input)) + .setCoder(ProtoCoder.of(FeatureRow.class)) + .apply( + ProcessAndValidateFeatureRows.newBuilder() + .setDefaultProject("myproject") + .setFailureTag(FAILURE_TAG) + .setSuccessTag(SUCCESS_TAG) + .setFeatureSetSpecs(featureSetSpecs) + .build()); + + PAssert.that(output.get(SUCCESS_TAG)).containsInAnyOrder(expected); + + p.run(); + } + @Test public void shouldExcludeUnregisteredFields() { FeatureSetSpec fs1 = FeatureSetSpec.newBuilder() .setName("feature_set") - .setVersion(1) .setProject("myproject") .addEntities( EntitySpec.newBuilder() @@ -146,7 +242,7 @@ public void shouldExcludeUnregisteredFields() { .build(); Map featureSets = new HashMap<>(); - featureSets.put("myproject/feature_set:1", fs1); + featureSets.put("myproject/feature_set", fs1); List input = new ArrayList<>(); List expected = new ArrayList<>(); @@ -166,7 +262,8 @@ public void shouldExcludeUnregisteredFields() { p.apply(Create.of(input)) .setCoder(ProtoCoder.of(FeatureRow.class)) .apply( - ValidateFeatureRows.newBuilder() + ProcessAndValidateFeatureRows.newBuilder() + .setDefaultProject("myproject") .setFailureTag(FAILURE_TAG) .setSuccessTag(SUCCESS_TAG) .setFeatureSetSpecs(featureSets) diff --git 
a/ingestion/src/test/java/feast/ingestion/transform/metrics/WriteFeatureValueMetricsDoFnTest.java b/ingestion/src/test/java/feast/ingestion/transform/metrics/WriteFeatureValueMetricsDoFnTest.java index cc65f2cff9..e4686e89aa 100644 --- a/ingestion/src/test/java/feast/ingestion/transform/metrics/WriteFeatureValueMetricsDoFnTest.java +++ b/ingestion/src/test/java/feast/ingestion/transform/metrics/WriteFeatureValueMetricsDoFnTest.java @@ -21,18 +21,18 @@ import com.google.protobuf.ByteString; import com.google.protobuf.Timestamp; import com.google.protobuf.util.Timestamps; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FeatureRowProto.FeatureRow.Builder; +import feast.proto.types.FieldProto.Field; +import feast.proto.types.ValueProto.BoolList; +import feast.proto.types.ValueProto.BytesList; +import feast.proto.types.ValueProto.DoubleList; +import feast.proto.types.ValueProto.FloatList; +import feast.proto.types.ValueProto.Int32List; +import feast.proto.types.ValueProto.Int64List; +import feast.proto.types.ValueProto.StringList; +import feast.proto.types.ValueProto.Value; import feast.test.TestUtil.DummyStatsDServer; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.FeatureRowProto.FeatureRow.Builder; -import feast.types.FieldProto.Field; -import feast.types.ValueProto.BoolList; -import feast.types.ValueProto.BytesList; -import feast.types.ValueProto.DoubleList; -import feast.types.ValueProto.FloatList; -import feast.types.ValueProto.Int32List; -import feast.types.ValueProto.Int64List; -import feast.types.ValueProto.StringList; -import feast.types.ValueProto.Value; import java.io.BufferedReader; import java.io.IOException; import java.net.URL; diff --git a/ingestion/src/test/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFnTest.java b/ingestion/src/test/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFnTest.java index 6e3caff56b..3309a0cdeb 100644 --- a/ingestion/src/test/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFnTest.java +++ b/ingestion/src/test/java/feast/ingestion/transform/metrics/WriteRowMetricsDoFnTest.java @@ -20,8 +20,8 @@ import static feast.ingestion.transform.metrics.WriteFeatureValueMetricsDoFnTest.readTestOutput; import static org.junit.Assert.fail; +import feast.proto.types.FeatureRowProto.FeatureRow; import feast.test.TestUtil.DummyStatsDServer; -import feast.types.FeatureRowProto.FeatureRow; import java.io.IOException; import java.time.Clock; import java.time.Instant; diff --git a/ingestion/src/test/java/feast/test/TestUtil.java b/ingestion/src/test/java/feast/test/TestUtil.java index 2cd3242fb0..3204d93bcd 100644 --- a/ingestion/src/test/java/feast/test/TestUtil.java +++ b/ingestion/src/test/java/feast/test/TestUtil.java @@ -20,14 +20,14 @@ import com.google.protobuf.ByteString; import com.google.protobuf.util.Timestamps; -import feast.core.FeatureSetProto.FeatureSet; -import feast.core.FeatureSetProto.FeatureSetSpec; import feast.ingestion.transform.metrics.WriteSuccessMetricsTransform; -import feast.storage.RedisProto.RedisKey; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.FeatureRowProto.FeatureRow.Builder; -import feast.types.FieldProto.Field; -import feast.types.ValueProto.*; +import feast.proto.core.FeatureSetProto.FeatureSet; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.storage.RedisProto.RedisKey; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FeatureRowProto.FeatureRow.Builder; +import 
feast.proto.types.FieldProto.Field; +import feast.proto.types.ValueProto.*; import java.io.IOException; import java.net.DatagramPacket; import java.net.DatagramSocket; diff --git a/ingestion/src/test/resources/feast/ingestion/transform/WriteFeatureValueMetricsDoFnTest.input b/ingestion/src/test/resources/feast/ingestion/transform/WriteFeatureValueMetricsDoFnTest.input index d2985711ce..42731b9fe1 100644 --- a/ingestion/src/test/resources/feast/ingestion/transform/WriteFeatureValueMetricsDoFnTest.input +++ b/ingestion/src/test/resources/feast/ingestion/transform/WriteFeatureValueMetricsDoFnTest.input @@ -1,4 +1,4 @@ featuresetref,int32,int64,double,float,bool,int32list,int64list,doublelist,floatlist,boollist,bytes,byteslist,string,stringlist -project/featureset:1,1,5,8,5,true,1|4|3,5|1|12,5|7|3,-2.0,true|false,,,, -project/featureset:1,5,-10,8,10.0,true,1|12|5,,,-1.0|-3.0,false|true,,,, -project/featureset:1,6,-4,8,0.0,true,2,2|5,,,true|false,,,, \ No newline at end of file +project/featureset,1,5,8,5,true,1|4|3,5|1|12,5|7|3,-2.0,true|false,,,, +project/featureset,5,-10,8,10.0,true,1|12|5,,,-1.0|-3.0,false|true,,,, +project/featureset,6,-4,8,0.0,true,2,2|5,,,true|false,,,, \ No newline at end of file diff --git a/ingestion/src/test/resources/feast/ingestion/transform/WriteFeatureValueMetricsDoFnTest.output b/ingestion/src/test/resources/feast/ingestion/transform/WriteFeatureValueMetricsDoFnTest.output index 63bc7bbfa4..12ed4b7e1f 100644 --- a/ingestion/src/test/resources/feast/ingestion/transform/WriteFeatureValueMetricsDoFnTest.output +++ b/ingestion/src/test/resources/feast/ingestion/transform/WriteFeatureValueMetricsDoFnTest.output @@ -1,66 +1,66 @@ -feast_ingestion.feature_value_min:1|g|#ingestion_job_name:job,feast_feature_name:int32,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_max:6|g|#ingestion_job_name:job,feast_feature_name:int32,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_mean:4|g|#ingestion_job_name:job,feast_feature_name:int32,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_50:5|g|#ingestion_job_name:job,feast_feature_name:int32,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_90:6|g|#ingestion_job_name:job,feast_feature_name:int32,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_min:1|g|#ingestion_job_name:job,feast_feature_name:int32,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_max:6|g|#ingestion_job_name:job,feast_feature_name:int32,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_mean:4|g|#ingestion_job_name:job,feast_feature_name:int32,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_50:5|g|#ingestion_job_name:job,feast_feature_name:int32,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_90:6|g|#ingestion_job_name:job,feast_feature_name:int32,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store 
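These fixture lines are DogStatsD wire format, metric:value|type|#tag:value,..., and the only change is that the feast_featureSet_version tag drops out of every tag list. A sketch of how one such gauge is emitted, assuming the java-dogstatsd-client flavor of com.timgroup.statsd that the tag varargs in these DoFns imply (host and port are illustrative):

import com.timgroup.statsd.NonBlockingStatsDClient;
import com.timgroup.statsd.StatsDClient;

public class TaggedGaugeExample {
  public static void main(String[] args) {
    // "feast_ingestion" is the metric prefix used throughout these fixtures.
    StatsDClient statsd = new NonBlockingStatsDClient("feast_ingestion", "localhost", 8125);

    // Post-change tag set: no feast_featureSet_version entry anymore.
    statsd.gauge(
        "feature_value_min",
        1,
        "ingestion_job_name:job",
        "feast_feature_name:int32",
        "feast_featureSet_name:featureset",
        "feast_project_name:project",
        "feast_store:store");
    // Wire line, as asserted by the fixture:
    // feast_ingestion.feature_value_min:1|g|#ingestion_job_name:job,feast_feature_name:int32,...

    statsd.stop();
  }
}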
-feast_ingestion.feature_value_min:0|g|#ingestion_job_name:job,feast_feature_name:int64,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_min:-10|g|#ingestion_job_name:job,feast_feature_name:int64,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_max:5|g|#ingestion_job_name:job,feast_feature_name:int64,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_mean:0|g|#ingestion_job_name:job,feast_feature_name:int64,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_mean:-3|g|#ingestion_job_name:job,feast_feature_name:int64,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_50:-4|g|#ingestion_job_name:job,feast_feature_name:int64,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_90:5|g|#ingestion_job_name:job,feast_feature_name:int64,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_min:0|g|#ingestion_job_name:job,feast_feature_name:int64,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_min:-10|g|#ingestion_job_name:job,feast_feature_name:int64,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_max:5|g|#ingestion_job_name:job,feast_feature_name:int64,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_mean:0|g|#ingestion_job_name:job,feast_feature_name:int64,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_mean:-3|g|#ingestion_job_name:job,feast_feature_name:int64,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_50:-4|g|#ingestion_job_name:job,feast_feature_name:int64,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_90:5|g|#ingestion_job_name:job,feast_feature_name:int64,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_min:8|g|#ingestion_job_name:job,feast_feature_name:double,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_max:8|g|#ingestion_job_name:job,feast_feature_name:double,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_mean:8|g|#ingestion_job_name:job,feast_feature_name:double,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_50:8|g|#ingestion_job_name:job,feast_feature_name:double,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store 
-feast_ingestion.feature_value_percentile_90:8|g|#ingestion_job_name:job,feast_feature_name:double,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_min:8|g|#ingestion_job_name:job,feast_feature_name:double,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_max:8|g|#ingestion_job_name:job,feast_feature_name:double,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_mean:8|g|#ingestion_job_name:job,feast_feature_name:double,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_50:8|g|#ingestion_job_name:job,feast_feature_name:double,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_90:8|g|#ingestion_job_name:job,feast_feature_name:double,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_min:0|g|#ingestion_job_name:job,feast_feature_name:float,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_max:10|g|#ingestion_job_name:job,feast_feature_name:float,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_mean:5|g|#ingestion_job_name:job,feast_feature_name:float,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_50:5|g|#ingestion_job_name:job,feast_feature_name:float,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_90:10|g|#ingestion_job_name:job,feast_feature_name:float,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_min:0|g|#ingestion_job_name:job,feast_feature_name:float,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_max:10|g|#ingestion_job_name:job,feast_feature_name:float,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_mean:5|g|#ingestion_job_name:job,feast_feature_name:float,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_50:5|g|#ingestion_job_name:job,feast_feature_name:float,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_90:10|g|#ingestion_job_name:job,feast_feature_name:float,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_min:1|g|#ingestion_job_name:job,feast_feature_name:bool,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_max:1|g|#ingestion_job_name:job,feast_feature_name:bool,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_mean:1|g|#ingestion_job_name:job,feast_feature_name:bool,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store 
-feast_ingestion.feature_value_percentile_50:1|g|#ingestion_job_name:job,feast_feature_name:bool,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_90:1|g|#ingestion_job_name:job,feast_feature_name:bool,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_min:1|g|#ingestion_job_name:job,feast_feature_name:bool,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_max:1|g|#ingestion_job_name:job,feast_feature_name:bool,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_mean:1|g|#ingestion_job_name:job,feast_feature_name:bool,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_50:1|g|#ingestion_job_name:job,feast_feature_name:bool,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_90:1|g|#ingestion_job_name:job,feast_feature_name:bool,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_min:1|g|#ingestion_job_name:job,feast_feature_name:int32list,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_max:12|g|#ingestion_job_name:job,feast_feature_name:int32list,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_mean:4|g|#ingestion_job_name:job,feast_feature_name:int32list,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_50:3|g|#ingestion_job_name:job,feast_feature_name:int32list,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_90:12|g|#ingestion_job_name:job,feast_feature_name:int32list,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_min:1|g|#ingestion_job_name:job,feast_feature_name:int32list,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_max:12|g|#ingestion_job_name:job,feast_feature_name:int32list,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_mean:4|g|#ingestion_job_name:job,feast_feature_name:int32list,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_50:3|g|#ingestion_job_name:job,feast_feature_name:int32list,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_90:12|g|#ingestion_job_name:job,feast_feature_name:int32list,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_min:1|g|#ingestion_job_name:job,feast_feature_name:int64list,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_max:12|g|#ingestion_job_name:job,feast_feature_name:int64list,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store 
-feast_ingestion.feature_value_mean:5|g|#ingestion_job_name:job,feast_feature_name:int64list,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_50:5|g|#ingestion_job_name:job,feast_feature_name:int64list,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_90:12|g|#ingestion_job_name:job,feast_feature_name:int64list,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_min:1|g|#ingestion_job_name:job,feast_feature_name:int64list,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_max:12|g|#ingestion_job_name:job,feast_feature_name:int64list,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_mean:5|g|#ingestion_job_name:job,feast_feature_name:int64list,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_50:5|g|#ingestion_job_name:job,feast_feature_name:int64list,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_90:12|g|#ingestion_job_name:job,feast_feature_name:int64list,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_min:3|g|#ingestion_job_name:job,feast_feature_name:doublelist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_max:7|g|#ingestion_job_name:job,feast_feature_name:doublelist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_mean:5|g|#ingestion_job_name:job,feast_feature_name:doublelist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_50:5|g|#ingestion_job_name:job,feast_feature_name:doublelist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_90:7|g|#ingestion_job_name:job,feast_feature_name:doublelist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_min:3|g|#ingestion_job_name:job,feast_feature_name:doublelist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_max:7|g|#ingestion_job_name:job,feast_feature_name:doublelist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_mean:5|g|#ingestion_job_name:job,feast_feature_name:doublelist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_50:5|g|#ingestion_job_name:job,feast_feature_name:doublelist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_90:7|g|#ingestion_job_name:job,feast_feature_name:doublelist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store 
-feast_ingestion.feature_value_min:0|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_min:-3|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_max:0|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_max:-1|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_mean:0|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_mean:-2|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_50:0|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_50:-2|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_90:0|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_90:-1|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_min:0|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_min:-3|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_max:0|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_max:-1|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_mean:0|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_mean:-2|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_50:0|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_50:-2|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store 
+feast_ingestion.feature_value_percentile_90:0|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_90:-1|g|#ingestion_job_name:job,feast_feature_name:floatlist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_min:0|g|#ingestion_job_name:job,feast_feature_name:boollist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_max:1|g|#ingestion_job_name:job,feast_feature_name:boollist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_mean:0.5|g|#ingestion_job_name:job,feast_feature_name:boollist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_50:0.5|g|#ingestion_job_name:job,feast_feature_name:boollist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_percentile_90:1|g|#ingestion_job_name:job,feast_feature_name:boollist,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store \ No newline at end of file +feast_ingestion.feature_value_min:0|g|#ingestion_job_name:job,feast_feature_name:boollist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_max:1|g|#ingestion_job_name:job,feast_feature_name:boollist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_mean:0.5|g|#ingestion_job_name:job,feast_feature_name:boollist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_50:0.5|g|#ingestion_job_name:job,feast_feature_name:boollist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_percentile_90:1|g|#ingestion_job_name:job,feast_feature_name:boollist,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store \ No newline at end of file diff --git a/ingestion/src/test/resources/feast/ingestion/transform/WriteRowMetricsDoFnTest.input b/ingestion/src/test/resources/feast/ingestion/transform/WriteRowMetricsDoFnTest.input index 4d42f5bc4c..c5543d2889 100644 --- a/ingestion/src/test/resources/feast/ingestion/transform/WriteRowMetricsDoFnTest.input +++ b/ingestion/src/test/resources/feast/ingestion/transform/WriteRowMetricsDoFnTest.input @@ -1,4 +1,4 @@ featuresetref,int32,int64,timestamp -project/featureset:1,1,5,2020-03-30T06:10:38Z -project/featureset:1,5,8,2020-03-30T06:10:43Z -project/featureset:1,6,,2020-03-30T06:10:42Z \ No newline at end of file +project/featureset,1,5,2020-03-30T06:10:38Z +project/featureset,5,8,2020-03-30T06:10:43Z +project/featureset,6,,2020-03-30T06:10:42Z \ No newline at end of file diff --git a/ingestion/src/test/resources/feast/ingestion/transform/WriteRowMetricsDoFnTest.output b/ingestion/src/test/resources/feast/ingestion/transform/WriteRowMetricsDoFnTest.output index 318ce8eb08..954215764f 100644 --- a/ingestion/src/test/resources/feast/ingestion/transform/WriteRowMetricsDoFnTest.output +++ b/ingestion/src/test/resources/feast/ingestion/transform/WriteRowMetricsDoFnTest.output @@ -1,23 +1,23 @@ 
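The expected row-metric gauges below follow from the three input rows above: lag is the gap between a fixed processing-time clock and each row's event timestamp, aggregated with the same commons-math DescriptiveStatistics the DoFn uses. A compact reproduction of that arithmetic (the clock instant is inferred from the fixture values, not stated in the commit):

import java.time.Instant;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;

public class RowLagArithmetic {
  public static void main(String[] args) {
    // Processing-time clock implied by the expected output (lag of the newest row is 2000 ms).
    Instant now = Instant.parse("2020-03-30T06:10:45Z");
    String[] eventTimes = {"2020-03-30T06:10:38Z", "2020-03-30T06:10:43Z", "2020-03-30T06:10:42Z"};

    DescriptiveStatistics lag = new DescriptiveStatistics();
    for (String ts : eventTimes) {
      lag.addValue(now.toEpochMilli() - Instant.parse(ts).toEpochMilli());
    }

    // min=2000, max=7000, mean=4000, p90/p95/p99=7000 -- the feature_row_lag_ms_* gauges below.
    System.out.printf(
        "min=%.0f max=%.0f mean=%.0f p90=%.0f%n",
        lag.getMin(), lag.getMax(), lag.getMean(), lag.getPercentile(90));
  }
}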
-feast_ingestion.feature_row_ingested_count:3|c|#ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_row_lag_ms_min:2000|g|#ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_row_lag_ms_max:7000|g|#ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_row_lag_ms_mean:4000|g|#ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_row_lag_ms_percentile_90:7000|g|#ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_row_lag_ms_percentile_95:7000|g|#ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_row_lag_ms_percentile_99:7000|g|#ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_row_ingested_count:3|c|#ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_row_lag_ms_min:2000|g|#ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_row_lag_ms_max:7000|g|#ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_row_lag_ms_mean:4000|g|#ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_row_lag_ms_percentile_90:7000|g|#ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_row_lag_ms_percentile_95:7000|g|#ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_row_lag_ms_percentile_99:7000|g|#ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_lag_ms_min:2000|g|#feast_feature_name:int32,ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_lag_ms_max:7000|g|#feast_feature_name:int32,ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_lag_ms_mean:4000|g|#feast_feature_name:int32,ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_lag_ms_percentile_90:7000|g|#feast_feature_name:int32,ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_lag_ms_percentile_95:7000|g|#feast_feature_name:int32,ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store 
-feast_ingestion.feature_value_lag_ms_percentile_99:7000|g|#feast_feature_name:int32,ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_missing_count:0|c|#feast_feature_name:int32,ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_lag_ms_min:2000|g|#feast_feature_name:int32,ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_lag_ms_max:7000|g|#feast_feature_name:int32,ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_lag_ms_mean:4000|g|#feast_feature_name:int32,ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_lag_ms_percentile_90:7000|g|#feast_feature_name:int32,ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_lag_ms_percentile_95:7000|g|#feast_feature_name:int32,ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_lag_ms_percentile_99:7000|g|#feast_feature_name:int32,ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_missing_count:0|c|#feast_feature_name:int32,ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_lag_ms_min:2000|g|#feast_feature_name:int64,ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_lag_ms_max:7000|g|#feast_feature_name:int64,ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_lag_ms_mean:4500|g|#feast_feature_name:int64,ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_lag_ms_percentile_90:7000|g|#feast_feature_name:int64,ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_lag_ms_percentile_95:7000|g|#feast_feature_name:int64,ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_lag_ms_percentile_99:7000|g|#feast_feature_name:int64,ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store -feast_ingestion.feature_value_missing_count:1|c|#feast_feature_name:int64,ingestion_job_name:job,feast_featureSet_version:1,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store \ No newline at end of file +feast_ingestion.feature_value_lag_ms_min:2000|g|#feast_feature_name:int64,ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_lag_ms_max:7000|g|#feast_feature_name:int64,ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store 
+feast_ingestion.feature_value_lag_ms_mean:4500|g|#feast_feature_name:int64,ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_lag_ms_percentile_90:7000|g|#feast_feature_name:int64,ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_lag_ms_percentile_95:7000|g|#feast_feature_name:int64,ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_lag_ms_percentile_99:7000|g|#feast_feature_name:int64,ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store +feast_ingestion.feature_value_missing_count:1|c|#feast_feature_name:int64,ingestion_job_name:job,feast_featureSet_name:featureset,feast_project_name:project,feast_store:store \ No newline at end of file diff --git a/pom.xml b/pom.xml index 5a9ab5292a..77db4d2137 100644 --- a/pom.xml +++ b/pom.xml @@ -35,11 +35,12 @@ core serving sdk/java + docs/coverage/java - 0.5-SNAPSHOT - https://github.com/gojek/feast + 0.6-SNAPSHOT + https://github.com/feast-dev/feast UTF-8 UTF-8 @@ -53,6 +54,7 @@ 0.8.0 1.9.10 1.3 + 5.3.6.Final 2.3.0 2.28.2 @@ -86,7 +88,7 @@ ${github.url} scm:git:${github.url}.git - scm:git:git@github.com:gojek/feast.git + scm:git:git@github.com:feast-dev/feast.git HEAD @@ -248,6 +250,11 @@ kafka-clients ${kafka.version} + + org.hibernate + hibernate-core + ${hibernate.version} + net.bytebuddy byte-buddy @@ -410,7 +417,8 @@ org.apache.maven.plugins maven-compiler-plugin - 11 + + 8 @@ -471,9 +479,9 @@ org.apache.maven.plugins maven-surefire-plugin - 2.22.1 + 3.0.0-M4 - -Xms2048m -Xmx2048m -Djdk.net.URLClassPath.disableClassPathURLCheck=true + @{argLine} -Xms2048m -Xmx2048m -Djdk.net.URLClassPath.disableClassPathURLCheck=true IntegrationTest @@ -486,6 +494,14 @@ true + + + build-info + + build-info + + + @@ -606,6 +622,18 @@ false + + org.jacoco + jacoco-maven-plugin + 0.8.5 + + + + prepare-agent + + + + org.springframework.boot spring-boot-maven-plugin diff --git a/protos/feast/core/CoreService.proto b/protos/feast/core/CoreService.proto index b7760d0b9a..3cd3c75683 100644 --- a/protos/feast/core/CoreService.proto +++ b/protos/feast/core/CoreService.proto @@ -15,12 +15,11 @@ // syntax = "proto3"; - package feast.core; -option go_package = "github.com/gojek/feast/sdk/go/protos/feast/core"; +option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/core"; option java_outer_classname = "CoreServiceProto"; -option java_package = "feast.core"; +option java_package = "feast.proto.core"; import "feast/core/FeatureSet.proto"; import "feast/core/Store.proto"; @@ -52,8 +51,11 @@ service CoreService { // Create or update an existing feature set. // // This function is idempotent - it will not create a new feature set if the schema does not change. - // If an existing feature set is updated, core will advance the version number, which will be - // returned in response. + // Schema changes will update the feature set if the changes are valid. + // All changes except the following are valid: + // - Changes to feature set id (name, project) + // - Changes to entities + // - Changes to feature name and type rpc ApplyFeatureSet (ApplyFeatureSetRequest) returns (ApplyFeatureSetResponse); // Updates core with the configuration of the store.
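The reworked ApplyFeatureSet contract above is easiest to see from the client side. The following is a minimal sketch, assuming the regenerated Go stubs under github.com/feast-dev/feast/sdk/go/protos/feast/core and a Core instance at localhost:6565; the address and the elided spec contents are illustrative, not part of this change:

```{go}
package main

import (
	"context"
	"log"

	core "github.com/feast-dev/feast/sdk/go/protos/feast/core"
	"google.golang.org/grpc"
)

func main() {
	conn, err := grpc.Dial("localhost:6565", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
	client := core.NewCoreServiceClient(conn)

	// Applying an unchanged spec is a no-op (NO_CHANGE); a valid schema
	// change such as an added feature now yields UPDATED instead of a new
	// feature set version.
	resp, err := client.ApplyFeatureSet(context.Background(), &core.ApplyFeatureSetRequest{
		FeatureSet: &core.FeatureSet{ /* spec for e.g. project "default", name "driver" */ },
	})
	if err != nil {
		log.Fatal(err)
	}
	log.Println("status:", resp.GetStatus()) // NO_CHANGE, CREATED, ERROR or UPDATED
}
```

The invalid cases listed above (feature set id, entities, feature name or type) are the ones that cannot be applied as in-place updates.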
@@ -63,8 +65,8 @@ service CoreService { rpc UpdateStore (UpdateStoreRequest) returns (UpdateStoreResponse); // Creates a project. Projects serve as namespaces within which resources like features will be - // created. Both feature set names as well as field names must be unique within a project. Project - // names themselves must be globally unique. + // created. Feature set names must be unique within a project, while field (Feature/Entity) names + // must be unique within a Feature Set. Project names themselves must be globally unique. rpc CreateProject (CreateProjectRequest) returns (CreateProjectResponse); // Archives a project. Archived projects will continue to exist and function, but won't be visible @@ -97,14 +99,11 @@ service CoreService { // Request for a single feature set message GetFeatureSetRequest { - // Name of project the feature set belongs to (required) + // Name of project the feature set belongs to. If omitted will default to 'default' project. string project = 3; // Name of feature set (required). string name = 1; - - // Version of feature set (optional). If omitted then latest feature set will be returned. - int32 version = 2; } // Response containing a single feature set @@ -123,6 +122,7 @@ message ListFeatureSetsRequest { // If an asterisk is provided, filtering on projects will be disabled. All projects will // be matched. It is NOT possible to provide an asterisk with a string in order to do // pattern matching. + // If unspecified this field will default to the default project 'default'. string project = 3; // Name of the desired feature set. Asterisks can be used as wildcards in the name. @@ -133,15 +133,6 @@ message ListFeatureSetsRequest { // - my-feature-set* can be used to match all features prefixed by "my-feature-set" // - my-feature-set-6 can be used to select a single feature set string feature_set_name = 1; - - - // Versions of the given feature sets that will be returned. - // Valid options for version: - // "latest": only the latest version is returned. - // "*": Subscribe to all versions - // [version number]: pin to a specific version. Project and feature set name must be - // explicitly defined if a specific version is pinned. - string feature_set_version = 2; } } @@ -163,23 +154,28 @@ message ListStoresResponse { } message ApplyFeatureSetRequest { - // Feature set version and source will be ignored + // Feature set to apply. + // If project is unspecified, will default to 'default' project. + // If the specified project does not exist, it will be automatically created.
feast.core.FeatureSet feature_set = 1; } message ApplyFeatureSetResponse { + // TODO: 0 should correspond to invalid rather than NO_CHANGE enum Status { - // Latest feature set version is consistent with provided feature set + // Latest feature set is consistent with provided feature set NO_CHANGE = 0; - // New feature set or feature set version created + // New feature set created CREATED = 1; // Error occurred while trying to apply changes ERROR = 2; + + // Changes detected and updated successfully + UPDATED = 3; } - // Feature set response has been enriched with version and source information feast.core.FeatureSet feature_set = 1; Status status = 2; } diff --git a/protos/feast/core/FeatureSet.proto b/protos/feast/core/FeatureSet.proto index 429d99c854..b0b15276bc 100644 --- a/protos/feast/core/FeatureSet.proto +++ b/protos/feast/core/FeatureSet.proto @@ -16,9 +16,9 @@ syntax = "proto3"; package feast.core; -option java_package = "feast.core"; +option java_package = "feast.proto.core"; option java_outer_classname = "FeatureSetProto"; -option go_package = "github.com/gojek/feast/sdk/go/protos/feast/core"; +option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/core"; import "feast/types/Value.proto"; import "feast/core/Source.proto"; @@ -40,8 +40,8 @@ message FeatureSetSpec { // Name of the feature set. Must be unique. string name = 1; - // Feature set version. - int32 version = 2; + // Feature set version was removed in v0.5.0. + reserved 2; // List of entities contained within this featureSet. // This allows the feature to be used during joins between feature sets. @@ -60,54 +60,17 @@ message FeatureSetSpec { // Optional. Source on which feature rows can be found. // If not set, source will be set to the default value configured in Feast Core. Source source = 6; + + // User defined metadata + map<string, string> labels = 8; } message EntitySpec { // Name of the entity. string name = 1; - // Value type of the feature. + // Value type of the entity. feast.types.ValueType.Enum value_type = 2; - - // presence_constraints, shape_type and domain_info are referenced from: - // https://github.com/tensorflow/metadata/blob/36f65d1268cbc92cdbcf812ee03dcf47fb53b91e/tensorflow_metadata/proto/v0/schema.proto#L107 - - oneof presence_constraints { - // Constraints on the presence of this feature in the examples. - tensorflow.metadata.v0.FeaturePresence presence = 3; - // Only used in the context of a "group" context, e.g., inside a sequence. - tensorflow.metadata.v0.FeaturePresenceWithinGroup group_presence = 4; - } - - // The shape of the feature which governs the number of values that appear in - // each example. - oneof shape_type { - // The feature has a fixed shape corresponding to a multi-dimensional - // tensor. - tensorflow.metadata.v0.FixedShape shape = 5; - // The feature doesn't have a well defined shape. All we know are limits on - // the minimum and maximum number of values. - tensorflow.metadata.v0.ValueCount value_count = 6; - } - - // Domain for the values of the feature. - oneof domain_info { - // Reference to a domain defined at the schema level. - string domain = 7; - // Inline definitions of domains. - tensorflow.metadata.v0.IntDomain int_domain = 8; - tensorflow.metadata.v0.FloatDomain float_domain = 9; - tensorflow.metadata.v0.StringDomain string_domain = 10; - tensorflow.metadata.v0.BoolDomain bool_domain = 11; - tensorflow.metadata.v0.StructDomain struct_domain = 12; - // Supported semantic domains.
- tensorflow.metadata.v0.NaturalLanguageDomain natural_language_domain = 13; - tensorflow.metadata.v0.ImageDomain image_domain = 14; - tensorflow.metadata.v0.MIDDomain mid_domain = 15; - tensorflow.metadata.v0.URLDomain url_domain = 16; - tensorflow.metadata.v0.TimeDomain time_domain = 17; - tensorflow.metadata.v0.TimeOfDayDomain time_of_day_domain = 18; - } } message FeatureSpec { @@ -117,14 +80,24 @@ message FeatureSpec { // Value type of the feature. feast.types.ValueType.Enum value_type = 2; + // Reserve field numbers 15 and below for fields that will almost always be set + // https://developers.google.com/protocol-buffers/docs/proto3#assigning-field-numbers + reserved 3 to 15; + + // Labels for user defined metadata on a feature + map<string, string> labels = 16; + + // Reserved for fundamental future additions that are less noisy in the schema than the TFDV stats fields + reserved 17 to 29; + // presence_constraints, shape_type and domain_info are referenced from: // https://github.com/tensorflow/metadata/blob/36f65d1268cbc92cdbcf812ee03dcf47fb53b91e/tensorflow_metadata/proto/v0/schema.proto#L107 oneof presence_constraints { // Constraints on the presence of this feature in the examples. - tensorflow.metadata.v0.FeaturePresence presence = 3; + tensorflow.metadata.v0.FeaturePresence presence = 30; // Only used in the context of a "group" context, e.g., inside a sequence. - tensorflow.metadata.v0.FeaturePresenceWithinGroup group_presence = 4; + tensorflow.metadata.v0.FeaturePresenceWithinGroup group_presence = 31; } // The shape of the feature which governs the number of values that appear in @@ -132,29 +105,29 @@ message FeatureSpec { oneof shape_type { // The feature has a fixed shape corresponding to a multi-dimensional // tensor. - tensorflow.metadata.v0.FixedShape shape = 5; + tensorflow.metadata.v0.FixedShape shape = 32; // The feature doesn't have a well defined shape. All we know are limits on // the minimum and maximum number of values. - tensorflow.metadata.v0.ValueCount value_count = 6; + tensorflow.metadata.v0.ValueCount value_count = 33; } // Domain for the values of the feature. oneof domain_info { // Reference to a domain defined at the schema level. - string domain = 7; + string domain = 34; // Inline definitions of domains. - tensorflow.metadata.v0.IntDomain int_domain = 8; - tensorflow.metadata.v0.FloatDomain float_domain = 9; - tensorflow.metadata.v0.StringDomain string_domain = 10; - tensorflow.metadata.v0.BoolDomain bool_domain = 11; - tensorflow.metadata.v0.StructDomain struct_domain = 12; + tensorflow.metadata.v0.IntDomain int_domain = 35; + tensorflow.metadata.v0.FloatDomain float_domain = 36; + tensorflow.metadata.v0.StringDomain string_domain = 37; + tensorflow.metadata.v0.BoolDomain bool_domain = 38; + tensorflow.metadata.v0.StructDomain struct_domain = 39; // Supported semantic domains.
- tensorflow.metadata.v0.NaturalLanguageDomain natural_language_domain = 13; - tensorflow.metadata.v0.ImageDomain image_domain = 14; - tensorflow.metadata.v0.MIDDomain mid_domain = 15; - tensorflow.metadata.v0.URLDomain url_domain = 16; - tensorflow.metadata.v0.TimeDomain time_domain = 17; - tensorflow.metadata.v0.TimeOfDayDomain time_of_day_domain = 18; + tensorflow.metadata.v0.NaturalLanguageDomain natural_language_domain = 40; + tensorflow.metadata.v0.ImageDomain image_domain = 41; + tensorflow.metadata.v0.MIDDomain mid_domain = 42; + tensorflow.metadata.v0.URLDomain url_domain = 43; + tensorflow.metadata.v0.TimeDomain time_domain = 44; + tensorflow.metadata.v0.TimeOfDayDomain time_of_day_domain = 45; } } @@ -174,5 +147,6 @@ message FeatureSetMeta { enum FeatureSetStatus { STATUS_INVALID = 0; STATUS_PENDING = 1; + STATUS_JOB_STARTING = 3; STATUS_READY = 2; } diff --git a/protos/feast/core/FeatureSetReference.proto b/protos/feast/core/FeatureSetReference.proto index 2501ec0931..85762512ca 100644 --- a/protos/feast/core/FeatureSetReference.proto +++ b/protos/feast/core/FeatureSetReference.proto @@ -18,9 +18,9 @@ syntax = "proto3"; package feast.core; -option go_package = "github.com/gojek/feast/sdk/go/protos/feast/core"; +option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/core"; option java_outer_classname = "FeatureSetReferenceProto"; -option java_package = "feast.core"; +option java_package = "feast.proto.core"; // Defines a composite key that refers to a unique FeatureSet message FeatureSetReference { @@ -28,6 +28,6 @@ message FeatureSetReference { string project = 1; // Name of the FeatureSet string name = 2; - // Version no. of the FeatureSet - int32 version = 3; + // Feature set version was removed in v0.5.0. + reserved 3; } diff --git a/protos/feast/core/IngestionJob.proto b/protos/feast/core/IngestionJob.proto index 68af28c076..c63e573ffe 100644 --- a/protos/feast/core/IngestionJob.proto +++ b/protos/feast/core/IngestionJob.proto @@ -18,9 +18,9 @@ syntax = "proto3"; package feast.core; -option go_package = "github.com/gojek/feast/sdk/go/protos/feast/core"; +option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/core"; option java_outer_classname = "IngestionJobProto"; -option java_package = "feast.core"; +option java_package = "feast.proto.core"; import "feast/core/FeatureSet.proto"; import "feast/core/Store.proto"; diff --git a/protos/feast/core/Runner.proto b/protos/feast/core/Runner.proto new file mode 100644 index 0000000000..91c1e99485 --- /dev/null +++ b/protos/feast/core/Runner.proto @@ -0,0 +1,76 @@ +// +// * Copyright 2020 The Feast Authors +// * +// * Licensed under the Apache License, Version 2.0 (the "License"); +// * you may not use this file except in compliance with the License. +// * You may obtain a copy of the License at +// * +// * https://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. 
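With field 3 reserved, a FeatureSetReference reduces to a (project, name) pair, and its string form loses the version suffix everywhere in this changeset. A small illustrative helper follows; the struct and String method are a readability sketch, not an API added by this diff:

```{go}
package main

import "fmt"

// FeatureSetRef mirrors feast.core.FeatureSetReference after this change:
// the composite key is just (project, name); field 3 (version) is reserved.
type FeatureSetRef struct {
	Project string
	Name    string
}

// String renders the reference in the project/name form used elsewhere in
// this changeset, e.g. by FeatureRow.feature_set.
func (r FeatureSetRef) String() string {
	return fmt.Sprintf("%s/%s", r.Project, r.Name)
}

func main() {
	ref := FeatureSetRef{Project: "default", Name: "driver"}
	fmt.Println(ref) // default/driver (no :version suffix any more)
}
```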
+// + +syntax = "proto3"; +package feast.core; + +option java_package = "feast.proto.core"; +option java_outer_classname = "RunnerProto"; +option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/core"; + +message DirectRunnerConfigOptions { + /** + * Controls the amount of target parallelism the DirectRunner will use. + * Defaults to the greater of the number of available processors and 3. Must be a value + * greater than zero. + */ + int32 targetParallelism = 1; + + /* BigQuery table specification, e.g. PROJECT_ID:DATASET_ID.TABLE_ID */ + string deadLetterTableSpec = 2; +} + +message DataflowRunnerConfigOptions { + /* Project id to use when launching jobs. */ + string project = 1; + + /* The Google Compute Engine region for creating Dataflow jobs. */ + string region = 2; + + /* GCP availability zone for operations. */ + string zone = 3; + + /* Run the job as a specific service account, instead of the default GCE robot. */ + string serviceAccount = 4; + + /* GCE network for launching workers. */ + string network = 5; + + /* GCE subnetwork for launching workers. e.g. regions/asia-east1/subnetworks/mysubnetwork */ + string subnetwork = 6; + + /* Machine type to create Dataflow worker VMs as. */ + string workerMachineType = 7; + + /* The autoscaling algorithm to use for the workerpool. */ + string autoscalingAlgorithm = 8; + + /* Specifies whether worker pools should be started with public IP addresses. */ + bool usePublicIps = 9; + + // A pipeline level default location for storing temporary files. Supports Google Cloud Storage locations, + // e.g. gs://bucket/object + string tempLocation = 10; + + /* The maximum number of workers to use for the workerpool. */ + int32 maxNumWorkers = 11; + + /* BigQuery table specification, e.g. PROJECT_ID:DATASET_ID.TABLE_ID */ + string deadLetterTableSpec = 12; + + /* Labels to apply to the dataflow job */ + map<string, string> labels = 13; +} \ No newline at end of file diff --git a/protos/feast/core/Source.proto b/protos/feast/core/Source.proto index 8a6cbd415a..9dcbf2fa05 100644 --- a/protos/feast/core/Source.proto +++ b/protos/feast/core/Source.proto @@ -17,9 +17,9 @@ syntax = "proto3"; package feast.core; -option java_package = "feast.core"; +option java_package = "feast.proto.core"; option java_outer_classname = "SourceProto"; -option go_package = "github.com/gojek/feast/sdk/go/protos/feast/core"; +option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/core"; message Source { @@ -39,9 +39,15 @@ enum SourceType { } message KafkaSourceConfig { - // - bootstrapServers: [comma delimited value of host[:port]] + // Comma separated list of Kafka bootstrap servers (host[:port]). Used for feature sets without a defined source. string bootstrap_servers = 1; - // - topics: [Kafka topic name. This value is provisioned by core and should not be set by the user.] + // Kafka topic to use for feature sets without user defined topics string topic = 2; + + // Number of Kafka partitions to use for managed feature stream. + int32 partitions = 3; + + // Defines the number of copies of the managed feature stream in Kafka.
+ int32 replicationFactor = 4; } \ No newline at end of file diff --git a/protos/feast/core/Store.proto b/protos/feast/core/Store.proto index 931a9d46b6..7279c484ee 100644 --- a/protos/feast/core/Store.proto +++ b/protos/feast/core/Store.proto @@ -17,9 +17,9 @@ syntax = "proto3"; package feast.core; -option java_package = "feast.core"; +option java_package = "feast.proto.core"; option java_outer_classname = "StoreProto"; -option go_package = "github.com/gojek/feast/sdk/go/protos/feast/core"; +option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/core"; // Store provides a location where Feast reads and writes feature values. // Feature values will be written to the Store in the form of FeatureRow elements. @@ -48,12 +48,7 @@ message Store { // BigQuery stores a FeatureRow element as a row in a BigQuery table. // - // Table name is derived from the feature set name and version as: - // [feature_set_name]_v[feature_set_version] - // - // For example: - // A feature row for feature set "driver" and version "1" will be written - // to table "driver_v1". + // Table name is the same as the feature set name. // // The entities and features in a FeatureSetSpec correspond to the // fields in the BigQuery table (these make up the BigQuery schema). @@ -69,15 +64,11 @@ message Store { // ====================|==================|================================ // - event_timestamp | TIMESTAMP | event time of the FeatureRow // - created_timestamp | TIMESTAMP | processing time of the ingestion of the FeatureRow + // - ingestion_id | STRING | unique id identifying groups of rows that have been ingested together // - job_id | STRING | identifier for the job that writes the FeatureRow to the corresponding BigQuery table // // The BigQuery table created will be partitioned by the field "event_timestamp" // of the FeatureRow (https://cloud.google.com/bigquery/docs/partitioned-tables). - // - // Since newer version of feature set can introduce breaking, non backward- - // compatible BigQuery schema updates, incrementing the version of a - // feature set will result in the creation of a new empty BigQuery table - // with the new schema. // // The following table shows how ValueType in Feast is mapped to // BigQuery Standard SQL data types @@ -105,6 +96,8 @@ message Store { // Unsupported in Feast 0.3 CASSANDRA = 3; + + REDIS_CLUSTER = 4; } message RedisConfig { @@ -120,6 +113,9 @@ message Store { message BigQueryConfig { string project_id = 1; string dataset_id = 2; + string staging_location = 3; + int32 initial_retry_delay_seconds = 4; + int32 total_timeout_seconds = 5; } message CassandraConfig { @@ -127,6 +123,13 @@ message Store { int32 port = 2; } + message RedisClusterConfig { + // Comma separated list of Redis URIs for all the nodes in the Redis Cluster, e.g. host1:6379,host2:6379 + string connection_string = 1; + int32 initial_backoff_ms = 2; + int32 max_retries = 3; + } + message Subscription { // Name of project that the feature sets belong to. This can be one of // - [project_name] // - * // If an asterisk is provided, filtering on projects will be disabled. All projects will // be matched. It is NOT possible to provide an asterisk with a string in order to do // pattern matching. string project = 3; - // Name of the desired feature set. Asterisks can be used as wildcards in the name. // Matching on names is only permitted if a specific project is defined. It is disallowed // if the project name is set to "*" // e.g. // - * can be used to match all feature sets // - my-feature-set* can be used to match all features prefixed by "my-feature-set" // - my-feature-set-6 can be used to select a single feature set string name = 1; - // Versions of the given feature sets that will be returned.
- // Valid options for version: - // "latest": only the latest version is returned. - // "*": Subscribe to all versions - // [version number]: pin to a specific version. Project and feature set name must be - // explicitly defined if a specific version is pinned. - string version = 2; + // Feature set version was removed in v0.5.0. + reserved 2; } // Name of the store. @@ -169,5 +166,6 @@ message Store { RedisConfig redis_config = 11; BigQueryConfig bigquery_config = 12; CassandraConfig cassandra_config = 13; + RedisClusterConfig redis_cluster_config = 14; } } diff --git a/protos/feast/serving/ServingService.proto b/protos/feast/serving/ServingService.proto index 5145670ec9..cd7d51bd59 100644 --- a/protos/feast/serving/ServingService.proto +++ b/protos/feast/serving/ServingService.proto @@ -19,12 +19,11 @@ syntax = "proto3"; package feast.serving; import "google/protobuf/timestamp.proto"; -import "google/protobuf/duration.proto"; import "feast/types/Value.proto"; -option java_package = "feast.serving"; +option java_package = "feast.proto.serving"; option java_outer_classname = "ServingAPIProto"; -option go_package = "github.com/gojek/feast/sdk/go/protos/feast/serving"; +option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/serving"; service ServingService { // Get information about this Feast serving. @@ -63,21 +62,19 @@ message GetFeastServingInfoResponse { } message FeatureReference { - // Project name + // Project name. This field is optional; if unspecified, it will default to 'default'. string project = 1; // Feature name string name = 2; - // Feature version - int32 version = 3; + // Feature set name specifying the feature set of this referenced feature. + // This field is optional if the feature referenced is unique across the project, + // in which case the feature set will be automatically inferred. + string feature_set = 5; - // The features will be retrieved if: - // entity_timestamp - max_age <= event_timestamp <= entity_timestamp - // - // If unspecified the default max_age specified in FeatureSetSpec will - // be used. - google.protobuf.Duration max_age = 4; + // Feature version and max_age were removed in v0.5.0 + reserved 3, 4; } message GetOnlineFeaturesRequest { diff --git a/protos/feast/storage/Redis.proto b/protos/feast/storage/Redis.proto index f58b137e9c..04052aa800 100644 --- a/protos/feast/storage/Redis.proto +++ b/protos/feast/storage/Redis.proto @@ -21,14 +21,14 @@ import "feast/types/Field.proto"; package feast.storage; option java_outer_classname = "RedisProto"; -option java_package = "feast.storage"; -option go_package = "github.com/gojek/feast/sdk/go/protos/feast/storage"; +option java_package = "feast.proto.storage"; +option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/storage"; message RedisKey { // Field number 1 is reserved for a future distributing hash if needed // (for when redis is clustered). - // FeatureSet this row belongs to, this is defined as featureSetName:version. + // FeatureSet this row belongs to, this is defined as featureSetName.
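The same de-versioning shows up in storage keys: a serving row is now keyed by the bare feature set name. Below is a sketch of what a RedisKey carries after this change, assuming regenerated stubs under github.com/feast-dev/feast/sdk/go/protos/feast/storage and a repeated entities field as described by the message; the driver_id entity and its value are made up for illustration:

```{go}
package main

import (
	"fmt"

	feast "github.com/feast-dev/feast/sdk/go"
	storage "github.com/feast-dev/feast/sdk/go/protos/feast/storage"
	types "github.com/feast-dev/feast/sdk/go/protos/feast/types"
)

func main() {
	// Before v0.5 FeatureSet would have read "driver:1"; with versions
	// removed it is just the feature set name, so the key space stays
	// stable across schema updates.
	key := &storage.RedisKey{
		FeatureSet: "driver",
		Entities: []*types.Field{
			{Name: "driver_id", Value: feast.Int64Val(42)},
		},
	}
	fmt.Println(key.GetFeatureSet()) // driver
}
```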
string feature_set = 2; // List of fields containing entity names and their respective values diff --git a/protos/feast/types/FeatureRow.proto b/protos/feast/types/FeatureRow.proto index c170cd5d50..fd8a561c7b 100644 --- a/protos/feast/types/FeatureRow.proto +++ b/protos/feast/types/FeatureRow.proto @@ -21,9 +21,9 @@ import "feast/types/Field.proto"; package feast.types; -option java_package = "feast.types"; +option java_package = "feast.proto.types"; option java_outer_classname = "FeatureRowProto"; -option go_package = "github.com/gojek/feast/sdk/go/protos/feast/types"; +option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/types"; message FeatureRow { @@ -36,7 +36,10 @@ message FeatureRow { google.protobuf.Timestamp event_timestamp = 3; // Complete reference to the featureSet this featureRow belongs to, in the form of - // <project>/<feature-set-name>:<version>. This value will be used by the feast ingestion job to filter + // <project>/<feature-set-name>. This value will be used by the feast ingestion job to filter // rows, and write the values to the correct tables. string feature_set = 6; + + // Identifier tying this feature row to a specific ingestion job. + string ingestion_id = 7; } diff --git a/protos/feast/types/FeatureRowExtended.proto b/protos/feast/types/FeatureRowExtended.proto index e88fbb7323..f922fe66bf 100644 --- a/protos/feast/types/FeatureRowExtended.proto +++ b/protos/feast/types/FeatureRowExtended.proto @@ -21,9 +21,9 @@ import "feast/types/FeatureRow.proto"; package feast.types; -option java_package = "feast.types"; +option java_package = "feast.proto.types"; option java_outer_classname = "FeatureRowExtendedProto"; -option go_package = "github.com/gojek/feast/sdk/go/protos/feast/types"; +option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/types"; message Error { string cause = 1; // exception class name diff --git a/protos/feast/types/Field.proto b/protos/feast/types/Field.proto index 3929b16a32..3b8416c253 100644 --- a/protos/feast/types/Field.proto +++ b/protos/feast/types/Field.proto @@ -20,9 +20,9 @@ import "feast/types/Value.proto"; package feast.types; -option java_package = "feast.types"; +option java_package = "feast.proto.types"; option java_outer_classname = "FieldProto"; -option go_package = "github.com/gojek/feast/sdk/go/protos/feast/types"; +option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/types"; message Field { string name = 1; diff --git a/protos/feast/types/Value.proto b/protos/feast/types/Value.proto index 065497f30a..b0beabd796 100644 --- a/protos/feast/types/Value.proto +++ b/protos/feast/types/Value.proto @@ -18,9 +18,9 @@ syntax = "proto3"; package feast.types; -option java_package = "feast.types"; +option java_package = "feast.proto.types"; option java_outer_classname = "ValueProto"; -option go_package = "github.com/gojek/feast/sdk/go/protos/feast/types"; +option go_package = "github.com/feast-dev/feast/sdk/go/protos/feast/types"; message ValueType { enum Enum { diff --git a/protos/tensorflow_metadata/proto/v0/path.proto b/protos/tensorflow_metadata/proto/v0/path.proto index 2d45e1326e..3a4e41bad9 100644 --- a/protos/tensorflow_metadata/proto/v0/path.proto +++ b/protos/tensorflow_metadata/proto/v0/path.proto @@ -20,7 +20,7 @@ package tensorflow.metadata.v0; option java_package = "org.tensorflow.metadata.v0"; option java_multiple_files = true; -option go_package = "github.com/gojek/feast/sdk/go/protos/tensorflow_metadata/proto/v0"; +option go_package = "github.com/feast-dev/feast/sdk/go/protos/tensorflow_metadata/proto/v0"; // A
path is a more general substitute for the name of a field or feature that // can be used for flat examples as well as structured data. For example, if diff --git a/protos/tensorflow_metadata/proto/v0/schema.proto b/protos/tensorflow_metadata/proto/v0/schema.proto index 8d4da75e16..00005ee913 100644 --- a/protos/tensorflow_metadata/proto/v0/schema.proto +++ b/protos/tensorflow_metadata/proto/v0/schema.proto @@ -23,7 +23,7 @@ import "tensorflow_metadata/proto/v0/path.proto"; option cc_enable_arenas = true; option java_package = "org.tensorflow.metadata.v0"; option java_multiple_files = true; -option go_package = "github.com/gojek/feast/sdk/go/protos/tensorflow_metadata/proto/v0"; +option go_package = "github.com/feast-dev/feast/sdk/go/protos/tensorflow_metadata/proto/v0"; // LifecycleStage. Only UNKNOWN_STAGE, BETA, and PRODUCTION features are // actually validated. diff --git a/sdk/go/README.md b/sdk/go/README.md index 6084f90993..79211df4a5 100644 --- a/sdk/go/README.md +++ b/sdk/go/README.md @@ -6,7 +6,7 @@ The Feast golang SDK currently only supports retrieval from online stores. ```{go} import ( "context" - feast "github.com/gojek/feast/sdk/go" + feast "github.com/feast-dev/feast/sdk/go" ) func main() { @@ -17,7 +17,7 @@ func main() { ctx := context.Background() req := feast.OnlineFeaturesRequest{ - Features: []string{"my_project_1/feature1:1", "my_project_2/feature1:1", "my_project_4/feature3", "feature2:2", "feature2"}, + Features: []string{"my_project_1/feature1", "my_project_2/feature1", "my_project_4/feature3", "feature2", "feature2"}, Entities: []feast.Row{ {"entity1": feast.Int64Val(1), "entity2": feast.StrVal("bob")}, {"entity1": feast.Int64Val(1), "entity2": feast.StrVal("annie")}, @@ -40,10 +40,10 @@ func main() { If all features retrieved are of a single type, Feast provides convenience functions to retrieve your features as a vector of feature values: ```{go} arr, err := resp.Int64Arrays( - []string{"my_project_1/feature1:1", - "my_project_2/feature1:1", + []string{"my_project_1/feature1", + "my_project_2/feature1", "my_project_4/feature3", - "feature2:2", + "feature2", "feature2"}, // order of features []int64{1,2,3,4,5}) // fillNa values ``` diff --git a/sdk/go/client.go b/sdk/go/client.go index cb42bf08db..ef8ec8b8cf 100644 --- a/sdk/go/client.go +++ b/sdk/go/client.go @@ -5,7 +5,8 @@ import ( "fmt" "github.com/opentracing/opentracing-go" - "github.com/gojek/feast/sdk/go/protos/feast/serving" + "github.com/feast-dev/feast/sdk/go/protos/feast/serving" + "github.com/feast-dev/feast/sdk/go/protos/feast/types" "google.golang.org/grpc" "go.opencensus.io/plugin/ocgrpc" @@ -50,6 +51,31 @@ func (fc *GrpcClient) GetOnlineFeatures(ctx context.Context, req *OnlineFeatures } resp, err := fc.cli.GetOnlineFeatures(ctx, featuresRequest) + // collect unique entity refs from entity rows + entityRefs := make(map[string]struct{}) + for _, entityRows := range req.Entities { + for ref := range entityRows { + entityRefs[ref] = struct{}{} + } + } + + // strip project qualifiers from feature refs in the response + for _, fieldValue := range resp.GetFieldValues() { + stripFields := make(map[string]*types.Value) + for refStr, value := range fieldValue.Fields { + _, isEntity := entityRefs[refStr] + if !isEntity { // is feature ref + featureRef, err := parseFeatureRef(refStr, true) + if err != nil { + return nil, err + } + refStr = toFeatureRefStr(featureRef) + } + stripFields[refStr] = value + } + fieldValue.Fields = stripFields + } + return &OnlineFeaturesResponse{RawResponse: resp}, err } diff
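Putting the SDK pieces together: requests use the new version-less references, and because GetOnlineFeatures strips project qualifiers from the response, callers read results back by bare feature ref. A usage sketch under the README's assumptions (serving at localhost:6566); NewGrpcClient, Int64Val and Rows are existing SDK helpers, while the exact keys in the returned rows depend on the stripping logic above:

```{go}
package main

import (
	"context"
	"fmt"

	feast "github.com/feast-dev/feast/sdk/go"
)

func main() {
	cli, err := feast.NewGrpcClient("localhost", 6566)
	if err != nil {
		panic(err)
	}

	req := feast.OnlineFeaturesRequest{
		// Version suffixes such as ":1" are gone; a project prefix is optional.
		Features: []string{"my_project_1/feature1", "feature2"},
		Entities: []feast.Row{
			{"entity1": feast.Int64Val(1)},
		},
	}

	resp, err := cli.GetOnlineFeatures(context.Background(), &req)
	if err != nil {
		panic(err)
	}
	// Project qualifiers were stripped from the response fields, so values
	// come back keyed by bare refs, e.g. "feature1" rather than
	// "my_project_1/feature1".
	fmt.Println(resp.Rows())
}
```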
--git a/sdk/go/go.mod b/sdk/go/go.mod index 58998bc5b5..06bd693616 100644 --- a/sdk/go/go.mod +++ b/sdk/go/go.mod @@ -1,4 +1,4 @@ -module github.com/gojek/feast/sdk/go +module github.com/feast-dev/feast/sdk/go go 1.13 diff --git a/sdk/go/protos/feast/core/CoreService.pb.go b/sdk/go/protos/feast/core/CoreService.pb.go index 90e5f7d240..4f22ae4c37 100644 --- a/sdk/go/protos/feast/core/CoreService.pb.go +++ b/sdk/go/protos/feast/core/CoreService.pb.go @@ -16,7 +16,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.21.0 +// protoc-gen-go v1.24.0 // protoc v3.10.0 // source: feast/core/CoreService.proto @@ -45,15 +45,18 @@ const ( // of the legacy proto package is being used. const _ = proto.ProtoPackageIsVersion4 +// TODO: 0 should correspond to invalid rather than NO_CHANGE type ApplyFeatureSetResponse_Status int32 const ( - // Latest feature set version is consistent with provided feature set + // Latest feature set is consistent with provided feature set ApplyFeatureSetResponse_NO_CHANGE ApplyFeatureSetResponse_Status = 0 - // New feature set or feature set version created + // New feature set created ApplyFeatureSetResponse_CREATED ApplyFeatureSetResponse_Status = 1 // Error occurred while trying to apply changes ApplyFeatureSetResponse_ERROR ApplyFeatureSetResponse_Status = 2 + // Changes detected and updated successfully + ApplyFeatureSetResponse_UPDATED ApplyFeatureSetResponse_Status = 3 ) // Enum value maps for ApplyFeatureSetResponse_Status. @@ -62,11 +65,13 @@ var ( 0: "NO_CHANGE", 1: "CREATED", 2: "ERROR", + 3: "UPDATED", } ApplyFeatureSetResponse_Status_value = map[string]int32{ "NO_CHANGE": 0, "CREATED": 1, "ERROR": 2, + "UPDATED": 3, } ) @@ -151,12 +156,10 @@ type GetFeatureSetRequest struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - // Name of project the feature set belongs to (required) + // Name of project the feature set belongs to. If omitted will default to 'default' project. Project string `protobuf:"bytes,3,opt,name=project,proto3" json:"project,omitempty"` // Name of feature set (required). Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - // Version of feature set (optional). If omitted then latest feature set will be returned. - Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` } func (x *GetFeatureSetRequest) Reset() { @@ -205,13 +208,6 @@ func (x *GetFeatureSetRequest) GetName() string { return "" } -func (x *GetFeatureSetRequest) GetVersion() int32 { - if x != nil { - return x.Version - } - return 0 -} - // Response containing a single feature set type GetFeatureSetResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields @@ -454,7 +450,9 @@ type ApplyFeatureSetRequest struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - // Feature set version and source will be ignored + // Feature set to apply. + // If project is unspecified, will default to 'default' project. + // If the specified project does not exist, it will be automatically created.
FeatureSet *FeatureSet `protobuf:"bytes,1,opt,name=feature_set,json=featureSet,proto3" json:"feature_set,omitempty"` } @@ -502,7 +500,6 @@ type ApplyFeatureSetResponse struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - // Feature set response has been enriched with version and source information FeatureSet *FeatureSet `protobuf:"bytes,1,opt,name=feature_set,json=featureSet,proto3" json:"feature_set,omitempty"` Status ApplyFeatureSetResponse_Status `protobuf:"varint,2,opt,name=status,proto3,enum=feast.core.ApplyFeatureSetResponse_Status" json:"status,omitempty"` } @@ -1287,6 +1284,7 @@ type ListFeatureSetsRequest_Filter struct { // If an asterisk is provided, filtering on projects will be disabled. All projects will // be matched. It is NOT possible to provide an asterisk with a string in order to do // pattern matching. + // If unspecified this field will default to the default project 'default'. Project string `protobuf:"bytes,3,opt,name=project,proto3" json:"project,omitempty"` // Name of the desired feature set. Asterisks can be used as wildcards in the name. // Matching on names is only permitted if a specific project is defined. It is disallowed @@ -1296,13 +1294,6 @@ type ListFeatureSetsRequest_Filter struct { // - my-feature-set* can be used to match all features prefixed by "my-feature-set" // - my-feature-set-6 can be used to select a single feature set FeatureSetName string `protobuf:"bytes,1,opt,name=feature_set_name,json=featureSetName,proto3" json:"feature_set_name,omitempty"` - // Versions of the given feature sets that will be returned. - // Valid options for version: - // "latest": only the latest version is returned. - // "*": Subscribe to all versions - // [version number]: pin to a specific version. Project and feature set name must be - // explicitly defined if a specific version is pinned. 
- FeatureSetVersion string `protobuf:"bytes,2,opt,name=feature_set_version,json=featureSetVersion,proto3" json:"feature_set_version,omitempty"` } func (x *ListFeatureSetsRequest_Filter) Reset() { @@ -1351,13 +1342,6 @@ func (x *ListFeatureSetsRequest_Filter) GetFeatureSetName() string { return "" } -func (x *ListFeatureSetsRequest_Filter) GetFeatureSetVersion() string { - if x != nil { - return x.FeatureSetVersion - } - return "" -} - type ListStoresRequest_Filter struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -1485,202 +1469,199 @@ var file_feast_core_CoreService_proto_rawDesc = []byte{ 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1d, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x5e, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, + 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x44, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x76, 0x65, 0x72, - 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x50, 0x0a, 0x15, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x50, 0x0a, 0x15, 0x47, 0x65, + 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, + 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, + 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, + 0x52, 0x0a, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x22, 0xa9, 0x01, 0x0a, + 0x16, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x73, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x41, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, + 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, + 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, + 0x53, 0x65, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x46, 0x69, 0x6c, 0x74, + 0x65, 0x72, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x1a, 0x4c, 0x0a, 0x06, 0x46, 0x69, + 0x6c, 0x74, 0x65, 0x72, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x28, + 0x0a, 0x10, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x5f, 0x6e, 0x61, + 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, + 0x65, 0x53, 0x65, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x54, 0x0a, 0x17, 0x4c, 0x69, 0x73, 0x74, + 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x39, 0x0a, 0x0c, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, + 0x65, 
0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x65, 0x61, 0x73, + 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, + 0x74, 0x52, 0x0b, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x73, 0x22, 0x6f, + 0x0a, 0x11, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x3c, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, + 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x2e, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, + 0x72, 0x1a, 0x1c, 0x0a, 0x06, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, + 0x3d, 0x0a, 0x12, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x27, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, + 0x65, 0x2e, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x22, 0x51, + 0x0a, 0x16, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, + 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x37, 0x0a, 0x0b, 0x66, 0x65, 0x61, 0x74, + 0x75, 0x72, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, + 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, + 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x0a, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, + 0x74, 0x22, 0xd4, 0x01, 0x0a, 0x17, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x0a, 0x66, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x22, 0xd9, 0x01, 0x0a, 0x16, 0x4c, 0x69, 0x73, 0x74, 0x46, + 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x12, 0x42, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2a, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, + 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, + 0x53, 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x53, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x3c, 0x0a, 0x06, 0x53, 0x74, + 0x61, 0x74, 0x75, 0x73, 0x12, 0x0d, 0x0a, 0x09, 0x4e, 0x4f, 0x5f, 0x43, 0x48, 0x41, 0x4e, 0x47, + 0x45, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x43, 0x52, 0x45, 0x41, 0x54, 0x45, 0x44, 0x10, 0x01, + 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x02, 0x12, 0x0b, 0x0a, 0x07, 0x55, + 0x50, 0x44, 0x41, 0x54, 0x45, 0x44, 0x10, 0x03, 0x22, 0x1c, 0x0a, 0x1a, 0x47, 0x65, 0x74, 0x46, + 0x65, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x72, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x37, 0x0a, 0x1b, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, + 0x73, 0x74, 0x43, 0x6f, 0x72, 0x65, 
0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, + 0x3d, 0x0a, 0x12, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, + 0x65, 0x2e, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x22, 0xa4, + 0x01, 0x0a, 0x13, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x27, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, + 0x72, 0x65, 0x2e, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x12, + 0x3e, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, + 0x26, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, + 0x24, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0d, 0x0a, 0x09, 0x4e, 0x4f, 0x5f, + 0x43, 0x48, 0x41, 0x4e, 0x47, 0x45, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x50, 0x44, 0x41, + 0x54, 0x45, 0x44, 0x10, 0x01, 0x22, 0x2a, 0x0a, 0x14, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, + 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x22, 0x17, 0x0a, 0x15, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x6a, 0x65, + 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2b, 0x0a, 0x15, 0x41, 0x72, + 0x63, 0x68, 0x69, 0x76, 0x65, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x18, 0x0a, 0x16, 0x41, 0x72, 0x63, 0x68, 0x69, + 0x76, 0x65, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x22, 0x15, 0x0a, 0x13, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, + 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x32, 0x0a, 0x14, 0x4c, 0x69, 0x73, 0x74, + 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x09, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x22, 0xee, 0x01, 0x0a, + 0x18, 0x4c, 0x69, 0x73, 0x74, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, + 0x62, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x43, 0x0a, 0x06, 0x66, 0x69, 0x6c, + 0x74, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x66, 0x65, 0x61, 0x73, + 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x49, 0x6e, 0x67, 0x65, 0x73, + 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, + 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x52, 0x06, 0x66, 0x69, 
0x6c, 0x74, 0x65, 0x72, 0x1a, 0x8c, + 0x01, 0x0a, 0x06, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x53, 0x0a, 0x15, 0x66, 0x65, 0x61, + 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, + 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, + 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, + 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x13, 0x66, 0x65, 0x61, 0x74, 0x75, + 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x1d, + 0x0a, 0x0a, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x09, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x49, 0x0a, + 0x19, 0x4c, 0x69, 0x73, 0x74, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, + 0x62, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2c, 0x0a, 0x04, 0x6a, 0x6f, + 0x62, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, + 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, + 0x6f, 0x62, 0x52, 0x04, 0x6a, 0x6f, 0x62, 0x73, 0x22, 0x2c, 0x0a, 0x1a, 0x52, 0x65, 0x73, 0x74, + 0x61, 0x72, 0x74, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x1d, 0x0a, 0x1b, 0x52, 0x65, 0x73, 0x74, 0x61, 0x72, + 0x74, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x29, 0x0a, 0x17, 0x53, 0x74, 0x6f, 0x70, 0x49, 0x6e, 0x67, + 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, + 0x22, 0x1a, 0x0a, 0x18, 0x53, 0x74, 0x6f, 0x70, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, + 0x6e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0xcb, 0x08, 0x0a, + 0x0b, 0x43, 0x6f, 0x72, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x66, 0x0a, 0x13, + 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x72, 0x65, 0x56, 0x65, 0x72, 0x73, + 0x69, 0x6f, 0x6e, 0x12, 0x26, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, + 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x72, 0x65, 0x56, 0x65, 0x72, + 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x66, 0x65, + 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, + 0x74, 0x43, 0x6f, 0x72, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x54, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, + 0x72, 0x65, 0x53, 0x65, 0x74, 0x12, 0x20, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, + 0x72, 0x65, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, + 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, + 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x5a, 0x0a, 0x0f, 
0x4c, 0x69, + 0x73, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x73, 0x12, 0x22, 0x2e, + 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x41, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x29, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, + 0x74, 0x1a, 0x23, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x73, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x52, 0x06, 0x66, 0x69, - 0x6c, 0x74, 0x65, 0x72, 0x1a, 0x7c, 0x0a, 0x06, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x18, - 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x28, 0x0a, 0x10, 0x66, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x0e, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x4e, 0x61, - 0x6d, 0x65, 0x12, 0x2e, 0x0a, 0x13, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x65, - 0x74, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x11, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x56, 0x65, 0x72, 0x73, 0x69, - 0x6f, 0x6e, 0x22, 0x54, 0x0a, 0x17, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x53, 0x65, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x39, 0x0a, - 0x0c, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x0b, 0x66, 0x65, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x73, 0x22, 0x6f, 0x0a, 0x11, 0x4c, 0x69, 0x73, 0x74, - 0x53, 0x74, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3c, 0x0a, - 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x53, - 0x74, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x46, 0x69, 0x6c, - 0x74, 0x65, 0x72, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x1a, 0x1c, 0x0a, 0x06, 0x46, - 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x3d, 0x0a, 0x12, 0x4c, 0x69, 0x73, - 0x74, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, - 0x27, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, - 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, 0x6f, 0x72, - 0x65, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x22, 0x51, 0x0a, 0x16, 0x41, 0x70, 0x70, 0x6c, - 0x79, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x37, 0x0a, 0x0b, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x65, - 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x63, 0x6f, 
0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, - 0x0a, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x22, 0xc7, 0x01, 0x0a, 0x17, - 0x41, 0x70, 0x70, 0x6c, 0x79, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x66, 0x65, 0x61, 0x74, 0x75, - 0x72, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x53, 0x65, 0x74, 0x52, 0x0a, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, - 0x12, 0x42, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x2a, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x70, - 0x70, 0x6c, 0x79, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x22, 0x2f, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0d, - 0x0a, 0x09, 0x4e, 0x4f, 0x5f, 0x43, 0x48, 0x41, 0x4e, 0x47, 0x45, 0x10, 0x00, 0x12, 0x0b, 0x0a, - 0x07, 0x43, 0x52, 0x45, 0x41, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, - 0x52, 0x4f, 0x52, 0x10, 0x02, 0x22, 0x1c, 0x0a, 0x1a, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, - 0x74, 0x43, 0x6f, 0x72, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x22, 0x37, 0x0a, 0x1b, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, 0x43, - 0x6f, 0x72, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x3d, 0x0a, 0x12, - 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x27, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x11, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, - 0x74, 0x6f, 0x72, 0x65, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x22, 0xa4, 0x01, 0x0a, 0x13, - 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x27, 0x0a, 0x05, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, - 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x05, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x12, 0x3e, 0x0a, 0x06, - 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x26, 0x2e, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, - 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x53, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x24, 0x0a, 0x06, - 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0d, 0x0a, 0x09, 0x4e, 0x4f, 0x5f, 0x43, 0x48, 0x41, - 0x4e, 0x47, 0x45, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x50, 0x44, 0x41, 0x54, 0x45, 0x44, - 0x10, 0x01, 0x22, 0x2a, 0x0a, 0x14, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x6a, - 0x65, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, - 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 
0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x17, - 0x0a, 0x15, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2b, 0x0a, 0x15, 0x41, 0x72, 0x63, 0x68, 0x69, - 0x76, 0x65, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x18, 0x0a, 0x16, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x50, - 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x15, - 0x0a, 0x13, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x32, 0x0a, 0x14, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x72, 0x6f, - 0x6a, 0x65, 0x63, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1a, 0x0a, - 0x08, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, - 0x08, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x22, 0xee, 0x01, 0x0a, 0x18, 0x4c, 0x69, - 0x73, 0x74, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x73, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x43, 0x0a, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, - 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, - 0x6e, 0x4a, 0x6f, 0x62, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x46, 0x69, 0x6c, - 0x74, 0x65, 0x72, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x1a, 0x8c, 0x01, 0x0a, 0x06, - 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x53, 0x0a, 0x15, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x5f, 0x73, 0x65, 0x74, 0x5f, 0x72, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, - 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x65, 0x66, - 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x13, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, - 0x65, 0x74, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x73, - 0x74, 0x6f, 0x72, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x09, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x49, 0x0a, 0x19, 0x4c, 0x69, - 0x73, 0x74, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x73, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2c, 0x0a, 0x04, 0x6a, 0x6f, 0x62, 0x73, 0x18, - 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, - 0x72, 0x65, 0x2e, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x52, - 0x04, 0x6a, 0x6f, 0x62, 0x73, 0x22, 0x2c, 0x0a, 0x1a, 0x52, 0x65, 0x73, 0x74, 0x61, 0x72, 0x74, - 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x02, 0x69, 0x64, 0x22, 0x1d, 0x0a, 0x1b, 0x52, 0x65, 0x73, 0x74, 0x61, 0x72, 0x74, 0x49, 0x6e, - 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x22, 0x29, 0x0a, 0x17, 0x53, 0x74, 0x6f, 0x70, 
0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, - 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, - 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x1a, 0x0a, - 0x18, 0x53, 0x74, 0x6f, 0x70, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, - 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0xcb, 0x08, 0x0a, 0x0b, 0x43, 0x6f, - 0x72, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x66, 0x0a, 0x13, 0x47, 0x65, 0x74, - 0x46, 0x65, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x72, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, - 0x12, 0x26, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x47, 0x65, - 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x72, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, - 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, 0x43, 0x6f, - 0x72, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x12, 0x54, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, - 0x65, 0x74, 0x12, 0x20, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, - 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, - 0x65, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x5a, 0x0a, 0x0f, 0x4c, 0x69, 0x73, 0x74, 0x46, - 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x73, 0x12, 0x22, 0x2e, 0x66, 0x65, 0x61, - 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x23, - 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, - 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x4b, 0x0a, 0x0a, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x6f, 0x72, 0x65, - 0x73, 0x12, 0x1d, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, - 0x69, 0x73, 0x74, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x1e, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, - 0x73, 0x74, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x12, 0x5a, 0x0a, 0x0f, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, - 0x53, 0x65, 0x74, 0x12, 0x22, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x23, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x53, 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4e, 0x0a, 0x0b, - 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x12, 0x1e, 0x2e, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, - 0x74, 0x6f, 0x72, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 
0x66, 0x65, - 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, - 0x74, 0x6f, 0x72, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x54, 0x0a, 0x0d, - 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x20, 0x2e, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, - 0x65, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x21, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x72, 0x65, - 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x12, 0x57, 0x0a, 0x0e, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x50, 0x72, 0x6f, - 0x6a, 0x65, 0x63, 0x74, 0x12, 0x21, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, - 0x65, 0x2e, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4b, 0x0a, 0x0a, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, + 0x6f, 0x72, 0x65, 0x73, 0x12, 0x1d, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, + 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, + 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x5a, 0x0a, 0x0f, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x46, 0x65, 0x61, 0x74, + 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x12, 0x22, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, + 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, + 0x53, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x23, 0x2e, 0x66, 0x65, 0x61, + 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x46, 0x65, 0x61, + 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, + 0x4e, 0x0a, 0x0b, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x12, 0x1e, + 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, + 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, + 0x54, 0x0a, 0x0d, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, + 0x12, 0x20, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x72, + 0x65, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, + 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x57, 0x0a, 0x0e, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, + 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x21, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x50, 0x72, 0x6f, 0x6a, - 0x65, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x51, 0x0a, 0x0c, 0x4c, - 0x69, 0x73, 
0x74, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x12, 0x1f, 0x2e, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x72, 0x6f, - 0x6a, 0x65, 0x63, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x72, - 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x60, - 0x0a, 0x11, 0x4c, 0x69, 0x73, 0x74, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, - 0x6f, 0x62, 0x73, 0x12, 0x24, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, - 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, - 0x62, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25, 0x2e, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x49, 0x6e, 0x67, 0x65, 0x73, - 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x12, 0x66, 0x0a, 0x13, 0x52, 0x65, 0x73, 0x74, 0x61, 0x72, 0x74, 0x49, 0x6e, 0x67, 0x65, 0x73, - 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x12, 0x26, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x74, 0x61, 0x72, 0x74, 0x49, 0x6e, 0x67, 0x65, - 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x27, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x73, - 0x74, 0x61, 0x72, 0x74, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x5d, 0x0a, 0x10, 0x53, 0x74, 0x6f, 0x70, - 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x12, 0x23, 0x2e, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, 0x6f, 0x70, 0x49, 0x6e, - 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x24, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, - 0x74, 0x6f, 0x70, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x4f, 0x0a, 0x0a, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, 0x10, 0x43, 0x6f, 0x72, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, - 0x63, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, - 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6a, 0x65, 0x6b, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, - 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x65, 0x63, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x66, 0x65, 0x61, + 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x50, + 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x51, + 0x0a, 0x0c, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x12, 0x1f, + 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, + 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x20, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, + 0x74, 0x50, 0x72, 0x6f, 0x6a, 0x65, 
0x63, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x60, 0x0a, 0x11, 0x4c, 0x69, 0x73, 0x74, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, + 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x73, 0x12, 0x24, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, + 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, + 0x6e, 0x4a, 0x6f, 0x62, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25, 0x2e, 0x66, + 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x49, 0x6e, + 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x13, 0x52, 0x65, 0x73, 0x74, 0x61, 0x72, 0x74, 0x49, 0x6e, + 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x12, 0x26, 0x2e, 0x66, 0x65, 0x61, + 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x74, 0x61, 0x72, 0x74, 0x49, + 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, + 0x52, 0x65, 0x73, 0x74, 0x61, 0x72, 0x74, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, + 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x5d, 0x0a, 0x10, 0x53, + 0x74, 0x6f, 0x70, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x12, + 0x23, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, 0x6f, + 0x70, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x24, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, + 0x65, 0x2e, 0x53, 0x74, 0x6f, 0x70, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, + 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x59, 0x0a, 0x10, 0x66, 0x65, + 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, 0x10, + 0x43, 0x6f, 0x72, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, + 0x5a, 0x33, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, + 0x73, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, + 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, + 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -2166,8 +2147,11 @@ type CoreServiceClient interface { // Create or update an existing feature set. // // This function is idempotent - it will not create a new feature set if schema does not change. - // If an existing feature set is updated, core will advance the version number, which will be - // returned in response. + // Schema changes will update the feature set if the changes are valid. + // All changes except the following are valid: + // - Changes to feature set id (name, project) + // - Changes to entities + // - Changes to feature name and type ApplyFeatureSet(ctx context.Context, in *ApplyFeatureSetRequest, opts ...grpc.CallOption) (*ApplyFeatureSetResponse, error) // Updates core with the configuration of the store. // @@ -2175,8 +2159,8 @@ // start or update the necessary feature population jobs for the updated store. UpdateStore(ctx context.Context, in *UpdateStoreRequest, opts ...grpc.CallOption) (*UpdateStoreResponse, error) // Creates a project.
Projects serve as namespaces within which resources like features will be - // created. Both feature set names as well as field names must be unique within a project. Project - // names themselves must be globally unique. + // created. Feature set names must be unique within a project while field (Feature/Entity) names + // must be unique within a Feature Set. Project names themselves must be globally unique. CreateProject(ctx context.Context, in *CreateProjectRequest, opts ...grpc.CallOption) (*CreateProjectResponse, error) // Archives a project. Archived projects will continue to exist and function, but won't be visible // through the Core API. Any existing ingestion or serving requests will continue to function, @@ -2339,8 +2323,11 @@ type CoreServiceServer interface { // Create or update an existing feature set. // // This function is idempotent - it will not create a new feature set if schema does not change. - // If an existing feature set is updated, core will advance the version number, which will be - // returned in response. + // Schema changes will update the feature set if the changes are valid. + // All changes except the following are valid: + // - Changes to feature set id (name, project) + // - Changes to entities + // - Changes to feature name and type ApplyFeatureSet(context.Context, *ApplyFeatureSetRequest) (*ApplyFeatureSetResponse, error) // Updates core with the configuration of the store. // @@ -2348,8 +2335,8 @@ // start or update the necessary feature population jobs for the updated store. UpdateStore(context.Context, *UpdateStoreRequest) (*UpdateStoreResponse, error) // Creates a project. Projects serve as namespaces within which resources like features will be - // created. Both feature set names as well as field names must be unique within a project. Project - // names themselves must be globally unique. + // created. Feature set names must be unique within a project while field (Feature/Entity) names + // must be unique within a Feature Set. Project names themselves must be globally unique. CreateProject(context.Context, *CreateProjectRequest) (*CreateProjectResponse, error) // Archives a project. Archived projects will continue to exist and function, but won't be visible // through the Core API. Any existing ingestion or serving requests will continue to function, diff --git a/sdk/go/protos/feast/core/FeatureSet.pb.go b/sdk/go/protos/feast/core/FeatureSet.pb.go index bbf79e7d2a..acd111614d 100644 --- a/sdk/go/protos/feast/core/FeatureSet.pb.go +++ b/sdk/go/protos/feast/core/FeatureSet.pb.go @@ -16,15 +16,15 @@ // Code generated by protoc-gen-go. DO NOT EDIT.
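Taken together, the revised ApplyFeatureSet comments above describe a contract worth seeing end to end: the call is an idempotent upsert, and only identity (name/project), entity, and feature name/type changes are rejected. Below is a minimal sketch, not part of this diff, of how a caller might exercise that contract through the generated Go client; the Core address localhost:6565 and the driver/trips_today names are illustrative assumptions.

package main

import (
	"context"
	"log"

	core "github.com/feast-dev/feast/sdk/go/protos/feast/core"
	types "github.com/feast-dev/feast/sdk/go/protos/feast/types"
	"google.golang.org/grpc"
)

func main() {
	// Assumed Core address; adjust for your deployment.
	conn, err := grpc.Dial("localhost:6565", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
	client := core.NewCoreServiceClient(conn)

	// The spec no longer carries a version field; labels are a plain map.
	resp, err := client.ApplyFeatureSet(context.Background(), &core.ApplyFeatureSetRequest{
		FeatureSet: &core.FeatureSet{
			Spec: &core.FeatureSetSpec{
				Project: "default",
				Name:    "driver", // changing Name or Project counts as an id change and is rejected
				Entities: []*core.EntitySpec{
					{Name: "driver_id", ValueType: types.ValueType_INT64},
				},
				Features: []*core.FeatureSpec{
					{
						Name:      "trips_today",
						ValueType: types.ValueType_INT32,
						Labels:    map[string]string{"team": "data"}, // new user-defined metadata
					},
				},
			},
		},
	})
	if err != nil {
		log.Fatal(err)
	}
	// NO_CHANGE signals the idempotent no-op path; CREATED signals a new or updated set.
	if resp.GetStatus() == core.ApplyFeatureSetResponse_NO_CHANGE {
		log.Println("schema unchanged")
	} else {
		log.Println("status:", resp.GetStatus())
	}
}

If the submitted spec matches the stored one, the response carries NO_CHANGE rather than an advanced version number, which lines up with the removal of the FeatureSetSpec version field in the FeatureSet.pb.go hunks that follow.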
// versions: -// protoc-gen-go v1.21.0 -// protoc v3.10.1 +// protoc-gen-go v1.24.0 +// protoc v3.10.0 // source: feast/core/FeatureSet.proto package core import ( - types "github.com/gojek/feast/sdk/go/protos/feast/types" - v0 "github.com/gojek/feast/sdk/go/protos/tensorflow_metadata/proto/v0" + types "github.com/feast-dev/feast/sdk/go/protos/feast/types" + v0 "github.com/feast-dev/feast/sdk/go/protos/tensorflow_metadata/proto/v0" proto "github.com/golang/protobuf/proto" duration "github.com/golang/protobuf/ptypes/duration" timestamp "github.com/golang/protobuf/ptypes/timestamp" @@ -48,9 +48,10 @@ const _ = proto.ProtoPackageIsVersion4 type FeatureSetStatus int32 const ( - FeatureSetStatus_STATUS_INVALID FeatureSetStatus = 0 - FeatureSetStatus_STATUS_PENDING FeatureSetStatus = 1 - FeatureSetStatus_STATUS_READY FeatureSetStatus = 2 + FeatureSetStatus_STATUS_INVALID FeatureSetStatus = 0 + FeatureSetStatus_STATUS_PENDING FeatureSetStatus = 1 + FeatureSetStatus_STATUS_JOB_STARTING FeatureSetStatus = 3 + FeatureSetStatus_STATUS_READY FeatureSetStatus = 2 ) // Enum value maps for FeatureSetStatus. @@ -58,12 +59,14 @@ var ( FeatureSetStatus_name = map[int32]string{ 0: "STATUS_INVALID", 1: "STATUS_PENDING", + 3: "STATUS_JOB_STARTING", 2: "STATUS_READY", } FeatureSetStatus_value = map[string]int32{ - "STATUS_INVALID": 0, - "STATUS_PENDING": 1, - "STATUS_READY": 2, + "STATUS_INVALID": 0, + "STATUS_PENDING": 1, + "STATUS_JOB_STARTING": 3, + "STATUS_READY": 2, } ) @@ -160,8 +163,6 @@ type FeatureSetSpec struct { Project string `protobuf:"bytes,7,opt,name=project,proto3" json:"project,omitempty"` // Name of the feature set. Must be unique. Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - // Feature set version. - Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` // List of entities contained within this featureSet. // This allows the feature to be used during joins between feature sets. // If the featureSet is ingested into a store that supports keys, this value @@ -176,6 +177,8 @@ type FeatureSetSpec struct { // Optional. Source on which feature rows can be found. // If not set, source will be set to the default value configured in Feast Core. Source *Source `protobuf:"bytes,6,opt,name=source,proto3" json:"source,omitempty"` + // User defined metadata + Labels map[string]string `protobuf:"bytes,8,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (x *FeatureSetSpec) Reset() { @@ -224,13 +227,6 @@ func (x *FeatureSetSpec) GetName() string { return "" } -func (x *FeatureSetSpec) GetVersion() int32 { - if x != nil { - return x.Version - } - return 0 -} - func (x *FeatureSetSpec) GetEntities() []*EntitySpec { if x != nil { return x.Entities @@ -259,6 +255,13 @@ func (x *FeatureSetSpec) GetSource() *Source { return nil } +func (x *FeatureSetSpec) GetLabels() map[string]string { + if x != nil { + return x.Labels + } + return nil +} + type EntitySpec struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -266,35 +269,8 @@ type EntitySpec struct { // Name of the entity. Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - // Value type of the feature. + // Value type of the entity. 
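The FeatureSetStatus change above adds STATUS_JOB_STARTING (value 3) between STATUS_PENDING and STATUS_READY, giving pollers one more transient state to wait through. A hedged sketch of how a consumer could classify the enum; the helper name and the classification policy are illustrative rather than part of the generated code, and it assumes the core package from this diff is imported.

// isTransient reports whether a feature set is still on its way to READY.
func isTransient(s core.FeatureSetStatus) bool {
	switch s {
	case core.FeatureSetStatus_STATUS_PENDING,
		core.FeatureSetStatus_STATUS_JOB_STARTING:
		return true // population job not yet running; worth polling again
	case core.FeatureSetStatus_STATUS_READY:
		return false // feature set is being populated and can serve
	default:
		return false // STATUS_INVALID or unknown values: treat as terminal
	}
}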
ValueType types.ValueType_Enum `protobuf:"varint,2,opt,name=value_type,json=valueType,proto3,enum=feast.types.ValueType_Enum" json:"value_type,omitempty"` - // Types that are assignable to PresenceConstraints: - // *EntitySpec_Presence - // *EntitySpec_GroupPresence - PresenceConstraints isEntitySpec_PresenceConstraints `protobuf_oneof:"presence_constraints"` - // The shape of the feature which governs the number of values that appear in - // each example. - // - // Types that are assignable to ShapeType: - // *EntitySpec_Shape - // *EntitySpec_ValueCount - ShapeType isEntitySpec_ShapeType `protobuf_oneof:"shape_type"` - // Domain for the values of the feature. - // - // Types that are assignable to DomainInfo: - // *EntitySpec_Domain - // *EntitySpec_IntDomain - // *EntitySpec_FloatDomain - // *EntitySpec_StringDomain - // *EntitySpec_BoolDomain - // *EntitySpec_StructDomain - // *EntitySpec_NaturalLanguageDomain - // *EntitySpec_ImageDomain - // *EntitySpec_MidDomain - // *EntitySpec_UrlDomain - // *EntitySpec_TimeDomain - // *EntitySpec_TimeOfDayDomain - DomainInfo isEntitySpec_DomainInfo `protobuf_oneof:"domain_info"` } func (x *EntitySpec) Reset() { @@ -343,256 +319,6 @@ func (x *EntitySpec) GetValueType() types.ValueType_Enum { return types.ValueType_INVALID } -func (m *EntitySpec) GetPresenceConstraints() isEntitySpec_PresenceConstraints { - if m != nil { - return m.PresenceConstraints - } - return nil -} - -func (x *EntitySpec) GetPresence() *v0.FeaturePresence { - if x, ok := x.GetPresenceConstraints().(*EntitySpec_Presence); ok { - return x.Presence - } - return nil -} - -func (x *EntitySpec) GetGroupPresence() *v0.FeaturePresenceWithinGroup { - if x, ok := x.GetPresenceConstraints().(*EntitySpec_GroupPresence); ok { - return x.GroupPresence - } - return nil -} - -func (m *EntitySpec) GetShapeType() isEntitySpec_ShapeType { - if m != nil { - return m.ShapeType - } - return nil -} - -func (x *EntitySpec) GetShape() *v0.FixedShape { - if x, ok := x.GetShapeType().(*EntitySpec_Shape); ok { - return x.Shape - } - return nil -} - -func (x *EntitySpec) GetValueCount() *v0.ValueCount { - if x, ok := x.GetShapeType().(*EntitySpec_ValueCount); ok { - return x.ValueCount - } - return nil -} - -func (m *EntitySpec) GetDomainInfo() isEntitySpec_DomainInfo { - if m != nil { - return m.DomainInfo - } - return nil -} - -func (x *EntitySpec) GetDomain() string { - if x, ok := x.GetDomainInfo().(*EntitySpec_Domain); ok { - return x.Domain - } - return "" -} - -func (x *EntitySpec) GetIntDomain() *v0.IntDomain { - if x, ok := x.GetDomainInfo().(*EntitySpec_IntDomain); ok { - return x.IntDomain - } - return nil -} - -func (x *EntitySpec) GetFloatDomain() *v0.FloatDomain { - if x, ok := x.GetDomainInfo().(*EntitySpec_FloatDomain); ok { - return x.FloatDomain - } - return nil -} - -func (x *EntitySpec) GetStringDomain() *v0.StringDomain { - if x, ok := x.GetDomainInfo().(*EntitySpec_StringDomain); ok { - return x.StringDomain - } - return nil -} - -func (x *EntitySpec) GetBoolDomain() *v0.BoolDomain { - if x, ok := x.GetDomainInfo().(*EntitySpec_BoolDomain); ok { - return x.BoolDomain - } - return nil -} - -func (x *EntitySpec) GetStructDomain() *v0.StructDomain { - if x, ok := x.GetDomainInfo().(*EntitySpec_StructDomain); ok { - return x.StructDomain - } - return nil -} - -func (x *EntitySpec) GetNaturalLanguageDomain() *v0.NaturalLanguageDomain { - if x, ok := x.GetDomainInfo().(*EntitySpec_NaturalLanguageDomain); ok { - return x.NaturalLanguageDomain - } - return nil -} - -func (x *EntitySpec) 
GetImageDomain() *v0.ImageDomain { - if x, ok := x.GetDomainInfo().(*EntitySpec_ImageDomain); ok { - return x.ImageDomain - } - return nil -} - -func (x *EntitySpec) GetMidDomain() *v0.MIDDomain { - if x, ok := x.GetDomainInfo().(*EntitySpec_MidDomain); ok { - return x.MidDomain - } - return nil -} - -func (x *EntitySpec) GetUrlDomain() *v0.URLDomain { - if x, ok := x.GetDomainInfo().(*EntitySpec_UrlDomain); ok { - return x.UrlDomain - } - return nil -} - -func (x *EntitySpec) GetTimeDomain() *v0.TimeDomain { - if x, ok := x.GetDomainInfo().(*EntitySpec_TimeDomain); ok { - return x.TimeDomain - } - return nil -} - -func (x *EntitySpec) GetTimeOfDayDomain() *v0.TimeOfDayDomain { - if x, ok := x.GetDomainInfo().(*EntitySpec_TimeOfDayDomain); ok { - return x.TimeOfDayDomain - } - return nil -} - -type isEntitySpec_PresenceConstraints interface { - isEntitySpec_PresenceConstraints() -} - -type EntitySpec_Presence struct { - // Constraints on the presence of this feature in the examples. - Presence *v0.FeaturePresence `protobuf:"bytes,3,opt,name=presence,proto3,oneof"` -} - -type EntitySpec_GroupPresence struct { - // Only used in the context of a "group" context, e.g., inside a sequence. - GroupPresence *v0.FeaturePresenceWithinGroup `protobuf:"bytes,4,opt,name=group_presence,json=groupPresence,proto3,oneof"` -} - -func (*EntitySpec_Presence) isEntitySpec_PresenceConstraints() {} - -func (*EntitySpec_GroupPresence) isEntitySpec_PresenceConstraints() {} - -type isEntitySpec_ShapeType interface { - isEntitySpec_ShapeType() -} - -type EntitySpec_Shape struct { - // The feature has a fixed shape corresponding to a multi-dimensional - // tensor. - Shape *v0.FixedShape `protobuf:"bytes,5,opt,name=shape,proto3,oneof"` -} - -type EntitySpec_ValueCount struct { - // The feature doesn't have a well defined shape. All we know are limits on - // the minimum and maximum number of values. - ValueCount *v0.ValueCount `protobuf:"bytes,6,opt,name=value_count,json=valueCount,proto3,oneof"` -} - -func (*EntitySpec_Shape) isEntitySpec_ShapeType() {} - -func (*EntitySpec_ValueCount) isEntitySpec_ShapeType() {} - -type isEntitySpec_DomainInfo interface { - isEntitySpec_DomainInfo() -} - -type EntitySpec_Domain struct { - // Reference to a domain defined at the schema level. - Domain string `protobuf:"bytes,7,opt,name=domain,proto3,oneof"` -} - -type EntitySpec_IntDomain struct { - // Inline definitions of domains. - IntDomain *v0.IntDomain `protobuf:"bytes,8,opt,name=int_domain,json=intDomain,proto3,oneof"` -} - -type EntitySpec_FloatDomain struct { - FloatDomain *v0.FloatDomain `protobuf:"bytes,9,opt,name=float_domain,json=floatDomain,proto3,oneof"` -} - -type EntitySpec_StringDomain struct { - StringDomain *v0.StringDomain `protobuf:"bytes,10,opt,name=string_domain,json=stringDomain,proto3,oneof"` -} - -type EntitySpec_BoolDomain struct { - BoolDomain *v0.BoolDomain `protobuf:"bytes,11,opt,name=bool_domain,json=boolDomain,proto3,oneof"` -} - -type EntitySpec_StructDomain struct { - StructDomain *v0.StructDomain `protobuf:"bytes,12,opt,name=struct_domain,json=structDomain,proto3,oneof"` -} - -type EntitySpec_NaturalLanguageDomain struct { - // Supported semantic domains. 
- NaturalLanguageDomain *v0.NaturalLanguageDomain `protobuf:"bytes,13,opt,name=natural_language_domain,json=naturalLanguageDomain,proto3,oneof"` -} - -type EntitySpec_ImageDomain struct { - ImageDomain *v0.ImageDomain `protobuf:"bytes,14,opt,name=image_domain,json=imageDomain,proto3,oneof"` -} - -type EntitySpec_MidDomain struct { - MidDomain *v0.MIDDomain `protobuf:"bytes,15,opt,name=mid_domain,json=midDomain,proto3,oneof"` -} - -type EntitySpec_UrlDomain struct { - UrlDomain *v0.URLDomain `protobuf:"bytes,16,opt,name=url_domain,json=urlDomain,proto3,oneof"` -} - -type EntitySpec_TimeDomain struct { - TimeDomain *v0.TimeDomain `protobuf:"bytes,17,opt,name=time_domain,json=timeDomain,proto3,oneof"` -} - -type EntitySpec_TimeOfDayDomain struct { - TimeOfDayDomain *v0.TimeOfDayDomain `protobuf:"bytes,18,opt,name=time_of_day_domain,json=timeOfDayDomain,proto3,oneof"` -} - -func (*EntitySpec_Domain) isEntitySpec_DomainInfo() {} - -func (*EntitySpec_IntDomain) isEntitySpec_DomainInfo() {} - -func (*EntitySpec_FloatDomain) isEntitySpec_DomainInfo() {} - -func (*EntitySpec_StringDomain) isEntitySpec_DomainInfo() {} - -func (*EntitySpec_BoolDomain) isEntitySpec_DomainInfo() {} - -func (*EntitySpec_StructDomain) isEntitySpec_DomainInfo() {} - -func (*EntitySpec_NaturalLanguageDomain) isEntitySpec_DomainInfo() {} - -func (*EntitySpec_ImageDomain) isEntitySpec_DomainInfo() {} - -func (*EntitySpec_MidDomain) isEntitySpec_DomainInfo() {} - -func (*EntitySpec_UrlDomain) isEntitySpec_DomainInfo() {} - -func (*EntitySpec_TimeDomain) isEntitySpec_DomainInfo() {} - -func (*EntitySpec_TimeOfDayDomain) isEntitySpec_DomainInfo() {} - type FeatureSpec struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -602,6 +328,8 @@ type FeatureSpec struct { Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // Value type of the feature. ValueType types.ValueType_Enum `protobuf:"varint,2,opt,name=value_type,json=valueType,proto3,enum=feast.types.ValueType_Enum" json:"value_type,omitempty"` + // Labels for user defined metadata on a feature + Labels map[string]string `protobuf:"bytes,16,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` // Types that are assignable to PresenceConstraints: // *FeatureSpec_Presence // *FeatureSpec_GroupPresence @@ -677,6 +405,13 @@ func (x *FeatureSpec) GetValueType() types.ValueType_Enum { return types.ValueType_INVALID } +func (x *FeatureSpec) GetLabels() map[string]string { + if x != nil { + return x.Labels + } + return nil +} + func (m *FeatureSpec) GetPresenceConstraints() isFeatureSpec_PresenceConstraints { if m != nil { return m.PresenceConstraints @@ -816,12 +551,12 @@ type isFeatureSpec_PresenceConstraints interface { type FeatureSpec_Presence struct { // Constraints on the presence of this feature in the examples. - Presence *v0.FeaturePresence `protobuf:"bytes,3,opt,name=presence,proto3,oneof"` + Presence *v0.FeaturePresence `protobuf:"bytes,30,opt,name=presence,proto3,oneof"` } type FeatureSpec_GroupPresence struct { // Only used in the context of a "group" context, e.g., inside a sequence. 
- GroupPresence *v0.FeaturePresenceWithinGroup `protobuf:"bytes,4,opt,name=group_presence,json=groupPresence,proto3,oneof"` + GroupPresence *v0.FeaturePresenceWithinGroup `protobuf:"bytes,31,opt,name=group_presence,json=groupPresence,proto3,oneof"` } func (*FeatureSpec_Presence) isFeatureSpec_PresenceConstraints() {} @@ -835,13 +570,13 @@ type isFeatureSpec_ShapeType interface { type FeatureSpec_Shape struct { // The feature has a fixed shape corresponding to a multi-dimensional // tensor. - Shape *v0.FixedShape `protobuf:"bytes,5,opt,name=shape,proto3,oneof"` + Shape *v0.FixedShape `protobuf:"bytes,32,opt,name=shape,proto3,oneof"` } type FeatureSpec_ValueCount struct { // The feature doesn't have a well defined shape. All we know are limits on // the minimum and maximum number of values. - ValueCount *v0.ValueCount `protobuf:"bytes,6,opt,name=value_count,json=valueCount,proto3,oneof"` + ValueCount *v0.ValueCount `protobuf:"bytes,33,opt,name=value_count,json=valueCount,proto3,oneof"` } func (*FeatureSpec_Shape) isFeatureSpec_ShapeType() {} @@ -854,53 +589,53 @@ type isFeatureSpec_DomainInfo interface { type FeatureSpec_Domain struct { // Reference to a domain defined at the schema level. - Domain string `protobuf:"bytes,7,opt,name=domain,proto3,oneof"` + Domain string `protobuf:"bytes,34,opt,name=domain,proto3,oneof"` } type FeatureSpec_IntDomain struct { // Inline definitions of domains. - IntDomain *v0.IntDomain `protobuf:"bytes,8,opt,name=int_domain,json=intDomain,proto3,oneof"` + IntDomain *v0.IntDomain `protobuf:"bytes,35,opt,name=int_domain,json=intDomain,proto3,oneof"` } type FeatureSpec_FloatDomain struct { - FloatDomain *v0.FloatDomain `protobuf:"bytes,9,opt,name=float_domain,json=floatDomain,proto3,oneof"` + FloatDomain *v0.FloatDomain `protobuf:"bytes,36,opt,name=float_domain,json=floatDomain,proto3,oneof"` } type FeatureSpec_StringDomain struct { - StringDomain *v0.StringDomain `protobuf:"bytes,10,opt,name=string_domain,json=stringDomain,proto3,oneof"` + StringDomain *v0.StringDomain `protobuf:"bytes,37,opt,name=string_domain,json=stringDomain,proto3,oneof"` } type FeatureSpec_BoolDomain struct { - BoolDomain *v0.BoolDomain `protobuf:"bytes,11,opt,name=bool_domain,json=boolDomain,proto3,oneof"` + BoolDomain *v0.BoolDomain `protobuf:"bytes,38,opt,name=bool_domain,json=boolDomain,proto3,oneof"` } type FeatureSpec_StructDomain struct { - StructDomain *v0.StructDomain `protobuf:"bytes,12,opt,name=struct_domain,json=structDomain,proto3,oneof"` + StructDomain *v0.StructDomain `protobuf:"bytes,39,opt,name=struct_domain,json=structDomain,proto3,oneof"` } type FeatureSpec_NaturalLanguageDomain struct { // Supported semantic domains. 
- NaturalLanguageDomain *v0.NaturalLanguageDomain `protobuf:"bytes,13,opt,name=natural_language_domain,json=naturalLanguageDomain,proto3,oneof"` + NaturalLanguageDomain *v0.NaturalLanguageDomain `protobuf:"bytes,40,opt,name=natural_language_domain,json=naturalLanguageDomain,proto3,oneof"` } type FeatureSpec_ImageDomain struct { - ImageDomain *v0.ImageDomain `protobuf:"bytes,14,opt,name=image_domain,json=imageDomain,proto3,oneof"` + ImageDomain *v0.ImageDomain `protobuf:"bytes,41,opt,name=image_domain,json=imageDomain,proto3,oneof"` } type FeatureSpec_MidDomain struct { - MidDomain *v0.MIDDomain `protobuf:"bytes,15,opt,name=mid_domain,json=midDomain,proto3,oneof"` + MidDomain *v0.MIDDomain `protobuf:"bytes,42,opt,name=mid_domain,json=midDomain,proto3,oneof"` } type FeatureSpec_UrlDomain struct { - UrlDomain *v0.URLDomain `protobuf:"bytes,16,opt,name=url_domain,json=urlDomain,proto3,oneof"` + UrlDomain *v0.URLDomain `protobuf:"bytes,43,opt,name=url_domain,json=urlDomain,proto3,oneof"` } type FeatureSpec_TimeDomain struct { - TimeDomain *v0.TimeDomain `protobuf:"bytes,17,opt,name=time_domain,json=timeDomain,proto3,oneof"` + TimeDomain *v0.TimeDomain `protobuf:"bytes,44,opt,name=time_domain,json=timeDomain,proto3,oneof"` } type FeatureSpec_TimeOfDayDomain struct { - TimeOfDayDomain *v0.TimeOfDayDomain `protobuf:"bytes,18,opt,name=time_of_day_domain,json=timeOfDayDomain,proto3,oneof"` + TimeOfDayDomain *v0.TimeOfDayDomain `protobuf:"bytes,45,opt,name=time_of_day_domain,json=timeOfDayDomain,proto3,oneof"` } func (*FeatureSpec_Domain) isFeatureSpec_DomainInfo() {} @@ -1011,208 +746,149 @@ var file_feast_core_FeatureSet_proto_rawDesc = []byte{ 0x04, 0x73, 0x70, 0x65, 0x63, 0x12, 0x2e, 0x0a, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x52, - 0x04, 0x6d, 0x65, 0x74, 0x61, 0x22, 0xa1, 0x02, 0x0a, 0x0e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, + 0x04, 0x6d, 0x65, 0x74, 0x61, 0x22, 0x88, 0x03, 0x0a, 0x0e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x53, 0x70, 0x65, 0x63, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, - 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, - 0x12, 0x32, 0x0a, 0x08, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, - 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x53, 0x70, 0x65, 0x63, 0x52, 0x08, 0x65, 0x6e, 0x74, 0x69, - 0x74, 0x69, 0x65, 0x73, 0x12, 0x33, 0x0a, 0x08, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, - 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, - 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x70, 0x65, 0x63, 0x52, - 0x08, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x12, 0x32, 0x0a, 0x07, 0x6d, 0x61, 0x78, - 0x5f, 0x61, 0x67, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 
0x06, 0x6d, 0x61, 0x78, 0x41, 0x67, 0x65, 0x12, 0x2a, 0x0a, - 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, 0x9b, 0x0a, 0x0a, 0x0a, 0x45, 0x6e, - 0x74, 0x69, 0x74, 0x79, 0x53, 0x70, 0x65, 0x63, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x3a, 0x0a, 0x0a, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x1b, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x09, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x45, 0x0a, 0x08, 0x70, 0x72, 0x65, 0x73, - 0x65, 0x6e, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x74, 0x65, 0x6e, + 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x32, 0x0a, 0x08, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, + 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, + 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x53, 0x70, 0x65, 0x63, + 0x52, 0x08, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x33, 0x0a, 0x08, 0x66, 0x65, + 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x66, + 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, + 0x65, 0x53, 0x70, 0x65, 0x63, 0x52, 0x08, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x12, + 0x32, 0x0a, 0x07, 0x6d, 0x61, 0x78, 0x5f, 0x61, 0x67, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, + 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x06, 0x6d, 0x61, 0x78, + 0x41, 0x67, 0x65, 0x12, 0x2a, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x06, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, + 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, + 0x3e, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x26, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, + 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x4c, 0x61, 0x62, 0x65, + 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x1a, + 0x39, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, + 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, + 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x4a, 0x04, 0x08, 0x02, 0x10, 0x03, + 0x22, 0x5c, 0x0a, 0x0a, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x53, 0x70, 0x65, 0x63, 0x12, 0x12, + 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, + 0x6d, 0x65, 0x12, 0x3a, 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1b, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, + 0x79, 0x70, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 
0x54, 0x79, 0x70, 0x65, 0x2e, 0x45, + 0x6e, 0x75, 0x6d, 0x52, 0x09, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x54, 0x79, 0x70, 0x65, 0x22, 0xa0, + 0x0b, 0x0a, 0x0b, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x70, 0x65, 0x63, 0x12, 0x12, + 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, + 0x6d, 0x65, 0x12, 0x3a, 0x0a, 0x0a, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1b, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, + 0x79, 0x70, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x45, + 0x6e, 0x75, 0x6d, 0x52, 0x09, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x3b, + 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x10, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x23, + 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, + 0x75, 0x72, 0x65, 0x53, 0x70, 0x65, 0x63, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x45, 0x0a, 0x08, 0x70, + 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x1e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, + 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, + 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x50, 0x72, + 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x48, 0x00, 0x52, 0x08, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, + 0x63, 0x65, 0x12, 0x5b, 0x0a, 0x0e, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x70, 0x72, 0x65, 0x73, + 0x65, 0x6e, 0x63, 0x65, 0x18, 0x1f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x50, 0x72, 0x65, 0x73, 0x65, - 0x6e, 0x63, 0x65, 0x48, 0x00, 0x52, 0x08, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x12, - 0x5b, 0x0a, 0x0e, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, - 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, - 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, - 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x50, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, - 0x57, 0x69, 0x74, 0x68, 0x69, 0x6e, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x48, 0x00, 0x52, 0x0d, 0x67, - 0x72, 0x6f, 0x75, 0x70, 0x50, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x3a, 0x0a, 0x05, - 0x73, 0x68, 0x61, 0x70, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, - 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x69, 0x78, 0x65, 0x64, 0x53, 0x68, 0x61, 0x70, 0x65, 0x48, - 0x01, 0x52, 0x05, 0x73, 0x68, 0x61, 0x70, 0x65, 0x12, 0x45, 0x0a, 0x0b, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, - 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x43, 0x6f, 0x75, 0x6e, - 0x74, 0x48, 0x01, 0x52, 0x0a, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, - 0x18, 0x0a, 0x06, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x48, - 0x02, 0x52, 0x06, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x42, 0x0a, 0x0a, 0x69, 0x6e, 
0x74, - 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, - 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x49, 0x6e, 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, - 0x48, 0x02, 0x52, 0x09, 0x69, 0x6e, 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x48, 0x0a, - 0x0c, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x09, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x6c, 0x6f, - 0x61, 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0b, 0x66, 0x6c, 0x6f, 0x61, - 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x4b, 0x0a, 0x0d, 0x73, 0x74, 0x72, 0x69, 0x6e, - 0x67, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, + 0x6e, 0x63, 0x65, 0x57, 0x69, 0x74, 0x68, 0x69, 0x6e, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x48, 0x00, + 0x52, 0x0d, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x50, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x12, + 0x3a, 0x0a, 0x05, 0x73, 0x68, 0x61, 0x70, 0x65, 0x18, 0x20, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x44, 0x6f, - 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x44, 0x6f, - 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x45, 0x0a, 0x0b, 0x62, 0x6f, 0x6f, 0x6c, 0x5f, 0x64, 0x6f, 0x6d, - 0x61, 0x69, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, 0x73, - 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, - 0x76, 0x30, 0x2e, 0x42, 0x6f, 0x6f, 0x6c, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, - 0x0a, 0x62, 0x6f, 0x6f, 0x6c, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x4b, 0x0a, 0x0d, 0x73, - 0x74, 0x72, 0x75, 0x63, 0x74, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x0c, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x53, 0x74, 0x72, 0x75, - 0x63, 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0c, 0x73, 0x74, 0x72, 0x75, - 0x63, 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x67, 0x0a, 0x17, 0x6e, 0x61, 0x74, 0x75, - 0x72, 0x61, 0x6c, 0x5f, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x6f, 0x6d, - 0x61, 0x69, 0x6e, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x74, 0x65, 0x6e, 0x73, - 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, - 0x76, 0x30, 0x2e, 0x4e, 0x61, 0x74, 0x75, 0x72, 0x61, 0x6c, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, - 0x67, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x15, 0x6e, 0x61, 0x74, 0x75, - 0x72, 0x61, 0x6c, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, - 0x6e, 0x12, 0x48, 0x0a, 0x0c, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, - 0x6e, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, - 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, - 0x2e, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0b, - 0x69, 0x6d, 0x61, 
0x67, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x42, 0x0a, 0x0a, 0x6d, - 0x69, 0x64, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x21, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x4d, 0x49, 0x44, 0x44, 0x6f, 0x6d, 0x61, - 0x69, 0x6e, 0x48, 0x02, 0x52, 0x09, 0x6d, 0x69, 0x64, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, - 0x42, 0x0a, 0x0a, 0x75, 0x72, 0x6c, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x10, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, - 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x55, 0x52, 0x4c, - 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x09, 0x75, 0x72, 0x6c, 0x44, 0x6f, 0x6d, - 0x61, 0x69, 0x6e, 0x12, 0x45, 0x0a, 0x0b, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x64, 0x6f, 0x6d, 0x61, - 0x69, 0x6e, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, - 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, - 0x30, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0a, - 0x74, 0x69, 0x6d, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x56, 0x0a, 0x12, 0x74, 0x69, - 0x6d, 0x65, 0x5f, 0x6f, 0x66, 0x5f, 0x64, 0x61, 0x79, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, - 0x18, 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, + 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x69, 0x78, 0x65, 0x64, 0x53, 0x68, 0x61, + 0x70, 0x65, 0x48, 0x01, 0x52, 0x05, 0x73, 0x68, 0x61, 0x70, 0x65, 0x12, 0x45, 0x0a, 0x0b, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x21, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, + 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x43, + 0x6f, 0x75, 0x6e, 0x74, 0x48, 0x01, 0x52, 0x0a, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x43, 0x6f, 0x75, + 0x6e, 0x74, 0x12, 0x18, 0x0a, 0x06, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x22, 0x20, 0x01, + 0x28, 0x09, 0x48, 0x02, 0x52, 0x06, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x42, 0x0a, 0x0a, + 0x69, 0x6e, 0x74, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x23, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x21, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, + 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x49, 0x6e, 0x74, 0x44, 0x6f, 0x6d, + 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x09, 0x69, 0x6e, 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, + 0x12, 0x48, 0x0a, 0x0c, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, + 0x18, 0x24, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, - 0x54, 0x69, 0x6d, 0x65, 0x4f, 0x66, 0x44, 0x61, 0x79, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, - 0x02, 0x52, 0x0f, 0x74, 0x69, 0x6d, 0x65, 0x4f, 0x66, 0x44, 0x61, 0x79, 0x44, 0x6f, 0x6d, 0x61, - 0x69, 0x6e, 0x42, 0x16, 0x0a, 0x14, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x63, - 0x6f, 0x6e, 0x73, 0x74, 0x72, 0x61, 0x69, 0x6e, 0x74, 0x73, 0x42, 0x0c, 0x0a, 0x0a, 0x73, 0x68, - 0x61, 0x70, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x42, 0x0d, 0x0a, 0x0b, 0x64, 0x6f, 0x6d, 0x61, - 0x69, 0x6e, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 
0x22, 0x9c, 0x0a, 0x0a, 0x0b, 0x46, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x53, 0x70, 0x65, 0x63, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x3a, 0x0a, 0x0a, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, - 0x1b, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x09, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x45, 0x0a, 0x08, 0x70, 0x72, 0x65, 0x73, 0x65, - 0x6e, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x74, 0x65, 0x6e, 0x73, - 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, - 0x76, 0x30, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x50, 0x72, 0x65, 0x73, 0x65, 0x6e, - 0x63, 0x65, 0x48, 0x00, 0x52, 0x08, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x5b, - 0x0a, 0x0e, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x5f, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, - 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, - 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x50, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x57, - 0x69, 0x74, 0x68, 0x69, 0x6e, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x48, 0x00, 0x52, 0x0d, 0x67, 0x72, - 0x6f, 0x75, 0x70, 0x50, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x3a, 0x0a, 0x05, 0x73, - 0x68, 0x61, 0x70, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, - 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x69, 0x78, 0x65, 0x64, 0x53, 0x68, 0x61, 0x70, 0x65, 0x48, 0x01, - 0x52, 0x05, 0x73, 0x68, 0x61, 0x70, 0x65, 0x12, 0x45, 0x0a, 0x0b, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, + 0x46, 0x6c, 0x6f, 0x61, 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0b, 0x66, + 0x6c, 0x6f, 0x61, 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x4b, 0x0a, 0x0d, 0x73, 0x74, + 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x25, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x24, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, + 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, + 0x67, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0c, 0x73, 0x74, 0x72, 0x69, 0x6e, + 0x67, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x45, 0x0a, 0x0b, 0x62, 0x6f, 0x6f, 0x6c, 0x5f, + 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x26, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, - 0x48, 0x01, 0x52, 0x0a, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x18, - 0x0a, 0x06, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x48, 0x02, - 0x52, 0x06, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x42, 0x0a, 0x0a, 0x69, 0x6e, 0x74, 0x5f, - 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x74, + 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x42, 0x6f, 0x6f, 0x6c, 0x44, 
0x6f, 0x6d, 0x61, 0x69, 0x6e, + 0x48, 0x02, 0x52, 0x0a, 0x62, 0x6f, 0x6f, 0x6c, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x4b, + 0x0a, 0x0d, 0x73, 0x74, 0x72, 0x75, 0x63, 0x74, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, + 0x27, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, + 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x53, + 0x74, 0x72, 0x75, 0x63, 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0c, 0x73, + 0x74, 0x72, 0x75, 0x63, 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x67, 0x0a, 0x17, 0x6e, + 0x61, 0x74, 0x75, 0x72, 0x61, 0x6c, 0x5f, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x5f, + 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x28, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x49, 0x6e, 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, - 0x02, 0x52, 0x09, 0x69, 0x6e, 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x48, 0x0a, 0x0c, - 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x09, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x46, 0x6c, 0x6f, 0x61, - 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0b, 0x66, 0x6c, 0x6f, 0x61, 0x74, - 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x4b, 0x0a, 0x0d, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, - 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, - 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x44, 0x6f, 0x6d, - 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0c, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x44, 0x6f, 0x6d, - 0x61, 0x69, 0x6e, 0x12, 0x45, 0x0a, 0x0b, 0x62, 0x6f, 0x6f, 0x6c, 0x5f, 0x64, 0x6f, 0x6d, 0x61, - 0x69, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, - 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, - 0x30, 0x2e, 0x42, 0x6f, 0x6f, 0x6c, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0a, - 0x62, 0x6f, 0x6f, 0x6c, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x4b, 0x0a, 0x0d, 0x73, 0x74, - 0x72, 0x75, 0x63, 0x74, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x0c, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x24, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, - 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0c, 0x73, 0x74, 0x72, 0x75, 0x63, - 0x74, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x67, 0x0a, 0x17, 0x6e, 0x61, 0x74, 0x75, 0x72, - 0x61, 0x6c, 0x5f, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x6f, 0x6d, 0x61, - 0x69, 0x6e, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, - 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, - 0x30, 0x2e, 0x4e, 0x61, 0x74, 0x75, 0x72, 0x61, 0x6c, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, - 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x15, 0x6e, 0x61, 0x74, 0x75, 0x72, - 0x61, 0x6c, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 
0x6e, - 0x12, 0x48, 0x0a, 0x0c, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, - 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, - 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, - 0x49, 0x6d, 0x61, 0x67, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0b, 0x69, - 0x6d, 0x61, 0x67, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x42, 0x0a, 0x0a, 0x6d, 0x69, - 0x64, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, - 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x4d, 0x49, 0x44, 0x44, 0x6f, 0x6d, 0x61, 0x69, - 0x6e, 0x48, 0x02, 0x52, 0x09, 0x6d, 0x69, 0x64, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x42, - 0x0a, 0x0a, 0x75, 0x72, 0x6c, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x10, 0x20, 0x01, + 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x4e, 0x61, 0x74, 0x75, 0x72, 0x61, 0x6c, 0x4c, 0x61, 0x6e, + 0x67, 0x75, 0x61, 0x67, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x15, 0x6e, + 0x61, 0x74, 0x75, 0x72, 0x61, 0x6c, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x44, 0x6f, + 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x48, 0x0a, 0x0c, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x6f, + 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x29, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x23, 0x2e, 0x74, 0x65, 0x6e, + 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, + 0x2e, 0x76, 0x30, 0x2e, 0x49, 0x6d, 0x61, 0x67, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, + 0x02, 0x52, 0x0b, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x42, + 0x0a, 0x0a, 0x6d, 0x69, 0x64, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x2a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x55, 0x52, 0x4c, 0x44, - 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x09, 0x75, 0x72, 0x6c, 0x44, 0x6f, 0x6d, 0x61, - 0x69, 0x6e, 0x12, 0x45, 0x0a, 0x0b, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, - 0x6e, 0x18, 0x11, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, - 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, - 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0a, 0x74, - 0x69, 0x6d, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x56, 0x0a, 0x12, 0x74, 0x69, 0x6d, - 0x65, 0x5f, 0x6f, 0x66, 0x5f, 0x64, 0x61, 0x79, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, - 0x12, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, - 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x54, - 0x69, 0x6d, 0x65, 0x4f, 0x66, 0x44, 0x61, 0x79, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, - 0x52, 0x0f, 0x74, 0x69, 0x6d, 0x65, 0x4f, 0x66, 0x44, 0x61, 0x79, 0x44, 0x6f, 0x6d, 0x61, 0x69, - 0x6e, 0x42, 0x16, 0x0a, 0x14, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x63, 0x6f, - 0x6e, 0x73, 0x74, 0x72, 0x61, 0x69, 0x6e, 0x74, 0x73, 0x42, 0x0c, 0x0a, 0x0a, 0x73, 0x68, 0x61, - 0x70, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x42, 0x0d, 0x0a, 0x0b, 0x64, 0x6f, 0x6d, 0x61, 0x69, - 0x6e, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x22, 0x8f, 0x01, 0x0a, 0x0e, 0x46, 0x65, 0x61, 0x74, 0x75, - 0x72, 0x65, 0x53, 
0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x47, 0x0a, 0x11, 0x63, 0x72, 0x65, - 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x52, 0x10, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, - 0x6d, 0x70, 0x12, 0x34, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0e, 0x32, 0x1c, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, - 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, - 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2a, 0x4c, 0x0a, 0x10, 0x46, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x12, 0x0a, 0x0e, - 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, - 0x12, 0x12, 0x0a, 0x0e, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x50, 0x45, 0x4e, 0x44, 0x49, - 0x4e, 0x47, 0x10, 0x01, 0x12, 0x10, 0x0a, 0x0c, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x52, - 0x45, 0x41, 0x44, 0x59, 0x10, 0x02, 0x42, 0x4e, 0x0a, 0x0a, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x63, 0x6f, 0x72, 0x65, 0x42, 0x0f, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, - 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, - 0x6d, 0x2f, 0x67, 0x6f, 0x6a, 0x65, 0x6b, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, - 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x4d, 0x49, 0x44, 0x44, + 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x09, 0x6d, 0x69, 0x64, 0x44, 0x6f, 0x6d, 0x61, + 0x69, 0x6e, 0x12, 0x42, 0x0a, 0x0a, 0x75, 0x72, 0x6c, 0x5f, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, + 0x18, 0x2b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, + 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x2e, + 0x55, 0x52, 0x4c, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, 0x02, 0x52, 0x09, 0x75, 0x72, 0x6c, + 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x45, 0x0a, 0x0b, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x64, + 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x2c, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x74, 0x65, + 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, + 0x61, 0x2e, 0x76, 0x30, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x48, + 0x02, 0x52, 0x0a, 0x74, 0x69, 0x6d, 0x65, 0x44, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x56, 0x0a, + 0x12, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x6f, 0x66, 0x5f, 0x64, 0x61, 0x79, 0x5f, 0x64, 0x6f, 0x6d, + 0x61, 0x69, 0x6e, 0x18, 0x2d, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x74, 0x65, 0x6e, 0x73, + 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, + 0x76, 0x30, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x4f, 0x66, 0x44, 0x61, 0x79, 0x44, 0x6f, 0x6d, 0x61, + 0x69, 0x6e, 0x48, 0x02, 0x52, 0x0f, 0x74, 0x69, 0x6d, 0x65, 0x4f, 0x66, 0x44, 0x61, 0x79, 0x44, + 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x1a, 0x39, 0x0a, 0x0b, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x45, + 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 
0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, + 0x42, 0x16, 0x0a, 0x14, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x63, 0x6f, 0x6e, + 0x73, 0x74, 0x72, 0x61, 0x69, 0x6e, 0x74, 0x73, 0x42, 0x0c, 0x0a, 0x0a, 0x73, 0x68, 0x61, 0x70, + 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x42, 0x0d, 0x0a, 0x0b, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, + 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x4a, 0x04, 0x08, 0x03, 0x10, 0x10, 0x4a, 0x04, 0x08, 0x11, 0x10, + 0x1e, 0x22, 0x8f, 0x01, 0x0a, 0x0e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, + 0x4d, 0x65, 0x74, 0x61, 0x12, 0x47, 0x0a, 0x11, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, + 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, + 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x10, 0x63, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x34, 0x0a, + 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1c, 0x2e, + 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, + 0x72, 0x65, 0x53, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, + 0x74, 0x75, 0x73, 0x2a, 0x65, 0x0a, 0x10, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, + 0x74, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x12, 0x0a, 0x0e, 0x53, 0x54, 0x41, 0x54, 0x55, + 0x53, 0x5f, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x12, 0x0a, 0x0e, 0x53, + 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x50, 0x45, 0x4e, 0x44, 0x49, 0x4e, 0x47, 0x10, 0x01, 0x12, + 0x17, 0x0a, 0x13, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x4a, 0x4f, 0x42, 0x5f, 0x53, 0x54, + 0x41, 0x52, 0x54, 0x49, 0x4e, 0x47, 0x10, 0x03, 0x12, 0x10, 0x0a, 0x0c, 0x53, 0x54, 0x41, 0x54, + 0x55, 0x53, 0x5f, 0x52, 0x45, 0x41, 0x44, 0x59, 0x10, 0x02, 0x42, 0x58, 0x0a, 0x10, 0x66, 0x65, + 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, 0x0f, + 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, + 0x33, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, + 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, + 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, + 0x63, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1228,7 +904,7 @@ func file_feast_core_FeatureSet_proto_rawDescGZIP() []byte { } var file_feast_core_FeatureSet_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_feast_core_FeatureSet_proto_msgTypes = make([]protoimpl.MessageInfo, 5) +var file_feast_core_FeatureSet_proto_msgTypes = make([]protoimpl.MessageInfo, 7) var file_feast_core_FeatureSet_proto_goTypes = []interface{}{ (FeatureSetStatus)(0), // 0: feast.core.FeatureSetStatus (*FeatureSet)(nil), // 1: feast.core.FeatureSet @@ -1236,72 +912,61 @@ var file_feast_core_FeatureSet_proto_goTypes = []interface{}{ (*EntitySpec)(nil), // 3: feast.core.EntitySpec (*FeatureSpec)(nil), // 4: feast.core.FeatureSpec (*FeatureSetMeta)(nil), // 5: feast.core.FeatureSetMeta - (*duration.Duration)(nil), // 6: 
google.protobuf.Duration - (*Source)(nil), // 7: feast.core.Source - (types.ValueType_Enum)(0), // 8: feast.types.ValueType.Enum - (*v0.FeaturePresence)(nil), // 9: tensorflow.metadata.v0.FeaturePresence - (*v0.FeaturePresenceWithinGroup)(nil), // 10: tensorflow.metadata.v0.FeaturePresenceWithinGroup - (*v0.FixedShape)(nil), // 11: tensorflow.metadata.v0.FixedShape - (*v0.ValueCount)(nil), // 12: tensorflow.metadata.v0.ValueCount - (*v0.IntDomain)(nil), // 13: tensorflow.metadata.v0.IntDomain - (*v0.FloatDomain)(nil), // 14: tensorflow.metadata.v0.FloatDomain - (*v0.StringDomain)(nil), // 15: tensorflow.metadata.v0.StringDomain - (*v0.BoolDomain)(nil), // 16: tensorflow.metadata.v0.BoolDomain - (*v0.StructDomain)(nil), // 17: tensorflow.metadata.v0.StructDomain - (*v0.NaturalLanguageDomain)(nil), // 18: tensorflow.metadata.v0.NaturalLanguageDomain - (*v0.ImageDomain)(nil), // 19: tensorflow.metadata.v0.ImageDomain - (*v0.MIDDomain)(nil), // 20: tensorflow.metadata.v0.MIDDomain - (*v0.URLDomain)(nil), // 21: tensorflow.metadata.v0.URLDomain - (*v0.TimeDomain)(nil), // 22: tensorflow.metadata.v0.TimeDomain - (*v0.TimeOfDayDomain)(nil), // 23: tensorflow.metadata.v0.TimeOfDayDomain - (*timestamp.Timestamp)(nil), // 24: google.protobuf.Timestamp + nil, // 6: feast.core.FeatureSetSpec.LabelsEntry + nil, // 7: feast.core.FeatureSpec.LabelsEntry + (*duration.Duration)(nil), // 8: google.protobuf.Duration + (*Source)(nil), // 9: feast.core.Source + (types.ValueType_Enum)(0), // 10: feast.types.ValueType.Enum + (*v0.FeaturePresence)(nil), // 11: tensorflow.metadata.v0.FeaturePresence + (*v0.FeaturePresenceWithinGroup)(nil), // 12: tensorflow.metadata.v0.FeaturePresenceWithinGroup + (*v0.FixedShape)(nil), // 13: tensorflow.metadata.v0.FixedShape + (*v0.ValueCount)(nil), // 14: tensorflow.metadata.v0.ValueCount + (*v0.IntDomain)(nil), // 15: tensorflow.metadata.v0.IntDomain + (*v0.FloatDomain)(nil), // 16: tensorflow.metadata.v0.FloatDomain + (*v0.StringDomain)(nil), // 17: tensorflow.metadata.v0.StringDomain + (*v0.BoolDomain)(nil), // 18: tensorflow.metadata.v0.BoolDomain + (*v0.StructDomain)(nil), // 19: tensorflow.metadata.v0.StructDomain + (*v0.NaturalLanguageDomain)(nil), // 20: tensorflow.metadata.v0.NaturalLanguageDomain + (*v0.ImageDomain)(nil), // 21: tensorflow.metadata.v0.ImageDomain + (*v0.MIDDomain)(nil), // 22: tensorflow.metadata.v0.MIDDomain + (*v0.URLDomain)(nil), // 23: tensorflow.metadata.v0.URLDomain + (*v0.TimeDomain)(nil), // 24: tensorflow.metadata.v0.TimeDomain + (*v0.TimeOfDayDomain)(nil), // 25: tensorflow.metadata.v0.TimeOfDayDomain + (*timestamp.Timestamp)(nil), // 26: google.protobuf.Timestamp } var file_feast_core_FeatureSet_proto_depIdxs = []int32{ 2, // 0: feast.core.FeatureSet.spec:type_name -> feast.core.FeatureSetSpec 5, // 1: feast.core.FeatureSet.meta:type_name -> feast.core.FeatureSetMeta 3, // 2: feast.core.FeatureSetSpec.entities:type_name -> feast.core.EntitySpec 4, // 3: feast.core.FeatureSetSpec.features:type_name -> feast.core.FeatureSpec - 6, // 4: feast.core.FeatureSetSpec.max_age:type_name -> google.protobuf.Duration - 7, // 5: feast.core.FeatureSetSpec.source:type_name -> feast.core.Source - 8, // 6: feast.core.EntitySpec.value_type:type_name -> feast.types.ValueType.Enum - 9, // 7: feast.core.EntitySpec.presence:type_name -> tensorflow.metadata.v0.FeaturePresence - 10, // 8: feast.core.EntitySpec.group_presence:type_name -> tensorflow.metadata.v0.FeaturePresenceWithinGroup - 11, // 9: feast.core.EntitySpec.shape:type_name -> 
tensorflow.metadata.v0.FixedShape - 12, // 10: feast.core.EntitySpec.value_count:type_name -> tensorflow.metadata.v0.ValueCount - 13, // 11: feast.core.EntitySpec.int_domain:type_name -> tensorflow.metadata.v0.IntDomain - 14, // 12: feast.core.EntitySpec.float_domain:type_name -> tensorflow.metadata.v0.FloatDomain - 15, // 13: feast.core.EntitySpec.string_domain:type_name -> tensorflow.metadata.v0.StringDomain - 16, // 14: feast.core.EntitySpec.bool_domain:type_name -> tensorflow.metadata.v0.BoolDomain - 17, // 15: feast.core.EntitySpec.struct_domain:type_name -> tensorflow.metadata.v0.StructDomain - 18, // 16: feast.core.EntitySpec.natural_language_domain:type_name -> tensorflow.metadata.v0.NaturalLanguageDomain - 19, // 17: feast.core.EntitySpec.image_domain:type_name -> tensorflow.metadata.v0.ImageDomain - 20, // 18: feast.core.EntitySpec.mid_domain:type_name -> tensorflow.metadata.v0.MIDDomain - 21, // 19: feast.core.EntitySpec.url_domain:type_name -> tensorflow.metadata.v0.URLDomain - 22, // 20: feast.core.EntitySpec.time_domain:type_name -> tensorflow.metadata.v0.TimeDomain - 23, // 21: feast.core.EntitySpec.time_of_day_domain:type_name -> tensorflow.metadata.v0.TimeOfDayDomain - 8, // 22: feast.core.FeatureSpec.value_type:type_name -> feast.types.ValueType.Enum - 9, // 23: feast.core.FeatureSpec.presence:type_name -> tensorflow.metadata.v0.FeaturePresence - 10, // 24: feast.core.FeatureSpec.group_presence:type_name -> tensorflow.metadata.v0.FeaturePresenceWithinGroup - 11, // 25: feast.core.FeatureSpec.shape:type_name -> tensorflow.metadata.v0.FixedShape - 12, // 26: feast.core.FeatureSpec.value_count:type_name -> tensorflow.metadata.v0.ValueCount - 13, // 27: feast.core.FeatureSpec.int_domain:type_name -> tensorflow.metadata.v0.IntDomain - 14, // 28: feast.core.FeatureSpec.float_domain:type_name -> tensorflow.metadata.v0.FloatDomain - 15, // 29: feast.core.FeatureSpec.string_domain:type_name -> tensorflow.metadata.v0.StringDomain - 16, // 30: feast.core.FeatureSpec.bool_domain:type_name -> tensorflow.metadata.v0.BoolDomain - 17, // 31: feast.core.FeatureSpec.struct_domain:type_name -> tensorflow.metadata.v0.StructDomain - 18, // 32: feast.core.FeatureSpec.natural_language_domain:type_name -> tensorflow.metadata.v0.NaturalLanguageDomain - 19, // 33: feast.core.FeatureSpec.image_domain:type_name -> tensorflow.metadata.v0.ImageDomain - 20, // 34: feast.core.FeatureSpec.mid_domain:type_name -> tensorflow.metadata.v0.MIDDomain - 21, // 35: feast.core.FeatureSpec.url_domain:type_name -> tensorflow.metadata.v0.URLDomain - 22, // 36: feast.core.FeatureSpec.time_domain:type_name -> tensorflow.metadata.v0.TimeDomain - 23, // 37: feast.core.FeatureSpec.time_of_day_domain:type_name -> tensorflow.metadata.v0.TimeOfDayDomain - 24, // 38: feast.core.FeatureSetMeta.created_timestamp:type_name -> google.protobuf.Timestamp - 0, // 39: feast.core.FeatureSetMeta.status:type_name -> feast.core.FeatureSetStatus - 40, // [40:40] is the sub-list for method output_type - 40, // [40:40] is the sub-list for method input_type - 40, // [40:40] is the sub-list for extension type_name - 40, // [40:40] is the sub-list for extension extendee - 0, // [0:40] is the sub-list for field type_name + 8, // 4: feast.core.FeatureSetSpec.max_age:type_name -> google.protobuf.Duration + 9, // 5: feast.core.FeatureSetSpec.source:type_name -> feast.core.Source + 6, // 6: feast.core.FeatureSetSpec.labels:type_name -> feast.core.FeatureSetSpec.LabelsEntry + 10, // 7: feast.core.EntitySpec.value_type:type_name -> 
feast.types.ValueType.Enum + 10, // 8: feast.core.FeatureSpec.value_type:type_name -> feast.types.ValueType.Enum + 7, // 9: feast.core.FeatureSpec.labels:type_name -> feast.core.FeatureSpec.LabelsEntry + 11, // 10: feast.core.FeatureSpec.presence:type_name -> tensorflow.metadata.v0.FeaturePresence + 12, // 11: feast.core.FeatureSpec.group_presence:type_name -> tensorflow.metadata.v0.FeaturePresenceWithinGroup + 13, // 12: feast.core.FeatureSpec.shape:type_name -> tensorflow.metadata.v0.FixedShape + 14, // 13: feast.core.FeatureSpec.value_count:type_name -> tensorflow.metadata.v0.ValueCount + 15, // 14: feast.core.FeatureSpec.int_domain:type_name -> tensorflow.metadata.v0.IntDomain + 16, // 15: feast.core.FeatureSpec.float_domain:type_name -> tensorflow.metadata.v0.FloatDomain + 17, // 16: feast.core.FeatureSpec.string_domain:type_name -> tensorflow.metadata.v0.StringDomain + 18, // 17: feast.core.FeatureSpec.bool_domain:type_name -> tensorflow.metadata.v0.BoolDomain + 19, // 18: feast.core.FeatureSpec.struct_domain:type_name -> tensorflow.metadata.v0.StructDomain + 20, // 19: feast.core.FeatureSpec.natural_language_domain:type_name -> tensorflow.metadata.v0.NaturalLanguageDomain + 21, // 20: feast.core.FeatureSpec.image_domain:type_name -> tensorflow.metadata.v0.ImageDomain + 22, // 21: feast.core.FeatureSpec.mid_domain:type_name -> tensorflow.metadata.v0.MIDDomain + 23, // 22: feast.core.FeatureSpec.url_domain:type_name -> tensorflow.metadata.v0.URLDomain + 24, // 23: feast.core.FeatureSpec.time_domain:type_name -> tensorflow.metadata.v0.TimeDomain + 25, // 24: feast.core.FeatureSpec.time_of_day_domain:type_name -> tensorflow.metadata.v0.TimeOfDayDomain + 26, // 25: feast.core.FeatureSetMeta.created_timestamp:type_name -> google.protobuf.Timestamp + 0, // 26: feast.core.FeatureSetMeta.status:type_name -> feast.core.FeatureSetStatus + 27, // [27:27] is the sub-list for method output_type + 27, // [27:27] is the sub-list for method input_type + 27, // [27:27] is the sub-list for extension type_name + 27, // [27:27] is the sub-list for extension extendee + 0, // [0:27] is the sub-list for field type_name } func init() { file_feast_core_FeatureSet_proto_init() } @@ -1372,24 +1037,6 @@ func file_feast_core_FeatureSet_proto_init() { } } } - file_feast_core_FeatureSet_proto_msgTypes[2].OneofWrappers = []interface{}{ - (*EntitySpec_Presence)(nil), - (*EntitySpec_GroupPresence)(nil), - (*EntitySpec_Shape)(nil), - (*EntitySpec_ValueCount)(nil), - (*EntitySpec_Domain)(nil), - (*EntitySpec_IntDomain)(nil), - (*EntitySpec_FloatDomain)(nil), - (*EntitySpec_StringDomain)(nil), - (*EntitySpec_BoolDomain)(nil), - (*EntitySpec_StructDomain)(nil), - (*EntitySpec_NaturalLanguageDomain)(nil), - (*EntitySpec_ImageDomain)(nil), - (*EntitySpec_MidDomain)(nil), - (*EntitySpec_UrlDomain)(nil), - (*EntitySpec_TimeDomain)(nil), - (*EntitySpec_TimeOfDayDomain)(nil), - } file_feast_core_FeatureSet_proto_msgTypes[3].OneofWrappers = []interface{}{ (*FeatureSpec_Presence)(nil), (*FeatureSpec_GroupPresence)(nil), @@ -1414,7 +1061,7 @@ func file_feast_core_FeatureSet_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_feast_core_FeatureSet_proto_rawDesc, NumEnums: 1, - NumMessages: 5, + NumMessages: 7, NumExtensions: 0, NumServices: 0, }, diff --git a/sdk/go/protos/feast/core/FeatureSetReference.pb.go b/sdk/go/protos/feast/core/FeatureSetReference.pb.go index 52a63b6a8c..dfa47a25a7 100644 --- a/sdk/go/protos/feast/core/FeatureSetReference.pb.go +++ 
b/sdk/go/protos/feast/core/FeatureSetReference.pb.go @@ -16,7 +16,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.21.0 +// protoc-gen-go v1.24.0 // protoc v3.10.0 // source: feast/core/FeatureSetReference.proto @@ -51,8 +51,6 @@ type FeatureSetReference struct { Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` // Name of the FeatureSet Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` - // Version no. of the FeatureSet - Version int32 `protobuf:"varint,3,opt,name=version,proto3" json:"version,omitempty"` } func (x *FeatureSetReference) Reset() { @@ -101,32 +99,24 @@ func (x *FeatureSetReference) GetName() string { return "" } -func (x *FeatureSetReference) GetVersion() int32 { - if x != nil { - return x.Version - } - return 0 -} - var File_feast_core_FeatureSetReference_proto protoreflect.FileDescriptor var file_feast_core_FeatureSetReference_proto_rawDesc = []byte{ 0x0a, 0x24, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, - 0x72, 0x65, 0x22, 0x5d, 0x0a, 0x13, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, + 0x72, 0x65, 0x22, 0x49, 0x0a, 0x13, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, - 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, - 0x6e, 0x42, 0x57, 0x0a, 0x0a, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, - 0x18, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x65, 0x66, 0x65, 0x72, - 0x65, 0x6e, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6a, 0x65, 0x6b, 0x2f, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x33, + 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x4a, 0x04, 0x08, 0x03, 0x10, 0x04, 0x42, 0x61, 0x0a, + 0x10, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, 0x6f, 0x72, + 0x65, 0x42, 0x18, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x52, 0x65, 0x66, + 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x33, 0x67, 0x69, 0x74, + 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, 0x65, + 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, + 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/sdk/go/protos/feast/core/IngestionJob.pb.go b/sdk/go/protos/feast/core/IngestionJob.pb.go index 3623d95d88..a54b23c449 100644 --- a/sdk/go/protos/feast/core/IngestionJob.pb.go +++ b/sdk/go/protos/feast/core/IngestionJob.pb.go @@ -16,7 +16,7 @@ // 
Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.21.0 +// protoc-gen-go v1.24.0 // protoc v3.10.0 // source: feast/core/IngestionJob.proto @@ -248,12 +248,13 @@ var file_feast_core_IngestionJob_proto_rawDesc = []byte{ 0x0a, 0x07, 0x41, 0x42, 0x4f, 0x52, 0x54, 0x45, 0x44, 0x10, 0x05, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x06, 0x12, 0x0e, 0x0a, 0x0a, 0x53, 0x55, 0x53, 0x50, 0x45, 0x4e, 0x44, 0x49, 0x4e, 0x47, 0x10, 0x07, 0x12, 0x0d, 0x0a, 0x09, 0x53, 0x55, 0x53, 0x50, 0x45, 0x4e, - 0x44, 0x45, 0x44, 0x10, 0x08, 0x42, 0x50, 0x0a, 0x0a, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, - 0x6f, 0x72, 0x65, 0x42, 0x11, 0x49, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, - 0x62, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, - 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6a, 0x65, 0x6b, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, - 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, - 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x44, 0x45, 0x44, 0x10, 0x08, 0x42, 0x5a, 0x0a, 0x10, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, 0x11, 0x49, 0x6e, 0x67, 0x65, 0x73, + 0x74, 0x69, 0x6f, 0x6e, 0x4a, 0x6f, 0x62, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x33, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, + 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, + 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/sdk/go/protos/feast/core/Runner.pb.go b/sdk/go/protos/feast/core/Runner.pb.go new file mode 100644 index 0000000000..b1255e52ba --- /dev/null +++ b/sdk/go/protos/feast/core/Runner.pb.go @@ -0,0 +1,376 @@ +// +// * Copyright 2020 The Feast Authors +// * +// * Licensed under the Apache License, Version 2.0 (the "License"); +// * you may not use this file except in compliance with the License. +// * You may obtain a copy of the License at +// * +// * https://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. +// + +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.24.0 +// protoc v3.10.0 +// source: feast/core/Runner.proto + +package core + +import ( + proto "github.com/golang/protobuf/proto" + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +// This is a compile-time assertion that a sufficiently up-to-date version +// of the legacy proto package is being used. 
+const _ = proto.ProtoPackageIsVersion4 + +type DirectRunnerConfigOptions struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + //* + // Controls the amount of target parallelism the DirectRunner will use. + // Defaults to the greater of the number of available processors and 3. Must be a value + // greater than zero. + TargetParallelism int32 `protobuf:"varint,1,opt,name=targetParallelism,proto3" json:"targetParallelism,omitempty"` + // BigQuery table specification, e.g. PROJECT_ID:DATASET_ID.TABLE_ID + DeadLetterTableSpec string `protobuf:"bytes,2,opt,name=deadLetterTableSpec,proto3" json:"deadLetterTableSpec,omitempty"` +} + +func (x *DirectRunnerConfigOptions) Reset() { + *x = DirectRunnerConfigOptions{} + if protoimpl.UnsafeEnabled { + mi := &file_feast_core_Runner_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DirectRunnerConfigOptions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DirectRunnerConfigOptions) ProtoMessage() {} + +func (x *DirectRunnerConfigOptions) ProtoReflect() protoreflect.Message { + mi := &file_feast_core_Runner_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DirectRunnerConfigOptions.ProtoReflect.Descriptor instead. +func (*DirectRunnerConfigOptions) Descriptor() ([]byte, []int) { + return file_feast_core_Runner_proto_rawDescGZIP(), []int{0} +} + +func (x *DirectRunnerConfigOptions) GetTargetParallelism() int32 { + if x != nil { + return x.TargetParallelism + } + return 0 +} + +func (x *DirectRunnerConfigOptions) GetDeadLetterTableSpec() string { + if x != nil { + return x.DeadLetterTableSpec + } + return "" +} + +type DataflowRunnerConfigOptions struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Project id to use when launching jobs. + Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` + // The Google Compute Engine region for creating Dataflow jobs. + Region string `protobuf:"bytes,2,opt,name=region,proto3" json:"region,omitempty"` + // GCP availability zone for operations. + Zone string `protobuf:"bytes,3,opt,name=zone,proto3" json:"zone,omitempty"` + // Run the job as a specific service account, instead of the default GCE robot. + ServiceAccount string `protobuf:"bytes,4,opt,name=serviceAccount,proto3" json:"serviceAccount,omitempty"` + // GCE network for launching workers. + Network string `protobuf:"bytes,5,opt,name=network,proto3" json:"network,omitempty"` + // GCE subnetwork for launching workers. e.g. regions/asia-east1/subnetworks/mysubnetwork + Subnetwork string `protobuf:"bytes,6,opt,name=subnetwork,proto3" json:"subnetwork,omitempty"` + // Machine type to create Dataflow worker VMs as. + WorkerMachineType string `protobuf:"bytes,7,opt,name=workerMachineType,proto3" json:"workerMachineType,omitempty"` + // The autoscaling algorithm to use for the workerpool. + AutoscalingAlgorithm string `protobuf:"bytes,8,opt,name=autoscalingAlgorithm,proto3" json:"autoscalingAlgorithm,omitempty"` + // Specifies whether worker pools should be started with public IP addresses.
+ UsePublicIps bool `protobuf:"varint,9,opt,name=usePublicIps,proto3" json:"usePublicIps,omitempty"` + // A pipeline level default location for storing temporary files. Supports Google Cloud Storage locations, + // e.g. gs://bucket/object + TempLocation string `protobuf:"bytes,10,opt,name=tempLocation,proto3" json:"tempLocation,omitempty"` + // The maximum number of workers to use for the workerpool. + MaxNumWorkers int32 `protobuf:"varint,11,opt,name=maxNumWorkers,proto3" json:"maxNumWorkers,omitempty"` + // BigQuery table specification, e.g. PROJECT_ID:DATASET_ID.TABLE_ID + DeadLetterTableSpec string `protobuf:"bytes,12,opt,name=deadLetterTableSpec,proto3" json:"deadLetterTableSpec,omitempty"` +} + +func (x *DataflowRunnerConfigOptions) Reset() { + *x = DataflowRunnerConfigOptions{} + if protoimpl.UnsafeEnabled { + mi := &file_feast_core_Runner_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DataflowRunnerConfigOptions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DataflowRunnerConfigOptions) ProtoMessage() {} + +func (x *DataflowRunnerConfigOptions) ProtoReflect() protoreflect.Message { + mi := &file_feast_core_Runner_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DataflowRunnerConfigOptions.ProtoReflect.Descriptor instead. +func (*DataflowRunnerConfigOptions) Descriptor() ([]byte, []int) { + return file_feast_core_Runner_proto_rawDescGZIP(), []int{1} +} + +func (x *DataflowRunnerConfigOptions) GetProject() string { + if x != nil { + return x.Project + } + return "" +} + +func (x *DataflowRunnerConfigOptions) GetRegion() string { + if x != nil { + return x.Region + } + return "" +} + +func (x *DataflowRunnerConfigOptions) GetZone() string { + if x != nil { + return x.Zone + } + return "" +} + +func (x *DataflowRunnerConfigOptions) GetServiceAccount() string { + if x != nil { + return x.ServiceAccount + } + return "" +} + +func (x *DataflowRunnerConfigOptions) GetNetwork() string { + if x != nil { + return x.Network + } + return "" +} + +func (x *DataflowRunnerConfigOptions) GetSubnetwork() string { + if x != nil { + return x.Subnetwork + } + return "" +} + +func (x *DataflowRunnerConfigOptions) GetWorkerMachineType() string { + if x != nil { + return x.WorkerMachineType + } + return "" +} + +func (x *DataflowRunnerConfigOptions) GetAutoscalingAlgorithm() string { + if x != nil { + return x.AutoscalingAlgorithm + } + return "" +} + +func (x *DataflowRunnerConfigOptions) GetUsePublicIps() bool { + if x != nil { + return x.UsePublicIps + } + return false +} + +func (x *DataflowRunnerConfigOptions) GetTempLocation() string { + if x != nil { + return x.TempLocation + } + return "" +} + +func (x *DataflowRunnerConfigOptions) GetMaxNumWorkers() int32 { + if x != nil { + return x.MaxNumWorkers + } + return 0 +} + +func (x *DataflowRunnerConfigOptions) GetDeadLetterTableSpec() string { + if x != nil { + return x.DeadLetterTableSpec + } + return "" +} + +var File_feast_core_Runner_proto protoreflect.FileDescriptor + +var file_feast_core_Runner_proto_rawDesc = []byte{ + 0x0a, 0x17, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x52, 0x75, 0x6e, + 0x6e, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x66, 0x65, 0x61, 0x73, 0x74, + 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x22,
0x7b, 0x0a, 0x19, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x52, + 0x75, 0x6e, 0x6e, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x12, 0x2c, 0x0a, 0x11, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x50, 0x61, 0x72, 0x61, + 0x6c, 0x6c, 0x65, 0x6c, 0x69, 0x73, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x11, 0x74, + 0x61, 0x72, 0x67, 0x65, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6c, 0x6c, 0x65, 0x6c, 0x69, 0x73, 0x6d, + 0x12, 0x30, 0x0a, 0x13, 0x64, 0x65, 0x61, 0x64, 0x4c, 0x65, 0x74, 0x74, 0x65, 0x72, 0x54, 0x61, + 0x62, 0x6c, 0x65, 0x53, 0x70, 0x65, 0x63, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x64, + 0x65, 0x61, 0x64, 0x4c, 0x65, 0x74, 0x74, 0x65, 0x72, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x70, + 0x65, 0x63, 0x22, 0xc7, 0x03, 0x0a, 0x1b, 0x44, 0x61, 0x74, 0x61, 0x66, 0x6c, 0x6f, 0x77, 0x52, + 0x75, 0x6e, 0x6e, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x16, 0x0a, 0x06, + 0x72, 0x65, 0x67, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x72, 0x65, + 0x67, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x7a, 0x6f, 0x6e, 0x65, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x7a, 0x6f, 0x6e, 0x65, 0x12, 0x26, 0x0a, 0x0e, 0x73, 0x65, 0x72, 0x76, + 0x69, 0x63, 0x65, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x0e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, + 0x12, 0x18, 0x0a, 0x07, 0x6e, 0x65, 0x74, 0x77, 0x6f, 0x72, 0x6b, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x07, 0x6e, 0x65, 0x74, 0x77, 0x6f, 0x72, 0x6b, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x75, + 0x62, 0x6e, 0x65, 0x74, 0x77, 0x6f, 0x72, 0x6b, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, + 0x73, 0x75, 0x62, 0x6e, 0x65, 0x74, 0x77, 0x6f, 0x72, 0x6b, 0x12, 0x2c, 0x0a, 0x11, 0x77, 0x6f, + 0x72, 0x6b, 0x65, 0x72, 0x4d, 0x61, 0x63, 0x68, 0x69, 0x6e, 0x65, 0x54, 0x79, 0x70, 0x65, 0x18, + 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4d, 0x61, 0x63, + 0x68, 0x69, 0x6e, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x32, 0x0a, 0x14, 0x61, 0x75, 0x74, 0x6f, + 0x73, 0x63, 0x61, 0x6c, 0x69, 0x6e, 0x67, 0x41, 0x6c, 0x67, 0x6f, 0x72, 0x69, 0x74, 0x68, 0x6d, + 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x14, 0x61, 0x75, 0x74, 0x6f, 0x73, 0x63, 0x61, 0x6c, + 0x69, 0x6e, 0x67, 0x41, 0x6c, 0x67, 0x6f, 0x72, 0x69, 0x74, 0x68, 0x6d, 0x12, 0x22, 0x0a, 0x0c, + 0x75, 0x73, 0x65, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x49, 0x70, 0x73, 0x18, 0x09, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x0c, 0x75, 0x73, 0x65, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x49, 0x70, 0x73, + 0x12, 0x22, 0x0a, 0x0c, 0x74, 0x65, 0x6d, 0x70, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x74, 0x65, 0x6d, 0x70, 0x4c, 0x6f, 0x63, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x24, 0x0a, 0x0d, 0x6d, 0x61, 0x78, 0x4e, 0x75, 0x6d, 0x57, 0x6f, + 0x72, 0x6b, 0x65, 0x72, 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0d, 0x6d, 0x61, 0x78, + 0x4e, 0x75, 0x6d, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x73, 0x12, 0x30, 0x0a, 0x13, 0x64, 0x65, + 0x61, 0x64, 0x4c, 0x65, 0x74, 0x74, 0x65, 0x72, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x70, 0x65, + 0x63, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x64, 0x65, 0x61, 0x64, 0x4c, 0x65, 0x74, + 0x74, 0x65, 0x72, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x53, 0x70, 
0x65, 0x63, 0x42, 0x54, 0x0a, 0x10, + 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, 0x6f, 0x72, 0x65, + 0x42, 0x0b, 0x52, 0x75, 0x6e, 0x6e, 0x65, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x33, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, + 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, + 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, + 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_feast_core_Runner_proto_rawDescOnce sync.Once + file_feast_core_Runner_proto_rawDescData = file_feast_core_Runner_proto_rawDesc +) + +func file_feast_core_Runner_proto_rawDescGZIP() []byte { + file_feast_core_Runner_proto_rawDescOnce.Do(func() { + file_feast_core_Runner_proto_rawDescData = protoimpl.X.CompressGZIP(file_feast_core_Runner_proto_rawDescData) + }) + return file_feast_core_Runner_proto_rawDescData +} + +var file_feast_core_Runner_proto_msgTypes = make([]protoimpl.MessageInfo, 2) +var file_feast_core_Runner_proto_goTypes = []interface{}{ + (*DirectRunnerConfigOptions)(nil), // 0: feast.core.DirectRunnerConfigOptions + (*DataflowRunnerConfigOptions)(nil), // 1: feast.core.DataflowRunnerConfigOptions +} +var file_feast_core_Runner_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_feast_core_Runner_proto_init() } +func file_feast_core_Runner_proto_init() { + if File_feast_core_Runner_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_feast_core_Runner_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DirectRunnerConfigOptions); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_feast_core_Runner_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DataflowRunnerConfigOptions); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_feast_core_Runner_proto_rawDesc, + NumEnums: 0, + NumMessages: 2, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_feast_core_Runner_proto_goTypes, + DependencyIndexes: file_feast_core_Runner_proto_depIdxs, + MessageInfos: file_feast_core_Runner_proto_msgTypes, + }.Build() + File_feast_core_Runner_proto = out.File + file_feast_core_Runner_proto_rawDesc = nil + file_feast_core_Runner_proto_goTypes = nil + file_feast_core_Runner_proto_depIdxs = nil +} diff --git a/sdk/go/protos/feast/core/Source.pb.go b/sdk/go/protos/feast/core/Source.pb.go index 30bd236272..2aa0c3f12e 100644 --- a/sdk/go/protos/feast/core/Source.pb.go +++ b/sdk/go/protos/feast/core/Source.pb.go @@ -16,7 +16,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
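// ---------------------------------------------------------------------------
// Editor's note: the sketch below is illustrative and not part of the
// generated diff. It shows how a caller of the Go SDK might populate and
// marshal the new runner option messages from Runner.pb.go above. The import
// path matches the go_package option in the descriptor; all concrete values
// (project, region, bucket, table spec) are made-up assumptions.
// ---------------------------------------------------------------------------
package main

import (
	"fmt"
	"log"

	"github.com/golang/protobuf/proto"

	core "github.com/feast-dev/feast/sdk/go/protos/feast/core"
)

func main() {
	dataflow := &core.DataflowRunnerConfigOptions{
		Project:             "my-gcp-project",                  // hypothetical project id
		Region:              "us-central1",                     // hypothetical Dataflow region
		TempLocation:        "gs://my-bucket/tmp",              // hypothetical GCS temp path
		MaxNumWorkers:       4,                                 // cap the worker pool
		UsePublicIps:        false,                             // keep workers on private IPs
		DeadLetterTableSpec: "my-gcp-project:feast.deadletter", // hypothetical table spec
	}
	// Generated messages implement proto.Message, so they round-trip through
	// the standard wire format.
	buf, err := proto.Marshal(dataflow)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("DataflowRunnerConfigOptions encodes to %d bytes\n", len(buf))
}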
// versions: -// protoc-gen-go v1.21.0 +// protoc-gen-go v1.24.0 // protoc v3.10.0 // source: feast/core/Source.proto @@ -169,10 +169,14 @@ type KafkaSourceConfig struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - // - bootstrapServers: [comma delimited value of host[:port]] + // Comma separated list of Kafka bootstrap servers (host[:port]). Used for feature sets without a defined source. BootstrapServers string `protobuf:"bytes,1,opt,name=bootstrap_servers,json=bootstrapServers,proto3" json:"bootstrap_servers,omitempty"` - // - topics: [Kafka topic name. This value is provisioned by core and should not be set by the user.] + // Kafka topic to use for feature sets without user-defined topics Topic string `protobuf:"bytes,2,opt,name=topic,proto3" json:"topic,omitempty"` + // Number of Kafka partitions to use for managed feature stream. + Partitions int32 `protobuf:"varint,3,opt,name=partitions,proto3" json:"partitions,omitempty"` + // Defines the replication factor (number of copies) of the managed feature stream Kafka topic. + ReplicationFactor int32 `protobuf:"varint,4,opt,name=replicationFactor,proto3" json:"replicationFactor,omitempty"` } func (x *KafkaSourceConfig) Reset() { @@ -221,6 +225,20 @@ func (x *KafkaSourceConfig) GetTopic() string { return "" } +func (x *KafkaSourceConfig) GetPartitions() int32 { + if x != nil { + return x.Partitions + } + return 0 +} + +func (x *KafkaSourceConfig) GetReplicationFactor() int32 { + if x != nil { + return x.ReplicationFactor + } + return 0 +} + var File_feast_core_Source_proto protoreflect.FileDescriptor var file_feast_core_Source_proto_rawDesc = []byte{ @@ -235,20 +253,26 @@ var file_feast_core_Source_proto_rawDesc = []byte{ 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x4b, 0x61, 0x66, 0x6b, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x11, 0x6b, 0x61, 0x66, 0x6b, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x42, 0x0f, 0x0a, - 0x0d, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x56, - 0x0a, 0x11, 0x4b, 0x61, 0x66, 0x6b, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x12, 0x2b, 0x0a, 0x11, 0x62, 0x6f, 0x6f, 0x74, 0x73, 0x74, 0x72, 0x61, 0x70, - 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, - 0x62, 0x6f, 0x6f, 0x74, 0x73, 0x74, 0x72, 0x61, 0x70, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, - 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, 0x70, 0x69, 0x63, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x74, 0x6f, 0x70, 0x69, 0x63, 0x2a, 0x24, 0x0a, 0x0a, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x54, 0x79, 0x70, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, - 0x00, 0x12, 0x09, 0x0a, 0x05, 0x4b, 0x41, 0x46, 0x4b, 0x41, 0x10, 0x01, 0x42, 0x4a, 0x0a, 0x0a, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, 0x0b, 0x53, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, - 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6a, 0x65, 0x6b, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, - 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x0d, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0xa4, + 0x01, 0x0a, 0x11, 0x4b, 0x61, 0x66, 0x6b, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f,
+ 0x6e, 0x66, 0x69, 0x67, 0x12, 0x2b, 0x0a, 0x11, 0x62, 0x6f, 0x6f, 0x74, 0x73, 0x74, 0x72, 0x61, + 0x70, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x10, 0x62, 0x6f, 0x6f, 0x74, 0x73, 0x74, 0x72, 0x61, 0x70, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, + 0x73, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, 0x70, 0x69, 0x63, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x05, 0x74, 0x6f, 0x70, 0x69, 0x63, 0x12, 0x1e, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x74, 0x69, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x70, 0x61, 0x72, + 0x74, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x2c, 0x0a, 0x11, 0x72, 0x65, 0x70, 0x6c, 0x69, + 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x05, 0x52, 0x11, 0x72, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x46, + 0x61, 0x63, 0x74, 0x6f, 0x72, 0x2a, 0x24, 0x0a, 0x0a, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, + 0x79, 0x70, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, + 0x12, 0x09, 0x0a, 0x05, 0x4b, 0x41, 0x46, 0x4b, 0x41, 0x10, 0x01, 0x42, 0x54, 0x0a, 0x10, 0x66, + 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, + 0x0b, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x33, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, + 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, + 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/sdk/go/protos/feast/core/Store.pb.go b/sdk/go/protos/feast/core/Store.pb.go index 55c699d788..a36c6afc6a 100644 --- a/sdk/go/protos/feast/core/Store.pb.go +++ b/sdk/go/protos/feast/core/Store.pb.go @@ -16,7 +16,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.21.0 +// protoc-gen-go v1.24.0 // protoc v3.10.0 // source: feast/core/Store.proto @@ -58,12 +58,7 @@ const ( Store_REDIS Store_StoreType = 1 // BigQuery stores a FeatureRow element as a row in a BigQuery table. // - // Table name is derived from the feature set name and version as: - // [feature_set_name]_v[feature_set_version] - // - // For example: - // A feature row for feature set "driver" and version "1" will be written - // to table "driver_v1". + // Table name is the same as the feature set name. // // The entities and features in a FeatureSetSpec corresponds to the // fields in the BigQuery table (these make up the BigQuery schema). @@ -79,16 +74,12 @@ const ( // ====================|==================|================================ // - event_timestamp | TIMESTAMP | event time of the FeatureRow // - created_timestamp | TIMESTAMP | processing time of the ingestion of the FeatureRow + // - ingestion_id | STRING | unique id identifying groups of rows that have been ingested together // - job_id | STRING | identifier for the job that writes the FeatureRow to the corresponding BigQuery table // // BigQuery table created will be partitioned by the field "event_timestamp" // of the FeatureRow (https://cloud.google.com/bigquery/docs/partitioned-tables).
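// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the generated diff. It
// wires the extended KafkaSourceConfig from Source.pb.go above into a Source
// through its config oneof, assuming the usual protoc-gen-go naming for the
// oneof wrapper (Source_KafkaSourceConfig). Broker, topic, and sizing values
// are assumptions, not defaults mandated by the proto.
// ---------------------------------------------------------------------------
package main

import core "github.com/feast-dev/feast/sdk/go/protos/feast/core"

// newKafkaSource builds a hypothetical Kafka-backed Source using the two new
// fields (Partitions, ReplicationFactor) added in this diff.
func newKafkaSource() *core.Source {
	return &core.Source{
		Type: core.SourceType_KAFKA,
		SourceConfig: &core.Source_KafkaSourceConfig{
			KafkaSourceConfig: &core.KafkaSourceConfig{
				BootstrapServers:  "broker-1:9092,broker-2:9092", // comma separated host[:port]
				Topic:             "feast-features",              // hypothetical topic name
				Partitions:        3,                             // new field in this diff
				ReplicationFactor: 2,                             // new field in this diff
			},
		},
	}
}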
// - // Since newer version of feature set can introduce breaking, non backward- - // compatible BigQuery schema updates, incrementing the version of a - // feature set will result in the creation of a new empty BigQuery table - // with the new schema. - // // The following table shows how ValueType in Feast is mapped to // BigQuery Standard SQL data types // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types): @@ -113,7 +104,8 @@ const ( // Store_BIGQUERY Store_StoreType = 2 // Unsupported in Feast 0.3 - Store_CASSANDRA Store_StoreType = 3 + Store_CASSANDRA Store_StoreType = 3 + Store_REDIS_CLUSTER Store_StoreType = 4 ) // Enum value maps for Store_StoreType. @@ -123,12 +115,14 @@ var ( 1: "REDIS", 2: "BIGQUERY", 3: "CASSANDRA", + 4: "REDIS_CLUSTER", } Store_StoreType_value = map[string]int32{ - "INVALID": 0, - "REDIS": 1, - "BIGQUERY": 2, - "CASSANDRA": 3, + "INVALID": 0, + "REDIS": 1, + "BIGQUERY": 2, + "CASSANDRA": 3, + "REDIS_CLUSTER": 4, } ) @@ -184,6 +178,7 @@ type Store struct { // *Store_RedisConfig_ // *Store_BigqueryConfig // *Store_CassandraConfig_ + // *Store_RedisClusterConfig_ Config isStore_Config `protobuf_oneof:"config"` } @@ -268,6 +263,13 @@ func (x *Store) GetCassandraConfig() *Store_CassandraConfig { return nil } +func (x *Store) GetRedisClusterConfig() *Store_RedisClusterConfig { + if x, ok := x.GetConfig().(*Store_RedisClusterConfig_); ok { + return x.RedisClusterConfig + } + return nil +} + type isStore_Config interface { isStore_Config() } @@ -284,12 +286,18 @@ type Store_CassandraConfig_ struct { CassandraConfig *Store_CassandraConfig `protobuf:"bytes,13,opt,name=cassandra_config,json=cassandraConfig,proto3,oneof"` } +type Store_RedisClusterConfig_ struct { + RedisClusterConfig *Store_RedisClusterConfig `protobuf:"bytes,14,opt,name=redis_cluster_config,json=redisClusterConfig,proto3,oneof"` +} + func (*Store_RedisConfig_) isStore_Config() {} func (*Store_BigqueryConfig) isStore_Config() {} func (*Store_CassandraConfig_) isStore_Config() {} +func (*Store_RedisClusterConfig_) isStore_Config() {} + type Store_RedisConfig struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -369,8 +377,11 @@ type Store_BigQueryConfig struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` - DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + StagingLocation string `protobuf:"bytes,3,opt,name=staging_location,json=stagingLocation,proto3" json:"staging_location,omitempty"` + InitialRetryDelaySeconds int32 `protobuf:"varint,4,opt,name=initial_retry_delay_seconds,json=initialRetryDelaySeconds,proto3" json:"initial_retry_delay_seconds,omitempty"` + TotalTimeoutSeconds int32 `protobuf:"varint,5,opt,name=total_timeout_seconds,json=totalTimeoutSeconds,proto3" json:"total_timeout_seconds,omitempty"` } func (x *Store_BigQueryConfig) Reset() { @@ -419,6 +430,27 @@ func (x *Store_BigQueryConfig) GetDatasetId() string { return "" } +func (x *Store_BigQueryConfig) GetStagingLocation() string { + if x != nil { + return x.StagingLocation + } + return "" +} + +func (x *Store_BigQueryConfig) GetInitialRetryDelaySeconds() int32 { + if x != nil { + return 
x.InitialRetryDelaySeconds + } + return 0 +} + +func (x *Store_BigQueryConfig) GetTotalTimeoutSeconds() int32 { + if x != nil { + return x.TotalTimeoutSeconds + } + return 0 +} + type Store_CassandraConfig struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -474,6 +506,70 @@ func (x *Store_CassandraConfig) GetPort() int32 { return 0 } +type Store_RedisClusterConfig struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Comma separated list of Redis URIs for all the nodes in the Redis Cluster, e.g. host1:6379,host2:6379 + ConnectionString string `protobuf:"bytes,1,opt,name=connection_string,json=connectionString,proto3" json:"connection_string,omitempty"` + InitialBackoffMs int32 `protobuf:"varint,2,opt,name=initial_backoff_ms,json=initialBackoffMs,proto3" json:"initial_backoff_ms,omitempty"` + MaxRetries int32 `protobuf:"varint,3,opt,name=max_retries,json=maxRetries,proto3" json:"max_retries,omitempty"` +} + +func (x *Store_RedisClusterConfig) Reset() { + *x = Store_RedisClusterConfig{} + if protoimpl.UnsafeEnabled { + mi := &file_feast_core_Store_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Store_RedisClusterConfig) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Store_RedisClusterConfig) ProtoMessage() {} + +func (x *Store_RedisClusterConfig) ProtoReflect() protoreflect.Message { + mi := &file_feast_core_Store_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Store_RedisClusterConfig.ProtoReflect.Descriptor instead. +func (*Store_RedisClusterConfig) Descriptor() ([]byte, []int) { + return file_feast_core_Store_proto_rawDescGZIP(), []int{0, 3} +} + +func (x *Store_RedisClusterConfig) GetConnectionString() string { + if x != nil { + return x.ConnectionString + } + return "" +} + +func (x *Store_RedisClusterConfig) GetInitialBackoffMs() int32 { + if x != nil { + return x.InitialBackoffMs + } + return 0 +} + +func (x *Store_RedisClusterConfig) GetMaxRetries() int32 { + if x != nil { + return x.MaxRetries + } + return 0 +} + type Store_Subscription struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -494,19 +590,12 @@ type Store_Subscription struct { // - my-feature-set* can be used to match all features prefixed by "my-feature-set" // - my-feature-set-6 can be used to select a single feature set Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - // Versions of the given feature sets that will be returned. - // Valid options for version: - // "latest": only the latest version is returned. - // "*": Subscribe to all versions - // [version number]: pin to a specific version. Project and feature set name must be - // explicitly defined if a specific version is pinned.
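// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the generated diff. It
// shows the new REDIS_CLUSTER store type wired through the Store config oneof
// added above; the connection string follows the comma separated host:port
// format documented in Store_RedisClusterConfig, and the store name and retry
// values are made-up assumptions.
// ---------------------------------------------------------------------------
package main

import core "github.com/feast-dev/feast/sdk/go/protos/feast/core"

// newRedisClusterStore builds a hypothetical Store backed by a Redis Cluster.
func newRedisClusterStore() *core.Store {
	return &core.Store{
		Name: "online-cluster", // hypothetical store name
		Type: core.Store_REDIS_CLUSTER,
		Config: &core.Store_RedisClusterConfig_{
			RedisClusterConfig: &core.Store_RedisClusterConfig{
				ConnectionString: "host1:6379,host2:6379",
				InitialBackoffMs: 100, // hypothetical backoff before the first retry
				MaxRetries:       3,   // hypothetical retry cap
			},
		},
	}
}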
- Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` } func (x *Store_Subscription) Reset() { *x = Store_Subscription{} if protoimpl.UnsafeEnabled { - mi := &file_feast_core_Store_proto_msgTypes[4] + mi := &file_feast_core_Store_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -519,7 +608,7 @@ func (x *Store_Subscription) String() string { func (*Store_Subscription) ProtoMessage() {} func (x *Store_Subscription) ProtoReflect() protoreflect.Message { - mi := &file_feast_core_Store_proto_msgTypes[4] + mi := &file_feast_core_Store_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -532,7 +621,7 @@ func (x *Store_Subscription) ProtoReflect() protoreflect.Message { // Deprecated: Use Store_Subscription.ProtoReflect.Descriptor instead. func (*Store_Subscription) Descriptor() ([]byte, []int) { - return file_feast_core_Store_proto_rawDescGZIP(), []int{0, 3} + return file_feast_core_Store_proto_rawDescGZIP(), []int{0, 4} } func (x *Store_Subscription) GetProject() string { @@ -549,19 +638,12 @@ func (x *Store_Subscription) GetName() string { return "" } -func (x *Store_Subscription) GetVersion() string { - if x != nil { - return x.Version - } - return "" -} - var File_feast_core_Store_proto protoreflect.FileDescriptor var file_feast_core_Store_proto_rawDesc = []byte{ 0x0a, 0x16, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x63, 0x6f, 0x72, 0x65, 0x22, 0xa9, 0x06, 0x0a, 0x05, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x12, 0x12, + 0x63, 0x6f, 0x72, 0x65, 0x22, 0xb4, 0x09, 0x0a, 0x05, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x2f, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1b, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, @@ -584,40 +666,65 @@ var file_feast_core_Store_proto_rawDesc = []byte{ 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x43, 0x61, 0x73, 0x73, 0x61, 0x6e, 0x64, 0x72, 0x61, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x0f, 0x63, 0x61, 0x73, 0x73, 0x61, - 0x6e, 0x64, 0x72, 0x61, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x84, 0x01, 0x0a, 0x0b, 0x52, - 0x65, 0x64, 0x69, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x6f, - 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x6f, 0x73, 0x74, 0x12, 0x12, - 0x0a, 0x04, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x6f, - 0x72, 0x74, 0x12, 0x2c, 0x0a, 0x12, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x62, 0x61, - 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x5f, 0x6d, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x10, - 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x42, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x4d, 0x73, - 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x61, 0x78, 0x5f, 0x72, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x6d, 0x61, 0x78, 0x52, 0x65, 0x74, 0x72, 0x69, 0x65, - 0x73, 0x1a, 0x4e, 0x0a, 0x0e, 0x42, 0x69, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x43, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x72, 0x6f, 0x6a, 0x65, 
0x63, 0x74, 0x5f, 0x69, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, - 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x5f, 0x69, 0x64, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x49, - 0x64, 0x1a, 0x39, 0x0a, 0x0f, 0x43, 0x61, 0x73, 0x73, 0x61, 0x6e, 0x64, 0x72, 0x61, 0x43, 0x6f, + 0x6e, 0x64, 0x72, 0x61, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x58, 0x0a, 0x14, 0x72, 0x65, + 0x64, 0x69, 0x73, 0x5f, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x63, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, + 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x64, 0x69, + 0x73, 0x43, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, + 0x52, 0x12, 0x72, 0x65, 0x64, 0x69, 0x73, 0x43, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x43, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x84, 0x01, 0x0a, 0x0b, 0x52, 0x65, 0x64, 0x69, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x6f, 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x6f, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x6f, 0x72, 0x74, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x6f, 0x72, 0x74, 0x1a, 0x56, 0x0a, 0x0c, - 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x18, 0x0a, 0x07, - 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, - 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, - 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x40, 0x0a, 0x09, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x54, 0x79, 0x70, - 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x09, - 0x0a, 0x05, 0x52, 0x45, 0x44, 0x49, 0x53, 0x10, 0x01, 0x12, 0x0c, 0x0a, 0x08, 0x42, 0x49, 0x47, - 0x51, 0x55, 0x45, 0x52, 0x59, 0x10, 0x02, 0x12, 0x0d, 0x0a, 0x09, 0x43, 0x41, 0x53, 0x53, 0x41, - 0x4e, 0x44, 0x52, 0x41, 0x10, 0x03, 0x42, 0x08, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x42, 0x49, 0x0a, 0x0a, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, 0x0a, - 0x53, 0x74, 0x6f, 0x72, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x2f, 0x67, 0x69, 0x74, 0x68, - 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6a, 0x65, 0x6b, 0x2f, 0x66, 0x65, 0x61, - 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, - 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x33, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x6f, 0x72, 0x74, 0x12, 0x2c, 0x0a, 0x12, + 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x62, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x5f, + 0x6d, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x10, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, + 0x6c, 0x42, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x4d, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x61, + 0x78, 0x5f, 0x72, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, + 0x0a, 0x6d, 0x61, 0x78, 0x52, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x1a, 0xec, 0x01, 0x0a, 0x0e, + 0x42, 0x69, 0x67, 0x51, 0x75, 0x65, 0x72, 0x79, 0x43, 0x6f, 0x6e, 0x66, 0x69, 
0x67, 0x12, 0x1d, + 0x0a, 0x0a, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x09, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x64, 0x12, 0x1d, 0x0a, + 0x0a, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x09, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x49, 0x64, 0x12, 0x29, 0x0a, 0x10, + 0x73, 0x74, 0x61, 0x67, 0x69, 0x6e, 0x67, 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x73, 0x74, 0x61, 0x67, 0x69, 0x6e, 0x67, 0x4c, + 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3d, 0x0a, 0x1b, 0x69, 0x6e, 0x69, 0x74, 0x69, + 0x61, 0x6c, 0x5f, 0x72, 0x65, 0x74, 0x72, 0x79, 0x5f, 0x64, 0x65, 0x6c, 0x61, 0x79, 0x5f, 0x73, + 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x18, 0x69, 0x6e, + 0x69, 0x74, 0x69, 0x61, 0x6c, 0x52, 0x65, 0x74, 0x72, 0x79, 0x44, 0x65, 0x6c, 0x61, 0x79, 0x53, + 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x12, 0x32, 0x0a, 0x15, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x5f, + 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x05, 0x52, 0x13, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x54, 0x69, 0x6d, 0x65, + 0x6f, 0x75, 0x74, 0x53, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x1a, 0x39, 0x0a, 0x0f, 0x43, 0x61, + 0x73, 0x73, 0x61, 0x6e, 0x64, 0x72, 0x61, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x12, 0x0a, + 0x04, 0x68, 0x6f, 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x6f, 0x73, + 0x74, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, + 0x04, 0x70, 0x6f, 0x72, 0x74, 0x1a, 0x90, 0x01, 0x0a, 0x12, 0x52, 0x65, 0x64, 0x69, 0x73, 0x43, + 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x2b, 0x0a, 0x11, + 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, + 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x2c, 0x0a, 0x12, 0x69, 0x6e, 0x69, + 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x62, 0x61, 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x5f, 0x6d, 0x73, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x10, 0x69, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x42, 0x61, + 0x63, 0x6b, 0x6f, 0x66, 0x66, 0x4d, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x61, 0x78, 0x5f, 0x72, + 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x6d, 0x61, + 0x78, 0x52, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x1a, 0x42, 0x0a, 0x0c, 0x53, 0x75, 0x62, 0x73, + 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x6a, + 0x65, 0x63, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, + 0x63, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x4a, 0x04, 0x08, 0x02, 0x10, 0x03, 0x22, 0x53, 0x0a, 0x09, + 0x53, 0x74, 0x6f, 0x72, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x49, 0x4e, 0x56, + 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x52, 0x45, 0x44, 0x49, 0x53, 0x10, + 0x01, 0x12, 0x0c, 0x0a, 0x08, 0x42, 0x49, 0x47, 0x51, 0x55, 0x45, 0x52, 0x59, 0x10, 0x02, 0x12, + 0x0d, 0x0a, 0x09, 0x43, 0x41, 0x53, 0x53, 0x41, 0x4e, 0x44, 0x52, 0x41, 0x10, 0x03, 0x12, 0x11, + 0x0a, 0x0d, 0x52, 0x45, 0x44, 0x49, 0x53, 0x5f, 0x43, 0x4c, 0x55, 0x53, 0x54, 0x45, 0x52, 0x10, + 0x04, 
0x42, 0x08, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x42, 0x53, 0x0a, 0x10, 0x66, + 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, + 0x0a, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x33, 0x67, 0x69, 0x74, + 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, 0x65, + 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x63, 0x6f, 0x72, 0x65, + 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -633,26 +740,28 @@ func file_feast_core_Store_proto_rawDescGZIP() []byte { } var file_feast_core_Store_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_feast_core_Store_proto_msgTypes = make([]protoimpl.MessageInfo, 5) +var file_feast_core_Store_proto_msgTypes = make([]protoimpl.MessageInfo, 6) var file_feast_core_Store_proto_goTypes = []interface{}{ - (Store_StoreType)(0), // 0: feast.core.Store.StoreType - (*Store)(nil), // 1: feast.core.Store - (*Store_RedisConfig)(nil), // 2: feast.core.Store.RedisConfig - (*Store_BigQueryConfig)(nil), // 3: feast.core.Store.BigQueryConfig - (*Store_CassandraConfig)(nil), // 4: feast.core.Store.CassandraConfig - (*Store_Subscription)(nil), // 5: feast.core.Store.Subscription + (Store_StoreType)(0), // 0: feast.core.Store.StoreType + (*Store)(nil), // 1: feast.core.Store + (*Store_RedisConfig)(nil), // 2: feast.core.Store.RedisConfig + (*Store_BigQueryConfig)(nil), // 3: feast.core.Store.BigQueryConfig + (*Store_CassandraConfig)(nil), // 4: feast.core.Store.CassandraConfig + (*Store_RedisClusterConfig)(nil), // 5: feast.core.Store.RedisClusterConfig + (*Store_Subscription)(nil), // 6: feast.core.Store.Subscription } var file_feast_core_Store_proto_depIdxs = []int32{ 0, // 0: feast.core.Store.type:type_name -> feast.core.Store.StoreType - 5, // 1: feast.core.Store.subscriptions:type_name -> feast.core.Store.Subscription + 6, // 1: feast.core.Store.subscriptions:type_name -> feast.core.Store.Subscription 2, // 2: feast.core.Store.redis_config:type_name -> feast.core.Store.RedisConfig 3, // 3: feast.core.Store.bigquery_config:type_name -> feast.core.Store.BigQueryConfig 4, // 4: feast.core.Store.cassandra_config:type_name -> feast.core.Store.CassandraConfig - 5, // [5:5] is the sub-list for method output_type - 5, // [5:5] is the sub-list for method input_type - 5, // [5:5] is the sub-list for extension type_name - 5, // [5:5] is the sub-list for extension extendee - 0, // [0:5] is the sub-list for field type_name + 5, // 5: feast.core.Store.redis_cluster_config:type_name -> feast.core.Store.RedisClusterConfig + 6, // [6:6] is the sub-list for method output_type + 6, // [6:6] is the sub-list for method input_type + 6, // [6:6] is the sub-list for extension type_name + 6, // [6:6] is the sub-list for extension extendee + 0, // [0:6] is the sub-list for field type_name } func init() { file_feast_core_Store_proto_init() } @@ -710,6 +819,18 @@ func file_feast_core_Store_proto_init() { } } file_feast_core_Store_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Store_RedisClusterConfig); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_feast_core_Store_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Store_Subscription); i { case 0: return &v.state @@ -726,6 
+847,7 @@ func file_feast_core_Store_proto_init() {
 (*Store_RedisConfig_)(nil),
 (*Store_BigqueryConfig)(nil),
 (*Store_CassandraConfig_)(nil),
+ (*Store_RedisClusterConfig_)(nil),
 }
 type x struct{}
 out := protoimpl.TypeBuilder{
@@ -733,7 +855,7 @@ func file_feast_core_Store_proto_init() {
 GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
 RawDescriptor: file_feast_core_Store_proto_rawDesc,
 NumEnums: 1,
- NumMessages: 5,
+ NumMessages: 6,
 NumExtensions: 0,
 NumServices: 0,
 },
diff --git a/sdk/go/protos/feast/serving/ServingService.pb.go b/sdk/go/protos/feast/serving/ServingService.pb.go
index 2485687d81..fc2748d5d8 100644
--- a/sdk/go/protos/feast/serving/ServingService.pb.go
+++ b/sdk/go/protos/feast/serving/ServingService.pb.go
@@ -15,7 +15,7 @@
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// protoc-gen-go v1.21.0
+// protoc-gen-go v1.24.0
 // protoc v3.10.0
 // source: feast/serving/ServingService.proto
@@ -23,9 +23,8 @@ package serving
 import (
 context "context"
- types "github.com/gojek/feast/sdk/go/protos/feast/types"
+ types "github.com/feast-dev/feast/sdk/go/protos/feast/types"
 proto "github.com/golang/protobuf/proto"
- duration "github.com/golang/protobuf/ptypes/duration"
 timestamp "github.com/golang/protobuf/ptypes/timestamp"
 grpc "google.golang.org/grpc"
 codes "google.golang.org/grpc/codes"
@@ -355,18 +354,14 @@ type FeatureReference struct {
 sizeCache protoimpl.SizeCache
 unknownFields protoimpl.UnknownFields
- // Project name
+ // Project name. This field is optional; if unspecified, it will default to 'default'.
 Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"`
 // Feature name
 Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
- // Feature version
- Version int32 `protobuf:"varint,3,opt,name=version,proto3" json:"version,omitempty"`
- // The features will be retrieved if:
- // entity_timestamp - max_age <= event_timestamp <= entity_timestamp
- //
- // If unspecified the default max_age specified in FeatureSetSpec will
- // be used.
- MaxAge *duration.Duration `protobuf:"bytes,4,opt,name=max_age,json=maxAge,proto3" json:"max_age,omitempty"`
+ // Feature set name specifying the feature set of this referenced feature. 
+ // This field is optional if the feature referenced is unique across the project,
+ // in which case the feature set would be automatically inferred
+ FeatureSet string `protobuf:"bytes,5,opt,name=feature_set,json=featureSet,proto3" json:"feature_set,omitempty"`
 }
 func (x *FeatureReference) Reset() {
@@ -415,18 +410,11 @@ func (x *FeatureReference) GetName() string {
 return ""
 }
-func (x *FeatureReference) GetVersion() int32 {
+func (x *FeatureReference) GetFeatureSet() string {
 if x != nil {
- return x.Version
+ return x.FeatureSet
 }
- return 0
-}
-
-func (x *FeatureReference) GetMaxAge() *duration.Duration {
- if x != nil {
- return x.MaxAge
- }
- return nil
+ return ""
 }
 type GetOnlineFeaturesRequest struct {
@@ -1080,8 +1068,6 @@ var file_feast_serving_ServingService_proto_rawDesc = []byte{
 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0d, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76,
 0x69, 0x6e, 0x67, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74,
 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70,
- 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f,
- 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70,
 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65,
 0x73, 0x2f, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x1c, 0x0a,
 0x1a, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67,
@@ -1095,166 +1081,165 @@ var file_feast_serving_ServingService_proto_rawDesc = []byte{
 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x30, 0x0a, 0x14, 0x6a, 0x6f,
 0x62, 0x5f, 0x73, 0x74, 0x61, 0x67, 0x69, 0x6e, 0x67, 0x5f, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69,
 0x6f, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x6a, 0x6f, 0x62, 0x53, 0x74, 0x61,
- 0x67, 0x69, 0x6e, 0x67, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x8e, 0x01, 0x0a,
- 0x10, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63,
- 0x65, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x01, 0x20, 0x01,
- 0x28, 0x09, 0x52, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e,
- 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12,
- 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05,
- 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x32, 0x0a, 0x07, 0x6d, 0x61, 0x78,
- 0x5f, 0x61, 0x67, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f,
- 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72,
- 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x06, 0x6d, 0x61, 0x78, 0x41, 0x67, 0x65, 0x22, 0xe1, 0x03,
- 0x0a, 0x18, 0x47, 0x65, 0x74, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75,
- 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3b, 0x0a, 0x08, 0x66, 0x65,
- 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x66,
- 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x46, 0x65, 0x61,
- 0x74, 0x75, 0x72, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x08, 0x66,
- 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x12, 0x52, 0x0a, 0x0b, 0x65, 0x6e, 0x74, 0x69, 0x74,
- 0x79, 0x5f, 0x72, 0x6f, 0x77, 
0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x66, - 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, - 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x6f, 0x77, 0x52, - 0x0a, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x6f, 0x77, 0x73, 0x12, 0x39, 0x0a, 0x19, 0x6f, - 0x6d, 0x69, 0x74, 0x5f, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x5f, 0x69, 0x6e, 0x5f, - 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x16, - 0x6f, 0x6d, 0x69, 0x74, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x49, 0x6e, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x1a, 0xf8, 0x01, 0x0a, 0x09, 0x45, 0x6e, 0x74, 0x69, 0x74, - 0x79, 0x52, 0x6f, 0x77, 0x12, 0x45, 0x0a, 0x10, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x5f, 0x74, - 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, - 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0f, 0x65, 0x6e, 0x74, 0x69, - 0x74, 0x79, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x55, 0x0a, 0x06, 0x66, - 0x69, 0x65, 0x6c, 0x64, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3d, 0x2e, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, 0x4f, - 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x2e, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x6f, 0x77, 0x2e, 0x46, - 0x69, 0x65, 0x6c, 0x64, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x66, 0x69, 0x65, 0x6c, - 0x64, 0x73, 0x1a, 0x4d, 0x0a, 0x0b, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, - 0x6b, 0x65, 0x79, 0x12, 0x28, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, - 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, - 0x01, 0x22, 0x9b, 0x01, 0x0a, 0x17, 0x47, 0x65, 0x74, 0x42, 0x61, 0x74, 0x63, 0x68, 0x46, 0x65, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3b, 0x0a, - 0x08, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x1f, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, + 0x67, 0x69, 0x6e, 0x67, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x6d, 0x0a, 0x10, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, - 0x52, 0x08, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x12, 0x43, 0x0a, 0x0e, 0x64, 0x61, - 0x74, 0x61, 0x73, 0x65, 0x74, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, - 0x6e, 0x67, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x52, 0x0d, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, - 0xad, 0x02, 0x0a, 0x19, 0x47, 0x65, 0x74, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 
0x6f, 0x6e, 0x73, 0x65, 0x12, 0x57, 0x0a, - 0x0c, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, - 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, - 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x46, 0x69, - 0x65, 0x6c, 0x64, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x52, 0x0b, 0x66, 0x69, 0x65, 0x6c, 0x64, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x1a, 0xb6, 0x01, 0x0a, 0x0b, 0x46, 0x69, 0x65, 0x6c, 0x64, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x58, 0x0a, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x40, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, - 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, - 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x46, 0x69, 0x65, + 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x07, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, + 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1f, + 0x0a, 0x0b, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x18, 0x05, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0a, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x4a, + 0x04, 0x08, 0x03, 0x10, 0x04, 0x4a, 0x04, 0x08, 0x04, 0x10, 0x05, 0x22, 0xe1, 0x03, 0x0a, 0x18, + 0x47, 0x65, 0x74, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, + 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3b, 0x0a, 0x08, 0x66, 0x65, 0x61, 0x74, + 0x75, 0x72, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x66, 0x65, 0x61, + 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, + 0x72, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x08, 0x66, 0x65, 0x61, + 0x74, 0x75, 0x72, 0x65, 0x73, 0x12, 0x52, 0x0a, 0x0b, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x5f, + 0x72, 0x6f, 0x77, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x66, 0x65, 0x61, + 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, 0x4f, 0x6e, + 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x2e, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x6f, 0x77, 0x52, 0x0a, 0x65, + 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x6f, 0x77, 0x73, 0x12, 0x39, 0x0a, 0x19, 0x6f, 0x6d, 0x69, + 0x74, 0x5f, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x5f, 0x69, 0x6e, 0x5f, 0x72, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x16, 0x6f, 0x6d, + 0x69, 0x74, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x49, 0x6e, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x1a, 0xf8, 0x01, 0x0a, 0x09, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, + 0x6f, 0x77, 0x12, 0x45, 0x0a, 0x10, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x5f, 0x74, 0x69, 0x6d, + 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0f, 0x65, 0x6e, 0x74, 
0x69, 0x74, 0x79, + 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x55, 0x0a, 0x06, 0x66, 0x69, 0x65, + 0x6c, 0x64, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3d, 0x2e, 0x66, 0x65, 0x61, 0x73, + 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, 0x4f, 0x6e, 0x6c, + 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x2e, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x6f, 0x77, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x1a, 0x4d, 0x0a, 0x0b, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x28, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, - 0x40, 0x0a, 0x18, 0x47, 0x65, 0x74, 0x42, 0x61, 0x74, 0x63, 0x68, 0x46, 0x65, 0x61, 0x74, 0x75, - 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6a, - 0x6f, 0x62, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, - 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x4a, 0x6f, 0x62, 0x52, 0x03, 0x6a, 0x6f, - 0x62, 0x22, 0x35, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x24, 0x0a, 0x03, 0x6a, 0x6f, 0x62, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x12, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, - 0x4a, 0x6f, 0x62, 0x52, 0x03, 0x6a, 0x6f, 0x62, 0x22, 0x36, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x4a, - 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6a, 0x6f, - 0x62, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x4a, 0x6f, 0x62, 0x52, 0x03, 0x6a, 0x6f, 0x62, - 0x22, 0xe2, 0x01, 0x0a, 0x03, 0x4a, 0x6f, 0x62, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x2a, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, - 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x4a, 0x6f, 0x62, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, - 0x74, 0x79, 0x70, 0x65, 0x12, 0x30, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x0e, 0x32, 0x18, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, - 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x4a, 0x6f, 0x62, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, - 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x1b, 0x0a, 0x09, - 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x75, 0x72, 0x69, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, - 0x08, 0x66, 0x69, 0x6c, 0x65, 0x55, 0x72, 0x69, 0x73, 0x12, 0x3a, 0x0a, 0x0b, 0x64, 0x61, 0x74, - 0x61, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x19, - 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x44, - 0x61, 0x74, 0x61, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x52, 0x0a, 0x64, 0x61, 0x74, 0x61, 0x46, - 0x6f, 0x72, 0x6d, 
0x61, 0x74, 0x22, 0xd4, 0x01, 0x0a, 0x0d, 0x44, 0x61, 0x74, 0x61, 0x73, 0x65, - 0x74, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x4a, 0x0a, 0x0b, 0x66, 0x69, 0x6c, 0x65, 0x5f, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x66, + 0x9b, 0x01, 0x0a, 0x17, 0x47, 0x65, 0x74, 0x42, 0x61, 0x74, 0x63, 0x68, 0x46, 0x65, 0x61, 0x74, + 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3b, 0x0a, 0x08, 0x66, + 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, + 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x46, 0x65, + 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x65, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x52, 0x08, + 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x12, 0x43, 0x0a, 0x0e, 0x64, 0x61, 0x74, 0x61, + 0x73, 0x65, 0x74, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1c, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, + 0x2e, 0x44, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x0d, + 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, 0xad, 0x02, + 0x0a, 0x19, 0x47, 0x65, 0x74, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, + 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x57, 0x0a, 0x0c, 0x66, + 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x34, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, + 0x67, 0x2e, 0x47, 0x65, 0x74, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, + 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x46, 0x69, 0x65, 0x6c, + 0x64, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x52, 0x0b, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x73, 0x1a, 0xb6, 0x01, 0x0a, 0x0b, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x73, 0x12, 0x58, 0x0a, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x40, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, + 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, + 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x46, + 0x69, 0x65, 0x6c, 0x64, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, + 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x1a, 0x4d, + 0x0a, 0x0b, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, + 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, + 0x28, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, + 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x40, 0x0a, + 0x18, 0x47, 0x65, 0x74, 0x42, 0x61, 0x74, 0x63, 0x68, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, + 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6a, 0x6f, 0x62, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, + 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x4a, 0x6f, 0x62, 0x52, 0x03, 0x6a, 0x6f, 0x62, 0x22, + 0x35, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x4a, 
0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x24, 0x0a, 0x03, 0x6a, 0x6f, 0x62, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, + 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x4a, 0x6f, + 0x62, 0x52, 0x03, 0x6a, 0x6f, 0x62, 0x22, 0x36, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x4a, 0x6f, 0x62, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6a, 0x6f, 0x62, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, + 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x4a, 0x6f, 0x62, 0x52, 0x03, 0x6a, 0x6f, 0x62, 0x22, 0xe2, + 0x01, 0x0a, 0x03, 0x4a, 0x6f, 0x62, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x2a, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, + 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x4a, 0x6f, 0x62, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, + 0x70, 0x65, 0x12, 0x30, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x0e, 0x32, 0x18, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, + 0x6e, 0x67, 0x2e, 0x4a, 0x6f, 0x62, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, + 0x61, 0x74, 0x75, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, + 0x6c, 0x65, 0x5f, 0x75, 0x72, 0x69, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x66, + 0x69, 0x6c, 0x65, 0x55, 0x72, 0x69, 0x73, 0x12, 0x3a, 0x0a, 0x0b, 0x64, 0x61, 0x74, 0x61, 0x5f, + 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x19, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x44, 0x61, 0x74, - 0x61, 0x73, 0x65, 0x74, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x53, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x48, 0x00, 0x52, 0x0a, 0x66, 0x69, 0x6c, 0x65, 0x53, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x1a, 0x65, 0x0a, 0x0a, 0x46, 0x69, 0x6c, 0x65, 0x53, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x75, 0x72, 0x69, 0x73, 0x18, 0x01, - 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x55, 0x72, 0x69, 0x73, 0x12, 0x3a, - 0x0a, 0x0b, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0e, 0x32, 0x19, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, - 0x69, 0x6e, 0x67, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x52, 0x0a, - 0x64, 0x61, 0x74, 0x61, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x42, 0x10, 0x0a, 0x0e, 0x64, 0x61, - 0x74, 0x61, 0x73, 0x65, 0x74, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2a, 0x6f, 0x0a, 0x10, - 0x46, 0x65, 0x61, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x54, 0x79, 0x70, 0x65, - 0x12, 0x1e, 0x0a, 0x1a, 0x46, 0x45, 0x41, 0x53, 0x54, 0x5f, 0x53, 0x45, 0x52, 0x56, 0x49, 0x4e, - 0x47, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, - 0x12, 0x1d, 0x0a, 0x19, 0x46, 0x45, 0x41, 0x53, 0x54, 0x5f, 0x53, 0x45, 0x52, 0x56, 0x49, 0x4e, - 0x47, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4f, 0x4e, 0x4c, 0x49, 0x4e, 0x45, 0x10, 0x01, 0x12, - 0x1c, 0x0a, 0x18, 0x46, 0x45, 0x41, 0x53, 0x54, 0x5f, 0x53, 0x45, 0x52, 0x56, 0x49, 0x4e, 0x47, - 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x42, 0x41, 0x54, 0x43, 0x48, 
0x10, 0x02, 0x2a, 0x36, 0x0a, - 0x07, 0x4a, 0x6f, 0x62, 0x54, 0x79, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x10, 0x4a, 0x4f, 0x42, 0x5f, - 0x54, 0x59, 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x15, - 0x0a, 0x11, 0x4a, 0x4f, 0x42, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x44, 0x4f, 0x57, 0x4e, 0x4c, - 0x4f, 0x41, 0x44, 0x10, 0x01, 0x2a, 0x68, 0x0a, 0x09, 0x4a, 0x6f, 0x62, 0x53, 0x74, 0x61, 0x74, - 0x75, 0x73, 0x12, 0x16, 0x0a, 0x12, 0x4a, 0x4f, 0x42, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, - 0x5f, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x16, 0x0a, 0x12, 0x4a, 0x4f, - 0x42, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x50, 0x45, 0x4e, 0x44, 0x49, 0x4e, 0x47, - 0x10, 0x01, 0x12, 0x16, 0x0a, 0x12, 0x4a, 0x4f, 0x42, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, - 0x5f, 0x52, 0x55, 0x4e, 0x4e, 0x49, 0x4e, 0x47, 0x10, 0x02, 0x12, 0x13, 0x0a, 0x0f, 0x4a, 0x4f, - 0x42, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x44, 0x4f, 0x4e, 0x45, 0x10, 0x03, 0x2a, - 0x3b, 0x0a, 0x0a, 0x44, 0x61, 0x74, 0x61, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x17, 0x0a, - 0x13, 0x44, 0x41, 0x54, 0x41, 0x5f, 0x46, 0x4f, 0x52, 0x4d, 0x41, 0x54, 0x5f, 0x49, 0x4e, 0x56, - 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x14, 0x0a, 0x10, 0x44, 0x41, 0x54, 0x41, 0x5f, 0x46, - 0x4f, 0x52, 0x4d, 0x41, 0x54, 0x5f, 0x41, 0x56, 0x52, 0x4f, 0x10, 0x01, 0x32, 0x92, 0x03, 0x0a, - 0x0e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, - 0x6c, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, - 0x6e, 0x67, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x29, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, - 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, 0x53, - 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x2a, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, - 0x67, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x6e, - 0x67, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, - 0x11, 0x47, 0x65, 0x74, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x73, 0x12, 0x27, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, - 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x28, 0x2e, 0x66, 0x65, - 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, 0x4f, - 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x63, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x42, 0x61, 0x74, 0x63, - 0x68, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x12, 0x26, 0x2e, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, 0x42, 0x61, 0x74, - 0x63, 0x68, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x27, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, - 0x67, 0x2e, 0x47, 0x65, 0x74, 0x42, 0x61, 0x74, 0x63, 0x68, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, - 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x45, 0x0a, 0x06, 0x47, 0x65, - 0x74, 0x4a, 0x6f, 0x62, 0x12, 0x1c, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 
0x72, - 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, - 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x42, 0x54, 0x0a, 0x0d, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, - 0x6e, 0x67, 0x42, 0x0f, 0x53, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x41, 0x50, 0x49, 0x50, 0x72, - 0x6f, 0x74, 0x6f, 0x5a, 0x32, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, - 0x67, 0x6f, 0x6a, 0x65, 0x6b, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, - 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, - 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x61, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x52, 0x0a, 0x64, 0x61, 0x74, 0x61, 0x46, 0x6f, 0x72, + 0x6d, 0x61, 0x74, 0x22, 0xd4, 0x01, 0x0a, 0x0d, 0x44, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x53, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x4a, 0x0a, 0x0b, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x73, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x66, 0x65, 0x61, + 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x73, + 0x65, 0x74, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x53, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x48, 0x00, 0x52, 0x0a, 0x66, 0x69, 0x6c, 0x65, 0x53, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x1a, 0x65, 0x0a, 0x0a, 0x46, 0x69, 0x6c, 0x65, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, + 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x75, 0x72, 0x69, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x55, 0x72, 0x69, 0x73, 0x12, 0x3a, 0x0a, 0x0b, + 0x64, 0x61, 0x74, 0x61, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0e, 0x32, 0x19, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, + 0x67, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x52, 0x0a, 0x64, 0x61, + 0x74, 0x61, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x42, 0x10, 0x0a, 0x0e, 0x64, 0x61, 0x74, 0x61, + 0x73, 0x65, 0x74, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2a, 0x6f, 0x0a, 0x10, 0x46, 0x65, + 0x61, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1e, + 0x0a, 0x1a, 0x46, 0x45, 0x41, 0x53, 0x54, 0x5f, 0x53, 0x45, 0x52, 0x56, 0x49, 0x4e, 0x47, 0x5f, + 0x54, 0x59, 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x1d, + 0x0a, 0x19, 0x46, 0x45, 0x41, 0x53, 0x54, 0x5f, 0x53, 0x45, 0x52, 0x56, 0x49, 0x4e, 0x47, 0x5f, + 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4f, 0x4e, 0x4c, 0x49, 0x4e, 0x45, 0x10, 0x01, 0x12, 0x1c, 0x0a, + 0x18, 0x46, 0x45, 0x41, 0x53, 0x54, 0x5f, 0x53, 0x45, 0x52, 0x56, 0x49, 0x4e, 0x47, 0x5f, 0x54, + 0x59, 0x50, 0x45, 0x5f, 0x42, 0x41, 0x54, 0x43, 0x48, 0x10, 0x02, 0x2a, 0x36, 0x0a, 0x07, 0x4a, + 0x6f, 0x62, 0x54, 0x79, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x10, 0x4a, 0x4f, 0x42, 0x5f, 0x54, 0x59, + 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x15, 0x0a, 0x11, + 0x4a, 0x4f, 0x42, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x44, 0x4f, 0x57, 0x4e, 0x4c, 0x4f, 0x41, + 0x44, 0x10, 0x01, 0x2a, 0x68, 0x0a, 0x09, 0x4a, 0x6f, 0x62, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, + 0x12, 0x16, 0x0a, 0x12, 0x4a, 0x4f, 0x42, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x49, + 0x4e, 0x56, 0x41, 0x4c, 
0x49, 0x44, 0x10, 0x00, 0x12, 0x16, 0x0a, 0x12, 0x4a, 0x4f, 0x42, 0x5f, + 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x50, 0x45, 0x4e, 0x44, 0x49, 0x4e, 0x47, 0x10, 0x01, + 0x12, 0x16, 0x0a, 0x12, 0x4a, 0x4f, 0x42, 0x5f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x52, + 0x55, 0x4e, 0x4e, 0x49, 0x4e, 0x47, 0x10, 0x02, 0x12, 0x13, 0x0a, 0x0f, 0x4a, 0x4f, 0x42, 0x5f, + 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x44, 0x4f, 0x4e, 0x45, 0x10, 0x03, 0x2a, 0x3b, 0x0a, + 0x0a, 0x44, 0x61, 0x74, 0x61, 0x46, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x12, 0x17, 0x0a, 0x13, 0x44, + 0x41, 0x54, 0x41, 0x5f, 0x46, 0x4f, 0x52, 0x4d, 0x41, 0x54, 0x5f, 0x49, 0x4e, 0x56, 0x41, 0x4c, + 0x49, 0x44, 0x10, 0x00, 0x12, 0x14, 0x0a, 0x10, 0x44, 0x41, 0x54, 0x41, 0x5f, 0x46, 0x4f, 0x52, + 0x4d, 0x41, 0x54, 0x5f, 0x41, 0x56, 0x52, 0x4f, 0x10, 0x01, 0x32, 0x92, 0x03, 0x0a, 0x0e, 0x53, + 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x6c, 0x0a, + 0x13, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, + 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x29, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, + 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, 0x53, 0x65, 0x72, + 0x76, 0x69, 0x6e, 0x67, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x2a, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, + 0x47, 0x65, 0x74, 0x46, 0x65, 0x61, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x49, + 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x11, 0x47, + 0x65, 0x74, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, + 0x12, 0x27, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, + 0x2e, 0x47, 0x65, 0x74, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, + 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x28, 0x2e, 0x66, 0x65, 0x61, 0x73, + 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, 0x4f, 0x6e, 0x6c, + 0x69, 0x6e, 0x65, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x63, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x42, 0x61, 0x74, 0x63, 0x68, 0x46, + 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x12, 0x26, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, + 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, 0x42, 0x61, 0x74, 0x63, 0x68, + 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x27, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x2e, + 0x47, 0x65, 0x74, 0x42, 0x61, 0x74, 0x63, 0x68, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x45, 0x0a, 0x06, 0x47, 0x65, 0x74, 0x4a, + 0x6f, 0x62, 0x12, 0x1c, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, + 0x6e, 0x67, 0x2e, 0x47, 0x65, 0x74, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x1d, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, + 0x2e, 0x47, 0x65, 0x74, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, + 0x5e, 0x0a, 0x13, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x73, + 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x42, 0x0f, 0x53, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x41, + 0x50, 0x49, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 
0x36, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, + 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, + 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x6e, 0x67, 0x62, + 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1292,44 +1277,42 @@ var file_feast_serving_ServingService_proto_goTypes = []interface{}{ (*GetOnlineFeaturesResponse_FieldValues)(nil), // 17: feast.serving.GetOnlineFeaturesResponse.FieldValues nil, // 18: feast.serving.GetOnlineFeaturesResponse.FieldValues.FieldsEntry (*DatasetSource_FileSource)(nil), // 19: feast.serving.DatasetSource.FileSource - (*duration.Duration)(nil), // 20: google.protobuf.Duration - (*timestamp.Timestamp)(nil), // 21: google.protobuf.Timestamp - (*types.Value)(nil), // 22: feast.types.Value + (*timestamp.Timestamp)(nil), // 20: google.protobuf.Timestamp + (*types.Value)(nil), // 21: feast.types.Value } var file_feast_serving_ServingService_proto_depIdxs = []int32{ 0, // 0: feast.serving.GetFeastServingInfoResponse.type:type_name -> feast.serving.FeastServingType - 20, // 1: feast.serving.FeatureReference.max_age:type_name -> google.protobuf.Duration - 6, // 2: feast.serving.GetOnlineFeaturesRequest.features:type_name -> feast.serving.FeatureReference - 15, // 3: feast.serving.GetOnlineFeaturesRequest.entity_rows:type_name -> feast.serving.GetOnlineFeaturesRequest.EntityRow - 6, // 4: feast.serving.GetBatchFeaturesRequest.features:type_name -> feast.serving.FeatureReference - 14, // 5: feast.serving.GetBatchFeaturesRequest.dataset_source:type_name -> feast.serving.DatasetSource - 17, // 6: feast.serving.GetOnlineFeaturesResponse.field_values:type_name -> feast.serving.GetOnlineFeaturesResponse.FieldValues - 13, // 7: feast.serving.GetBatchFeaturesResponse.job:type_name -> feast.serving.Job - 13, // 8: feast.serving.GetJobRequest.job:type_name -> feast.serving.Job - 13, // 9: feast.serving.GetJobResponse.job:type_name -> feast.serving.Job - 1, // 10: feast.serving.Job.type:type_name -> feast.serving.JobType - 2, // 11: feast.serving.Job.status:type_name -> feast.serving.JobStatus - 3, // 12: feast.serving.Job.data_format:type_name -> feast.serving.DataFormat - 19, // 13: feast.serving.DatasetSource.file_source:type_name -> feast.serving.DatasetSource.FileSource - 21, // 14: feast.serving.GetOnlineFeaturesRequest.EntityRow.entity_timestamp:type_name -> google.protobuf.Timestamp - 16, // 15: feast.serving.GetOnlineFeaturesRequest.EntityRow.fields:type_name -> feast.serving.GetOnlineFeaturesRequest.EntityRow.FieldsEntry - 22, // 16: feast.serving.GetOnlineFeaturesRequest.EntityRow.FieldsEntry.value:type_name -> feast.types.Value - 18, // 17: feast.serving.GetOnlineFeaturesResponse.FieldValues.fields:type_name -> feast.serving.GetOnlineFeaturesResponse.FieldValues.FieldsEntry - 22, // 18: feast.serving.GetOnlineFeaturesResponse.FieldValues.FieldsEntry.value:type_name -> feast.types.Value - 3, // 19: feast.serving.DatasetSource.FileSource.data_format:type_name -> feast.serving.DataFormat - 4, // 20: feast.serving.ServingService.GetFeastServingInfo:input_type -> feast.serving.GetFeastServingInfoRequest - 7, // 21: feast.serving.ServingService.GetOnlineFeatures:input_type -> feast.serving.GetOnlineFeaturesRequest - 8, // 22: feast.serving.ServingService.GetBatchFeatures:input_type -> feast.serving.GetBatchFeaturesRequest - 11, // 23: feast.serving.ServingService.GetJob:input_type 
-> feast.serving.GetJobRequest - 5, // 24: feast.serving.ServingService.GetFeastServingInfo:output_type -> feast.serving.GetFeastServingInfoResponse - 9, // 25: feast.serving.ServingService.GetOnlineFeatures:output_type -> feast.serving.GetOnlineFeaturesResponse - 10, // 26: feast.serving.ServingService.GetBatchFeatures:output_type -> feast.serving.GetBatchFeaturesResponse - 12, // 27: feast.serving.ServingService.GetJob:output_type -> feast.serving.GetJobResponse - 24, // [24:28] is the sub-list for method output_type - 20, // [20:24] is the sub-list for method input_type - 20, // [20:20] is the sub-list for extension type_name - 20, // [20:20] is the sub-list for extension extendee - 0, // [0:20] is the sub-list for field type_name + 6, // 1: feast.serving.GetOnlineFeaturesRequest.features:type_name -> feast.serving.FeatureReference + 15, // 2: feast.serving.GetOnlineFeaturesRequest.entity_rows:type_name -> feast.serving.GetOnlineFeaturesRequest.EntityRow + 6, // 3: feast.serving.GetBatchFeaturesRequest.features:type_name -> feast.serving.FeatureReference + 14, // 4: feast.serving.GetBatchFeaturesRequest.dataset_source:type_name -> feast.serving.DatasetSource + 17, // 5: feast.serving.GetOnlineFeaturesResponse.field_values:type_name -> feast.serving.GetOnlineFeaturesResponse.FieldValues + 13, // 6: feast.serving.GetBatchFeaturesResponse.job:type_name -> feast.serving.Job + 13, // 7: feast.serving.GetJobRequest.job:type_name -> feast.serving.Job + 13, // 8: feast.serving.GetJobResponse.job:type_name -> feast.serving.Job + 1, // 9: feast.serving.Job.type:type_name -> feast.serving.JobType + 2, // 10: feast.serving.Job.status:type_name -> feast.serving.JobStatus + 3, // 11: feast.serving.Job.data_format:type_name -> feast.serving.DataFormat + 19, // 12: feast.serving.DatasetSource.file_source:type_name -> feast.serving.DatasetSource.FileSource + 20, // 13: feast.serving.GetOnlineFeaturesRequest.EntityRow.entity_timestamp:type_name -> google.protobuf.Timestamp + 16, // 14: feast.serving.GetOnlineFeaturesRequest.EntityRow.fields:type_name -> feast.serving.GetOnlineFeaturesRequest.EntityRow.FieldsEntry + 21, // 15: feast.serving.GetOnlineFeaturesRequest.EntityRow.FieldsEntry.value:type_name -> feast.types.Value + 18, // 16: feast.serving.GetOnlineFeaturesResponse.FieldValues.fields:type_name -> feast.serving.GetOnlineFeaturesResponse.FieldValues.FieldsEntry + 21, // 17: feast.serving.GetOnlineFeaturesResponse.FieldValues.FieldsEntry.value:type_name -> feast.types.Value + 3, // 18: feast.serving.DatasetSource.FileSource.data_format:type_name -> feast.serving.DataFormat + 4, // 19: feast.serving.ServingService.GetFeastServingInfo:input_type -> feast.serving.GetFeastServingInfoRequest + 7, // 20: feast.serving.ServingService.GetOnlineFeatures:input_type -> feast.serving.GetOnlineFeaturesRequest + 8, // 21: feast.serving.ServingService.GetBatchFeatures:input_type -> feast.serving.GetBatchFeaturesRequest + 11, // 22: feast.serving.ServingService.GetJob:input_type -> feast.serving.GetJobRequest + 5, // 23: feast.serving.ServingService.GetFeastServingInfo:output_type -> feast.serving.GetFeastServingInfoResponse + 9, // 24: feast.serving.ServingService.GetOnlineFeatures:output_type -> feast.serving.GetOnlineFeaturesResponse + 10, // 25: feast.serving.ServingService.GetBatchFeatures:output_type -> feast.serving.GetBatchFeaturesResponse + 12, // 26: feast.serving.ServingService.GetJob:output_type -> feast.serving.GetJobResponse + 23, // [23:27] is the sub-list for method output_type + 19, // [19:23] is 
the sub-list for method input_type
+ 19, // [19:19] is the sub-list for extension type_name
+ 19, // [19:19] is the sub-list for extension extendee
+ 0, // [0:19] is the sub-list for field type_name
 }
 func init() { file_feast_serving_ServingService_proto_init() }
diff --git a/sdk/go/protos/feast/storage/Redis.pb.go b/sdk/go/protos/feast/storage/Redis.pb.go
index 1dca28e26a..92c1040ca5 100644
--- a/sdk/go/protos/feast/storage/Redis.pb.go
+++ b/sdk/go/protos/feast/storage/Redis.pb.go
@@ -15,14 +15,14 @@
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// protoc-gen-go v1.21.0
+// protoc-gen-go v1.24.0
 // protoc v3.10.0
 // source: feast/storage/Redis.proto
 package storage
 import (
- types "github.com/gojek/feast/sdk/go/protos/feast/types"
+ types "github.com/feast-dev/feast/sdk/go/protos/feast/types"
 proto "github.com/golang/protobuf/proto"
 protoreflect "google.golang.org/protobuf/reflect/protoreflect"
 protoimpl "google.golang.org/protobuf/runtime/protoimpl"
@@ -46,7 +46,7 @@ type RedisKey struct {
 sizeCache protoimpl.SizeCache
 unknownFields protoimpl.UnknownFields
- // FeatureSet this row belongs to, this is defined as featureSetName:version.
+ // FeatureSet this row belongs to; this is defined as featureSetName.
 FeatureSet string `protobuf:"bytes,2,opt,name=feature_set,json=featureSet,proto3" json:"feature_set,omitempty"`
 // List of fields containing entity names and their respective values
 // contained within this feature row. The entities should be sorted
@@ -113,12 +113,13 @@ var file_feast_storage_Redis_proto_rawDesc = []byte{
 0x12, 0x2e, 0x0a, 0x08, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03,
 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73,
 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x08, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73,
- 0x42, 0x4f, 0x0a, 0x0d, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67,
- 0x65, 0x42, 0x0a, 0x52, 0x65, 0x64, 0x69, 0x73, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x32, 0x67,
- 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6a, 0x65, 0x6b, 0x2f,
- 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f,
- 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67,
- 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
+ 0x42, 0x59, 0x0a, 0x13, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e,
+ 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x42, 0x0a, 0x52, 0x65, 0x64, 0x69, 0x73, 0x50, 0x72,
+ 0x6f, 0x74, 0x6f, 0x5a, 0x36, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f,
+ 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f,
+ 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65,
+ 0x61, 0x73, 0x74, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x62, 0x06, 0x70, 0x72, 0x6f,
+ 0x74, 0x6f, 0x33,
 }
 var (
diff --git a/sdk/go/protos/feast/types/FeatureRow.pb.go b/sdk/go/protos/feast/types/FeatureRow.pb.go
index 769f219d32..4c094379eb 100644
--- a/sdk/go/protos/feast/types/FeatureRow.pb.go
+++ b/sdk/go/protos/feast/types/FeatureRow.pb.go
@@ -15,7 +15,7 @@
 // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.21.0 +// protoc-gen-go v1.24.0 // protoc v3.10.0 // source: feast/types/FeatureRow.proto @@ -53,9 +53,11 @@ type FeatureRow struct { // will use to perform joins, determine latest values, and coalesce rows. EventTimestamp *timestamp.Timestamp `protobuf:"bytes,3,opt,name=event_timestamp,json=eventTimestamp,proto3" json:"event_timestamp,omitempty"` // Complete reference to the featureSet this featureRow belongs to, in the form of - // /:. This value will be used by the feast ingestion job to filter + // /. This value will be used by the feast ingestion job to filter // rows, and write the values to the correct tables. FeatureSet string `protobuf:"bytes,6,opt,name=feature_set,json=featureSet,proto3" json:"feature_set,omitempty"` + // Identifier tying this feature row to a specific ingestion job. + IngestionId string `protobuf:"bytes,7,opt,name=ingestion_id,json=ingestionId,proto3" json:"ingestion_id,omitempty"` } func (x *FeatureRow) Reset() { @@ -111,6 +113,13 @@ func (x *FeatureRow) GetFeatureSet() string { return "" } +func (x *FeatureRow) GetIngestionId() string { + if x != nil { + return x.IngestionId + } + return "" +} + var File_feast_types_FeatureRow_proto protoreflect.FileDescriptor var file_feast_types_FeatureRow_proto_rawDesc = []byte{ @@ -120,7 +129,7 @@ var file_feast_types_FeatureRow_proto_rawDesc = []byte{ 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x9e, 0x01, 0x0a, 0x0a, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xc1, 0x01, 0x0a, 0x0a, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x6f, 0x77, 0x12, 0x2a, 0x0a, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x06, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x73, @@ -130,12 +139,15 @@ var file_feast_types_FeatureRow_proto_rawDesc = []byte{ 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x1f, 0x0a, 0x0b, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x5f, 0x73, 0x65, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x66, 0x65, 0x61, 0x74, - 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x42, 0x50, 0x0a, 0x0b, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x74, 0x79, 0x70, 0x65, 0x73, 0x42, 0x0f, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x6f, - 0x77, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x30, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, - 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6a, 0x65, 0x6b, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, - 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, - 0x73, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x69, 0x6e, 0x67, 0x65, 0x73, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x69, 0x6e, + 0x67, 0x65, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x49, 0x64, 0x42, 0x5a, 0x0a, 0x11, 0x66, 0x65, 0x61, + 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x42, 0x0f, + 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x6f, 0x77, 
0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, + 0x34, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, + 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, + 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, + 0x74, 0x79, 0x70, 0x65, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/sdk/go/protos/feast/types/FeatureRowExtended.pb.go b/sdk/go/protos/feast/types/FeatureRowExtended.pb.go index 8ca9ee1bc9..3cb15a21ce 100644 --- a/sdk/go/protos/feast/types/FeatureRowExtended.pb.go +++ b/sdk/go/protos/feast/types/FeatureRowExtended.pb.go @@ -15,7 +15,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: -// protoc-gen-go v1.21.0 +// protoc-gen-go v1.24.0 // protoc v3.10.0 // source: feast/types/FeatureRowExtended.proto @@ -264,13 +264,13 @@ var file_feast_types_FeatureRowExtended_proto_rawDesc = []byte{ 0x73, 0x65, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x66, 0x69, 0x72, 0x73, 0x74, 0x53, 0x65, 0x65, - 0x6e, 0x42, 0x58, 0x0a, 0x0b, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, - 0x42, 0x17, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x6f, 0x77, 0x45, 0x78, 0x74, 0x65, - 0x6e, 0x64, 0x65, 0x64, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x30, 0x67, 0x69, 0x74, 0x68, 0x75, - 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6a, 0x65, 0x6b, 0x2f, 0x66, 0x65, 0x61, 0x73, - 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x33, + 0x6e, 0x42, 0x62, 0x0a, 0x11, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x42, 0x17, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, + 0x6f, 0x77, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x64, 0x65, 0x64, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, + 0x34, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, + 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, + 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, + 0x74, 0x79, 0x70, 0x65, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/sdk/go/protos/feast/types/Field.pb.go b/sdk/go/protos/feast/types/Field.pb.go index c7b5193db5..08b35f6627 100644 --- a/sdk/go/protos/feast/types/Field.pb.go +++ b/sdk/go/protos/feast/types/Field.pb.go @@ -15,7 +15,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.21.0 +// protoc-gen-go v1.24.0 // protoc v3.10.0 // source: feast/types/Field.proto @@ -106,12 +106,13 @@ var file_feast_types_Field_proto_rawDesc = []byte{ 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x28, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x4b, 0x0a, 0x0b, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, - 0x74, 0x79, 0x70, 0x65, 0x73, 0x42, 0x0a, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x50, 0x72, 0x6f, 0x74, - 0x6f, 0x5a, 0x30, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, - 0x6a, 0x65, 0x6b, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, - 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, - 0x70, 0x65, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x55, 0x0a, 0x11, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x42, 0x0a, 0x46, 0x69, 0x65, + 0x6c, 0x64, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x34, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, + 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, + 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x62, 0x06, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/sdk/go/protos/feast/types/Value.pb.go b/sdk/go/protos/feast/types/Value.pb.go index ba530b2dac..bb777a0766 100644 --- a/sdk/go/protos/feast/types/Value.pb.go +++ b/sdk/go/protos/feast/types/Value.pb.go @@ -15,7 +15,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.21.0 +// protoc-gen-go v1.24.0 // protoc v3.10.0 // source: feast/types/Value.proto @@ -819,12 +819,13 @@ var file_feast_types_Value_proto_rawDesc = []byte{ 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x76, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x03, 0x28, 0x02, 0x52, 0x03, 0x76, 0x61, 0x6c, 0x22, 0x1c, 0x0a, 0x08, 0x42, 0x6f, 0x6f, 0x6c, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x76, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x03, 0x28, 0x08, 0x52, 0x03, 0x76, - 0x61, 0x6c, 0x42, 0x4b, 0x0a, 0x0b, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x74, 0x79, 0x70, 0x65, - 0x73, 0x42, 0x0a, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x30, 0x67, - 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6a, 0x65, 0x6b, 0x2f, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x62, - 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x61, 0x6c, 0x42, 0x55, 0x0a, 0x11, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x2e, 0x74, 0x79, 0x70, 0x65, 0x73, 0x42, 0x0a, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x50, 0x72, + 0x6f, 0x74, 0x6f, 0x5a, 0x34, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, + 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x66, 0x65, + 0x61, 0x73, 0x74, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x33, } var ( diff --git a/sdk/go/protos/tensorflow_metadata/proto/v0/path.pb.go b/sdk/go/protos/tensorflow_metadata/proto/v0/path.pb.go index 5b55e5bca5..f58247299d 100644 --- a/sdk/go/protos/tensorflow_metadata/proto/v0/path.pb.go +++ b/sdk/go/protos/tensorflow_metadata/proto/v0/path.pb.go @@ -15,7 +15,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.21.0 +// protoc-gen-go v1.24.0 // protoc v3.10.0 // source: tensorflow_metadata/proto/v0/path.proto @@ -112,14 +112,14 @@ var file_tensorflow_metadata_proto_v0_path_proto_rawDesc = []byte{ 0x61, 0x74, 0x68, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x16, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x22, 0x1a, 0x0a, 0x04, 0x50, 0x61, 0x74, 0x68, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x74, 0x65, - 0x70, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x73, 0x74, 0x65, 0x70, 0x42, 0x70, 0x0a, + 0x70, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x73, 0x74, 0x65, 0x70, 0x42, 0x68, 0x0a, 0x1a, 0x6f, 0x72, 0x67, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, - 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x50, 0x01, 0x5a, 0x4d, 0x67, - 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6a, 0x65, 0x6b, 0x2f, - 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x73, 0x2f, 0x74, 0x68, 0x69, 0x72, 0x64, 0x5f, 0x70, 0x61, 0x72, 0x74, 0x79, 0x2f, - 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x76, 0x30, 0xf8, 0x01, 0x01, + 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x50, 0x01, 0x5a, 0x45, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2d, + 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, + 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, + 0x6f, 0x77, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2f, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x2f, 0x76, 0x30, 0xf8, 0x01, 0x01, } var ( diff --git a/sdk/go/protos/tensorflow_metadata/proto/v0/schema.pb.go b/sdk/go/protos/tensorflow_metadata/proto/v0/schema.pb.go index 5eec2f259d..a04f5bba8c 100644 --- a/sdk/go/protos/tensorflow_metadata/proto/v0/schema.pb.go +++ b/sdk/go/protos/tensorflow_metadata/proto/v0/schema.pb.go @@ -15,7 +15,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. 
// versions: -// protoc-gen-go v1.21.0 +// protoc-gen-go v1.24.0 // protoc v3.10.0 // source: tensorflow_metadata/proto/v0/schema.proto @@ -3476,14 +3476,13 @@ var file_tensorflow_metadata_proto_v0_schema_proto_rawDesc = []byte{ 0x57, 0x4e, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x42, 0x59, 0x54, 0x45, 0x53, 0x10, 0x01, 0x12, 0x07, 0x0a, 0x03, 0x49, 0x4e, 0x54, 0x10, 0x02, 0x12, 0x09, 0x0a, 0x05, 0x46, 0x4c, 0x4f, 0x41, 0x54, 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x54, 0x52, 0x55, 0x43, 0x54, 0x10, 0x04, 0x42, - 0x70, 0x0a, 0x1a, 0x6f, 0x72, 0x67, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, + 0x68, 0x0a, 0x1a, 0x6f, 0x72, 0x67, 0x2e, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x76, 0x30, 0x50, 0x01, 0x5a, - 0x4d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6a, 0x65, - 0x6b, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, 0x67, 0x6f, 0x2f, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x74, 0x68, 0x69, 0x72, 0x64, 0x5f, 0x70, 0x61, 0x72, 0x74, - 0x79, 0x2f, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x76, 0x30, 0xf8, 0x01, - 0x01, + 0x45, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x66, 0x65, 0x61, 0x73, + 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x66, 0x65, 0x61, 0x73, 0x74, 0x2f, 0x73, 0x64, 0x6b, 0x2f, + 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x2f, 0x74, 0x65, 0x6e, 0x73, 0x6f, 0x72, + 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2f, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x2f, 0x76, 0x30, 0xf8, 0x01, 0x01, } var ( diff --git a/sdk/go/request.go b/sdk/go/request.go index a3a3fe6d71..683f17c16e 100644 --- a/sdk/go/request.go +++ b/sdk/go/request.go @@ -2,37 +2,34 @@ package feast import ( "fmt" - "github.com/gojek/feast/sdk/go/protos/feast/serving" - "strconv" + "github.com/feast-dev/feast/sdk/go/protos/feast/serving" "strings" ) var ( - // ErrInvalidFeatureName indicates that the user has provided a feature reference with the wrong structure or contents - ErrInvalidFeatureName = "invalid feature references %s provided, feature names must be in the format /:" + // ErrInvalidFeatureRef indicates that the user has provided a feature reference + // with the wrong structure or contents + ErrInvalidFeatureRef = "Invalid Feature Reference %s provided, " + "feature reference must be in the format [featureset:]name" ) // OnlineFeaturesRequest wrapper on feast.serving.GetOnlineFeaturesRequest. type OnlineFeaturesRequest struct { // Features is the list of features to obtain from Feast. Each feature can be given as - // - // : - // / - // /: - // The only required components are the feature name and project. + // the format feature_set:feature, where "feature_set" & "feature" are the feature set name + // and the feature name, respectively. The only required component is the feature name. Features []string // Entities is the list of entity rows to retrieve features on. Each row is a map of entity name to entity value. Entities []Row - // Project is the default project to use when looking up features. This is only used when a project is not found - // within the feature id. + // Project specifies the project that contains the feature sets to which the requested features belong. Project string } // Builds the feast-specified request payload from the wrapper.
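+// For example (a sketch; "driver", "driver_id", "driver_name" and "driver_project"
+// are illustrative names), a request such as
+//
+//	OnlineFeaturesRequest{
+//		Features: []string{"driver:driver_id", "driver_name"},
+//		Entities: []Row{{"driver_id": Int64Val(123)}},
+//		Project:  "driver_project",
+//	}
+//
+// is turned into FeatureReference protos that carry the project, the feature set
+// (when one is given) and the feature name.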
func (r OnlineFeaturesRequest) buildRequest() (*serving.GetOnlineFeaturesRequest, error) { - features, err := buildFeatures(r.Features, r.Project) + featureRefs, err := buildFeatureRefs(r.Features, r.Project) if err != nil { return nil, err } @@ -45,57 +42,74 @@ func (r OnlineFeaturesRequest) buildRequest() (*serving.GetOnlineFeaturesRequest } } return &serving.GetOnlineFeaturesRequest{ - Features: features, + Features: featureRefs, EntityRows: entityRows, }, nil } -// buildFeatures create a slice of FeatureReferences from a slice of "/:" -// It returns an error when the format is invalid -func buildFeatures(featureReferences []string, defaultProject string) ([]*serving.FeatureReference, error) { - var features []*serving.FeatureReference - - for _, featureRef := range featureReferences { - var project string - var name string - var version int - var featureSplit []string - - projectSplit := strings.Split(featureRef, "/") - - if len(projectSplit) == 2 { - project = projectSplit[0] - featureSplit = strings.Split(projectSplit[1], ":") - } else if len(projectSplit) == 1 { - project = defaultProject - featureSplit = strings.Split(projectSplit[0], ":") - } else { - return nil, fmt.Errorf(ErrInvalidFeatureName, featureRef) +// Creates a slice of FeatureReferences from string representations in +// the format featureset:feature. +// featureRefStrs - string feature references to parse. +// project - Optional project to set on the parsed FeatureReferences; pass "" to leave it unset. +// Returns parsed FeatureReferences. +// Returns an error when the format of a string feature reference is invalid +func buildFeatureRefs(featureRefStrs []string, project string) ([]*serving.FeatureReference, error) { + var featureRefs []*serving.FeatureReference + + for _, featureRefStr := range featureRefStrs { + featureRef, err := parseFeatureRef(featureRefStr, false) + if err != nil { + return nil, err } + // apply project if specified + if len(project) != 0 { + featureRef.Project = project + } + featureRefs = append(featureRefs, featureRef) + } + return featureRefs, nil +} - if len(featureSplit) == 2 { - name = featureSplit[0] - v, err := strconv.Atoi(featureSplit[1]) - if err != nil { - return nil, fmt.Errorf(ErrInvalidFeatureName, featureRef) - } - version = v - } else if len(featureSplit) == 1 { - name = featureSplit[0] +// Parses a string FeatureReference into a FeatureReference proto +// featureRefStr - the string feature reference to parse. +// ignoreProject - if true, ignores any project specified in the given featureRefStr. +// Otherwise, returns an error if a project is detected in featureRefStr. +// Returns parsed FeatureReference.
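+// For example (illustrative values): "driver:trips_today" parses to a reference
+// with FeatureSet "driver" and Name "trips_today", while a bare "trips_today"
+// parses to a reference with only Name set.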
+// Returns an error when the format of the string feature reference is invalid +func parseFeatureRef(featureRefStr string, ignoreProject bool) (*serving.FeatureReference, error) { + if len(featureRefStr) == 0 { + return nil, fmt.Errorf(ErrInvalidFeatureRef, featureRefStr) + } + + var featureRef serving.FeatureReference + if strings.Contains(featureRefStr, "/") { + if ignoreProject { + projectSplit := strings.Split(featureRefStr, "/") + featureRefStr = projectSplit[1] } else { - return nil, fmt.Errorf(ErrInvalidFeatureName, featureRef) + return nil, fmt.Errorf(ErrInvalidFeatureRef, featureRefStr) } + } + // parse featureset if specified + if strings.Contains(featureRefStr, ":") { + refSplit := strings.Split(featureRefStr, ":") + featureRef.FeatureSet, featureRefStr = refSplit[0], refSplit[1] + } + featureRef.Name = featureRefStr - if project == "" || name == "" || version < 0 { - return nil, fmt.Errorf(ErrInvalidFeatureName, featureRef) - } + return &featureRef, nil +} - features = append(features, &serving.FeatureReference{ - Name: name, - Version: int32(version), - Project: project, - }) +// Converts a FeatureReference proto into a string +// featureRef - The FeatureReference to render as a string +// Returns the string representation of the given FeatureReference +func toFeatureRefStr(featureRef *serving.FeatureReference) string { + refStr := "" + // In proto3, unset string fields default to "" + if len(featureRef.FeatureSet) > 0 { + refStr += featureRef.FeatureSet + ":" } + refStr += featureRef.Name - return features + return refStr } diff --git a/sdk/go/request_test.go b/sdk/go/request_test.go index b686663867..70e1f0d8f5 100644 --- a/sdk/go/request_test.go +++ b/sdk/go/request_test.go @@ -2,8 +2,8 @@ package feast import ( "fmt" - "github.com/gojek/feast/sdk/go/protos/feast/serving" - "github.com/gojek/feast/sdk/go/protos/feast/types" + "github.com/feast-dev/feast/sdk/go/protos/feast/serving" + "github.com/feast-dev/feast/sdk/go/protos/feast/types" json "github.com/golang/protobuf/jsonpb" "github.com/golang/protobuf/proto" "testing" @@ -20,40 +20,27 @@ func TestGetOnlineFeaturesRequest(t *testing.T) { { name: "valid", req: OnlineFeaturesRequest{ - Features: []string{"my_project_1/feature1:1", "my_project_2/feature1:1", "my_project_4/feature3", "feature2:2", "feature2"}, + Features: []string{ + "driver:driver_id", + "driver_id", + }, Entities: []Row{ {"entity1": Int64Val(1), "entity2": StrVal("bob")}, {"entity1": Int64Val(1), "entity2": StrVal("annie")}, {"entity1": Int64Val(1), "entity2": StrVal("jane")}, }, - Project: "my_project_3", + Project: "driver_project", }, want: &serving.GetOnlineFeaturesRequest{ Features: []*serving.FeatureReference{ { - Project: "my_project_1", - Name: "feature1", - Version: 1, - }, - { - Project: "my_project_2", - Name: "feature1", - Version: 1, - }, - { - Project: "my_project_4", - Name: "feature3", - Version: 0, + Project: "driver_project", + FeatureSet: "driver", + Name: "driver_id", }, { - Project: "my_project_3", - Name: "feature2", - Version: 2, - }, - { - Project: "my_project_3", - Name: "feature2", - Version: 0, + Project: "driver_project", + Name: "driver_id", }, }, EntityRows: []*serving.GetOnlineFeaturesRequest_EntityRow{ @@ -81,53 +68,15 @@ func TestGetOnlineFeaturesRequest(t *testing.T) { wantErr: false, err: nil, }, - { - name: "valid_project_in_name", - req: OnlineFeaturesRequest{ - Features: []string{"project/feature1"}, - Entities: []Row{}, - }, - want: &serving.GetOnlineFeaturesRequest{ - Features:
[]*serving.FeatureReference{ - { - Project: "project", - Name: "feature1", - Version: 0, - }, - }, - EntityRows: []*serving.GetOnlineFeaturesRequest_EntityRow{}, - OmitEntitiesInResponse: false, - }, - wantErr: false, - err: nil, - }, - { - name: "no_project", - req: OnlineFeaturesRequest{ - Features: []string{"feature1"}, - Entities: []Row{}, - }, - wantErr: true, - err: fmt.Errorf(ErrInvalidFeatureName, "feature1"), - }, { name: "invalid_feature_name/wrong_format", req: OnlineFeaturesRequest{ - Features: []string{"fs1:3:feature1"}, + Features: []string{"/fs1:feature1"}, Entities: []Row{}, Project: "my_project", }, wantErr: true, - err: fmt.Errorf(ErrInvalidFeatureName, "fs1:3:feature1"), - }, - { - name: "invalid_feature_name/invalid_version", - req: OnlineFeaturesRequest{ - Features: []string{"project/a:feature1"}, - Entities: []Row{}, - }, - wantErr: true, - err: fmt.Errorf(ErrInvalidFeatureName, "project/a:feature1"), + err: fmt.Errorf(ErrInvalidFeatureRef, "/fs1:feature1"), }, } for _, tc := range tt { diff --git a/sdk/go/response.go b/sdk/go/response.go index e6e59268ea..1bc80af9fb 100644 --- a/sdk/go/response.go +++ b/sdk/go/response.go @@ -2,8 +2,8 @@ package feast import ( "fmt" - "github.com/gojek/feast/sdk/go/protos/feast/serving" - "github.com/gojek/feast/sdk/go/protos/feast/types" + "github.com/feast-dev/feast/sdk/go/protos/feast/serving" + "github.com/feast-dev/feast/sdk/go/protos/feast/types" ) var ( diff --git a/sdk/go/response_test.go b/sdk/go/response_test.go index 5aa2c276d6..6a7c421118 100644 --- a/sdk/go/response_test.go +++ b/sdk/go/response_test.go @@ -2,8 +2,8 @@ package feast import ( "fmt" - "github.com/gojek/feast/sdk/go/protos/feast/serving" - "github.com/gojek/feast/sdk/go/protos/feast/types" + "github.com/feast-dev/feast/sdk/go/protos/feast/serving" + "github.com/feast-dev/feast/sdk/go/protos/feast/types" "github.com/google/go-cmp/cmp" "testing" ) @@ -67,7 +67,7 @@ func TestOnlineFeaturesResponseToInt64Array(t *testing.T) { { name: "length mismatch", args: args{ - order: []string{"fs:1:feature2", "fs:1:feature1"}, + order: []string{"fs:feature2", "fs:feature1"}, fillNa: []int64{-1}, }, want: nil, diff --git a/sdk/go/types.go b/sdk/go/types.go index 9af888e355..dd6553163b 100644 --- a/sdk/go/types.go +++ b/sdk/go/types.go @@ -1,7 +1,7 @@ package feast import ( - "github.com/gojek/feast/sdk/go/protos/feast/types" + "github.com/feast-dev/feast/sdk/go/protos/feast/types" "github.com/golang/protobuf/proto" ) @@ -21,12 +21,12 @@ func (r Row) equalTo(other Row) bool { return true } -// StrVal is a int64 type feast value +// StrVal is a string type feast value func StrVal(val string) *types.Value { return &types.Value{Val: &types.Value_StringVal{StringVal: val}} } -// Int32Val is a int64 type feast value +// Int32Val is a int32 type feast value func Int32Val(val int32) *types.Value { return &types.Value{Val: &types.Value_Int32Val{Int32Val: val}} } diff --git a/sdk/java/pom.xml b/sdk/java/pom.xml index e8a82a485f..75d82edcc0 100644 --- a/sdk/java/pom.xml +++ b/sdk/java/pom.xml @@ -94,12 +94,8 @@ - maven-surefire-plugin - 2.22.2 - - - maven-failsafe-plugin - 2.22.2 + org.jacoco + jacoco-maven-plugin diff --git a/sdk/java/src/main/java/com/gojek/feast/FeastClient.java b/sdk/java/src/main/java/com/gojek/feast/FeastClient.java index 8014231836..a81fdab21a 100644 --- a/sdk/java/src/main/java/com/gojek/feast/FeastClient.java +++ b/sdk/java/src/main/java/com/gojek/feast/FeastClient.java @@ -16,15 +16,18 @@ */ package com.gojek.feast; 
-import feast.serving.ServingAPIProto.FeatureReference; -import feast.serving.ServingAPIProto.GetFeastServingInfoRequest; -import feast.serving.ServingAPIProto.GetFeastServingInfoResponse; -import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; -import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; -import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; -import feast.serving.ServingServiceGrpc; +import feast.proto.serving.ServingAPIProto.FeatureReference; +import feast.proto.serving.ServingAPIProto.GetFeastServingInfoRequest; +import feast.proto.serving.ServingAPIProto.GetFeastServingInfoResponse; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesResponse; +import feast.proto.serving.ServingServiceGrpc; +import feast.proto.serving.ServingServiceGrpc.ServingServiceBlockingStub; +import feast.proto.types.ValueProto.Value; import io.grpc.ManagedChannel; import io.grpc.ManagedChannelBuilder; +import java.util.HashSet; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; @@ -38,7 +41,7 @@ public class FeastClient implements AutoCloseable { private static final int CHANNEL_SHUTDOWN_TIMEOUT_SEC = 5; private final ManagedChannel channel; - private final ServingServiceGrpc.ServingServiceBlockingStub stub; + private final ServingServiceBlockingStub stub; /** * Create a client to access Feast @@ -56,61 +59,78 @@ public GetFeastServingInfoResponse getFeastServingInfo() { return stub.getFeastServingInfo(GetFeastServingInfoRequest.newBuilder().build()); } + /** + * Get online features from Feast from FeatureSets + * + *

See {@link #getOnlineFeatures(List, List, String, boolean)} + * + * @param featureRefs list of string feature references to retrieve in the following format + * featureSet:feature, where 'featureSet' and 'feature' refer to the FeatureSet and Feature + * names respectively. Only the Feature name is required. + * @param rows list of {@link Row} to select the entities to retrieve the features for. + * @return list of {@link Row} containing retrieved data fields. + */ + public List getOnlineFeatures(List featureRefs, List rows) { + return getOnlineFeatures(featureRefs, rows, ""); + } + /** * Get online features from Feast. * - *

See {@link #getOnlineFeatures(List, List, String)} + *

See {@link #getOnlineFeatures(List, List, String, boolean)} * - * @param features list of string feature references to retrieve, feature reference follows this - format [project]/[name]:[version] + * @param featureRefs list of string feature references to retrieve in the following format + * featureSet:feature, where 'featureSet' and 'feature' refer to the FeatureSet and Feature + * names respectively. Only the Feature name is required. * @param rows list of {@link Row} to select the entities to retrieve the features for - * @param defaultProject {@link String} Default project to find features in if not provided in - * feature reference. - * @return list of {@link Row} containing features + * @param project {@link String} Specifies the project that contains the FeatureSets to which the + * requested Features belong. + * @return list of {@link Row} containing retrieved data fields. */ - public List getOnlineFeatures(List features, List rows, String defaultProject) { - return getOnlineFeatures(features, rows, defaultProject, false); + public List getOnlineFeatures(List featureRefs, List rows, String project) { + return getOnlineFeatures(featureRefs, rows, project, false); } /** * Get online features from Feast. * - *

Example of retrieving online features for the driver project, with features driver_id and - * driver_name, both version 1 + *

Example of retrieving online features for the driver featureset, with features driver_id and + * driver_name * *

{@code
    * FeastClient client = FeastClient.create("localhost", 6566);
-   * List requestedFeatureIds = Arrays.asList("driver/driver_id:1", "driver/driver_name:1");
+   * List requestedFeatureIds = Arrays.asList("driver:driver_id", "driver:driver_name");
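+   * // feature references use the [featureSet:]name format; a project, where needed,
+   * // is passed as a separate argument (see the project-scoped call below)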
    * List requestedRows =
    *         Arrays.asList(Row.create().set("driver_id", 123), Row.create().set("driver_id", 456));
    * List retrievedFeatures = client.getOnlineFeatures(requestedFeatureIds, requestedRows);
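+   * // or, scoped to a project ("driver_project" is an illustrative name):
+   * // List retrievedFeatures = client.getOnlineFeatures(requestedFeatureIds, requestedRows, "driver_project");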
    * retrievedFeatures.forEach(System.out::println);
    * }
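+   *
+   * Each returned {@link Row} keys feature values by reference strings such as
+   * {@code "driver:driver_name"}; any project prefix is stripped from references
+   * returned by serving.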
* - * @param featureRefStrings list of feature refs to retrieve, feature refs follow this format - [project]/[name]:[version] + * @param featureRefs list of string feature references to retrieve in the following format + * featureSet:feature, where 'featureSet' and 'feature' refer to the FeatureSet and Feature + * names respectively. Only the Feature name is required. * @param rows list of {@link Row} to select the entities to retrieve the features for - * @param defaultProject {@link String} Default project to find features in if not provided in - * feature reference. + * @param project {@link String} Specifies the project that contains the FeatureSets to which the + * requested Features belong. * @param omitEntitiesInResponse if true, the returned {@link Row} will not contain field and * value for the entity - * @return list of {@link Row} containing features + * @return list of {@link Row} containing retrieved data fields. */ public List getOnlineFeatures( - List featureRefStrings, - List rows, - String defaultProject, - boolean omitEntitiesInResponse) { - List features = - RequestUtil.createFeatureRefs(featureRefStrings, defaultProject); + List featureRefs, List rows, String project, boolean omitEntitiesInResponse) { + List features = RequestUtil.createFeatureRefs(featureRefs, project); + // build entity rows and collect entity references + HashSet entityRefs = new HashSet<>(); List entityRows = rows.stream() .map( - row -> - EntityRow.newBuilder() - .setEntityTimestamp(row.getEntityTimestamp()) - .putAllFields(row.getFields()) - .build()) + row -> { + entityRefs.addAll(row.getFields().keySet()); + return EntityRow.newBuilder() + .setEntityTimestamp(row.getEntityTimestamp()) + .putAllFields(row.getFields()) + .build(); + }) .collect(Collectors.toList()); GetOnlineFeaturesResponse response = @@ -125,15 +145,26 @@ public List getOnlineFeatures( .map( field -> { Row row = Row.create(); - field.getFieldsMap().forEach(row::set); + field + .getFieldsMap() + .forEach( + (String name, Value value) -> { + // Strip the project from feature reference strings returned from serving + if (!entityRefs.contains(name)) { + FeatureReference featureRef = + RequestUtil.parseFeatureRef(name, true).build(); + name = RequestUtil.renderFeatureRef(featureRef); + } + row.set(name, value); + }); return row; }) .collect(Collectors.toList()); } - private FeastClient(ManagedChannel channel) { + protected FeastClient(ManagedChannel channel) { this.channel = channel; - stub = ServingServiceGrpc.newBlockingStub(channel); + this.stub = ServingServiceGrpc.newBlockingStub(channel); } public void close() throws Exception { diff --git a/sdk/java/src/main/java/com/gojek/feast/RequestUtil.java b/sdk/java/src/main/java/com/gojek/feast/RequestUtil.java index 874196e92b..3a1a0919d3 100644 --- a/sdk/java/src/main/java/com/gojek/feast/RequestUtil.java +++ b/sdk/java/src/main/java/com/gojek/feast/RequestUtil.java @@ -16,76 +16,90 @@ */ package com.gojek.feast; -import feast.serving.ServingAPIProto.FeatureReference; -import java.util.ArrayList; +import feast.proto.serving.ServingAPIProto.FeatureReference; import java.util.List; +import java.util.stream.Collectors; @SuppressWarnings("WeakerAccess") public class RequestUtil { + /** + * Create FeatureReference protos from the given string feature references.
+ * + * @param featureRefStrings to create Feature Reference protos from + * @param project specifies the project to set in the parsed Feature Reference protos; otherwise pass "" + * @return List of parsed {@link FeatureReference} protos + */ public static List createFeatureRefs( - List featureRefStrings, String defaultProject) { + List featureRefStrings, String project) { if (featureRefStrings == null) { throw new IllegalArgumentException("featureRefs cannot be null"); } - List featureRefs = new ArrayList<>(); + List featureRefs = + featureRefStrings.stream() + .map(refStr -> parseFeatureRef(refStr, false)) + .collect(Collectors.toList()); + // apply project override if specified + if (!project.isEmpty()) { + featureRefs = + featureRefs.stream().map(ref -> ref.setProject(project)).collect(Collectors.toList()); + } - for (String featureRefString : featureRefStrings) { - String project; - String name; - int version = 0; - String[] featureSplit; - String[] projectSplit = featureRefString.split("/"); + return featureRefs.stream().map(ref -> ref.build()).collect(Collectors.toList()); + } - if (projectSplit.length == 2) { - project = projectSplit[0]; - featureSplit = projectSplit[1].split(":"); - } else if (projectSplit.length == 1) { - project = defaultProject; - featureSplit = projectSplit[0].split(":"); - } else { - throw new IllegalArgumentException( - String.format( - "Feature id '%s' has invalid format. Expected format: ::.", - featureRefString)); - } + /** + * Parse a feature reference proto builder from the given featureRefString + * + * @param featureRefString string feature reference to parse from. + * @param ignoreProject If true, ignores any project specified in the given ref string. + * Otherwise, throws an {@link IllegalArgumentException} if a project is specified. + * @return a parsed {@link FeatureReference.Builder} + */ + public static FeatureReference.Builder parseFeatureRef( + String featureRefString, boolean ignoreProject) { + featureRefString = featureRefString.trim(); + if (featureRefString.isEmpty()) { + throw new IllegalArgumentException("Cannot parse an empty feature reference"); + } + FeatureReference.Builder featureRef = FeatureReference.newBuilder(); - if (featureSplit.length == 2) { - name = featureSplit[0]; - try { - version = Integer.parseInt(featureSplit[1]); - } catch (NumberFormatException e) { - throw new IllegalArgumentException( - String.format( - "Feature id '%s' contains invalid version. Expected format: /:.", - featureRefString)); - } - } else if (featureSplit.length == 1) { - name = featureSplit[0]; + // parse project if specified + if (featureRefString.contains("/")) { + if (ignoreProject) { + String[] projectSplit = featureRefString.split("/"); + featureRefString = projectSplit[1]; } else { throw new IllegalArgumentException( - String.format( - "Feature id '%s' has invalid format. Expected format: /:.", - featureRefString)); + String.format("Unsupported feature reference: %s", featureRefString)); } + } - if (project.isEmpty() || name.isEmpty() || version < 0) { - throw new IllegalArgumentException( - String.format( - "Feature id '%s' has invalid format.
Expected format: /:.", - featureRefString)); - } + // parse featureset if specified + if (featureRefString.contains(":")) { + String[] featureSetSplit = featureRefString.split(":"); + featureRef.setFeatureSet(featureSetSplit[0]); + featureRefString = featureSetSplit[1]; + } + featureRef.setName(featureRefString); + return featureRef; + } - featureRefs.add( - FeatureReference.newBuilder() - .setName(name) - .setProject(project) - .setVersion(version) - .build()); + /** + * Render a feature reference as a string. + * + * @param featureReference to render as string + * @return string representation of the feature reference. + */ + public static String renderFeatureRef(FeatureReference featureReference) { + String refStr = ""; + // In proto3, unset string and int fields default to "" and 0 respectively + if (!featureReference.getFeatureSet().isEmpty()) { + refStr += featureReference.getFeatureSet() + ":"; } + refStr = refStr + featureReference.getName(); - ; - return featureRefs; + return refStr; } } diff --git a/sdk/java/src/main/java/com/gojek/feast/Row.java b/sdk/java/src/main/java/com/gojek/feast/Row.java index ceef139aa1..4a3035f8f1 100644 --- a/sdk/java/src/main/java/com/gojek/feast/Row.java +++ b/sdk/java/src/main/java/com/gojek/feast/Row.java @@ -19,8 +19,8 @@ import com.google.protobuf.ByteString; import com.google.protobuf.Timestamp; import com.google.protobuf.util.Timestamps; -import feast.types.ValueProto.Value; -import feast.types.ValueProto.Value.ValCase; +import feast.proto.types.ValueProto.Value; +import feast.proto.types.ValueProto.Value.ValCase; import java.time.Instant; import java.util.ArrayList; import java.util.HashMap; @@ -76,7 +76,7 @@ public Row set(String fieldName, Object value) { fields.put( fieldName, Value.newBuilder().setBytesVal(ByteString.copyFrom((byte[]) value)).build()); break; - case "feast.types.ValueProto.Value": + case "feast.proto.types.ValueProto.Value": fields.put(fieldName, (Value) value); break; default: diff --git a/sdk/java/src/test/java/com/gojek/feast/RequestUtilTest.java b/sdk/java/src/test/java/com/gojek/feast/RequestUtilTest.java index 3b9429ad8f..6064311555 100644 --- a/sdk/java/src/test/java/com/gojek/feast/RequestUtilTest.java +++ b/sdk/java/src/test/java/com/gojek/feast/RequestUtilTest.java @@ -19,12 +19,13 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; +import com.google.common.collect.ImmutableList; import com.google.protobuf.TextFormat; -import feast.serving.ServingAPIProto.FeatureReference; +import feast.proto.serving.ServingAPIProto.FeatureReference; import java.util.Arrays; -import java.util.Collections; import java.util.Comparator; import java.util.List; +import java.util.stream.Collectors; import java.util.stream.Stream; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; @@ -33,56 +34,27 @@ class RequestUtilTest { - private static Stream provideValidFeatureIds() { + private static Stream provideValidFeatureRefs() { return Stream.of( Arguments.of( - Collections.singletonList("driver_project/driver_id:1"), - Collections.singletonList( - FeatureReference.newBuilder() - .setProject("driver_project") - .setName("driver_id") - .setVersion(1) - .build())), - Arguments.of( - Arrays.asList("driver_project/driver_id:1", "driver_project/driver_name:1"), + Arrays.asList("driver:driver_id", "driver_id"), Arrays.asList( FeatureReference.newBuilder() .setProject("driver_project") + .setFeatureSet("driver")
.setName("driver_id") - .setVersion(1) .build(), FeatureReference.newBuilder() .setProject("driver_project") - .setName("driver_name") - .setVersion(1) - .build())), - Arguments.of( - Arrays.asList( - "driver_project/driver_id:1", - "driver_project/driver_name:1", - "booking_project/driver_name"), - Arrays.asList( - FeatureReference.newBuilder() - .setProject("driver_project") - .setVersion(1) .setName("driver_id") - .build(), - FeatureReference.newBuilder() - .setProject("driver_project") - .setVersion(1) - .setName("driver_name") - .build(), - FeatureReference.newBuilder() - .setProject("booking_project") - .setName("driver_name") .build()))); } @ParameterizedTest - @MethodSource("provideValidFeatureIds") - void createFeatureSets_ShouldReturnFeatureSetsForValidFeatureIds( + @MethodSource("provideValidFeatureRefs") + void createFeatureSets_ShouldReturnFeatureSetsForValidFeatureRefs( List input, List expected) { - List actual = RequestUtil.createFeatureRefs(input, "my-project"); + List actual = RequestUtil.createFeatureRefs(input, "driver_project"); // Order of the actual and expected featureSets do no not matter actual.sort(Comparator.comparing(FeatureReference::getName)); expected.sort(Comparator.comparing(FeatureReference::getName)); @@ -94,23 +66,35 @@ void createFeatureSets_ShouldReturnFeatureSetsForValidFeatureIds( } } + @ParameterizedTest + @MethodSource("provideValidFeatureRefs") + void renderFeatureRef_ShouldReturnFeatureRefString( + List expected, List input) { + input = + input.stream() + .map(ref -> ref.toBuilder().clearProject().build()) + .collect(Collectors.toList()); + List actual = + input.stream().map(ref -> RequestUtil.renderFeatureRef(ref)).collect(Collectors.toList()); + assertEquals(expected.size(), actual.size()); + for (int i = 0; i < expected.size(); i++) { + assertEquals(expected.get(i), actual.get(i)); + } + } + private static Stream provideInvalidFeatureRefs() { - return Stream.of( - Arguments.of(Collections.singletonList("missing:bad_version")), - Arguments.of(Collections.singletonList(""))); + return Stream.of(Arguments.of(ImmutableList.of("project/feature", ""))); } @ParameterizedTest @MethodSource("provideInvalidFeatureRefs") void createFeatureSets_ShouldThrowExceptionForInvalidFeatureRefs(List input) { - assertThrows( - IllegalArgumentException.class, () -> RequestUtil.createFeatureRefs(input, "my-project")); + assertThrows(IllegalArgumentException.class, () -> RequestUtil.createFeatureRefs(input, "")); } @ParameterizedTest @NullSource void createFeatureSets_ShouldThrowExceptionForNullFeatureRefs(List input) { - assertThrows( - IllegalArgumentException.class, () -> RequestUtil.createFeatureRefs(input, "my-project")); + assertThrows(IllegalArgumentException.class, () -> RequestUtil.createFeatureRefs(input, "")); } } diff --git a/sdk/python/feast/cli.py b/sdk/python/feast/cli.py index cd1146b481..6d1df29276 100644 --- a/sdk/python/feast/cli.py +++ b/sdk/python/feast/cli.py @@ -22,9 +22,9 @@ from feast.client import Client from feast.config import Config +from feast.core.IngestionJob_pb2 import IngestionJobStatus from feast.feature_set import FeatureSet, FeatureSetRef from feast.loaders.yaml import yaml_loader -from feast.core.IngestionJob_pb2 import IngestionJobStatus _logger = logging.getLogger(__name__) @@ -128,15 +128,16 @@ def feature_set_list(): feast_client = Client() # type: Client table = [] - for fs in feast_client.list_feature_sets(): - table.append([fs.name, fs.version, repr(fs)]) + for fs in feast_client.list_feature_sets(project="*", name="*"): + 
table.append([fs.name, repr(fs)]) from tabulate import tabulate - print(tabulate(table, headers=["NAME", "VERSION", "REFERENCE"], tablefmt="plain")) + print(tabulate(table, headers=["NAME", "REFERENCE"], tablefmt="plain")) @feature_set.command("apply") +# TODO: add project option to overwrite project setting. @click.option( "--filename", "-f", @@ -155,17 +156,22 @@ def feature_set_create(filename): @feature_set.command("describe") @click.argument("name", type=click.STRING) -@click.argument("version", type=click.INT) -def feature_set_describe(name: str, version: int): +@click.option( + "--project", + "-p", + help="Project that feature set belongs to", + type=click.STRING, + default="default", +) +def feature_set_describe(name: str, project: str): """ Describe a feature set """ feast_client = Client() # type: Client - fs = feast_client.get_feature_set(name=name, version=version) + fs = feast_client.get_feature_set(name=name, project=project) + if not fs: - print( - f'Feature set with name "{name}" and version "{version}" could not be found' - ) + print(f'Feature set with name "{name}" could not be found') return print(yaml.dump(yaml.safe_load(str(fs)), default_flow_style=False, sort_keys=False)) @@ -329,9 +335,6 @@ def ingest_job_restart(job_id: str): @click.option( "--name", "-n", help="Feature set name to ingest data into", required=True ) -@click.option( - "--version", "-v", help="Feature set version to ingest data into", type=int -) @click.option( "--filename", "-f", @@ -345,13 +348,13 @@ def ingest_job_restart(job_id: str): type=click.Choice(["CSV"], case_sensitive=False), help="Type of file to ingest. Defaults to CSV.", ) -def ingest(name, version, filename, file_type): +def ingest(name, filename, file_type): """ Ingest feature data into a feature set """ feast_client = Client() # type: Client - feature_set = feast_client.get_feature_set(name=name, version=version) + feature_set = feast_client.get_feature_set(name=name) feature_set.ingest_file(file_path=filename) diff --git a/sdk/python/feast/client.py b/sdk/python/feast/client.py index f5aed118cf..1c34213ad8 100644 --- a/sdk/python/feast/client.py +++ b/sdk/python/feast/client.py @@ -18,6 +18,7 @@ import shutil import tempfile import time +import uuid from collections import OrderedDict from math import ceil from typing import Dict, List, Optional, Tuple, Union @@ -35,6 +36,7 @@ CONFIG_PROJECT_KEY, CONFIG_SERVING_SECURE_KEY, CONFIG_SERVING_URL_KEY, + FEAST_DEFAULT_OPTIONS, ) from feast.core.CoreService_pb2 import ( ApplyFeatureSetRequest, @@ -48,16 +50,17 @@ GetFeatureSetResponse, ListFeatureSetsRequest, ListFeatureSetsResponse, + ListIngestionJobsRequest, ListProjectsRequest, ListProjectsResponse, - ListIngestionJobsRequest, RestartIngestionJobRequest, StopIngestionJobRequest, ) from feast.core.CoreService_pb2_grpc import CoreServiceStub from feast.core.FeatureSet_pb2 import FeatureSetStatus +from feast.feature import FeatureRef from feast.feature_set import Entity, FeatureSet, FeatureSetRef -from feast.job import RetrievalJob, IngestJob +from feast.job import IngestJob, RetrievalJob from feast.loaders.abstract_producer import get_producer from feast.loaders.file import export_source_to_staging_location from feast.loaders.ingest import KAFKA_CHUNK_PRODUCTION_TIMEOUT, get_feature_row_chunks @@ -192,7 +195,13 @@ def version(self): """ Returns version information from Feast Core and Feast Serving """ - result = {} + import pkg_resources + + result = { + "sdk": {"version": pkg_resources.get_distribution("feast").version}, + "serving": 
"not configured", + "core": "not configured", + } if self.serving_url: self._connect_serving() @@ -289,13 +298,15 @@ def project(self) -> Union[str, None]: """ return self._config.get(CONFIG_PROJECT_KEY) - def set_project(self, project: str): + def set_project(self, project: Optional[str] = None): """ Set currently active Feast project Args: - project: Project to set as active + project: Project to set as active. If unset, will reset to the default project. """ + if project is None: + project = FEAST_DEFAULT_OPTIONS[CONFIG_PROJECT_KEY] self._config.set(CONFIG_PROJECT_KEY, project) def list_projects(self) -> List[str]: @@ -338,13 +349,17 @@ def archive_project(self, project): """ self._connect_core() - self._core_service_stub.ArchiveProject( - ArchiveProjectRequest(name=project), - timeout=self._config.getint(CONFIG_GRPC_CONNECTION_TIMEOUT_DEFAULT_KEY), - ) # type: ArchiveProjectResponse + try: + self._core_service_stub.ArchiveProject( + ArchiveProjectRequest(name=project), + timeout=self._config.getint(CONFIG_GRPC_CONNECTION_TIMEOUT_DEFAULT_KEY), + ) # type: ArchiveProjectResponse + except grpc.RpcError as e: + raise grpc.RpcError(e.details()) + # revert to the default project if self._project == project: - self._project = "" + self._project = FEAST_DEFAULT_OPTIONS[CONFIG_PROJECT_KEY] def apply(self, feature_sets: Union[List[FeatureSet], FeatureSet]): """ @@ -376,13 +391,7 @@ def _apply_feature_set(self, feature_set: FeatureSet): feature_set.is_valid() feature_set_proto = feature_set.to_proto() if len(feature_set_proto.spec.project) == 0: - if self.project is None: - raise ValueError( - f"No project found in feature set {feature_set.name}. " - f"Please set the project within the feature set or within " - f"your Feast Client." - ) - else: + if self.project is not None: feature_set_proto.spec.project = self.project # Convert the feature set to a request and send to Feast Core @@ -399,9 +408,10 @@ def _apply_feature_set(self, feature_set: FeatureSet): # If the feature set has changed, update the local copy if apply_fs_response.status == ApplyFeatureSetResponse.Status.CREATED: - print( - f'Feature set updated/created: "{applied_fs.name}:{applied_fs.version}"' - ) + print(f'Feature set created: "{applied_fs.name}"') + + if apply_fs_response.status == ApplyFeatureSetResponse.Status.UPDATED: + print(f'Feature set updated: "{applied_fs.name}"') # If no change has been applied, do nothing if apply_fs_response.status == ApplyFeatureSetResponse.Status.NO_CHANGE: @@ -411,7 +421,7 @@ def _apply_feature_set(self, feature_set: FeatureSet): feature_set._update_from_feature_set(applied_fs) def list_feature_sets( - self, project: str = None, name: str = None, version: str = None + self, project: str = None, name: str = None, ) -> List[FeatureSet]: """ Retrieve a list of feature sets from Feast Core @@ -419,7 +429,6 @@ def list_feature_sets( Args: project: Filter feature sets based on project name name: Filter feature sets based on feature set name - version: Filter feature sets based on version numbf, Returns: List of feature sets @@ -435,12 +444,7 @@ def list_feature_sets( if name is None: name = "*" - if version is None: - version = "*" - - filter = ListFeatureSetsRequest.Filter( - project=project, feature_set_name=name, feature_set_version=version - ) + filter = ListFeatureSetsRequest.Filter(project=project, feature_set_name=name) # Get latest feature sets from Feast Core feature_set_protos = self._core_service_stub.ListFeatureSets( @@ -456,16 +460,14 @@ def list_feature_sets( return feature_sets def 
get_feature_set( - self, name: str, version: int = None, project: str = None + self, name: str, project: str = None ) -> Union[FeatureSet, None]: """ - Retrieves a feature set. If no version is specified then the latest - version will be returned. + Retrieves a feature set. Args: project: Feast project that this feature set belongs to name: Name of feature set - version: Version of feature set Returns: Returns either the specified feature set, or raises an exception if @@ -479,14 +481,9 @@ def get_feature_set( else: raise ValueError("No project has been configured.") - if version is None: - version = 0 - try: get_feature_set_response = self._core_service_stub.GetFeatureSet( - GetFeatureSetRequest( - project=project, name=name.strip(), version=int(version) - ) + GetFeatureSetRequest(project=project, name=name.strip()) ) # type: GetFeatureSetResponse except grpc.RpcError as e: raise grpc.RpcError(e.details()) @@ -509,23 +506,24 @@ def get_batch_features( self, feature_refs: List[str], entity_rows: Union[pd.DataFrame, str], - default_project: str = None, + project: str = None, ) -> RetrievalJob: """ Retrieves historical features from a Feast Serving deployment. Args: - feature_refs (List[str]): - List of feature references that will be returned for each entity. - Each feature reference should have the following format - "project/feature:version". - + feature_refs: List of feature references that will be returned for each entity. + Each feature reference should have the following format: + "feature_set:feature" where "feature_set" & "feature" refer to + the feature set and feature names, respectively. + Only the feature name is required. entity_rows (Union[pd.DataFrame, str]): Pandas dataframe containing entities and a 'datetime' column. Each entity in a feature set must be present as a column in this dataframe. The datetime column must contain timestamps in datetime64 format. - default_project: Default project where feature values will be found. + project: Specifies the project that contains the FeatureSets + to which the requested features belong.
Returns: feast.job.RetrievalJob: @@ -538,7 +536,7 @@ def get_batch_features( >>> from datetime import datetime >>> >>> feast_client = Client(core_url="localhost:6565", serving_url="localhost:6566") - >>> feature_refs = ["my_project/bookings_7d:1", "booking_14d"] + >>> feature_refs = ["my_project/bookings_7d", "booking_14d"] >>> entity_rows = pd.DataFrame( >>> { >>> "datetime": [pd.datetime.now() for _ in range(3)], @@ -553,10 +551,6 @@ def get_batch_features( self._connect_serving() - feature_references = _build_feature_references( - feature_refs=feature_refs, default_project=default_project - ) - # Retrieve serving information to determine store type and # staging location serving_info = self._serving_service_stub.GetFeastServingInfo( @@ -566,8 +560,8 @@ def get_batch_features( if serving_info.type != FeastServingType.FEAST_SERVING_TYPE_BATCH: raise Exception( - f'You are connected to a store "{self._serving_url}" which ' - f"does not support batch retrieval " + f'You are connected to a store "{self.serving_url}" which ' + f"does not support batch retrieval" ) if isinstance(entity_rows, pd.DataFrame): @@ -584,7 +578,7 @@ def get_batch_features( # String based source if not entity_rows.endswith((".avro", "*")): raise Exception( - f"Only .avro and wildcard paths are accepted as entity_rows" + "Only .avro and wildcard paths are accepted as entity_rows" ) else: raise Exception( @@ -597,9 +591,11 @@ def get_batch_features( staged_files = export_source_to_staging_location( entity_rows, serving_info.job_staging_location ) # type: List[str] - request = GetBatchFeaturesRequest( - features=feature_references, + features=_build_feature_references( + feature_ref_strs=feature_refs, + project=project if project is not None else self.project, + ), dataset_source=DatasetSource( file_source=DatasetSource.FileSource( file_uris=staged_files, data_format=DataFormat.DATA_FORMAT_AVRO @@ -608,30 +604,33 @@ def get_batch_features( ) # Retrieve Feast Job object to manage life cycle of retrieval - response = self._serving_service_stub.GetBatchFeatures(request) + try: + response = self._serving_service_stub.GetBatchFeatures(request) + except grpc.RpcError as e: + raise grpc.RpcError(e.details()) + return RetrievalJob(response.job, self._serving_service_stub) def get_online_features( self, feature_refs: List[str], entity_rows: List[GetOnlineFeaturesRequest.EntityRow], - default_project: Optional[str] = None, + project: Optional[str] = None, ) -> GetOnlineFeaturesResponse: """ Retrieves the latest online feature data from Feast Serving Args: - feature_refs: List of feature references in the following format - [project]/[feature_name]:[version]. Only the feature name - is a required component in the reference. - example: - ["my_project/my_feature_1:3", - "my_project3/my_feature_4:1",] + feature_refs: List of feature references that will be returned for each entity. + Each feature reference should have the following format: + "feature_set:feature" where "feature_set" & "feature" refer to + the feature and feature set names respectively. + Only the feature name is required. entity_rows: List of GetFeaturesRequest.EntityRow where each row contains entities. Timestamp should not be set for online retrieval. All entity types within a feature - default_project: This project will be used if the project name is - not provided in the feature reference + project: Specifies the project which contain the FeatureSets + which the requested features belong to. 
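+
+                For example, a minimal sketch ("driver", "driver_id" and
+                "driver_project" are illustrative names):
+
+                >>> from feast.serving.ServingService_pb2 import GetOnlineFeaturesRequest
+                >>> from feast.types.Value_pb2 import Value
+                >>> entity_rows = [GetOnlineFeaturesRequest.EntityRow(
+                >>>     fields={"driver_id": Value(int64_val=123)})]
+                >>> response = feast_client.get_online_features(
+                >>>     feature_refs=["driver:driver_id"],
+                >>>     entity_rows=entity_rows,
+                >>>     project="driver_project")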
Returns: Returns a list of maps where each item in the list contains the @@ -639,17 +638,45 @@ def get_online_features( """ self._connect_serving() - return self._serving_service_stub.GetOnlineFeatures( - GetOnlineFeaturesRequest( - features=_build_feature_references( - feature_refs=feature_refs, - default_project=( - default_project if not self.project else self.project + try: + response = self._serving_service_stub.GetOnlineFeatures( + GetOnlineFeaturesRequest( + features=_build_feature_references( + feature_ref_strs=feature_refs, + project=project if project is not None else self.project, ), - ), - entity_rows=entity_rows, + entity_rows=entity_rows, + ) ) - ) + # collect entity row refs + entity_refs = set() + for entity_row in entity_rows: + entity_refs.update(entity_row.fields.keys()) + + strip_field_values = [] + for field_value in response.field_values: + # strip the project part the string feature references returned from serving + strip_fields = {} + for ref_str, value in field_value.fields.items(): + # find and ignore entities + if ref_str in entity_refs: + strip_fields[ref_str] = value + else: + strip_ref_str = repr( + FeatureRef.from_str(ref_str, ignore_project=True) + ) + strip_fields[strip_ref_str] = value + strip_field_values.append( + GetOnlineFeaturesResponse.FieldValues(fields=strip_fields) + ) + + del response.field_values[:] + response.field_values.extend(strip_field_values) + + except grpc.RpcError as e: + raise grpc.RpcError(e.details()) + + return response def list_ingest_jobs( self, @@ -722,19 +749,16 @@ def ingest( feature_set: Union[str, FeatureSet], source: Union[pd.DataFrame, str], chunk_size: int = 10000, - version: int = None, - force_update: bool = False, max_workers: int = max(CPU_COUNT - 1, 1), disable_progress_bar: bool = False, timeout: int = KAFKA_CHUNK_PRODUCTION_TIMEOUT, - ) -> None: + ) -> str: """ Loads feature data into Feast for a specific feature set. Args: feature_set (typing.Union[str, feast.feature_set.FeatureSet]): Feature set object or the string name of the feature set - (without a version). source (typing.Union[pd.DataFrame, str]): Either a file path or Pandas Dataframe to ingest into Feast @@ -746,13 +770,6 @@ def ingest( chunk_size (int): Amount of rows to load and ingest at a time. - version (int): - Feature set version. - - force_update (bool): - Automatically update feature set based on source data prior to - ingesting. This will also register changes to Feast. - max_workers (int): Number of worker processes to use to encode values. @@ -763,18 +780,16 @@ def ingest( Timeout in seconds to wait for completion. 
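+
+                For example (a sketch; assumes a registered "driver" feature set whose
+                fields match the columns of driver_df):
+
+                >>> ingestion_id = feast_client.ingest("driver", driver_df)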
Returns: - None: - None + str: + ingestion id for this dataset """ if isinstance(feature_set, FeatureSet): name = feature_set.name - if version is None: - version = feature_set.version elif isinstance(feature_set, str): name = feature_set else: - raise Exception(f"Feature set name must be provided") + raise Exception("Feature set name must be provided") # Read table and get row count dir_path, dest_path = _read_table_from_source(source, chunk_size, max_workers) @@ -783,21 +798,13 @@ def ingest( row_count = pq_file.metadata.num_rows - # Update the feature set based on PyArrow table of first row group - if force_update: - feature_set.infer_fields_from_pa( - table=pq_file.read_row_group(0), - discard_unused_fields=True, - replace_existing_features=True, - ) - self.apply(feature_set) current_time = time.time() print("Waiting for feature set to be ready for ingestion...") while True: if timeout is not None and time.time() - current_time >= timeout: raise TimeoutError("Timed out waiting for feature set to be ready") - feature_set = self.get_feature_set(name, version) + feature_set = self.get_feature_set(name) if ( feature_set is not None and feature_set.status == FeatureSetStatus.STATUS_READY @@ -817,6 +824,7 @@ def ingest( # Loop optimization declarations produce = producer.produce flush = producer.flush + ingestion_id = _generate_ingestion_id(feature_set) # Transform and push data to Kafka if feature_set.source.source_type == "Kafka": @@ -824,6 +832,7 @@ def ingest( file=dest_path, row_groups=list(range(pq_file.num_row_groups)), fs=feature_set, + ingestion_id=ingestion_id, max_workers=max_workers, ): @@ -851,61 +860,43 @@ def ingest( print("Removing temporary file(s)...") shutil.rmtree(dir_path) - return None + return ingestion_id def _build_feature_references( - feature_refs: List[str], default_project: str = None + feature_ref_strs: List[str], project: Optional[str] = None ) -> List[FeatureReference]: """ - Builds a list of FeatureSet objects from feature set ids in order to - retrieve feature data from Feast Serving + Builds a list of FeatureReference protos from string feature references Args: - feature_refs: List of feature reference strings - ("project/feature:version") - default_project: This project will be used if the project name is - not provided in the feature reference - """ - - features = [] + feature_ref_strs: List of string feature references + project: Optionally sets the project on the parsed feature references. - for feature_ref in feature_refs: - project_split = feature_ref.split("/") - version = 0 + Returns: + A list of FeatureReference protos parsed from args.
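# Illustrative sketch, not part of this change: expected behaviour of the
# parser documented above. _build_feature_references is module-private; it is
# exercised here only to make the new reference semantics concrete, with
# hypothetical names.
refs = _build_feature_references(
    feature_ref_strs=["bookings:booking_14d", "booking_7d"], project="my_project"
)
# Both protos carry project="my_project"; only the first names a feature set,
# and versions no longer appear anywhere.
assert [(r.project, r.feature_set, r.name) for r in refs] == [
    ("my_project", "bookings", "booking_14d"),
    ("my_project", "", "booking_7d"),
]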
+ """ + feature_refs = [FeatureRef.from_str(ref_str) for ref_str in feature_ref_strs] + feature_ref_protos = [ref.to_proto() for ref in feature_refs] + # apply project if specified + if project is not None: + for feature_ref_proto in feature_ref_protos: + feature_ref_proto.project = project + return feature_ref_protos - if len(project_split) == 2: - project, feature_version = project_split - elif len(project_split) == 1: - feature_version = project_split[0] - if default_project is None: - raise ValueError( - f"No project specified in {feature_ref} and no default project provided" - ) - project = default_project - else: - raise ValueError( - f'Could not parse feature ref {feature_ref}, expecting "project/feature:version"' - ) - feature_split = feature_version.split(":") - if len(feature_split) == 2: - name, version = feature_split - version = int(version) - elif len(feature_split) == 1: - name = feature_split[0] - else: - raise ValueError( - f'Could not parse feature ref {feature_ref}, expecting "project/feature:version"' - ) +def _generate_ingestion_id(feature_set: FeatureSet) -> str: + """ + Generates a UUID from the feature set name, version, and the current time. - if len(project) == 0 or len(name) == 0 or version < 0: - raise ValueError( - f'Could not parse feature ref {feature_ref}, expecting "project/feature:version"' - ) + Args: + feature_set: Feature set of the dataset to be ingested. - features.append(FeatureReference(project=project, name=name, version=version)) - return features + Returns: + UUID unique to current time and the feature set provided. + """ + uuid_str = f"{feature_set.name}_{int(time.time())}" + return str(uuid.uuid3(uuid.NAMESPACE_DNS, uuid_str)) def _read_table_from_source( diff --git a/sdk/python/feast/core/__init__.py b/sdk/python/feast/core/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/entity.py b/sdk/python/feast/entity.py index 9c5a027b97..012d01631a 100644 --- a/sdk/python/feast/entity.py +++ b/sdk/python/feast/entity.py @@ -29,26 +29,7 @@ def to_proto(self) -> EntityProto: Returns EntitySpec object """ value_type = ValueTypeProto.ValueType.Enum.Value(self.dtype.name) - return EntityProto( - name=self.name, - value_type=value_type, - presence=self.presence, - group_presence=self.group_presence, - shape=self.shape, - value_count=self.value_count, - domain=self.domain, - int_domain=self.int_domain, - float_domain=self.float_domain, - string_domain=self.string_domain, - bool_domain=self.bool_domain, - struct_domain=self.struct_domain, - natural_language_domain=self.natural_language_domain, - image_domain=self.image_domain, - mid_domain=self.mid_domain, - url_domain=self.url_domain, - time_domain=self.time_domain, - time_of_day_domain=self.time_of_day_domain, - ) + return EntityProto(name=self.name, value_type=value_type,) @classmethod def from_proto(cls, entity_proto: EntityProto): @@ -62,7 +43,4 @@ def from_proto(cls, entity_proto: EntityProto): Entity object """ entity = cls(name=entity_proto.name, dtype=ValueType(entity_proto.value_type)) - entity.update_presence_constraints(entity_proto) - entity.update_shape_type(entity_proto) - entity.update_domain_info(entity_proto) return entity diff --git a/sdk/python/feast/feature.py b/sdk/python/feast/feature.py index 9c7ff20f9e..35e0ea32ae 100644 --- a/sdk/python/feast/feature.py +++ b/sdk/python/feast/feature.py @@ -14,6 +14,7 @@ from feast.core.FeatureSet_pb2 import FeatureSpec as FeatureProto from feast.field import Field +from feast.serving.ServingService_pb2 import 
FeatureReference as FeatureRefProto from feast.types import Value_pb2 as ValueTypeProto from feast.value_type import ValueType @@ -56,9 +57,92 @@ def from_proto(cls, feature_proto: FeatureProto): Feature object """ feature = cls( - name=feature_proto.name, dtype=ValueType(feature_proto.value_type) + name=feature_proto.name, dtype=ValueType(feature_proto.value_type), ) feature.update_presence_constraints(feature_proto) feature.update_shape_type(feature_proto) feature.update_domain_info(feature_proto) return feature + + +class FeatureRef: + """ Feature Reference represents a reference to a specific feature. """ + + def __init__(self, name: str, feature_set: str = None): + self.proto = FeatureRefProto(name=name, feature_set=feature_set) + + @classmethod + def from_proto(cls, proto: FeatureRefProto): + """ + Construct a feature reference from the given FeatureReference proto + + Args: + proto: Protobuf FeatureReference to construct from + + Returns: + FeatureRef that refers to the given feature + """ + return cls(name=proto.name, feature_set=proto.feature_set) + + @classmethod + def from_str(cls, feature_ref_str: str, ignore_project: bool = False): + """ + Parse the given string feature reference into a FeatureRef model. + The string feature reference should be in the format "feature_set:feature", + where "feature_set" and "feature" are the feature set name and feature name + respectively. + + Args: + feature_ref_str: String representation of the feature reference + ignore_project: Ignore the project in the given string feature reference + instead of throwing an error + + Returns: + FeatureRef that refers to the given feature + """ + proto = FeatureRefProto() + if "/" in feature_ref_str: + if ignore_project: + _, feature_ref_str = feature_ref_str.split("/") + else: + raise ValueError(f"Unsupported feature reference: {feature_ref_str}") + + # parse feature set name if specified + if ":" in feature_ref_str: + proto.feature_set, feature_ref_str = feature_ref_str.split(":") + + proto.name = feature_ref_str + return cls.from_proto(proto) + + def to_proto(self) -> FeatureRefProto: + """ + Convert and return this feature reference to protobuf. + + Returns: + Protobuf representation of this feature reference. + """ + return self.proto + + def __repr__(self): + # return string representation of the reference + # [project/][feature_set:]name + # in protov3 unset string and int fields default to "" and 0 + ref_str = "" + if len(self.proto.project) > 0: + ref_str += self.proto.project + "/" + if len(self.proto.feature_set) > 0: + ref_str += self.proto.feature_set + ":" + ref_str += self.proto.name + return ref_str + + def __str__(self): + # human readable string of the reference + return f"FeatureRef<{self.__repr__()}>" + + def __eq__(self, other): + # compare with other feature reference + return hash(self) == hash(other) + + def __hash__(self): + # hash this reference + return hash(repr(self)) diff --git a/sdk/python/feast/feature_set.py b/sdk/python/feast/feature_set.py index c6104f47a0..aebee52ca4 100644 --- a/sdk/python/feast/feature_set.py +++ b/sdk/python/feast/feature_set.py @@ -13,8 +13,7 @@ # limitations under the License.
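# Illustrative sketch, not part of this change: FeatureRef parsing semantics
# introduced above, with hypothetical names.
from feast.feature import FeatureRef

ref = FeatureRef.from_str("driver:trips_today")
assert ref.to_proto().feature_set == "driver"
assert ref.to_proto().name == "trips_today"
assert repr(ref) == "driver:trips_today"

# A project prefix is rejected unless explicitly ignored:
try:
    FeatureRef.from_str("driver_project/driver:trips_today")
except ValueError:
    pass
stripped = FeatureRef.from_str("driver_project/driver:trips_today", ignore_project=True)
assert repr(stripped) == "driver:trips_today"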
import warnings from collections import OrderedDict -from typing import Dict -from typing import List, Optional +from typing import Dict, List, Optional import pandas as pd import pyarrow as pa @@ -24,7 +23,6 @@ from google.protobuf.message import Message from pandas.api.types import is_datetime64_ns_dtype from pyarrow.lib import TimestampType -from tensorflow_metadata.proto.v0 import schema_pb2 from feast.core.FeatureSet_pb2 import FeatureSet as FeatureSetProto from feast.core.FeatureSet_pb2 import FeatureSetMeta as FeatureSetMetaProto @@ -41,6 +39,7 @@ pa_to_feast_value_type, python_type_to_feast_value_type, ) +from tensorflow_metadata.proto.v0 import schema_pb2 class FeatureSet: @@ -69,7 +68,6 @@ def __init__( else: self._source = source self._max_age = max_age - self._version = None self._status = None self._created_timestamp = None @@ -196,20 +194,6 @@ def source(self, source: Source): """ self._source = source - @property - def version(self): - """ - Returns the version of this feature set - """ - return self._version - - @version.setter - def version(self, version): - """ - Sets the version of this feature set - """ - self._version = version - @property def max_age(self): """ @@ -622,7 +606,6 @@ def _update_from_feature_set(self, feature_set): self.name = feature_set.name self.project = feature_set.project - self.version = feature_set.version self.source = feature_set.source self.max_age = feature_set.max_age self.features = feature_set.features @@ -654,10 +637,10 @@ def is_valid(self): """ if not self.name: - raise ValueError(f"No name found in feature set.") + raise ValueError("No name found in feature set.") if len(self.entities) == 0: - raise ValueError(f"No entities found in feature set {self.name}") + raise ValueError(f"No entities found in feature set {self.name}") def import_tfx_schema(self, schema: schema_pb2.Schema): """ @@ -717,6 +700,8 @@ def export_tfx_schema(self) -> schema_pb2.Schema: ] for _, field in self._fields.items(): + if isinstance(field, Entity): + continue feature = schema_pb2.Feature() for attr in attributes_to_copy_from_field_to_feature: if getattr(field, attr) is None: @@ -808,7 +793,6 @@ def from_proto(cls, feature_set_proto: FeatureSetProto): if len(feature_set_proto.spec.project) == 0 else feature_set_proto.spec.project, ) - feature_set._version = feature_set_proto.spec.version feature_set._status = feature_set_proto.meta.status feature_set._created_timestamp = feature_set_proto.meta.created_timestamp return feature_set @@ -827,7 +811,6 @@ def to_proto(self) -> FeatureSetProto: spec = FeatureSetSpecProto( name=self.name, - version=self.version, project=self.project, max_age=self.max_age, source=self.source.to_proto() if self.source is not None else None, @@ -851,10 +834,8 @@ class FeatureSetRef: Represents a reference to a featureset """ - def __init__(self, project: str = None, name: str = None, version: int = None): - self.proto = FeatureSetReferenceProto( - project=project, name=name, version=version - ) + def __init__(self, project: str = None, name: str = None): + self.proto = FeatureSetReferenceProto(project=project, name=name) @property def project(self) -> str: @@ -870,13 +851,6 @@ def name(self) -> str: """ return self.proto.name - @property - def version(self) -> int: - """ - Get the version of feature set referenced by this reference - """ - return self.proto.version - @classmethod def from_feature_set(cls, feature_set: FeatureSet): """ @@ -888,7 +862,7 @@ def from_feature_set(cls, feature_set: FeatureSet): Returns: FeatureSetRef that refers
to the given feature set """ - return cls(feature_set.project, feature_set.name, feature_set.version) + return cls(feature_set.project, feature_set.name) @classmethod def from_str(cls, ref_str: str): @@ -902,15 +876,13 @@ def from_str(cls, ref_str: str): Returns: FeatureSetRef constructed from the string """ + project = "" if "/" in ref_str: project, ref_str = ref_str.split("/") - if ":" in ref_str: - ref_str, version_str = ref_str.split(":") - name = ref_str - return cls(project, name, int(version_str)) + return cls(project, ref_str) - def to_proto(self, arg1) -> FeatureSetReferenceProto: + def to_proto(self) -> FeatureSetReferenceProto: """ Convert and return this feature set reference to protobuf. @@ -925,14 +897,12 @@ def __str__(self): def __repr__(self): # return string representation of the reference - # [project/]name[:version] + # [project/]name + # in protov3 unset string and int fields default to "" and 0 ref_str = "" - if self.proto.project: + if len(self.proto.project) > 0: ref_str += self.proto.project + "/" - if self.proto.name: - ref_str += self.proto.name - if self.proto.version: - ref_str += ":" + str(self.proto.version).strip() + ref_str += self.proto.name return ref_str def __eq__(self, other): diff --git a/sdk/python/feast/job.py b/sdk/python/feast/job.py index 3576bc1b38..21b08224ba 100644 --- a/sdk/python/feast/job.py +++ b/sdk/python/feast/job.py @@ -1,16 +1,20 @@ import tempfile import time from datetime import datetime, timedelta -from urllib.parse import urlparse from typing import List +from urllib.parse import urlparse import fastavro import pandas as pd from google.cloud import storage from google.protobuf.json_format import MessageToJson +from feast.core.CoreService_pb2 import ListIngestionJobsRequest +from feast.core.CoreService_pb2_grpc import CoreServiceStub +from feast.core.IngestionJob_pb2 import IngestionJob as IngestJobProto +from feast.core.IngestionJob_pb2 import IngestionJobStatus +from feast.core.Store_pb2 import Store from feast.feature_set import FeatureSet -from feast.source import Source from feast.serving.ServingService_pb2 import ( DATA_FORMAT_AVRO, JOB_STATUS_DONE, @@ -18,11 +22,7 @@ ) from feast.serving.ServingService_pb2 import Job as JobProto from feast.serving.ServingService_pb2_grpc import ServingServiceStub -from feast.core.Store_pb2 import Store -from feast.core.IngestionJob_pb2 import IngestionJob as IngestJobProto -from feast.core.IngestionJob_pb2 import IngestionJobStatus -from feast.core.CoreService_pb2_grpc import CoreServiceStub -from feast.core.CoreService_pb2 import ListIngestionJobsRequest +from feast.source import Source # Maximum no of seconds to wait until the retrieval jobs status is DONE in Feast # Currently set to the maximum query execution time limit in BigQuery @@ -37,7 +37,9 @@ class RetrievalJob: A class representing a job for feature retrieval in Feast. 
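# Illustrative sketch, not part of this change: feature set references after
# the version field removal, with hypothetical names.
from feast.feature_set import FeatureSetRef

ref = FeatureSetRef(project="my_project", name="driver")
assert repr(ref) == "my_project/driver"  # previously "my_project/driver:1"
assert repr(FeatureSetRef.from_str("my_project/driver")) == repr(ref)
assert repr(FeatureSetRef.from_str("driver")) == "driver"  # project is optional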
""" - def __init__(self, job_proto: JobProto, serving_stub: ServingServiceStub): + def __init__( + self, job_proto: JobProto, serving_stub: ServingServiceStub, + ): """ Args: job_proto: Job proto object (wrapped by this job object) @@ -45,7 +47,8 @@ def __init__(self, job_proto: JobProto, serving_stub: ServingServiceStub): """ self.job_proto = job_proto self.serving_stub = serving_stub - self.storage_client = storage.Client(project=None) + # TODO: abstract away GCP depedency + self.gcs_client = storage.Client(project=None) @property def id(self): @@ -125,7 +128,7 @@ def result(self, timeout_sec: int = DEFAULT_TIMEOUT_SEC): for file_uri in uris: if file_uri.scheme == "gs": file_obj = tempfile.TemporaryFile() - self.storage_client.download_blob_to_file(file_uri.geturl(), file_obj) + self.gcs_client.download_blob_to_file(file_uri.geturl(), file_obj) elif file_uri.scheme == "file": file_obj = open(file_uri.path, "rb") else: diff --git a/sdk/python/feast/loaders/abstract_producer.py b/sdk/python/feast/loaders/abstract_producer.py index 6030d14ecc..14d9bc42b7 100644 --- a/sdk/python/feast/loaders/abstract_producer.py +++ b/sdk/python/feast/loaders/abstract_producer.py @@ -25,8 +25,6 @@ class AbstractProducer: def __init__(self, brokers: str, row_count: int, disable_progress_bar: bool): self.brokers = brokers self.row_count = row_count - self.error_count = 0 - self.last_exception = "" # Progress bar will always display average rate self.pbar = tqdm( @@ -45,8 +43,7 @@ def _inc_pbar(self, meta): self.pbar.update(1) def _set_error(self, exception: str): - self.error_count += 1 - self.last_exception = exception + raise Exception(exception) def print_results(self) -> None: """ @@ -62,24 +59,7 @@ def print_results(self) -> None: print("Ingestion complete!") - failed_message = ( - "" - if self.error_count == 0 - else f"\nFail: {self.error_count / self.row_count}" - ) - - last_exception_message = ( - "" - if self.last_exception == "" - else f"\nLast exception:\n{self.last_exception}" - ) - - print( - f"\nIngestion statistics:" - f"\nSuccess: {self.pbar.n}/{self.row_count}" - f"{failed_message}" - f"{last_exception_message}" - ) + print(f"\nIngestion statistics:" f"\nSuccess: {self.pbar.n}/{self.row_count}") return None @@ -129,7 +109,10 @@ def flush(self, timeout: Optional[int]): Returns: int: Number of messages still in queue. 
""" - return self.producer.flush(timeout=timeout) + messages = self.producer.flush(timeout=timeout) + if messages: + raise Exception("Not all Kafka messages are successfully delivered.") + return messages def _delivery_callback(self, err: str, msg) -> None: """ @@ -200,7 +183,10 @@ def flush(self, timeout: Optional[int]): KafkaTimeoutError: failure to flush buffered records within the provided timeout """ - return self.producer.flush(timeout=timeout) + messages = self.producer.flush(timeout=timeout) + if messages: + raise Exception("Not all Kafka messages are successfully delivered.") + return messages def get_producer( diff --git a/sdk/python/feast/loaders/ingest.py b/sdk/python/feast/loaders/ingest.py index b4490f025c..b439dbd302 100644 --- a/sdk/python/feast/loaders/ingest.py +++ b/sdk/python/feast/loaders/ingest.py @@ -25,7 +25,9 @@ KAFKA_CHUNK_PRODUCTION_TIMEOUT = 120 # type: int -def _encode_pa_tables(file: str, fs: FeatureSet, row_group_idx: int) -> List[bytes]: +def _encode_pa_tables( + file: str, feature_set: str, fields: dict, ingestion_id: str, row_group_idx: int +) -> List[bytes]: """ Helper function to encode a PyArrow table(s) read from parquet file(s) into FeatureRows. @@ -41,8 +43,14 @@ def _encode_pa_tables(file: str, fs: FeatureSet, row_group_idx: int) -> List[byt File directory of all the parquet file to encode. Parquet file must have more than one row group. - fs (feast.feature_set.FeatureSet): - FeatureSet describing parquet files. + feature_set (str): + Feature set reference in the format f"{project}/{name}". + + fields (dict[str, enum.Enum.ValueType]): + A mapping of field names to their value types. + + ingestion_id (str): + UUID unique to this ingestion job. row_group_idx(int): Row group index to read and encode into byte like FeatureRow @@ -61,12 +69,10 @@ def _encode_pa_tables(file: str, fs: FeatureSet, row_group_idx: int) -> List[byt # Preprocess the columns by converting all its values to Proto values proto_columns = { - field_name: pa_column_to_proto_column(field.dtype, table.column(field_name)) - for field_name, field in fs.fields.items() + field_name: pa_column_to_proto_column(dtype, table.column(field_name)) + for field_name, dtype in fields.items() } - feature_set = f"{fs.project}/{fs.name}:{fs.version}" - # List to store result feature_rows = [] @@ -78,7 +84,9 @@ def _encode_pa_tables(file: str, fs: FeatureSet, row_group_idx: int) -> List[byt # Iterate through the rows for row_idx in range(table.num_rows): feature_row = FeatureRow( - event_timestamp=datetime_col[row_idx], feature_set=feature_set + event_timestamp=datetime_col[row_idx], + feature_set=feature_set, + ingestion_id=ingestion_id, ) # Loop optimization declaration ext = feature_row.fields.extend @@ -94,7 +102,11 @@ def _encode_pa_tables(file: str, fs: FeatureSet, row_group_idx: int) -> List[byt def get_feature_row_chunks( - file: str, row_groups: List[int], fs: FeatureSet, max_workers: int + file: str, + row_groups: List[int], + fs: FeatureSet, + ingestion_id: str, + max_workers: int, ) -> Iterable[List[bytes]]: """ Iterator function to encode a PyArrow table read from a parquet file to @@ -112,6 +124,9 @@ def get_feature_row_chunks( fs (feast.feature_set.FeatureSet): FeatureSet describing parquet files. + ingestion_id (str): + UUID unique to this ingestion job. + max_workers (int): Maximum number of workers to spawn. @@ -120,8 +135,12 @@ def get_feature_row_chunks( Iterable list of byte encoded FeatureRow(s). 
""" + feature_set = f"{fs.project}/{fs.name}" + + field_map = {field.name: field.dtype for field in fs.fields.values()} + pool = Pool(max_workers) - func = partial(_encode_pa_tables, file, fs) + func = partial(_encode_pa_tables, file, feature_set, field_map, ingestion_id) for chunk in pool.imap(func, row_groups): yield chunk return diff --git a/sdk/python/feast/serving/__init__.py b/sdk/python/feast/serving/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/storage/__init__.py b/sdk/python/feast/storage/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/feast/type_map.py b/sdk/python/feast/type_map.py index 8df0499239..85def25fcb 100644 --- a/sdk/python/feast/type_map.py +++ b/sdk/python/feast/type_map.py @@ -147,7 +147,7 @@ def convert_series_to_proto_values(row: pd.Series): event_timestamp=_pd_datetime_to_timestamp_proto( dataframe[DATETIME_COLUMN].dtype, row[DATETIME_COLUMN] ), - feature_set=feature_set.name + ":" + str(feature_set.version), + feature_set=feature_set.project + "/" + feature_set.name, ) for field_name, field in feature_set.fields.items(): @@ -185,11 +185,7 @@ def convert_dict_to_proto_values( event_timestamp=_pd_datetime_to_timestamp_proto( df_datetime_dtype, row[DATETIME_COLUMN] ), - feature_set=feature_set.project - + "/" - + feature_set.name - + ":" - + str(feature_set.version), + feature_set=f"{feature_set.project}/{feature_set.name}", ) for field_name, field in feature_set.fields.items(): diff --git a/sdk/python/feast/types/__init__.py b/sdk/python/feast/types/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/setup.py b/sdk/python/setup.py index 9d8a378650..69ea44a187 100644 --- a/sdk/python/setup.py +++ b/sdk/python/setup.py @@ -19,7 +19,7 @@ NAME = "feast" DESCRIPTION = "Python SDK for Feast" -URL = "https://github.com/gojek/feast" +URL = "https://github.com/feast-dev/feast" AUTHOR = "Feast" REQUIRES_PYTHON = ">=3.6.0" diff --git a/sdk/python/tensorflow_metadata/__init__.py b/sdk/python/tensorflow_metadata/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/tensorflow_metadata/proto/__init__.py b/sdk/python/tensorflow_metadata/proto/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/tensorflow_metadata/proto/v0/__init__.py b/sdk/python/tensorflow_metadata/proto/v0/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/sdk/python/tests/data/tensorflow_metadata/bikeshare_feature_set.yaml b/sdk/python/tests/data/tensorflow_metadata/bikeshare_feature_set.yaml index daa0a35f0a..48c595712c 100644 --- a/sdk/python/tests/data/tensorflow_metadata/bikeshare_feature_set.yaml +++ b/sdk/python/tests/data/tensorflow_metadata/bikeshare_feature_set.yaml @@ -3,15 +3,6 @@ spec: entities: - name: station_id valueType: INT64 - intDomain: - min: 1 - max: 5000 - presence: - minFraction: 1.0 - minCount: 1 - shape: - dim: - - size: 1 features: - name: location valueType: STRING diff --git a/sdk/python/tests/data/tensorflow_metadata/bikeshare_schema.json b/sdk/python/tests/data/tensorflow_metadata/bikeshare_schema.json index e7a886053c..fa9f97cca0 100644 --- a/sdk/python/tests/data/tensorflow_metadata/bikeshare_schema.json +++ b/sdk/python/tests/data/tensorflow_metadata/bikeshare_schema.json @@ -85,25 +85,6 @@ } ] } - }, - { - "name": "station_id", - "type": "INT", - "presence": { - "minFraction": 1.0, - "minCount": "1" - }, - "int_domain": { - "min": 1, - "max": 5000 - }, - "shape": { 
- "dim": [ - { - "size": "1" - } - ] - } } ], "stringDomain": [ diff --git a/sdk/python/tests/feast_core_server.py b/sdk/python/tests/feast_core_server.py index b6efe2cb6d..3ac1b17d00 100644 --- a/sdk/python/tests/feast_core_server.py +++ b/sdk/python/tests/feast_core_server.py @@ -40,21 +40,12 @@ def ListFeatureSets(self, request: ListFeatureSetsRequest, context): or request.filter.feature_set_name == "*" or fs.spec.name == request.filter.feature_set_name ) - and ( - not request.filter.feature_set_version - or str(fs.spec.version) == request.filter.feature_set_version - or request.filter.feature_set_version == "*" - ) ] return ListFeatureSetsResponse(feature_sets=filtered_feature_set_response) def ApplyFeatureSet(self, request: ApplyFeatureSetRequest, context): feature_set = request.feature_set - if feature_set.spec.version is None: - feature_set.spec.version = 1 - else: - feature_set.spec.version = feature_set.spec.version + 1 if feature_set.spec.source.type == SourceTypeProto.INVALID: feature_set.spec.source.kafka_source_config.CopyFrom( diff --git a/sdk/python/tests/feast_serving_server.py b/sdk/python/tests/feast_serving_server.py index 364c190714..983e74e885 100644 --- a/sdk/python/tests/feast_serving_server.py +++ b/sdk/python/tests/feast_serving_server.py @@ -67,7 +67,6 @@ def GetOnlineFeatures(self, request: GetOnlineFeaturesRequest, context): feature_data_sets=[ GetOnlineFeaturesResponse.FeatureDataSet( name="feature_set_1", - version="1", feature_rows=[ FeatureRowProto.FeatureRow( feature_set="feature_set_1", diff --git a/sdk/python/tests/test_client.py b/sdk/python/tests/test_client.py index f7f5676ced..380557ce92 100644 --- a/sdk/python/tests/test_client.py +++ b/sdk/python/tests/test_client.py @@ -14,13 +14,18 @@ import pkgutil +import tempfile from concurrent import futures +from datetime import datetime from unittest import mock import grpc +import pandas as pd +import pandavro import pytest from google.protobuf.duration_pb2 import Duration from mock import MagicMock, patch +from pytz import timezone import dataframes import feast.core.CoreService_pb2_grpc as Core @@ -31,26 +36,30 @@ GetFeatureSetResponse, ListIngestionJobsResponse, ) -from feast.core.Store_pb2 import Store -from feast.core.IngestionJob_pb2 import ( - IngestionJob as IngestJobProto, - IngestionJobStatus, -) from feast.core.FeatureSet_pb2 import EntitySpec as EntitySpecProto from feast.core.FeatureSet_pb2 import FeatureSet as FeatureSetProto from feast.core.FeatureSet_pb2 import FeatureSetMeta as FeatureSetMetaProto from feast.core.FeatureSet_pb2 import FeatureSetSpec as FeatureSetSpecProto from feast.core.FeatureSet_pb2 import FeatureSetStatus as FeatureSetStatusProto from feast.core.FeatureSet_pb2 import FeatureSpec as FeatureSpecProto +from feast.core.IngestionJob_pb2 import IngestionJob as IngestJobProto +from feast.core.IngestionJob_pb2 import IngestionJobStatus from feast.core.Source_pb2 import KafkaSourceConfig, Source, SourceType +from feast.core.Store_pb2 import Store from feast.entity import Entity from feast.feature_set import Feature, FeatureSet, FeatureSetRef from feast.job import IngestJob from feast.serving.ServingService_pb2 import ( + DataFormat, + FeastServingType, + GetBatchFeaturesResponse, GetFeastServingInfoResponse, + GetJobResponse, GetOnlineFeaturesRequest, GetOnlineFeaturesResponse, ) +from feast.serving.ServingService_pb2 import Job as BatchRetrievalJob +from feast.serving.ServingService_pb2 import JobStatus, JobType from feast.source import KafkaSource from feast.types import 
Value_pb2 as ValueProto from feast.value_type import ValueType @@ -198,12 +207,17 @@ def test_get_online_features(self, mocked_client, mocker): grpc.insecure_channel("") ) - fields = dict() - for feature_num in range(1, 10): - fields[f"my_project/feature_{str(feature_num)}:1"] = ValueProto.Value( - int64_val=feature_num - ) - field_values = GetOnlineFeaturesResponse.FieldValues(fields=fields) + def int_val(x): + return ValueProto.Value(int64_val=x) + + # serving can return feature references with projects, + # get_online_features() should strip the project part. + field_values = GetOnlineFeaturesResponse.FieldValues( + fields={ + "driver_project/driver:driver_id": int_val(1), + "driver_project/driver_id": int_val(9), + } + ) response = GetOnlineFeaturesResponse() entity_rows = [] @@ -211,7 +225,7 @@ def test_get_online_features(self, mocked_client, mocker): response.field_values.append(field_values) entity_rows.append( GetOnlineFeaturesRequest.EntityRow( - fields={"customer_id": ValueProto.Value(int64_val=row_number)} + fields={"customer_id": int_val(row_number)} ) ) @@ -221,24 +235,17 @@ def test_get_online_features(self, mocked_client, mocker): return_value=response, ) + # NOTE: Feast Serving does not allow for feature references + # that specify the same feature in the same request response = mocked_client.get_online_features( entity_rows=entity_rows, - feature_refs=[ - "my_project/feature_1:1", - "my_project/feature_2:1", - "my_project/feature_3:1", - "my_project/feature_4:1", - "my_project/feature_5:1", - "my_project/feature_6:1", - "my_project/feature_7:1", - "my_project/feature_8:1", - "my_project/feature_9:1", - ], + feature_refs=["driver:driver_id", "driver_id"], + project="driver_project", ) # type: GetOnlineFeaturesResponse assert ( - response.field_values[0].fields["my_project/feature_1:1"].int64_val == 1 - and response.field_values[0].fields["my_project/feature_9:1"].int64_val == 9 + response.field_values[0].fields["driver:driver_id"].int64_val == 1 + and response.field_values[0].fields["driver_id"].int64_val == 9 ) @pytest.mark.parametrize( @@ -259,7 +266,6 @@ def test_get_feature_set(self, mocked_client, mocker): feature_set=FeatureSetProto( spec=FeatureSetSpecProto( name="my_feature_set", - version=2, max_age=Duration(seconds=3600), features=[ FeatureSpecProto( @@ -289,11 +295,10 @@ def test_get_feature_set(self, mocked_client, mocker): ), ) mocked_client.set_project("my_project") - feature_set = mocked_client.get_feature_set("my_feature_set", version=2) + feature_set = mocked_client.get_feature_set("my_feature_set") assert ( feature_set.name == "my_feature_set" - and feature_set.version == 2 and feature_set.fields["my_feature_1"].name == "my_feature_1" and feature_set.fields["my_feature_1"].dtype == ValueType.FLOAT and feature_set.fields["my_entity_1"].name == "my_entity_1" @@ -404,131 +409,125 @@ def test_stop_ingest_job(self, mocked_client, mocker): mocked_client.stop_ingest_job(ingest_job) assert mocked_client._core_service_stub.StopIngestionJob.called - # @pytest.mark.parametrize - # "mocked_client", - # [pytest.lazy_fixture("mock_client"), pytest.lazy_fixture("secure_mock_client")], - # ) - # def test_get_batch_features(self, mocked_client, mocker): - # - # mocked_client._serving_service_stub = Serving.ServingServiceStub( - # grpc.insecure_channel("") - # ) - # mocked_client._core_service_stub = Core.CoreServiceStub( - # grpc.insecure_channel("") - # ) - # - # mocker.patch.object( - # mocked_client._core_service_stub, - # "GetFeatureSet", - # 
return_value=GetFeatureSetResponse( - # feature_set=FeatureSetProto( - # spec=FeatureSetSpecProto( - # name="customer_fs", - # version=1, - # project="my_project", - # entities=[ - # EntitySpecProto( - # name="customer", value_type=ValueProto.ValueType.INT64 - # ), - # EntitySpecProto( - # name="transaction", - # value_type=ValueProto.ValueType.INT64, - # ), - # ], - # features=[ - # FeatureSpecProto( - # name="customer_feature_1", - # value_type=ValueProto.ValueType.FLOAT, - # ), - # FeatureSpecProto( - # name="customer_feature_2", - # value_type=ValueProto.ValueType.STRING, - # ), - # ], - # ), - # meta=FeatureSetMetaProto(status=FeatureSetStatusProto.STATUS_READY), - # ) - # ), - # ) - # - # expected_dataframe = pd.DataFrame( - # { - # "datetime": [datetime.utcnow() for _ in range(3)], - # "customer": [1001, 1002, 1003], - # "transaction": [1001, 1002, 1003], - # "my_project/customer_feature_1:1": [1001, 1002, 1003], - # "my_project/customer_feature_2:1": [1001, 1002, 1003], - # } - # ) - # - # final_results = tempfile.mktemp() - # to_avro(file_path_or_buffer=final_results, df=expected_dataframe) - # - # mocker.patch.object( - # mocked_client._serving_service_stub, - # "GetBatchFeatures", - # return_value=GetBatchFeaturesResponse( - # job=BatchFeaturesJob( - # id="123", - # type=JobType.JOB_TYPE_DOWNLOAD, - # status=JobStatus.JOB_STATUS_DONE, - # file_uris=[f"file://{final_results}"], - # data_format=DataFormat.DATA_FORMAT_AVRO, - # ) - # ), - # ) - # - # mocker.patch.object( - # mocked_client._serving_service_stub, - # "GetJob", - # return_value=GetJobResponse( - # job=BatchFeaturesJob( - # id="123", - # type=JobType.JOB_TYPE_DOWNLOAD, - # status=JobStatus.JOB_STATUS_DONE, - # file_uris=[f"file://{final_results}"], - # data_format=DataFormat.DATA_FORMAT_AVRO, - # ) - # ), - # ) - # - # mocker.patch.object( - # mocked_client._serving_service_stub, - # "GetFeastServingInfo", - # return_value=GetFeastServingInfoResponse( - # job_staging_location=f"file://{tempfile.mkdtemp()}/", - # type=FeastServingType.FEAST_SERVING_TYPE_BATCH, - # ), - # ) - # - # mocked_client.set_project("project1") - # response = mocked_client.get_batch_features( - # entity_rows=pd.DataFrame( - # { - # "datetime": [ - # pd.datetime.now(tz=timezone("Asia/Singapore")) for _ in range(3) - # ], - # "customer": [1001, 1002, 1003], - # "transaction": [1001, 1002, 1003], - # } - # ), - # feature_refs=[ - # "my_project/customer_feature_1:1", - # "my_project/customer_feature_2:1", - # ], - # ) # type: Job - # - # assert response.id == "123" and response.status == JobStatus.JOB_STATUS_DONE - # - # actual_dataframe = response.to_dataframe() - # - # assert actual_dataframe[ - # ["my_project/customer_feature_1:1", "my_project/customer_feature_2:1"] - # ].equals( - # expected_dataframe[ - # ["my_project/customer_feature_1:1", "my_project/customer_feature_2:1"] - # ] - # ) + @pytest.mark.parametrize( + "mocked_client", + [pytest.lazy_fixture("mock_client"), pytest.lazy_fixture("secure_mock_client")], + ) + def test_get_batch_features(self, mocked_client, mocker): + + mocked_client._serving_service_stub = Serving.ServingServiceStub( + grpc.insecure_channel("") + ) + mocked_client._core_service_stub = Core.CoreServiceStub( + grpc.insecure_channel("") + ) + + mocker.patch.object( + mocked_client._core_service_stub, + "GetFeatureSet", + return_value=GetFeatureSetResponse( + feature_set=FeatureSetProto( + spec=FeatureSetSpecProto( + name="driver", + project="driver_project", + entities=[ + EntitySpecProto( + name="driver", 
value_type=ValueProto.ValueType.INT64 + ), + EntitySpecProto( + name="transaction", + value_type=ValueProto.ValueType.INT64, + ), + ], + features=[ + FeatureSpecProto( + name="driver_id", value_type=ValueProto.ValueType.FLOAT, + ), + FeatureSpecProto( + name="driver_name", + value_type=ValueProto.ValueType.STRING, + ), + ], + ), + meta=FeatureSetMetaProto(status=FeatureSetStatusProto.STATUS_READY), + ) + ), + ) + + expected_dataframe = pd.DataFrame( + { + "datetime": [datetime.utcnow() for _ in range(3)], + "driver": [1001, 1002, 1003], + "transaction": [1001, 1002, 1003], + "driver_id": [1001, 1002, 1003], + } + ) + + final_results = tempfile.mktemp() + pandavro.to_avro(file_path_or_buffer=final_results, df=expected_dataframe) + + mocker.patch.object( + mocked_client._serving_service_stub, + "GetBatchFeatures", + return_value=GetBatchFeaturesResponse( + job=BatchRetrievalJob( + id="123", + type=JobType.JOB_TYPE_DOWNLOAD, + status=JobStatus.JOB_STATUS_DONE, + file_uris=[f"file://{final_results}"], + data_format=DataFormat.DATA_FORMAT_AVRO, + ) + ), + ) + + mocker.patch.object( + mocked_client._serving_service_stub, + "GetJob", + return_value=GetJobResponse( + job=BatchRetrievalJob( + id="123", + type=JobType.JOB_TYPE_DOWNLOAD, + status=JobStatus.JOB_STATUS_DONE, + file_uris=[f"file://{final_results}"], + data_format=DataFormat.DATA_FORMAT_AVRO, + ) + ), + ) + + mocker.patch.object( + mocked_client._serving_service_stub, + "GetFeastServingInfo", + return_value=GetFeastServingInfoResponse( + job_staging_location=f"file://{tempfile.mkdtemp()}/", + type=FeastServingType.FEAST_SERVING_TYPE_BATCH, + ), + ) + + mocked_client.set_project("project1") + # TODO: Abstract away GCS client and GCP dependency + # NOTE: Feast Serving does not allow for feature references + # that specify the same feature in the same request. 
+ with patch("google.cloud.storage.Client"): + response = mocked_client.get_batch_features( + entity_rows=pd.DataFrame( + { + "datetime": [ + pd.datetime.now(tz=timezone("Asia/Singapore")) + for _ in range(3) + ], + "driver": [1001, 1002, 1003], + "transaction": [1001, 1002, 1003], + } + ), + feature_refs=["driver:driver_id", "driver_id"], + project="driver_project", + ) # type: RetrievalJob + + assert response.id == "123" and response.status == JobStatus.JOB_STATUS_DONE + + actual_dataframe = response.to_dataframe() + + assert actual_dataframe[["driver_id"]].equals(expected_dataframe[["driver_id"]]) @@ -561,7 +560,16 @@ def test_apply_feature_set_success(self, test_client): and feature_sets[0].name == "my-feature-set-1" and feature_sets[0].features[0].name == "fs1-my-feature-1" and feature_sets[0].features[0].dtype == ValueType.INT64 + and feature_sets[0].features[1].name == "fs1-my-feature-2" + and feature_sets[0].features[1].dtype == ValueType.STRING + and feature_sets[0].entities[0].name == "fs1-my-entity-1" + and feature_sets[0].entities[0].dtype == ValueType.INT64 + and feature_sets[1].features[0].name == "fs2-my-feature-1" + and feature_sets[1].features[0].dtype == ValueType.STRING_LIST + and feature_sets[1].features[1].name == "fs2-my-feature-2" and feature_sets[1].features[1].dtype == ValueType.BYTES_LIST + and feature_sets[1].entities[0].name == "fs2-my-entity-1" + and feature_sets[1].entities[0].dtype == ValueType.INT64 ) @pytest.mark.parametrize( @@ -597,6 +605,38 @@ def test_feature_set_ingest_success(self, dataframe, test_client, mocker): # Ingest data into Feast test_client.ingest("driver-feature-set", dataframe) + @pytest.mark.parametrize( + "dataframe,test_client,exception", + [(dataframes.GOOD, pytest.lazy_fixture("client"), Exception)], + ) + def test_feature_set_ingest_throws_exception_if_kafka_down( + self, dataframe, test_client, exception, mocker + ): + + test_client.set_project("project1") + driver_fs = FeatureSet( + "driver-feature-set", + source=KafkaSource(brokers="localhost:4412", topic="test"), + ) + driver_fs.add(Feature(name="feature_1", dtype=ValueType.FLOAT)) + driver_fs.add(Feature(name="feature_2", dtype=ValueType.STRING)) + driver_fs.add(Feature(name="feature_3", dtype=ValueType.INT64)) + driver_fs.add(Entity(name="entity_id", dtype=ValueType.INT64)) + + # Register with Feast core + test_client.apply(driver_fs) + driver_fs = driver_fs.to_proto() + driver_fs.meta.status = FeatureSetStatusProto.STATUS_READY + + mocker.patch.object( + test_client._core_service_stub, + "GetFeatureSet", + return_value=GetFeatureSetResponse(feature_set=driver_fs), + ) + + with pytest.raises(exception): + test_client.ingest("driver-feature-set", dataframe) + @pytest.mark.parametrize( "dataframe,exception,test_client", [ @@ -702,7 +742,7 @@ def test_feature_set_types_success(self, test_client, dataframe, mocker): Feature(name="bytes_list_feature", dtype=ValueType.BYTES_LIST), # Feature(name="bool_list_feature", # dtype=ValueType.BOOL_LIST), # TODO: Add support for this - # type again https://github.com/gojek/feast/issues/341 + # type again https://github.com/feast-dev/feast/issues/341 Feature(name="double_list_feature", dtype=ValueType.DOUBLE_LIST), ], max_age=Duration(seconds=3600), diff --git a/sdk/python/tests/test_feature.py b/sdk/python/tests/test_feature.py new file mode 100644 index 0000000000..bc83683e0f --- /dev/null +++ b/sdk/python/tests/test_feature.py @@ -0,0 +1,23 @@ +# Copyright 2019 The Feast Authors +#
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from feast.feature import FeatureRef + + +class TestFeatureRef: + def test_str_ref(self): + original_ref = FeatureRef(feature_set="test", name="test") + ref_str = repr(original_ref) + parsed_ref = FeatureRef.from_str(ref_str) + assert original_ref == parsed_ref diff --git a/sdk/python/tests/test_feature_set.py b/sdk/python/tests/test_feature_set.py index 6f087d98bb..04e75c9e76 100644 --- a/sdk/python/tests/test_feature_set.py +++ b/sdk/python/tests/test_feature_set.py @@ -20,7 +20,6 @@ import pytest import pytz from google.protobuf import json_format -from tensorflow_metadata.proto.v0 import schema_pb2 import dataframes import feast.core.CoreService_pb2_grpc as Core @@ -34,6 +33,7 @@ ) from feast.value_type import ValueType from feast_core_server import CoreServicer +from tensorflow_metadata.proto.v0 import schema_pb2 CORE_URL = "core.feast.local" SERVING_URL = "serving.feast.local" @@ -210,9 +210,6 @@ def test_import_tfx_schema(self): feature_set.import_tfx_schema(test_input_schema) # After update - for entity in feature_set.entities: - assert entity.presence is not None - assert entity.shape is not None for feature in feature_set.features: assert feature.presence is not None assert feature.shape is not None @@ -271,15 +268,13 @@ def make_tfx_schema_domain_info_inline(schema): class TestFeatureSetRef: def test_from_feature_set(self): feature_set = FeatureSet("test", "test") - feature_set.version = 2 ref = FeatureSetRef.from_feature_set(feature_set) assert ref.name == "test" assert ref.project == "test" - assert ref.version == 2 def test_str_ref(self): - original_ref = FeatureSetRef(project="test", name="test", version=2) + original_ref = FeatureSetRef(project="test", name="test") ref_str = repr(original_ref) parsed_ref = FeatureSetRef.from_str(ref_str) assert original_ref == parsed_ref diff --git a/serving/README.md b/serving/README.md index f88f30923b..39eef31103 100644 --- a/serving/README.md +++ b/serving/README.md @@ -28,7 +28,6 @@ grpc_cli call localhost:6566 GetFeastServingType '' grpc_cli call localhost:6565 ApplyFeatureSet ' feature_set { name: "driver" - version: 1 entities { name: "driver_id" value_type: STRING @@ -53,14 +52,12 @@ feature_set { grpc_cli call localhost:6565 GetFeatureSets ' filter { feature_set_name: "driver" - feature_set_version: "1" } ' grpc_cli call localhost:6566 GetBatchFeatures ' feature_sets { name: "driver" - version: 1 feature_names: "booking_completed_count" max_age { seconds: 86400 diff --git a/serving/lombok.config b/serving/lombok.config deleted file mode 100644 index 8f7e8aa1ac..0000000000 --- a/serving/lombok.config +++ /dev/null @@ -1 +0,0 @@ -lombok.addLombokGeneratedAnnotation = true \ No newline at end of file diff --git a/serving/pom.xml b/serving/pom.xml index 69c5b333dc..8304899b51 100644 --- a/serving/pom.xml +++ b/serving/pom.xml @@ -34,18 +34,39 @@ spring-plugins Spring Plugins - http://repo.spring.io/plugins-release + https://repo.spring.io/plugins-release + + org.apache.maven.plugins + 
maven-compiler-plugin + + 11 + + + + + org.jacoco + jacoco-maven-plugin + + org.springframework.boot spring-boot-maven-plugin false + + + build-info + + build-info + + + org.apache.maven.plugins @@ -76,6 +97,7 @@ ${project.version} + dev.feast feast-storage-api @@ -233,21 +255,12 @@ test - org.mockito mockito-core - 2.28.2 test - - - org.hibernate - hibernate-core - 5.4.5.Final - - com.fasterxml.jackson.dataformat diff --git a/serving/sample_redis_config.yml b/serving/sample_redis_config.yml deleted file mode 100644 index b3461649a1..0000000000 --- a/serving/sample_redis_config.yml +++ /dev/null @@ -1,9 +0,0 @@ -name: serving -type: REDIS -redis_config: - host: localhost - port: 6379 -subscriptions: - - name: "*" - project: "*" - version: "*" diff --git a/serving/src/main/java/feast/serving/FeastProperties.java b/serving/src/main/java/feast/serving/FeastProperties.java deleted file mode 100644 index 505d7d0330..0000000000 --- a/serving/src/main/java/feast/serving/FeastProperties.java +++ /dev/null @@ -1,191 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * Copyright 2018-2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package feast.serving; - -// Feast configuration properties that maps Feast configuration from default application.yml file to -// a Java object. 
-// https://www.baeldung.com/configuration-properties-in-spring-boot -// https://docs.spring.io/spring-boot/docs/current/reference/html/boot-features-external-config.html#boot-features-external-config-typesafe-configuration-properties - -import java.util.Map; -import org.springframework.boot.context.properties.ConfigurationProperties; - -@ConfigurationProperties(prefix = "feast") -public class FeastProperties { - private String version; - private String coreHost; - private int coreGrpcPort; - private StoreProperties store; - private JobProperties jobs; - private TracingProperties tracing; - - public String getVersion() { - return this.version; - } - - public String getCoreHost() { - return this.coreHost; - } - - public int getCoreGrpcPort() { - return this.coreGrpcPort; - } - - public StoreProperties getStore() { - return this.store; - } - - public JobProperties getJobs() { - return this.jobs; - } - - public TracingProperties getTracing() { - return this.tracing; - } - - public void setVersion(String version) { - this.version = version; - } - - public void setCoreHost(String coreHost) { - this.coreHost = coreHost; - } - - public void setCoreGrpcPort(int coreGrpcPort) { - this.coreGrpcPort = coreGrpcPort; - } - - public void setStore(StoreProperties store) { - this.store = store; - } - - public void setJobs(JobProperties jobs) { - this.jobs = jobs; - } - - public void setTracing(TracingProperties tracing) { - this.tracing = tracing; - } - - public static class StoreProperties { - private String configPath; - private int redisPoolMaxSize; - private int redisPoolMaxIdle; - - public String getConfigPath() { - return this.configPath; - } - - public int getRedisPoolMaxSize() { - return this.redisPoolMaxSize; - } - - public int getRedisPoolMaxIdle() { - return this.redisPoolMaxIdle; - } - - public void setConfigPath(String configPath) { - this.configPath = configPath; - } - - public void setRedisPoolMaxSize(int redisPoolMaxSize) { - this.redisPoolMaxSize = redisPoolMaxSize; - } - - public void setRedisPoolMaxIdle(int redisPoolMaxIdle) { - this.redisPoolMaxIdle = redisPoolMaxIdle; - } - } - - public static class JobProperties { - private String stagingLocation; - private int bigqueryInitialRetryDelaySecs; - private int bigqueryTotalTimeoutSecs; - private String storeType; - private Map storeOptions; - - public String getStagingLocation() { - return this.stagingLocation; - } - - public int getBigqueryInitialRetryDelaySecs() { - return bigqueryInitialRetryDelaySecs; - } - - public int getBigqueryTotalTimeoutSecs() { - return bigqueryTotalTimeoutSecs; - } - - public String getStoreType() { - return this.storeType; - } - - public Map getStoreOptions() { - return this.storeOptions; - } - - public void setStagingLocation(String stagingLocation) { - this.stagingLocation = stagingLocation; - } - - public void setBigqueryInitialRetryDelaySecs(int bigqueryInitialRetryDelaySecs) { - this.bigqueryInitialRetryDelaySecs = bigqueryInitialRetryDelaySecs; - } - - public void setBigqueryTotalTimeoutSecs(int bigqueryTotalTimeoutSecs) { - this.bigqueryTotalTimeoutSecs = bigqueryTotalTimeoutSecs; - } - - public void setStoreType(String storeType) { - this.storeType = storeType; - } - - public void setStoreOptions(Map storeOptions) { - this.storeOptions = storeOptions; - } - } - - public static class TracingProperties { - private boolean enabled; - private String tracerName; - private String serviceName; - - public boolean isEnabled() { - return this.enabled; - } - - public String getTracerName() { - return 
this.tracerName; - } - - public String getServiceName() { - return this.serviceName; - } - - public void setEnabled(boolean enabled) { - this.enabled = enabled; - } - - public void setTracerName(String tracerName) { - this.tracerName = tracerName; - } - - public void setServiceName(String serviceName) { - this.serviceName = serviceName; - } - } -} diff --git a/serving/src/main/java/feast/serving/ServingApplication.java b/serving/src/main/java/feast/serving/ServingApplication.java index ae9bb87a0b..ab036d04d1 100644 --- a/serving/src/main/java/feast/serving/ServingApplication.java +++ b/serving/src/main/java/feast/serving/ServingApplication.java @@ -16,11 +16,20 @@ */ package feast.serving; +import feast.serving.config.FeastProperties; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; +import org.springframework.boot.autoconfigure.jdbc.DataSourceTransactionManagerAutoConfiguration; +import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration; import org.springframework.boot.context.properties.EnableConfigurationProperties; -@SpringBootApplication +@SpringBootApplication( + exclude = { + DataSourceAutoConfiguration.class, + DataSourceTransactionManagerAutoConfiguration.class, + HibernateJpaAutoConfiguration.class + }) @EnableConfigurationProperties(FeastProperties.class) public class ServingApplication { public static void main(String[] args) { diff --git a/serving/src/main/java/feast/serving/configuration/ContextClosedHandler.java b/serving/src/main/java/feast/serving/config/ContextClosedHandler.java similarity index 96% rename from serving/src/main/java/feast/serving/configuration/ContextClosedHandler.java rename to serving/src/main/java/feast/serving/config/ContextClosedHandler.java index a4f6d64d84..2bc97439f3 100644 --- a/serving/src/main/java/feast/serving/configuration/ContextClosedHandler.java +++ b/serving/src/main/java/feast/serving/config/ContextClosedHandler.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package feast.serving.configuration; +package feast.serving.config; import java.util.concurrent.ScheduledExecutorService; import org.springframework.beans.factory.annotation.Autowired; diff --git a/serving/src/main/java/feast/serving/config/FeastProperties.java b/serving/src/main/java/feast/serving/config/FeastProperties.java new file mode 100644 index 0000000000..f905f5f5c0 --- /dev/null +++ b/serving/src/main/java/feast/serving/config/FeastProperties.java @@ -0,0 +1,542 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.serving.config; + +// Feast configuration properties that maps Feast configuration from default application.yml file to +// a Java object. 
+// https://www.baeldung.com/configuration-properties-in-spring-boot +// https://docs.spring.io/spring-boot/docs/current/reference/html/boot-features-external-config.html#boot-features-external-config-typesafe-configuration-properties + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.protobuf.InvalidProtocolBufferException; +import com.google.protobuf.util.JsonFormat; +import feast.proto.core.StoreProto; +import java.util.*; +import java.util.stream.Collectors; +import javax.validation.constraints.NotBlank; +import javax.validation.constraints.Positive; +import org.apache.logging.log4j.core.config.plugins.validation.constraints.ValidHost; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.boot.info.BuildProperties; + +/** Feast Serving properties. */ +@ConfigurationProperties(prefix = "feast", ignoreInvalidFields = true) +public class FeastProperties { + + /** + * Instantiates a new Feast Serving properties. + * + * @param buildProperties the build properties + */ + @Autowired + public FeastProperties(BuildProperties buildProperties) { + setVersion(buildProperties.getVersion()); + } + + /** Instantiates a new Feast class. */ + public FeastProperties() {} + + /* Feast Serving build version */ + @NotBlank private String version = "unknown"; + + /* Feast Core host to connect to. */ + @ValidHost @NotBlank private String coreHost; + + /* Feast Core port to connect to. */ + @Positive private int coreGrpcPort; + + /** + * Finds and returns the active store + * + * @return Returns the {@link Store} model object + */ + public Store getActiveStore() { + for (Store store : getStores()) { + if (activeStore.equals(store.getName())) { + return store; + } + } + throw new RuntimeException( + String.format("Active store is misconfigured. Could not find store: %s.", activeStore)); + } + + /** + * Set the name of the active store found in the "stores" configuration list + * + * @param activeStore String name to active store + */ + public void setActiveStore(String activeStore) { + this.activeStore = activeStore; + } + + /** Name of the active store configuration (only one store can be active at a time). */ + @NotBlank private String activeStore; + + /** + * Collection of store configurations. The active store is selected by the "activeStore" field. + */ + private List stores = new ArrayList<>(); + + /* Job Store properties to retain state of async jobs. */ + private JobStoreProperties jobStore; + + /* Metric tracing properties. */ + private TracingProperties tracing; + + /** + * Gets Serving store configuration as a list of {@link Store}. + * + * @return List of stores objects + */ + public List getStores() { + return stores; + } + + /** + * Gets Feast Serving build version. + * + * @return the build version + */ + public String getVersion() { + return version; + } + + /** + * Sets build version + * + * @param version the build version + */ + public void setVersion(String version) { + this.version = version; + } + + /** + * Gets Feast Core host. + * + * @return Feast Core host + */ + public String getCoreHost() { + return coreHost; + } + + /** + * Sets Feast Core host to connect to. + * + * @param coreHost Feast Core host + */ + public void setCoreHost(String coreHost) { + this.coreHost = coreHost; + } + + /** + * Gets Feast Core gRPC port. 
+ * + * @return Feast Core gRPC port + */ + public int getCoreGrpcPort() { + return coreGrpcPort; + } + + /** + * Sets Feast Core gRPC port. + * + * @param coreGrpcPort gRPC port of Feast Core + */ + public void setCoreGrpcPort(int coreGrpcPort) { + this.coreGrpcPort = coreGrpcPort; + } + + /** + * Sets the collection of configured stores. + * + * @param stores List of {@link Store} + */ + public void setStores(List stores) { + this.stores = stores; + } + + /** Store configuration class for the database that this Feast Serving instance uses. */ + public static class Store { + + private String name; + + private String type; + + private Map config = new HashMap<>(); + + private List subscriptions = new ArrayList<>(); + + /** + * Gets name of this store. This is unique to this specific instance. + * + * @return the name of the store + */ + public String getName() { + return name; + } + + /** + * Sets the name of this store. + * + * @param name the name of the store + */ + public void setName(String name) { + this.name = name; + } + + /** + * Gets the store type. Examples are REDIS or BIGQUERY. + * + * @return the store type as a String. + */ + public String getType() { + return type; + } + + /** + * Sets the store type + * + * @param type the type + */ + public void setType(String type) { + this.type = type; + } + + /** + * Converts this {@link Store} to a {@link StoreProto.Store} + * + * @return {@link StoreProto.Store} with configuration set + * @throws InvalidProtocolBufferException the invalid protocol buffer exception + * @throws JsonProcessingException the json processing exception + */ + public StoreProto.Store toProto() + throws InvalidProtocolBufferException, JsonProcessingException { + List subscriptions = getSubscriptions(); + List subscriptionProtos = + subscriptions.stream().map(Subscription::toProto).collect(Collectors.toList()); + + StoreProto.Store.Builder storeProtoBuilder = + StoreProto.Store.newBuilder() + .setName(name) + .setType(StoreProto.Store.StoreType.valueOf(type)) + .addAllSubscriptions(subscriptionProtos); + + ObjectMapper jsonWriter = new ObjectMapper(); + + // TODO: All of this logic should be moved to the store layer. Only a Map + // should be sent to a store and it should do its own validation.
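+      // For example (a sketch): a store declared with type REDIS and config
+      // {host: localhost, port: "6379"} is written out as JSON by the ObjectMapper
+      // above and merged into the matching RedisConfig builder in the switch below.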
+ switch (StoreProto.Store.StoreType.valueOf(type)) { + case REDIS_CLUSTER: + StoreProto.Store.RedisClusterConfig.Builder redisClusterConfig = + StoreProto.Store.RedisClusterConfig.newBuilder(); + JsonFormat.parser().merge(jsonWriter.writeValueAsString(config), redisClusterConfig); + return storeProtoBuilder.setRedisClusterConfig(redisClusterConfig.build()).build(); + case REDIS: + StoreProto.Store.RedisConfig.Builder redisConfig = + StoreProto.Store.RedisConfig.newBuilder(); + JsonFormat.parser().merge(jsonWriter.writeValueAsString(config), redisConfig); + return storeProtoBuilder.setRedisConfig(redisConfig.build()).build(); + case BIGQUERY: + StoreProto.Store.BigQueryConfig.Builder bqConfig = + StoreProto.Store.BigQueryConfig.newBuilder(); + JsonFormat.parser().merge(jsonWriter.writeValueAsString(config), bqConfig); + return storeProtoBuilder.setBigqueryConfig(bqConfig.build()).build(); + case CASSANDRA: + StoreProto.Store.CassandraConfig.Builder cassandraConfig = + StoreProto.Store.CassandraConfig.newBuilder(); + JsonFormat.parser().merge(jsonWriter.writeValueAsString(config), cassandraConfig); + return storeProtoBuilder.setCassandraConfig(cassandraConfig.build()).build(); + default: + throw new InvalidProtocolBufferException("Invalid store set"); + } + } + + /** + * Get the subscriptions to this specific store. The subscriptions indicate which feature sets a + * store subscribes to. + * + * @return List of subscriptions. + */ + public List getSubscriptions() { + return subscriptions; + } + + /** + * Sets the subscriptions to this specific store. See getSubscriptions() for more details. + * + * @param subscriptions the subscriptions list + */ + public void setSubscriptions(List subscriptions) { + this.subscriptions = subscriptions; + } + + /** + * Gets the configuration of this specific store. This is a map of strings. These options are + * unique to the store. Please see protos/feast/core/Store.proto for the store specific + * configuration options + * + * @return Returns the store specific configuration + */ + public Map getConfig() { + return config; + } + + /** + * Sets the store config. Please see protos/feast/core/Store.proto for the specific options for + * each store. + * + * @param config the config map + */ + public void setConfig(Map config) { + this.config = config; + } + + /** + * The Subscription type. + * + *
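For example (a sketch): a subscription with project "my_project" and name "customer_features" subscribes this store to that feature set's data. + *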

Note: Please see protos/feast/core/CoreService.proto for details on how to subscribe to + * feature sets. + */ + public static class Subscription { + /** Feast project to subscribe to. */ + String project; + + /** Feature set to subscribe to. */ + String name; + + /** Feature set versions to subscribe to. */ + String version; + + /** + * Gets Feast project subscribed to. + * + * @return the project string + */ + public String getProject() { + return project; + } + + /** + * Sets Feast project to subscribe to for this store. + * + * @param project the project + */ + public void setProject(String project) { + this.project = project; + } + + /** + * Gets the feature set name to subscribe to. + * + * @return the name + */ + public String getName() { + return name; + } + + /** + * Sets the feature set name to subscribe to. + * + * @param name the name + */ + public void setName(String name) { + this.name = name; + } + + /** + * Gets the feature set version that is being subscribed to by this store. + * + * @return the version + */ + public String getVersion() { + return version; + } + + /** + * Sets the feature set version that is being subscribed to by this store. + * + * @param version the version + */ + public void setVersion(String version) { + this.version = version; + } + + /** + * Convert this {@link Subscription} to a {@link StoreProto.Store.Subscription}. + * + * @return the corresponding {@link StoreProto.Store.Subscription} + */ + public StoreProto.Store.Subscription toProto() { + return StoreProto.Store.Subscription.newBuilder() + .setName(getName()) + .setProject(getProject()) + .build(); + } + } + } + + /** + * Gets job store properties + * + * @return the job store properties + */ + public JobStoreProperties getJobStore() { + return jobStore; + } + + /** + * Sets job store properties + * + * @param jobStore Job store properties to set + */ + public void setJobStore(JobStoreProperties jobStore) { + this.jobStore = jobStore; + } + + /** + * Gets tracing properties + * + * @return tracing properties + */ + public TracingProperties getTracing() { + return tracing; + } + + /** + * Sets the tracing configuration. + * + * @param tracing the tracing + */ + public void setTracing(TracingProperties tracing) { + this.tracing = tracing; + } + + /** The type Job store properties. */ + public static class JobStoreProperties { + + /** Job Store Redis Host */ + private String redisHost; + + /** Job Store Redis Port */ + private int redisPort; + + /** + * Gets redis host. + * + * @return the redis host + */ + public String getRedisHost() { + return redisHost; + } + + /** + * Sets redis host. + * + * @param redisHost the redis host + */ + public void setRedisHost(String redisHost) { + this.redisHost = redisHost; + } + + /** + * Gets redis port. + * + * @return the redis port + */ + public int getRedisPort() { + return redisPort; + } + + /** + * Sets redis port. + * + * @param redisPort the redis port + */ + public void setRedisPort(int redisPort) { + this.redisPort = redisPort; + } + } + + /** Trace metric collection properties */ + public static class TracingProperties { + + /** Tracing enabled/disabled */ + private boolean enabled; + + /** Name of tracer to use (only "jaeger") */ + private String tracerName; + + /** Service name uniquely identifies this Feast Serving deployment */ + private String serviceName; + + /** + * Is tracing enabled + * + * @return boolean flag + */ + public boolean isEnabled() { + return enabled; + } + + /** + * Sets tracing enabled or disabled.
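+ * For example (a sketch, relying on Spring Boot's relaxed property binding for this class): setting feast.tracing.enabled=true in application.yml turns tracing on.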
+ * + * @param enabled flag + */ + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } + + /** + * Gets tracer name ('jaeger') + * + * @return the tracer name + */ + public String getTracerName() { + return tracerName; + } + + /** + * Sets tracer name. + * + * @param tracerName the tracer name + */ + public void setTracerName(String tracerName) { + this.tracerName = tracerName; + } + + /** + * Gets the service name. The service name uniquely identifies this Feast serving instance. + * + * @return the service name + */ + public String getServiceName() { + return serviceName; + } + + /** + * Sets service name. + * + * @param serviceName the service name + */ + public void setServiceName(String serviceName) { + this.serviceName = serviceName; + } + } +} diff --git a/serving/src/main/java/feast/serving/configuration/InstrumentationConfig.java b/serving/src/main/java/feast/serving/config/InstrumentationConfig.java similarity index 96% rename from serving/src/main/java/feast/serving/configuration/InstrumentationConfig.java rename to serving/src/main/java/feast/serving/config/InstrumentationConfig.java index 2cd284829c..30269c5d0e 100644 --- a/serving/src/main/java/feast/serving/configuration/InstrumentationConfig.java +++ b/serving/src/main/java/feast/serving/config/InstrumentationConfig.java @@ -14,9 +14,8 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package feast.serving.configuration; +package feast.serving.config; -import feast.serving.FeastProperties; import io.opentracing.Tracer; import io.opentracing.noop.NoopTracerFactory; import io.prometheus.client.exporter.MetricsServlet; diff --git a/serving/src/main/java/feast/serving/configuration/JobServiceConfig.java b/serving/src/main/java/feast/serving/config/JobServiceConfig.java similarity index 54% rename from serving/src/main/java/feast/serving/configuration/JobServiceConfig.java rename to serving/src/main/java/feast/serving/config/JobServiceConfig.java index fa94dab832..b85e24062d 100644 --- a/serving/src/main/java/feast/serving/configuration/JobServiceConfig.java +++ b/serving/src/main/java/feast/serving/config/JobServiceConfig.java @@ -14,14 +14,14 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package feast.serving.configuration; +package feast.serving.config; -import feast.core.StoreProto.Store.StoreType; -import feast.serving.FeastProperties; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.google.protobuf.InvalidProtocolBufferException; +import feast.proto.core.StoreProto.Store.StoreType; import feast.serving.service.JobService; import feast.serving.service.NoopJobService; import feast.serving.service.RedisBackedJobService; -import feast.serving.specs.CachedSpecService; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -29,24 +29,11 @@ public class JobServiceConfig { @Bean - public JobService jobService( - FeastProperties feastProperties, - CachedSpecService specService, - StoreConfiguration storeConfiguration) { - if (!specService.getStore().getType().equals(StoreType.BIGQUERY)) { + public JobService jobService(FeastProperties feastProperties) + throws InvalidProtocolBufferException, JsonProcessingException { + if (!feastProperties.getActiveStore().toProto().getType().equals(StoreType.BIGQUERY)) { return new NoopJobService(); } - StoreType storeType = StoreType.valueOf(feastProperties.getJobs().getStoreType()); - switch (storeType) { - case REDIS: - return new RedisBackedJobService(storeConfiguration.getJobStoreRedisConnection()); - case INVALID: - case BIGQUERY: - case CASSANDRA: - case UNRECOGNIZED: - default: - throw new IllegalArgumentException( - String.format("Unsupported store type '%s' for job store", storeType)); - } + return new RedisBackedJobService(feastProperties.getJobStore()); } } diff --git a/serving/src/main/java/feast/serving/configuration/ServingApiConfiguration.java b/serving/src/main/java/feast/serving/config/ServingApiConfiguration.java similarity index 97% rename from serving/src/main/java/feast/serving/configuration/ServingApiConfiguration.java rename to serving/src/main/java/feast/serving/config/ServingApiConfiguration.java index 539b25a0fc..ce4fe13437 100644 --- a/serving/src/main/java/feast/serving/configuration/ServingApiConfiguration.java +++ b/serving/src/main/java/feast/serving/config/ServingApiConfiguration.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package feast.serving.configuration; +package feast.serving.config; import java.util.List; import org.springframework.beans.factory.annotation.Autowired; diff --git a/serving/src/main/java/feast/serving/config/ServingServiceConfig.java b/serving/src/main/java/feast/serving/config/ServingServiceConfig.java new file mode 100644 index 0000000000..41a92e4bc6 --- /dev/null +++ b/serving/src/main/java/feast/serving/config/ServingServiceConfig.java @@ -0,0 +1,84 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.serving.config; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.google.protobuf.InvalidProtocolBufferException; +import feast.proto.core.StoreProto; +import feast.serving.service.HistoricalServingService; +import feast.serving.service.JobService; +import feast.serving.service.NoopJobService; +import feast.serving.service.OnlineServingService; +import feast.serving.service.ServingService; +import feast.serving.specs.CachedSpecService; +import feast.storage.api.retriever.HistoricalRetriever; +import feast.storage.api.retriever.OnlineRetriever; +import feast.storage.connectors.bigquery.retriever.BigQueryHistoricalRetriever; +import feast.storage.connectors.redis.retriever.RedisClusterOnlineRetriever; +import feast.storage.connectors.redis.retriever.RedisOnlineRetriever; +import io.opentracing.Tracer; +import java.util.Map; +import org.slf4j.Logger; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +public class ServingServiceConfig { + + private static final Logger log = org.slf4j.LoggerFactory.getLogger(ServingServiceConfig.class); + + @Bean + public ServingService servingService( + FeastProperties feastProperties, + CachedSpecService specService, + JobService jobService, + Tracer tracer) + throws InvalidProtocolBufferException, JsonProcessingException { + ServingService servingService = null; + FeastProperties.Store store = feastProperties.getActiveStore(); + StoreProto.Store.StoreType storeType = store.toProto().getType(); + Map config = store.getConfig(); + + switch (storeType) { + case REDIS_CLUSTER: + OnlineRetriever redisClusterRetriever = RedisClusterOnlineRetriever.create(config); + servingService = new OnlineServingService(redisClusterRetriever, specService, tracer); + break; + case REDIS: + OnlineRetriever redisRetriever = RedisOnlineRetriever.create(config); + servingService = new OnlineServingService(redisRetriever, specService, tracer); + break; + case BIGQUERY: + if (jobService.getClass() == NoopJobService.class) { + throw new IllegalArgumentException( + "Unable to instantiate JobService which is required by BigQueryHistoricalRetriever."); + } + HistoricalRetriever bqRetriever = BigQueryHistoricalRetriever.create(config); + servingService = new HistoricalServingService(bqRetriever, specService, jobService); + break; + case CASSANDRA: + case UNRECOGNIZED: + case INVALID: + throw new IllegalArgumentException( + String.format( + "Unsupported store type '%s' for store name '%s'", + store.getType(), store.getName())); + } + + return servingService; + } +} diff --git a/serving/src/main/java/feast/serving/configuration/SpecServiceConfig.java b/serving/src/main/java/feast/serving/config/SpecServiceConfig.java similarity index 86% rename from serving/src/main/java/feast/serving/configuration/SpecServiceConfig.java rename to serving/src/main/java/feast/serving/config/SpecServiceConfig.java index 26ebfa956c..0a62557077 100644 --- a/serving/src/main/java/feast/serving/configuration/SpecServiceConfig.java +++ b/serving/src/main/java/feast/serving/config/SpecServiceConfig.java @@ -14,13 +14,13 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package feast.serving.configuration; +package feast.serving.config; -import feast.serving.FeastProperties; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.google.protobuf.InvalidProtocolBufferException; +import feast.proto.core.StoreProto; import feast.serving.specs.CachedSpecService; import feast.serving.specs.CoreSpecService; -import java.nio.file.Path; -import java.nio.file.Paths; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; @@ -58,10 +58,11 @@ public ScheduledExecutorService cachedSpecServiceScheduledExecutorService( } @Bean - public CachedSpecService specService(FeastProperties feastProperties) { + public CachedSpecService specService(FeastProperties feastProperties) + throws InvalidProtocolBufferException, JsonProcessingException { CoreSpecService coreService = new CoreSpecService(feastCoreHost, feastCorePort); - Path path = Paths.get(feastProperties.getStore().getConfigPath()); - CachedSpecService cachedSpecStorage = new CachedSpecService(coreService, path); + StoreProto.Store storeProto = feastProperties.getActiveStore().toProto(); + CachedSpecService cachedSpecStorage = new CachedSpecService(coreService, storeProto); try { cachedSpecStorage.populateCache(); } catch (Exception e) { diff --git a/serving/src/main/java/feast/serving/configuration/ServingServiceConfig.java b/serving/src/main/java/feast/serving/configuration/ServingServiceConfig.java deleted file mode 100644 index 28df853e22..0000000000 --- a/serving/src/main/java/feast/serving/configuration/ServingServiceConfig.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * Copyright 2018-2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package feast.serving.configuration; - -import com.google.cloud.bigquery.BigQuery; -import com.google.cloud.bigquery.BigQueryOptions; -import com.google.cloud.storage.Storage; -import com.google.cloud.storage.StorageOptions; -import feast.core.StoreProto.Store; -import feast.core.StoreProto.Store.BigQueryConfig; -import feast.core.StoreProto.Store.RedisConfig; -import feast.core.StoreProto.Store.Subscription; -import feast.serving.FeastProperties; -import feast.serving.service.*; -import feast.serving.specs.CachedSpecService; -import feast.storage.api.retriever.HistoricalRetriever; -import feast.storage.api.retriever.OnlineRetriever; -import feast.storage.connectors.bigquery.retriever.BigQueryHistoricalRetriever; -import feast.storage.connectors.redis.retriever.RedisOnlineRetriever; -import io.opentracing.Tracer; -import java.util.Map; -import org.slf4j.Logger; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -@Configuration -public class ServingServiceConfig { - - private static final Logger log = org.slf4j.LoggerFactory.getLogger(ServingServiceConfig.class); - - private Store setStoreConfig(Store.Builder builder, Map options) { - switch (builder.getType()) { - case REDIS: - RedisConfig redisConfig = - RedisConfig.newBuilder() - .setHost(options.get("host")) - .setPort(Integer.parseInt(options.get("port"))) - .build(); - return builder.setRedisConfig(redisConfig).build(); - case BIGQUERY: - BigQueryConfig bqConfig = - BigQueryConfig.newBuilder() - .setProjectId(options.get("projectId")) - .setDatasetId(options.get("datasetId")) - .build(); - return builder.setBigqueryConfig(bqConfig).build(); - case CASSANDRA: - default: - throw new IllegalArgumentException( - String.format( - "Unsupported store %s provided, only REDIS or BIGQUERY are currently supported.", - builder.getType())); - } - } - - @Bean - public ServingService servingService( - FeastProperties feastProperties, - CachedSpecService specService, - JobService jobService, - Tracer tracer, - StoreConfiguration storeConfiguration) { - ServingService servingService = null; - Store store = specService.getStore(); - - switch (store.getType()) { - case REDIS: - OnlineRetriever redisRetriever = - new RedisOnlineRetriever(storeConfiguration.getServingRedisConnection()); - servingService = new OnlineServingService(redisRetriever, specService, tracer); - break; - case BIGQUERY: - BigQueryConfig bqConfig = store.getBigqueryConfig(); - BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); - Storage storage = StorageOptions.getDefaultInstance().getService(); - String jobStagingLocation = feastProperties.getJobs().getStagingLocation(); - if (!jobStagingLocation.contains("://")) { - throw new IllegalArgumentException( - String.format("jobStagingLocation is not a valid URI: %s", jobStagingLocation)); - } - if (jobStagingLocation.endsWith("/")) { - jobStagingLocation = jobStagingLocation.substring(0, jobStagingLocation.length() - 1); - } - if (!jobStagingLocation.startsWith("gs://")) { - throw new IllegalArgumentException( - "Store type BIGQUERY requires job staging location to be a valid and existing Google Cloud Storage URI. 
Invalid staging location: " - + jobStagingLocation); - } - if (jobService.getClass() == NoopJobService.class) { - throw new IllegalArgumentException( - "Unable to instantiate jobService for BigQuery store."); - } - - HistoricalRetriever bqRetriever = - BigQueryHistoricalRetriever.builder() - .setBigquery(bigquery) - .setDatasetId(bqConfig.getDatasetId()) - .setProjectId(bqConfig.getProjectId()) - .setJobStagingLocation(jobStagingLocation) - .setInitialRetryDelaySecs( - feastProperties.getJobs().getBigqueryInitialRetryDelaySecs()) - .setTotalTimeoutSecs(feastProperties.getJobs().getBigqueryTotalTimeoutSecs()) - .setStorage(storage) - .build(); - - servingService = new HistoricalServingService(bqRetriever, specService, jobService); - break; - case CASSANDRA: - case UNRECOGNIZED: - case INVALID: - throw new IllegalArgumentException( - String.format( - "Unsupported store type '%s' for store name '%s'", - store.getType(), store.getName())); - } - - return servingService; - } - - private Subscription parseSubscription(String subscription) { - String[] split = subscription.split(":"); - return Subscription.newBuilder().setName(split[0]).setVersion(split[1]).build(); - } -} diff --git a/serving/src/main/java/feast/serving/configuration/StoreConfiguration.java b/serving/src/main/java/feast/serving/configuration/StoreConfiguration.java deleted file mode 100644 index 84dc7b7f8d..0000000000 --- a/serving/src/main/java/feast/serving/configuration/StoreConfiguration.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * Copyright 2018-2020 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package feast.serving.configuration; - -import io.lettuce.core.api.StatefulRedisConnection; -import org.springframework.beans.factory.ObjectProvider; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Configuration; - -@Configuration -public class StoreConfiguration { - - // We can define other store specific beans here - // These beans can be autowired or can be created in this class. 
- private final StatefulRedisConnection servingRedisConnection; - private final StatefulRedisConnection jobStoreRedisConnection; - - @Autowired - public StoreConfiguration( - ObjectProvider> servingRedisConnection, - ObjectProvider> jobStoreRedisConnection) { - this.servingRedisConnection = servingRedisConnection.getIfAvailable(); - this.jobStoreRedisConnection = jobStoreRedisConnection.getIfAvailable(); - } - - public StatefulRedisConnection getServingRedisConnection() { - return servingRedisConnection; - } - - public StatefulRedisConnection getJobStoreRedisConnection() { - return jobStoreRedisConnection; - } -} diff --git a/serving/src/main/java/feast/serving/configuration/redis/JobStoreRedisConfig.java b/serving/src/main/java/feast/serving/configuration/redis/JobStoreRedisConfig.java deleted file mode 100644 index 77d9262bcb..0000000000 --- a/serving/src/main/java/feast/serving/configuration/redis/JobStoreRedisConfig.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * Copyright 2018-2020 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package feast.serving.configuration.redis; - -import com.google.common.base.Enums; -import feast.core.StoreProto; -import feast.serving.FeastProperties; -import io.lettuce.core.RedisClient; -import io.lettuce.core.RedisURI; -import io.lettuce.core.api.StatefulRedisConnection; -import io.lettuce.core.codec.ByteArrayCodec; -import io.lettuce.core.resource.ClientResources; -import io.lettuce.core.resource.DefaultClientResources; -import java.util.Map; -import org.springframework.beans.factory.ObjectProvider; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -@Configuration -public class JobStoreRedisConfig { - - @Bean(destroyMethod = "shutdown") - ClientResources jobStoreClientResources() { - return DefaultClientResources.create(); - } - - @Bean(destroyMethod = "shutdown") - RedisClient jobStoreRedisClient( - ClientResources jobStoreClientResources, FeastProperties feastProperties) { - StoreProto.Store.StoreType storeType = - Enums.getIfPresent( - StoreProto.Store.StoreType.class, feastProperties.getJobs().getStoreType()) - .orNull(); - if (storeType != StoreProto.Store.StoreType.REDIS) return null; - Map jobStoreConf = feastProperties.getJobs().getStoreOptions(); - // If job conf is empty throw StoreException - if (jobStoreConf == null - || jobStoreConf.get("host") == null - || jobStoreConf.get("host").isEmpty() - || jobStoreConf.get("port") == null - || jobStoreConf.get("port").isEmpty()) - throw new IllegalArgumentException("Store Configuration is not set"); - RedisURI uri = - RedisURI.create(jobStoreConf.get("host"), Integer.parseInt(jobStoreConf.get("port"))); - return RedisClient.create(jobStoreClientResources, uri); - } - - @Bean(destroyMethod = "close") - StatefulRedisConnection jobStoreRedisConnection( - ObjectProvider jobStoreRedisClient) { - if (jobStoreRedisClient.getIfAvailable() == null) return null; - return 
jobStoreRedisClient.getIfAvailable().connect(new ByteArrayCodec()); - } -} diff --git a/serving/src/main/java/feast/serving/configuration/redis/ServingStoreRedisConfig.java b/serving/src/main/java/feast/serving/configuration/redis/ServingStoreRedisConfig.java deleted file mode 100644 index 17a50eef6d..0000000000 --- a/serving/src/main/java/feast/serving/configuration/redis/ServingStoreRedisConfig.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * Copyright 2018-2020 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package feast.serving.configuration.redis; - -import feast.core.StoreProto; -import feast.serving.specs.CachedSpecService; -import io.lettuce.core.RedisClient; -import io.lettuce.core.RedisURI; -import io.lettuce.core.api.StatefulRedisConnection; -import io.lettuce.core.codec.ByteArrayCodec; -import io.lettuce.core.resource.ClientResources; -import io.lettuce.core.resource.DefaultClientResources; -import org.springframework.beans.factory.ObjectProvider; -import org.springframework.context.annotation.*; - -@Configuration -public class ServingStoreRedisConfig { - - @Bean - StoreProto.Store.RedisConfig servingStoreRedisConf(CachedSpecService specService) { - if (specService.getStore().getType() != StoreProto.Store.StoreType.REDIS) return null; - return specService.getStore().getRedisConfig(); - } - - @Bean(destroyMethod = "shutdown") - ClientResources servingClientResources() { - return DefaultClientResources.create(); - } - - @Bean(destroyMethod = "shutdown") - RedisClient servingRedisClient( - ClientResources servingClientResources, - ObjectProvider servingStoreRedisConf) { - if (servingStoreRedisConf.getIfAvailable() == null) return null; - RedisURI redisURI = - RedisURI.create( - servingStoreRedisConf.getIfAvailable().getHost(), - servingStoreRedisConf.getIfAvailable().getPort()); - return RedisClient.create(servingClientResources, redisURI); - } - - @Bean(destroyMethod = "close") - StatefulRedisConnection servingRedisConnection( - ObjectProvider servingRedisClient) { - if (servingRedisClient.getIfAvailable() == null) return null; - return servingRedisClient.getIfAvailable().connect(new ByteArrayCodec()); - } -} diff --git a/serving/src/main/java/feast/serving/controller/HealthServiceController.java b/serving/src/main/java/feast/serving/controller/HealthServiceController.java index 3d34aea97b..0810429183 100644 --- a/serving/src/main/java/feast/serving/controller/HealthServiceController.java +++ b/serving/src/main/java/feast/serving/controller/HealthServiceController.java @@ -16,8 +16,8 @@ */ package feast.serving.controller; -import feast.core.StoreProto.Store; -import feast.serving.ServingAPIProto.GetFeastServingInfoRequest; +import feast.proto.core.StoreProto.Store; +import feast.proto.serving.ServingAPIProto.GetFeastServingInfoRequest; import feast.serving.interceptors.GrpcMonitoringInterceptor; import feast.serving.service.ServingService; import feast.serving.specs.CachedSpecService; diff --git 
a/serving/src/main/java/feast/serving/controller/ServingServiceGRpcController.java b/serving/src/main/java/feast/serving/controller/ServingServiceGRpcController.java index cc1f856d72..3fae6ae65a 100644 --- a/serving/src/main/java/feast/serving/controller/ServingServiceGRpcController.java +++ b/serving/src/main/java/feast/serving/controller/ServingServiceGRpcController.java @@ -16,19 +16,21 @@ */ package feast.serving.controller; -import feast.serving.FeastProperties; -import feast.serving.ServingAPIProto.GetBatchFeaturesRequest; -import feast.serving.ServingAPIProto.GetBatchFeaturesResponse; -import feast.serving.ServingAPIProto.GetFeastServingInfoRequest; -import feast.serving.ServingAPIProto.GetFeastServingInfoResponse; -import feast.serving.ServingAPIProto.GetJobRequest; -import feast.serving.ServingAPIProto.GetJobResponse; -import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; -import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; -import feast.serving.ServingServiceGrpc.ServingServiceImplBase; +import feast.proto.serving.ServingAPIProto.GetBatchFeaturesRequest; +import feast.proto.serving.ServingAPIProto.GetBatchFeaturesResponse; +import feast.proto.serving.ServingAPIProto.GetFeastServingInfoRequest; +import feast.proto.serving.ServingAPIProto.GetFeastServingInfoResponse; +import feast.proto.serving.ServingAPIProto.GetJobRequest; +import feast.proto.serving.ServingAPIProto.GetJobResponse; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesResponse; +import feast.proto.serving.ServingServiceGrpc.ServingServiceImplBase; +import feast.serving.config.FeastProperties; +import feast.serving.exception.SpecRetrievalException; import feast.serving.interceptors.GrpcMonitoringInterceptor; import feast.serving.service.ServingService; import feast.serving.util.RequestHelper; +import io.grpc.Status; import io.grpc.stub.StreamObserver; import io.opentracing.Scope; import io.opentracing.Span; @@ -74,6 +76,10 @@ public void getOnlineFeatures( GetOnlineFeaturesResponse onlineFeatures = servingService.getOnlineFeatures(request); responseObserver.onNext(onlineFeatures); responseObserver.onCompleted(); + } catch (SpecRetrievalException e) { + log.error("Failed to retrieve specs in SpecService", e); + responseObserver.onError( + Status.NOT_FOUND.withDescription(e.getMessage()).withCause(e).asException()); } catch (Exception e) { log.warn("Failed to get Online Features", e); responseObserver.onError(e); @@ -89,6 +95,10 @@ public void getBatchFeatures( GetBatchFeaturesResponse batchFeatures = servingService.getBatchFeatures(request); responseObserver.onNext(batchFeatures); responseObserver.onCompleted(); + } catch (SpecRetrievalException e) { + log.error("Failed to retrieve specs in SpecService", e); + responseObserver.onError( + Status.NOT_FOUND.withDescription(e.getMessage()).withCause(e).asException()); } catch (Exception e) { log.warn("Failed to get Batch Features", e); responseObserver.onError(e); diff --git a/serving/src/main/java/feast/serving/controller/ServingServiceRestController.java b/serving/src/main/java/feast/serving/controller/ServingServiceRestController.java index b0e349fd6b..df71112ef7 100644 --- a/serving/src/main/java/feast/serving/controller/ServingServiceRestController.java +++ b/serving/src/main/java/feast/serving/controller/ServingServiceRestController.java @@ -18,11 +18,11 @@ import static feast.serving.util.mappers.ResponseJSONMapper.mapGetOnlineFeaturesResponse; -import 
feast.serving.FeastProperties; -import feast.serving.ServingAPIProto.GetFeastServingInfoRequest; -import feast.serving.ServingAPIProto.GetFeastServingInfoResponse; -import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; -import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; +import feast.proto.serving.ServingAPIProto.GetFeastServingInfoRequest; +import feast.proto.serving.ServingAPIProto.GetFeastServingInfoResponse; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesResponse; +import feast.serving.config.FeastProperties; import feast.serving.service.ServingService; import feast.serving.util.RequestHelper; import io.opentracing.Tracer; diff --git a/serving/src/main/java/feast/serving/service/HistoricalServingService.java b/serving/src/main/java/feast/serving/service/HistoricalServingService.java index cc6df1b6b5..b4a694e3bf 100644 --- a/serving/src/main/java/feast/serving/service/HistoricalServingService.java +++ b/serving/src/main/java/feast/serving/service/HistoricalServingService.java @@ -16,9 +16,9 @@ */ package feast.serving.service; -import feast.serving.ServingAPIProto; -import feast.serving.ServingAPIProto.*; -import feast.serving.ServingAPIProto.Job.Builder; +import feast.proto.serving.ServingAPIProto; +import feast.proto.serving.ServingAPIProto.*; +import feast.proto.serving.ServingAPIProto.Job.Builder; import feast.serving.specs.CachedSpecService; import feast.storage.api.retriever.FeatureSetRequest; import feast.storage.api.retriever.HistoricalRetrievalResult; diff --git a/serving/src/main/java/feast/serving/service/JobService.java b/serving/src/main/java/feast/serving/service/JobService.java index 96af5cd4d6..3198fffd4c 100644 --- a/serving/src/main/java/feast/serving/service/JobService.java +++ b/serving/src/main/java/feast/serving/service/JobService.java @@ -16,7 +16,7 @@ */ package feast.serving.service; -import feast.serving.ServingAPIProto.Job; +import feast.proto.serving.ServingAPIProto.Job; import java.util.Optional; // JobService interface specifies the operations to manage Job instances internally in Feast diff --git a/serving/src/main/java/feast/serving/service/NoopJobService.java b/serving/src/main/java/feast/serving/service/NoopJobService.java index 41fd561073..5407cdcbe5 100644 --- a/serving/src/main/java/feast/serving/service/NoopJobService.java +++ b/serving/src/main/java/feast/serving/service/NoopJobService.java @@ -16,7 +16,7 @@ */ package feast.serving.service; -import feast.serving.ServingAPIProto.Job; +import feast.proto.serving.ServingAPIProto.Job; import java.util.Optional; // No-op implementation of the JobService, for online serving stores. 
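// A minimal sketch (names taken from this diff) of the wiring that JobServiceConfig above now
// produces: only a BIGQUERY active store needs durable retrieval-job state, so it alone gets a
// Redis-backed job store, while online-only stores fall through to this NoopJobService:
//
//   JobService jobService =
//       storeType == StoreType.BIGQUERY
//           ? new RedisBackedJobService(feastProperties.getJobStore())
//           : new NoopJobService();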
diff --git a/serving/src/main/java/feast/serving/service/OnlineServingService.java b/serving/src/main/java/feast/serving/service/OnlineServingService.java index 30addd2b9f..bb73e34f51 100644 --- a/serving/src/main/java/feast/serving/service/OnlineServingService.java +++ b/serving/src/main/java/feast/serving/service/OnlineServingService.java @@ -16,18 +16,19 @@ */ package feast.serving.service; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.protobuf.Duration; -import feast.serving.ServingAPIProto.*; -import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; -import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse.FieldValues; +import feast.proto.serving.ServingAPIProto.*; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesResponse.FieldValues; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.ValueProto.Value; import feast.serving.specs.CachedSpecService; import feast.serving.util.Metrics; import feast.serving.util.RefUtil; import feast.storage.api.retriever.FeatureSetRequest; import feast.storage.api.retriever.OnlineRetriever; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.ValueProto.Value; import io.grpc.Status; import io.opentracing.Scope; import io.opentracing.Tracer; @@ -75,6 +76,9 @@ public GetOnlineFeaturesResponse getOnlineFeatures(GetOnlineFeaturesRequest requ // feature set request. List> featureRows = retriever.getOnlineFeatures(entityRows, featureSetRequests); + if (scope != null) { + scope.span().log(ImmutableMap.of("event", "featureRows", "value", featureRows)); + } // For each feature set request, read the feature rows returned by the retriever, and // populate the featureValuesMap with the feature values corresponding to that entity row. 
@@ -98,7 +102,6 @@ public GetOnlineFeaturesResponse getOnlineFeatures(GetOnlineFeaturesRequest requ if (isStale(featureSetRequest, entityRow, featureRow)) { featureSetRequest .getFeatureReferences() - .parallelStream() .forEach( ref -> { populateStaleKeyCountMetrics(project, ref); @@ -133,9 +136,7 @@ public GetOnlineFeaturesResponse getOnlineFeatures(GetOnlineFeaturesRequest requ } private void populateStaleKeyCountMetrics(String project, FeatureReference ref) { - Metrics.staleKeyCount - .labels(project, RefUtil.generateFeatureStringRefWithoutProject(ref)) - .inc(); + Metrics.staleKeyCount.labels(project, ref.getName()).inc(); } private void populateRequestCountMetrics(FeatureSetRequest featureSetRequest) { @@ -143,11 +144,7 @@ private void populateRequestCountMetrics(FeatureSetRequest featureSetRequest) { featureSetRequest .getFeatureReferences() .parallelStream() - .forEach( - ref -> - Metrics.requestCount - .labels(project, RefUtil.generateFeatureStringRefWithoutProject(ref)) - .inc()); + .forEach(ref -> Metrics.requestCount.labels(project, ref.getName()).inc()); } @Override diff --git a/serving/src/main/java/feast/serving/service/RedisBackedJobService.java b/serving/src/main/java/feast/serving/service/RedisBackedJobService.java index 0bf5363037..9081b03f51 100644 --- a/serving/src/main/java/feast/serving/service/RedisBackedJobService.java +++ b/serving/src/main/java/feast/serving/service/RedisBackedJobService.java @@ -17,10 +17,15 @@ package feast.serving.service; import com.google.protobuf.util.JsonFormat; -import feast.serving.ServingAPIProto.Job; -import feast.serving.ServingAPIProto.Job.Builder; +import feast.proto.serving.ServingAPIProto.Job; +import feast.proto.serving.ServingAPIProto.Job.Builder; +import feast.serving.config.FeastProperties; +import io.lettuce.core.RedisClient; +import io.lettuce.core.RedisURI; import io.lettuce.core.api.StatefulRedisConnection; import io.lettuce.core.api.sync.RedisCommands; +import io.lettuce.core.codec.ByteArrayCodec; +import io.lettuce.core.resource.DefaultClientResources; import java.util.Optional; import org.joda.time.Duration; import org.slf4j.Logger; @@ -37,6 +42,16 @@ public class RedisBackedJobService implements JobService { // and since users normally don't require info about relatively old jobs. 
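// Concretely: defaultExpirySeconds below gives every stored job entry a one-day lifetime.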
private final int defaultExpirySeconds = (int) Duration.standardDays(1).getStandardSeconds(); + public RedisBackedJobService(FeastProperties.JobStoreProperties jobStoreProperties) { + RedisURI uri = + RedisURI.create(jobStoreProperties.getRedisHost(), jobStoreProperties.getRedisPort()); + + this.syncCommand = + RedisClient.create(DefaultClientResources.create(), uri) + .connect(new ByteArrayCodec()) + .sync(); + } + public RedisBackedJobService(StatefulRedisConnection connection) { this.syncCommand = connection.sync(); } diff --git a/serving/src/main/java/feast/serving/service/ServingService.java b/serving/src/main/java/feast/serving/service/ServingService.java index 5e662229ee..1fe9840d59 100644 --- a/serving/src/main/java/feast/serving/service/ServingService.java +++ b/serving/src/main/java/feast/serving/service/ServingService.java @@ -16,14 +16,14 @@ */ package feast.serving.service; -import feast.serving.ServingAPIProto.GetBatchFeaturesRequest; -import feast.serving.ServingAPIProto.GetBatchFeaturesResponse; -import feast.serving.ServingAPIProto.GetFeastServingInfoRequest; -import feast.serving.ServingAPIProto.GetFeastServingInfoResponse; -import feast.serving.ServingAPIProto.GetJobRequest; -import feast.serving.ServingAPIProto.GetJobResponse; -import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; -import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; +import feast.proto.serving.ServingAPIProto.GetBatchFeaturesRequest; +import feast.proto.serving.ServingAPIProto.GetBatchFeaturesResponse; +import feast.proto.serving.ServingAPIProto.GetFeastServingInfoRequest; +import feast.proto.serving.ServingAPIProto.GetFeastServingInfoResponse; +import feast.proto.serving.ServingAPIProto.GetJobRequest; +import feast.proto.serving.ServingAPIProto.GetJobResponse; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesResponse; public interface ServingService { /** @@ -41,31 +41,31 @@ GetFeastServingInfoResponse getFeastServingInfo( /** * Get features from an online serving store, given a list of {@link - * feast.serving.ServingAPIProto.FeatureReference}s to retrieve, and list of {@link - * feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow}s to join the retrieved values - * to. + * feast.proto.serving.ServingAPIProto.FeatureReference}s to retrieve, and list of {@link + * feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow}s to join the retrieved + * values to. * *

Features can be queried across feature sets, but each {@link - * feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow} must contain all entities for - * all feature sets included in the request. + * feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow} must contain all + * entities for all feature sets included in the request. * *

This request is fulfilled synchronously. * * @param getFeaturesRequest {@link GetOnlineFeaturesRequest} containing list of {@link - * feast.serving.ServingAPIProto.FeatureReference}s to retrieve and list of {@link - * feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow}s to join the retrieved - * values to. + * feast.proto.serving.ServingAPIProto.FeatureReference}s to retrieve and list of {@link + * feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow}s to join the + * retrieved values to. * @return {@link GetOnlineFeaturesResponse} with list of {@link - * feast.serving.ServingAPIProto.GetOnlineFeaturesResponse.FieldValues} for each {@link - * feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow} supplied. + * feast.proto.serving.ServingAPIProto.GetOnlineFeaturesResponse.FieldValues} for each {@link + * feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow} supplied. */ GetOnlineFeaturesResponse getOnlineFeatures(GetOnlineFeaturesRequest getFeaturesRequest); /** * Get features from a batch serving store, given a list of {@link - * feast.serving.ServingAPIProto.FeatureReference}s to retrieve, and {@link - * feast.serving.ServingAPIProto.DatasetSource} pointing to remote location of dataset to join - * retrieved features to. All columns in the provided dataset will be preserved in the output + * feast.proto.serving.ServingAPIProto.FeatureReference}s to retrieve, and {@link + * feast.proto.serving.ServingAPIProto.DatasetSource} pointing to remote location of dataset to + * join retrieved features to. All columns in the provided dataset will be preserved in the output * dataset. * *

Due to the potential size of batch retrieval requests, this request is fulfilled @@ -73,11 +73,11 @@ GetFeastServingInfoResponse getFeastServingInfo( * #getJob(GetJobRequest)} will return the status of the retrieval job. * * @param getFeaturesRequest {@link GetBatchFeaturesRequest} containing a list of {@link - * feast.serving.ServingAPIProto.FeatureReference}s to retrieve, and {@link - * feast.serving.ServingAPIProto.DatasetSource} pointing to remote location of dataset to join - * retrieved features to. + * feast.proto.serving.ServingAPIProto.FeatureReference}s to retrieve, and {@link + * feast.proto.serving.ServingAPIProto.DatasetSource} pointing to remote location of dataset + * to join retrieved features to. * @return {@link GetBatchFeaturesResponse} containing reference to a retrieval {@link - * feast.serving.ServingAPIProto.Job}. + * feast.proto.serving.ServingAPIProto.Job}. */ GetBatchFeaturesResponse getBatchFeatures(GetBatchFeaturesRequest getFeaturesRequest); diff --git a/serving/src/main/java/feast/serving/specs/CachedSpecService.java b/serving/src/main/java/feast/serving/specs/CachedSpecService.java index 47f4934d52..07b0b8bbbd 100644 --- a/serving/src/main/java/feast/serving/specs/CachedSpecService.java +++ b/serving/src/main/java/feast/serving/specs/CachedSpecService.java @@ -18,32 +18,27 @@ import static feast.serving.util.RefUtil.generateFeatureSetStringRef; import static feast.serving.util.RefUtil.generateFeatureStringRef; -import static feast.serving.util.mappers.YamlToProtoMapper.yamlToStoreProto; -import static java.util.Comparator.comparingInt; import static java.util.stream.Collectors.groupingBy; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; -import feast.core.CoreServiceProto.ListFeatureSetsRequest; -import feast.core.CoreServiceProto.ListFeatureSetsResponse; -import feast.core.CoreServiceProto.UpdateStoreRequest; -import feast.core.CoreServiceProto.UpdateStoreResponse; -import feast.core.FeatureSetProto.FeatureSet; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.FeatureSetProto.FeatureSpec; -import feast.core.StoreProto.Store; -import feast.core.StoreProto.Store.Subscription; -import feast.serving.ServingAPIProto.FeatureReference; +import feast.proto.core.CoreServiceProto.ListFeatureSetsRequest; +import feast.proto.core.CoreServiceProto.ListFeatureSetsResponse; +import feast.proto.core.FeatureSetProto.FeatureSet; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.FeatureSetProto.FeatureSpec; +import feast.proto.core.StoreProto; +import feast.proto.core.StoreProto.Store; +import feast.proto.core.StoreProto.Store.Subscription; +import feast.proto.serving.ServingAPIProto.FeatureReference; import feast.serving.exception.SpecRetrievalException; import feast.storage.api.retriever.FeatureSetRequest; import io.grpc.StatusRuntimeException; import io.prometheus.client.Gauge; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -52,18 +47,20 @@ import org.apache.commons.lang3.tuple.Pair; import org.slf4j.Logger; -/** In-memory cache of specs. */ +/** In-memory cache of specs hosted in Feast Core. 
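It also maintains a mapping from string feature references to the names of the feature sets that provide them.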
*/ public class CachedSpecService { private static final int MAX_SPEC_COUNT = 1000; private static final Logger log = org.slf4j.LoggerFactory.getLogger(CachedSpecService.class); + private static final String DEFAULT_PROJECT_NAME = "default"; + // flag to signal that multiple featuresets match a specific + // string feature reference in the feature to featureset mapping. + private static final String FEATURE_SET_CONFLICT_FLAG = "##CONFLICT##"; private final CoreSpecService coreService; - private final Path configPath; - private final Map featureToFeatureSetMapping; + private Map featureToFeatureSetMapping; - private final CacheLoader featureSetCacheLoader; private final LoadingCache featureSetCache; private Store store; @@ -80,15 +77,14 @@ public class CachedSpecService { .help("epoch time of the last time the cache was updated") .register(); - public CachedSpecService(CoreSpecService coreService, Path configPath) { - this.configPath = configPath; + public CachedSpecService(CoreSpecService coreService, StoreProto.Store store) { this.coreService = coreService; - this.store = updateStore(readConfig(configPath)); + this.store = coreService.registerStore(store); Map featureSets = getFeatureSetMap(); featureToFeatureSetMapping = new ConcurrentHashMap<>(getFeatureToFeatureSetMapping(featureSets)); - featureSetCacheLoader = CacheLoader.from(featureSets::get); + CacheLoader featureSetCacheLoader = CacheLoader.from(featureSets::get); featureSetCache = CacheBuilder.newBuilder().maximumSize(MAX_SPEC_COUNT).build(featureSetCacheLoader); featureSetCache.putAll(featureSets); @@ -108,7 +104,9 @@ public FeatureSetSpec getFeatureSetSpec(String featureSetRef) throws ExecutionEx } /** - * Get FeatureSetSpecs for the given features. + * Get FeatureSetSpecs for the given feature references. If the project is unspecified in a + * given reference, the default project is assumed. Throws a {@link SpecRetrievalException} if + * no feature set, or more than one feature set, matches a given string reference. * * @return FeatureSetRequest containing the specs, and their respective feature references */ @@ -117,14 +115,22 @@ public List getFeatureSets(List featureRefe featureReferences.stream() .map( featureReference -> { - String featureSet = - featureToFeatureSetMapping.getOrDefault( - generateFeatureStringRef(featureReference), ""); - if (featureSet.isEmpty()) { + // map feature reference to corresponding feature set name + String fsName = + featureToFeatureSetMapping.get(generateFeatureStringRef(featureReference)); + if (fsName == null) { throw new SpecRetrievalException( - String.format("Unable to retrieve feature %s", featureReference)); + String.format( + "Unable to find Feature Set for the given Feature Reference: %s", + generateFeatureStringRef(featureReference))); + } else if (FEATURE_SET_CONFLICT_FLAG.equals(fsName)) { + throw new SpecRetrievalException( + String.format( + "Given Feature Reference is ambiguous as it matches multiple Feature Sets: %s."
+ + "Please specify a more specific Feature Reference (i.e. specify the project or feature set)", + generateFeatureStringRef(featureReference))); } - return Pair.of(featureSet, featureReference); + return Pair.of(fsName, featureReference); }) .collect(groupingBy(Pair::getLeft)) .forEach( @@ -133,6 +139,19 @@ public List getFeatureSets(List featureRefe FeatureSetSpec featureSetSpec = featureSetCache.get(fsName); List requestedFeatures = featureRefs.stream().map(Pair::getRight).collect(Collectors.toList()); + + // check that the requested feature references point to different features in the + // feature set. + HashSet featureNames = new HashSet<>(); + requestedFeatures.forEach( + ref -> { + if (featureNames.contains(ref.getName())) { + throw new SpecRetrievalException( + "Multiple Feature References referencing the same feature in a feature set are not allowed."); + } + featureNames.add(ref.getName()); + }); + FeatureSetRequest featureSetRequest = FeatureSetRequest.newBuilder() .setSpec(featureSetSpec) @@ -141,7 +160,7 @@ public List getFeatureSets(List featureRefe featureSetRequests.add(featureSetRequest); } catch (ExecutionException e) { throw new SpecRetrievalException( - String.format("Unable to retrieve featureSet with id %s", fsName), e); + String.format("Unable to find featureSet with name: %s", fsName), e); } }); return featureSetRequests; @@ -152,10 +171,12 @@ public List getFeatureSets(List featureRefe * from core to preload the cache. */ public void populateCache() { - this.store = updateStore(readConfig(configPath)); Map featureSetMap = getFeatureSetMap(); + + featureSetCache.invalidateAll(); featureSetCache.putAll(featureSetMap); - featureToFeatureSetMapping.putAll(getFeatureToFeatureSetMapping(featureSetMap)); + + featureToFeatureSetMapping = getFeatureToFeatureSetMapping(featureSetMap); featureSetsCount.set(featureSetCache.size()); cacheLastUpdated.set(System.currentTimeMillis()); @@ -180,8 +201,7 @@ private Map getFeatureSetMap() { .setFilter( ListFeatureSetsRequest.Filter.newBuilder() .setProject(subscription.getProject()) - .setFeatureSetName(subscription.getName()) - .setFeatureSetVersion(subscription.getVersion())) + .setFeatureSetName(subscription.getName())) .build()); for (FeatureSet featureSet : featureSetsResponse.getFeatureSetsList()) { @@ -196,68 +216,81 @@ return featureSets; } + /** + * Generate a feature to feature set mapping from the given feature sets map. Accounts for + * variations (missing project, missing feature set) in string feature references by creating + * multiple entries in the returned mapping, one for each variation.
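+ * For example (a sketch): a feature "age" in feature set "driver" of the "default" project + * yields the string references "default/driver:age", "default/age", "driver:age" and "age", + * all mapped to the feature set reference "default/driver".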
+ * + * @param featureSets map of feature set name to feature set specs + * @return mapping of string feature references to name of feature sets + */ private Map getFeatureToFeatureSetMapping( Map featureSets) { - HashMap mapping = new HashMap<>(); + Map mapping = new HashMap<>(); featureSets.values().stream() - .collect(groupingBy(featureSet -> Pair.of(featureSet.getProject(), featureSet.getName()))) .forEach( - (group, groupedFeatureSets) -> { - groupedFeatureSets = - groupedFeatureSets.stream() - .sorted(comparingInt(FeatureSetSpec::getVersion)) - .collect(Collectors.toList()); - for (int i = 0; i < groupedFeatureSets.size(); i++) { - FeatureSetSpec featureSetSpec = groupedFeatureSets.get(i); - for (FeatureSpec featureSpec : featureSetSpec.getFeaturesList()) { - FeatureReference featureRef = - FeatureReference.newBuilder() - .setProject(featureSetSpec.getProject()) - .setName(featureSpec.getName()) - .setVersion(featureSetSpec.getVersion()) - .build(); - mapping.put( - generateFeatureStringRef(featureRef), - generateFeatureSetStringRef(featureSetSpec)); - if (i == groupedFeatureSets.size() - 1) { - featureRef = - FeatureReference.newBuilder() - .setProject(featureSetSpec.getProject()) - .setName(featureSpec.getName()) - .build(); - mapping.put( - generateFeatureStringRef(featureRef), - generateFeatureSetStringRef(featureSetSpec)); + featureSetSpec -> { + for (FeatureSpec featureSpec : featureSetSpec.getFeaturesList()) { + // Register the different permutations of string feature references + // that refer to this feature in the feature to featureset mapping. + + // Features in FeatureSets in default project can be referenced without project. + boolean isInDefaultProject = + featureSetSpec.getProject().equals(DEFAULT_PROJECT_NAME); + + for (boolean hasProject : new boolean[] {true, false}) { + if (!isInDefaultProject && !hasProject) continue; + // Features can be referenced without a featureset if there are no conflicts. + for (boolean hasFeatureSet : new boolean[] {true, false}) { + // Get mapping between string feature reference and featureset + Pair singleMapping = + this.generateFeatureToFeatureSetMapping( + featureSpec, featureSetSpec, hasProject, hasFeatureSet); + String featureRef = singleMapping.getKey(); + String featureSetRef = singleMapping.getValue(); + // Check if another feature set has already mapped to this + // string feature reference. If so, mark the conflict. + if (mapping.containsKey(featureRef)) { + mapping.put(featureRef, FEATURE_SET_CONFLICT_FLAG); + } else { + mapping.put(featureRef, featureSetRef); + } } } } }); + return mapping; } - private Store readConfig(Path path) { - try { - List fileContents = Files.readAllLines(path); - String yaml = fileContents.stream().reduce("", (l1, l2) -> l1 + "\n" + l2); - log.info("loaded store config at {}: \n{}", path.toString(), yaml); - return yamlToStoreProto(yaml); - } catch (IOException e) { - throw new RuntimeException( - String.format("Unable to read store config at %s", path.toAbsolutePath()), e); + /** + * Generate a single mapping between the given feature and the featureset. Maps a feature + * reference referring to the given feature to the corresponding featureset's name. + * + * @param featureSpec specifying the feature to create mapping for. + * @param featureSetSpec specifying the feature set to create mapping for. + * @param hasProject whether generated mapping's string feature ref has a project. + * @param hasFeatureSet whether generated mapping's string feature ref has a featureSet.
- private Store readConfig(Path path) { - try { - List fileContents = Files.readAllLines(path); - String yaml = fileContents.stream().reduce("", (l1, l2) -> l1 + "\n" + l2); - log.info("loaded store config at {}: \n{}", path.toString(), yaml); - return yamlToStoreProto(yaml); - } catch (IOException e) { - throw new RuntimeException( - String.format("Unable to read store config at %s", path.toAbsolutePath()), e); + /** + * Generate a single mapping between the given feature and the featureset. Maps a feature + * reference referring to the given feature to the corresponding featureset's name. + * + * @param featureSpec specifying the feature to create the mapping for. + * @param featureSetSpec specifying the feature set to create the mapping for. + * @param hasProject whether the generated mapping's string feature ref has a project. + * @param hasFeatureSet whether the generated mapping's string feature ref has a featureSet. + * @return a pair mapping a string feature reference to a featureset name. + */ + private Pair generateFeatureToFeatureSetMapping( + FeatureSpec featureSpec, + FeatureSetSpec featureSetSpec, + boolean hasProject, + boolean hasFeatureSet) { + FeatureReference.Builder featureRef = + FeatureReference.newBuilder() + .setProject(featureSetSpec.getProject()) + .setFeatureSet(featureSetSpec.getName()) + .setName(featureSpec.getName()); + if (!hasProject) { + featureRef = featureRef.clearProject(); } - } - - private Store updateStore(Store store) { - UpdateStoreRequest request = UpdateStoreRequest.newBuilder().setStore(store).build(); - try { - UpdateStoreResponse updateStoreResponse = coreService.updateStore(request); - if (!updateStoreResponse.getStore().equals(store)) { - throw new RuntimeException("Core store config not matching current store config"); - } - return updateStoreResponse.getStore(); - } catch (Exception e) { - throw new RuntimeException("Unable to update store configuration", e); + if (!hasFeatureSet) { + featureRef = featureRef.clearFeatureSet(); } + return Pair.of( + generateFeatureStringRef(featureRef.build()), generateFeatureSetStringRef(featureSetSpec)); } } diff --git a/serving/src/main/java/feast/serving/specs/CoreSpecService.java b/serving/src/main/java/feast/serving/specs/CoreSpecService.java index 2f5cef342e..e2feaebccb 100644 --- a/serving/src/main/java/feast/serving/specs/CoreSpecService.java +++ b/serving/src/main/java/feast/serving/specs/CoreSpecService.java @@ -16,18 +16,19 @@ */ package feast.serving.specs; -import feast.core.CoreServiceGrpc; -import feast.core.CoreServiceProto.GetFeatureSetRequest; -import feast.core.CoreServiceProto.GetFeatureSetResponse; -import feast.core.CoreServiceProto.ListFeatureSetsRequest; -import feast.core.CoreServiceProto.ListFeatureSetsResponse; -import feast.core.CoreServiceProto.UpdateStoreRequest; -import feast.core.CoreServiceProto.UpdateStoreResponse; +import feast.proto.core.CoreServiceGrpc; +import feast.proto.core.CoreServiceProto.GetFeatureSetRequest; +import feast.proto.core.CoreServiceProto.GetFeatureSetResponse; +import feast.proto.core.CoreServiceProto.ListFeatureSetsRequest; +import feast.proto.core.CoreServiceProto.ListFeatureSetsResponse; +import feast.proto.core.CoreServiceProto.UpdateStoreRequest; +import feast.proto.core.CoreServiceProto.UpdateStoreResponse; +import feast.proto.core.StoreProto.Store; import io.grpc.ManagedChannel; import io.grpc.ManagedChannelBuilder; import org.slf4j.Logger; -/** Client for spec retrieval from core. */ +/** Client for interfacing with specs in Feast Core. */ public class CoreSpecService { private static final Logger log = org.slf4j.LoggerFactory.getLogger(CoreSpecService.class); @@ -50,4 +51,24 @@ public ListFeatureSetsResponse listFeatureSets(ListFeatureSetsRequest ListFeatur public UpdateStoreResponse updateStore(UpdateStoreRequest updateStoreRequest) { return blockingStub.updateStore(updateStoreRequest); } + + /** + * Register the given store entry in Feast Core. If the store already exists in Feast Core, + * updates the store entry in Feast Core. + * + * @param store entry to register/update in Feast Core. + * @return the registered/updated store entry + */ + public Store registerStore(Store store) { + UpdateStoreRequest request = UpdateStoreRequest.newBuilder().setStore(store).build(); + try { + UpdateStoreResponse updateStoreResponse = this.updateStore(request); + if (!updateStoreResponse.getStore().equals(store)) { + throw new RuntimeException("Core store config not matching current store config"); + } + return updateStoreResponse.getStore(); + } catch (Exception e) { + throw new RuntimeException("Unable to update store configuration", e); + } + } }
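A sketch of how a caller might use the new registerStore at startup. The host/port constructor for CoreSpecService and the wildcard subscription values are illustrative assumptions, not requirements stated by this diff:

import feast.proto.core.StoreProto.Store;
import feast.proto.core.StoreProto.Store.Subscription;
import feast.serving.specs.CoreSpecService;

public class StoreRegistrationSketch {
  public static void main(String[] args) {
    // Illustrative store: subscribe to every feature set in every project.
    Store store =
        Store.newBuilder()
            .setName("online")
            .addSubscriptions(Subscription.newBuilder().setProject("*").setName("*"))
            .build();
    // Assumes CoreSpecService exposes a host/port constructor; adjust to the actual API.
    CoreSpecService coreService = new CoreSpecService("localhost", 6565);
    // registerStore round-trips through UpdateStore and raises a
    // RuntimeException if Core echoes back a different configuration.
    Store registered = coreService.registerStore(store);
    System.out.println("Registered store: " + registered.getName());
  }
}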
diff --git a/serving/src/main/java/feast/serving/util/RefUtil.java b/serving/src/main/java/feast/serving/util/RefUtil.java index c3bcb0827a..e3c36f1f9f 100644 --- a/serving/src/main/java/feast/serving/util/RefUtil.java +++ b/serving/src/main/java/feast/serving/util/RefUtil.java @@ -16,31 +16,23 @@ */ package feast.serving.util; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.serving.ServingAPIProto.FeatureReference; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.serving.ServingAPIProto.FeatureReference; public class RefUtil { public static String generateFeatureStringRef(FeatureReference featureReference) { - String ref = String.format("%s/%s", featureReference.getProject(), featureReference.getName()); - if (featureReference.getVersion() > 0) { - return ref + String.format(":%d", featureReference.getVersion()); + String ref = featureReference.getName(); + if (!featureReference.getFeatureSet().isEmpty()) { + ref = featureReference.getFeatureSet() + ":" + ref; } - return ref; - } - - public static String generateFeatureStringRefWithoutProject(FeatureReference featureReference) { - String ref = String.format("%s", featureReference.getName()); - if (featureReference.getVersion() > 0) { - return ref + String.format(":%d", featureReference.getVersion()); + if (!featureReference.getProject().isEmpty()) { + ref = featureReference.getProject() + "/" + ref; } return ref; } public static String generateFeatureSetStringRef(FeatureSetSpec featureSetSpec) { String ref = String.format("%s/%s", featureSetSpec.getProject(), featureSetSpec.getName()); - if (featureSetSpec.getVersion() > 0) { - return ref + String.format(":%d", featureSetSpec.getVersion()); - } return ref; } }
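With versions gone, generateFeatureStringRef produces exactly three reference shapes. A quick illustration using the RefUtil shown above, with the expected output noted in comments:

import static feast.serving.util.RefUtil.generateFeatureStringRef;

import feast.proto.serving.ServingAPIProto.FeatureReference;

public class RefFormatExamples {
  public static void main(String[] args) {
    // Bare feature name: prints "feature1"
    System.out.println(
        generateFeatureStringRef(FeatureReference.newBuilder().setName("feature1").build()));
    // Feature set qualified: prints "fs1:feature1"
    System.out.println(
        generateFeatureStringRef(
            FeatureReference.newBuilder().setFeatureSet("fs1").setName("feature1").build()));
    // Fully qualified: prints "project/fs1:feature1"
    System.out.println(
        generateFeatureStringRef(
            FeatureReference.newBuilder()
                .setProject("project")
                .setFeatureSet("fs1")
                .setName("feature1")
                .build()));
  }
}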
diff --git a/serving/src/main/java/feast/serving/util/RequestHelper.java b/serving/src/main/java/feast/serving/util/RequestHelper.java index e6e8e8629a..16073e9b5c 100644 --- a/serving/src/main/java/feast/serving/util/RequestHelper.java +++ b/serving/src/main/java/feast/serving/util/RequestHelper.java @@ -16,9 +16,9 @@ */ package feast.serving.util; -import feast.serving.ServingAPIProto.FeatureReference; -import feast.serving.ServingAPIProto.GetBatchFeaturesRequest; -import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; +import feast.proto.serving.ServingAPIProto.FeatureReference; +import feast.proto.serving.ServingAPIProto.GetBatchFeaturesRequest; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest; import io.grpc.Status; import java.util.Set; import java.util.stream.Collectors; diff --git a/serving/src/main/java/feast/serving/util/mappers/ResponseJSONMapper.java b/serving/src/main/java/feast/serving/util/mappers/ResponseJSONMapper.java index 14723efe7b..6aba17ac43 100644 --- a/serving/src/main/java/feast/serving/util/mappers/ResponseJSONMapper.java +++ b/serving/src/main/java/feast/serving/util/mappers/ResponseJSONMapper.java @@ -16,9 +16,9 @@ */ package feast.serving.util.mappers; -import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; -import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse.FieldValues; -import feast.types.ValueProto.Value; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesResponse; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesResponse.FieldValues; +import feast.proto.types.ValueProto.Value; import java.util.List; import java.util.Map; import java.util.stream.Collectors; diff --git a/serving/src/main/java/feast/serving/util/mappers/YamlToProtoMapper.java b/serving/src/main/java/feast/serving/util/mappers/YamlToProtoMapper.java deleted file mode 100644 index 00ad1fabb1..0000000000 --- a/serving/src/main/java/feast/serving/util/mappers/YamlToProtoMapper.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * Copyright 2018-2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package feast.serving.util.mappers; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; -import com.google.protobuf.util.JsonFormat; -import feast.core.StoreProto.Store; -import feast.core.StoreProto.Store.Builder; -import java.io.IOException; - -public class YamlToProtoMapper { - private static final ObjectMapper yamlReader = new ObjectMapper(new YAMLFactory()); - private static final ObjectMapper jsonWriter = new ObjectMapper(); - - public static Store yamlToStoreProto(String yaml) throws IOException { - Object obj = yamlReader.readValue(yaml, Object.class); - String jsonString = jsonWriter.writeValueAsString(obj); - Builder builder = Store.newBuilder(); - JsonFormat.parser().merge(jsonString, builder); - return builder.build(); - } -} diff --git a/serving/src/main/resources/application.yml b/serving/src/main/resources/application.yml index 96713c8028..9158ee7fa5 100644 --- a/serving/src/main/resources/application.yml +++ b/serving/src/main/resources/application.yml @@ -1,12 +1,49 @@ feast: - # This value is retrieved from project.version properties in pom.xml - # https://docs.spring.io/spring-boot/docs/current/reference/html/ - version: @project.version@ # GRPC service address for Feast Core # Feast Serving requires connection to Feast Core to retrieve and reload Feast metadata (e.g. FeatureSpecs, Store information) core-host: ${FEAST_CORE_HOST:localhost} core-grpc-port: ${FEAST_CORE_GRPC_PORT:6565} + # Indicates the active store. Only a single store in the list can be active at one time. In the future this key + # will be deprecated in order to allow multiple stores to be served from a single serving instance + active_store: online +
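A hypothetical helper showing how the active_store key above could be resolved against the stores list that follows; this mirrors the lookup-by-name pattern used elsewhere in Feast's configuration handling, but it is not code from this PR:

import java.util.List;
import java.util.Map;

public class ActiveStoreSketch {

  // Hypothetical minimal model of the YAML: each store entry carries a name,
  // and active_store must name exactly one entry in the list.
  static Map<String, Object> findActiveStore(
      List<Map<String, Object>> stores, String activeStore) {
    return stores.stream()
        .filter(store -> activeStore.equals(store.get("name")))
        .findFirst()
        .orElseThrow(
            () -> new IllegalArgumentException(
                "active_store is misconfigured: no store named " + activeStore));
  }

  public static void main(String[] args) {
    List<Map<String, Object>> stores =
        List.of(
            Map.of("name", "online", "type", "REDIS"),
            Map.of("name", "historical", "type", "BIGQUERY"));
    // Prints the online store entry.
    System.out.println(findActiveStore(stores, "online"));
  }
}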
+ # List of store configurations + stores: + # Below are two store configurations. One for Redis and one for BigQuery. + # Please see https://api.docs.feast.dev/grpc/feast.core.pb.html#Store for configuration options + - name: online # Name of the store (referenced by active_store) + type: REDIS # Type of the store. REDIS and BIGQUERY are the available options + config: # Store specific configuration. See the documentation link above for options + host: localhost + port: 6379 + # Subscriptions indicate which feature sets need to be retrieved and used to populate this store + subscriptions: + # Wildcards match all options. No filtering is done. + - name: "*" + project: "*" + + - name: historical + type: BIGQUERY + config: # Store specific configuration. + # GCP Project + project_id: my_project + # BigQuery Dataset Id + dataset_id: my_dataset + # staging-location specifies the URI to store intermediate files for batch serving. + # Feast Serving client is expected to have read access to this staging location + # to download the batch features. + # For example: gs://mybucket/myprefix + # Please omit the trailing slash in the URI. + staging_location: gs://mybucket/myprefix + # Retry options for BigQuery retrieval jobs + initial_retry_delay_seconds: 1 + # BigQuery timeout for retrieval jobs + total_timeout_seconds: 21600 + subscriptions: + - name: "*" + project: "*" + tracing: # If true, Feast will provide tracing data (using OpenTracing API) for various RPC method calls # which can be useful to debug performance issues and perform benchmarking @@ -17,41 +54,13 @@ feast: # The service name identifier for the tracing data service-name: feast_serving - store: - # Path containing the store configuration for this serving store. - config-path: ${FEAST_STORE_CONFIG_PATH:serving/sample_redis_config.yml} - # If serving redis, the redis pool max size - redis-pool-max-size: ${FEAST_REDIS_POOL_MAX_SIZE:128} - # If serving redis, the redis pool max idle conns - redis-pool-max-idle: ${FEAST_REDIS_POOL_MAX_IDLE:16} - - jobs: - # staging-location specifies the URI to store intermediate files for batch serving. - # Feast Serving client is expected to have read access to this staging location - # to download the batch features. - # - # For example: gs://mybucket/myprefix - # Please omit the trailing slash in the URI. - staging-location: ${FEAST_JOB_STAGING_LOCATION:} - # - # Retry options for BigQuery jobs: - bigquery-initial-retry-delay-secs: 1 - bigquery-total-timeout-secs: 21600 - # - # Type of store to store job metadata. This only needs to be set if the - # serving store type is Bigquery. - store-type: ${FEAST_JOB_STORE_TYPE:} - # - # Job store connection options. If the job store is redis, the following items are required: - # - # store-options: - # host: localhost - # port: 6379 - # Optionally, you can configure the connection pool with the following items: - # max-conn: 8 - # max-idle: 8 - # max-wait-millis: 50 - store-options: {} + # The job store is used to maintain job management state for Feast Serving. This is required when using certain + # historical stores like BigQuery. Only Redis is supported as a job store. + job_store: + # Redis host to connect to + redis_host: localhost + # Redis port to connect to + redis_port: 6379 grpc: # The port number Feast Serving GRPC service should listen on diff --git a/serving/src/main/resources/templates/join_featuresets.sql b/serving/src/main/resources/templates/join_featuresets.sql deleted file mode 100644 index 60b7c7d7a1..0000000000 --- a/serving/src/main/resources/templates/join_featuresets.sql +++ /dev/null @@ -1,24 +0,0 @@ -/* - Joins the outputs of multiple point-in-time-correctness joins to a single table. 
- */ -WITH joined as ( -SELECT * FROM `{{ leftTableName }}` -{% for featureSet in featureSets %} -LEFT JOIN ( - SELECT - uuid, - {% for featureName in featureSet.features %} - {{ featureSet.project }}_{{ featureName }}_v{{ featureSet.version }}{% if loop.last %}{% else %}, {% endif %} - {% endfor %} - FROM `{{ featureSet.table }}` -) USING (uuid) -{% endfor %} -) SELECT - event_timestamp, - {{ entities | join(', ') }} - {% for featureSet in featureSets %} - {% for featureName in featureSet.features %} - ,{{ featureSet.project }}_{{ featureName }}_v{{ featureSet.version }} as {{ featureName }} - {% endfor %} - {% endfor %} -FROM joined \ No newline at end of file diff --git a/serving/src/main/resources/templates/single_featureset_pit_join.sql b/serving/src/main/resources/templates/single_featureset_pit_join.sql deleted file mode 100644 index f3f20828ff..0000000000 --- a/serving/src/main/resources/templates/single_featureset_pit_join.sql +++ /dev/null @@ -1,90 +0,0 @@ -/* - This query template performs the point-in-time correctness join for a single feature set table - to the provided entity table. - - 1. Concatenate the timestamp and entities from the feature set table with the entity dataset. - Feature values are joined to this table later for improved efficiency. - featureset_timestamp is equal to null in rows from the entity dataset. - */ -WITH union_features AS ( -SELECT - -- uuid is a unique identifier for each row in the entity dataset. Generated by `QueryTemplater.createEntityTableUUIDQuery` - uuid, - -- event_timestamp contains the timestamps to join onto - event_timestamp, - -- the feature_timestamp, i.e. the latest occurrence of the requested feature relative to the entity_dataset timestamp - NULL as {{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, - -- created timestamp of the feature at the corresponding feature_timestamp - NULL as created_timestamp, - -- select only entities belonging to this feature set - {{ featureSet.entities | join(', ')}}, - -- boolean for filtering the dataset later - true AS is_entity_table -FROM `{{leftTableName}}` -UNION ALL -SELECT - NULL as uuid, - event_timestamp, - event_timestamp as {{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, - created_timestamp, - {{ featureSet.entities | join(', ')}}, - false AS is_entity_table -FROM `{{projectId}}.{{datasetId}}.{{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}` WHERE event_timestamp <= '{{maxTimestamp}}' -{% if featureSet.maxAge == 0 %}{% else %}AND event_timestamp >= Timestamp_sub(TIMESTAMP '{{ minTimestamp }}', interval {{ featureSet.maxAge }} second){% endif %} -), -/* - 2. Window the data in the unioned dataset, partitioning by entity and ordering by event_timestamp, as - well as is_entity_table. - Within each window, back-fill the feature_timestamp - as a result of this, the null feature_timestamps - in the rows from the entity table should now contain the latest timestamps relative to the row's - event_timestamp. - - For rows where event_timestamp(provided datetime) - feature_timestamp > max age, set the - feature_timestamp to null. 
- */ -joined AS ( -SELECT - uuid, - event_timestamp, - {{ featureSet.entities | join(', ')}}, - {% for featureName in featureSet.features %} - IF(event_timestamp >= {{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp {% if featureSet.maxAge == 0 %}{% else %}AND Timestamp_sub(event_timestamp, interval {{ featureSet.maxAge }} second) < {{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp{% endif %}, {{ featureSet.project }}_{{ featureName }}_v{{ featureSet.version }}, NULL) as {{ featureSet.project }}_{{ featureName }}_v{{ featureSet.version }}{% if loop.last %}{% else %}, {% endif %} - {% endfor %} -FROM ( -SELECT - uuid, - event_timestamp, - {{ featureSet.entities | join(', ')}}, - FIRST_VALUE(created_timestamp IGNORE NULLS) over w AS created_timestamp, - FIRST_VALUE({{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp IGNORE NULLS) over w AS {{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, - is_entity_table -FROM union_features -WINDOW w AS (PARTITION BY {{ featureSet.entities | join(', ') }} ORDER BY event_timestamp DESC, is_entity_table DESC, created_timestamp DESC ROWS BETWEEN CURRENT ROW AND UNBOUNDED FOLLOWING) -) -/* - 3. Select only the rows from the entity table, and join the features from the original feature set table - to the dataset using the entity values, feature_timestamp, and created_timestamps. - */ -LEFT JOIN ( -SELECT - event_timestamp as {{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, - created_timestamp, - {{ featureSet.entities | join(', ')}}, - {% for featureName in featureSet.features %} - {{ featureName }} as {{ featureSet.project }}_{{ featureName }}_v{{ featureSet.version }}{% if loop.last %}{% else %}, {% endif %} - {% endfor %} -FROM `{{projectId}}.{{datasetId}}.{{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}` WHERE event_timestamp <= '{{maxTimestamp}}' -{% if featureSet.maxAge == 0 %}{% else %}AND event_timestamp >= Timestamp_sub(TIMESTAMP '{{ minTimestamp }}', interval {{ featureSet.maxAge }} second){% endif %} -) USING ({{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, created_timestamp, {{ featureSet.entities | join(', ')}}) -WHERE is_entity_table -) -/* - 4. Finally, deduplicate the rows by selecting the first occurrence of each entity table row UUID. 
- */ -SELECT - k.* -FROM ( - SELECT ARRAY_AGG(row LIMIT 1)[OFFSET(0)] k - FROM joined row - GROUP BY uuid -) \ No newline at end of file diff --git a/serving/src/test/java/feast/serving/controller/ServingServiceGRpcControllerTest.java b/serving/src/test/java/feast/serving/controller/ServingServiceGRpcControllerTest.java index f2c51bc7dd..5c8308daea 100644 --- a/serving/src/test/java/feast/serving/controller/ServingServiceGRpcControllerTest.java +++ b/serving/src/test/java/feast/serving/controller/ServingServiceGRpcControllerTest.java @@ -19,13 +19,13 @@ import static org.mockito.MockitoAnnotations.initMocks; import com.google.protobuf.Timestamp; -import feast.serving.FeastProperties; -import feast.serving.ServingAPIProto.FeatureReference; -import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; -import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; -import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; +import feast.proto.serving.ServingAPIProto.FeatureReference; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesResponse; +import feast.proto.types.ValueProto.Value; +import feast.serving.config.FeastProperties; import feast.serving.service.ServingService; -import feast.types.ValueProto.Value; import io.grpc.StatusRuntimeException; import io.grpc.stub.StreamObserver; import io.jaegertracing.Configuration; @@ -51,18 +51,8 @@ public void setUp() { validRequest = GetOnlineFeaturesRequest.newBuilder() - .addFeatures( - FeatureReference.newBuilder() - .setName("feature1") - .setVersion(1) - .setProject("project") - .build()) - .addFeatures( - FeatureReference.newBuilder() - .setName("feature2") - .setVersion(1) - .setProject("project") - .build()) + .addFeatures(FeatureReference.newBuilder().setName("feature1").build()) + .addFeatures(FeatureReference.newBuilder().setName("feature2").build()) .addEntityRows( EntityRow.newBuilder() .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) diff --git a/serving/src/test/java/feast/serving/service/CachedSpecServiceTest.java b/serving/src/test/java/feast/serving/service/CachedSpecServiceTest.java index 01c9304bda..85f590d523 100644 --- a/serving/src/test/java/feast/serving/service/CachedSpecServiceTest.java +++ b/serving/src/test/java/feast/serving/service/CachedSpecServiceTest.java @@ -19,34 +19,26 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.assertThat; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; -import com.google.common.collect.Lists; -import feast.core.CoreServiceProto.ListFeatureSetsRequest; -import feast.core.CoreServiceProto.ListFeatureSetsResponse; -import feast.core.CoreServiceProto.UpdateStoreRequest; -import feast.core.CoreServiceProto.UpdateStoreResponse; -import feast.core.FeatureSetProto; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.FeatureSetProto.FeatureSpec; -import feast.core.StoreProto.Store; -import feast.core.StoreProto.Store.RedisConfig; -import feast.core.StoreProto.Store.StoreType; -import feast.core.StoreProto.Store.Subscription; -import feast.serving.ServingAPIProto.FeatureReference; +import feast.proto.core.CoreServiceProto.ListFeatureSetsRequest; +import 
feast.proto.core.CoreServiceProto.ListFeatureSetsResponse; +import feast.proto.core.FeatureSetProto; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.FeatureSetProto.FeatureSpec; +import feast.proto.core.StoreProto.Store; +import feast.proto.core.StoreProto.Store.Subscription; +import feast.proto.serving.ServingAPIProto.FeatureReference; +import feast.serving.exception.SpecRetrievalException; import feast.serving.specs.CachedSpecService; import feast.serving.specs.CoreSpecService; import feast.storage.api.retriever.FeatureSetRequest; -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.util.Collections; -import java.util.LinkedHashMap; +import java.util.HashMap; import java.util.List; import java.util.Map; -import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -55,7 +47,6 @@ public class CachedSpecServiceTest { - private File configFile; private Store store; @Rule public final ExpectedException expectedException = ExpectedException.none(); @@ -66,110 +57,68 @@ public class CachedSpecServiceTest { private CachedSpecService cachedSpecService; @Before - public void setUp() throws IOException { + public void setUp() { initMocks(this); - configFile = File.createTempFile("serving", ".yml"); - String yamlString = - "name: SERVING\n" - + "type: REDIS\n" - + "redis_config:\n" - + " host: localhost\n" - + " port: 6379\n" - + "subscriptions:\n" - + "- project: project\n" - + " name: fs1\n" - + " version: \"*\"\n" - + "- project: project\n" - + " name: fs2\n" - + " version: \"*\""; - BufferedWriter writer = new BufferedWriter(new FileWriter(configFile)); - writer.write(yamlString); - writer.close(); - - store = - Store.newBuilder() - .setName("SERVING") - .setType(StoreType.REDIS) - .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) - .addSubscriptions( - Subscription.newBuilder() - .setProject("project") - .setName("fs1") - .setVersion("*") - .build()) - .addSubscriptions( - Subscription.newBuilder() - .setProject("project") - .setName("fs2") - .setVersion("*") - .build()) - .build(); + this.store = Store.newBuilder().build(); + this.featureSetSpecs = new HashMap<>(); - when(coreService.updateStore(UpdateStoreRequest.newBuilder().setStore(store).build())) - .thenReturn(UpdateStoreResponse.newBuilder().setStore(store).build()); + this.setupFeatureSetAndStoreSubscription( + "project", + "fs1", + List.of( + FeatureSpec.newBuilder().setName("feature").build(), + FeatureSpec.newBuilder().setName("feature2").build())); - featureSetSpecs = new LinkedHashMap<>(); - featureSetSpecs.put( - "fs1:1", - FeatureSetSpec.newBuilder() - .setProject("project") - .setName("fs1") - .setVersion(1) - .addFeatures(FeatureSpec.newBuilder().setName("feature")) - .build()); - featureSetSpecs.put( - "fs1:2", - FeatureSetSpec.newBuilder() - .setProject("project") - .setName("fs1") - .setVersion(2) - .addFeatures(FeatureSpec.newBuilder().setName("feature")) - .addFeatures(FeatureSpec.newBuilder().setName("feature2")) - .build()); - featureSetSpecs.put( - "fs2:1", + this.setupFeatureSetAndStoreSubscription( + "default", + "fs2", + List.of( + FeatureSpec.newBuilder().setName("feature3").build(), + FeatureSpec.newBuilder().setName("feature4").build(), + FeatureSpec.newBuilder().setName("feature5").build())); + + this.setupFeatureSetAndStoreSubscription( + "default", "fs3", List.of(FeatureSpec.newBuilder().setName("feature4").build())); + + 
when(this.coreService.registerStore(store)).thenReturn(store); + cachedSpecService = new CachedSpecService(this.coreService, this.store); + } + + private void setupFeatureSetAndStoreSubscription( + String project, String name, List featureSpecs) { + FeatureSetSpec fsSpec = FeatureSetSpec.newBuilder() - .setProject("project") - .setName("fs2") - .setVersion(1) - .addFeatures(FeatureSpec.newBuilder().setName("feature3")) - .build()); - - List fs1FeatureSets = - Lists.newArrayList( - FeatureSetProto.FeatureSet.newBuilder().setSpec(featureSetSpecs.get("fs1:1")).build(), - FeatureSetProto.FeatureSet.newBuilder().setSpec(featureSetSpecs.get("fs1:2")).build()); - List fs2FeatureSets = - Lists.newArrayList( - FeatureSetProto.FeatureSet.newBuilder().setSpec(featureSetSpecs.get("fs2:1")).build()); - when(coreService.listFeatureSets( - ListFeatureSetsRequest.newBuilder() - .setFilter( - ListFeatureSetsRequest.Filter.newBuilder() - .setProject("project") - .setFeatureSetName("fs1") - .setFeatureSetVersion("*") - .build()) - .build())) - .thenReturn(ListFeatureSetsResponse.newBuilder().addAllFeatureSets(fs1FeatureSets).build()); + .setProject(project) + .setName(name) + .addAllFeatures(featureSpecs) + .build(); + this.featureSetSpecs.put(name, fsSpec); + + this.store = + this.store + .toBuilder() + .addSubscriptions(Subscription.newBuilder().setProject(project).setName(name).build()) + .build(); + + // build the featureset with the given name and register it with the mocked core service + FeatureSetProto.FeatureSet featureSet = + FeatureSetProto.FeatureSet.newBuilder().setSpec(fsSpec).build(); + when(coreService.listFeatureSets( ListFeatureSetsRequest.newBuilder() .setFilter( ListFeatureSetsRequest.Filter.newBuilder() - .setProject("project") - .setFeatureSetName("fs2") - .setFeatureSetVersion("*") + .setProject(project) + .setFeatureSetName(name) .build()) .build())) - .thenReturn(ListFeatureSetsResponse.newBuilder().addAllFeatureSets(fs2FeatureSets).build()); - - cachedSpecService = new CachedSpecService(coreService, configFile.toPath()); + .thenReturn(ListFeatureSetsResponse.newBuilder().addFeatureSets(featureSet).build()); } - @After - public void tearDown() { - configFile.delete(); + @Test + public void shouldRegisterStoreWithCore() { + verify(coreService, times(1)).registerStore(cachedSpecService.getStore()); } @Test @@ -181,81 +130,96 @@ public void shouldPopulateAndReturnStore() { @Test public void shouldPopulateAndReturnFeatureSets() { + // test that CachedSpecService can retrieve fully qualified feature references. 
cachedSpecService.populateCache(); - FeatureReference frv1 = + FeatureReference fs1fr1 = FeatureReference.newBuilder() .setProject("project") .setName("feature") - .setVersion(1) + .setFeatureSet("fs1") .build(); - FeatureReference frv2 = + FeatureReference fs1fr2 = FeatureReference.newBuilder() .setProject("project") - .setName("feature") - .setVersion(2) + .setName("feature2") + .setFeatureSet("fs1") .build(); assertThat( - cachedSpecService.getFeatureSets(Collections.singletonList(frv1)), + cachedSpecService.getFeatureSets(List.of(fs1fr1, fs1fr2)), equalTo( - Lists.newArrayList( + List.of( FeatureSetRequest.newBuilder() - .addFeatureReference(frv1) - .setSpec(featureSetSpecs.get("fs1:1")) + .addFeatureReference(fs1fr1) + .addFeatureReference(fs1fr2) + .setSpec(featureSetSpecs.get("fs1")) .build()))); + } + + @Test + public void shouldPopulateAndReturnFeatureSetWithDefaultProjectIfProjectNotSupplied() { + // test that CachedSpecService will use the default project when the project is unspecified + FeatureReference fs2fr3 = + FeatureReference.newBuilder().setName("feature3").setFeatureSet("fs2").build(); + // check that this also holds for references where the feature set is unspecified + FeatureReference fs2fr5 = FeatureReference.newBuilder().setName("feature5").build(); + assertThat( - cachedSpecService.getFeatureSets(Collections.singletonList(frv2)), + cachedSpecService.getFeatureSets(List.of(fs2fr3, fs2fr5)), equalTo( - Lists.newArrayList( + List.of( FeatureSetRequest.newBuilder() - .addFeatureReference(frv2) - .setSpec(featureSetSpecs.get("fs1:2")) + .addFeatureReference(fs2fr3) + .addFeatureReference(fs2fr5) + .setSpec(featureSetSpecs.get("fs2")) .build()))); } @Test - public void shouldPopulateAndReturnLatestFeatureSetIfVersionsNotSupplied() { - cachedSpecService.populateCache(); - FeatureReference frv1 = + public void shouldPopulateAndReturnClosestFeatureSetIfFeatureSetNotSupplied() { + // test that CachedSpecService will match a reference without a featureset + // name to its featureset FeatureReference fs1fr1 = FeatureReference.newBuilder().setProject("project").setName("feature").build(); + // check that this also holds for a reference in which the project is unspecified + FeatureReference fs2fr3 = FeatureReference.newBuilder().setName("feature3").build(); + assertThat( - cachedSpecService.getFeatureSets(Collections.singletonList(frv1)), - equalTo( - Lists.newArrayList( - FeatureSetRequest.newBuilder() - .addFeatureReference(frv1) - .setSpec(featureSetSpecs.get("fs1:2")) - .build()))); + cachedSpecService.getFeatureSets(List.of(fs1fr1, fs2fr3)), + containsInAnyOrder( + List.of( + FeatureSetRequest.newBuilder() + .addFeatureReference(fs1fr1) + .setSpec(featureSetSpecs.get("fs1")) + .build(), + FeatureSetRequest.newBuilder() + .addFeatureReference(fs2fr3) + .setSpec(featureSetSpecs.get("fs2")) + .build()) + .toArray())); } @Test public void shouldPopulateAndReturnFeatureSetsGivenFeaturesFromDifferentFeatureSets() { cachedSpecService.populateCache(); - FeatureReference frv1 = - FeatureReference.newBuilder() - .setProject("project") - .setName("feature") - .setVersion(1) - .build(); - FeatureReference fr3 = - FeatureReference.newBuilder() - .setProject("project") - .setName("feature3") - .setVersion(1) - .build(); + FeatureReference fs1fr1 = + FeatureReference.newBuilder().setProject("project").setName("feature").build(); + + FeatureReference fs2fr3 = + FeatureReference.newBuilder().setProject("default").setName("feature3").build(); assertThat( - 
cachedSpecService.getFeatureSets(Lists.newArrayList(frv1, fr3)), + cachedSpecService.getFeatureSets(List.of(fs1fr1, fs2fr3)), containsInAnyOrder( - Lists.newArrayList( + List.of( FeatureSetRequest.newBuilder() - .addFeatureReference(frv1) - .setSpec(featureSetSpecs.get("fs1:1")) + .addFeatureReference(fs1fr1) + .setSpec(featureSetSpecs.get("fs1")) .build(), FeatureSetRequest.newBuilder() - .addFeatureReference(fr3) - .setSpec(featureSetSpecs.get("fs2:1")) + .addFeatureReference(fs2fr3) + .setSpec(featureSetSpecs.get("fs2")) .build()) .toArray())); } @@ -264,26 +228,31 @@ public void shouldPopulateAndReturnFeatureSetsGivenFeaturesFromDifferentFeatureS public void shouldPopulateAndReturnFeatureSetGivenFeaturesFromSameFeatureSet() { cachedSpecService.populateCache(); FeatureReference fr1 = - FeatureReference.newBuilder() - .setProject("project") - .setName("feature") - .setVersion(2) - .build(); + FeatureReference.newBuilder().setProject("project").setName("feature").build(); FeatureReference fr2 = - FeatureReference.newBuilder() - .setProject("project") - .setName("feature2") - .setVersion(2) - .build(); + FeatureReference.newBuilder().setProject("project").setName("feature2").build(); assertThat( - cachedSpecService.getFeatureSets(Lists.newArrayList(fr1, fr2)), + cachedSpecService.getFeatureSets(List.of(fr1, fr2)), equalTo( - Lists.newArrayList( + List.of( FeatureSetRequest.newBuilder() .addFeatureReference(fr1) .addFeatureReference(fr2) - .setSpec(featureSetSpecs.get("fs1:2")) + .setSpec(featureSetSpecs.get("fs1")) .build()))); } + + @Test + public void shouldThrowExceptionWhenMultipleFeatureSetMapToFeatureReference() + throws SpecRetrievalException { + // both fs2 and fs3 have a feature with the same name. + // using a generic feature reference that only specifies the feature's name + // should cause multiple feature sets to match and throw an error + FeatureReference fs2fr4 = FeatureReference.newBuilder().setName("feature4").build(); + FeatureReference fs3fr4 = FeatureReference.newBuilder().setName("feature4").build(); + + expectedException.expect(SpecRetrievalException.class); + cachedSpecService.getFeatureSets(List.of(fs2fr4, fs3fr4)); + } } diff --git a/serving/src/test/java/feast/serving/service/OnlineServingServiceTest.java b/serving/src/test/java/feast/serving/service/OnlineServingServiceTest.java index b78fcb6917..6358460a07 100644 --- a/serving/src/test/java/feast/serving/service/OnlineServingServiceTest.java +++ b/serving/src/test/java/feast/serving/service/OnlineServingServiceTest.java @@ -24,19 +24,19 @@ import com.google.common.collect.Lists; import com.google.protobuf.Duration; import com.google.protobuf.Timestamp; -import feast.core.FeatureSetProto.EntitySpec; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.serving.ServingAPIProto.FeatureReference; -import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest; -import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; -import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse; -import feast.serving.ServingAPIProto.GetOnlineFeaturesResponse.FieldValues; +import feast.proto.core.FeatureSetProto.EntitySpec; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.serving.ServingAPIProto.FeatureReference; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesResponse; +import 
feast.proto.serving.ServingAPIProto.GetOnlineFeaturesResponse.FieldValues; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FieldProto.Field; +import feast.proto.types.ValueProto.Value; import feast.serving.specs.CachedSpecService; import feast.storage.api.retriever.FeatureSetRequest; import feast.storage.connectors.redis.retriever.RedisOnlineRetriever; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.FieldProto.Field; -import feast.types.ValueProto.Value; import io.opentracing.Tracer; import io.opentracing.Tracer.SpanBuilder; import java.util.Collections; @@ -69,18 +69,8 @@ public void setUp() { public void shouldReturnResponseWithValuesIfKeysPresent() { GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() - .addFeatures( - FeatureReference.newBuilder() - .setName("feature1") - .setVersion(1) - .setProject("project") - .build()) - .addFeatures( - FeatureReference.newBuilder() - .setName("feature2") - .setVersion(1) - .setProject("project") - .build()) + .addFeatures(FeatureReference.newBuilder().setName("feature1").build()) + .addFeatures(FeatureReference.newBuilder().setName("feature2").build()) .addEntityRows( EntityRow.newBuilder() .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) @@ -103,7 +93,6 @@ public void shouldReturnResponseWithValuesIfKeysPresent() { Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) - .setFeatureSet("featureSet:1") .build(), FeatureRow.newBuilder() .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) @@ -113,7 +102,6 @@ public void shouldReturnResponseWithValuesIfKeysPresent() { Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) - .setFeatureSet("featureSet:1") .build()); FeatureSetRequest featureSetRequest = @@ -135,94 +123,14 @@ public void shouldReturnResponseWithValuesIfKeysPresent() { FieldValues.newBuilder() .putFields("entity1", intValue(1)) .putFields("entity2", strValue("a")) - .putFields("project/feature1:1", intValue(1)) - .putFields("project/feature2:1", intValue(1))) + .putFields("feature1", intValue(1)) + .putFields("feature2", intValue(1))) .addFieldValues( FieldValues.newBuilder() .putFields("entity1", intValue(2)) .putFields("entity2", strValue("b")) - .putFields("project/feature1:1", intValue(2)) - .putFields("project/feature2:1", intValue(2))) - .build(); - GetOnlineFeaturesResponse actual = onlineServingService.getOnlineFeatures(request); - assertThat( - responseToMapList(actual), containsInAnyOrder(responseToMapList(expected).toArray())); - } - - @Test - public void shouldReturnKeysWithoutVersionIfNotProvided() { - GetOnlineFeaturesRequest request = - GetOnlineFeaturesRequest.newBuilder() - .addFeatures( - FeatureReference.newBuilder() - .setName("feature1") - .setVersion(1) - .setProject("project") - .build()) - .addFeatures( - FeatureReference.newBuilder().setName("feature2").setProject("project").build()) - .addEntityRows( - EntityRow.newBuilder() - .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) - .putFields("entity1", intValue(1)) - .putFields("entity2", strValue("a"))) - .addEntityRows( - EntityRow.newBuilder() - .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) - .putFields("entity1", intValue(2)) - 
.putFields("entity2", strValue("b"))) - .build(); - - List featureRows = - Lists.newArrayList( - FeatureRow.newBuilder() - .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) - .addAllFields( - Lists.newArrayList( - Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), - Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), - Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), - Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) - .setFeatureSet("featureSet:1") - .build(), - FeatureRow.newBuilder() - .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) - .addAllFields( - Lists.newArrayList( - Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), - Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), - Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), - Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) - .setFeatureSet("featureSet:1") - .build()); - - FeatureSetRequest featureSetRequest = - FeatureSetRequest.newBuilder() - .addAllFeatureReferences(request.getFeaturesList()) - .setSpec(getFeatureSetSpec()) - .build(); - - when(specService.getFeatureSets(request.getFeaturesList())) - .thenReturn(Collections.singletonList(featureSetRequest)); - when(retriever.getOnlineFeatures( - request.getEntityRowsList(), Collections.singletonList(featureSetRequest))) - .thenReturn(Collections.singletonList(featureRows)); - when(tracer.buildSpan(ArgumentMatchers.any())).thenReturn(Mockito.mock(SpanBuilder.class)); - - GetOnlineFeaturesResponse expected = - GetOnlineFeaturesResponse.newBuilder() - .addFieldValues( - FieldValues.newBuilder() - .putFields("entity1", intValue(1)) - .putFields("entity2", strValue("a")) - .putFields("project/feature1:1", intValue(1)) - .putFields("project/feature2", intValue(1))) - .addFieldValues( - FieldValues.newBuilder() - .putFields("entity1", intValue(2)) - .putFields("entity2", strValue("b")) - .putFields("project/feature1:1", intValue(2)) - .putFields("project/feature2", intValue(2))) + .putFields("feature1", intValue(2)) + .putFields("feature2", intValue(2))) .build(); GetOnlineFeaturesResponse actual = onlineServingService.getOnlineFeatures(request); assertThat( @@ -234,18 +142,8 @@ public void shouldReturnResponseWithUnsetValuesIfKeysNotPresent() { // some keys not present, should have empty values GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() - .addFeatures( - FeatureReference.newBuilder() - .setName("feature1") - .setVersion(1) - .setProject("project") - .build()) - .addFeatures( - FeatureReference.newBuilder() - .setName("feature2") - .setVersion(1) - .setProject("project") - .build()) + .addFeatures(FeatureReference.newBuilder().setName("feature1").build()) + .addFeatures(FeatureReference.newBuilder().setName("feature2").build()) .addEntityRows( EntityRow.newBuilder() .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) @@ -268,14 +166,14 @@ public void shouldReturnResponseWithUnsetValuesIfKeysNotPresent() { Lists.newArrayList( FeatureRow.newBuilder() .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) - .setFeatureSet("project/featureSet:1") + .setFeatureSet("project/featureSet") .addAllFields( Lists.newArrayList( Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) .build(), FeatureRow.newBuilder() - .setFeatureSet("project/featureSet:1") + .setFeatureSet("project/featureSet") 
.addAllFields( Lists.newArrayList( Field.newBuilder().setName("feature1").build(), @@ -295,14 +193,14 @@ public void shouldReturnResponseWithUnsetValuesIfKeysNotPresent() { FieldValues.newBuilder() .putFields("entity1", intValue(1)) .putFields("entity2", strValue("a")) - .putFields("project/feature1:1", intValue(1)) - .putFields("project/feature2:1", intValue(1))) + .putFields("feature1", intValue(1)) + .putFields("feature2", intValue(1))) .addFieldValues( FieldValues.newBuilder() .putFields("entity1", intValue(2)) .putFields("entity2", strValue("b")) - .putFields("project/feature1:1", Value.newBuilder().build()) - .putFields("project/feature2:1", Value.newBuilder().build())) + .putFields("feature1", Value.newBuilder().build()) + .putFields("feature2", Value.newBuilder().build())) .build(); GetOnlineFeaturesResponse actual = onlineServingService.getOnlineFeatures(request); assertThat( @@ -314,18 +212,8 @@ public void shouldReturnResponseWithUnsetValuesIfMaxAgeIsExceeded() { // keys present, but too stale comp. to maxAge GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() - .addFeatures( - FeatureReference.newBuilder() - .setName("feature1") - .setVersion(1) - .setProject("project") - .build()) - .addFeatures( - FeatureReference.newBuilder() - .setName("feature2") - .setVersion(1) - .setProject("project") - .build()) + .addFeatures(FeatureReference.newBuilder().setName("feature1").build()) + .addFeatures(FeatureReference.newBuilder().setName("feature2").build()) .addEntityRows( EntityRow.newBuilder() .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) @@ -348,7 +236,7 @@ public void shouldReturnResponseWithUnsetValuesIfMaxAgeIsExceeded() { Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) - .setFeatureSet("featureSet:1") + .setFeatureSet("project/featureSet") .build(), FeatureRow.newBuilder() .setEventTimestamp( @@ -359,7 +247,7 @@ public void shouldReturnResponseWithUnsetValuesIfMaxAgeIsExceeded() { Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) - .setFeatureSet("featureSet:1") + .setFeatureSet("project/featureSet") .build()); FeatureSetSpec spec = @@ -383,14 +271,14 @@ public void shouldReturnResponseWithUnsetValuesIfMaxAgeIsExceeded() { FieldValues.newBuilder() .putFields("entity1", intValue(1)) .putFields("entity2", strValue("a")) - .putFields("project/feature1:1", intValue(1)) - .putFields("project/feature2:1", intValue(1))) + .putFields("feature1", intValue(1)) + .putFields("feature2", intValue(1))) .addFieldValues( FieldValues.newBuilder() .putFields("entity1", intValue(2)) .putFields("entity2", strValue("b")) - .putFields("project/feature1:1", Value.newBuilder().build()) - .putFields("project/feature2:1", Value.newBuilder().build())) + .putFields("feature1", Value.newBuilder().build()) + .putFields("feature2", Value.newBuilder().build())) .build(); GetOnlineFeaturesResponse actual = onlineServingService.getOnlineFeatures(request); assertThat( @@ -402,12 +290,7 @@ public void shouldFilterOutUndesiredRows() { // requested rows less than the rows available in the featureset GetOnlineFeaturesRequest request = GetOnlineFeaturesRequest.newBuilder() - .addFeatures( - FeatureReference.newBuilder() - .setName("feature1") - .setVersion(1) - 
.setProject("project") - .build()) + .addFeatures(FeatureReference.newBuilder().setName("feature1").build()) .addEntityRows( EntityRow.newBuilder() .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) @@ -430,7 +313,6 @@ public void shouldFilterOutUndesiredRows() { Field.newBuilder().setName("entity2").setValue(strValue("a")).build(), Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) - .setFeatureSet("featureSet:1") .build(), FeatureRow.newBuilder() .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) @@ -440,7 +322,6 @@ public void shouldFilterOutUndesiredRows() { Field.newBuilder().setName("entity2").setValue(strValue("b")).build(), Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) - .setFeatureSet("featureSet:1") .build()); FeatureSetRequest featureSetRequest = @@ -462,12 +343,12 @@ public void shouldFilterOutUndesiredRows() { FieldValues.newBuilder() .putFields("entity1", intValue(1)) .putFields("entity2", strValue("a")) - .putFields("project/feature1:1", intValue(1))) + .putFields("feature1", intValue(1))) .addFieldValues( FieldValues.newBuilder() .putFields("entity1", intValue(2)) .putFields("entity2", strValue("b")) - .putFields("project/feature1:1", intValue(2))) + .putFields("feature1", intValue(2))) .build(); GetOnlineFeaturesResponse actual = onlineServingService.getOnlineFeatures(request); assertThat( @@ -490,23 +371,10 @@ private Value strValue(String val) { private FeatureSetSpec getFeatureSetSpec() { return FeatureSetSpec.newBuilder() - .setProject("project") .setName("featureSet") - .setVersion(1) .addEntities(EntitySpec.newBuilder().setName("entity1")) .addEntities(EntitySpec.newBuilder().setName("entity2")) .setMaxAge(Duration.newBuilder().setSeconds(30)) // default .build(); } - - private FeatureSetSpec getFeatureSetSpecWithNoMaxAge() { - return FeatureSetSpec.newBuilder() - .setProject("project") - .setName("featureSet") - .setVersion(1) - .addEntities(EntitySpec.newBuilder().setName("entity1")) - .addEntities(EntitySpec.newBuilder().setName("entity2")) - .setMaxAge(Duration.newBuilder().setSeconds(0).setNanos(0).build()) - .build(); - } } diff --git a/serving/src/test/java/feast/serving/service/RedisBackedJobServiceTest.java b/serving/src/test/java/feast/serving/service/RedisBackedJobServiceTest.java index 34bc31d2c2..23626c2cb8 100644 --- a/serving/src/test/java/feast/serving/service/RedisBackedJobServiceTest.java +++ b/serving/src/test/java/feast/serving/service/RedisBackedJobServiceTest.java @@ -26,6 +26,7 @@ import redis.embedded.RedisServer; public class RedisBackedJobServiceTest { + private static Integer REDIS_PORT = 51235; private RedisServer redis; @@ -41,7 +42,7 @@ public void teardown() { } @Test - public void shouldRecoverIfRedisConnectionIsLost() throws IOException { + public void shouldRecoverIfRedisConnectionIsLost() { RedisClient client = RedisClient.create(RedisURI.create("localhost", REDIS_PORT)); RedisBackedJobService jobService = new RedisBackedJobService(client.connect(new ByteArrayCodec())); diff --git a/serving/src/test/java/feast/serving/util/mappers/YamlToProtoMapperTest.java b/serving/src/test/java/feast/serving/util/mappers/YamlToProtoMapperTest.java deleted file mode 100644 index 6f95f5307b..0000000000 --- a/serving/src/test/java/feast/serving/util/mappers/YamlToProtoMapperTest.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * SPDX-License-Identifier: 
Apache-2.0 - * Copyright 2018-2019 The Feast Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package feast.serving.util.mappers; - -import static org.hamcrest.core.IsEqual.equalTo; -import static org.junit.Assert.*; - -import feast.core.StoreProto.Store; -import feast.core.StoreProto.Store.RedisConfig; -import feast.core.StoreProto.Store.StoreType; -import feast.core.StoreProto.Store.Subscription; -import java.io.IOException; -import org.junit.Test; - -public class YamlToProtoMapperTest { - - @Test - public void shouldConvertYamlToProto() throws IOException { - String yaml = - "name: test\n" - + "type: REDIS\n" - + "redis_config:\n" - + " host: localhost\n" - + " port: 6379\n" - + "subscriptions:\n" - + "- project: \"*\"\n" - + " name: \"*\"\n" - + " version: \"*\"\n"; - Store store = YamlToProtoMapper.yamlToStoreProto(yaml); - Store expected = - Store.newBuilder() - .setName("test") - .setType(StoreType.REDIS) - .setRedisConfig(RedisConfig.newBuilder().setHost("localhost").setPort(6379)) - .addSubscriptions( - Subscription.newBuilder().setProject("*").setName("*").setVersion("*")) - .build(); - assertThat(store, equalTo(expected)); - } -} diff --git a/storage/api/src/main/java/feast/storage/api/retriever/FeatureSetRequest.java b/storage/api/src/main/java/feast/storage/api/retriever/FeatureSetRequest.java index d181abfbe6..3482529a97 100644 --- a/storage/api/src/main/java/feast/storage/api/retriever/FeatureSetRequest.java +++ b/storage/api/src/main/java/feast/storage/api/retriever/FeatureSetRequest.java @@ -18,8 +18,8 @@ import com.google.auto.value.AutoValue; import com.google.common.collect.ImmutableSet; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.serving.ServingAPIProto.FeatureReference; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.serving.ServingAPIProto.FeatureReference; import java.util.List; import java.util.Map; import java.util.stream.Collectors; diff --git a/storage/api/src/main/java/feast/storage/api/retriever/HistoricalRetrievalResult.java b/storage/api/src/main/java/feast/storage/api/retriever/HistoricalRetrievalResult.java index a81ce77625..6127ce616f 100644 --- a/storage/api/src/main/java/feast/storage/api/retriever/HistoricalRetrievalResult.java +++ b/storage/api/src/main/java/feast/storage/api/retriever/HistoricalRetrievalResult.java @@ -17,8 +17,8 @@ package feast.storage.api.retriever; import com.google.auto.value.AutoValue; -import feast.serving.ServingAPIProto.DataFormat; -import feast.serving.ServingAPIProto.JobStatus; +import feast.proto.serving.ServingAPIProto.DataFormat; +import feast.proto.serving.ServingAPIProto.JobStatus; import java.io.Serializable; import java.util.List; import javax.annotation.Nullable; diff --git a/storage/api/src/main/java/feast/storage/api/retriever/HistoricalRetriever.java b/storage/api/src/main/java/feast/storage/api/retriever/HistoricalRetriever.java index 95a89c1a3c..f07a6cd0df 100644 --- 
a/storage/api/src/main/java/feast/storage/api/retriever/HistoricalRetriever.java +++ b/storage/api/src/main/java/feast/storage/api/retriever/HistoricalRetriever.java @@ -16,7 +16,7 @@ */ package feast.storage.api.retriever; -import feast.serving.ServingAPIProto.DatasetSource; +import feast.proto.serving.ServingAPIProto.DatasetSource; import java.util.List; /** diff --git a/storage/api/src/main/java/feast/storage/api/retriever/OnlineRetriever.java b/storage/api/src/main/java/feast/storage/api/retriever/OnlineRetriever.java index 5eb27b995e..45d6b35263 100644 --- a/storage/api/src/main/java/feast/storage/api/retriever/OnlineRetriever.java +++ b/storage/api/src/main/java/feast/storage/api/retriever/OnlineRetriever.java @@ -16,8 +16,8 @@ */ package feast.storage.api.retriever; -import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; -import feast.types.FeatureRowProto.FeatureRow; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; +import feast.proto.types.FeatureRowProto.FeatureRow; import java.util.List; /** diff --git a/storage/api/src/main/java/feast/storage/api/writer/FailedElement.java b/storage/api/src/main/java/feast/storage/api/writer/FailedElement.java index d582341477..c6db877216 100644 --- a/storage/api/src/main/java/feast/storage/api/writer/FailedElement.java +++ b/storage/api/src/main/java/feast/storage/api/writer/FailedElement.java @@ -39,9 +39,6 @@ public abstract class FailedElement { @Nullable public abstract String getFeatureSetName(); - @Nullable - public abstract String getFeatureSetVersion(); - @Nullable public abstract String getTransformName(); @@ -66,8 +63,6 @@ public abstract static class Builder { public abstract Builder setFeatureSetName(String featureSetName); - public abstract Builder setFeatureSetVersion(String featureSetVersion); - public abstract Builder setJobName(String jobName); public abstract Builder setTransformName(String transformName); diff --git a/storage/api/src/main/java/feast/storage/api/writer/FeatureSink.java b/storage/api/src/main/java/feast/storage/api/writer/FeatureSink.java index 3dfe7e8f10..f49ea9f423 100644 --- a/storage/api/src/main/java/feast/storage/api/writer/FeatureSink.java +++ b/storage/api/src/main/java/feast/storage/api/writer/FeatureSink.java @@ -16,8 +16,8 @@ */ package feast.storage.api.writer; -import feast.core.FeatureSetProto; -import feast.types.FeatureRowProto.FeatureRow; +import feast.proto.core.FeatureSetProto; +import feast.proto.types.FeatureRowProto.FeatureRow; import java.io.Serializable; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.values.PCollection; diff --git a/storage/api/src/main/java/feast/storage/api/writer/WriteResult.java b/storage/api/src/main/java/feast/storage/api/writer/WriteResult.java index e378c2b46a..abe06e41ee 100644 --- a/storage/api/src/main/java/feast/storage/api/writer/WriteResult.java +++ b/storage/api/src/main/java/feast/storage/api/writer/WriteResult.java @@ -17,7 +17,7 @@ package feast.storage.api.writer; import com.google.common.collect.ImmutableMap; -import feast.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FeatureRowProto.FeatureRow; import java.io.Serializable; import java.util.Map; import org.apache.beam.sdk.Pipeline; diff --git a/storage/api/src/main/java/feast/storage/common/testing/TestUtil.java b/storage/api/src/main/java/feast/storage/common/testing/TestUtil.java index 6047a93dc1..43a96e97ef 100644 --- a/storage/api/src/main/java/feast/storage/common/testing/TestUtil.java +++ 
b/storage/api/src/main/java/feast/storage/common/testing/TestUtil.java @@ -18,12 +18,12 @@ import com.google.protobuf.ByteString; import com.google.protobuf.util.Timestamps; -import feast.core.FeatureSetProto.FeatureSet; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.FeatureRowProto.FeatureRow.Builder; -import feast.types.FieldProto.Field; -import feast.types.ValueProto.*; +import feast.proto.core.FeatureSetProto.FeatureSet; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FeatureRowProto.FeatureRow.Builder; +import feast.proto.types.FieldProto.Field; +import feast.proto.types.ValueProto.*; import java.util.concurrent.ThreadLocalRandom; import org.apache.commons.lang3.RandomStringUtils; @@ -87,7 +87,7 @@ public static FeatureRow createRandomFeatureRow(FeatureSet featureSet, int rando private static String getFeatureSetReference(FeatureSet featureSet) { FeatureSetSpec spec = featureSet.getSpec(); - return String.format("%s/%s:%d", spec.getProject(), spec.getName(), spec.getVersion()); + return String.format("%s/%s", spec.getProject(), spec.getName()); } /** diff --git a/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/common/TypeUtil.java b/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/common/TypeUtil.java index dcd1309317..a8e4cda678 100644 --- a/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/common/TypeUtil.java +++ b/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/common/TypeUtil.java @@ -17,7 +17,7 @@ package feast.storage.connectors.bigquery.common; import com.google.cloud.bigquery.StandardSQLTypeName; -import feast.types.ValueProto; +import feast.proto.types.ValueProto; import java.util.HashMap; import java.util.Map; @@ -54,7 +54,7 @@ public class TypeUtil { } /** - * Converts {@link feast.types.ValueProto.ValueType} to its corresponding {@link + * Converts {@link feast.proto.types.ValueProto.ValueType} to its corresponding {@link * StandardSQLTypeName} * * @param valueType value type to convert diff --git a/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/retriever/BigQueryHistoricalRetriever.java b/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/retriever/BigQueryHistoricalRetriever.java index 27ba07e82e..4b2ee45543 100644 --- a/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/retriever/BigQueryHistoricalRetriever.java +++ b/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/retriever/BigQueryHistoricalRetriever.java @@ -24,8 +24,9 @@ import com.google.cloud.bigquery.*; import com.google.cloud.storage.Blob; import com.google.cloud.storage.Storage; -import feast.serving.ServingAPIProto; -import feast.serving.ServingAPIProto.DatasetSource; +import com.google.cloud.storage.StorageOptions; +import feast.proto.serving.ServingAPIProto; +import feast.proto.serving.ServingAPIProto.DatasetSource; import feast.storage.api.retriever.FeatureSetRequest; import feast.storage.api.retriever.HistoricalRetrievalResult; import feast.storage.api.retriever.HistoricalRetriever; @@ -33,6 +34,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.UUID; import java.util.concurrent.*; import java.util.stream.Collectors; @@ -48,6 +50,36 @@ public abstract class 
BigQueryHistoricalRetriever implements HistoricalRetriever public static final long TEMP_TABLE_EXPIRY_DURATION_MS = Duration.ofDays(1).toMillis(); private static final long SUBQUERY_TIMEOUT_SECS = 900; // 15 minutes + public static HistoricalRetriever create(Map config) { + + BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); + Storage storage = StorageOptions.getDefaultInstance().getService(); + + String jobStagingLocation = config.get("staging_location"); + if (!jobStagingLocation.contains("://")) { + throw new IllegalArgumentException( + String.format("jobStagingLocation is not a valid URI: %s", jobStagingLocation)); + } + if (jobStagingLocation.endsWith("/")) { + jobStagingLocation = jobStagingLocation.substring(0, jobStagingLocation.length() - 1); + } + if (!jobStagingLocation.startsWith("gs://")) { + throw new IllegalArgumentException( + "Store type BIGQUERY requires job staging location to be a valid and existing Google Cloud Storage URI. Invalid staging location: " + + jobStagingLocation); + } + + return builder() + .setBigquery(bigquery) + .setDatasetId(config.get("dataset_id")) + .setProjectId(config.get("project_id")) + .setJobStagingLocation(jobStagingLocation) + .setInitialRetryDelaySecs(Integer.parseInt(config.get("initial_retry_delay_seconds"))) + .setTotalTimeoutSecs(Integer.parseInt(config.get("total_timeout_seconds"))) + .setStorage(storage) + .build(); + } + public abstract String projectId(); public abstract String datasetId(); diff --git a/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/retriever/FeatureSetQueryInfo.java b/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/retriever/FeatureSetQueryInfo.java index 5a7d56e984..befdc56490 100644 --- a/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/retriever/FeatureSetQueryInfo.java +++ b/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/retriever/FeatureSetQueryInfo.java @@ -16,29 +16,27 @@ */ package feast.storage.connectors.bigquery.retriever; +import feast.proto.serving.ServingAPIProto.FeatureReference; import java.util.List; public class FeatureSetQueryInfo { private final String project; private final String name; - private final int version; private final long maxAge; private final List entities; - private final List features; + private final List features; private final String table; public FeatureSetQueryInfo( String project, String name, - int version, long maxAge, List entities, - List features, + List features, String table) { this.project = project; this.name = name; - this.version = version; this.maxAge = maxAge; this.entities = entities; this.features = features; @@ -49,7 +47,6 @@ public FeatureSetQueryInfo(FeatureSetQueryInfo featureSetInfo, String table) { this.project = featureSetInfo.getProject(); this.name = featureSetInfo.getName(); - this.version = featureSetInfo.getVersion(); this.maxAge = featureSetInfo.getMaxAge(); this.entities = featureSetInfo.getEntities(); this.features = featureSetInfo.getFeatures(); @@ -64,10 +61,6 @@ public String getName() { return name; } - public int getVersion() { - return version; - } - public long getMaxAge() { return maxAge; } @@ -76,7 +69,7 @@ public List getEntities() { return entities; } - public List getFeatures() { + public List getFeatures() { return features; } diff --git a/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/retriever/QueryTemplater.java 
b/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/retriever/QueryTemplater.java index cba997b6ab..969efb36c3 100644 --- a/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/retriever/QueryTemplater.java +++ b/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/retriever/QueryTemplater.java @@ -20,9 +20,9 @@ import com.google.protobuf.Duration; import com.mitchellbosecke.pebble.PebbleEngine; import com.mitchellbosecke.pebble.template.PebbleTemplate; -import feast.core.FeatureSetProto.EntitySpec; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.serving.ServingAPIProto.FeatureReference; +import feast.proto.core.FeatureSetProto.EntitySpec; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.serving.ServingAPIProto.FeatureReference; import feast.storage.api.retriever.FeatureSetRequest; import java.io.IOException; import java.io.StringWriter; @@ -79,19 +79,10 @@ public static List getFeatureSetInfos( Duration maxAge = spec.getMaxAge(); List fsEntities = spec.getEntitiesList().stream().map(EntitySpec::getName).collect(Collectors.toList()); - List features = - featureSetRequest.getFeatureReferences().stream() - .map(FeatureReference::getName) - .collect(Collectors.toList()); + List features = featureSetRequest.getFeatureReferences().asList(); featureSetInfos.add( new FeatureSetQueryInfo( - spec.getProject(), - spec.getName(), - spec.getVersion(), - maxAge.getSeconds(), - fsEntities, - features, - "")); + spec.getProject(), spec.getName(), maxAge.getSeconds(), fsEntities, features, "")); } return featureSetInfos; } diff --git a/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/writer/BigQueryFeatureSink.java b/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/writer/BigQueryFeatureSink.java index 8860db2622..8dffe62976 100644 --- a/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/writer/BigQueryFeatureSink.java +++ b/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/writer/BigQueryFeatureSink.java @@ -19,12 +19,12 @@ import com.google.auto.value.AutoValue; import com.google.cloud.bigquery.*; import com.google.common.collect.ImmutableMap; -import feast.core.FeatureSetProto; -import feast.core.StoreProto.Store.BigQueryConfig; +import feast.proto.core.FeatureSetProto; +import feast.proto.core.StoreProto.Store.BigQueryConfig; +import feast.proto.types.FeatureRowProto; import feast.storage.api.writer.FeatureSink; import feast.storage.api.writer.WriteResult; import feast.storage.connectors.bigquery.common.TypeUtil; -import feast.types.FeatureRowProto; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -42,6 +42,8 @@ public abstract class BigQueryFeatureSink implements FeatureSink { "Event time for the FeatureRow"; public static final String BIGQUERY_CREATED_TIMESTAMP_FIELD_DESCRIPTION = "Processing time of the FeatureRow ingestion in Feast"; + public static final String BIGQUERY_INGESTION_ID_FIELD_DESCRIPTION = + "Unique id identifying groups of rows that have been ingested together"; public static final String BIGQUERY_JOB_ID_FIELD_DESCRIPTION = "Feast import job ID for the FeatureRow"; @@ -57,9 +59,11 @@ public abstract class BigQueryFeatureSink implements FeatureSink { * your own client. 
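*
* <p>A minimal usage sketch (hypothetical names; assumes default application credentials and an
* already-populated spec map keyed by feature set reference):
*
* <pre>{@code
* Map<String, FeatureSetProto.FeatureSetSpec> specsByRef = new HashMap<>();
* specsByRef.put("project/driver_features", driverFeatureSetSpec);
* FeatureSink sink = BigQueryFeatureSink.fromConfig(bigQueryConfig, specsByRef);
* }</pre>
*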
* * @param config {@link BigQueryConfig} + * @param featureSetSpecs map of feature set references to their specs + * @return a configured {@link FeatureSink} + */ - public static BigQueryFeatureSink fromConfig(BigQueryConfig config) { + public static FeatureSink fromConfig( + BigQueryConfig config, Map featureSetSpecs) { return builder() .setDatasetId(config.getDatasetId()) .setProjectId(config.getProjectId()) @@ -96,20 +100,20 @@ public void prepareWrite(FeatureSetProto.FeatureSet featureSet) { bigquery.create(DatasetInfo.of(datasetId)); } String tableName = - String.format( - "%s_%s_v%d", - featureSetSpec.getProject(), featureSetSpec.getName(), featureSetSpec.getVersion()) + String.format("%s_%s", featureSetSpec.getProject(), featureSetSpec.getName()) .replaceAll("-", "_"); TableId tableId = TableId.of(datasetId.getProject(), datasetId.getDataset(), tableName); - // Return if there is an existing table Table table = bigquery.getTable(tableId); + TableDefinition tableDefinition = createBigQueryTableDefinition(table, featureSet.getSpec()); + TableInfo tableInfo = TableInfo.of(tableId, tableDefinition); if (table != null) { log.info( - "Writing to existing BigQuery table '{}:{}.{}'", - getProjectId(), + "Updating and writing to existing BigQuery table '{}:{}.{}'", + datasetId.getProject(), datasetId.getDataset(), tableName); + bigquery.update(tableInfo); return; } @@ -118,8 +122,6 @@ public void prepareWrite(FeatureSetProto.FeatureSet featureSet) { tableId.getTable(), datasetId.getDataset(), datasetId.getProject()); - TableDefinition tableDefinition = createBigQueryTableDefinition(featureSet.getSpec()); - TableInfo tableInfo = TableInfo.of(tableId, tableDefinition); bigquery.create(tableInfo); } @@ -127,8 +129,18 @@ public void prepareWrite(FeatureSetProto.FeatureSet featureSet) { public PTransform, WriteResult> writer() { return new BigQueryWrite(DatasetId.of(getProjectId(), getDatasetId())); } - - private TableDefinition createBigQueryTableDefinition(FeatureSetProto.FeatureSetSpec spec) { + /** + * Creates a BigQuery {@link TableDefinition} based on the provided FeatureSetSpec and the + * existing table, if any. If a table already exists, existing fields will be retained, and new + * fields present in the feature set will be appended to the existing FieldsList. + * + * @param existingTable existing {@link Table} retrieved using bigquery.getTable(). If the table + * does not exist, will be null. + * @param spec FeatureSet spec that this table is for + * @return {@link TableDefinition} containing all tombstoned and active fields. 
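*     For example (a hypothetical evolution; field names invented):
*     <pre>{@code
*     existing table : event_timestamp, created_timestamp, ingestion_id, job_id, avg_trips
*     new spec       : drops avg_trips, adds acc_rate
*     merged schema  : event_timestamp, created_timestamp, ingestion_id, job_id, avg_trips, acc_rate
*     }</pre>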
+ */ + private TableDefinition createBigQueryTableDefinition( + Table existingTable, FeatureSetProto.FeatureSetSpec spec) { List fields = new ArrayList<>(); log.info("Table will have the following fields:"); @@ -164,6 +176,8 @@ private TableDefinition createBigQueryTableDefinition(FeatureSetProto.FeatureSet "created_timestamp", Pair.of( StandardSQLTypeName.TIMESTAMP, BIGQUERY_CREATED_TIMESTAMP_FIELD_DESCRIPTION), + "ingestion_id", + Pair.of(StandardSQLTypeName.STRING, BIGQUERY_INGESTION_ID_FIELD_DESCRIPTION), "job_id", Pair.of(StandardSQLTypeName.STRING, BIGQUERY_JOB_ID_FIELD_DESCRIPTION)); for (Map.Entry> entry : @@ -180,9 +194,21 @@ private TableDefinition createBigQueryTableDefinition(FeatureSetProto.FeatureSet TimePartitioning.newBuilder(TimePartitioning.Type.DAY).setField("event_timestamp").build(); log.info("Table partitioning: " + timePartitioning.toString()); + List fieldsList = new ArrayList<>(); + if (existingTable != null) { + Schema existingSchema = existingTable.getDefinition().getSchema(); + fieldsList.addAll(existingSchema.getFields()); + } + + for (Field field : fields) { + if (!fieldsList.contains(field)) { + fieldsList.add(field); + } + } + return StandardTableDefinition.newBuilder() .setTimePartitioning(timePartitioning) - .setSchema(Schema.of(fields)) + .setSchema(Schema.of(FieldList.of(fieldsList))) .build(); } } diff --git a/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/writer/BigQueryWrite.java b/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/writer/BigQueryWrite.java index e3f5e5ae71..b1309e59b5 100644 --- a/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/writer/BigQueryWrite.java +++ b/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/writer/BigQueryWrite.java @@ -19,9 +19,9 @@ import com.google.api.services.bigquery.model.TableDataInsertAllResponse; import com.google.api.services.bigquery.model.TableRow; import com.google.cloud.bigquery.DatasetId; +import feast.proto.types.FeatureRowProto; import feast.storage.api.writer.FailedElement; import feast.storage.api.writer.WriteResult; -import feast.types.FeatureRowProto; import java.io.IOException; import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO; import org.apache.beam.sdk.io.gcp.bigquery.BigQueryInsertError; diff --git a/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/writer/FeatureRowToTableRow.java b/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/writer/FeatureRowToTableRow.java index 12833b31b8..f936c951fe 100644 --- a/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/writer/FeatureRowToTableRow.java +++ b/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/writer/FeatureRowToTableRow.java @@ -18,8 +18,8 @@ import com.google.api.services.bigquery.model.TableRow; import com.google.protobuf.util.Timestamps; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.FieldProto.Field; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FieldProto.Field; import java.util.Base64; import java.util.stream.Collectors; import org.apache.beam.sdk.transforms.SerializableFunction; @@ -31,6 +31,7 @@ public class FeatureRowToTableRow implements SerializableFunction { private static final String EVENT_TIMESTAMP_COLUMN = "event_timestamp"; private static final String CREATED_TIMESTAMP_COLUMN = "created_timestamp"; + private static final String INGESTION_ID_COLUMN = 
"ingestion_id"; private static final String JOB_ID_COLUMN = "job_id"; private final String jobId; @@ -47,6 +48,7 @@ public TableRow apply(FeatureRow featureRow) { TableRow tableRow = new TableRow(); tableRow.set(EVENT_TIMESTAMP_COLUMN, Timestamps.toString(featureRow.getEventTimestamp())); tableRow.set(CREATED_TIMESTAMP_COLUMN, Instant.now().toString()); + tableRow.set(INGESTION_ID_COLUMN, featureRow.getIngestionId()); tableRow.set(JOB_ID_COLUMN, jobId); for (Field field : featureRow.getFieldsList()) { diff --git a/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/writer/GetTableDestination.java b/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/writer/GetTableDestination.java index 5903d36b85..b7774ae7ec 100644 --- a/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/writer/GetTableDestination.java +++ b/storage/connectors/bigquery/src/main/java/feast/storage/connectors/bigquery/writer/GetTableDestination.java @@ -17,7 +17,7 @@ package feast.storage.connectors.bigquery.writer; import com.google.api.services.bigquery.model.TimePartitioning; -import feast.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FeatureRowProto.FeatureRow; import org.apache.beam.sdk.io.gcp.bigquery.TableDestination; import org.apache.beam.sdk.transforms.SerializableFunction; import org.apache.beam.sdk.values.ValueInSingleWindow; @@ -35,8 +35,7 @@ public GetTableDestination(String projectId, String datasetId) { @Override public TableDestination apply(ValueInSingleWindow input) { - String[] split = input.getValue().getFeatureSet().split(":"); - String[] splitName = split[0].split("/"); + String[] splitName = input.getValue().getFeatureSet().split("/"); TimePartitioning timePartitioning = new TimePartitioning() @@ -44,8 +43,7 @@ public TableDestination apply(ValueInSingleWindow input) { .setField(FeatureRowToTableRow.getEventTimestampColumn()); return new TableDestination( - String.format( - "%s:%s.%s_%s_v%s", projectId, datasetId, splitName[0], splitName[1], split[1]), + String.format("%s:%s.%s_%s", projectId, datasetId, splitName[0], splitName[1]), String.format("Feast table for %s", input.getValue().getFeatureSet()), timePartitioning); } diff --git a/storage/connectors/bigquery/src/main/resources/templates/join_featuresets.sql b/storage/connectors/bigquery/src/main/resources/templates/join_featuresets.sql index 60b7c7d7a1..ddddac8d2c 100644 --- a/storage/connectors/bigquery/src/main/resources/templates/join_featuresets.sql +++ b/storage/connectors/bigquery/src/main/resources/templates/join_featuresets.sql @@ -7,8 +7,8 @@ SELECT * FROM `{{ leftTableName }}` LEFT JOIN ( SELECT uuid, - {% for featureName in featureSet.features %} - {{ featureSet.project }}_{{ featureName }}_v{{ featureSet.version }}{% if loop.last %}{% else %}, {% endif %} + {% for feature in featureSet.features %} + {{ featureSet.project }}__{{ featureSet.name }}__{{ feature.name }}{% if loop.last %}{% else %}, {% endif %} {% endfor %} FROM `{{ featureSet.table }}` ) USING (uuid) @@ -17,8 +17,8 @@ LEFT JOIN ( event_timestamp, {{ entities | join(', ') }} {% for featureSet in featureSets %} - {% for featureName in featureSet.features %} - ,{{ featureSet.project }}_{{ featureName }}_v{{ featureSet.version }} as {{ featureName }} + {% for feature in featureSet.features %} + ,{{ featureSet.project }}__{{ featureSet.name }}__{{ feature.name }} as {% if feature.featureSet != "" %}{{ featureSet.name }}__{% endif %}{{ feature.name }} {% endfor %} {% endfor %} -FROM joined \ 
No newline at end of file +FROM joined diff --git a/storage/connectors/bigquery/src/main/resources/templates/single_featureset_pit_join.sql b/storage/connectors/bigquery/src/main/resources/templates/single_featureset_pit_join.sql index fb4c555b52..24bdab2c29 100644 --- a/storage/connectors/bigquery/src/main/resources/templates/single_featureset_pit_join.sql +++ b/storage/connectors/bigquery/src/main/resources/templates/single_featureset_pit_join.sql @@ -13,7 +13,7 @@ SELECT -- event_timestamp contains the timestamps to join onto event_timestamp, -- the feature_timestamp, i.e. the latest occurrence of the requested feature relative to the entity_dataset timestamp - NULL as {{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, + NULL as {{ featureSet.project }}_{{ featureSet.name }}_feature_timestamp, -- created timestamp of the feature at the corresponding feature_timestamp NULL as created_timestamp, -- select only entities belonging to this feature set @@ -25,11 +25,11 @@ UNION ALL SELECT NULL as uuid, event_timestamp, - event_timestamp as {{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, + event_timestamp as {{ featureSet.project }}_{{ featureSet.name }}_feature_timestamp, created_timestamp, {{ featureSet.entities | join(', ')}}, false AS is_entity_table -FROM `{{projectId}}.{{datasetId}}.{{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}` WHERE event_timestamp <= '{{maxTimestamp}}' +FROM `{{projectId}}.{{datasetId}}.{{ featureSet.project }}_{{ featureSet.name }}` WHERE event_timestamp <= '{{maxTimestamp}}' {% if featureSet.maxAge == 0 %}{% else %}AND event_timestamp >= Timestamp_sub(TIMESTAMP '{{ minTimestamp }}', interval {{ featureSet.maxAge }} second){% endif %} ), /* @@ -47,8 +47,8 @@ SELECT uuid, event_timestamp, {{ featureSet.entities | join(', ')}}, - {% for featureName in featureSet.features %} - IF(event_timestamp >= {{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp {% if featureSet.maxAge == 0 %}{% else %}AND Timestamp_sub(event_timestamp, interval {{ featureSet.maxAge }} second) < {{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp{% endif %}, {{ featureSet.project }}_{{ featureName }}_v{{ featureSet.version }}, NULL) as {{ featureSet.project }}_{{ featureName }}_v{{ featureSet.version }}{% if loop.last %}{% else %}, {% endif %} + {% for feature in featureSet.features %} + IF(event_timestamp >= {{ featureSet.project }}_{{ featureSet.name }}_feature_timestamp {% if featureSet.maxAge == 0 %}{% else %}AND Timestamp_sub(event_timestamp, interval {{ featureSet.maxAge }} second) < {{ featureSet.project }}_{{ featureSet.name }}_feature_timestamp{% endif %}, {{ featureSet.project }}__{{ featureSet.name }}__{{ feature.name }}, NULL) as {{ featureSet.project }}__{{ featureSet.name }}__{{ feature.name }}{% if loop.last %}{% else %}, {% endif %} {% endfor %} FROM ( SELECT @@ -56,7 +56,7 @@ SELECT event_timestamp, {{ featureSet.entities | join(', ')}}, FIRST_VALUE(created_timestamp IGNORE NULLS) over w AS created_timestamp, - FIRST_VALUE({{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp IGNORE NULLS) over w AS {{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, + FIRST_VALUE({{ featureSet.project }}_{{ featureSet.name }}_feature_timestamp IGNORE NULLS) over w AS {{ featureSet.project }}_{{ featureSet.name }}_feature_timestamp, 
is_entity_table FROM union_features WINDOW w AS (PARTITION BY {{ featureSet.entities | join(', ') }} ORDER BY event_timestamp DESC, is_entity_table DESC, created_timestamp DESC ROWS BETWEEN CURRENT ROW AND UNBOUNDED FOLLOWING) @@ -67,15 +67,15 @@ WINDOW w AS (PARTITION BY {{ featureSet.entities | join(', ') }} ORDER BY event_ */ LEFT JOIN ( SELECT - event_timestamp as {{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, + event_timestamp as {{ featureSet.project }}_{{ featureSet.name }}_feature_timestamp, created_timestamp, {{ featureSet.entities | join(', ')}}, - {% for featureName in featureSet.features %} - {{ featureName }} as {{ featureSet.project }}_{{ featureName }}_v{{ featureSet.version }}{% if loop.last %}{% else %}, {% endif %} + {% for feature in featureSet.features %} + {{ feature.name }} as {{ featureSet.project }}__{{ featureSet.name }}__{{ feature.name }}{% if loop.last %}{% else %}, {% endif %} {% endfor %} -FROM `{{ projectId }}.{{ datasetId }}.{{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}` WHERE event_timestamp <= '{{maxTimestamp}}' +FROM `{{ projectId }}.{{ datasetId }}.{{ featureSet.project }}_{{ featureSet.name }}` WHERE event_timestamp <= '{{maxTimestamp}}' {% if featureSet.maxAge == 0 %}{% else %}AND event_timestamp >= Timestamp_sub(TIMESTAMP '{{ minTimestamp }}', interval {{ featureSet.maxAge }} second){% endif %} -) USING ({{ featureSet.project }}_{{ featureSet.name }}_v{{ featureSet.version }}_feature_timestamp, created_timestamp, {{ featureSet.entities | join(', ')}}) +) USING ({{ featureSet.project }}_{{ featureSet.name }}_feature_timestamp, created_timestamp, {{ featureSet.entities | join(', ')}}) WHERE is_entity_table ) /* diff --git a/storage/connectors/pom.xml b/storage/connectors/pom.xml index b52668a31a..6cd949acfd 100644 --- a/storage/connectors/pom.xml +++ b/storage/connectors/pom.xml @@ -31,6 +31,11 @@ + + + org.jacoco + jacoco-maven-plugin + diff --git a/storage/connectors/redis/pom.xml b/storage/connectors/redis/pom.xml index 6c50895bd2..2a19c0e828 100644 --- a/storage/connectors/redis/pom.xml +++ b/storage/connectors/redis/pom.xml @@ -70,6 +70,13 @@ test + + net.ishiis.redis + redis-unit + 1.0.3 + test + + junit diff --git a/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/FeatureRowDecoder.java b/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/FeatureRowDecoder.java index a5506028cb..fd9556841e 100644 --- a/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/FeatureRowDecoder.java +++ b/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/FeatureRowDecoder.java @@ -16,10 +16,10 @@ */ package feast.storage.connectors.redis.retriever; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.FeatureSetProto.FeatureSpec; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.FieldProto.Field; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.FeatureSetProto.FeatureSpec; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FieldProto.Field; import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; diff --git a/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisClusterOnlineRetriever.java b/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisClusterOnlineRetriever.java new file mode 100644 index 
0000000000..18619252c2 --- /dev/null +++ b/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisClusterOnlineRetriever.java @@ -0,0 +1,223 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.storage.connectors.redis.retriever; + +import com.google.protobuf.AbstractMessageLite; +import com.google.protobuf.InvalidProtocolBufferException; +import feast.proto.core.FeatureSetProto.EntitySpec; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.serving.ServingAPIProto.FeatureReference; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; +import feast.proto.storage.RedisProto.RedisKey; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FieldProto.Field; +import feast.proto.types.ValueProto.Value; +import feast.storage.api.retriever.FeatureSetRequest; +import feast.storage.api.retriever.OnlineRetriever; +import io.grpc.Status; +import io.lettuce.core.RedisURI; +import io.lettuce.core.cluster.RedisClusterClient; +import io.lettuce.core.cluster.api.StatefulRedisClusterConnection; +import io.lettuce.core.cluster.api.sync.RedisAdvancedClusterCommands; +import io.lettuce.core.codec.ByteArrayCodec; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import java.util.stream.Collectors; + +public class RedisClusterOnlineRetriever implements OnlineRetriever { + + private final RedisAdvancedClusterCommands syncCommands; + + private RedisClusterOnlineRetriever(StatefulRedisClusterConnection connection) { + this.syncCommands = connection.sync(); + } + + public static OnlineRetriever create(Map config) { + List redisURIList = + Arrays.stream(config.get("connection_string").split(",")) + .map( + hostPort -> { + String[] hostPortSplit = hostPort.trim().split(":"); + return RedisURI.create(hostPortSplit[0], Integer.parseInt(hostPortSplit[1])); + }) + .collect(Collectors.toList()); + + StatefulRedisClusterConnection connection = + RedisClusterClient.create(redisURIList).connect(new ByteArrayCodec()); + + return new RedisClusterOnlineRetriever(connection); + } + + public static OnlineRetriever create(StatefulRedisClusterConnection connection) { + return new RedisClusterOnlineRetriever(connection); + } + + /** + * Gets online features from redis. This method returns a list of {@link FeatureRow}s + * corresponding to each feature set spec. Each feature row in the list then corresponds to an + * {@link EntityRow} provided by the user. + * + * @param entityRows list of entity rows in the feature request + * @param featureSetRequests Map of {@link FeatureSetSpec} to feature references in the request + * tied to that feature set. 
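*     <p>Result shape, as a sketch (variable names illustrative): one inner list per feature set
*     request, and within it one {@link FeatureRow} per entity row, in request order:
*     <pre>{@code
*     List<List<FeatureRow>> result = retriever.getOnlineFeatures(entityRows, requests);
*     FeatureRow firstFeatureSetFirstEntity = result.get(0).get(0);
*     }</pre>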
+ * @return List of List of {@link FeatureRow} + */ + @Override + public List> getOnlineFeatures( + List entityRows, List featureSetRequests) { + + List> featureRows = new ArrayList<>(); + for (FeatureSetRequest featureSetRequest : featureSetRequests) { + List redisKeys = buildRedisKeys(entityRows, featureSetRequest.getSpec()); + try { + List featureRowsForFeatureSet = + sendAndProcessMultiGet( + redisKeys, + featureSetRequest.getSpec(), + featureSetRequest.getFeatureReferences().asList()); + featureRows.add(featureRowsForFeatureSet); + } catch (InvalidProtocolBufferException | ExecutionException e) { + throw Status.INTERNAL + .withDescription("Unable to parse protobuf while retrieving feature") + .withCause(e) + .asRuntimeException(); + } + } + return featureRows; + } + + private List buildRedisKeys(List entityRows, FeatureSetSpec featureSetSpec) { + String featureSetRef = generateFeatureSetStringRef(featureSetSpec); + List featureSetEntityNames = + featureSetSpec.getEntitiesList().stream() + .map(EntitySpec::getName) + .collect(Collectors.toList()); + List redisKeys = + entityRows.stream() + .map(row -> makeRedisKey(featureSetRef, featureSetEntityNames, row)) + .collect(Collectors.toList()); + return redisKeys; + } + + /** + * Create {@link RedisKey} + * + * @param featureSet featureSet reference of the feature. E.g. feature_set_1 + * @param featureSetEntityNames entity names that belong to the featureSet + * @param entityRow entityRow to build the key from + * @return {@link RedisKey} + */ + private RedisKey makeRedisKey( + String featureSet, List featureSetEntityNames, EntityRow entityRow) { + RedisKey.Builder builder = RedisKey.newBuilder().setFeatureSet(featureSet); + Map fieldsMap = entityRow.getFieldsMap(); + featureSetEntityNames.sort(String::compareTo); + for (int i = 0; i < featureSetEntityNames.size(); i++) { + String entityName = featureSetEntityNames.get(i); + + if (!fieldsMap.containsKey(entityName)) { + throw Status.INVALID_ARGUMENT + .withDescription( + String.format( + "Entity row fields \"%s\" does not contain required entity field \"%s\"", + fieldsMap.keySet().toString(), entityName)) + .asRuntimeException(); + } + + builder.addEntities( + Field.newBuilder().setName(entityName).setValue(fieldsMap.get(entityName))); + } + return builder.build(); + } + + private List sendAndProcessMultiGet( + List redisKeys, + FeatureSetSpec featureSetSpec, + List featureReferences) + throws InvalidProtocolBufferException, ExecutionException { + + List values = sendMultiGet(redisKeys); + List featureRows = new ArrayList<>(); + + FeatureRow.Builder nullFeatureRowBuilder = + FeatureRow.newBuilder().setFeatureSet(generateFeatureSetStringRef(featureSetSpec)); + for (FeatureReference featureReference : featureReferences) { + nullFeatureRowBuilder.addFields(Field.newBuilder().setName(featureReference.getName())); + } + + for (int i = 0; i < values.size(); i++) { + + byte[] value = values.get(i); + if (value == null) { + featureRows.add(nullFeatureRowBuilder.build()); + continue; + } + + FeatureRow featureRow = FeatureRow.parseFrom(value); + String featureSetRef = redisKeys.get(i).getFeatureSet(); + FeatureRowDecoder decoder = new FeatureRowDecoder(featureSetRef, featureSetSpec); + if (decoder.isEncodingValid(featureRow)) { + featureRow = decoder.decode(featureRow); + } else { + featureRows.add(nullFeatureRowBuilder.build()); + continue; + } + + featureRows.add(featureRow); + } + return featureRows; + } + + /** + * Send a list of get requests as a single mget + * + * @param keys list of {@link 
RedisKey} + * @return list of {@link FeatureRow} in primitive byte representation for each {@link RedisKey} + */ + private List sendMultiGet(List keys) { + try { + byte[][] binaryKeys = + keys.stream() + .map(AbstractMessageLite::toByteArray) + .collect(Collectors.toList()) + .toArray(new byte[0][0]); + return syncCommands.mget(binaryKeys).stream() + .map( + keyValue -> { + if (keyValue == null) { + return null; + } + return keyValue.getValueOrElse(null); + }) + .collect(Collectors.toList()); + } catch (Exception e) { + throw Status.NOT_FOUND + .withDescription("Unable to retrieve feature from Redis") + .withCause(e) + .asRuntimeException(); + } + } + + // TODO: Refactor this out to common package? + private static String generateFeatureSetStringRef(FeatureSetSpec featureSetSpec) { + String ref = String.format("%s/%s", featureSetSpec.getProject(), featureSetSpec.getName()); + return ref; + } +} diff --git a/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisOnlineRetriever.java b/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisOnlineRetriever.java index c8bb33de5f..0db4837c06 100644 --- a/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisOnlineRetriever.java +++ b/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/retriever/RedisOnlineRetriever.java @@ -18,19 +18,22 @@ import com.google.protobuf.AbstractMessageLite; import com.google.protobuf.InvalidProtocolBufferException; -import feast.core.FeatureSetProto.EntitySpec; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.serving.ServingAPIProto.FeatureReference; -import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; -import feast.storage.RedisProto.RedisKey; +import feast.proto.core.FeatureSetProto.EntitySpec; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.serving.ServingAPIProto.FeatureReference; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; +import feast.proto.storage.RedisProto.RedisKey; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FieldProto.Field; +import feast.proto.types.ValueProto.Value; import feast.storage.api.retriever.FeatureSetRequest; import feast.storage.api.retriever.OnlineRetriever; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.FieldProto.Field; -import feast.types.ValueProto.Value; import io.grpc.Status; +import io.lettuce.core.RedisClient; +import io.lettuce.core.RedisURI; import io.lettuce.core.api.StatefulRedisConnection; import io.lettuce.core.api.sync.RedisCommands; +import io.lettuce.core.codec.ByteArrayCodec; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -41,18 +44,32 @@ public class RedisOnlineRetriever implements OnlineRetriever { private final RedisCommands syncCommands; - public RedisOnlineRetriever(StatefulRedisConnection connection) { + private RedisOnlineRetriever(StatefulRedisConnection connection) { this.syncCommands = connection.sync(); } + public static OnlineRetriever create(Map config) { + + StatefulRedisConnection connection = + RedisClient.create( + RedisURI.create(config.get("host"), Integer.parseInt(config.get("port")))) + .connect(new ByteArrayCodec()); + + return new RedisOnlineRetriever(connection); + } + + public static OnlineRetriever create(StatefulRedisConnection connection) { + return new RedisOnlineRetriever(connection); + } + /** * Gets online features from redis. 
This method returns a list of {@link FeatureRow}s * corresponding to each feature set spec. Each feature row in the list then corresponds to an * {@link EntityRow} provided by the user. * * @param entityRows list of entity rows in the feature request - * @param featureSetRequests Map of {@link feast.core.FeatureSetProto.FeatureSetSpec} to feature - * references in the request tied to that feature set. + * @param featureSetRequests Map of {@link feast.proto.core.FeatureSetProto.FeatureSetSpec} to + * feature references in the request tied to that feature set. * @return List of List of {@link FeatureRow} */ @Override @@ -95,7 +112,7 @@ private List buildRedisKeys(List entityRows, FeatureSetSpec /** * Create {@link RedisKey} * - * @param featureSet featureSet reference of the feature. E.g. feature_set_1:1 + * @param featureSet featureSet reference of the feature. E.g. feature_set_1 * @param featureSetEntityNames entity names that belong to the featureSet * @param entityRow entityRow to build the key from * @return {@link RedisKey} @@ -196,9 +213,6 @@ private List sendMultiGet(List keys) { // TODO: Refactor this out to common package? private static String generateFeatureSetStringRef(FeatureSetSpec featureSetSpec) { String ref = String.format("%s/%s", featureSetSpec.getProject(), featureSetSpec.getName()); - if (featureSetSpec.getVersion() > 0) { - return ref + String.format(":%d", featureSetSpec.getVersion()); - } return ref; } } diff --git a/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/writer/RedisClusterIngestionClient.java b/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/writer/RedisClusterIngestionClient.java new file mode 100644 index 0000000000..389db4be3a --- /dev/null +++ b/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/writer/RedisClusterIngestionClient.java @@ -0,0 +1,132 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.storage.connectors.redis.writer; + +import com.google.common.collect.Lists; +import feast.proto.core.StoreProto; +import feast.storage.common.retry.BackOffExecutor; +import io.lettuce.core.LettuceFutures; +import io.lettuce.core.RedisFuture; +import io.lettuce.core.RedisURI; +import io.lettuce.core.cluster.RedisClusterClient; +import io.lettuce.core.cluster.api.StatefulRedisClusterConnection; +import io.lettuce.core.cluster.api.async.RedisAdvancedClusterAsyncCommands; +import io.lettuce.core.codec.ByteArrayCodec; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import org.joda.time.Duration; + +public class RedisClusterIngestionClient implements RedisIngestionClient { + + private final BackOffExecutor backOffExecutor; + private final List uriList; + private transient RedisClusterClient clusterClient; + private StatefulRedisClusterConnection connection; + private RedisAdvancedClusterAsyncCommands commands; + private List futures = Lists.newArrayList(); + + public RedisClusterIngestionClient(StoreProto.Store.RedisClusterConfig redisClusterConfig) { + this.uriList = + Arrays.stream(redisClusterConfig.getConnectionString().split(",")) + .map( + hostPort -> { + String[] hostPortSplit = hostPort.trim().split(":"); + return RedisURI.create(hostPortSplit[0], Integer.parseInt(hostPortSplit[1])); + }) + .collect(Collectors.toList()); + + long backoffMs = + redisClusterConfig.getInitialBackoffMs() > 0 ? redisClusterConfig.getInitialBackoffMs() : 1; + this.backOffExecutor = + new BackOffExecutor(redisClusterConfig.getMaxRetries(), Duration.millis(backoffMs)); + this.clusterClient = RedisClusterClient.create(uriList); + } + + @Override + public void setup() { + this.clusterClient = RedisClusterClient.create(this.uriList); + } + + @Override + public BackOffExecutor getBackOffExecutor() { + return this.backOffExecutor; + } + + @Override + public void shutdown() { + this.clusterClient.shutdown(); + } + + @Override + public void connect() { + if (!isConnected()) { + this.connection = clusterClient.connect(new ByteArrayCodec()); + this.commands = connection.async(); + } + } + + @Override + public boolean isConnected() { + return this.connection != null; + } + + @Override + public void sync() { + try { + LettuceFutures.awaitAll(60, TimeUnit.SECONDS, futures.toArray(new RedisFuture[0])); + } finally { + futures.clear(); + } + } + + @Override + public void pexpire(byte[] key, Long expiryMillis) { + futures.add(commands.pexpire(key, expiryMillis)); + } + + @Override + public void append(byte[] key, byte[] value) { + futures.add(commands.append(key, value)); + } + + @Override + public void set(byte[] key, byte[] value) { + futures.add(commands.set(key, value)); + } + + @Override + public void lpush(byte[] key, byte[] value) { + futures.add(commands.lpush(key, value)); + } + + @Override + public void rpush(byte[] key, byte[] value) { + futures.add(commands.rpush(key, value)); + } + + @Override + public void sadd(byte[] key, byte[] value) { + futures.add(commands.sadd(key, value)); + } + + @Override + public void zadd(byte[] key, Long score, byte[] value) { + futures.add(commands.zadd(key, score, value)); + } +} diff --git a/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/writer/RedisCustomIO.java b/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/writer/RedisCustomIO.java index cfe7771b32..b7fd68755c 100644 --- 
a/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/writer/RedisCustomIO.java +++ b/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/writer/RedisCustomIO.java @@ -16,19 +16,19 @@ */ package feast.storage.connectors.redis.writer; -import feast.core.FeatureSetProto.EntitySpec; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.FeatureSetProto.FeatureSpec; -import feast.core.StoreProto.Store.RedisConfig; -import feast.storage.RedisProto.RedisKey; -import feast.storage.RedisProto.RedisKey.Builder; +import feast.proto.core.FeatureSetProto.EntitySpec; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.FeatureSetProto.FeatureSpec; +import feast.proto.core.StoreProto.Store.*; +import feast.proto.storage.RedisProto.RedisKey; +import feast.proto.storage.RedisProto.RedisKey.Builder; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FieldProto.Field; +import feast.proto.types.ValueProto; import feast.storage.api.writer.FailedElement; import feast.storage.api.writer.WriteResult; import feast.storage.common.retry.Retriable; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.FieldProto.Field; -import feast.types.ValueProto; -import io.lettuce.core.RedisConnectionException; +import io.lettuce.core.RedisException; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; @@ -54,28 +54,31 @@ public class RedisCustomIO { private static final int DEFAULT_BATCH_SIZE = 1000; private static final int DEFAULT_TIMEOUT = 2000; - private static TupleTag successfulInsertsTag = new TupleTag<>("successfulInserts") {}; - private static TupleTag failedInsertsTupleTag = new TupleTag<>("failedInserts") {}; + private static TupleTag successfulInsertsTag = + new TupleTag("successfulInserts") {}; + private static TupleTag failedInsertsTupleTag = + new TupleTag("failedInserts") {}; private static final Logger log = LoggerFactory.getLogger(RedisCustomIO.class); private RedisCustomIO() {} - public static Write write(RedisConfig redisConfig, Map featureSetSpecs) { - return new Write(redisConfig, featureSetSpecs); + public static Write write( + RedisIngestionClient redisIngestionClient, Map featureSetSpecs) { + return new Write(redisIngestionClient, featureSetSpecs); } /** Writes data to a Redis server. 
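*
* <p>A hedged wiring sketch (assumes an already-constructed {@link RedisIngestionClient} and
* spec map; names are illustrative):
*
* <pre>{@code
* PCollection<FeatureRow> rows = ...;
* WriteResult result = rows.apply(RedisCustomIO.write(ingestionClient, featureSetSpecs));
* }</pre>
*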
*/ public static class Write extends PTransform, WriteResult> { private Map featureSetSpecs; - private RedisConfig redisConfig; + private RedisIngestionClient redisIngestionClient; private int batchSize; private int timeout; - public Write(RedisConfig redisConfig, Map featureSetSpecs) { - - this.redisConfig = redisConfig; + public Write( + RedisIngestionClient redisIngestionClient, Map featureSetSpecs) { + this.redisIngestionClient = redisIngestionClient; this.featureSetSpecs = featureSetSpecs; } @@ -93,7 +96,7 @@ public Write withTimeout(int timeout) { public WriteResult expand(PCollection input) { PCollectionTuple redisWrite = input.apply( - ParDo.of(new WriteDoFn(redisConfig, featureSetSpecs)) + ParDo.of(new WriteDoFn(redisIngestionClient, featureSetSpecs)) .withOutputTags(successfulInsertsTag, TupleTagList.of(failedInsertsTupleTag))); return WriteResult.in( input.getPipeline(), @@ -109,9 +112,10 @@ public static class WriteDoFn extends DoFn { private int timeout = DEFAULT_TIMEOUT; private RedisIngestionClient redisIngestionClient; - WriteDoFn(RedisConfig config, Map featureSetSpecs) { + WriteDoFn( + RedisIngestionClient redisIngestionClient, Map featureSetSpecs) { - this.redisIngestionClient = new RedisStandaloneIngestionClient(config); + this.redisIngestionClient = redisIngestionClient; this.featureSetSpecs = featureSetSpecs; } @@ -138,7 +142,7 @@ public void setup() { public void startBundle() { try { redisIngestionClient.connect(); - } catch (RedisConnectionException e) { + } catch (RedisException e) { log.error("Connection to redis cannot be established ", e); } featureRows.clear(); @@ -163,7 +167,7 @@ public void execute() throws ExecutionException, InterruptedException { @Override public Boolean isExceptionRetriable(Exception e) { - return e instanceof RedisConnectionException; + return e instanceof RedisException; } @Override diff --git a/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/writer/RedisFeatureSink.java b/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/writer/RedisFeatureSink.java index 63c8c68d9b..3ddaef7779 100644 --- a/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/writer/RedisFeatureSink.java +++ b/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/writer/RedisFeatureSink.java @@ -17,24 +17,48 @@ package feast.storage.connectors.redis.writer; import com.google.auto.value.AutoValue; -import feast.core.FeatureSetProto.FeatureSet; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.StoreProto.Store.RedisConfig; +import feast.proto.core.FeatureSetProto.FeatureSet; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.StoreProto; +import feast.proto.core.StoreProto.Store.RedisClusterConfig; +import feast.proto.core.StoreProto.Store.RedisConfig; +import feast.proto.types.FeatureRowProto.FeatureRow; import feast.storage.api.writer.FeatureSink; import feast.storage.api.writer.WriteResult; -import feast.types.FeatureRowProto.FeatureRow; import io.lettuce.core.RedisClient; import io.lettuce.core.RedisConnectionException; import io.lettuce.core.RedisURI; import java.util.Map; +import javax.annotation.Nullable; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.values.PCollection; @AutoValue public abstract class RedisFeatureSink implements FeatureSink { + /** + * Initialize a {@link RedisFeatureSink} from a {@link StoreProto.Store.RedisConfig}. 
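*
* <p>For instance (a sketch; assumes a reachable standalone Redis and a populated spec map):
*
* <pre>{@code
* RedisConfig config = RedisConfig.newBuilder().setHost("localhost").setPort(6379).build();
* FeatureSink sink = RedisFeatureSink.fromConfig(config, featureSetSpecs);
* }</pre>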
+ * + * @param redisConfig {@link RedisConfig} + * @param featureSetSpecs map of feature set references to their specs + * @return a configured {@link FeatureSink} + */ + public static FeatureSink fromConfig( + RedisConfig redisConfig, Map featureSetSpecs) { + return builder().setFeatureSetSpecs(featureSetSpecs).setRedisConfig(redisConfig).build(); + } + + public static FeatureSink fromConfig( + RedisClusterConfig redisConfig, Map featureSetSpecs) { + return builder().setFeatureSetSpecs(featureSetSpecs).setRedisClusterConfig(redisConfig).build(); + } + + @Nullable public abstract RedisConfig getRedisConfig(); + @Nullable + public abstract RedisClusterConfig getRedisClusterConfig(); + public abstract Map getFeatureSetSpecs(); public abstract Builder toBuilder(); @@ -47,6 +71,8 @@ public static Builder builder() { public abstract static class Builder { public abstract Builder setRedisConfig(RedisConfig redisConfig); + public abstract Builder setRedisClusterConfig(RedisClusterConfig redisConfig); + public abstract Builder setFeatureSetSpecs(Map featureSetSpecs); public abstract RedisFeatureSink build(); @@ -54,21 +80,36 @@ public abstract static class Builder { @Override public void prepareWrite(FeatureSet featureSet) { - RedisClient redisClient = - RedisClient.create(RedisURI.create(getRedisConfig().getHost(), getRedisConfig().getPort())); - try { - redisClient.connect(); - } catch (RedisConnectionException e) { + if (getRedisConfig() != null) { + RedisClient redisClient = + RedisClient.create( + RedisURI.create(getRedisConfig().getHost(), getRedisConfig().getPort())); + try { + redisClient.connect(); + } catch (RedisConnectionException e) { + throw new RuntimeException( + String.format( + "Failed to connect to Redis at host: '%s' port: '%d'. Please check that your Redis is running and accessible from Feast.", + getRedisConfig().getHost(), getRedisConfig().getPort())); + } + redisClient.shutdown(); + } else if (getRedisClusterConfig() == null) { throw new RuntimeException( - String.format( - "Failed to connect to Redis at host: '%s' port: '%d'. 
Please check that your Redis is running and accessible from Feast.", - getRedisConfig().getHost(), getRedisConfig().getPort())); + "At least one RedisConfig or RedisClusterConfig must be provided to Redis Sink"); } - redisClient.shutdown(); } @Override public PTransform, WriteResult> writer() { - return new RedisCustomIO.Write(getRedisConfig(), getFeatureSetSpecs()); + if (getRedisClusterConfig() != null) { + return new RedisCustomIO.Write( + new RedisClusterIngestionClient(getRedisClusterConfig()), getFeatureSetSpecs()); + } else if (getRedisConfig() != null) { + return new RedisCustomIO.Write( + new RedisStandaloneIngestionClient(getRedisConfig()), getFeatureSetSpecs()); + } else { + throw new RuntimeException( + "At least one RedisConfig or RedisClusterConfig must be provided to Redis Sink"); + } } } diff --git a/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/writer/RedisStandaloneIngestionClient.java b/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/writer/RedisStandaloneIngestionClient.java index 95bd7ad151..24591a1dc0 100644 --- a/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/writer/RedisStandaloneIngestionClient.java +++ b/storage/connectors/redis/src/main/java/feast/storage/connectors/redis/writer/RedisStandaloneIngestionClient.java @@ -17,7 +17,7 @@ package feast.storage.connectors.redis.writer; import com.google.common.collect.Lists; -import feast.core.StoreProto; +import feast.proto.core.StoreProto; import feast.storage.common.retry.BackOffExecutor; import io.lettuce.core.LettuceFutures; import io.lettuce.core.RedisClient; diff --git a/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/retriever/FeatureRowDecoderTest.java b/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/retriever/FeatureRowDecoderTest.java index 0f37e68941..63ad7aa26d 100644 --- a/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/retriever/FeatureRowDecoderTest.java +++ b/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/retriever/FeatureRowDecoderTest.java @@ -19,12 +19,12 @@ import static org.junit.Assert.*; import com.google.protobuf.Timestamp; -import feast.core.FeatureSetProto; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.types.FeatureRowProto; -import feast.types.FieldProto.Field; -import feast.types.ValueProto.Value; -import feast.types.ValueProto.ValueType; +import feast.proto.core.FeatureSetProto; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.types.FeatureRowProto; +import feast.proto.types.FieldProto.Field; +import feast.proto.types.ValueProto.Value; +import feast.proto.types.ValueProto.ValueType; import java.util.Collections; import org.junit.Test; diff --git a/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/retriever/RedisClusterOnlineRetrieverTest.java b/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/retriever/RedisClusterOnlineRetrieverTest.java new file mode 100644 index 0000000000..45842cef6e --- /dev/null +++ b/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/retriever/RedisClusterOnlineRetrieverTest.java @@ -0,0 +1,248 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2020 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package feast.storage.connectors.redis.retriever; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.when; +import static org.mockito.MockitoAnnotations.initMocks; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; +import com.google.protobuf.AbstractMessageLite; +import com.google.protobuf.Duration; +import com.google.protobuf.Timestamp; +import feast.proto.core.FeatureSetProto.EntitySpec; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.FeatureSetProto.FeatureSpec; +import feast.proto.serving.ServingAPIProto.FeatureReference; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; +import feast.proto.storage.RedisProto.RedisKey; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FieldProto.Field; +import feast.proto.types.ValueProto.Value; +import feast.storage.api.retriever.FeatureSetRequest; +import feast.storage.api.retriever.OnlineRetriever; +import io.lettuce.core.KeyValue; +import io.lettuce.core.cluster.api.StatefulRedisClusterConnection; +import io.lettuce.core.cluster.api.sync.RedisAdvancedClusterCommands; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +public class RedisClusterOnlineRetrieverTest { + + @Mock StatefulRedisClusterConnection connection; + + @Mock RedisAdvancedClusterCommands syncCommands; + + private OnlineRetriever redisClusterOnlineRetriever; + private byte[][] redisKeyList; + + @Before + public void setUp() { + initMocks(this); + when(connection.sync()).thenReturn(syncCommands); + redisClusterOnlineRetriever = RedisClusterOnlineRetriever.create(connection); + redisKeyList = + Lists.newArrayList( + RedisKey.newBuilder() + .setFeatureSet("project/featureSet") + .addAllEntities( + Lists.newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("a")).build())) + .build(), + RedisKey.newBuilder() + .setFeatureSet("project/featureSet") + .addAllEntities( + Lists.newArrayList( + Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), + Field.newBuilder().setName("entity2").setValue(strValue("b")).build())) + .build()) + .stream() + .map(AbstractMessageLite::toByteArray) + .collect(Collectors.toList()) + .toArray(new byte[0][0]); + } + + @Test + public void shouldReturnResponseWithValuesIfKeysPresent() { + FeatureSetRequest featureSetRequest = + FeatureSetRequest.newBuilder() + .setSpec(getFeatureSetSpec()) + .addFeatureReference( + FeatureReference.newBuilder().setName("feature1").setProject("project").build()) + .addFeatureReference( + FeatureReference.newBuilder().setName("feature2").setProject("project").build()) + .build(); + List entityRows = + ImmutableList.of( + EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(1)) + .putFields("entity2", 
strValue("a")) + .build(), + EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b")) + .build()); + + List featureRows = + Lists.newArrayList( + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .addAllFields( + Lists.newArrayList( + Field.newBuilder().setValue(intValue(1)).build(), + Field.newBuilder().setValue(intValue(1)).build())) + .build(), + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .addAllFields( + Lists.newArrayList( + Field.newBuilder().setValue(intValue(2)).build(), + Field.newBuilder().setValue(intValue(2)).build())) + .build()); + + List> featureRowBytes = + featureRows.stream() + .map(x -> KeyValue.from(new byte[1], Optional.of(x.toByteArray()))) + .collect(Collectors.toList()); + + redisClusterOnlineRetriever = RedisClusterOnlineRetriever.create(connection); + when(connection.sync()).thenReturn(syncCommands); + when(syncCommands.mget(redisKeyList)).thenReturn(featureRowBytes); + + List> expected = + ImmutableList.of( + Lists.newArrayList( + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .setFeatureSet("project/featureSet") + .addAllFields( + Lists.newArrayList( + Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) + .build(), + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .setFeatureSet("project/featureSet") + .addAllFields( + Lists.newArrayList( + Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(2)).build())) + .build())); + + List> actual = + redisClusterOnlineRetriever.getOnlineFeatures( + entityRows, ImmutableList.of(featureSetRequest)); + assertThat(actual, equalTo(expected)); + } + + @Test + public void shouldReturnResponseWithUnsetValuesIfKeysNotPresent() { + FeatureSetRequest featureSetRequest = + FeatureSetRequest.newBuilder() + .setSpec(getFeatureSetSpec()) + .addFeatureReference( + FeatureReference.newBuilder().setName("feature1").setProject("project").build()) + .addFeatureReference( + FeatureReference.newBuilder().setName("feature2").setProject("project").build()) + .build(); + List entityRows = + ImmutableList.of( + EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(1)) + .putFields("entity2", strValue("a")) + .build(), + EntityRow.newBuilder() + .setEntityTimestamp(Timestamp.newBuilder().setSeconds(100)) + .putFields("entity1", intValue(2)) + .putFields("entity2", strValue("b")) + .build()); + + List featureRows = + Lists.newArrayList( + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .addAllFields( + Lists.newArrayList( + Field.newBuilder().setValue(intValue(1)).build(), + Field.newBuilder().setValue(intValue(1)).build())) + .build()); + + List> featureRowBytes = + featureRows.stream() + .map(x -> KeyValue.from(new byte[1], Optional.of(x.toByteArray()))) + .collect(Collectors.toList()); + featureRowBytes.add(null); + + redisClusterOnlineRetriever = RedisClusterOnlineRetriever.create(connection); + when(connection.sync()).thenReturn(syncCommands); + when(syncCommands.mget(redisKeyList)).thenReturn(featureRowBytes); + + List> expected = + ImmutableList.of( + Lists.newArrayList( + FeatureRow.newBuilder() + 
.setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) + .setFeatureSet("project/featureSet") + .addAllFields( + Lists.newArrayList( + Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), + Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) + .build(), + FeatureRow.newBuilder() + .setFeatureSet("project/featureSet") + .addAllFields( + Lists.newArrayList( + Field.newBuilder().setName("feature1").build(), + Field.newBuilder().setName("feature2").build())) + .build())); + + List> actual = + redisClusterOnlineRetriever.getOnlineFeatures( + entityRows, ImmutableList.of(featureSetRequest)); + assertThat(actual, equalTo(expected)); + } + + private Value intValue(int val) { + return Value.newBuilder().setInt64Val(val).build(); + } + + private Value strValue(String val) { + return Value.newBuilder().setStringVal(val).build(); + } + + private FeatureSetSpec getFeatureSetSpec() { + return FeatureSetSpec.newBuilder() + .setProject("project") + .setName("featureSet") + .addEntities(EntitySpec.newBuilder().setName("entity1")) + .addEntities(EntitySpec.newBuilder().setName("entity2")) + .addFeatures(FeatureSpec.newBuilder().setName("feature1")) + .addFeatures(FeatureSpec.newBuilder().setName("feature2")) + .setMaxAge(Duration.newBuilder().setSeconds(30)) // default + .build(); + } +} diff --git a/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/retriever/RedisOnlineRetrieverTest.java b/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/retriever/RedisOnlineRetrieverTest.java index 11c216c5a0..adacacb941 100644 --- a/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/retriever/RedisOnlineRetrieverTest.java +++ b/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/retriever/RedisOnlineRetrieverTest.java @@ -26,16 +26,17 @@ import com.google.protobuf.AbstractMessageLite; import com.google.protobuf.Duration; import com.google.protobuf.Timestamp; -import feast.core.FeatureSetProto.EntitySpec; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.FeatureSetProto.FeatureSpec; -import feast.serving.ServingAPIProto.FeatureReference; -import feast.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; -import feast.storage.RedisProto.RedisKey; +import feast.proto.core.FeatureSetProto.EntitySpec; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.FeatureSetProto.FeatureSpec; +import feast.proto.serving.ServingAPIProto.FeatureReference; +import feast.proto.serving.ServingAPIProto.GetOnlineFeaturesRequest.EntityRow; +import feast.proto.storage.RedisProto.RedisKey; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FieldProto.Field; +import feast.proto.types.ValueProto.Value; import feast.storage.api.retriever.FeatureSetRequest; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.FieldProto.Field; -import feast.types.ValueProto.Value; +import feast.storage.api.retriever.OnlineRetriever; import io.lettuce.core.KeyValue; import io.lettuce.core.api.StatefulRedisConnection; import io.lettuce.core.api.sync.RedisCommands; @@ -52,25 +53,25 @@ public class RedisOnlineRetrieverTest { @Mock RedisCommands syncCommands; - private RedisOnlineRetriever redisOnlineRetriever; + private OnlineRetriever redisOnlineRetriever; private byte[][] redisKeyList; @Before public void setUp() { initMocks(this); when(connection.sync()).thenReturn(syncCommands); - redisOnlineRetriever = new RedisOnlineRetriever(connection); + 
redisOnlineRetriever = RedisOnlineRetriever.create(connection); redisKeyList = Lists.newArrayList( RedisKey.newBuilder() - .setFeatureSet("project/featureSet:1") + .setFeatureSet("project/featureSet") .addAllEntities( Lists.newArrayList( Field.newBuilder().setName("entity1").setValue(intValue(1)).build(), Field.newBuilder().setName("entity2").setValue(strValue("a")).build())) .build(), RedisKey.newBuilder() - .setFeatureSet("project/featureSet:1") + .setFeatureSet("project/featureSet") .addAllEntities( Lists.newArrayList( Field.newBuilder().setName("entity1").setValue(intValue(2)).build(), @@ -88,17 +89,9 @@ public void shouldReturnResponseWithValuesIfKeysPresent() { FeatureSetRequest.newBuilder() .setSpec(getFeatureSetSpec()) .addFeatureReference( - FeatureReference.newBuilder() - .setName("feature1") - .setVersion(1) - .setProject("project") - .build()) + FeatureReference.newBuilder().setName("feature1").setProject("project").build()) .addFeatureReference( - FeatureReference.newBuilder() - .setName("feature2") - .setVersion(1) - .setProject("project") - .build()) + FeatureReference.newBuilder().setName("feature2").setProject("project").build()) .build(); List entityRows = ImmutableList.of( @@ -135,16 +128,16 @@ public void shouldReturnResponseWithValuesIfKeysPresent() { .map(x -> KeyValue.from(new byte[1], Optional.of(x.toByteArray()))) .collect(Collectors.toList()); - redisOnlineRetriever = new RedisOnlineRetriever(connection); + redisOnlineRetriever = RedisOnlineRetriever.create(connection); when(connection.sync()).thenReturn(syncCommands); when(syncCommands.mget(redisKeyList)).thenReturn(featureRowBytes); List> expected = - List.of( + ImmutableList.of( Lists.newArrayList( FeatureRow.newBuilder() .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) - .setFeatureSet("project/featureSet:1") + .setFeatureSet("project/featureSet") .addAllFields( Lists.newArrayList( Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), @@ -152,7 +145,7 @@ public void shouldReturnResponseWithValuesIfKeysPresent() { .build(), FeatureRow.newBuilder() .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) - .setFeatureSet("project/featureSet:1") + .setFeatureSet("project/featureSet") .addAllFields( Lists.newArrayList( Field.newBuilder().setName("feature1").setValue(intValue(2)).build(), @@ -160,7 +153,7 @@ public void shouldReturnResponseWithValuesIfKeysPresent() { .build())); List> actual = - redisOnlineRetriever.getOnlineFeatures(entityRows, List.of(featureSetRequest)); + redisOnlineRetriever.getOnlineFeatures(entityRows, ImmutableList.of(featureSetRequest)); assertThat(actual, equalTo(expected)); } @@ -170,17 +163,9 @@ public void shouldReturnResponseWithUnsetValuesIfKeysNotPresent() { FeatureSetRequest.newBuilder() .setSpec(getFeatureSetSpec()) .addFeatureReference( - FeatureReference.newBuilder() - .setName("feature1") - .setVersion(1) - .setProject("project") - .build()) + FeatureReference.newBuilder().setName("feature1").setProject("project").build()) .addFeatureReference( - FeatureReference.newBuilder() - .setName("feature2") - .setVersion(1) - .setProject("project") - .build()) + FeatureReference.newBuilder().setName("feature2").setProject("project").build()) .build(); List entityRows = ImmutableList.of( @@ -211,23 +196,23 @@ public void shouldReturnResponseWithUnsetValuesIfKeysNotPresent() { .collect(Collectors.toList()); featureRowBytes.add(null); - redisOnlineRetriever = new RedisOnlineRetriever(connection); + redisOnlineRetriever = RedisOnlineRetriever.create(connection); 
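A reading aid for the cluster and standalone retriever tests around this point: the contract they pin down is that every entity row maps to one serialized `RedisKey`, whose feature-set reference is now `project/featureSet` with no `:version` suffix; all keys are fetched in a single `MGET`; and a miss comes back as a `FeatureRow` with named but unset fields rather than being dropped. A compact Python sketch of that contract, with plain dicts standing in for Redis and for the protobufs (`build_key` and friends are illustrative names only):

```python
# Conceptual sketch of the online-retrieval contract exercised by these tests.

def build_key(feature_set_ref, entity_row):
    # Keys now reference "project/featureSet" with no ":version" suffix;
    # entity fields must serialize in a deterministic order.
    return (feature_set_ref, tuple(sorted(entity_row.items())))

def get_online_features(store, feature_set_ref, feature_names, entity_rows):
    keys = [build_key(feature_set_ref, row) for row in entity_rows]
    values = [store.get(key) for key in keys]  # one MGET in the real client
    rows = []
    for value in values:
        if value is None:
            # Miss: named but unset fields, as asserted in
            # shouldReturnResponseWithUnsetValuesIfKeysNotPresent.
            rows.append({name: None for name in feature_names})
        else:
            rows.append(dict(zip(feature_names, value)))
    return rows

store = {build_key("project/featureSet", {"entity1": 1, "entity2": "a"}): (1, 1)}
entity_rows = [{"entity1": 1, "entity2": "a"}, {"entity1": 2, "entity2": "b"}]
print(get_online_features(store, "project/featureSet", ["feature1", "feature2"], entity_rows))
# [{'feature1': 1, 'feature2': 1}, {'feature1': None, 'feature2': None}]
```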
when(connection.sync()).thenReturn(syncCommands); when(syncCommands.mget(redisKeyList)).thenReturn(featureRowBytes); List> expected = - List.of( + ImmutableList.of( Lists.newArrayList( FeatureRow.newBuilder() .setEventTimestamp(Timestamp.newBuilder().setSeconds(100)) - .setFeatureSet("project/featureSet:1") + .setFeatureSet("project/featureSet") .addAllFields( Lists.newArrayList( Field.newBuilder().setName("feature1").setValue(intValue(1)).build(), Field.newBuilder().setName("feature2").setValue(intValue(1)).build())) .build(), FeatureRow.newBuilder() - .setFeatureSet("project/featureSet:1") + .setFeatureSet("project/featureSet") .addAllFields( Lists.newArrayList( Field.newBuilder().setName("feature1").build(), @@ -235,7 +220,7 @@ public void shouldReturnResponseWithUnsetValuesIfKeysNotPresent() { .build())); List> actual = - redisOnlineRetriever.getOnlineFeatures(entityRows, List.of(featureSetRequest)); + redisOnlineRetriever.getOnlineFeatures(entityRows, ImmutableList.of(featureSetRequest)); assertThat(actual, equalTo(expected)); } @@ -251,7 +236,6 @@ private FeatureSetSpec getFeatureSetSpec() { return FeatureSetSpec.newBuilder() .setProject("project") .setName("featureSet") - .setVersion(1) .addEntities(EntitySpec.newBuilder().setName("entity1")) .addEntities(EntitySpec.newBuilder().setName("entity2")) .addFeatures(FeatureSpec.newBuilder().setName("feature1")) diff --git a/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/writer/RedisClusterFeatureSinkTest.java b/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/writer/RedisClusterFeatureSinkTest.java new file mode 100644 index 0000000000..7bdc9bba2b --- /dev/null +++ b/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/writer/RedisClusterFeatureSinkTest.java @@ -0,0 +1,503 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * Copyright 2018-2019 The Feast Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package feast.storage.connectors.redis.writer; + +import static feast.storage.common.testing.TestUtil.field; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.MatcherAssert.assertThat; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.protobuf.Timestamp; +import feast.proto.core.FeatureSetProto.EntitySpec; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.FeatureSetProto.FeatureSpec; +import feast.proto.core.StoreProto.Store.RedisClusterConfig; +import feast.proto.storage.RedisProto.RedisKey; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FieldProto.Field; +import feast.proto.types.ValueProto.Value; +import feast.proto.types.ValueProto.ValueType.Enum; +import io.lettuce.core.RedisURI; +import io.lettuce.core.cluster.RedisClusterClient; +import io.lettuce.core.cluster.api.StatefulRedisClusterConnection; +import io.lettuce.core.cluster.api.sync.RedisClusterCommands; +import io.lettuce.core.codec.ByteArrayCodec; +import java.io.File; +import java.io.IOException; +import java.nio.file.Paths; +import java.util.*; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import net.ishiis.redis.unit.RedisCluster; +import org.apache.beam.sdk.testing.PAssert; +import org.apache.beam.sdk.testing.TestPipeline; +import org.apache.beam.sdk.transforms.Count; +import org.apache.beam.sdk.transforms.Create; +import org.apache.beam.sdk.values.PCollection; +import org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; + +public class RedisClusterFeatureSinkTest { + @Rule public transient TestPipeline p = TestPipeline.create(); + + private static String REDIS_CLUSTER_HOST = "localhost"; + private static int REDIS_CLUSTER_PORT1 = 6380; + private static int REDIS_CLUSTER_PORT2 = 6381; + private static int REDIS_CLUSTER_PORT3 = 6382; + private static String CONNECTION_STRING = "localhost:6380,localhost:6381,localhost:6382"; + private RedisCluster redisCluster; + private RedisClusterClient redisClusterClient; + private RedisClusterCommands redisClusterCommands; + + private RedisFeatureSink redisClusterFeatureSink; + + @Before + public void setUp() throws IOException { + redisCluster = new RedisCluster(REDIS_CLUSTER_PORT1, REDIS_CLUSTER_PORT2, REDIS_CLUSTER_PORT3); + redisCluster.start(); + redisClusterClient = + RedisClusterClient.create( + Arrays.asList( + RedisURI.create(REDIS_CLUSTER_HOST, REDIS_CLUSTER_PORT1), + RedisURI.create(REDIS_CLUSTER_HOST, REDIS_CLUSTER_PORT2), + RedisURI.create(REDIS_CLUSTER_HOST, REDIS_CLUSTER_PORT3))); + StatefulRedisClusterConnection connection = + redisClusterClient.connect(new ByteArrayCodec()); + redisClusterCommands = connection.sync(); + redisClusterCommands.setTimeout(java.time.Duration.ofMillis(600000)); + + FeatureSetSpec spec1 = + FeatureSetSpec.newBuilder() + .setName("fs") + .setProject("myproject") + .addEntities(EntitySpec.newBuilder().setName("entity").setValueType(Enum.INT64).build()) + .addFeatures( + FeatureSpec.newBuilder().setName("feature").setValueType(Enum.STRING).build()) + .build(); + + FeatureSetSpec spec2 = + FeatureSetSpec.newBuilder() + .setName("feature_set") + .setProject("myproject") + .addEntities( + EntitySpec.newBuilder() + .setName("entity_id_primary") + .setValueType(Enum.INT32) + .build()) + .addEntities( + EntitySpec.newBuilder() + 
.setName("entity_id_secondary") + .setValueType(Enum.STRING) + .build()) + .addFeatures( + FeatureSpec.newBuilder().setName("feature_1").setValueType(Enum.STRING).build()) + .addFeatures( + FeatureSpec.newBuilder().setName("feature_2").setValueType(Enum.INT64).build()) + .build(); + + Map specMap = + ImmutableMap.of("myproject/fs", spec1, "myproject/feature_set", spec2); + RedisClusterConfig redisClusterConfig = + RedisClusterConfig.newBuilder() + .setConnectionString(CONNECTION_STRING) + .setInitialBackoffMs(2000) + .setMaxRetries(4) + .build(); + + redisClusterFeatureSink = + RedisFeatureSink.builder() + .setFeatureSetSpecs(specMap) + .setRedisClusterConfig(redisClusterConfig) + .build(); + } + + static boolean deleteDirectory(File directoryToBeDeleted) { + File[] allContents = directoryToBeDeleted.listFiles(); + if (allContents != null) { + for (File file : allContents) { + deleteDirectory(file); + } + } + return directoryToBeDeleted.delete(); + } + + @After + public void teardown() { + redisCluster.stop(); + redisClusterClient.shutdown(); + deleteDirectory(new File(String.valueOf(Paths.get(System.getProperty("user.dir"), ".redis")))); + } + + @Test + public void shouldWriteToRedis() { + + HashMap kvs = new LinkedHashMap<>(); + kvs.put( + RedisKey.newBuilder() + .setFeatureSet("myproject/fs") + .addEntities(field("entity", 1, Enum.INT64)) + .build(), + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.getDefaultInstance()) + .addFields(Field.newBuilder().setValue(Value.newBuilder().setStringVal("one"))) + .build()); + kvs.put( + RedisKey.newBuilder() + .setFeatureSet("myproject/fs") + .addEntities(field("entity", 2, Enum.INT64)) + .build(), + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.getDefaultInstance()) + .addFields(Field.newBuilder().setValue(Value.newBuilder().setStringVal("two"))) + .build()); + + List featureRows = + ImmutableList.of( + FeatureRow.newBuilder() + .setFeatureSet("myproject/fs") + .addFields(field("entity", 1, Enum.INT64)) + .addFields(field("feature", "one", Enum.STRING)) + .build(), + FeatureRow.newBuilder() + .setFeatureSet("myproject/fs") + .addFields(field("entity", 2, Enum.INT64)) + .addFields(field("feature", "two", Enum.STRING)) + .build()); + + p.apply(Create.of(featureRows)).apply(redisClusterFeatureSink.writer()); + p.run(); + + kvs.forEach( + (key, value) -> { + byte[] actual = redisClusterCommands.get(key.toByteArray()); + assertThat(actual, equalTo(value.toByteArray())); + }); + } + + @Test(timeout = 15000) + public void shouldRetryFailConnection() throws InterruptedException { + HashMap kvs = new LinkedHashMap<>(); + kvs.put( + RedisKey.newBuilder() + .setFeatureSet("myproject/fs") + .addEntities(field("entity", 1, Enum.INT64)) + .build(), + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.getDefaultInstance()) + .addFields(Field.newBuilder().setValue(Value.newBuilder().setStringVal("one"))) + .build()); + + List featureRows = + ImmutableList.of( + FeatureRow.newBuilder() + .setFeatureSet("myproject/fs") + .addFields(field("entity", 1, Enum.INT64)) + .addFields(field("feature", "one", Enum.STRING)) + .build()); + + PCollection failedElementCount = + p.apply(Create.of(featureRows)) + .apply(redisClusterFeatureSink.writer()) + .getFailedInserts() + .apply(Count.globally()); + + redisCluster.stop(); + final ScheduledThreadPoolExecutor redisRestartExecutor = new ScheduledThreadPoolExecutor(1); + ScheduledFuture scheduledRedisRestart = + redisRestartExecutor.schedule( + () -> { + redisCluster.start(); + }, + 3, + TimeUnit.SECONDS); + 
+ PAssert.that(failedElementCount).containsInAnyOrder(0L); + p.run(); + scheduledRedisRestart.cancel(true); + + kvs.forEach( + (key, value) -> { + byte[] actual = redisClusterCommands.get(key.toByteArray()); + assertThat(actual, equalTo(value.toByteArray())); + }); + } + + @Test + public void shouldProduceFailedElementIfRetryExceeded() { + RedisClusterConfig redisClusterConfig = + RedisClusterConfig.newBuilder() + .setConnectionString(CONNECTION_STRING) + .setInitialBackoffMs(2000) + .setMaxRetries(1) + .build(); + + FeatureSetSpec spec1 = + FeatureSetSpec.newBuilder() + .setName("fs") + .setProject("myproject") + .addEntities(EntitySpec.newBuilder().setName("entity").setValueType(Enum.INT64).build()) + .addFeatures( + FeatureSpec.newBuilder().setName("feature").setValueType(Enum.STRING).build()) + .build(); + Map specMap = ImmutableMap.of("myproject/fs", spec1); + redisClusterFeatureSink = + RedisFeatureSink.builder() + .setFeatureSetSpecs(specMap) + .setRedisClusterConfig(redisClusterConfig) + .build(); + redisCluster.stop(); + + List featureRows = + ImmutableList.of( + FeatureRow.newBuilder() + .setFeatureSet("myproject/fs") + .addFields(field("entity", 1, Enum.INT64)) + .addFields(field("feature", "one", Enum.STRING)) + .build()); + + PCollection failedElementCount = + p.apply(Create.of(featureRows)) + .apply(redisClusterFeatureSink.writer()) + .getFailedInserts() + .apply(Count.globally()); + + PAssert.that(failedElementCount).containsInAnyOrder(1L); + p.run(); + } + + @Test + public void shouldConvertRowWithDuplicateEntitiesToValidKey() { + + FeatureRow offendingRow = + FeatureRow.newBuilder() + .setFeatureSet("myproject/feature_set") + .setEventTimestamp(Timestamp.newBuilder().setSeconds(10)) + .addFields( + Field.newBuilder() + .setName("entity_id_primary") + .setValue(Value.newBuilder().setInt32Val(1))) + .addFields( + Field.newBuilder() + .setName("entity_id_primary") + .setValue(Value.newBuilder().setInt32Val(2))) + .addFields( + Field.newBuilder() + .setName("entity_id_secondary") + .setValue(Value.newBuilder().setStringVal("a"))) + .addFields( + Field.newBuilder() + .setName("feature_1") + .setValue(Value.newBuilder().setStringVal("strValue1"))) + .addFields( + Field.newBuilder() + .setName("feature_2") + .setValue(Value.newBuilder().setInt64Val(1001))) + .build(); + + RedisKey expectedKey = + RedisKey.newBuilder() + .setFeatureSet("myproject/feature_set") + .addEntities( + Field.newBuilder() + .setName("entity_id_primary") + .setValue(Value.newBuilder().setInt32Val(1))) + .addEntities( + Field.newBuilder() + .setName("entity_id_secondary") + .setValue(Value.newBuilder().setStringVal("a"))) + .build(); + + FeatureRow expectedValue = + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(10)) + .addFields(Field.newBuilder().setValue(Value.newBuilder().setStringVal("strValue1"))) + .addFields(Field.newBuilder().setValue(Value.newBuilder().setInt64Val(1001))) + .build(); + + p.apply(Create.of(offendingRow)).apply(redisClusterFeatureSink.writer()); + + p.run(); + + byte[] actual = redisClusterCommands.get(expectedKey.toByteArray()); + assertThat(actual, equalTo(expectedValue.toByteArray())); + } + + @Test + public void shouldConvertRowWithOutOfOrderFieldsToValidKey() { + FeatureRow offendingRow = + FeatureRow.newBuilder() + .setFeatureSet("myproject/feature_set") + .setEventTimestamp(Timestamp.newBuilder().setSeconds(10)) + .addFields( + Field.newBuilder() + .setName("entity_id_secondary") + .setValue(Value.newBuilder().setStringVal("a"))) + .addFields( + 
Field.newBuilder() + .setName("entity_id_primary") + .setValue(Value.newBuilder().setInt32Val(1))) + .addFields( + Field.newBuilder() + .setName("feature_2") + .setValue(Value.newBuilder().setInt64Val(1001))) + .addFields( + Field.newBuilder() + .setName("feature_1") + .setValue(Value.newBuilder().setStringVal("strValue1"))) + .build(); + + RedisKey expectedKey = + RedisKey.newBuilder() + .setFeatureSet("myproject/feature_set") + .addEntities( + Field.newBuilder() + .setName("entity_id_primary") + .setValue(Value.newBuilder().setInt32Val(1))) + .addEntities( + Field.newBuilder() + .setName("entity_id_secondary") + .setValue(Value.newBuilder().setStringVal("a"))) + .build(); + + List expectedFields = + Arrays.asList( + Field.newBuilder().setValue(Value.newBuilder().setStringVal("strValue1")).build(), + Field.newBuilder().setValue(Value.newBuilder().setInt64Val(1001)).build()); + FeatureRow expectedValue = + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(10)) + .addAllFields(expectedFields) + .build(); + + p.apply(Create.of(offendingRow)).apply(redisClusterFeatureSink.writer()); + + p.run(); + + byte[] actual = redisClusterCommands.get(expectedKey.toByteArray()); + assertThat(actual, equalTo(expectedValue.toByteArray())); + } + + @Test + public void shouldMergeDuplicateFeatureFields() { + FeatureRow featureRowWithDuplicatedFeatureFields = + FeatureRow.newBuilder() + .setFeatureSet("myproject/feature_set") + .setEventTimestamp(Timestamp.newBuilder().setSeconds(10)) + .addFields( + Field.newBuilder() + .setName("entity_id_primary") + .setValue(Value.newBuilder().setInt32Val(1))) + .addFields( + Field.newBuilder() + .setName("entity_id_secondary") + .setValue(Value.newBuilder().setStringVal("a"))) + .addFields( + Field.newBuilder() + .setName("feature_1") + .setValue(Value.newBuilder().setStringVal("strValue1"))) + .addFields( + Field.newBuilder() + .setName("feature_1") + .setValue(Value.newBuilder().setStringVal("strValue1"))) + .addFields( + Field.newBuilder() + .setName("feature_2") + .setValue(Value.newBuilder().setInt64Val(1001))) + .build(); + + RedisKey expectedKey = + RedisKey.newBuilder() + .setFeatureSet("myproject/feature_set") + .addEntities( + Field.newBuilder() + .setName("entity_id_primary") + .setValue(Value.newBuilder().setInt32Val(1))) + .addEntities( + Field.newBuilder() + .setName("entity_id_secondary") + .setValue(Value.newBuilder().setStringVal("a"))) + .build(); + + FeatureRow expectedValue = + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(10)) + .addFields(Field.newBuilder().setValue(Value.newBuilder().setStringVal("strValue1"))) + .addFields(Field.newBuilder().setValue(Value.newBuilder().setInt64Val(1001))) + .build(); + + p.apply(Create.of(featureRowWithDuplicatedFeatureFields)) + .apply(redisClusterFeatureSink.writer()); + + p.run(); + + byte[] actual = redisClusterCommands.get(expectedKey.toByteArray()); + assertThat(actual, equalTo(expectedValue.toByteArray())); + } + + @Test + public void shouldPopulateMissingFeatureValuesWithDefaultInstance() { + FeatureRow featureRowWithDuplicatedFeatureFields = + FeatureRow.newBuilder() + .setFeatureSet("myproject/feature_set") + .setEventTimestamp(Timestamp.newBuilder().setSeconds(10)) + .addFields( + Field.newBuilder() + .setName("entity_id_primary") + .setValue(Value.newBuilder().setInt32Val(1))) + .addFields( + Field.newBuilder() + .setName("entity_id_secondary") + .setValue(Value.newBuilder().setStringVal("a"))) + .addFields( + Field.newBuilder() + 
.setName("feature_1") + .setValue(Value.newBuilder().setStringVal("strValue1"))) + .build(); + + RedisKey expectedKey = + RedisKey.newBuilder() + .setFeatureSet("myproject/feature_set") + .addEntities( + Field.newBuilder() + .setName("entity_id_primary") + .setValue(Value.newBuilder().setInt32Val(1))) + .addEntities( + Field.newBuilder() + .setName("entity_id_secondary") + .setValue(Value.newBuilder().setStringVal("a"))) + .build(); + + FeatureRow expectedValue = + FeatureRow.newBuilder() + .setEventTimestamp(Timestamp.newBuilder().setSeconds(10)) + .addFields(Field.newBuilder().setValue(Value.newBuilder().setStringVal("strValue1"))) + .addFields(Field.newBuilder().setValue(Value.getDefaultInstance())) + .build(); + + p.apply(Create.of(featureRowWithDuplicatedFeatureFields)) + .apply(redisClusterFeatureSink.writer()); + + p.run(); + + byte[] actual = redisClusterCommands.get(expectedKey.toByteArray()); + assertThat(actual, equalTo(expectedValue.toByteArray())); + } +} diff --git a/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/writer/RedisFeatureSinkTest.java b/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/writer/RedisFeatureSinkTest.java index beeabc2c88..0da4c102cd 100644 --- a/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/writer/RedisFeatureSinkTest.java +++ b/storage/connectors/redis/src/test/java/feast/storage/connectors/redis/writer/RedisFeatureSinkTest.java @@ -23,16 +23,16 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.protobuf.Timestamp; -import feast.core.FeatureSetProto.EntitySpec; -import feast.core.FeatureSetProto.FeatureSetSpec; -import feast.core.FeatureSetProto.FeatureSpec; -import feast.core.StoreProto; -import feast.core.StoreProto.Store.RedisConfig; -import feast.storage.RedisProto.RedisKey; -import feast.types.FeatureRowProto.FeatureRow; -import feast.types.FieldProto.Field; -import feast.types.ValueProto.Value; -import feast.types.ValueProto.ValueType.Enum; +import feast.proto.core.FeatureSetProto.EntitySpec; +import feast.proto.core.FeatureSetProto.FeatureSetSpec; +import feast.proto.core.FeatureSetProto.FeatureSpec; +import feast.proto.core.StoreProto; +import feast.proto.core.StoreProto.Store.RedisConfig; +import feast.proto.storage.RedisProto.RedisKey; +import feast.proto.types.FeatureRowProto.FeatureRow; +import feast.proto.types.FieldProto.Field; +import feast.proto.types.ValueProto.Value; +import feast.proto.types.ValueProto.ValueType.Enum; import io.lettuce.core.RedisClient; import io.lettuce.core.RedisURI; import io.lettuce.core.api.StatefulRedisConnection; @@ -78,7 +78,6 @@ public void setUp() throws IOException { FeatureSetSpec spec1 = FeatureSetSpec.newBuilder() .setName("fs") - .setVersion(1) .setProject("myproject") .addEntities(EntitySpec.newBuilder().setName("entity").setValueType(Enum.INT64).build()) .addFeatures( @@ -89,7 +88,6 @@ public void setUp() throws IOException { FeatureSetSpec.newBuilder() .setName("feature_set") .setProject("myproject") - .setVersion(1) .addEntities( EntitySpec.newBuilder() .setName("entity_id_primary") @@ -107,7 +105,7 @@ public void setUp() throws IOException { .build(); Map specMap = - ImmutableMap.of("myproject/fs:1", spec1, "myproject/feature_set:1", spec2); + ImmutableMap.of("myproject/fs", spec1, "myproject/feature_set", spec2); StoreProto.Store.RedisConfig redisConfig = StoreProto.Store.RedisConfig.newBuilder().setHost(REDIS_HOST).setPort(REDIS_PORT).build(); @@ -127,7 +125,7 @@ 
public void shouldWriteToRedis() { HashMap kvs = new LinkedHashMap<>(); kvs.put( RedisKey.newBuilder() - .setFeatureSet("myproject/fs:1") + .setFeatureSet("myproject/fs") .addEntities(field("entity", 1, Enum.INT64)) .build(), FeatureRow.newBuilder() @@ -136,7 +134,7 @@ public void shouldWriteToRedis() { .build()); kvs.put( RedisKey.newBuilder() - .setFeatureSet("myproject/fs:1") + .setFeatureSet("myproject/fs") .addEntities(field("entity", 2, Enum.INT64)) .build(), FeatureRow.newBuilder() @@ -147,12 +145,12 @@ public void shouldWriteToRedis() { List featureRows = ImmutableList.of( FeatureRow.newBuilder() - .setFeatureSet("myproject/fs:1") + .setFeatureSet("myproject/fs") .addFields(field("entity", 1, Enum.INT64)) .addFields(field("feature", "one", Enum.STRING)) .build(), FeatureRow.newBuilder() - .setFeatureSet("myproject/fs:1") + .setFeatureSet("myproject/fs") .addFields(field("entity", 2, Enum.INT64)) .addFields(field("feature", "two", Enum.STRING)) .build()); @@ -181,7 +179,7 @@ public void shouldRetryFailConnection() throws InterruptedException { HashMap kvs = new LinkedHashMap<>(); kvs.put( RedisKey.newBuilder() - .setFeatureSet("myproject/fs:1") + .setFeatureSet("myproject/fs") .addEntities(field("entity", 1, Enum.INT64)) .build(), FeatureRow.newBuilder() @@ -192,7 +190,7 @@ public void shouldRetryFailConnection() throws InterruptedException { List featureRows = ImmutableList.of( FeatureRow.newBuilder() - .setFeatureSet("myproject/fs:1") + .setFeatureSet("myproject/fs") .addFields(field("entity", 1, Enum.INT64)) .addFields(field("feature", "one", Enum.STRING)) .build()); @@ -234,7 +232,7 @@ public void shouldProduceFailedElementIfRetryExceeded() { HashMap kvs = new LinkedHashMap<>(); kvs.put( RedisKey.newBuilder() - .setFeatureSet("myproject/fs:1") + .setFeatureSet("myproject/fs") .addEntities(field("entity", 1, Enum.INT64)) .build(), FeatureRow.newBuilder() @@ -245,7 +243,7 @@ public void shouldProduceFailedElementIfRetryExceeded() { List featureRows = ImmutableList.of( FeatureRow.newBuilder() - .setFeatureSet("myproject/fs:1") + .setFeatureSet("myproject/fs") .addFields(field("entity", 1, Enum.INT64)) .addFields(field("feature", "one", Enum.STRING)) .build()); @@ -266,7 +264,7 @@ public void shouldConvertRowWithDuplicateEntitiesToValidKey() { FeatureRow offendingRow = FeatureRow.newBuilder() - .setFeatureSet("myproject/feature_set:1") + .setFeatureSet("myproject/feature_set") .setEventTimestamp(Timestamp.newBuilder().setSeconds(10)) .addFields( Field.newBuilder() @@ -292,7 +290,7 @@ public void shouldConvertRowWithDuplicateEntitiesToValidKey() { RedisKey expectedKey = RedisKey.newBuilder() - .setFeatureSet("myproject/feature_set:1") + .setFeatureSet("myproject/feature_set") .addEntities( Field.newBuilder() .setName("entity_id_primary") @@ -322,7 +320,7 @@ public void shouldConvertRowWithDuplicateEntitiesToValidKey() { public void shouldConvertRowWithOutOfOrderFieldsToValidKey() { FeatureRow offendingRow = FeatureRow.newBuilder() - .setFeatureSet("myproject/feature_set:1") + .setFeatureSet("myproject/feature_set") .setEventTimestamp(Timestamp.newBuilder().setSeconds(10)) .addFields( Field.newBuilder() @@ -344,7 +342,7 @@ public void shouldConvertRowWithOutOfOrderFieldsToValidKey() { RedisKey expectedKey = RedisKey.newBuilder() - .setFeatureSet("myproject/feature_set:1") + .setFeatureSet("myproject/feature_set") .addEntities( Field.newBuilder() .setName("entity_id_primary") @@ -377,7 +375,7 @@ public void shouldConvertRowWithOutOfOrderFieldsToValidKey() { public void 
shouldMergeDuplicateFeatureFields() { FeatureRow featureRowWithDuplicatedFeatureFields = FeatureRow.newBuilder() - .setFeatureSet("myproject/feature_set:1") + .setFeatureSet("myproject/feature_set") .setEventTimestamp(Timestamp.newBuilder().setSeconds(10)) .addFields( Field.newBuilder() @@ -403,7 +401,7 @@ public void shouldMergeDuplicateFeatureFields() { RedisKey expectedKey = RedisKey.newBuilder() - .setFeatureSet("myproject/feature_set:1") + .setFeatureSet("myproject/feature_set") .addEntities( Field.newBuilder() .setName("entity_id_primary") @@ -433,7 +431,7 @@ public void shouldMergeDuplicateFeatureFields() { public void shouldPopulateMissingFeatureValuesWithDefaultInstance() { FeatureRow featureRowWithDuplicatedFeatureFields = FeatureRow.newBuilder() - .setFeatureSet("myproject/feature_set:1") + .setFeatureSet("myproject/feature_set") .setEventTimestamp(Timestamp.newBuilder().setSeconds(10)) .addFields( Field.newBuilder() @@ -451,7 +449,7 @@ public void shouldPopulateMissingFeatureValuesWithDefaultInstance() { RedisKey expectedKey = RedisKey.newBuilder() - .setFeatureSet("myproject/feature_set:1") + .setFeatureSet("myproject/feature_set") .addEntities( Field.newBuilder() .setName("entity_id_primary") diff --git a/tests/e2e/Dockerfile b/tests/e2e/Dockerfile deleted file mode 100644 index a89a4d50a0..0000000000 --- a/tests/e2e/Dockerfile +++ /dev/null @@ -1,26 +0,0 @@ -ARG FEAST_CI_IMAGE - -FROM ${FEAST_CI_IMAGE} - -WORKDIR /feast - -COPY Makefile . -COPY protos protos -COPY sdk/python sdk/python - -# Compile feast.core Python protobuf module -# FIXME this is redundant with infra/docker/ci/Dockerfile -RUN make compile-protos-python - -# Install Feast Python SDK and test requirements - -COPY tests/e2e/requirements.txt tests/e2e/requirements.txt -COPY README.md README.md - -# setup.py in sdk/python grabs the version number from the .git -# files, but we don't want to COPY .git so as to maximize -# layer reuse. Fool it by initialize an empty .git repository. -RUN git init . 
- -RUN pip install -qe sdk/python && \ - pip install -qr tests/e2e/requirements.txt diff --git a/tests/e2e/all_types_parquet/all_types_parquet.yaml b/tests/e2e/all_types_parquet/all_types_parquet.yaml index 2043b6b473..fa95ce13be 100644 --- a/tests/e2e/all_types_parquet/all_types_parquet.yaml +++ b/tests/e2e/all_types_parquet/all_types_parquet.yaml @@ -6,7 +6,7 @@ spec: valueType: INT64 features: - name: int32_feature_parquet - valueType: INT64 + valueType: INT32 - name: int64_feature_parquet valueType: INT64 - name: float_feature_parquet diff --git a/tests/e2e/basic-ingest-redis-serving.py b/tests/e2e/basic-ingest-redis-serving.py index 8e40794344..da0967fd76 100644 --- a/tests/e2e/basic-ingest-redis-serving.py +++ b/tests/e2e/basic-ingest-redis-serving.py @@ -2,16 +2,20 @@ import math import random import time +import grpc from feast.entity import Entity from feast.serving.ServingService_pb2 import ( GetOnlineFeaturesRequest, GetOnlineFeaturesResponse, ) from feast.core.IngestionJob_pb2 import IngestionJobStatus +from feast.core.CoreService_pb2_grpc import CoreServiceStub +from feast.core import CoreService_pb2 from feast.types.Value_pb2 import Value as Value from feast.client import Client from feast.feature_set import FeatureSet, FeatureSetRef from feast.type_map import ValueType +from feast.constants import FEAST_DEFAULT_OPTIONS, CONFIG_PROJECT_KEY from google.protobuf.duration_pb2 import Duration from datetime import datetime import pytz @@ -26,6 +30,7 @@ FLOAT_TOLERANCE = 0.00001 PROJECT_NAME = 'basic_' + uuid.uuid4().hex.upper()[0:6] + @pytest.fixture(scope='module') def core_url(pytestconfig): return pytestconfig.getoption("core_url") @@ -47,7 +52,6 @@ def client(core_url, serving_url, allow_dirty): # Get client for core and serving client = Client(core_url=core_url, serving_url=serving_url) client.create_project(PROJECT_NAME) - client.set_project(PROJECT_NAME) # Ensure Feast core is active, but empty if not allow_dirty: @@ -60,78 +64,104 @@ def client(core_url, serving_url, allow_dirty): return client -@pytest.fixture(scope='module') -def basic_dataframe(): +def basic_dataframe(entities, features, ingest_time, n_size): offset = random.randint(1000, 100000) # ensure a unique key space is used - return pd.DataFrame( - { - "datetime": [datetime.utcnow().replace(tzinfo=pytz.utc) for _ in - range(5)], - "customer_id": [offset + inc for inc in range(5)], - "daily_transactions": [np.random.rand() for _ in range(5)], - "total_transactions": [512 for _ in range(5)], - } - ) + df_dict = { + "datetime": [ingest_time.replace(tzinfo=pytz.utc) for _ in + range(n_size)], + } + for entity_name in entities: + df_dict[entity_name] = list(range(1, n_size + 1)) + for feature_name in features: + df_dict[feature_name] = [np.random.rand() for _ in range(n_size)] + return pd.DataFrame(df_dict) + + +@pytest.fixture(scope="module") +def ingest_time(): + return datetime.utcnow() + + +@pytest.fixture(scope="module") +def cust_trans_df(ingest_time): + return basic_dataframe(entities=["customer_id"], + features=["daily_transactions", "total_transactions"], + ingest_time=ingest_time, + n_size=5) + + +@pytest.fixture(scope="module") +def driver_df(ingest_time): + return basic_dataframe(entities=["driver_id"], + features=["rating", "cost"], + ingest_time=ingest_time, + n_size=5) + + +def test_version_returns_results(client): + version_info = client.version() + assert not version_info['core'] is 'not configured' + assert not version_info['serving'] is 'not configured' @pytest.mark.timeout(45) 
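One nit before the registration tests: the new `test_version_returns_results` above asserts with `not version_info['core'] is 'not configured'`, which tests string *identity*. CPython's interning can make that appear to work, but it is not guaranteed, and newer interpreters warn on `is` against literals. An equality check says what is meant:

```python
def test_version_returns_results(client):
    version_info = client.version()
    assert version_info['core'] != 'not configured'
    assert version_info['serving'] != 'not configured'
```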
@pytest.mark.run(order=10) def test_basic_register_feature_set_success(client): - # Load feature set from file + # Register feature set without project cust_trans_fs_expected = FeatureSet.from_yaml("basic/cust_trans_fs.yaml") + driver_fs_expected = FeatureSet.from_yaml("basic/driver_fs.yaml") + client.apply(cust_trans_fs_expected) + client.apply(driver_fs_expected) + cust_trans_fs_actual = client.get_feature_set("customer_transactions") + assert cust_trans_fs_actual == cust_trans_fs_expected + driver_fs_actual = client.get_feature_set("driver") + assert driver_fs_actual == driver_fs_expected + # Register feature set with project + cust_trans_fs_expected = FeatureSet.from_yaml("basic/cust_trans_fs.yaml") client.set_project(PROJECT_NAME) - - # Register feature set client.apply(cust_trans_fs_expected) - - cust_trans_fs_actual = client.get_feature_set(name="customer_transactions") - + cust_trans_fs_actual = client.get_feature_set("customer_transactions", + project=PROJECT_NAME) assert cust_trans_fs_actual == cust_trans_fs_expected - if cust_trans_fs_actual is None: - raise Exception( - "Client cannot retrieve 'customer_transactions' FeatureSet " - "after registration. Either Feast Core does not save the " - "FeatureSet correctly or the client needs to wait longer for FeatureSet " - "to be committed." - ) + # reset client's project for other tests + client.set_project() @pytest.mark.timeout(300) @pytest.mark.run(order=11) -def test_basic_ingest_success(client, basic_dataframe): - client.set_project(PROJECT_NAME) - +def test_basic_ingest_success(client, cust_trans_df, driver_df): cust_trans_fs = client.get_feature_set(name="customer_transactions") + driver_fs = client.get_feature_set(name="driver") # Ingest customer transaction data - client.ingest(cust_trans_fs, basic_dataframe) + client.ingest(cust_trans_fs, cust_trans_df) + client.ingest(driver_fs, driver_df) time.sleep(5) -@pytest.mark.timeout(45) + +@pytest.mark.timeout(90) @pytest.mark.run(order=12) -def test_basic_retrieve_online_success(client, basic_dataframe): +def test_basic_retrieve_online_success(client, cust_trans_df): # Poll serving for feature values until the correct values are returned while True: time.sleep(1) - - client.set_project(PROJECT_NAME) - response = client.get_online_features( entity_rows=[ GetOnlineFeaturesRequest.EntityRow( fields={ "customer_id": Value( - int64_val=basic_dataframe.iloc[0]["customer_id"] + int64_val=cust_trans_df.iloc[0]["customer_id"] ) } ) ], + # Test retrieve with different variations of the string feature refs feature_refs=[ "daily_transactions", "total_transactions", - ], + ] ) # type: GetOnlineFeaturesResponse if response is None: @@ -139,22 +169,80 @@ def test_basic_retrieve_online_success(client, basic_dataframe): returned_daily_transactions = float( response.field_values[0] - .fields[PROJECT_NAME + "/daily_transactions"] + .fields["daily_transactions"] .float_val ) sent_daily_transactions = float( - basic_dataframe.iloc[0]["daily_transactions"]) + cust_trans_df.iloc[0]["daily_transactions"]) if math.isclose( - sent_daily_transactions, - returned_daily_transactions, - abs_tol=FLOAT_TOLERANCE, + sent_daily_transactions, + returned_daily_transactions, + abs_tol=FLOAT_TOLERANCE, ): break + +@pytest.mark.timeout(90) +@pytest.mark.run(order=13) +def test_basic_retrieve_online_multiple_featureset(client, cust_trans_df, driver_df): + # Poll serving for feature values until the correct values are returned + while True: + time.sleep(1) + # Test retrieve with different variations of the string 
feature refs + # ie feature set inference for feature refs without specified feature set + feature_ref_df_mapping = [ + ("customer_transactions:daily_transactions", cust_trans_df), + ("driver:rating", driver_df), + ("total_transactions", cust_trans_df), + ] + response = client.get_online_features( + entity_rows=[ + GetOnlineFeaturesRequest.EntityRow( + fields={ + "customer_id": Value( + int64_val=cust_trans_df.iloc[0]["customer_id"] + ), + "driver_id": Value( + int64_val=driver_df.iloc[0]["driver_id"] + ) + } + ) + ], + feature_refs=[mapping[0] for mapping in feature_ref_df_mapping], + ) # type: GetOnlineFeaturesResponse + + if response is None: + continue + + def check_response(ingest_df, response, feature_ref): + returned_value = float( + response.field_values[0] + .fields[feature_ref] + .float_val + ) + feature_ref_splits = feature_ref.split(":") + if len(feature_ref_splits) == 1: + feature_name = feature_ref + else: + _, feature_name = feature_ref_splits + + sent_value = float( + ingest_df.iloc[0][feature_name]) + + return math.isclose( + sent_value, + returned_value, + abs_tol=FLOAT_TOLERANCE, + ) + + if all([check_response(df, response, ref) for ref, df in feature_ref_df_mapping]): + break + + @pytest.mark.timeout(300) @pytest.mark.run(order=19) -def test_basic_ingest_jobs(client, basic_dataframe): +def test_basic_ingest_jobs(client): # list ingestion jobs given featureset cust_trans_fs = client.get_feature_set(name="customer_transactions") ingest_jobs = client.list_ingest_jobs( @@ -225,7 +313,7 @@ def all_types_dataframe(): # np.array([True, False, True]), # np.array([True, False, True]), # ], - # TODO: https://github.com/gojek/feast/issues/341 + # TODO: https://github.com/feast-dev/feast/issues/341 } ) @@ -285,7 +373,7 @@ def test_all_types_ingest_success(client, all_types_dataframe): client.ingest(all_types_fs, all_types_dataframe) -@pytest.mark.timeout(45) +@pytest.mark.timeout(90) @pytest.mark.run(order=22) def test_all_types_retrieve_online_success(client, all_types_dataframe): # Poll serving for feature values until the correct values are returned @@ -319,20 +407,20 @@ def test_all_types_retrieve_online_success(client, all_types_dataframe): if response is None: continue - returned_float_list = ( response.field_values[0] - .fields[PROJECT_NAME+"/float_list_feature"] + .fields["float_list_feature"] .float_list_val.val ) sent_float_list = all_types_dataframe.iloc[0]["float_list_feature"] if math.isclose( - returned_float_list[0], sent_float_list[0], abs_tol=FLOAT_TOLERANCE + returned_float_list[0], sent_float_list[0], abs_tol=FLOAT_TOLERANCE ): break + @pytest.mark.timeout(300) @pytest.mark.run(order=29) def test_all_types_ingest_jobs(client, all_types_dataframe): @@ -355,6 +443,7 @@ def test_all_types_ingest_jobs(client, all_types_dataframe): ingest_job.wait(IngestionJobStatus.ABORTED) assert ingest_job.status == IngestionJobStatus.ABORTED + @pytest.fixture(scope='module') def large_volume_dataframe(): ROW_COUNT = 100000 @@ -409,7 +498,7 @@ def test_large_volume_ingest_success(client, large_volume_dataframe): client.ingest(cust_trans_fs, large_volume_dataframe) -@pytest.mark.timeout(45) +@pytest.mark.timeout(90) @pytest.mark.run(order=32) def test_large_volume_retrieve_online_success(client, large_volume_dataframe): # Poll serving for feature values until the correct values are returned @@ -438,16 +527,16 @@ def test_large_volume_retrieve_online_success(client, large_volume_dataframe): returned_daily_transactions = float( response.field_values[0] - .fields[PROJECT_NAME + 
"/daily_transactions_large"] + .fields["daily_transactions_large"] .float_val ) sent_daily_transactions = float( large_volume_dataframe.iloc[0]["daily_transactions_large"]) if math.isclose( - sent_daily_transactions, - returned_daily_transactions, - abs_tol=FLOAT_TOLERANCE, + sent_daily_transactions, + returned_daily_transactions, + abs_tol=FLOAT_TOLERANCE, ): break @@ -462,14 +551,14 @@ def all_types_parquet_file(): "customer_id": [np.int32(random.randint(0, 10000)) for _ in range(COUNT)], "int32_feature_parquet": [np.int32(random.randint(0, 10000)) for _ in - range(COUNT)], + range(COUNT)], "int64_feature_parquet": [np.int64(random.randint(0, 10000)) for _ in - range(COUNT)], + range(COUNT)], "float_feature_parquet": [np.float(random.random()) for _ in range(COUNT)], "double_feature_parquet": [np.float64(random.random()) for _ in - range(COUNT)], + range(COUNT)], "string_feature_parquet": ["one" + str(random.random()) for _ in - range(COUNT)], + range(COUNT)], "bytes_feature_parquet": [b"one" for _ in range(COUNT)], "int32_list_feature_parquet": [ np.array([1, 2, 3, random.randint(0, 10000)], dtype=np.int32) @@ -503,12 +592,13 @@ def all_types_parquet_file(): ) # TODO: Boolean list is not being tested. - # https://github.com/gojek/feast/issues/341 + # https://github.com/feast-dev/feast/issues/341 file_path = os.path.join(tempfile.mkdtemp(), 'all_types.parquet') df.to_parquet(file_path, allow_truncated_timestamps=True) return file_path + @pytest.mark.timeout(300) @pytest.mark.run(order=40) def test_all_types_parquet_register_feature_set_success(client): @@ -539,10 +629,84 @@ def test_all_types_parquet_register_feature_set_success(client): @pytest.mark.timeout(600) @pytest.mark.run(order=41) def test_all_types_infer_register_ingest_file_success(client, - all_types_parquet_file): + all_types_parquet_file): # Get feature set all_types_fs = client.get_feature_set(name="all_types_parquet") # Ingest user embedding data - client.ingest(feature_set=all_types_fs, source=all_types_parquet_file, - force_update=True) + client.ingest(feature_set=all_types_fs, source=all_types_parquet_file) + + +# TODO: rewrite these using python SDK once the labels are implemented there +class TestsBasedOnGrpc: + GRPC_CONNECTION_TIMEOUT = 3 + LABEL_KEY = "my" + LABEL_VALUE = "label" + + @pytest.fixture(scope="module") + def core_service_stub(self, core_url): + if core_url.endswith(":443"): + core_channel = grpc.secure_channel( + core_url, grpc.ssl_channel_credentials() + ) + else: + core_channel = grpc.insecure_channel(core_url) + + try: + grpc.channel_ready_future(core_channel).result(timeout=self.GRPC_CONNECTION_TIMEOUT) + except grpc.FutureTimeoutError: + raise ConnectionError( + f"Connection timed out while attempting to connect to Feast " + f"Core gRPC server {core_url} " + ) + core_service_stub = CoreServiceStub(core_channel) + return core_service_stub + + def apply_feature_set(self, core_service_stub, feature_set_proto): + try: + apply_fs_response = core_service_stub.ApplyFeatureSet( + CoreService_pb2.ApplyFeatureSetRequest(feature_set=feature_set_proto), + timeout=self.GRPC_CONNECTION_TIMEOUT, + ) # type: ApplyFeatureSetResponse + except grpc.RpcError as e: + raise grpc.RpcError(e.details()) + return apply_fs_response.feature_set + + def get_feature_set(self, core_service_stub, name, project): + try: + get_feature_set_response = core_service_stub.GetFeatureSet( + CoreService_pb2.GetFeatureSetRequest( + project=project, name=name.strip(), + ) + ) # type: GetFeatureSetResponse + except grpc.RpcError as e: 
+ raise grpc.RpcError(e.details()) + return get_feature_set_response.feature_set + + @pytest.mark.timeout(45) + @pytest.mark.run(order=51) + def test_register_feature_set_with_labels(self, core_service_stub): + feature_set_name = "test_feature_set_labels" + feature_set_proto = FeatureSet(feature_set_name, PROJECT_NAME).to_proto() + feature_set_proto.spec.labels[self.LABEL_KEY] = self.LABEL_VALUE + self.apply_feature_set(core_service_stub, feature_set_proto) + + retrieved_feature_set = self.get_feature_set(core_service_stub, feature_set_name, PROJECT_NAME) + + assert self.LABEL_KEY in retrieved_feature_set.spec.labels + assert retrieved_feature_set.spec.labels[self.LABEL_KEY] == self.LABEL_VALUE + + @pytest.mark.timeout(45) + @pytest.mark.run(order=52) + def test_register_feature_with_labels(self, core_service_stub): + feature_set_name = "test_feature_labels" + feature_set_proto = FeatureSet(feature_set_name, PROJECT_NAME, features=[Feature("rating", ValueType.INT64)]) \ + .to_proto() + feature_set_proto.spec.features[0].labels[self.LABEL_KEY] = self.LABEL_VALUE + self.apply_feature_set(core_service_stub, feature_set_proto) + + retrieved_feature_set = self.get_feature_set(core_service_stub, feature_set_name, PROJECT_NAME) + retrieved_feature = retrieved_feature_set.spec.features[0] + + assert self.LABEL_KEY in retrieved_feature.labels + assert retrieved_feature.labels[self.LABEL_KEY] == self.LABEL_VALUE diff --git a/tests/e2e/basic/driver_fs.yaml b/tests/e2e/basic/driver_fs.yaml new file mode 100644 index 0000000000..f25ca95678 --- /dev/null +++ b/tests/e2e/basic/driver_fs.yaml @@ -0,0 +1,12 @@ +kind: feature_set +spec: + name: driver + entities: + - name: driver_id + valueType: INT64 + features: + - name: rating + valueType: FLOAT + - name: cost + valueType: FLOAT + maxAge: 3600s diff --git a/tests/e2e/bq-batch-retrieval.py b/tests/e2e/bq-batch-retrieval.py index 0cf05e77e1..99b88a8dff 100644 --- a/tests/e2e/bq-batch-retrieval.py +++ b/tests/e2e/bq-batch-retrieval.py @@ -1,27 +1,31 @@ +import math +import os import random import time +import uuid from datetime import datetime from datetime import timedelta from urllib.parse import urlparse -import uuid import numpy as np import pandas as pd import pytest import pytz from feast.client import Client +from feast.core.CoreService_pb2 import ListStoresRequest +from feast.core.IngestionJob_pb2 import IngestionJobStatus from feast.entity import Entity from feast.feature import Feature from feast.feature_set import FeatureSet from feast.type_map import ValueType -from google.cloud import storage +from google.cloud import storage, bigquery +from google.cloud.storage import Blob from google.protobuf.duration_pb2 import Duration from pandavro import to_avro -pd.set_option('display.max_columns', None) - -PROJECT_NAME = 'batch_' + uuid.uuid4().hex.upper()[0:6] +pd.set_option("display.max_columns", None) +PROJECT_NAME = "batch_" + uuid.uuid4().hex.upper()[0:6] @pytest.fixture(scope="module") def core_url(pytestconfig): @@ -48,66 +52,71 @@ def client(core_url, serving_url, allow_dirty): # Get client for core and serving client = Client(core_url=core_url, serving_url=serving_url) client.create_project(PROJECT_NAME) - client.set_project(PROJECT_NAME) # Ensure Feast core is active, but empty if not allow_dirty: feature_sets = client.list_feature_sets() if len(feature_sets) > 0: - raise Exception("Feast cannot have existing feature sets registered. Exiting tests.") + raise Exception( + "Feast cannot have existing feature sets registered. Exiting tests." 
+ ) return client + @pytest.mark.first -def test_apply_all_featuresets(client): +@pytest.mark.direct_runner +@pytest.mark.dataflow_runner +@pytest.mark.run(order=1) +def test_batch_apply_all_featuresets(client): client.set_project(PROJECT_NAME) file_fs1 = FeatureSet( - "file_feature_set", - features=[Feature("feature_value1", ValueType.STRING)], - entities=[Entity("entity_id", ValueType.INT64)], - max_age=Duration(seconds=100), - ) + "file_feature_set", + features=[Feature("feature_value1", ValueType.STRING)], + entities=[Entity("entity_id", ValueType.INT64)], + max_age=Duration(seconds=100), + ) client.apply(file_fs1) gcs_fs1 = FeatureSet( - "gcs_feature_set", - features=[Feature("feature_value2", ValueType.STRING)], - entities=[Entity("entity_id", ValueType.INT64)], - max_age=Duration(seconds=100), - ) + "gcs_feature_set", + features=[Feature("feature_value2", ValueType.STRING)], + entities=[Entity("entity_id", ValueType.INT64)], + max_age=Duration(seconds=100), + ) client.apply(gcs_fs1) proc_time_fs = FeatureSet( - "processing_time", - features=[Feature("feature_value3", ValueType.STRING)], - entities=[Entity("entity_id", ValueType.INT64)], - max_age=Duration(seconds=100), - ) + "processing_time", + features=[Feature("feature_value3", ValueType.STRING)], + entities=[Entity("entity_id", ValueType.INT64)], + max_age=Duration(seconds=100), + ) client.apply(proc_time_fs) add_cols_fs = FeatureSet( - "additional_columns", - features=[Feature("feature_value4", ValueType.STRING)], - entities=[Entity("entity_id", ValueType.INT64)], - max_age=Duration(seconds=100), - ) + "additional_columns", + features=[Feature("feature_value4", ValueType.STRING)], + entities=[Entity("entity_id", ValueType.INT64)], + max_age=Duration(seconds=100), + ) client.apply(add_cols_fs) historical_fs = FeatureSet( - "historical", - features=[Feature("feature_value5", ValueType.STRING)], - entities=[Entity("entity_id", ValueType.INT64)], - max_age=Duration(seconds=100), - ) + "historical", + features=[Feature("feature_value5", ValueType.STRING)], + entities=[Entity("entity_id", ValueType.INT64)], + max_age=Duration(seconds=100), + ) client.apply(historical_fs) fs1 = FeatureSet( - "feature_set_1", - features=[Feature("feature_value6", ValueType.STRING)], - entities=[Entity("entity_id", ValueType.INT64)], - max_age=Duration(seconds=100), - ) + "feature_set_1", + features=[Feature("feature_value6", ValueType.STRING)], + entities=[Entity("entity_id", ValueType.INT64)], + max_age=Duration(seconds=100), + ) fs2 = FeatureSet( "feature_set_2", @@ -127,8 +136,11 @@ def test_apply_all_featuresets(client): client.apply(no_max_age_fs) -def test_get_batch_features_with_file(client): - file_fs1 = client.get_feature_set(name="file_feature_set", version=1) +@pytest.mark.direct_runner +@pytest.mark.dataflow_runner +@pytest.mark.run(order=10) +def test_batch_get_batch_features_with_file(client): + file_fs1 = client.get_feature_set(name="file_feature_set") N_ROWS = 10 time_offset = datetime.utcnow().replace(tzinfo=pytz.utc) @@ -139,26 +151,38 @@ def test_get_batch_features_with_file(client): "feature_value1": [f"{i}" for i in range(N_ROWS)], } ) - client.ingest(file_fs1, features_1_df) + client.ingest(file_fs1, features_1_df, timeout=480) # Rename column (datetime -> event_timestamp) + features_1_df['datetime'] + pd.Timedelta(seconds=1) # adds buffer to avoid rounding errors features_1_df = features_1_df.rename(columns={"datetime": "event_timestamp"}) - to_avro(df=features_1_df[["event_timestamp", "entity_id"]], 
-            file_path_or_buffer="file_feature_set.avro")
+    to_avro(
+        df=features_1_df[["event_timestamp", "entity_id"]],
+        file_path_or_buffer="file_feature_set.avro",
+    )

     time.sleep(15)

     feature_retrieval_job = client.get_batch_features(
-        entity_rows="file://file_feature_set.avro", feature_refs=[f"{PROJECT_NAME}/feature_value1:1"]
+        entity_rows="file://file_feature_set.avro",
+        feature_refs=["feature_value1"],
+        project=PROJECT_NAME,
     )
     output = feature_retrieval_job.to_dataframe()
+    clean_up_remote_files(feature_retrieval_job.get_avro_files())
     print(output.head())

-    assert output["entity_id"].to_list() == [int(i) for i in output["feature_value1"].to_list()]
+    assert output["entity_id"].to_list() == [
+        int(i) for i in output["feature_value1"].to_list()
+    ]


-def test_get_batch_features_with_gs_path(client, gcs_path):
-    gcs_fs1 = client.get_feature_set(name="gcs_feature_set", version=1)
+@pytest.mark.direct_runner
+@pytest.mark.dataflow_runner
+@pytest.mark.run(order=11)
+def test_batch_get_batch_features_with_gs_path(client, gcs_path):
+    gcs_fs1 = client.get_feature_set(name="gcs_feature_set")

     N_ROWS = 10
     time_offset = datetime.utcnow().replace(tzinfo=pytz.utc)
@@ -169,14 +193,18 @@
             "feature_value2": [f"{i}" for i in range(N_ROWS)],
         }
     )
-    client.ingest(gcs_fs1, features_1_df)
+    client.ingest(gcs_fs1, features_1_df, timeout=360)

     # Rename column (datetime -> event_timestamp)
+    features_1_df["datetime"] += pd.Timedelta(seconds=1)  # add a buffer to avoid rounding errors
     features_1_df = features_1_df.rename(columns={"datetime": "event_timestamp"})

     # Output file to local
     file_name = "gcs_feature_set.avro"
-    to_avro(df=features_1_df[["event_timestamp", "entity_id"]], file_path_or_buffer=file_name)
+    to_avro(
+        df=features_1_df[["event_timestamp", "entity_id"]],
+        file_path_or_buffer=file_name,
+    )

     uri = urlparse(gcs_path)
     bucket = uri.hostname
@@ -192,17 +220,24 @@
     time.sleep(15)
     feature_retrieval_job = client.get_batch_features(
         entity_rows=f"{gcs_path}{ts}/*",
-        feature_refs=[f"{PROJECT_NAME}/feature_value2:1"]
+        feature_refs=["feature_value2"],
+        project=PROJECT_NAME,
     )
     output = feature_retrieval_job.to_dataframe()
+    clean_up_remote_files(feature_retrieval_job.get_avro_files())
+    blob.delete()
     print(output.head())

-    assert output["entity_id"].to_list() == [int(i) for i in output["feature_value2"].to_list()]
+    assert output["entity_id"].to_list() == [
+        int(i) for i in output["feature_value2"].to_list()
+    ]


-def test_order_by_creation_time(client):
-    proc_time_fs = client.get_feature_set(name="processing_time", version=1)
+@pytest.mark.direct_runner
+@pytest.mark.run(order=12)
+def test_batch_order_by_creation_time(client):
+    proc_time_fs = client.get_feature_set(name="processing_time")

     time_offset = datetime.utcnow().replace(tzinfo=pytz.utc)
     N_ROWS = 10
@@ -224,21 +259,30 @@
     time.sleep(15)
     client.ingest(proc_time_fs, correct_df)
     feature_retrieval_job = client.get_batch_features(
-        entity_rows=incorrect_df[["datetime", "entity_id"]], feature_refs=[f"{PROJECT_NAME}/feature_value3:1"]
+        entity_rows=incorrect_df[["datetime", "entity_id"]],
+        feature_refs=["feature_value3"],
+        project=PROJECT_NAME,
     )
     output = feature_retrieval_job.to_dataframe()
+    clean_up_remote_files(feature_retrieval_job.get_avro_files())
     print(output.head())

     assert output["feature_value3"].to_list() == ["CORRECT"] * N_ROWS


-def test_additional_columns_in_entity_table(client):
-    add_cols_fs = client.get_feature_set(name="additional_columns", version=1)
+@pytest.mark.direct_runner
+@pytest.mark.run(order=13)
+def test_batch_additional_columns_in_entity_table(client):
+    add_cols_fs = client.get_feature_set(name="additional_columns")

     N_ROWS = 10
     time_offset = datetime.utcnow().replace(tzinfo=pytz.utc)
     features_df = pd.DataFrame(
-        {"datetime": [time_offset] * N_ROWS, "entity_id": [i for i in range(N_ROWS)], "feature_value4": ["abc"] * N_ROWS}
+        {
+            "datetime": [time_offset] * N_ROWS,
+            "entity_id": [i for i in range(N_ROWS)],
+            "feature_value4": ["abc"] * N_ROWS,
+        }
     )
     client.ingest(add_cols_fs, features_df)
@@ -253,18 +297,28 @@
     time.sleep(15)
     feature_retrieval_job = client.get_batch_features(
-        entity_rows=entity_df, feature_refs=[f"{PROJECT_NAME}/feature_value4:1"]
+        entity_rows=entity_df,
+        feature_refs=["feature_value4"],
+        project=PROJECT_NAME,
     )

     output = feature_retrieval_job.to_dataframe().sort_values(by=["entity_id"])
+    clean_up_remote_files(feature_retrieval_job.get_avro_files())
     print(output.head(10))

-    assert np.allclose(output["additional_float_col"], entity_df["additional_float_col"])
-    assert output["additional_string_col"].to_list() == entity_df["additional_string_col"].to_list()
+    assert np.allclose(
+        output["additional_float_col"], entity_df["additional_float_col"]
+    )
+    assert (
+        output["additional_string_col"].to_list()
+        == entity_df["additional_string_col"].to_list()
+    )
     assert output["feature_value4"].to_list() == features_df["feature_value4"].to_list()


-def test_point_in_time_correctness_join(client):
-    historical_fs = client.get_feature_set(name="historical", version=1)
+@pytest.mark.direct_runner
+@pytest.mark.run(order=14)
+def test_batch_point_in_time_correctness_join(client):
+    historical_fs = client.get_feature_set(name="historical")

     time_offset = datetime.utcnow().replace(tzinfo=pytz.utc)
     N_EXAMPLES = 10
@@ -281,22 +335,32 @@
         }
     )
     entity_df = pd.DataFrame(
-        {"datetime": [time_offset - timedelta(seconds=10)] * N_EXAMPLES, "entity_id": [i for i in range(N_EXAMPLES)]}
+        {
+            "datetime": [time_offset - timedelta(seconds=10)] * N_EXAMPLES,
+            "entity_id": [i for i in range(N_EXAMPLES)],
+        }
     )

     client.ingest(historical_fs, historical_df)

     time.sleep(15)
-    feature_retrieval_job = client.get_batch_features(entity_rows=entity_df, feature_refs=[f"{PROJECT_NAME}/feature_value5"])
+    feature_retrieval_job = client.get_batch_features(
+        entity_rows=entity_df,
+        feature_refs=["feature_value5"],
+        project=PROJECT_NAME,
+    )
     output = feature_retrieval_job.to_dataframe()
+    clean_up_remote_files(feature_retrieval_job.get_avro_files())
     print(output.head())

     assert output["feature_value5"].to_list() == ["CORRECT"] * N_EXAMPLES


-def test_multiple_featureset_joins(client):
-    fs1 = client.get_feature_set(name="feature_set_1", version=1)
-    fs2 = client.get_feature_set(name="feature_set_2", version=1)
+@pytest.mark.direct_runner
+@pytest.mark.run(order=15)
+def test_batch_multiple_featureset_joins(client):
+    fs1 = client.get_feature_set(name="feature_set_1")
+    fs2 = client.get_feature_set(name="feature_set_2")

     N_ROWS = 10
     time_offset = datetime.utcnow().replace(tzinfo=pytz.utc)
@@ -327,18 +391,32 @@
     )

     time.sleep(15)
+    # Test retrieval with different variations of the string feature refs,
+    # i.e. feature set inference for feature refs without a specified feature set
     feature_retrieval_job = client.get_batch_features(
-        entity_rows=entity_df, feature_refs=[f"{PROJECT_NAME}/feature_value6:1", f"{PROJECT_NAME}/other_feature_value7:1"]
+        entity_rows=entity_df,
+        feature_refs=[
+            "feature_value6",
+            "feature_set_2:other_feature_value7",
+        ],
+        project=PROJECT_NAME,
     )
     output = feature_retrieval_job.to_dataframe()
+    clean_up_remote_files(feature_retrieval_job.get_avro_files())
     print(output.head())

-    assert output["entity_id"].to_list() == [int(i) for i in output["feature_value6"].to_list()]
-    assert output["other_entity_id"].to_list() == output["other_feature_value7"].to_list()
+    assert output["entity_id"].to_list() == [
+        int(i) for i in output["feature_value6"].to_list()
+    ]
+    assert (
+        output["other_entity_id"].to_list()
+        == output["feature_set_2__other_feature_value7"].to_list()
+    )


-def test_no_max_age(client):
-    no_max_age_fs = client.get_feature_set(name="no_max_age", version=1)
+@pytest.mark.direct_runner
+@pytest.mark.run(order=16)
+def test_batch_no_max_age(client):
+    no_max_age_fs = client.get_feature_set(name="no_max_age")

     time_offset = datetime.utcnow().replace(tzinfo=pytz.utc)
     N_ROWS = 10
@@ -353,10 +431,232 @@
     time.sleep(15)
     feature_retrieval_job = client.get_batch_features(
-        entity_rows=features_8_df[["datetime", "entity_id"]], feature_refs=[f"{PROJECT_NAME}/feature_value8:1"]
+        entity_rows=features_8_df[["datetime", "entity_id"]],
+        feature_refs=["feature_value8"],
+        project=PROJECT_NAME,
     )
     output = feature_retrieval_job.to_dataframe()
+    clean_up_remote_files(feature_retrieval_job.get_avro_files())
+    print(output.head())
+
+    assert output["entity_id"].to_list() == output["feature_value8"].to_list()
+
+
+@pytest.fixture(scope="module", autouse=True)
+def infra_teardown(pytestconfig, core_url, serving_url):
+    client = Client(core_url=core_url, serving_url=serving_url)
+    client.set_project(PROJECT_NAME)
+
+    marker = pytestconfig.getoption("-m")
+    yield marker
+    if marker == "dataflow_runner":
+        ingest_jobs = client.list_ingest_jobs()
+        ingest_jobs = [
+            client.list_ingest_jobs(job.id)[0].external_id
+            for job in ingest_jobs
+            if job.status == IngestionJobStatus.RUNNING
+        ]
+
+        cwd = os.getcwd()
+        with open(f"{cwd}/ingesting_jobs.txt", "w+") as output:
+            for job in ingest_jobs:
+                output.write("%s\n" % job)
+    else:
+        print("Cleaning up not required")
+
+
+'''
+This suite of tests exercises the apply feature set -> update feature set ->
+retrieve sequence of events. It ensures that when a feature set is updated,
+tombstoned features are no longer retrieved, and added features are null for
+previously ingested rows.
+
+It is marked separately because of the length of time required to perform
+this test, due to BigQuery schema caching for streaming writes.
+'''
+
+
+@pytest.fixture(scope="module")
+def update_featureset_dataframe():
+    n_rows = 10
+    time_offset = datetime.utcnow().replace(tzinfo=pytz.utc)
+    return pd.DataFrame(
+        {
+            "datetime": [time_offset] * n_rows,
+            "entity_id": [i for i in range(n_rows)],
+            "update_feature1": ["a" for i in range(n_rows)],
+            "update_feature2": [i + 2 for i in range(n_rows)],
+            "update_feature3": [i for i in range(n_rows)],
+            "update_feature4": ["b" for i in range(n_rows)],
+        }
+    )
+
+
+@pytest.mark.fs_update
+@pytest.mark.run(order=20)
+def test_update_featureset_apply_featureset_and_ingest_first_subset(
+    client, update_featureset_dataframe
+):
+    subset_columns = ["datetime", "entity_id", "update_feature1", "update_feature2"]
+    subset_df = update_featureset_dataframe.iloc[:5][subset_columns]
+    update_fs = FeatureSet(
+        "update_fs",
+        entities=[Entity(name="entity_id", dtype=ValueType.INT64)],
+        max_age=Duration(seconds=432000),
+    )
+    update_fs.infer_fields_from_df(subset_df)
+    client.apply(update_fs)
+
+    client.ingest(feature_set=update_fs, source=subset_df)
+
+    time.sleep(15)
+    feature_retrieval_job = client.get_batch_features(
+        entity_rows=update_featureset_dataframe[["datetime", "entity_id"]].iloc[:5],
+        feature_refs=[
+            "update_feature1",
+            "update_feature2",
+        ],
+        project=PROJECT_NAME,
+    )
+
+    output = feature_retrieval_job.to_dataframe().sort_values(by=["entity_id"])
+    clean_up_remote_files(feature_retrieval_job.get_avro_files())
+    print(output.head())
+
+    assert output["update_feature1"].to_list() == subset_df["update_feature1"].to_list()
+    assert output["update_feature2"].to_list() == subset_df["update_feature2"].to_list()
+
+
+@pytest.mark.fs_update
+@pytest.mark.timeout(600)
+@pytest.mark.run(order=21)
+def test_update_featureset_update_featureset_and_ingest_second_subset(
+    client, update_featureset_dataframe
+):
+    subset_columns = [
+        "datetime",
+        "entity_id",
+        "update_feature1",
+        "update_feature3",
+        "update_feature4",
+    ]
+    subset_df = update_featureset_dataframe.iloc[5:][subset_columns]
+    update_fs = FeatureSet(
+        "update_fs",
+        entities=[Entity(name="entity_id", dtype=ValueType.INT64)],
+        max_age=Duration(seconds=432000),
+    )
+    update_fs.infer_fields_from_df(subset_df)
+    client.apply(update_fs)
+
+    # We keep retrying this ingestion until all values make it into the buffer.
+    # This is a necessary step because BigQuery streaming caches table schemas,
+    # and as a result rows may be lost.
+    while True:
+        ingestion_id = client.ingest(feature_set=update_fs, source=subset_df)
+        time.sleep(15)  # wait for rows to get written to BigQuery
+        rows_ingested = get_rows_ingested(client, update_fs, ingestion_id)
+        if rows_ingested == len(subset_df):
+            print(f"Number of rows successfully ingested: {rows_ingested}. Continuing.")
+            break
+        print(
+            f"Number of rows successfully ingested: {rows_ingested}. Retrying ingestion."
+        )
+        time.sleep(30)
+
+    feature_retrieval_job = client.get_batch_features(
+        entity_rows=update_featureset_dataframe[["datetime", "entity_id"]].iloc[5:],
+        feature_refs=[
+            "update_feature1",
+            "update_feature3",
+            "update_feature4",
+        ],
+        project=PROJECT_NAME,
+    )
+
+    output = feature_retrieval_job.to_dataframe().sort_values(by=["entity_id"])
+    clean_up_remote_files(feature_retrieval_job.get_avro_files())
     print(output.head())

-    assert output["entity_id"].to_list() == output["feature_value8"].to_list()
\ No newline at end of file
+    assert output["update_feature1"].to_list() == subset_df["update_feature1"].to_list()
+    assert output["update_feature3"].to_list() == subset_df["update_feature3"].to_list()
+    assert output["update_feature4"].to_list() == subset_df["update_feature4"].to_list()
+
+
+@pytest.mark.fs_update
+@pytest.mark.run(order=22)
+def test_update_featureset_retrieve_all_fields(client, update_featureset_dataframe):
+    with pytest.raises(Exception):
+        feature_retrieval_job = client.get_batch_features(
+            entity_rows=update_featureset_dataframe[["datetime", "entity_id"]],
+            feature_refs=[
+                "update_feature1",
+                "update_feature2",
+                "update_feature3",
+                "update_feature4",
+            ],
+            project=PROJECT_NAME,
+        )
+        feature_retrieval_job.result()
+
+
+@pytest.mark.fs_update
+@pytest.mark.run(order=23)
+def test_update_featureset_retrieve_valid_fields(client, update_featureset_dataframe):
+    feature_retrieval_job = client.get_batch_features(
+        entity_rows=update_featureset_dataframe[["datetime", "entity_id"]],
+        feature_refs=[
+            "update_feature1",
+            "update_feature3",
+            "update_feature4",
+        ],
+        project=PROJECT_NAME,
+    )
+    output = feature_retrieval_job.to_dataframe().sort_values(by=["entity_id"])
+    clean_up_remote_files(feature_retrieval_job.get_avro_files())
+    print(output.head(10))
+    assert (
+        output["update_feature1"].to_list()
+        == update_featureset_dataframe["update_feature1"].to_list()
+    )
+    # The first five rows contain np.NaN because update_feature3 did not exist
+    # when they were ingested, so we compare the remaining rows as floats.
+    assert [math.isnan(i) for i in output["update_feature3"].to_list()[:5]] == [
+        True
+    ] * 5
+    assert output["update_feature3"].to_list()[5:] == [
+        float(i) for i in update_featureset_dataframe["update_feature3"].to_list()[5:]
+    ]
+    assert (
+        output["update_feature4"].to_list()
+        == [None] * 5 + update_featureset_dataframe["update_feature4"].to_list()[5:]
+    )
+
+
+def get_rows_ingested(
+    client: Client, feature_set: FeatureSet, ingestion_id: str
+) -> int:
+    response = client._core_service_stub.ListStores(
+        ListStoresRequest(filter=ListStoresRequest.Filter(name="historical"))
+    )
+    bq_config = response.store[0].bigquery_config
+    project = bq_config.project_id
+    dataset = bq_config.dataset_id
+    table = f"{PROJECT_NAME}_{feature_set.name}"
+
+    bq_client = bigquery.Client(project=project)
+    rows = bq_client.query(
+        f'SELECT COUNT(*) as count FROM `{project}.{dataset}.{table}` WHERE ingestion_id = "{ingestion_id}"'
+    ).result()
+
+    for row in rows:
+        return row["count"]
+
+
+def clean_up_remote_files(files):
+    storage_client = storage.Client()
+    for file_uri in files:
+        if file_uri.scheme == "gs":
+            blob = Blob.from_string(file_uri.geturl(), client=storage_client)
+            blob.delete()
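A note on the point-in-time semantics that test_batch_point_in_time_correctness_join exercises: batch retrieval must join each entity row to the most recent feature row whose event timestamp is at or before the entity row's timestamp, and only within the feature set's max_age. The following is a minimal pandas sketch of that rule, not Feast's retrieval implementation (which runs as generated BigQuery SQL); the frames and timestamps are invented for illustration:

```python
import pandas as pd

entity_rows = pd.DataFrame(
    {
        "datetime": pd.to_datetime(["2020-01-01 10:00:00"] * 2, utc=True),
        "entity_id": [1, 2],
    }
)

feature_rows = pd.DataFrame(
    {
        "datetime": pd.to_datetime(
            ["2020-01-01 09:59:50", "2020-01-01 10:00:10"] * 2, utc=True
        ),
        "entity_id": [1, 1, 2, 2],
        "feature_value5": ["CORRECT", "WRONG (too late)"] * 2,
    }
)

# For every entity row, take the latest feature row whose timestamp is at or
# before the entity timestamp ("backward"), matched per entity_id, and only
# within the max_age window (the tolerance).
joined = pd.merge_asof(
    entity_rows.sort_values("datetime"),
    feature_rows.sort_values("datetime"),
    on="datetime",
    by="entity_id",
    direction="backward",
    tolerance=pd.Timedelta(seconds=100),  # plays the role of max_age
)
assert joined["feature_value5"].to_list() == ["CORRECT", "CORRECT"]
```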
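The retrieval tests above use the new unversioned feature reference strings: either a bare feature name ("feature_value6"), whose feature set Feast Core infers, or a feature-set-qualified name ("feature_set_2:other_feature_value7"), which comes back as the prefixed column feature_set_2__other_feature_value7. A small hypothetical helper, not part of the Feast SDK, that captures the mapping these tests assert:

```python
from typing import Optional, Tuple


def parse_feature_ref(ref: str) -> Tuple[Optional[str], str]:
    """Split "[feature_set:]feature" into (feature_set, feature)."""
    if ":" in ref:
        feature_set, feature = ref.split(":", 1)
        return feature_set, feature
    return None, ref  # feature set left for Feast Core to infer


def output_column(ref: str) -> str:
    """Column name for a feature ref in the retrieved dataframe."""
    feature_set, feature = parse_feature_ref(ref)
    # Qualified refs are prefixed so that joins over multiple feature
    # sets cannot produce colliding column names.
    return f"{feature_set}__{feature}" if feature_set else feature


assert output_column("feature_value6") == "feature_value6"
assert (
    output_column("feature_set_2:other_feature_value7")
    == "feature_set_2__other_feature_value7"
)
```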
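The ingest-and-verify loop in the order=21 test retries forever if BigQuery never reports the expected row count. A bounded variant is sketched below; ingest_until_visible and its parameters are hypothetical names, not part of this PR, and count_rows stands in for a callable like get_rows_ingested above:

```python
import time


def ingest_until_visible(client, feature_set, source_df, count_rows,
                         max_attempts=10, wait_seconds=15):
    """Re-ingest source_df until count_rows(ingestion_id) sees every row.

    BigQuery's streaming-insert path caches table schemas, so rows written
    right after a schema change can be silently dropped; re-ingesting until
    the expected count is visible works around that, but with an upper
    bound on the number of attempts.
    """
    for attempt in range(1, max_attempts + 1):
        ingestion_id = client.ingest(feature_set=feature_set, source=source_df)
        time.sleep(wait_seconds)  # give the rows time to land in BigQuery
        visible = count_rows(ingestion_id)
        if visible == len(source_df):
            return ingestion_id
        print(f"Attempt {attempt}: {visible}/{len(source_df)} rows visible; retrying.")
        time.sleep(30)
    raise TimeoutError(f"Ingestion still incomplete after {max_attempts} attempts")
```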