From fcd11ced9a2d708e1c6abe438f71e5483dd4cd85 Mon Sep 17 00:00:00 2001 From: pradithya aria Date: Mon, 9 Sep 2019 23:22:23 +0800 Subject: [PATCH 01/19] Fix tracing to continue from existing trace created by grpc client --- serving/pom.xml | 9 +++--- .../java/feast/serving/config/AppConfig.java | 3 -- .../serving/config/InstrumentationConfig.java | 5 +++- .../config/ServingApiConfiguration.java | 20 ++++--------- .../serving/grpc/ServingGrpcService.java | 16 ++++++----- .../serving/http/ServingHttpService.java | 18 ++++++++++-- .../feast/serving/service/FeastServing.java | 23 ++++++++++----- .../service/FeatureRetrievalDispatcher.java | 28 +++++-------------- .../serving/service/RedisFeatureStorage.java | 27 ++++++++++++------ .../src/main/resources/application.properties | 1 - 10 files changed, 80 insertions(+), 70 deletions(-) diff --git a/serving/pom.xml b/serving/pom.xml index 33e3665f7e..9e05bae460 100644 --- a/serving/pom.xml +++ b/serving/pom.xml @@ -169,20 +169,19 @@ io.jaegertracing jaeger-client - 0.31.0 + 0.35.5 io.opentracing opentracing-api - 0.31.0 + 0.33.0 io.opentracing.contrib - opentracing-concurrent - 0.2.0 + opentracing-grpc + 0.1.3 - io.micrometer diff --git a/serving/src/main/java/feast/serving/config/AppConfig.java b/serving/src/main/java/feast/serving/config/AppConfig.java index 77b56116ee..2796c31ec9 100644 --- a/serving/src/main/java/feast/serving/config/AppConfig.java +++ b/serving/src/main/java/feast/serving/config/AppConfig.java @@ -29,9 +29,6 @@ public class AppConfig { // number of connection allowed to be idle int redisMaxIdleSize; - // max number of entity per thread. - int maxEntityPerBatch; - // timeout for feature retrieval int timeout; } diff --git a/serving/src/main/java/feast/serving/config/InstrumentationConfig.java b/serving/src/main/java/feast/serving/config/InstrumentationConfig.java index 519f3d3025..6188fc1414 100644 --- a/serving/src/main/java/feast/serving/config/InstrumentationConfig.java +++ b/serving/src/main/java/feast/serving/config/InstrumentationConfig.java @@ -21,6 +21,7 @@ import com.timgroup.statsd.StatsDClient; import io.micrometer.core.instrument.MeterRegistry; import io.opentracing.Tracer; +import io.opentracing.util.GlobalTracer; import java.net.InetAddress; import java.net.UnknownHostException; import org.springframework.beans.factory.annotation.Value; @@ -42,7 +43,9 @@ public StatsDClient getStatsDClient(@Value("${statsd.host}") String host, public Tracer getTracer() { io.jaegertracing.Configuration tracingConfig = io.jaegertracing.Configuration.fromEnv(APP_NAME); - return tracingConfig.getTracer(); + Tracer tracer = tracingConfig.getTracer(); + GlobalTracer.registerIfAbsent(tracer); + return tracer; } @Bean diff --git a/serving/src/main/java/feast/serving/config/ServingApiConfiguration.java b/serving/src/main/java/feast/serving/config/ServingApiConfiguration.java index 3b3ae6f35a..db9883f27c 100644 --- a/serving/src/main/java/feast/serving/config/ServingApiConfiguration.java +++ b/serving/src/main/java/feast/serving/config/ServingApiConfiguration.java @@ -18,8 +18,6 @@ package feast.serving.config; import com.google.common.base.Strings; -import com.google.common.util.concurrent.ListeningExecutorService; -import com.google.common.util.concurrent.MoreExecutors; import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; import feast.serving.service.CachedSpecStorage; @@ -28,7 +26,6 @@ import feast.serving.service.SpecStorage; import feast.specs.StorageSpecProto.StorageSpec; import io.opentracing.Tracer; -import 
io.opentracing.contrib.concurrent.TracedExecutorService; import java.lang.reflect.Type; import java.util.Collections; import java.util.List; @@ -72,10 +69,8 @@ private static Map convertJsonStringToMap(String jsonString) { public AppConfig getAppConfig( @Value("${feast.redispool.maxsize}") int redisPoolMaxSize, @Value("${feast.redispool.maxidle}") int redisPoolMaxIdle, - @Value("${feast.maxentity}") int maxEntityPerBatch, @Value("${feast.timeout}") int timeout) { return AppConfig.builder() - .maxEntityPerBatch(maxEntityPerBatch) .redisMaxPoolSize(redisPoolMaxSize) .redisMaxIdleSize(redisPoolMaxIdle) .timeout(timeout) @@ -112,10 +107,10 @@ public FeatureStorageRegistry getFeatureStorageRegistry( Map optionsMap = convertJsonStringToMap(storageOptions); StorageSpec storageSpec = StorageSpec.newBuilder() - .setId("SERVING") - .setType(storageType) - .putAllOptions(optionsMap) - .build(); + .setId("SERVING") + .setType(storageType) + .putAllOptions(optionsMap) + .build(); FeatureStorageRegistry registry = new FeatureStorageRegistry(appConfig, tracer); try { @@ -129,11 +124,8 @@ public FeatureStorageRegistry getFeatureStorageRegistry( } @Bean - public ListeningExecutorService getExecutorService( - Tracer tracer, @Value("${feast.threadpool.max}") int maxPoolSize) { - - ExecutorService executor = Executors.newFixedThreadPool(maxPoolSize); - return MoreExecutors.listeningDecorator(new TracedExecutorService(executor, tracer)); + public ExecutorService getExecutorService(@Value("${feast.threadpool.max}") int maxPoolSize) { + return Executors.newFixedThreadPool(maxPoolSize); } @Bean diff --git a/serving/src/main/java/feast/serving/grpc/ServingGrpcService.java b/serving/src/main/java/feast/serving/grpc/ServingGrpcService.java index 51622d8ea6..33bcd8c00c 100644 --- a/serving/src/main/java/feast/serving/grpc/ServingGrpcService.java +++ b/serving/src/main/java/feast/serving/grpc/ServingGrpcService.java @@ -33,13 +33,15 @@ import io.opentracing.Scope; import io.opentracing.Span; import io.opentracing.Tracer; +import io.opentracing.contrib.grpc.OpenTracingContextKey; +import io.opentracing.contrib.grpc.ServerTracingInterceptor; import lombok.extern.slf4j.Slf4j; import org.lognet.springboot.grpc.GRpcService; import org.springframework.beans.factory.annotation.Autowired; /** Grpc service implementation for Serving API. 
*/ @Slf4j -@GRpcService +@GRpcService(interceptors = {ServerTracingInterceptor.class}) public class ServingGrpcService extends ServingAPIImplBase { private final FeastServing feast; @@ -57,21 +59,20 @@ public ServingGrpcService(FeastServing feast, Tracer tracer, StatsDClient statsD @Override public void queryFeatures(QueryFeaturesRequest request, StreamObserver responseObserver) { long currentMicro = TimeUtil.microTime(); - Span span = tracer.buildSpan("ServingGrpcService-queryFeatures").start(); + Span span =tracer + .buildSpan("ServingGrpcService.queryFeatures") + .asChildOf(OpenTracingContextKey.activeSpan()) + .start(); String[] tags = makeStatsdTags(request); statsDClient.increment("query_features_count", tags); statsDClient.gauge("query_features_entity_count", request.getEntityIdCount(), tags); statsDClient.gauge("query_features_feature_count", request.getFeatureIdCount(), tags); - try (Scope scope = tracer.scopeManager().activate(span, false)) { - Span innerSpan = scope.span(); + try (Scope scope = tracer.scopeManager().activate(span)) { validateRequest(request); QueryFeaturesResponse response = feast.queryFeatures(request); - innerSpan.log("calling onNext"); responseObserver.onNext(response); - innerSpan.log("calling onCompleted"); responseObserver.onCompleted(); - innerSpan.log("all done"); statsDClient.increment("query_feature_success", tags); } catch (Exception e) { statsDClient.increment("query_feature_failed", tags); @@ -79,6 +80,7 @@ public void queryFeatures(QueryFeaturesRequest request, StreamObserver featureSpecs = getFeatureSpecs(request.getFeatureIdList()); - scope.span().log("start retrieving all feature"); + span.log("start retrieving all feature"); Map result = featureRetrievalDispatcher.dispatchFeatureRetrieval( request.getEntityName(), request.getEntityIdList(), featureSpecs); - scope.span().log("finished retrieving all feature"); + span.log("finished retrieving all feature"); // build response return QueryFeaturesResponse.newBuilder() .setEntityName(request.getEntityName()) .putAllEntities(result) .build(); + } finally { + span.finish(); } } @@ -88,10 +92,15 @@ public QueryFeaturesResponse queryFeatures(QueryFeaturesRequest request) { * @return collection of feature spec */ private Collection getFeatureSpecs(Collection featureIds) { - // dedup feature ID. - Collection featureIdSet = Sets.newHashSet(featureIds); + Span span = tracer.buildSpan("FeastServing.getFeatureSpecs").start(); + try (Scope scope = tracer.scopeManager().activate(span)) { + // dedup feature ID. 
+ Collection featureIdSet = Sets.newHashSet(featureIds); - Map featureSpecMap = specStorage.getFeatureSpecs(featureIdSet); - return featureSpecMap.values(); + Map featureSpecMap = specStorage.getFeatureSpecs(featureIdSet); + return featureSpecMap.values(); + } finally { + span.finish(); + } } } diff --git a/serving/src/main/java/feast/serving/service/FeatureRetrievalDispatcher.java b/serving/src/main/java/feast/serving/service/FeatureRetrievalDispatcher.java index e9a9ee67bd..a117567926 100644 --- a/serving/src/main/java/feast/serving/service/FeatureRetrievalDispatcher.java +++ b/serving/src/main/java/feast/serving/service/FeatureRetrievalDispatcher.java @@ -17,12 +17,12 @@ package feast.serving.service; -import com.google.common.collect.Lists; import feast.serving.ServingAPIProto.Entity; import feast.serving.model.FeatureValue; import feast.serving.util.EntityMapBuilder; import feast.specs.FeatureSpecProto.FeatureSpec; import io.opentracing.Scope; +import io.opentracing.Span; import io.opentracing.Tracer; import java.util.Collection; import java.util.List; @@ -52,31 +52,15 @@ public FeatureRetrievalDispatcher( *

If request is small enough (only one request type and one source storage) it will be * executed in the current thread. Otherwise, the execution takes place in separate thread. * - * @param entityName entity name of the feature. - * @param entityIds list of entity ids. + * @param entityName entity name of the feature. + * @param entityIds list of entity ids. * @param featureSpecs list of request. * @return map of entityID and Entity instance. */ public Map dispatchFeatureRetrieval( String entityName, Collection entityIds, Collection featureSpecs) { - - return runInCurrentThread(entityName, entityIds, Lists.newArrayList(featureSpecs)); - } - - /** - * Execute request in current thread. - * - * @param entityName entity name of of the feature. - * @param entityIds list of entity ID of the feature to be retrieved. - * @param featureSpecs list of feature specs - * @return entity map containing the result of feature retrieval. - */ - private Map runInCurrentThread( - String entityName, - Collection entityIds, - List featureSpecs) { - try (Scope scope = - tracer.buildSpan("FeatureRetrievalDispatcher-runInCurrentThread").startActive(true)) { + Span span = tracer.buildSpan("FeatureRetrievalDispatcher.runInCurrentThread").start(); + try (Scope scope = tracer.scopeManager().activate(span)) { String storageId = FeastServing.SERVING_STORAGE_ID; FeatureStorage featureStorage = featureStorageRegistry.get(storageId); @@ -87,6 +71,8 @@ private Map runInCurrentThread( EntityMapBuilder builder = new EntityMapBuilder(); builder.addFeatureValueList(featureValues); return builder.toEntityMap(); + } finally { + span.finish(); } } } diff --git a/serving/src/main/java/feast/serving/service/RedisFeatureStorage.java b/serving/src/main/java/feast/serving/service/RedisFeatureStorage.java index d481c9554e..53302056ed 100644 --- a/serving/src/main/java/feast/serving/service/RedisFeatureStorage.java +++ b/serving/src/main/java/feast/serving/service/RedisFeatureStorage.java @@ -67,18 +67,21 @@ public RedisFeatureStorage(JedisPool jedisPool, Tracer tracer) { @Override public List getFeature( String entityName, Collection entityIds, Collection featureSpecs) { - try (Scope scope = tracer.buildSpan("Redis-getFeature").startActive(true)) { + Span span = tracer.buildSpan("Redis.getFeature").start(); + try (Scope scope = tracer.scopeManager().activate(span)) { List getRequests = new ArrayList<>(entityIds.size() * featureSpecs.size()); for (FeatureSpec featureSpec : featureSpecs) { String featureId = featureSpec.getId(); String featureIdSha1Prefix = makeFeatureIdSha1Prefix(featureId); for (String entityId : entityIds) { RedisBucketKey key = makeBucketKey(entityId, featureIdSha1Prefix, BUCKET_ID_ZERO); - getRequests.add(new GetRequest(entityId, featureId, key)); + getRequests.add(new GetRequest(entityId, featureId, key)); } } - scope.span().log("completed request creation"); + span.log("completed request creation"); return sendAndProcessMultiGet(getRequests); + } finally { + span.finish(); } } @@ -89,9 +92,8 @@ public List getFeature( * @return list of feature value. 
*/ private List sendAndProcessMultiGet(List getRequests) { - try (Scope scope = tracer.buildSpan("Redis-sendAndProcessMultiGet").startActive(true)) { - Span span = scope.span(); - + Span span = tracer.buildSpan("Redis.sendAndProcessMultiGet").start(); + try (Scope scope = tracer.scopeManager().activate(span)) { if (getRequests.isEmpty()) { return Collections.emptyList(); } @@ -106,13 +108,17 @@ private List sendAndProcessMultiGet(List getRequests) span.log("completed creating mget request"); List binaryValues; - try (Jedis jedis = jedisPool.getResource()) { + Span mgetSpan = tracer.buildSpan("Redis.mget").start(); + try (Jedis jedis = jedisPool.getResource(); + Scope mgetScope = tracer.scopeManager().activate(mgetSpan)) { span.log("sending mget"); binaryValues = jedis.mget(binaryKeys); span.log("completed mget"); } catch (Exception e) { log.error("Exception while retrieving feature from Redis", e); throw new FeatureRetrievalException("Unable to retrieve feature from Redis", e); + } finally { + mgetSpan.finish(); } try { @@ -121,6 +127,8 @@ private List sendAndProcessMultiGet(List getRequests) log.error("Unable to parse protobuf", e); throw new FeatureRetrievalException("Unable to parse protobuf while retrieving feature", e); } + } finally { + span.finish(); } } @@ -134,7 +142,8 @@ private List sendAndProcessMultiGet(List getRequests) */ private List processMGet(List requests, List results) throws InvalidProtocolBufferException { - try (Scope scope = tracer.buildSpan("Redis-processMGet").startActive(true)) { + Span span = tracer.buildSpan("Redis.processMGet").start(); + try (Scope scope = tracer.scopeManager().activate(span)) { int keySize = requests.size(); List featureValues = new ArrayList<>(keySize); @@ -154,6 +163,8 @@ private List processMGet(List requests, List r featureValues.add(featureValue); } return featureValues; + } finally { + span.finish(); } } diff --git a/serving/src/main/resources/application.properties b/serving/src/main/resources/application.properties index f7d0023638..ecf43a5e29 100644 --- a/serving/src/main/resources/application.properties +++ b/serving/src/main/resources/application.properties @@ -23,7 +23,6 @@ feast.store.serving.type = ${STORE_SERVING_TYPE:} feast.store.serving.options = ${STORE_SERVING_OPTIONS:{}} feast.threadpool.max=${FEAST_MAX_NB_THREAD:128} -feast.maxentity=${FEAST_MAX_ENTITY_PER_BATCH:2000} feast.timeout=${FEAST_RETRIEVAL_TIMEOUT:5} feast.redispool.maxsize=${FEAST_REDIS_POOL_MAX_SIZE:128} feast.redispool.maxidle=${FEAST_REDIS_POOL_MAX_IDLE:16} From d92587cec421a680cbe6e839403fd82bdf983138 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Tue, 10 Sep 2019 11:35:01 +0800 Subject: [PATCH 02/19] Update python SDK with updated protos in Feast 0.1.x Removal of storage specs --- sdk/python/feast/core/CoreService_pb2.pyi | 234 ----------- sdk/python/feast/core/DatasetService_pb2.pyi | 140 ------- sdk/python/feast/core/JobService_pb2.pyi | 215 ----------- sdk/python/feast/core/UIService_pb2.pyi | 362 ------------------ sdk/python/feast/sdk/client.py | 2 - sdk/python/feast/sdk/importer.py | 29 +- sdk/python/feast/sdk/resources/feature.py | 52 +-- .../feast/sdk/resources/feature_group.py | 33 +- sdk/python/feast/specs/EntitySpec_pb2.pyi | 39 -- .../feast/specs/FeatureGroupSpec_pb2.pyi | 60 --- sdk/python/feast/specs/FeatureSpec_pb2.pyi | 78 ---- sdk/python/feast/specs/ImportJobSpecs_pb2.py | 32 +- sdk/python/feast/specs/ImportJobSpecs_pb2.pyi | 77 ---- sdk/python/feast/specs/ImportSpec_pb2.pyi | 137 ------- sdk/python/feast/specs/StorageSpec_pb2.pyi 
| 55 --- sdk/python/feast/storage/BigTable_pb2.pyi | 34 -- sdk/python/feast/storage/Redis_pb2.pyi | 87 ----- .../feast/types/FeatureRowExtended_pb2.pyi | 92 ----- sdk/python/feast/types/FeatureRow_pb2.pyi | 55 --- sdk/python/feast/types/Feature_pb2.pyi | 40 -- sdk/python/feast/types/Value_pb2.pyi | 267 ------------- sdk/python/tests/sample/valid_feature.yaml | 7 +- .../tests/sample/valid_feature_group.yaml | 7 +- .../tests/sdk/resources/test_feature.py | 10 +- .../tests/sdk/resources/test_feature_group.py | 2 - sdk/python/tests/sdk/test_client.py | 29 +- sdk/python/tests/sdk/test_importer.py | 155 ++++---- 27 files changed, 132 insertions(+), 2198 deletions(-) delete mode 100644 sdk/python/feast/core/CoreService_pb2.pyi delete mode 100644 sdk/python/feast/core/DatasetService_pb2.pyi delete mode 100644 sdk/python/feast/core/JobService_pb2.pyi delete mode 100644 sdk/python/feast/core/UIService_pb2.pyi delete mode 100644 sdk/python/feast/specs/EntitySpec_pb2.pyi delete mode 100644 sdk/python/feast/specs/FeatureGroupSpec_pb2.pyi delete mode 100644 sdk/python/feast/specs/FeatureSpec_pb2.pyi delete mode 100644 sdk/python/feast/specs/ImportJobSpecs_pb2.pyi delete mode 100644 sdk/python/feast/specs/ImportSpec_pb2.pyi delete mode 100644 sdk/python/feast/specs/StorageSpec_pb2.pyi delete mode 100644 sdk/python/feast/storage/BigTable_pb2.pyi delete mode 100644 sdk/python/feast/storage/Redis_pb2.pyi delete mode 100644 sdk/python/feast/types/FeatureRowExtended_pb2.pyi delete mode 100644 sdk/python/feast/types/FeatureRow_pb2.pyi delete mode 100644 sdk/python/feast/types/Feature_pb2.pyi delete mode 100644 sdk/python/feast/types/Value_pb2.pyi diff --git a/sdk/python/feast/core/CoreService_pb2.pyi b/sdk/python/feast/core/CoreService_pb2.pyi deleted file mode 100644 index 5a85361af1..0000000000 --- a/sdk/python/feast/core/CoreService_pb2.pyi +++ /dev/null @@ -1,234 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! -import sys -from feast.specs.EntitySpec_pb2 import ( - EntitySpec as feast___specs___EntitySpec_pb2___EntitySpec, -) - -from feast.specs.FeatureSpec_pb2 import ( - FeatureSpec as feast___specs___FeatureSpec_pb2___FeatureSpec, -) - -from feast.specs.StorageSpec_pb2 import ( - StorageSpec as feast___specs___StorageSpec_pb2___StorageSpec, -) - -from google.protobuf.internal.containers import ( - RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, - RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from typing import ( - Iterable as typing___Iterable, - Optional as typing___Optional, - Text as typing___Text, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class CoreServiceTypes(google___protobuf___message___Message): - class GetEntitiesRequest(google___protobuf___message___Message): - ids = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] - - def __init__(self, - ids : typing___Optional[typing___Iterable[typing___Text]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> CoreServiceTypes.GetEntitiesRequest: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
- if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"ids"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"ids"]) -> None: ... - - class GetEntitiesResponse(google___protobuf___message___Message): - - @property - def entities(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___specs___EntitySpec_pb2___EntitySpec]: ... - - def __init__(self, - entities : typing___Optional[typing___Iterable[feast___specs___EntitySpec_pb2___EntitySpec]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> CoreServiceTypes.GetEntitiesResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"entities"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"entities"]) -> None: ... - - class ListEntitiesResponse(google___protobuf___message___Message): - - @property - def entities(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___specs___EntitySpec_pb2___EntitySpec]: ... - - def __init__(self, - entities : typing___Optional[typing___Iterable[feast___specs___EntitySpec_pb2___EntitySpec]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> CoreServiceTypes.ListEntitiesResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"entities"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"entities"]) -> None: ... - - class GetFeaturesRequest(google___protobuf___message___Message): - ids = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] - - def __init__(self, - ids : typing___Optional[typing___Iterable[typing___Text]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> CoreServiceTypes.GetFeaturesRequest: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"ids"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"ids"]) -> None: ... - - class GetFeaturesResponse(google___protobuf___message___Message): - - @property - def features(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___specs___FeatureSpec_pb2___FeatureSpec]: ... - - def __init__(self, - features : typing___Optional[typing___Iterable[feast___specs___FeatureSpec_pb2___FeatureSpec]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> CoreServiceTypes.GetFeaturesResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"features"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"features"]) -> None: ... 
- - class ListFeaturesResponse(google___protobuf___message___Message): - - @property - def features(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___specs___FeatureSpec_pb2___FeatureSpec]: ... - - def __init__(self, - features : typing___Optional[typing___Iterable[feast___specs___FeatureSpec_pb2___FeatureSpec]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> CoreServiceTypes.ListFeaturesResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"features"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"features"]) -> None: ... - - class GetStorageRequest(google___protobuf___message___Message): - ids = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] - - def __init__(self, - ids : typing___Optional[typing___Iterable[typing___Text]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> CoreServiceTypes.GetStorageRequest: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"ids"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"ids"]) -> None: ... - - class GetStorageResponse(google___protobuf___message___Message): - - @property - def storageSpecs(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___specs___StorageSpec_pb2___StorageSpec]: ... - - def __init__(self, - storageSpecs : typing___Optional[typing___Iterable[feast___specs___StorageSpec_pb2___StorageSpec]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> CoreServiceTypes.GetStorageResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"storageSpecs"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"storageSpecs"]) -> None: ... - - class ListStorageResponse(google___protobuf___message___Message): - - @property - def storageSpecs(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___specs___StorageSpec_pb2___StorageSpec]: ... - - def __init__(self, - storageSpecs : typing___Optional[typing___Iterable[feast___specs___StorageSpec_pb2___StorageSpec]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> CoreServiceTypes.ListStorageResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"storageSpecs"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"storageSpecs"]) -> None: ... - - class ApplyEntityResponse(google___protobuf___message___Message): - entityName = ... 
# type: typing___Text - - def __init__(self, - entityName : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> CoreServiceTypes.ApplyEntityResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"entityName"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"entityName"]) -> None: ... - - class ApplyFeatureResponse(google___protobuf___message___Message): - featureId = ... # type: typing___Text - - def __init__(self, - featureId : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> CoreServiceTypes.ApplyFeatureResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"featureId"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"featureId"]) -> None: ... - - class ApplyFeatureGroupResponse(google___protobuf___message___Message): - featureGroupId = ... # type: typing___Text - - def __init__(self, - featureGroupId : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> CoreServiceTypes.ApplyFeatureGroupResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"featureGroupId"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"featureGroupId"]) -> None: ... - - - def __init__(self, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> CoreServiceTypes: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... diff --git a/sdk/python/feast/core/DatasetService_pb2.pyi b/sdk/python/feast/core/DatasetService_pb2.pyi deleted file mode 100644 index a630ee2eaa..0000000000 --- a/sdk/python/feast/core/DatasetService_pb2.pyi +++ /dev/null @@ -1,140 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! -import sys -from google.protobuf.internal.containers import ( - RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from google.protobuf.timestamp_pb2 import ( - Timestamp as google___protobuf___timestamp_pb2___Timestamp, -) - -from typing import ( - Iterable as typing___Iterable, - Mapping as typing___Mapping, - MutableMapping as typing___MutableMapping, - Optional as typing___Optional, - Text as typing___Text, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class DatasetServiceTypes(google___protobuf___message___Message): - class CreateDatasetRequest(google___protobuf___message___Message): - class FiltersEntry(google___protobuf___message___Message): - key = ... # type: typing___Text - value = ... 
# type: typing___Text - - def __init__(self, - key : typing___Optional[typing___Text] = None, - value : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> DatasetServiceTypes.CreateDatasetRequest.FiltersEntry: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"key",b"value"]) -> None: ... - - limit = ... # type: int - namePrefix = ... # type: typing___Text - - @property - def featureSet(self) -> FeatureSet: ... - - @property - def startDate(self) -> google___protobuf___timestamp_pb2___Timestamp: ... - - @property - def endDate(self) -> google___protobuf___timestamp_pb2___Timestamp: ... - - @property - def filters(self) -> typing___MutableMapping[typing___Text, typing___Text]: ... - - def __init__(self, - featureSet : typing___Optional[FeatureSet] = None, - startDate : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, - endDate : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, - limit : typing___Optional[int] = None, - namePrefix : typing___Optional[typing___Text] = None, - filters : typing___Optional[typing___Mapping[typing___Text, typing___Text]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> DatasetServiceTypes.CreateDatasetRequest: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"endDate",u"featureSet",u"startDate"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"endDate",u"featureSet",u"filters",u"limit",u"namePrefix",u"startDate"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"endDate",b"endDate",u"featureSet",b"featureSet",u"startDate",b"startDate"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"endDate",b"featureSet",b"filters",b"limit",b"namePrefix",b"startDate"]) -> None: ... - - class CreateDatasetResponse(google___protobuf___message___Message): - - @property - def datasetInfo(self) -> DatasetInfo: ... - - def __init__(self, - datasetInfo : typing___Optional[DatasetInfo] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> DatasetServiceTypes.CreateDatasetResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"datasetInfo"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"datasetInfo"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"datasetInfo",b"datasetInfo"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"datasetInfo"]) -> None: ... - - - def __init__(self, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> DatasetServiceTypes: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
- def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - -class FeatureSet(google___protobuf___message___Message): - entityName = ... # type: typing___Text - featureIds = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] - - def __init__(self, - entityName : typing___Optional[typing___Text] = None, - featureIds : typing___Optional[typing___Iterable[typing___Text]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> FeatureSet: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"entityName",u"featureIds"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"entityName",b"featureIds"]) -> None: ... - -class DatasetInfo(google___protobuf___message___Message): - name = ... # type: typing___Text - tableUrl = ... # type: typing___Text - - def __init__(self, - name : typing___Optional[typing___Text] = None, - tableUrl : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> DatasetInfo: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"name",u"tableUrl"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"name",b"tableUrl"]) -> None: ... diff --git a/sdk/python/feast/core/JobService_pb2.pyi b/sdk/python/feast/core/JobService_pb2.pyi deleted file mode 100644 index a60fcc02bb..0000000000 --- a/sdk/python/feast/core/JobService_pb2.pyi +++ /dev/null @@ -1,215 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! -import sys -from feast.specs.ImportSpec_pb2 import ( - ImportSpec as feast___specs___ImportSpec_pb2___ImportSpec, -) - -from google.protobuf.internal.containers import ( - RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, - RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from google.protobuf.timestamp_pb2 import ( - Timestamp as google___protobuf___timestamp_pb2___Timestamp, -) - -from typing import ( - Iterable as typing___Iterable, - Mapping as typing___Mapping, - MutableMapping as typing___MutableMapping, - Optional as typing___Optional, - Text as typing___Text, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class JobServiceTypes(google___protobuf___message___Message): - class SubmitImportJobRequest(google___protobuf___message___Message): - name = ... # type: typing___Text - - @property - def importSpec(self) -> feast___specs___ImportSpec_pb2___ImportSpec: ... - - def __init__(self, - importSpec : typing___Optional[feast___specs___ImportSpec_pb2___ImportSpec] = None, - name : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> JobServiceTypes.SubmitImportJobRequest: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
- def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"importSpec"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"importSpec",u"name"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"importSpec",b"importSpec"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"importSpec",b"name"]) -> None: ... - - class SubmitImportJobResponse(google___protobuf___message___Message): - jobId = ... # type: typing___Text - - def __init__(self, - jobId : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> JobServiceTypes.SubmitImportJobResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"jobId"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"jobId"]) -> None: ... - - class ListJobsResponse(google___protobuf___message___Message): - - @property - def jobs(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[JobServiceTypes.JobDetail]: ... - - def __init__(self, - jobs : typing___Optional[typing___Iterable[JobServiceTypes.JobDetail]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> JobServiceTypes.ListJobsResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"jobs"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"jobs"]) -> None: ... - - class GetJobRequest(google___protobuf___message___Message): - id = ... # type: typing___Text - - def __init__(self, - id : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> JobServiceTypes.GetJobRequest: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"id"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"id"]) -> None: ... - - class GetJobResponse(google___protobuf___message___Message): - - @property - def job(self) -> JobServiceTypes.JobDetail: ... - - def __init__(self, - job : typing___Optional[JobServiceTypes.JobDetail] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> JobServiceTypes.GetJobResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"job"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"job"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"job",b"job"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"job"]) -> None: ... 
- - class AbortJobRequest(google___protobuf___message___Message): - id = ... # type: typing___Text - - def __init__(self, - id : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> JobServiceTypes.AbortJobRequest: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"id"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"id"]) -> None: ... - - class AbortJobResponse(google___protobuf___message___Message): - id = ... # type: typing___Text - - def __init__(self, - id : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> JobServiceTypes.AbortJobResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"id"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"id"]) -> None: ... - - class JobDetail(google___protobuf___message___Message): - class MetricsEntry(google___protobuf___message___Message): - key = ... # type: typing___Text - value = ... # type: float - - def __init__(self, - key : typing___Optional[typing___Text] = None, - value : typing___Optional[float] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> JobServiceTypes.JobDetail.MetricsEntry: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"key",b"value"]) -> None: ... - - id = ... # type: typing___Text - extId = ... # type: typing___Text - type = ... # type: typing___Text - runner = ... # type: typing___Text - status = ... # type: typing___Text - entities = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] - features = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] - - @property - def metrics(self) -> typing___MutableMapping[typing___Text, float]: ... - - @property - def lastUpdated(self) -> google___protobuf___timestamp_pb2___Timestamp: ... - - @property - def created(self) -> google___protobuf___timestamp_pb2___Timestamp: ... - - def __init__(self, - id : typing___Optional[typing___Text] = None, - extId : typing___Optional[typing___Text] = None, - type : typing___Optional[typing___Text] = None, - runner : typing___Optional[typing___Text] = None, - status : typing___Optional[typing___Text] = None, - entities : typing___Optional[typing___Iterable[typing___Text]] = None, - features : typing___Optional[typing___Iterable[typing___Text]] = None, - metrics : typing___Optional[typing___Mapping[typing___Text, float]] = None, - lastUpdated : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, - created : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> JobServiceTypes.JobDetail: ... 
- def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"created",u"lastUpdated"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"created",u"entities",u"extId",u"features",u"id",u"lastUpdated",u"metrics",u"runner",u"status",u"type"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"created",b"created",u"lastUpdated",b"lastUpdated"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"created",b"entities",b"extId",b"features",b"id",b"lastUpdated",b"metrics",b"runner",b"status",b"type"]) -> None: ... - - - def __init__(self, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> JobServiceTypes: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... diff --git a/sdk/python/feast/core/UIService_pb2.pyi b/sdk/python/feast/core/UIService_pb2.pyi deleted file mode 100644 index cb39928dce..0000000000 --- a/sdk/python/feast/core/UIService_pb2.pyi +++ /dev/null @@ -1,362 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! -import sys -from feast.specs.EntitySpec_pb2 import ( - EntitySpec as feast___specs___EntitySpec_pb2___EntitySpec, -) - -from feast.specs.FeatureGroupSpec_pb2 import ( - FeatureGroupSpec as feast___specs___FeatureGroupSpec_pb2___FeatureGroupSpec, -) - -from feast.specs.FeatureSpec_pb2 import ( - FeatureSpec as feast___specs___FeatureSpec_pb2___FeatureSpec, -) - -from feast.specs.StorageSpec_pb2 import ( - StorageSpec as feast___specs___StorageSpec_pb2___StorageSpec, -) - -from google.protobuf.internal.containers import ( - RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, - RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from google.protobuf.timestamp_pb2 import ( - Timestamp as google___protobuf___timestamp_pb2___Timestamp, -) - -from typing import ( - Iterable as typing___Iterable, - Optional as typing___Optional, - Text as typing___Text, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class UIServiceTypes(google___protobuf___message___Message): - class EntityDetail(google___protobuf___message___Message): - jobs = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] - - @property - def spec(self) -> feast___specs___EntitySpec_pb2___EntitySpec: ... - - @property - def lastUpdated(self) -> google___protobuf___timestamp_pb2___Timestamp: ... - - def __init__(self, - spec : typing___Optional[feast___specs___EntitySpec_pb2___EntitySpec] = None, - jobs : typing___Optional[typing___Iterable[typing___Text]] = None, - lastUpdated : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> UIServiceTypes.EntityDetail: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
- if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"lastUpdated",u"spec"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"jobs",u"lastUpdated",u"spec"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"lastUpdated",b"lastUpdated",u"spec",b"spec"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"jobs",b"lastUpdated",b"spec"]) -> None: ... - - class GetEntityRequest(google___protobuf___message___Message): - id = ... # type: typing___Text - - def __init__(self, - id : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> UIServiceTypes.GetEntityRequest: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"id"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"id"]) -> None: ... - - class GetEntityResponse(google___protobuf___message___Message): - - @property - def entity(self) -> UIServiceTypes.EntityDetail: ... - - def __init__(self, - entity : typing___Optional[UIServiceTypes.EntityDetail] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> UIServiceTypes.GetEntityResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"entity"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"entity"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"entity",b"entity"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"entity"]) -> None: ... - - class ListEntitiesResponse(google___protobuf___message___Message): - - @property - def entities(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[UIServiceTypes.EntityDetail]: ... - - def __init__(self, - entities : typing___Optional[typing___Iterable[UIServiceTypes.EntityDetail]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> UIServiceTypes.ListEntitiesResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"entities"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"entities"]) -> None: ... - - class FeatureDetail(google___protobuf___message___Message): - bigqueryView = ... # type: typing___Text - enabled = ... # type: bool - jobs = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] - - @property - def spec(self) -> feast___specs___FeatureSpec_pb2___FeatureSpec: ... - - @property - def lastUpdated(self) -> google___protobuf___timestamp_pb2___Timestamp: ... - - @property - def created(self) -> google___protobuf___timestamp_pb2___Timestamp: ... 
- - def __init__(self, - spec : typing___Optional[feast___specs___FeatureSpec_pb2___FeatureSpec] = None, - bigqueryView : typing___Optional[typing___Text] = None, - enabled : typing___Optional[bool] = None, - jobs : typing___Optional[typing___Iterable[typing___Text]] = None, - lastUpdated : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, - created : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> UIServiceTypes.FeatureDetail: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"created",u"lastUpdated",u"spec"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"bigqueryView",u"created",u"enabled",u"jobs",u"lastUpdated",u"spec"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"created",b"created",u"lastUpdated",b"lastUpdated",u"spec",b"spec"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"bigqueryView",b"created",b"enabled",b"jobs",b"lastUpdated",b"spec"]) -> None: ... - - class GetFeatureRequest(google___protobuf___message___Message): - id = ... # type: typing___Text - - def __init__(self, - id : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> UIServiceTypes.GetFeatureRequest: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"id"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"id"]) -> None: ... - - class GetFeatureResponse(google___protobuf___message___Message): - - @property - def feature(self) -> UIServiceTypes.FeatureDetail: ... - - @property - def rawSpec(self) -> feast___specs___FeatureSpec_pb2___FeatureSpec: ... - - def __init__(self, - feature : typing___Optional[UIServiceTypes.FeatureDetail] = None, - rawSpec : typing___Optional[feast___specs___FeatureSpec_pb2___FeatureSpec] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> UIServiceTypes.GetFeatureResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"feature",u"rawSpec"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"feature",u"rawSpec"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"feature",b"feature",u"rawSpec",b"rawSpec"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"feature",b"rawSpec"]) -> None: ... - - class ListFeaturesResponse(google___protobuf___message___Message): - - @property - def features(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[UIServiceTypes.FeatureDetail]: ... - - def __init__(self, - features : typing___Optional[typing___Iterable[UIServiceTypes.FeatureDetail]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> UIServiceTypes.ListFeaturesResponse: ... 
- def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"features"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"features"]) -> None: ... - - class FeatureGroupDetail(google___protobuf___message___Message): - - @property - def spec(self) -> feast___specs___FeatureGroupSpec_pb2___FeatureGroupSpec: ... - - @property - def lastUpdated(self) -> google___protobuf___timestamp_pb2___Timestamp: ... - - def __init__(self, - spec : typing___Optional[feast___specs___FeatureGroupSpec_pb2___FeatureGroupSpec] = None, - lastUpdated : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> UIServiceTypes.FeatureGroupDetail: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"lastUpdated",u"spec"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"lastUpdated",u"spec"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"lastUpdated",b"lastUpdated",u"spec",b"spec"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"lastUpdated",b"spec"]) -> None: ... - - class GetFeatureGroupRequest(google___protobuf___message___Message): - id = ... # type: typing___Text - - def __init__(self, - id : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> UIServiceTypes.GetFeatureGroupRequest: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"id"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"id"]) -> None: ... - - class GetFeatureGroupResponse(google___protobuf___message___Message): - - @property - def featureGroup(self) -> UIServiceTypes.FeatureGroupDetail: ... - - def __init__(self, - featureGroup : typing___Optional[UIServiceTypes.FeatureGroupDetail] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> UIServiceTypes.GetFeatureGroupResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"featureGroup"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"featureGroup"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"featureGroup",b"featureGroup"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"featureGroup"]) -> None: ... - - class ListFeatureGroupsResponse(google___protobuf___message___Message): - - @property - def featureGroups(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[UIServiceTypes.FeatureGroupDetail]: ... 
- - def __init__(self, - featureGroups : typing___Optional[typing___Iterable[UIServiceTypes.FeatureGroupDetail]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> UIServiceTypes.ListFeatureGroupsResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"featureGroups"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"featureGroups"]) -> None: ... - - class StorageDetail(google___protobuf___message___Message): - - @property - def spec(self) -> feast___specs___StorageSpec_pb2___StorageSpec: ... - - @property - def lastUpdated(self) -> google___protobuf___timestamp_pb2___Timestamp: ... - - def __init__(self, - spec : typing___Optional[feast___specs___StorageSpec_pb2___StorageSpec] = None, - lastUpdated : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> UIServiceTypes.StorageDetail: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"lastUpdated",u"spec"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"lastUpdated",u"spec"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"lastUpdated",b"lastUpdated",u"spec",b"spec"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"lastUpdated",b"spec"]) -> None: ... - - class GetStorageRequest(google___protobuf___message___Message): - id = ... # type: typing___Text - - def __init__(self, - id : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> UIServiceTypes.GetStorageRequest: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"id"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"id"]) -> None: ... - - class GetStorageResponse(google___protobuf___message___Message): - - @property - def storage(self) -> UIServiceTypes.StorageDetail: ... - - def __init__(self, - storage : typing___Optional[UIServiceTypes.StorageDetail] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> UIServiceTypes.GetStorageResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"storage"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"storage"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"storage",b"storage"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"storage"]) -> None: ... 
- - class ListStorageResponse(google___protobuf___message___Message): - - @property - def storage(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[UIServiceTypes.StorageDetail]: ... - - def __init__(self, - storage : typing___Optional[typing___Iterable[UIServiceTypes.StorageDetail]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> UIServiceTypes.ListStorageResponse: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"storage"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"storage"]) -> None: ... - - - def __init__(self, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> UIServiceTypes: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... diff --git a/sdk/python/feast/sdk/client.py b/sdk/python/feast/sdk/client.py index 79db6575ff..dc6f404ce2 100644 --- a/sdk/python/feast/sdk/client.py +++ b/sdk/python/feast/sdk/client.py @@ -362,8 +362,6 @@ def _apply(self, obj): return self._apply_entity(obj) elif isinstance(obj, FeatureGroup): return self._apply_feature_group(obj) - elif isinstance(obj, Storage): - return self._apply_storage(obj) else: raise TypeError( "Apply can only be passed one of the following \ diff --git a/sdk/python/feast/sdk/importer.py b/sdk/python/feast/sdk/importer.py index 0a86f48120..b3b9d10c1f 100644 --- a/sdk/python/feast/sdk/importer.py +++ b/sdk/python/feast/sdk/importer.py @@ -124,8 +124,7 @@ def from_csv(cls, schema, features = \ _detect_schema_and_feature(entity, owner, id_column, feature_columns, timestamp_column, - timestamp_value, serving_store, - warehouse_store, df) + timestamp_value,df) iport_spec = _create_import(src_type, source_options, job_options, entity, schema) @@ -133,6 +132,7 @@ def from_csv(cls, source_options["path"])) specs = _specs(iport_spec, Entity(name=entity), features) + return cls(specs, df, props) @classmethod @@ -191,8 +191,7 @@ def from_bq(cls, schema, features = \ _detect_schema_and_feature(entity, owner, id_column, feature_columns, timestamp_column, - timestamp_value, serving_store, - warehouse_store, df) + timestamp_value, df) iport_spec = _create_import("bigquery", source_options, job_options, entity, schema) @@ -250,8 +249,7 @@ def from_df(cls, schema, features = \ _detect_schema_and_feature(entity, owner, id_column, feature_columns, timestamp_column, - timestamp_value, serving_store, - warehouse_store, df) + timestamp_value, df) iport_spec = _create_import(src_type, source_options, job_options, entity, schema) @@ -349,8 +347,7 @@ def _get_remote_location(path, staging_location): def _detect_schema_and_feature(entity, owner, id_column, feature_columns, - timestamp_column, timestamp_value, - serving_store, warehouse_store, df): + timestamp_column, timestamp_value,df): """Create schema object for import spec. Args: @@ -361,10 +358,6 @@ def _detect_schema_and_feature(entity, owner, id_column, feature_columns, rows in dataset feature_columns (str): list of column to be extracted df (pandas.Dataframe): pandas dataframe of the data - serving_store (feast.sdk.resources.feature.DataStore): Defaults to None. - Serving store to write the features in this instance to. 
- warehouse_store (feast.sdk.resources.feature.DataStore): Defaults to None. - Warehouse store to write the features in this instance to. Returns: feast.specs.ImportSpec_pb2.Schema: schema of the data @@ -401,16 +394,14 @@ def _detect_schema_and_feature(entity, owner, id_column, feature_columns, if column not in df.columns: raise ValueError( "Column with name {} is not found".format(column)) - features[column] = _create_feature(df[column], entity, owner, - serving_store, warehouse_store) + features[column] = _create_feature(df[column], entity, owner) else: # get all column except entity id and timestampColumn feature_columns = list(df.columns.values) _remove_safely(feature_columns, schema.entityIdColumn) _remove_safely(feature_columns, schema.timestampColumn) for column in feature_columns: - features[column] = _create_feature(df[column], entity, owner, - serving_store, warehouse_store) + features[column] = _create_feature(df[column], entity, owner) for col in df.columns: field = schema.fields.add() @@ -425,7 +416,7 @@ def _detect_schema_and_feature(entity, owner, id_column, feature_columns, return schema, features_dict -def _create_feature(column, entity, owner, serving_store, warehouse_store): +def _create_feature(column, entity, owner): """Create Feature object. Args: @@ -446,10 +437,6 @@ def _create_feature(column, entity, owner, serving_store, warehouse_store): entity=entity, owner=owner, value_type=dtype_to_value_type(column.dtype)) - if serving_store is not None: - feature.serving_store = serving_store - if warehouse_store is not None: - feature.warehouse_store = warehouse_store return feature diff --git a/sdk/python/feast/sdk/resources/feature.py b/sdk/python/feast/sdk/resources/feature.py index 714a140451..d9f746813c 100644 --- a/sdk/python/feast/sdk/resources/feature.py +++ b/sdk/python/feast/sdk/resources/feature.py @@ -19,7 +19,7 @@ from google.protobuf.json_format import Parse from feast.sdk.utils.print_utils import spec_to_yaml -from feast.specs.FeatureSpec_pb2 import FeatureSpec, DataStores, DataStore +from feast.specs.FeatureSpec_pb2 import FeatureSpec class ValueType(enum.Enum): @@ -49,8 +49,6 @@ def __init__(self, value_type=ValueType.DOUBLE, description='', uri='', - warehouse_store=None, - serving_store=None, group='', tags=[], options={}): @@ -65,29 +63,16 @@ def __init__(self, description (str): defaults to "". description of the feature uri (str): defaults to "". 
uri pointing to the source code or origin of this feature - warehouse_store (feast.sdk.resources.feature.Datastore): - warehouse store id and options - serving_store (feast.sdk.resources.feature.Datastore): serving - store id and options group (str, optional): feature group to inherit from tags (list[str], optional): tags assigned to the feature options (dic, optional): additional options for the feature """ id = '{}.{}'.format(entity, name).lower() - warehouse_store_spec = None - serving_store_spec = None - if serving_store is not None: - serving_store_spec = serving_store.spec - if warehouse_store is not None: - warehouse_store_spec = warehouse_store.spec - data_stores = DataStores( - serving=serving_store_spec, warehouse=warehouse_store_spec) self.__spec = FeatureSpec( id=id, name=name, entity=entity, owner=owner, - dataStores=data_stores, description=description, uri=uri, valueType=value_type.value, @@ -133,24 +118,6 @@ def owner(self): def owner(self, value): self.__spec.owner = value - @property - def warehouse_store(self): - return self.__spec.dataStores.warehouse - - @warehouse_store.setter - def warehouse_store(self, value): - """Set warehouse store from given Datastore""" - self.__spec.dataStores.warehouse.CopyFrom(value.spec) - - @property - def serving_store(self): - return self.__spec.dataStores.serving - - @serving_store.setter - def serving_store(self, value): - """Set serving store from given Datastore""" - self.__spec.dataStores.serving.CopyFrom(value.spec) - @property def description(self): return self.__spec.description @@ -238,20 +205,3 @@ def dump(self, path): with open(path, 'w') as file: file.write(str(self)) print("Saved spec to {}".format(path)) - - -class Datastore: - def __init__(self, id, options={}): - self.__spec = DataStore(id=id, options=options) - - def __str__(self): - """Print the datastore in yaml format - - Returns: - str: yaml formatted representation of the Datastore - """ - return spec_to_yaml(self.__spec) - - @property - def spec(self): - return self.__spec diff --git a/sdk/python/feast/sdk/resources/feature_group.py b/sdk/python/feast/sdk/resources/feature_group.py index 3f73c571ca..b2d68e253c 100644 --- a/sdk/python/feast/sdk/resources/feature_group.py +++ b/sdk/python/feast/sdk/resources/feature_group.py @@ -15,7 +15,6 @@ import yaml import json -from feast.specs.FeatureSpec_pb2 import DataStores from feast.specs.FeatureGroupSpec_pb2 import FeatureGroupSpec from feast.sdk.utils.print_utils import spec_to_yaml from google.protobuf.json_format import Parse @@ -26,28 +25,16 @@ class FeatureGroup(): Wrapper class for feast feature group """ - def __init__(self, id, tags=[], warehouse_store=None, serving_store=None): + def __init__(self, id, tags=[]): """Create FeatureGroup instance. Args: id (str): id of feature group tags (list): Defaults to []. tags assigned to feature group as well as all children features. 
- warehouse_store (feast.sdk.resources.feature.Datastore): - warehouse store id and options - serving_store (feast.sdk.resources.feature.Datastore): - serving store id and options """ - warehouse_store_spec = None - serving_store_spec = None - if (serving_store is not None): - serving_store_spec = serving_store.spec - if (warehouse_store is not None): - warehouse_store_spec = warehouse_store.spec - data_stores = DataStores( - serving=serving_store_spec, warehouse=warehouse_store_spec) self.__spec = FeatureGroupSpec( - id=id, tags=tags, dataStores=data_stores) + id=id, tags=tags) @property def spec(self): @@ -61,22 +48,6 @@ def id(self): def id(self, value): self.__spec.id = value - @property - def warehouse_store(self): - return self.__spec.dataStores.warehouse - - @warehouse_store.setter - def warehouse_store(self, value): - self.__spec.dataStores.serving.CopyFrom(value) - - @property - def serving_store(self): - return self.__spec.dataStores.serving - - @serving_store.setter - def serving_store(self, value): - self.__spec.dataStores.warehouse.CopyFrom(value) - @property def tags(self): return self.__spec.tags diff --git a/sdk/python/feast/specs/EntitySpec_pb2.pyi b/sdk/python/feast/specs/EntitySpec_pb2.pyi deleted file mode 100644 index 603e839e6b..0000000000 --- a/sdk/python/feast/specs/EntitySpec_pb2.pyi +++ /dev/null @@ -1,39 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! -import sys -from google.protobuf.internal.containers import ( - RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from typing import ( - Iterable as typing___Iterable, - Optional as typing___Optional, - Text as typing___Text, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class EntitySpec(google___protobuf___message___Message): - name = ... # type: typing___Text - description = ... # type: typing___Text - tags = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] - - def __init__(self, - name : typing___Optional[typing___Text] = None, - description : typing___Optional[typing___Text] = None, - tags : typing___Optional[typing___Iterable[typing___Text]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> EntitySpec: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"description",u"name",u"tags"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"description",b"name",b"tags"]) -> None: ... diff --git a/sdk/python/feast/specs/FeatureGroupSpec_pb2.pyi b/sdk/python/feast/specs/FeatureGroupSpec_pb2.pyi deleted file mode 100644 index 1274401977..0000000000 --- a/sdk/python/feast/specs/FeatureGroupSpec_pb2.pyi +++ /dev/null @@ -1,60 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! 
-import sys -from google.protobuf.internal.containers import ( - RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from typing import ( - Iterable as typing___Iterable, - Mapping as typing___Mapping, - MutableMapping as typing___MutableMapping, - Optional as typing___Optional, - Text as typing___Text, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class FeatureGroupSpec(google___protobuf___message___Message): - class OptionsEntry(google___protobuf___message___Message): - key = ... # type: typing___Text - value = ... # type: typing___Text - - def __init__(self, - key : typing___Optional[typing___Text] = None, - value : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> FeatureGroupSpec.OptionsEntry: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"key",b"value"]) -> None: ... - - id = ... # type: typing___Text - tags = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] - - @property - def options(self) -> typing___MutableMapping[typing___Text, typing___Text]: ... - - def __init__(self, - id : typing___Optional[typing___Text] = None, - tags : typing___Optional[typing___Iterable[typing___Text]] = None, - options : typing___Optional[typing___Mapping[typing___Text, typing___Text]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> FeatureGroupSpec: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"id",u"options",u"tags"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"id",b"options",b"tags"]) -> None: ... diff --git a/sdk/python/feast/specs/FeatureSpec_pb2.pyi b/sdk/python/feast/specs/FeatureSpec_pb2.pyi deleted file mode 100644 index 6c703c2d4e..0000000000 --- a/sdk/python/feast/specs/FeatureSpec_pb2.pyi +++ /dev/null @@ -1,78 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! -import sys -from feast.types.Value_pb2 import ( - ValueType as feast___types___Value_pb2___ValueType, -) - -from google.protobuf.internal.containers import ( - RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from typing import ( - Iterable as typing___Iterable, - Mapping as typing___Mapping, - MutableMapping as typing___MutableMapping, - Optional as typing___Optional, - Text as typing___Text, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class FeatureSpec(google___protobuf___message___Message): - class OptionsEntry(google___protobuf___message___Message): - key = ... # type: typing___Text - value = ... 
# type: typing___Text - - def __init__(self, - key : typing___Optional[typing___Text] = None, - value : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> FeatureSpec.OptionsEntry: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"key",b"value"]) -> None: ... - - id = ... # type: typing___Text - name = ... # type: typing___Text - owner = ... # type: typing___Text - description = ... # type: typing___Text - uri = ... # type: typing___Text - valueType = ... # type: feast___types___Value_pb2___ValueType.Enum - entity = ... # type: typing___Text - group = ... # type: typing___Text - tags = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] - - @property - def options(self) -> typing___MutableMapping[typing___Text, typing___Text]: ... - - def __init__(self, - id : typing___Optional[typing___Text] = None, - name : typing___Optional[typing___Text] = None, - owner : typing___Optional[typing___Text] = None, - description : typing___Optional[typing___Text] = None, - uri : typing___Optional[typing___Text] = None, - valueType : typing___Optional[feast___types___Value_pb2___ValueType.Enum] = None, - entity : typing___Optional[typing___Text] = None, - group : typing___Optional[typing___Text] = None, - tags : typing___Optional[typing___Iterable[typing___Text]] = None, - options : typing___Optional[typing___Mapping[typing___Text, typing___Text]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> FeatureSpec: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"description",u"entity",u"group",u"id",u"name",u"options",u"owner",u"tags",u"uri",u"valueType"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"description",b"entity",b"group",b"id",b"name",b"options",b"owner",b"tags",b"uri",b"valueType"]) -> None: ... 
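Note on the SDK changes above (illustrative only, not part of the patch): with the dataStores wiring removed, Feature and FeatureGroup are constructed without warehouse_store / serving_store arguments, and storage is instead described by StorageSpec entries such as the serving/warehouse specs carried on ImportJobSpecs below. A minimal sketch, assuming the constructor keyword names visible in the surviving signatures in this diff; the concrete values are made up.

# Minimal sketch of SDK usage after this patch (all literal values are hypothetical).
from feast.sdk.resources.feature import Feature, ValueType
from feast.sdk.resources.feature_group import FeatureGroup

# Feature no longer accepts warehouse_store / serving_store keyword arguments.
my_feature = Feature(
    name="total_bookings",            # hypothetical feature name
    entity="myentity",
    owner="bob@example.com",
    value_type=ValueType.DOUBLE,
    description="example feature")

# FeatureGroup is reduced to an id plus tags.
my_group = FeatureGroup(id="mygroup", tags=["example"])

print(my_feature.owner)               # properties such as owner remain available
print(my_group.id)

The regenerated ImportJobSpecs_pb2 module that follows adds job-level InfluxDB metric fields (write_feature_metrics_to_influx_db, influx_db_url, influx_db_database, influx_db_measurement). As ordinary fields on the generated message they can be set via keyword arguments; a sketch with hypothetical values:

from feast.specs.ImportJobSpecs_pb2 import ImportJobSpecs

specs = ImportJobSpecs(
    jobId="job-1",                               # hypothetical job id
    write_feature_metrics_to_influx_db=True,
    influx_db_url="http://localhost:8086",       # hypothetical InfluxDB endpoint
    influx_db_database="feast",                  # hypothetical database name
    influx_db_measurement="feature_metrics")     # hypothetical measurement name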
diff --git a/sdk/python/feast/specs/ImportJobSpecs_pb2.py b/sdk/python/feast/specs/ImportJobSpecs_pb2.py index 7f8dac2911..205c4e5e8a 100644 --- a/sdk/python/feast/specs/ImportJobSpecs_pb2.py +++ b/sdk/python/feast/specs/ImportJobSpecs_pb2.py @@ -24,7 +24,7 @@ package='feast.specs', syntax='proto3', serialized_options=_b('\n\013feast.specsB\023ImportJobSpecsProtoZ6github.com/gojek/feast/protos/generated/go/feast/specs'), - serialized_pb=_b('\n feast/specs/ImportJobSpecs.proto\x12\x0b\x66\x65\x61st.specs\x1a\x1c\x66\x65\x61st/specs/ImportSpec.proto\x1a\x1c\x66\x65\x61st/specs/EntitySpec.proto\x1a\x1d\x66\x65\x61st/specs/FeatureSpec.proto\x1a\x1d\x66\x65\x61st/specs/StorageSpec.proto\"\xcd\x02\n\x0eImportJobSpecs\x12\r\n\x05jobId\x18\x01 \x01(\t\x12+\n\nimportSpec\x18\x02 \x01(\x0b\x32\x17.feast.specs.ImportSpec\x12,\n\x0b\x65ntitySpecs\x18\x03 \x03(\x0b\x32\x17.feast.specs.EntitySpec\x12.\n\x0c\x66\x65\x61tureSpecs\x18\x04 \x03(\x0b\x32\x18.feast.specs.FeatureSpec\x12\x34\n\x12servingStorageSpec\x18\x05 \x01(\x0b\x32\x18.feast.specs.StorageSpec\x12\x36\n\x14warehouseStorageSpec\x18\x06 \x01(\x0b\x32\x18.feast.specs.StorageSpec\x12\x33\n\x11\x65rrorsStorageSpec\x18\x07 \x01(\x0b\x32\x18.feast.specs.StorageSpecBZ\n\x0b\x66\x65\x61st.specsB\x13ImportJobSpecsProtoZ6github.com/gojek/feast/protos/generated/go/feast/specsb\x06proto3') + serialized_pb=_b('\n feast/specs/ImportJobSpecs.proto\x12\x0b\x66\x65\x61st.specs\x1a\x1c\x66\x65\x61st/specs/ImportSpec.proto\x1a\x1c\x66\x65\x61st/specs/EntitySpec.proto\x1a\x1d\x66\x65\x61st/specs/FeatureSpec.proto\x1a\x1d\x66\x65\x61st/specs/StorageSpec.proto\"\xcb\x03\n\x0eImportJobSpecs\x12\r\n\x05jobId\x18\x01 \x01(\t\x12+\n\nimportSpec\x18\x02 \x01(\x0b\x32\x17.feast.specs.ImportSpec\x12,\n\x0b\x65ntitySpecs\x18\x03 \x03(\x0b\x32\x17.feast.specs.EntitySpec\x12.\n\x0c\x66\x65\x61tureSpecs\x18\x04 \x03(\x0b\x32\x18.feast.specs.FeatureSpec\x12\x34\n\x12servingStorageSpec\x18\x05 \x01(\x0b\x32\x18.feast.specs.StorageSpec\x12\x36\n\x14warehouseStorageSpec\x18\x06 \x01(\x0b\x32\x18.feast.specs.StorageSpec\x12\x33\n\x11\x65rrorsStorageSpec\x18\x07 \x01(\x0b\x32\x18.feast.specs.StorageSpec\x12*\n\"write_feature_metrics_to_influx_db\x18\x08 \x01(\x08\x12\x15\n\rinflux_db_url\x18\t \x01(\t\x12\x1a\n\x12influx_db_database\x18\n \x01(\t\x12\x1d\n\x15influx_db_measurement\x18\x0b \x01(\tBZ\n\x0b\x66\x65\x61st.specsB\x13ImportJobSpecsProtoZ6github.com/gojek/feast/protos/generated/go/feast/specsb\x06proto3') , dependencies=[feast_dot_specs_dot_ImportSpec__pb2.DESCRIPTOR,feast_dot_specs_dot_EntitySpec__pb2.DESCRIPTOR,feast_dot_specs_dot_FeatureSpec__pb2.DESCRIPTOR,feast_dot_specs_dot_StorageSpec__pb2.DESCRIPTOR,]) @@ -87,6 +87,34 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='write_feature_metrics_to_influx_db', full_name='feast.specs.ImportJobSpecs.write_feature_metrics_to_influx_db', index=7, + number=8, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='influx_db_url', full_name='feast.specs.ImportJobSpecs.influx_db_url', index=8, + number=9, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='influx_db_database', full_name='feast.specs.ImportJobSpecs.influx_db_database', index=9, + number=10, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='influx_db_measurement', full_name='feast.specs.ImportJobSpecs.influx_db_measurement', index=10, + number=11, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -100,7 +128,7 @@ oneofs=[ ], serialized_start=172, - serialized_end=505, + serialized_end=631, ) _IMPORTJOBSPECS.fields_by_name['importSpec'].message_type = feast_dot_specs_dot_ImportSpec__pb2._IMPORTSPEC diff --git a/sdk/python/feast/specs/ImportJobSpecs_pb2.pyi b/sdk/python/feast/specs/ImportJobSpecs_pb2.pyi deleted file mode 100644 index 3aa9744f6a..0000000000 --- a/sdk/python/feast/specs/ImportJobSpecs_pb2.pyi +++ /dev/null @@ -1,77 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! -import sys -from feast.specs.EntitySpec_pb2 import ( - EntitySpec as feast___specs___EntitySpec_pb2___EntitySpec, -) - -from feast.specs.FeatureSpec_pb2 import ( - FeatureSpec as feast___specs___FeatureSpec_pb2___FeatureSpec, -) - -from feast.specs.ImportSpec_pb2 import ( - ImportSpec as feast___specs___ImportSpec_pb2___ImportSpec, -) - -from feast.specs.StorageSpec_pb2 import ( - StorageSpec as feast___specs___StorageSpec_pb2___StorageSpec, -) - -from google.protobuf.internal.containers import ( - RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from typing import ( - Iterable as typing___Iterable, - Optional as typing___Optional, - Text as typing___Text, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class ImportJobSpecs(google___protobuf___message___Message): - jobId = ... # type: typing___Text - - @property - def importSpec(self) -> feast___specs___ImportSpec_pb2___ImportSpec: ... - - @property - def entitySpecs(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___specs___EntitySpec_pb2___EntitySpec]: ... - - @property - def featureSpecs(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___specs___FeatureSpec_pb2___FeatureSpec]: ... - - @property - def servingStorageSpec(self) -> feast___specs___StorageSpec_pb2___StorageSpec: ... - - @property - def warehouseStorageSpec(self) -> feast___specs___StorageSpec_pb2___StorageSpec: ... - - @property - def errorsStorageSpec(self) -> feast___specs___StorageSpec_pb2___StorageSpec: ... 
- - def __init__(self, - jobId : typing___Optional[typing___Text] = None, - importSpec : typing___Optional[feast___specs___ImportSpec_pb2___ImportSpec] = None, - entitySpecs : typing___Optional[typing___Iterable[feast___specs___EntitySpec_pb2___EntitySpec]] = None, - featureSpecs : typing___Optional[typing___Iterable[feast___specs___FeatureSpec_pb2___FeatureSpec]] = None, - servingStorageSpec : typing___Optional[feast___specs___StorageSpec_pb2___StorageSpec] = None, - warehouseStorageSpec : typing___Optional[feast___specs___StorageSpec_pb2___StorageSpec] = None, - errorsStorageSpec : typing___Optional[feast___specs___StorageSpec_pb2___StorageSpec] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> ImportJobSpecs: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"errorsStorageSpec",u"importSpec",u"servingStorageSpec",u"warehouseStorageSpec"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"entitySpecs",u"errorsStorageSpec",u"featureSpecs",u"importSpec",u"jobId",u"servingStorageSpec",u"warehouseStorageSpec"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"errorsStorageSpec",b"errorsStorageSpec",u"importSpec",b"importSpec",u"servingStorageSpec",b"servingStorageSpec",u"warehouseStorageSpec",b"warehouseStorageSpec"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"entitySpecs",b"errorsStorageSpec",b"featureSpecs",b"importSpec",b"jobId",b"servingStorageSpec",b"warehouseStorageSpec"]) -> None: ... diff --git a/sdk/python/feast/specs/ImportSpec_pb2.pyi b/sdk/python/feast/specs/ImportSpec_pb2.pyi deleted file mode 100644 index 6c686a9e1d..0000000000 --- a/sdk/python/feast/specs/ImportSpec_pb2.pyi +++ /dev/null @@ -1,137 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! -import sys -from google.protobuf.internal.containers import ( - RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, - RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from google.protobuf.timestamp_pb2 import ( - Timestamp as google___protobuf___timestamp_pb2___Timestamp, -) - -from typing import ( - Iterable as typing___Iterable, - Mapping as typing___Mapping, - MutableMapping as typing___MutableMapping, - Optional as typing___Optional, - Text as typing___Text, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class ImportSpec(google___protobuf___message___Message): - class SourceOptionsEntry(google___protobuf___message___Message): - key = ... # type: typing___Text - value = ... # type: typing___Text - - def __init__(self, - key : typing___Optional[typing___Text] = None, - value : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> ImportSpec.SourceOptionsEntry: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... 
- else: - def ClearField(self, field_name: typing_extensions___Literal[b"key",b"value"]) -> None: ... - - class JobOptionsEntry(google___protobuf___message___Message): - key = ... # type: typing___Text - value = ... # type: typing___Text - - def __init__(self, - key : typing___Optional[typing___Text] = None, - value : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> ImportSpec.JobOptionsEntry: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"key",b"value"]) -> None: ... - - type = ... # type: typing___Text - entities = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] - - @property - def sourceOptions(self) -> typing___MutableMapping[typing___Text, typing___Text]: ... - - @property - def jobOptions(self) -> typing___MutableMapping[typing___Text, typing___Text]: ... - - @property - def schema(self) -> Schema: ... - - def __init__(self, - type : typing___Optional[typing___Text] = None, - sourceOptions : typing___Optional[typing___Mapping[typing___Text, typing___Text]] = None, - jobOptions : typing___Optional[typing___Mapping[typing___Text, typing___Text]] = None, - entities : typing___Optional[typing___Iterable[typing___Text]] = None, - schema : typing___Optional[Schema] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> ImportSpec: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"schema"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"entities",u"jobOptions",u"schema",u"sourceOptions",u"type"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"schema",b"schema"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"entities",b"jobOptions",b"schema",b"sourceOptions",b"type"]) -> None: ... - -class Schema(google___protobuf___message___Message): - timestampColumn = ... # type: typing___Text - entityIdColumn = ... # type: typing___Text - - @property - def fields(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[Field]: ... - - @property - def timestampValue(self) -> google___protobuf___timestamp_pb2___Timestamp: ... - - def __init__(self, - fields : typing___Optional[typing___Iterable[Field]] = None, - timestampColumn : typing___Optional[typing___Text] = None, - timestampValue : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, - entityIdColumn : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> Schema: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"timestamp",u"timestampColumn",u"timestampValue"]) -> bool: ... 
- def ClearField(self, field_name: typing_extensions___Literal[u"entityIdColumn",u"fields",u"timestamp",u"timestampColumn",u"timestampValue"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"timestamp",b"timestamp",u"timestampColumn",b"timestampColumn",u"timestampValue",b"timestampValue"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"entityIdColumn",b"fields",b"timestamp",b"timestampColumn",b"timestampValue"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions___Literal[u"timestamp",b"timestamp"]) -> typing_extensions___Literal["timestampColumn","timestampValue"]: ... - -class Field(google___protobuf___message___Message): - name = ... # type: typing___Text - featureId = ... # type: typing___Text - - def __init__(self, - name : typing___Optional[typing___Text] = None, - featureId : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> Field: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"featureId",u"name"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"featureId",b"name"]) -> None: ... diff --git a/sdk/python/feast/specs/StorageSpec_pb2.pyi b/sdk/python/feast/specs/StorageSpec_pb2.pyi deleted file mode 100644 index f305abeb77..0000000000 --- a/sdk/python/feast/specs/StorageSpec_pb2.pyi +++ /dev/null @@ -1,55 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! -import sys -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from typing import ( - Mapping as typing___Mapping, - MutableMapping as typing___MutableMapping, - Optional as typing___Optional, - Text as typing___Text, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class StorageSpec(google___protobuf___message___Message): - class OptionsEntry(google___protobuf___message___Message): - key = ... # type: typing___Text - value = ... # type: typing___Text - - def __init__(self, - key : typing___Optional[typing___Text] = None, - value : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> StorageSpec.OptionsEntry: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"key",b"value"]) -> None: ... - - id = ... # type: typing___Text - type = ... # type: typing___Text - - @property - def options(self) -> typing___MutableMapping[typing___Text, typing___Text]: ... - - def __init__(self, - id : typing___Optional[typing___Text] = None, - type : typing___Optional[typing___Text] = None, - options : typing___Optional[typing___Mapping[typing___Text, typing___Text]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> StorageSpec: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
- if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"id",u"options",u"type"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"id",b"options",b"type"]) -> None: ... diff --git a/sdk/python/feast/storage/BigTable_pb2.pyi b/sdk/python/feast/storage/BigTable_pb2.pyi deleted file mode 100644 index 1744f29485..0000000000 --- a/sdk/python/feast/storage/BigTable_pb2.pyi +++ /dev/null @@ -1,34 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! -import sys -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from typing import ( - Optional as typing___Optional, - Text as typing___Text, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class BigTableRowKey(google___protobuf___message___Message): - sha1Prefix = ... # type: typing___Text - entityKey = ... # type: typing___Text - reversedMillis = ... # type: typing___Text - - def __init__(self, - sha1Prefix : typing___Optional[typing___Text] = None, - entityKey : typing___Optional[typing___Text] = None, - reversedMillis : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> BigTableRowKey: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"entityKey",u"reversedMillis",u"sha1Prefix"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"entityKey",b"reversedMillis",b"sha1Prefix"]) -> None: ... diff --git a/sdk/python/feast/storage/Redis_pb2.pyi b/sdk/python/feast/storage/Redis_pb2.pyi deleted file mode 100644 index fa4dba708b..0000000000 --- a/sdk/python/feast/storage/Redis_pb2.pyi +++ /dev/null @@ -1,87 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! -import sys -from feast.types.Value_pb2 import ( - Value as feast___types___Value_pb2___Value, -) - -from google.protobuf.internal.containers import ( - RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from google.protobuf.timestamp_pb2 import ( - Timestamp as google___protobuf___timestamp_pb2___Timestamp, -) - -from typing import ( - Iterable as typing___Iterable, - Optional as typing___Optional, - Text as typing___Text, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class RedisBucketKey(google___protobuf___message___Message): - entityKey = ... # type: typing___Text - featureIdSha1Prefix = ... # type: typing___Text - bucketId = ... # type: int - - def __init__(self, - entityKey : typing___Optional[typing___Text] = None, - featureIdSha1Prefix : typing___Optional[typing___Text] = None, - bucketId : typing___Optional[int] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> RedisBucketKey: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"bucketId",u"entityKey",u"featureIdSha1Prefix"]) -> None: ... 
- else: - def ClearField(self, field_name: typing_extensions___Literal[b"bucketId",b"entityKey",b"featureIdSha1Prefix"]) -> None: ... - -class RedisBucketValue(google___protobuf___message___Message): - - @property - def value(self) -> feast___types___Value_pb2___Value: ... - - @property - def eventTimestamp(self) -> google___protobuf___timestamp_pb2___Timestamp: ... - - def __init__(self, - value : typing___Optional[feast___types___Value_pb2___Value] = None, - eventTimestamp : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> RedisBucketValue: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"eventTimestamp",u"value"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"eventTimestamp",u"value"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"eventTimestamp",b"eventTimestamp",u"value",b"value"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"eventTimestamp",b"value"]) -> None: ... - -class RedisBucketValueList(google___protobuf___message___Message): - - @property - def values(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[RedisBucketValue]: ... - - def __init__(self, - values : typing___Optional[typing___Iterable[RedisBucketValue]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> RedisBucketValueList: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"values"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"values"]) -> None: ... diff --git a/sdk/python/feast/types/FeatureRowExtended_pb2.pyi b/sdk/python/feast/types/FeatureRowExtended_pb2.pyi deleted file mode 100644 index 6a02035dc4..0000000000 --- a/sdk/python/feast/types/FeatureRowExtended_pb2.pyi +++ /dev/null @@ -1,92 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! -import sys -from feast.types.FeatureRow_pb2 import ( - FeatureRow as feast___types___FeatureRow_pb2___FeatureRow, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from google.protobuf.timestamp_pb2 import ( - Timestamp as google___protobuf___timestamp_pb2___Timestamp, -) - -from typing import ( - Optional as typing___Optional, - Text as typing___Text, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class Error(google___protobuf___message___Message): - cause = ... # type: typing___Text - transform = ... # type: typing___Text - message = ... # type: typing___Text - stackTrace = ... # type: typing___Text - - def __init__(self, - cause : typing___Optional[typing___Text] = None, - transform : typing___Optional[typing___Text] = None, - message : typing___Optional[typing___Text] = None, - stackTrace : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> Error: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
- def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"cause",u"message",u"stackTrace",u"transform"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"cause",b"message",b"stackTrace",b"transform"]) -> None: ... - -class Attempt(google___protobuf___message___Message): - attempts = ... # type: int - - @property - def error(self) -> Error: ... - - def __init__(self, - attempts : typing___Optional[int] = None, - error : typing___Optional[Error] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> Attempt: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"error"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"attempts",u"error"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"error",b"error"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"attempts",b"error"]) -> None: ... - -class FeatureRowExtended(google___protobuf___message___Message): - - @property - def row(self) -> feast___types___FeatureRow_pb2___FeatureRow: ... - - @property - def lastAttempt(self) -> Attempt: ... - - @property - def firstSeen(self) -> google___protobuf___timestamp_pb2___Timestamp: ... - - def __init__(self, - row : typing___Optional[feast___types___FeatureRow_pb2___FeatureRow] = None, - lastAttempt : typing___Optional[Attempt] = None, - firstSeen : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> FeatureRowExtended: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"firstSeen",u"lastAttempt",u"row"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"firstSeen",u"lastAttempt",u"row"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"firstSeen",b"firstSeen",u"lastAttempt",b"lastAttempt",u"row",b"row"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"firstSeen",b"lastAttempt",b"row"]) -> None: ... diff --git a/sdk/python/feast/types/FeatureRow_pb2.pyi b/sdk/python/feast/types/FeatureRow_pb2.pyi deleted file mode 100644 index 1b93879d2a..0000000000 --- a/sdk/python/feast/types/FeatureRow_pb2.pyi +++ /dev/null @@ -1,55 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! 
-import sys -from feast.types.Feature_pb2 import ( - Feature as feast___types___Feature_pb2___Feature, -) - -from google.protobuf.internal.containers import ( - RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from google.protobuf.timestamp_pb2 import ( - Timestamp as google___protobuf___timestamp_pb2___Timestamp, -) - -from typing import ( - Iterable as typing___Iterable, - Optional as typing___Optional, - Text as typing___Text, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class FeatureRow(google___protobuf___message___Message): - entityKey = ... # type: typing___Text - entityName = ... # type: typing___Text - - @property - def features(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___types___Feature_pb2___Feature]: ... - - @property - def eventTimestamp(self) -> google___protobuf___timestamp_pb2___Timestamp: ... - - def __init__(self, - entityKey : typing___Optional[typing___Text] = None, - features : typing___Optional[typing___Iterable[feast___types___Feature_pb2___Feature]] = None, - eventTimestamp : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, - entityName : typing___Optional[typing___Text] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> FeatureRow: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"eventTimestamp"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"entityKey",u"entityName",u"eventTimestamp",u"features"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"eventTimestamp",b"eventTimestamp"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"entityKey",b"entityName",b"eventTimestamp",b"features"]) -> None: ... diff --git a/sdk/python/feast/types/Feature_pb2.pyi b/sdk/python/feast/types/Feature_pb2.pyi deleted file mode 100644 index 5d7fbc46ed..0000000000 --- a/sdk/python/feast/types/Feature_pb2.pyi +++ /dev/null @@ -1,40 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! -import sys -from feast.types.Value_pb2 import ( - Value as feast___types___Value_pb2___Value, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from typing import ( - Optional as typing___Optional, - Text as typing___Text, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class Feature(google___protobuf___message___Message): - id = ... # type: typing___Text - - @property - def value(self) -> feast___types___Value_pb2___Value: ... - - def __init__(self, - id : typing___Optional[typing___Text] = None, - value : typing___Optional[feast___types___Value_pb2___Value] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> Feature: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"value"]) -> bool: ... 
- def ClearField(self, field_name: typing_extensions___Literal[u"id",u"value"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"value",b"value"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"id",b"value"]) -> None: ... diff --git a/sdk/python/feast/types/Value_pb2.pyi b/sdk/python/feast/types/Value_pb2.pyi deleted file mode 100644 index 8aa5092fdf..0000000000 --- a/sdk/python/feast/types/Value_pb2.pyi +++ /dev/null @@ -1,267 +0,0 @@ -# @generated by generate_proto_mypy_stubs.py. Do not edit! -import sys -from google.protobuf.descriptor import ( - EnumDescriptor as google___protobuf___descriptor___EnumDescriptor, -) - -from google.protobuf.internal.containers import ( - RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, - RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, -) - -from google.protobuf.message import ( - Message as google___protobuf___message___Message, -) - -from google.protobuf.timestamp_pb2 import ( - Timestamp as google___protobuf___timestamp_pb2___Timestamp, -) - -from typing import ( - Iterable as typing___Iterable, - List as typing___List, - Optional as typing___Optional, - Text as typing___Text, - Tuple as typing___Tuple, - cast as typing___cast, -) - -from typing_extensions import ( - Literal as typing_extensions___Literal, -) - - -class ValueType(google___protobuf___message___Message): - class Enum(int): - DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... - @classmethod - def Name(cls, number: int) -> str: ... - @classmethod - def Value(cls, name: str) -> ValueType.Enum: ... - @classmethod - def keys(cls) -> typing___List[str]: ... - @classmethod - def values(cls) -> typing___List[ValueType.Enum]: ... - @classmethod - def items(cls) -> typing___List[typing___Tuple[str, ValueType.Enum]]: ... - UNKNOWN = typing___cast(Enum, 0) - BYTES = typing___cast(Enum, 1) - STRING = typing___cast(Enum, 2) - INT32 = typing___cast(Enum, 3) - INT64 = typing___cast(Enum, 4) - DOUBLE = typing___cast(Enum, 5) - FLOAT = typing___cast(Enum, 6) - BOOL = typing___cast(Enum, 7) - TIMESTAMP = typing___cast(Enum, 8) - - - def __init__(self, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> ValueType: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - -class Value(google___protobuf___message___Message): - bytesVal = ... # type: bytes - stringVal = ... # type: typing___Text - int32Val = ... # type: int - int64Val = ... # type: int - doubleVal = ... # type: float - floatVal = ... # type: float - boolVal = ... # type: bool - - @property - def timestampVal(self) -> google___protobuf___timestamp_pb2___Timestamp: ... - - def __init__(self, - bytesVal : typing___Optional[bytes] = None, - stringVal : typing___Optional[typing___Text] = None, - int32Val : typing___Optional[int] = None, - int64Val : typing___Optional[int] = None, - doubleVal : typing___Optional[float] = None, - floatVal : typing___Optional[float] = None, - boolVal : typing___Optional[bool] = None, - timestampVal : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> Value: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
- def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"boolVal",u"bytesVal",u"doubleVal",u"floatVal",u"int32Val",u"int64Val",u"stringVal",u"timestampVal",u"val"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"boolVal",u"bytesVal",u"doubleVal",u"floatVal",u"int32Val",u"int64Val",u"stringVal",u"timestampVal",u"val"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"boolVal",b"boolVal",u"bytesVal",b"bytesVal",u"doubleVal",b"doubleVal",u"floatVal",b"floatVal",u"int32Val",b"int32Val",u"int64Val",b"int64Val",u"stringVal",b"stringVal",u"timestampVal",b"timestampVal",u"val",b"val"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"boolVal",b"bytesVal",b"doubleVal",b"floatVal",b"int32Val",b"int64Val",b"stringVal",b"timestampVal",b"val"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions___Literal[u"val",b"val"]) -> typing_extensions___Literal["bytesVal","stringVal","int32Val","int64Val","doubleVal","floatVal","boolVal","timestampVal"]: ... - -class ValueList(google___protobuf___message___Message): - - @property - def bytesList(self) -> BytesList: ... - - @property - def stringList(self) -> StringList: ... - - @property - def int32List(self) -> Int32List: ... - - @property - def int64List(self) -> Int64List: ... - - @property - def doubleList(self) -> DoubleList: ... - - @property - def floatList(self) -> FloatList: ... - - @property - def boolList(self) -> BoolList: ... - - @property - def timestampList(self) -> TimestampList: ... - - def __init__(self, - bytesList : typing___Optional[BytesList] = None, - stringList : typing___Optional[StringList] = None, - int32List : typing___Optional[Int32List] = None, - int64List : typing___Optional[Int64List] = None, - doubleList : typing___Optional[DoubleList] = None, - floatList : typing___Optional[FloatList] = None, - boolList : typing___Optional[BoolList] = None, - timestampList : typing___Optional[TimestampList] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> ValueList: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def HasField(self, field_name: typing_extensions___Literal[u"boolList",u"bytesList",u"doubleList",u"floatList",u"int32List",u"int64List",u"stringList",u"timestampList",u"valueList"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[u"boolList",u"bytesList",u"doubleList",u"floatList",u"int32List",u"int64List",u"stringList",u"timestampList",u"valueList"]) -> None: ... - else: - def HasField(self, field_name: typing_extensions___Literal[u"boolList",b"boolList",u"bytesList",b"bytesList",u"doubleList",b"doubleList",u"floatList",b"floatList",u"int32List",b"int32List",u"int64List",b"int64List",u"stringList",b"stringList",u"timestampList",b"timestampList",u"valueList",b"valueList"]) -> bool: ... - def ClearField(self, field_name: typing_extensions___Literal[b"boolList",b"bytesList",b"doubleList",b"floatList",b"int32List",b"int64List",b"stringList",b"timestampList",b"valueList"]) -> None: ... 
- def WhichOneof(self, oneof_group: typing_extensions___Literal[u"valueList",b"valueList"]) -> typing_extensions___Literal["bytesList","stringList","int32List","int64List","doubleList","floatList","boolList","timestampList"]: ... - -class BytesList(google___protobuf___message___Message): - val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes] - - def __init__(self, - val : typing___Optional[typing___Iterable[bytes]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> BytesList: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"val"]) -> None: ... - -class StringList(google___protobuf___message___Message): - val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] - - def __init__(self, - val : typing___Optional[typing___Iterable[typing___Text]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> StringList: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"val"]) -> None: ... - -class Int32List(google___protobuf___message___Message): - val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[int] - - def __init__(self, - val : typing___Optional[typing___Iterable[int]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> Int32List: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"val"]) -> None: ... - -class Int64List(google___protobuf___message___Message): - val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[int] - - def __init__(self, - val : typing___Optional[typing___Iterable[int]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> Int64List: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"val"]) -> None: ... - -class DoubleList(google___protobuf___message___Message): - val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[float] - - def __init__(self, - val : typing___Optional[typing___Iterable[float]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> DoubleList: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
- if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"val"]) -> None: ... - -class FloatList(google___protobuf___message___Message): - val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[float] - - def __init__(self, - val : typing___Optional[typing___Iterable[float]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> FloatList: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"val"]) -> None: ... - -class BoolList(google___protobuf___message___Message): - val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bool] - - def __init__(self, - val : typing___Optional[typing___Iterable[bool]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> BoolList: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"val"]) -> None: ... - -class TimestampList(google___protobuf___message___Message): - - @property - def val(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[google___protobuf___timestamp_pb2___Timestamp]: ... - - def __init__(self, - val : typing___Optional[typing___Iterable[google___protobuf___timestamp_pb2___Timestamp]] = None, - ) -> None: ... - @classmethod - def FromString(cls, s: bytes) -> TimestampList: ... - def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... - if sys.version_info >= (3,): - def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... - else: - def ClearField(self, field_name: typing_extensions___Literal[b"val"]) -> None: ... diff --git a/sdk/python/tests/sample/valid_feature.yaml b/sdk/python/tests/sample/valid_feature.yaml index a54915641b..824b1a1ce3 100644 --- a/sdk/python/tests/sample/valid_feature.yaml +++ b/sdk/python/tests/sample/valid_feature.yaml @@ -4,9 +4,4 @@ entity: myentity owner: bob@example.com description: test entity. 
valueType: BOOL -uri: https://github.com/bob/example -dataStores: - serving: - id: REDIS1 - warehouse: - id: BIGQUERY1 \ No newline at end of file +uri: https://github.com/bob/example \ No newline at end of file diff --git a/sdk/python/tests/sample/valid_feature_group.yaml b/sdk/python/tests/sample/valid_feature_group.yaml index f877d209fe..035010877a 100644 --- a/sdk/python/tests/sample/valid_feature_group.yaml +++ b/sdk/python/tests/sample/valid_feature_group.yaml @@ -1,7 +1,2 @@ id: my_fg -tags: ["tag1", "tag2"] -dataStores: - serving: - id: "REDIS1" - warehouse: - id: "BIGQUERY1" \ No newline at end of file +tags: ["tag1", "tag2"] \ No newline at end of file diff --git a/sdk/python/tests/sdk/resources/test_feature.py b/sdk/python/tests/sdk/resources/test_feature.py index 7f296db235..80b57fbc3d 100644 --- a/sdk/python/tests/sdk/resources/test_feature.py +++ b/sdk/python/tests/sdk/resources/test_feature.py @@ -12,22 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. -from feast.sdk.resources.feature import Feature, Datastore, ValueType +from feast.sdk.resources.feature import Feature, ValueType class TestFeature(object): def dummy_feature(self): - warehouse_data_store = Datastore(id="BIGQUERY1", options={}) - serving_data_store = Datastore(id="REDIS1", options={}) my_feature = Feature( name="my_feature", entity="my_entity", value_type=ValueType.BYTES, owner="feast@web.com", description="test feature", - uri="github.com/feature_repo", - warehouse_store=warehouse_data_store, - serving_store=serving_data_store) + uri="github.com/feature_repo") return my_feature def test_set_name(self): @@ -53,5 +49,3 @@ def test_read_from_yaml(self): assert feature.description == "test entity." assert feature.value_type == ValueType.BOOL assert feature.uri == "https://github.com/bob/example" - assert feature.serving_store.id == "REDIS1" - assert feature.warehouse_store.id == "BIGQUERY1" diff --git a/sdk/python/tests/sdk/resources/test_feature_group.py b/sdk/python/tests/sdk/resources/test_feature_group.py index 244211d192..c553db6d6d 100644 --- a/sdk/python/tests/sdk/resources/test_feature_group.py +++ b/sdk/python/tests/sdk/resources/test_feature_group.py @@ -20,6 +20,4 @@ def test_read_from_yaml(self): feature_group = FeatureGroup.from_yaml( "tests/sample/valid_feature_group.yaml") assert feature_group.id == "my_fg" - assert feature_group.serving_store.id == "REDIS1" - assert feature_group.warehouse_store.id == "BIGQUERY1" assert feature_group.tags == ["tag1", "tag2"] diff --git a/sdk/python/tests/sdk/test_client.py b/sdk/python/tests/sdk/test_client.py index 9e76cde301..cbcc163ec0 100644 --- a/sdk/python/tests/sdk/test_client.py +++ b/sdk/python/tests/sdk/test_client.py @@ -39,7 +39,7 @@ from feast.sdk.utils.bq_util import TableDownloader from feast.serving.Serving_pb2 import QueryFeaturesRequest, \ QueryFeaturesResponse, FeatureValue -from feast.specs.FeatureSpec_pb2 import FeatureSpec, DataStores, DataStore +from feast.specs.FeatureSpec_pb2 import FeatureSpec from feast.specs.ImportSpec_pb2 import ImportSpec from feast.specs.StorageSpec_pb2 import StorageSpec from feast.types.Value_pb2 import Value @@ -92,19 +92,6 @@ def test_apply_single_feature_group(self, client, mocker): name = client.apply(my_feature_group) assert name == "test" - def test_apply_single_storage(self, client, mocker): - my_storage = Storage(id="TEST", type="redis") - grpc_stub = core.CoreServiceStub(grpc.insecure_channel("")) - - with mocker.patch.object( - grpc_stub, - 
'ApplyStorage', - return_value=CoreServiceTypes.ApplyStorageResponse( - storageId="TEST")): - client._core_service_stub = grpc_stub - name = client.apply(my_storage) - assert name == "TEST" - def test_apply_unsupported_object(self, client): with pytest.raises(TypeError) as e_info: client.apply(None) @@ -112,17 +99,11 @@ def test_apply_unsupported_object(self, client): + "following types: [Feature, Entity, FeatureGroup, Storage, Importer]" def test_apply_multiple(self, client, mocker): - my_storage = Storage(id="TEST", type="redis") my_feature_group = FeatureGroup(id="test") my_entity = Entity(name="test") grpc_stub = core.CoreServiceStub(grpc.insecure_channel("")) - mocker.patch.object( - grpc_stub, - 'ApplyStorage', - return_value=CoreServiceTypes.ApplyStorageResponse( - storageId="TEST")) mocker.patch.object( grpc_stub, 'ApplyFeatureGroup', @@ -135,8 +116,8 @@ def test_apply_multiple(self, client, mocker): entityName="test")) client._core_service_stub = grpc_stub - ids = client.apply([my_storage, my_entity, my_feature_group]) - assert ids == ["TEST", "test", "test"] + ids = client.apply([my_entity, my_feature_group]) + assert ids == ["test", "test"] def test_run_job_no_staging(self, client, mocker): grpc_stub = jobs.JobServiceStub(grpc.insecure_channel("")) @@ -493,9 +474,7 @@ def _create_query_features_response(self, entity_name, entities): return response def _create_feature_spec(self, feature_id, wh_id): - wh_store = DataStore(id=wh_id) - datastores = DataStores(warehouse=wh_store) - return FeatureSpec(id=feature_id, dataStores=datastores) + return FeatureSpec(id=feature_id) def _create_bq_spec(self, id, project, dataset): return StorageSpec( diff --git a/sdk/python/tests/sdk/test_importer.py b/sdk/python/tests/sdk/test_importer.py index 6a0c9b0784..aa05bcb82e 100644 --- a/sdk/python/tests/sdk/test_importer.py +++ b/sdk/python/tests/sdk/test_importer.py @@ -15,8 +15,7 @@ import pandas as pd import pytest import ntpath -from feast.sdk.resources.feature import Feature, ValueType, \ - Datastore +from feast.sdk.resources.feature import Feature, ValueType from feast.sdk.importer import _create_feature, Importer from feast.sdk.utils.gs_utils import is_gs_path @@ -28,9 +27,7 @@ def test_from_csv(self): owner = "owner@feast.com" staging_location = "gs://test-bucket" id_column = "driver_id" - feature_columns = [ - "avg_distance_completed", "avg_customer_distance_completed" - ] + feature_columns = ["avg_distance_completed", "avg_customer_distance_completed"] timestamp_column = "ts" importer = Importer.from_csv( @@ -40,17 +37,25 @@ def test_from_csv(self): staging_location=staging_location, id_column=id_column, feature_columns=feature_columns, - timestamp_column=timestamp_column) + timestamp_column=timestamp_column, + ) - self._validate_csv_importer(importer, csv_path, entity_name, - owner, staging_location, id_column, - feature_columns, timestamp_column) + self._validate_csv_importer( + importer, + csv_path, + entity_name, + owner, + staging_location, + id_column, + feature_columns, + timestamp_column, + ) def test_from_csv_id_column_not_specified(self): - with pytest.raises( - ValueError, match="Column with name driver is not found"): + with pytest.raises(ValueError, match="Column with name driver is not found"): feature_columns = [ - "avg_distance_completed", "avg_customer_distance_completed" + "avg_distance_completed", + "avg_customer_distance_completed", ] csv_path = "tests/data/driver_features.csv" Importer.from_csv( @@ -59,12 +64,14 @@ def test_from_csv_id_column_not_specified(self): 
owner="owner@feast.com", staging_location="gs://test-bucket", feature_columns=feature_columns, - timestamp_column="ts") + timestamp_column="ts", + ) def test_from_csv_timestamp_column_not_specified(self): feature_columns = [ - "avg_distance_completed", "avg_customer_distance_completed", - "avg_distance_cancelled" + "avg_distance_completed", + "avg_customer_distance_completed", + "avg_distance_cancelled", ] csv_path = "tests/data/driver_features.csv" entity_name = "driver" @@ -77,7 +84,8 @@ def test_from_csv_timestamp_column_not_specified(self): owner=owner, staging_location=staging_location, id_column=id_column, - feature_columns=feature_columns) + feature_columns=feature_columns, + ) self._validate_csv_importer( importer, @@ -86,7 +94,8 @@ def test_from_csv_timestamp_column_not_specified(self): owner, staging_location=staging_location, id_column=id_column, - feature_columns=feature_columns) + feature_columns=feature_columns, + ) def test_from_csv_feature_columns_not_specified(self): csv_path = "tests/data/driver_features.csv" @@ -101,7 +110,8 @@ def test_from_csv_feature_columns_not_specified(self): owner=owner, staging_location=staging_location, id_column=id_column, - timestamp_column=timestamp_column) + timestamp_column=timestamp_column, + ) self._validate_csv_importer( importer, @@ -110,15 +120,17 @@ def test_from_csv_feature_columns_not_specified(self): owner, staging_location=staging_location, id_column=id_column, - timestamp_column=timestamp_column) + timestamp_column=timestamp_column, + ) def test_from_csv_staging_location_not_specified(self): with pytest.raises( - ValueError, - match= - "Specify staging_location for importing local file/dataframe"): + ValueError, + match="Specify staging_location for importing local file/dataframe", + ): feature_columns = [ - "avg_distance_completed", "avg_customer_distance_completed" + "avg_distance_completed", + "avg_customer_distance_completed", ] csv_path = "tests/data/driver_features.csv" Importer.from_csv( @@ -126,12 +138,15 @@ def test_from_csv_staging_location_not_specified(self): entity="driver", owner="owner@feast.com", feature_columns=feature_columns, - timestamp_column="ts") + timestamp_column="ts", + ) with pytest.raises( - ValueError, match="Staging location must be in GCS") as e_info: + ValueError, match="Staging location must be in GCS" + ) as e_info: feature_columns = [ - "avg_distance_completed", "avg_customer_distance_completed" + "avg_distance_completed", + "avg_customer_distance_completed", ] csv_path = "tests/data/driver_features.csv" Importer.from_csv( @@ -140,7 +155,8 @@ def test_from_csv_staging_location_not_specified(self): owner="owner@feast.com", staging_location="/home", feature_columns=feature_columns, - timestamp_column="ts") + timestamp_column="ts", + ) def test_from_df(self): csv_path = "tests/data/driver_features.csv" @@ -154,39 +170,40 @@ def test_from_df(self): owner="owner@feast.com", staging_location=staging_location, id_column="driver_id", - timestamp_column="ts") + timestamp_column="ts", + ) assert importer.require_staging == True - assert ("{}/tmp_{}".format(staging_location, - entity) in importer.remote_path) + assert "{}/tmp_{}".format(staging_location, entity) in importer.remote_path for feature in importer.features.values(): assert feature.name in df.columns assert feature.id == "driver." 
+ feature.name import_spec = importer.spec assert import_spec.type == "file.csv" - assert import_spec.sourceOptions == { - "path": importer.remote_path - } + assert import_spec.sourceOptions == {"path": importer.remote_path} assert import_spec.entities == ["driver"] schema = import_spec.schema assert schema.entityIdColumn == "driver_id" assert schema.timestampValue is not None feature_columns = [ - "completed", "avg_distance_completed", - "avg_customer_distance_completed", "avg_distance_cancelled" + "completed", + "avg_distance_completed", + "avg_customer_distance_completed", + "avg_distance_cancelled", ] for col, field in zip(df.columns.values, schema.fields): assert col == field.name if col in feature_columns: assert field.featureId == "driver." + col - + def test_stage_df_without_timestamp(self, mocker): mocker.patch("feast.sdk.importer.df_to_gcs", return_value=True) feature_columns = [ - "avg_distance_completed", "avg_customer_distance_completed", - "avg_distance_cancelled" + "avg_distance_completed", + "avg_customer_distance_completed", + "avg_distance_cancelled", ] csv_path = "tests/data/driver_features.csv" entity_name = "driver" @@ -199,25 +216,29 @@ def test_stage_df_without_timestamp(self, mocker): owner=owner, staging_location=staging_location, id_column=id_column, - feature_columns=feature_columns) + feature_columns=feature_columns, + ) importer.stage() - def _validate_csv_importer(self, - importer, - csv_path, - entity_name, - owner, - staging_location=None, - id_column=None, - feature_columns=None, - timestamp_column=None, - timestamp_value=None): + def _validate_csv_importer( + self, + importer, + csv_path, + entity_name, + owner, + staging_location=None, + id_column=None, + feature_columns=None, + timestamp_column=None, + timestamp_value=None, + ): df = pd.read_csv(csv_path) assert not importer.require_staging == is_gs_path(csv_path) if importer.require_staging: assert importer.remote_path == "{}/{}".format( - staging_location, ntpath.basename(csv_path)) + staging_location, ntpath.basename(csv_path) + ) # check features created for feature in importer.features.values(): @@ -231,7 +252,9 @@ def _validate_csv_importer(self, assert import_spec.entities == [entity_name] schema = import_spec.schema - assert schema.entityIdColumn == id_column if id_column is not None else entity_name + assert ( + schema.entityIdColumn == id_column if id_column is not None else entity_name + ) if timestamp_column is not None: assert schema.timestampColumn == timestamp_column elif timestamp_value is not None: @@ -246,38 +269,26 @@ def _validate_csv_importer(self, for col, field in zip(df.columns.values, schema.fields): assert col == field.name if col in feature_columns: - assert field.featureId == '{}.{}'.format(entity_name, - col).lower() - + assert field.featureId == "{}.{}".format(entity_name, col).lower() class TestHelpers: def test_create_feature(self): - col = pd.Series([1] * 3, dtype='int32', name="test") + col = pd.Series([1] * 3, dtype="int32", name="test") expected = Feature( - name="test", - entity="test", - owner="person", - value_type=ValueType.INT32) - actual = _create_feature(col, "test", "person", None, None) + name="test", entity="test", owner="person", value_type=ValueType.INT32 + ) + actual = _create_feature(col, "test", "person") assert actual.id == expected.id assert actual.value_type == expected.value_type assert actual.owner == expected.owner def test_create_feature_with_stores(self): - col = pd.Series([1] * 3, dtype='int32', name="test") + col = pd.Series([1] * 3, 
dtype="int32", name="test") expected = Feature( - name="test", - entity="test", - owner="person", - value_type=ValueType.INT32, - serving_store=Datastore(id="SERVING"), - warehouse_store=Datastore(id="WAREHOUSE")) - actual = _create_feature(col, "test", "person", - Datastore(id="SERVING"), - Datastore(id="WAREHOUSE")) + name="test", entity="test", owner="person", value_type=ValueType.INT32 + ) + actual = _create_feature(col, "test", "person") assert actual.id == expected.id assert actual.value_type == expected.value_type assert actual.owner == expected.owner - assert actual.serving_store == expected.serving_store - assert actual.warehouse_store == expected.warehouse_store From e05cd440c525928501f7236016c909a710b54c17 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Tue, 10 Sep 2019 11:55:10 +0800 Subject: [PATCH 03/19] Add __init__ file in feast/types to fix module not found error --- sdk/python/feast/types/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 sdk/python/feast/types/__init__.py diff --git a/sdk/python/feast/types/__init__.py b/sdk/python/feast/types/__init__.py new file mode 100644 index 0000000000..e69de29bb2 From fae0041025c7c9344f7be9eba4edebf7e83e2a3d Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Tue, 10 Sep 2019 12:47:41 +0800 Subject: [PATCH 04/19] Add support to configure import job metrics in Feast Core --- .../core/config/ImportJobMetricsConfig.java | 13 ++++++++++++ .../core/config/InstrumentationConfig.java | 13 ++++++++++-- .../core/service/JobManagementService.java | 12 +++++++++-- .../src/main/resources/application.properties | 5 +++++ .../java/feast/core/CoreApplicationTest.java | 10 +++++++++- .../CoreApplicationWithNoServingTest.java | 6 +++++- .../service/JobManagementServiceTest.java | 20 +++++++++++-------- pom.xml | 2 +- protos/feast/specs/ImportJobSpecs.proto | 18 ++++++++++------- 9 files changed, 77 insertions(+), 22 deletions(-) create mode 100644 core/src/main/java/feast/core/config/ImportJobMetricsConfig.java diff --git a/core/src/main/java/feast/core/config/ImportJobMetricsConfig.java b/core/src/main/java/feast/core/config/ImportJobMetricsConfig.java new file mode 100644 index 0000000000..7bbb3bbc72 --- /dev/null +++ b/core/src/main/java/feast/core/config/ImportJobMetricsConfig.java @@ -0,0 +1,13 @@ +package feast.core.config; + +import lombok.AllArgsConstructor; +import lombok.Getter; + +@Getter +@AllArgsConstructor +public class ImportJobMetricsConfig { + private final boolean ingestionMetricsEnabled; + private final String influxDbUrl; + private final String influxDbName; + private final String influxDbMeasurementName; +} diff --git a/core/src/main/java/feast/core/config/InstrumentationConfig.java b/core/src/main/java/feast/core/config/InstrumentationConfig.java index eed2487355..d9b84b8b19 100644 --- a/core/src/main/java/feast/core/config/InstrumentationConfig.java +++ b/core/src/main/java/feast/core/config/InstrumentationConfig.java @@ -26,8 +26,17 @@ @Configuration public class InstrumentationConfig { @Bean - public StatsDClient getStatsDClient(@Value("${statsd.host}") String host, - @Value("${statsd.port}") int port) { + public StatsDClient getStatsDClient( + @Value("${statsd.host}") String host, @Value("${statsd.port}") int port) { return new NonBlockingStatsDClient("feast_core", host, port); } + + @Bean + public ImportJobMetricsConfig getImportJobMetricsConfig( + @Value("${ingestion.metrics.enabled}") boolean enabled, + @Value("${ingestion.metrics.influxUrl}") String influxDbUrl, + 
@Value("${ingestion.metrics.dbName}") String influxDbName, + @Value("${ingestion.metrics.dbMeasurement}") String influxDbMeasurementName) { + return new ImportJobMetricsConfig(enabled, influxDbUrl, influxDbName, influxDbMeasurementName); + } } diff --git a/core/src/main/java/feast/core/service/JobManagementService.java b/core/src/main/java/feast/core/service/JobManagementService.java index 5b2ee9217b..003b0bcd53 100644 --- a/core/src/main/java/feast/core/service/JobManagementService.java +++ b/core/src/main/java/feast/core/service/JobManagementService.java @@ -30,6 +30,7 @@ import com.google.protobuf.util.JsonFormat; import feast.core.JobServiceProto.JobServiceTypes.JobDetail; import feast.core.config.ImportJobDefaults; +import feast.core.config.ImportJobMetricsConfig; import feast.core.config.StorageConfig.StorageSpecs; import feast.core.dao.JobInfoRepository; import feast.core.dao.MetricsRepository; @@ -73,6 +74,7 @@ public class JobManagementService { private static final String JOB_PREFIX_DEFAULT = "feastimport"; private static final String UNKNOWN_EXT_JOB_ID = ""; private static final String IMPORT_JOB_SPECS_FILENAME = "importJobSpecs.yaml"; + private ImportJobMetricsConfig metricsConfig; private JobInfoRepository jobInfoRepository; private MetricsRepository metricsRepository; @@ -88,13 +90,15 @@ public JobManagementService( JobManager jobManager, ImportJobDefaults defaults, SpecService specService, - StorageSpecs storageSpecs) { + StorageSpecs storageSpecs, + ImportJobMetricsConfig metricsConfig) { this.jobInfoRepository = jobInfoRepository; this.metricsRepository = metricsRepository; this.jobManager = jobManager; this.defaults = defaults; this.specService = specService; this.storageSpecs = storageSpecs; + this.metricsConfig = metricsConfig; } public void writeImportJobSpecs(ImportJobSpecs importJobSpecs, Path workspace) { @@ -132,7 +136,11 @@ private ImportJobSpecs buildImportJobSpecs(ImportSpec importSpec, String jobId) .setJobId(jobId) .setImportSpec(importSpec) .addAllEntitySpecs(entitySpecs) - .addAllFeatureSpecs(featureSpecs); + .addAllFeatureSpecs(featureSpecs) + .setWriteFeatureMetricsToInfluxDb(metricsConfig.isIngestionMetricsEnabled()) + .setInfluxDbUrl(metricsConfig.getInfluxDbUrl()) + .setInfluxDbDatabase(metricsConfig.getInfluxDbName()) + .setInfluxDbMeasurement(metricsConfig.getInfluxDbMeasurementName()); if (storageSpecs.getServingStorageSpec() != null) { importJobSpecsBuilder.setServingStorageSpec(storageSpecs.getServingStorageSpec()); } diff --git a/core/src/main/resources/application.properties b/core/src/main/resources/application.properties index 58d8af33b3..38553b5a13 100644 --- a/core/src/main/resources/application.properties +++ b/core/src/main/resources/application.properties @@ -55,3 +55,8 @@ management.metrics.export.simple.enabled=false management.metrics.export.statsd.enabled=true management.metrics.export.statsd.host=${STATSD_HOST:localhost} management.metrics.export.statsd.port=${STATSD_PORT:8125} + +ingestion.metrics.enabled=${INGESTION_METRICS_ENABLED:false} +ingestion.metrics.influxUrl=${INGESTION_METRICS_INFLUX_URL:} +ingestion.metrics.dbName=${INGESTION_METRICS_INFLUX_DB_NAME:} +ingestion.metrics.dbMeasurement=${INGESTION_METRICS_INFLUX_DB_MEASUREMENT:} \ No newline at end of file diff --git a/core/src/test/java/feast/core/CoreApplicationTest.java b/core/src/test/java/feast/core/CoreApplicationTest.java index 01f92fcd00..73113fcc82 100644 --- a/core/src/test/java/feast/core/CoreApplicationTest.java +++ 
b/core/src/test/java/feast/core/CoreApplicationTest.java @@ -52,7 +52,11 @@ "feast.store.warehouse.options={\"path\":\"/tmp/foobar\"}", "feast.store.serving.type=redis", "feast.store.serving.options={\"host\":\"localhost\",\"port\":1234}", - "feast.store.errors.type=stderr" + "feast.store.errors.type=stderr", + "ingestion.metrics.enabled=true", + "ingestion.metrics.influxUrl=localhost", + "ingestion.metrics.dbName=db", + "ingestion.metrics.dbMeasurement=measurement" }) @DirtiesContext public class CoreApplicationTest { @@ -138,6 +142,10 @@ public void test_withProperties_systemServingAndWarehouseStoresRegistered() thro .setId(DEFAULT_WAREHOUSE_ID) .setType("file.json") .putOptions("path", "/tmp/foobar")) + .setWriteFeatureMetricsToInfluxDb(true) + .setInfluxDbUrl("localhost") + .setInfluxDbDatabase("db") + .setInfluxDbMeasurement("measurement") .build(), args.get(0)); } diff --git a/core/src/test/java/feast/core/CoreApplicationWithNoServingTest.java b/core/src/test/java/feast/core/CoreApplicationWithNoServingTest.java index 4b6cbb7838..6e326d526c 100644 --- a/core/src/test/java/feast/core/CoreApplicationWithNoServingTest.java +++ b/core/src/test/java/feast/core/CoreApplicationWithNoServingTest.java @@ -42,7 +42,11 @@ "spring.datasource.url=jdbc:h2:mem:testdb", "feast.store.warehouse.type=file.json", "feast.store.warehouse.options={\"path\":\"/tmp/foobar\"}", - "feast.store.errors.type=stderr" + "feast.store.errors.type=stderr", + "ingestion.metrics.enabled=true", + "ingestion.metrics.influxUrl=localhost", + "ingestion.metrics.dbName=db", + "ingestion.metrics.dbMeasurement=measurement" }) @DirtiesContext public class CoreApplicationWithNoServingTest { diff --git a/core/src/test/java/feast/core/service/JobManagementServiceTest.java b/core/src/test/java/feast/core/service/JobManagementServiceTest.java index 71967f8da8..03d17d143d 100644 --- a/core/src/test/java/feast/core/service/JobManagementServiceTest.java +++ b/core/src/test/java/feast/core/service/JobManagementServiceTest.java @@ -28,6 +28,7 @@ import com.google.protobuf.Timestamp; import feast.core.JobServiceProto.JobServiceTypes.JobDetail; import feast.core.config.ImportJobDefaults; +import feast.core.config.ImportJobMetricsConfig; import feast.core.config.StorageConfig.StorageSpecs; import feast.core.dao.JobInfoRepository; import feast.core.dao.MetricsRepository; @@ -62,6 +63,7 @@ public class JobManagementServiceTest { @Mock private SpecService specService; private StorageSpecs storageSpecs; + private ImportJobMetricsConfig metricsConfig; @Before public void setUp() { @@ -71,6 +73,8 @@ public void setUp() { .runner("DirectRunner").executable("/feast-import.jar").build(); storageSpecs = StorageSpecs.builder() .errorsStorageSpec(StorageSpec.newBuilder().setType("stderr").build()).build(); + + metricsConfig = new ImportJobMetricsConfig(true, "localhost", "db", "measurement"); } @Test @@ -108,7 +112,7 @@ public void shouldListAllJobDetails() { when(jobInfoRepository.findAll()).thenReturn(Lists.newArrayList(jobInfo1, jobInfo2)); JobManagementService jobManagementService = new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, - specService, storageSpecs); + specService, storageSpecs, metricsConfig); List actual = jobManagementService.listJobs(); List expected = Lists.newArrayList( @@ -147,7 +151,7 @@ public void shouldReturnDetailOfRequestedJobId() { when(jobInfoRepository.findById("job1")).thenReturn(Optional.of(jobInfo1)); JobManagementService jobManagementService = new 
JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, - specService, storageSpecs); + specService, storageSpecs, metricsConfig); JobDetail actual = jobManagementService.getJob("job1"); JobDetail expected = JobDetail.newBuilder() @@ -163,7 +167,7 @@ public void shouldReturnDetailOfRequestedJobId() { public void shouldThrowErrorIfJobIdNotFoundWhenGettingJob() { when(jobInfoRepository.findById("job1")).thenReturn(Optional.empty()); JobManagementService jobManagementService = - new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, specService, storageSpecs); + new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, specService, storageSpecs, metricsConfig); exception.expect(RetrievalException.class); exception.expectMessage("Unable to retrieve job with id job1"); jobManagementService.getJob("job1"); @@ -173,7 +177,7 @@ public void shouldThrowErrorIfJobIdNotFoundWhenGettingJob() { public void shouldThrowErrorIfJobIdNotFoundWhenAbortingJob() { when(jobInfoRepository.findById("job1")).thenReturn(Optional.empty()); JobManagementService jobManagementService = - new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, specService, storageSpecs); + new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, specService, storageSpecs, metricsConfig); exception.expect(RetrievalException.class); exception.expectMessage("Unable to retrieve job with id job1"); jobManagementService.abortJob("job1"); @@ -185,7 +189,7 @@ public void shouldThrowErrorIfJobInTerminalStateWhenAbortingJob() { job.setStatus(JobStatus.COMPLETED); when(jobInfoRepository.findById("job1")).thenReturn(Optional.of(job)); JobManagementService jobManagementService = - new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, specService, storageSpecs); + new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, specService, storageSpecs, metricsConfig); exception.expect(IllegalStateException.class); exception.expectMessage("Unable to stop job already in terminal state"); jobManagementService.abortJob("job1"); @@ -198,7 +202,7 @@ public void shouldUpdateJobAfterAborting() { job.setExtId("extId1"); when(jobInfoRepository.findById("job1")).thenReturn(Optional.of(job)); JobManagementService jobManagementService = - new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, specService, storageSpecs); + new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, specService, storageSpecs, metricsConfig); jobManagementService.abortJob("job1"); ArgumentCaptor jobCapture = ArgumentCaptor.forClass(JobInfo.class); verify(jobInfoRepository).saveAndFlush(jobCapture.capture()); @@ -212,7 +216,7 @@ public void shouldUpdateJobStatusIfExists() { ArgumentCaptor jobInfoArgumentCaptor = ArgumentCaptor.forClass(JobInfo.class); JobManagementService jobExecutionService = - new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, specService, storageSpecs); + new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, specService, storageSpecs, metricsConfig); jobExecutionService.updateJobStatus("jobid", JobStatus.PENDING); verify(jobInfoRepository, times(1)).save(jobInfoArgumentCaptor.capture()); @@ -229,7 +233,7 @@ public void shouldUpdateJobExtIdIfExists() { ArgumentCaptor jobInfoArgumentCaptor = ArgumentCaptor.forClass(JobInfo.class); JobManagementService 
jobExecutionService = - new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, specService, storageSpecs); + new JobManagementService(jobInfoRepository, metricsRepository, jobManager, defaults, specService, storageSpecs, metricsConfig); jobExecutionService.updateJobExtId("jobid", "extid"); verify(jobInfoRepository, times(1)).save(jobInfoArgumentCaptor.capture()); diff --git a/pom.xml b/pom.xml index 8afd15d0a5..9235ff7002 100644 --- a/pom.xml +++ b/pom.xml @@ -24,7 +24,7 @@ Feast Parent - 0.1.1 + 0.1.5 UTF-8 3.6.1 1.14.0 diff --git a/protos/feast/specs/ImportJobSpecs.proto b/protos/feast/specs/ImportJobSpecs.proto index dff5e79aa4..66df3ea4c6 100644 --- a/protos/feast/specs/ImportJobSpecs.proto +++ b/protos/feast/specs/ImportJobSpecs.proto @@ -40,14 +40,18 @@ message ImportJobSpecs { // and will be backward incompatible for versions greater than 0.1.x // ============================================================ // + // (optional, default: false) // write_feature_metrics_to_influx_db specifies if Feast should write feature metrics - // (such as lag and value summaries) to Influx DB. If this field is true, the next three fields: - // influx_db_url, influx_db_database, influx_db_measurement should be set as well. - // - // When retrieving the data in Influx DB: - // The field name for feature lag value in seconds unit is "lag_in_seconds" - // The field name for value is "value" - // Points will be tagged with "feature_id" and "entity_name" + // (such as lag and value summaries) to an existing Influx DB database. + // If set to true, the next three fields must all be set: + // - influx_db_url + // - influx_db_database + // - influx_db_measurement + // NOTE: + // When retrieving the data from Influx DB, + // - The field name for feature lag value in seconds unit is "lag_in_seconds" + // - The field name for value is "value" + // - Points will be tagged with "feature_id" and "entity_name" bool write_feature_metrics_to_influx_db = 8; // influx_db_url is the url for Feast to connect to Influx DB. 
Example: http://localhost:8086 string influx_db_url = 9; From e16fd0f4a0434e17f578e332e558e396e14b8cac Mon Sep 17 00:00:00 2001 From: Pradithya Aria Date: Tue, 10 Sep 2019 15:44:58 +0800 Subject: [PATCH 05/19] Fix CoreApplicationWithNoWarehouseTest --- .../feast/core/CoreApplicationWithNoWarehouseTest.java | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/core/src/test/java/feast/core/CoreApplicationWithNoWarehouseTest.java b/core/src/test/java/feast/core/CoreApplicationWithNoWarehouseTest.java index 1acd238b8b..d3b57226d8 100644 --- a/core/src/test/java/feast/core/CoreApplicationWithNoWarehouseTest.java +++ b/core/src/test/java/feast/core/CoreApplicationWithNoWarehouseTest.java @@ -51,7 +51,11 @@ "spring.datasource.url=jdbc:h2:mem:testdb", "feast.store.serving.type=redis", "feast.store.serving.options={\"host\":\"localhost\",\"port\":1234}", - "feast.store.errors.type=stderr" + "feast.store.errors.type=stderr", + "ingestion.metrics.enabled=true", + "ingestion.metrics.influxUrl=localhost", + "ingestion.metrics.dbName=db", + "ingestion.metrics.dbMeasurement=measurement" }) @DirtiesContext public class CoreApplicationWithNoWarehouseTest { @@ -132,6 +136,10 @@ public void test_withProperties_systemServingAndWarehouseStoresRegistered() thro .setId(DEFAULT_SERVING_ID) .setType("redis") .putOptions("host", "localhost").putOptions("port", "1234")) + .setWriteFeatureMetricsToInfluxDb(true) + .setInfluxDbUrl("localhost") + .setInfluxDbDatabase("db") + .setInfluxDbMeasurement("measurement") .build(), args.get(0)); } From a424fc1c1cdd49d0b47eb3575dd13e6cb6a0a35a Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Tue, 10 Sep 2019 16:36:25 +0800 Subject: [PATCH 06/19] Update charts Add import job metrics configuration Update jaeger tracing configuration for serving --- charts/feast/Chart.yaml | 4 ++-- charts/feast/README.md | 5 ++++- charts/feast/templates/core-deploy.yaml | 12 ++++++++++ charts/feast/templates/serving-deploy.yaml | 4 +++- charts/feast/values.yaml | 26 +++++++++++++++++++++- 5 files changed, 46 insertions(+), 5 deletions(-) diff --git a/charts/feast/Chart.yaml b/charts/feast/Chart.yaml index ec4e84e79b..842ab7f13f 100644 --- a/charts/feast/Chart.yaml +++ b/charts/feast/Chart.yaml @@ -1,5 +1,5 @@ apiVersion: v1 -appVersion: "0.1.4" +appVersion: "0.1.5" description: A Helm chart to install Feast on kubernetes name: feast -version: 0.1.2 +version: 0.1.3 diff --git a/charts/feast/README.md b/charts/feast/README.md index 02979c65d3..1b75bc7c5e 100644 --- a/charts/feast/README.md +++ b/charts/feast/README.md @@ -46,7 +46,6 @@ Components that Feast supports, but this installation will not include are: - Note that if you do not provision a metrics store, feast will only retain the latest metrics from your jobs. - [Jaeger tracing](www.jaegertracing.io) for serving performance. - Set `serving.jaeger.enabled` to `true`, and configure the following parameters: - - `serving.jaeger.host` - `serving.jaeger.port` - `serving.jaeger.options.samplerType` - `serving.jaeger.options.samplerParam` @@ -72,6 +71,10 @@ The following table lists the configurable parameters of the Feast chart and the | `core.jobs.options` | additional options to be provided to the beam job. 
Should be a char escaped json k-v object | {} | | `core.jobs.runner` | beam job runner - one of `DirectRunner`, `FlinkRunner` or `DataflowRunner` | DirectRunner | | `core.jobs.workspace` | workspace path for ingestion jobs, used for separate job workspaces to share importJobSpecs.yaml with ingestion and for writing errors to if no default errors store is configured | nil | +| `core.jobs.writeFeatureMetricsToInfluxDb` | specifies whether Feast import job will write feature metrics (such as feature lag and values summaries) to Influx DB for monitoring and alert purpose | false | +| `core.jobs.influxDbUrl` | Influx DB url e.g. http://localhost:8086 (required if `core.jobs.writeFeatureMetricsToInfluxDb = true`) | | +| `core.jobs.influxDbDatabase` | Influx DB database name (required if `core.jobs.writeFeatureMetricsToInfluxDb = true`) | | +| `core.jobs.influxDbMeasurement` | Influx DB [measurement name](https://docs.influxdata.com/influxdb/v1.7/concepts/key_concepts/#measurement) (required if `core.jobs.writeFeatureMetricsToInfluxDb = true`) | | | `core.replicaCount` | core deployment replica count | 3 | | `core.resources.limits.cpu` | core cpu limits | 1 | | `core.resources.limits.memory` | core memory limits | 2G | diff --git a/charts/feast/templates/core-deploy.yaml b/charts/feast/templates/core-deploy.yaml index 74b625de02..6194edbefc 100644 --- a/charts/feast/templates/core-deploy.yaml +++ b/charts/feast/templates/core-deploy.yaml @@ -98,6 +98,18 @@ spec: value: "{{ .Values.core.jobs.monitoring.period }}" - name: JOB_MONITOR_INITIAL_DELAY_MS value: "{{ .Values.core.jobs.monitoring.initialDelay }}" + + {{- if .Values.core.jobs.writeFeatureMetricsToInfluxDb }} + - name: INGESTION_METRICS_ENABLED + value: true + - name: INGESTION_METRICS_INFLUX_URL + value: {{ .Values.core.jobs.influxDbUrl }} + - name: INGESTION_METRICS_INFLUX_DB_NAME + value: {{ .Values.core.jobs.influxDbDatabase }} + - name: INGESTION_METRICS_INFLUX_DB_MEASUREMENT + value: {{ .Values.core.jobs.influxDbMeasurement }} + {{- end }} + {{- if .Values.store }} {{- if .Values.store.serving }} - name: STORE_SERVING_TYPE diff --git a/charts/feast/templates/serving-deploy.yaml b/charts/feast/templates/serving-deploy.yaml index b84aca8c71..78994c5a09 100644 --- a/charts/feast/templates/serving-deploy.yaml +++ b/charts/feast/templates/serving-deploy.yaml @@ -96,7 +96,9 @@ spec: - name: JAEGER_ENABLED value: "{{ .Values.serving.jaeger.enabled }}" - name: JAEGER_AGENT_HOST - value: "{{ .Values.serving.jaeger.host }}" + valueFrom: + fieldRef: + fieldPath: status.hostIP - name: JAEGER_AGENT_PORT value: "{{ .Values.serving.jaeger.port }}" - name: JAEGER_SAMPLER_TYPE diff --git a/charts/feast/values.yaml b/charts/feast/values.yaml index 9c70703f8c..27955b5ef3 100644 --- a/charts/feast/values.yaml +++ b/charts/feast/values.yaml @@ -1,4 +1,17 @@ ---- +# [required] +# global.postgresql.secretName is an existing Kubernetes secret name containing Postgresql password +# The secret needs to have this key: postgresql-password +# +# Example of how to generate this secret: +# kubectl create secret generic -postgresql \ +# --from-literal=postgresql-password= +# +# With the example above the secretName will be -postgresql +# +# global: +# postgresql: +# secretName: + core: projectId: "gcp-project-id" image: @@ -38,6 +51,14 @@ core: monitoring: period: 5000 initialDelay: 60000 + # writeFeatureMetricsToInfluxDb specifies whether Feast import job will write feature metrics (such as feature lag and values summaries) to Influx DB for monitoring and alert 
purpose + writeFeatureMetricsToInfluxDb: false + # influxDbUrl, influxDbDatabase and influxDbMeasurement sets the Influx DB configuration where Feast import job will write the feature metrics + # Uncomment the following 3 fields if writeFeatureMetricsToInfluxDb = true + # + # influxDbUrl: http://localhost:8086 + # influxDbDatabase: influx_db_database + # influxDbMeasurement: influx_db_measurement trainingDatasetPrefix: "fs" # logType: JSON livenessProbe: @@ -109,6 +130,9 @@ serving: # loadBalancerSourceRanges: ["10.0.0.0/8"] jaeger: enabled: false + # options: + # samplerType: constants + # samplerParam: 1 livenessProbe: initialDelaySeconds: 120 failureThreshold: 3 From ebc50dc1f346c28eaffe099553283b7f198d5562 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Tue, 10 Sep 2019 17:45:49 +0800 Subject: [PATCH 07/19] Add lazy annotation in getBigQueryTrainingDatasetTemplater in TrainingConfig to resolve circular dependency --- charts/feast/templates/core-deploy.yaml | 2 +- charts/feast/values.yaml | 4 ++-- core/src/main/java/feast/core/config/TrainingConfig.java | 3 ++- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/charts/feast/templates/core-deploy.yaml b/charts/feast/templates/core-deploy.yaml index 6194edbefc..79eec61f9f 100644 --- a/charts/feast/templates/core-deploy.yaml +++ b/charts/feast/templates/core-deploy.yaml @@ -101,7 +101,7 @@ spec: {{- if .Values.core.jobs.writeFeatureMetricsToInfluxDb }} - name: INGESTION_METRICS_ENABLED - value: true + value: "true" - name: INGESTION_METRICS_INFLUX_URL value: {{ .Values.core.jobs.influxDbUrl }} - name: INGESTION_METRICS_INFLUX_DB_NAME diff --git a/charts/feast/values.yaml b/charts/feast/values.yaml index 27955b5ef3..259e1fa744 100644 --- a/charts/feast/values.yaml +++ b/charts/feast/values.yaml @@ -18,7 +18,7 @@ core: pullPolicy: IfNotPresent registry: gcr.io/kf-feast repository: feast-core - tag: "0.1.4" + tag: "0.1.5" replicaCount: 1 resources: limits: @@ -105,7 +105,7 @@ serving: pullPolicy: IfNotPresent registry: gcr.io/kf-feast repository: feast-serving - tag: "0.1.4" + tag: "0.1.5" replicaCount: 1 resources: limits: diff --git a/core/src/main/java/feast/core/config/TrainingConfig.java b/core/src/main/java/feast/core/config/TrainingConfig.java index 9bcff052f1..f560b13eff 100644 --- a/core/src/main/java/feast/core/config/TrainingConfig.java +++ b/core/src/main/java/feast/core/config/TrainingConfig.java @@ -14,6 +14,7 @@ import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Lazy; import org.springframework.core.io.ClassPathResource; import org.springframework.core.io.Resource; @@ -23,7 +24,7 @@ public class TrainingConfig { @Bean public BigQueryDatasetTemplater getBigQueryTrainingDatasetTemplater( - StorageSpecs storageSpecs, FeatureInfoRepository featureInfoRepository) throws IOException { + @Lazy StorageSpecs storageSpecs, FeatureInfoRepository featureInfoRepository) throws IOException { Resource resource = new ClassPathResource("templates/bq_training.tmpl"); InputStream resourceInputStream = resource.getInputStream(); String tmpl = CharStreams.toString(new InputStreamReader(resourceInputStream, Charsets.UTF_8)); From 9210cf761dfb5591bac4a510facf3116f01010c7 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Tue, 10 Sep 2019 19:27:50 +0800 Subject: [PATCH 08/19] Use provided values for INGESTION_METRICS_ENABLED --- charts/feast/templates/core-deploy.yaml | 
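As documented in the chart README above, enabling import-job metrics from the Helm side only requires the core.jobs.* values; core-deploy.yaml maps them onto the INGESTION_METRICS_* environment variables. A minimal values.yaml sketch (the URL, database and measurement names are placeholders):

    core:
      jobs:
        writeFeatureMetricsToInfluxDb: true
        influxDbUrl: http://influxdb:8086
        influxDbDatabase: feast
        influxDbMeasurement: feature_metrics

Because the template only renders the INGESTION_METRICS_* block when writeFeatureMetricsToInfluxDb is true, leaving the flag at its default of false keeps the core deployment unchanged.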
2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/charts/feast/templates/core-deploy.yaml b/charts/feast/templates/core-deploy.yaml index 79eec61f9f..97a99603ab 100644 --- a/charts/feast/templates/core-deploy.yaml +++ b/charts/feast/templates/core-deploy.yaml @@ -101,7 +101,7 @@ spec: {{- if .Values.core.jobs.writeFeatureMetricsToInfluxDb }} - name: INGESTION_METRICS_ENABLED - value: "true" + value: "{{ .Values.core.jobs.writeFeatureMetricsToInfluxDb }}" - name: INGESTION_METRICS_INFLUX_URL value: {{ .Values.core.jobs.influxDbUrl }} - name: INGESTION_METRICS_INFLUX_DB_NAME From 1634a963d6c1ce9b74fa1895fecc967ecc340960 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Wed, 11 Sep 2019 00:08:31 +0800 Subject: [PATCH 09/19] Add Python mypy definition --- protos/Makefile | 2 +- sdk/python/feast/core/CoreService_pb2.pyi | 234 +++++++++++ sdk/python/feast/core/DatasetService_pb2.pyi | 140 +++++++ sdk/python/feast/core/JobService_pb2.pyi | 215 +++++++++++ sdk/python/feast/core/UIService_pb2.pyi | 362 ++++++++++++++++++ sdk/python/feast/specs/EntitySpec_pb2.pyi | 39 ++ .../feast/specs/FeatureGroupSpec_pb2.pyi | 60 +++ sdk/python/feast/specs/FeatureSpec_pb2.pyi | 78 ++++ sdk/python/feast/specs/ImportJobSpecs_pb2.pyi | 85 ++++ sdk/python/feast/specs/ImportSpec_pb2.pyi | 137 +++++++ sdk/python/feast/specs/StorageSpec_pb2.pyi | 55 +++ sdk/python/feast/storage/BigTable_pb2.pyi | 34 ++ sdk/python/feast/storage/Redis_pb2.pyi | 87 +++++ .../feast/types/FeatureRowExtended_pb2.pyi | 92 +++++ sdk/python/feast/types/FeatureRow_pb2.pyi | 55 +++ sdk/python/feast/types/Feature_pb2.pyi | 40 ++ sdk/python/feast/types/Value_pb2.pyi | 267 +++++++++++++ 17 files changed, 1981 insertions(+), 1 deletion(-) create mode 100644 sdk/python/feast/core/CoreService_pb2.pyi create mode 100644 sdk/python/feast/core/DatasetService_pb2.pyi create mode 100644 sdk/python/feast/core/JobService_pb2.pyi create mode 100644 sdk/python/feast/core/UIService_pb2.pyi create mode 100644 sdk/python/feast/specs/EntitySpec_pb2.pyi create mode 100644 sdk/python/feast/specs/FeatureGroupSpec_pb2.pyi create mode 100644 sdk/python/feast/specs/FeatureSpec_pb2.pyi create mode 100644 sdk/python/feast/specs/ImportJobSpecs_pb2.pyi create mode 100644 sdk/python/feast/specs/ImportSpec_pb2.pyi create mode 100644 sdk/python/feast/specs/StorageSpec_pb2.pyi create mode 100644 sdk/python/feast/storage/BigTable_pb2.pyi create mode 100644 sdk/python/feast/storage/Redis_pb2.pyi create mode 100644 sdk/python/feast/types/FeatureRowExtended_pb2.pyi create mode 100644 sdk/python/feast/types/FeatureRow_pb2.pyi create mode 100644 sdk/python/feast/types/Feature_pb2.pyi create mode 100644 sdk/python/feast/types/Value_pb2.pyi diff --git a/protos/Makefile b/protos/Makefile index e49b8c32d6..367a1fa025 100644 --- a/protos/Makefile +++ b/protos/Makefile @@ -8,5 +8,5 @@ gen-go: gen-python: pip install grpcio-tools - @$(foreach dir,$(dirs),python -m grpc_tools.protoc -I. --python_out=../sdk/python/ feast/$(dir)/*.proto;) + @$(foreach dir,$(dirs),python -m grpc_tools.protoc -I. --python_out=../sdk/python/ --mypy_out=../sdk/python/ feast/$(dir)/*.proto;) @$(foreach dir,$(service_dirs),python -m grpc_tools.protoc -I. 
--grpc_python_out=../sdk/python/ feast/$(dir)/*.proto;) \ No newline at end of file diff --git a/sdk/python/feast/core/CoreService_pb2.pyi b/sdk/python/feast/core/CoreService_pb2.pyi new file mode 100644 index 0000000000..5a85361af1 --- /dev/null +++ b/sdk/python/feast/core/CoreService_pb2.pyi @@ -0,0 +1,234 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from feast.specs.EntitySpec_pb2 import ( + EntitySpec as feast___specs___EntitySpec_pb2___EntitySpec, +) + +from feast.specs.FeatureSpec_pb2 import ( + FeatureSpec as feast___specs___FeatureSpec_pb2___FeatureSpec, +) + +from feast.specs.StorageSpec_pb2 import ( + StorageSpec as feast___specs___StorageSpec_pb2___StorageSpec, +) + +from google.protobuf.internal.containers import ( + RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, + RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from typing import ( + Iterable as typing___Iterable, + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class CoreServiceTypes(google___protobuf___message___Message): + class GetEntitiesRequest(google___protobuf___message___Message): + ids = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] + + def __init__(self, + ids : typing___Optional[typing___Iterable[typing___Text]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> CoreServiceTypes.GetEntitiesRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"ids"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"ids"]) -> None: ... + + class GetEntitiesResponse(google___protobuf___message___Message): + + @property + def entities(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___specs___EntitySpec_pb2___EntitySpec]: ... + + def __init__(self, + entities : typing___Optional[typing___Iterable[feast___specs___EntitySpec_pb2___EntitySpec]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> CoreServiceTypes.GetEntitiesResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"entities"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"entities"]) -> None: ... + + class ListEntitiesResponse(google___protobuf___message___Message): + + @property + def entities(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___specs___EntitySpec_pb2___EntitySpec]: ... + + def __init__(self, + entities : typing___Optional[typing___Iterable[feast___specs___EntitySpec_pb2___EntitySpec]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> CoreServiceTypes.ListEntitiesResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
+ def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"entities"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"entities"]) -> None: ... + + class GetFeaturesRequest(google___protobuf___message___Message): + ids = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] + + def __init__(self, + ids : typing___Optional[typing___Iterable[typing___Text]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> CoreServiceTypes.GetFeaturesRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"ids"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"ids"]) -> None: ... + + class GetFeaturesResponse(google___protobuf___message___Message): + + @property + def features(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___specs___FeatureSpec_pb2___FeatureSpec]: ... + + def __init__(self, + features : typing___Optional[typing___Iterable[feast___specs___FeatureSpec_pb2___FeatureSpec]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> CoreServiceTypes.GetFeaturesResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"features"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"features"]) -> None: ... + + class ListFeaturesResponse(google___protobuf___message___Message): + + @property + def features(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___specs___FeatureSpec_pb2___FeatureSpec]: ... + + def __init__(self, + features : typing___Optional[typing___Iterable[feast___specs___FeatureSpec_pb2___FeatureSpec]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> CoreServiceTypes.ListFeaturesResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"features"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"features"]) -> None: ... + + class GetStorageRequest(google___protobuf___message___Message): + ids = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] + + def __init__(self, + ids : typing___Optional[typing___Iterable[typing___Text]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> CoreServiceTypes.GetStorageRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"ids"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"ids"]) -> None: ... 
+ + class GetStorageResponse(google___protobuf___message___Message): + + @property + def storageSpecs(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___specs___StorageSpec_pb2___StorageSpec]: ... + + def __init__(self, + storageSpecs : typing___Optional[typing___Iterable[feast___specs___StorageSpec_pb2___StorageSpec]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> CoreServiceTypes.GetStorageResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"storageSpecs"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"storageSpecs"]) -> None: ... + + class ListStorageResponse(google___protobuf___message___Message): + + @property + def storageSpecs(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___specs___StorageSpec_pb2___StorageSpec]: ... + + def __init__(self, + storageSpecs : typing___Optional[typing___Iterable[feast___specs___StorageSpec_pb2___StorageSpec]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> CoreServiceTypes.ListStorageResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"storageSpecs"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"storageSpecs"]) -> None: ... + + class ApplyEntityResponse(google___protobuf___message___Message): + entityName = ... # type: typing___Text + + def __init__(self, + entityName : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> CoreServiceTypes.ApplyEntityResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"entityName"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"entityName"]) -> None: ... + + class ApplyFeatureResponse(google___protobuf___message___Message): + featureId = ... # type: typing___Text + + def __init__(self, + featureId : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> CoreServiceTypes.ApplyFeatureResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"featureId"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"featureId"]) -> None: ... + + class ApplyFeatureGroupResponse(google___protobuf___message___Message): + featureGroupId = ... # type: typing___Text + + def __init__(self, + featureGroupId : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> CoreServiceTypes.ApplyFeatureGroupResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
+ def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"featureGroupId"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"featureGroupId"]) -> None: ... + + + def __init__(self, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> CoreServiceTypes: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... diff --git a/sdk/python/feast/core/DatasetService_pb2.pyi b/sdk/python/feast/core/DatasetService_pb2.pyi new file mode 100644 index 0000000000..a630ee2eaa --- /dev/null +++ b/sdk/python/feast/core/DatasetService_pb2.pyi @@ -0,0 +1,140 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from google.protobuf.internal.containers import ( + RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from google.protobuf.timestamp_pb2 import ( + Timestamp as google___protobuf___timestamp_pb2___Timestamp, +) + +from typing import ( + Iterable as typing___Iterable, + Mapping as typing___Mapping, + MutableMapping as typing___MutableMapping, + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class DatasetServiceTypes(google___protobuf___message___Message): + class CreateDatasetRequest(google___protobuf___message___Message): + class FiltersEntry(google___protobuf___message___Message): + key = ... # type: typing___Text + value = ... # type: typing___Text + + def __init__(self, + key : typing___Optional[typing___Text] = None, + value : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> DatasetServiceTypes.CreateDatasetRequest.FiltersEntry: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"key",b"value"]) -> None: ... + + limit = ... # type: int + namePrefix = ... # type: typing___Text + + @property + def featureSet(self) -> FeatureSet: ... + + @property + def startDate(self) -> google___protobuf___timestamp_pb2___Timestamp: ... + + @property + def endDate(self) -> google___protobuf___timestamp_pb2___Timestamp: ... + + @property + def filters(self) -> typing___MutableMapping[typing___Text, typing___Text]: ... + + def __init__(self, + featureSet : typing___Optional[FeatureSet] = None, + startDate : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, + endDate : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, + limit : typing___Optional[int] = None, + namePrefix : typing___Optional[typing___Text] = None, + filters : typing___Optional[typing___Mapping[typing___Text, typing___Text]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> DatasetServiceTypes.CreateDatasetRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
+ def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"endDate",u"featureSet",u"startDate"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"endDate",u"featureSet",u"filters",u"limit",u"namePrefix",u"startDate"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"endDate",b"endDate",u"featureSet",b"featureSet",u"startDate",b"startDate"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"endDate",b"featureSet",b"filters",b"limit",b"namePrefix",b"startDate"]) -> None: ... + + class CreateDatasetResponse(google___protobuf___message___Message): + + @property + def datasetInfo(self) -> DatasetInfo: ... + + def __init__(self, + datasetInfo : typing___Optional[DatasetInfo] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> DatasetServiceTypes.CreateDatasetResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"datasetInfo"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"datasetInfo"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"datasetInfo",b"datasetInfo"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"datasetInfo"]) -> None: ... + + + def __init__(self, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> DatasetServiceTypes: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + +class FeatureSet(google___protobuf___message___Message): + entityName = ... # type: typing___Text + featureIds = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] + + def __init__(self, + entityName : typing___Optional[typing___Text] = None, + featureIds : typing___Optional[typing___Iterable[typing___Text]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> FeatureSet: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"entityName",u"featureIds"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"entityName",b"featureIds"]) -> None: ... + +class DatasetInfo(google___protobuf___message___Message): + name = ... # type: typing___Text + tableUrl = ... # type: typing___Text + + def __init__(self, + name : typing___Optional[typing___Text] = None, + tableUrl : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> DatasetInfo: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"name",u"tableUrl"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"name",b"tableUrl"]) -> None: ... 
diff --git a/sdk/python/feast/core/JobService_pb2.pyi b/sdk/python/feast/core/JobService_pb2.pyi new file mode 100644 index 0000000000..a60fcc02bb --- /dev/null +++ b/sdk/python/feast/core/JobService_pb2.pyi @@ -0,0 +1,215 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from feast.specs.ImportSpec_pb2 import ( + ImportSpec as feast___specs___ImportSpec_pb2___ImportSpec, +) + +from google.protobuf.internal.containers import ( + RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, + RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from google.protobuf.timestamp_pb2 import ( + Timestamp as google___protobuf___timestamp_pb2___Timestamp, +) + +from typing import ( + Iterable as typing___Iterable, + Mapping as typing___Mapping, + MutableMapping as typing___MutableMapping, + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class JobServiceTypes(google___protobuf___message___Message): + class SubmitImportJobRequest(google___protobuf___message___Message): + name = ... # type: typing___Text + + @property + def importSpec(self) -> feast___specs___ImportSpec_pb2___ImportSpec: ... + + def __init__(self, + importSpec : typing___Optional[feast___specs___ImportSpec_pb2___ImportSpec] = None, + name : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> JobServiceTypes.SubmitImportJobRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"importSpec"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"importSpec",u"name"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"importSpec",b"importSpec"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"importSpec",b"name"]) -> None: ... + + class SubmitImportJobResponse(google___protobuf___message___Message): + jobId = ... # type: typing___Text + + def __init__(self, + jobId : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> JobServiceTypes.SubmitImportJobResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"jobId"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"jobId"]) -> None: ... + + class ListJobsResponse(google___protobuf___message___Message): + + @property + def jobs(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[JobServiceTypes.JobDetail]: ... + + def __init__(self, + jobs : typing___Optional[typing___Iterable[JobServiceTypes.JobDetail]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> JobServiceTypes.ListJobsResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... 
+ def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"jobs"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"jobs"]) -> None: ... + + class GetJobRequest(google___protobuf___message___Message): + id = ... # type: typing___Text + + def __init__(self, + id : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> JobServiceTypes.GetJobRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"id"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"id"]) -> None: ... + + class GetJobResponse(google___protobuf___message___Message): + + @property + def job(self) -> JobServiceTypes.JobDetail: ... + + def __init__(self, + job : typing___Optional[JobServiceTypes.JobDetail] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> JobServiceTypes.GetJobResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"job"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"job"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"job",b"job"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"job"]) -> None: ... + + class AbortJobRequest(google___protobuf___message___Message): + id = ... # type: typing___Text + + def __init__(self, + id : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> JobServiceTypes.AbortJobRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"id"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"id"]) -> None: ... + + class AbortJobResponse(google___protobuf___message___Message): + id = ... # type: typing___Text + + def __init__(self, + id : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> JobServiceTypes.AbortJobResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"id"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"id"]) -> None: ... + + class JobDetail(google___protobuf___message___Message): + class MetricsEntry(google___protobuf___message___Message): + key = ... # type: typing___Text + value = ... # type: float + + def __init__(self, + key : typing___Optional[typing___Text] = None, + value : typing___Optional[float] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> JobServiceTypes.JobDetail.MetricsEntry: ... 
+ def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"key",b"value"]) -> None: ... + + id = ... # type: typing___Text + extId = ... # type: typing___Text + type = ... # type: typing___Text + runner = ... # type: typing___Text + status = ... # type: typing___Text + entities = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] + features = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] + + @property + def metrics(self) -> typing___MutableMapping[typing___Text, float]: ... + + @property + def lastUpdated(self) -> google___protobuf___timestamp_pb2___Timestamp: ... + + @property + def created(self) -> google___protobuf___timestamp_pb2___Timestamp: ... + + def __init__(self, + id : typing___Optional[typing___Text] = None, + extId : typing___Optional[typing___Text] = None, + type : typing___Optional[typing___Text] = None, + runner : typing___Optional[typing___Text] = None, + status : typing___Optional[typing___Text] = None, + entities : typing___Optional[typing___Iterable[typing___Text]] = None, + features : typing___Optional[typing___Iterable[typing___Text]] = None, + metrics : typing___Optional[typing___Mapping[typing___Text, float]] = None, + lastUpdated : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, + created : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> JobServiceTypes.JobDetail: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"created",u"lastUpdated"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"created",u"entities",u"extId",u"features",u"id",u"lastUpdated",u"metrics",u"runner",u"status",u"type"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"created",b"created",u"lastUpdated",b"lastUpdated"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"created",b"entities",b"extId",b"features",b"id",b"lastUpdated",b"metrics",b"runner",b"status",b"type"]) -> None: ... + + + def __init__(self, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> JobServiceTypes: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... diff --git a/sdk/python/feast/core/UIService_pb2.pyi b/sdk/python/feast/core/UIService_pb2.pyi new file mode 100644 index 0000000000..cb39928dce --- /dev/null +++ b/sdk/python/feast/core/UIService_pb2.pyi @@ -0,0 +1,362 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! 
+import sys +from feast.specs.EntitySpec_pb2 import ( + EntitySpec as feast___specs___EntitySpec_pb2___EntitySpec, +) + +from feast.specs.FeatureGroupSpec_pb2 import ( + FeatureGroupSpec as feast___specs___FeatureGroupSpec_pb2___FeatureGroupSpec, +) + +from feast.specs.FeatureSpec_pb2 import ( + FeatureSpec as feast___specs___FeatureSpec_pb2___FeatureSpec, +) + +from feast.specs.StorageSpec_pb2 import ( + StorageSpec as feast___specs___StorageSpec_pb2___StorageSpec, +) + +from google.protobuf.internal.containers import ( + RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, + RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from google.protobuf.timestamp_pb2 import ( + Timestamp as google___protobuf___timestamp_pb2___Timestamp, +) + +from typing import ( + Iterable as typing___Iterable, + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class UIServiceTypes(google___protobuf___message___Message): + class EntityDetail(google___protobuf___message___Message): + jobs = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] + + @property + def spec(self) -> feast___specs___EntitySpec_pb2___EntitySpec: ... + + @property + def lastUpdated(self) -> google___protobuf___timestamp_pb2___Timestamp: ... + + def __init__(self, + spec : typing___Optional[feast___specs___EntitySpec_pb2___EntitySpec] = None, + jobs : typing___Optional[typing___Iterable[typing___Text]] = None, + lastUpdated : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> UIServiceTypes.EntityDetail: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"lastUpdated",u"spec"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"jobs",u"lastUpdated",u"spec"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"lastUpdated",b"lastUpdated",u"spec",b"spec"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"jobs",b"lastUpdated",b"spec"]) -> None: ... + + class GetEntityRequest(google___protobuf___message___Message): + id = ... # type: typing___Text + + def __init__(self, + id : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> UIServiceTypes.GetEntityRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"id"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"id"]) -> None: ... + + class GetEntityResponse(google___protobuf___message___Message): + + @property + def entity(self) -> UIServiceTypes.EntityDetail: ... + + def __init__(self, + entity : typing___Optional[UIServiceTypes.EntityDetail] = None, + ) -> None: ... 
+ @classmethod + def FromString(cls, s: bytes) -> UIServiceTypes.GetEntityResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"entity"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"entity"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"entity",b"entity"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"entity"]) -> None: ... + + class ListEntitiesResponse(google___protobuf___message___Message): + + @property + def entities(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[UIServiceTypes.EntityDetail]: ... + + def __init__(self, + entities : typing___Optional[typing___Iterable[UIServiceTypes.EntityDetail]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> UIServiceTypes.ListEntitiesResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"entities"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"entities"]) -> None: ... + + class FeatureDetail(google___protobuf___message___Message): + bigqueryView = ... # type: typing___Text + enabled = ... # type: bool + jobs = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] + + @property + def spec(self) -> feast___specs___FeatureSpec_pb2___FeatureSpec: ... + + @property + def lastUpdated(self) -> google___protobuf___timestamp_pb2___Timestamp: ... + + @property + def created(self) -> google___protobuf___timestamp_pb2___Timestamp: ... + + def __init__(self, + spec : typing___Optional[feast___specs___FeatureSpec_pb2___FeatureSpec] = None, + bigqueryView : typing___Optional[typing___Text] = None, + enabled : typing___Optional[bool] = None, + jobs : typing___Optional[typing___Iterable[typing___Text]] = None, + lastUpdated : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, + created : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> UIServiceTypes.FeatureDetail: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"created",u"lastUpdated",u"spec"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"bigqueryView",u"created",u"enabled",u"jobs",u"lastUpdated",u"spec"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"created",b"created",u"lastUpdated",b"lastUpdated",u"spec",b"spec"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"bigqueryView",b"created",b"enabled",b"jobs",b"lastUpdated",b"spec"]) -> None: ... + + class GetFeatureRequest(google___protobuf___message___Message): + id = ... # type: typing___Text + + def __init__(self, + id : typing___Optional[typing___Text] = None, + ) -> None: ... 
+ @classmethod + def FromString(cls, s: bytes) -> UIServiceTypes.GetFeatureRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"id"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"id"]) -> None: ... + + class GetFeatureResponse(google___protobuf___message___Message): + + @property + def feature(self) -> UIServiceTypes.FeatureDetail: ... + + @property + def rawSpec(self) -> feast___specs___FeatureSpec_pb2___FeatureSpec: ... + + def __init__(self, + feature : typing___Optional[UIServiceTypes.FeatureDetail] = None, + rawSpec : typing___Optional[feast___specs___FeatureSpec_pb2___FeatureSpec] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> UIServiceTypes.GetFeatureResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"feature",u"rawSpec"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"feature",u"rawSpec"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"feature",b"feature",u"rawSpec",b"rawSpec"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"feature",b"rawSpec"]) -> None: ... + + class ListFeaturesResponse(google___protobuf___message___Message): + + @property + def features(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[UIServiceTypes.FeatureDetail]: ... + + def __init__(self, + features : typing___Optional[typing___Iterable[UIServiceTypes.FeatureDetail]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> UIServiceTypes.ListFeaturesResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"features"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"features"]) -> None: ... + + class FeatureGroupDetail(google___protobuf___message___Message): + + @property + def spec(self) -> feast___specs___FeatureGroupSpec_pb2___FeatureGroupSpec: ... + + @property + def lastUpdated(self) -> google___protobuf___timestamp_pb2___Timestamp: ... + + def __init__(self, + spec : typing___Optional[feast___specs___FeatureGroupSpec_pb2___FeatureGroupSpec] = None, + lastUpdated : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> UIServiceTypes.FeatureGroupDetail: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"lastUpdated",u"spec"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"lastUpdated",u"spec"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"lastUpdated",b"lastUpdated",u"spec",b"spec"]) -> bool: ... 
+ def ClearField(self, field_name: typing_extensions___Literal[b"lastUpdated",b"spec"]) -> None: ... + + class GetFeatureGroupRequest(google___protobuf___message___Message): + id = ... # type: typing___Text + + def __init__(self, + id : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> UIServiceTypes.GetFeatureGroupRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"id"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"id"]) -> None: ... + + class GetFeatureGroupResponse(google___protobuf___message___Message): + + @property + def featureGroup(self) -> UIServiceTypes.FeatureGroupDetail: ... + + def __init__(self, + featureGroup : typing___Optional[UIServiceTypes.FeatureGroupDetail] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> UIServiceTypes.GetFeatureGroupResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"featureGroup"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"featureGroup"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"featureGroup",b"featureGroup"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"featureGroup"]) -> None: ... + + class ListFeatureGroupsResponse(google___protobuf___message___Message): + + @property + def featureGroups(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[UIServiceTypes.FeatureGroupDetail]: ... + + def __init__(self, + featureGroups : typing___Optional[typing___Iterable[UIServiceTypes.FeatureGroupDetail]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> UIServiceTypes.ListFeatureGroupsResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"featureGroups"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"featureGroups"]) -> None: ... + + class StorageDetail(google___protobuf___message___Message): + + @property + def spec(self) -> feast___specs___StorageSpec_pb2___StorageSpec: ... + + @property + def lastUpdated(self) -> google___protobuf___timestamp_pb2___Timestamp: ... + + def __init__(self, + spec : typing___Optional[feast___specs___StorageSpec_pb2___StorageSpec] = None, + lastUpdated : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> UIServiceTypes.StorageDetail: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"lastUpdated",u"spec"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"lastUpdated",u"spec"]) -> None: ... 
+ else: + def HasField(self, field_name: typing_extensions___Literal[u"lastUpdated",b"lastUpdated",u"spec",b"spec"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"lastUpdated",b"spec"]) -> None: ... + + class GetStorageRequest(google___protobuf___message___Message): + id = ... # type: typing___Text + + def __init__(self, + id : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> UIServiceTypes.GetStorageRequest: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"id"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"id"]) -> None: ... + + class GetStorageResponse(google___protobuf___message___Message): + + @property + def storage(self) -> UIServiceTypes.StorageDetail: ... + + def __init__(self, + storage : typing___Optional[UIServiceTypes.StorageDetail] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> UIServiceTypes.GetStorageResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"storage"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"storage"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"storage",b"storage"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"storage"]) -> None: ... + + class ListStorageResponse(google___protobuf___message___Message): + + @property + def storage(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[UIServiceTypes.StorageDetail]: ... + + def __init__(self, + storage : typing___Optional[typing___Iterable[UIServiceTypes.StorageDetail]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> UIServiceTypes.ListStorageResponse: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"storage"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"storage"]) -> None: ... + + + def __init__(self, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> UIServiceTypes: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... diff --git a/sdk/python/feast/specs/EntitySpec_pb2.pyi b/sdk/python/feast/specs/EntitySpec_pb2.pyi new file mode 100644 index 0000000000..603e839e6b --- /dev/null +++ b/sdk/python/feast/specs/EntitySpec_pb2.pyi @@ -0,0 +1,39 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! 
+import sys +from google.protobuf.internal.containers import ( + RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from typing import ( + Iterable as typing___Iterable, + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class EntitySpec(google___protobuf___message___Message): + name = ... # type: typing___Text + description = ... # type: typing___Text + tags = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] + + def __init__(self, + name : typing___Optional[typing___Text] = None, + description : typing___Optional[typing___Text] = None, + tags : typing___Optional[typing___Iterable[typing___Text]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> EntitySpec: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"description",u"name",u"tags"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"description",b"name",b"tags"]) -> None: ... diff --git a/sdk/python/feast/specs/FeatureGroupSpec_pb2.pyi b/sdk/python/feast/specs/FeatureGroupSpec_pb2.pyi new file mode 100644 index 0000000000..1274401977 --- /dev/null +++ b/sdk/python/feast/specs/FeatureGroupSpec_pb2.pyi @@ -0,0 +1,60 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from google.protobuf.internal.containers import ( + RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from typing import ( + Iterable as typing___Iterable, + Mapping as typing___Mapping, + MutableMapping as typing___MutableMapping, + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class FeatureGroupSpec(google___protobuf___message___Message): + class OptionsEntry(google___protobuf___message___Message): + key = ... # type: typing___Text + value = ... # type: typing___Text + + def __init__(self, + key : typing___Optional[typing___Text] = None, + value : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> FeatureGroupSpec.OptionsEntry: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"key",b"value"]) -> None: ... + + id = ... # type: typing___Text + tags = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] + + @property + def options(self) -> typing___MutableMapping[typing___Text, typing___Text]: ... 
+ + def __init__(self, + id : typing___Optional[typing___Text] = None, + tags : typing___Optional[typing___Iterable[typing___Text]] = None, + options : typing___Optional[typing___Mapping[typing___Text, typing___Text]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> FeatureGroupSpec: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"id",u"options",u"tags"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"id",b"options",b"tags"]) -> None: ... diff --git a/sdk/python/feast/specs/FeatureSpec_pb2.pyi b/sdk/python/feast/specs/FeatureSpec_pb2.pyi new file mode 100644 index 0000000000..6c703c2d4e --- /dev/null +++ b/sdk/python/feast/specs/FeatureSpec_pb2.pyi @@ -0,0 +1,78 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from feast.types.Value_pb2 import ( + ValueType as feast___types___Value_pb2___ValueType, +) + +from google.protobuf.internal.containers import ( + RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from typing import ( + Iterable as typing___Iterable, + Mapping as typing___Mapping, + MutableMapping as typing___MutableMapping, + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class FeatureSpec(google___protobuf___message___Message): + class OptionsEntry(google___protobuf___message___Message): + key = ... # type: typing___Text + value = ... # type: typing___Text + + def __init__(self, + key : typing___Optional[typing___Text] = None, + value : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> FeatureSpec.OptionsEntry: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"key",b"value"]) -> None: ... + + id = ... # type: typing___Text + name = ... # type: typing___Text + owner = ... # type: typing___Text + description = ... # type: typing___Text + uri = ... # type: typing___Text + valueType = ... # type: feast___types___Value_pb2___ValueType.Enum + entity = ... # type: typing___Text + group = ... # type: typing___Text + tags = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] + + @property + def options(self) -> typing___MutableMapping[typing___Text, typing___Text]: ... 
+ + def __init__(self, + id : typing___Optional[typing___Text] = None, + name : typing___Optional[typing___Text] = None, + owner : typing___Optional[typing___Text] = None, + description : typing___Optional[typing___Text] = None, + uri : typing___Optional[typing___Text] = None, + valueType : typing___Optional[feast___types___Value_pb2___ValueType.Enum] = None, + entity : typing___Optional[typing___Text] = None, + group : typing___Optional[typing___Text] = None, + tags : typing___Optional[typing___Iterable[typing___Text]] = None, + options : typing___Optional[typing___Mapping[typing___Text, typing___Text]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> FeatureSpec: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"description",u"entity",u"group",u"id",u"name",u"options",u"owner",u"tags",u"uri",u"valueType"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"description",b"entity",b"group",b"id",b"name",b"options",b"owner",b"tags",b"uri",b"valueType"]) -> None: ... diff --git a/sdk/python/feast/specs/ImportJobSpecs_pb2.pyi b/sdk/python/feast/specs/ImportJobSpecs_pb2.pyi new file mode 100644 index 0000000000..9643ccc25f --- /dev/null +++ b/sdk/python/feast/specs/ImportJobSpecs_pb2.pyi @@ -0,0 +1,85 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from feast.specs.EntitySpec_pb2 import ( + EntitySpec as feast___specs___EntitySpec_pb2___EntitySpec, +) + +from feast.specs.FeatureSpec_pb2 import ( + FeatureSpec as feast___specs___FeatureSpec_pb2___FeatureSpec, +) + +from feast.specs.ImportSpec_pb2 import ( + ImportSpec as feast___specs___ImportSpec_pb2___ImportSpec, +) + +from feast.specs.StorageSpec_pb2 import ( + StorageSpec as feast___specs___StorageSpec_pb2___StorageSpec, +) + +from google.protobuf.internal.containers import ( + RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from typing import ( + Iterable as typing___Iterable, + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class ImportJobSpecs(google___protobuf___message___Message): + jobId = ... # type: typing___Text + write_feature_metrics_to_influx_db = ... # type: bool + influx_db_url = ... # type: typing___Text + influx_db_database = ... # type: typing___Text + influx_db_measurement = ... # type: typing___Text + + @property + def importSpec(self) -> feast___specs___ImportSpec_pb2___ImportSpec: ... + + @property + def entitySpecs(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___specs___EntitySpec_pb2___EntitySpec]: ... + + @property + def featureSpecs(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___specs___FeatureSpec_pb2___FeatureSpec]: ... + + @property + def servingStorageSpec(self) -> feast___specs___StorageSpec_pb2___StorageSpec: ... + + @property + def warehouseStorageSpec(self) -> feast___specs___StorageSpec_pb2___StorageSpec: ... + + @property + def errorsStorageSpec(self) -> feast___specs___StorageSpec_pb2___StorageSpec: ... 
+ + def __init__(self, + jobId : typing___Optional[typing___Text] = None, + importSpec : typing___Optional[feast___specs___ImportSpec_pb2___ImportSpec] = None, + entitySpecs : typing___Optional[typing___Iterable[feast___specs___EntitySpec_pb2___EntitySpec]] = None, + featureSpecs : typing___Optional[typing___Iterable[feast___specs___FeatureSpec_pb2___FeatureSpec]] = None, + servingStorageSpec : typing___Optional[feast___specs___StorageSpec_pb2___StorageSpec] = None, + warehouseStorageSpec : typing___Optional[feast___specs___StorageSpec_pb2___StorageSpec] = None, + errorsStorageSpec : typing___Optional[feast___specs___StorageSpec_pb2___StorageSpec] = None, + write_feature_metrics_to_influx_db : typing___Optional[bool] = None, + influx_db_url : typing___Optional[typing___Text] = None, + influx_db_database : typing___Optional[typing___Text] = None, + influx_db_measurement : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> ImportJobSpecs: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"errorsStorageSpec",u"importSpec",u"servingStorageSpec",u"warehouseStorageSpec"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"entitySpecs",u"errorsStorageSpec",u"featureSpecs",u"importSpec",u"influx_db_database",u"influx_db_measurement",u"influx_db_url",u"jobId",u"servingStorageSpec",u"warehouseStorageSpec",u"write_feature_metrics_to_influx_db"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"errorsStorageSpec",b"errorsStorageSpec",u"importSpec",b"importSpec",u"servingStorageSpec",b"servingStorageSpec",u"warehouseStorageSpec",b"warehouseStorageSpec"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"entitySpecs",b"errorsStorageSpec",b"featureSpecs",b"importSpec",b"influx_db_database",b"influx_db_measurement",b"influx_db_url",b"jobId",b"servingStorageSpec",b"warehouseStorageSpec",b"write_feature_metrics_to_influx_db"]) -> None: ... diff --git a/sdk/python/feast/specs/ImportSpec_pb2.pyi b/sdk/python/feast/specs/ImportSpec_pb2.pyi new file mode 100644 index 0000000000..6c686a9e1d --- /dev/null +++ b/sdk/python/feast/specs/ImportSpec_pb2.pyi @@ -0,0 +1,137 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from google.protobuf.internal.containers import ( + RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, + RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from google.protobuf.timestamp_pb2 import ( + Timestamp as google___protobuf___timestamp_pb2___Timestamp, +) + +from typing import ( + Iterable as typing___Iterable, + Mapping as typing___Mapping, + MutableMapping as typing___MutableMapping, + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class ImportSpec(google___protobuf___message___Message): + class SourceOptionsEntry(google___protobuf___message___Message): + key = ... # type: typing___Text + value = ... 
# type: typing___Text + + def __init__(self, + key : typing___Optional[typing___Text] = None, + value : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> ImportSpec.SourceOptionsEntry: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"key",b"value"]) -> None: ... + + class JobOptionsEntry(google___protobuf___message___Message): + key = ... # type: typing___Text + value = ... # type: typing___Text + + def __init__(self, + key : typing___Optional[typing___Text] = None, + value : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> ImportSpec.JobOptionsEntry: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"key",b"value"]) -> None: ... + + type = ... # type: typing___Text + entities = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] + + @property + def sourceOptions(self) -> typing___MutableMapping[typing___Text, typing___Text]: ... + + @property + def jobOptions(self) -> typing___MutableMapping[typing___Text, typing___Text]: ... + + @property + def schema(self) -> Schema: ... + + def __init__(self, + type : typing___Optional[typing___Text] = None, + sourceOptions : typing___Optional[typing___Mapping[typing___Text, typing___Text]] = None, + jobOptions : typing___Optional[typing___Mapping[typing___Text, typing___Text]] = None, + entities : typing___Optional[typing___Iterable[typing___Text]] = None, + schema : typing___Optional[Schema] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> ImportSpec: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"schema"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"entities",u"jobOptions",u"schema",u"sourceOptions",u"type"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"schema",b"schema"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"entities",b"jobOptions",b"schema",b"sourceOptions",b"type"]) -> None: ... + +class Schema(google___protobuf___message___Message): + timestampColumn = ... # type: typing___Text + entityIdColumn = ... # type: typing___Text + + @property + def fields(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[Field]: ... + + @property + def timestampValue(self) -> google___protobuf___timestamp_pb2___Timestamp: ... 
+ + def __init__(self, + fields : typing___Optional[typing___Iterable[Field]] = None, + timestampColumn : typing___Optional[typing___Text] = None, + timestampValue : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, + entityIdColumn : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Schema: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"timestamp",u"timestampColumn",u"timestampValue"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"entityIdColumn",u"fields",u"timestamp",u"timestampColumn",u"timestampValue"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"timestamp",b"timestamp",u"timestampColumn",b"timestampColumn",u"timestampValue",b"timestampValue"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"entityIdColumn",b"fields",b"timestamp",b"timestampColumn",b"timestampValue"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions___Literal[u"timestamp",b"timestamp"]) -> typing_extensions___Literal["timestampColumn","timestampValue"]: ... + +class Field(google___protobuf___message___Message): + name = ... # type: typing___Text + featureId = ... # type: typing___Text + + def __init__(self, + name : typing___Optional[typing___Text] = None, + featureId : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Field: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"featureId",u"name"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"featureId",b"name"]) -> None: ... diff --git a/sdk/python/feast/specs/StorageSpec_pb2.pyi b/sdk/python/feast/specs/StorageSpec_pb2.pyi new file mode 100644 index 0000000000..f305abeb77 --- /dev/null +++ b/sdk/python/feast/specs/StorageSpec_pb2.pyi @@ -0,0 +1,55 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from typing import ( + Mapping as typing___Mapping, + MutableMapping as typing___MutableMapping, + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class StorageSpec(google___protobuf___message___Message): + class OptionsEntry(google___protobuf___message___Message): + key = ... # type: typing___Text + value = ... # type: typing___Text + + def __init__(self, + key : typing___Optional[typing___Text] = None, + value : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> StorageSpec.OptionsEntry: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"key",u"value"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"key",b"value"]) -> None: ... 
+ + id = ... # type: typing___Text + type = ... # type: typing___Text + + @property + def options(self) -> typing___MutableMapping[typing___Text, typing___Text]: ... + + def __init__(self, + id : typing___Optional[typing___Text] = None, + type : typing___Optional[typing___Text] = None, + options : typing___Optional[typing___Mapping[typing___Text, typing___Text]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> StorageSpec: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"id",u"options",u"type"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"id",b"options",b"type"]) -> None: ... diff --git a/sdk/python/feast/storage/BigTable_pb2.pyi b/sdk/python/feast/storage/BigTable_pb2.pyi new file mode 100644 index 0000000000..1744f29485 --- /dev/null +++ b/sdk/python/feast/storage/BigTable_pb2.pyi @@ -0,0 +1,34 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from typing import ( + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class BigTableRowKey(google___protobuf___message___Message): + sha1Prefix = ... # type: typing___Text + entityKey = ... # type: typing___Text + reversedMillis = ... # type: typing___Text + + def __init__(self, + sha1Prefix : typing___Optional[typing___Text] = None, + entityKey : typing___Optional[typing___Text] = None, + reversedMillis : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> BigTableRowKey: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"entityKey",u"reversedMillis",u"sha1Prefix"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"entityKey",b"reversedMillis",b"sha1Prefix"]) -> None: ... diff --git a/sdk/python/feast/storage/Redis_pb2.pyi b/sdk/python/feast/storage/Redis_pb2.pyi new file mode 100644 index 0000000000..fa4dba708b --- /dev/null +++ b/sdk/python/feast/storage/Redis_pb2.pyi @@ -0,0 +1,87 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from feast.types.Value_pb2 import ( + Value as feast___types___Value_pb2___Value, +) + +from google.protobuf.internal.containers import ( + RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from google.protobuf.timestamp_pb2 import ( + Timestamp as google___protobuf___timestamp_pb2___Timestamp, +) + +from typing import ( + Iterable as typing___Iterable, + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class RedisBucketKey(google___protobuf___message___Message): + entityKey = ... # type: typing___Text + featureIdSha1Prefix = ... # type: typing___Text + bucketId = ... 
# type: int + + def __init__(self, + entityKey : typing___Optional[typing___Text] = None, + featureIdSha1Prefix : typing___Optional[typing___Text] = None, + bucketId : typing___Optional[int] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> RedisBucketKey: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"bucketId",u"entityKey",u"featureIdSha1Prefix"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"bucketId",b"entityKey",b"featureIdSha1Prefix"]) -> None: ... + +class RedisBucketValue(google___protobuf___message___Message): + + @property + def value(self) -> feast___types___Value_pb2___Value: ... + + @property + def eventTimestamp(self) -> google___protobuf___timestamp_pb2___Timestamp: ... + + def __init__(self, + value : typing___Optional[feast___types___Value_pb2___Value] = None, + eventTimestamp : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> RedisBucketValue: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"eventTimestamp",u"value"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"eventTimestamp",u"value"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"eventTimestamp",b"eventTimestamp",u"value",b"value"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"eventTimestamp",b"value"]) -> None: ... + +class RedisBucketValueList(google___protobuf___message___Message): + + @property + def values(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[RedisBucketValue]: ... + + def __init__(self, + values : typing___Optional[typing___Iterable[RedisBucketValue]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> RedisBucketValueList: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"values"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"values"]) -> None: ... diff --git a/sdk/python/feast/types/FeatureRowExtended_pb2.pyi b/sdk/python/feast/types/FeatureRowExtended_pb2.pyi new file mode 100644 index 0000000000..6a02035dc4 --- /dev/null +++ b/sdk/python/feast/types/FeatureRowExtended_pb2.pyi @@ -0,0 +1,92 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! 
+import sys +from feast.types.FeatureRow_pb2 import ( + FeatureRow as feast___types___FeatureRow_pb2___FeatureRow, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from google.protobuf.timestamp_pb2 import ( + Timestamp as google___protobuf___timestamp_pb2___Timestamp, +) + +from typing import ( + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class Error(google___protobuf___message___Message): + cause = ... # type: typing___Text + transform = ... # type: typing___Text + message = ... # type: typing___Text + stackTrace = ... # type: typing___Text + + def __init__(self, + cause : typing___Optional[typing___Text] = None, + transform : typing___Optional[typing___Text] = None, + message : typing___Optional[typing___Text] = None, + stackTrace : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Error: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"cause",u"message",u"stackTrace",u"transform"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"cause",b"message",b"stackTrace",b"transform"]) -> None: ... + +class Attempt(google___protobuf___message___Message): + attempts = ... # type: int + + @property + def error(self) -> Error: ... + + def __init__(self, + attempts : typing___Optional[int] = None, + error : typing___Optional[Error] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Attempt: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"error"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"attempts",u"error"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"error",b"error"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"attempts",b"error"]) -> None: ... + +class FeatureRowExtended(google___protobuf___message___Message): + + @property + def row(self) -> feast___types___FeatureRow_pb2___FeatureRow: ... + + @property + def lastAttempt(self) -> Attempt: ... + + @property + def firstSeen(self) -> google___protobuf___timestamp_pb2___Timestamp: ... + + def __init__(self, + row : typing___Optional[feast___types___FeatureRow_pb2___FeatureRow] = None, + lastAttempt : typing___Optional[Attempt] = None, + firstSeen : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> FeatureRowExtended: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"firstSeen",u"lastAttempt",u"row"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"firstSeen",u"lastAttempt",u"row"]) -> None: ... 
+ else: + def HasField(self, field_name: typing_extensions___Literal[u"firstSeen",b"firstSeen",u"lastAttempt",b"lastAttempt",u"row",b"row"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"firstSeen",b"lastAttempt",b"row"]) -> None: ... diff --git a/sdk/python/feast/types/FeatureRow_pb2.pyi b/sdk/python/feast/types/FeatureRow_pb2.pyi new file mode 100644 index 0000000000..1b93879d2a --- /dev/null +++ b/sdk/python/feast/types/FeatureRow_pb2.pyi @@ -0,0 +1,55 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from feast.types.Feature_pb2 import ( + Feature as feast___types___Feature_pb2___Feature, +) + +from google.protobuf.internal.containers import ( + RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from google.protobuf.timestamp_pb2 import ( + Timestamp as google___protobuf___timestamp_pb2___Timestamp, +) + +from typing import ( + Iterable as typing___Iterable, + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class FeatureRow(google___protobuf___message___Message): + entityKey = ... # type: typing___Text + entityName = ... # type: typing___Text + + @property + def features(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[feast___types___Feature_pb2___Feature]: ... + + @property + def eventTimestamp(self) -> google___protobuf___timestamp_pb2___Timestamp: ... + + def __init__(self, + entityKey : typing___Optional[typing___Text] = None, + features : typing___Optional[typing___Iterable[feast___types___Feature_pb2___Feature]] = None, + eventTimestamp : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, + entityName : typing___Optional[typing___Text] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> FeatureRow: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"eventTimestamp"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"entityKey",u"entityName",u"eventTimestamp",u"features"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"eventTimestamp",b"eventTimestamp"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"entityKey",b"entityName",b"eventTimestamp",b"features"]) -> None: ... diff --git a/sdk/python/feast/types/Feature_pb2.pyi b/sdk/python/feast/types/Feature_pb2.pyi new file mode 100644 index 0000000000..5d7fbc46ed --- /dev/null +++ b/sdk/python/feast/types/Feature_pb2.pyi @@ -0,0 +1,40 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from feast.types.Value_pb2 import ( + Value as feast___types___Value_pb2___Value, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from typing import ( + Optional as typing___Optional, + Text as typing___Text, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class Feature(google___protobuf___message___Message): + id = ... # type: typing___Text + + @property + def value(self) -> feast___types___Value_pb2___Value: ... 
+ + def __init__(self, + id : typing___Optional[typing___Text] = None, + value : typing___Optional[feast___types___Value_pb2___Value] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Feature: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"value"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"id",u"value"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"value",b"value"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"id",b"value"]) -> None: ... diff --git a/sdk/python/feast/types/Value_pb2.pyi b/sdk/python/feast/types/Value_pb2.pyi new file mode 100644 index 0000000000..8aa5092fdf --- /dev/null +++ b/sdk/python/feast/types/Value_pb2.pyi @@ -0,0 +1,267 @@ +# @generated by generate_proto_mypy_stubs.py. Do not edit! +import sys +from google.protobuf.descriptor import ( + EnumDescriptor as google___protobuf___descriptor___EnumDescriptor, +) + +from google.protobuf.internal.containers import ( + RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer, + RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer, +) + +from google.protobuf.message import ( + Message as google___protobuf___message___Message, +) + +from google.protobuf.timestamp_pb2 import ( + Timestamp as google___protobuf___timestamp_pb2___Timestamp, +) + +from typing import ( + Iterable as typing___Iterable, + List as typing___List, + Optional as typing___Optional, + Text as typing___Text, + Tuple as typing___Tuple, + cast as typing___cast, +) + +from typing_extensions import ( + Literal as typing_extensions___Literal, +) + + +class ValueType(google___protobuf___message___Message): + class Enum(int): + DESCRIPTOR: google___protobuf___descriptor___EnumDescriptor = ... + @classmethod + def Name(cls, number: int) -> str: ... + @classmethod + def Value(cls, name: str) -> ValueType.Enum: ... + @classmethod + def keys(cls) -> typing___List[str]: ... + @classmethod + def values(cls) -> typing___List[ValueType.Enum]: ... + @classmethod + def items(cls) -> typing___List[typing___Tuple[str, ValueType.Enum]]: ... + UNKNOWN = typing___cast(Enum, 0) + BYTES = typing___cast(Enum, 1) + STRING = typing___cast(Enum, 2) + INT32 = typing___cast(Enum, 3) + INT64 = typing___cast(Enum, 4) + DOUBLE = typing___cast(Enum, 5) + FLOAT = typing___cast(Enum, 6) + BOOL = typing___cast(Enum, 7) + TIMESTAMP = typing___cast(Enum, 8) + + + def __init__(self, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> ValueType: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + +class Value(google___protobuf___message___Message): + bytesVal = ... # type: bytes + stringVal = ... # type: typing___Text + int32Val = ... # type: int + int64Val = ... # type: int + doubleVal = ... # type: float + floatVal = ... # type: float + boolVal = ... # type: bool + + @property + def timestampVal(self) -> google___protobuf___timestamp_pb2___Timestamp: ... 
+ + def __init__(self, + bytesVal : typing___Optional[bytes] = None, + stringVal : typing___Optional[typing___Text] = None, + int32Val : typing___Optional[int] = None, + int64Val : typing___Optional[int] = None, + doubleVal : typing___Optional[float] = None, + floatVal : typing___Optional[float] = None, + boolVal : typing___Optional[bool] = None, + timestampVal : typing___Optional[google___protobuf___timestamp_pb2___Timestamp] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Value: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"boolVal",u"bytesVal",u"doubleVal",u"floatVal",u"int32Val",u"int64Val",u"stringVal",u"timestampVal",u"val"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"boolVal",u"bytesVal",u"doubleVal",u"floatVal",u"int32Val",u"int64Val",u"stringVal",u"timestampVal",u"val"]) -> None: ... + else: + def HasField(self, field_name: typing_extensions___Literal[u"boolVal",b"boolVal",u"bytesVal",b"bytesVal",u"doubleVal",b"doubleVal",u"floatVal",b"floatVal",u"int32Val",b"int32Val",u"int64Val",b"int64Val",u"stringVal",b"stringVal",u"timestampVal",b"timestampVal",u"val",b"val"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"boolVal",b"bytesVal",b"doubleVal",b"floatVal",b"int32Val",b"int64Val",b"stringVal",b"timestampVal",b"val"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions___Literal[u"val",b"val"]) -> typing_extensions___Literal["bytesVal","stringVal","int32Val","int64Val","doubleVal","floatVal","boolVal","timestampVal"]: ... + +class ValueList(google___protobuf___message___Message): + + @property + def bytesList(self) -> BytesList: ... + + @property + def stringList(self) -> StringList: ... + + @property + def int32List(self) -> Int32List: ... + + @property + def int64List(self) -> Int64List: ... + + @property + def doubleList(self) -> DoubleList: ... + + @property + def floatList(self) -> FloatList: ... + + @property + def boolList(self) -> BoolList: ... + + @property + def timestampList(self) -> TimestampList: ... + + def __init__(self, + bytesList : typing___Optional[BytesList] = None, + stringList : typing___Optional[StringList] = None, + int32List : typing___Optional[Int32List] = None, + int64List : typing___Optional[Int64List] = None, + doubleList : typing___Optional[DoubleList] = None, + floatList : typing___Optional[FloatList] = None, + boolList : typing___Optional[BoolList] = None, + timestampList : typing___Optional[TimestampList] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> ValueList: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def HasField(self, field_name: typing_extensions___Literal[u"boolList",u"bytesList",u"doubleList",u"floatList",u"int32List",u"int64List",u"stringList",u"timestampList",u"valueList"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[u"boolList",u"bytesList",u"doubleList",u"floatList",u"int32List",u"int64List",u"stringList",u"timestampList",u"valueList"]) -> None: ... 
+ else: + def HasField(self, field_name: typing_extensions___Literal[u"boolList",b"boolList",u"bytesList",b"bytesList",u"doubleList",b"doubleList",u"floatList",b"floatList",u"int32List",b"int32List",u"int64List",b"int64List",u"stringList",b"stringList",u"timestampList",b"timestampList",u"valueList",b"valueList"]) -> bool: ... + def ClearField(self, field_name: typing_extensions___Literal[b"boolList",b"bytesList",b"doubleList",b"floatList",b"int32List",b"int64List",b"stringList",b"timestampList",b"valueList"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions___Literal[u"valueList",b"valueList"]) -> typing_extensions___Literal["bytesList","stringList","int32List","int64List","doubleList","floatList","boolList","timestampList"]: ... + +class BytesList(google___protobuf___message___Message): + val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bytes] + + def __init__(self, + val : typing___Optional[typing___Iterable[bytes]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> BytesList: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"val"]) -> None: ... + +class StringList(google___protobuf___message___Message): + val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text] + + def __init__(self, + val : typing___Optional[typing___Iterable[typing___Text]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> StringList: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"val"]) -> None: ... + +class Int32List(google___protobuf___message___Message): + val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[int] + + def __init__(self, + val : typing___Optional[typing___Iterable[int]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Int32List: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"val"]) -> None: ... + +class Int64List(google___protobuf___message___Message): + val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[int] + + def __init__(self, + val : typing___Optional[typing___Iterable[int]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> Int64List: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... 
+ else: + def ClearField(self, field_name: typing_extensions___Literal[b"val"]) -> None: ... + +class DoubleList(google___protobuf___message___Message): + val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[float] + + def __init__(self, + val : typing___Optional[typing___Iterable[float]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> DoubleList: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"val"]) -> None: ... + +class FloatList(google___protobuf___message___Message): + val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[float] + + def __init__(self, + val : typing___Optional[typing___Iterable[float]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> FloatList: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"val"]) -> None: ... + +class BoolList(google___protobuf___message___Message): + val = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[bool] + + def __init__(self, + val : typing___Optional[typing___Iterable[bool]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> BoolList: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"val"]) -> None: ... + +class TimestampList(google___protobuf___message___Message): + + @property + def val(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[google___protobuf___timestamp_pb2___Timestamp]: ... + + def __init__(self, + val : typing___Optional[typing___Iterable[google___protobuf___timestamp_pb2___Timestamp]] = None, + ) -> None: ... + @classmethod + def FromString(cls, s: bytes) -> TimestampList: ... + def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ... + if sys.version_info >= (3,): + def ClearField(self, field_name: typing_extensions___Literal[u"val"]) -> None: ... + else: + def ClearField(self, field_name: typing_extensions___Literal[b"val"]) -> None: ... 
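Note on the generated stubs above: they only describe types so that mypy can check SDK code; at runtime the matching generated *_pb2.py modules supply the actual message classes. A minimal sketch of the kind of code these stubs are meant to type-check — the entity name, feature id, and timestamp values below are purely illustrative and not taken from Feast itself:

    # Sketch only: field and module names come from the stubs above;
    # concrete values are made up for illustration.
    from google.protobuf.timestamp_pb2 import Timestamp

    from feast.types.Value_pb2 import Value
    from feast.types.Feature_pb2 import Feature
    from feast.types.FeatureRow_pb2 import FeatureRow

    # Build a Value; exactly one member of the "val" oneof may be set.
    ride_count = Value(int64Val=42)

    # Wrap it in a Feature keyed by a feature id.
    feature = Feature(id="driver.ride_count", value=ride_count)

    # Assemble a FeatureRow carrying the feature plus its event timestamp.
    event_time = Timestamp(seconds=1568246400)
    row = FeatureRow(
        entityKey="driver-1234",
        entityName="driver",
        features=[feature],
        eventTimestamp=event_time,
    )

    # The oneof is visible to the type checker as well as at runtime.
    assert ride_count.WhichOneof("val") == "int64Val"

Because Value models its payload as a "val" oneof, a type checker using these stubs can flag call sites that read a field without first branching on WhichOneof("val").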
From 31737409330d887f4a0b8f36c194915de139c7d0 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Wed, 11 Sep 2019 00:50:03 +0800 Subject: [PATCH 10/19] Increase version of google-cloud-storage in python sdk --- sdk/python/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/python/setup.py b/sdk/python/setup.py index a8b0c7cdc8..20b4f7ff97 100644 --- a/sdk/python/setup.py +++ b/sdk/python/setup.py @@ -26,7 +26,7 @@ "google-api-core>=1.7.0", "google-auth>=1.6.0", "google-cloud-bigquery>=1.8.0", - "google-cloud-storage>=1.13.0", + "google-cloud-storage>=1.18.0", "googleapis-common-protos>=1.5.5", "grpcio>=1.16.1", "pandas>=0.24.0", From cba6139dd7ec34f8d23077c5802eded58de812ad Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Wed, 11 Sep 2019 11:05:09 +0800 Subject: [PATCH 11/19] Update docs for influx_db_url field in ImportJobSpecs proto --- protos/feast/specs/ImportJobSpecs.proto | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/protos/feast/specs/ImportJobSpecs.proto b/protos/feast/specs/ImportJobSpecs.proto index 66df3ea4c6..6dd3b17d09 100644 --- a/protos/feast/specs/ImportJobSpecs.proto +++ b/protos/feast/specs/ImportJobSpecs.proto @@ -53,7 +53,9 @@ message ImportJobSpecs { // - The field name for value is "value" // - Points will be tagged with "feature_id" and "entity_name" bool write_feature_metrics_to_influx_db = 8; - // influx_db_url is the url for Feast to connect to Influx DB. Example: http://localhost:8086 + // influx_db_url is the URL for Feast to connect to Influx DB server. + // For example: http://localhost:8086 + // Feast and the import jobs created by Feast must be able to reach this URL. string influx_db_url = 9; // influx_db_database is the name of the Influx DB database for the metrics. 
string influx_db_database = 10; From 9f5aa657d94d8cbea6e94975722baea4572d39a6 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Thu, 12 Sep 2019 09:17:14 +0800 Subject: [PATCH 12/19] Write feature metrics in fixed window for better performance --- .../WriteFeatureMetricsToInfluxDB.java | 176 +++++++++++++----- 1 file changed, 126 insertions(+), 50 deletions(-) diff --git a/ingestion/src/main/java/feast/ingestion/transform/WriteFeatureMetricsToInfluxDB.java b/ingestion/src/main/java/feast/ingestion/transform/WriteFeatureMetricsToInfluxDB.java index 7a4cb466e4..6f33de1c24 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/WriteFeatureMetricsToInfluxDB.java +++ b/ingestion/src/main/java/feast/ingestion/transform/WriteFeatureMetricsToInfluxDB.java @@ -4,21 +4,31 @@ import feast.types.FeatureRowExtendedProto.FeatureRowExtended; import feast.types.FeatureRowProto.FeatureRow; import feast.types.ValueProto.Value; +import java.util.DoubleSummaryStatistics; +import java.util.LongSummaryStatistics; import java.util.concurrent.TimeUnit; +import org.apache.beam.sdk.coders.AvroCoder; +import org.apache.beam.sdk.coders.DefaultCoder; import org.apache.beam.sdk.transforms.DoFn; +import org.apache.beam.sdk.transforms.GroupByKey; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.ParDo; +import org.apache.beam.sdk.transforms.windowing.FixedWindows; +import org.apache.beam.sdk.transforms.windowing.Window; +import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.PDone; import org.influxdb.BatchOptions; import org.influxdb.InfluxDB; import org.influxdb.InfluxDBFactory; import org.influxdb.dto.Point; +import org.joda.time.Duration; -@SuppressWarnings("CatchMayIgnoreException") public class WriteFeatureMetricsToInfluxDB extends PTransform, PDone> { + private static final int DEFAULT_WINDOW_DURATION_IN_SEC = 30; + private static final int DEFAULT_INFLUX_DB_JITTER_DURATION_IN_MS = 100; private String influxDbUrl; private String influxDbDatabase; private String influxDbMeasurement; @@ -30,58 +40,124 @@ public WriteFeatureMetricsToInfluxDB( this.influxDbMeasurement = influxDbMeasurement; } + @DefaultCoder(AvroCoder.class) + static class FeatureMetric { + // value is the numeric value of the feature, FeatureMetric only supports double type + // if the value is a timestamp type, value corresponds to epoch seconds + // if the value is a boolean type, value of 1 corresponds to "true" and 0 otherwise + // if the value type is other non number format, value will be set to 0 + double value; + // lagInSeconds is the delta between processing time in the Dataflow job + // and the event time of the FeatureRow containing this feature + long lagInSeconds; + String entityName; + + public FeatureMetric() {} + + FeatureMetric(double value, long lagInSeconds, String entityName) { + this.value = value; + this.lagInSeconds = lagInSeconds; + this.entityName = entityName; + } + } + @Override public PDone expand(PCollection input) { - input.apply( - ParDo.of( - new DoFn() { - InfluxDB influxDB; - - @Setup - public void setup() { - try { - influxDB = InfluxDBFactory.connect(influxDbUrl); - influxDB.setDatabase(influxDbDatabase); - influxDB.enableBatch(BatchOptions.DEFAULTS); - } catch (Exception e) { - // Ignored because writing metrics is not a critical component of Feaast - // and we do not want to get overwhelmed with failed connection logs - } - } - - @FinishBundle - public void finishBundle() { - if (influxDB != null) { - 
influxDB.close(); - } - } - - @ProcessElement - public void processElement( - ProcessContext c, @Element FeatureRowExtended featureRowExtended) { - FeatureRow featureRow = featureRowExtended.getRow(); - for (Feature feature : featureRow.getFeaturesList()) { - String featureId = feature.getId(); - long lagInSeconds = - System.currentTimeMillis() / 1000L - - featureRow.getEventTimestamp().getSeconds(); - double value = getValue(feature); - try { - influxDB.write( - Point.measurement(influxDbMeasurement) - .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS) - .addField("lag_in_seconds", lagInSeconds) - .addField("value", value) - .tag("feature_id", featureId) - .tag("entity_name", featureRow.getEntityName()) - .build()); - } catch (Exception e) { - // Ignored because writing metrics is not a critical component of Feaast - // and we do not want to get overwhelmed with failed connection logs + input + .apply( + Window.into(FixedWindows.of(Duration.standardSeconds(DEFAULT_WINDOW_DURATION_IN_SEC)))) + .apply( + "Create feature metric keyed by feature id", + ParDo.of( + new DoFn>() { + @ProcessElement + public void processElement(ProcessContext c) { + FeatureRow featureRow = c.element().getRow(); + long lagInSeconds = + System.currentTimeMillis() / 1000L + - featureRow.getEventTimestamp().getSeconds(); + for (Feature feature : featureRow.getFeaturesList()) { + c.output( + KV.of( + feature.getId(), + new FeatureMetric( + getValue(feature), lagInSeconds, featureRow.getEntityName()))); + } + } + })) + .apply(GroupByKey.create()) + .apply( + ParDo.of( + new DoFn>, Void>() { + InfluxDB influxDB; + + @Setup + public void setup() { + try { + influxDB = InfluxDBFactory.connect(influxDbUrl); + influxDB.setDatabase(influxDbDatabase); + influxDB.enableBatch( + BatchOptions.DEFAULTS.jitterDuration( + DEFAULT_INFLUX_DB_JITTER_DURATION_IN_MS)); + } catch (Exception e) { + // Ignored because writing metrics is not a critical component of Feaast + // and we do not want to get overwhelmed with connection error logs + // due to timeouts and downtime in upstream Influx DB server + } + } + + @Teardown + public void tearDown() { + if (influxDB != null) { + influxDB.close(); + } + } + + @ProcessElement + public void processElement(ProcessContext c) { + if (influxDB == null) { + // Influx DB client is not setup properly, skip writing metrics + return; + } + + DoubleSummaryStatistics statsForValue = new DoubleSummaryStatistics(); + LongSummaryStatistics statsForLagInSeconds = new LongSummaryStatistics(); + + String entityName = null; + String featureId = c.element().getKey(); + + for (FeatureMetric featureMetric : c.element().getValue()) { + statsForValue.accept(featureMetric.value); + statsForLagInSeconds.accept(featureMetric.lagInSeconds); + if (entityName == null) { + entityName = featureMetric.entityName; + } + } + + if (featureId == null || entityName == null) { + return; + } + + try { + influxDB.write( + Point.measurement(influxDbMeasurement) + .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS) + .addField("lag_in_seconds_mean", statsForLagInSeconds.getAverage()) + .addField("lag_in_seconds_min", statsForLagInSeconds.getMin()) + .addField("lag_in_seconds_max", statsForLagInSeconds.getMax()) + .addField("value_mean", statsForValue.getAverage()) + .addField("value_min", statsForValue.getMin()) + .addField("value_max", statsForValue.getMax()) + .tag("feature_id", featureId) + .tag("entity_name", entityName) + .build()); + } catch (Exception e) { + // Ignored because writing metrics is not a critical 
component of Feaast + // and we do not want to get overwhelmed with failed metric write logs + // due to timeouts and downtime in upstream Influx DB server + } } - } - } - })); + })); return PDone.in(input.getPipeline()); } From d78b01d0b05c15b9bf57a498154dfa3a0723b5c4 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Thu, 12 Sep 2019 09:20:38 +0800 Subject: [PATCH 13/19] Update default image in Helm chart --- charts/feast/Chart.yaml | 2 +- charts/feast/values.yaml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/charts/feast/Chart.yaml b/charts/feast/Chart.yaml index 842ab7f13f..0bf935e3ef 100644 --- a/charts/feast/Chart.yaml +++ b/charts/feast/Chart.yaml @@ -1,5 +1,5 @@ apiVersion: v1 -appVersion: "0.1.5" +appVersion: "0.1.6" description: A Helm chart to install Feast on kubernetes name: feast version: 0.1.3 diff --git a/charts/feast/values.yaml b/charts/feast/values.yaml index 259e1fa744..b6a9439493 100644 --- a/charts/feast/values.yaml +++ b/charts/feast/values.yaml @@ -18,7 +18,7 @@ core: pullPolicy: IfNotPresent registry: gcr.io/kf-feast repository: feast-core - tag: "0.1.5" + tag: "0.1.6" replicaCount: 1 resources: limits: @@ -105,7 +105,7 @@ serving: pullPolicy: IfNotPresent registry: gcr.io/kf-feast repository: feast-serving - tag: "0.1.5" + tag: "0.1.6" replicaCount: 1 resources: limits: From 5ed43803ea152ab9999a536b662c498a7dffd684 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Thu, 12 Sep 2019 17:41:58 +0800 Subject: [PATCH 14/19] Write feature metrics only if the job type is streaming --- charts/feast/Chart.yaml | 2 +- .../src/main/java/feast/ingestion/ImportJob.java | 16 +++++++++++----- .../options/ImportJobPipelineOptions.java | 3 ++- 3 files changed, 14 insertions(+), 7 deletions(-) diff --git a/charts/feast/Chart.yaml b/charts/feast/Chart.yaml index 0bf935e3ef..f5e39025fb 100644 --- a/charts/feast/Chart.yaml +++ b/charts/feast/Chart.yaml @@ -1,5 +1,5 @@ apiVersion: v1 -appVersion: "0.1.6" +appVersion: "0.1.7" description: A Helm chart to install Feast on kubernetes name: feast version: 0.1.3 diff --git a/ingestion/src/main/java/feast/ingestion/ImportJob.java b/ingestion/src/main/java/feast/ingestion/ImportJob.java index 9961f759f3..136796d673 100644 --- a/ingestion/src/main/java/feast/ingestion/ImportJob.java +++ b/ingestion/src/main/java/feast/ingestion/ImportJob.java @@ -57,6 +57,7 @@ import org.apache.beam.sdk.coders.CoderRegistry; import org.apache.beam.sdk.extensions.protobuf.ProtoCoder; import org.apache.beam.sdk.io.gcp.bigquery.TableRowJsonCoder; +import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.options.PipelineOptionsFactory; import org.apache.beam.sdk.transforms.ParDo; import org.apache.beam.sdk.transforms.Sample; @@ -203,11 +204,16 @@ public void expand() { } if (!dryRun) { - servingRows.apply( - new WriteFeatureMetricsToInfluxDB( - importJobSpecs.getInfluxDbUrl(), - importJobSpecs.getInfluxDbDatabase(), - importJobSpecs.getInfluxDbMeasurement())); + + if (options.isStreaming()) { + // Write feature metrics only if it is a streaming job + servingRows.apply( + new WriteFeatureMetricsToInfluxDB( + importJobSpecs.getInfluxDbUrl(), + importJobSpecs.getInfluxDbDatabase(), + importJobSpecs.getInfluxDbMeasurement())); + } + servingRows.apply("Write to Serving Stores", servingStoreTransform); if (!Strings.isNullOrEmpty(importJobSpecs.getWarehouseStorageSpec().getId())) { diff --git a/ingestion/src/main/java/feast/ingestion/options/ImportJobPipelineOptions.java 
b/ingestion/src/main/java/feast/ingestion/options/ImportJobPipelineOptions.java index aea751b8f7..934242ff54 100644 --- a/ingestion/src/main/java/feast/ingestion/options/ImportJobPipelineOptions.java +++ b/ingestion/src/main/java/feast/ingestion/options/ImportJobPipelineOptions.java @@ -23,10 +23,11 @@ import org.apache.beam.sdk.options.Description; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.options.PipelineOptionsRegistrar; +import org.apache.beam.sdk.options.StreamingOptions; import org.apache.beam.sdk.options.Validation.Required; /** Options passed to Beam to influence the job's execution environment */ -public interface ImportJobPipelineOptions extends PipelineOptions { +public interface ImportJobPipelineOptions extends PipelineOptions, StreamingOptions { @Description("Path to a workspace directory containing importJobSpecs.yaml") @Required From 94905f987ef4cccbe800cf90bba8f8b6367adcdc Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Thu, 12 Sep 2019 17:48:28 +0800 Subject: [PATCH 15/19] Set transform name for writing feature metrics to Influx DB --- ingestion/src/main/java/feast/ingestion/ImportJob.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ingestion/src/main/java/feast/ingestion/ImportJob.java b/ingestion/src/main/java/feast/ingestion/ImportJob.java index 136796d673..f845615a5e 100644 --- a/ingestion/src/main/java/feast/ingestion/ImportJob.java +++ b/ingestion/src/main/java/feast/ingestion/ImportJob.java @@ -207,7 +207,7 @@ public void expand() { if (options.isStreaming()) { // Write feature metrics only if it is a streaming job - servingRows.apply( + servingRows.apply("Write feature metrics to Influx DB", new WriteFeatureMetricsToInfluxDB( importJobSpecs.getInfluxDbUrl(), importJobSpecs.getInfluxDbDatabase(), From c4da34cb8aadbd43736d319b55debca182df42f4 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Thu, 12 Sep 2019 18:26:54 +0800 Subject: [PATCH 16/19] Update chart, making resources limit optional Following how stable postgresql chart specifies it https://github.com/helm/charts/blob/master/stable/postgresql/values.yaml --- charts/feast/Chart.yaml | 2 +- charts/feast/templates/core-deploy.yaml | 7 +------ charts/feast/templates/serving-deploy.yaml | 7 +------ charts/feast/values.yaml | 6 ------ 4 files changed, 3 insertions(+), 19 deletions(-) diff --git a/charts/feast/Chart.yaml b/charts/feast/Chart.yaml index f5e39025fb..6cc6b68c83 100644 --- a/charts/feast/Chart.yaml +++ b/charts/feast/Chart.yaml @@ -2,4 +2,4 @@ apiVersion: v1 appVersion: "0.1.7" description: A Helm chart to install Feast on kubernetes name: feast -version: 0.1.3 +version: 0.1.4 diff --git a/charts/feast/templates/core-deploy.yaml b/charts/feast/templates/core-deploy.yaml index 97a99603ab..8825a3515b 100644 --- a/charts/feast/templates/core-deploy.yaml +++ b/charts/feast/templates/core-deploy.yaml @@ -58,12 +58,7 @@ spec: timeoutSeconds: 3 failureThreshold: {{ .Values.core.readinessProbe.failureThreshold }} resources: - requests: - cpu: {{ .Values.core.resources.requests.cpu }} - memory: {{ .Values.core.resources.requests.memory }} - limits: - cpu: {{ .Values.core.resources.limits.cpu }} - memory: {{ .Values.core.resources.limits.memory }} +{{ toYaml .Values.core.resources | indent 10 }} {{- if .Values.serviceAccount }} volumeMounts: - name: "{{ .Values.serviceAccount.name }}" diff --git a/charts/feast/templates/serving-deploy.yaml b/charts/feast/templates/serving-deploy.yaml index 78994c5a09..470117fabf 100644 --- 
a/charts/feast/templates/serving-deploy.yaml +++ b/charts/feast/templates/serving-deploy.yaml @@ -59,12 +59,7 @@ spec: timeoutSeconds: 3 failureThreshold: {{ .Values.serving.readinessProbe.failureThreshold }} resources: - requests: - cpu: "{{ .Values.serving.resources.requests.cpu }}" - memory: "{{ .Values.serving.resources.requests.memory }}" - limits: - cpu: "{{ .Values.serving.resources.limits.cpu }}" - memory: "{{ .Values.serving.resources.limits.memory }}" +{{ toYaml .Values.serving.resources | indent 10 }} env: - name: FEAST_SERVING_HTTP_PORT value: "{{ .Values.serving.service.http.targetPort }}" diff --git a/charts/feast/values.yaml b/charts/feast/values.yaml index b6a9439493..a118b1e5d6 100644 --- a/charts/feast/values.yaml +++ b/charts/feast/values.yaml @@ -21,9 +21,6 @@ core: tag: "0.1.6" replicaCount: 1 resources: - limits: - cpu: 4 - memory: 6G requests: cpu: 1 memory: 2G @@ -108,9 +105,6 @@ serving: tag: "0.1.6" replicaCount: 1 resources: - limits: - cpu: 2 - memory: 4G requests: cpu: 1 memory: 1G From a4497dcaffff51fd889b220a8de1897282ed1b80 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Fri, 13 Sep 2019 02:23:50 +0800 Subject: [PATCH 17/19] Fix the check for unbounded sources Check against the ImportSpec.Type value (kafka or pubsub) instead of PipelineOptions.Streaming (the streaming status cannot be reliably determined at pipeline creation stage) --- .../main/java/feast/ingestion/ImportJob.java | 23 +++++++++++++++---- .../options/ImportJobPipelineOptions.java | 2 +- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/ingestion/src/main/java/feast/ingestion/ImportJob.java b/ingestion/src/main/java/feast/ingestion/ImportJob.java index f845615a5e..536db15ff7 100644 --- a/ingestion/src/main/java/feast/ingestion/ImportJob.java +++ b/ingestion/src/main/java/feast/ingestion/ImportJob.java @@ -204,9 +204,25 @@ public void expand() { } if (!dryRun) { - - if (options.isStreaming()) { - // Write feature metrics only if it is a streaming job + // Write feature metrics only if the source is unbounded. + // Feature metrics such as lag is usually relevant when we are consuming real-time + // data from unbounded sources, because we want to check how responsive or how fresh + // our processed data are in the streaming pipelines. + // + // When the source of the data is bounded, it is normal for users to provide + // data with event timestamp way behind the current timestamp. For example, when + // users are doing tests or performing a backfill. + // + // Combining feature metrics from these 2 different sources may confuse the interpretation + // of the metrics. For example, we may think our processing pipelines are lagging, + // when in fact someone is running a backfill of data in the past. + // + // TODO: Consider tagging the data point with data source name (e.g. Kafka, Pub/Sub) or + // data source type (bounded vs unbounded) in order to differentiate metrics coming + // from different sources. + // + // Kafka and Pub/Sub are the unbounded sources supported in Feast 0.1.x. 
+ if (importJobSpecs.getImportSpec().getType().matches("(?i)kafka|pubsub")) { servingRows.apply("Write feature metrics to Influx DB", new WriteFeatureMetricsToInfluxDB( importJobSpecs.getInfluxDbUrl(), @@ -214,7 +230,6 @@ public void expand() { importJobSpecs.getInfluxDbMeasurement())); } - servingRows.apply("Write to Serving Stores", servingStoreTransform); if (!Strings.isNullOrEmpty(importJobSpecs.getWarehouseStorageSpec().getId())) { warehouseRows.apply("Write to Warehouse Stores", warehouseStoreTransform); diff --git a/ingestion/src/main/java/feast/ingestion/options/ImportJobPipelineOptions.java b/ingestion/src/main/java/feast/ingestion/options/ImportJobPipelineOptions.java index 934242ff54..cc88ee861c 100644 --- a/ingestion/src/main/java/feast/ingestion/options/ImportJobPipelineOptions.java +++ b/ingestion/src/main/java/feast/ingestion/options/ImportJobPipelineOptions.java @@ -27,7 +27,7 @@ import org.apache.beam.sdk.options.Validation.Required; /** Options passed to Beam to influence the job's execution environment */ -public interface ImportJobPipelineOptions extends PipelineOptions, StreamingOptions { +public interface ImportJobPipelineOptions extends PipelineOptions { @Description("Path to a workspace directory containing importJobSpecs.yaml") @Required From 06f1514ec21dec5aea465fe7b1c1908bbf7a4e0f Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Thu, 19 Sep 2019 11:15:42 +0800 Subject: [PATCH 18/19] Fix typo --- .../ingestion/transform/WriteFeatureMetricsToInfluxDB.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ingestion/src/main/java/feast/ingestion/transform/WriteFeatureMetricsToInfluxDB.java b/ingestion/src/main/java/feast/ingestion/transform/WriteFeatureMetricsToInfluxDB.java index 6f33de1c24..aeeabad6ba 100644 --- a/ingestion/src/main/java/feast/ingestion/transform/WriteFeatureMetricsToInfluxDB.java +++ b/ingestion/src/main/java/feast/ingestion/transform/WriteFeatureMetricsToInfluxDB.java @@ -100,7 +100,7 @@ public void setup() { BatchOptions.DEFAULTS.jitterDuration( DEFAULT_INFLUX_DB_JITTER_DURATION_IN_MS)); } catch (Exception e) { - // Ignored because writing metrics is not a critical component of Feaast + // Ignored because writing metrics is not a critical component of Feast // and we do not want to get overwhelmed with connection error logs // due to timeouts and downtime in upstream Influx DB server } @@ -152,7 +152,7 @@ public void processElement(ProcessContext c) { .tag("entity_name", entityName) .build()); } catch (Exception e) { - // Ignored because writing metrics is not a critical component of Feaast + // Ignored because writing metrics is not a critical component of Feast // and we do not want to get overwhelmed with failed metric write logs // due to timeouts and downtime in upstream Influx DB server } From 2b79d476eaa53f25f01e479a3ac568bec5b09e20 Mon Sep 17 00:00:00 2001 From: David Heryanto Date: Thu, 19 Sep 2019 11:16:43 +0800 Subject: [PATCH 19/19] Update default Feast image tag used in the chart --- charts/feast/values.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/charts/feast/values.yaml b/charts/feast/values.yaml index a118b1e5d6..b63d8b7305 100644 --- a/charts/feast/values.yaml +++ b/charts/feast/values.yaml @@ -18,7 +18,7 @@ core: pullPolicy: IfNotPresent registry: gcr.io/kf-feast repository: feast-core - tag: "0.1.6" + tag: "0.1.7" replicaCount: 1 resources: requests: @@ -102,7 +102,7 @@ serving: pullPolicy: IfNotPresent registry: gcr.io/kf-feast repository: feast-serving - tag: "0.1.6" + 
tag: "0.1.7" replicaCount: 1 resources: requests: