From 308c175e7d109342a281b93a9219adceeb1c270b Mon Sep 17 00:00:00 2001 From: tanlocnguyen Date: Wed, 6 Mar 2024 16:52:45 +0700 Subject: [PATCH] (fix) lint error import pyspark DataFrame --- .../contrib/spark_offline_store/spark.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py index cf89c811a29..168a488343a 100644 --- a/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py +++ b/sdk/python/feast/infra/offline_stores/contrib/spark_offline_store/spark.py @@ -13,7 +13,7 @@ import pyspark from pydantic import StrictStr from pyspark import SparkConf -from pyspark.sql import SparkSession, DataFrame as SparkDataFrame +from pyspark.sql import SparkSession from pytz import utc from feast import FeatureView, OnDemandFeatureView @@ -125,7 +125,7 @@ def get_historical_features( config: RepoConfig, feature_views: List[FeatureView], feature_refs: List[str], - entity_df: Union[pandas.DataFrame, str, SparkDataFrame], + entity_df: Union[pandas.DataFrame, str, pyspark.sql.DataFrame], registry: Registry, project: str, full_feature_names: bool = False, @@ -473,7 +473,7 @@ def _get_entity_df_event_timestamp_range( entity_df_event_timestamp.min().to_pydatetime(), entity_df_event_timestamp.max().to_pydatetime(), ) - elif isinstance(entity_df, str) or isinstance(entity_df, SparkDataFrame): + elif isinstance(entity_df, str) or isinstance(entity_df, pyspark.sql.DataFrame): # If the entity_df is a string (SQL query), determine range # from table if isinstance(entity_df, str): @@ -501,7 +501,7 @@ def _get_entity_schema( ) -> Dict[str, np.dtype]: if isinstance(entity_df, pd.DataFrame): return dict(zip(entity_df.columns, entity_df.dtypes)) - elif isinstance(entity_df, str) or isinstance(entity_df,SparkDataFrame): + elif isinstance(entity_df, str) or isinstance(entity_df, pyspark.sql.DataFrame): 
if isinstance(entity_df, str): entity_spark_df = spark_session.sql(entity_df) else: @@ -530,7 +530,7 @@ def _upload_entity_df( return elif isinstance(entity_df, str): spark_session.sql(entity_df).createOrReplaceTempView(table_name) - elif isinstance(entity_df, SparkDataFrame): + elif isinstance(entity_df, pyspark.sql.DataFrame): entity_df.createOrReplaceTempView(table_name) return else: