Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: add to_string method to SparkLikeExprDateTimeNamespace #1842

Open
wants to merge 8 commits into
base: main
Choose a base branch
from
50 changes: 50 additions & 0 deletions narwhals/_spark_like/expr_dt.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,56 @@ class SparkLikeExprDateTimeNamespace:
def __init__(self: Self, expr: SparkLikeExpr) -> None:
    # Store the wrapped expression; all dt.* methods delegate to it
    # via self._compliant_expr._from_call(...).
    self._compliant_expr = expr

def to_string(self: Self, format: str) -> SparkLikeExpr:  # noqa: A002
    """Convert a datetime column to string using a strftime-style *format*.

    Special ISO formats ("%G-W%V", "%G-W%V-%u", "%Y-%m-%dT%H:%M:%S.%f")
    are built from Spark column functions; any other format is translated
    to a Spark datetime pattern and passed to ``F.date_format``.
    """

    def _format_iso_week_with_day(_input: Column) -> Column:
        """Format datetime as ISO week string with day (e.g. 2025-W03-1)."""
        # Use "yyyy" (calendar year): Spark >= 3.0 rejects the week-based
        # "YYYY" pattern unless spark.sql.legacy.timeParserPolicy=LEGACY.
        # NOTE(review): calendar year may differ from the ISO week-based
        # year at year boundaries — confirm against expected test data.
        year = F.date_format(_input, "yyyy")
        week = F.lpad(F.weekofyear(_input).cast("string"), 2, "0")
        day = F.dayofweek(_input)
        # Spark's dayofweek: Sunday=1 .. Saturday=7; ISO wants Monday=1 .. Sunday=7.
        day = F.when(day == 1, 7).otherwise(day - 1)
        return F.concat(year, F.lit("-W"), week, F.lit("-"), day.cast("string"))

    def _format_iso_week(_input: Column) -> Column:
        """Format datetime as ISO week string (e.g. 2025-W03)."""
        # "yyyy", not "YYYY" — see note in _format_iso_week_with_day.
        year = F.date_format(_input, "yyyy")
        week = F.lpad(F.weekofyear(_input).cast("string"), 2, "0")
        return F.concat(year, F.lit("-W"), week)

    def _format_iso_datetime(_input: Column) -> Column:
        """Format datetime as ISO datetime with microseconds."""
        date_part = F.date_format(_input, "yyyy-MM-dd")
        time_part = F.date_format(_input, "HH:mm:ss")
        # date_format cannot render microseconds, so compute them separately
        # and zero-pad to 6 digits.
        micros = F.unix_micros(_input) % 1_000_000
        micros_str = F.lpad(micros.cast("string"), 6, "0")
        return F.concat(date_part, F.lit("T"), time_part, F.lit("."), micros_str)

    def _to_string(_input: Column) -> Column:
        # Handle special formats that have no direct Spark pattern.
        if format == "%G-W%V":
            return _format_iso_week(_input)
        if format == "%G-W%V-%u":
            return _format_iso_week_with_day(_input)
        if format in ("%Y-%m-%dT%H:%M:%S.%f", "%Y-%m-%dT%H:%M:%S%.f"):
            return _format_iso_datetime(_input)

        # Standard strftime -> Spark (Java) datetime pattern conversions.
        java_fmt = (
            format.replace("%Y", "yyyy")
            .replace("%m", "MM")
            .replace("%d", "dd")
            .replace("%H", "HH")
            .replace("%M", "mm")
            .replace("%S", "ss")
        )
        return F.date_format(_input, java_fmt)

    return self._compliant_expr._from_call(
        _to_string,
        "to_string",
        returns_scalar=self._compliant_expr._returns_scalar,
    )

def date(self: Self) -> SparkLikeExpr:
return self._compliant_expr._from_call(
F.to_date,
Expand Down
1 change: 1 addition & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,6 +159,7 @@ def pyspark_lazy_constructor() -> Callable[[Any], IntoFrame]: # pragma: no cove
.config("spark.sql.shuffle.partitions", "2")
# common timezone for all tests environments
.config("spark.sql.session.timeZone", "UTC")
.config("spark.sql.legacy.timeParserPolicy", "LEGACY")
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

what does this do?

Copy link
Contributor Author

@Dhanunjaya-Elluri Dhanunjaya-Elluri Jan 20, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I hit the date-parsing migration issue from Spark 2.0 to 3.0 (basically to match the datetime-parsing behaviour of Spark versions < 3). I referred to this issue: https://stackoverflow.com/questions/62602720/string-to-date-migration-from-spark-2-0-to-3-0-gives-fail-to-recognize-eee-mmm

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Instead of setting this, I suggest using a valid pattern for Spark 3.0: https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
Otherwise we need to ask users to also set this config

When we remove the config we get the error:

pyspark.errors.exceptions.captured.SparkUpgradeException: [INCONSISTENT_BEHAVIOR_CROSS_VERSION.DATETIME_PATTERN_RECOGNITION] You may get a different result due to the upgrading to Spark >= 3.0:
    Fail to recognize 'YYYY' pattern in the DateTimeFormatter. 1) You can set "spark.sql.legacy.timeParserPolicy" to "LEGACY" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from 'https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html'.

we need to replace YYYY with yyyy (as we do in [strptime_to_pyspark_format](https://github.com/narwhals-dev/narwhals/blob/main/narwhals/_spark_like/expr_str.py#L136))?

.getOrCreate()
)

Expand Down
11 changes: 2 additions & 9 deletions tests/expr_and_series/dt/to_string_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,11 +59,7 @@ def test_dt_to_string_series(constructor_eager: ConstructorEager, fmt: str) -> N
],
)
@pytest.mark.skipif(is_windows(), reason="pyarrow breaking on windows")
def test_dt_to_string_expr(
constructor: Constructor, fmt: str, request: pytest.FixtureRequest
) -> None:
if "pyspark" in str(constructor):
request.applymarker(pytest.mark.xfail)
def test_dt_to_string_expr(constructor: Constructor, fmt: str) -> None:
input_frame = nw.from_native(constructor(data))

expected_col = [datetime.strftime(d, fmt) for d in data["a"]]
Expand Down Expand Up @@ -141,7 +137,7 @@ def test_dt_to_string_iso_local_datetime_expr(
expected: str,
request: pytest.FixtureRequest,
) -> None:
if ("pyspark" in str(constructor)) or "duckdb" in str(constructor):
if "duckdb" in str(constructor):
request.applymarker(pytest.mark.xfail)
df = constructor({"a": [data]})

Expand Down Expand Up @@ -178,10 +174,7 @@ def test_dt_to_string_iso_local_date_expr(
constructor: Constructor,
data: datetime,
expected: str,
request: pytest.FixtureRequest,
) -> None:
if "pyspark" in str(constructor):
request.applymarker(pytest.mark.xfail)
df = constructor({"a": [data]})
result = nw.from_native(df).with_columns(
nw.col("a").dt.to_string("%Y-%m-%d").alias("b")
Expand Down
Loading