-
Notifications
You must be signed in to change notification settings - Fork 146
Spark expr replace strict #2254
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
312f372
d240c5f
87f8614
a3cd1d7
8c41b99
b98ffcd
2569746
eaff2e5
cadce60
0337095
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,10 +1,12 @@ | ||
from __future__ import annotations | ||
|
||
import operator | ||
from itertools import chain | ||
from typing import TYPE_CHECKING | ||
from typing import Any | ||
from typing import Callable | ||
from typing import Literal | ||
from typing import Mapping | ||
from typing import Sequence | ||
from typing import cast | ||
|
||
|
@@ -560,6 +562,65 @@ def _len(_input: Column) -> Column: | |
|
||
return self._with_callable(_len) | ||
|
||
def replace_strict(
    self,
    old: Sequence[Any] | Mapping[Any, Any],
    new: Sequence[Any],
    *,
    return_dtype: DType | type[DType] | None,
) -> Self:
    """Replace each value found in ``old`` with its counterpart in ``new``.

    Unlike a plain replace, every non-null input value is expected to appear
    among the mapping keys; an unmapped value trips an ``assert_true`` inside
    the generated Spark expression.

    Arguments:
        old: Either a sequence of values to replace, or a mapping from old
            values to new ones (in which case ``new`` is ignored).
        new: Replacement values, zipped positionally with ``old`` when
            ``old`` is a sequence.
        return_dtype: Optional Narwhals dtype the result column is cast to.

    Returns:
        A new expression with the strict replacement applied.
    """
    # Normalise the two accepted call styles into a single dict.
    mapping = old if isinstance(old, Mapping) else dict(zip(old, new))

    keys = list(mapping)

    # A null key cannot be matched via array_contains, so track it
    # separately from the non-null keys.
    has_null_key = None in keys
    non_null_keys = [key for key in keys if key is not None]

    # Flatten the (old, new) pairs into a literal MAP expression.
    flat_pairs = chain.from_iterable(mapping.items())
    mapping_expr = self._F.create_map([self._F.lit(item) for item in flat_pairs])

    def _replace_strict(_input: Column) -> Column:
        # A value is "valid" (replaceable) when:
        #   - it is null and None is one of the mapping keys, or
        #   - it is non-null and listed among the non-null keys.
        valid = (
            self._F.when(
                _input.isNull() & self._F.lit(has_null_key),
                self._F.lit(True),  # noqa: FBT003
            )
            .when(_input.isNull() & ~self._F.lit(has_null_key), self._F.lit(False))  # noqa: FBT003
            .otherwise(
                self._F.array_contains(
                    self._F.array([self._F.lit(key) for key in non_null_keys]),
                    _input,
                )
            )
        )

        # NOTE(review): PySpark warns on ``getItem`` with a Column argument,
        # hence the subscript form on that backend — confirm against the
        # installed PySpark version.
        if self._implementation.is_pyspark():
            replaced = mapping_expr[_input]
        else:
            replaced = mapping_expr.getItem(_input)

        # NOTE(review): expression construction is lazy, so this except most
        # likely only guards errors raised while *building* the expression,
        # not runtime ``assert_true`` failures during execution.
        try:
            result = self._F.when(valid, replaced).otherwise(
                self._F.assert_true(self._F.lit(False))  # noqa: FBT003
            )
        except Exception as exc:
            msg = "replace_strict did not replace all non-null values."
            raise ValueError(msg) from exc

        if return_dtype is not None:
            native_dtype = narwhals_to_native_dtype(
                return_dtype, self._version, self._native_dtypes
            )
            result = result.cast(native_dtype)

        return result

    return self._with_callable(_replace_strict)
|
||
def round(self, decimals: int) -> Self: | ||
def _round(_input: Column) -> Column: | ||
return self._F.round(_input, decimals) | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I might need an extra look at this