Skip to content

Commit 5f78992

Browse files
fix more tests
1 parent 7cf8a3e commit 5f78992

File tree

4 files changed

+23
-8
lines changed

4 files changed

+23
-8
lines changed

tests/system/small/test_dataframe.py

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5324,9 +5324,12 @@ def test_df_to_latex(scalars_df_index, scalars_pandas_df_index):
53245324

53255325

53265326
def test_df_to_json_local_str(scalars_df_index, scalars_pandas_df_index):
5327-
bf_result = scalars_df_index.to_json()
5327+
# pandas 3.0 bugged for serializing date col
5328+
bf_result = scalars_df_index.drop(columns="date_col").to_json()
53285329
# default_handler for arrow types that have no default conversion
5329-
pd_result = scalars_pandas_df_index.to_json(default_handler=str)
5330+
pd_result = scalars_pandas_df_index.drop(columns="date_col").to_json(
5331+
default_handler=str
5332+
)
53305333

53315334
assert bf_result == pd_result
53325335

@@ -6170,6 +6173,11 @@ def test_agg_with_dict_lists_strings(scalars_dfs):
61706173
)
61716174

61726175

6176+
@pytest.mark.skipif(
6177+
pandas.__version__.startswith("3"),
6178+
# See: https://github.com/python/cpython/issues/112282
6179+
reason="pandas 3.0 miscalculates variance",
6180+
)
61736181
def test_agg_with_dict_lists_callables(scalars_dfs):
61746182
bf_df, pd_df = scalars_dfs
61756183
agg_funcs = {

tests/system/small/test_dataframe_io.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -510,8 +510,9 @@ def test_to_csv_index(
510510
dtype = scalars_df.reset_index().dtypes.to_dict()
511511
dtype.pop("geography_col")
512512
dtype.pop("rowindex")
513-
# read_csv will decode into bytes inproperly, convert_pandas_dtypes will encode properly from string
513+
# read_csv will decode into bytes, numeric improperly, convert_pandas_dtypes will encode properly from string
514514
dtype.pop("bytes_col")
515+
dtype.pop("numeric_col")
515516
gcs_df = pd.read_csv(
516517
utils.get_first_file_from_wildcard(path),
517518
dtype=dtype,
@@ -548,8 +549,9 @@ def test_to_csv_tabs(
548549
dtype = scalars_df.reset_index().dtypes.to_dict()
549550
dtype.pop("geography_col")
550551
dtype.pop("rowindex")
551-
# read_csv will decode into bytes inproperly, convert_pandas_dtypes will encode properly from string
552-
# dtype.pop("bytes_col")
552+
# read_csv will decode into bytes, numeric improperly, convert_pandas_dtypes will encode properly from string
553+
dtype.pop("bytes_col")
554+
dtype.pop("numeric_col")
553555
gcs_df = pd.read_csv(
554556
utils.get_first_file_from_wildcard(path),
555557
sep="\t",

tests/system/small/test_pandas.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -541,7 +541,9 @@ def _convert_pandas_category(pd_s: pd.Series):
541541
f"Input must be a pandas Series with categorical data: {pd_s.dtype}"
542542
)
543543

544-
if pd.api.types.is_object_dtype(pd_s.cat.categories.dtype):
544+
if pd.api.types.is_object_dtype(
545+
pd_s.cat.categories.dtype
546+
) or pd.api.types.is_string_dtype(pd_s.cat.categories.dtype):
545547
return pd_s.astype(pd.StringDtype(storage="pyarrow"))
546548

547549
if not isinstance(pd_s.cat.categories.dtype, pd.IntervalDtype):

tests/unit/test_dataframe_polars.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4116,9 +4116,12 @@ def test_df_to_dict(scalars_df_index, scalars_pandas_df_index):
41164116

41174117

41184118
def test_df_to_json_local_str(scalars_df_index, scalars_pandas_df_index):
4119-
bf_result = scalars_df_index.to_json()
4119+
# pandas 3.0 bugged for serializing date col
4120+
bf_result = scalars_df_index.drop(columns="date_col").to_json()
41204121
# default_handler for arrow types that have no default conversion
4121-
pd_result = scalars_pandas_df_index.to_json(default_handler=str)
4122+
pd_result = scalars_pandas_df_index.drop(columns="date_col").to_json(
4123+
default_handler=str
4124+
)
41224125

41234126
assert bf_result == pd_result
41244127

0 commit comments

Comments
 (0)