Skip to content

Commit 23a52cb

Browse files
amend tests to work with duration_col
1 parent: 0b8c2f8 · commit: 23a52cb

File tree

3 files changed

+17
-6
lines changed

3 files changed

+17
-6
lines changed

tests/system/small/test_dataframe.py

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -553,7 +553,7 @@ def test_df_info(scalars_dfs):
553553
expected = (
554554
"<class 'bigframes.dataframe.DataFrame'>\n"
555555
"Index: 9 entries, 0 to 8\n"
556-
"Data columns (total 13 columns):\n"
556+
"Data columns (total 14 columns):\n"
557557
" # Column Non-Null Count Dtype\n"
558558
"--- ------------- ---------------- ------------------------------\n"
559559
" 0 bool_col 8 non-null boolean\n"
@@ -569,8 +569,9 @@ def test_df_info(scalars_dfs):
569569
" 10 string_col 8 non-null string\n"
570570
" 11 time_col 6 non-null time64[us][pyarrow]\n"
571571
" 12 timestamp_col 6 non-null timestamp[us, tz=UTC][pyarrow]\n"
572-
"dtypes: Float64(1), Int64(3), binary[pyarrow](1), boolean(1), date32[day][pyarrow](1), decimal128(38, 9)[pyarrow](1), geometry(1), string(1), time64[us][pyarrow](1), timestamp[us, tz=UTC][pyarrow](1), timestamp[us][pyarrow](1)\n"
573-
"memory usage: 1269 bytes\n"
572+
" 13 duration_col 7 non-null duration[us][pyarrow]\n"
573+
"dtypes: Float64(1), Int64(3), binary[pyarrow](1), boolean(1), date32[day][pyarrow](1), decimal128(38, 9)[pyarrow](1), duration[us][pyarrow](1), geometry(1), string(1), time64[us][pyarrow](1), timestamp[us, tz=UTC][pyarrow](1), timestamp[us][pyarrow](1)\n"
574+
"memory usage: 1341 bytes\n"
574575
)
575576

576577
scalars_df, _ = scalars_dfs
@@ -4771,6 +4772,8 @@ def test_df_to_json_local_str(scalars_df_index, scalars_pandas_df_index):
47714772
def test_df_to_json_local_file(scalars_df_index, scalars_pandas_df_index):
47724773
# TODO: supply a reason why this isn't compatible with pandas 1.x
47734774
pytest.importorskip("pandas", minversion="2.0.0")
4775+
scalars_df_index = scalars_df_index.drop("duration_col")
4776+
scalars_pandas_df_index = scalars_pandas_df_index.drop("duration_col")
47744777
with tempfile.TemporaryFile() as bf_result_file, tempfile.TemporaryFile() as pd_result_file:
47754778
scalars_df_index.to_json(bf_result_file, orient="table")
47764779
# default_handler for arrow types that have no default conversion
@@ -4882,6 +4885,7 @@ def test_df_to_orc(scalars_df_index, scalars_pandas_df_index):
48824885
"time_col",
48834886
"timestamp_col",
48844887
"geography_col",
4888+
"duration_col",
48854889
]
48864890

48874891
bf_result_file = tempfile.TemporaryFile()

tests/unit/core/compile/sqlglot/test_compile_readlocal.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,8 @@
2424
def test_compile_readlocal(
2525
scalar_types_pandas_df: pd.DataFrame, compiler_session: bigframes.Session, snapshot
2626
):
27+
# Durations not yet supported
28+
scalar_types_pandas_df = scalar_types_pandas_df.drop("duration_col")
2729
bf_df = bpd.DataFrame(scalar_types_pandas_df, session=compiler_session)
2830
snapshot.assert_match(bf_df.sql, "out.sql")
2931

tests/unit/test_dataframe_polars.py

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -508,7 +508,7 @@ def test_df_info(scalars_dfs):
508508
expected = (
509509
"<class 'bigframes.dataframe.DataFrame'>\n"
510510
"Index: 9 entries, 0 to 8\n"
511-
"Data columns (total 13 columns):\n"
511+
"Data columns (total 14 columns):\n"
512512
" # Column Non-Null Count Dtype\n"
513513
"--- ------------- ---------------- ------------------------------\n"
514514
" 0 bool_col 8 non-null boolean\n"
@@ -524,8 +524,9 @@ def test_df_info(scalars_dfs):
524524
" 10 string_col 8 non-null string\n"
525525
" 11 time_col 6 non-null time64[us][pyarrow]\n"
526526
" 12 timestamp_col 6 non-null timestamp[us, tz=UTC][pyarrow]\n"
527-
"dtypes: Float64(1), Int64(3), binary[pyarrow](1), boolean(1), date32[day][pyarrow](1), decimal128(38, 9)[pyarrow](1), geometry(1), string(1), time64[us][pyarrow](1), timestamp[us, tz=UTC][pyarrow](1), timestamp[us][pyarrow](1)\n"
528-
"memory usage: 1269 bytes\n"
527+
" 13 duration_col 7 non-null duration[us][pyarrow]\n"
528+
"dtypes: Float64(1), Int64(3), binary[pyarrow](1), boolean(1), date32[day][pyarrow](1), decimal128(38, 9)[pyarrow](1), duration[us][pyarrow](1), geometry(1), string(1), time64[us][pyarrow](1), timestamp[us, tz=UTC][pyarrow](1), timestamp[us][pyarrow](1)\n"
529+
"memory usage: 1341 bytes\n"
529530
)
530531

531532
scalars_df, _ = scalars_dfs
@@ -4086,6 +4087,9 @@ def test_df_to_json_local_str(scalars_df_index, scalars_pandas_df_index):
40864087
def test_df_to_json_local_file(scalars_df_index, scalars_pandas_df_index):
40874088
# TODO: supply a reason why this isn't compatible with pandas 1.x
40884089
pytest.importorskip("pandas", minversion="2.0.0")
4090+
# duration not fully supported at pandas level
4091+
scalars_df_index = scalars_df_index.drop("duration_col")
4092+
scalars_pandas_df_index = scalars_pandas_df_index.drop("duration_col")
40894093
with tempfile.TemporaryFile() as bf_result_file, tempfile.TemporaryFile() as pd_result_file:
40904094
scalars_df_index.to_json(bf_result_file, orient="table")
40914095
# default_handler for arrow types that have no default conversion
@@ -4197,6 +4201,7 @@ def test_df_to_orc(scalars_df_index, scalars_pandas_df_index):
41974201
"time_col",
41984202
"timestamp_col",
41994203
"geography_col",
4204+
"duration_col",
42004205
]
42014206

42024207
bf_result_file = tempfile.TemporaryFile()

0 commit comments

Comments (0)