
Commit 24fb388

Oops
1 parent e5c41c7 commit 24fb388

File tree

1 file changed: +34 -0 lines changed


tests/integration/test_writes/test_writes.py

Lines changed: 34 additions & 0 deletions
@@ -1780,6 +1780,40 @@ def test_write_optional_list(session_catalog: Catalog) -> None:
     assert len(session_catalog.load_table(identifier).scan().to_arrow()) == 4
 
 
+@pytest.mark.integration
+@pytest.mark.parametrize("format_version", [1, 2])
+def test_evolve_and_write(
+    spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table, format_version: int
+) -> None:
+    identifier = "default.test_evolve_and_write"
+    tbl = _create_table(session_catalog, identifier, properties={"format-version": format_version}, schema=Schema())
+    other_table = session_catalog.load_table(identifier)
+
+    numbers = pa.array([1, 2, 3, 4], type=pa.int32())
+
+    with tbl.update_schema() as upd:
+        # This is not known by other_table
+        upd.add_column("id", IntegerType())
+
+    with other_table.transaction() as tx:
+        # Refreshes the underlying metadata, and the schema
+        other_table.refresh()
+        tx.append(
+            pa.Table.from_arrays(
+                [
+                    numbers,
+                ],
+                schema=pa.schema(
+                    [
+                        pa.field("id", pa.int32(), nullable=True),
+                    ]
+                ),
+            )
+        )
+
+    assert session_catalog.load_table(identifier).scan().to_arrow().column(0).combine_chunks() == numbers
+
+
 @pytest.mark.integration
 def test_read_write_decimals(session_catalog: Catalog) -> None:
     """Roundtrip decimal types to make sure that we correctly write them as ints"""

0 commit comments
