
Commit e5c41c7: Cleanup
1 parent ea22749

2 files changed: 0 additions, 36 deletions

pyiceberg/io/pyarrow.py

Lines changed: 0 additions & 2 deletions

@@ -1753,8 +1753,6 @@ def _cast_if_needed(self, field: NestedField, values: pa.Array) -> pa.Array:
             elif target_type.unit == "us" and values.type.unit in {"s", "ms", "us"}:
                 return values.cast(target_type)
             raise ValueError(f"Unsupported schema projection from {values.type} to {target_type}")
-        else:
-            pass
         return values

     def _construct_field(self, field: NestedField, arrow_type: pa.DataType) -> pa.Field:
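For context, the branch that survives this cleanup relies on PyArrow's Array.cast to widen coarser timestamp units to microseconds. Below is a minimal standalone sketch (not pyiceberg code) of that cast under the assumption of a timezone-naive timestamp column; the names values and target_type simply mirror the diff.

# Minimal sketch of the kept branch's behaviour: widening a millisecond-
# precision timestamp array to microseconds with pyarrow's Array.cast.
import pyarrow as pa

values = pa.array([0, 1_000, 2_000], type=pa.timestamp("ms"))  # epoch millis
target_type = pa.timestamp("us")

if values.type != target_type and values.type.unit in {"s", "ms", "us"}:
    values = values.cast(target_type)  # lossless: coarser unit -> microseconds

print(values.type)  # timestamp[us]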

tests/integration/test_writes/test_writes.py

Lines changed: 0 additions & 34 deletions

@@ -1780,40 +1780,6 @@ def test_write_optional_list(session_catalog: Catalog) -> None:
     assert len(session_catalog.load_table(identifier).scan().to_arrow()) == 4


-@pytest.mark.integration
-@pytest.mark.parametrize("format_version", [1, 2])
-def test_evolve_and_write(
-    spark: SparkSession, session_catalog: Catalog, arrow_table_with_null: pa.Table, format_version: int
-) -> None:
-    identifier = "default.test_evolve_and_write"
-    tbl = _create_table(session_catalog, identifier, properties={"format-version": format_version}, schema=Schema())
-    other_table = session_catalog.load_table(identifier)
-
-    numbers = pa.array([1, 2, 3, 4], type=pa.int32())
-
-    with tbl.update_schema() as upd:
-        # This is not known by other_table
-        upd.add_column("id", IntegerType())
-
-    with other_table.transaction() as tx:
-        # Refreshes the underlying metadata, and the schema
-        other_table.refresh()
-        tx.append(
-            pa.Table.from_arrays(
-                [
-                    numbers,
-                ],
-                schema=pa.schema(
-                    [
-                        pa.field("id", pa.int32(), nullable=True),
-                    ]
-                ),
-            )
-        )
-
-    assert session_catalog.load_table(identifier).scan().to_arrow().column(0).combine_chunks() == numbers
-
-
 @pytest.mark.integration
 def test_read_write_decimals(session_catalog: Catalog) -> None:
     """Roundtrip decimal types to make sure that we correctly write them as ints"""

0 commit comments