Skip to content

Commit 9fb30da

Browse files
committed (commit message: "Conflicts")
1 parent 8befdbd · commit 9fb30da

File tree

3 files changed

+7
-21
lines changed

3 files changed

+7
-21
lines changed

pyiceberg/avro/file.py

Lines changed: 2 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -284,14 +284,9 @@ def _write_header(self) -> None:
284284
codec = "deflate"
285285

286286
json_schema = json.dumps(AvroSchemaConversion().iceberg_to_avro(self.file_schema, schema_name=self.schema_name))
287-
<<<<<<< Updated upstream
288-
meta = {**self.metadata, _SCHEMA_KEY: json_schema, _CODEC_KEY: "null"}
287+
288+
meta = {**self.metadata, _SCHEMA_KEY: json_schema, AVRO_CODEC_KEY: codec}
289289
header = AvroFileHeader(MAGIC, meta, self.sync_bytes)
290-
=======
291-
header = AvroFileHeader(
292-
magic=MAGIC, meta={**self.metadata, _SCHEMA_KEY: json_schema, AVRO_CODEC_KEY: codec}, sync=self.sync_bytes
293-
)
294-
>>>>>>> Stashed changes
295290
construct_writer(META_SCHEMA).write(self.encoder, header)
296291

297292
def compression_codec(self) -> Optional[Type[Codec]]:

pyiceberg/manifest.py

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -799,7 +799,6 @@ class ManifestWriter(ABC):
799799
_deleted_rows: int
800800
_min_sequence_number: Optional[int]
801801
_partitions: List[Record]
802-
_reused_entry_wrapper: ManifestEntry
803802
_compression: AvroCompressionCodec
804803

805804
def __init__(
@@ -824,11 +823,7 @@ def __init__(
824823
self._deleted_rows = 0
825824
self._min_sequence_number = None
826825
self._partitions = []
827-
<<<<<<< Updated upstream
828-
=======
829-
self._reused_entry_wrapper = ManifestEntry()
830826
self._compression = avro_compression
831-
>>>>>>> Stashed changes
832827

833828
def __enter__(self) -> ManifestWriter:
834829
"""Open the writer."""

pyiceberg/serializers.py

Lines changed: 5 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -19,15 +19,13 @@
1919
import codecs
2020
import gzip
2121
from abc import ABC, abstractmethod
22-
from typing import TYPE_CHECKING, Callable
22+
from typing import Callable
2323

2424
from pyiceberg.io import InputFile, InputStream, OutputFile
25+
from pyiceberg.table.metadata import TableMetadata, TableMetadataUtil
2526
from pyiceberg.typedef import UTF8
2627
from pyiceberg.utils.config import Config
2728

28-
if TYPE_CHECKING:
29-
from pyiceberg.table.metadata import TableMetadata
30-
3129
GZIP = "gzip"
3230

3331

@@ -81,7 +79,7 @@ class FromByteStream:
8179
@staticmethod
8280
def table_metadata(
8381
byte_stream: InputStream, encoding: str = UTF8, compression: Compressor = NOOP_COMPRESSOR
84-
) -> "TableMetadata":
82+
) -> TableMetadata:
8583
"""Instantiate a TableMetadata object from a byte stream.
8684
8785
Args:
@@ -94,16 +92,14 @@ def table_metadata(
9492
json_bytes = reader(byte_stream)
9593
metadata = json_bytes.read()
9694

97-
from pyiceberg.table.metadata import TableMetadataUtil
98-
9995
return TableMetadataUtil.parse_raw(metadata)
10096

10197

10298
class FromInputFile:
10399
"""A collection of methods that deserialize InputFiles into Iceberg objects."""
104100

105101
@staticmethod
106-
def table_metadata(input_file: InputFile, encoding: str = UTF8) -> "TableMetadata":
102+
def table_metadata(input_file: InputFile, encoding: str = UTF8) -> TableMetadata:
107103
"""Create a TableMetadata instance from an input file.
108104
109105
Args:
@@ -124,7 +120,7 @@ class ToOutputFile:
124120
"""A collection of methods that serialize Iceberg objects into files given an OutputFile instance."""
125121

126122
@staticmethod
127-
def table_metadata(metadata: "TableMetadata", output_file: OutputFile, overwrite: bool = False) -> None:
123+
def table_metadata(metadata: TableMetadata, output_file: OutputFile, overwrite: bool = False) -> None:
128124
"""Write a TableMetadata instance to an output file.
129125
130126
Args:

0 commit comments

Comments (0)