Skip to content

Commit 8a8c33c

Browse files
HonahX and kevinjqliu authored
minor fixes (#564)
Co-authored-by: Kevin Liu <kevinjqliu@users.noreply.github.com>
1 parent 9cd349f commit 8a8c33c

File tree

3 files changed

+6
-6
lines changed

3 files changed

+6
-6
lines changed

pyiceberg/table/__init__.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1056,8 +1056,9 @@ def append(self, df: pa.Table) -> None:
10561056

10571057
_check_schema_compatible(self.schema(), other_schema=df.schema)
10581058
# cast if the two schemas are compatible but not equal
1059-
if schema_to_pyarrow(self.schema()) != df.schema:
1060-
df = df.cast(schema_to_pyarrow(self.schema()))
1059+
table_arrow_schema = schema_to_pyarrow(self.schema())
1060+
if table_arrow_schema != df.schema:
1061+
df = df.cast(table_arrow_schema)
10611062

10621063
merge = _MergingSnapshotProducer(operation=Operation.APPEND, table=self)
10631064

@@ -1096,8 +1097,9 @@ def overwrite(self, df: pa.Table, overwrite_filter: BooleanExpression = ALWAYS_T
10961097

10971098
_check_schema_compatible(self.schema(), other_schema=df.schema)
10981099
# cast if the two schemas are compatible but not equal
1099-
if schema_to_pyarrow(self.schema()) != df.schema:
1100-
df = df.cast(schema_to_pyarrow(self.schema()))
1100+
table_arrow_schema = schema_to_pyarrow(self.schema())
1101+
if table_arrow_schema != df.schema:
1102+
df = df.cast(table_arrow_schema)
11011103

11021104
merge = _MergingSnapshotProducer(
11031105
operation=Operation.OVERWRITE if self.current_snapshot() is not None else Operation.APPEND,

tests/integration/test_reads.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -230,7 +230,6 @@ def test_ray_nan_rewritten(catalog: Catalog) -> None:
230230
def test_ray_not_nan_count(catalog: Catalog) -> None:
231231
table_test_null_nan_rewritten = catalog.load_table("default.test_null_nan_rewritten")
232232
ray_dataset = table_test_null_nan_rewritten.scan(row_filter=NotNaN("col_numeric"), selected_fields=("idx",)).to_ray()
233-
print(ray_dataset.take())
234233
assert ray_dataset.count() == 2
235234

236235

tests/integration/test_writes.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -481,7 +481,6 @@ def test_write_parquet_other_properties(
481481
properties: Dict[str, Any],
482482
expected_kwargs: Dict[str, Any],
483483
) -> None:
484-
print(type(mocker))
485484
identifier = "default.test_write_parquet_other_properties"
486485

487486
# The properties we test cannot be checked on the resulting Parquet file, so we spy on the ParquetWriter call instead

0 commit comments

Comments (0)