 """
 Module for Stream and related classes
 """
+import io
+
 ##########################################################################
 ## Imports
 ##########################################################################
@@ -512,8 +514,8 @@ def arrow_insert(self, data: pa.Table, merge: str = "never") -> int:
         logger.debug(f"tmp_table schema: {tmp_table.schema}")
         new_schema = pa.schema(
             [
-                (pa.field("time", pa.timestamp(unit="ns", tz="UTC"))),
-                (pa.field("value", pa.float64())),
+                (pa.field("time", pa.timestamp(unit="ns", tz="UTC"), nullable=False)),
+                (pa.field("value", pa.float64(), nullable=False)),
             ]
         )
         tmp_table = tmp_table.cast(new_schema)
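
A minimal sketch of the schema change above (illustrative only, not part of the commit; `tbl` and `cast_tbl` are placeholder names): `pa.field(..., nullable=False)` marks the columns as non-nullable in the target schema, and `Table.cast` returns a table carrying that schema.

```python
import pyarrow as pa

# Small stand-in for the temporary table built inside arrow_insert.
tbl = pa.table(
    {
        "time": pa.array([0, 1_000_000_000], type=pa.timestamp(unit="ns", tz="UTC")),
        "value": pa.array([1.0, 2.0], type=pa.float64()),
    }
)

# Same shape as the new_schema in the hunk above, with non-nullable fields.
schema = pa.schema(
    [
        pa.field("time", pa.timestamp(unit="ns", tz="UTC"), nullable=False),
        pa.field("value", pa.float64(), nullable=False),
    ]
)

cast_tbl = tbl.cast(schema)
print(cast_tbl.schema)  # both fields now print with "not null"
```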
@@ -1930,10 +1932,12 @@ def _materialize_stream_as_table(arrow_bytes):
 
 
 def _table_slice_to_feather_bytes(table_slice: pa.Table) -> bytes:
-    sink = pa.BufferOutputStream()
+    # sink = pa.BufferOutputStream()
+    sink = io.BytesIO()
     with pa.ipc.new_stream(sink=sink, schema=table_slice.schema) as writer:
-        writer.write_table(table_slice)
-    return sink.readall()
+        writer.write(table_slice)
+    buf = sink.getvalue()
+    return buf
 
 
 def _coalesce_table_deque(tables: deque):
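
A quick round-trip check of the new write path (illustrative only, not part of the commit; `payload` and `restored` are placeholder names): `io.BytesIO.getvalue()` hands back the accumulated stream bytes once the writer is closed, and `pa.ipc.open_stream` can rebuild the table from them. `pa.BufferOutputStream`, by contrast, is a write-only sink that exposes its contents through its own `getvalue()` rather than a read call.

```python
import io

import pyarrow as pa

# Placeholder slice standing in for a real table_slice.
table_slice = pa.table({"time": [1, 2, 3], "value": [0.1, 0.2, 0.3]})

# Same write path as _table_slice_to_feather_bytes above.
sink = io.BytesIO()
with pa.ipc.new_stream(sink, table_slice.schema) as writer:
    writer.write(table_slice)
payload = sink.getvalue()

# Reading the bytes back confirms the payload is a valid Arrow IPC stream.
restored = pa.ipc.open_stream(payload).read_all()
assert restored.equals(table_slice)
```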