From d60c862ad2f9eb952233bef4e89929a2b2b5a820 Mon Sep 17 00:00:00 2001
From: Clement Rey
Date: Tue, 29 Oct 2024 18:14:55 +0100
Subject: [PATCH] enable dataframe streaming across FFI

---
 rerun_py/src/dataframe.rs | 18 ------------------
 1 file changed, 18 deletions(-)

diff --git a/rerun_py/src/dataframe.rs b/rerun_py/src/dataframe.rs
index 005574b7466a..9c5dc9cc2838 100644
--- a/rerun_py/src/dataframe.rs
+++ b/rerun_py/src/dataframe.rs
@@ -737,21 +737,12 @@ impl PyRecordingView {
         let metadata = schema.metadata.clone().into_iter().collect();
         let schema = arrow::datatypes::Schema::new(fields).with_metadata(metadata);
 
-        // TODO(jleibs): Need to keep the engine alive
-        /*
         let reader = RecordBatchIterator::new(
             query_handle
                 .into_batch_iter()
                 .map(|batch| batch.try_to_arrow_record_batch()),
             std::sync::Arc::new(schema),
         );
-        */
-        let batches = query_handle
-            .into_batch_iter()
-            .map(|batch| batch.try_to_arrow_record_batch())
-            .collect::<Vec<_>>();
-
-        let reader = RecordBatchIterator::new(batches.into_iter(), std::sync::Arc::new(schema));
 
         Ok(PyArrowType(Box::new(reader)))
     }
@@ -829,21 +820,12 @@ impl PyRecordingView {
         let metadata = schema.metadata.clone().into_iter().collect();
         let schema = arrow::datatypes::Schema::new(fields).with_metadata(metadata);
 
-        // TODO(jleibs): Need to keep the engine alive
-        /*
         let reader = RecordBatchIterator::new(
             query_handle
                 .into_batch_iter()
                 .map(|batch| batch.try_to_arrow_record_batch()),
             std::sync::Arc::new(schema),
         );
-        */
-        let batches = query_handle
-            .into_batch_iter()
-            .map(|batch| batch.try_to_arrow_record_batch())
-            .collect::<Vec<_>>();
-
-        let reader = RecordBatchIterator::new(batches.into_iter(), std::sync::Arc::new(schema));
 
         Ok(PyArrowType(Box::new(reader)))
     }
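
Reviewer note: the patch re-enables the lazy path, where the `RecordBatchReader` handed across the Python FFI pulls batches from `query_handle` on demand, instead of the eager workaround that collected every batch into a `Vec` up front. Presumably the `TODO(jleibs)` engine-lifetime concern was resolved elsewhere (the `query_handle` now keeping the engine alive), since the eager copy is deleted without replacement. Below is a minimal, self-contained sketch of the eager-vs-streaming difference using plain `arrow` — not rerun code; the schema, batch contents, and the stand-in iterator for `query_handle.into_batch_iter()` are all made up for illustration:

```rust
use std::sync::Arc;

use arrow::array::Int64Array;
use arrow::datatypes::{DataType, Field, Schema};
use arrow::error::ArrowError;
use arrow::record_batch::{RecordBatch, RecordBatchIterator};

fn main() -> Result<(), ArrowError> {
    let schema = Arc::new(Schema::new(vec![Field::new("x", DataType::Int64, false)]));

    // Stand-in for `query_handle.into_batch_iter().map(...)`: a lazy iterator
    // that only builds each batch when the consumer asks for it.
    let iter_schema = schema.clone();
    let batches = (0..3i64).map(move |i| {
        RecordBatch::try_new(
            iter_schema.clone(),
            vec![Arc::new(Int64Array::from(vec![i; 4]))],
        )
    });

    // Old workaround (removed by this patch): materialize everything up front.
    //     let batches: Vec<_> = batches.collect();
    //     let reader = RecordBatchIterator::new(batches.into_iter(), schema);

    // Streaming path restored by the patch: the reader pulls batches one at a
    // time, so nothing is buffered beyond the batch currently being consumed.
    let reader = RecordBatchIterator::new(batches, schema);
    for batch in reader {
        println!("{} rows", batch?.num_rows());
    }
    Ok(())
}
```

With the streaming form, memory stays bounded by roughly one batch regardless of result size, which is what makes it viable to hand the reader across the FFI boundary and let the Python side drive consumption at its own pace.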