Skip to content

Commit

Permalink
IllegalStateException reading data with struct type (apache#43)
Browse files Browse the repository at this point in the history
  • Loading branch information
zhztheplayer authored Nov 11, 2021
1 parent 16e554b commit 20244e5
Show file tree
Hide file tree
Showing 3 changed files with 73 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,8 @@ public class NativeScanner implements Scanner {
private final ReadWriteLock lock = new ReentrantReadWriteLock();
private final Lock writeLock = lock.writeLock();
private final Lock readLock = lock.readLock();

private Schema schema = null;
private boolean closed = false;

public NativeScanner(NativeContext context, ScanOptions options, long scannerId) {
Expand Down Expand Up @@ -92,7 +94,7 @@ public boolean hasNext() {
}
peek = UnsafeRecordBatchSerializer.deserializeUnsafe(context.getAllocator(), bytes);
if (options.getColumns() != null) {
Preconditions.checkState(peek.getNodes().size() == options.getColumns().length);
Preconditions.checkState(schema().getFields().size() == options.getColumns().length);
}
return true;
}
Expand Down Expand Up @@ -122,12 +124,19 @@ public Iterable<? extends NativeScanTask> scan() {

@Override
public Schema schema() {
if (schema != null) {
return schema;
}
readLock.lock();
try {
if (schema != null) {
return schema;
}
if (closed) {
throw new NativeInstanceReleasedException();
}
return SchemaUtility.deserialize(JniWrapper.get().getSchemaFromScanner(scannerId), context.getAllocator());
schema = SchemaUtility.deserialize(JniWrapper.get().getSchemaFromScanner(scannerId), context.getAllocator());
return schema;
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -200,6 +200,68 @@ public void testCsvReadTab() throws Exception {
AutoCloseables.close(vsr, allocator);
}


@Test
public void testStructTypeRead() throws Exception {
  // Regression test for apache#43: reading a Parquet file whose projected
  // column is a struct must not trip the column-count precondition in
  // NativeScanner.hasNext() (the node count of a struct batch differs from
  // the projected-column count, so the check must compare schema fields).
  RootAllocator allocator = new RootAllocator(Long.MAX_VALUE);
  FileSystemDatasetFactory factory = new FileSystemDatasetFactory(allocator,
      NativeMemoryPool.getDefault(), ParquetFileFormat.createDefault(),
      "file://" + resourcePath("data/struct_example.parquet"));
  // Project the single struct column "_1"; batches of up to 100 rows.
  ScanOptions options = new ScanOptions(new String[] {"_1"}, Filter.EMPTY, 100);
  Schema schema = factory.inspect();
  NativeDataset dataset = factory.finish(schema);
  NativeScanner nativeScanner = dataset.newScan(options);
  List<? extends ScanTask> scanTasks = collect(nativeScanner.scan());
  Assert.assertEquals(1, scanTasks.size());
  ScanTask scanTask = scanTasks.get(0);
  ScanTask.BatchIterator itr = scanTask.execute();

  VectorSchemaRoot vsr = VectorSchemaRoot.create(schema, allocator);
  VectorLoader loader = new VectorLoader(vsr);
  int rowCount = 0;
  while (itr.hasNext()) {
    // Each record batch owns native memory; close it as soon as it has been
    // loaded into the VectorSchemaRoot.
    try (ArrowRecordBatch next = itr.next()) {
      loader.load(next);
    }
    rowCount += vsr.getRowCount();
  }
  // data/struct_example.parquet is expected to contain 50 rows — TODO confirm
  // against the checked-in fixture.
  Assert.assertEquals(50, rowCount);
  Assert.assertEquals(1, schema.getFields().size());
  Assert.assertEquals("_1", schema.getFields().get(0).getName());
  // Also close the native batch iterator: the original test leaked it, which
  // can leave native scan resources open past allocator shutdown.
  AutoCloseables.close(itr, vsr, allocator);
}

@Test
public void testStructTypeReadWithEmptyProjector() throws Exception {
  // Companion to testStructTypeRead (apache#43): an empty projection list
  // must also scan cleanly. The scanner's own schema (not the dataset
  // schema) describes the produced batches, so it is used to build the root.
  RootAllocator allocator = new RootAllocator(Long.MAX_VALUE);
  FileSystemDatasetFactory factory = new FileSystemDatasetFactory(allocator,
      NativeMemoryPool.getDefault(), ParquetFileFormat.createDefault(),
      "file://" + resourcePath("data/struct_example.parquet"));
  // Zero projected columns; batches of up to 100 rows.
  ScanOptions options = new ScanOptions(new String[0], Filter.EMPTY, 100);
  Schema schema = factory.inspect();
  NativeDataset dataset = factory.finish(schema);
  NativeScanner nativeScanner = dataset.newScan(options);
  List<? extends ScanTask> scanTasks = collect(nativeScanner.scan());
  Assert.assertEquals(1, scanTasks.size());
  ScanTask scanTask = scanTasks.get(0);
  ScanTask.BatchIterator itr = scanTask.execute();
  // With an empty projector the batch schema comes from the scanner itself.
  Schema scannerSchema = nativeScanner.schema();
  VectorSchemaRoot vsr = VectorSchemaRoot.create(scannerSchema, allocator);
  VectorLoader loader = new VectorLoader(vsr);
  int rowCount = 0;
  while (itr.hasNext()) {
    // Close each native record batch once loaded.
    try (ArrowRecordBatch next = itr.next()) {
      loader.load(next);
    }
    rowCount += vsr.getRowCount();
  }
  // 50 rows expected in the fixture — TODO confirm against
  // data/struct_example.parquet.
  Assert.assertEquals(50, rowCount);
  // The dataset schema still reports the file's single struct column...
  Assert.assertEquals(1, schema.getFields().size());
  Assert.assertEquals("_1", schema.getFields().get(0).getName());
  // ...while the scanner schema is empty because nothing was projected.
  Assert.assertEquals(0, scannerSchema.getFields().size());
  // Also close the native batch iterator: the original test leaked it.
  AutoCloseables.close(itr, vsr, allocator);
}

@Test
public void testReadPartialFile() throws Exception {
ParquetWriteSupport writeSupport = ParquetWriteSupport.writeTempFile(AVRO_SCHEMA_USER, TMP.newFolder(), 1, "a");
Expand Down
Binary file not shown.

0 comments on commit 20244e5

Please sign in to comment.