fix for duckdb v1.3.0 #25

Closed. Wants to merge 3 commits.

.github/workflows/MainDistributionPipeline.yml: 16 changes (8 additions, 8 deletions)
@@ -28,23 +28,23 @@ jobs:
   # We have to build v1.2.0 based due to go-duckdb restrictions
   duckdb-1-2-0-build:
     name: Build extension binaries
-    uses: duckdb/extension-ci-tools/.github/workflows/_extension_distribution.yml@v1.2.1
+    uses: duckdb/extension-ci-tools/.github/workflows/_extension_distribution.yml@v1.3.0
     with:
-      duckdb_version: v1.2.0
-      ci_tools_version: v1.2.0
+      duckdb_version: v1.3.0
+      ci_tools_version: v1.3.0
       extension_name: chsql

   duckdb-stable-build:
     name: Build extension binaries
-    uses: duckdb/extension-ci-tools/.github/workflows/_extension_distribution.yml@v1.2.1
+    uses: duckdb/extension-ci-tools/.github/workflows/_extension_distribution.yml@v1.3.0
     with:
-      duckdb_version: v1.2.1
-      ci_tools_version: v1.2.1
+      duckdb_version: v1.3.0
+      ci_tools_version: v1.3.0
       extension_name: chsql

   release-all-artifacts:
     name: Process Extension Artifacts
-    needs: [duckdb-1-2-0-build, duckdb-stable-build]
+    needs: [duckdb-1-3-0-build, duckdb-stable-build]
     if: github.event_name == 'release' && github.event.action == 'published'
     runs-on: ubuntu-latest
     steps:
@@ -72,4 +72,4 @@ jobs:
       - name: Upload Release Assets
         uses: softprops/action-gh-release@v1
         with:
-          files: to-upload/*
+          files: to-upload/*

chsql/src/duck_flock.cpp: 2 changes (1 addition, 1 deletion)
@@ -104,7 +104,7 @@ namespace duckdb {

     try {
         if (res->TryFetch(data_chunk, error_data)) {
-            if (data_chunk && !data_chunk->size() == 0) {
+            if (data_chunk && data_chunk->size() != 0) {
                 output.Append(*data_chunk);
                 return;
             }

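A note on the duck_flock.cpp hunk above: in C++ the logical-not operator binds tighter than ==, so the old condition parses as (!data_chunk->size()) == 0. That expression happens to yield the same truth value as data_chunk->size() != 0, but it reads like a mistake and compilers flag it (for example with -Wlogical-not-parentheses), which can fail builds that enable -Werror; the rewrite states the intent directly. A minimal standalone sketch of the parse, not the extension's actual code:

#include <cassert>
#include <cstddef>

int main() {
    std::size_t size = 3;

    // Old spelling: parsed as (!size) == 0. For size == 3, !size is false (0)
    // and 0 == 0 is true, so it happens to behave like "size != 0"...
    bool old_form = !size == 0; // ...but this is the line compilers warn about

    // New spelling: says what it means.
    bool new_form = size != 0;

    assert(old_form == new_form); // same truth value, far clearer intent
    return 0;
}
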
chsql/src/parquet_ordered_scan.cpp: 27 changes (13 additions, 14 deletions)
@@ -2,7 +2,7 @@
 #include "duckdb/common/exception.hpp"
 #include <parquet_reader.hpp>
 #include "chsql_extension.hpp"
-#include <duckdb/common/multi_file_list.hpp>
+#include <duckdb/common/multi_file/multi_file_list.hpp>
 #include "chsql_parquet_types.h"

 namespace duckdb {
@@ -35,11 +35,10 @@ namespace duckdb {
                 haveAbsentColumns = true;
                 continue;
             }
-            columnMap.push_back(schema_column - reader->metadata->metadata->schema.begin() - 1);
-            reader->reader_data.column_ids.push_back(
-                schema_column - reader->metadata->metadata->schema.begin() - 1);
-            reader->reader_data.column_mapping.push_back(
-                it - returnCols.begin());
+            columnMap.push_back(static_cast<column_t>(schema_column - reader->metadata->metadata->schema.begin() - 1));
+            reader->column_ids.push_back(
+                MultiFileLocalColumnId(static_cast<column_t>(schema_column - reader->metadata->metadata->schema.begin() - 1)));
+            reader->column_indexes.emplace_back(static_cast<idx_t>(it - returnCols.begin()));
         }
         auto order_by_column_it = find_if(
             reader->metadata->metadata->schema.begin(),
@@ -55,7 +54,7 @@
         }
         void Scan(ClientContext& ctx) {
             chunk->Reset();
-            reader->Scan(*scanState, *chunk);
+            reader->Scan(ctx, *scanState, *chunk);
             if (!haveAbsentColumns || chunk->size() == 0) {
                 return;
             }
@@ -180,7 +179,7 @@ namespace duckdb {
             ParquetOptions po;
             po.binary_as_string = true;
             set->reader = make_uniq<ParquetReader>(context, file, po, nullptr);
-            res.push_back(move(set));
+            res.push_back(std::move(set));
         }
     }

@@ -189,16 +188,16 @@ namespace duckdb {
         Connection conn(*context.db);
         auto res = make_uniq<OrderedReadFunctionData>();
         auto files = ListValue::GetChildren(input.inputs[0]);
-        vector<string> fileNames;
+        vector<OpenFileInfo> fileInfoList;
         for (auto & file : files) {
-            fileNames.push_back(file.ToString());
+            fileInfoList.emplace_back(file.ToString());
         }
-        GlobMultiFileList fileList(context, fileNames, FileGlobOptions::ALLOW_EMPTY);
-        string filename;
+        GlobMultiFileList fileList(context, fileInfoList, FileGlobOptions::ALLOW_EMPTY);
+        OpenFileInfo file_info;
         MultiFileListScanData it;
         fileList.InitializeScan(it);
-        while (fileList.Scan(it, filename)) {
-            res->files.push_back(filename);
+        while (fileList.Scan(it, file_info)) {
+            res->files.push_back(file_info.path);
         }
         if (res->files.empty()) {
             throw InvalidInputException("No files matched the provided pattern.");

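A note on the parquet_ordered_scan.cpp hunks above, all of which track DuckDB v1.3 API moves visible in the diff: multi_file_list.hpp now lives under duckdb/common/multi_file/, the reader's projection is set through reader->column_ids (wrapping each index in MultiFileLocalColumnId) and reader->column_indexes instead of reader_data.column_ids / column_mapping, ParquetReader::Scan takes the ClientContext as its first argument, and file globbing exchanges OpenFileInfo values rather than raw path strings. The sketch below condenses that last pattern using only the calls shown in the hunks; the helper name is hypothetical and this is not a definitive guide to DuckDB's internal API:

#include "duckdb/common/multi_file/multi_file_list.hpp"

namespace duckdb {

// Hypothetical helper illustrating the v1.3 glob-expansion pattern above.
static vector<string> ExpandGlobs(ClientContext &context, const vector<Value> &files) {
    // v1.2 passed a vector<string> of paths; v1.3 wraps each path in OpenFileInfo.
    vector<OpenFileInfo> file_info_list;
    for (auto &file : files) {
        file_info_list.emplace_back(file.ToString());
    }
    GlobMultiFileList file_list(context, file_info_list, FileGlobOptions::ALLOW_EMPTY);
    MultiFileListScanData scan_data;
    file_list.InitializeScan(scan_data);
    // Scan now yields an OpenFileInfo; the resolved path lives in its .path member.
    OpenFileInfo file_info;
    vector<string> paths;
    while (file_list.Scan(scan_data, file_info)) {
        paths.push_back(file_info.path);
    }
    return paths;
}

} // namespace duckdb

The bind hunk above follows the same shape, pushing each file_info.path into res->files.
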
duckdb: 2 changes (1 addition, 1 deletion)
Submodule duckdb updated 1948 files