diff --git a/.github/workflows/MainDistributionPipeline.yml b/.github/workflows/MainDistributionPipeline.yml
deleted file mode 100644
index 59039be..0000000
--- a/.github/workflows/MainDistributionPipeline.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-#
-# This workflow calls the main distribution pipeline from DuckDB to build, test and (optionally) release the extension
-#
-name: Main Extension Distribution Pipeline
-on:
-  push:
-  pull_request:
-  workflow_dispatch:
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }}-${{ github.ref != 'refs/heads/main' || github.sha }}
-  cancel-in-progress: true
-
-jobs:
-  duckdb-stable-build:
-    name: Build extension binaries
-    uses: duckdb/extension-ci-tools/.github/workflows/_extension_distribution.yml@v1.0.0
-    with:
-      duckdb_version: v1.0.0
-      extension_name: arrow
-      exclude_archs: 'wasm_mvp;wasm_eh;wasm_threads;windows_amd64;windows_amd64_rtools'
-
-  duckdb-stable-deploy:
-    name: Deploy extension binaries
-    needs: duckdb-stable-build
-    uses: duckdb/extension-ci-tools/.github/workflows/_extension_deploy.yml@v1.0.0
-    secrets: inherit
-    with:
-      duckdb_version: v1.0.0
-      extension_name: arrow
-      exclude_archs: 'wasm_mvp;wasm_eh;wasm_threads;windows_amd64;windows_amd64_rtools'
-      deploy_latest: ${{ startsWith(github.ref, 'refs/tags/v') || github.ref == 'refs/heads/main' }}
diff --git a/duckdb b/duckdb
index 1f98600..29329f0 160000
--- a/duckdb
+++ b/duckdb
@@ -1 +1 @@
-Subproject commit 1f98600c2cf8722a6d2f2d805bb4af5e701319fc
+Subproject commit 29329f0576bd72886f98ad41a27da237f1a157ed
diff --git a/src/arrow_scan_ipc.cpp b/src/arrow_scan_ipc.cpp
index a60d255..37e0e81 100644
--- a/src/arrow_scan_ipc.cpp
+++ b/src/arrow_scan_ipc.cpp
@@ -15,7 +15,6 @@ TableFunction ArrowIPCTableFunction::GetFunction() {
       ArrowTableFunction::ArrowScanInitLocal);
 
   scan_arrow_ipc_func.cardinality = ArrowTableFunction::ArrowScanCardinality;
-  scan_arrow_ipc_func.get_batch_index = nullptr; // TODO implement
   scan_arrow_ipc_func.projection_pushdown = true;
   scan_arrow_ipc_func.filter_pushdown = false;
 
@@ -71,9 +70,12 @@ unique_ptr<FunctionData> ArrowIPCTableFunction::ArrowScanBind(
   if (!schema.release) {
     throw InvalidInputException("arrow_scan: released schema passed");
   }
-  auto arrow_type = GetArrowLogicalType(schema);
+  auto arrow_type =
+      ArrowType::GetArrowLogicalType(DBConfig::GetConfig(context), schema);
+
   if (schema.dictionary) {
-    auto dictionary_type = GetArrowLogicalType(*schema.dictionary);
+    auto dictionary_type = ArrowType::GetArrowLogicalType(
+        DBConfig::GetConfig(context), *schema.dictionary);
     return_types.emplace_back(dictionary_type->GetDuckType());
     arrow_type->SetDictionary(std::move(dictionary_type));
   } else {
diff --git a/src/arrow_to_ipc.cpp b/src/arrow_to_ipc.cpp
index c316d85..905df2b 100644
--- a/src/arrow_to_ipc.cpp
+++ b/src/arrow_to_ipc.cpp
@@ -76,9 +76,9 @@ ToArrowIPCFunction::Bind(ClientContext &context, TableFunctionBindInput &input,
 
   // Create the Arrow schema
   ArrowSchema schema;
+  auto properties = context.GetClientProperties();
   ArrowConverter::ToArrowSchema(&schema, input.input_table_types,
-                                input.input_table_names,
-                                context.GetClientProperties());
+                                input.input_table_names, properties);
   result->schema = arrow::ImportSchema(&schema).ValueOrDie();
 
   return std::move(result);
@@ -116,9 +116,10 @@ OperatorResultType ToArrowIPCFunction::Function(ExecutionContext &context,
     output.data[1].SetValue(0, Value::BOOLEAN(1));
   } else {
     if (!local_state.appender) {
-      local_state.appender =
-          make_uniq<ArrowAppender>(input.GetTypes(), data.chunk_size,
-                                   context.client.GetClientProperties());
+      local_state.appender = make_uniq<ArrowAppender>(input.GetTypes(), data.chunk_size,
+                                                      context.client.GetClientProperties(),
+                                                      ArrowTypeExtensionData::GetExtensionTypes(
+                                                          context.client, input.GetTypes()));
     }
 
     // Append input chunk