diff --git a/src/arrow_scan_ipc.cpp b/src/arrow_scan_ipc.cpp
index e3bf858..2797ae2 100644
--- a/src/arrow_scan_ipc.cpp
+++ b/src/arrow_scan_ipc.cpp
@@ -31,6 +31,10 @@ unique_ptr<FunctionData> ArrowIPCTableFunction::ArrowScanBind(ClientContext &co
 	uint64_t ptr = unpacked[0].GetValue<uint64_t>();
 	uint64_t size = unpacked[1].GetValue<uint64_t>();
 
+	if (stream_decoder->buffer()->is_eos()) {
+		throw IOException("More IPC buffers passed while the stream has already ended");
+	}
+
 	// Feed stream into decoder
 	auto res = stream_decoder->Consume((const uint8_t *) ptr, size);
 
diff --git a/test/nodejs/arrow_test.js b/test/nodejs/arrow_test.js
index 1db452e..6816b2d 100644
--- a/test/nodejs/arrow_test.js
+++ b/test/nodejs/arrow_test.js
@@ -211,6 +211,24 @@ for (const [name, fun] of Object.entries(to_ipc_functions)) {
             });
         });
     });
+
+    it(`Registering multiple IPC buffers for the same table in DuckDB`, async () => {
+        const table = await fun(db, 'SELECT * FROM range(1, 10000) tbl(i)');
+
+        db.register_buffer("table1", table, true, (err) => {
+            assert(!err);
+        });
+    });
+
+    it(`Registering IPC buffers for two different tables fails`, async () => {
+        const ipc_buffers1 = await fun(db, 'SELECT * FROM range(1, 3) tbl(i)');
+        const ipc_buffers2 = await fun(db, 'SELECT * FROM range(2, 4) tbl(i)');
+
+        // This will fail when reading buffers for the second table
+        db.register_buffer("table1", [...ipc_buffers1, ...ipc_buffers2], true, (err) => {
+            assert(err);
+        });
+    });
 })
 }