Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -175,6 +175,7 @@ $ cat events.csv \
| `--json` | Output results as a JSON array of objects (mutually exclusive with `-H`) |
| `--max-rows <n>` | Stop if more than `n` data rows are read (exit 1) |
| `--columns` | Read the CSV header row, print each column name on its own line, and exit 0. With `-v`/`--verbose`, also shows the inferred type per column (`name INTEGER`). Respects `--delimiter` and `--tsv`. Mutually exclusive with a query argument. |
| `--output <file>` | Write results to the given file instead of stdout. Creates or overwrites the file. Exits 1 if the file cannot be created. Mutually exclusive with `--columns`. |
| `-v`, `--verbose` | Print `Loaded <n> rows in <t>s` to stderr after loading (always on TTY; forced with flag) |
| `-h`, `--help` | Show usage help and exit |
| `-V`, `--version` | Print version and exit |
Expand Down
54 changes: 54 additions & 0 deletions build.zig
Original file line number Diff line number Diff line change
Expand Up @@ -427,6 +427,60 @@ pub fn build(b: *std.Build) void {
test_columns_short_verbose.step.dependOn(b.getInstallStep());
test_step.dependOn(&test_columns_short_verbose.step);

// Integration test 42: --output writes results to a file
const test_output_file = b.addSystemCommand(&.{
"bash", "-c",
\\tmp=$(mktemp); printf 'name,age\nAlice,30\nBob,25\n' | ./zig-out/bin/sql-pipe --output "$tmp" 'SELECT name FROM t ORDER BY age'; diff "$tmp" <(printf 'Bob\nAlice\n'); rm -f "$tmp"
});
test_output_file.step.dependOn(b.getInstallStep());
test_step.dependOn(&test_output_file.step);

// Integration test 43: --output works with --json
const test_output_json = b.addSystemCommand(&.{
"bash", "-c",
\\tmp=$(mktemp); printf 'name,age\nAlice,30\n' | ./zig-out/bin/sql-pipe --json --output "$tmp" 'SELECT * FROM t'; grep -q '"name":"Alice"' "$tmp"; rm -f "$tmp"
});
test_output_json.step.dependOn(b.getInstallStep());
test_step.dependOn(&test_output_json.step);

// Integration test 44: --output with missing parent directory exits 1 with error message
const test_output_bad_path = b.addSystemCommand(&.{
"bash", "-c",
\\msg=$(printf 'a\n1\n' | ./zig-out/bin/sql-pipe --output '/nonexistent/dir/file.csv' 'SELECT * FROM t' 2>&1 >/dev/null; echo "EXIT:$?")
\\echo "$msg" | grep -q "^error:" && echo "$msg" | grep -q 'EXIT:1'
});
test_output_bad_path.step.dependOn(b.getInstallStep());
test_step.dependOn(&test_output_bad_path.step);

// Integration test 45: --output works with --header
const test_output_header = b.addSystemCommand(&.{
"bash", "-c",
\\tmp=$(mktemp); printf 'name,age\nAlice,30\n' | ./zig-out/bin/sql-pipe --header --output "$tmp" 'SELECT name FROM t'; diff "$tmp" <(printf 'name\nAlice\n'); rm -f "$tmp"
});
test_output_header.step.dependOn(b.getInstallStep());
test_step.dependOn(&test_output_header.step);

// Integration test 46: --output cannot be combined with --columns (exits 1 with error)
const test_output_with_columns = b.addSystemCommand(&.{
"bash", "-c",
\\msg=$(printf 'a,b\n1,2\n' | ./zig-out/bin/sql-pipe --columns --output /tmp/out.csv 2>&1 >/dev/null; echo "EXIT:$?")
\\echo "$msg" | grep -q 'error: --output cannot be combined with --columns' && echo "$msg" | grep -q 'EXIT:1'
});
test_output_with_columns.step.dependOn(b.getInstallStep());
test_step.dependOn(&test_output_with_columns.step);

// Integration test 47: --output on SQL error flushes partial output before exit
const test_output_sql_error_flush = b.addSystemCommand(&.{
"bash", "-c",
\\tmp=$(mktemp)
\\printf 'name,age\nAlice,30\nBob,25\n' | ./zig-out/bin/sql-pipe --header --output "$tmp" 'SELECT * FROM nonexistent_table' 2>/dev/null; test $? -eq 3
\\rm -f "$tmp"
});
test_output_sql_error_flush.step.dependOn(b.getInstallStep());
test_step.dependOn(&test_output_sql_error_flush.step);
test_output_bad_path.step.dependOn(b.getInstallStep());
test_step.dependOn(&test_output_bad_path.step);

// Unit tests for the RFC 4180 CSV parser (src/csv.zig)
const unit_tests = b.addTest(.{
.root_module = b.createModule(.{
Expand Down
6 changes: 6 additions & 0 deletions docs/sql-pipe.1.scd
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,12 @@ OPTIONS
for each column, using the first 100 data rows for inference. Respects
*--delimiter* and *--tsv*. Mutually exclusive with a query argument.

*--output* <file>
	Write results to <file> instead of standard output. Creates or
	overwrites the file. Compatible with all output modes (*--json*,
	*--header*, CSV), but mutually exclusive with *--columns*. Exits with
	code 1 and an error message if the file cannot be created (bad path or
	insufficient permissions).

*-h, --help*
Print the help message and exit with code 0.

Expand Down
62 changes: 58 additions & 4 deletions src/main.zig
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,8 @@ const SqlPipeError = error{
StepFailed,
CommitFailed,
PrepareQueryFailed,
InvalidOutputPath,
OutputWithColumns,
};

// ─── Column type inference ────────────────────────────
Expand Down Expand Up @@ -72,6 +74,8 @@ const ParsedArgs = struct {
/// Print "Loaded <n> rows" to stderr after all CSV rows are inserted when true.
/// When false, the message is still shown automatically when stderr is a TTY.
verbose: bool,
/// Write results to this file path instead of stdout; null = write to stdout.
output: ?[]const u8,
};

/// Arguments for `--columns` mode.
Expand Down Expand Up @@ -117,6 +121,8 @@ fn printUsage(writer: *std.Io.Writer) !void {
\\ With --columns: show inferred type per column
\\ --columns List column names from header (one per line) and exit
\\ Combine with -v/--verbose to include inferred types
\\ Cannot be combined with --output or a query argument
\\ --output <file> Write results to file instead of stdout
\\ -h, --help Show this help message and exit
\\ -V, --version Show version and exit
\\
Expand Down Expand Up @@ -166,6 +172,7 @@ fn parseArgs(args: []const [:0]const u8) SqlPipeError!ArgsResult {
var max_rows: ?usize = null;
var verbose = false;
var list_columns = false;
var output: ?[]const u8 = null;

// Loop invariant I: all args[1..i] have been processed;
// query holds the first non-flag argument seen, or null;
Expand Down Expand Up @@ -214,6 +221,16 @@ fn parseArgs(args: []const [:0]const u8) SqlPipeError!ArgsResult {
verbose = true;
} else if (std.mem.eql(u8, arg, "--columns")) {
list_columns = true;
} else if (std.mem.eql(u8, arg, "--output")) {
i += 1;
if (i >= args.len) return error.InvalidOutputPath;
const trimmed = std.mem.trim(u8, args[i], " \t");
if (trimmed.len == 0) return error.InvalidOutputPath;
output = trimmed;
} else if (std.mem.startsWith(u8, arg, "--output=")) {
const trimmed = std.mem.trim(u8, arg["--output=".len..], " \t");
if (trimmed.len == 0) return error.InvalidOutputPath;
output = trimmed;
} else {
if (query == null) query = arg;
}
Expand All @@ -223,6 +240,10 @@ fn parseArgs(args: []const [:0]const u8) SqlPipeError!ArgsResult {
if (json and header)
return error.IncompatibleFlags;

// --output is mutually exclusive with --columns (--columns always writes to stdout)
if (output != null and list_columns)
return error.OutputWithColumns;

// --columns is mutually exclusive with a query argument
if (list_columns and query != null)
return error.ColumnsWithQuery;
Expand All @@ -239,6 +260,7 @@ fn parseArgs(args: []const [:0]const u8) SqlPipeError!ArgsResult {
.json = json,
.max_rows = max_rows,
.verbose = verbose,
.output = output,
} };
}

Expand Down Expand Up @@ -1251,6 +1273,7 @@ fn run(
}

execQuery(allocator, db, query, stdout_writer, parsed.header, parsed.json) catch {
stdout_writer.flush() catch |err| std.log.err("failed to flush output before fatal: {}", .{err});
fatalSqlWithContext(allocator, db, std.mem.span(c.sqlite3_errmsg(db)), stderr_writer);
};
// {A10: all result rows written to stdout as CSV lines}
Expand Down Expand Up @@ -1299,6 +1322,20 @@ pub fn main(init: std.process.Init.Minimal) void {
stderr_writer.flush() catch |ferr| std.log.err("failed to flush: {}", .{ferr});
std.process.exit(@intFromEnum(ExitCode.usage));
},
error.InvalidOutputPath => {
stderr_writer.writeAll("error: --output requires a non-empty file path\n") catch |werr| {
std.log.err("failed to write error message: {}", .{werr});
};
stderr_writer.flush() catch |ferr| std.log.err("failed to flush: {}", .{ferr});
std.process.exit(@intFromEnum(ExitCode.usage));
},
error.OutputWithColumns => {
stderr_writer.writeAll("error: --output cannot be combined with --columns\n") catch |werr| {
std.log.err("failed to write error message: {}", .{werr});
};
stderr_writer.flush() catch |ferr| std.log.err("failed to flush: {}", .{ferr});
std.process.exit(@intFromEnum(ExitCode.usage));
},
else => {},
}
printUsage(stderr_writer) catch |werr| {
Expand Down Expand Up @@ -1333,10 +1370,27 @@ pub fn main(init: std.process.Init.Minimal) void {
};
},
.parsed => |parsed| {
run(parsed, allocator, io.io(), stderr_writer, stdout_writer);
stdout_file_writer.flush() catch |err| {
std.log.err("failed to flush stdout: {}", .{err});
};
if (parsed.output) |output_path| {
const output_file = std.Io.Dir.createFile(std.Io.Dir.cwd(), io.io(), output_path, .{}) catch |err| {
stderr_writer.print("error: cannot create output file '{s}': {s}\n", .{ output_path, @errorName(err) }) catch |werr| {
std.log.err("failed to write error message: {}", .{werr});
};
stderr_writer.flush() catch |ferr| std.log.err("failed to flush: {}", .{ferr});
std.process.exit(@intFromEnum(ExitCode.usage));
};
defer std.Io.File.close(output_file, io.io());
var output_buf: [4096]u8 = undefined;
var output_file_writer = std.Io.File.writer(output_file, io.io(), &output_buf);
run(parsed, allocator, io.io(), stderr_writer, &output_file_writer.interface);
output_file_writer.flush() catch |err| {
std.log.err("failed to flush output file: {}", .{err});
};
} else {
run(parsed, allocator, io.io(), stderr_writer, stdout_writer);
stdout_file_writer.flush() catch |err| {
std.log.err("failed to flush stdout: {}", .{err});
};
}
stderr_file_writer.flush() catch |err| {
std.log.err("failed to flush stderr: {}", .{err});
};
Expand Down
Loading