1 change: 1 addition & 0 deletions .gitignore
@@ -6,3 +6,4 @@ deps.zig
core.*
/qemu*.core
/fuzz/outputs
.DS_Store
29 changes: 18 additions & 11 deletions build.zig
@@ -5,6 +5,7 @@ const mem = std.mem;
const ResolvedTarget = std.Build.ResolvedTarget;
const Query = std.Target.Query;
const builtin = @import("builtin");
const Io = std.Io;

const Preprocessor = @import("build/Preprocessor.zig");

@@ -121,15 +122,15 @@ fn makeSQLiteLib(b: *std.Build, dep: *std.Build.Dependency, c_flags: []const []c
.root_module = mod,
});

lib.addIncludePath(dep.path("."));
lib.addIncludePath(b.path("c"));
lib.root_module.addIncludePath(dep.path("."));
lib.root_module.addIncludePath(b.path("c"));
if (sqlite_c == .with) {
lib.addCSourceFile(.{
lib.root_module.addCSourceFile(.{
.file = dep.path("sqlite3.c"),
.flags = c_flags,
});
}
lib.addCSourceFile(.{
lib.root_module.addCSourceFile(.{
.file = b.path("c/workaround.c"),
.flags = c_flags,
});
@@ -145,6 +146,8 @@ pub fn build(b: *std.Build) !void {
const query = b.standardTargetOptionsQueryOnly(.{});
const target = b.resolveTargetQuery(query);
const optimize = b.standardOptimizeOption(.{});
var threaded = Io.Threaded.init_single_threaded;
const io = threaded.io();

// Upstream dependency
const sqlite_dep = b.dependency("sqlite", .{
@@ -242,9 +245,9 @@ pub fn build(b: *std.Build) !void {
.name = test_name,
.root_module = mod,
});
tests.addIncludePath(b.path("c"));
tests.addIncludePath(sqlite_dep.path("."));
tests.linkLibrary(test_sqlite_lib);
tests.root_module.addIncludePath(b.path("c"));
tests.root_module.addIncludePath(sqlite_dep.path("."));
tests.root_module.linkLibrary(test_sqlite_lib);

const tests_options = b.addOptions();
tests.root_module.addImport("build_options", tests_options.createModule());
@@ -268,16 +271,17 @@ pub fn build(b: *std.Build) !void {
// Tools
//

addPreprocessStep(b, sqlite_dep);
addPreprocessStep(b, io, sqlite_dep);
}

fn addPreprocessStep(b: *std.Build, sqlite_dep: *std.Build.Dependency) void {
fn addPreprocessStep(b: *std.Build, io: Io, sqlite_dep: *std.Build.Dependency) void {
var wf = b.addWriteFiles();

// Preprocessing step
const preprocess = PreprocessStep.create(b, .{
.source = sqlite_dep.path("."),
.target = wf.getDirectory(),
.io = io,
});
preprocess.step.dependOn(&wf.step);

@@ -341,12 +345,14 @@ const PreprocessStep = struct {
const Config = struct {
source: std.Build.LazyPath,
target: std.Build.LazyPath,
io: Io,
};

step: std.Build.Step,

source: std.Build.LazyPath,
target: std.Build.LazyPath,
io: Io,

fn create(owner: *std.Build, config: Config) *PreprocessStep {
const step = owner.allocator.create(PreprocessStep) catch @panic("OOM");
@@ -359,6 +365,7 @@ const PreprocessStep = struct {
}),
.source = config.source,
.target = config.target,
.io = config.io,
};

return step;
@@ -374,7 +381,7 @@ const PreprocessStep = struct {
const loadable_sqlite3_h = try ps.target.path(owner, "loadable-ext-sqlite3.h").getPath3(owner, step).toString(owner.allocator);
const loadable_sqlite3ext_h = try ps.target.path(owner, "loadable-ext-sqlite3ext.h").getPath3(owner, step).toString(owner.allocator);

try Preprocessor.sqlite3(owner.allocator, sqlite3_h, loadable_sqlite3_h);
try Preprocessor.sqlite3ext(owner.allocator, sqlite3ext_h, loadable_sqlite3ext_h);
try Preprocessor.sqlite3(owner.allocator, ps.io, sqlite3_h, loadable_sqlite3_h);
try Preprocessor.sqlite3ext(owner.allocator, ps.io, sqlite3ext_h, loadable_sqlite3ext_h);
}
};
41 changes: 21 additions & 20 deletions build/Preprocessor.zig
@@ -1,6 +1,7 @@
const std = @import("std");
const debug = std.debug;
const mem = std.mem;
const Io = std.Io;

// This tool is used to preprocess the sqlite3 headers to make them usable to build loadable extensions.
//
@@ -24,11 +25,11 @@ const mem = std.mem;
// This works but it requires fairly extensive modifications of both sqlite3.h and sqlite3ext.h which is time consuming to do manually;
// this tool is intended to automate all these modifications.

fn readOriginalData(allocator: mem.Allocator, path: []const u8) ![]const u8 {
var file = try std.fs.cwd().openFile(path, .{});
defer file.close();
fn readOriginalData(allocator: mem.Allocator, io: Io, path: []const u8) ![]const u8 {
var file = try Io.Dir.cwd().openFile(io, path, .{});
defer file.close(io);
var buf: [1024]u8 = undefined;
var reader = file.reader(&buf);
var reader = file.reader(io, &buf);

const data = reader.interface.readAlloc(allocator, 1024 * 1024);
return data;
@@ -153,8 +154,8 @@ const Processor = struct {
}
};

pub fn sqlite3(allocator: mem.Allocator, input_path: []const u8, output_path: []const u8) !void {
const data = try readOriginalData(allocator, input_path);
pub fn sqlite3(allocator: mem.Allocator, io: Io, input_path: []const u8, output_path: []const u8) !void {
const data = try readOriginalData(allocator, io, input_path);

var processor = try Processor.init(allocator, data);

@@ -192,17 +193,18 @@ pub fn sqlite3(allocator: mem.Allocator, input_path: []const u8, output_path: []

// Write the result

var output_file = try std.fs.cwd().createFile(output_path, .{ .mode = 0o0644 });
defer output_file.close();
var output_file = try Io.Dir.cwd().createFile(io, output_path, .{});
defer output_file.close(io);

try output_file.writeAll("/* sqlite3.h edited by the zig-sqlite build script */\n");
var buf: [1024]u8 = undefined;
var out_writer = output_file.writer(&buf);
try processor.dump(&out_writer);
var write_buff: [1024]u8 = undefined;

var w = output_file.writer(io, &write_buff);

try w.interface.writeAll("/* sqlite3.h edited by the zig-sqlite build script */\n");
// Dump the processed header after the banner, then flush the buffered
// writer (assuming Processor.dump now accepts the *Io.Writer interface).
try processor.dump(&w.interface);
try w.interface.flush();
}

pub fn sqlite3ext(allocator: mem.Allocator, input_path: []const u8, output_path: []const u8) !void {
const data = try readOriginalData(allocator, input_path);
pub fn sqlite3ext(allocator: mem.Allocator, io: Io, input_path: []const u8, output_path: []const u8) !void {
const data = try readOriginalData(allocator, io, input_path);

var processor = try Processor.init(allocator, data);

@@ -230,11 +232,10 @@ pub fn sqlite3ext(allocator: mem.Allocator, input_path: []const u8, output_path:

// Write the result

var output_file = try std.fs.cwd().createFile(output_path, .{ .mode = 0o0644 });
defer output_file.close();
var output_file = try Io.Dir.cwd().createFile(io, output_path, .{});
defer output_file.close(io);

try output_file.writeAll("/* sqlite3ext.h edited by the zig-sqlite build script */\n");
var buf: [1024]u8 = undefined;
var out_writer = output_file.writer(&buf);
try processor.dump(&out_writer);
var write_buff: [1024]u8 = undefined;
var w = output_file.writer(io, &write_buff);
try w.interface.writeAll("/* sqlite3ext.h edited by the zig-sqlite build script */\n");
// Dump the processed header after the banner, then flush the buffered
// writer (assuming Processor.dump now accepts the *Io.Writer interface).
try processor.dump(&w.interface);
try w.interface.flush();
}
25 changes: 12 additions & 13 deletions query.zig
@@ -47,6 +47,7 @@ pub fn ParsedQuery(comptime tmp_query: []const u8) type {
};

fn parse() ParsedQueryResult {
@setEvalBranchQuota(100000);
// This contains the final SQL query after parsing with our
// own typed bind markers removed.
var buf: [tmp_query.len]u8 = undefined;
@@ -69,6 +70,7 @@ pub fn ParsedQuery(comptime tmp_query: []const u8) type {
var hold_pos = 0;

for (tmp_query) |c| {
@setEvalBranchQuota(100000);
switch (state) {
.start => switch (c) {
'?', ':', '@', '$' => {
@@ -166,11 +168,7 @@ pub fn ParsedQuery(comptime tmp_query: []const u8) type {
// Handles optional types
const typ = if (type_info_string[0] == '?') blk: {
const child_type = ParseType(type_info_string[1..]);
break :blk @Type(std.builtin.Type{
.optional = .{
.child = child_type,
},
});
break :blk ?child_type;
} else blk: {
break :blk ParseType(type_info_string);
};
@@ -227,14 +225,11 @@ fn ParseType(comptime type_info: []const u8) type {
if (mem.eql(u8, "isize", type_info)) return isize;

if (type_info[0] == 'u' or type_info[0] == 'i') {
return @Type(std.builtin.Type{
.int = std.builtin.Type.Int{
.signedness = if (type_info[0] == 'i') .signed else .unsigned,
.bits = std.fmt.parseInt(usize, type_info[1..type_info.len], 10) catch {
@compileError("invalid type info " ++ type_info);
},
},
});
return @Int(
if (type_info[0] == 'i') .signed else .unsigned,
std.fmt.parseInt(u16, type_info[1..], 10) catch
@compileError("invalid type info " ++ type_info),
);
}

// Float
@@ -323,6 +318,10 @@ test "parsed query: bind markers types" {
.query = "foobar " ++ prefix ++ "{?[]const u8}",
.expected_marker = .{ .typed = ?[]const u8 },
},
.{
.query = "foobar " ++ prefix ++ "{[]const u8}",
.expected_marker = .{ .typed = []const u8 },
},
};

inline for (testCases) |tc| {
60 changes: 20 additions & 40 deletions sqlite.zig
@@ -3,7 +3,7 @@ const builtin = @import("builtin");
const build_options = @import("build_options");
const debug = std.debug;
const heap = std.heap;
const io = std.io;
const Io = std.Io;
const mem = std.mem;
const testing = std.testing;

@@ -116,7 +116,7 @@ pub const Blob = struct {
}
};

// Used when reading or binding data.
// TODO: may be worthwhile to remove with the new reader and writer updates
data: []const u8,

// Used for incremental i/o.
@@ -132,16 +132,9 @@
}
}

pub const Reader = io.GenericReader(*Self, errors.Error, read);

/// reader returns a io.Reader.
pub fn reader(self: *Self) Reader {
return .{ .context = self };
}

fn read(self: *Self, buffer: []u8) Error!usize {
pub fn read_from_db(self: *Self, buffer: []u8) Error![]u8 {
if (self.offset >= self.size) {
return 0;
return "";
}

const tmp_buffer = blk: {
@@ -161,17 +154,10 @@

self.offset += @intCast(tmp_buffer.len);

return tmp_buffer.len;
return tmp_buffer;
}

pub const Writer = io.GenericWriter(*Self, Error, write);

/// writer returns a io.Writer.
pub fn writer(self: *Self) Writer {
return .{ .context = self };
}

fn write(self: *Self, data: []const u8) Error!usize {
pub fn write_data_to_db(self: *Self, data: []const u8) Error!void {
const result = c.sqlite3_blob_write(
self.handle,
data.ptr,
Expand All @@ -183,8 +169,6 @@ pub const Blob = struct {
}

self.offset += @intCast(data.len);

return data.len;
}

/// Reset the offset used for reading and writing.
@@ -1470,7 +1454,7 @@ pub fn Iterator(comptime Type: type) type {
},
inline .@"struct", .@"union" => |TI| {
if (TI.layout == .@"packed" and !@hasField(FieldType, "readField")) {
const Backing = @Type(.{ .int = .{ .signedness = .unsigned, .bits = @bitSizeOf(FieldType) } });
const Backing = @Int(.unsigned, @bitSizeOf(FieldType));
return @bitCast(self.readInt(Backing, i));
}

@@ -1701,7 +1685,7 @@ pub const DynamicStatement = struct {
},
.@"union" => |info| {
if (info.layout == .@"packed") {
const Backing = @Type(.{ .int = .{ .signedness = .unsigned, .bits = @bitSizeOf(FieldType) } });
const Backing = @Int(.unsigned, @bitSizeOf(FieldType));
try self.bindField(Backing, options, field_name, i, @as(Backing, @bitCast(field)));
return;
}
@@ -3091,10 +3075,6 @@ test "sqlite: statement iterator" {
}

test "sqlite: blob open, reopen" {
var arena = std.heap.ArenaAllocator.init(testing.allocator);
defer arena.deinit();
const allocator = arena.allocator();

var db = try getTestDb();
defer db.deinit();

@@ -3120,14 +3100,14 @@

{
// Write the first blob data
var blob_writer = blob.writer();
try blob_writer.writeAll(blob_data1);
try blob_writer.writeAll(blob_data1);
try blob.write_data_to_db(blob_data1);
try blob.write_data_to_db(blob_data1);

blob.reset();

var blob_reader = blob.reader();
const data = try blob_reader.readAllAlloc(allocator, 8192);
var read_buff: [8192]u8 = undefined;

const data = try blob.read_from_db(&read_buff);

try testing.expectEqualSlices(u8, blob_data1 ** 2, data);
}
Expand All @@ -3137,14 +3117,14 @@ test "sqlite: blob open, reopen" {

{
// Write the second blob data
var blob_writer = blob.writer();
try blob_writer.writeAll(blob_data2);
try blob_writer.writeAll(blob_data2);
try blob.write_data_to_db(blob_data2);
try blob.write_data_to_db(blob_data2);

blob.reset();

var blob_reader = blob.reader();
const data = try blob_reader.readAllAlloc(allocator, 8192);
var read_buff: [8192]u8 = undefined;

const data = try blob.read_from_db(&read_buff);

try testing.expectEqualSlices(u8, blob_data2 ** 2, data);
}
@@ -3749,7 +3729,7 @@ test "sqlite: create aggregate function with no aggregate context" {
var db = try getTestDb();
defer db.deinit();

var rand = std.Random.DefaultPrng.init(@intCast(std.time.milliTimestamp()));
var rand = std.Random.DefaultPrng.init(@intCast((try std.time.Instant.now()).timestamp.nsec));

// Create an aggregate function working with a MyContext

@@ -3810,7 +3790,7 @@ test "sqlite: create aggregate function with an aggregate context" {
var db = try getTestDb();
defer db.deinit();

var rand = std.Random.DefaultPrng.init(@intCast(std.time.milliTimestamp()));
var rand = std.Random.DefaultPrng.init(@intCast((try std.time.Instant.now()).timestamp.nsec));

try db.createAggregateFunction(
"mySum",