diff --git a/benches/bench.ts b/benches/bench.ts new file mode 100644 index 00000000..4b4a2674 --- /dev/null +++ b/benches/bench.ts @@ -0,0 +1,136 @@ +const MAX_ITER = 10; +const MAX_DURATION = 60_000; + +type BenchFn = (cb: () => Promise) => void; + +type TimeitFn = (name: string, cb: () => Promise, opt?: BenchOptions) => void; + +interface BenchOptions { + maxIter?: number; + maxDuration?: number; +} + +export const bench = async ( + cb: (params: { beforeAll: BenchFn; afterAll: BenchFn, time: TimeitFn }) => Promise, + opt?: BenchOptions +) => { + const { maxIter = MAX_ITER, maxDuration = MAX_DURATION } = opt ?? {}; + const beforePromises: (() => Promise)[] = []; + const afterPromises: (() => Promise)[] = []; + const beforeAll = (cb: () => Promise) => { + beforePromises.push(cb); + }; + const afterAll = (cb: () => Promise) => { + afterPromises.push(cb); + }; + const variants: { name: string, cb: () => Promise, durations: number[], totalDuration: number, maxIter: number, maxDuration: number }[] = []; + const time = (name: string, cb: () => Promise, opt?: BenchOptions) => { + variants.push({ + name, + cb, + durations: [], + totalDuration: 0, + maxIter: opt?.maxIter ?? maxIter, + maxDuration: opt?.maxDuration ?? maxDuration, + }); + }; + + await cb({ beforeAll, afterAll, time }); + + await Promise.all(beforePromises.map(async (cb) => cb())); + + for (const variant of variants) { + console.log(`Running "${variant.name}"...`); + while (variant.durations.length < variant.maxIter && variant.totalDuration < variant.maxDuration) { + try { + const start = performance.now(); + await variant.cb(); + const duration = performance.now() - start; + variant.durations.push(duration); + variant.totalDuration += duration; + } catch (error) { + console.error(`Error running "${variant.name}":`, error); + break; + } + } + } + + await Promise.all(afterPromises.map((cb) => cb())); + + const summary = summarize(variants); + console.log(format(summary)); +}; + +interface Summary { + name: string; + iter: number; + first: number; + min: number; + max: number; + mean: number; + median: number; + p90: number; + p95: number; +} + +const summarize = (variants: { name: string, durations: number[] }[]): Summary[] => { + return variants.map((variant) => { + const sorted = [...variant.durations].sort((a, b) => a - b); + const total = sorted.reduce((a, b) => a + b, 0); + const count = sorted.length; + + const min = sorted[0] || 0; + const max = sorted[count - 1] || 0; + const mean = count > 0 ? total / count : 0; + const median = + count === 0 + ? 0 + : count % 2 === 0 + ? (sorted[count / 2 - 1] + sorted[count / 2]) / 2 + : sorted[Math.floor(count / 2)]; + + const p90 = count === 0 ? 0 : sorted[Math.floor(count * 0.9)]; + const p95 = count === 0 ? 0 : sorted[Math.floor(count * 0.95)]; + + return { + name: variant.name, + iter: variant.durations.length, + first: variant.durations[0] ?? 
0, + min, + max, + mean, + median, + p90, + p95, + }; + }); +}; + +const format = (summary: Summary[]): string => { + const headers = ["name", "iter", "first", "min", "max", "mean", "median"] as const; + + const rows = summary.map((s) => ({ + name: s.name.slice(0, 50), + iter: s.iter.toString(), + first: s.first.toFixed(4), + min: s.min.toFixed(4), + max: s.max.toFixed(4), + mean: s.mean.toFixed(4), + median: s.median.toFixed(4), + })); + + const allRows = [ + { name: "name", iter: "iter", first: "first", min: "min", max: "max", mean: "mean", median: "median" }, + ...rows, + ]; + + const widths = headers.map((h) => + Math.max(...allRows.map((r) => r[h].length)) + ); + + return allRows + .map((row) => + headers.map((h, i) => row[h].padEnd(widths[i])).join(" | ") + ) + .join("\n"); +}; \ No newline at end of file diff --git a/benches/bench.v2.sh b/benches/bench.v2.sh new file mode 100755 index 00000000..647c6915 --- /dev/null +++ b/benches/bench.v2.sh @@ -0,0 +1,90 @@ +#!/usr/bin/env bash + +CONTAINER_NAME=borm_bench_v2 +USER=borm_bench +PASSWORD=borm_bench +NAMESPACE=borm_bench +DATABASE=borm_bench +SCHEMA_FILE="./benches/schema.v2.surql" + +# Function to clean up the container +cleanup() { + echo "Stopping and removing container..." + docker stop ${CONTAINER_NAME} >/dev/null 2>&1 + docker rm ${CONTAINER_NAME} >/dev/null 2>&1 + exit ${EXIT_CODE:-1} # Default to 1 if EXIT_CODE is unset (e.g. early crash) +} + +# Set up trap to call cleanup function on script exit +trap cleanup EXIT INT TERM + +# Function to parse command line arguments +parse_args() { + VITEST_ARGS=() + for arg in "$@" + do + case $arg in + -link=*) + # We'll ignore this parameter now + ;; + *) + VITEST_ARGS+=("$arg") + ;; + esac + done +} + +# Parse the command line arguments +parse_args "$@" + +# Start the container +if ! docker run \ + --rm \ + --detach \ + --name $CONTAINER_NAME \ + --user root \ + -p 8002:8002 \ + --pull always \ + surrealdb/surrealdb:v2.3.7 \ + start \ + --allow-all \ + -u $USER \ + -p $PASSWORD \ + --bind 0.0.0.0:8002 \ + rocksdb:///data/blitz.db; then + echo "Failed to start SurrealDB container" + exit 1 +fi + +until [ "`docker inspect -f {{.State.Running}} $CONTAINER_NAME`" == "true" ]; do + sleep 0.1; +done; + +# Wait for SurrealDB to be ready +echo "Waiting for SurrealDB to be ready..." +until docker exec $CONTAINER_NAME ./surreal is-ready --endpoint http://localhost:8002 2>/dev/null; do + sleep 0.5; +done; +echo "SurrealDB is ready!" 
+ +# Setup surrealdb database: create the namespace, database, and user dynamically +docker exec -i $CONTAINER_NAME ./surreal sql -u $USER -p $PASSWORD --endpoint http://localhost:8002 < { + const a: A[] = []; + const b: B[] = []; + + const randomInt = (min: number, max: number) => Math.floor(Math.random() * (max - min + 1)) + min; + const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'; + const randomString = (min: number, max: number) => { + const length = randomInt(min, max); + let result = ''; + for (let i = 0; i < length; i++) { + result += chars.charAt(Math.floor(Math.random() * chars.length)); + } + return result; + }; + const randomBoolean = () => Math.random() < 0.5; + const randomDate = () => { + const start = new Date('2020-01-01').getTime(); + const end = new Date('2026-01-01').getTime(); + return new Date(start + Math.random() * (end - start)); + }; + + const generateBase = (): Base => ({ + id: uid(), + string_1: randomString(10, 20), + number_1: Math.floor(Math.random() * Number.MAX_SAFE_INTEGER), + boolean_1: randomBoolean(), + datetime_1: randomDate(), + }); + + for (let i = 0; i < params.records; i++) { + b.push(generateBase()); + } + + for (let i = 0; i < params.records; i++) { + const fewLength = randomInt(params.few.min, params.few.max); + const manyLength = randomInt(params.many.min, params.many.max); + const fewSet = new Set(); + const manySet = new Set(); + + while (fewSet.size < fewLength) { + fewSet.add(b[randomInt(0, b.length - 1)].id); + } + + while (manySet.size < manyLength) { + manySet.add(b[randomInt(0, b.length - 1)].id); + } + + a.push({ + ...generateBase(), + one: b[i].id, + few: Array.from(fewSet), + many: Array.from(manySet), + }); + } + + return { a, b }; +} + +const uid = () => { + const firstChar = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'; + const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'; + let result = firstChar.charAt(Math.floor(Math.random() * firstChar.length)); + for (let i = 0; i < 15; i++) { + result += chars.charAt(Math.floor(Math.random() * chars.length)); + } + return result; +}; diff --git a/benches/insertData.v2.ts b/benches/insertData.v2.ts new file mode 100644 index 00000000..b45487d7 --- /dev/null +++ b/benches/insertData.v2.ts @@ -0,0 +1,125 @@ +import Surreal from 'surrealdb'; +import { type A, type B, type Base, generateData } from './generateData'; + +const URL = 'ws://127.0.0.1:8002'; +const NAMESPACE = 'borm_bench'; +const DATABASE = 'borm_bench'; +const USERNAME = 'borm_bench'; +const PASSWORD = 'borm_bench'; + +const insertData = async () => { + const db = await connect(); + console.log('generating data'); + const data = generateData({ + records: 10, + few: { min: 2, max: 2 }, + many: { min: 3, max: 3 }, + }); + const surql = createSurql(data); + console.log('\n> surql\n', surql); + console.log('inserting data'); + const start = performance.now(); + const result = await db.query(surql); + const end = performance.now(); + console.log(`Time taken: ${end - start} milliseconds`); + return result; +} + +const connect = async () => { + const db = new Surreal(); + await db.connect(URL, { + namespace: NAMESPACE, + database: DATABASE, + auth: { + username: USERNAME, + password: PASSWORD, + }, + versionCheck: false, + }); + return db; +} + +// const createSurql = (data: { a: A[]; b: B[]; }): string => { +// const lines = ['BEGIN TRANSACTION;']; + +// for (const b of data.b) { +// lines.push(`CREATE t_b:${b.id} SET ${createSurqlBaseSet(b)};`); +// } + +// for (const a 
of data.a) { +// const refFew = `[${a.few.map((i) => `t_b:${i}`).join(', ')}]`; +// const refMany = `[${a.many.map((i) => `t_b:${i}`).join(', ')}]`; +// lines.push(`CREATE t_a:${a.id} SET ${createSurqlBaseSet(a)}, ref_one = t_b:${a.one}, ref_few = ${refFew}, ref_many = ${refMany};`); +// lines.push(`UPDATE t_b:${a.one} SET ref_one = t_a:${a.id};`); +// lines.push(`RELATE t_a:${a.id}->t_a_b_one->t_b:${a.one};`); +// for (const i of a.few) { +// lines.push(`UPDATE t_b:${i} SET ref_few += t_a:${a.id};`); +// lines.push(`RELATE t_a:${a.id}->t_a_b_few->t_b:${i};`); +// } +// for (const i of a.many) { +// lines.push(`UPDATE t_b:${i} SET ref_many += t_a:${a.id};`); +// lines.push(`RELATE t_a:${a.id}->t_a_b_many->t_b:${i};`); +// } +// } + +// lines.push('COMMIT TRANSACTION;'); + +// return lines.join('\n'); +// }; + +const createSurql = (data: { a: A[]; b: B[]; }): string => { + const lines = ['BEGIN TRANSACTION;']; + + for (const b of data.b) { + lines.push(`CREATE t_b:${b.id} SET ${createSurqlBaseSet(b)};`); + } + + for (const a of data.a) { + const refFew = `[${a.few.map((i) => `t_b:${i}`).join(', ')}]`; + const refMany = `[${a.many.map((i) => `t_b:${i}`).join(', ')}]`; + + const tunnelOneId = `${a.id}_${a.one}`; + const tunnelFewIds = a.few.map((i) => `${a.id}_${i}`); + const tunnelManyIds = a.many.map((i) => `${a.id}_${i}`); + const tunnelOne = `tunnel_one:${tunnelOneId}`; + const tunnelFew = `[${tunnelFewIds.map((i) => `tunnel_few:${i}`).join(', ')}]`; + const tunnelMany = `[${tunnelManyIds.map((i) => `tunnel_many:${i}`).join(', ')}]`; + + lines.push(`CREATE t_a:${a.id} SET ${createSurqlBaseSet(a)}, ref_one = t_b:${a.one}, ref_few = ${refFew}, ref_many = ${refMany};`); + + lines.push(`CREATE ${tunnelOne} SET a = t_a:${a.id}, b = t_b:${a.one};`); + lines.push(`UPDATE t_b:${a.one} SET ref_one = t_a:${a.id}, tunnel_one = tunnel_one:${tunnelOneId};`); + lines.push(`RELATE t_a:${a.id}->edge_one->t_b:${a.one};`); + + for (const b of a.few) { + const tId = `${a.id}_${b}`; + lines.push(`CREATE tunnel_few:${tId} SET a = t_a:${a.id}, b = t_b:${b};`); + lines.push(`UPDATE t_b:${b} SET ref_few += t_a:${a.id}, tunnel_few += tunnel_few:${tId};`); + lines.push(`RELATE t_a:${a.id}->edge_few->t_b:${b};`); + } + + for (const b of a.many) { + const tId = `${a.id}_${b}`; + lines.push(`CREATE tunnel_many:${tId} SET a = t_a:${a.id}, b = t_b:${b};`); + lines.push(`UPDATE t_b:${b} SET ref_many += t_a:${a.id}, tunnel_many += tunnel_many:${tId};`); + lines.push(`RELATE t_a:${a.id}->edge_many->t_b:${b};`); + } + + lines.push(`UPDATE t_a:${a.id} SET tunnel_one = ${tunnelOne}, tunnel_few = ${tunnelFew}, tunnel_many = ${tunnelMany};`); + } + + lines.push('COMMIT TRANSACTION;'); + + return lines.join('\n'); +}; + +const createSurqlBaseSet = (data: Base): string => { + return `string_1 = "${data.string_1}", number_1 = ${data.number_1}, boolean_1 = ${data.boolean_1}, datetime_1 = type::datetime("${data.datetime_1.toISOString()}")`; +}; + +insertData().then(() => { + console.log('Data inserted successfully'); +}).catch((error) => { + console.error('Error inserting data:', error); +}); + diff --git a/benches/insertData.v3.ts b/benches/insertData.v3.ts new file mode 100644 index 00000000..7a0ef932 --- /dev/null +++ b/benches/insertData.v3.ts @@ -0,0 +1,163 @@ +import Surreal from 'surrealdb'; + +const URL = 'ws://127.0.0.1:8001'; +const NAMESPACE = 'borm_bench'; +const DATABASE = 'borm_bench'; +const USERNAME = 'borm_bench'; +const PASSWORD = 'borm_bench'; + +const insertData = async () => { + const db = await connect(); + 
console.log('generating data'); + const data = generateData({ + records: 10, + few: { min: 2, max: 2 }, + many: { min: 2, max: 2 }, + }); + const surql = createSurql(data); + // console.log(surql); + console.log('inserting data'); + const start = performance.now(); + const result = await db.query(surql); + const end = performance.now(); + console.log(`Time taken: ${end - start} milliseconds`); + return result; +} + +const connect = async () => { + const db = new Surreal(); + await db.connect(URL, { + namespace: NAMESPACE, + database: DATABASE, + auth: { + username: USERNAME, + password: PASSWORD, + }, + versionCheck: false, + }); + return db; +} + +interface Base { + id: string; + string_1: string; + number_1: number; + boolean_1: boolean; + datetime_1: Date; +} + +interface A extends Base { + one: B['id']; + few: B['id'][]; + many: B['id'][]; +} + +type B = Base; + +const generateData = (params: { + records: number; + few: { min: number; max: number }; + many: { min: number; max: number }; +}): { a: A[]; b: B[]; } => { + const a: A[] = []; + const b: B[] = []; + + const randomInt = (min: number, max: number) => Math.floor(Math.random() * (max - min + 1)) + min; + const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'; + const randomString = (min: number, max: number) => { + const length = randomInt(min, max); + let result = ''; + for (let i = 0; i < length; i++) { + result += chars.charAt(Math.floor(Math.random() * chars.length)); + } + return result; + }; + const randomBoolean = () => Math.random() < 0.5; + const randomDate = () => { + const start = new Date('2020-01-01').getTime(); + const end = new Date('2026-01-01').getTime(); + return new Date(start + Math.random() * (end - start)); + }; + + const generateBase = (): Base => ({ + id: uid(), + string_1: randomString(10, 100), + number_1: Math.floor(Math.random() * Number.MAX_SAFE_INTEGER), + boolean_1: randomBoolean(), + datetime_1: randomDate(), + }); + + for (let i = 0; i < params.records; i++) { + b.push(generateBase()); + } + + for (let i = 0; i < params.records; i++) { + const fewLength = randomInt(params.few.min, params.few.max); + const manyLength = randomInt(params.many.min, params.many.max); + const fewSet = new Set(); + const manySet = new Set(); + + while (fewSet.size < fewLength) { + fewSet.add(b[randomInt(0, b.length - 1)].id); + } + + while (manySet.size < manyLength) { + manySet.add(b[randomInt(0, b.length - 1)].id); + } + + a.push({ + ...generateBase(), + one: b[i].id, + few: Array.from(fewSet), + many: Array.from(manySet), + }); + } + + return { a, b }; +} + +const uid = () => { + const firstChar = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'; + const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'; + let result = firstChar.charAt(Math.floor(Math.random() * firstChar.length)); + for (let i = 0; i < 15; i++) { + result += chars.charAt(Math.floor(Math.random() * chars.length)); + } + return result; +}; + +const createSurql = (data: { a: A[]; b: B[]; }): string => { + const lines = ['BEGIN TRANSACTION;']; + + for (const b of data.b) { + lines.push(`CREATE t_b:${b.id} SET ${createSurqlBaseSet(b)};`); + } + + for (const a of data.a) { + const refFew = `[${a.few.map((i) => `t_b:${i}`).join(', ')}]`; + const refMany = `[${a.many.map((i) => `t_b:${i}`).join(', ')}]`; + lines.push(`CREATE t_a:${a.id} SET ${createSurqlBaseSet(a)}, ref_one = t_b:${a.one}, ref_few = ${refFew}, ref_many = ${refMany};`); + lines.push(`RELATE t_a:${a.id}->t_a_b_one->t_b:${a.one};`); + for 
(const i of a.few) { + lines.push(`RELATE t_a:${a.id}->t_a_b_few->t_b:${i};`); + } + for (const i of a.many) { + lines.push(`RELATE t_a:${a.id}->t_a_b_many->t_b:${i};`); + } + } + + lines.push('COMMIT TRANSACTION;'); + + return lines.join('\n'); +}; + +const createSurqlBaseSet = (data: Base): string => { + return `string_1 = "${data.string_1}", number_1 = ${data.number_1}, boolean_1 = ${data.boolean_1}, datetime_1 = type::datetime("${data.datetime_1.toISOString()}")`; +}; + +insertData().then(() => { + console.log('Data inserted successfully'); +}).catch((error) => { + console.error('Error inserting data:', error); +}); + diff --git a/benches/query.v3.ts b/benches/query.v3.ts new file mode 100644 index 00000000..17a7fd0f --- /dev/null +++ b/benches/query.v3.ts @@ -0,0 +1,41 @@ + +import Surreal from 'surrealdb'; + +const URL = 'ws://127.0.0.1:8001'; +const NAMESPACE = 'borm_bench'; +const DATABASE = 'borm_bench'; +const USERNAME = 'borm_bench'; +const PASSWORD = 'borm_bench'; + +const query = async () => { + const db = await connect(); + const result = await db.query( + 'SELECT id FROM type::table($table) WHERE id = type::record($id) LIMIT 2', + { + table: 't_a', + id: 't_a:A0HE7yuafcaZYxFd', + // alias: 'tableId', + b: [true], + } + ); + console.log(JSON.stringify(result, null, 2)); + return result; +} + +const connect = async () => { + const db = new Surreal(); + await db.connect(URL, { + namespace: NAMESPACE, + database: DATABASE, + auth: { + username: USERNAME, + password: PASSWORD, + }, + versionCheck: false, + }); + return db; +} + +query().then(() => { + process.exit(0); +}); \ No newline at end of file diff --git a/benches/rundb.v2.sh b/benches/rundb.v2.sh new file mode 100755 index 00000000..532a7a9d --- /dev/null +++ b/benches/rundb.v2.sh @@ -0,0 +1,50 @@ +#!/usr/bin/env bash + +CONTAINER_NAME=borm_bench_v2 +USER=borm_bench +PASSWORD=borm_bench +NAMESPACE=borm_bench +DATABASE=borm_bench +SCHEMA_FILE="./benches/schema.v2.surql" + +# Start the container +docker run \ + --rm \ + --detach \ + --name $CONTAINER_NAME \ + -v borm_bench_data_v2:/data \ + -e SURREAL_CAPS_ALLOW_EXPERIMENTAL=graphql \ + --user root \ + -p 8002:8002 \ + --pull always \ + surrealdb/surrealdb:v2 \ + start \ + --allow-all \ + -u $USER \ + -p $PASSWORD \ + --bind 0.0.0.0:8002 \ + rocksdb:///data/blitz.db + # surrealkv:///data/blitz.db + +until [ "`docker inspect -f {{.State.Running}} $CONTAINER_NAME`" == "true" ]; do + sleep 0.1; +done; + +# Wait for SurrealDB to be ready +echo "Waiting for SurrealDB to be ready..." +until docker exec $CONTAINER_NAME ./surreal is-ready --endpoint http://localhost:8002 2>/dev/null; do + sleep 0.5; +done; +echo "SurrealDB is ready!" + +# Setup surrealdb database: create the namespace, database, and user dynamically +docker exec -i $CONTAINER_NAME ./surreal sql -u $USER -p $PASSWORD --endpoint http://localhost:8002 </dev/null; do + sleep 0.5; +done; +echo "SurrealDB is ready!" 
+ +# Setup surrealdb database: create the namespace, database, and user dynamically +docker exec -i $CONTAINER_NAME ./surreal sql -u $USER -p $PASSWORD --endpoint http://localhost:8001 <; +DEFINE FIELD OVERWRITE number_1 ON TABLE t_a TYPE option; +DEFINE FIELD OVERWRITE boolean_1 ON TABLE t_a TYPE option; +DEFINE FIELD OVERWRITE datetime_1 ON TABLE t_a TYPE option; +DEFINE FIELD OVERWRITE ref_one ON TABLE t_a TYPE option>; +DEFINE FIELD OVERWRITE ref_few ON TABLE t_a TYPE option>>; +DEFINE FIELD OVERWRITE ref_many ON TABLE t_a TYPE option>>; +DEFINE FIELD OVERWRITE fut_one ON TABLE t_a VALUE { RETURN array::first(SELECT VALUE b FROM $parent.tunnel_one) || [] }; +-- DEFINE FIELD OVERWRITE fut_one ON TABLE t_a VALUE { RETURN SELECT VALUE b FROM $this.tunnel_one }; +DEFINE FIELD OVERWRITE fut_few ON TABLE t_a VALUE { RETURN SELECT VALUE b FROM $this.tunnel_few }; +DEFINE FIELD OVERWRITE fut_many ON TABLE t_a VALUE { RETURN SELECT VALUE b FROM $this.tunnel_many }; +DEFINE FIELD OVERWRITE tunnel_one ON TABLE t_a TYPE option>; +DEFINE FIELD OVERWRITE tunnel_few ON TABLE t_a TYPE option>>; +DEFINE FIELD OVERWRITE tunnel_many ON TABLE t_a TYPE option>>; +DEFINE INDEX IF NOT EXISTS idx_a_string_1 ON TABLE t_a COLUMNS string_1; +DEFINE INDEX IF NOT EXISTS idx_a_ref_one ON TABLE t_a COLUMNS ref_one; +DEFINE INDEX IF NOT EXISTS idx_a_ref_few ON TABLE t_a COLUMNS ref_few; +DEFINE INDEX IF NOT EXISTS idx_a_ref_many ON TABLE t_a COLUMNS ref_many; +DEFINE INDEX IF NOT EXISTS idx_a_tunnel_one ON TABLE t_a COLUMNS tunnel_one; +DEFINE INDEX IF NOT EXISTS idx_a_tunnel_few ON TABLE t_a COLUMNS tunnel_few; +DEFINE INDEX IF NOT EXISTS idx_a_tunnel_many ON TABLE t_a COLUMNS tunnel_many; + +DEFINE TABLE IF NOT EXISTS t_b SCHEMAFULL PERMISSIONS FULL; +DEFINE FIELD OVERWRITE string_1 ON TABLE t_b TYPE option; +DEFINE FIELD OVERWRITE number_1 ON TABLE t_b TYPE option; +DEFINE FIELD OVERWRITE boolean_1 ON TABLE t_b TYPE option; +DEFINE FIELD OVERWRITE datetime_1 ON TABLE t_b TYPE option; +DEFINE FIELD OVERWRITE ref_one ON TABLE t_b TYPE option>; +DEFINE FIELD OVERWRITE ref_few ON TABLE t_b TYPE option>>; +DEFINE FIELD OVERWRITE ref_many ON TABLE t_b TYPE option>>; +DEFINE FIELD OVERWRITE fut_one ON TABLE t_b VALUE { RETURN array::first(SELECT VALUE a FROM $parent.tunnel_one) || [] }; +-- DEFINE FIELD OVERWRITE fut_one ON TABLE t_b VALUE { RETURN SELECT VALUE a FROM $this.tunnel_one }; +DEFINE FIELD OVERWRITE fut_few ON TABLE t_b VALUE { RETURN SELECT VALUE a FROM $this.tunnel_few }; +DEFINE FIELD OVERWRITE fut_many ON TABLE t_b VALUE { RETURN SELECT VALUE a FROM $this.tunnel_many }; +DEFINE FIELD OVERWRITE tunnel_one ON TABLE t_b TYPE option>; +DEFINE FIELD OVERWRITE tunnel_few ON TABLE t_b TYPE option>>; +DEFINE FIELD OVERWRITE tunnel_many ON TABLE t_b TYPE option>>; +DEFINE INDEX IF NOT EXISTS idx_b_string_1 ON TABLE t_b COLUMNS string_1; +DEFINE INDEX IF NOT EXISTS idx_b_ref_one ON TABLE t_b COLUMNS ref_one; +DEFINE INDEX IF NOT EXISTS idx_b_ref_few ON TABLE t_b COLUMNS ref_few; +DEFINE INDEX IF NOT EXISTS idx_b_ref_many ON TABLE t_b COLUMNS ref_many; +DEFINE INDEX IF NOT EXISTS idx_b_tunnel_one ON TABLE t_b COLUMNS tunnel_one; +DEFINE INDEX IF NOT EXISTS idx_b_tunnel_few ON TABLE t_b COLUMNS tunnel_few; +DEFINE INDEX IF NOT EXISTS idx_b_tunnel_many ON TABLE t_b COLUMNS tunnel_many; + +DEFINE TABLE IF NOT EXISTS tunnel_one SCHEMAFULL PERMISSIONS FULL; +DEFINE FIELD OVERWRITE a ON TABLE tunnel_one TYPE option>; +DEFINE FIELD OVERWRITE b ON TABLE tunnel_one TYPE option>; +DEFINE INDEX IF NOT EXISTS 
unique_tunnel_one_a ON TABLE tunnel_one COLUMNS a UNIQUE; +DEFINE INDEX IF NOT EXISTS unique_tunnel_one_b ON TABLE tunnel_one COLUMNS b UNIQUE; + +DEFINE TABLE IF NOT EXISTS tunnel_few SCHEMAFULL PERMISSIONS FULL; +DEFINE FIELD OVERWRITE a ON TABLE tunnel_few TYPE option>; +DEFINE FIELD OVERWRITE b ON TABLE tunnel_few TYPE option>; +DEFINE INDEX IF NOT EXISTS unique_tunnel_few_a_b ON TABLE tunnel_few COLUMNS a, b UNIQUE; +DEFINE INDEX IF NOT EXISTS idx_tunnel_few_b ON TABLE tunnel_few COLUMNS b; + +DEFINE TABLE IF NOT EXISTS tunnel_many SCHEMAFULL PERMISSIONS FULL; +DEFINE FIELD OVERWRITE a ON TABLE tunnel_many TYPE option>; +DEFINE FIELD OVERWRITE b ON TABLE tunnel_many TYPE option>; +DEFINE INDEX IF NOT EXISTS unique_tunnel_many_a_b ON TABLE tunnel_many COLUMNS a, b UNIQUE; +DEFINE INDEX IF NOT EXISTS idx_tunnel_many_b ON TABLE tunnel_many COLUMNS b; + +-- These tables are not needed for the benchmark, but they are here for reference + +DEFINE TABLE IF NOT EXISTS edge_one SCHEMAFULL TYPE RELATION IN t_a OUT t_b PERMISSIONS FULL; +DEFINE INDEX IF NOT EXISTS unique_edge_one_in ON TABLE edge_one COLUMNS in UNIQUE; +DEFINE INDEX IF NOT EXISTS unique_edge_one_out ON TABLE edge_one COLUMNS out UNIQUE; + +DEFINE TABLE IF NOT EXISTS edge_few SCHEMAFULL TYPE RELATION IN t_a OUT t_b PERMISSIONS FULL; +DEFINE INDEX IF NOT EXISTS unique_edge_few_in_out ON TABLE edge_few COLUMNS in, out UNIQUE; +DEFINE INDEX IF NOT EXISTS idx_edge_few_out ON TABLE edge_few COLUMNS out; + +DEFINE TABLE IF NOT EXISTS edge_many SCHEMAFULL TYPE RELATION IN t_a OUT t_b PERMISSIONS FULL; +DEFINE INDEX IF NOT EXISTS unique_edge_many_in_out ON TABLE edge_many COLUMNS in, out UNIQUE; +DEFINE INDEX IF NOT EXISTS idx_edge_many_out ON TABLE edge_many COLUMNS out; + +DEFINE FUNCTION fn::as_array($var: option|record>) { + RETURN (type::is::array($var) AND $var) OR [$var] +}; + +COMMIT TRANSACTION; diff --git a/benches/schema.v2.ts b/benches/schema.v2.ts new file mode 100644 index 00000000..511339d2 --- /dev/null +++ b/benches/schema.v2.ts @@ -0,0 +1,196 @@ +import type { BormSchema } from "../src"; + +export const schema: BormSchema = { + entities: {}, + relations: { + t_a: { + idFields: ['id'], + defaultDBConnector: { id: 'default', path: 't_a' }, + dataFields: [ + { contentType: 'BOOLEAN', path: 'boolean_1' }, + { contentType: 'NUMBER', path: 'number_1' }, + { contentType: 'TEXT', path: 'string_1' }, + { contentType: 'DATE', path: 'datetime_1' }, + ], + roles: {}, + linkFields: [ + { + path: 'ref_one', + cardinality: 'ONE', + relation: 't_b', + plays: 'ref_one', + target: 'relation', + }, + { + path: 'ref_few', + cardinality: 'MANY', + relation: 't_b', + plays: 'ref_few', + target: 'relation', + }, + { + path: 'ref_many', + cardinality: 'MANY', + relation: 't_b', + plays: 'ref_many', + target: 'relation', + }, + { + path: 'fut_one', + relation: 'tunnel_one', + plays: 'a', + target: 'role', + targetRole: 'b', + cardinality: 'ONE', + }, + { + path: 'fut_few', + relation: 'tunnel_few', + plays: 'a', + target: 'role', + targetRole: 'b', + cardinality: 'MANY', + }, + { + path: 'fut_many', + relation: 'tunnel_many', + plays: 'a', + target: 'role', + targetRole: 'b', + cardinality: 'MANY', + }, + { + path: 'tunnel_one', + relation: 'tunnel_one', + plays: 'a', + target: 'relation', + cardinality: 'ONE', + }, + { + path: 'tunnel_few', + relation: 'tunnel_few', + plays: 'a', + target: 'relation', + cardinality: 'MANY', + }, + { + path: 'tunnel_many', + relation: 'tunnel_many', + plays: 'a', + target: 'relation', + cardinality: 'MANY', + 
}, + ], + }, + t_b: { + idFields: ['id'], + defaultDBConnector: { id: 'default', path: 't_b' }, + dataFields: [ + { contentType: 'BOOLEAN', path: 'boolean_1' }, + { contentType: 'NUMBER', path: 'number_1' }, + { contentType: 'TEXT', path: 'string_1' }, + { contentType: 'DATE', path: 'datetime_1' }, + ], + roles: { + ref_one: { + cardinality: 'ONE', + }, + ref_few: { + cardinality: 'MANY', + }, + ref_many: { + cardinality: 'MANY', + }, + }, + linkFields: [ + { + path: 'fut_one', + relation: 'tunnel_one', + plays: 'b', + target: 'role', + targetRole: 'a', + cardinality: 'ONE', + }, + { + path: 'fut_few', + relation: 'tunnel_few', + plays: 'b', + target: 'role', + targetRole: 'a', + cardinality: 'MANY', + }, + { + path: 'fut_many', + relation: 'tunnel_many', + plays: 'b', + target: 'role', + targetRole: 'a', + cardinality: 'MANY', + }, + { + path: 'tunnel_one', + relation: 'tunnel_one', + plays: 'b', + target: 'relation', + cardinality: 'ONE', + }, + { + path: 'tunnel_few', + relation: 'tunnel_few', + plays: 'b', + target: 'relation', + cardinality: 'MANY', + }, + { + path: 'tunnel_many', + relation: 'tunnel_many', + plays: 'b', + target: 'relation', + cardinality: 'MANY', + }, + ], + }, + tunnel_one: { + idFields: ['id'], + defaultDBConnector: { id: 'default', path: 'tunnel_one' }, + dataFields: [], + roles: { + a: { + cardinality: 'ONE', + }, + b: { + cardinality: 'ONE', + }, + }, + linkFields: [], + }, + tunnel_few: { + idFields: ['id'], + defaultDBConnector: { id: 'default', path: 'tunnel_few' }, + dataFields: [], + roles: { + a: { + cardinality: 'ONE', + }, + b: { + cardinality: 'ONE', + }, + }, + linkFields: [], + }, + tunnel_many: { + idFields: ['id'], + defaultDBConnector: { id: 'default', path: 'tunnel_many' }, + dataFields: [], + roles: { + a: { + cardinality: 'ONE', + }, + b: { + cardinality: 'ONE', + }, + }, + linkFields: [], + }, + }, +} \ No newline at end of file diff --git a/benches/schema.v3.surql b/benches/schema.v3.surql new file mode 100644 index 00000000..80c2b60e --- /dev/null +++ b/benches/schema.v3.surql @@ -0,0 +1,41 @@ +USE NS borm_bench; +USE DB borm_bench; + +BEGIN TRANSACTION; + +DEFINE TABLE IF NOT EXISTS t_a SCHEMAFULL PERMISSIONS FULL; +DEFINE FIELD OVERWRITE string_1 ON TABLE t_a TYPE option; +DEFINE FIELD OVERWRITE number_1 ON TABLE t_a TYPE option; +DEFINE FIELD OVERWRITE boolean_1 ON TABLE t_a TYPE option; +DEFINE FIELD OVERWRITE datetime_1 ON TABLE t_a TYPE option; +DEFINE FIELD OVERWRITE ref_one ON TABLE t_a TYPE option> REFERENCE; +DEFINE FIELD OVERWRITE ref_few ON TABLE t_a TYPE option>> REFERENCE; +DEFINE FIELD OVERWRITE ref_many ON TABLE t_a TYPE option>> REFERENCE; +DEFINE INDEX IF NOT EXISTS idx_a_string_1 ON TABLE t_a COLUMNS string_1; +DEFINE INDEX IF NOT EXISTS idx_a_ref_one ON TABLE t_a COLUMNS ref_one; +DEFINE INDEX IF NOT EXISTS idx_a_ref_few ON TABLE t_a COLUMNS ref_few; +DEFINE INDEX IF NOT EXISTS idx_a_ref_many ON TABLE t_a COLUMNS ref_many; + +DEFINE TABLE IF NOT EXISTS t_b SCHEMAFULL PERMISSIONS FULL; +DEFINE FIELD OVERWRITE string_1 ON TABLE t_b TYPE option; +DEFINE FIELD OVERWRITE number_1 ON TABLE t_b TYPE option; +DEFINE FIELD OVERWRITE boolean_1 ON TABLE t_b TYPE option; +DEFINE FIELD OVERWRITE datetime_1 ON TABLE t_b TYPE option; +DEFINE FIELD OVERWRITE computed_one ON TABLE t_b COMPUTED <~(t_a FIELD ref_one); +DEFINE FIELD OVERWRITE computed_few ON TABLE t_b COMPUTED <~(t_a FIELD ref_few); +DEFINE FIELD OVERWRITE computed_many ON TABLE t_b COMPUTED <~(t_a FIELD ref_many); +DEFINE INDEX IF NOT EXISTS idx_b_string_1 ON TABLE t_b 
COLUMNS string_1; + +DEFINE TABLE IF NOT EXISTS t_a_b_one SCHEMAFULL TYPE RELATION IN t_a OUT t_b PERMISSIONS FULL; +DEFINE INDEX IF NOT EXISTS unique_a_b_one_in ON TABLE t_a_b_one COLUMNS in UNIQUE; +DEFINE INDEX IF NOT EXISTS unique_a_b_one_out ON TABLE t_a_b_one COLUMNS out UNIQUE; + +DEFINE TABLE IF NOT EXISTS t_a_b_few SCHEMAFULL TYPE RELATION IN t_a OUT t_b PERMISSIONS FULL; +DEFINE INDEX IF NOT EXISTS unique_a_b_few_in_out ON TABLE t_a_b_few COLUMNS in, out UNIQUE; +DEFINE INDEX IF NOT EXISTS idx_a_b_few_out ON TABLE t_a_b_few COLUMNS out; + +DEFINE TABLE IF NOT EXISTS t_a_b_many SCHEMAFULL TYPE RELATION IN t_a OUT t_b PERMISSIONS FULL; +DEFINE INDEX IF NOT EXISTS unique_a_b_many_in_out ON TABLE t_a_b_many COLUMNS in, out UNIQUE; +DEFINE INDEX IF NOT EXISTS idx_a_b_many_out ON TABLE t_a_b_many COLUMNS out; + +COMMIT TRANSACTION; diff --git a/benches/v2-2.bench.ts b/benches/v2-2.bench.ts new file mode 100644 index 00000000..6005d7dd --- /dev/null +++ b/benches/v2-2.bench.ts @@ -0,0 +1,176 @@ +import Surreal from 'surrealdb'; +import type BormClient from '../src'; +import { setup } from '../tests/helpers/setup'; +import { bench } from './bench'; +import { type A, type B, type Base, generateData } from './generateData'; +import { schema } from './schema.v2'; + +const URL = 'ws://127.0.0.1:8002'; +const NAMESPACE = 'borm_bench'; +const DATABASE = 'borm_bench'; +const USERNAME = 'borm_bench'; +const PASSWORD = 'borm_bench'; + +let client: BormClient; +let cleanup: () => Promise; +let data: { a: A[]; b: B[]; }; + +bench(async ({ beforeAll, afterAll, time }) => { + beforeAll(async () => { + const result = await setup({ + config: { + server: { + provider: 'blitz-orm-js', + }, + dbConnectors: [ + { + id: 'default', + provider: 'surrealDB', + providerConfig: { linkMode: 'refs' }, + url: URL, + namespace: NAMESPACE, + dbName: DATABASE, + username: USERNAME, + password: PASSWORD, + }, + ], + }, + schema, + }); + client = result.client; + cleanup = result.cleanup; + console.log('Generating data'); + data = generateData({ + records: 1000, + few: { min: 5, max: 5 }, + many: { min: 20, max: 20 }, + // records: 20, + // few: { min: 1, max: 1 }, + // many: { min: 2, max: 2 }, + }); + console.log('Connecting to database'); + const surrealDB = await connect(); + console.log('Creating surql'); + const surql = createSurql(data); + console.log('Inserting data'); + await surrealDB.query(surql); + }); + + afterAll(async () => { + console.log('Cleaning up'); + await cleanup(); + }); + + time('Select all, sort by string_1, and limit 100', async () => { + await client.query({ $relation: 't_a', $limit: 100, $sort: [{ field: 'string_1', desc: true }] }); + }); + + time('Filter by indexed field', async () => { + const a = data.a[Math.floor(data.a.length / 2) + 1]; + await client.query({ $relation: 't_a', $filter: { string_1: a.string_1 } }); + }); + + time('Filter by ref_one', async () => { + const b = data.b[Math.floor(data.b.length / 2) + 2]; + await client.query({ $relation: 't_a', $filter: { ref_one: b.id } }); + }); + + time('Filter by ref_many', async () => { + const b = data.b[Math.floor(data.b.length / 2) + 3]; + await client.query({ $relation: 't_a', $filter: { ref_many: b.id } }); + }); + + time('Filter by ref_one string_1', async () => { + const b = data.b[Math.floor(data.b.length / 2) + 4]; + await client.query({ $relation: 't_a', $filter: { ref_one: { string_1: b.string_1 } } }); + }); + + time('Filter by ref_many string_1', async () => { + const b = data.b[Math.floor(data.b.length / 2) + 5]; + await 
client.query({ $relation: 't_a', $filter: { ref_many: { string_1: b.string_1 } } }); + }); + + time('Filter by fut_one', async () => { + const b = data.b[Math.floor(data.b.length / 2) + 6]; + await client.query({ $relation: 't_a', $filter: { fut_one: b.id } }); + }); + + time('Filter by fut_many', async () => { + const b = data.b[Math.floor(data.b.length / 2 + 7)]; + await client.query({ $relation: 't_a', $filter: { fut_many: b.id } }); + }); + + time('Filter by fut_one string_1', async () => { + const b = data.b[Math.floor(data.b.length / 2) + 8]; + await client.query({ $relation: 't_a', $filter: { fut_one: { string_1: b.string_1 } } }); + }); + + time('Filter by fut_many string_1', async () => { + const b = data.b[Math.floor(data.b.length / 2) + 9]; + await client.query({ $relation: 't_a', $filter: { fut_many: { string_1: b.string_1 } } }); + }); +}); + +const connect = async () => { + const db = new Surreal(); + await db.connect(URL, { + namespace: NAMESPACE, + database: DATABASE, + auth: { + username: USERNAME, + password: PASSWORD, + }, + versionCheck: false, + }); + return db; +} + +const createSurql = (data: { a: A[]; b: B[]; }): string => { + const lines = ['BEGIN TRANSACTION;']; + + for (const b of data.b) { + lines.push(`CREATE t_b:${b.id} SET ${createSurqlBaseSet(b)};`); + } + + for (const a of data.a) { + const refFew = `[${a.few.map((i) => `t_b:${i}`).join(', ')}]`; + const refMany = `[${a.many.map((i) => `t_b:${i}`).join(', ')}]`; + + const tunnelOneId = `${a.id}_${a.one}`; + const tunnelFewIds = a.few.map((i) => `${a.id}_${i}`); + const tunnelManyIds = a.many.map((i) => `${a.id}_${i}`); + const tunnelOne = `tunnel_one:${tunnelOneId}`; + const tunnelFew = `[${tunnelFewIds.map((i) => `tunnel_few:${i}`).join(', ')}]`; + const tunnelMany = `[${tunnelManyIds.map((i) => `tunnel_many:${i}`).join(', ')}]`; + + lines.push(`CREATE t_a:${a.id} SET ${createSurqlBaseSet(a)}, ref_one = t_b:${a.one}, ref_few = ${refFew}, ref_many = ${refMany};`); + + lines.push(`CREATE ${tunnelOne} SET a = t_a:${a.id}, b = t_b:${a.one};`); + lines.push(`UPDATE t_b:${a.one} SET ref_one = t_a:${a.id}, tunnel_one = tunnel_one:${tunnelOneId};`); + lines.push(`RELATE t_a:${a.id}->edge_one->t_b:${a.one};`); + + for (const b of a.few) { + const tId = `${a.id}_${b}`; + lines.push(`CREATE tunnel_few:${tId} SET a = t_a:${a.id}, b = t_b:${b};`); + lines.push(`UPDATE t_b:${b} SET ref_few += t_a:${a.id}, tunnel_few += tunnel_few:${tId};`); + lines.push(`RELATE t_a:${a.id}->edge_few->t_b:${b};`); + } + + for (const b of a.many) { + const tId = `${a.id}_${b}`; + lines.push(`CREATE tunnel_many:${tId} SET a = t_a:${a.id}, b = t_b:${b};`); + lines.push(`UPDATE t_b:${b} SET ref_many += t_a:${a.id}, tunnel_many += tunnel_many:${tId};`); + lines.push(`RELATE t_a:${a.id}->edge_many->t_b:${b};`); + } + + lines.push(`UPDATE t_a:${a.id} SET tunnel_one = ${tunnelOne}, tunnel_few = ${tunnelFew}, tunnel_many = ${tunnelMany};`); + } + + lines.push('COMMIT TRANSACTION;'); + + return lines.join('\n'); +}; + +const createSurqlBaseSet = (data: Base): string => { + return `string_1 = "${data.string_1}", number_1 = ${data.number_1}, boolean_1 = ${data.boolean_1}, datetime_1 = type::datetime("${data.datetime_1.toISOString()}")`; +}; \ No newline at end of file diff --git a/benches/v2.bench.ts b/benches/v2.bench.ts new file mode 100644 index 00000000..7e056d41 --- /dev/null +++ b/benches/v2.bench.ts @@ -0,0 +1,149 @@ +import Surreal from 'surrealdb'; +import { afterAll, beforeAll, bench, describe } from 'vitest'; +import type BormClient from 
'../src'; +import { setup } from '../tests/helpers/setup'; +import { type A, type B, type Base, generateData } from './generateData'; +import { schema } from './schema.v2'; + +const URL = 'ws://127.0.0.1:8002'; +const NAMESPACE = 'borm_bench'; +const DATABASE = 'borm_bench'; +const USERNAME = 'borm_bench'; +const PASSWORD = 'borm_bench'; + +let client: BormClient; +let cleanup: () => Promise; +let data: { a: A[]; b: B[]; }; + +beforeAll(async () => { + console.log('beforeAll'); + const result = await setup({ + config: { + server: { + provider: 'blitz-orm-js', + }, + dbConnectors: [ + { + id: 'default', + provider: 'surrealDB', + providerConfig: { linkMode: 'refs' }, + url: URL, + namespace: NAMESPACE, + dbName: DATABASE, + username: USERNAME, + password: PASSWORD, + }, + ], + }, + schema, + }); + client = result.client; + cleanup = result.cleanup; + console.log('Generating data'); + data = generateData({ + records: 1000, + few: { min: 5, max: 5 }, + many: { min: 10, max: 10 }, + }); + console.log('Connecting to database'); + const surrealDB = await connect(); + console.log('Creating surql'); + const surql = createSurql(data); + console.log('Inserting data'); + await surrealDB.query(surql); + // console.log('Result:', res); +}, 300_000); + +afterAll(async () => { + console.log('afterAll'); + await cleanup(); +}); + +describe('v2', () => { + + bench('Select all, sort by string_1, and limit 100', async () => { + const start = performance.now(); + await client.query({ $relation: 't_a', $limit: 100, $sort: [{ field: 'string_1', desc: true }] }); + console.log(`Select all, sort by string_1, and limit 100: ${performance.now() - start}ms`); + }); + + bench('Filter by ref_one', async () => { + const start = performance.now(); + // Pick in the middle of the b array + 1 + const b = data.b[Math.floor(data.b.length / 2) + 1]; + await client.query({ $relation: 't_a', $filter: { ref_one: b.id } }); + console.log(`Filter by ref_one: ${performance.now() - start}ms`); + }); + + bench('Filter by ref_one string_1', async () => { + const start = performance.now(); + // Pick in the middle of the b array + 2 + const b = data.b[Math.floor(data.b.length / 2) + 1]; + await client.query({ $relation: 't_a', $filter: { ref_one: { string_1: b.string_1 } } }); + console.log(`Filter by ref_one string_1: ${performance.now() - start}ms`); + }); +}); + +const connect = async () => { + const db = new Surreal(); + await db.connect(URL, { + namespace: NAMESPACE, + database: DATABASE, + auth: { + username: USERNAME, + password: PASSWORD, + }, + versionCheck: false, + }); + return db; +} + +const createSurql = (data: { a: A[]; b: B[]; }): string => { + const lines = ['BEGIN TRANSACTION;']; + + for (const b of data.b) { + lines.push(`CREATE t_b:${b.id} SET ${createSurqlBaseSet(b)};`); + } + + for (const a of data.a) { + const refFew = `[${a.few.map((i) => `t_b:${i}`).join(', ')}]`; + const refMany = `[${a.many.map((i) => `t_b:${i}`).join(', ')}]`; + + const tunnelOneId = `${a.id}_${a.one}`; + const tunnelFewIds = a.few.map((i) => `${a.id}_${i}`); + const tunnelManyIds = a.many.map((i) => `${a.id}_${i}`); + const tunnelOne = `tunnel_one:${tunnelOneId}`; + const tunnelFew = `[${tunnelFewIds.map((i) => `tunnel_few:${i}`).join(', ')}]`; + const tunnelMany = `[${tunnelManyIds.map((i) => `tunnel_many:${i}`).join(', ')}]`; + + lines.push(`CREATE t_a:${a.id} SET ${createSurqlBaseSet(a)}, ref_one = t_b:${a.one}, ref_few = ${refFew}, ref_many = ${refMany};`); + + lines.push(`CREATE ${tunnelOne} SET a = t_a:${a.id}, b = t_b:${a.one};`); + 
lines.push(`UPDATE t_b:${a.one} SET ref_one = t_a:${a.id}, tunnel_one = tunnel_one:${tunnelOneId};`); + lines.push(`RELATE t_a:${a.id}->edge_one->t_b:${a.one};`); + + for (const b of a.few) { + const tId = `${a.id}_${b}`; + lines.push(`CREATE tunnel_few:${tId} SET a = t_a:${a.id}, b = t_b:${b};`); + lines.push(`UPDATE t_b:${b} SET ref_few += t_a:${a.id}, tunnel_few += tunnel_few:${tId};`); + lines.push(`RELATE t_a:${a.id}->edge_few->t_b:${b};`); + } + + for (const b of a.many) { + const tId = `${a.id}_${b}`; + lines.push(`CREATE tunnel_many:${tId} SET a = t_a:${a.id}, b = t_b:${b};`); + lines.push(`UPDATE t_b:${b} SET ref_many += t_a:${a.id}, tunnel_many += tunnel_many:${tId};`); + lines.push(`RELATE t_a:${a.id}->edge_many->t_b:${b};`); + } + + lines.push(`UPDATE t_a:${a.id} SET tunnel_one = ${tunnelOne}, tunnel_few = ${tunnelFew}, tunnel_many = ${tunnelMany};`); + } + + lines.push('COMMIT TRANSACTION;'); + + return lines.join('\n'); +}; + +const createSurqlBaseSet = (data: Base): string => { + return `string_1 = "${data.string_1}", number_1 = ${data.number_1}, boolean_1 = ${data.boolean_1}, datetime_1 = type::datetime("${data.datetime_1.toISOString()}")`; +}; \ No newline at end of file diff --git a/package.json b/package.json index 2248d65e..5e53c526 100644 --- a/package.json +++ b/package.json @@ -23,6 +23,7 @@ "prepare": "husky", "pub": "pnpm build && pnpm publish", "knip": "knip", + "bench:surrealdb": "./benches/bench.v2.sh", "bench:surrealdb:edges": "cross-env BORM_TEST_ADAPTER=surrealDB BORM_TEST_SURREALDB_LINK_MODE=edges ./tests/bench.sh tests/unit/bench", "bench:surrealdb:refs": "cross-env BORM_TEST_ADAPTER=surrealDB BORM_TEST_SURREALDB_LINK_MODE=refs ./tests/bench.sh tests/unit/bench", "bench:typedb": "vitest bench typedb/bench", @@ -71,7 +72,8 @@ "robot3": "^1.1.1", "surrealdb": "^1.3.2", "typedb-driver": "^2.29.2", - "uuid": "^11.1.0" + "uuid": "^11.1.0", + "zod": "^4.2.1" }, "devDependencies": { "@biomejs/biome": "^2.1.3", @@ -100,5 +102,6 @@ "homepage": "https://github.com/Blitzapps/blitz-orm#readme", "directories": { "test": "tests" - } + }, + "packageManager": "pnpm@8.10.2+sha512.0782093d5ba6c7ad9462081bc1ef0775016a4b4109eca1e1fedcea6f110143af5f50993db36c427d4fa8c62be3920a3224db12da719d246ca19dd9f18048c33c" } diff --git a/src/enrichSchema.draft.ts b/src/enrichSchema.draft.ts new file mode 100644 index 00000000..1a5e93bd --- /dev/null +++ b/src/enrichSchema.draft.ts @@ -0,0 +1,453 @@ +import { isEqual } from "radash"; +import type { BormEntity, BormRelation, BormSchema, DataField, LinkField, RefField, RoleField } from "./types"; +import type { + DRAFT_EnrichedBormComputedField, + DRAFT_EnrichedBormConstantField, + DRAFT_EnrichedBormDataField, + DRAFT_EnrichedBormEntity, + DRAFT_EnrichedBormField, + DRAFT_EnrichedBormLinkField, + DRAFT_EnrichedBormRefField, + DRAFT_EnrichedBormRelation, + DRAFT_EnrichedBormRoleField, + DRAFT_EnrichedBormSchema +} from "./types/schema/enriched.draft"; + +export const enrichSchemaDraft = (schema: BormSchema): DRAFT_EnrichedBormSchema => { + const extendedSchema = extendSchema(schema); + const enrichedSchema: DRAFT_EnrichedBormSchema = {}; + const rolePlayerMap: RolePlayerMap = buildRolePlayerMap(extendedSchema); + + for (const entity in extendedSchema.entities) { + enrichThing('entity', entity, enrichedSchema, extendedSchema, rolePlayerMap); + } + + for (const relation in extendedSchema.relations) { + enrichThing('relation', relation, enrichedSchema, extendedSchema, rolePlayerMap); + } + + return enrichedSchema; +} + +/** + * Mutate the 
enriched schema in place. + */ +const enrichThing = ( + type: 'entity' | 'relation', + thingName: string, + mutEnrichedSchema: DRAFT_EnrichedBormSchema, + schema: BormSchema, + rolePlayerMap: RolePlayerMap, +): DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation => { + const enrichedEntity = mutEnrichedSchema[thingName]; + if (enrichedEntity) { + if (enrichedEntity.type === type) { + return enrichedEntity; + } + throw new Error(`Found entity and relation with the same name: ${thingName}`); + } + const thing = type === 'entity' ? schema.entities[thingName] : schema.relations[thingName] as BormEntity | BormRelation; + if (!thing) { + throw new Error(`${type === 'entity' ? 'Entity' : 'Relation'} "${thingName}" not found`); + } + + const extended = 'extends' in thing && thing.extends ? enrichThing(type, thing.extends, mutEnrichedSchema, schema, rolePlayerMap) : undefined; + + if (extended) { + addSubType(extended.name, thingName, mutEnrichedSchema); + } + + const fields: Record = {}; + const idFields = extended ? extended.idFields : getIdFields(thingName, thing); + + enrichDataFields(fields, thing.dataFields ?? [], thingName); + enrichRefFields(fields, thing.refFields ?? {}, thingName); + enrichLinkFields(fields, thing.linkFields ?? [], thingName, schema, rolePlayerMap); + + if (type === 'entity') { + const enriched: DRAFT_EnrichedBormEntity = { + type: 'entity', + name: thingName, + idFields, + extends: extended ? extended.name : undefined, + subTypes: [], + indexes: thing.indexes ?? [], + fields: fields as DRAFT_EnrichedBormEntity['fields'], + }; + mutEnrichedSchema[thingName] = enriched; + return enriched; + } + + if ('roles' in thing && thing.roles) { + enrichRoleFields(fields as Record, thing.roles as Record ?? {}, thingName, rolePlayerMap); + } + + const enriched: DRAFT_EnrichedBormRelation = { + type: 'relation', + name: thingName, + idFields, + extends: extended ? extended.name : undefined, + subTypes: [], + indexes: thing.indexes ?? [], + fields, + }; + mutEnrichedSchema[thingName] = enriched; + return enriched; +}; + +const addSubType = (thing: string, subThing: string, mutSchema: DRAFT_EnrichedBormSchema) => { + let currentThing: string | undefined = thing; + while (currentThing) { + const enrichedThing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation | undefined = mutSchema[currentThing]; + if (!enrichedThing) { + throw new Error(`Thing "${currentThing}" not found`); + } + enrichedThing.subTypes.push(subThing); + currentThing = enrichedThing.extends; + } +}; + +/** + * Mutate the enriched fields in place. + */ +const enrichDataFields = ( + mutEnrichedFields: Record, + dataFields: readonly DataField[], + thingName: string, +) => { + for (const df of dataFields ?? []) { + const existing = mutEnrichedFields[df.path]; + if (df.isVirtual) { + if (df.default?.type === 'fn' && typeof df.default.fn === 'function') { + const enriched: DRAFT_EnrichedBormComputedField = { + type: 'computed', + name: df.path, + contentType: df.contentType, + cardinality: df.cardinality ?? 'ONE', + fn: df.default.fn, + }; + assertNoDuplicateField(thingName, enriched, existing); + mutEnrichedFields[df.path] = enriched; + continue; + } + + if (df.default?.type === 'value') { + const enriched: DRAFT_EnrichedBormConstantField = { + type: 'constant', + name: df.path, + contentType: df.contentType, + cardinality: df.cardinality ?? 
'ONE', + value: df.default.value, + }; + assertNoDuplicateField(thingName, enriched, existing); + mutEnrichedFields[df.path] = enriched; + continue; + } + } + + const enriched: DRAFT_EnrichedBormDataField = { + type: 'data', + name: df.path, + contentType: df.contentType, + cardinality: df.cardinality ?? 'ONE', + unique: df.validations?.unique ?? false, + }; + assertNoDuplicateField(thingName, enriched, existing); + mutEnrichedFields[df.path] = enriched; + } +} + +/** + * Mutate the enriched fields in place. + */ +const enrichRefFields = ( + mutEnrichedFields: Record, + refFields: Record, + thingName: string, +) => { + for (const [refName, ref] of Object.entries(refFields ?? {})) { + const existing = mutEnrichedFields[refName]; + const enriched: DRAFT_EnrichedBormRefField = { + type: 'ref', + name: refName, + contentType: ref.contentType, + cardinality: ref.cardinality ?? 'ONE', + }; + assertNoDuplicateField(thingName, enriched, existing); + mutEnrichedFields[refName] = enriched; + } +} + +/** + * Mutate the enriched fields in place. + */ +const enrichLinkFields = ( + mutEnrichedFields: Record, + linkFields: readonly LinkField[], + thingName: string, + schema: BormSchema, + rolePlayerMap: RolePlayerMap, +) => { + for (const lf of linkFields ?? []) { + const targetRel = schema.relations[lf.relation]; + if (!targetRel) { + throw new Error(`Relation ${lf.relation} not found`); + } + const targetRole = targetRel.roles?.[lf.plays]; + if (!targetRole) { + throw new Error(`Role ${lf.plays} not found in relation ${lf.relation}`); + } + const existing = mutEnrichedFields[lf.path]; + + if (lf.target === 'relation') { + const enriched: DRAFT_EnrichedBormLinkField = { + type: 'link', + name: lf.path, + cardinality: lf.cardinality, + target: 'relation', + opposite: { + thing: lf.relation, + path: lf.plays, + cardinality: targetRole.cardinality, + }, + }; + assertNoDuplicateField(thingName, enriched, existing); + mutEnrichedFields[lf.path] = enriched; + continue; + } + + // NOTE: If the opposite role path is not defined, the opposite role path is the same as the link field path. + // This is an implicit rule and it's not standardized yet. + const oppositeRole = rolePlayerMap[lf.relation]?.[lf.targetRole]; + if (!oppositeRole) { + throw new Error(`Role ${lf.path} in relation ${lf.relation} does not exist`); + } + const rolePlayer = oppositeRole.targetingRole; + if (!rolePlayer) { + throw new Error(`Role ${oppositeRole} in relation ${lf.relation} is not played by any other thing`); + } + const enriched: DRAFT_EnrichedBormLinkField = { + type: 'link', + name: lf.path, + cardinality: lf.cardinality, + target: 'role', + opposite: rolePlayer, + }; + assertNoDuplicateField(thingName, enriched, existing); + mutEnrichedFields[lf.path] = enriched; + } +} + +/** + * Mutate the enriched fields in place. + */ +const enrichRoleFields = ( + mutEnrichedFields: Record, + roles: Record, + thingName: string, + rolePlayerMap: RolePlayerMap, +) => { + for (const [roleName, role] of Object.entries(roles)) { + // TODO: This is WRONG. + // It should not fallback to targetingRole if targetingRelation is not found + // because in the SurrealDB schema the value of the targetingRelation.thing[targetingRelation.path] is not thingName. + // This becomes problematic when we transform filter into sub-query: + // SELECT * FROM WHERE = xyz + // Is not the same as: + // SELECT * FROM (SELECT VALUE FROM WHERE id = xyz) + const opposite = rolePlayerMap[thingName]?.[roleName]?.targetingRelation ?? 
rolePlayerMap[thingName]?.[roleName]?.targetingRole; + if (!opposite) { + throw new Error(`Role ${roleName} in relation ${thingName} is not played by any other thing`); + } + const existing = mutEnrichedFields[roleName]; + const enriched: DRAFT_EnrichedBormRoleField = { + type: 'role', + name: roleName, + cardinality: role.cardinality ?? 'ONE', + opposite: opposite, + }; + assertNoDuplicateField(thingName, enriched, existing); + mutEnrichedFields[roleName] = enriched; + } +} + +const assertNoDuplicateField = (thing: string, newField: DRAFT_EnrichedBormField, existing?: DRAFT_EnrichedBormField) => { + if (!existing) { + return; + } + if (isEqual(newField, existing)) { + return; + } + console.log('\n> assertNoDuplicateField/newField\n', JSON.stringify(newField, null, 2)); + console.log('\n> assertNoDuplicateField/existing\n', JSON.stringify(existing, null, 2)); + throw new Error(`Duplicate field name "${newField.name}" in "${thing}"`); +}; + +type RolePlayerMap = Record< + DRAFT_EnrichedBormRelation['name'], + Record< + DRAFT_EnrichedBormRoleField['name'], + { targetingRole?: DRAFT_EnrichedBormRoleField['opposite'], targetingRelation?: DRAFT_EnrichedBormRoleField['opposite'] } + > +>; + +const buildRolePlayerMap = (schema: BormSchema): RolePlayerMap => { + const rolePlayerMap: RolePlayerMap = {}; + for (const [relName, rel] of [...Object.entries(schema.relations), ...Object.entries(schema.entities)]) { + for (const lf of rel.linkFields ?? []) { + const roleMap = rolePlayerMap[lf.relation] ?? {}; + rolePlayerMap[lf.relation] = roleMap; + const rolePlayer = roleMap[lf.plays] ?? {}; + roleMap[lf.plays] = rolePlayer; + const existingOpposite = lf.target === 'relation' ? rolePlayer.targetingRelation : rolePlayer.targetingRole; + if (existingOpposite) { + if (existingOpposite.thing === relName) { + // Multiple link fields of the same thing may play the same role. And it's fine. + continue; + } + if (isExtend(relName, existingOpposite.thing, schema)) { + // The current relation extends the role's opposite relation. Keep it. + continue; + } + if (!isExtend(existingOpposite.thing, relName, schema)) { + throw new Error(`Found multiple players for role ${lf.plays} in relation ${lf.relation}`); + } + } + if (lf.target === 'relation') { + rolePlayer.targetingRelation = { + thing: relName, + path: lf.path, + cardinality: lf.cardinality, + }; + } else { + rolePlayer.targetingRole = { + thing: relName, + path: lf.path, + cardinality: lf.cardinality, + }; + } + } + } + return rolePlayerMap; +} + +/** + * Return true if thingA extends thingB directly or indirectly. + */ +const isExtend = (thingA: string, thingB: string, schema: BormSchema): boolean => { + const ancestorsA = getAncestors(thingA, schema); + return ancestorsA.includes(thingB) +}; + +const getAncestors = (thing: string, schema: BormSchema): string[] => { + const ancestors: string[] = []; + let current = thing; + while (current) { + const _thing = schema.entities[current] ?? 
schema.relations[current]; + if (!_thing) { + throw new Error(`Thing "${current}" not found`); + } + if (!('extends' in _thing) || !_thing.extends) { + break; + } + ancestors.push(_thing.extends); + current = _thing.extends; + } + return ancestors.reverse(); +} + +const getIdFields = (name: string, entity: BormEntity | BormRelation): [string, ...string[]] => { + if (entity.idFields && entity.idFields.length > 0) { + return [entity.idFields[0], ...entity.idFields.slice(1)]; + } + const f = entity.dataFields?.find((f) => f.contentType === 'ID'); + if (f) { + return [f.path]; + } + throw new Error(`No id field found for entity "${name}"`); +} + +const extendSchema = (schema: BormSchema): BormSchema => { + const extendedSchema: BormSchema = { + entities: {}, + relations: {}, + }; + for (const name in schema.entities) { + extendEntity(name, schema, extendedSchema); + } + for (const name in schema.relations) { + extendRelation(name, schema, extendedSchema); + } + return extendedSchema; +}; + +/** + * NOTE: Mutate the extended schema in place. + */ +const extendEntity = (name: string, schema: BormSchema, mutExtendedSchema: BormSchema): BormEntity => { + const entity = schema.entities[name]; + if (!entity) { + throw new Error(`Entity "${name}" not found`); + } + if ('extends' in entity && entity.extends) { + const ancestor = extendEntity(entity.extends, schema, mutExtendedSchema); + const extended = { + ...entity, + idFields: entity.idFields ?? ancestor.idFields, + dataFields: extendDataFields(ancestor, entity), + linkFields: extendLinkFields(ancestor, entity), + refFields: extendRefFields(ancestor, entity), + }; + mutExtendedSchema.entities[name] = extended; + return extended; + } + mutExtendedSchema.entities[name] = entity; + return entity; +} + +/** + * NOTE: Mutate the extended schema in place. + */ +const extendRelation = (name: string, schema: BormSchema, mutExtendedSchema: BormSchema): BormRelation => { + const relation = schema.relations[name]; + if (!relation) { + throw new Error(`Relation "${name}" not found`); + } + if ('extends' in relation && relation.extends) { + const ancestor = extendRelation(relation.extends, schema, mutExtendedSchema); + const extended = { + ...relation, + idFields: relation.idFields ?? ancestor.idFields, + dataFields: extendDataFields(ancestor, relation), + linkFields: extendLinkFields(ancestor, relation), + refFields: extendRefFields(ancestor, relation), + roles: extendRoles(ancestor, relation), + }; + mutExtendedSchema.relations[name] = extended; + return extended; + } + mutExtendedSchema.relations[name] = relation; + return relation; +}; + +const extendDataFields = (ancestor: BormEntity | BormRelation, entity: BormEntity | BormRelation): DataField[] => { + const explicitDataFieldSet = new Set(entity.dataFields?.map((df) => df.path) ?? []); + const inheritedDataFields = ancestor.dataFields?.filter((df) => !explicitDataFieldSet.has(df.path)) ?? []; + return [...inheritedDataFields, ...entity.dataFields ?? []]; +} + +const extendLinkFields = (ancestor: BormEntity | BormRelation, entity: BormEntity | BormRelation): LinkField[] => { + const explicitLinkFieldSet = new Set(entity.linkFields?.map((lf) => lf.path) ?? []); + const inheritedLinkFields = ancestor.linkFields?.filter((lf) => !explicitLinkFieldSet.has(lf.path)) ?? []; + return [...inheritedLinkFields, ...entity.linkFields ?? 
[]];
+}
+
+const extendRefFields = (ancestor: BormEntity | BormRelation, entity: BormEntity | BormRelation): Record<string, RefField> => {
+  const inheritedRefFields = Object.fromEntries(Object.entries(ancestor.refFields ?? {}).filter(([k]) => !entity.refFields?.[k]));
+  return { ...inheritedRefFields, ...entity.refFields ?? {} };
+}
+
+const extendRoles = (ancestor: BormRelation, entity: BormRelation): Record<string, RoleField> => {
+  const inheritedRoles = Object.fromEntries(Object.entries(ancestor.roles ?? {}).filter(([k]) => !entity.roles?.[k]));
+  return { ...inheritedRoles, ...entity.roles ?? {} };
+}
\ No newline at end of file
diff --git a/src/helpers.ts b/src/helpers.ts
index 3fa3b04b..ae6f6dee 100644
--- a/src/helpers.ts
+++ b/src/helpers.ts
@@ -1,9 +1,9 @@
 /* eslint-disable no-param-reassign */
 import type { Draft } from 'immer';
-import { current, isDraft } from 'immer';
+import { current, isDraft, produce } from 'immer';
 import { customAlphabet } from 'nanoid';
-import type { TraversalMeta } from 'object-traversal';
-import { getNodeByPath } from 'object-traversal';
+import type { TraversalCallbackContext, TraversalMeta } from 'object-traversal';
+import { getNodeByPath, traverse } from 'object-traversal';
 import { isArray, isObject, listify, tryit } from 'radash';
 
 // todo: split helpers between common helpers, typeDBhelpers, dgraphelpers...
@@ -430,3 +430,16 @@ export const genId = (n?: number) => {
   const nanoid = customAlphabet(alphabet, idLength);
   return nanoid();
 };
+
+export const deepRemoveMetaData = (obj: object) => {
+  const removeMeta = ({ value }: TraversalCallbackContext) => {
+    if (value && typeof value === 'object' && '$id' in value) {
+      const metas = Object.keys(value).filter((k) => k.startsWith('$'));
+      metas.forEach((k) => { delete value[k]; });
+      const symbols = Object.getOwnPropertySymbols(value);
+      symbols.forEach((s) => { delete value[s]; });
+    }
+    return value;
+  };
+  return produce(obj, (draft) => traverse(draft, removeMeta));
+};
diff --git a/src/index.ts b/src/index.ts
index 0585f1f7..589e7623 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -20,6 +20,9 @@ import type {
   QueryConfig,
   RawBQLQuery,
 } from './types';
+import type { DRAFT_EnrichedBormSchema } from './types/schema/enriched.draft';
+import { enrichSchemaDraft } from './enrichSchema.draft';
+import { runSurrealDbQueryMachine2 } from './stateMachine/query/surql2/run';
 
 export * from './types';
 
@@ -37,7 +40,7 @@ class BormClient {
   private config: BormConfig;
   private initializing = false;
   private subscribers: ((err?: unknown) => void)[] = [];
-  private initialized: { enrichedSchema: EnrichedBormSchema; dbHandles: DBHandles } | null = null;
+  private initialized: { enrichedSchema: EnrichedBormSchema; draftSchema: DRAFT_EnrichedBormSchema; dbHandles: DBHandles } | null = null;
 
   constructor({ schema, config }: BormProps) {
     this.schema = schema;
@@ -140,6 +143,7 @@
       );
 
       this.initialized = {
+        draftSchema: enrichSchemaDraft(this.schema),
         enrichedSchema: enrichSchema(this.schema, dbHandles),
         dbHandles,
       };
@@ -149,6 +153,7 @@
         s();
       }
     } catch (e) {
+      console.error('error initializing', e);
       const subscribers = this.subscribers;
       this.subscribers = [];
       for (const s of subscribers) {
@@ -201,6 +206,12 @@
     const isBatched = Array.isArray(query);
     const queries = isBatched ? query : [query];
+    const surrealDBClient = initialized.dbHandles.surrealDB?.get('default')?.client;
+    if (surrealDBClient) {
+      const result = await runSurrealDbQueryMachine2(queries, initialized.draftSchema, qConfig, surrealDBClient);
+      return isBatched ? result : result[0];
+    }
+
     const [errorRes, res] = await tryit(runQueryMachine)(
       queries,
       initialized.enrichedSchema,
diff --git a/src/stateMachine/mutation/bql/parse.ts b/src/stateMachine/mutation/bql/parse.ts
index 91da8573..8ae6d269 100644
--- a/src/stateMachine/mutation/bql/parse.ts
+++ b/src/stateMachine/mutation/bql/parse.ts
@@ -3,9 +3,8 @@ import { traverse } from 'object-traversal';
 import { isArray, isObject, mapEntries, pick, shake } from 'radash';
 import { v4 as uuidv4 } from 'uuid';
 
-import { deepRemoveMetaData } from '../../../../tests/helpers/matchers';
 import { computeField } from '../../../engine/compute';
-import { getCurrentFields, getCurrentSchema, getParentNode, oFilter } from '../../../helpers';
+import { deepRemoveMetaData, getCurrentFields, getCurrentSchema, getParentNode, oFilter } from '../../../helpers';
 import type {
   BormOperation,
   BQLMutationBlock,
diff --git a/src/stateMachine/query/surql2/buildLogical.ts b/src/stateMachine/query/surql2/buildLogical.ts
new file mode 100644
index 00000000..321f609d
--- /dev/null
+++ b/src/stateMachine/query/surql2/buildLogical.ts
@@ -0,0 +1,653 @@
+import z from "zod/v4";
+import { type NestedBQLFilter, NestedBQLFilterParser, StrictBQLValueFilterParser, type BQLField, type BQLFilter, type BQLFilterValue, type BQLFilterValueList, type BQLQuery } from "../../../types/requests/parser";
+import type { DRAFT_EnrichedBormDataField, DRAFT_EnrichedBormEntity, DRAFT_EnrichedBormField, DRAFT_EnrichedBormLinkField, DRAFT_EnrichedBormRefField, DRAFT_EnrichedBormRelation, DRAFT_EnrichedBormRoleField, DRAFT_EnrichedBormSchema } from "../../../types/schema/enriched.draft";
+import type { DataSource, Filter, ListFilter, LogicalQuery, Projection, ProjectionField, ScalarFilter, Sort } from "./logical";
+
+export const buildLogicalQuery = (query: BQLQuery, schema: DRAFT_EnrichedBormSchema, metadata: boolean): LogicalQuery => {
+  const thingSchema = schema[query.$thing];
+  const projection = buildProjection({ fields: query.$fields, thing: thingSchema, schema, metadata });
+  const filter = query.$filter ? buildFilter(query.$filter, thingSchema, schema) : undefined;
+  const ids = Array.isArray(query.$id) ? query.$id : query.$id ? [query.$id] : [];
+  const cardinality = ids.length === 1 || isUniqueFilter(thingSchema, filter) ? 'ONE' : 'MANY';
+  const source: DataSource = ids.length > 0 ? {
+    type: 'record_pointer',
+    thing: [thingSchema.name, ...thingSchema.subTypes],
+    ids,
+  } : {
+    type: 'table_scan',
+    thing: [thingSchema.name, ...thingSchema.subTypes],
+  };
+
+  return {
+    source,
+    projection,
+    filter,
+    limit: validateLimit(query.$limit),
+    offset: validateOffset(query.$offset),
+    sort: validateSort(projection, buildSort(query.$sort)),
+    cardinality,
+  };
+}
+
+const buildProjection = (params: {
+  fields?: BQLField[];
+  thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation;
+  schema: DRAFT_EnrichedBormSchema;
+  metadata: boolean;
+}): Projection => {
+  const { fields, thing, schema, metadata } = params;
+  const projectionFields: ProjectionField[] = [];
+
+  if (metadata) {
+    projectionFields.push({
+      type: 'metadata',
+      path: '$id',
+    });
+    projectionFields.push({
+      type: 'metadata',
+      path: '$thing',
+    });
+  }
+
+  // No fields specified. Project all fields.
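+  // Example (hypothetical `user` thing with a `name` data field and a `posts`
+  // link field): `$fields: ['name', { $path: 'posts' }]` yields a `data`
+  // projection field for `name` and a `nested_reference` field for `posts`;
+  // omitting `$fields` projects every entry of `thing.fields` instead.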
+  if (!fields) {
+    for (const field of Object.values(thing.fields)) {
+      projectionFields.push(buildSimpleFieldProjection(field));
+    }
+    return { fields: projectionFields };
+  }
+
+  for (const field of fields) {
+    if (typeof field === 'string') {
+      if (field === '$id' || field === '$thing') {
+        projectionFields.push({
+          type: 'metadata',
+          path: field,
+        });
+        continue;
+      }
+
+      const fieldSchema = thing.fields[field];
+      if (!fieldSchema) {
+        throw new Error(`Field ${field} not found in ${thing.name}`);
+      }
+      projectionFields.push(buildSimpleFieldProjection(fieldSchema));
+      continue;
+    }
+
+    const alias = validateAlias(field.$as);
+
+    if (field.$path === '$id' || field.$path === '$thing') {
+      projectionFields.push({
+        type: 'metadata',
+        path: field.$path,
+        alias,
+      });
+      continue;
+    }
+
+    const fieldSchema = thing.fields[field.$path];
+    if (!fieldSchema) {
+      throw new Error(`Field ${field.$path} not found in ${thing.name}`);
+    }
+
+    if (fieldSchema.type === 'constant' || fieldSchema.type === 'computed') {
+      continue;
+    }
+
+    if (fieldSchema.type === 'data' || fieldSchema.type === 'ref') {
+      projectionFields.push(buildSimpleFieldProjection(fieldSchema, alias));
+      continue;
+    }
+
+    const oppositeThingSchema = schema[fieldSchema.opposite.thing];
+    const oppositeProjection = buildProjection({ fields: field.$fields, thing: oppositeThingSchema, schema, metadata });
+    const filter = '$filter' in field && field.$filter ? buildFilter(field.$filter, oppositeThingSchema, schema) : undefined;
+    projectionFields.push({
+      type: 'nested_reference',
+      path: field.$path,
+      projection: oppositeProjection,
+      cardinality: typeof field.$id === 'string' || isUniqueFilter(oppositeThingSchema, filter) ? 'ONE' : fieldSchema.cardinality,
+      alias,
+      ids: typeof field.$id === 'string' ?
[field.$id] : field.$id, + filter, + limit: validateLimit(field.$limit), + offset: validateOffset(field.$offset), + sort: validateSort(oppositeProjection, buildSort(field.$sort)), + }); + } + + return { + fields: projectionFields, + } +}; + +const buildSimpleFieldProjection = (field: DRAFT_EnrichedBormField, alias?: string): ProjectionField => { + if (field.type === 'data') { + return { + type: 'data', + path: field.name, + alias, + }; + } + if (field.type === 'ref' && field.contentType === 'FLEX') { + return { + type: 'flex', + path: field.name, + cardinality: field.cardinality, + alias, + } + } + return { + type: 'reference', + path: field.name, + cardinality: field.cardinality, + alias, + } +}; + +const buildFilter = ( + filter: BQLFilter | BQLFilter[], + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation, + schema: DRAFT_EnrichedBormSchema +): Filter | undefined => { + if (Array.isArray(filter)) { + const filters = filter.map((f) => buildFilter(f, thing, schema)).filter((f) => !!f); + return { + type: 'or', + filters: filters, + }; + } + + const filters = buildFilters(filter, thing, schema); + return { + type: 'and', + filters, + }; +} + +const buildFilters = ( + filter: BQLFilter, + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation, + schema: DRAFT_EnrichedBormSchema, +): Filter[] => { + const filters: Filter[] = []; + + for (const [key, value] of Object.entries(filter)) { + if (key === '$not' && filter.$not) { + pushIfDefined(filters, buildNotFilter(filter.$not, thing, schema)); + continue; + } + + if (key === '$or' && filter.$or) { + pushIfDefined(filters, buildOrFilter(filter.$or, thing, schema)); + continue; + } + + const fieldSchema = thing.fields[key]; + + if (!fieldSchema) { + throw new Error(`Field ${key} not found in ${thing.name}`); + } + + if (fieldSchema.type === 'constant' || fieldSchema.type === 'computed') { + throw new Error(`Filtering on constant or computed field ${key} is not supported`); + } + + if (fieldSchema.type === 'data') { + if (value !== undefined) { + filters.push(buildDataFieldFilter(fieldSchema, value as BQLFilterValue | BQLFilterValueList | NestedBQLFilter)); + } + continue; + } + + if (fieldSchema.type === 'ref') { + if (value !== undefined) { + pushIfDefined(filters, buildRefFieldFilter(fieldSchema, value)); + } + continue; + } + + if (value !== undefined) { + filters.push(buildLinkFieldFilter(fieldSchema, value, schema)); + } + } + + return filters; +}; + +const buildNotFilter = ( + $not: BQLFilter, + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation, + schema: DRAFT_EnrichedBormSchema +): Filter | undefined => { + const inner = buildFilter($not, thing, schema); + return inner ? { + type: 'not', + filter: inner, + } : undefined; +}; + +const buildOrFilter = ( + $or: BQLFilter[], + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation, + schema: DRAFT_EnrichedBormSchema, +): Filter | undefined => { + return buildFilter($or, thing, schema); +}; + +const buildDataFieldFilter = ( + field: DRAFT_EnrichedBormDataField, + filter: BQLFilterValue | BQLFilterValueList | NestedBQLFilter, +): Filter => { + // No-sub field. Only scalar and list filters are allowed. + // If `right` is not of the same type as the field, the query will return an empty result. + // Ideally SurrealDB's query planner should skip the query. 
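+  // Example (hypothetical `age` data field): `{ age: { $gte: 18, $lt: 65 } }`
+  // parses via StrictBQLValueFilterParser below and becomes an `and` of two
+  // scalar filters, roughly `age >= $p1 AND age < $p2` once parameterized.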
+ + // scalar and list operators + const result = StrictBQLValueFilterParser.safeParse(filter); + if (result.success) { + const filters: Filter[] = []; + for (const [op, right] of Object.entries(result.data)) { + if (op === '$exists') { + filters.push({ + type: 'null', + op: right ? 'IS NOT' : 'IS', + left: field.name, + tunnel: false, + }); + continue; + } + if ((op === '$eq' || op === '$ne') && right === null) { + filters.push({ + type: 'null', + op: op === '$eq' ? 'IS' : 'IS NOT', + left: field.name, + tunnel: false, + }); + continue; + } + const scalarOp = scalarOpMap[op]; + if (scalarOp) { + filters.push({ + type: 'scalar', + op: scalarOp, + left: field.name, + right: right as BQLFilterValue, + }); + continue; + } + const listOp = listOpMap[op]; + if (listOp) { + filters.push({ + type: 'list', + op: listOp, + left: field.name, + right: right as BQLFilterValueList, + }); + continue; + } + throw new Error(`Invalid filter operation: ${op}`); + } + return { + type: 'and', + filters, + }; + } + + // List value + if (Array.isArray(filter)) { + if (field.cardinality === 'ONE') { + return { + type: 'list', + op: 'IN', + left: field.name, + right: filter, + }; + } + + return { + type: 'list', + op: 'CONTAINSANY', + left: field.name, + right: filter, + }; + } + + // Single value + if (field.cardinality === 'ONE') { + if (filter === null) { + return { + type: 'null', + op: 'IS', + left: field.name, + tunnel: false, + }; + } + return { + type: 'scalar', + op: '=', + left: field.name, + right: filter as BQLFilterValue, + }; + } + + return { + type: 'scalar', + op: 'CONTAINS', + left: field.name, + right: filter as BQLFilterValue, + }; +} + +const buildRefFieldFilter = (field: DRAFT_EnrichedBormRefField, filter: BQLFilterValue | BQLFilterValueList | NestedBQLFilter | BQLFilter[]): Filter | undefined => { + if (field.contentType === 'REF') { + if (field.cardinality === 'ONE') { + if (typeof filter === 'string') { + return { + type: 'ref', + op: 'IN', + left: field.name, + right: [filter], + tunnel: false, + }; + } + if (StringArrayParser.safeParse(filter).success) { + return { + type: 'ref', + op: 'IN', + left: field.name, + right: filter as string[], + tunnel: false, + }; + } + throw new Error(`Invalid filter value for ref field ${field.name}: ${JSON.stringify(filter)}`); + } + if (typeof filter === 'string') { + return { + type: 'ref', + op: 'CONTAINSANY', + left: field.name, + right: [filter], + tunnel: false, + }; + } + if (StringArrayParser.safeParse(filter).success) { + return { + type: 'ref', + op: 'CONTAINSANY', + left: field.name, + right: filter as string[], + tunnel: false, + }; + } + throw new Error(`Invalid filter value for ref field ${field.name}: ${JSON.stringify(filter)}`); + } + // The cast can't be determined. + throw new Error('Filtering by FLEX reference is not supported'); +} + +const buildLinkFieldFilter = ( + field: DRAFT_EnrichedBormLinkField | DRAFT_EnrichedBormRoleField, + filter: BQLFilterValue | BQLFilterValueList | NestedBQLFilter | BQLFilter[], + schema: DRAFT_EnrichedBormSchema, +): Filter => { + const tunnel = field.type === 'link' && field.target === 'role'; + + if (filter === null) { + return { + type: 'null', + op: 'IS', + left: field.name, + tunnel, + }; + } + + if (typeof filter === 'string') { + return { + type: 'ref', + op: field.cardinality === 'ONE' ? 'IN' : 'CONTAINSANY', + left: field.name, + right: [filter], + tunnel, + }; + } + + if (StringArrayParser.safeParse(filter).success) { + return { + type: 'ref', + op: field.cardinality === 'ONE' ? 
'IN' : 'CONTAINSANY', + left: field.name, + right: filter as string[], + tunnel, + }; + } + + const nestedFilter = z.union([NestedBQLFilterParser, z.array(NestedBQLFilterParser)]).safeParse(filter); + + if (nestedFilter.error) { + throw new Error(`Invalid nested filter: ${nestedFilter.error.message}`); + } + + const oppositeThingSchema = schema[field.opposite.thing]; + + if (!oppositeThingSchema) { + throw new Error(`Opposite thing ${field.opposite.thing} not found`); + } + + const oppositeThings: [string, ...string[]] = [field.opposite.thing, ...oppositeThingSchema.subTypes]; + + if (Array.isArray(nestedFilter.data)) { + const filters = nestedFilter.data.map((f) => buildLinkFieldFilter(field, f, schema)).filter((f) => !!f); + return { + type: 'or', + filters, + }; + } + + const { + $eq: _eq, + $ne: _ne, + $contains: _contains, + $containsNot: _containsNot, + $in: _in, + $nin: _nin, + $containsAll: _containsAll, + $containsAny: _containsAny, + $containsNone: _containsNone, + ...rest + } = nestedFilter.data; + + for (const unsupportedOp of ['$gt', '$lt', '$gte', '$lte']) { + if (rest[unsupportedOp]) { + throw new Error(`Filtering ${field.type} field with ${unsupportedOp} operator is not supported`); + } + } + + const filters: Filter[] = []; + + for (const op of ['$exists', '$eq', '$ne', '$contains', '$containsNot']) { + const value = nestedFilter.data[op]; + if (value === undefined) { + continue; + } + if (op === '$exists') { + filters.push({ + type: 'null', + op: value ? 'IS NOT' : 'IS', + left: field.name, + tunnel, + }); + continue; + } + if ((op === '$eq' || op === '$ne') && value === null) { + filters.push({ + type: 'null', + op: op === '$eq' ? 'IS' : 'IS NOT', + left: field.name, + tunnel, + }); + continue; + } + if (typeof value !== 'string') { + throw new Error(`Filter value for ${field.type} field with operator ${op} must be a string`); + } + filters.push({ + type: 'ref', + op: op === '$eq' ? 
'IN' : op === '$ne' ? 'NOT IN' : op === '$contains' ? 'CONTAINSANY' : 'CONTAINSNONE',
+      left: field.name,
+      right: [value],
+      thing: oppositeThings,
+      tunnel,
+    });
+  }
+
+  for (const op of ['$in', '$nin', '$containsAll', '$containsAny', '$containsNone']) {
+    const value = nestedFilter.data[op];
+    if (value === undefined) {
+      continue;
+    }
+    const stringArray = StringArrayParser.safeParse(value);
+    if (!stringArray.success) {
+      throw new Error(`Filter value for ${field.type} field with operator ${op} must be a string array`);
+    }
+    const listOp = listOpMap[op];
+    if (!listOp) {
+      throw new Error(`Invalid list operator: ${op}`);
+    }
+    filters.push({
+      type: 'ref',
+      op: listOp,
+      left: field.name,
+      right: stringArray.data,
+      thing: oppositeThings,
+      tunnel,
+    });
+  }
+
+  const oppositeSchema = schema[field.opposite.thing];
+  if (!oppositeSchema) {
+    throw new Error(`Unknown thing: ${field.opposite.thing}`);
+  }
+
+  const nestedLogicalFilter = buildFilter(rest, oppositeSchema, schema);
+  if (nestedLogicalFilter) {
+    filters.push({
+      type: 'nested',
+      path: field.name,
+      filter: nestedLogicalFilter,
+      cardinality: field.cardinality,
+    });
+  }
+
+  return {
+    type: 'and',
+    filters,
+  };
+}
+
+const isUniqueFilter = (thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation, filter?: Filter): boolean => {
+  if (!filter) {
+    return false;
+  }
+  if (filter.type === 'scalar') {
+    if (filter.op !== '=') {
+      return false;
+    }
+    const field = thing.fields[filter.left];
+    if (!field) {
+      throw new Error(`Field ${filter.left} not found in ${thing.name}`);
+    }
+    return field.type === 'data' && field.unique;
+  }
+  if (filter.type === 'list') {
+    if (filter.op !== 'IN' || filter.right.length > 1) {
+      return false;
+    }
+    const field = thing.fields[filter.left];
+    if (!field) {
+      throw new Error(`Field ${filter.left} not found in ${thing.name}`);
+    }
+    return field.type === 'data' && field.unique;
+  }
+  if (filter.type === 'and') {
+    return filter.filters.some((f) => isUniqueFilter(thing, f));
+  }
+  return false;
+};
+
+const buildSort = (sort?: ({ field: string; desc?: boolean } | string)[]): Sort[] | undefined => {
+  if (!sort || sort.length === 0) {
+    return undefined;
+  }
+  return sort.map((s) => {
+    if (typeof s === 'string') {
+      return { field: s, desc: false };
+    }
+    return { field: s.field, desc: s.desc ?? false };
+  });
+};
+
+const scalarOpMap: Record<string, ScalarFilter['op']> = {
+  '$eq': '=',
+  '$ne': '!=',
+  '$gt': '>',
+  '$lt': '<',
+  '$gte': '>=',
+  '$lte': '<=',
+  '$contains': 'CONTAINS',
+  '$containsNot': 'CONTAINSNOT',
+};
+
+const listOpMap: Record<string, ListFilter['op']> = {
+  '$in': 'IN',
+  '$nin': 'NOT IN',
+  '$containsAll': 'CONTAINSALL',
+  '$containsAny': 'CONTAINSANY',
+  '$containsNone': 'CONTAINSNONE',
+};
+
+const StringArrayParser = z.array(z.string());
+
+/**
+ * Push the item to the array if it is defined.
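+ * Example: `pushIfDefined(filters, buildNotFilter(filter.$not, thing, schema))`
+ * appends only when the builder actually produced a filter.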
+ */ +const pushIfDefined = (array: T[], item: T | undefined) => { + if (item !== undefined) { + array.push(item); + } +}; + +const validateAlias = (alias?: string): string | undefined => { + if (alias !== undefined && !/^[a-zA-Z0-9_-]+$/.test(alias)) { + throw new Error(`Invalid alias: ${alias}`); + } + return alias; +} + +const validateLimit = (limit?: number): number | undefined => { + if (limit !== undefined && (typeof limit !== 'number' || limit < 0)) { + throw new Error(`Invalid limit: ${limit}`); + } + return limit; +} + +const validateOffset = (offset?: number): number | undefined => { + if (offset !== undefined && (typeof offset !== 'number' || offset < 0)) { + throw new Error(`Invalid offset: ${offset}`); + } + return offset; +} + +const validateSort = (projection: Projection, sort?: Sort[]): Sort[] | undefined => { + if (!sort || sort.length === 0) { + return undefined; + } + const projectionSet = new Set(projection.fields.map((f) => f.type === 'metadata' ? f.path : f.alias ?? f.path)); + for (const s of sort) { + if (!projectionSet.has(s.field)) { + throw new Error(`Missing sorter field in the selected fields: ${s.field}`); + } + } + return sort; +} diff --git a/src/stateMachine/query/surql2/buildSurql.ts b/src/stateMachine/query/surql2/buildSurql.ts new file mode 100644 index 00000000..01b51d96 --- /dev/null +++ b/src/stateMachine/query/surql2/buildSurql.ts @@ -0,0 +1,341 @@ +import type { + DataField, + DataSource, + Filter, + FlexField, + LogicalQuery, + MetadataField, + NestedReferenceField, + Projection, + ReferenceField, + Sort, +} from "./logical"; + +export type SurqlParams = Record; + +/** + * NOTE: Mutate `params`. + */ +export const buildSurql = (query: LogicalQuery, params: SurqlParams): string => { + const lines: string[] = []; + const level = query.cardinality === 'MANY' ? 0 : 1; + + if (query.cardinality === 'ONE') { + lines.push('array::first(') + } + lines.push(buildProjection(query.projection, level, params)); + lines.push(buildFrom(query.source, level, params)); + const filter = query.filter && buildFilter(query.filter, params); + if (filter) { + lines.push(indent(`WHERE ${filter}`, level)); + } + if (query.sort && query.sort.length > 0) { + lines.push(buildOrderBy(query.sort, level)); + } + if (query.limit !== undefined) { + lines.push(indent(`LIMIT ${query.limit}`, level)); + } + if (query.offset !== undefined) { + lines.push(indent(`START ${query.offset}`, level)); + } + if (query.cardinality === 'ONE') { + lines.push(')') + } + + return lines.join('\n'); +}; + +/** + * NOTE: Mutate `params`. 
+ */ +const buildProjection = (projection: Projection, level: number, params: SurqlParams): string => { + const fieldLines: string[] = []; + const fieldLevel = level + 1; + for (const field of projection.fields) { + if (field.type === 'metadata') { + fieldLines.push(buildMetadataFieldProjection(field, fieldLevel)); + } else if (field.type === 'data') { + fieldLines.push(buildDataFieldProjection(field, fieldLevel)); + } else if (field.type === 'reference') { + fieldLines.push(buildReferenceFieldProjection(field, fieldLevel)); + } else if (field.type === 'nested_reference') { + fieldLines.push(buildNestedFieldProjection(field, fieldLevel, params)); + } else if (field.type === 'flex') { + fieldLines.push(buildFlexFieldProjection(field, fieldLevel)); + } + } + + const lines: string[] = []; + lines.push(indent('SELECT', level)); + lines.push(fieldLines.join(',\n')); + + return lines.join('\n'); +}; + +const buildMetadataFieldProjection = (field: MetadataField, level: number) => { + if (field.path === '$id') { + return indent(`record::id(id) AS ${esc(field.alias ?? '$id')}`, level); + } else if (field.path === '$thing') { + return indent(`record::tb(id) AS ${esc(field.alias ?? '$thing')}`, level); + } + throw new Error(`Unsupported metadata field: ${field.path}`); +}; + +const buildDataFieldProjection = (field: DataField, level: number) => { + if (field.path === 'id') { + return indent(`record::id(id) AS ${esc(field.alias ?? 'id')}`, level); + } + const escapedPath = esc(field.path); + if (field.alias) { + return indent(`${escapedPath} AS ${esc(field.alias)}`, level); + } + return indent(escapedPath, level); +}; + +const buildReferenceFieldProjection = (field: ReferenceField, level: number) => { + const { path, alias, cardinality } = field; + const escapedPath = esc(path); + const escapedAlias = esc(alias || path); + if (cardinality === 'ONE') { + return indent(`array::first(SELECT VALUE record::id(id) FROM $this.${escapedPath}[*]) AS ${escapedAlias}`, level); + } + return indent(`(SELECT VALUE record::id(id) FROM $this.${escapedPath}[*]) AS ${escapedAlias}`, level); +}; + +const buildNestedFieldProjection = (field: NestedReferenceField, level: number, params: SurqlParams) => { + // SELECT + // ( + // SELECT * FROM $this.ref_one + // ) as ref_one + // FROM t_a + const lines: string[] = []; + if (field.cardinality === 'MANY') { + lines.push(indent('(', level)); + } else { + lines.push(indent('array::first(', level)); + } + lines.push(buildProjection(field.projection, level + 1, params)); + const filter = field.filter ? 
buildFilter(field.filter, params) : undefined;
+  lines.push(indent(`FROM $this.${esc(field.path)}[*]`, level + 1));
+  const conditions: string[] = [];
+  if (field.ids && field.ids.length > 0) {
+    const ids = field.ids.map((i) => `$${insertParam(params, i)}`);
+    if (ids.length === 1) {
+      conditions.push(`record::id(id) = ${ids[0]}`);
+    } else {
+      conditions.push(`record::id(id) IN [${ids.join(', ')}]`);
+    }
+  }
+  if (filter) {
+    conditions.push(filter);
+  }
+  if (conditions.length > 0) {
+    lines.push(indent(`WHERE ${conditions.join(' AND ')}`, level + 1));
+  }
+  if (field.sort && field.sort.length > 0) {
+    lines.push(buildOrderBy(field.sort, level + 1));
+  }
+  if (field.limit !== undefined) {
+    lines.push(indent(`LIMIT ${field.limit}`, level + 1));
+  }
+  if (field.offset !== undefined) {
+    lines.push(indent(`START ${field.offset}`, level + 1));
+  }
+  lines.push(indent(`) AS ${esc(field.alias || field.path)}`, level));
+  return lines.join('\n');
+};
+
+const buildFlexFieldProjection = (field: FlexField, level: number) => {
+  const { path, alias, cardinality } = field;
+  const escapedPath = esc(path);
+  const escapedAlias = esc(alias || path);
+  if (cardinality === 'ONE') {
+    return indent(`${escapedPath} && IF type::is::record(${escapedPath}) { record::id(${escapedPath}) } ELSE { ${escapedPath} } AS ${escapedAlias}`, level);
+  }
+  return indent(`${escapedPath} && ${escapedPath}.map(|$i| IF type::is::record($i) { record::id($i)} ELSE { $i }) AS ${escapedAlias}`, level);
+}
+
+const buildFrom = (source: DataSource, level: number, params: SurqlParams): string => {
+  const lines: string[] = [];
+  switch (source.type) {
+    case 'table_scan': {
+      lines.push(indent(`FROM ${source.thing.map(esc).join(', ')}`, level));
+      break;
+    }
+    case 'record_pointer': {
+      const pointers = source.thing
+        .flatMap((t) => source.ids.map((i) => `${esc(t)}:${esc(i)}`))
+        .map((p) => `type::record($${insertParam(params, p)})`)
+        .join(', ');
+      lines.push(indent(`FROM ${pointers}`, level));
+      break;
+    }
+    case 'subquery': {
+      lines.push(indent(source.cardinality === 'MANY' ? 'FROM array::distinct(array::flatten(' : 'FROM (', level));
+      lines.push(indent(`SELECT VALUE ${esc(source.oppositePath)}`, level + 1));
+      lines.push(buildFrom(source.source, level + 1, params));
+      const filter = source.filter ? buildFilter(source.filter, params) : undefined;
+      if (filter) {
+        lines.push(indent(`WHERE ${filter}`, level + 1));
+      }
+      lines.push(indent(source.cardinality === 'MANY' ? '))' : ')', level));
+      break;
+    }
+  }
+  return lines.join('\n');
+}
+
+/**
+ * Mutate `params`.
+ */
+const buildFilter = (filter: Filter, params: Record<string, unknown>, prefix?: string): string | undefined => {
+  const _prefix = prefix ?? '';
+  switch (filter.type) {
+    case 'scalar': {
+      const path = filter.left === 'id'
+        ? `record::id(${_prefix}id)`
+        : `${_prefix}${esc(filter.left)}`;
+      const key = insertParam(params, filter.right);
+      return `${path} ${filter.op} $${key}`;
+    }
+    case 'list': {
+      const items = filter.right.map((i) => `$${insertParam(params, i)}`).join(', ');
+      const path = `${_prefix}${esc(filter.left)}`;
+      return `${path} ${filter.op} [${items}]`;
+    }
+    case 'ref': {
+      const path = filter.left === 'id'
+        ? `record::id(${_prefix}id)`
+        : `${_prefix}${esc(filter.left)}`;
+      if (filter.thing) {
+        const right = filter.thing.flatMap((t) => filter.right.map((i) => {
+          const pointer = `${esc(t)}:${esc(i)}`;
+          const key = insertParam(params, pointer);
+          return `type::record($${key})`;
+        }));
+        if (right.length === 1) {
+          if (filter.op === 'IN') {
+            return `${path} = ${right[0]}`;
+          }
+          if (filter.op === 'NOT IN') {
+            return `${path} != ${right[0]}`;
+          }
+          if (filter.op === 'CONTAINSANY') {
+            return `${right[0]} IN ${path}`;
+          }
+          if (filter.op === 'CONTAINSNONE') {
+            return `${right[0]} NOT IN ${path}`;
+          }
+        }
+        return `${path} ${filter.op} [${right.join(', ')}]`;
+      }
+      if (filter.right.length === 1) {
+        if (filter.op === 'IN') {
+          if (filter.tunnel) {
+            return `(array::first(${path}) && record::id(array::first(${path})) = $${insertParam(params, filter.right[0])})`;
+          }
+          return `${path} && record::id(${path}) = $${insertParam(params, filter.right[0])}`;
+        }
+        if (filter.op === 'NOT IN') {
+          if (filter.tunnel) {
+            return `(!array::first(${path}) || record::id(array::first(${path})) != $${insertParam(params, filter.right[0])})`;
+          }
+          return `${path} && record::id(${path}) != $${insertParam(params, filter.right[0])}`;
+        }
+        if (filter.op === 'CONTAINSANY') {
+          if (filter.tunnel) {
+            return `$${insertParam(params, filter.right[0])} IN ${path}.map(|$i| record::id($i))`;
+          }
+          return `$${insertParam(params, filter.right[0])} IN (${path} ?: []).map(|$i| record::id($i))`;
+        }
+        if (filter.op === 'CONTAINSNONE') {
+          if (filter.tunnel) {
+            return `$${insertParam(params, filter.right[0])} NOT IN ${path}.map(|$i| record::id($i))`;
+          }
+          return `$${insertParam(params, filter.right[0])} NOT IN (${path} ?: []).map(|$i| record::id($i))`;
+        }
+      }
+      if (filter.tunnel) {
+        return `${path}.map(|$i| record::id($i)) ${filter.op} [${filter.right.map((i) => `$${insertParam(params, i)}`).join(', ')}]`;
+      }
+      return `(${path} ?: []).map(|$i| record::id($i)) ${filter.op} [${filter.right.map((i) => `$${insertParam(params, i)}`).join(', ')}]`;
+    }
+    case 'null': {
+      if (filter.tunnel) {
+        return `array::len(${_prefix}${esc(filter.left)}) ${filter.op === 'IS' ? '=' : '>'} 0`;
+      }
+      return `${_prefix}${esc(filter.left)} ${filter.op} NONE`;
+    }
+    case 'and': {
+      const conditions = filter.filters.map((f) => {
+        const condition = buildFilter(f, params, prefix);
+        return condition ? `(${condition})` : undefined;
+      }).filter((i) => !!i);
+      return conditions.length > 0 ? conditions.join(' AND ') : undefined;
+    }
+    case 'or': {
+      const conditions = filter.filters.map((f) => {
+        const condition = buildFilter(f, params, prefix);
+        return condition ? `(${condition})` : undefined;
+      }).filter((i) => !!i);
+      return conditions.length > 0 ? conditions.join(' OR ') : undefined;
+    }
+    case 'not': {
+      return `NOT(${buildFilter(filter.filter, params, prefix)})`;
+    }
+    case 'nested': {
+      const path = `${_prefix}${esc(filter.path)}`;
+      if (filter.cardinality === 'ONE') {
+        return buildFilter(filter.filter, params, `${path}.`);
+      }
+      const subFilter = buildFilter(filter.filter, params);
+      if (!subFilter) {
+        return undefined;
+      }
+      return `${path}[WHERE ${subFilter}]`;
+    }
+  }
+}
+
+const buildOrderBy = (sort: Sort[], level: number): string => {
+  const sorters = sort.map((s) => `${esc(s.field)} ${s.desc ? 'DESC' : 'ASC'}`).join(', ');
+  return indent(`ORDER BY ${sorters}`, level);
+}
+
+const indent = (text: string, level: number) => {
+  const spaces = '  '.repeat(level);
+  return `${spaces}${text}`;
+};
+
+/**
+ * Insert `value` into `params` and return the param key.
+ */
+const insertParam = (params: Record<string, unknown>, value: unknown): string => {
+  let key = generateAlphaKey(5);
+  while (params[key] !== undefined) {
+    key = generateAlphaKey(5);
+  }
+  params[key] = value;
+  return key;
+}
+
+const generateAlphaKey = (length: number): string => {
+  const letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
+  let result = '';
+  for (let i = 0; i < length; i++) {
+    result += letters[Math.floor(Math.random() * letters.length)];
+  }
+  return result;
+};
+
+/**
+ * Escape an identifier for SurrealDB.
+ * Only escapes when the identifier contains non-alphanumeric characters or starts with a number.
+ */
+const esc = (identifier: string): string => {
+  // Check if identifier starts with a number or contains non-alphanumeric characters (excluding underscore)
+  const needsEscaping = /^[0-9]/.test(identifier) || /[^a-zA-Z0-9_]/.test(identifier);
+  return needsEscaping ? `⟨${identifier}⟩` : identifier;
+};
\ No newline at end of file
diff --git a/src/stateMachine/query/surql2/logical.ts b/src/stateMachine/query/surql2/logical.ts
new file mode 100644
index 00000000..2b427b97
--- /dev/null
+++ b/src/stateMachine/query/surql2/logical.ts
@@ -0,0 +1,169 @@
+import type { BQLFilterValue, BQLFilterValueList } from "../../../types/requests/parser";
+
+export interface LogicalQuery {
+  source: DataSource;
+  projection: Projection;
+  filter?: Filter;
+  limit?: number;
+  offset?: number;
+  sort?: Sort[];
+  cardinality: 'MANY' | 'ONE';
+};
+
+export type DataSource =
+  | TableScan
+  | RecordPointer
+  | SubQuery;
+
+export interface TableScan {
+  type: 'table_scan';
+  thing: [string, ...string[]];
+};
+
+export interface RecordPointer {
+  type: 'record_pointer';
+  thing: [string, ...string[]];
+  ids: string[];
+};
+
+export interface SubQuery {
+  type: 'subquery';
+  source: DataSource;
+  /**
+   * The link/role field path of the `source` thing. Example: If the source thing is "Post" then path is "author", not "authoredPosts".
+   */
+  oppositePath: string;
+  /**
+   * The cardinality of the reference in the DB. A COMPUTED reference is always 'MANY'.
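+   * Example (hypothetical): with `oppositePath: 'author'` and cardinality
+   * 'MANY', buildSurql renders this source as
+   * `FROM array::distinct(array::flatten(SELECT VALUE author FROM ...))`.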
+ */ + filter?: Filter; + /** + * This is the cardinality of + */ + cardinality: 'MANY' | 'ONE'; +}; + +export interface Projection { + fields: ProjectionField[]; +} + +export type ProjectionField = + | MetadataField + | DataField + | ReferenceField + | NestedReferenceField + | FlexField; + +export interface MetadataField { + type: 'metadata'; + path: '$id' | '$thing'; + alias?: string; +}; + +export interface DataField { + type: 'data'; + path: string; + alias?: string; +}; + +export interface ReferenceField { + type: 'reference'; + path: string; + alias?: string; + cardinality: 'MANY' | 'ONE'; +}; + +export interface NestedReferenceField { + type: 'nested_reference'; + path: string; + projection: Projection; + ids?: string[]; + filter?: Filter; + alias?: string; + cardinality: 'MANY' | 'ONE'; + limit?: number; + offset?: number; + sort?: Sort[]; +}; + +export interface FlexField { + type: 'flex'; + path: string; + alias?: string; + cardinality: 'MANY' | 'ONE'; +}; + +export type Filter = + | ScalarFilter + | ListFilter + | RefFilter + | LogicalOp + | NotOp + | NestedFilter + | NullFilter; + +export interface ScalarFilter { + type: 'scalar'; + op: '=' | '!=' | '>' | '<' | '>=' | '<=' | 'CONTAINS' | 'CONTAINSNOT'; + left: string; + right: BQLFilterValue; +}; + +export interface ListFilter { + type: 'list'; + op: 'IN' | 'NOT IN' | 'CONTAINSALL' | 'CONTAINSANY' | 'CONTAINSNONE'; + left: string; + right: BQLFilterValueList; +}; + +export interface RefFilter { + type: 'ref'; + op: 'IN' | 'NOT IN' | 'CONTAINSALL' | 'CONTAINSANY' | 'CONTAINSNONE'; + left: string; + right: string[]; + /** + * Used for reference filter optimization when `cast` is 'record'. If specified the execution may use indexes. + * If not specified the filter will be transformed into `record::id() IN [, ...]`, + * which is a little bit slower than ` IN [type::record(), ...]` when both are executed without indexes. + */ + thing?: [string, ...string[]]; + /** + * True if it's a link field with target "role". + */ + tunnel: boolean; +}; + +export interface NullFilter { + type: 'null'; + op: 'IS' | 'IS NOT'; + left: string; + /** + * True if it's a link field with target "role". + */ + tunnel: boolean; +} + +export interface LogicalOp { + type: 'and' | 'or'; + filters: Filter[]; +}; + +export interface NotOp { + type: 'not'; + filter: Filter; +}; + +export interface NestedFilter { + type: 'nested'; + filter: Filter; + path: string; + cardinality: 'MANY' | 'ONE'; +} + +export type ScalarList = Scalar[]; +export type Scalar = string | number | boolean | null; + +export interface Sort { + field: string; + desc: boolean; +} diff --git a/src/stateMachine/query/surql2/optimize.ts b/src/stateMachine/query/surql2/optimize.ts new file mode 100644 index 00000000..352a58d6 --- /dev/null +++ b/src/stateMachine/query/surql2/optimize.ts @@ -0,0 +1,406 @@ +import z from "zod/v4"; +import type { DRAFT_EnrichedBormEntity, DRAFT_EnrichedBormField, DRAFT_EnrichedBormRelation, DRAFT_EnrichedBormSchema, Index } from "../../../types/schema/enriched.draft"; +import type { DataSource, Filter, ListFilter, LogicalQuery, NestedFilter, Projection, ProjectionField, RecordPointer, RefFilter, ScalarFilter, SubQuery, TableScan } from "./logical"; + +export const optimizeLogicalQuery = (query: LogicalQuery, schema: DRAFT_EnrichedBormSchema): LogicalQuery => { + const thing = getSourceThing(query.source, schema); + const filter = query.filter ? 
optimizeLocalFilter(query.filter, schema, thing) : undefined; + const { source, filter: optimizedFilter } = optimizeSource({ source: query.source, filter, schema, thing }); + + return { + source, + projection: query.projection, + filter: optimizedFilter, + cardinality: query.cardinality, + limit: query.limit, + offset: query.offset, + sort: query.sort, + } +}; + +/** + * If the source is a table scan and the filter is a nested filter, convert the filter to a relationship traversal. + */ +const optimizeSource = (params: { source: DataSource, filter?: Filter, schema: DRAFT_EnrichedBormSchema, thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation }): { source: DataSource, filter?: Filter } => { + const { source, filter, schema, thing } = params; + + if (source.type !== 'table_scan') { + return { source, filter }; + } + + // TODO: If we use SurrealDB(v3) REFERENCE, convert computed reference filter into relationship traversal. + + const [firstFilter, ...restFilters] = filter?.type === 'and' ? filter.filters : filter ? [filter] : []; + + const traversal = firstFilter?.type === 'scalar' || firstFilter?.type === 'list' + ? convertIdFilterToRecordPointer(firstFilter, source) + : firstFilter?.type === 'nested' + ? convertNestedFilterToRelationshipTraversal(firstFilter, schema, thing) + : firstFilter?.type === 'ref' + ? convertRefFilterToRelationshipTraversal(firstFilter, schema, thing) + : undefined; + + if (traversal) { + return { + source: traversal, + filter: restFilters.length === 0 + ? undefined + : restFilters.length === 1 + ? restFilters[0] + : { type: 'and', filters: restFilters }, + } + } + + return { + source, + filter: filter ? pushDownIndexedFilter(filter, thing) : undefined, + } +} + +const convertIdFilterToRecordPointer = (filter: ScalarFilter | ListFilter, source: TableScan): RecordPointer | undefined => { + if (filter.left !== 'id') { + return undefined; + } + if (filter.op === '=' && typeof filter.right === 'string') { + return { + type: 'record_pointer', + thing: [source.thing[0], ...source.thing.slice(1)], + ids: [filter.right], + } + } + if (filter.op === 'IN' && z.array(z.string()).safeParse(filter.right).success) { + return { + type: 'record_pointer', + thing: [source.thing[0], ...source.thing.slice(1)], + ids: filter.right as string[], + } + } + return undefined; +}; + +/** + * Return sub query if the filter can be converted to a relationship traversal. + */ +const convertRefFilterToRelationshipTraversal = ( + filter: RefFilter, + schema: DRAFT_EnrichedBormSchema, + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation, +): SubQuery | undefined => { + const field = thing.fields[filter.left]; + if (!field) { + throw new Error(`Field ${filter.left} not found in ${thing.name}`); + } + if ((field.type !== 'role' && field.type !== 'link') || (filter.op !== 'IN' && filter.op !== 'CONTAINSANY')) { + return undefined; + } + if (field.type === 'role') { + // We can't do this optimization for role fields that are not played by a link field with target 'relation'. + // This relation is only used as intermediary relation. 
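+    // Example (hypothetical): a role played only through link fields with
+    // target 'role' stays a plain filter, while a role backed by a link field
+    // with target 'relation' can be rewritten below into a record-pointer
+    // subquery over the opposite thing.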
+ const oppositeLinkField = schema[field.opposite.thing]?.fields?.[field.opposite.path]; + if (oppositeLinkField?.type !== 'link') { + throw new Error(`Role field ${field.name} in relation ${thing.name} is not played by a link field`); + } + if (oppositeLinkField.target !== 'relation') { + return undefined; + } + } + const { thing: oppositeThing, path: oppositePath, cardinality } = field.opposite; + const oppositeThingSchema = getThingSchema(oppositeThing, schema); + const source: RecordPointer = { + type: 'record_pointer', + thing: [oppositeThing, ...oppositeThingSchema.subTypes], + ids: filter.right, + }; + const traversal: SubQuery = { + type: 'subquery', + source, + oppositePath, + cardinality, + }; + return traversal; +}; + +const convertNestedFilterToRelationshipTraversal = ( + filter: NestedFilter, + schema: DRAFT_EnrichedBormSchema, + thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation, +): SubQuery | undefined => { + const field = thing.fields[filter.path]; + if (!field) { + throw new Error(`Field ${filter.path} not found in ${thing.name}`); + } + if (field.type !== 'link' && field.type !== 'role') { + return undefined; + } + if (field.type === 'role') { + // We can't do this optimization for role fields that are not played by a link field with target 'relation'. + // This relation is only used as intermediary relation. + const oppositeLinkField = schema[field.opposite.thing]?.fields?.[field.opposite.path]; + if (oppositeLinkField?.type !== 'link') { + throw new Error(`Role field ${field.name} in relation ${thing.name} is not played by a link field`); + } + if (oppositeLinkField.target !== 'relation') { + return undefined; + } + } + const { thing: oppositeThing, path: oppositePath, cardinality } = field.opposite; + const oppositeThingSchema = getThingSchema(oppositeThing, schema); + const source: TableScan = { type: 'table_scan', thing: [oppositeThing, ...oppositeThingSchema.subTypes] }; + const optimized = optimizeSource({ source, filter: filter.filter, schema, thing: oppositeThingSchema }); + const traversal: SubQuery = { + type: 'subquery', + source: optimized.source, + oppositePath, + cardinality, + filter: optimized.filter, + }; + return traversal; +} + +const optimizeProjection = (projection: Projection, schema: DRAFT_EnrichedBormSchema, thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation): Projection => { + return { + fields: projection.fields.map((field) => optimizeProjectionField(field, schema, thing)), + }; +} + +const optimizeProjectionField = (field: ProjectionField, schema: DRAFT_EnrichedBormSchema, thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation): ProjectionField => { + if (field.type === 'metadata' || field.type === 'data' || field.type === 'flex' || field.type === 'reference') { + return field; + } + return { + type: 'nested_reference', + path: field.path, + projection: optimizeProjection(field.projection, schema, thing), + filter: field.filter ? optimizeLocalFilter(field.filter, schema, thing) : undefined, + cardinality: field.cardinality, + limit: field.limit, + offset: field.offset, + sort: field.sort, + } +} + +/** + * Flatten "and" and "or" filters into a single filter. Order the filters by cost. + * This optimization doesn't consider indexes. + */ +const optimizeLocalFilter = (filter: Filter, schema: DRAFT_EnrichedBormSchema, thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation): Filter | undefined => { + if (filter.type === 'list' && filter.right.length === 1) { + // TODO: Convert into simpler form if possible. 
+    // Example: `<field> IN [<value>]` into `<field> = <value>`
+  }
+
+  if (filter.type === 'ref' && filter.right.length === 0) {
+    // TODO: Convert into simpler form if possible. Example: `<field> IN []` into `<field> IS NULL`
+  }
+
+  if (filter.type === 'and' || filter.type === 'or') {
+    const filters = filter.filters.flatMap((f) => {
+      const optimized = optimizeLocalFilter(f, schema, thing);
+      if (optimized === undefined) {
+        return [];
+      }
+      // Flatten nested "and" and nested "or" filters.
+      if (optimized.type === filter.type) {
+        return optimized.filters;
+      }
+      return [optimized];
+    });
+    if (filters.length === 0) {
+      return undefined;
+    }
+    if (filters.length === 1) {
+      return filters[0];
+    }
+    // TODO: Combine multiple "=" of the same field inside "or" filter into "in" filter.
+    // TODO: Improve the scoring.
+    const scored = filters.map((i): { filter: Filter; score: number } => {
+      if (i.type === 'scalar') {
+        return { filter: i, score: filterOpScoreMap[i.op] ?? 0 };
+      }
+      if (i.type === 'list') {
+        return { filter: i, score: 0.5 ** (i.right.length - 1) };
+      }
+      if (i.type === 'ref') {
+        if (i.thing) {
+          return { filter: i, score: 0.5 ** ((i.right.length - 1) * i.thing.length) };
+        }
+        // Without thing the filter is a bit slower because we need to call record::id()
+        return { filter: i, score: 0.5 ** (i.right.length - 1) * 0.9 };
+      }
+      return { filter: i, score: 0 };
+    });
+    const sorted = scored.sort((a, b) => b.score - a.score);
+    return {
+      type: filter.type,
+      filters: sorted.map((i) => i.filter),
+    };
+  }
+
+  if (filter.type === 'not') {
+    const inner = optimizeLocalFilter(filter.filter, schema, thing);
+    if (inner === undefined) {
+      return undefined;
+    }
+    if (inner.type === 'not') {
+      return inner.filter;
+    }
+    if (inner.type === 'scalar') {
+      if (inner.op === '=') {
+        return {
+          type: 'scalar',
+          op: '!=',
+          left: inner.left,
+          right: inner.right,
+        };
+      }
+      if (inner.op === '!=') {
+        return {
+          type: 'scalar',
+          op: '=',
+          left: inner.left,
+          right: inner.right,
+        };
+      }
+    }
+    return {
+      type: 'not',
+      filter: inner,
+    };
+  }
+
+  if (filter.type === 'nested') {
+    const optimizedSubFilter = optimizeLocalFilter(filter.filter, schema, thing);
+    if (!optimizedSubFilter) {
+      return undefined;
+    }
+    return {
+      type: 'nested',
+      filter: optimizedSubFilter,
+      path: filter.path,
+      cardinality: filter.cardinality,
+    };
+  }
+
+  return filter;
+}
+
+const filterOpScoreMap: Record<string, number> = {
+  '=': 1,
+  '>': 0.5,
+  '<': 0.5,
+  '>=': 0.5,
+  '<=': 0.5,
+};
+
+/**
+ * Put indexed filters first. Only one set of indexed filters is pushed down.
+ * This function assumes all link/role fields are indexed.
+ */
+const pushDownIndexedFilter = (filter: Filter, thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation): Filter => {
+  if (filter.type === 'and') {
+    const filterMap = Object.fromEntries(filter.filters.map((f, i): [string, { index: number, filter: Filter, score: number }] | undefined => {
+      if (f.type !== 'scalar') {
+        return undefined;
+      }
+      const score = filterOpScoreMap[f.op];
+      if (!score) {
+        return undefined;
+      }
+      return [f.left, { filter: f, index: i, score }];
+    }).filter((i) => i !== undefined));
+    // Longest composite indexes first.
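+    // Example (hypothetical): with a composite index on [a, b] and an `and`
+    // filter `{ a: 1, b: 2, c: 3 }`, the scalar filters on `a` and `b` match
+    // the index prefix and are moved ahead of the filter on `c`.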
+ const compositeIndexes = thing.indexes.filter((index) => index.type !== 'single').sort((a, b) => b.fields.length - a.fields.length); + if (compositeIndexes.length > 0) { + const compositeFilters: { filters: { index: number, filter: Filter }[], score: number }[] = []; + for (const index of compositeIndexes) { + const fs: { index: number, filter: Filter, score: number }[] = []; + for (const field of index.fields) { + const filter = filterMap[field]; + if (!filter || fs.some((f) => f.index === filter.index)) { // Avoid duplicate filters. + break; + } + fs.push(filter); + } + if (fs.length > 0) { + compositeFilters.push({ filters: fs, score: fs.reduce((a, b) => a + (a * b.score), 1) }); + } + } + compositeFilters.sort((a, b) => b.score - a.score); + const [longestCompositeFilter] = compositeFilters; + if (longestCompositeFilter && longestCompositeFilter.score > 1) { + return { + type: 'and', + filters: [ + ...longestCompositeFilter.filters.map((f) => f.filter), + ...filter.filters.filter((_, i) => !longestCompositeFilter.filters.some((f) => f.index === i)), + ], + } + } + } + } + + if (filter.type === 'and' || filter.type === 'or') { + const scored = filter.filters.map((f, index) => { + if (f.type === 'scalar' && f.op === '=') { + const field = thing.fields[f.left]; + if (isIndexed(field, thing.indexes)) { + return { filter: f, score: 1, index }; + } + } else if (f.type === 'list' && f.op === 'IN') { + const field = thing.fields[f.left]; + if (isIndexed(field, thing.indexes)) { + const score = 0.5 ** (f.right.length - 1); + return { filter: f, score, index }; + } + } + return { filter: f, score: 0, index }; + }) + const sorted = scored.sort((a, b) => b.score - a.score); + const [first] = sorted; + const indexed = first && first.score !== 0 ? first.filter : undefined; + // Convert indexed filter with IN operator to an OR filter with scalar filters. + const optimized: Filter | undefined = indexed?.type === 'list' && indexed.op === 'IN' ? { + type: 'or', + filters: indexed.right.map((r) => ({ type: 'scalar', op: '=', left: indexed.left, right: r })), + } : indexed; + return { + type: filter.type, + filters: optimized ? [optimized, ...filter.filters.filter((_, i) => i !== first.index)] : filter.filters, + }; + } + return filter; +}; + +const isIndexed = (field: DRAFT_EnrichedBormField, indexes: Index[]): boolean => { + // SurrealDB reference fields are assumed to be indexed. 
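+  // Example: a `single` index on `email` makes a data field `email` count as
+  // indexed, while link and role fields are treated as indexed even without
+  // an explicit index entry.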
+ return field.type === 'role' || field.type === 'link' || indexes.some((i) => (i.type === 'single' && i.field === field.name) || (i.type === 'composite' && i.fields.includes(field.name))); +}; + +const getThingSchema = (thing: string, schema: DRAFT_EnrichedBormSchema): DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation => { + const thingSchema = schema[thing]; + if (!thingSchema) { + throw new Error(`Thing ${thing} not found in schema`); + } + return thingSchema; +} + +const getSourceThing = (source: DataSource, schema: DRAFT_EnrichedBormSchema): DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation => { + if (source.type === 'table_scan' || source.type === 'record_pointer') { + const thingSchema = getThingSchema(source.thing[0], schema); + return thingSchema; + } + + const subThing = getSourceThing(source.source, schema); + const field = subThing.fields[source.oppositePath]; + if (!field) { + throw new Error(`Field ${source.oppositePath} not found in ${subThing.name}`); + } + if (field.type === 'constant' || field.type === 'computed' || field.type === 'data' || field.type === 'ref') { + throw new Error(`Invalid source: ${JSON.stringify(source)}`); + } + const thing = schema[field.opposite.thing]; + if (!thing) { + throw new Error(`Thing ${field.opposite.thing} not found in schema`); + } + return thing; +} diff --git a/src/stateMachine/query/surql2/processResults.ts b/src/stateMachine/query/surql2/processResults.ts new file mode 100644 index 00000000..e7cc3187 --- /dev/null +++ b/src/stateMachine/query/surql2/processResults.ts @@ -0,0 +1,129 @@ +import type { BQLQuery, NestedBQL } from "../../../types/requests/parser"; +import type { DRAFT_EnrichedBormEntity, DRAFT_EnrichedBormRelation, DRAFT_EnrichedBormSchema } from "../../../types/schema/enriched.draft"; + +export const processResults = (params: { + batch: BQLQuery[], + results: unknown[], + schema: DRAFT_EnrichedBormSchema, + metadata: boolean, + returnNulls: boolean, +}) => { + const { batch, results, schema, metadata, returnNulls } = params; + return batch.map((query, i) => processQueryResult({ query, result: results[i], schema, metadata, returnNulls })); +} + +const processQueryResult = (params: { + query: BQLQuery, + result: unknown, + schema: DRAFT_EnrichedBormSchema, + metadata: boolean, + returnNulls: boolean, +}) => { + const { query, result, schema, metadata, returnNulls } = params; + if (!result) { + return result ?? 
null;
+  }
+  const thing = schema[query.$thing];
+  if (!thing) {
+    throw new Error(`Thing ${query.$thing} not found in schema`);
+  }
+  if (Array.isArray(result)) {
+    return result.map((r) => transformResultObject({ query, result: r, thing, schema, metadata, returnNulls }));
+  }
+  return transformResultObject({ query, result, thing, schema, metadata, returnNulls });
+}
+
+const processNestedResult = (params: {
+  query: NestedBQL,
+  result: unknown,
+  thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation,
+  schema: DRAFT_EnrichedBormSchema,
+  metadata: boolean,
+  returnNulls: boolean,
+}) => {
+  const { query, result, thing, schema, metadata, returnNulls } = params;
+  if (Array.isArray(result)) {
+    if (result.length === 0) {
+      return null;
+    }
+    return result.map((r) => transformResultObject({ query, result: r, thing, schema, metadata, returnNulls }));
+  }
+  return transformResultObject({ query, result, thing, schema, metadata, returnNulls });
+};
+
+const transformResultObject = (params: {
+  query: BQLQuery | NestedBQL,
+  result: unknown,
+  thing: DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation,
+  schema: DRAFT_EnrichedBormSchema,
+  metadata: boolean,
+  returnNulls: boolean,
+}) => {
+  const { query, result, thing, schema, metadata, returnNulls } = params;
+  if (!result || typeof result !== 'object') {
+    return result ?? null;
+  }
+
+  const obj = result as Record<string, unknown>;
+  const newResult: Record<string, unknown> = {};
+
+  if (metadata) {
+    newResult.$thing = obj.$thing;
+    newResult.$id = obj.$id;
+    newResult.$thingType = thing.type;
+  }
+
+  for (const fieldQuery of query.$fields ?? Object.keys(thing.fields)) {
+    const path = typeof fieldQuery === 'string' ? fieldQuery : fieldQuery.$path;
+    const alias = typeof fieldQuery === 'string' ? fieldQuery : fieldQuery.$as ?? path;
+    // Skip excluded fields.
+    if (query.$excludedFields?.includes(path)) {
+      continue;
+    }
+
+    if (path === '$id' || path === '$thing') {
+      newResult[alias] = obj[alias] ?? null;
+      continue;
+    }
+
+    const field = thing.fields[path];
+
+    if (!field) {
+      throw new Error(`Field ${path} not found in ${thing.name}`);
+    }
+
+    if (field.type === 'constant') {
+      newResult[alias] = field.value;
+      continue;
+    }
+
+    if (field.type === 'computed') {
+      newResult[alias] = field.fn(obj);
+      continue;
+    }
+
+    const value = obj[alias] ?? null;
+
+    if (field.type === 'data') {
+      if (!returnNulls && (value === null || value === undefined)) {
+        continue;
+      }
+      newResult[alias] = value ?? null;
+      continue;
+    }
+
+    if (!returnNulls && (value === null || value === undefined || (Array.isArray(value) && value.length === 0))) {
+      continue;
+    }
+
+    if (typeof fieldQuery === 'string' || field.type === 'ref') {
+      newResult[alias] = Array.isArray(value) && value.length === 0 ? null : value ?? null;
+      continue;
+    }
+
+    const opposite = schema[field.opposite.thing];
+    newResult[alias] = processNestedResult({ query: fieldQuery, result: value, thing: opposite, schema, metadata, returnNulls });
+  }
+
+  return newResult;
+};
diff --git a/src/stateMachine/query/surql2/query.ts b/src/stateMachine/query/surql2/query.ts
new file mode 100644
index 00000000..717883c9
--- /dev/null
+++ b/src/stateMachine/query/surql2/query.ts
@@ -0,0 +1,26 @@
+import type { SimpleSurrealClient } from '../../../adapters/surrealDB/client';
+import { logDebug } from '../../../logger';
+import type { BormConfig } from '../../../types';
+import { VERSION } from '../../../version';
+import type { SurqlParams } from './buildSurql';
+
+export const query = async (props: {
+  client: SimpleSurrealClient;
+  queries: string[];
+  config: BormConfig;
+  params: SurqlParams;
+}): Promise<any[][]> => {
+  const { client, queries, config, params } = props;
+  const batchedQuery = `BEGIN TRANSACTION;
+${queries.join(';')};
+COMMIT TRANSACTION;`;
+  // console.log('\n> batchedQuery\n', batchedQuery);
+
+  if (config.query?.debugger) {
+    logDebug(`batchedQuery[${VERSION}]`, JSON.stringify({ batchedQuery }));
+  }
+
+  const result = await client.query(batchedQuery, params);
+  // console.log('> result', JSON.stringify(result, null, 2));
+  return result as any[][];
+};
diff --git a/src/stateMachine/query/surql2/run.ts b/src/stateMachine/query/surql2/run.ts
new file mode 100644
index 00000000..aae2c95c
--- /dev/null
+++ b/src/stateMachine/query/surql2/run.ts
@@ -0,0 +1,36 @@
+import type { SimpleSurrealClient } from '../../../adapters/surrealDB/client';
+import type { BormConfig } from '../../../types';
+import { BQLQueryParser } from '../../../types/requests/parser';
+import type { DRAFT_EnrichedBormSchema } from '../../../types/schema/enriched.draft';
+import { buildLogicalQuery } from './buildLogical';
+import { buildSurql, type SurqlParams } from './buildSurql';
+import { optimizeLogicalQuery } from './optimize';
+import { processResults } from './processResults';
+import { query } from './query';
+
+export const runSurrealDbQueryMachine2 = async (
+  bql: unknown[],
+  schema: DRAFT_EnrichedBormSchema,
+  config: BormConfig,
+  client: SimpleSurrealClient,
+) => {
+  const bqlQueries = bql.map((q) => BQLQueryParser.parse(q));
+  const logicalQueries = bqlQueries.map((q) => buildLogicalQuery(q, schema, !config.query?.noMetadata));
+  // console.log('> logicalQueries\n', JSON.stringify(logicalQueries, null, 2));
+  const optimizedQueries = logicalQueries.map((q) => optimizeLogicalQuery(q, schema));
+  const params: SurqlParams = {};
+  const surqlQueries = optimizedQueries.map((q) => buildSurql(q, params));
+  // for (const surqlQuery of surqlQueries) {
+  //   console.log('\n> surqlQuery\n', surqlQuery);
+  // }
+  // console.log('> params\n', JSON.stringify(params, null, 2));
+  const result = await query({ client, queries: surqlQueries, config, params });
+  const finalResult = processResults({
+    batch: bqlQueries,
+    results: result,
+    schema,
+    metadata: !config.query?.noMetadata,
+    returnNulls: !!config.query?.returnNulls,
+  });
+  return finalResult;
+};
diff --git a/src/types/requests/parser.ts b/src/types/requests/parser.ts
new file mode 100644
index 00000000..64b38598
--- /dev/null
+++ b/src/types/requests/parser.ts
@@ -0,0 +1,166 @@
+import { z } from 'zod/v4';
+
+export const BQLFilterValueParser = z.json();
+
+export type BQLFilterValue = z.infer<typeof BQLFilterValueParser>;
+
+export const BQLFilterValueListParser = z.array(BQLFilterValueParser);
+
diff --git a/src/types/requests/parser.ts b/src/types/requests/parser.ts new file mode 100644 index 00000000..64b38598 --- /dev/null +++ b/src/types/requests/parser.ts @@ -0,0 +1,166 @@ +import { z } from 'zod/v4'; + +export const BQLFilterValueParser = z.json(); + +export type BQLFilterValue = z.infer<typeof BQLFilterValueParser>; + +export const BQLFilterValueListParser = z.array(BQLFilterValueParser); + +export type BQLFilterValueList = z.infer<typeof BQLFilterValueListParser>; + +export type BQLFilter = { + $or?: BQLFilter[]; + $not?: BQLFilter; + /** + * Depending on the field type, only a subset of this type is allowed. + */ + [key: string]: BQLFilterValue | BQLFilterValueList | NestedBQLFilter | NestedBQLFilter[] | undefined; +}; + +export interface NestedBQLFilter extends BQLFilter { + $exists?: boolean; + // Scalar Operators + $eq?: BQLFilterValue; + $neq?: BQLFilterValue; + $gt?: BQLFilterValue; + $lt?: BQLFilterValue; + $gte?: BQLFilterValue; + $lte?: BQLFilterValue; + $contains?: BQLFilterValue; + $containsNot?: BQLFilterValue; + // List Operators + $in?: BQLFilterValueList; + $nin?: BQLFilterValueList; + $containsAll?: BQLFilterValueList; + $containsAny?: BQLFilterValueList; + $containsNone?: BQLFilterValueList; +} + +export const StrictBQLValueFilterParser = z.strictObject({ + $exists: z.boolean().optional(), + $eq: BQLFilterValueParser.optional(), + $neq: BQLFilterValueParser.optional(), + $gt: BQLFilterValueParser.optional(), + $lt: BQLFilterValueParser.optional(), + $gte: BQLFilterValueParser.optional(), + $lte: BQLFilterValueParser.optional(), + $contains: BQLFilterValueParser.optional(), + $containsNot: BQLFilterValueParser.optional(), + $in: BQLFilterValueListParser.optional(), + $nin: BQLFilterValueListParser.optional(), + $containsAll: BQLFilterValueListParser.optional(), + $containsAny: BQLFilterValueListParser.optional(), + $containsNone: BQLFilterValueListParser.optional(), +}); + +export const BQLFilterParser: z.ZodType<BQLFilter> = z.lazy(() => + z.object({ + // Recursive Operators + $or: z.array(z.lazy(() => BQLFilterParser)).optional(), + $not: z.lazy(() => BQLFilterParser).optional(), + }).catchall( + // "Everything else" (Custom fields) + z.union([ + BQLFilterValueParser, + BQLFilterValueListParser, + z.lazy(() => z.union([NestedBQLFilterParser, z.array(NestedBQLFilterParser)])), + ]) + ) +); + +export const NestedBQLFilterParser: z.ZodType<NestedBQLFilter> = z.lazy(() => + z.object({ + // Recursive Operators + $or: z.array(z.lazy(() => BQLFilterParser)).optional(), + $not: z.lazy(() => BQLFilterParser).optional(), + // Exists Operators + $exists: z.boolean().optional(), + // Scalar Value Operators + $eq: BQLFilterValueParser.optional(), + $neq: BQLFilterValueParser.optional(), + $gt: BQLFilterValueParser.optional(), + $lt: BQLFilterValueParser.optional(), + $gte: BQLFilterValueParser.optional(), + $lte: BQLFilterValueParser.optional(), + $contains: BQLFilterValueParser.optional(), + $containsNot: BQLFilterValueParser.optional(), + // List Value Operators + $in: BQLFilterValueListParser.optional(), + $nin: BQLFilterValueListParser.optional(), + $containsAll: BQLFilterValueListParser.optional(), + $containsAny: BQLFilterValueListParser.optional(), + $containsNone: BQLFilterValueListParser.optional(), + }).catchall( + // "Everything else" (Custom fields) + z.union([ + BQLFilterValueParser, + BQLFilterValueListParser, + z.lazy(() => z.union([NestedBQLFilterParser, z.array(NestedBQLFilterParser)])), + ]) + ) +);
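// Illustrative inputs the grammar above is meant to accept (assumed field
// names; each parses with BQLFilterParser):
const byValue = { email: 'ann@test.com' }; // bare value
const byList = { spaces: ['space-1', 'space-2'] }; // list of values
const byOps = { spaces: { $containsAny: ['space-1'] } }; // nested operators
const combined = { $not: { email: { $exists: false } }, $or: [byValue, byList] };
// BQLFilterParser.parse(combined) returns the same object, typed as BQLFilter.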
+const BaseBQLParser = z.object({ + $id: z.union([z.string(), z.array(z.string())]).optional(), + $filter: z.union([BQLFilterParser, z.array(BQLFilterParser)]).optional(), + $fields: z.array(z.union([z.string(), z.lazy(() => NestedBQLParser)])).optional(), + $excludedFields: z.array(z.string()).optional(), + $limit: z.number().optional(), + $offset: z.number().optional(), + $sort: z.array( + z.union([ + z.object({ + field: z.string(), + desc: z.boolean().optional(), + }), + z.string() + ]) + ).optional(), +}); + +interface BaseBQL { + $id?: string | string[]; + $filter?: BQLFilter | BQLFilter[]; + $fields?: (string | NestedBQL)[]; + $excludedFields?: string[]; + $limit?: number; + $offset?: number; + $sort?: ({ field: string; desc?: boolean } | string)[]; +} + +export const NestedBQLParser: z.ZodType<NestedBQL> = BaseBQLParser.extend({ + $path: z.string(), + $as: z.string().optional(), +}); + +export interface NestedBQL extends BaseBQL { + $path: string; + $as?: string; +} + +export type BQLField = string | NestedBQL; + +export const BQLQueryParser = BaseBQLParser.extend({ + $thing: z.string().optional(), + $entity: z.string().optional(), + $relation: z.string().optional(), +}) + .superRefine((data, ctx) => { + if (!data.$thing && !data.$entity && !data.$relation) { + ctx.addIssue({ + code: 'custom', + message: "Query must contain at least one of: $thing, $entity, or $relation", + path: ["$thing"], + }); + } + }) + .transform((data) => { + const { $thing, $entity, $relation, ...rest } = data; + + return { + ...rest, + $thing: ($thing ?? $entity ?? $relation) as string, // Guaranteed to exist by superRefine + }; + }); + +export type BQLQuery = z.infer<typeof BQLQueryParser>; \ No newline at end of file
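// Illustrative use of the parser above: $entity/$relation are aliases that
// get normalized onto $thing, and omitting all three raises the custom issue
// added in superRefine.
const parsedQuery = BQLQueryParser.parse({ $entity: 'User', $fields: ['id', 'email'] });
// parsedQuery => { $thing: 'User', $fields: ['id', 'email'] }
// BQLQueryParser.parse({ $fields: ['id'] }) throws:
// "Query must contain at least one of: $thing, $entity, or $relation"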
diff --git a/src/types/schema/base.ts b/src/types/schema/base.ts index 770b8b57..9d1e0d3c 100644 --- a/src/types/schema/base.ts +++ b/src/types/schema/base.ts @@ -1,4 +1,5 @@ import type { DataField, DBConnector, EnrichedBQLMutationBlock, LinkField, RefField, RoleField } from '..'; +import type { Index } from './enriched.draft'; export type BormSchema = { entities: { [s: string]: BormEntity }; @@ -14,6 +15,7 @@ export type BormEntity = linkFields?: readonly LinkField[]; refFields?: { [key: string]: RefField }; hooks?: Hooks; + indexes?: Index[]; } | { idFields: readonly string[]; @@ -22,11 +24,13 @@ export type BormEntity = linkFields?: readonly LinkField[]; refFields?: { [key: string]: RefField }; hooks?: Hooks; + indexes?: Index[]; }; export type BormRelation = BormEntity & { defaultDBConnector: DBConnector & { path: string }; /// mandatory in relations roles?: { [key: string]: RoleField }; + indexes?: Index[]; }; export type BormOperation = 'create' | 'update' | 'delete' | 'link' | 'unlink' | 'replace' | 'match'; diff --git a/src/types/schema/enriched.draft.ts b/src/types/schema/enriched.draft.ts new file mode 100644 index 00000000..fb7d281a --- /dev/null +++ b/src/types/schema/enriched.draft.ts @@ -0,0 +1,104 @@ +/** + * These types are designed with SurrealDB queries in mind. For other DBs or for mutations, they may be missing some fields. + */ + +import type { DataField, DiscreteCardinality } from "./fields"; + +export type DRAFT_EnrichedBormSchema = Record<string, DRAFT_EnrichedBormEntity | DRAFT_EnrichedBormRelation>; + +export interface DRAFT_EnrichedBormEntity extends EnrichedBormThing { + type: 'entity'; + fields: Record<string, DRAFT_EnrichedBaseBormField>; +} + +export interface DRAFT_EnrichedBormRelation extends EnrichedBormThing { + type: 'relation'; + fields: Record<string, DRAFT_EnrichedBormField>; +} + +interface EnrichedBormThing { + name: string; + idFields: [string, ...string[]]; + extends?: string; + subTypes: string[]; + indexes: Index[]; +} + +export type DRAFT_EnrichedBaseBormField = + | DRAFT_EnrichedBormConstantField + | DRAFT_EnrichedBormComputedField + | DRAFT_EnrichedBormDataField + | DRAFT_EnrichedBormLinkField + | DRAFT_EnrichedBormRefField; + +export type DRAFT_EnrichedBormField = DRAFT_EnrichedBaseBormField | DRAFT_EnrichedBormRoleField; + +export interface DRAFT_EnrichedBormConstantField { + type: 'constant'; + name: string; + contentType: DataField['contentType']; + cardinality: DiscreteCardinality; + value: unknown; +} + +export interface DRAFT_EnrichedBormComputedField { + type: 'computed'; + name: string; + contentType: DataField['contentType']; + cardinality: DiscreteCardinality; + fn: (currentNode: Record<string, unknown>) => unknown; +} + +export interface DRAFT_EnrichedBormDataField { + type: 'data'; + name: string; + contentType: DataField['contentType']; + cardinality: DiscreteCardinality; + unique: boolean; +} + +export interface DRAFT_EnrichedBormRoleField { + type: 'role'; + name: string; + cardinality: DiscreteCardinality; + opposite: { + thing: string; + path: string; + cardinality: DiscreteCardinality; + }; +} + +export interface DRAFT_EnrichedBormLinkField { + type: 'link'; + name: string; + cardinality: DiscreteCardinality; + target: 'relation' | 'role'; + opposite: { + thing: string; + path: string; + cardinality: DiscreteCardinality; + }; +} + +/** + * Content type REF allows referencing any record in the database. + * Content type FLEX allows storing any type of data, including references to any record in the database. + */ +export interface DRAFT_EnrichedBormRefField { + type: 'ref'; + name: string; + contentType: 'REF' | 'FLEX'; + cardinality: 'ONE' | 'MANY'; +} + +export type Index = SingleIndex | CompositeIndex; + +export interface SingleIndex { + type: 'single'; + field: string; +} + +export interface CompositeIndex { + type: 'composite'; + fields: [string, ...string[]]; +} \ No newline at end of file
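// Illustrative enriched entry under the types above (assumed field names and
// content types; real entries are produced by the enrichment step, not by hand):
const userThing: DRAFT_EnrichedBormEntity = {
	type: 'entity',
	name: 'User',
	idFields: ['id'],
	subTypes: [],
	indexes: [{ type: 'single', field: 'email' }],
	fields: {
		email: { type: 'data', name: 'email', contentType: 'TEXT', cardinality: 'ONE', unique: true },
		spaces: {
			type: 'link',
			name: 'spaces',
			cardinality: 'MANY',
			target: 'role',
			opposite: { thing: 'Space', path: 'users', cardinality: 'MANY' },
		},
	},
};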
diff --git a/src/types/schema/fields.ts b/src/types/schema/fields.ts index 9d6c3e5e..216ea271 100644 --- a/src/types/schema/fields.ts +++ b/src/types/schema/fields.ts @@ -35,6 +35,7 @@ export type LinkField = BormField & { | { target: 'role'; filter?: Filter | Filter[]; + targetRole: string; } | { target: 'relation'; diff --git a/tests/helpers/matchers.ts b/tests/helpers/matchers.ts index ebf94ee1..6c67e04c 100644 --- a/tests/helpers/matchers.ts +++ b/tests/helpers/matchers.ts @@ -1,8 +1,5 @@ /* eslint-disable no-param-reassign */ // eslint-disable-next-line import/no-extraneous-dependencies -import { produce } from 'immer'; -import type { TraversalCallbackContext } from 'object-traversal'; -import { traverse } from 'object-traversal'; import { expect } from 'vitest'; export const deepSort = <T>(obj: T, key = '$id'): T => { @@ -48,19 +45,6 @@ export const deepSort = <T>(obj: T, key = '$id'): T => { return obj; }; -export const deepRemoveMetaData = (obj: object) => { - const removeMeta = ({ value }: TraversalCallbackContext) => { - if (value && typeof value === 'object' && '$id' in value) { - const metas = Object.keys(value).filter((k) => k.startsWith('$')); - metas.forEach((k) => delete value[k]); - const symbols = Object.keys(value).filter((s) => typeof s === 'symbol'); - symbols.forEach((s) => delete value[s]); - } - return value; - }; - return produce(obj, (draft) => traverse(draft, removeMeta)); -}; - const checkRecursive = <T>(obj: T): T => { if (Array.isArray(obj)) { return expect.arrayContaining(obj.map(checkRecursive)) as unknown as T; diff --git a/tests/mocks/schema.ts b/tests/mocks/schema.ts index 2f954914..36d3e8a4 100644 --- a/tests/mocks/schema.ts +++ b/tests/mocks/schema.ts @@ -55,6 +55,7 @@ export const schema: BormSchema = { cardinality: 'MANY', plays: 'user', target: 'role', + targetRole: 'accounts', }, { path: 'sessions', @@ -62,6 +63,7 @@ export const schema: BormSchema = { cardinality: 'MANY', plays: 'user', target: 'role', + targetRole: 'sessions', }, { path: 'spaces', @@ -69,6 +71,7 @@ export const schema: BormSchema = { cardinality: 'MANY', plays: 'users', target: 'role', + targetRole: 'spaces', }, { path: 'user-tags', @@ -275,6 +278,7 @@ export const schema: BormSchema = { relation: 'Space-User', plays: 'spaces', target: 'role', + targetRole: 'users', }, { path: 'objects', @@ -362,6 +366,7 @@ export const schema: BormSchema = { relation: 'User-Accounts', plays: 'accounts', target: 'role', + targetRole: 'user', /// rights => Either you want to make it 1) read only 2) replace only 3) update only 4) delete only 5) create only ... }, ],
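// Illustrative linkField under the fields.ts change above: when target is
// 'role', the opposite role is now named explicitly via targetRole. Values
// mirror the accounts/User-Accounts hunks; path and relation are assumed here:
const accountsLinkField = {
	path: 'accounts',
	relation: 'User-Accounts',
	cardinality: 'MANY',
	plays: 'user',
	target: 'role',
	targetRole: 'accounts',
};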
@@ -381,6 +386,7 @@ export const schema: BormSchema = { relation: 'User-Sessions', plays: 'sessions', target: 'role', + targetRole: 'user', }, ], }, @@ -518,6 +524,7 @@ export const schema: BormSchema = { relation: 'UserTagGroup', plays: 'color', target: 'role', + targetRole: 'tags', }, { path: 'group', @@ -638,6 +645,7 @@ export const schema: BormSchema = { relation: 'HookATag', plays: 'hookTypeA', target: 'role', + targetRole: 'otherHooks', isVirtual: true, dbValue: { surrealDB: @@ -650,6 +658,7 @@ export const schema: BormSchema = { relation: 'HookATag', plays: 'otherHooks', target: 'role', + targetRole: 'hookTypeA', isVirtual: true, dbValue: { surrealDB: @@ -755,6 +764,7 @@ export const schema: BormSchema = { { path: 'color', target: 'role', + targetRole: 'color', cardinality: 'ONE', plays: 'tags', relation: 'UserTagGroup', diff --git a/tests/unit/bench/bench.ts b/tests/unit/bench/bench.ts index bb60911b..5cfd4005 100644 --- a/tests/unit/bench/bench.ts +++ b/tests/unit/bench/bench.ts @@ -4,9 +4,10 @@ import { bench, expect } from 'vitest'; import type { WithBormMetadata } from '../../../src/index'; import type { TypeGen } from '../../../src/types/typeGen'; import { createTest } from '../../helpers/createTest'; -import { deepRemoveMetaData, deepSort, expectArraysInObjectToContainSameElements } from '../../helpers/matchers'; +import { deepSort, expectArraysInObjectToContainSameElements } from '../../helpers/matchers'; import type { typesSchema } from '../../mocks/generatedSchema'; import type { UserType } from '../../types/testTypes'; +import { deepRemoveMetaData } from '../../../src/helpers'; export const allBench = createTest('Bench', (ctx) => { bench('v1[validation] - $entity missing', async () => { diff --git a/tests/unit/queries/query.ts b/tests/unit/queries/query.ts index c4580d0b..482eec4b 100644 --- a/tests/unit/queries/query.ts +++ b/tests/unit/queries/query.ts @@ -4,9 +4,10 @@ import { expect, it } from 'vitest'; import type { WithBormMetadata } from '../../../src/index'; import type { TypeGen } from '../../../src/types/typeGen'; import { createTest } from '../../helpers/createTest'; -import { deepRemoveMetaData, deepSort, expectArraysInObjectToContainSameElements } from '../../helpers/matchers'; +import { deepSort, expectArraysInObjectToContainSameElements } from '../../helpers/matchers'; import type { typesSchema } from '../../mocks/generatedSchema'; import type { UserType } from '../../types/testTypes'; +import { deepRemoveMetaData } from '../../../src/helpers'; export const testQuery = createTest('Query', (ctx) => { it('v1[validation] - $entity missing', async () => { @@ -1479,8 +1480,9 @@ export const testQuery = createTest('Query', (ctx) => { const res = await ctx.query( { $entity: 'User', -//@ts-expect-error - TODO: This is valid syntax but requires refactoring the filters + // @ts-expect-error - TODO: This is valid syntax but requires refactoring the filters $filter: [{ spaces: ['space-1'] }, { email: 'ann@test.com' }], + // $filter: { $or: [{ spaces: ['space-1'] }, { email: 'ann@test.com' }] }, $fields: ['id'], }, { noMetadata: true }, @@ -2827,6 +2829,22 @@ export const testQuery = createTest('Query', (ctx) => { });
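// Illustrative equivalence for the $filter TODO above (assumed semantics,
// pending the filter refactor): an array of filters reads as an OR of its entries.
const arrayForm = { $entity: 'User', $filter: [{ spaces: ['space-1'] }, { email: 'ann@test.com' }], $fields: ['id'] };
const orForm = { $entity: 'User', $filter: { $or: [{ spaces: ['space-1'] }, { email: 'ann@test.com' }] }, $fields: ['id'] };
// Both are meant to match users in space-1 OR with that email.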
it('TODO{T}:ref1n[ref, ONE, nested] Get also nested data', async () => { + // SELECT + // "0" AS `$$queryPath`, + // (id && record::id(id)) || null AS `$id`, + // (id && record::tb(id)) || null AS `$thing`, + // record::id(id) AS id, + // ( + // SELECT + // "0.$fields.1" AS `$$queryPath`, + // (id && record::id(id)) || null AS `$id`, + // (id && record::tb(id)) || null AS `$thing`, + // (id && null) || $this AS `$value`, + // * + // FROM $parent.reference + // ) AS reference + // FROM FlexRef + // WHERE id AND record::id(id) IN ['fr1']; const res = await ctx.query( { $entity: 'FlexRef', @@ -2852,6 +2870,25 @@ export const testQuery = createTest('Query', (ctx) => { }); it('TODO{T}:ref1nf[ref, ONE, nested, someFields] Get also nested data but only some fields', async () => { + // SELECT + // "0" AS `$$queryPath`, + // id && record::id(id) || null AS `$id`, + // id && record::tb(id) || null AS `$thing`, + // record::id(id) AS id, + // ( + // SELECT + // "0.$fields.1" AS `$$queryPath`, + // id && record::id(id) || null AS `$id`, + // id && record::tb(id) || null AS `$thing`, + // id && null || $this AS `$value`, + // <requested fields> + // FROM $parent.reference + // ) AS reference + // FROM FlexRef + // WHERE id + // AND (record::id(id) IN ['fr1']); const res = await ctx.query( { $entity: 'FlexRef', @@ -2901,6 +2938,22 @@ export const testQuery = createTest('Query', (ctx) => { }); it('TODO{T}:ref4nf[ref, flex, MANY, nested] Get flexReferences with nested data', async () => { + // SELECT + // "0" AS `$$queryPath`, + // id && record::id(id) || null AS `$id`, + // id && record::tb(id) || null AS `$thing`, + // record::id(id) AS id, + // ( + // SELECT + // "0.$fields.1" AS `$$queryPath`, + // id && record::id(id) || null AS `$id`, + // id && record::tb(id) || null AS `$thing`, + // id && null || $this AS `$value`, + // * + // FROM $parent.flexReferences + // ) AS flexReferences + // FROM FlexRef + // WHERE id AND (record::id(id) IN ['fr5']); const res = await ctx.query( { $entity: 'FlexRef', $id: 'fr5', $fields: ['id', { $path: 'flexReferences' }] }, { noMetadata: true }, ); }); it('TODO{T}:ref4n[ref, flex, MANY, nested, $fields] Get flexReferences with nested data but only some fields', async () => { + // SELECT + // "0" AS `$$queryPath`, + // (id && record::id(id)) || null AS `$id`, + // (id && record::tb(id)) || null AS `$thing`, + // record::id(id) AS id, + // ( + // SELECT + // "0.$fields.1" AS `$$queryPath`, + // (id && record::id(id)) || null AS `$id`, + // (id && record::tb(id)) || null AS `$thing`, + // (id && null) || $this AS `$value`, + // ⟨id⟩, + // ⟨name⟩, + // ⟨user-tags⟩ + // FROM $parent.`flexReferences` + // ) AS `flexReferences` + // FROM FlexRef + // WHERE id + // AND record::id(id) IN ['fr5']; const res = await ctx.query( { $entity: 'FlexRef',