[fix] handle errors

Signed-off-by: Anton Nesterov <anton@demiurg.io>
Anton Nesterov 2024-09-03 01:11:17 +02:00
parent 059fe4ca76
commit e62a155e04
15 changed files with 107 additions and 84 deletions

View file

@ -1,4 +1,4 @@
import type { Request, ExecResult } from "./Protocol";
import type { Request, ExecResult, IError } from "./Protocol";
import {
METHODS,
encodeRequest,
@ -163,7 +163,7 @@ export default class Builder<I extends abstract new (...args: any) => any> {
this.dtoTemplate = template;
return this;
}
async *Rows<T = InstanceType<I>>(): AsyncGenerator<T> {
async *Rows<T = InstanceType<I>>(): AsyncGenerator<[T, IError]> {
this.formatRequest();
const response = await fetch(this.url, {
method: "POST",
@ -174,26 +174,38 @@ export default class Builder<I extends abstract new (...args: any) => any> {
},
});
if (response.status !== 200) {
throw new Error(await response.text());
return [[], await response.text()];
}
const iterator = decodeRowsIterator(response.body!);
for await (const row of iterator) {
for await (const result of iterator) {
const [row, err] = result;
if (err) {
yield [{} as T, err];
return;
}
if (this.headerRow === null) {
this.headerRow = row.r;
continue;
}
yield this.formatRow(row.r);
yield [this.formatRow(row.r), null];
}
}
async Query<T = InstanceType<I>>(): Promise<T[]> {
const rows = this.Rows();
const result: T[] = [];
for await (const row of rows) {
for await (const res of rows) {
const [row, error] = res;
if (error) {
if (String(error).includes("RangeError")) {
break;
}
throw new Error(error);
}
result.push(row);
}
return result;
}
async Exec(): Promise<ExecResult> {
async Exec(): Promise<[ExecResult, IError]> {
this.formatRequest();
const response = await fetch(this.url, {
method: "POST",

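A minimal caller-side sketch of the error-tuple contract introduced above: Rows() now yields [row, error] pairs and Exec() resolves to [ExecResult, IError] instead of throwing. The import path and constructor options mirror the tests later in this commit; the Insert payload is a hypothetical placeholder.

```ts
// Sketch only: import path and { database } option follow the tests in this
// commit (@nesterow/dal/client/native); adjust for your setup.
import DAL from "@nesterow/dal/client/native";

const db = new DAL({ database: "data/chinook.db" });

// Rows() no longer throws: each iteration yields a [row, error] tuple.
for await (const [row, err] of db
  .In("artists")
  .Find({ name: { $glob: "A*" } })
  .Limit(10)
  .Rows()) {
  if (err) {
    console.error("query failed:", err);
    break;
  }
  console.log(row);
}

// Exec() resolves to [ExecResult, IError] as well (Insert payload is hypothetical).
const [result, execErr] = await db.In("artists").Insert({ Name: "New Artist" }).Exec();
if (execErr) {
  console.error("exec failed:", execErr);
} else {
  console.log(result.RowsAffected, result.LastInsertId);
}
```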
View file

@ -15,13 +15,15 @@ export interface ExecResult {
Id: number;
RowsAffected: number;
LastInsertId: number;
Msg?: string;
Error?: string;
}
export interface Row {
r: unknown[];
}
export type IError = any;
export const METHODS =
"Raw|In|Find|Select|Fields|Join|Group|Sort|Limit|Offset|Delete|Insert|Set|Update|OnConflict|DoUpdate|DoNothing|Tx".split(
"|",
@ -31,28 +33,29 @@ export function encodeRequest(request: Request): Uint8Array {
return encode(request);
}
export function decodeResponse(input: Uint8Array): ExecResult | null {
export function decodeResponse(input: Uint8Array): [ExecResult, IError] {
try {
const res = decode(input) as {
i: number;
ra: number;
li: number;
m?: string;
e?: string;
};
return {
const result = {
Id: res.i,
RowsAffected: res.ra,
LastInsertId: res.li,
Msg: res.m,
Error: res.e,
};
return [result, result.Error];
} catch (e) {
return null;
return [{} as ExecResult, e];
}
}
const ROW_TAG = [0x81, 0xa1, 0x72];
export function decodeRows(input: Uint8Array): Row[] | null {
export function decodeRows(input: Uint8Array): [Row[], IError] {
try {
const rows = [];
let count = 0;
@ -76,24 +79,31 @@ export function decodeRows(input: Uint8Array): Row[] | null {
}
rows.push([...ROW_TAG, ...buf]);
rows.shift();
return rows.map((row) => decode(new Uint8Array(row as number[]))) as Row[];
return [
rows.map((row) => decode(new Uint8Array(row as number[]))) as Row[],
null,
];
} catch (e) {
return null;
return [[], e];
}
}
export async function* decodeRowsIterator(
stream: ReadableStream<Uint8Array>,
): AsyncGenerator<Row> {
): AsyncGenerator<[Row, IError]> {
const reader = stream.getReader();
for (;;) {
const { value, done } = await reader.read();
if (done) {
break;
}
const rows = decodeRows(value);
for (const row of rows!) {
yield row;
const [rows, err] = decodeRows(value);
if (err) {
yield [{} as Row, err];
break;
}
for (const row of rows) {
yield [row, null];
}
}
}
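A small sketch of the [value, IError] convention the protocol helpers now follow: decode failures and the server-side Error field come back as the second tuple element instead of a thrown exception or a null return. The relative import path and the sample buffers are assumptions.

```ts
// Sketch only: "./Protocol" refers to the module changed above; the payloads
// here stand in for real msgpack-encoded server responses.
import { decodeResponse, decodeRows } from "./Protocol";

function onExecPayload(payload: Uint8Array) {
  // decodeResponse() no longer returns null on failure: it yields
  // [ExecResult, IError], where the error is either the decoded "e" field
  // or the exception caught while decoding.
  const [result, err] = decodeResponse(payload);
  if (err) {
    console.error("exec failed:", err);
    return;
  }
  console.log(result.RowsAffected, result.LastInsertId);
}

function onRowChunk(chunk: Uint8Array) {
  // decodeRows() follows the same shape: [Row[], IError].
  const [rows, err] = decodeRows(chunk);
  if (err) {
    console.error("row decode failed:", err);
    return;
  }
  for (const row of rows) {
    console.log(row.r);
  }
}
```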

View file

@ -1,5 +1,5 @@
import fs from "fs";
import dal from "../Bunding";
import dal from "../BunFFI";
const Mb = (num) => Math.round(num / 1024 / 1024);

View file

@ -21,8 +21,8 @@ test("Rows iter, no format", async () => {
id: 1,
})
.Rows<any[]>();
for await (const row of rows) {
//console.log(row);
for await (const result of rows) {
const [row, error] = result;
expect(row.length).toBe(3);
}
expect(true).toBe(true);

Binary file not shown.

View file

@ -13,7 +13,7 @@ type SQLite = {
let Library: SQLite;
if (process.isBun) {
Library = require("./Bunding") as SQLite;
Library = require("./BunFFI") as SQLite;
} else {
Library = require("./Binding") as SQLite;
}

View file

@ -1,12 +1,10 @@
import Builder from "./Builder";
import Bunding from "./Library";
import Napi from "./napi";
import { encodeRequest, decodeRows, decodeResponse } from "./Protocol";
import type { ExecResult } from "./Protocol";
import type { ExecResult, IError } from "./Protocol";
//@ts-ignore
const Binding = Bunding.default ?? Bunding;
Binding.initSQLite(Buffer.from(" "));
const Binding = Napi.default ?? Napi;
type Options = {
database: string;
@ -15,27 +13,26 @@ type Options = {
/**
* Allows using SQLite databases in a NodeJS process.
*/
export default class CBuilder<
export default class C <
I extends abstract new (...args: any) => any,
> extends Builder<I> {
constructor(opts: Options) {
super({ database: opts.database, url: "" });
}
/**
* TODO: handle responses
*/
async *Rows<T = InstanceType<I>>(): AsyncGenerator<T> {
async *Rows<T = InstanceType<I>>(): AsyncGenerator<[T, IError]> {
this.formatRequest();
const req = Buffer.from(encodeRequest(this.request));
const iter = Binding.rowIterator(req);
for (;;) {
const response = iter.next() as Buffer;
const error = decodeResponse(response);
if (error?.Msg) {
throw new Error(error.Msg);
const data = iter.next() as Buffer;
const [_, error] = decodeResponse(data);
if (error) {
yield [{} as T, error];
iter.cleanup();
return;
}
const rows = decodeRows(response);
if (!rows || rows.length === 0) {
const [rows, err] = decodeRows(data);
if (err || !rows || rows.length === 0) {
iter.cleanup();
return;
}
@ -44,19 +41,11 @@ export default class CBuilder<
this.headerRow = row.r;
continue;
}
yield this.formatRow(row.r);
yield [this.formatRow(row.r), null];
}
}
}
async Query<T = InstanceType<I>>(): Promise<T[]> {
const rows = this.Rows();
const result: T[] = [];
for await (const row of rows) {
result.push(row);
}
return result;
}
async Exec(): Promise<ExecResult> {
async Exec(): Promise<[ExecResult, IError]> {
this.formatRequest();
const req = Buffer.from(encodeRequest(this.request));
const iter = Binding.rowIterator(req);

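With its own Query() override removed above, the native client now inherits Builder.Query(), so callers can pick between the tuple-yielding Rows() and the throwing Query(). The sketch below mirrors the Artist/$glob usage from the tests in this commit; the database path is a placeholder.

```ts
// Sketch only: import path, Artist shape and the $glob filter follow the
// tests in this commit.
import DAL from "@nesterow/dal/client/native";

class Artist {
  ArtistId = 0;
  Name = "";
}

const db = new DAL({ database: "data/chinook.db" });

// Tuple style: Rows() yields [row, error] and stops after yielding an error.
for await (const [artist, err] of db
  .In("artists")
  .Find({ name: { $glob: "B*" } })
  .As(Artist)
  .Limit(5)
  .Rows()) {
  if (err) break;
  console.log(artist);
}

// Throwing style: the inherited Query() collects rows into an array, stops
// on a RangeError, and rethrows any other error.
const artists = await db
  .In("artists")
  .Find({ name: { $glob: "B*" } })
  .As(Artist)
  .Limit(5)
  .Query();
console.log(artists.length);
```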
View file

@ -9,7 +9,7 @@ bun install
To run:
```bash
bun run index.ts
bun test
```
This project was created using `bun init` in bun v1.1.25. [Bun](https://bun.sh) is a fast all-in-one JavaScript runtime.

View file

@ -1,6 +1,6 @@
import { describe, expect, test } from "bun:test";
import path from "path";
import DAL from "@nesterow/dal/client/libdal";
import DAL from "@nesterow/dal/client/native";
// in this case we need to use absolute path
const DATABASE_PATH = path.join(import.meta.dir, "..", "data", "chinook.db");
@ -10,7 +10,7 @@ const db = new DAL({
});
describe("Query Interface", () => {
test(".Find", async () => {
test(".Find [find 10 artists whose names start with 'A']", async () => {
const items = db
.In("artists")
.Find({
@ -19,14 +19,15 @@ describe("Query Interface", () => {
.Limit(10)
.Rows();
for await (const item of items) {
for await (const result of items) {
const [item, error] = result;
console.log(item);
}
expect(true).toBe(true);
});
test(".Find.As", async () => {
test(".Find.As [find 5 artists whose names start with 'B'; Represent each row as an Artist object]", async () => {
class Artist {
ArtistId = 0;
Name = "";
@ -38,13 +39,23 @@ describe("Query Interface", () => {
name: { $glob: "B*" },
})
.As(Artist)
.Limit(1)
.Limit(5)
.Rows();
for await (const item of items) {
console.log(item);
for await (const result of items) {
const [item, error] = result;
console.log(123, item);
}
console.log("done");
const all_rows = await db
.In("artists")
.Find({
name: { $glob: "B*" },
})
.As(Artist)
.Limit(5)
.Query();
expect(true).toBe(true);
});
});

View file

@ -1,6 +1,6 @@
import { describe, expect, test } from "bun:test";
import path from "path";
import DAL from "@nesterow/dal/client/libdal";
import DAL from "@nesterow/dal/client/native";
// in this case we need to use absolute path
const DATABASE_PATH = path.join(import.meta.dir, "..", "data", "chinook.db");
@ -37,16 +37,17 @@ describe("Query Interface", () => {
"ar.Name": { $glob: "A*" },
})
.Fields({
"tr.TrackId" : "TrackId",
"tr.Name" : "TrackName",
"ar.Name" : "ArtistName",
"al.Title" : "AlbumTitle",
"tr.TrackId": "TrackId",
"tr.Name": "TrackName",
"ar.Name": "ArtistName",
"al.Title": "AlbumTitle",
})
.Limit(10)
.As(Album)
.Rows();
for await (const item of items) {
for await (const result of items) {
const [item, error] = result;
console.log(item);
}

View file

@ -39,7 +39,7 @@ func (r *RowsIter) Exec(input []byte) {
query, err := req.Parse(adapter.GetDialect(db.Type))
if err != nil || e != nil {
res := proto.Response{
Msg: "failed to unmarshal request",
Error: "failed to unmarshal request",
}
r.Result, _ = res.MarshalMsg(nil)
return
@ -48,7 +48,7 @@ func (r *RowsIter) Exec(input []byte) {
result, err := db.Exec(query)
if err != nil {
res := proto.Response{
Msg: err.Error(),
Error: err.Error(),
}
r.Result, _ = res.MarshalMsg(nil)
return
@ -66,7 +66,7 @@ func (r *RowsIter) Exec(input []byte) {
rows, err := db.Query(query)
if err != nil {
res := proto.Response{
Msg: err.Error(),
Error: err.Error(),
}
r.Result, _ = res.MarshalMsg(nil)
return

View file

@ -6,5 +6,5 @@ type Response struct {
Id uint32 `msg:"i"`
RowsAffected int64 `msg:"ra"`
LastInsertId int64 `msg:"li"`
Msg string `msg:"m"`
Error string `msg:"e"`
}

View file

@ -42,10 +42,10 @@ func (z *Response) DecodeMsg(dc *msgp.Reader) (err error) {
err = msgp.WrapError(err, "LastInsertId")
return
}
case "m":
z.Msg, err = dc.ReadString()
case "e":
z.Error, err = dc.ReadString()
if err != nil {
err = msgp.WrapError(err, "Msg")
err = msgp.WrapError(err, "Error")
return
}
default:
@ -92,14 +92,14 @@ func (z *Response) EncodeMsg(en *msgp.Writer) (err error) {
err = msgp.WrapError(err, "LastInsertId")
return
}
// write "m"
err = en.Append(0xa1, 0x6d)
// write "e"
err = en.Append(0xa1, 0x65)
if err != nil {
return
}
err = en.WriteString(z.Msg)
err = en.WriteString(z.Error)
if err != nil {
err = msgp.WrapError(err, "Msg")
err = msgp.WrapError(err, "Error")
return
}
return
@ -118,9 +118,9 @@ func (z *Response) MarshalMsg(b []byte) (o []byte, err error) {
// string "li"
o = append(o, 0xa2, 0x6c, 0x69)
o = msgp.AppendInt64(o, z.LastInsertId)
// string "m"
o = append(o, 0xa1, 0x6d)
o = msgp.AppendString(o, z.Msg)
// string "e"
o = append(o, 0xa1, 0x65)
o = msgp.AppendString(o, z.Error)
return
}
@ -160,10 +160,10 @@ func (z *Response) UnmarshalMsg(bts []byte) (o []byte, err error) {
err = msgp.WrapError(err, "LastInsertId")
return
}
case "m":
z.Msg, bts, err = msgp.ReadStringBytes(bts)
case "e":
z.Error, bts, err = msgp.ReadStringBytes(bts)
if err != nil {
err = msgp.WrapError(err, "Msg")
err = msgp.WrapError(err, "Error")
return
}
default:
@ -180,6 +180,6 @@ func (z *Response) UnmarshalMsg(bts []byte) (o []byte, err error) {
// Msgsize returns an upper bound estimate of the number of bytes occupied by the serialized message
func (z *Response) Msgsize() (s int) {
s = 1 + 2 + msgp.Uint32Size + 3 + msgp.Int64Size + 3 + msgp.Int64Size + 2 + msgp.StringPrefixSize + len(z.Msg)
s = 1 + 2 + msgp.Uint32Size + 3 + msgp.Int64Size + 3 + msgp.Int64Size + 2 + msgp.StringPrefixSize + len(z.Error)
return
}

View file

@ -1,5 +1,5 @@
{
"files": ["client/index.ts", "client/Binding.ts", "client/libdal.ts"],
"files": ["client/index.ts", "client/Binding.ts", "client/native.ts"],
"compilerOptions": {
"target": "ESNext",
"module": "ESNext",