feat: use JSON column + bulk update support.

mattia
2025-01-08 11:49:53 +01:00
parent 0a73fc5a95
commit 15f96b5a7c
7 changed files with 56 additions and 66 deletions

View File

@@ -6,6 +6,7 @@ export function exportGetSessionsSchema(z: Z) {
         title: "Total logs",
         description:
           "The total number of logs satisfying the filters, regardless of pagination",
+        example: 14,
       }),
       sessions: z
         .object({

View File

@@ -5,7 +5,7 @@ export function getEntries(sessionId: string, category?: string) {
     let query = trx
       .selectFrom("log_entries")
       .where("session_info_id", "=", sessionId)
-      .select(["id", "message", "timestamp", "category"])
+      .select(["id", "message", "timestamp", "category", "metadata"])
       .orderBy("timestamp", "asc");
     if (category) {
       query = query.where("category", "=", category);
@@ -13,24 +13,14 @@ export function getEntries(sessionId: string, category?: string) {
     const logEntries = await query.execute();
-    // get all the metadata for the log entries
-    const metadata = await trx
-      .selectFrom("log_metadata")
-      .where(
-        "log_entry_id",
-        "in",
-        logEntries.map((entry) => entry.id)
-      )
-      .select(["log_entry_id", "key", "value"])
-      .execute();
     return logEntries.map((entry) => ({
       message: entry.message,
       timestamp: entry.timestamp,
       category: entry.category,
-      metadata: metadata
-        .filter((m) => m.log_entry_id === entry.id)
-        .reduce((acc, m) => ({ ...acc, [m.key]: m.value }), {}),
+      metadata: entry.metadata.reduce(
+        (acc, m) => ({ ...acc, [m[0]]: m[1] }),
+        {}
+      ),
     }));
   });
 }
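
Side note, not part of the commit: since ParseJSONResultsPlugin (wired up further down in this commit) already returns the metadata column as a parsed [string, string][] array, the reduce above is just tuples-to-object, and Object.fromEntries would be an equivalent, slightly terser way to express it. A sketch, with made-up sample values:

// sketch: equivalent to the reduce over parsed [key, value] tuples
function toMetadataRecord(tuples: [string, string][]): Record<string, string> {
  return Object.fromEntries(tuples);
}

// e.g. toMetadataRecord([["slot", "3"]]) yields { slot: "3" }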

View File

@@ -13,22 +13,11 @@ export async function getSession(id: string): Promise<GetSessionResponse> {
   const logEntries = await db
     .selectFrom("log_entries")
     .where("session_info_id", "=", id)
-    .select(["id", "message", "timestamp", "category"])
+    .select(["id", "message", "timestamp", "category", "metadata"])
     .orderBy("timestamp", "asc")
     .execute();
-  // get all the metadata for the log entries
-  const metadata = await db
-    .selectFrom("log_metadata")
-    .where(
-      "log_entry_id",
-      "in",
-      logEntries.map((entry) => entry.id)
-    )
-    .select(["log_entry_id", "key", "value"])
-    .execute();
-  // buidl the object accordingly
+  // build the object accordingly
   return {
     game_name: session.game_name,
     version: session.version,
@@ -36,9 +25,10 @@ export async function getSession(id: string): Promise<GetSessionResponse> {
       message: entry.message,
       timestamp: entry.timestamp,
      category: entry.category,
-      metadata: metadata
-        .filter((m) => m.log_entry_id === entry.id)
-        .reduce((acc, m) => ({ ...acc, [m.key]: m.value }), {}),
+      metadata: entry.metadata.reduce(
+        (acc, m) => ({ ...acc, [m[0]]: m[1] }),
+        {}
+      ),
     })),
   };
 }

View File

@@ -6,6 +6,7 @@ import {
   MigrationProvider,
   Migrator,
   MysqlDialect,
+  ParseJSONResultsPlugin,
 } from "kysely";
 import { env } from "process";
@@ -42,6 +43,8 @@ export const db = new Kysely<Database>({
       });
     }
   },
+  // MariaDB's json columns are actually just LONGTEXT columns
+  plugins: [new ParseJSONResultsPlugin()],
 });
 import * as migrations from "./migrations";
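
For context, a minimal sketch of how the plugin slots into a Kysely setup; the mysql2 pool settings below are placeholders, not this project's actual configuration:

import { Kysely, MysqlDialect, ParseJSONResultsPlugin } from "kysely";
import { createPool } from "mysql2";
import { Database } from "./types"; // hypothetical path to the table types

// ParseJSONResultsPlugin post-processes result rows and parses JSON strings
// back into JS values, since MariaDB returns JSON columns as plain LONGTEXT.
export const exampleDb = new Kysely<Database>({
  dialect: new MysqlDialect({
    pool: createPool({
      host: "localhost", // placeholder connection settings
      user: "app",
      password: process.env.DB_PASSWORD,
      database: "logs",
    }),
  }),
  plugins: [new ParseJSONResultsPlugin()],
});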

View File

@@ -0,0 +1,12 @@
+import { RawBuilder, sql } from "kysely";
+
+/**
+ * Expression builder to wrap values that must be inserted into a MariaDB JSON column.
+ * MariaDB simply treats JSON as LONGTEXT with a CHECK constraint that the value is valid JSON, so escaping the stringified value is enough.
+ * @param value The value to transform to JSON.
+ * @returns A builder that produces the JSON value.
+ */
+export function json<T>(value: T): RawBuilder<T> {
+  const json = JSON.stringify(value);
+  return sql`${json}`;
+}
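
A usage sketch of the helper: the sql tagged template binds the stringified value as a query parameter, so the JSON column receives a properly escaped string. Table and column names mirror the ones used in this repo; the ids and values are made up, and session_info_id is assumed from the queries above:

import { db } from "./init";
import { json } from "./kysely-mariadb-json";

// sketch: inserting a single entry whose metadata goes through the helper
await db
  .insertInto("log_entries")
  .values({
    session_info_id: "session-123", // made-up id
    message: "Game saved",
    timestamp: new Date(),
    category: "save",
    metadata: json([["slot", "3"]]), // stored as the JSON string [["slot","3"]]
  })
  .execute();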

View File

@@ -2,6 +2,7 @@ import { LogEntry, SingleMetadata } from "../contract";
 import { HttpError } from "../http-error";
 import { db } from "./init";
 import { zip } from "lodash-es";
+import { json } from "./kysely-mariadb-json";
 
 declare global {
   interface BigInt {
@@ -46,8 +47,8 @@ export async function postLog(
         version,
       })
       .execute();
-    // add the main log line
-    const insertResult = await trx
+    // add the main log lines with their metadata
+    await trx
      .insertInto("log_entries")
      .values(
        entries.map((entry) => ({
@@ -55,30 +56,11 @@ export async function postLog(
          message: entry.message,
          timestamp: new Date(),
          category: entry.category,
+          metadata: json(entry.metadata.map((x) => [x.key, x.value])),
        }))
      )
      .returning("id")
      .execute();
-    console.log("insertResulttt:", JSON.stringify(insertResult, null, 2));
-    for (const r of insertResult) {
-      console.log("insertResult", r.id);
-    }
-    // console.log("insertResult.insertId", (insertResult as any).insertId);
-    // console.log("keys:", Object.keys(insertResult));
-    throw new Error("aaa");
-    // add all the metadata
-    // if (metadata.length > 0) {
-    //   await trx
-    //     .insertInto("log_metadata")
-    //     .values(
-    //       metadata.map(({ key, value }) => ({
-    //         log_entry_id: Number(insertResult.insertId),
-    //         key,
-    //         value,
-    //       }))
-    //     )
-    //     .execute();
-    // }
   });
 }
 }
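
For reference (illustrative values only), this is the shape one entry's metadata takes on its way into the JSON column, matching the [string, string][] type declared for the column in the next file:

// sketch of the serialization done by entry.metadata.map((x) => [x.key, x.value]) + json(...)
const sampleMetadata = [
  { key: "slot", value: "3" },
  { key: "durationMs", value: "1200" },
];
const stored = JSON.stringify(sampleMetadata.map((x) => [x.key, x.value]));
// stored === '[["slot","3"],["durationMs","1200"]]'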

View File

@@ -1,11 +1,33 @@
 import { Generated, Insertable, Selectable } from "kysely";
 
+/*
+ * DATABASE
+ */
 export interface Database {
   log_entries: LogEntriesTable;
-  log_metadata: LogMetadataTable;
   session_info: SessionInfoTable;
 }
 
+/*
+ * SESSION INFO
+ */
+export interface SessionInfoTable
+{
+  id: string;
+  game_name: string;
+  version: string;
+  save_id: string;
+}
+
+export type SessionInfo = Selectable<SessionInfoTable>;
+export type NewSessionInfoTable = Insertable<SessionInfoTable>;
+
+/*
+ * LOG ENTRIES
+ */
 export interface LogEntriesTable
 {
   id: Generated<number>;
@@ -13,6 +35,7 @@ export interface LogEntriesTable
   message: string;
   timestamp: Date;
   category?: string;
+  metadata: [string, string][];
 }
 
 export type LogEntry = Selectable<LogEntriesTable>;
@@ -28,14 +51,3 @@ export interface LogMetadataTable
 
 export type LogMetadata = Selectable<LogMetadataTable>;
 export type NewLogMetadata = Insertable<LogMetadataTable>;
-
-export interface SessionInfoTable
-{
-  id: string;
-  save_id: string;
-  game_name: string;
-  version: string;
-}
-
-export type SessionInfo = Selectable<SessionInfoTable>;
-export type NewSessionInfoTable = Insertable<SessionInfoTable>;
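
As a closing aside, not something this commit does: if the installed kysely version exports JSONColumnType, the metadata column could alternatively be declared so that selects yield the parsed tuples while inserts expect an already-stringified value, making the json() helper's role explicit in the types. A sketch, with session_info_id assumed from the queries elsewhere in the repo:

import { Generated, JSONColumnType } from "kysely";

// Sketch: selects return the parsed tuple array (via ParseJSONResultsPlugin),
// while inserts/updates take a pre-stringified JSON string.
export interface LogEntriesTableAlt {
  id: Generated<number>;
  session_info_id: string; // assumed foreign key used by the queries above
  message: string;
  timestamp: Date;
  category?: string;
  metadata: JSONColumnType<[string, string][]>;
}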