update: schedule archive snapshots to next Saturday midnight

fix: no expire when acquiring lock for classifyVideos
ref: format
This commit is contained in:
alikia2x (寒寒) 2025-05-31 12:13:56 +08:00
parent 2b0497c83a
commit 1a20d5afe0
Signed by: alikia2x
GPG Key ID: 56209E0CCD8420C6
37 changed files with 226 additions and 226 deletions

View File

@ -7,6 +7,7 @@
"postgres": "^3.4.5",
},
"devDependencies": {
"prettier": "^3.5.3",
"vite-tsconfig-paths": "^5.1.4",
"vitest": "^3.1.2",
"vitest-tsconfig-paths": "^3.4.1",

View File

@ -15,6 +15,7 @@
"devDependencies": {
"vite-tsconfig-paths": "^5.1.4",
"vitest": "^3.1.2",
"vitest-tsconfig-paths": "^3.4.1"
"vitest-tsconfig-paths": "^3.4.1",
"prettier": "^3.5.3"
}
}

View File

@ -35,7 +35,7 @@ export async function getVideoInfoFromAllData(sql: Psql, aid: number) {
`;
const row = rows[0];
let authorInfo = "";
if (row.uid && await userExistsInBiliUsers(sql, row.uid)) {
if (row.uid && (await userExistsInBiliUsers(sql, row.uid))) {
const userRows = await sql<BiliUserType[]>`
SELECT * FROM bilibili_user WHERE uid = ${row.uid}
`;
@ -48,7 +48,7 @@ export async function getVideoInfoFromAllData(sql: Psql, aid: number) {
title: row.title,
description: row.description,
tags: row.tags,
author_info: authorInfo,
author_info: authorInfo
};
}

View File

@ -22,7 +22,7 @@ export async function getVideosNearMilestone(sql: Psql) {
return queryResult.map((row) => {
return {
...row,
aid: Number(row.aid),
aid: Number(row.aid)
};
});
}
@ -40,7 +40,7 @@ export async function getLatestVideoSnapshot(sql: Psql, aid: number): Promise<nu
return {
...row,
aid: Number(row.aid),
time: new Date(row.time).getTime(),
time: new Date(row.time).getTime()
};
})[0];
}

View File

@ -79,7 +79,7 @@ export async function videoHasProcessingSchedule(sql: Psql, aid: number) {
FROM snapshot_schedule
WHERE aid = ${aid}
AND status = 'processing'
`
`;
return rows.length > 0;
}
@ -90,7 +90,7 @@ export async function bulkGetVideosWithoutProcessingSchedules(sql: Psql, aids: n
WHERE aid = ANY(${aids})
AND status != 'processing'
GROUP BY aid
`
`;
return rows.map((row) => Number(row.aid));
}
@ -346,6 +346,6 @@ export async function getAllVideosWithoutActiveSnapshotSchedule(psql: Psql) {
FROM bilibili_metadata s
LEFT JOIN snapshot_schedule ss ON s.aid = ss.aid AND (ss.status = 'pending' OR ss.status = 'processing')
WHERE ss.aid IS NULL
`
`;
return rows.map((r) => Number(r.aid));
}

View File

@ -16,8 +16,8 @@ class AkariProto extends AIManager {
constructor() {
super();
this.models = {
"classifier": onnxClassifierPath,
"embedding": onnxEmbeddingPath,
classifier: onnxClassifierPath,
embedding: onnxEmbeddingPath
};
}
@ -55,7 +55,7 @@ class AkariProto extends AIManager {
const { input_ids } = await tokenizer(texts, {
add_special_tokens: false,
return_tensor: false,
return_tensor: false
});
const cumsum = (arr: number[]): number[] =>
@ -66,9 +66,9 @@ class AkariProto extends AIManager {
const inputs = {
input_ids: new ort.Tensor("int64", new BigInt64Array(flattened_input_ids.map(BigInt)), [
flattened_input_ids.length,
flattened_input_ids.length
]),
offsets: new ort.Tensor("int64", new BigInt64Array(offsets.map(BigInt)), [offsets.length]),
offsets: new ort.Tensor("int64", new BigInt64Array(offsets.map(BigInt)), [offsets.length])
};
const { embeddings } = await session.run(inputs);
@ -77,21 +77,14 @@ class AkariProto extends AIManager {
private async runClassification(embeddings: number[]): Promise<number[]> {
const session = this.getModelSession("classifier");
const inputTensor = new ort.Tensor(
Float32Array.from(embeddings),
[1, 3, 1024],
);
const inputTensor = new ort.Tensor(Float32Array.from(embeddings), [1, 3, 1024]);
const { logits } = await session.run({ channel_features: inputTensor });
return this.softmax(logits.data as Float32Array);
}
public async classifyVideo(title: string, description: string, tags: string, aid?: number): Promise<number> {
const embeddings = await this.getJinaEmbeddings1024([
title,
description,
tags,
]);
const embeddings = await this.getJinaEmbeddings1024([title, description, tags]);
const probabilities = await this.runClassification(embeddings);
if (aid) {
logger.log(`Prediction result for aid: ${aid}: [${probabilities.map((p) => p.toFixed(5))}]`, "ml");

View File

@ -6,8 +6,7 @@ export class AIManager {
public sessions: { [key: string]: ort.InferenceSession } = {};
public models: { [key: string]: string } = {};
constructor() {
}
constructor() {}
public async init() {
const modelKeys = Object.keys(this.models);

View File

@ -3,9 +3,26 @@ import { getAllVideosWithoutActiveSnapshotSchedule, scheduleSnapshot } from "db/
import logger from "@core/log/logger.ts";
import { lockManager } from "@core/mq/lockManager.ts";
import { getLatestVideoSnapshot } from "db/snapshot.ts";
import { HOUR, MINUTE } from "@core/const/time.ts";
import { MINUTE } from "@core/const/time.ts";
import { sql } from "@core/db/dbNew";
/**
 * Returns the timestamp (milliseconds since the Unix epoch) of the upcoming
 * Saturday at 00:00:00.000 local time.
 *
 * If today is already Saturday, the result is the Saturday one week from now,
 * never earlier today — so the returned instant is always in the future
 * (except at the exact midnight boundary).
 */
function getNextSaturdayMidnightTimestamp(): number {
	const today = new Date();
	// getDay(): 0 = Sunday … 6 = Saturday. A zero offset (today is Saturday)
	// is bumped to a full week ahead.
	const daysAhead = ((6 - today.getDay() + 7) % 7) || 7;
	const target = new Date(today);
	target.setDate(target.getDate() + daysAhead);
	// Normalize to local midnight.
	target.setHours(0, 0, 0, 0);
	return target.getTime();
}
export const archiveSnapshotsWorker = async (_job: Job) => {
try {
const startedAt = Date.now();
@ -20,15 +37,16 @@ export const archiveSnapshotsWorker = async (_job: Job) => {
const latestSnapshot = await getLatestVideoSnapshot(sql, aid);
const now = Date.now();
const lastSnapshotedAt = latestSnapshot?.time ?? now;
const interval = 168;
const nextSatMidnight = getNextSaturdayMidnightTimestamp();
const interval = nextSatMidnight - now;
logger.log(
`Scheduled archive snapshot for aid ${aid} in ${interval} hours.`,
"mq",
"fn:archiveSnapshotsWorker"
);
const targetTime = lastSnapshotedAt + interval * HOUR;
const targetTime = lastSnapshotedAt + interval;
await scheduleSnapshot(sql, aid, "archive", targetTime);
if (now - startedAt > 250 * MINUTE) {
if (now - startedAt > 30 * MINUTE) {
return;
}
}

View File

@ -34,7 +34,7 @@ export const classifyVideoWorker = async (job: Job) => {
await job.updateData({
...job.data,
label: label,
label: label
});
return 0;
@ -46,7 +46,7 @@ export const classifyVideosWorker = async () => {
return;
}
await lockManager.acquireLock("classifyVideos");
await lockManager.acquireLock("classifyVideos", 5 * 60);
const videos = await getUnlabelledVideos(sql);
logger.log(`Found ${videos.length} unlabelled videos`);

View File

@ -4,4 +4,4 @@ import { collectSongs } from "mq/task/collectSongs.ts";
export const collectSongsWorker = async (_job: Job): Promise<void> => {
await collectSongs();
return;
}
};

View File

@ -26,4 +26,4 @@ export const dispatchMilestoneSnapshotsWorker = async (_job: Job) => {
} catch (e) {
logger.error(e as Error, "mq", "fn:dispatchMilestoneSnapshotsWorker");
}
}
};

View File

@ -1,10 +1,7 @@
import { Job } from "bullmq";
import { getLatestVideoSnapshot } from "db/snapshot.ts";
import { truncate } from "utils/truncate.ts";
import {
getVideosWithoutActiveSnapshotScheduleByType,
scheduleSnapshot
} from "db/snapshotSchedule.ts";
import { getVideosWithoutActiveSnapshotScheduleByType, scheduleSnapshot } from "db/snapshotSchedule.ts";
import logger from "@core/log/logger.ts";
import { HOUR, MINUTE, WEEK } from "@core/const/time.ts";
import { lockManager } from "@core/mq/lockManager.ts";

View File

@ -4,4 +4,4 @@ import { queueLatestVideos } from "mq/task/queueLatestVideo.ts";
export const getLatestVideosWorker = async (_job: Job): Promise<void> => {
await queueLatestVideos(sql);
}
};

View File

@ -10,4 +10,4 @@ export const getVideoInfoWorker = async (job: Job): Promise<void> => {
return;
}
await insertVideoInfo(sql, aid);
}
};

View File

@ -5,15 +5,15 @@ import {
getBulkSnapshotsInNextSecond,
getSnapshotsInNextSecond,
setSnapshotStatus,
videoHasProcessingSchedule,
videoHasProcessingSchedule
} from "db/snapshotSchedule.ts";
import logger from "@core/log/logger.ts";
import { SnapshotQueue } from "mq/index.ts";
import { sql } from "@core/db/dbNew";
const priorityMap: { [key: string]: number } = {
"milestone": 1,
"normal": 3,
milestone: 1,
normal: 3
};
export const bulkSnapshotTickWorker = async (_job: Job) => {
@ -35,12 +35,16 @@ export const bulkSnapshotTickWorker = async (_job: Job) => {
created_at: schedule.created_at,
started_at: schedule.started_at,
finished_at: schedule.finished_at,
status: schedule.status,
status: schedule.status
};
});
await SnapshotQueue.add("bulkSnapshotVideo", {
schedules: schedulesData,
}, { priority: 3 });
await SnapshotQueue.add(
"bulkSnapshotVideo",
{
schedules: schedulesData
},
{ priority: 3 }
);
}
return `OK`;
} catch (e) {
@ -61,11 +65,15 @@ export const snapshotTickWorker = async (_job: Job) => {
}
const aid = Number(schedule.aid);
await setSnapshotStatus(sql, schedule.id, "processing");
await SnapshotQueue.add("snapshotVideo", {
await SnapshotQueue.add(
"snapshotVideo",
{
aid: Number(aid),
id: Number(schedule.id),
type: schedule.type ?? "normal",
}, { priority });
type: schedule.type ?? "normal"
},
{ priority }
);
}
return `OK`;
} catch (e) {

View File

@ -10,9 +10,9 @@ import { NetSchedulerError } from "@core/net/delegate.ts";
import { sql } from "@core/db/dbNew.ts";
const snapshotTypeToTaskMap: { [key: string]: string } = {
"milestone": "snapshotMilestoneVideo",
"normal": "snapshotVideo",
"new": "snapshotMilestoneVideo",
milestone: "snapshotMilestoneVideo",
normal: "snapshotVideo",
new: "snapshotMilestoneVideo"
};
export const snapshotVideoWorker = async (job: Job): Promise<void> => {
@ -31,7 +31,7 @@ export const snapshotVideoWorker = async (job: Job): Promise<void> => {
logger.warn(
`Video ${aid} has status ${status} in the database. Abort snapshoting.`,
"mq",
"fn:dispatchRegularSnapshotsWorker",
"fn:dispatchRegularSnapshotsWorker"
);
return;
}
@ -43,7 +43,7 @@ export const snapshotVideoWorker = async (job: Job): Promise<void> => {
logger.warn(
`Bilibili return status ${status} when snapshoting for ${aid}.`,
"mq",
"fn:dispatchRegularSnapshotsWorker",
"fn:dispatchRegularSnapshotsWorker"
);
return;
}
@ -51,7 +51,7 @@ export const snapshotVideoWorker = async (job: Job): Promise<void> => {
if (type === "new") {
const publihsedAt = await getSongsPublihsedAt(sql, aid);
const timeSincePublished = stat.time - publihsedAt!;
const viewsPerHour = stat.views / timeSincePublished * HOUR;
const viewsPerHour = (stat.views / timeSincePublished) * HOUR;
if (timeSincePublished > 48 * HOUR) {
return;
}
@ -77,7 +77,7 @@ export const snapshotVideoWorker = async (job: Job): Promise<void> => {
logger.warn(
`ETA (${etaHoursString}) too long for milestone snapshot. aid: ${aid}.`,
"mq",
"fn:snapshotVideoWorker",
"fn:snapshotVideoWorker"
);
return;
}
@ -86,23 +86,17 @@ export const snapshotVideoWorker = async (job: Job): Promise<void> => {
await scheduleSnapshot(sql, aid, type, targetTime);
await setSnapshotStatus(sql, id, "completed");
return;
}
catch (e) {
} catch (e) {
if (e instanceof NetSchedulerError && e.code === "NO_PROXY_AVAILABLE") {
logger.warn(
`No available proxy for aid ${job.data.aid}.`,
"mq",
"fn:snapshotVideoWorker",
);
logger.warn(`No available proxy for aid ${job.data.aid}.`, "mq", "fn:snapshotVideoWorker");
await setSnapshotStatus(sql, id, "no_proxy");
await scheduleSnapshot(sql, aid, type, Date.now() + retryInterval);
return;
}
else if (e instanceof NetSchedulerError && e.code === "ALICLOUD_PROXY_ERR") {
} else if (e instanceof NetSchedulerError && e.code === "ALICLOUD_PROXY_ERR") {
logger.warn(
`Failed to proxy request for aid ${job.data.aid}: ${e.message}`,
"mq",
"fn:snapshotVideoWorker",
"fn:snapshotVideoWorker"
);
await setSnapshotStatus(sql, id, "failed");
await scheduleSnapshot(sql, aid, type, Date.now() + retryInterval);

View File

@ -3,7 +3,7 @@ import {
bulkScheduleSnapshot,
bulkSetSnapshotStatus,
scheduleSnapshot,
snapshotScheduleExists,
snapshotScheduleExists
} from "db/snapshotSchedule.ts";
import { bulkGetVideoStats } from "net/bulkGetVideoStats.ts";
import logger from "@core/log/logger.ts";
@ -55,7 +55,7 @@ export const takeBulkSnapshotForVideosWorker = async (job: Job) => {
${shares},
${favorites}
)
`
`;
logger.log(`Taken snapshot for video ${aid} in bulk.`, "net", "fn:takeBulkSnapshotForVideosWorker");
}
@ -72,11 +72,7 @@ export const takeBulkSnapshotForVideosWorker = async (job: Job) => {
return `DONE`;
} catch (e) {
if (e instanceof NetSchedulerError && e.code === "NO_PROXY_AVAILABLE") {
logger.warn(
`No available proxy for bulk request now.`,
"mq",
"fn:takeBulkSnapshotForVideosWorker",
);
logger.warn(`No available proxy for bulk request now.`, "mq", "fn:takeBulkSnapshotForVideosWorker");
await bulkSetSnapshotStatus(sql, ids, "no_proxy");
await bulkScheduleSnapshot(sql, aidsToFetch, "normal", Date.now() + 20 * MINUTE * Math.random());
return;

View File

@ -62,8 +62,8 @@ export async function initMQ() {
});
await SnapshotQueue.upsertJobScheduler("dispatchArchiveSnapshots", {
every: 6 * HOUR,
immediately: true
every: 2 * HOUR,
immediately: false
});
await SnapshotQueue.upsertJobScheduler("scheduleCleanup", {

View File

@ -14,11 +14,10 @@ const getFactor = (x: number) => {
const g = 455;
if (x > g) {
return log(b / log(x + 1), a);
}
else {
} else {
return log(b / log(x + c), a) + u;
}
}
};
/*
* Returns the minimum ETA in hours for the next snapshot

View File

@ -25,5 +25,5 @@ export async function insertIntoSongs(sql: Psql, aid: number) {
(SELECT duration FROM bilibili_metadata WHERE aid = ${aid})
)
ON CONFLICT DO NOTHING
`
`;
}

View File

@ -18,9 +18,9 @@ export async function insertVideoInfo(sql: Psql, aid: number) {
const bvid = data.View.bvid;
const desc = data.View.desc;
const uid = data.View.owner.mid;
const tags = data.Tags
.filter((tag) => !["old_channel", "topic"].indexOf(tag.tag_type))
.map((tag) => tag.tag_name).join(",");
const tags = data.Tags.filter((tag) => !["old_channel", "topic"].indexOf(tag.tag_type))
.map((tag) => tag.tag_name)
.join(",");
const title = data.View.title;
const published_at = formatTimestampToPsql(data.View.pubdate * SECOND + 8 * HOUR);
const duration = data.View.duration;
@ -55,7 +55,7 @@ export async function insertVideoInfo(sql: Psql, aid: number) {
${stat.share},
${stat.favorite}
)
`
`;
logger.log(`Inserted video metadata for aid: ${aid}`, "mq");
await ClassifyVideoQueue.add("classifyVideo", { aid });

View File

@ -24,11 +24,7 @@ export interface SnapshotNumber {
* - The native `fetch` function threw an error: with error code `FETCH_ERROR`
* - The alicloud-fc threw an error: with error code `ALICLOUD_FC_ERROR`
*/
export async function insertVideoSnapshot(
sql: Psql,
aid: number,
task: string,
): Promise<number | SnapshotNumber> {
export async function insertVideoSnapshot(sql: Psql, aid: number, task: string): Promise<number | SnapshotNumber> {
const data = await getVideoInfo(aid, task);
if (typeof data == "number") {
return data;
@ -45,7 +41,7 @@ export async function insertVideoSnapshot(
await sql`
INSERT INTO video_snapshot (aid, views, danmakus, replies, likes, coins, shares, favorites)
VALUES (${aid}, ${views}, ${danmakus}, ${replies}, ${likes}, ${coins}, ${shares}, ${favorites})
`
`;
logger.log(`Taken snapshot for video ${aid}.`, "net", "fn:insertVideoSnapshot");
@ -58,6 +54,6 @@ export async function insertVideoSnapshot(
coins,
shares,
favorites,
time,
time
};
}

View File

@ -6,9 +6,7 @@ import logger from "@core/log/logger.ts";
import { LatestVideosQueue } from "mq/index.ts";
import type { Psql } from "@core/db/psql.d.ts";
export async function queueLatestVideos(
sql: Psql,
): Promise<number | null> {
export async function queueLatestVideos(sql: Psql): Promise<number | null> {
let page = 1;
let i = 0;
const videosFound = new Set();
@ -26,14 +24,18 @@ export async function queueLatestVideos(
if (videoExists) {
continue;
}
await LatestVideosQueue.add("getVideoInfo", { aid }, {
await LatestVideosQueue.add(
"getVideoInfo",
{ aid },
{
delay,
attempts: 100,
backoff: {
type: "fixed",
delay: SECOND * 5,
},
});
delay: SECOND * 5
}
}
);
videosFound.add(aid);
allExists = false;
delay += Math.random() * SECOND * 1.5;
@ -42,7 +44,7 @@ export async function queueLatestVideos(
logger.log(
`Page ${page} crawled, total: ${videosFound.size}/${i} videos added/observed.`,
"net",
"fn:queueLatestVideos()",
"fn:queueLatestVideos()"
);
if (allExists) {
return 0;

View File

@ -14,7 +14,7 @@ export const getRegularSnapshotInterval = async (sql: Psql, aid: number) => {
if (hoursDiff < 8) return 24;
const viewsDiff = latestSnapshot.views - oldSnapshot.views;
if (viewsDiff === 0) return 72;
const speedPerDay = viewsDiff / (hoursDiff + 0.001) * 24;
const speedPerDay = (viewsDiff / (hoursDiff + 0.001)) * 24;
if (speedPerDay < 6) return 36;
if (speedPerDay < 120) return 24;
if (speedPerDay < 320) return 12;

View File

@ -2,11 +2,7 @@ import { sql } from "@core/db/dbNew";
import logger from "@core/log/logger.ts";
export async function removeAllTimeoutSchedules() {
logger.log(
"Too many timeout schedules, directly removing these schedules...",
"mq",
"fn:scheduleCleanupWorker",
);
logger.log("Too many timeout schedules, directly removing these schedules...", "mq", "fn:scheduleCleanupWorker");
return await sql`
DELETE FROM snapshot_schedule
WHERE status IN ('pending', 'processing')

View File

@ -7,7 +7,8 @@
"worker:filter": "bun run ./build/filterWorker.js",
"adder": "bun run ./src/jobAdder.ts",
"bullui": "bun run ./src/bullui.ts",
"all": "bun run concurrently --restart-tries -1 'bun run worker:main' 'bun run adder' 'bun run worker:filter'"
"all": "bun run concurrently --restart-tries -1 'bun run worker:main' 'bun run adder' 'bun run worker:filter'",
"format": "prettier --write ."
},
"devDependencies": {
"concurrently": "^9.1.2"

View File

@ -6,7 +6,6 @@ await Bun.build({
target: "node"
});
const file = Bun.file("./build/filterWorker.js");
const code = await file.text();

View File

@ -11,9 +11,9 @@ createBullBoard({
queues: [
new BullMQAdapter(LatestVideosQueue),
new BullMQAdapter(ClassifyVideoQueue),
new BullMQAdapter(SnapshotQueue),
new BullMQAdapter(SnapshotQueue)
],
serverAdapter: serverAdapter,
serverAdapter: serverAdapter
});
const app = express();

View File

@ -12,8 +12,8 @@ const shutdown = async (signal: string) => {
process.exit(0);
};
process.on('SIGINT', () => shutdown('SIGINT'));
process.on('SIGTERM', () => shutdown('SIGTERM'));
process.on("SIGINT", () => shutdown("SIGINT"));
process.on("SIGTERM", () => shutdown("SIGTERM"));
await Akari.init();
@ -29,7 +29,7 @@ const filterWorker = new Worker(
break;
}
},
{ connection: redis as ConnectionOptions, concurrency: 2, removeOnComplete: { count: 1000 } },
{ connection: redis as ConnectionOptions, concurrency: 2, removeOnComplete: { count: 1000 } }
);
filterWorker.on("active", () => {

View File

@ -10,7 +10,7 @@ import {
scheduleCleanupWorker,
snapshotTickWorker,
snapshotVideoWorker,
takeBulkSnapshotForVideosWorker,
takeBulkSnapshotForVideosWorker
} from "mq/exec/executors.ts";
import { redis } from "@core/db/redis.ts";
import logger from "@core/log/logger.ts";
@ -37,8 +37,8 @@ const shutdown = async (signal: string) => {
process.exit(0);
};
process.on('SIGINT', () => shutdown('SIGINT'));
process.on('SIGTERM', () => shutdown('SIGTERM'));
process.on("SIGINT", () => shutdown("SIGINT"));
process.on("SIGTERM", () => shutdown("SIGTERM"));
const latestVideoWorker = new Worker(
"latestVideos",
@ -58,8 +58,8 @@ const latestVideoWorker = new Worker(
connection: redis as ConnectionOptions,
concurrency: 6,
removeOnComplete: { count: 1440 },
removeOnFail: { count: 0 },
},
removeOnFail: { count: 0 }
}
);
latestVideoWorker.on("active", () => {
@ -95,7 +95,7 @@ const snapshotWorker = new Worker(
break;
}
},
{ connection: redis as ConnectionOptions, concurrency: 50, removeOnComplete: { count: 2000 } },
{ connection: redis as ConnectionOptions, concurrency: 50, removeOnComplete: { count: 2000 } }
);
snapshotWorker.on("error", (err) => {

View File

@ -56,26 +56,26 @@ const databasePreparationQuery = `
CREATE INDEX idx_snapshot_schedule_status ON snapshot_schedule USING btree (status);
CREATE INDEX idx_snapshot_schedule_type ON snapshot_schedule USING btree (type);
CREATE UNIQUE INDEX snapshot_schedule_pkey ON snapshot_schedule USING btree (id);
`
`;
const cleanUpQuery = `
DROP SEQUENCE IF EXISTS "snapshot_schedule_id_seq" CASCADE;
DROP TABLE IF EXISTS "snapshot_schedule" CASCADE;
`
`;
async function testMocking() {
await sql.begin(async tx => {
await sql.begin(async (tx) => {
await tx.unsafe(cleanUpQuery).simple();
await tx.unsafe(databasePreparationQuery).simple();
await tx`
INSERT INTO snapshot_schedule
${sql(mockSnapshotSchedules, 'aid', 'created_at', 'finished_at', 'id', 'started_at', 'status', 'type')}
${sql(mockSnapshotSchedules, "aid", "created_at", "finished_at", "id", "started_at", "status", "type")}
`;
await tx`
ROLLBACK;
`
`;
await tx.unsafe(cleanUpQuery).simple();
return;
@ -83,18 +83,18 @@ async function testMocking() {
}
async function testBulkSetSnapshotStatus() {
return await sql.begin(async tx => {
return await sql.begin(async (tx) => {
await tx.unsafe(cleanUpQuery).simple();
await tx.unsafe(databasePreparationQuery).simple();
await tx`
INSERT INTO snapshot_schedule
${sql(mockSnapshotSchedules, 'aid', 'created_at', 'finished_at', 'id', 'started_at', 'status', 'type')}
${sql(mockSnapshotSchedules, "aid", "created_at", "finished_at", "id", "started_at", "status", "type")}
`;
const ids = [1, 2, 3];
await bulkSetSnapshotStatus(tx, ids, 'pending')
await bulkSetSnapshotStatus(tx, ids, "pending");
const rows = tx<{ status: string }[]>`
SELECT status FROM snapshot_schedule WHERE id = 1;
@ -102,7 +102,7 @@ async function testBulkSetSnapshotStatus() {
await tx`
ROLLBACK;
`
`;
await tx.unsafe(cleanUpQuery).simple();
return rows;
@ -116,5 +116,5 @@ test("data mocking works", async () => {
test("bulkSetSnapshotStatus core logic works smoothly", async () => {
const rows = await testBulkSetSnapshotStatus();
expect(rows.every(item => item.status === 'pending')).toBe(true);
expect(rows.every((item) => item.status === "pending")).toBe(true);
});

View File

@ -1,6 +1,6 @@
export function formatTimestampToPsql(timestamp: number) {
const date = new Date(timestamp);
return date.toISOString().slice(0, 23).replace("T", " ") + '+08';
return date.toISOString().slice(0, 23).replace("T", " ") + "+08";
}
export function parseTimestampFromPsql(timestamp: string) {

View File

@ -1,5 +1,5 @@
import { defineConfig } from "vitest/config";
import tsconfigPaths from 'vite-tsconfig-paths'
import tsconfigPaths from "vite-tsconfig-paths";
export default defineConfig({
plugins: [tsconfigPaths()]