ref: use custom logger

alikia2x (寒寒) 2025-02-09 22:38:45 +08:00
parent 2585025f69
commit 63874eef20
Signed by: alikia2x
GPG Key ID: 56209E0CCD8420C6
10 changed files with 94 additions and 42 deletions


@@ -10,7 +10,7 @@
"build": "deno run -A dev.ts build",
"preview": "deno run -A main.ts",
"update": "deno run -A -r https://fresh.deno.dev/update .",
"worker": "deno run --env-file=.env --allow-env --allow-read --allow-ffi --allow-net worker.ts",
"worker": "deno run --env-file=.env --allow-env --allow-read --allow-ffi --allow-net --allow-write worker.ts",
"adder": "deno run --allow-env --allow-read --allow-ffi --allow-net jobAdder.ts",
"bullui": "deno run --allow-read --allow-env --allow-ffi --allow-net bullui.ts",
"all": "concurrently 'deno task start' 'deno task worker' 'deno task adder' 'deno task bullui'"


@@ -1,5 +1,6 @@
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import { AllDataType } from "lib/db/schema.d.ts";
import logger from "lib/log/logger.ts";
export async function videoExistsInAllData(client: Client, aid: number) {
return await client.queryObject<{ exists: boolean }>("SELECT EXISTS(SELECT 1 FROM all_data WHERE aid = $1)", [aid])
@@ -7,7 +8,7 @@ export async function videoExistsInAllData(client: Client, aid: number) {
}
export async function insertIntoAllData(client: Client, data: AllDataType) {
console.log(`[db:all_data] inserted ${data.aid}`)
logger.log(`inserted ${data.aid}`, "db-all_data")
return await client.queryObject(
"INSERT INTO all_data (aid, bvid, description, uid, tags, title, published_at) VALUES ($1, $2, $3, $4, $5, $6, $7)",
[data.aid, data.bvid, data.description, data.uid, data.tags, data.title, data.published_at],


@@ -1,15 +1,18 @@
import winston, { format, transports } from "npm:winston";
import { TransformableInfo } from "npm:logform";
import chalk from "npm:chalk";
import stripAnsi from 'npm:strip-ansi';
const customFormat = format.printf((info: TransformableInfo) => {
const { timestamp, level, message, service, codePath } = info;
const coloredService = service ? chalk.magenta(service): "";
const { timestamp, level, message, service, codePath, error } = info;
const coloredService = service ? chalk.magenta(service) : "";
const coloredCodePath = codePath ? chalk.grey(`@${codePath}`) : "";
const colon = service || codePath ? ": " : "";
const colon = service || codePath ? ": " : "";
const err = error as Error | undefined;
if (err) {
return `${timestamp} [${level}] ${coloredService}${colon}${message}\n${chalk.red(err.stack) ?? ""}`;
}
return stripAnsi(level) === "debug"
return coloredCodePath
? `${timestamp} [${level}] ${coloredService}${coloredCodePath}${colon}${message}`
: `${timestamp} [${level}] ${coloredService}${colon}${message}`;
});
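
For intuition, the reworked printf format above renders entries in the shape sketched below. The helper is a hypothetical stand-in for customFormat, with colors and the real winston timestamp left out:

// Hypothetical stand-in for customFormat (colors and real timestamps omitted).
const sampleLine = (level: string, message: string, service?: string, codePath?: string) => {
	const svc = service ?? "";
	const path = codePath ? `@${codePath}` : "";
	const colon = service || codePath ? ": " : "";
	return `22:38:45.123 [${level}] ${svc}${path}${colon}${message}`;
};
console.log(sampleLine("info", "inserted 12345", "db-all_data"));
// 22:38:45.123 [info] db-all_data: inserted 12345
console.log(sampleLine("debug", "hello", "net", "getVideoTags.ts"));
// 22:38:45.123 [debug] net@getVideoTags.ts: hello
// An attached Error additionally gets its stack printed on the following lines.
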
@@ -17,49 +20,79 @@ const customFormat = format.printf((info: TransformableInfo) => {
const timestampFormat = format.timestamp({ format: "YYYY-MM-DD HH:mm:ss.SSS" });
const createTransport = (level: string, filename: string) => {
const MB = 1000000;
let maxsize = undefined;
let maxFiles = undefined;
let tailable = undefined;
if (level === "verbose") {
maxsize = 10 * MB;
maxFiles = 10;
tailable = false;
} else if (level === "warn") {
maxsize = 10 * MB;
maxFiles = 1;
tailable = false;
}
function replacer(key: unknown, value: unknown) {
if (typeof value === "bigint") {
return value.toString();
}
if (key == "error") {
return undefined;
}
return value;
}
return new transports.File({
level,
filename,
format: format.combine(timestampFormat, format.json()),
maxsize,
tailable,
maxFiles,
format: format.combine(timestampFormat, format.json({ replacer })),
});
};
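
Reading the rotation options above: the verbose file rotates across up to 10 files of roughly 10 MB each, the warn file is capped at a single 10 MB file, and the error transport gets no size limit. The replacer keeps BigInt fields serializable and drops the attached error object from the JSON lines (the console format prints its stack instead); assuming format.json({ replacer }) applies it during JSON serialization, the effect is roughly:

// Illustrative run of the same replacer logic through plain JSON.stringify.
const replacerDemo = (key: unknown, value: unknown) => {
	if (typeof value === "bigint") return value.toString();
	if (key === "error") return undefined;
	return value;
};
console.log(JSON.stringify({ views: 10n, error: new Error("boom"), msg: "ok" }, replacerDemo));
// {"views":"10","msg":"ok"}
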
const verboseLogPath = Deno.env.get("LOG_VERBOSE") ?? "logs/verbose.log";
const warnLogPath = Deno.env.get("LOG_WARN") ?? "logs/warn.log";
const errorLogPath = Deno.env.get("LOG_ERROR") ?? "logs/error.log";
const winstonLogger = winston.createLogger({
levels: winston.config.npm.levels,
transports: [
new transports.Console({
level: "debug",
format: format.combine(
format.timestamp({ format: "HH:mm:ss.SSS" }), // Different format for console
format.timestamp({ format: "HH:mm:ss.SSS" }),
format.colorize(),
format.errors({ stack: true }),
customFormat,
),
}),
createTransport("info", "logs/app.log"),
createTransport("warn", "logs/warn.log"),
createTransport("error", "logs/error.log"),
createTransport("verbose", verboseLogPath),
createTransport("warn", warnLogPath),
createTransport("error", errorLogPath),
],
});
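
Under winston's npm levels (error: 0 through silly: 6), a transport records entries that are at least as severe as its own level, so which output a message reaches now follows from its level rather than from the removed target: "term" | "file" | "both" argument of the old log(). A reading of the configuration above, reusing the winston import at the top of the file:

// Assumed routing with the levels above (an interpretation, not extra behaviour):
//   console   ("debug")    error, warn, info, http, verbose, debug   - the only place debug output appears
//   verbose   ("verbose")  error, warn, info, http, verbose          - written to verboseLogPath
//   warn      ("warn")     error, warn                               - written to warnLogPath
//   error     ("error")    error                                     - written to errorLogPath
console.log(winston.config.npm.levels);
// { error: 0, warn: 1, info: 2, http: 3, verbose: 4, debug: 5, silly: 6 }
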
const logger = {
log: (message: string, service?: string, target: "term" | "file" | "both" = "both") => {
const logLevels = [];
if (target === "term" || target === "both") {
logLevels.push("info");
}
if (target === "file" || target === "both") {
logLevels.push("info");
}
logLevels.forEach((level) => winstonLogger.log(level, message, { service }));
verbose: (message: string, service?: string, codePath?: string) => {
winstonLogger.verbose(message, { service, codePath });
},
log: (message: string, service?: string, codePath?: string) => {
winstonLogger.info(message, { service, codePath });
},
debug: (message: string, service?: string, codePath?: string) => {
winstonLogger.debug(message, { service, codePath });
},
warn: (message: string, service?: string) => {
winstonLogger.warn(message, { service });
warn: (message: string, service?: string, codePath?: string) => {
winstonLogger.warn(message, { service, codePath });
},
error: (message: string, service?: string) => {
winstonLogger.error(message, { service });
error: (error: string | Error, service?: string, codePath?: string) => {
if (error instanceof Error) {
winstonLogger.error(error.message, { service, error: error, codePath });
} else {
winstonLogger.error(error, { service, codePath });
}
},
};

lib/log/test.ts Normal file

@@ -0,0 +1,12 @@
import logger from "lib/log/logger.ts";
logger.error(Error("test error"), "test service");
logger.debug(`some string`);
logger.debug(`hello`, "servicename");
logger.debug(`hello`, "servicename", "codepath.ts");
logger.log("something");
logger.log("foo", "service");
logger.log("foo", "db", "insert.ts");
logger.warn("warn");
logger.error("error");
logger.verbose("error");
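
Assuming the default LOG_* paths and the worker's permission set, running this file should leave JSON lines shaped like the ones below in logs/verbose.log; the debug() calls stay console-only because every file transport starts at "verbose":

// logs/verbose.log, shape only: timestamps, key order and line order are illustrative.
// {"level":"error","message":"test error","service":"test service","timestamp":"2025-02-09 22:38:45.000"}
// {"level":"info","message":"foo","service":"db","codePath":"insert.ts","timestamp":"2025-02-09 22:38:45.001"}
// {"level":"warn","message":"warn","timestamp":"2025-02-09 22:38:45.002"}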


@@ -5,11 +5,12 @@ import { MINUTE } from "$std/datetime/constants.ts";
import { db } from "lib/db/init.ts";
import { truncate } from "lib/utils/truncate.ts";
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import logger from "lib/log/logger.ts";
const delayMap = [5, 10, 15, 30, 60, 60];
const addJobToQueue = (failedCount: number, delay: number) => {
console.log(`[bullmq] job:getLatestVideos added to queue with delay of ${delay / MINUTE} minutes.`);
logger.log(`job:getLatestVideos added to queue, delay: ${(delay / MINUTE).toFixed(2)} minutes.`, "mq");
MainQueue.upsertJobScheduler("getLatestVideos", {
every: delay,
}, {
@@ -29,7 +30,7 @@ export const insertVideosWorker = async (job: Job) => {
};
const executeTask = async (client: Client, failedCount: number) => {
console.log("[task] Executing task:getLatestVideos");
logger.log("getLatestVideos now executing", "task");
const result = await insertLatestVideos(client);
failedCount = result !== 0 ? truncate(failedCount + 1, 0, 5) : 0;
if (failedCount !== 0) {


@@ -1,13 +1,14 @@
import { MINUTE } from "$std/datetime/constants.ts";
import MainQueue from "lib/mq/index.ts";
import logger from "lib/log/logger.ts";
async function configGetLatestVideos() {
await MainQueue.upsertJobScheduler("getLatestVideos", {
every: 5 * MINUTE
every: 1 * MINUTE
})
}
export async function initMQ() {
await configGetLatestVideos()
console.log("Message queue initialized.")
logger.log("Message queue initialized.")
}


@@ -3,6 +3,7 @@ import formatPublishedAt from "lib/utils/formatTimestampToPostgre.ts";
import { getVideoTags } from "lib/net/getVideoTags.ts";
import { AllDataType } from "lib/db/schema.d.ts";
import { sleep } from "lib/utils/sleep.ts";
import logger from "lib/log/logger.ts";
export async function getLatestVideos(page: number = 1, pageSize: number = 10, sleepRate: number = 250, fetchTags: boolean = true): Promise<AllDataType[] | null> {
try {
@@ -10,12 +11,12 @@ export async function getLatestVideos(page: number = 1, pageSize: number = 10, s
const data: VideoListResponse = await response.json();
if (data.code !== 0) {
console.error(`Error fetching videos: ${data.message}`);
logger.error(`Error fetching videos: ${data.message}`, 'net', 'getLatestVideos');
return null;
}
if (data.data.archives.length === 0) {
console.warn("No more videos found");
logger.verbose("No more videos found", 'net', 'getLatestVideos');
return [];
}
@@ -45,7 +46,7 @@ export async function getLatestVideos(page: number = 1, pageSize: number = 10, s
return result;
} catch (error) {
console.error(error);
logger.error(error as Error, "net", "getLatestVideos");
return null;
}
}


@@ -1,4 +1,5 @@
import { VideoTagsResponse } from "lib/net/bilibili.d.ts";
import logger from "lib/log/logger.ts";
export async function getVideoTags(aid: number): Promise<string[] | null> {
try {
@@ -6,13 +7,13 @@ export async function getVideoTags(aid: number): Promise<string[] | null> {
const res = await fetch(url);
const data: VideoTagsResponse = await res.json();
if (data.code != 0) {
console.error(`Error fetching tags for video ${aid}: ${data.message}`);
logger.error(`Error fetching tags for video ${aid}: ${data.message}`, 'net', 'getVideoTags');
return [];
}
return data.data.map((tag) => tag.tag_name);
}
catch {
console.error(`Error fetching tags for video ${aid}`);
logger.error(`Error fetching tags for video ${aid}`, 'net', 'getVideoTags');
return null;
}
}


@@ -4,6 +4,7 @@ import { getLatestVideoTimestampFromAllData, insertIntoAllData, videoExistsInAll
import { sleep } from "lib/utils/sleep.ts";
import { getVideoPositionInNewList } from "lib/net/bisectVideoStartFrom.ts";
import { SECOND } from "$std/datetime/constants.ts";
import logger from "lib/log/logger.ts";
export async function insertLatestVideos(
client: Client,
@@ -13,13 +14,13 @@
): Promise<number | null> {
const latestVideoTimestamp = await getLatestVideoTimestampFromAllData(client);
if (latestVideoTimestamp == null) {
console.error("[func:insertLatestVideos] Cannot get latest video timestamp from current database.");
logger.error("Cannot get latest video timestamp from current database.", "net", "fn:insertLatestVideos()");
return null
}
console.log(`[func:insertLatestVideos] Latest video in the database: ${new Date(latestVideoTimestamp).toISOString()}`)
logger.log(`Latest video in the database: ${new Date(latestVideoTimestamp).toISOString()}`, "net", "fn:insertLatestVideos()")
const videoIndex = await getVideoPositionInNewList(latestVideoTimestamp);
if (videoIndex == null) {
console.error("[func:insertLatestVideos] Cannot locate the video through bisect.");
logger.error("Cannot locate the video through bisect.", "net", "fn:insertLatestVideos()");
return null
}
if (typeof videoIndex == "object") {
@@ -46,7 +47,7 @@
}
failCount = 0;
if (videos.length == 0) {
console.warn("No more videos found");
logger.verbose("No more videos found", "net", "fn:insertLatestVideos()");
break;
}
for (const video of videos) {
@@ -56,13 +57,13 @@
insertedVideos.add(video.aid);
}
}
console.log(`[func:insertLatestVideos] Page ${page} crawled, total: ${insertedVideos.size} videos.`);
logger.log(`Page ${page} crawled, total: ${insertedVideos.size} videos.`, "net", "fn:insertLatestVideos()");
page--;
if (page < 1) {
return 0;
}
} catch (error) {
console.error(error);
logger.error(error as Error, "net", "fn:insertLatestVideos()");
failCount++;
if (failCount > 5) {
return null;


@@ -1,6 +1,7 @@
import { Job, Worker } from "bullmq";
import { insertVideosWorker } from "lib/mq/executors.ts";
import { redis } from "lib/db/redis.ts";
import logger from "lib/log/logger.ts";
const worker = new Worker(
"cvsa",
@@ -17,9 +18,9 @@
);
worker.on("active", () => {
console.log("[bullmq] Worker activated.");
logger.log("Worker activated.", "mq");
});
worker.on("error", (err) => {
console.error(err);
logger.error(err);
});