Compare commits

..

No commits in common. "d9c8253019b2a7aa597b61e82de5379f306451ee" and "22b1c337acb0943e4f7548f1058f5c5aa3b26bc5" have entirely different histories.

151 changed files with 834 additions and 3140 deletions

1
.gitattributes vendored
View File

@ -1 +0,0 @@
*.woff2 filter=lfs diff=lfs merge=lfs -text

View File

@ -14,11 +14,6 @@
<excludeFolder url="file://$MODULE_DIR$/logs" />
<excludeFolder url="file://$MODULE_DIR$/model" />
<excludeFolder url="file://$MODULE_DIR$/src/db" />
<excludeFolder url="file://$MODULE_DIR$/.idea" />
<excludeFolder url="file://$MODULE_DIR$/.vscode" />
<excludeFolder url="file://$MODULE_DIR$/.zed" />
<excludeFolder url="file://$MODULE_DIR$/packages/frontend/.astro" />
<excludeFolder url="file://$MODULE_DIR$/scripts" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />

View File

@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="DenoSettings">
<option name="denoInit" value="{&#10; &quot;enable&quot;: true,&#10; &quot;lint&quot;: true,&#10; &quot;unstable&quot;: true,&#10; &quot;importMap&quot;: &quot;import_map.json&quot;,&#10; &quot;config&quot;: &quot;deno.json&quot;,&#10; &quot;fmt&quot;: {&#10; &quot;useTabs&quot;: true,&#10; &quot;lineWidth&quot;: 120,&#10; &quot;indentWidth&quot;: 4,&#10; &quot;semiColons&quot;: true,&#10; &quot;proseWrap&quot;: &quot;always&quot;&#10; }&#10;}" />
</component>
</project>

View File

@ -1,17 +1,6 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="CssUnknownProperty" enabled="false" level="WARNING" enabled_by_default="false">
<option name="myCustomPropertiesEnabled" value="true" />
<option name="myIgnoreVendorSpecificProperties" value="false" />
<option name="myCustomPropertiesList">
<value>
<list size="1">
<item index="0" class="java.lang.String" itemvalue="lc-l-with-tail" />
</list>
</value>
</option>
</inspection_tool>
<inspection_tool class="GrazieInspection" enabled="false" level="GRAMMAR_ERROR" enabled_by_default="false" />
<inspection_tool class="LanguageDetectionInspection" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="SpellCheckingInspection" enabled="false" level="TYPO" enabled_by_default="false">

View File

@ -16,6 +16,7 @@
"imports": {
"@astrojs/node": "npm:@astrojs/node@^9.1.3",
"@astrojs/svelte": "npm:@astrojs/svelte@^7.0.8",
"@core/db/": "./packages/core/db/",
"date-fns": "npm:date-fns@^4.1.0"
}
}

View File

@ -1,21 +1,21 @@
# Table of contents
* [Welcome](README.md)
- [Welcome](README.md)
## About
* [About CVSA Project](about/this-project.md)
* [Scope of Inclusion](about/scope-of-inclusion.md)
- [About CVSA Project](about/this-project.md)
- [Scope of Inclusion](about/scope-of-inclusion.md)
## Architecure
* [Overview](architecure/overview.md)
* [Crawler](architecure/crawler.md)
* [Database Structure](architecure/database-structure/README.md)
* [Type of Song](architecure/database-structure/type-of-song.md)
* [Message Queue](architecure/message-queue.md)
* [Artificial Intelligence](architecure/artificial-intelligence.md)
## API Doc
* [Catalog](api-doc/catalog.md)
* [Songs](api-doc/songs.md)
- [Catalog](api-doc/catalog.md)
- [Songs](api-doc/songs.md)

View File

@ -7,34 +7,23 @@ For a **song**, it must meet the following conditions to be included in CVSA:
### Category 30
In principle, the songs must be featured in a video that is categorized under the VOCALOID·UTAU (ID 30) category in
[Bilibili](https://en.wikipedia.org/wiki/Bilibili) in order to be observed by our
[automation program](../architecure/overview.md#crawler). We welcome editors to manually add songs that have not been
uploaded to bilibili / categorized under this category.
In principle, the songs must be featured in a video that is categorized under the VOCALOID·UTAU (ID 30) category in [Bilibili](https://en.wikipedia.org/wiki/Bilibili) in order to be observed by our [automation program](../architecure/overview.md#crawler). We welcome editors to manually add songs that have not been uploaded to bilibili / categorized under this category.
#### NEWS
Recently, Bilibili seems to be offlining the sub-category. This means the VOCALOID·UTAU category can no longer be
entered from the frontend, and producers can no longer upload videos to this category (instead, they can only choose the
parent category "Music").&#x20;
Recently, Bilibili seems to be offlining the sub-category. This means the VOCALOID·UTAU category can no longer be entered from the frontend, and producers can no longer upload videos to this category (instead, they can only choose the parent category "Music").&#x20;
According to our experiments, Bilibili still retains the code logic of sub-categories in the backend, and newly
published songs may still be in the VOCALOID·UTAU sub-category, and the related APIs can still work normally. However,
there are [reports](https://www.bilibili.com/opus/1041223385394184199) that some of the new songs have been placed under
the "Music General" sub-category.\
We are still waiting for Bilibili's follow-up actions, and in the future, we may adjust the scope of our automated
program's crawling.
According to our experiments, Bilibili still retains the code logic of sub-categories in the backend, and newly published songs may still be in the VOCALOID·UTAU sub-category, and the related APIs can still work normally. However, there are [reports](https://www.bilibili.com/opus/1041223385394184199) that some of the new songs have been placed under the "Music General" sub-category.\
We are still waiting for Bilibili's follow-up actions, and in the future, we may adjust the scope of our automated program's crawling.
### At Least One Line of Chinese / Chinese Virtual Singer
The lyrics of the song must contain at least one line in Chinese. Otherwise, if the lyrics of the song do not contain
Chinese, it will be included in the CVSA only if a Chinese virtual singer has been used.
The lyrics of the song must contain at least one line in Chinese. Otherwise, if the lyrics of the song do not contain Chinese, it will be included in the CVSA only if a Chinese virtual singer has been used.
We define a **Chinese virtual singer** as follows:
1. The singer primarily uses Chinese voicebank (i.e. the most widely used voicebank for the singer is Chinese)
2. The singer is operated by a company, organization, individual or group located in Mainland China, Hong Kong, Macau or
Taiwan.
2. The singer is operated by a company, organization, individual or group located in Mainland China, Hong Kong, Macau or Taiwan.
### Using Vocal Synthesizer

View File

@ -9,13 +9,10 @@ The AI systems we currently use are:
Located at `/filter/` under project root dir, it classifies a video in the
[category 30](../about/scope-of-inclusion.md#category-30) into the following categories:
- 0: Not related to Chinese vocal synthesis
- 1: An original song with Chinese vocal synthesis
- 2: A cover/remix song with Chinese vocal synthesis
* 0: Not related to Chinese vocal synthesis
* 1: An original song with Chinese vocal synthesis
* 2: A cover/remix song with Chinese vocal synthesis
### The Predictor
Located at `/pred/`under the project root dir, it predicts the future views of a video. This is a regression model that
takes historical view trends of a video, other contextual information (such as the current time), and future time points
to be predicted as feature inputs, and outputs the increment in the video's view count from "now" to the specified
future time point.
Located at `/pred/`under the project root dir, it predicts the future views of a video. This is a regression model that takes historical view trends of a video, other contextual information (such as the current time), and future time points to be predicted as feature inputs, and outputs the increment in the video's view count from "now" to the specified future time point.

View File

@ -1,4 +0,0 @@
# Crawler
A central aspect of CVSA's technical design is its emphasis on automation. The data collection process within the `crawler` is orchestrated using a message queue powered by [BullMQ](https://bullmq.io/). This enables concurrent processing of various tasks involved in the data lifecycle. State management and data persistence are handled by a combination of Redis for caching and real-time data, and PostgreSQL as the primary database.

View File

@ -5,11 +5,10 @@ CVSA uses [PostgreSQL](https://www.postgresql.org/) as our database.
All public data of CVSA (excluding users' personal data) is stored in a database named `cvsa_main`, which contains the
following tables:
- songs: stores the main information of songs
- bili\_user: stores snapshots of Bilibili user information
- all\_data: metadata of all videos in [category 30](../../about/scope-of-inclusion.md#category-30).
- labelling\_result: Contains label of videos in `all_data`tagged by our
[AI system](../artificial-intelligence.md#the-filter).
- video\_snapshot: Statistical data of videos that are fetched regularly (e.g., number of views, etc.), we call this
fetch process as "snapshot".
- snapshot\_schedule: The scheduling information for video snapshots.
* songs: stores the main information of songs
* bili\_user: stores snapshots of Bilibili user information
* all\_data: metadata of all videos in [category 30](../../about/scope-of-inclusion.md#category-30).
* labelling\_result: Contains label of videos in `all_data`tagged by our [AI system](../artificial-intelligence.md#the-filter).
* video\_snapshot: Statistical data of videos that are fetched regularly (e.g., number of views, etc.), we call this fetch process as "snapshot".
* snapshot\_schedule: The scheduling information for video snapshots.

View File

@ -0,0 +1,7 @@
# Message Queue
We rely on message queues to manage the various tasks that [the crawler](overview.md#crawler) needs to perform.
### Code Path
Currently, the code related to message queues are located at `lib/mq` and `src`.

View File

@ -14,29 +14,14 @@ layout:
# Overview
The CVSA is a [monorepo](https://en.wikipedia.org/wiki/Monorepo) codebase, mainly using TypeScript as the development language. With [Deno workspace](https://docs.deno.com/runtime/fundamentals/workspaces/), the major part of the codebase is under `packages/`.&#x20;
The whole CVSA system can be separated into three different parts:
**Project structure:**
* Frontend
* API
* Crawler
```
cvsa
├── deno.json
├── packages
│ ├── backend
│ ├── core
│ ├── crawler
│ └── frontend
└── README.md
```
**Package Breakdown:**
* **`backend`**: This package houses the server-side logic, built with the [Hono](https://hono.dev/) web framework. It's responsible for interacting with the database and exposing data through REST and GraphQL APIs for consumption by the frontend, internal applications, and third-party developers.
* **`frontend`**: The user-facing web interface of CVSA is developed using [Astro](https://astro.build/). This package handles the presentation layer, displaying information fetched from the database.
* **`crawler`**: This automated data collection system is a key component of CVSA. It's designed to automatically discover and gather new song data from bilibili, as well as track relevant statistics over time.
* **`core`**: This package contains reusable and generic code that is utilized across multiple workspaces within the CVSA monorepo.
The frontend is driven by [Astro](https://astro.build/) and is used to display the final CVSA page. The API is driven by [Hono](https://hono.dev) and is used to query the database and provide REST/GraphQL APIs that can be called by our website, applications, or third parties. The crawler is our automatic data collector, used to automatically collect new songs from bilibili, track their statistics, etc.
### Crawler
Automation is the biggest highlight of CVSA's technical design. The data collection process within the `crawler` is orchestrated using a message queue powered by [BullMQ](https://bullmq.io/). This enables concurrent processing of various tasks involved in the data collection lifecycle. State management and data persistence are handled by a combination of Redis for caching and real-time data, and PostgreSQL as the primary database.
Automation is the biggest highlight of CVSA's technical design. To achieve this, we use a message queue powered by [BullMQ](https://bullmq.io/) to concurrently process various tasks in the data collection life cycle.

View File

@ -1,106 +0,0 @@
openapi: 3.0.0
info:
title: CVSA API
version: v1
servers:
- url: https://api.projectcvsa.com
paths:
/video/{id}/snapshots:
get:
summary: 获取视频快照列表
description: 根据视频 ID 获取视频的快照列表。视频 ID 可以是以 "av" 开头的数字,以 "BV" 开头的 12 位字母数字,或者一个正整数。
parameters:
- in: path
name: id
required: true
schema:
type: string
description: "视频 ID (如: av78977256, BV1KJ411C7CW, 78977256)"
- in: query
name: ps
schema:
type: integer
minimum: 1
description: 每页返回的快照数量 (pageSize),默认为 1000。
- in: query
name: pn
schema:
type: integer
minimum: 1
description: 页码 (pageNumber)用于分页查询。offset 与 pn 只能选择一个。
- in: query
name: offset
schema:
type: integer
minimum: 1
description: 偏移量用于基于偏移量的查询。offset 与 pn 只能选择一个。
- in: query
name: reverse
schema:
type: boolean
description: 是否反向排序(从旧到新),默认为 false。
responses:
'200':
description: 成功获取快照列表
content:
application/json:
schema:
type: array
items:
type: object
properties:
id:
type: integer
description: 快照 ID
aid:
type: integer
description: 视频的 av 号
views:
type: integer
description: 视频播放量
coins:
type: integer
description: 视频投币数
likes:
type: integer
description: 视频点赞数
favorites:
type: integer
description: 视频收藏数
shares:
type: integer
description: 视频分享数
danmakus:
type: integer
description: 视频弹幕数
replies:
type: integer
description: 视频评论数
'400':
description: 无效的查询参数
content:
application/json:
schema:
type: object
properties:
message:
type: string
description: 错误消息
errors:
type: object
description: 详细的错误信息
'500':
description: 服务器内部错误
content:
application/json:
schema:
type: object
properties:
message:
type: string
description: 错误消息
error:
type: object
description: 详细的错误信息

View File

@ -1,22 +1,22 @@
# Table of contents
* [欢迎](README.md)
- [欢迎](README.md)
## 关于 <a href="#about" id="about"></a>
* [关于本项目](about/this-project.md)
* [收录范围](about/scope-of-inclusion.md)
- [关于本项目](about/this-project.md)
- [收录范围](about/scope-of-inclusion.md)
## 技术架构 <a href="#architecture" id="architecture"></a>
- [概览](architecture/overview.md)
- [数据库结构](architecture/database-structure/README.md)
- [歌曲类型](architecture/database-structure/type-of-song.md)
- [人工智能](architecture/artificial-intelligence.md)
- [消息队列](architecture/message-queue/README.md)
- [LatestVideosQueue 队列](architecture/message-queue/latestvideosqueue-dui-lie.md)
* [概览](architecture/overview.md)
* [数据库结构](architecture/database-structure/README.md)
* [歌曲类型](architecture/database-structure/type-of-song.md)
* [人工智能](architecture/artificial-intelligence.md)
* [消息队列](architecture/message-queue/README.md)
* [LatestVideosQueue 队列](architecture/message-queue/latestvideosqueue-dui-lie.md)
## API 文档 <a href="#api-doc" id="api-doc"></a>
* [目录](api-doc/catalog.md)
* [视频快照](api-doc/video-snapshot.md)
- [目录](api-doc/catalog.md)
- [歌曲](api-doc/songs.md)

View File

@ -1,4 +1,3 @@
# 目录
* [视频快照](video-snapshot.md)
- [歌曲](songs.md)

3
doc/zh/api-doc/songs.md Normal file
View File

@ -0,0 +1,3 @@
# 歌曲
暂未实现。

View File

@ -1,6 +0,0 @@
# 视频快照
{% openapi src="../.gitbook/assets/1.yaml" path="/video/{id}/snapshots" method="get" %}
[1.yaml](../.gitbook/assets/1.yaml)
{% endopenapi %}

View File

@ -2,14 +2,13 @@
CVSA 使用 [PostgreSQL](https://www.postgresql.org/) 作为数据库。
CVSA 设计了两个
CVSA 的所有公开数据(不包括用户的个人数据)都存储在名为 `cvsa_main` 的数据库中,该数据库包含以下表:
- songs存储歌曲的主要信息
- bilibili\_user存储 Bilibili 用户信息快照
- bilibili\_metadata[分区 30](../../about/scope-of-inclusion.md#vocaloiduatu-fen-qu) 中所有视频的元数据
- labelling\_result包含由我们的 AI 系统 标记的 `all_data` 中视频的标签。
- latest\_video\_snapshot存储视频最新的快照
- video\_snapshot存储视频的快照包括特定时间下视频的统计信息播放量、点赞数等
- snapshot\_schedule视频快照的规划信息为辅助表
* songs存储歌曲的主要信息
* bilibili\_user存储 Bilibili 用户信息快照
* bilibili\_metadata[分区 30](../../about/scope-of-inclusion.md#vocaloiduatu-fen-qu) 中所有视频的元数据
* labelling\_result包含由我们的 AI 系统 标记的 `all_data` 中视频的标签。
* latest\_video\_snapshot存储视频最新的快照
* video\_snapshot存储视频的快照包括特定时间下视频的统计信息播放量、点赞数等
* snapshot\_schedule视频快照的规划信息为辅助表

View File

@ -1 +1,2 @@
# LatestVideosQueue 队列

View File

@ -20,7 +20,8 @@ layout:
位于项目目录`packages/crawler` 下,它负责以下工作:
- 抓取新的视频并收录作品
- 持续监控视频的播放量等统计信息
* 抓取新的视频并收录作品
* 持续监控视频的播放量等统计信息
整个 crawler 由 BullMQ 消息队列驱动,使用 Redis 和 PostgreSQL 管理状态。

View File

@ -20,7 +20,7 @@ export const dbCredMiddleware = createMiddleware(async (c, next) => {
c.set("dbCred", connection);
await next();
connection.release();
});
})
declare module "hono" {
interface ContextVariableMap {

View File

@ -4,15 +4,11 @@
"@rabbit-company/argon2id": "jsr:@rabbit-company/argon2id@^2.1.0",
"hono": "jsr:@hono/hono@^4.7.5",
"zod": "npm:zod",
"yup": "npm:yup",
"@core/": "../core/",
"log/": "../core/log/",
"@crawler/net/videoInfo": "../crawler/net/getVideoInfo.ts",
"ioredis": "npm:ioredis"
"yup": "npm:yup"
},
"tasks": {
"dev": "deno serve --env-file=.env --allow-env --allow-net --allow-read --allow-write --allow-run --watch main.ts",
"start": "deno serve --env-file=.env --allow-env --allow-net --allow-read --allow-write --allow-run --host 127.0.0.1 main.ts"
"dev": "deno serve --env-file=.env --allow-env --allow-net --watch main.ts",
"start": "deno serve --env-file=.env --allow-env --allow-net --host 127.0.0.1 main.ts"
},
"compilerOptions": {
"jsx": "precompile",

View File

@ -3,19 +3,16 @@ import { dbCredMiddleware, dbMiddleware } from "./database.ts";
import { rootHandler } from "./root.ts";
import { getSnapshotsHanlder } from "./snapshots.ts";
import { registerHandler } from "./register.ts";
import { videoInfoHandler } from "./videoInfo.ts";
export const app = new Hono();
app.use("/video/*", dbMiddleware);
app.use("/user", dbCredMiddleware);
app.use('/video/*', dbMiddleware);
app.use('/user', dbCredMiddleware);
app.get("/", ...rootHandler);
app.get("/video/:id/snapshots", ...getSnapshotsHanlder);
app.post("/user", ...registerHandler);
app.get("/video/:id/info", ...videoInfoHandler);
app.get('/video/:id/snapshots', ...getSnapshotsHanlder);
app.post('/user', ...registerHandler);
const fetch = app.fetch;
@ -23,4 +20,4 @@ export default {
fetch,
} satisfies Deno.ServeDefaultExport;
export const VERSION = "0.4.2";
export const VERSION = "0.3.0";

View File

@ -19,7 +19,7 @@ export const userExists = async (username: string, client: Client) => {
`;
const result = await client.queryObject(query, [username]);
return result.rows.length > 0;
};
}
export const registerHandler = createHandlers(async (c: ContextType) => {
const client = c.get("dbCred");

View File

@ -3,27 +3,29 @@ import { VERSION } from "./main.ts";
import { createHandlers } from "./utils.ts";
export const rootHandler = createHandlers((c) => {
let singer: Singer | Singer[];
let singer: Singer | Singer[] | null = null;
const shouldShowSpecialSinger = Math.random() < 0.016;
if (getSingerForBirthday().length !== 0) {
singer = JSON.parse(JSON.stringify(getSingerForBirthday())) as Singer[];
if (getSingerForBirthday().length !== 0){
singer = getSingerForBirthday();
for (const s of singer) {
delete s.birthday;
s.message = `${s.name}生日快乐~`;
s.message = `${s.name}生日快乐~`
}
} else if (shouldShowSpecialSinger) {
}
else if (shouldShowSpecialSinger) {
singer = pickSpecialSinger();
} else {
}
else {
singer = pickSinger();
}
return c.json({
"project": {
"name": "中V档案馆",
"motto": "一起唱吧,心中的歌!",
"motto": "一起唱吧,心中的歌!"
},
"status": 200,
"version": VERSION,
"time": Date.now(),
"singer": singer,
});
});
"singer": singer
})
})

View File

@ -12,7 +12,7 @@ const SnapshotQueryParamsSchema = object({
reverse: boolean().optional(),
});
export const idSchema = mixed().test(
const idSchema = mixed().test(
"is-valid-id",
'id must be a string starting with "av" followed by digits, or "BV" followed by 10 alphanumeric characters, or a positive integer',
async (value) => {
@ -46,7 +46,8 @@ export const getSnapshotsHanlder = createHandlers(async (c: ContextType) => {
let videoId: string | number = idParam as string;
if (videoId.startsWith("av")) {
videoId = parseInt(videoId.slice(2));
} else if (await number().isValid(videoId)) {
}
else if (await number().isValid(videoId)) {
videoId = parseInt(videoId);
}
const queryParams = await SnapshotQueryParamsSchema.validate(c.req.query());

View File

@ -1,4 +1,4 @@
import { createFactory } from "hono/factory";
import { createFactory } from 'hono/factory'
const factory = createFactory();

View File

@ -1,84 +0,0 @@
import logger from "log/logger.ts";
import { Redis } from "ioredis";
import { number, ValidationError } from "yup";
import { createHandlers } from "./utils.ts";
import { getVideoInfo, getVideoInfoByBV } from "@crawler/net/videoInfo";
import { idSchema } from "./snapshots.ts";
import { NetSchedulerError } from "@core/net/delegate.ts";
import type { Context } from "hono";
import type { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import type { BlankEnv, BlankInput } from "hono/types";
import type { VideoInfoData } from "@core/net/bilibili.d.ts";
const redis = new Redis({ maxRetriesPerRequest: null });
const CACHE_EXPIRATION_SECONDS = 60;
type ContextType = Context<BlankEnv, "/video/:id/info", BlankInput>;
async function insertVideoSnapshot(client: Client, data: VideoInfoData) {
const views = data.stat.view;
const danmakus = data.stat.danmaku;
const replies = data.stat.reply;
const likes = data.stat.like;
const coins = data.stat.coin;
const shares = data.stat.share;
const favorites = data.stat.favorite;
const aid = data.aid;
const query: string = `
INSERT INTO video_snapshot (aid, views, danmakus, replies, likes, coins, shares, favorites)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
`;
await client.queryObject(
query,
[aid, views, danmakus, replies, likes, coins, shares, favorites],
);
logger.log(`Inserted into snapshot for video ${aid} by videoInfo API.`, "api", "fn:insertVideoSnapshot");
}
export const videoInfoHandler = createHandlers(async (c: ContextType) => {
const client = c.get("db");
try {
const id = await idSchema.validate(c.req.param("id"));
let videoId: string | number = id as string;
if (videoId.startsWith("av")) {
videoId = parseInt(videoId.slice(2));
} else if (await number().isValid(videoId)) {
videoId = parseInt(videoId);
}
const cacheKey = `cvsa:videoInfo:${videoId}`;
const cachedData = await redis.get(cacheKey);
if (cachedData) {
return c.json(JSON.parse(cachedData));
}
let result: VideoInfoData | number;
if (typeof videoId === "number") {
result = await getVideoInfo(videoId, "getVideoInfo");
} else {
result = await getVideoInfoByBV(videoId, "getVideoInfo");
}
if (typeof result === "number") {
return c.json({ message: "Error fetching video info", code: result }, 500);
}
await redis.setex(cacheKey, CACHE_EXPIRATION_SECONDS, JSON.stringify(result));
await insertVideoSnapshot(client, result);
return c.json(result);
} catch (e) {
if (e instanceof ValidationError) {
return c.json({ message: "Invalid query parameters", errors: e.errors }, 400);
} else if (e instanceof NetSchedulerError) {
return c.json({ message: "Error fetching video info", code: e.code }, 500);
} else {
return c.json({ message: "Unhandled error", error: e }, 500);
}
}
});

View File

@ -1,62 +1,33 @@
import type { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import type { VideoSnapshotType } from "./schema.d.ts";
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import { VideoSnapshotType } from "@core/db/schema.d.ts";
export async function getVideoSnapshots(
client: Client,
aid: number,
limit: number,
pageOrOffset: number,
reverse: boolean,
mode: "page" | "offset" = "page",
) {
const offset = mode === "page" ? (pageOrOffset - 1) * limit : pageOrOffset;
const queryDesc: string = `
export async function getVideoSnapshots(client: Client, aid: number, limit: number, pageOrOffset: number, reverse: boolean, mode: 'page' | 'offset' = 'page') {
const offset = mode === 'page' ? (pageOrOffset - 1) * limit : pageOrOffset;
const order = reverse ? 'ASC' : 'DESC';
const query = `
SELECT *
FROM video_snapshot
WHERE aid = $1
ORDER BY created_at DESC
ORDER BY created_at ${order}
LIMIT $2
OFFSET $3
`;
const queryAsc: string = `
SELECT *
FROM video_snapshot
WHERE aid = $1
ORDER BY created_at
LIMIT $2 OFFSET $3
`;
const query = reverse ? queryAsc : queryDesc;
const queryResult = await client.queryObject<VideoSnapshotType>(query, [aid, limit, offset]);
return queryResult.rows;
}
export async function getVideoSnapshotsByBV(
client: Client,
bv: string,
limit: number,
pageOrOffset: number,
reverse: boolean,
mode: "page" | "offset" = "page",
) {
const offset = mode === "page" ? (pageOrOffset - 1) * limit : pageOrOffset;
const queryAsc = `
export async function getVideoSnapshotsByBV(client: Client, bv: string, limit: number, pageOrOffset: number, reverse: boolean, mode: 'page' | 'offset' = 'page') {
const offset = mode === 'page' ? (pageOrOffset - 1) * limit : pageOrOffset;
const order = reverse ? 'ASC' : 'DESC';
const query = `
SELECT vs.*
FROM video_snapshot vs
JOIN bilibili_metadata bm ON vs.aid = bm.aid
WHERE bm.bvid = $1
ORDER BY vs.created_at
ORDER BY vs.created_at ${order}
LIMIT $2
OFFSET $3
`;
const queryDesc: string = `
SELECT *
FROM video_snapshot vs
JOIN bilibili_metadata bm ON vs.aid = bm.aid
WHERE bm.bvid = $1
ORDER BY vs.created_at DESC
LIMIT $2 OFFSET $3
`;
const query = reverse ? queryAsc : queryDesc;
`
const queryResult = await client.queryObject<VideoSnapshotType>(query, [bv, limit, offset]);
return queryResult.rows;
}

View File

@ -1,12 +0,0 @@
{
"name": "@cvsa/core",
"exports": "./main.ts",
"imports": {
"ioredis": "npm:ioredis",
"log/": "./log/",
"db/": "./db/",
"$std/": "https://deno.land/std@0.216.0/",
"mq/": "./mq/",
"chalk": "npm:chalk"
}
}

View File

@ -1 +0,0 @@
export const DB_VERSION = 10;

View File

@ -1,5 +1,5 @@
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import { AllDataType, BiliUserType } from "@core/db/schema";
import { AllDataType, BiliUserType } from "db/schema.d.ts";
import Akari from "ml/akari.ts";
export async function videoExistsInAllData(client: Client, aid: number) {

View File

@ -1,5 +1,5 @@
import { Pool } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import { postgresConfig } from "@core/db/pgConfig";
import { postgresConfig } from "@core/db/pgConfig.ts";
const pool = new Pool(postgresConfig, 12);

55
packages/crawler/db/schema.d.ts vendored Normal file
View File

@ -0,0 +1,55 @@
export interface AllDataType {
id: number;
aid: number;
bvid: string | null;
description: string | null;
uid: number | null;
tags: string | null;
title: string | null;
published_at: string | null;
duration: number;
created_at: string | null;
}
export interface BiliUserType {
id: number;
uid: number;
username: string;
desc: string;
fans: number;
}
export interface VideoSnapshotType {
id: number;
created_at: string;
views: number;
coins: number;
likes: number;
favorites: number;
shares: number;
danmakus: number;
aid: bigint;
replies: number;
}
export interface LatestSnapshotType {
aid: number;
time: number;
views: number;
danmakus: number;
replies: number;
likes: number;
coins: number;
shares: number;
favorites: number;
}
export interface SnapshotScheduleType {
id: number;
aid: number;
type?: string;
created_at: string;
started_at?: string;
finished_at?: string;
status: string;
}

View File

@ -1,20 +1,15 @@
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import { LatestSnapshotType } from "@core/db/schema";
import { SnapshotNumber } from "mq/task/getVideoStats.ts";
import { LatestSnapshotType } from "db/schema.d.ts";
export async function getVideosNearMilestone(client: Client) {
const queryResult = await client.queryObject<LatestSnapshotType>(`
SELECT ls.*
FROM latest_video_snapshot ls
RIGHT JOIN songs ON songs.aid = ls.aid
WHERE
(views >= 50000 AND views < 100000) OR
(views >= 900000 AND views < 1000000) OR
(views >= 9900000 AND views < 10000000)
UNION
SELECT ls.*
FROM latest_video_snapshot ls
INNER JOIN
songs s ON ls.aid = s.aid
AND s.deleted = false
WHERE
s.deleted = false AND
(views >= 90000 AND views < 100000) OR
(views >= 900000 AND views < 1000000) OR
(views >= 9900000 AND views < 10000000)
@ -27,7 +22,7 @@ export async function getVideosNearMilestone(client: Client) {
});
}
export async function getLatestVideoSnapshot(client: Client, aid: number): Promise<null | SnapshotNumber> {
export async function getLatestVideoSnapshot(client: Client, aid: number): Promise<null | LatestSnapshotType> {
const queryResult = await client.queryObject<LatestSnapshotType>(
`
SELECT *

View File

@ -1,17 +1,18 @@
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import { SnapshotScheduleType } from "@core/db/schema";
import { formatTimestampToPsql } from "utils/formatTimestampToPostgre.ts";
import { SnapshotScheduleType } from "./schema.d.ts";
import logger from "log/logger.ts";
import { MINUTE } from "$std/datetime/constants.ts";
import { redis } from "@core/db/redis.ts";
import { redis } from "db/redis.ts";
import { Redis } from "ioredis";
import {parseTimestampFromPsql} from "../utils/formatTimestampToPostgre.ts";
const REDIS_KEY = "cvsa:snapshot_window_counts";
function getCurrentWindowIndex(): number {
const now = new Date();
const minutesSinceMidnight = now.getHours() * 60 + now.getMinutes();
return Math.floor(minutesSinceMidnight / 5);
const currentWindow = Math.floor(minutesSinceMidnight / 5);
return currentWindow;
}
export async function refreshSnapshotWindowCounts(client: Client, redisClient: Redis) {
@ -70,14 +71,6 @@ export async function videoHasActiveSchedule(client: Client, aid: number) {
return res.rows.length > 0;
}
export async function videoHasActiveScheduleWithType(client: Client, aid: number, type: string) {
const res = await client.queryObject<{ status: string }>(
`SELECT status FROM snapshot_schedule WHERE aid = $1 AND (status = 'pending' OR status = 'processing') AND type = $2`,
[aid, type],
);
return res.rows.length > 0;
}
export async function videoHasProcessingSchedule(client: Client, aid: number) {
const res = await client.queryObject<{ status: string }>(
`SELECT status FROM snapshot_schedule WHERE aid = $1 AND status = 'processing'`,
@ -169,18 +162,22 @@ export async function getLatestSnapshot(client: Client, aid: number): Promise<Sn
};
}
export async function getLatestActiveScheduleWithType(client: Client, aid: number, type: string) {
const query: string = `
SELECT *
FROM snapshot_schedule
WHERE aid = $1
AND type = $2
AND (status = 'pending' OR status = 'processing')
ORDER BY started_at DESC
LIMIT 1
`
const res = await client.queryObject<SnapshotScheduleType>(query, [aid, type]);
return res.rows[0];
/*
* Returns the number of snapshot schedules within the specified range.
* @param client The database client.
* @param start The start time of the range. (Timestamp in milliseconds)
* @param end The end time of the range. (Timestamp in milliseconds)
*/
export async function getSnapshotScheduleCountWithinRange(client: Client, start: number, end: number) {
const startTimeString = formatTimestampToPsql(start);
const endTimeString = formatTimestampToPsql(end);
const query = `
SELECT COUNT(*) FROM snapshot_schedule
WHERE started_at BETWEEN $1 AND $2
AND status = 'pending'
`;
const res = await client.queryObject<{ count: number }>(query, [startTimeString, endTimeString]);
return res.rows[0].count;
}
/*
@ -196,28 +193,10 @@ export async function scheduleSnapshot(
targetTime: number,
force: boolean = false,
) {
if (await videoHasActiveSchedule(client, aid) && !force) return;
let adjustedTime = new Date(targetTime);
const hashActiveSchedule = await videoHasActiveScheduleWithType(client, aid, type);
if (type == "milestone" && hashActiveSchedule) {
const latestActiveSchedule = await getLatestActiveScheduleWithType(client, aid, type);
const latestScheduleStartedAt = new Date(parseTimestampFromPsql(latestActiveSchedule.started_at!));
if (latestScheduleStartedAt > adjustedTime) {
await client.queryObject(`
UPDATE snapshot_schedule
SET started_at = $1
WHERE id = $2
`, [adjustedTime, latestActiveSchedule.id]);
logger.log(
`Updated snapshot schedule for ${aid} at ${adjustedTime.toISOString()}`,
"mq",
"fn:scheduleSnapshot",
);
return;
}
}
if (hashActiveSchedule && !force) return;
if (type !== "milestone" && type !== "new") {
adjustedTime = await adjustSnapshotTime(new Date(targetTime), 2000, redis);
adjustedTime = await adjustSnapshotTime(new Date(targetTime), 1000, redis);
}
logger.log(`Scheduled snapshot for ${aid} at ${adjustedTime.toISOString()}`, "mq", "fn:scheduleSnapshot");
return client.queryObject(
@ -248,7 +227,7 @@ export async function adjustSnapshotTime(
const initialOffset = currentWindow + Math.max(targetOffset, 0);
let timePerIteration: number;
let timePerIteration = 0;
const MAX_ITERATIONS = 2880;
let iters = 0;
const t = performance.now();
@ -307,14 +286,8 @@ export async function getBulkSnapshotsInNextSecond(client: Client) {
const query = `
SELECT *
FROM snapshot_schedule
WHERE (started_at <= NOW() + INTERVAL '15 seconds')
AND status = 'pending'
AND (type = 'normal' OR type = 'archive')
ORDER BY CASE
WHEN type = 'normal' THEN 1
WHEN type = 'archive' THEN 2
END,
started_at
WHERE started_at <= NOW() + INTERVAL '15 seconds' AND status = 'pending' AND type = 'normal'
ORDER BY started_at
LIMIT 1000;
`;
const res = await client.queryObject<SnapshotScheduleType>(query, []);
@ -345,14 +318,3 @@ export async function getVideosWithoutActiveSnapshotSchedule(client: Client) {
const res = await client.queryObject<{ aid: number }>(query, []);
return res.rows.map((r) => Number(r.aid));
}
/**
 * Lists every aid in `bilibili_metadata` that has no pending or processing
 * entry in `snapshot_schedule`, i.e. videos with no active snapshot scheduled.
 * @param client The database client.
 * @returns All matching aids as numbers.
 */
export async function getAllVideosWithoutActiveSnapshotSchedule(client: Client) {
	const result = await client.queryObject<{ aid: number }>(
		`
		SELECT s.aid
		FROM bilibili_metadata s
		LEFT JOIN snapshot_schedule ss ON s.aid = ss.aid AND (ss.status = 'pending' OR ss.status = 'processing')
		WHERE ss.aid IS NULL
	`,
		[],
	);
	return result.rows.map(({ aid }) => Number(aid));
}

View File

@ -1,32 +0,0 @@
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import { db } from "db/init.ts";
/**
 * Runs `operation` with a pooled database connection, guaranteeing the
 * connection is released afterwards.
 * @param operation The function that accepts the `client` as the parameter.
 * @param errorHandling Optional function to handle errors.
 * If no error handling function is provided, the error will be re-thrown.
 * @param cleanup Optional function to execute after the operation.
 * @returns The result of the operation or undefined if an error occurred.
 */
export async function withDbConnection<T>(
	operation: (client: Client) => Promise<T>,
	errorHandling?: (error: unknown, client: Client) => void,
	cleanup?: () => void,
): Promise<T | undefined> {
	const conn = await db.connect();
	try {
		return await operation(conn);
	} catch (err) {
		if (!errorHandling) {
			throw err;
		}
		errorHandling(err, conn);
		return undefined;
	} finally {
		conn.release();
		cleanup?.();
	}
}

View File

@ -12,7 +12,7 @@
"worker:filter": "deno run --env-file=.env --allow-env --allow-read --allow-ffi --allow-net --allow-write ./src/filterWorker.ts",
"adder": "deno run --env-file=.env --allow-env --allow-read --allow-ffi --allow-net ./src/jobAdder.ts",
"bullui": "deno run --allow-read --allow-env --allow-ffi --allow-net ./src/bullui.ts",
"all": "concurrently --restart-tries -1 'deno task worker:main' 'deno task adder' 'deno task bullui' 'deno task worker:filter'",
"all": "concurrently 'deno task worker:main' 'deno task adder' 'deno task bullui' 'deno task worker:filter'",
"test": "deno test ./test/ --allow-env --allow-ffi --allow-read --allow-net --allow-write --allow-run"
},
"lint": {
@ -23,13 +23,11 @@
"imports": {
"@std/assert": "jsr:@std/assert@1",
"$std/": "https://deno.land/std@0.216.0/",
"@std/datetime": "jsr:@std/datetime@^0.225.4",
"@huggingface/transformers": "npm:@huggingface/transformers@3.0.0",
"bullmq": "npm:bullmq",
"mq/": "./mq/",
"db/": "./db/",
"@core/": "../core/",
"log/": "../core/log/",
"log/": "./log/",
"net/": "./net/",
"ml/": "./ml/",
"utils/": "./utils/",
@ -39,9 +37,7 @@
"express": "npm:express",
"src/": "./src/",
"onnxruntime": "npm:onnxruntime-node@1.19.2",
"chalk": "npm:chalk",
"@core/db/schema": "../core/db/schema.d.ts",
"@core/db/pgConfig": "../core/db/pgConfig.ts"
"chalk": "npm:chalk"
},
"exports": "./main.ts"
}

View File

@ -1,5 +1,5 @@
import winston, { format, transports } from "npm:winston";
import type { TransformableInfo } from "npm:logform";
import { TransformableInfo } from "npm:logform";
import chalk from "chalk";
const customFormat = format.printf((info: TransformableInfo) => {

View File

@ -4,4 +4,4 @@
// SO HERE'S A PLACEHOLDER EXPORT FOR DENO:
export const DENO = "FUCK YOU DENO";
// Oh, maybe export the version is a good idea
export const VERSION = "1.0.26";
export const VERSION = "1.0.17";

View File

@ -1,40 +0,0 @@
import { Job } from "npm:bullmq@5.45.2";
import { getAllVideosWithoutActiveSnapshotSchedule, scheduleSnapshot } from "db/snapshotSchedule.ts";
import { withDbConnection } from "db/withConnection.ts";
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import logger from "log/logger.ts";
import { lockManager } from "mq/lockManager.ts";
import { getLatestVideoSnapshot } from "db/snapshot.ts";
import { HOUR, MINUTE } from "$std/datetime/constants.ts";
/**
 * BullMQ worker: schedules an "archive" snapshot for every video that has no
 * active snapshot schedule at all, one week (168 h) after its latest snapshot.
 * Guarded by the "dispatchArchiveSnapshots" lock so only one instance runs.
 */
export const archiveSnapshotsWorker = async (_job: Job) =>
	await withDbConnection<void>(async (client: Client) => {
		const startedAt = Date.now();
		if (await lockManager.isLocked("dispatchArchiveSnapshots")) {
			logger.log("dispatchArchiveSnapshots is already running", "mq");
			return;
		}
		await lockManager.acquireLock("dispatchArchiveSnapshots", 30 * 60);
		const aids = await getAllVideosWithoutActiveSnapshotSchedule(client);
		for (const rawAid of aids) {
			const aid = Number(rawAid);
			const latestSnapshot = await getLatestVideoSnapshot(client, aid);
			const now = Date.now();
			// A video never snapshotted before is treated as snapshotted "now".
			const lastSnapshotedAt = latestSnapshot?.time ?? now;
			// Fixed one-week archive cadence.
			const interval = 168;
			logger.log(
				`Scheduled archive snapshot for aid ${aid} in ${interval} hours.`,
				"mq",
				"fn:archiveSnapshotsWorker",
			);
			const targetTime = lastSnapshotedAt + interval * HOUR;
			await scheduleSnapshot(client, aid, "archive", targetTime);
			// Time budget: stop early on very large backlogs.
			// NOTE(review): 250 min far exceeds the 30-min lock TTL acquired above — confirm intended.
			if (now - startedAt > 250 * MINUTE) {
				return;
			}
		}
	}, (e) => {
		logger.error(e as Error, "mq", "fn:archiveSnapshotsWorker");
	}, async () => {
		// NOTE(review): this cleanup also runs on the early "already running" path,
		// releasing a lock held by another instance — confirm intended.
		await lockManager.releaseLock("dispatchArchiveSnapshots");
	});

View File

@ -1,6 +1,6 @@
import { Job } from "bullmq";
import { db } from "db/init.ts";
import { getUnlabelledVideos, getVideoInfoFromAllData, insertVideoLabel } from "../../db/bilibili_metadata.ts";
import { getUnlabelledVideos, getVideoInfoFromAllData, insertVideoLabel } from "db/allData.ts";
import Akari from "ml/akari.ts";
import { ClassifyVideoQueue } from "mq/index.ts";
import logger from "log/logger.ts";
@ -8,7 +8,7 @@ import { lockManager } from "mq/lockManager.ts";
import { aidExistsInSongs } from "db/songs.ts";
import { insertIntoSongs } from "mq/task/collectSongs.ts";
import { scheduleSnapshot } from "db/snapshotSchedule.ts";
import { MINUTE } from "@std/datetime";
import { MINUTE } from "$std/datetime/constants.ts";
export const classifyVideoWorker = async (job: Job) => {
const client = await db.connect();

View File

@ -1,9 +0,0 @@
import { Job } from "npm:bullmq@5.45.2";
import { collectSongs } from "mq/task/collectSongs.ts";
import { withDbConnection } from "db/withConnection.ts";
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
/** BullMQ worker: runs the collectSongs task under a managed DB connection. */
export const collectSongsWorker = async (_job: Job): Promise<void> => {
	await withDbConnection(async (client: Client) => {
		await collectSongs(client);
	});
};

View File

@ -1,29 +0,0 @@
import { Job } from "npm:bullmq@5.45.2";
import { withDbConnection } from "db/withConnection.ts";
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import { getVideosNearMilestone } from "db/snapshot.ts";
import { getAdjustedShortTermETA } from "mq/scheduling.ts";
import { truncate } from "utils/truncate.ts";
import { scheduleSnapshot } from "db/snapshotSchedule.ts";
import logger from "log/logger.ts";
import { HOUR, MINUTE, SECOND } from "@std/datetime";
/**
 * BullMQ worker: schedules a "milestone" snapshot for every video close to a
 * milestone view count, at a delay derived from its short-term ETA.
 * Videos whose ETA exceeds 144 hours are skipped this round.
 */
export const dispatchMilestoneSnapshotsWorker = (_job: Job): Promise<void> =>
	withDbConnection(async (client: Client) => {
		for (const candidate of await getVideosNearMilestone(client)) {
			const aid = Number(candidate.aid);
			const etaHours = await getAdjustedShortTermETA(client, aid);
			if (etaHours > 144) continue;
			const now = Date.now();
			// Clamp the ETA-derived delay into [1 second, 1 hour].
			const delay = truncate(etaHours * HOUR, 1 * SECOND, 1 * HOUR);
			await scheduleSnapshot(client, aid, "milestone", now + delay);
			logger.log(`Scheduled milestone snapshot for aid ${aid} in ${(delay / MINUTE).toFixed(2)} mins.`, "mq");
		}
	}, (e) => {
		logger.error(e as Error, "mq", "fn:dispatchMilestoneSnapshotsWorker");
	});

View File

@ -1,39 +0,0 @@
import { Job } from "npm:bullmq@5.45.2";
import { withDbConnection } from "db/withConnection.ts";
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import { getLatestVideoSnapshot } from "db/snapshot.ts";
import { truncate } from "utils/truncate.ts";
import { getVideosWithoutActiveSnapshotSchedule, scheduleSnapshot } from "db/snapshotSchedule.ts";
import logger from "log/logger.ts";
import { HOUR, MINUTE, WEEK } from "@std/datetime";
import { lockManager } from "../lockManager.ts";
import { getRegularSnapshotInterval } from "../task/regularSnapshotInterval.ts";
/**
 * BullMQ worker: schedules a "normal" snapshot for every video that currently
 * has no active snapshot schedule, at an adaptive interval after its latest
 * snapshot. Guarded by the "dispatchRegularSnapshots" lock (TTL 30 min).
 */
export const dispatchRegularSnapshotsWorker = async (_job: Job): Promise<void> =>
	await withDbConnection(async (client: Client) => {
		const startedAt = Date.now();
		if (await lockManager.isLocked("dispatchRegularSnapshots")) {
			logger.log("dispatchRegularSnapshots is already running", "mq");
			return;
		}
		await lockManager.acquireLock("dispatchRegularSnapshots", 30 * 60);
		const aids = await getVideosWithoutActiveSnapshotSchedule(client);
		for (const rawAid of aids) {
			const aid = Number(rawAid);
			const latestSnapshot = await getLatestVideoSnapshot(client, aid);
			const now = Date.now();
			// A video never snapshotted before is treated as snapshotted "now".
			const lastSnapshotedAt = latestSnapshot?.time ?? now;
			const interval = await getRegularSnapshotInterval(client, aid);
			logger.log(`Scheduled regular snapshot for aid ${aid} in ${interval} hours.`, "mq");
			// Clamp the target into (now, now + 100000 weeks].
			const targetTime = truncate(lastSnapshotedAt + interval * HOUR, now + 1, now + 100000 * WEEK);
			await scheduleSnapshot(client, aid, "normal", targetTime);
			// 25-minute budget keeps the run inside the 30-minute lock TTL.
			if (now - startedAt > 25 * MINUTE) {
				return;
			}
		}
	}, (e) => {
		logger.error(e as Error, "mq", "fn:regularSnapshotsWorker");
	}, async () => {
		// NOTE(review): this cleanup also runs on the early "already running" path,
		// releasing a lock held by another instance — confirm intended.
		await lockManager.releaseLock("dispatchRegularSnapshots");
	});

View File

@ -1,10 +0,0 @@
export * from "mq/exec/getLatestVideos.ts";
export * from "./getVideoInfo.ts";
export * from "./collectSongs.ts";
export * from "./takeBulkSnapshot.ts";
export * from "./archiveSnapshots.ts";
export * from "./dispatchMilestoneSnapshots.ts";
export * from "./dispatchRegularSnapshots.ts";
export * from "./snapshotVideo.ts";
export * from "./scheduleCleanup.ts";
export * from "./snapshotTick.ts";

View File

@ -1,9 +1,37 @@
import { Job } from "bullmq";
import { queueLatestVideos } from "mq/task/queueLatestVideo.ts";
import { withDbConnection } from "db/withConnection.ts";
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import { db } from "db/init.ts";
import { insertVideoInfo } from "mq/task/getVideoDetails.ts";
import { collectSongs } from "mq/task/collectSongs.ts";
export const getLatestVideosWorker = (_job: Job): Promise<void> =>
withDbConnection(async (client: Client) => {
export const getLatestVideosWorker = async (_job: Job): Promise<void> => {
const client = await db.connect();
try {
await queueLatestVideos(client);
});
} finally {
client.release();
}
};
/** BullMQ worker: runs the collectSongs task with a pooled DB connection. */
export const collectSongsWorker = async (_job: Job): Promise<void> => {
	const client = await db.connect();
	try {
		await collectSongs(client);
	} finally {
		// Always return the connection to the pool, even when collectSongs throws.
		client.release();
	}
};
/**
 * BullMQ worker: fetches and stores video details for `job.data.aid`.
 * @returns 3 when the job carries no aid; 0 on success.
 */
export const getVideoInfoWorker = async (job: Job): Promise<number> => {
	const client = await db.connect();
	try {
		const aid = job.data.aid;
		if (!aid) {
			// NOTE(review): aid = 0 is also treated as missing by this falsy check — confirm intended.
			return 3;
		}
		await insertVideoInfo(client, aid);
		return 0;
	} finally {
		client.release();
	}
};

View File

@ -1,15 +0,0 @@
import { Job } from "npm:bullmq@5.45.2";
import { insertVideoInfo } from "mq/task/getVideoDetails.ts";
import { withDbConnection } from "db/withConnection.ts";
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import logger from "log/logger.ts";
/**
 * BullMQ worker: fetches and stores video details for `job.data.aid`.
 * Logs a warning and does nothing when the job carries no aid.
 */
export const getVideoInfoWorker = async (job: Job): Promise<void> =>
	await withDbConnection<void>(async (client: Client) => {
		const { aid } = job.data;
		if (aid) {
			await insertVideoInfo(client, aid);
		} else {
			logger.warn("aid does not exists", "mq", "job:getVideoInfo");
		}
	});

View File

@ -1,45 +0,0 @@
import { Job } from "npm:bullmq@5.45.2";
import { withDbConnection } from "db/withConnection.ts";
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import logger from "log/logger.ts";
import { scheduleSnapshot, setSnapshotStatus } from "db/snapshotSchedule.ts";
import { SECOND } from "@std/datetime";
import { getTimeoutSchedulesCount } from "mq/task/getTimeoutSchedulesCount.ts";
import { removeAllTimeoutSchedules } from "mq/task/removeAllTimeoutSchedules.ts";
/**
 * BullMQ worker: re-queues snapshot schedules that stayed in
 * 'pending'/'processing' too long (30 min generally, 2 min for milestones).
 * When the timeout backlog exceeds 2000 rows it is purged wholesale instead.
 */
export const scheduleCleanupWorker = async (_job: Job): Promise<void> =>
	await withDbConnection<void>(async (client: Client) => {
		// Backlog too large: drop all timeout schedules instead of rescheduling.
		if (await getTimeoutSchedulesCount(client) > 2000) {
			await removeAllTimeoutSchedules(client);
			return;
		}
		// UNION also deduplicates milestone rows matched by both branches.
		const query: string = `
			SELECT id, aid, type
			FROM snapshot_schedule
			WHERE status IN ('pending', 'processing')
			AND started_at < NOW() - INTERVAL '30 minutes'
			UNION
			SELECT id, aid, type
			FROM snapshot_schedule
			WHERE status IN ('pending', 'processing')
			AND started_at < NOW() - INTERVAL '2 minutes'
			AND type = 'milestone'
		`;
		const { rows } = await client.queryObject<{ id: bigint; aid: bigint; type: string }>(query);
		if (rows.length === 0) return;
		for (const row of rows) {
			const id = Number(row.id);
			const aid = Number(row.aid);
			const type = row.type;
			await setSnapshotStatus(client, id, "timeout");
			// Retry shortly with the same schedule type.
			await scheduleSnapshot(client, aid, type, Date.now() + 10 * SECOND);
			logger.log(
				`Schedule ${id} has not received any response in a while, rescheduled.`,
				"mq",
				"fn:scheduleCleanupWorker",
			);
		}
	}, (e) => {
		logger.error(e as Error, "mq", "fn:scheduleCleanupWorker");
	});

View File

@ -1,21 +1,45 @@
import { Job } from "bullmq";
import { db } from "db/init.ts";
import { getLatestVideoSnapshot, getVideosNearMilestone } from "db/snapshot.ts";
import {
bulkGetVideosWithoutProcessingSchedules,
bulkScheduleSnapshot,
bulkSetSnapshotStatus,
findClosestSnapshot,
findSnapshotBefore,
getBulkSnapshotsInNextSecond,
getLatestSnapshot,
getSnapshotsInNextSecond,
getVideosWithoutActiveSnapshotSchedule,
hasAtLeast2Snapshots,
scheduleSnapshot,
setSnapshotStatus,
snapshotScheduleExists,
videoHasProcessingSchedule,
} from "db/snapshotSchedule.ts";
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import { HOUR, MINUTE, SECOND, WEEK } from "$std/datetime/constants.ts";
import logger from "log/logger.ts";
import { SnapshotQueue } from "mq/index.ts";
import { insertVideoSnapshot } from "mq/task/getVideoStats.ts";
import { NetSchedulerError } from "mq/scheduler.ts";
import { getBiliVideoStatus, setBiliVideoStatus } from "db/allData.ts";
import { truncate } from "utils/truncate.ts";
import { lockManager } from "mq/lockManager.ts";
import { getSongsPublihsedAt } from "db/songs.ts";
import { bulkGetVideoStats } from "net/bulkGetVideoStats.ts";
// Queue priority per schedule type (lower number = higher priority in BullMQ).
const priorityMap: { [key: string]: number } = {
	"milestone": 1,
	"normal": 3,
};

// Maps a schedule type to the task name used when taking the snapshot;
// "milestone" and "new" share the milestone task.
const snapshotTypeToTaskMap: { [key: string]: string } = {
	"milestone": "snapshotMilestoneVideo",
	"normal": "snapshotVideo",
	"new": "snapshotMilestoneVideo",
};
export const bulkSnapshotTickWorker = async (_job: Job) => {
const client = await db.connect();
try {
@ -28,22 +52,15 @@ export const bulkSnapshotTickWorker = async (_job: Job) => {
const filteredAids = await bulkGetVideosWithoutProcessingSchedules(client, aids);
if (filteredAids.length === 0) continue;
await bulkSetSnapshotStatus(client, filteredAids, "processing");
const schedulesData = group.map((schedule) => {
return {
aid: Number(schedule.aid),
id: Number(schedule.id),
type: schedule.type,
created_at: schedule.created_at,
started_at: schedule.started_at,
finished_at: schedule.finished_at,
status: schedule.status,
};
});
const dataMap: { [key: number]: number } = {};
for (const schedule of group) {
const id = Number(schedule.id);
dataMap[id] = Number(schedule.aid);
}
await SnapshotQueue.add("bulkSnapshotVideo", {
schedules: schedulesData,
map: dataMap,
}, { priority: 3 });
}
return `OK`;
} catch (e) {
logger.error(e as Error);
} finally {
@ -57,7 +74,7 @@ export const snapshotTickWorker = async (_job: Job) => {
const schedules = await getSnapshotsInNextSecond(client);
for (const schedule of schedules) {
if (await videoHasProcessingSchedule(client, Number(schedule.aid))) {
continue;
return `ALREADY_PROCESSING`;
}
let priority = 3;
if (schedule.type && priorityMap[schedule.type]) {
@ -66,12 +83,11 @@ export const snapshotTickWorker = async (_job: Job) => {
const aid = Number(schedule.aid);
await setSnapshotStatus(client, schedule.id, "processing");
await SnapshotQueue.add("snapshotVideo", {
aid: Number(aid),
aid: aid,
id: Number(schedule.id),
type: schedule.type ?? "normal",
}, { priority });
}
return `OK`;
} catch (e) {
logger.error(e as Error);
} finally {
@ -84,3 +100,297 @@ export const closetMilestone = (views: number) => {
if (views < 1000000) return 1000000;
return 10000000;
};
/** Logarithm of `value` in an arbitrary `base` (default 10), via the change-of-base identity. */
const log = (value: number, base: number = 10): number => {
	return Math.log(value) / Math.log(base);
};
/**
 * Returns the minimum ETA in hours for the next snapshot, estimated from view
 * growth over several look-back windows (3 min … 72 h) and damped by an
 * empirical factor.
 * @param client - Postgres client
 * @param aid - aid of the video
 * @returns ETA in hours (0 = snapshot immediately; Infinity = no usable data)
 */
export const getAdjustedShortTermETA = async (client: Client, aid: number) => {
	const latestSnapshot = await getLatestSnapshot(client, aid);
	// Immediately dispatch a snapshot if there is no snapshot yet
	if (!latestSnapshot) return 0;
	// Speed estimation needs at least two data points.
	const snapshotsEnough = await hasAtLeast2Snapshots(client, aid);
	if (!snapshotsEnough) return 0;
	const currentTimestamp = new Date().getTime();
	const timeIntervals = [3 * MINUTE, 20 * MINUTE, 1 * HOUR, 3 * HOUR, 6 * HOUR, 72 * HOUR];
	// Small epsilon guarding the divisions below against zero denominators.
	const DELTA = 0.00001;
	let minETAHours = Infinity;
	for (const timeInterval of timeIntervals) {
		const date = new Date(currentTimestamp - timeInterval);
		const snapshot = await findClosestSnapshot(client, aid, date);
		if (!snapshot) continue;
		const hoursDiff = (latestSnapshot.created_at - snapshot.created_at) / HOUR;
		const viewsDiff = latestSnapshot.views - snapshot.views;
		if (viewsDiff <= 0) continue;
		// Views per hour over this window.
		const speed = viewsDiff / (hoursDiff + DELTA);
		const target = closetMilestone(latestSnapshot.views);
		const viewsToIncrease = target - latestSnapshot.views;
		const eta = viewsToIncrease / (speed + DELTA);
		// Damping factor clamped to [3, 100].
		// NOTE(review): constants 2.97 / 1.14 look empirically tuned — source unknown.
		let factor = log(2.97 / log(viewsToIncrease + 1), 1.14);
		factor = truncate(factor, 3, 100);
		const adjustedETA = eta / factor;
		if (adjustedETA < minETAHours) {
			minETAHours = adjustedETA;
		}
	}
	if (isNaN(minETAHours)) {
		minETAHours = Infinity;
	}
	return minETAHours;
};
/**
 * BullMQ worker: schedules a "milestone" snapshot for every video near a
 * milestone view count whose ETA is within 72 hours, with the delay clamped
 * into [1 second, 4 hours].
 */
export const collectMilestoneSnapshotsWorker = async (_job: Job) => {
	const client = await db.connect();
	try {
		const videos = await getVideosNearMilestone(client);
		for (const video of videos) {
			const aid = Number(video.aid);
			const eta = await getAdjustedShortTermETA(client, aid);
			if (eta > 72) continue;
			const now = Date.now();
			const scheduledNextSnapshotDelay = eta * HOUR;
			const maxInterval = 4 * HOUR;
			const minInterval = 1 * SECOND;
			const delay = truncate(scheduledNextSnapshotDelay, minInterval, maxInterval);
			const targetTime = now + delay;
			await scheduleSnapshot(client, aid, "milestone", targetTime);
		}
	} catch (e) {
		logger.error(e as Error, "mq", "fn:collectMilestoneSnapshotsWorker");
	} finally {
		client.release();
	}
};
/**
 * Derives the interval (in hours) until a video's next regular snapshot from
 * its view growth over roughly the last 24 hours.
 * @returns 0 when there is not enough data; otherwise 6-72 hours, shorter for
 * faster-growing videos.
 */
const getRegularSnapshotInterval = async (client: Client, aid: number) => {
	const now = Date.now();
	const date = new Date(now - 24 * HOUR);
	// Prefer a snapshot taken before the 24 h mark; fall back to the closest one.
	let oldSnapshot = await findSnapshotBefore(client, aid, date);
	if (!oldSnapshot) oldSnapshot = await findClosestSnapshot(client, aid, date);
	const latestSnapshot = await getLatestSnapshot(client, aid);
	if (!oldSnapshot || !latestSnapshot) return 0;
	// Only one distinct snapshot available: no growth signal yet.
	if (oldSnapshot.created_at === latestSnapshot.created_at) return 0;
	const hoursDiff = (latestSnapshot.created_at - oldSnapshot.created_at) / HOUR;
	// Window too short to estimate a daily speed reliably.
	if (hoursDiff < 8) return 24;
	const viewsDiff = latestSnapshot.views - oldSnapshot.views;
	if (viewsDiff === 0) return 72;
	// Views per day; +0.001 guards against division by zero.
	const speedPerDay = viewsDiff / (hoursDiff + 0.001) * 24;
	if (speedPerDay < 6) return 36;
	if (speedPerDay < 120) return 24;
	if (speedPerDay < 320) return 12;
	return 6;
};
export const regularSnapshotsWorker = async (_job: Job) => {
const client = await db.connect();
const startedAt = Date.now();
if (await lockManager.isLocked("dispatchRegularSnapshots")) {
logger.log("dispatchRegularSnapshots is already running", "mq");
client.release();
return;
}
await lockManager.acquireLock("dispatchRegularSnapshots", 30 * 60);
try {
const aids = await getVideosWithoutActiveSnapshotSchedule(client);
for (const rawAid of aids) {
const aid = Number(rawAid);
const latestSnapshot = await getLatestVideoSnapshot(client, aid);
const now = Date.now();
const lastSnapshotedAt = latestSnapshot?.time ?? now;
const interval = await getRegularSnapshotInterval(client, aid);
logger.log(`Scheduled regular snapshot for aid ${aid} in ${interval} hours.`, "mq");
const targetTime = truncate(lastSnapshotedAt + interval * HOUR, now + 1, now + 100000 * WEEK);
await scheduleSnapshot(client, aid, "normal", targetTime);
if (now - startedAt > 25 * MINUTE) {
return;
}
}
} catch (e) {
logger.error(e as Error, "mq", "fn:regularSnapshotsWorker");
} finally {
lockManager.releaseLock("dispatchRegularSnapshots");
client.release();
}
};
/**
 * BullMQ worker: fetches stats for a batch of videos in one bulk API call,
 * stores one snapshot row per video, then re-schedules each video's next
 * "normal" snapshot.
 * @param job `job.data.map` maps schedule id -> aid.
 * @returns A short status string describing the outcome.
 */
export const takeBulkSnapshotForVideosWorker = async (job: Job) => {
	const dataMap: { [key: number]: number } = job.data.map;
	const ids = Object.keys(dataMap).map((id) => Number(id));
	const aidsToFetch: number[] = [];
	const client = await db.connect();
	try {
		// Only fetch videos whose schedule rows still exist.
		for (const id of ids) {
			const aid = Number(dataMap[id]);
			const exists = await snapshotScheduleExists(client, id);
			if (!exists) {
				continue;
			}
			aidsToFetch.push(aid);
		}
		const data = await bulkGetVideoStats(aidsToFetch);
		if (typeof data === "number") {
			// A numeric result is an API status code: fail the batch and retry soon.
			await bulkSetSnapshotStatus(client, ids, "failed");
			await bulkScheduleSnapshot(client, aidsToFetch, "normal", Date.now() + 15 * SECOND);
			return `GET_BILI_STATUS_${data}`;
		}
		for (const video of data) {
			const aid = video.id;
			const stat = video.cnt_info;
			const views = stat.play;
			const danmakus = stat.danmaku;
			const replies = stat.reply;
			const likes = stat.thumb_up;
			const coins = stat.coin;
			const shares = stat.share;
			const favorites = stat.collect;
			const query: string = `
				INSERT INTO video_snapshot (aid, views, danmakus, replies, likes, coins, shares, favorites)
				VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
			`;
			await client.queryObject(
				query,
				[aid, views, danmakus, replies, likes, coins, shares, favorites],
			);
			logger.log(`Taken snapshot for video ${aid} in bulk.`, "net", "fn:takeBulkSnapshotForVideosWorker");
		}
		await bulkSetSnapshotStatus(client, ids, "completed");
		// Plan the next regular snapshot for every fetched video.
		for (const aid of aidsToFetch) {
			const interval = await getRegularSnapshotInterval(client, aid);
			logger.log(`Scheduled regular snapshot for aid ${aid} in ${interval} hours.`, "mq");
			await scheduleSnapshot(client, aid, "normal", Date.now() + interval * HOUR);
		}
		return `DONE`;
	} catch (e) {
		if (e instanceof NetSchedulerError && e.code === "NO_PROXY_AVAILABLE") {
			logger.warn(
				`No available proxy for bulk request now.`,
				"mq",
				"fn:takeBulkSnapshotForVideosWorker",
			);
			// NOTE(review): the batch is marked "completed" even though no snapshot was
			// taken (a retry is scheduled right after) — confirm this status is intended.
			await bulkSetSnapshotStatus(client, ids, "completed");
			await bulkScheduleSnapshot(client, aidsToFetch, "normal", Date.now() + 2 * MINUTE);
			return;
		}
		logger.error(e as Error, "mq", "fn:takeBulkSnapshotForVideosWorker");
		await bulkSetSnapshotStatus(client, ids, "failed");
	} finally {
		client.release();
	}
};
/**
 * BullMQ worker: takes a single snapshot for one video according to its
 * schedule entry, then (depending on the schedule type) plans the next one.
 * @param job Carries `id` (schedule id), `aid` (video id) and `type`
 * ("normal" | "new" | "milestone").
 * @returns A short status string, or undefined when the work was skipped.
 */
export const takeSnapshotForVideoWorker = async (job: Job) => {
	const id = job.data.id;
	const aid = Number(job.data.aid);
	const type = job.data.type;
	const task = snapshotTypeToTaskMap[type] ?? "snapshotVideo";
	const client = await db.connect();
	// Milestone snapshots retry quickly; others back off for 2 minutes.
	const retryInterval = type === "milestone" ? 5 * SECOND : 2 * MINUTE;
	const exists = await snapshotScheduleExists(client, id);
	if (!exists) {
		client.release();
		return;
	}
	const status = await getBiliVideoStatus(client, aid);
	if (status !== 0) {
		// Non-zero Bilibili status stored in the DB: refuse to snapshot.
		client.release();
		return `REFUSE_WORKING_BILI_STATUS_${status}`;
	}
	try {
		await setSnapshotStatus(client, id, "processing");
		const stat = await insertVideoSnapshot(client, aid, task);
		if (typeof stat === "number") {
			// A numeric result is a Bilibili status code, not stats: persist it and stop.
			await setBiliVideoStatus(client, aid, stat);
			await setSnapshotStatus(client, id, "completed");
			return `GET_BILI_STATUS_${stat}`;
		}
		await setSnapshotStatus(client, id, "completed");
		if (type === "normal") {
			const interval = await getRegularSnapshotInterval(client, aid);
			logger.log(`Scheduled regular snapshot for aid ${aid} in ${interval} hours.`, "mq");
			await scheduleSnapshot(client, aid, type, Date.now() + interval * HOUR);
			return `DONE`;
		} else if (type === "new") {
			const publihsedAt = await getSongsPublihsedAt(client, aid);
			// NOTE(review): assumes `stat.time` and publishedAt share the same unit — confirm.
			const timeSincePublished = stat.time - publihsedAt!;
			const viewsPerHour = stat.views / timeSincePublished * HOUR;
			if (timeSincePublished > 48 * HOUR) {
				return `DONE`;
			}
			if (timeSincePublished > 2 * HOUR && viewsPerHour < 10) {
				return `DONE`;
			}
			// Denser polling for faster-growing new videos.
			let intervalMins = 240;
			if (viewsPerHour > 50) {
				intervalMins = 120;
			}
			if (viewsPerHour > 100) {
				intervalMins = 60;
			}
			if (viewsPerHour > 1000) {
				intervalMins = 15;
			}
			await scheduleSnapshot(client, aid, type, Date.now() + intervalMins * MINUTE, true);
		}
		if (type !== "milestone") return `DONE`;
		const eta = await getAdjustedShortTermETA(client, aid);
		if (eta > 72) return "ETA_TOO_LONG";
		const now = Date.now();
		const targetTime = now + eta * HOUR;
		await scheduleSnapshot(client, aid, type, targetTime);
		return `DONE`;
	} catch (e) {
		if (e instanceof NetSchedulerError && e.code === "NO_PROXY_AVAILABLE") {
			logger.warn(
				`No available proxy for aid ${job.data.aid}.`,
				"mq",
				"fn:takeSnapshotForVideoWorker",
			);
			// NOTE(review): marks the schedule "completed" despite not snapshotting;
			// a retry is scheduled separately — confirm this status is intended.
			await setSnapshotStatus(client, id, "completed");
			await scheduleSnapshot(client, aid, type, Date.now() + retryInterval);
			return;
		}
		logger.error(e as Error, "mq", "fn:takeSnapshotForVideoWorker");
		await setSnapshotStatus(client, id, "failed");
	} finally {
		client.release();
	}
};
/**
 * BullMQ worker: re-queues snapshot schedules stuck in 'pending'/'processing'
 * for over 30 minutes. Each stale schedule is marked "timeout" and a
 * replacement of the same type is scheduled 10 seconds from now.
 */
export const scheduleCleanupWorker = async (_job: Job) => {
	const client = await db.connect();
	try {
		const query = `
			SELECT id, aid, type
			FROM snapshot_schedule
			WHERE status IN ('pending', 'processing')
			AND started_at < NOW() - INTERVAL '30 minutes'
		`;
		const { rows } = await client.queryObject<{ id: bigint; aid: bigint; type: string }>(query);
		if (rows.length === 0) return;
		for (const row of rows) {
			const id = Number(row.id);
			const aid = Number(row.aid);
			const type = row.type;
			await setSnapshotStatus(client, id, "timeout");
			await scheduleSnapshot(client, aid, type, Date.now() + 10 * SECOND);
			logger.log(
				// Fix: the query's staleness threshold is 30 minutes, not 5.
				`Schedule ${id} has no response received for 30 minutes, rescheduled.`,
				"mq",
				"fn:scheduleCleanupWorker",
			);
		}
	} catch (e) {
		logger.error(e as Error, "mq", "fn:scheduleCleanupWorker");
	} finally {
		client.release();
	}
};

View File

@ -1,107 +0,0 @@
import { Job } from "npm:bullmq@5.45.2";
import { withDbConnection } from "db/withConnection.ts";
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import { scheduleSnapshot, setSnapshotStatus, snapshotScheduleExists } from "db/snapshotSchedule.ts";
import logger from "log/logger.ts";
import { HOUR, MINUTE, SECOND } from "@std/datetime";
import { lockManager } from "mq/lockManager.ts";
import { getBiliVideoStatus, setBiliVideoStatus } from "../../db/bilibili_metadata.ts";
import { insertVideoSnapshot } from "mq/task/getVideoStats.ts";
import { getSongsPublihsedAt } from "db/songs.ts";
import { getAdjustedShortTermETA } from "mq/scheduling.ts";
import { NetSchedulerError } from "@core/net/delegate.ts";
// Maps a schedule type to the task name used when taking the snapshot;
// "milestone" and "new" share the milestone task.
const snapshotTypeToTaskMap: { [key: string]: string } = {
	"milestone": "snapshotMilestoneVideo",
	"normal": "snapshotVideo",
	"new": "snapshotMilestoneVideo",
};

/**
 * BullMQ worker: takes one snapshot for the scheduled video and, depending on
 * the schedule type, plans the follow-up snapshot.
 * @param job Carries `id` (schedule id), `aid` (video id) and `type`.
 */
export const snapshotVideoWorker = async (job: Job): Promise<void> => {
	const id = job.data.id;
	const aid = Number(job.data.aid);
	const type = job.data.type;
	const task = snapshotTypeToTaskMap[type] ?? "snapshotVideo";
	// Milestone snapshots retry quickly; others back off for 2 minutes.
	const retryInterval = type === "milestone" ? 5 * SECOND : 2 * MINUTE;
	await withDbConnection(async (client: Client) => {
		const exists = await snapshotScheduleExists(client, id);
		if (!exists) {
			return;
		}
		const status = await getBiliVideoStatus(client, aid);
		if (status !== 0) {
			// NOTE(review): the fn tag below names the dispatcher, not this worker —
			// looks copy-pasted; confirm.
			logger.warn(
				`Video ${aid} has status ${status} in the database. Abort snapshoting.`,
				"mq",
				"fn:dispatchRegularSnapshotsWorker",
			);
			return;
		}
		await setSnapshotStatus(client, id, "processing");
		const stat = await insertVideoSnapshot(client, aid, task);
		if (typeof stat === "number") {
			// A numeric result is a Bilibili status code, not stats.
			await setBiliVideoStatus(client, aid, stat);
			await setSnapshotStatus(client, id, "bili_error");
			logger.warn(
				`Bilibili return status ${status} when snapshoting for ${aid}.`,
				"mq",
				"fn:dispatchRegularSnapshotsWorker",
			);
			return;
		}
		await setSnapshotStatus(client, id, "completed");
		if (type === "new") {
			const publihsedAt = await getSongsPublihsedAt(client, aid);
			// NOTE(review): assumes stat.time and publishedAt share the same time unit — confirm.
			const timeSincePublished = stat.time - publihsedAt!;
			const viewsPerHour = stat.views / timeSincePublished * HOUR;
			if (timeSincePublished > 48 * HOUR) {
				return;
			}
			if (timeSincePublished > 2 * HOUR && viewsPerHour < 10) {
				return;
			}
			// Denser polling for faster-growing new videos.
			let intervalMins = 240;
			if (viewsPerHour > 50) {
				intervalMins = 120;
			}
			if (viewsPerHour > 100) {
				intervalMins = 60;
			}
			if (viewsPerHour > 1000) {
				intervalMins = 15;
			}
			await scheduleSnapshot(client, aid, type, Date.now() + intervalMins * MINUTE, true);
		}
		if (type !== "milestone") return;
		const eta = await getAdjustedShortTermETA(client, aid);
		// NOTE(review): when eta > 144 this only warns; the snapshot is still scheduled below.
		if (eta > 144) {
			const etaHoursString = eta.toFixed(2) + " hrs";
			logger.warn(
				`ETA (${etaHoursString}) too long for milestone snapshot. aid: ${aid}.`,
				"mq",
				"fn:dispatchRegularSnapshotsWorker",
			);
		}
		const now = Date.now();
		const targetTime = now + eta * HOUR;
		await scheduleSnapshot(client, aid, type, targetTime);
		// NOTE(review): status was already set to "completed" above — duplicate write.
		await setSnapshotStatus(client, id, "completed");
		return;
	}, async (e, client) => {
		if (e instanceof NetSchedulerError && e.code === "NO_PROXY_AVAILABLE") {
			logger.warn(
				`No available proxy for aid ${job.data.aid}.`,
				"mq",
				"fn:takeSnapshotForVideoWorker",
			);
			await setSnapshotStatus(client, id, "no_proxy");
			await scheduleSnapshot(client, aid, type, Date.now() + retryInterval);
			return;
		}
		logger.error(e as Error, "mq", "fn:takeSnapshotForVideoWorker");
		await setSnapshotStatus(client, id, "failed");
	}, async () => {
		// NOTE(review): releases the "dispatchRegularSnapshots" lock from a per-video
		// snapshot worker — looks copy-pasted from the dispatcher; confirm.
		await lockManager.releaseLock("dispatchRegularSnapshots");
	});
	return;
};

View File

@ -1,85 +0,0 @@
import { Job } from "npm:bullmq@5.45.2";
import { db } from "db/init.ts";
import {
bulkScheduleSnapshot,
bulkSetSnapshotStatus,
scheduleSnapshot,
snapshotScheduleExists,
} from "db/snapshotSchedule.ts";
import { bulkGetVideoStats } from "net/bulkGetVideoStats.ts";
import logger from "log/logger.ts";
import { NetSchedulerError } from "@core/net/delegate.ts";
import { HOUR, MINUTE, SECOND } from "@std/datetime";
import { getRegularSnapshotInterval } from "../task/regularSnapshotInterval.ts";
import { SnapshotScheduleType } from "@core/db/schema";
// Processes one bulk-snapshot job: verifies each schedule still exists, fetches
// stats for all remaining videos in a single bulk API call, inserts one
// video_snapshot row per video, marks the schedules completed, and queues the
// next regular snapshot for each video.
// Returns "DONE" on success, "GET_BILI_STATUS_<code>" on an upstream API error,
// or undefined when no proxy was available (job is rescheduled in that case).
export const takeBulkSnapshotForVideosWorker = async (job: Job) => {
const schedules: SnapshotScheduleType[] = job.data.schedules;
const ids = schedules.map((schedule) => Number(schedule.id));
const aidsToFetch: number[] = [];
const client = await db.connect();
try {
// Drop schedules that were deleted between enqueue time and now.
for (const schedule of schedules) {
const aid = Number(schedule.aid);
const id = Number(schedule.id);
const exists = await snapshotScheduleExists(client, id);
if (!exists) {
continue;
}
aidsToFetch.push(aid);
}
const data = await bulkGetVideoStats(aidsToFetch);
// A numeric result is a bilibili status code (error): mark the whole batch
// failed and retry shortly.
if (typeof data === "number") {
await bulkSetSnapshotStatus(client, ids, "failed");
await bulkScheduleSnapshot(client, aidsToFetch, "normal", Date.now() + 15 * SECOND);
return `GET_BILI_STATUS_${data}`;
}
// Persist one snapshot row per video returned by the bulk endpoint.
for (const video of data) {
const aid = video.id;
const stat = video.cnt_info;
const views = stat.play;
const danmakus = stat.danmaku;
const replies = stat.reply;
const likes = stat.thumb_up;
const coins = stat.coin;
const shares = stat.share;
const favorites = stat.collect;
const query: string = `
INSERT INTO video_snapshot (aid, views, danmakus, replies, likes, coins, shares, favorites)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
`;
await client.queryObject(
query,
[aid, views, danmakus, replies, likes, coins, shares, favorites],
);
logger.log(`Taken snapshot for video ${aid} in bulk.`, "net", "fn:takeBulkSnapshotForVideosWorker");
}
await bulkSetSnapshotStatus(client, ids, "completed");
// Chain the next regular snapshot for every non-archive schedule.
for (const schedule of schedules) {
const aid = Number(schedule.aid);
const type = schedule.type;
if (type == "archive") continue;
const interval = await getRegularSnapshotInterval(client, aid);
logger.log(`Scheduled regular snapshot for aid ${aid} in ${interval} hours.`, "mq");
await scheduleSnapshot(client, aid, "normal", Date.now() + interval * HOUR);
}
return `DONE`;
} catch (e) {
// Proxy exhaustion is expected under load: mark batch and retry with jitter
// (random delay up to 20 minutes) instead of failing.
if (e instanceof NetSchedulerError && e.code === "NO_PROXY_AVAILABLE") {
logger.warn(
`No available proxy for bulk request now.`,
"mq",
"fn:takeBulkSnapshotForVideosWorker",
);
await bulkSetSnapshotStatus(client, ids, "no_proxy");
await bulkScheduleSnapshot(client, aidsToFetch, "normal", Date.now() + 20 * MINUTE * Math.random());
return;
}
logger.error(e as Error, "mq", "fn:takeBulkSnapshotForVideosWorker");
await bulkSetSnapshotStatus(client, ids, "failed");
} finally {
// Always return the pooled Postgres connection.
client.release();
}
};

View File

@ -0,0 +1 @@
export * from "mq/exec/getLatestVideos.ts";

View File

@ -1,9 +1,9 @@
import { HOUR, MINUTE, SECOND } from "$std/datetime/constants.ts";
import { MINUTE, SECOND } from "$std/datetime/constants.ts";
import { ClassifyVideoQueue, LatestVideosQueue, SnapshotQueue } from "mq/index.ts";
import logger from "log/logger.ts";
import { initSnapshotWindowCounts } from "db/snapshotSchedule.ts";
import { db } from "db/init.ts";
import { redis } from "@core/db/redis.ts";
import { redis } from "db/redis.ts";
export async function initMQ() {
const client = await db.connect();
@ -30,8 +30,8 @@ export async function initMQ() {
immediately: true,
}, {
opts: {
removeOnComplete: 300,
removeOnFail: 600,
removeOnComplete: 1,
removeOnFail: 1,
},
});
@ -40,12 +40,12 @@ export async function initMQ() {
immediately: true,
}, {
opts: {
removeOnComplete: 60,
removeOnFail: 600,
removeOnComplete: 1,
removeOnFail: 1,
},
});
await SnapshotQueue.upsertJobScheduler("dispatchMilestoneSnapshots", {
await SnapshotQueue.upsertJobScheduler("collectMilestoneSnapshots", {
every: 5 * MINUTE,
immediately: true,
});
@ -55,13 +55,8 @@ export async function initMQ() {
immediately: true,
});
await SnapshotQueue.upsertJobScheduler("dispatchArchiveSnapshots", {
every: 6 * HOUR,
immediately: true,
});
await SnapshotQueue.upsertJobScheduler("scheduleCleanup", {
every: 2 * MINUTE,
every: 30 * MINUTE,
immediately: true,
});

View File

@ -1,5 +1,5 @@
import { Redis } from "ioredis";
import { redis } from "../../core/db/redis.ts";
import { redis } from "db/redis.ts";
class LockManager {
private redis: Redis;

View File

@ -1,4 +1,4 @@
import { SlidingWindow } from "./slidingWindow.ts";
import { SlidingWindow } from "mq/slidingWindow.ts";
export interface RateLimiterConfig {
window: SlidingWindow;

View File

@ -1,5 +1,5 @@
import logger from "log/logger.ts";
import { RateLimiter, type RateLimiterConfig } from "mq/rateLimiter.ts";
import { RateLimiter, RateLimiterConfig } from "mq/rateLimiter.ts";
import { SlidingWindow } from "mq/slidingWindow.ts";
import { redis } from "db/redis.ts";
import Redis from "ioredis";
@ -19,7 +19,7 @@ interface ProxiesMap {
[name: string]: Proxy;
}
type NetworkDelegateErrorCode =
type NetSchedulerErrorCode =
| "NO_PROXY_AVAILABLE"
| "PROXY_RATE_LIMITED"
| "PROXY_NOT_FOUND"
@ -28,9 +28,9 @@ type NetworkDelegateErrorCode =
| "ALICLOUD_PROXY_ERR";
export class NetSchedulerError extends Error {
public code: NetworkDelegateErrorCode;
public code: NetSchedulerErrorCode;
public rawError: unknown | undefined;
constructor(message: string, errorCode: NetworkDelegateErrorCode, rawError?: unknown) {
constructor(message: string, errorCode: NetSchedulerErrorCode, rawError?: unknown) {
super(message);
this.name = "NetSchedulerError";
this.code = errorCode;
@ -59,7 +59,7 @@ function shuffleArray<T>(array: T[]): T[] {
return newArray;
}
class NetworkDelegate {
class NetScheduler {
private proxies: ProxiesMap = {};
private providerLimiters: LimiterMap = {};
private proxyLimiters: OptionalLimiterMap = {};
@ -69,6 +69,23 @@ class NetworkDelegate {
this.proxies[proxyName] = { type, data };
}
removeProxy(proxyName: string): void {
if (!this.proxies[proxyName]) {
throw new Error(`Proxy ${proxyName} not found`);
}
delete this.proxies[proxyName];
// Clean up associated limiters
this.cleanupProxyLimiters(proxyName);
}
private cleanupProxyLimiters(proxyName: string): void {
for (const limiterId in this.proxyLimiters) {
if (limiterId.startsWith(`proxy-${proxyName}`)) {
delete this.proxyLimiters[limiterId];
}
}
}
addTask(taskName: string, provider: string, proxies: string[] | "all"): void {
this.tasks[taskName] = { provider, proxies };
}
@ -200,7 +217,8 @@ class NetworkDelegate {
const providerLimiterId = "provider-" + proxyName + "-" + provider;
if (!this.proxyLimiters[proxyLimiterId]) {
const providerLimiter = this.providerLimiters[providerLimiterId];
return await providerLimiter.getAvailability();
const providerAvailable = await providerLimiter.getAvailability();
return providerAvailable;
}
const proxyLimiter = this.proxyLimiters[proxyLimiterId];
const providerLimiter = this.providerLimiters[providerLimiterId];
@ -263,7 +281,6 @@ class NetworkDelegate {
const out = decoder.decode(output.stdout);
const rawData = JSON.parse(out);
if (rawData.statusCode !== 200) {
// noinspection ExceptionCaughtLocallyJS
throw new NetSchedulerError(
`Error proxying ${url} to ali-fc region ${region}, code: ${rawData.statusCode}.`,
"ALICLOUD_PROXY_ERR",
@ -278,7 +295,7 @@ class NetworkDelegate {
}
}
const networkDelegate = new NetworkDelegate();
const netScheduler = new NetScheduler();
const videoInfoRateLimiterConfig: RateLimiterConfig[] = [
{
window: new SlidingWindow(redis, 0.3),
@ -352,14 +369,14 @@ but both should come after addProxy and addTask to ensure proper setup and depen
*/
const regions = ["shanghai", "hangzhou", "qingdao", "beijing", "zhangjiakou", "chengdu", "shenzhen", "hohhot"];
networkDelegate.addProxy("native", "native", "");
netScheduler.addProxy("native", "native", "");
for (const region of regions) {
networkDelegate.addProxy(`alicloud-${region}`, "alicloud-fc", region);
netScheduler.addProxy(`alicloud-${region}`, "alicloud-fc", region);
}
networkDelegate.addTask("getVideoInfo", "bilibili", "all");
networkDelegate.addTask("getLatestVideos", "bilibili", "all");
networkDelegate.addTask("snapshotMilestoneVideo", "bilibili", regions.map((region) => `alicloud-${region}`));
networkDelegate.addTask("snapshotVideo", "bili_test", [
netScheduler.addTask("getVideoInfo", "bilibili", "all");
netScheduler.addTask("getLatestVideos", "bilibili", "all");
netScheduler.addTask("snapshotMilestoneVideo", "bilibili", regions.map((region) => `alicloud-${region}`));
netScheduler.addTask("snapshotVideo", "bili_test", [
"alicloud-qingdao",
"alicloud-shanghai",
"alicloud-zhangjiakou",
@ -367,7 +384,7 @@ networkDelegate.addTask("snapshotVideo", "bili_test", [
"alicloud-shenzhen",
"alicloud-hohhot",
]);
networkDelegate.addTask("bulkSnapshot", "bili_strict", [
netScheduler.addTask("bulkSnapshot", "bili_strict", [
"alicloud-qingdao",
"alicloud-shanghai",
"alicloud-zhangjiakou",
@ -375,13 +392,13 @@ networkDelegate.addTask("bulkSnapshot", "bili_strict", [
"alicloud-shenzhen",
"alicloud-hohhot",
]);
networkDelegate.setTaskLimiter("getVideoInfo", videoInfoRateLimiterConfig);
networkDelegate.setTaskLimiter("getLatestVideos", null);
networkDelegate.setTaskLimiter("snapshotMilestoneVideo", null);
networkDelegate.setTaskLimiter("snapshotVideo", null);
networkDelegate.setTaskLimiter("bulkSnapshot", null);
networkDelegate.setProviderLimiter("bilibili", biliLimiterConfig);
networkDelegate.setProviderLimiter("bili_test", bili_test);
networkDelegate.setProviderLimiter("bili_strict", bili_strict);
netScheduler.setTaskLimiter("getVideoInfo", videoInfoRateLimiterConfig);
netScheduler.setTaskLimiter("getLatestVideos", null);
netScheduler.setTaskLimiter("snapshotMilestoneVideo", null);
netScheduler.setTaskLimiter("snapshotVideo", null);
netScheduler.setTaskLimiter("bulkSnapshot", null);
netScheduler.setProviderLimiter("bilibili", biliLimiterConfig);
netScheduler.setProviderLimiter("bili_test", bili_test);
netScheduler.setProviderLimiter("bili_strict", bili_strict);
export default networkDelegate;
export default netScheduler;

View File

@ -1,65 +0,0 @@
import { findClosestSnapshot, getLatestSnapshot, hasAtLeast2Snapshots } from "db/snapshotSchedule.ts";
import { truncate } from "utils/truncate.ts";
import { closetMilestone } from "./exec/snapshotTick.ts";
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import { HOUR, MINUTE } from "@std/datetime";
// Logarithm with an arbitrary base (defaults to base 10), built on Math.log.
const log = (value: number, base: number = 10) => Math.log(value) / Math.log(base);

// Heuristic damping factor used to compress the raw milestone ETA.
// Piecewise in x (the remaining view count): above the threshold `g` the
// factor is purely logarithmic; at or below it, a constant offset `u` is
// added so small gaps are damped more strongly.
const getFactor = (x: number) => {
	const a = 1.054;
	const b = 4.5;
	const c = 100;
	const u = 0.601;
	const g = 455;
	return x > g ? log(b / log(x + 1), a) : log(b / log(x + c), a) + u;
};
/*
* Returns the minimum ETA in hours for the next snapshot
* @param client - Postgres client
* @param aid - aid of the video
* @returns ETA in hours
*/
export const getAdjustedShortTermETA = async (client: Client, aid: number) => {
const latestSnapshot = await getLatestSnapshot(client, aid);
// Immediately dispatch a snapshot if there is no snapshot yet
if (!latestSnapshot) return 0;
const snapshotsEnough = await hasAtLeast2Snapshots(client, aid);
if (!snapshotsEnough) return 0;
const currentTimestamp = new Date().getTime();
const timeIntervals = [3 * MINUTE, 20 * MINUTE, 1 * HOUR, 3 * HOUR, 6 * HOUR, 72 * HOUR];
const DELTA = 0.00001;
let minETAHours = Infinity;
for (const timeInterval of timeIntervals) {
const date = new Date(currentTimestamp - timeInterval);
const snapshot = await findClosestSnapshot(client, aid, date);
if (!snapshot) continue;
const hoursDiff = (latestSnapshot.created_at - snapshot.created_at) / HOUR;
const viewsDiff = latestSnapshot.views - snapshot.views;
if (viewsDiff <= 0) continue;
const speed = viewsDiff / (hoursDiff + DELTA);
const target = closetMilestone(latestSnapshot.views);
const viewsToIncrease = target - latestSnapshot.views;
const eta = viewsToIncrease / (speed + DELTA);
let factor = getFactor(viewsToIncrease);
factor = truncate(factor, 4.5, 100);
const adjustedETA = eta / factor;
if (adjustedETA < minETAHours) {
minETAHours = adjustedETA;
}
}
if (isNaN(minETAHours)) {
minETAHours = Infinity;
}
return minETAHours;
};

View File

@ -2,7 +2,7 @@ import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import { aidExistsInSongs, getNotCollectedSongs } from "db/songs.ts";
import logger from "log/logger.ts";
import { scheduleSnapshot } from "db/snapshotSchedule.ts";
import { MINUTE } from "@std/datetime";
import { MINUTE } from "$std/datetime/constants.ts";
export async function collectSongs(client: Client) {
const aids = await getNotCollectedSongs(client);

View File

@ -1,13 +0,0 @@
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
// Counts snapshot schedules that appear stuck: still 'pending' or 'processing'
// more than 30 minutes after they were started.
// NOTE(review): Postgres COUNT() yields bigint; the driver may map the `count`
// column to a BigInt/string rather than number — TODO confirm against callers.
export async function getTimeoutSchedulesCount(client: Client) {
const query: string = `
SELECT COUNT(id)
FROM snapshot_schedule
WHERE status IN ('pending', 'processing')
AND started_at < NOW() - INTERVAL '30 minutes'
`;
const { rows } = await client.queryObject<{ count: number }>(query);
return rows[0].count;
}

View File

@ -3,8 +3,8 @@ import { getVideoDetails } from "net/getVideoDetails.ts";
import { formatTimestampToPsql } from "utils/formatTimestampToPostgre.ts";
import logger from "log/logger.ts";
import { ClassifyVideoQueue } from "mq/index.ts";
import { userExistsInBiliUsers, videoExistsInAllData } from "../../db/bilibili_metadata.ts";
import { HOUR, SECOND } from "@std/datetime";
import { userExistsInBiliUsers, videoExistsInAllData } from "db/allData.ts";
import { HOUR, SECOND } from "$std/datetime/constants.ts";
export async function insertVideoInfo(client: Client, aid: number) {
const videoExists = await videoExistsInAllData(client, aid);
@ -42,18 +42,6 @@ export async function insertVideoInfo(client: Client, aid: number) {
[data.Card.follower, uid],
);
}
const stat = data.View.stat;
const query: string = `
INSERT INTO video_snapshot (aid, views, danmakus, replies, likes, coins, shares, favorites)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
`;
await client.queryObject(
query,
[aid, stat.view, stat.danmaku, stat.reply, stat.like, stat.coin, stat.share, stat.favorite],
);
logger.log(`Inserted video metadata for aid: ${aid}`, "mq");
await ClassifyVideoQueue.add("classifyVideo", { aid });
}

View File

@ -1,19 +1,8 @@
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import { getVideoInfo } from "net/getVideoInfo.ts";
import { LatestSnapshotType } from "db/schema.d.ts";
import logger from "log/logger.ts";
export interface SnapshotNumber {
time: number;
views: number;
coins: number;
likes: number;
favorites: number;
shares: number;
danmakus: number;
aid: number;
replies: number;
}
/*
* Fetch video stats from bilibili API and insert into database
* @returns {Promise<number|VideoSnapshot>}
@ -28,7 +17,7 @@ export async function insertVideoSnapshot(
client: Client,
aid: number,
task: string,
): Promise<number | SnapshotNumber> {
): Promise<number | LatestSnapshotType> {
const data = await getVideoInfo(aid, task);
if (typeof data == "number") {
return data;
@ -53,7 +42,7 @@ export async function insertVideoSnapshot(
logger.log(`Taken snapshot for video ${aid}.`, "net", "fn:insertVideoSnapshot");
return {
const snapshot: LatestSnapshotType = {
aid,
views,
danmakus,
@ -64,4 +53,6 @@ export async function insertVideoSnapshot(
favorites,
time,
};
return snapshot;
}

View File

@ -1,8 +1,8 @@
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import { getLatestVideoAids } from "net/getLatestVideoAids.ts";
import { videoExistsInAllData } from "../../db/bilibili_metadata.ts";
import { videoExistsInAllData } from "db/allData.ts";
import { sleep } from "utils/sleep.ts";
import { SECOND } from "@std/datetime";
import { SECOND } from "$std/datetime/constants.ts";
import logger from "log/logger.ts";
import { LatestVideosQueue } from "mq/index.ts";

View File

@ -1,22 +0,0 @@
import { findClosestSnapshot, findSnapshotBefore, getLatestSnapshot } from "db/snapshotSchedule.ts";
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import { HOUR } from "@std/datetime";
export const getRegularSnapshotInterval = async (client: Client, aid: number) => {
const now = Date.now();
const date = new Date(now - 24 * HOUR);
let oldSnapshot = await findSnapshotBefore(client, aid, date);
if (!oldSnapshot) oldSnapshot = await findClosestSnapshot(client, aid, date);
const latestSnapshot = await getLatestSnapshot(client, aid);
if (!oldSnapshot || !latestSnapshot) return 0;
if (oldSnapshot.created_at === latestSnapshot.created_at) return 0;
const hoursDiff = (latestSnapshot.created_at - oldSnapshot.created_at) / HOUR;
if (hoursDiff < 8) return 24;
const viewsDiff = latestSnapshot.views - oldSnapshot.views;
if (viewsDiff === 0) return 72;
const speedPerDay = viewsDiff / (hoursDiff + 0.001) * 24;
if (speedPerDay < 6) return 36;
if (speedPerDay < 120) return 24;
if (speedPerDay < 320) return 12;
return 6;
};

View File

@ -1,16 +0,0 @@
import { Client } from "https://deno.land/x/postgres@v0.19.3/mod.ts";
import logger from "log/logger.ts";
// Bulk-deletes snapshot schedules that have been stuck in 'pending' or
// 'processing' for over 30 minutes. Used by the cleanup worker when the
// timeout backlog grows too large to recover individually.
export async function removeAllTimeoutSchedules(client: Client) {
logger.log(
"Too many timeout schedules, directly removing these schedules...",
"mq",
"fn:scheduleCleanupWorker",
);
// Same predicate as getTimeoutSchedulesCount: keep the two in sync.
const query: string = `
DELETE FROM snapshot_schedule
WHERE status IN ('pending', 'processing')
AND started_at < NOW() - INTERVAL '30 minutes'
`;
await client.queryObject(query);
}

View File

@ -1,5 +1,5 @@
import networkDelegate from "@core/net/delegate.ts";
import { MediaListInfoData, MediaListInfoResponse } from "@core/net/bilibili.d.ts";
import netScheduler from "mq/scheduler.ts";
import { MediaListInfoData, MediaListInfoResponse } from "net/bilibili.d.ts";
import logger from "log/logger.ts";
/*
@ -12,11 +12,12 @@ import logger from "log/logger.ts";
* - The alicloud-fc threw an error: with error code `ALICLOUD_FC_ERROR`
*/
export async function bulkGetVideoStats(aids: number[]): Promise<MediaListInfoData | number> {
let url = `https://api.bilibili.com/medialist/gateway/base/resource/infos?resources=`;
const baseURL = `https://api.bilibili.com/medialist/gateway/base/resource/infos?resources=`;
let url = baseURL;
for (const aid of aids) {
url += `${aid}:2,`;
}
const data = await networkDelegate.request<MediaListInfoResponse>(url, "bulkSnapshot");
const data = await netScheduler.request<MediaListInfoResponse>(url, "bulkSnapshot");
const errMessage = `Error fetching metadata for aid list: ${aids.join(",")}:`;
if (data.code !== 0) {
logger.error(errMessage + data.code + "-" + data.message, "net", "fn:getVideoInfo");

View File

@ -1,6 +1,6 @@
import { VideoListResponse } from "@core/net/bilibili.d.ts";
import { VideoListResponse } from "net/bilibili.d.ts";
import logger from "log/logger.ts";
import networkDelegate from "@core/net/delegate.ts";
import netScheduler from "mq/scheduler.ts";
export async function getLatestVideoAids(page: number = 1, pageSize: number = 10): Promise<number[]> {
const startFrom = 1 + pageSize * (page - 1);
@ -8,7 +8,7 @@ export async function getLatestVideoAids(page: number = 1, pageSize: number = 10
const range = `${startFrom}-${endTo}`;
const errMessage = `Error fetching latest aid for ${range}:`;
const url = `https://api.bilibili.com/x/web-interface/newlist?rid=30&ps=${pageSize}&pn=${page}`;
const data = await networkDelegate.request<VideoListResponse>(url, "getLatestVideos");
const data = await netScheduler.request<VideoListResponse>(url, "getLatestVideos");
if (data.code != 0) {
logger.error(errMessage + data.message, "net", "getLastestVideos");
return [];

View File

@ -1,10 +1,10 @@
import networkDelegate from "@core/net/delegate.ts";
import { VideoDetailsData, VideoDetailsResponse } from "@core/net/bilibili.d.ts";
import netScheduler from "mq/scheduler.ts";
import { VideoDetailsData, VideoDetailsResponse } from "net/bilibili.d.ts";
import logger from "log/logger.ts";
export async function getVideoDetails(aid: number): Promise<VideoDetailsData | null> {
const url = `https://api.bilibili.com/x/web-interface/view/detail?aid=${aid}`;
const data = await networkDelegate.request<VideoDetailsResponse>(url, "getVideoInfo");
const data = await netScheduler.request<VideoDetailsResponse>(url, "getVideoInfo");
const errMessage = `Error fetching metadata for ${aid}:`;
if (data.code !== 0) {
logger.error(errMessage + data.code + "-" + data.message, "net", "fn:getVideoInfo");

View File

@ -1,5 +1,5 @@
import networkDelegate from "@core/net/delegate.ts";
import { VideoInfoData, VideoInfoResponse } from "@core/net/bilibili.d.ts";
import netScheduler from "mq/scheduler.ts";
import { VideoInfoData, VideoInfoResponse } from "net/bilibili.d.ts";
import logger from "log/logger.ts";
/*
@ -17,7 +17,7 @@ import logger from "log/logger.ts";
*/
export async function getVideoInfo(aid: number, task: string): Promise<VideoInfoData | number> {
const url = `https://api.bilibili.com/x/web-interface/view?aid=${aid}`;
const data = await networkDelegate.request<VideoInfoResponse>(url, task);
const data = await netScheduler.request<VideoInfoResponse>(url, task);
const errMessage = `Error fetching metadata for ${aid}:`;
if (data.code !== 0) {
logger.error(errMessage + data.code + "-" + data.message, "net", "fn:getVideoInfo");
@ -25,27 +25,3 @@ export async function getVideoInfo(aid: number, task: string): Promise<VideoInfo
}
return data.data;
}
/*
 * Fetch video metadata from the bilibili API by BVID.
 * @param {string} bvid - The video's BVID
 * @param {string} task - The task name used in scheduler. It can be one of the following:
 * - snapshotVideo
 * - getVideoInfo
 * - snapshotMilestoneVideo
 * @returns {Promise<VideoInfoData | number>} VideoInfoData, or the non-zero
 * status code returned by the bilibili API on failure
 * @throws {NetSchedulerError} - The error will be thrown in following cases:
 * - No proxy is available currently: with error code `NO_PROXY_AVAILABLE`
 * - The native `fetch` function threw an error: with error code `FETCH_ERROR`
 * - The alicloud-fc threw an error: with error code `ALICLOUD_FC_ERROR`
 */
export async function getVideoInfoByBV(bvid: string, task: string): Promise<VideoInfoData | number> {
	const url = `https://api.bilibili.com/x/web-interface/view?bvid=${bvid}`;
	const response = await networkDelegate.request<VideoInfoResponse>(url, task);
	if (response.code !== 0) {
		// Non-zero code means the API rejected the request; surface the code.
		const errMessage = `Error fetching metadata for ${bvid}:`;
		logger.error(errMessage + response.code + "-" + response.message, "net", "fn:getVideoInfoByBV");
		return response.code;
	}
	return response.data;
}

View File

@ -1,5 +1,5 @@
import { ConnectionOptions, Job, Worker } from "bullmq";
import { redis } from "../../core/db/redis.ts";
import { redis } from "db/redis.ts";
import logger from "log/logger.ts";
import { classifyVideosWorker, classifyVideoWorker } from "mq/exec/classifyVideo.ts";
import { WorkerError } from "mq/schema.ts";
@ -18,7 +18,7 @@ Deno.addSignalListener("SIGTERM", async () => {
Deno.exit();
});
await Akari.init();
Akari.init();
const filterWorker = new Worker(
"classifyVideo",

View File

@ -1,37 +1,22 @@
import { ConnectionOptions, Job, Worker } from "bullmq";
import {
archiveSnapshotsWorker,
bulkSnapshotTickWorker,
collectSongsWorker,
dispatchMilestoneSnapshotsWorker,
dispatchRegularSnapshotsWorker,
getLatestVideosWorker,
getVideoInfoWorker,
scheduleCleanupWorker,
snapshotTickWorker,
snapshotVideoWorker,
takeBulkSnapshotForVideosWorker,
} from "mq/exec/executors.ts";
import { redis } from "@core/db/redis.ts";
import { collectSongsWorker, getLatestVideosWorker } from "mq/executors.ts";
import { redis } from "db/redis.ts";
import logger from "log/logger.ts";
import { lockManager } from "mq/lockManager.ts";
import { WorkerError } from "mq/schema.ts";
const releaseLockForJob = async (name: string) => {
await lockManager.releaseLock(name);
logger.log(`Released lock: ${name}`, "mq");
};
const releaseAllLocks = async () => {
const locks = ["dispatchRegularSnapshots", "dispatchArchiveSnapshots", "getLatestVideos"];
for (const lock of locks) {
await releaseLockForJob(lock);
}
};
import { getVideoInfoWorker } from "mq/exec/getLatestVideos.ts";
import {
bulkSnapshotTickWorker,
collectMilestoneSnapshotsWorker,
regularSnapshotsWorker,
scheduleCleanupWorker,
snapshotTickWorker,
takeBulkSnapshotForVideosWorker,
takeSnapshotForVideoWorker,
} from "mq/exec/snapshotTick.ts";
Deno.addSignalListener("SIGINT", async () => {
logger.log("SIGINT Received: Shutting down workers...", "mq");
await releaseAllLocks();
await latestVideoWorker.close(true);
await snapshotWorker.close(true);
Deno.exit();
@ -39,7 +24,6 @@ Deno.addSignalListener("SIGINT", async () => {
Deno.addSignalListener("SIGTERM", async () => {
logger.log("SIGTERM Received: Shutting down workers...", "mq");
await releaseAllLocks();
await latestVideoWorker.close(true);
await snapshotWorker.close(true);
Deno.exit();
@ -50,11 +34,14 @@ const latestVideoWorker = new Worker(
async (job: Job) => {
switch (job.name) {
case "getLatestVideos":
return await getLatestVideosWorker(job);
await getLatestVideosWorker(job);
break;
case "getVideoInfo":
return await getVideoInfoWorker(job);
await getVideoInfoWorker(job);
break;
case "collectSongs":
return await collectSongsWorker(job);
await collectSongsWorker(job);
break;
default:
break;
}
@ -76,26 +63,35 @@ latestVideoWorker.on("error", (err) => {
logger.error(e.rawError, e.service, e.codePath);
});
latestVideoWorker.on("closed", async () => {
await lockManager.releaseLock("getLatestVideos");
});
const snapshotWorker = new Worker(
"snapshot",
async (job: Job) => {
switch (job.name) {
case "snapshotVideo":
return await snapshotVideoWorker(job);
await takeSnapshotForVideoWorker(job);
break;
case "snapshotTick":
return await snapshotTickWorker(job);
case "dispatchMilestoneSnapshots":
return await dispatchMilestoneSnapshotsWorker(job);
await snapshotTickWorker(job);
break;
case "collectMilestoneSnapshots":
await collectMilestoneSnapshotsWorker(job);
break;
case "dispatchRegularSnapshots":
return await dispatchRegularSnapshotsWorker(job);
await regularSnapshotsWorker(job);
break;
case "scheduleCleanup":
return await scheduleCleanupWorker(job);
await scheduleCleanupWorker(job);
break;
case "bulkSnapshotVideo":
return await takeBulkSnapshotForVideosWorker(job);
await takeBulkSnapshotForVideosWorker(job);
break;
case "bulkSnapshotTick":
return await bulkSnapshotTickWorker(job);
case "dispatchArchiveSnapshots":
return await archiveSnapshotsWorker(job);
await bulkSnapshotTickWorker(job);
break;
default:
break;
}
@ -107,3 +103,7 @@ snapshotWorker.on("error", (err) => {
const e = err as WorkerError;
logger.error(e.rawError, e.service, e.codePath);
});
snapshotWorker.on("closed", async () => {
await lockManager.releaseLock("dispatchRegularSnapshots");
});

View File

@ -19,9 +19,6 @@ export default defineConfig({
allow: [".", "../../"],
},
},
plugins: [tsconfigPaths()],
plugins: [tsconfigPaths()]
},
markdown: {
remarkRehype: { footnoteLabel: "脚注", footnoteBackLabel: "回到引用 1" },
}
});

File diff suppressed because it is too large Load Diff

View File

@ -9,15 +9,12 @@
"astro": "astro"
},
"dependencies": {
"@astrojs/node": "^9.1.3",
"@astrojs/svelte": "^7.0.9",
"@astrojs/tailwind": "^6.0.2",
"argon2id": "^1.0.1",
"astro": "^5.5.5",
"autoprefixer": "^10.4.21",
"pg": "^8.11.11",
"postcss": "^8.5.3",
"svelte": "^5.25.7",
"tailwindcss": "^3.0.24",
"vite-tsconfig-paths": "^5.1.4"
},

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 6.3 KiB

After

Width:  |  Height:  |  Size: 6.1 KiB

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 6.4 KiB

After

Width:  |  Height:  |  Size: 6.1 KiB

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 22 KiB

After

Width:  |  Height:  |  Size: 22 KiB

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 22 KiB

After

Width:  |  Height:  |  Size: 22 KiB

View File

@ -1,7 +1,30 @@
---
import astroLogoLight from "@assets/标题-浅色.svg";
import astroLogoDark from "@assets/标题-深色.svg";
import DarkModeImage from "@components/DarkModeImage.svelte";
import SearchBox from "@components/SearchBox.svelte";
import TitleBarMobile from "@components/TitleBarMobile.svelte";
import TitleBarDesktop from "./TitleBarDesktop.astro";
---
<TitleBarDesktop/>
<div class="hidden md:block fixed top-0 left-0 w-full h-28 bg-white/80 dark:bg-zinc-900/70 backdrop-blur-lg z-50">
<div class="w-[305px] ml-8 inline-flex h-full items-center">
<a href="/">
<DarkModeImage
lightSrc={astroLogoLight.src}
darkSrc={astroLogoDark.src}
alt="Logo"
className="w-[305px] h-24 inline-block"
client:load
/>
</a>
</div>
<SearchBox client:load />
<div
class="inline-flex right-12 absolute gap-4 h-full text-xl dark:text-[#C6DCF2] font-medium items-center w-48 justify-end"
>
<a href="/about" class="hover:dark:text-[#B1C5DA]">关于</a>
</div>
</div>
<TitleBarMobile client:load />

View File

@ -1,26 +0,0 @@
---
import astroLogoLight from "@assets/标题-浅色.svg";
import astroLogoDark from "@assets/标题-深色.svg";
import DarkModeImage from "@components/DarkModeImage.svelte";
import SearchBox from "@components/SearchBox.svelte";
---
<div class="hidden md:block relative top-0 left-0 w-full h-28 bg-white/80 dark:bg-zinc-900/70 backdrop-blur-lg z-50">
<div class="w-[305px] ml-8 inline-flex h-full items-center">
<a href="/">
<DarkModeImage
lightSrc={astroLogoLight.src}
darkSrc={astroLogoDark.src}
alt="Logo"
className="w-[305px] h-24 inline-block"
client:load
/>
</a>
</div>
<SearchBox client:load/>
<div class="inline-flex right-12 absolute gap-4 h-full
text-xl font-medium items-center w-48 justify-end">
<a href="/about">关于</a>
</div>
</div>

View File

@ -15,7 +15,7 @@
}
</script>
<div class="md:hidden relative top-0 left-0 w-full h-16 bg-white/80 dark:bg-zinc-800/70 backdrop-blur-lg z-50">
<div class="md:hidden fixed top-0 left-0 w-full h-16 bg-white/80 dark:bg-zinc-800/70 backdrop-blur-lg z-50">
{#if !showSearchBox}
<button class="inline-block ml-4 mt-4 dark:text-white">
<MenuIcon />

View File

@ -4,7 +4,7 @@ import TitleBar from "@components/TitleBar.astro";
<TitleBar/>
<main class="flex flex-col items-center justify-center h-full flex-grow gap-8 px-4">
<h1 class="text-4xl font-medium text-center">正在施工中……</h1>
<main class="flex flex-col items-center justify-center min-h-screen gap-8">
<h1 class="text-4xl font-bold text-center">正在施工中……</h1>
<p>在搜索栏输入BV号或AV号可以查询目前数据库收集到的信息~</p>
</main>

View File

@ -1,25 +1,13 @@
const N_1024 = BigInt(
"129023318876534346704360951712586568674758913224876821534686030409476129469193481910786173836188085930974906857867802234113909470848523288588793477904039083513378341278558405407018889387577114155572311708428733260891448259786041525189132461448841652472631435226032063278124857443496954605482776113964107326943",
);
const N_1024 = BigInt("129023318876534346704360951712586568674758913224876821534686030409476129469193481910786173836188085930974906857867802234113909470848523288588793477904039083513378341278558405407018889387577114155572311708428733260891448259786041525189132461448841652472631435226032063278124857443496954605482776113964107326943")
const N_2048 = BigInt(
"23987552118069940970878653610463005981599204778388399885550631951871084945075866571231062435627294546200946516668493107358732376187241747090707087544153108117326163500579370560400058549184722138636116585329496684877258304519458316233517215780035360354808658620079068489084797380781488445517430961701007542207001544091884001098497324624368085682074645221148086075871342544591022944384890014176612259729018968864426602901247715051556212559854689574013699665035317257438297910516976812428036717668766321871780963854649899276251822244719887233041422346429752896925499321431273560130952088238625622570366815755926694833109",
);
const N_2048 = BigInt("23987552118069940970878653610463005981599204778388399885550631951871084945075866571231062435627294546200946516668493107358732376187241747090707087544153108117326163500579370560400058549184722138636116585329496684877258304519458316233517215780035360354808658620079068489084797380781488445517430961701007542207001544091884001098497324624368085682074645221148086075871342544591022944384890014176612259729018968864426602901247715051556212559854689574013699665035317257438297910516976812428036717668766321871780963854649899276251822244719887233041422346429752896925499321431273560130952088238625622570366815755926694833109")
const N_1792 = BigInt(
"23987552118069940970878653610463005981599204778388399885550631951871084945075866571231062435627294546200946516668493107358732376187241747090707087544153108117326163500579370560400058549184722138636116585329496684877258304519458316233517215780035360354808658620079068489084797380781488445517430961701007542207001544091884001098497324624368085682074645221148086075871342544591022944384890014176612259729018968864426602901247715051556212559854689574013699665035317257438297910516976812428036717668766321871780963854649899276251822244719887233041422346429752896925499321431273560130952088238625622570366815755926694833109",
);
const N_1792 = BigInt("23987552118069940970878653610463005981599204778388399885550631951871084945075866571231062435627294546200946516668493107358732376187241747090707087544153108117326163500579370560400058549184722138636116585329496684877258304519458316233517215780035360354808658620079068489084797380781488445517430961701007542207001544091884001098497324624368085682074645221148086075871342544591022944384890014176612259729018968864426602901247715051556212559854689574013699665035317257438297910516976812428036717668766321871780963854649899276251822244719887233041422346429752896925499321431273560130952088238625622570366815755926694833109")
const N_1536 = BigInt(
"1694330250214463438908848400950857073137355630337290254958754184668036770489801447652464038218330711288158361242955860326168191830448553710492926795708495297280933502917598985378231124113971732841791156356676046934277122699383776036675381503510992810963611269045078440132744168908318454891211962146563551929591147663448816841024591820348784855441153716551049843185172472891407933214238000452095646085222944171689449292644270516031799660928056315886939284985905227",
);
const N_1536 = BigInt("1694330250214463438908848400950857073137355630337290254958754184668036770489801447652464038218330711288158361242955860326168191830448553710492926795708495297280933502917598985378231124113971732841791156356676046934277122699383776036675381503510992810963611269045078440132744168908318454891211962146563551929591147663448816841024591820348784855441153716551049843185172472891407933214238000452095646085222944171689449292644270516031799660928056315886939284985905227")
const N_3072 = BigInt(
"4432919939296042464443862503456460073874727648022810391370558006281079088795179408238989283371442564716849343712703672836423961818025813387453469700639513190304802553045342607888612037304066433501317127429264242784608682213025490491212489901736408833027611579294436675682774458141490718959615677971745638214649336218217578937534746160749039668886450447773018369168258067682196337978245372237157696236362344796867228581553446331915147012787367438751646936429739232247148712001806846526947508445039707404287951727838234648917450736371192435665040644040487427986702098273581288935278964444790007953559851323281510927332862225214878776790605026472021669614552481167977412450477230442015077669503312683966631454347169703030544483487968842349634064181183599641180349414682042575010303056241481622837185325228233789954078775053744988023738762706404546546146837242590884760044438874357295029411988267287001033032827035809135092270843",
);
const N_3072 = BigInt("4432919939296042464443862503456460073874727648022810391370558006281079088795179408238989283371442564716849343712703672836423961818025813387453469700639513190304802553045342607888612037304066433501317127429264242784608682213025490491212489901736408833027611579294436675682774458141490718959615677971745638214649336218217578937534746160749039668886450447773018369168258067682196337978245372237157696236362344796867228581553446331915147012787367438751646936429739232247148712001806846526947508445039707404287951727838234648917450736371192435665040644040487427986702098273581288935278964444790007953559851323281510927332862225214878776790605026472021669614552481167977412450477230442015077669503312683966631454347169703030544483487968842349634064181183599641180349414682042575010303056241481622837185325228233789954078775053744988023738762706404546546146837242590884760044438874357295029411988267287001033032827035809135092270843")
const N_4096 = BigInt(
"703671044356805218391078271512201582198770553281951369783674142891088501340774249238173262580562112786670043634665390581120113644316651934154746357220932310140476300088580654571796404198410555061275065442553506658401183560336140989074165998202690496991174269748740565700402715364422506782445179963440819952745241176450402011121226863984008975377353558155910994380700267903933205531681076494639818328879475919332604951949178075254600102192323286738973253864238076198710173840170988339024438220034106150475640983877458155141500313471699516670799821379238743709125064098477109094533426340852518505385314780319279862586851512004686798362431227795743253799490998475141728082088984359237540124375439664236138519644100625154580910233437864328111620708697941949936338367445851449766581651338876219676721272448769082914348242483068204896479076062102236087066428603930888978596966798402915747531679758905013008059396214343112694563043918465373870648649652122703709658068801764236979191262744515840224548957285182453209028157886219424802426566456408109642062498413592155064289314088837031184200671561102160059065729282902863248815224399131391716503171191977463328439766546574118092303414702384104112719959325482439604572518549918705623086363111",
);
const N_4096 = BigInt("703671044356805218391078271512201582198770553281951369783674142891088501340774249238173262580562112786670043634665390581120113644316651934154746357220932310140476300088580654571796404198410555061275065442553506658401183560336140989074165998202690496991174269748740565700402715364422506782445179963440819952745241176450402011121226863984008975377353558155910994380700267903933205531681076494639818328879475919332604951949178075254600102192323286738973253864238076198710173840170988339024438220034106150475640983877458155141500313471699516670799821379238743709125064098477109094533426340852518505385314780319279862586851512004686798362431227795743253799490998475141728082088984359237540124375439664236138519644100625154580910233437864328111620708697941949936338367445851449766581651338876219676721272448769082914348242483068204896479076062102236087066428603930888978596966798402915747531679758905013008059396214343112694563043918465373870648649652122703709658068801764236979191262744515840224548957285182453209028157886219424802426566456408109642062498413592155064289314088837031184200671561102160059065729282902863248815224399131391716503171191977463328439766546574118092303414702384104112719959325482439604572518549918705623086363111")
export const N_ARRAY = [N_1024, N_1536, N_1792, N_2048, N_3072, N_4096];

View File

@ -1,27 +1,27 @@
# 关于「中 V 档案馆」
# 关于「中V档案馆」
「中 V 档案馆」是一个旨在收录与展示「中文歌声合成作品」及有关信息的网站。
「中V档案馆」是一个旨在收录与展示「中文歌声合成作品」及有关信息的网站。
## 创建背景与关联工作
纵观整个互联网,对于「中文歌声合成」或「中文虚拟歌手」(常简称为中 V VC相关信息进行较为系统、全面地整理收集的主要有以下几个网站
纵观整个互联网对于「中文歌声合成」或「中文虚拟歌手」常简称为中V或VC相关信息进行较为系统、全面地整理收集的主要有以下几个网站
- [萌娘百科](https://zh.moegirl.org.cn/):
收录了大量中 V 歌曲及歌姬的信息,呈现形式为传统维基(基于 [MediaWiki](https://www.mediawiki.org/))。
收录了大量中V歌曲及歌姬的信息呈现形式为传统维基基于[MediaWiki](https://www.mediawiki.org/))。
- [VCPedia](https://vcpedia.cn/):
由原萌娘百科中文歌声合成编辑团队的部分成员搭建,专属于中文歌声合成相关内容的信息集成站点 [^1],呈现形式为传统维基(基于 [MediaWiki](https://www.mediawiki.org/))。
由原萌娘百科中文歌声合成编辑团队的部分成员搭建,专属于中文歌声合成相关内容的信息集成站点[^1],呈现形式为传统维基(基于[MediaWiki](https://www.mediawiki.org/))。
- [VocaDB](https://vocadb.net/): 一个围绕 Vocaloid、UTAU 和其他歌声合成器的协作数据库其中包含艺术家、唱片、PV
[^2],其中包含大量中文歌声合成作品。
- [天钿 Daily](https://tdd.bunnyxt.com/):一个 VC 相关数据交流与分享的网站。致力于 VC 相关数据交流,定期抓取 VC 相关数据,选取有意义的纬度展示。[^3]
等[^2],其中包含大量中文歌声合成作品。
- [天钿Daily](https://tdd.bunnyxt.com/)一个VC相关数据交流与分享的网站。致力于VC相关数据交流定期抓取VC相关数据选取有意义的纬度展示。[^3]
上述网站中,或多或少存在一些不足,例如:
- 萌娘百科、VCPedia 受限于传统维基,绝大多数内容依赖人工编辑。
- VocaDB 基于结构化数据库构建,由此可以依赖程序生成一些信息,但 **条目收录** 仍然完全依赖人工完成。
- VocaDB 主要专注于元数据展示,少有关于歌曲、作者等的描述性的文字,也缺乏描述性的背景信息。
- 天钿 Daily 只展示歌曲的统计数据及历史趋势,没有关于歌曲其它信息的收集。
- 萌娘百科、VCPedia受限于传统维基绝大多数内容依赖人工编辑。
- VocaDB基于结构化数据库构建由此可以依赖程序生成一些信息但**条目收录**仍然完全依赖人工完成。
- VocaDB主要专注于元数据展示少有关于歌曲、作者等的描述性的文字也缺乏描述性的背景信息。
- 天钿Daily只展示歌曲的统计数据及历史趋势没有关于歌曲其它信息的收集。
因此,**中 V 档案馆** 吸取前人经验,克服上述网站的不足,希望做到:
因此,**中V档案馆**吸取前人经验,克服上述网站的不足,希望做到:
- 歌曲收录(指发现歌曲并创建条目)的完全自动化
- 歌曲元信息提取的高度自动化
@ -31,30 +31,31 @@
## 技术架构
参见 [CVSA 文档](https://docs.projectcvsa.com/)。
参见[CVSA文档](https://docs.projectcvsa.com/)。
## 开放许可
受本文以 [CC BY-NC-SA 4.0 协议](https://creativecommons.org/licenses/by-nc-sa/4.0/) 提供。
受本文以[CC BY-NC-SA 4.0协议](https://creativecommons.org/licenses/by-nc-sa/4.0/)提供。
### 数据库
V 档案馆使用 [PostgreSQL](https://postgresql.org) 作为数据库,我们承诺定期导出数据库转储 (dump)
中V档案馆使用[PostgreSQL](https://postgresql.org)作为数据库,我们承诺定期导出数据库转储 (dump)
文件并公开,其内容遵从以下协议或条款:
- 数据库中的事实性数据,根据适用法律,不构成受版权保护的内容。中 V 档案馆放弃一切可能的权利([CC0 1.0 Universal](https://creativecommons.org/publicdomain/zero/1.0/))。
- 对于数据库中有原创性的内容(如贡献者编辑的描述性内容),如无例外,以 [CC BY 4.0 协议](https://creativecommons.org/licenses/by/4.0/) 提供。
- 对于引用、摘编或改编自萌娘百科、VCPedia 的内容,以与原始协议(CC BY-NC-SA 3.0
CN)兼容的协议 [CC BY-NC-SA 4.0 协议](https://creativecommons.org/licenses/by-nc-sa/4.0/) 提供,并注明原始协议 。
> 根据原始协议第四条第 2 项内容CC BY-NC-SA 4.0 协议为与原始协议具有相同授权要素的后续版本(“可适用的协议”)。
- 中 V 档案馆文档使用 [CC BY 4.0 协议](https://creativecommons.org/licenses/by/4.0/)。
- 数据库中的事实性数据根据适用法律不构成受版权保护的内容。中V档案馆放弃一切可能的权利[CC0 1.0 Universal](https://creativecommons.org/publicdomain/zero/1.0/))。
- 对于数据库中有原创性的内容(如贡献者编辑的描述性内容),如无例外,以[CC BY 4.0协议](https://creativecommons.org/licenses/by/4.0/)提供。
- 对于引用、摘编或改编自萌娘百科、VCPedia的内容以与原始协议(CC BY-NC-SA 3.0
CN)兼容的协议[CC BY-NC-SA 4.0协议](https://creativecommons.org/licenses/by-nc-sa/4.0/)提供,并注明原始协议 。
> 根据原始协议第四条第2项内容CC BY-NC-SA 4.0协议为与原始协议具有相同授权要素的后续版本(“可适用的协议”)。
- 中V档案馆文档使用[CC BY 4.0协议](https://creativecommons.org/licenses/by/4.0/)。
### 软件代码
用于构建中 V 档案馆的软件代码在 [AGPL 3.0](https://www.gnu.org/licenses/agpl-3.0.html) 许可证下公开,参见 [LICENSE](./LICENSE)
用于构建中V档案馆的软件代码在[AGPL 3.0](https://www.gnu.org/licenses/agpl-3.0.html)许可证下公开,参见[LICENSE](./LICENSE)
[^1]: 引用自 [VCPedia](https://vcpedia.cn/%E9%A6%96%E9%A1%B5),于 [知识共享 署名-非商业性使用-相同方式共享 3.0中国大陆 (CC BY-NC-SA 3.0 CN) 许可协议](https://creativecommons.org/licenses/by-nc-sa/3.0/cn/) 下提供。
[^1]: 引用自[VCPedia](https://vcpedia.cn/%E9%A6%96%E9%A1%B5),于[知识共享 署名-非商业性使用-相同方式共享 3.0中国大陆 (CC BY-NC-SA 3.0 CN) 许可协议](https://creativecommons.org/licenses/by-nc-sa/3.0/cn/)下提供。
[^2]: 翻译自 [VocaDB](https://vocadb.net/),于 [CC BY 4.0协议](https://creativecommons.org/licenses/by/4.0/) 下提供。
[^2]: 翻译自[VocaDB](https://vocadb.net/),于[CC BY 4.0协议](https://creativecommons.org/licenses/by/4.0/)下提供。
[^3]: 引用自 [关于 - 天钿Daily](https://tdd.bunnyxt.com/about)
[^3]: 引用自[关于 - 天钿Daily](https://tdd.bunnyxt.com/about)

View File

@ -1,15 +0,0 @@
---
import TitleBar from "@components/TitleBar.astro";
import Layout from '@layouts/Layout.astro';
import Footer from "./Footer.astro";
---
<Layout>
<TitleBar/>
<main class="flex flex-col items-center flex-grow gap-8 md:mt-12 relative z-0">
<div class="w-full lg:w-2/3 xl:w-1/2 content px-8 md:px-12 lg:px-0">
<slot/>
</div>
</main>
<Footer/>
</Layout>

View File

@ -1,10 +0,0 @@
<footer class="py-6">
<div class="container mx-auto text-center">
<ul class="flex justify-center space-x-4">
<li><a href="/about">关于</a></li>
<li><a href="#">服务</a></li>
<li><a href="#">隐私政策</a></li>
<li><a href="#">联系我们</a></li>
</ul>
</div>
</footer>

View File

@ -1,3 +0,0 @@
---
---

View File

@ -1,7 +1,5 @@
---
import "../styles/global.css";
const { title } = Astro.props;
const pageTitle = title ? title + ' - 中V档案馆' :'中V档案馆';
---
<!doctype html>
@ -9,9 +7,9 @@ const pageTitle = title ? title + ' - 中V档案馆' :'中V档案馆';
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>{pageTitle}</title>
<title>中V档案馆</title>
</head>
<body class="dark:bg-zinc-900 dark:text-zinc-100 min-h-screen flex flex-col">
<body class="dark:bg-zinc-900 dark:text-zinc-100">
<slot />
</body>
</html>

View File

@ -1,12 +0,0 @@
---
import Layout from "@layouts/Layout.astro";
import Content from "@layouts/Content.astro";
---
<Layout title="开源许可">
<Content>
中V档案馆运行着开源软件它的前端、后端和其它组件的代码在
<a href="https://www.gnu.org/licenses/agpl-3.0.html">AGPL 3.0</a>
开源许可下授权。
</Content>
</Layout>

View File

@ -7,8 +7,8 @@ import "../styles/content.css";
<Layout>
<TitleBar/>
<main class="flex flex-col items-center min-h-screen gap-8 md:mt-12 relative z-0">
<div class="w-full lg:w-2/3 xl:w-1/2 content px-8 md:px-12 lg:px-0">
<main class="flex flex-col items-center min-h-screen gap-8 mt-36 relative z-0">
<div class="lg:w-1/2 content">
<AboutContent/>
</div>
</main>

View File

@ -1,9 +0,0 @@
---
import Layout from "@layouts/Layout.astro";
---
<Layout title="注册">
<main class="relative flex-grow pt-36">
<h1>欢迎</h1>
</main>
</Layout>

View File

@ -100,57 +100,57 @@ interface Snapshot {
<Layout>
<TitleBar />
<main class="flex flex-col items-center min-h-screen gap-8 mt-6 relative z-0 overflow-x-auto">
<main class="flex flex-col items-center min-h-screen gap-8 mt-36 relative z-0">
<div class="max-w-4xl mx-auto rounded-lg p-6">
<h1 class="text-2xl font-medium mb-4">视频信息: <a href={`https://www.bilibili.com/video/av${aid}`} class="underline ">av{aid}</a></h1>
<h1 class="text-2xl font-bold mb-4">视频信息: <a href={`https://www.bilibili.com/video/av${aid}`} class="underline">av{aid}</a></h1>
<div class="mb-6 p-4 rounded-lg">
<h2 class="text-xl font-medium mb-8">基本信息</h2>
<div class="overflow-x-auto max-w-full">
<h2 class="text-xl font-semibold mb-8">基本信息</h2>
<div class="overflow-x-auto">
<table class="table-auto w-full">
<tbody>
<tr>
<td class="border dark:border-zinc-500 px-4 py-2 font-semibold">ID</td>
<td class="border dark:border-zinc-500 px-4 py-2 font-bold">ID</td>
<td class="border dark:border-zinc-500 px-4 py-2">{videoInfo?.id}</td>
</tr>
<tr>
<td class="border dark:border-zinc-500 px-4 py-2 font-semibold">AID</td>
<td class="border dark:border-zinc-500 px-4 py-2 font-bold">AID</td>
<td class="border dark:border-zinc-500 px-4 py-2">{videoInfo?.aid}</td>
</tr>
<tr>
<td class="border dark:border-zinc-500 px-4 py-2 font-semibold">BVID</td>
<td class="border dark:border-zinc-500 px-4 py-2 font-bold">BVID</td>
<td class="border dark:border-zinc-500 px-4 py-2">{videoInfo?.bvid}</td>
</tr>
<tr>
<td class="border dark:border-zinc-500 px-4 py-2 font-[470]">标题</td>
<td class="border dark:border-zinc-500 px-4 py-2 font-bold">标题</td>
<td class="border dark:border-zinc-500 px-4 py-2">{videoInfo?.title}</td>
</tr>
<tr>
<td class="border dark:border-zinc-500 px-4 py-2 font-[470]">描述</td>
<td class="border dark:border-zinc-500 px-4 py-2 font-bold">描述</td>
<td class="border dark:border-zinc-500 px-4 py-2">{videoInfo?.description}</td>
</tr>
<tr>
<td class="border dark:border-zinc-500 px-4 py-2 font-semibold">UID</td>
<td class="border dark:border-zinc-500 px-4 py-2 font-bold">UID</td>
<td class="border dark:border-zinc-500 px-4 py-2">{videoInfo?.uid}</td>
</tr>
<tr>
<td class="border dark:border-zinc-500 px-4 py-2 font-[470]">标签</td>
<td class="border dark:border-zinc-500 px-4 py-2 font-bold">标签</td>
<td class="border dark:border-zinc-500 px-4 py-2">{videoInfo?.tags}</td>
</tr>
<tr>
<td class="border dark:border-zinc-500 px-4 py-2 font-[470]">发布时间</td>
<td class="border dark:border-zinc-500 px-4 py-2 font-bold">发布时间</td>
<td class="border dark:border-zinc-500 px-4 py-2">{videoInfo?.published_at ? format(new Date(videoInfo.published_at), 'yyyy-MM-dd HH:mm:ss', { locale: zhCN }) : '-'}</td>
</tr>
<tr>
<td class="border dark:border-zinc-500 px-4 py-2 font-[470]">时长 (秒)</td>
<td class="border dark:border-zinc-500 px-4 py-2 font-bold">时长 (秒)</td>
<td class="border dark:border-zinc-500 px-4 py-2">{videoInfo?.duration}</td>
</tr>
<tr>
<td class="border dark:border-zinc-500 px-4 py-2 font-[470]">创建时间</td>
<td class="border dark:border-zinc-500 px-4 py-2 font-bold">创建时间</td>
<td class="border dark:border-zinc-500 px-4 py-2">{videoInfo?.created_at ? format(new Date(videoInfo.created_at), 'yyyy-MM-dd HH:mm:ss', { locale: zhCN }) : '-'}</td>
</tr>
<tr>
<td class="border dark:border-zinc-500 px-4 py-2 font-[470]">封面</td>
<td class="border dark:border-zinc-500 px-4 py-2 font-bold">封面</td>
<td class="border dark:border-zinc-500 px-4 py-2">{videoInfo?.cover_url ? videoInfo.cover_url : '-'}</td>
</tr>
</tbody>
@ -159,20 +159,20 @@ interface Snapshot {
</div>
<div class="p-4 rounded-lg">
<h2 class="text-xl font-medium mb-4">播放量历史数据</h2>
<h2 class="text-xl font-semibold mb-4">播放量历史数据</h2>
{snapshots && snapshots.length > 0 ? (
<div class="overflow-x-auto">
<table class="table-auto w-full">
<thead>
<tr>
<th class="border dark:border-zinc-500 px-4 py-2 font-medium">创建时间</th>
<th class="border dark:border-zinc-500 px-4 py-2 font-medium">观看</th>
<th class="border dark:border-zinc-500 px-4 py-2 font-medium">硬币</th>
<th class="border dark:border-zinc-500 px-4 py-2 font-medium">点赞</th>
<th class="border dark:border-zinc-500 px-4 py-2 font-medium">收藏</th>
<th class="border dark:border-zinc-500 px-4 py-2 font-medium">分享</th>
<th class="border dark:border-zinc-500 px-4 py-2 font-medium">弹幕</th>
<th class="border dark:border-zinc-500 px-4 py-2 font-medium">评论</th>
<th class="border dark:border-zinc-500 px-4 py-2">创建时间</th>
<th class="border dark:border-zinc-500 px-4 py-2">观看</th>
<th class="border dark:border-zinc-500 px-4 py-2">硬币</th>
<th class="border dark:border-zinc-500 px-4 py-2">点赞</th>
<th class="border dark:border-zinc-500 px-4 py-2">收藏</th>
<th class="border dark:border-zinc-500 px-4 py-2">分享</th>
<th class="border dark:border-zinc-500 px-4 py-2">弹幕</th>
<th class="border dark:border-zinc-500 px-4 py-2">评论</th>
</tr>
</thead>
<tbody>

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Some files were not shown because too many files have changed in this diff Show More