From e7cbaed069f05b6d0d47ff381a03325a3c5bbfa4 Mon Sep 17 00:00:00 2001
From: alikia2x
Date: Tue, 16 Dec 2025 03:29:54 +0800
Subject: [PATCH] ref: formatting & linting with Biome, add turborepo

---
 .gitignore | 4 +-
 .idea/biome.xml | 7 +
 .idea/data_source_mapping.xml | 1 -
 .idea/prettier.xml | 1 +
 .prettierrc | 8 -
 biome.json | 23 +
 bun.lock | 34 +
 ecosystem.config.mjs | 102 +-
 package.json | 10 +-
 packages/backend/biome.json | 7 +
 packages/backend/lib/auth.ts | 20 +-
 packages/backend/lib/bilibiliID.ts | 4 +-
 packages/backend/lib/mq.ts | 6 +-
 packages/backend/lib/schema.ts | 10 +-
 packages/backend/lib/singers.ts | 42 +-
 packages/backend/middlewares/auth.ts | 14 +-
 packages/backend/middlewares/captcha.ts | 10 +-
 packages/backend/middlewares/openapi.ts | 15 +-
 packages/backend/middlewares/timing.ts | 187 ++-
 packages/backend/package.json | 3 +-
 packages/backend/routes/auth/login.ts | 20 +-
 packages/backend/routes/auth/logout.ts | 12 +-
 packages/backend/routes/auth/user.ts | 12 +-
 packages/backend/routes/ping/index.ts | 18 +-
 packages/backend/routes/root/index.ts | 23 +-
 packages/backend/routes/search/index.ts | 59 +-
 packages/backend/routes/song/add.ts | 52 +-
 packages/backend/routes/song/delete.ts | 22 +-
 packages/backend/routes/song/info.ts | 60 +-
 packages/backend/routes/song/milestone.ts | 28 +-
 packages/backend/routes/video/eta.ts | 25 +-
 packages/backend/routes/video/label.ts | 28 +-
 packages/backend/routes/video/metadata.ts | 26 +-
 packages/backend/routes/video/snapshots.ts | 22 +-
 packages/backend/src/index.ts | 38 +-
 packages/backend/src/mq.ts | 6 +-
 packages/backend/src/onAfterHandle.ts | 8 +-
 packages/backend/src/schema.ts | 6 +-
 packages/backend/src/startMessage.ts | 2 +-
 packages/core/biome.json | 10 +
 packages/core/db/dbNew.ts | 2 +-
 packages/core/db/pgConfigNew.ts | 4 +-
 packages/core/db/redis.ts | 2 +-
 packages/core/db/snapshots/index.ts | 2 +-
 packages/core/db/snapshots/milestone.ts | 4 +-
 packages/core/drizzle.config.ts | 4 +-
 packages/core/drizzle/drizzle-cred.config.ts | 4 +-
 packages/core/drizzle/drizzle-main.config.ts | 8 +-
 packages/core/drizzle/index.ts | 4 +-
 packages/core/drizzle/type.ts | 10 +-
 packages/core/index.ts | 1 -
 packages/core/lib/index.ts | 2 +-
 packages/core/lib/math.ts | 3 +-
 packages/core/lib/type.ts | 2 +-
 packages/core/log/index.ts | 22 +-
 packages/core/mq/lockManager.ts | 2 +-
 packages/core/mq/multipleRateLimiter.ts | 4 +-
 packages/core/net/delegate.ts | 72 +-
 packages/core/net/getVideoDetails.ts | 4 +-
 packages/core/net/getVideoInfo.ts | 6 +-
 packages/core/net/services.ts | 12 +-
 packages/core/package.json | 54 +-
 packages/core/test/netDelegate.test.ts | 11 +-
 packages/core/types.d.ts | 3 -
 packages/crawler/db/bilibili_metadata.ts | 16 +-
 packages/crawler/db/eta.ts | 2 +-
 packages/crawler/db/snapshot.ts | 15 +-
 packages/crawler/db/snapshotSchedule.ts | 18 +-
 packages/crawler/metrics/index.ts | 24 +-
 packages/crawler/ml/akari.ts | 24 +-
 packages/crawler/ml/akari_api.ts | 2 +-
 packages/crawler/ml/api_manager.ts | 14 +-
 packages/crawler/ml/manager.ts | 2 +-
 packages/crawler/mq/exec/archiveSnapshots.ts | 30 +-
 packages/crawler/mq/exec/classifyVideo.ts | 24 +-
 packages/crawler/mq/exec/collectSongs.ts | 2 +-
 packages/crawler/mq/exec/directSnapshot.ts | 4 +-
 .../mq/exec/dispatchMilestoneSnapshots.ts | 12 +-
 .../mq/exec/dispatchRegularSnapshots.ts | 14 +-
 packages/crawler/mq/exec/executors.ts | 10 +-
 packages/crawler/mq/exec/getLatestVideos.ts | 2 +-
 packages/crawler/mq/exec/getVideoInfo.ts | 24 +-
 packages/crawler/mq/exec/scheduleCleanup.ts | 2 +-
 packages/crawler/mq/exec/snapshotTick.ts | 20 +-
 packages/crawler/mq/exec/snapshotVideo.ts | 18 +-
 packages/crawler/mq/exec/takeBulkSnapshot.ts | 22 +-
 packages/crawler/mq/index.ts | 12 +-
 packages/crawler/mq/init.ts | 34 +-
 packages/crawler/mq/scheduling.ts | 6 +-
 packages/crawler/mq/task/collectSongs.ts | 12 +-
 packages/crawler/mq/task/getVideoStats.ts | 8 +-
 packages/crawler/mq/task/queueLatestVideo.ts | 12 +-
 .../mq/task/regularSnapshotInterval.ts | 4 +-
 packages/crawler/net/bulkGetVideoStats.ts | 6 +-
 packages/crawler/net/getLatestVideoAids.ts | 2 +-
 packages/crawler/src/bullui.ts | 8 +-
 packages/crawler/src/filterWorker.ts | 6 +-
 packages/crawler/src/worker.ts | 18 +-
 packages/palette/src/App.tsx | 30 +-
 packages/palette/src/Switch.tsx | 16 +-
 packages/palette/src/ThemeContext.tsx | 2 +-
 packages/palette/src/colorTokens.ts | 8 +-
 packages/palette/src/components/Check.tsx | 4 +-
 .../palette/src/components/ColorBlock.tsx | 20 +-
 .../palette/src/components/Components.tsx | 22 +-
 packages/palette/src/components/Palette.tsx | 2 +-
 .../palette/src/components/Picker/Handle.tsx | 9 +-
 .../palette/src/components/Picker/Picker.tsx | 16 +-
 .../palette/src/components/Picker/Slider.tsx | 23 +-
 .../src/components/Picker/useOklchCanvas.tsx | 6 +-
 .../palette/src/components/Picker/utils.ts | 10 +-
 packages/palette/src/main.tsx | 2 +-
 packages/palette/src/utils.ts | 4 +-
 packages/palette/uno.config.ts | 2 +-
 packages/palette/vite.config.ts | 14 +-
 packages/temp_frontend/.prettierrc | 5 -
 .../temp_frontend/app/components/Error.tsx | 10 +-
 .../temp_frontend/app/components/Search.tsx | 24 +-
 .../app/components/SearchResults.tsx | 46 +-
 .../temp_frontend/app/components/Title.tsx | 2 +-
 .../app/components/icons/search.tsx | 8 +-
 .../app/components/ui/alert-dialog.tsx | 211 ++-
 .../app/components/ui/button.tsx | 12 +-
 .../temp_frontend/app/components/ui/card.tsx | 124 +-
 .../temp_frontend/app/components/ui/chart.tsx | 569 ++++----
 .../app/components/ui/dialog.tsx | 204 ++-
 .../temp_frontend/app/components/ui/input.tsx | 4 +-
 .../temp_frontend/app/components/ui/label.tsx | 33 +-
 .../app/components/ui/progress.tsx | 46 +-
 .../app/components/ui/scroll-area.tsx | 10 +-
 .../app/components/ui/select.tsx | 283 ++--
 .../app/components/ui/skeleton.tsx | 18 +-
 .../app/components/ui/sonner.tsx | 2 +-
 .../temp_frontend/app/components/ui/table.tsx | 156 +--
 .../temp_frontend/app/components/ui/tabs.tsx | 20 +-
 packages/temp_frontend/app/lib/utils.ts | 2 +-
 packages/temp_frontend/app/root.tsx | 13 +-
 packages/temp_frontend/app/routes.ts | 2 +-
 .../app/routes/home/Milestone.tsx | 55 +-
 .../app/routes/home/MilestoneVideoCard.tsx | 15 +-
 .../temp_frontend/app/routes/home/index.tsx | 12 +-
 .../app/routes/labelling/ControlBar.tsx | 16 +-
 .../routes/labelling/LabelInstructions.tsx | 32 +-
 .../app/routes/labelling/VideoInfo.tsx | 5 +-
 .../app/routes/labelling/index.tsx | 76 +-
 packages/temp_frontend/app/routes/login.tsx | 10 +-
 .../temp_frontend/app/routes/search/index.tsx | 21 +-
 .../app/routes/song/[id]/add.tsx | 48 +-
 .../app/routes/song/[id]/info/columns.tsx | 7 +-
 .../app/routes/song/[id]/info/data-table.tsx | 47 +-
 .../app/routes/song/[id]/info/index.tsx | 111 +-
 .../app/routes/song/[id]/info/lib.ts | 56 +-
 .../app/routes/song/[id]/info/views-chart.tsx | 35 +-
 .../app/routes/time-calculator.tsx | 14 +-
 .../app/routes/video/[id]/info/index.tsx | 87 +-
 .../temp_frontend/public/site.webmanifest | 12 +-
 packages/temp_frontend/vite.config.ts | 4 +-
 packages/tracker/app/admin/users.tsx | 141 +-
 .../app/components/column/ColumnDialog.tsx | 10 +-
 .../app/components/project/ProjectDialog.tsx | 25 +-
 .../app/components/project/UserSearch.tsx | 15 +-
 .../app/components/task/TaskDialog.tsx | 2 +-
 .../tracker/app/components/task/TaskForm.tsx | 22 +-
 .../app/components/ui/alert-dialog.tsx | 213 ++-
 packages/tracker/app/components/ui/badge.tsx | 70 +-
 packages/tracker/app/components/ui/button.tsx | 100 +-
 .../tracker/app/components/ui/calendar.tsx | 365 +++--
 packages/tracker/app/components/ui/card.tsx | 124 +-
 .../tracker/app/components/ui/checkbox.tsx | 49 +-
 packages/tracker/app/components/ui/dialog.tsx | 204 ++-
 packages/tracker/app/components/ui/input.tsx | 32 +-
 packages/tracker/app/components/ui/label.tsx | 33 +-
 .../tracker/app/components/ui/popover.tsx | 64 +-
 .../tracker/app/components/ui/scroll-area.tsx | 90 +-
 packages/tracker/app/components/ui/select.tsx | 283 ++--
 .../tracker/app/components/ui/separator.tsx | 42 +-
 packages/tracker/app/components/ui/sheet.tsx | 199 ++-
 .../tracker/app/components/ui/sidebar.tsx | 1193 ++++++++---------
 .../tracker/app/components/ui/skeleton.tsx | 18 +-
 packages/tracker/app/components/ui/sonner.tsx | 68 +-
 .../tracker/app/components/ui/spinner.tsx | 22 +-
 packages/tracker/app/components/ui/table.tsx | 156 +--
 .../tracker/app/components/ui/textarea.tsx | 26 +-
 .../tracker/app/components/ui/tooltip.tsx | 88 +-
 packages/tracker/app/home/home.tsx | 28 +-
 packages/tracker/app/hooks/use-mobile.ts | 26 +-
 packages/tracker/app/lib/utils.ts | 6 +-
 packages/tracker/app/login/action.ts | 6 +-
 packages/tracker/app/login/page.tsx | 14 +-
 packages/tracker/app/logout/logout.tsx | 4 +-
 packages/tracker/app/projects/newProject.tsx | 24 +-
 packages/tracker/app/projects/projectPage.tsx | 48 +-
 .../tracker/app/projects/projectPageAction.ts | 28 +-
 packages/tracker/app/projects/settings.tsx | 64 +-
 packages/tracker/app/root.tsx | 10 +-
 packages/tracker/app/routes.ts | 4 +-
 packages/tracker/app/setup/setup.tsx | 33 +-
 packages/tracker/app/user/profile.tsx | 24 +-
 packages/tracker/components.json | 40 +-
 packages/tracker/drizzle.config.ts | 2 +-
 packages/tracker/lib/auth-utils.ts | 2 +-
 packages/tracker/lib/auth.ts | 28 +-
 packages/tracker/lib/db/index.ts | 1 -
 packages/tracker/lib/db/schema.ts | 40 +-
 packages/tracker/public/site.webmanifest | 12 +-
 packages/tracker/react-router.config.ts | 2 +-
 packages/tracker/vite.config.ts | 4 +-
 src/aliyun-fc.mjs | 11 +-
 src/fillSongInfo.ts | 14 +-
 src/fixCover.ts | 6 +-
 src/fixPubDate.ts | 6 +-
 src/fullSnapshot.ts | 18 +-
 src/importSnapshots.ts | 10 +-
 turbo.json | 14 +
 214 files changed, 4344 insertions(+), 4152 deletions(-)
 create mode 100644 .idea/biome.xml
 delete mode 100644 .prettierrc
 create mode 100644 biome.json
 create mode 100644 packages/backend/biome.json
 create mode 100644 packages/core/biome.json
 delete mode 100644 packages/core/index.ts
 delete mode 100644 packages/core/types.d.ts
 delete mode 100644 packages/temp_frontend/.prettierrc
 create mode 100644 turbo.json

diff --git a/.gitignore b/.gitignore
index 9580abc..f582f85 100644
--- a/.gitignore
+++ b/.gitignore
@@ -45,4 +45,6 @@
 ml_new/datasets/
 
 mutagen.yml
-mutagen.yml.lock
\ No newline at end of file
+mutagen.yml.lock
+
+.turbo
\ No newline at end of file
diff --git a/.idea/biome.xml b/.idea/biome.xml
new file mode 100644
index 0000000..f400d0c
--- /dev/null
+++ b/.idea/biome.xml
@@ -0,0 +1,7 @@
+
+
+
+
\ No newline at end of file
diff --git a/.idea/data_source_mapping.xml b/.idea/data_source_mapping.xml
index cc982ab..7ba958c 100644
--- a/.idea/data_source_mapping.xml
+++ b/.idea/data_source_mapping.xml
@@ -6,6 +6,5 @@
-
\ No newline at end of file
diff --git a/.idea/prettier.xml b/.idea/prettier.xml
index b0c1c68..01f3c56 100644
--- a/.idea/prettier.xml
+++ b/.idea/prettier.xml
@@ -2,5 +2,6 @@
\ No newline at end of file
diff --git a/.prettierrc b/.prettierrc
deleted file mode 100644
index d8cc397..0000000
--- a/.prettierrc
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-	"useTabs": true,
-	"tabWidth": 4,
-	"trailingComma": "none",
-	"singleQuote": false,
-	"printWidth": 100,
-	"endOfLine": "lf"
-}
diff --git a/biome.json b/biome.json
new file mode 100644
index 0000000..6de5722
--- /dev/null
+++ b/biome.json
@@ -0,0 +1,23 @@
+{
+	"$schema": "https://biomejs.dev/schemas/2.3.8/schema.json",
+	"vcs": {
+		"enabled": true,
+		"useIgnoreFile": true,
+		"clientKind": "git"
+	},
+	"formatter": {
+		"enabled": true,
+		"indentStyle": "tab",
+		"indentWidth": 4,
+		"lineWidth": 100
+	},
+	"javascript": {
+		"formatter": {
+			"enabled": true,
+			"indentStyle": "tab",
+			"quoteStyle": "double",
+			"semicolons": "always",
+			"trailingCommas": "es5"
+		}
+	}
+}
diff --git a/bun.lock b/bun.lock
index 9d32495..5cd613b 100644
--- a/bun.lock
+++ b/bun.lock
@@ -11,8 +11,10 @@ "postgres": "^3.4.7",
       },
       "devDependencies": {
+        "@biomejs/biome": "2.3.8",
         "@types/bun": "^1.3.1",
         "prettier": "^3.6.2",
+        "turbo": "^2.6.3",
         "vite-tsconfig-paths": "^5.1.4",
         "vitest": "^3.2.4",
         "vitest-tsconfig-paths": "^3.4.1",
       },
@@ -322,6 +324,24 @@
 
     "@babel/types": ["@babel/types@7.28.5", "", { "dependencies": { "@babel/helper-string-parser": "^7.27.1", "@babel/helper-validator-identifier": "^7.28.5" } }, "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA=="],
 
+    "@biomejs/biome": ["@biomejs/biome@2.3.8", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "2.3.8", "@biomejs/cli-darwin-x64": "2.3.8", "@biomejs/cli-linux-arm64": "2.3.8", "@biomejs/cli-linux-arm64-musl": "2.3.8", "@biomejs/cli-linux-x64": "2.3.8", "@biomejs/cli-linux-x64-musl": "2.3.8", "@biomejs/cli-win32-arm64": "2.3.8", "@biomejs/cli-win32-x64": "2.3.8" }, "bin": { "biome": "bin/biome" } }, "sha512-Qjsgoe6FEBxWAUzwFGFrB+1+M8y/y5kwmg5CHac+GSVOdmOIqsAiXM5QMVGZJ1eCUCLlPZtq4aFAQ0eawEUuUA=="],
+
+    "@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@2.3.8", "", { "os": "darwin", "cpu": "arm64" }, "sha512-HM4Zg9CGQ3txTPflxD19n8MFPrmUAjaC7PQdLkugeeC0cQ+PiVrd7i09gaBS/11QKsTDBJhVg85CEIK9f50Qww=="],
+
+    "@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@2.3.8", "", { "os": "darwin", "cpu": "x64" }, "sha512-lUDQ03D7y/qEao7RgdjWVGCu+BLYadhKTm40HkpJIi6kn8LSv5PAwRlew/DmwP4YZ9ke9XXoTIQDO1vAnbRZlA=="],
+
+    "@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@2.3.8", "", { "os": "linux", "cpu": "arm64" }, "sha512-Uo1OJnIkJgSgF+USx970fsM/drtPcQ39I+JO+Fjsaa9ZdCN1oysQmy6oAGbyESlouz+rzEckLTF6DS7cWse95g=="],
+
+    "@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@2.3.8", "", { "os": "linux", "cpu": "arm64" }, "sha512-PShR4mM0sjksUMyxbyPNMxoKFPVF48fU8Qe8Sfx6w6F42verbwRLbz+QiKNiDPRJwUoMG1nPM50OBL3aOnTevA=="],
+
+    "@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@2.3.8", "", { "os": "linux", "cpu": "x64" }, "sha512-QDPMD5bQz6qOVb3kiBui0zKZXASLo0NIQ9JVJio5RveBEFgDgsvJFUvZIbMbUZT3T00M/1wdzwWXk4GIh0KaAw=="],
+
+    "@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@2.3.8", "", { "os": "linux", "cpu": "x64" }, "sha512-YGLkqU91r1276uwSjiUD/xaVikdxgV1QpsicT0bIA1TaieM6E5ibMZeSyjQ/izBn4tKQthUSsVZacmoJfa3pDA=="],
+
+    "@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@2.3.8", "", { "os": "win32", "cpu": "arm64" }, "sha512-H4IoCHvL1fXKDrTALeTKMiE7GGWFAraDwBYFquE/L/5r1927Te0mYIGseXi4F+lrrwhSWbSGt5qPFswNoBaCxg=="],
"", { "os": "win32", "cpu": "arm64" }, "sha512-H4IoCHvL1fXKDrTALeTKMiE7GGWFAraDwBYFquE/L/5r1927Te0mYIGseXi4F+lrrwhSWbSGt5qPFswNoBaCxg=="], + + "@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@2.3.8", "", { "os": "win32", "cpu": "x64" }, "sha512-RguzimPoZWtBapfKhKjcWXBVI91tiSprqdBYu7tWhgN8pKRZhw24rFeNZTNf6UiBfjCYCi9eFQs/JzJZIhuK4w=="], + "@borewit/text-codec": ["@borewit/text-codec@0.1.1", "", {}, "sha512-5L/uBxmjaCIX5h8Z+uu+kA9BQLkc/Wl06UGR5ajNRxu+/XjonB5i8JpgFMrPj3LXTCPA0pv8yxUvbUi+QthGGA=="], "@bull-board/api": ["@bull-board/api@6.15.0", "", { "dependencies": { "redis-info": "^3.1.0" }, "peerDependencies": { "@bull-board/ui": "6.15.0" } }, "sha512-z8qLZ4uv83hZNu+0YnHzhVoWv1grULuYh80FdC2xXLg8M1EwsOZD9cJ5CNpgBFqHb+NVByTmf5FltIvXdOU8tQ=="], @@ -1966,6 +1986,20 @@ "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + "turbo": ["turbo@2.6.3", "", { "optionalDependencies": { "turbo-darwin-64": "2.6.3", "turbo-darwin-arm64": "2.6.3", "turbo-linux-64": "2.6.3", "turbo-linux-arm64": "2.6.3", "turbo-windows-64": "2.6.3", "turbo-windows-arm64": "2.6.3" }, "bin": { "turbo": "bin/turbo" } }, "sha512-bf6YKUv11l5Xfcmg76PyWoy/e2vbkkxFNBGJSnfdSXQC33ZiUfutYh6IXidc5MhsnrFkWfdNNLyaRk+kHMLlwA=="], + + "turbo-darwin-64": ["turbo-darwin-64@2.6.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-BlJJDc1CQ7SK5Y5qnl7AzpkvKSnpkfPmnA+HeU/sgny3oHZckPV2776ebO2M33CYDSor7+8HQwaodY++IINhYg=="], + + "turbo-darwin-arm64": ["turbo-darwin-arm64@2.6.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-MwVt7rBKiOK7zdYerenfCRTypefw4kZCue35IJga9CH1+S50+KTiCkT6LBqo0hHeoH2iKuI0ldTF2a0aB72z3w=="], + + "turbo-linux-64": ["turbo-linux-64@2.6.3", "", { "os": "linux", "cpu": "x64" }, "sha512-cqpcw+dXxbnPtNnzeeSyWprjmuFVpHJqKcs7Jym5oXlu/ZcovEASUIUZVN3OGEM6Y/OTyyw0z09tOHNt5yBAVg=="], + + "turbo-linux-arm64": ["turbo-linux-arm64@2.6.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-MterpZQmjXyr4uM7zOgFSFL3oRdNKeflY7nsjxJb2TklsYqiu3Z9pQ4zRVFFH8n0mLGna7MbQMZuKoWqqHb45w=="], + + "turbo-windows-64": ["turbo-windows-64@2.6.3", "", { "os": "win32", "cpu": "x64" }, "sha512-biDU70v9dLwnBdLf+daoDlNJVvqOOP8YEjqNipBHzgclbQlXbsi6Gqqelp5er81Qo3BiRgmTNx79oaZQTPb07Q=="], + + "turbo-windows-arm64": ["turbo-windows-arm64@2.6.3", "", { "os": "win32", "cpu": "arm64" }, "sha512-dDHVKpSeukah3VsI/xMEKeTnV9V9cjlpFSUs4bmsUiLu3Yv2ENlgVEZv65wxbeE0bh0jjpmElDT+P1KaCxArQQ=="], + "tw-animate-css": ["tw-animate-css@1.4.0", "", {}, "sha512-7bziOlRqH0hJx80h/3mbicLW7o8qLsH5+RaLR2t+OHM3D0JlWGODQKQ4cxbK7WlvmUxpcj6Kgu6EKqjrGFe3QQ=="], "type-check": ["type-check@0.4.0", "", { "dependencies": { "prelude-ls": "^1.2.1" } }, "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew=="], diff --git a/ecosystem.config.mjs b/ecosystem.config.mjs index c858798..4a91c9b 100644 --- a/ecosystem.config.mjs +++ b/ecosystem.config.mjs @@ -1,53 +1,53 @@ -import 'dotenv/config' +import "dotenv/config"; export const apps = [ - { - name: 'crawler-jobadder', - script: 'src/jobAdder.wrapper.ts', - cwd: './packages/crawler', - interpreter: 'bun', - }, - { - name: 'crawler-worker', - script: 'src/worker.ts', - cwd: './packages/crawler', - interpreter: 'bun', - env: { - LOG_VERBOSE: "logs/verbose.log", - LOG_WARN: "logs/warn.log", - LOG_ERR: "logs/error.log" - } - }, - { - name: 'crawler-filter', - script: 'src/filterWorker.wrapper.ts', - cwd: './packages/crawler', - interpreter: 'bun', - env: { - LOG_VERBOSE: "logs/verbose.log", - LOG_WARN: "logs/warn.log", - LOG_ERR: 
"logs/error.log" - } - }, - { - name: 'ml-api', - script: 'start.py', - cwd: './ml/api', - interpreter: process.env.PYTHON_INTERPRETER || 'python3', - env: { - PYTHONPATH: './ml/api:./ml/filter', - LOG_VERBOSE: "logs/verbose.log", - LOG_WARN: "logs/warn.log", - LOG_ERR: "logs/error.log" - } - }, - { - name: 'cvsa-be', - script: 'src/index.ts', - cwd: './packages/backend', - interpreter: 'bun', - env: { - NODE_ENV: 'production' - } - }, -] \ No newline at end of file + { + name: "crawler-jobadder", + script: "src/jobAdder.wrapper.ts", + cwd: "./packages/crawler", + interpreter: "bun", + }, + { + name: "crawler-worker", + script: "src/worker.ts", + cwd: "./packages/crawler", + interpreter: "bun", + env: { + LOG_VERBOSE: "logs/verbose.log", + LOG_WARN: "logs/warn.log", + LOG_ERR: "logs/error.log", + }, + }, + { + name: "crawler-filter", + script: "src/filterWorker.wrapper.ts", + cwd: "./packages/crawler", + interpreter: "bun", + env: { + LOG_VERBOSE: "logs/verbose.log", + LOG_WARN: "logs/warn.log", + LOG_ERR: "logs/error.log", + }, + }, + { + name: "ml-api", + script: "start.py", + cwd: "./ml/api", + interpreter: process.env.PYTHON_INTERPRETER || "python3", + env: { + PYTHONPATH: "./ml/api:./ml/filter", + LOG_VERBOSE: "logs/verbose.log", + LOG_WARN: "logs/warn.log", + LOG_ERR: "logs/error.log", + }, + }, + { + name: "cvsa-be", + script: "src/index.ts", + cwd: "./packages/backend", + interpreter: "bun", + env: { + NODE_ENV: "production", + }, + }, +]; diff --git a/package.json b/package.json index 33d9744..09532a8 100644 --- a/package.json +++ b/package.json @@ -3,6 +3,11 @@ "version": "5.5.0", "private": false, "type": "module", + "scripts": { + "biome:format": "bunx --bun biome format --write", + "biome:lint": "bunx --bun biome lint", + "biome:check": "bunx --bun biome check --write" + }, "workspaces": { "packages": [ "packages/core", @@ -23,10 +28,13 @@ "postgres": "^3.4.7" }, "devDependencies": { + "@biomejs/biome": "2.3.8", "@types/bun": "^1.3.1", "prettier": "^3.6.2", + "turbo": "^2.6.3", "vite-tsconfig-paths": "^5.1.4", "vitest": "^3.2.4", "vitest-tsconfig-paths": "^3.4.1" - } + }, + "packageManager": "bun@1.3.3" } diff --git a/packages/backend/biome.json b/packages/backend/biome.json new file mode 100644 index 0000000..b2bd883 --- /dev/null +++ b/packages/backend/biome.json @@ -0,0 +1,7 @@ +{ + "extends": "//", + "$schema": "https://biomejs.dev/schemas/2.3.8/schema.json", + "linter": { + "enabled": true + } +} diff --git a/packages/backend/lib/auth.ts b/packages/backend/lib/auth.ts index 51aa18c..750e157 100644 --- a/packages/backend/lib/auth.ts +++ b/packages/backend/lib/auth.ts @@ -1,14 +1,14 @@ -import Argon2id from "@rabbit-company/argon2id"; +import { generate as generateId } from "@alikia/random-key"; import { db, - usersInCredentials, loginSessionsInCredentials, - UserType, - SessionType + type SessionType, + type UserType, + usersInCredentials, } from "@core/drizzle"; -import { eq, and, isNull } from "drizzle-orm"; -import { generate as generateId } from "@alikia/random-key"; import logger from "@core/log"; +import Argon2id from "@rabbit-company/argon2id"; +import { and, eq, isNull } from "drizzle-orm"; export async function verifyUser( username: string, @@ -36,7 +36,7 @@ export async function verifyUser( nickname: foundUser.nickname, role: foundUser.role, unqId: foundUser.unqId, - createdAt: foundUser.createdAt + createdAt: foundUser.createdAt, }; } @@ -57,7 +57,7 @@ export async function createSession( ipAddress, userAgent, lastUsedAt: new Date().toISOString(), - expireAt: 
expireAt.toISOString() + expireAt: expireAt.toISOString(), }); } catch (error) { logger.error(error as Error); @@ -108,7 +108,7 @@ export async function validateSession( return { user: users[0], - session: session + session: session, }; } @@ -116,7 +116,7 @@ export async function deactivateSession(sessionId: string): Promise { const result = await db .update(loginSessionsInCredentials) .set({ - deactivatedAt: new Date().toISOString() + deactivatedAt: new Date().toISOString(), }) .where(eq(loginSessionsInCredentials.id, sessionId)); diff --git a/packages/backend/lib/bilibiliID.ts b/packages/backend/lib/bilibiliID.ts index d50e079..1898931 100644 --- a/packages/backend/lib/bilibiliID.ts +++ b/packages/backend/lib/bilibiliID.ts @@ -39,12 +39,12 @@ export function detectBiliID(id: string) { if (bvSchema.safeParse(id).success) { return { type: "bv" as const, - id: id as `BV1${string}` + id: id as `BV1${string}`, }; } else if (avSchema.safeParse(id).success) { return { type: "av" as const, - id: id as `av${string}` + id: id as `av${string}`, }; } return null; diff --git a/packages/backend/lib/mq.ts b/packages/backend/lib/mq.ts index 08c29f0..6225077 100644 --- a/packages/backend/lib/mq.ts +++ b/packages/backend/lib/mq.ts @@ -1,10 +1,10 @@ -import { Queue, ConnectionOptions } from "bullmq"; import { redis } from "@core/db/redis"; +import { type ConnectionOptions, Queue } from "bullmq"; export const LatestVideosQueue = new Queue("latestVideos", { - connection: redis as ConnectionOptions + connection: redis as ConnectionOptions, }); export const SnapshotQueue = new Queue("snapshot", { - connection: redis as ConnectionOptions + connection: redis as ConnectionOptions, }); diff --git a/packages/backend/lib/schema.ts b/packages/backend/lib/schema.ts index 4fe2684..ca4de6f 100644 --- a/packages/backend/lib/schema.ts +++ b/packages/backend/lib/schema.ts @@ -10,7 +10,7 @@ const videoStatsSchema = z.object({ share: z.number(), now_rank: z.number(), his_rank: z.number(), - like: z.number() + like: z.number(), }); export const BiliAPIVideoMetadataSchema = z.object({ @@ -32,9 +32,9 @@ export const BiliAPIVideoMetadataSchema = z.object({ owner: z.object({ mid: z.number(), name: z.string(), - face: z.string() + face: z.string(), }), - stat: videoStatsSchema + stat: videoStatsSchema, }); export const BiliVideoSchema = z.object({ @@ -49,7 +49,7 @@ export const BiliVideoSchema = z.object({ tags: z.string().nullable(), title: z.string().nullable(), status: z.number(), - coverUrl: z.string().nullable() + coverUrl: z.string().nullable(), }); export type BiliVideoType = z.infer; @@ -66,5 +66,5 @@ export const SongSchema = z.object({ updatedAt: z.string(), deleted: z.boolean(), image: z.string().nullable(), - producer: z.string().nullable() + producer: z.string().nullable(), }); diff --git a/packages/backend/lib/singers.ts b/packages/backend/lib/singers.ts index 713b8f3..10518a9 100644 --- a/packages/backend/lib/singers.ts +++ b/packages/backend/lib/singers.ts @@ -9,99 +9,99 @@ export const singers: Singer[] = [ { name: "洛天依", color: "#66CCFF", - birthday: "0712" + birthday: "0712", }, { name: "言和", color: "#00FFCC", - birthday: "0711" + birthday: "0711", }, { name: "乐正绫", color: "#EE0000", - birthday: "0412" + birthday: "0412", }, { name: "乐正龙牙", color: "#006666", - birthday: "1002" + birthday: "1002", }, { name: "徵羽摩柯", color: "#0080FF", - birthday: "1210" + birthday: "1210", }, { name: "墨清弦", color: "#FFFF00", - birthday: "0520" + birthday: "0520", }, { name: "星尘", color: "#9999FF", - birthday: "0812" + birthday: 
"0812", }, { name: "永夜Minus", color: "#613c8a", - birthday: "1208" + birthday: "1208", }, { name: "心华", color: "#EE82EE", - birthday: "0210" + birthday: "0210", }, { name: "海伊", color: "#3399FF", - birthday: "0722" + birthday: "0722", }, { name: "苍穹", color: "#8BC0B5", - birthday: "0520" + birthday: "0520", }, { name: "赤羽", color: "#FF4004", - birthday: "1126" + birthday: "1126", }, { name: "诗岸", color: "#F6BE72", - birthday: "0119" + birthday: "0119", }, { name: "牧心", color: "#2A2859", - birthday: "0807" + birthday: "0807", }, { name: "起礼", color: "#FF0099", - birthday: "0713" + birthday: "0713", }, { name: "起复", color: "#99FF00", - birthday: "0713" + birthday: "0713", }, { name: "夏语遥", color: "#34CCCC", - birthday: "1110" - } + birthday: "1110", + }, ]; export const specialSingers = [ { name: "雅音宫羽", - message: "你是我最真模样,从来不曾遗忘。" + message: "你是我最真模样,从来不曾遗忘。", }, { name: "初音未来", - message: "初始之音,响彻未来!" - } + message: "初始之音,响彻未来!", + }, ]; export const pickSinger = () => { diff --git a/packages/backend/middlewares/auth.ts b/packages/backend/middlewares/auth.ts index ebe6155..9828d5c 100644 --- a/packages/backend/middlewares/auth.ts +++ b/packages/backend/middlewares/auth.ts @@ -1,6 +1,6 @@ -import { Elysia } from "elysia"; import { validateSession } from "@backend/lib/auth"; -import { SessionType, UserType } from "@core/drizzle"; +import type { SessionType, UserType } from "@core/drizzle"; +import { Elysia } from "elysia"; export interface AuthenticatedContext { user: UserType; @@ -42,7 +42,7 @@ export const requireAuth = new Elysia({ name: "require-auth" }) return { user: null, session: null, - isAuthenticated: false + isAuthenticated: false, }; } @@ -54,7 +54,7 @@ export const requireAuth = new Elysia({ name: "require-auth" }) return { user: null, session: null, - isAuthenticated: false + isAuthenticated: false, }; } @@ -62,13 +62,13 @@ export const requireAuth = new Elysia({ name: "require-auth" }) return { user: validationResult.user, session: validationResult.session, - isAuthenticated: true + isAuthenticated: true, }; }) - .onBeforeHandle({ as: "scoped" }, ({ user, session, status }) => { + .onBeforeHandle({ as: "scoped" }, ({ user, status }) => { if (!user) { return status(401, { - message: "Authentication required." 
+ message: "Authentication required.", }); } }) diff --git a/packages/backend/middlewares/captcha.ts b/packages/backend/middlewares/captcha.ts index ce17dbb..9357c08 100644 --- a/packages/backend/middlewares/captcha.ts +++ b/packages/backend/middlewares/captcha.ts @@ -1,17 +1,17 @@ -import { Elysia } from "elysia"; -import { jwt } from "@elysiajs/jwt"; import { redis } from "@core/db/redis"; +import { jwt } from "@elysiajs/jwt"; +import { Elysia } from "elysia"; interface JWTPayload { id: string; - [key: string]: any; + [key: string]: unknown; } export const captchaMiddleware = new Elysia({ name: "captcha" }) .use( jwt({ name: "captchaJwt", - secret: process.env.JWT_SECRET || "default-secret-key" + secret: process.env.JWT_SECRET || "default-secret-key", }) ) .derive(async ({ request, captchaJwt, set }) => { @@ -44,7 +44,7 @@ export const captchaMiddleware = new Elysia({ name: "captcha" }) return { captchaVerified: true, - userId: payload.id + userId: payload.id, }; } catch (error) { if (error instanceof Error) { diff --git a/packages/backend/middlewares/openapi.ts b/packages/backend/middlewares/openapi.ts index 5c20daa..2112d18 100644 --- a/packages/backend/middlewares/openapi.ts +++ b/packages/backend/middlewares/openapi.ts @@ -1,22 +1,21 @@ -import openapi from "@elysiajs/openapi"; -import pkg from "../package.json"; +import openapi, { fromTypes } from "@elysiajs/openapi"; import * as z from "zod"; -import { fromTypes } from "@elysiajs/openapi"; +import pkg from "../package.json"; export const openAPIMiddleware = openapi({ documentation: { info: { title: "CVSA API Docs", - version: pkg.version - } + version: pkg.version, + }, }, mapJsonSchema: { - zod: z.toJSONSchema + zod: z.toJSONSchema, }, references: fromTypes(), scalar: { theme: "kepler", hideClientButton: true, - hideDarkModeToggle: true - } + hideDarkModeToggle: true, + }, }); diff --git a/packages/backend/middlewares/timing.ts b/packages/backend/middlewares/timing.ts index 0d7e8e8..1144fdb 100644 --- a/packages/backend/middlewares/timing.ts +++ b/packages/backend/middlewares/timing.ts @@ -1,35 +1,20 @@ -import { Elysia, type MapResponse, type Context, type TraceEvent, type TraceProcess } from "elysia"; +import { type Context, Elysia, type MapResponse, type TraceEvent, type TraceProcess } from "elysia"; type MaybePromise = T | Promise; class TimeLogger { - private startTimes: Map; private durations: Map; private totalStartTime: number | null; constructor() { - this.startTimes = new Map(); this.durations = new Map(); this.totalStartTime = null; } - startTime(name: string) { - this.startTimes.set(name, performance.now()); - } - - endTime(name: string) { - const startTime = this.startTimes.get(name); - if (startTime !== undefined) { - const duration = performance.now() - startTime; - this.durations.set(name, duration); - this.startTimes.delete(name); - } - } - getCompletedDurations() { return Array.from(this.durations.entries()).map(([name, duration]) => ({ name, - duration + duration, })); } @@ -108,7 +93,7 @@ export interface ServerTimingOptions { total?: boolean; }; /** - * Determine whether or not Server Timing should be enabled + * Determine whether Server Timing should be enabled * * @default NODE_ENV !== 'production' */ @@ -129,7 +114,7 @@ export interface ServerTimingOptions { const getLabel = ( event: TraceEvent, - listener: (callback: (process: TraceProcess<"begin", true>) => unknown) => unknown, + listener: (callback: (process: TraceProcess<"begin">) => unknown) => unknown, write: (value: string) => void ) => { 
listener(async ({ onStop, onEvent, total }) => { @@ -137,13 +122,13 @@ const getLabel = ( if (total === 0) return; - onEvent(({ name, index, onStop }) => { + await onEvent(({ name, index, onStop }) => { onStop(({ elapsed }) => { label += `${event}.${index}.${name || "anon"};dur=${elapsed},`; }); }); - onStop(({ elapsed }) => { + await onStop(({ elapsed }) => { label += `${event};dur=${elapsed},`; write(label); @@ -163,99 +148,91 @@ export const serverTiming = ({ afterHandle: traceAfterHandle = true, error: traceError = true, mapResponse: traceMapResponse = true, - total: traceTotal = true + total: traceTotal = true, } = {}, - mapResponse }: ServerTimingOptions = {}) => { - const app = new Elysia().decorate("timeLog", new TimeLogger()).trace( - { as: "global" }, - async ({ - onRequest, - onParse, - onTransform, - onBeforeHandle, - onHandle, - onAfterHandle, - onMapResponse, - onError, - set, - context, - response, - context: { - request: { method } - } - }) => { - if (!enabled) return; - let label = ""; + return new Elysia() + .decorate("timeLog", new TimeLogger()) + .trace( + { as: "global" }, + async ({ + onRequest, + onParse, + onTransform, + onBeforeHandle, + onHandle, + onAfterHandle, + onMapResponse, + onError, + set, + context, + }) => { + if (!enabled) return; + let label = ""; - const write = (nextValue: string) => { - label += nextValue; - }; + const write = (nextValue: string) => { + label += nextValue; + }; - let start: number; + await onRequest(() => { + context.timeLog.startTotal(); + }); - onRequest(({ begin }) => { - context.timeLog.startTotal(); - start = begin; - }); + if (traceRequest) getLabel("request", onRequest, write); + if (traceParse) getLabel("parse", onParse, write); + if (traceTransform) getLabel("transform", onTransform, write); + if (traceBeforeHandle) getLabel("beforeHandle", onBeforeHandle, write); + if (traceAfterHandle) getLabel("afterHandle", onAfterHandle, write); + if (traceError) getLabel("error", onError, write); + if (traceMapResponse) getLabel("mapResponse", onMapResponse, write); - if (traceRequest) getLabel("request", onRequest, write); - if (traceParse) getLabel("parse", onParse, write); - if (traceTransform) getLabel("transform", onTransform, write); - if (traceBeforeHandle) getLabel("beforeHandle", onBeforeHandle, write); - if (traceAfterHandle) getLabel("afterHandle", onAfterHandle, write); - if (traceError) getLabel("error", onError, write); - if (traceMapResponse) getLabel("mapResponse", onMapResponse, write); + if (traceHandle) + await onHandle(({ name, onStop }) => { + onStop(({ elapsed }) => { + label += `handle.${name};dur=${elapsed},`; + }); + }); - if (traceHandle) - onHandle(({ name, onStop }) => { - onStop(({ elapsed }) => { - label += `handle.${name};dur=${elapsed},`; + await onMapResponse(({ onStop }) => { + onStop(async () => { + const completedDurations = context.timeLog.getCompletedDurations(); + if (completedDurations.length > 0) { + label += `${completedDurations + .map(({ name, duration }) => `${name};dur=${duration}`) + .join(", ")},`; + } + const elapsed = context.timeLog.endTotal(); + + let allowed = allow; + if (allowed instanceof Promise) allowed = await allowed; + + if (traceTotal) label += `total;dur=${elapsed}`; + else label = label.slice(0, -1); + + // ? 
Must wait until request is reported + switch (typeof allowed) { + case "boolean": + if (!allowed) delete set.headers["Server-Timing"]; + + set.headers["Server-Timing"] = label; + + break; + + case "function": + if ((await allowed(context)) === false) + delete set.headers["Server-Timing"]; + + set.headers["Server-Timing"] = label; + + break; + + default: + set.headers["Server-Timing"] = label; + } }); }); - - onMapResponse(({ onStop }) => { - onStop(async ({ end }) => { - const completedDurations = context.timeLog.getCompletedDurations(); - if (completedDurations.length > 0) { - label += - completedDurations - .map(({ name, duration }) => `${name};dur=${duration}`) - .join(", ") + ","; - } - const elapsed = context.timeLog.endTotal(); - - let allowed = allow; - if (allowed instanceof Promise) allowed = await allowed; - - if (traceTotal) label += `total;dur=${elapsed}`; - else label = label.slice(0, -1); - - // ? Must wait until request is reported - switch (typeof allowed) { - case "boolean": - if (allowed === false) delete set.headers["Server-Timing"]; - - set.headers["Server-Timing"] = label; - - break; - - case "function": - if ((await allowed(context)) === false) - delete set.headers["Server-Timing"]; - - set.headers["Server-Timing"] = label; - - break; - - default: - set.headers["Server-Timing"] = label; - } - }); - }); - } - ); - return app; + } + ); }; export default serverTiming; diff --git a/packages/backend/package.json b/packages/backend/package.json index 769e2ec..7b4b1a6 100644 --- a/packages/backend/package.json +++ b/packages/backend/package.json @@ -3,8 +3,7 @@ "version": "1.1.0", "scripts": { "dev": "NODE_ENV=development bun run --watch src/index.ts", - "start": "NODE_ENV=production bun run src/index.ts", - "format": "prettier --write ." + "start": "NODE_ENV=production bun run src/index.ts" }, "dependencies": { "@alikia/random-key": "^2.0.0", diff --git a/packages/backend/routes/auth/login.ts b/packages/backend/routes/auth/login.ts index 64f4101..b00bb1b 100644 --- a/packages/backend/routes/auth/login.ts +++ b/packages/backend/routes/auth/login.ts @@ -1,6 +1,6 @@ +import { createSession, getSessionExpirationDate, verifyUser } from "@backend/lib/auth"; import { Elysia, t } from "elysia"; import { ip } from "elysia-ip"; -import { verifyUser, createSession, getSessionExpirationDate } from "@backend/lib/auth"; export const loginHandler = new Elysia({ prefix: "/auth" }).use(ip()).post( "/session", @@ -29,9 +29,9 @@ export const loginHandler = new Elysia({ prefix: "/auth" }).use(ip()).post( id: user.id, username: user.username, nickname: user.nickname, - role: user.role + role: user.role, }, - sessionID: sessionId + sessionID: sessionId, }; }, { @@ -42,24 +42,24 @@ export const loginHandler = new Elysia({ prefix: "/auth" }).use(ip()).post( id: t.Integer(), username: t.String(), nickname: t.Optional(t.String()), - role: t.String() + role: t.String(), }), - sessionID: t.String() + sessionID: t.String(), }), 401: t.Object({ - message: t.String() - }) + message: t.String(), + }), }, body: t.Object({ username: t.String(), - password: t.String() + password: t.String(), }), detail: { summary: "User login", description: "This endpoint authenticates users by verifying their credentials and creates a new session. \ Upon successful authentication, it returns user information and sets a secure HTTP-only cookie \ - for session management. The session includes IP address and user agent tracking for security purposes." - } + for session management. 
The session includes IP address and user agent tracking for security purposes.", + }, } ); diff --git a/packages/backend/routes/auth/logout.ts b/packages/backend/routes/auth/logout.ts index 7d13b80..814879c 100644 --- a/packages/backend/routes/auth/logout.ts +++ b/packages/backend/routes/auth/logout.ts @@ -1,6 +1,6 @@ -import { Elysia, t } from "elysia"; import { deactivateSession } from "@backend/lib/auth"; import requireAuth from "@backend/middlewares/auth"; +import { Elysia, t } from "elysia"; export const logoutHandler = new Elysia({ prefix: "/auth" }).use(requireAuth).delete( "/session", @@ -20,18 +20,18 @@ export const logoutHandler = new Elysia({ prefix: "/auth" }).use(requireAuth).de { response: { 200: t.Object({ - message: t.String() + message: t.String(), }), 401: t.Object({ - message: t.String() - }) + message: t.String(), + }), }, detail: { summary: "Logout current session", description: "This endpoint logs out the current user by deactivating their session and removing the session cookie. \ It requires an active session cookie to be present in the request. After successful logout, the session \ - is invalidated and cannot be used again." - } + is invalidated and cannot be used again.", + }, } ); diff --git a/packages/backend/routes/auth/user.ts b/packages/backend/routes/auth/user.ts index 57baf55..d1e0837 100644 --- a/packages/backend/routes/auth/user.ts +++ b/packages/backend/routes/auth/user.ts @@ -1,5 +1,5 @@ -import { Elysia, t } from "elysia"; import requireAuth from "@backend/middlewares/auth"; +import { Elysia, t } from "elysia"; export const getCurrentUserHandler = new Elysia().use(requireAuth).get( "/user", @@ -11,7 +11,7 @@ export const getCurrentUserHandler = new Elysia().use(requireAuth).get( id: user.id, username: user.username, nickname: user.nickname, - role: user.role + role: user.role, }; }, { @@ -20,11 +20,11 @@ export const getCurrentUserHandler = new Elysia().use(requireAuth).get( id: t.Integer(), username: t.String(), nickname: t.Union([t.String(), t.Null()]), - role: t.String() + role: t.String(), }), 401: t.Object({ - message: t.String() - }) - } + message: t.String(), + }), + }, } ); diff --git a/packages/backend/routes/ping/index.ts b/packages/backend/routes/ping/index.ts index f3fbdd7..191c7fa 100644 --- a/packages/backend/routes/ping/index.ts +++ b/packages/backend/routes/ping/index.ts @@ -12,13 +12,13 @@ export const pingHandler = new Elysia({ prefix: "/ping" }).use(ip()).get( ip: ip, method: request.method, body: body, - url: request.url + url: request.url, }, response: { time: Date.now(), status: 200, - version: VERSION - } + version: VERSION, + }, }; }, { @@ -30,20 +30,20 @@ export const pingHandler = new Elysia({ prefix: "/ping" }).use(ip()).get( ip: t.Optional(t.String()), method: t.String(), body: t.Optional(t.Union([t.String(), t.Null()])), - url: t.String() + url: t.String(), }), response: t.Object({ time: t.Number(), status: t.Number(), - version: t.String() - }) - }) + version: t.String(), + }), + }), }, body: t.Optional(t.String()), detail: { summary: "Send a ping", description: - "This endpoint returns a 'pong' message along with comprehensive information about the incoming request and the server's current status, including request headers, IP address, and server version. It's useful for monitoring API availability and debugging." - } + "This endpoint returns a 'pong' message along with comprehensive information about the incoming request and the server's current status, including request headers, IP address, and server version. 
It's useful for monitoring API availability and debugging.", + }, } ); diff --git a/packages/backend/routes/root/index.ts b/packages/backend/routes/root/index.ts index e68c8b5..4d25daf 100644 --- a/packages/backend/routes/root/index.ts +++ b/packages/backend/routes/root/index.ts @@ -1,4 +1,9 @@ -import { getSingerForBirthday, pickSinger, pickSpecialSinger, Singer } from "@backend/lib/singers"; +import { + getSingerForBirthday, + pickSinger, + pickSpecialSinger, + type Singer, +} from "@backend/lib/singers"; import { VERSION } from "@backend/src"; import { Elysia, t } from "elysia"; @@ -6,7 +11,7 @@ const SingerObj = t.Object({ name: t.String(), color: t.Optional(t.String()), birthday: t.Optional(t.String()), - message: t.Optional(t.String()) + message: t.Optional(t.String()), }); export const rootHandler = new Elysia().get( @@ -29,12 +34,12 @@ export const rootHandler = new Elysia().get( project: { name: "中V档案馆", mascot: "知夏", - quote: "星河知海夏生光" + quote: "星河知海夏生光", }, status: 200, version: VERSION, time: Date.now(), - singer: singer + singer: singer, }; }, { @@ -43,19 +48,19 @@ export const rootHandler = new Elysia().get( project: t.Object({ name: t.String(), mascot: t.String(), - quote: t.String() + quote: t.String(), }), status: t.Number(), version: t.String(), time: t.Number(), - singer: t.Union([SingerObj, t.Array(SingerObj)]) - }) + singer: t.Union([SingerObj, t.Array(SingerObj)]), + }), }, detail: { summary: "Root route", description: "The root path. It returns a JSON object containing a random virtual singer, \ - backend version, current server time and other miscellaneous information." - } + backend version, current server time and other miscellaneous information.", + }, } ); diff --git a/packages/backend/routes/search/index.ts b/packages/backend/routes/search/index.ts index a247641..9224e60 100644 --- a/packages/backend/routes/search/index.ts +++ b/packages/backend/routes/search/index.ts @@ -1,12 +1,13 @@ -import { Elysia } from "elysia"; -import { db, bilibiliMetadata, latestVideoSnapshot, songs } from "@core/drizzle"; -import { eq, ilike } from "drizzle-orm"; -import { BiliAPIVideoMetadataSchema, BiliVideoSchema, SongSchema } from "@backend/lib/schema"; -import { z } from "zod"; -import { getVideoInfo } from "@core/net/getVideoInfo"; import { biliIDToAID } from "@backend/lib/bilibiliID"; -import { retrieveVideoInfoFromCache } from "../video/metadata"; +import { BiliAPIVideoMetadataSchema, BiliVideoSchema, SongSchema } from "@backend/lib/schema"; import { redis } from "@core/db/redis"; +import { bilibiliMetadata, db, latestVideoSnapshot, songs } from "@core/drizzle"; +import type { VideoInfoData } from "@core/net/bilibili"; +import { getVideoInfo } from "@core/net/getVideoInfo"; +import { eq, ilike } from "drizzle-orm"; +import { Elysia } from "elysia"; +import { z } from "zod"; +import { retrieveVideoInfoFromCache } from "../video/metadata"; const getSongSearchResult = async (searchQuery: string) => { const data = await db @@ -26,7 +27,7 @@ const getSongSearchResult = async (searchQuery: string) => { data: song, occurrences, viewsLog, - lengthRatio + lengthRatio, }; }) .filter((d) => d !== null); @@ -53,7 +54,7 @@ const getSongSearchResult = async (searchQuery: string) => { return { type: result.type, data: result.data.songs, - rank: Math.min(Math.max(rank, 0), 1) // Ensure rank is between 0 and 1 + rank: Math.min(Math.max(rank, 0), 1), // Ensure rank is between 0 and 1 }; }); @@ -71,21 +72,21 @@ const getDBVideoSearchResult = async (searchQuery: string) => { return 
results.map((video) => ({ type: "bili-video-db" as "bili-video-db", data: { views: video.latest_video_snapshot.views, ...video.bilibili_metadata }, - rank: 1 // Exact match + rank: 1, // Exact match })); }; const getVideoSearchResult = async (searchQuery: string) => { const aid = biliIDToAID(searchQuery); if (!aid) return []; - let data; + let data: VideoInfoData; const cachedData = await retrieveVideoInfoFromCache(aid); if (cachedData) { data = cachedData; } else { - data = await getVideoInfo(aid, "getVideoInfo"); - if (typeof data === "number") return []; - data = data.data; + const result = await getVideoInfo(aid, "getVideoInfo"); + if (typeof result === "number") return []; + data = result.data; const cacheKey = `cvsa:videoInfo:av${aid}`; await redis.setex(cacheKey, 60, JSON.stringify(data)); } @@ -93,13 +94,13 @@ const getVideoSearchResult = async (searchQuery: string) => { { type: "bili-video" as "bili-video", data: data, - rank: 0.99 // Exact match - } + rank: 0.99, // Exact match + }, ]; }; const BiliVideoDataSchema = BiliVideoSchema.extend({ - views: z.number() + views: z.number(), }); export const searchHandler = new Elysia({ prefix: "/search" }).get( @@ -110,7 +111,7 @@ export const searchHandler = new Elysia({ prefix: "/search" }).get( const [songResults, videoResults, dbVideoResults] = await Promise.all([ getSongSearchResult(searchQuery), getVideoSearchResult(searchQuery), - getDBVideoSearchResult(searchQuery) + getDBVideoSearchResult(searchQuery), ]); const combinedResults = [...songResults, ...videoResults, ...dbVideoResults]; @@ -118,7 +119,7 @@ export const searchHandler = new Elysia({ prefix: "/search" }).get( const end = performance.now(); return { data, - elapsedMs: end - start + elapsedMs: end - start, }; }, { @@ -130,27 +131,27 @@ export const searchHandler = new Elysia({ prefix: "/search" }).get( z.object({ type: z.literal("song"), data: SongSchema, - rank: z.number() + rank: z.number(), }), z.object({ type: z.literal("bili-video-db"), data: BiliVideoDataSchema, - rank: z.number() + rank: z.number(), }), z.object({ type: z.literal("bili-video"), data: BiliAPIVideoMetadataSchema, - rank: z.number() - }) + rank: z.number(), + }), ]) - ) + ), }), 404: z.object({ - message: z.string() - }) + message: z.string(), + }), }, query: z.object({ - query: z.string() + query: z.string(), }), detail: { summary: "Search songs and videos", @@ -158,7 +159,7 @@ export const searchHandler = new Elysia({ prefix: "/search" }).get( "This endpoint performs a comprehensive search across songs and videos in the database. \ It searches for songs by name and videos by bilibili ID (av/BV format). The results are ranked \ by relevance using a weighted algorithm that considers search term frequency, title length, \ - and view count. Returns search results with performance timing information." - } + and view count. 
Returns search results with performance timing information.", + }, } ); diff --git a/packages/backend/routes/song/add.ts b/packages/backend/routes/song/add.ts index 4fa6a70..fef6e43 100644 --- a/packages/backend/routes/song/add.ts +++ b/packages/backend/routes/song/add.ts @@ -1,9 +1,9 @@ -import { Elysia, t } from "elysia"; import { biliIDToAID } from "@backend/lib/bilibiliID"; -import { requireAuth } from "@backend/middlewares/auth"; import { LatestVideosQueue } from "@backend/lib/mq"; +import { requireAuth } from "@backend/middlewares/auth"; import { db, songs } from "@core/drizzle"; -import { eq, and } from "drizzle-orm"; +import { and, eq } from "drizzle-orm"; +import { Elysia, t } from "elysia"; export const addSongHandler = new Elysia() .use(requireAuth) @@ -15,7 +15,7 @@ export const addSongHandler = new Elysia() if (!aid) { return status(400, { message: - "We cannot parse the video ID, or we currently do not support this format." + "We cannot parse the video ID, or we currently do not support this format.", }); } const aidExistsInSongs = await db @@ -26,42 +26,42 @@ export const addSongHandler = new Elysia() if (aidExistsInSongs.length > 0) { return { jobID: -1, - message: "Video already exists in the songs table." + message: "Video already exists in the songs table.", }; } const job = await LatestVideosQueue.add("getVideoInfo", { aid: aid, insertSongs: true, - uid: user!.unqId + uid: user!.unqId, }); if (!job.id) { return status(500, { - message: "Failed to enqueue job to add song." + message: "Failed to enqueue job to add song.", }); } return status(201, { message: "Successfully created import session.", - jobID: job.id + jobID: job.id, }); }, { response: { 201: t.Object({ message: t.String(), - jobID: t.String() + jobID: t.String(), }), 400: t.Object({ - message: t.String() + message: t.String(), }), 401: t.Object({ - message: t.String() + message: t.String(), }), 500: t.Object({ - message: t.String() - }) + message: t.String(), + }), }, body: t.Object({ - id: t.String() + id: t.String(), }), detail: { summary: "Import song from bilibili", @@ -69,8 +69,8 @@ export const addSongHandler = new Elysia() "This endpoint allows authenticated users to import a song from bilibili by providing a video ID. \ The video ID can be in av or BV format. The system validates the ID format, checks if the video already \ exists in the database, and if not, creates a background job to fetch video metadata and add it to the songs collection. \ - Returns the job ID for tracking the import progress." - } + Returns the job ID for tracking the import progress.", + }, } ) .get( @@ -82,14 +82,14 @@ export const addSongHandler = new Elysia() id: jobID, state: "completed", result: { - message: "Video already exists in the songs table." - } + message: "Video already exists in the songs table.", + }, }; } const job = await LatestVideosQueue.getJob(jobID); if (!job) { return status(404, { - message: "Job not found." 
+ message: "Job not found.", }); } const state = await job.getState(); @@ -97,7 +97,7 @@ export const addSongHandler = new Elysia() id: job.id!, state, result: job.returnvalue, - failedReason: job.failedReason + failedReason: job.failedReason, }; }, { @@ -106,21 +106,21 @@ export const addSongHandler = new Elysia() id: t.String(), state: t.String(), result: t.Optional(t.Any()), - failedReason: t.Optional(t.String()) + failedReason: t.Optional(t.String()), }), 404: t.Object({ - message: t.String() - }) + message: t.String(), + }), }, params: t.Object({ - id: t.String() + id: t.String(), }), detail: { summary: "Check import job status", description: "This endpoint retrieves the current status of a song import job. It returns the job state \ (completed, failed, active, etc.), the result if completed, and any failure reason if the job failed. \ - Use this endpoint to monitor the progress of song imports initiated through the import endpoint." - } + Use this endpoint to monitor the progress of song imports initiated through the import endpoint.", + }, } ); diff --git a/packages/backend/routes/song/delete.ts b/packages/backend/routes/song/delete.ts index e33368e..3b1d9af 100644 --- a/packages/backend/routes/song/delete.ts +++ b/packages/backend/routes/song/delete.ts @@ -1,7 +1,7 @@ -import { Elysia, t } from "elysia"; import { requireAuth } from "@backend/middlewares/auth"; -import { songs, history, db } from "@core/drizzle"; +import { db, history, songs } from "@core/drizzle"; import { eq } from "drizzle-orm"; +import { Elysia, t } from "elysia"; export const deleteSongHandler = new Elysia({ prefix: "/song" }).use(requireAuth).delete( "/:id", @@ -12,33 +12,33 @@ export const deleteSongHandler = new Elysia({ prefix: "/song" }).use(requireAuth objectId: id, changeType: "del-song", changedBy: user!.unqId, - data: null + data: null, }); return { - message: `Successfully deleted song ${id}.` + message: `Successfully deleted song ${id}.`, }; }, { response: { 200: t.Object({ - message: t.String() + message: t.String(), }), 401: t.Object({ - message: t.String() + message: t.String(), }), 500: t.Object({ - message: t.String() - }) + message: t.String(), + }), }, params: t.Object({ - id: t.String() + id: t.String(), }), detail: { summary: "Delete song", description: "This endpoint allows authenticated users to soft-delete a song from the database. \ The song is marked as deleted rather than being permanently removed, preserving data integrity. \ - The deletion is logged in the history table for audit purposes. Requires authentication and appropriate permissions." - } + The deletion is logged in the history table for audit purposes. 
Requires authentication and appropriate permissions.", + }, } ); diff --git a/packages/backend/routes/song/info.ts b/packages/backend/routes/song/info.ts index bb41345..026d4f8 100644 --- a/packages/backend/routes/song/info.ts +++ b/packages/backend/routes/song/info.ts @@ -1,8 +1,8 @@ -import { Elysia, t } from "elysia"; -import { db, eta, history, songs, videoSnapshot } from "@core/drizzle"; -import { eq, and, desc } from "drizzle-orm"; import { bv2av } from "@backend/lib/bilibiliID"; import { requireAuth } from "@backend/middlewares/auth"; +import { db, eta, history, songs, videoSnapshot } from "@core/drizzle"; +import { and, desc, eq } from "drizzle-orm"; +import { Elysia, t } from "elysia"; async function getSongIDFromBiliID(id: string) { let aid: number; @@ -48,11 +48,11 @@ export const songHandler = new Elysia({ prefix: "/song/:id" }) if (Number.isNaN(songID)) { return status(404, { code: "SONG_NOT_FOUND", - message: "Given song cannot be found." + message: "Given song cannot be found.", }); } return { - songID + songID, }; }) .get( @@ -62,7 +62,7 @@ export const songHandler = new Elysia({ prefix: "/song/:id" }) if (!info) { return status(404, { code: "SONG_NOT_FOUND", - message: "Given song cannot be found." + message: "Given song cannot be found.", }); } return { @@ -72,7 +72,7 @@ export const songHandler = new Elysia({ prefix: "/song/:id" }) producer: info.producer, duration: info.duration, cover: info.image || undefined, - publishedAt: info.publishedAt + publishedAt: info.publishedAt, }; }, { @@ -84,15 +84,15 @@ export const songHandler = new Elysia({ prefix: "/song/:id" }) producer: t.Union([t.String(), t.Null()]), duration: t.Union([t.Number(), t.Null()]), cover: t.Optional(t.String()), - publishedAt: t.Union([t.String(), t.Null()]) + publishedAt: t.Union([t.String(), t.Null()]), }), 404: t.Object({ code: t.String(), - message: t.String() - }) + message: t.String(), + }), }, headers: t.Object({ - Authorization: t.Optional(t.String()) + Authorization: t.Optional(t.String()), }), detail: { summary: "Get information of a song", @@ -101,23 +101,23 @@ export const songHandler = new Elysia({ prefix: "/song/:id" }) which can be provided in several formats. \ The endpoint accepts a song ID in either a numerical format as the internal ID in our database\ or as a bilibili video ID (either av or BV format). \ - It responds with the song's name, bilibili ID (av), producer, duration, and associated singers." - } + It responds with the song's name, bilibili ID (av), producer, duration, and associated singers.", + }, } ) .get("/snapshots", async ({ status, songID }) => { const r = await db.select().from(songs).where(eq(songs.id, songID)).limit(1); - if (r.length == 0) { + if (r.length === 0) { return status(404, { code: "SONG_NOT_FOUND", - message: "Given song cannot be found." + message: "Given song cannot be found.", }); } const song = r[0]; const aid = song.aid; if (!aid) { return status(404, { - message: "Given song is not associated with any bilibili video." + message: "Given song is not associated with any bilibili video.", }); } return db @@ -128,17 +128,17 @@ export const songHandler = new Elysia({ prefix: "/song/:id" }) }) .get("/eta", async ({ status, songID }) => { const r = await db.select().from(songs).where(eq(songs.id, songID)).limit(1); - if (r.length == 0) { + if (r.length === 0) { return status(404, { code: "SONG_NOT_FOUND", - message: "Given song cannot be found." 
+ message: "Given song cannot be found.", }); } const song = r[0]; const aid = song.aid; if (!aid) { return status(404, { - message: "Given song is not associated with any bilibili video." + message: "Given song is not associated with any bilibili video.", }); } return db.select().from(eta).where(eq(eta.aid, aid)); @@ -151,7 +151,7 @@ export const songHandler = new Elysia({ prefix: "/song/:id" }) if (!info) { return status(404, { code: "SONG_NOT_FOUND", - message: "Given song cannot be found." + message: "Given song cannot be found.", }); } @@ -174,32 +174,32 @@ export const songHandler = new Elysia({ prefix: "/song/:id" }) updatedData.length > 0 ? { old: info, - new: updatedData[0] + new: updatedData[0], } - : null + : null, }); return { message: "Successfully updated song info.", - updated: updatedData.length > 0 ? updatedData[0] : null + updated: updatedData.length > 0 ? updatedData[0] : null, }; }, { response: { 200: t.Object({ message: t.String(), - updated: t.Any() + updated: t.Any(), }), 401: t.Object({ - message: t.String() + message: t.String(), }), 404: t.Object({ message: t.String(), - code: t.String() - }) + code: t.String(), + }), }, body: t.Object({ name: t.Optional(t.String()), - producer: t.Optional(t.String()) + producer: t.Optional(t.String()), }), detail: { summary: "Update song information", @@ -207,7 +207,7 @@ export const songHandler = new Elysia({ prefix: "/song/:id" }) "This endpoint allows authenticated users to update song metadata. It accepts partial updates \ for song name and producer fields. The endpoint validates the song ID (accepting both internal database IDs \ and bilibili video IDs in av/BV format), applies the requested changes, and logs the update in the history table \ - for audit purposes. Requires authentication." - } + for audit purposes. 
Requires authentication.", + }, } ); diff --git a/packages/backend/routes/song/milestone.ts b/packages/backend/routes/song/milestone.ts index 60afa6f..36f649f 100644 --- a/packages/backend/routes/song/milestone.ts +++ b/packages/backend/routes/song/milestone.ts @@ -1,16 +1,16 @@ -import { Elysia, t } from "elysia"; -import { db, bilibiliMetadata, eta } from "@core/drizzle"; -import { eq, and, gte, lt } from "drizzle-orm"; -import serverTiming from "@backend/middlewares/timing"; -import z from "zod"; import { BiliVideoSchema } from "@backend/lib/schema"; +import serverTiming from "@backend/middlewares/timing"; +import { bilibiliMetadata, db, eta } from "@core/drizzle"; +import { and, eq, gte, lt } from "drizzle-orm"; +import { Elysia, t } from "elysia"; +import z from "zod"; type MileStoneType = "dendou" | "densetsu" | "shinwa"; const range = { dendou: [0, 100000], densetsu: [100000, 1000000], - shinwa: [1000000, 10000000] + shinwa: [1000000, 10000000], }; export const closeMileStoneHandler = new Elysia({ prefix: "/songs" }).use(serverTiming()).get( @@ -39,21 +39,21 @@ export const closeMileStoneHandler = new Elysia({ prefix: "/songs" }).use(server eta: z.number(), speed: z.number(), currentViews: z.number(), - updatedAt: z.string() + updatedAt: z.string(), }), - bilibili_metadata: BiliVideoSchema + bilibili_metadata: BiliVideoSchema, }) ), 404: t.Object({ - message: t.String() - }) + message: t.String(), + }), }, params: t.Object({ - type: t.String({ enum: ["dendou", "densetsu", "shinwa"] }) + type: t.String({ enum: ["dendou", "densetsu", "shinwa"] }), }), query: t.Object({ offset: t.Optional(t.Number()), - limit: t.Optional(t.Number()) + limit: t.Optional(t.Number()), }), detail: { summary: "Get songs close to milestones", @@ -61,7 +61,7 @@ export const closeMileStoneHandler = new Elysia({ prefix: "/songs" }).use(server "This endpoint retrieves songs that are approaching significant view count milestones. \ It supports three milestone types: 'dendou' (0-100k views), 'densetsu' (100k-1M views), and 'shinwa' (1M-10M views). \ For each type, it returns videos that are within the specified view range and have an estimated time to reach \ - the next milestone below the threshold. Results are ordered by estimated time to milestone." - } + the next milestone below the threshold. Results are ordered by estimated time to milestone.", + }, } ); diff --git a/packages/backend/routes/video/eta.ts b/packages/backend/routes/video/eta.ts index 86d1573..bab5c24 100644 --- a/packages/backend/routes/video/eta.ts +++ b/packages/backend/routes/video/eta.ts @@ -1,7 +1,7 @@ -import { Elysia, t } from "elysia"; +import { biliIDToAID } from "@backend/lib/bilibiliID"; import { db, eta } from "@core/drizzle"; import { eq } from "drizzle-orm"; -import { biliIDToAID } from "@backend/lib/bilibiliID"; +import { Elysia, t } from "elysia"; export const songEtaHandler = new Elysia({ prefix: "/video" }).get( "/:id/eta", @@ -11,14 +11,15 @@ export const songEtaHandler = new Elysia({ prefix: "/video" }).get( if (!aid) { return status(400, { code: "MALFORMED_SLOT", - message: "We cannot parse the video ID, or we currently do not support this format." + message: + "We cannot parse the video ID, or we currently do not support this format.", }); } const data = await db.select().from(eta).where(eq(eta.aid, aid)); if (data.length === 0) { return status(404, { code: "VIDEO_NOT_FOUND", - message: "Video not found." 
+ message: "Video not found.", }); } return { @@ -26,7 +27,7 @@ export const songEtaHandler = new Elysia({ prefix: "/video" }).get( eta: data[0].eta, views: data[0].currentViews, speed: data[0].speed, - updatedAt: data[0].updatedAt + updatedAt: data[0].updatedAt, }; }, { @@ -36,19 +37,19 @@ export const songEtaHandler = new Elysia({ prefix: "/video" }).get( eta: t.Number(), views: t.Number(), speed: t.Number(), - updatedAt: t.String() + updatedAt: t.String(), }), 400: t.Object({ code: t.String(), - message: t.String() + message: t.String(), }), 404: t.Object({ code: t.String(), - message: t.String() - }) + message: t.String(), + }), }, headers: t.Object({ - Authorization: t.Optional(t.String()) + Authorization: t.Optional(t.String()), }), detail: { summary: "Get video milestone ETA", @@ -56,7 +57,7 @@ export const songEtaHandler = new Elysia({ prefix: "/video" }).get( "This endpoint retrieves the estimated time to reach the next milestone for a given video. \ It accepts video IDs in av or BV format and returns the current view count, estimated time to \ reach the next milestone (in hours), view growth speed, and last update timestamp. Useful for \ - tracking video growth and milestone predictions." - } + tracking video growth and milestone predictions.", + }, } ); diff --git a/packages/backend/routes/video/label.ts b/packages/backend/routes/video/label.ts index 08f2c6f..3376351 100644 --- a/packages/backend/routes/video/label.ts +++ b/packages/backend/routes/video/label.ts @@ -1,11 +1,11 @@ -import { Elysia, t } from "elysia"; -import { ErrorResponseSchema } from "@backend/src/schema"; -import z from "zod"; +import { biliIDToAID } from "@backend/lib/bilibiliID"; import { BiliVideoSchema } from "@backend/lib/schema"; import requireAuth from "@backend/middlewares/auth"; -import { eq, sql } from "drizzle-orm"; +import { ErrorResponseSchema } from "@backend/src/schema"; import { bilibiliMetadata, db, videoTypeLabelInInternal } from "@core/drizzle"; -import { biliIDToAID } from "@backend/lib/bilibiliID"; +import { eq, sql } from "drizzle-orm"; +import { Elysia, t } from "elysia"; +import z from "zod"; const videoSchema = BiliVideoSchema.omit({ publishedAt: true }) .omit({ createdAt: true }) @@ -16,7 +16,7 @@ const videoSchema = BiliVideoSchema.omit({ publishedAt: true }) uid: z.number(), published_at: z.string(), createdAt: z.string(), - cover_url: z.string() + cover_url: z.string(), }); export const getUnlabelledVideos = new Elysia({ prefix: "/videos" }).use(requireAuth).get( @@ -60,8 +60,8 @@ export const getUnlabelledVideos = new Elysia({ prefix: "/videos" }).use(require response: { 200: z.array(videoSchema), 400: ErrorResponseSchema, - 500: ErrorResponseSchema - } + 500: ErrorResponseSchema, + }, } ); @@ -77,7 +77,7 @@ export const postVideoLabel = new Elysia({ prefix: "/video" }).use(requireAuth). code: "MALFORMED_SLOT", message: "We cannot parse the video ID, or we currently do not support this format.", - errors: [] + errors: [], }); } @@ -91,23 +91,23 @@ export const postVideoLabel = new Elysia({ prefix: "/video" }).use(requireAuth). 
return status(400, { code: "VIDEO_NOT_FOUND", message: "Video not found", - errors: [] + errors: [], }); } await db.insert(videoTypeLabelInInternal).values({ aid, label, - user: user!.unqId + user: user!.unqId, }); return status(201, { - message: `Labelled video av${aid} as ${label}` + message: `Labelled video av${aid} as ${label}`, }); }, { body: t.Object({ - label: t.Boolean() - }) + label: t.Boolean(), + }), } ); diff --git a/packages/backend/routes/video/metadata.ts b/packages/backend/routes/video/metadata.ts index f7becc6..bc1189b 100644 --- a/packages/backend/routes/video/metadata.ts +++ b/packages/backend/routes/video/metadata.ts @@ -1,12 +1,12 @@ -import { Elysia } from "elysia"; -import { db, videoSnapshot } from "@core/drizzle"; import { biliIDToAID } from "@backend/lib/bilibiliID"; -import { getVideoInfo } from "@core/net/getVideoInfo"; -import { redis } from "@core/db/redis"; -import { ErrorResponseSchema } from "@backend/src/schema"; -import type { VideoInfoData } from "@core/net/bilibili.d.ts"; import { BiliAPIVideoMetadataSchema } from "@backend/lib/schema"; +import { ErrorResponseSchema } from "@backend/src/schema"; +import { redis } from "@core/db/redis"; +import { db, videoSnapshot } from "@core/drizzle"; +import type { VideoInfoData } from "@core/net/bilibili.d.ts"; +import { getVideoInfo } from "@core/net/getVideoInfo"; import { snapshotCounter } from "@crawler/metrics"; +import { Elysia } from "elysia"; export async function retrieveVideoInfoFromCache(aid: number) { const cacheKey = `cvsa:videoInfo:av${aid}`; @@ -40,7 +40,7 @@ async function insertVideoSnapshot(data: VideoInfoData) { likes, coins, shares, - favorites + favorites, }); snapshotCounter.add(1); } @@ -56,7 +56,7 @@ export const getVideoMetadataHandler = new Elysia({ prefix: "/video" }).get( code: "MALFORMED_SLOT", message: "We cannot parse the video ID, or we currently do not support this format.", - errors: [] + errors: [], }); } @@ -67,11 +67,11 @@ export const getVideoMetadataHandler = new Elysia({ prefix: "/video" }).get( const r = await getVideoInfo(aid, "getVideoInfo"); - if (typeof r == "number") { + if (typeof r === "number") { return c.status(500, { code: "THIRD_PARTY_ERROR", message: `Got status code ${r} from bilibili API.`, - errors: [] + errors: [], }); } @@ -86,7 +86,7 @@ export const getVideoMetadataHandler = new Elysia({ prefix: "/video" }).get( response: { 200: BiliAPIVideoMetadataSchema, 400: ErrorResponseSchema, - 500: ErrorResponseSchema + 500: ErrorResponseSchema, }, detail: { summary: "Get video metadata", @@ -94,7 +94,7 @@ export const getVideoMetadataHandler = new Elysia({ prefix: "/video" }).get( "This endpoint retrieves comprehensive metadata for a bilibili video. It accepts video IDs in av or BV format \ and returns detailed information including title, description, uploader, statistics (views, likes, coins, etc.), \ and publication date. The data is cached for 60 seconds to reduce API calls. If the video is not in cache, \ - it fetches fresh data from bilibili API and stores a snapshot in the database." 
- } + it fetches fresh data from bilibili API and stores a snapshot in the database.", + }, } ); diff --git a/packages/backend/routes/video/snapshots.ts b/packages/backend/routes/video/snapshots.ts index 33a9edf..49e3199 100644 --- a/packages/backend/routes/video/snapshots.ts +++ b/packages/backend/routes/video/snapshots.ts @@ -1,10 +1,10 @@ -import { Elysia } from "elysia"; -import { db, videoSnapshot } from "@core/drizzle"; import { biliIDToAID } from "@backend/lib/bilibiliID"; -import { ErrorResponseSchema } from "@backend/src/schema"; -import { eq, desc } from "drizzle-orm"; -import z from "zod"; import { SnapshotQueue } from "@backend/lib/mq"; +import { ErrorResponseSchema } from "@backend/src/schema"; +import { db, videoSnapshot } from "@core/drizzle"; +import { desc, eq } from "drizzle-orm"; +import { Elysia } from "elysia"; +import z from "zod"; export const getVideoSnapshotsHandler = new Elysia({ prefix: "/video" }).get( "/:id/snapshots", @@ -17,7 +17,7 @@ export const getVideoSnapshotsHandler = new Elysia({ prefix: "/video" }).get( code: "MALFORMED_SLOT", message: "We cannot parse the video ID, or we currently do not support this format.", - errors: [] + errors: [], }); } @@ -29,7 +29,7 @@ export const getVideoSnapshotsHandler = new Elysia({ prefix: "/video" }).get( if (data.length === 0) { await SnapshotQueue.add("directSnapshot", { - aid + aid, }); } @@ -48,11 +48,11 @@ export const getVideoSnapshotsHandler = new Elysia({ prefix: "/video" }).get( shares: z.number().nullable(), danmakus: z.number().nullable(), aid: z.number(), - replies: z.number().nullable() + replies: z.number().nullable(), }) ), 400: ErrorResponseSchema, - 500: ErrorResponseSchema + 500: ErrorResponseSchema, }, detail: { summary: "Get video snapshots", @@ -60,7 +60,7 @@ export const getVideoSnapshotsHandler = new Elysia({ prefix: "/video" }).get( "This endpoint retrieves historical view count snapshots for a bilibili video. It accepts video IDs in av or BV format \ and returns a chronological list of snapshots showing how the video's statistics (views, likes, coins, favorites, etc.) \ have changed over time. If no snapshots exist for the video, it automatically queues a snapshot job to collect initial data. \ - Results are ordered by creation date in descending order." 
- } + Results are ordered by creation date in descending order.", + }, } ); diff --git a/packages/backend/src/index.ts b/packages/backend/src/index.ts index f05ac85..1e413dc 100644 --- a/packages/backend/src/index.ts +++ b/packages/backend/src/index.ts @@ -1,22 +1,22 @@ -import { Elysia, ErrorHandler } from "elysia"; -import { getBindingInfo, logStartup } from "./startMessage"; -import { pingHandler } from "@backend/routes/ping"; -import { cors } from "@elysiajs/cors"; -import { songHandler } from "@backend/routes/song/info"; -import { rootHandler } from "@backend/routes/root"; -import { getVideoMetadataHandler } from "@backend/routes/video/metadata"; -import { closeMileStoneHandler } from "@backend/routes/song/milestone"; import { authHandler } from "@backend/routes/auth"; -import { onAfterHandler } from "./onAfterHandle"; +import { pingHandler } from "@backend/routes/ping"; +import { rootHandler } from "@backend/routes/root"; import { searchHandler } from "@backend/routes/search"; -import { getVideoSnapshotsHandler } from "@backend/routes/video/snapshots"; import { addSongHandler } from "@backend/routes/song/add"; import { deleteSongHandler } from "@backend/routes/song/delete"; +import { songHandler } from "@backend/routes/song/info"; +import { closeMileStoneHandler } from "@backend/routes/song/milestone"; import { songEtaHandler } from "@backend/routes/video/eta"; +import { getVideoMetadataHandler } from "@backend/routes/video/metadata"; +import { getVideoSnapshotsHandler } from "@backend/routes/video/snapshots"; +import { cors } from "@elysiajs/cors"; +import { Elysia, type ErrorHandler } from "elysia"; +import { onAfterHandler } from "./onAfterHandle"; +import { getBindingInfo, logStartup } from "./startMessage"; import "./mq"; -import pkg from "../package.json"; -import { getUnlabelledVideos, postVideoLabel } from "@backend/routes/video/label"; import { openAPIMiddleware } from "@backend/middlewares/openapi"; +import { getUnlabelledVideos, postVideoLabel } from "@backend/routes/video/label"; +import pkg from "../package.json"; const [host, port] = getBindingInfo(); logStartup(host, port); @@ -24,7 +24,7 @@ logStartup(host, port); const errorHandler: ErrorHandler = ({ code, status, error }) => { if (code === "NOT_FOUND") return status(404, { - message: "The requested resource was not found." 
+ message: "The requested resource was not found.", }); if (code === "VALIDATION") return error.detail(error.message); return error; @@ -32,8 +32,8 @@ const errorHandler: ErrorHandler = ({ code, status, error }) => { const app = new Elysia({ serve: { - hostname: host - } + hostname: host, + }, }) .onError(errorHandler) .use(onAfterHandler) @@ -60,8 +60,8 @@ const app = new Elysia({ }, { detail: { - hide: true - } + hide: true, + }, } ) .get( @@ -71,8 +71,8 @@ const app = new Elysia({ }, { detail: { - hide: true - } + hide: true, + }, } ) .listen(15412); diff --git a/packages/backend/src/mq.ts b/packages/backend/src/mq.ts index 6ccbcfd..4e2e124 100644 --- a/packages/backend/src/mq.ts +++ b/packages/backend/src/mq.ts @@ -1,12 +1,12 @@ import { db, history } from "@core/drizzle"; -import { ConnectionOptions, QueueEvents, QueueEventsListener } from "bullmq"; +import { type ConnectionOptions, QueueEvents, type QueueEventsListener } from "bullmq"; import { redis } from "bun"; interface CustomListener extends QueueEventsListener { addSong: (args: { uid: string; songID: number }, id: string) => void; } const queueEvents = new QueueEvents("latestVideos", { - connection: redis as ConnectionOptions + connection: redis as ConnectionOptions, }); queueEvents.on( "addSong", @@ -15,7 +15,7 @@ queueEvents.on( objectId: songID, changeType: "add-song", changedBy: uid, - data: null + data: null, }); } ); diff --git a/packages/backend/src/onAfterHandle.ts b/packages/backend/src/onAfterHandle.ts index 9802fe6..82accc5 100644 --- a/packages/backend/src/onAfterHandle.ts +++ b/packages/backend/src/onAfterHandle.ts @@ -26,8 +26,8 @@ export const onAfterHandler = new Elysia().onAfterHandle( return new Response(encoder.encode(text), { status: realResponse.code as any, headers: { - "Content-Type": "application/json; charset=utf-8" - } + "Content-Type": "application/json; charset=utf-8", + }, }); } const text = isBrowser @@ -35,8 +35,8 @@ export const onAfterHandler = new Elysia().onAfterHandle( : JSON.stringify(realResponse); return new Response(encoder.encode(text), { headers: { - "Content-Type": "application/json; charset=utf-8" - } + "Content-Type": "application/json; charset=utf-8", + }, }); } ); diff --git a/packages/backend/src/schema.ts b/packages/backend/src/schema.ts index 0e587ad..d5bcd99 100644 --- a/packages/backend/src/schema.ts +++ b/packages/backend/src/schema.ts @@ -13,7 +13,7 @@ export const errorCodes = [ "SERVER_ERROR", "RATE_LIMIT_EXCEEDED", "ENTITY_EXISTS", - "THIRD_PARTY_ERROR" + "THIRD_PARTY_ERROR", ]; function generateErrorCodeRegex(strings: string[]): string { @@ -33,7 +33,7 @@ export const ErrorResponseSchema = t.Object({ i18n: t.Optional( t.Object({ key: t.String(), - values: t.Optional(t.Record(t.String(), t.Union([t.String(), t.Number(), t.Date()]))) + values: t.Optional(t.Record(t.String(), t.Union([t.String(), t.Number(), t.Date()]))), }) - ) + ), }); diff --git a/packages/backend/src/startMessage.ts b/packages/backend/src/startMessage.ts index ffc6688..75e985e 100644 --- a/packages/backend/src/startMessage.ts +++ b/packages/backend/src/startMessage.ts @@ -1,5 +1,5 @@ -import os from "os"; import chalk from "chalk"; +import os from "os"; function getLocalIpAddress(): string { const interfaces = os.networkInterfaces(); diff --git a/packages/core/biome.json b/packages/core/biome.json new file mode 100644 index 0000000..3cca237 --- /dev/null +++ b/packages/core/biome.json @@ -0,0 +1,10 @@ +{ + "extends": "//", + "$schema": "https://biomejs.dev/schemas/2.3.8/schema.json", + "files": { + 
"includes": ["**", "!!**/drizzle/main"] + }, + "linter": { + "enabled": true + } +} diff --git a/packages/core/db/dbNew.ts b/packages/core/db/dbNew.ts index 15e537d..0493ab0 100644 --- a/packages/core/db/dbNew.ts +++ b/packages/core/db/dbNew.ts @@ -1,4 +1,4 @@ import postgres from "postgres"; import { postgresConfig } from "./pgConfigNew"; -export const sql = postgres(postgresConfig); \ No newline at end of file +export const sql = postgres(postgresConfig); diff --git a/packages/core/db/pgConfigNew.ts b/packages/core/db/pgConfigNew.ts index 2180ae1..66de89e 100644 --- a/packages/core/db/pgConfigNew.ts +++ b/packages/core/db/pgConfigNew.ts @@ -22,7 +22,7 @@ export const postgresConfig = { port: parseInt(databasePort), database: databaseName, username: databaseUser, - password: databasePassword + password: databasePassword, }; export const postgresConfigCred = { @@ -30,5 +30,5 @@ export const postgresConfigCred = { port: parseInt(databasePort), database: databaseNameCred, user: databaseUser, - password: databasePassword + password: databasePassword, }; diff --git a/packages/core/db/redis.ts b/packages/core/db/redis.ts index 78d524d..2b80e68 100644 --- a/packages/core/db/redis.ts +++ b/packages/core/db/redis.ts @@ -6,5 +6,5 @@ const port = parseInt(process.env.REDIS_PORT) || 6379; export const redis = new Redis({ port: port, host: host, - maxRetriesPerRequest: null + maxRetriesPerRequest: null, }); diff --git a/packages/core/db/snapshots/index.ts b/packages/core/db/snapshots/index.ts index 8c3e684..68cd964 100644 --- a/packages/core/db/snapshots/index.ts +++ b/packages/core/db/snapshots/index.ts @@ -1,3 +1,3 @@ -export * from "./getLatestSnapshot"; export * from "./getClosetSnapshot"; +export * from "./getLatestSnapshot"; export * from "./milestone"; diff --git a/packages/core/db/snapshots/milestone.ts b/packages/core/db/snapshots/milestone.ts index f859427..e7342d6 100644 --- a/packages/core/db/snapshots/milestone.ts +++ b/packages/core/db/snapshots/milestone.ts @@ -1,7 +1,7 @@ +import { getClosestSnapshot, getLatestSnapshot } from "@core/db"; import { db, eta as etaTable } from "@core/drizzle"; +import { getClosetMilestone, HOUR, MINUTE } from "@core/lib"; import { eq } from "drizzle-orm"; -import { MINUTE, HOUR, getClosetMilestone } from "@core/lib"; -import { getLatestSnapshot, getClosestSnapshot } from "@core/db"; export const getGroundTruthMilestoneETA = async ( aid: number, diff --git a/packages/core/drizzle.config.ts b/packages/core/drizzle.config.ts index 56da063..34270c2 100644 --- a/packages/core/drizzle.config.ts +++ b/packages/core/drizzle.config.ts @@ -5,7 +5,7 @@ export default defineConfig({ out: "./drizzle/main", dialect: "postgresql", dbCredentials: { - url: process.env.DATABASE_URL_MAIN! + url: process.env.DATABASE_URL_MAIN!, }, - schemaFilter: ["public", "credentials", "internal"] + schemaFilter: ["public", "credentials", "internal"], }); diff --git a/packages/core/drizzle/drizzle-cred.config.ts b/packages/core/drizzle/drizzle-cred.config.ts index 90fc57a..3f78afc 100644 --- a/packages/core/drizzle/drizzle-cred.config.ts +++ b/packages/core/drizzle/drizzle-cred.config.ts @@ -4,6 +4,6 @@ export default defineConfig({ out: "./cred", dialect: "postgresql", dbCredentials: { - url: process.env.DATABASE_URL_CRED! 
- } + url: process.env.DATABASE_URL_CRED!, + }, }); diff --git a/packages/core/drizzle/drizzle-main.config.ts b/packages/core/drizzle/drizzle-main.config.ts index 1634ecc..4244b81 100644 --- a/packages/core/drizzle/drizzle-main.config.ts +++ b/packages/core/drizzle/drizzle-main.config.ts @@ -1,10 +1,14 @@ import "dotenv/config"; import { defineConfig } from "drizzle-kit"; +if (!process.env.DATABASE_URL_MAIN) { + throw new Error("DATABASE_URL_MAIN is not defined"); +} + export default defineConfig({ out: "./drizzle/main", dialect: "postgresql", dbCredentials: { - url: process.env.DATABASE_URL_MAIN! - } + url: process.env.DATABASE_URL_MAIN, + }, }); diff --git a/packages/core/drizzle/index.ts b/packages/core/drizzle/index.ts index 34d947f..a0edf4c 100644 --- a/packages/core/drizzle/index.ts +++ b/packages/core/drizzle/index.ts @@ -1,8 +1,8 @@ "use server"; -import { drizzle } from "drizzle-orm/postgres-js"; import { sql } from "@core/db/dbNew"; +import { drizzle } from "drizzle-orm/postgres-js"; export const db = drizzle(sql); export * from "./main/schema"; -export * from "./type"; \ No newline at end of file +export * from "./type"; diff --git a/packages/core/drizzle/type.ts b/packages/core/drizzle/type.ts index d4b55bb..fbcc41c 100644 --- a/packages/core/drizzle/type.ts +++ b/packages/core/drizzle/type.ts @@ -1,12 +1,12 @@ import type { InferSelectModel } from "drizzle-orm"; -import { - usersInCredentials, +import type { bilibiliMetadata, latestVideoSnapshot, - songs, - videoSnapshot, + loginSessionsInCredentials, producer, - loginSessionsInCredentials + songs, + usersInCredentials, + videoSnapshot, } from "./main/schema"; export type UserType = InferSelectModel<typeof usersInCredentials>; diff --git a/packages/core/index.ts b/packages/core/index.ts deleted file mode 100644 index a627c87..0000000 --- a/packages/core/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from "./db/dbNew"; diff --git a/packages/core/lib/index.ts b/packages/core/lib/index.ts index 4bf52b9..999a8bf 100644 --- a/packages/core/lib/index.ts +++ b/packages/core/lib/index.ts @@ -2,4 +2,4 @@ export * from "./math"; export * from "./milestone"; export * from "./randomID"; export * from "./time"; -export * from "./type"; \ No newline at end of file +export * from "./type"; diff --git a/packages/core/lib/math.ts b/packages/core/lib/math.ts index f81bc2d..20e9b3b 100644 --- a/packages/core/lib/math.ts +++ b/packages/core/lib/math.ts @@ -1,3 +1,4 @@ export const log = (value: number, base: number = 10) => Math.log(value) / Math.log(base); -export const truncate = (num: number, min: number, max: number) => Math.max(min, Math.min(num, max)); +export const truncate = (num: number, min: number, max: number) => + Math.max(min, Math.min(num, max)); diff --git a/packages/core/lib/type.ts b/packages/core/lib/type.ts index c68be82..ce7e0b4 100644 --- a/packages/core/lib/type.ts +++ b/packages/core/lib/type.ts @@ -1 +1 @@ -export type PartialBy<T, K extends keyof T> = Omit<T, K> & Partial<Pick<T, K>>; \ No newline at end of file +export type PartialBy<T, K extends keyof T> = Omit<T, K> & Partial<Pick<T, K>>; diff --git a/packages/core/log/index.ts b/packages/core/log/index.ts index 1eccac2..048a755 100644 --- a/packages/core/log/index.ts +++ b/packages/core/log/index.ts @@ -1,6 +1,6 @@ -import winston, { format, transports } from "winston"; -import type { TransformableInfo } from "logform"; import chalk from "chalk"; +import type { TransformableInfo } from "logform"; +import winston, { format, transports } from "winston"; const customFormat = format.printf((info: TransformableInfo) => { const { timestamp, level, message, service, codePath, 
error } = info; @@ -21,9 +21,9 @@ const timestampFormat = format.timestamp({ format: "YYYY-MM-DD HH:mm:ss.SSSZZ" } const createTransport = (level: string, filename: string) => { const MB = 1000000; - let maxsize = undefined; - let maxFiles = undefined; - let tailable = undefined; + let maxsize; + let maxFiles; + let tailable; if (level === "silly") { maxsize = 500 * MB; maxFiles = undefined; @@ -37,7 +37,7 @@ const createTransport = (level: string, filename: string) => { if (typeof value === "bigint") { return value.toString(); } - if (key == "error") { + if (key === "error") { return undefined; } return value; @@ -48,7 +48,7 @@ const createTransport = (level: string, filename: string) => { maxsize, tailable, maxFiles, - format: format.combine(timestampFormat, format.json({ replacer })) + format: format.combine(timestampFormat, format.json({ replacer })), }); }; @@ -66,12 +66,12 @@ const winstonLogger = winston.createLogger({ format.colorize(), format.errors({ stack: true }), customFormat - ) + ), }), createTransport("silly", sillyLogPath), createTransport("warn", warnLogPath), - createTransport("error", errorLogPath) - ] + createTransport("error", errorLogPath), + ], }); const logger = { @@ -96,7 +96,7 @@ const logger = { } else { winstonLogger.error(error, { service, codePath }); } - } + }, }; export default logger; diff --git a/packages/core/mq/lockManager.ts b/packages/core/mq/lockManager.ts index a11e22f..eca3f3d 100644 --- a/packages/core/mq/lockManager.ts +++ b/packages/core/mq/lockManager.ts @@ -1,5 +1,5 @@ -import { Redis } from "ioredis"; import { redis } from "@core/db/redis"; +import type { Redis } from "ioredis"; class LockManager { private redis: Redis; diff --git a/packages/core/mq/multipleRateLimiter.ts b/packages/core/mq/multipleRateLimiter.ts index 6be42d6..19e232b 100644 --- a/packages/core/mq/multipleRateLimiter.ts +++ b/packages/core/mq/multipleRateLimiter.ts @@ -1,5 +1,5 @@ -import { RateLimiter as Limiter } from "@koshnic/ratelimit"; import { redis } from "@core/db/redis"; +import { RateLimiter as Limiter } from "@koshnic/ratelimit"; export interface RateLimiterConfig { duration: number; @@ -42,7 +42,7 @@ export class MultipleRateLimiter { burst: max, ratePerPeriod: max, period: duration, - cost: 1 + cost: 1, }); if (!allowed && shouldThrow) { throw new RateLimiterError("Rate limit exceeded"); diff --git a/packages/core/net/delegate.ts b/packages/core/net/delegate.ts index 98f7a5b..581147e 100644 --- a/packages/core/net/delegate.ts +++ b/packages/core/net/delegate.ts @@ -1,25 +1,25 @@ // noinspection ExceptionCaughtLocallyJS +import Credential from "@alicloud/credentials"; +import Stream from "@alicloud/darabonba-stream"; +import FC20230330, * as $FC20230330 from "@alicloud/fc20230330"; +import * as OpenApi from "@alicloud/openapi-client"; +import * as Util from "@alicloud/tea-util"; +import { SECOND } from "@core/lib"; import logger from "@core/log"; import { MultipleRateLimiter, type RateLimiterConfig, - RateLimiterError + RateLimiterError, } from "@core/mq/multipleRateLimiter"; -import { ReplyError } from "ioredis"; -import { SECOND } from "@core/lib"; -import FC20230330, * as $FC20230330 from "@alicloud/fc20230330"; -import Credential from "@alicloud/credentials"; -import * as OpenApi from "@alicloud/openapi-client"; -import Stream from "@alicloud/darabonba-stream"; -import * as Util from "@alicloud/tea-util"; -import { Readable } from "stream"; import { aliFCCounter, aliFCErrorCounter, ipProxyCounter, - ipProxyErrorCounter + ipProxyErrorCounter, } from 
"crawler/metrics"; +import { ReplyError } from "ioredis"; +import type { Readable } from "stream"; type ProxyType = "native" | "alicloud-fc" | "ip-proxy"; @@ -33,8 +33,8 @@ function createAliProxiesObject(regions: T) { type: "alicloud-fc" as const, data: { region: currentRegion, - timeout: 15000 - } + timeout: 15000, + }, } as ProxyDef; return result; }, @@ -48,7 +48,7 @@ const aliProxies = aliRegions.map((region) => `alicloud_${region}` as `alicloud_ const proxies = { native: { type: "native" as const, - data: {} + data: {}, }, ...aliProxiesObject, @@ -79,7 +79,7 @@ const proxies = { port: item.port, lifespan: Date.parse(item.endtime + "+08") - Date.now(), createdAt: Date.now(), - used: false + used: false, }; }); }, @@ -87,9 +87,9 @@ const proxies = { minPoolSize: 10, maxPoolSize: 100, refreshInterval: 5 * SECOND, - initialPoolSize: 10 - } - } + initialPoolSize: 10, + }, + }, } satisfies Record; interface FCResponse { @@ -98,7 +98,7 @@ interface FCResponse { serverTime: number; } -interface NativeProxyData {} +type NativeProxyData = {}; interface AlicloudFcProxyData { region: (typeof aliRegions)[number]; @@ -169,7 +169,7 @@ interface NetworkConfigInternal { const biliLimiterConfig: RateLimiterConfig[] = [ { duration: 1, max: 20 }, { duration: 15, max: 130 }, - { duration: 5 * 60, max: 2000 } + { duration: 5 * 60, max: 2000 }, ]; const bili_normal = structuredClone(biliLimiterConfig); @@ -196,40 +196,40 @@ const config = createNetworkConfig({ proxies: proxies, providers: { test: { limiters: [] }, - bilibili: { limiters: [] } + bilibili: { limiters: [] }, }, tasks: { test: { provider: "test", - proxies: fcProxies + proxies: fcProxies, }, test_ip: { provider: "test", - proxies: ["ip_proxy_pool"] + proxies: ["ip_proxy_pool"], }, getVideoInfo: { provider: "bilibili", proxies: "all", - limiters: bili_strict + limiters: bili_strict, }, getLatestVideos: { provider: "bilibili", proxies: "all", - limiters: bili_strict + limiters: bili_strict, }, snapshotMilestoneVideo: { provider: "bilibili", - proxies: aliProxies + proxies: aliProxies, }, snapshotVideo: { provider: "bilibili", - proxies: aliProxies + proxies: aliProxies, }, bulkSnapshot: { provider: "bilibili", - proxies: aliProxies - } - } + proxies: aliProxies, + }, + }, }); type NetworkConfig = typeof config; @@ -279,7 +279,7 @@ class IPPoolManager { minPoolSize: config.minPoolSize ?? 5, maxPoolSize: config.maxPoolSize ?? 50, refreshInterval: config.refreshInterval ?? 30_000, - initialPoolSize: config.initialPoolSize ?? 10 + initialPoolSize: config.initialPoolSize ?? 
10, }; } @@ -374,7 +374,7 @@ class IPPoolManager { const ipEntry: IPEntry = { ...ipData, createdAt: Date.now(), - used: false + used: false, }; this.pool.push(ipEntry); } @@ -458,14 +458,14 @@ export class NetworkDelegate { this.tasks[taskName] = { provider: taskDef.provider, - proxies: [...targetProxies] + proxies: [...targetProxies], }; if (taskDef.limiters && taskDef.limiters.length > 0) { for (const proxyName of targetProxies) { const limiterId = `proxy-${proxyName}-${taskName}`; this.proxyLimiters[limiterId] = new MultipleRateLimiter(limiterId, [ - ...taskDef.limiters + ...taskDef.limiters, ]); } } @@ -485,7 +485,7 @@ export class NetworkDelegate { const limiterId = `provider-${proxyName}-${providerName}`; if (!this.providerLimiters[limiterId]) { this.providerLimiters[limiterId] = new MultipleRateLimiter(limiterId, [ - ...providerDef.limiters + ...providerDef.limiters, ]); } } @@ -654,7 +654,7 @@ export class NetworkDelegate { } else { return { data: JSON.parse(rawData.body) as R, - time: rawData.serverTime + time: rawData.serverTime, }; } } catch (e) { @@ -700,7 +700,7 @@ export class NetworkDelegate { const response = await fetch(url, { signal: controller.signal, - proxy: `http://${ipEntry.address}:${ipEntry.port}` + proxy: `http://${ipEntry.address}:${ipEntry.port}`, }); clearTimeout(timeout); diff --git a/packages/core/net/getVideoDetails.ts b/packages/core/net/getVideoDetails.ts index 696478b..4aee29e 100644 --- a/packages/core/net/getVideoDetails.ts +++ b/packages/core/net/getVideoDetails.ts @@ -1,6 +1,6 @@ -import networkDelegate from "@core/net/delegate"; -import type { VideoDetailsData, VideoDetailsResponse } from "@core/net/bilibili.d"; import logger from "@core/log"; +import type { VideoDetailsData, VideoDetailsResponse } from "@core/net/bilibili.d"; +import networkDelegate from "@core/net/delegate"; export async function getVideoDetails(aid: number): Promise { const url = `https://api.bilibili.com/x/web-interface/view/detail?aid=${aid}`; diff --git a/packages/core/net/getVideoInfo.ts b/packages/core/net/getVideoInfo.ts index c9bf91e..3186465 100644 --- a/packages/core/net/getVideoInfo.ts +++ b/packages/core/net/getVideoInfo.ts @@ -1,6 +1,6 @@ -import networkDelegate from "@core/net/delegate"; -import type { VideoInfoData, VideoInfoResponse } from "@core/net/bilibili.d"; import logger from "@core/log"; +import type { VideoInfoData, VideoInfoResponse } from "@core/net/bilibili.d"; +import networkDelegate from "@core/net/delegate"; /* * Fetch video metadata from bilibili API @@ -34,6 +34,6 @@ export async function getVideoInfo( } return { data: data.data, - time: time + time: time, }; } diff --git a/packages/core/net/services.ts b/packages/core/net/services.ts index 43372a3..b097015 100644 --- a/packages/core/net/services.ts +++ b/packages/core/net/services.ts @@ -1,19 +1,19 @@ +import type { VideoSnapshotType } from "@core/drizzle"; +import type { PartialBy } from "@core/lib"; +import type { VideoInfoResponse } from "@core/net/bilibili"; import networkDelegate, { type RequestTasks } from "@core/net/delegate.ts"; -import { VideoInfoResponse } from "@core/net/bilibili"; -import { PartialBy } from "@core/lib"; -import { VideoSnapshotType } from "@core/drizzle"; export class BilibiliService { private static videoMetadataUrl = "https://api.bilibili.com/x/web-interface/view"; private static async getVideoMetadata(aid: number, task: RequestTasks) { - const url = new URL(this.videoMetadataUrl); + const url = new URL(BilibiliService.videoMetadataUrl); url.searchParams.set("aid", 
aid.toString()); return networkDelegate.request(url.toString(), task); } static async milestoneSnapshot(aid: number): Promise> { - const metadata = await this.getVideoMetadata(aid, "snapshotMilestoneVideo"); + const metadata = await BilibiliService.getVideoMetadata(aid, "snapshotMilestoneVideo"); const stats = metadata.data.data.stat; return { aid, @@ -24,7 +24,7 @@ export class BilibiliService { favorites: stats.favorite, replies: stats.reply, shares: stats.share, - danmakus: stats.danmaku + danmakus: stats.danmaku, }; } } diff --git a/packages/core/package.json b/packages/core/package.json index 699006e..4ad399f 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,31 +1,27 @@ { - "name": "core", - "private": false, - "version": "0.0.10", - "scripts": { - "test": "bun --env-file=.env.test run vitest", - "build": "bun build ./index.ts --target node --outdir ./dist", - "drizzle:pull": "drizzle-kit pull" - }, - "dependencies": { - "@alicloud/credentials": "^2.4.4", - "@alicloud/darabonba-stream": "^0.0.2", - "@alicloud/fc20230330": "^4.6.2", - "@alicloud/openapi-client": "^0.4.15", - "@alicloud/tea-util": "^1.4.10", - "@koshnic/ratelimit": "^1.0.3", - "@types/luxon": "^3.7.1", - "chalk": "^5.4.1", - "ioredis": "^5.6.1", - "logform": "^2.7.0", - "luxon": "^3.7.2", - "postgres": "^3.4.5", - "winston": "^3.17.0" - }, - "devDependencies": { - "@types/ioredis": "^5.0.0", - "drizzle-kit": "^0.31.4" - }, - "main": "./dist/index.js", - "types": "./types.d.ts" + "name": "core", + "private": true, + "scripts": { + "test": "bun --env-file=.env.test run vitest", + "drizzle:pull": "drizzle-kit pull" + }, + "dependencies": { + "@alicloud/credentials": "^2.4.4", + "@alicloud/darabonba-stream": "^0.0.2", + "@alicloud/fc20230330": "^4.6.2", + "@alicloud/openapi-client": "^0.4.15", + "@alicloud/tea-util": "^1.4.10", + "@koshnic/ratelimit": "^1.0.3", + "@types/luxon": "^3.7.1", + "chalk": "^5.4.1", + "ioredis": "^5.6.1", + "logform": "^2.7.0", + "luxon": "^3.7.2", + "postgres": "^3.4.5", + "winston": "^3.17.0" + }, + "devDependencies": { + "@types/ioredis": "^5.0.0", + "drizzle-kit": "^0.31.4" + } } diff --git a/packages/core/test/netDelegate.test.ts b/packages/core/test/netDelegate.test.ts index e58485a..ec58243 100644 --- a/packages/core/test/netDelegate.test.ts +++ b/packages/core/test/netDelegate.test.ts @@ -1,15 +1,12 @@ +import { describe, expect, test } from "bun:test"; import networkDelegate from "@core/net/delegate"; -import { test, expect, describe } from "bun:test"; describe("proxying requests", () => { test("Alibaba Cloud FC", async () => { - const { data } = (await networkDelegate.request<{ + const { data } = await networkDelegate.request<{ headers: Record; - }>( - "https://postman-echo.com/get", - "test" - )); - expect(data.headers.referer).toBe('https://www.bilibili.com/'); + }>("https://postman-echo.com/get", "test"); + expect(data.headers.referer).toBe("https://www.bilibili.com/"); }); test("IP Proxy", async () => { const { data } = await networkDelegate.request<{ diff --git a/packages/core/types.d.ts b/packages/core/types.d.ts deleted file mode 100644 index b765385..0000000 --- a/packages/core/types.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -export * from "./db/schema"; -export * from "./index"; -export * from "./net/bilibili"; diff --git a/packages/crawler/db/bilibili_metadata.ts b/packages/crawler/db/bilibili_metadata.ts index b79b92e..057b918 100644 --- a/packages/crawler/db/bilibili_metadata.ts +++ b/packages/crawler/db/bilibili_metadata.ts @@ -1,13 +1,13 @@ import { + 
type BilibiliMetadataType, bilibiliMetadata, - BilibiliMetadataType, bilibiliUser, db, - labellingResult + labellingResult, } from "@core/drizzle"; -import { AkariModelVersion } from "ml/const"; +import type { PartialBy } from "@core/lib"; import { eq, isNull } from "drizzle-orm"; -import { PartialBy } from "@core/lib"; +import { AkariModelVersion } from "ml/const"; export async function insertIntoMetadata( data: PartialBy @@ -18,7 +18,7 @@ export async function insertIntoMetadata( export async function videoExistsInAllData(aid: number) { const rows = await db .select({ - id: bilibiliMetadata.id + id: bilibiliMetadata.id, }) .from(bilibiliMetadata) .where(eq(bilibiliMetadata.aid, aid)) @@ -53,10 +53,10 @@ export async function insertVideoLabel(aid: number, label: number) { .values({ aid, label, - modelVersion: AkariModelVersion + modelVersion: AkariModelVersion, }) .onConflictDoNothing({ - target: [labellingResult.aid, labellingResult.modelVersion] + target: [labellingResult.aid, labellingResult.modelVersion], }); } @@ -75,7 +75,7 @@ export async function getVideoInfoFromAllData(aid: number) { return { title: row.title, description: row.description, - tags: row.tags + tags: row.tags, }; } diff --git a/packages/crawler/db/eta.ts b/packages/crawler/db/eta.ts index db58f63..0468b60 100644 --- a/packages/crawler/db/eta.ts +++ b/packages/crawler/db/eta.ts @@ -1,4 +1,4 @@ -import { Psql } from "@core/db/psql"; +import type { Psql } from "@core/db/psql"; export async function updateETA(sql: Psql, aid: number, eta: number, speed: number, views: number) { return sql` diff --git a/packages/crawler/db/snapshot.ts b/packages/crawler/db/snapshot.ts index 94121a4..0ab1e7d 100644 --- a/packages/crawler/db/snapshot.ts +++ b/packages/crawler/db/snapshot.ts @@ -1,9 +1,14 @@ -import { SnapshotNumber } from "mq/task/getVideoStats"; import type { Psql } from "@core/db/psql.d"; -import { db, LatestVideoSnapshotType, videoSnapshot, VideoSnapshotType } from "@core/drizzle"; -import { PartialBy } from "@core/lib"; +import { + db, + type LatestVideoSnapshotType, + type VideoSnapshotType, + videoSnapshot, +} from "@core/drizzle"; +import type { PartialBy } from "@core/lib"; import { sql } from "drizzle-orm"; import { snapshotCounter } from "metrics"; +import type { SnapshotNumber } from "mq/task/getVideoStats"; export async function insertVideoSnapshot(data: PartialBy) { await db.insert(videoSnapshot).values(data); @@ -36,7 +41,7 @@ export async function getVideosNearMilestone() { return results.map((row) => { return { ...row, - aid: Number(row.aid) + aid: Number(row.aid), }; }); } @@ -57,7 +62,7 @@ export async function getLatestVideoSnapshot( return { ...row, aid: Number(row.aid), - time: new Date(row.time).getTime() + time: new Date(row.time).getTime(), }; })[0]; } diff --git a/packages/crawler/db/snapshotSchedule.ts b/packages/crawler/db/snapshotSchedule.ts index b53d575..075983b 100644 --- a/packages/crawler/db/snapshotSchedule.ts +++ b/packages/crawler/db/snapshotSchedule.ts @@ -1,10 +1,10 @@ -import type { SnapshotScheduleType } from "@core/db/schema.d"; -import logger from "@core/log"; -import { MINUTE } from "@core/lib"; -import { redis } from "@core/db/redis"; -import { Redis } from "ioredis"; -import { parseTimestampFromPsql } from "../utils/formatTimestampToPostgre"; import type { Psql } from "@core/db/psql.d"; +import { redis } from "@core/db/redis"; +import type { SnapshotScheduleType } from "@core/db/schema.d"; +import { MINUTE } from "@core/lib"; +import logger from "@core/log"; +import type { Redis 
} from "ioredis"; +import { parseTimestampFromPsql } from "../utils/formatTimestampToPostgre"; const REDIS_KEY = "cvsa:snapshot_window_counts"; @@ -141,7 +141,7 @@ export async function findClosestSnapshot( const row = result[0]; return { created_at: new Date(row.created_at).getTime(), - views: row.views + views: row.views, }; } @@ -162,7 +162,7 @@ export async function findSnapshotBefore( const row = result[0]; return { created_at: new Date(row.created_at).getTime(), - views: row.views + views: row.views, }; } @@ -190,7 +190,7 @@ export async function getLatestSnapshot(sql: Psql, aid: number): Promise @@ -70,17 +70,17 @@ class AkariProto extends AIManager { const offsets: number[] = [ 0, - ...cumsum(input_ids.slice(0, -1).map((x: string) => x.length)) + ...cumsum(input_ids.slice(0, -1).map((x: string) => x.length)), ]; const flattened_input_ids = input_ids.flat(); const inputs = { input_ids: new ort.Tensor("int64", new BigInt64Array(flattened_input_ids.map(BigInt)), [ - flattened_input_ids.length + flattened_input_ids.length, ]), offsets: new ort.Tensor("int64", new BigInt64Array(offsets.map(BigInt)), [ - offsets.length - ]) + offsets.length, + ]), }; const { embeddings } = await session.run(inputs); diff --git a/packages/crawler/ml/akari_api.ts b/packages/crawler/ml/akari_api.ts index 1705d5f..56f789c 100644 --- a/packages/crawler/ml/akari_api.ts +++ b/packages/crawler/ml/akari_api.ts @@ -1,6 +1,6 @@ -import apiManager from "./api_manager"; import logger from "@core/log"; import { WorkerError } from "mq/schema"; +import apiManager from "./api_manager"; class AkariAPI { private readonly serviceReady: Promise; diff --git a/packages/crawler/ml/api_manager.ts b/packages/crawler/ml/api_manager.ts index a461c2c..4029981 100644 --- a/packages/crawler/ml/api_manager.ts +++ b/packages/crawler/ml/api_manager.ts @@ -33,9 +33,9 @@ export class APIManager { const response = await fetch(`${this.baseUrl}/health`, { method: "GET", headers: { - "Content-Type": "application/json" + "Content-Type": "application/json", }, - signal: AbortSignal.timeout(this.timeout) + signal: AbortSignal.timeout(this.timeout), }); if (!response.ok) { @@ -60,17 +60,17 @@ export class APIManager { title: title.trim() || "untitled", description: description.trim() || "N/A", tags: tags.trim() || "empty", - aid: aid + aid: aid, }; try { const response = await fetch(`${this.baseUrl}/classify`, { method: "POST", headers: { - "Content-Type": "application/json" + "Content-Type": "application/json", }, body: JSON.stringify(request), - signal: AbortSignal.timeout(this.timeout) + signal: AbortSignal.timeout(this.timeout), }); if (!response.ok) { @@ -100,10 +100,10 @@ export class APIManager { const response = await fetch(`${this.baseUrl}/classify_batch`, { method: "POST", headers: { - "Content-Type": "application/json" + "Content-Type": "application/json", }, body: JSON.stringify(requests), - signal: AbortSignal.timeout(this.timeout * 2) // Longer timeout for batch + signal: AbortSignal.timeout(this.timeout * 2), // Longer timeout for batch }); if (!response.ok) { diff --git a/packages/crawler/ml/manager.ts b/packages/crawler/ml/manager.ts index ff6c045..4ac60a6 100644 --- a/packages/crawler/ml/manager.ts +++ b/packages/crawler/ml/manager.ts @@ -1,6 +1,6 @@ -import * as ort from "onnxruntime-node"; import logger from "@core/log"; import { WorkerError } from "mq/schema"; +import * as ort from "onnxruntime-node"; export class AIManager { public sessions: { [key: string]: ort.InferenceSession } = {}; diff --git 
a/packages/crawler/mq/exec/archiveSnapshots.ts b/packages/crawler/mq/exec/archiveSnapshots.ts index fdb5c6f..4a4104c 100644 --- a/packages/crawler/mq/exec/archiveSnapshots.ts +++ b/packages/crawler/mq/exec/archiveSnapshots.ts @@ -1,20 +1,20 @@ -import { Job } from "bullmq"; +import { sql } from "@core/db/dbNew"; +import { MINUTE } from "@core/lib"; +import logger from "@core/log"; +import { lockManager } from "@core/mq/lockManager"; +import type { Job } from "bullmq"; +import { + formatDistanceStrict, + formatDuration, + intervalToDuration, + nextMonday, + nextSaturday, +} from "date-fns"; import { getCommonArchiveAids, getVideosWithoutActiveSnapshotScheduleByType, - scheduleSnapshot + scheduleSnapshot, } from "db/snapshotSchedule"; -import logger from "@core/log"; -import { lockManager } from "@core/mq/lockManager"; -import { MINUTE } from "@core/lib"; -import { sql } from "@core/db/dbNew"; -import { - nextMonday, - nextSaturday, - formatDistanceStrict, - intervalToDuration, - formatDuration -} from "date-fns"; function randomTimestampBetween(start: Date, end: Date) { const startMs = start.getTime(); @@ -43,7 +43,7 @@ export const archiveSnapshotsWorker = async (_job: Job) => { const now = Date.now(); const date = new Date(); const formatted = formatDistanceStrict(date, nextSaturday(date).getTime(), { - unit: "hour" + unit: "hour", }); logger.log( `Scheduled archive snapshot for aid ${aid} in ${formatted}.`, @@ -62,7 +62,7 @@ export const archiveSnapshotsWorker = async (_job: Job) => { const targetTime = getRandomTimeInNextWeek(); const interval = intervalToDuration({ start: new Date(), - end: new Date(targetTime) + end: new Date(targetTime), }); const formatted = formatDuration(interval, { format: ["days", "hours"] }); diff --git a/packages/crawler/mq/exec/classifyVideo.ts b/packages/crawler/mq/exec/classifyVideo.ts index 883bc1a..5462b50 100644 --- a/packages/crawler/mq/exec/classifyVideo.ts +++ b/packages/crawler/mq/exec/classifyVideo.ts @@ -1,18 +1,18 @@ -import { Job } from "bullmq"; +import { sql } from "@core/db/dbNew"; +import { MINUTE } from "@core/lib"; +import logger from "@core/log"; +import { lockManager } from "@core/mq/lockManager"; +import type { Job } from "bullmq"; +import { scheduleSnapshot } from "db/snapshotSchedule"; +import { aidExistsInSongs } from "db/songs"; +import Akari from "ml/akari_api"; +import { ClassifyVideoQueue } from "mq/index"; +import { insertIntoSongs } from "mq/task/collectSongs"; import { getUnlabelledVideos, getVideoInfoFromAllData, - insertVideoLabel + insertVideoLabel, } from "../../db/bilibili_metadata"; -import Akari from "ml/akari_api"; -import { ClassifyVideoQueue } from "mq/index"; -import logger from "@core/log"; -import { lockManager } from "@core/mq/lockManager"; -import { aidExistsInSongs } from "db/songs"; -import { insertIntoSongs } from "mq/task/collectSongs"; -import { scheduleSnapshot } from "db/snapshotSchedule"; -import { MINUTE } from "@core/lib"; -import { sql } from "@core/db/dbNew"; export const classifyVideoWorker = async (job: Job) => { const aid = job.data.aid; @@ -44,7 +44,7 @@ export const classifyVideoWorker = async (job: Job) => { await job.updateData({ ...job.data, - label: label + label: label, }); return 0; diff --git a/packages/crawler/mq/exec/collectSongs.ts b/packages/crawler/mq/exec/collectSongs.ts index 02f2bc6..d20c812 100644 --- a/packages/crawler/mq/exec/collectSongs.ts +++ b/packages/crawler/mq/exec/collectSongs.ts @@ -1,4 +1,4 @@ -import { Job } from "bullmq"; +import type { Job } from "bullmq"; import { 
collectSongs } from "mq/task/collectSongs"; export const collectSongsWorker = async (_job: Job): Promise => { diff --git a/packages/crawler/mq/exec/directSnapshot.ts b/packages/crawler/mq/exec/directSnapshot.ts index e626e72..4fbdeac 100644 --- a/packages/crawler/mq/exec/directSnapshot.ts +++ b/packages/crawler/mq/exec/directSnapshot.ts @@ -1,7 +1,7 @@ -import { Job } from "bullmq"; -import { takeVideoSnapshot } from "mq/task/getVideoStats"; import { sql } from "@core/db/dbNew"; import { lockManager } from "@core/mq/lockManager"; +import type { Job } from "bullmq"; +import { takeVideoSnapshot } from "mq/task/getVideoStats"; export const directSnapshotWorker = async (job: Job): Promise => { const lock = await lockManager.isLocked(`directSnapshot-${job.data.aid}`); diff --git a/packages/crawler/mq/exec/dispatchMilestoneSnapshots.ts b/packages/crawler/mq/exec/dispatchMilestoneSnapshots.ts index ee4995a..a3a0fe3 100644 --- a/packages/crawler/mq/exec/dispatchMilestoneSnapshots.ts +++ b/packages/crawler/mq/exec/dispatchMilestoneSnapshots.ts @@ -1,12 +1,12 @@ -import { Job } from "bullmq"; +import { sql } from "@core/db/dbNew"; +import { HOUR, MINUTE, SECOND } from "@core/lib"; +import logger from "@core/log"; +import type { Job } from "bullmq"; import { getVideosNearMilestone } from "db/snapshot"; +import { scheduleSnapshot } from "db/snapshotSchedule"; +import { jobCounter, jobDurationRaw } from "metrics"; import { getAdjustedShortTermETA } from "mq/scheduling"; import { truncate } from "utils/truncate"; -import { scheduleSnapshot } from "db/snapshotSchedule"; -import logger from "@core/log"; -import { HOUR, MINUTE, SECOND } from "@core/lib"; -import { sql } from "@core/db/dbNew"; -import { jobCounter, jobDurationRaw } from "metrics"; export const dispatchMilestoneSnapshotsWorker = async (_job: Job) => { const start = Date.now(); diff --git a/packages/crawler/mq/exec/dispatchRegularSnapshots.ts b/packages/crawler/mq/exec/dispatchRegularSnapshots.ts index bfc744a..a59e9c4 100644 --- a/packages/crawler/mq/exec/dispatchRegularSnapshots.ts +++ b/packages/crawler/mq/exec/dispatchRegularSnapshots.ts @@ -1,15 +1,15 @@ -import { Job } from "bullmq"; +import { sql } from "@core/db/dbNew"; +import { HOUR, MINUTE, WEEK } from "@core/lib"; +import logger from "@core/log"; +import { lockManager } from "@core/mq/lockManager"; +import type { Job } from "bullmq"; import { getLatestVideoSnapshot } from "db/snapshot"; -import { truncate } from "utils/truncate"; import { getVideosWithoutActiveSnapshotScheduleByType, - scheduleSnapshot + scheduleSnapshot, } from "db/snapshotSchedule"; -import logger from "@core/log"; -import { HOUR, MINUTE, WEEK } from "@core/lib"; -import { lockManager } from "@core/mq/lockManager"; import { getRegularSnapshotInterval } from "mq/task/regularSnapshotInterval"; -import { sql } from "@core/db/dbNew"; +import { truncate } from "utils/truncate"; export const dispatchRegularSnapshotsWorker = async (_job: Job): Promise => { try { diff --git a/packages/crawler/mq/exec/executors.ts b/packages/crawler/mq/exec/executors.ts index ddeaa4c..3c2d370 100644 --- a/packages/crawler/mq/exec/executors.ts +++ b/packages/crawler/mq/exec/executors.ts @@ -1,10 +1,10 @@ -export * from "./getLatestVideos"; -export * from "./getVideoInfo"; -export * from "./collectSongs"; -export * from "./takeBulkSnapshot"; export * from "./archiveSnapshots"; +export * from "./collectSongs"; export * from "./dispatchMilestoneSnapshots"; export * from "./dispatchRegularSnapshots"; -export * from "./snapshotVideo"; +export 
* from "./getLatestVideos"; +export * from "./getVideoInfo"; export * from "./scheduleCleanup"; export * from "./snapshotTick"; +export * from "./snapshotVideo"; +export * from "./takeBulkSnapshot"; diff --git a/packages/crawler/mq/exec/getLatestVideos.ts b/packages/crawler/mq/exec/getLatestVideos.ts index 85a72e1..028917c 100644 --- a/packages/crawler/mq/exec/getLatestVideos.ts +++ b/packages/crawler/mq/exec/getLatestVideos.ts @@ -1,5 +1,5 @@ import { sql } from "@core/db/dbNew"; -import { Job } from "bullmq"; +import type { Job } from "bullmq"; import { queueLatestVideos } from "mq/task/queueLatestVideo"; export const getLatestVideosWorker = async (_job: Job): Promise => { diff --git a/packages/crawler/mq/exec/getVideoInfo.ts b/packages/crawler/mq/exec/getVideoInfo.ts index bcfd393..4bce811 100644 --- a/packages/crawler/mq/exec/getVideoInfo.ts +++ b/packages/crawler/mq/exec/getVideoInfo.ts @@ -1,17 +1,17 @@ -import { Job } from "bullmq"; -import { getVideoDetails } from "@core/net/getVideoDetails"; +import { bilibiliUser, db, videoSnapshot } from "@core/drizzle"; import logger from "@core/log"; -import { ClassifyVideoQueue, latestVideosEventsProducer } from "mq/index"; +import { getVideoDetails } from "@core/net/getVideoDetails"; +import type { Job } from "bullmq"; import { insertIntoMetadata, userExistsInBiliUsers, - videoExistsInAllData + videoExistsInAllData, } from "db/bilibili_metadata"; -import { insertIntoSongs } from "mq/task/collectSongs"; -import { bilibiliUser, db, videoSnapshot } from "@core/drizzle"; import { eq } from "drizzle-orm"; -import { GetVideoInfoJobData } from "mq/schema"; import { snapshotCounter } from "metrics"; +import { ClassifyVideoQueue, latestVideosEventsProducer } from "mq/index"; +import type { GetVideoInfoJobData } from "mq/schema"; +import { insertIntoSongs } from "mq/task/collectSongs"; interface AddSongEventPayload { eventName: string; @@ -23,7 +23,7 @@ const publishAddsongEvent = async (songID: number, uid: string) => latestVideosEventsProducer.publishEvent({ eventName: "addSong", uid: uid, - songID: songID + songID: songID, }); export const getVideoInfoWorker = async (job: Job): Promise => { @@ -64,7 +64,7 @@ export const getVideoInfoWorker = async (job: Job): Promise title: data.View.title, publishedAt: new Date(data.View.pubdate * 1000).toISOString(), duration: data.View.duration, - coverUrl: data.View.pic + coverUrl: data.View.pic, }); const userExists = await userExistsInBiliUsers(aid); @@ -74,7 +74,7 @@ export const getVideoInfoWorker = async (job: Job): Promise username: data.View.owner.name, desc: data.Card.card.sign, fans: data.Card.follower, - avatar: data.View.owner.face + avatar: data.View.owner.face, }); } else { await db @@ -83,7 +83,7 @@ export const getVideoInfoWorker = async (job: Job): Promise username: data.View.owner.name, desc: data.Card.card.sign, fans: data.Card.follower, - avatar: data.View.owner.face + avatar: data.View.owner.face, }) .where(eq(bilibiliUser.uid, uid)); } @@ -98,7 +98,7 @@ export const getVideoInfoWorker = async (job: Job): Promise likes: stat.like, coins: stat.coin, shares: stat.share, - favorites: stat.favorite + favorites: stat.favorite, }); snapshotCounter.add(1); diff --git a/packages/crawler/mq/exec/scheduleCleanup.ts b/packages/crawler/mq/exec/scheduleCleanup.ts index b24e857..9f2a54e 100644 --- a/packages/crawler/mq/exec/scheduleCleanup.ts +++ b/packages/crawler/mq/exec/scheduleCleanup.ts @@ -1,5 +1,5 @@ -import { Job } from "bullmq"; import logger from "@core/log"; +import type { Job } from "bullmq"; 
import { removeAllTimeoutSchedules } from "mq/task/removeAllTimeoutSchedules"; export const scheduleCleanupWorker = async (_job: Job): Promise => { diff --git a/packages/crawler/mq/exec/snapshotTick.ts b/packages/crawler/mq/exec/snapshotTick.ts index 6f73499..154dda5 100644 --- a/packages/crawler/mq/exec/snapshotTick.ts +++ b/packages/crawler/mq/exec/snapshotTick.ts @@ -1,21 +1,21 @@ -import { Job } from "bullmq"; +import { sql } from "@core/db/dbNew"; +import { getClosetMilestone as closetMilestone } from "@core/lib/milestone"; +import logger from "@core/log"; +import type { Job } from "bullmq"; import { bulkGetVideosWithoutProcessingSchedules, bulkSetSnapshotStatus, getBulkSnapshotsInNextSecond, getSnapshotsInNextSecond, setSnapshotStatus, - videoHasProcessingSchedule + videoHasProcessingSchedule, } from "db/snapshotSchedule"; -import logger from "@core/log"; -import { SnapshotQueue } from "mq/index"; -import { sql } from "@core/db/dbNew"; import { jobCounter, jobDurationRaw } from "metrics"; -import { getClosetMilestone as closetMilestone } from "@core/lib/milestone"; +import { SnapshotQueue } from "mq/index"; const priorityMap: { [key: string]: number } = { milestone: 1, - normal: 3 + normal: 3, }; export const bulkSnapshotTickWorker = async (_job: Job) => { @@ -37,13 +37,13 @@ export const bulkSnapshotTickWorker = async (_job: Job) => { created_at: schedule.created_at, started_at: schedule.started_at, finished_at: schedule.finished_at, - status: schedule.status + status: schedule.status, }; }); await SnapshotQueue.add( "bulkSnapshotVideo", { - schedules: schedulesData + schedules: schedulesData, }, { priority: 3 } ); @@ -73,7 +73,7 @@ export const snapshotTickWorker = async (_job: Job) => { { aid: Number(aid), id: Number(schedule.id), - type: schedule.type ?? "normal" + type: schedule.type ?? 
"normal", }, { priority } ); diff --git a/packages/crawler/mq/exec/snapshotVideo.ts b/packages/crawler/mq/exec/snapshotVideo.ts index 4b18537..a321e06 100644 --- a/packages/crawler/mq/exec/snapshotVideo.ts +++ b/packages/crawler/mq/exec/snapshotVideo.ts @@ -1,24 +1,24 @@ -import { Job } from "bullmq"; +import { sql } from "@core/db/dbNew"; +import { HOUR, MINUTE, SECOND } from "@core/lib"; +import logger from "@core/log"; +import { NetSchedulerError } from "@core/net/delegate"; +import type { Job } from "bullmq"; import { getLatestSnapshot, scheduleSnapshot, setSnapshotStatus, - snapshotScheduleExists + snapshotScheduleExists, } from "db/snapshotSchedule"; -import logger from "@core/log"; -import { HOUR, MINUTE, SECOND } from "@core/lib"; -import { getBiliVideoStatus, setBiliVideoStatus } from "../../db/bilibili_metadata"; -import { takeVideoSnapshot } from "mq/task/getVideoStats"; import { getSongsPublihsedAt } from "db/songs"; import { getAdjustedShortTermETA } from "mq/scheduling"; -import { NetSchedulerError } from "@core/net/delegate"; -import { sql } from "@core/db/dbNew"; +import { takeVideoSnapshot } from "mq/task/getVideoStats"; +import { getBiliVideoStatus, setBiliVideoStatus } from "../../db/bilibili_metadata"; import { closetMilestone } from "./snapshotTick"; const snapshotTypeToTaskMap = { milestone: "snapshotMilestoneVideo", normal: "snapshotVideo", - new: "snapshotMilestoneVideo" + new: "snapshotMilestoneVideo", } as const; export const snapshotVideoWorker = async (job: Job): Promise => { diff --git a/packages/crawler/mq/exec/takeBulkSnapshot.ts b/packages/crawler/mq/exec/takeBulkSnapshot.ts index e62b7d1..7b9a6f8 100644 --- a/packages/crawler/mq/exec/takeBulkSnapshot.ts +++ b/packages/crawler/mq/exec/takeBulkSnapshot.ts @@ -1,21 +1,21 @@ -import { Job } from "bullmq"; +import { sql } from "@core/db/dbNew"; +import type { SnapshotScheduleType } from "@core/db/schema"; +import { HOUR, MINUTE, SECOND } from "@core/lib"; +import logger from "@core/log"; +import { NetSchedulerError } from "@core/net/delegate"; +import type { Job } from "bullmq"; +import { updateETA } from "db/eta"; import { bulkScheduleSnapshot, bulkSetSnapshotStatus, getLatestSnapshot, scheduleSnapshot, - snapshotScheduleExists + snapshotScheduleExists, } from "db/snapshotSchedule"; -import { bulkGetVideoStats } from "net/bulkGetVideoStats"; -import logger from "@core/log"; -import { NetSchedulerError } from "@core/net/delegate"; -import { HOUR, MINUTE, SECOND } from "@core/lib"; -import { getRegularSnapshotInterval } from "mq/task/regularSnapshotInterval"; -import { SnapshotScheduleType } from "@core/db/schema"; -import { sql } from "@core/db/dbNew"; -import { updateETA } from "db/eta"; -import { closetMilestone } from "./snapshotTick"; import { snapshotCounter } from "metrics"; +import { getRegularSnapshotInterval } from "mq/task/regularSnapshotInterval"; +import { bulkGetVideoStats } from "net/bulkGetVideoStats"; +import { closetMilestone } from "./snapshotTick"; export const takeBulkSnapshotForVideosWorker = async (job: Job) => { const schedules: SnapshotScheduleType[] = job.data.schedules; diff --git a/packages/crawler/mq/index.ts b/packages/crawler/mq/index.ts index 00f715a..a0e3997 100644 --- a/packages/crawler/mq/index.ts +++ b/packages/crawler/mq/index.ts @@ -1,22 +1,22 @@ -import { Queue, ConnectionOptions, QueueEventsProducer } from "bullmq"; +import { type ConnectionOptions, Queue, QueueEventsProducer } from "bullmq"; import { redis } from "bun"; export const LatestVideosQueue = new 
Queue("latestVideos", { - connection: redis as ConnectionOptions + connection: redis as ConnectionOptions, }); export const ClassifyVideoQueue = new Queue("classifyVideo", { - connection: redis as ConnectionOptions + connection: redis as ConnectionOptions, }); export const SnapshotQueue = new Queue("snapshot", { - connection: redis as ConnectionOptions + connection: redis as ConnectionOptions, }); export const MiscQueue = new Queue("misc", { - connection: redis as ConnectionOptions + connection: redis as ConnectionOptions, }); export const latestVideosEventsProducer = new QueueEventsProducer("latestVideos", { - connection: redis as ConnectionOptions + connection: redis as ConnectionOptions, }); diff --git a/packages/crawler/mq/init.ts b/packages/crawler/mq/init.ts index 3cba399..f333bf0 100644 --- a/packages/crawler/mq/init.ts +++ b/packages/crawler/mq/init.ts @@ -1,39 +1,39 @@ +import { sql } from "@core/db/dbNew"; +import { redis } from "@core/db/redis"; import { HOUR, MINUTE, SECOND } from "@core/lib"; -import { ClassifyVideoQueue, LatestVideosQueue, MiscQueue, SnapshotQueue } from "mq/index"; import logger from "@core/log"; import { initSnapshotWindowCounts } from "db/snapshotSchedule"; -import { redis } from "@core/db/redis"; -import { sql } from "@core/db/dbNew"; +import { ClassifyVideoQueue, LatestVideosQueue, MiscQueue, SnapshotQueue } from "mq/index"; export async function initMQ() { await initSnapshotWindowCounts(sql, redis); await LatestVideosQueue.upsertJobScheduler("getLatestVideos", { every: 1 * MINUTE, - immediately: true + immediately: true, }); await ClassifyVideoQueue.upsertJobScheduler("classifyVideos", { every: 5 * MINUTE, - immediately: true + immediately: true, }); await LatestVideosQueue.upsertJobScheduler("collectSongs", { every: 3 * MINUTE, - immediately: true + immediately: true, }); await SnapshotQueue.upsertJobScheduler( "snapshotTick", { every: 1 * SECOND, - immediately: true + immediately: true, }, { opts: { removeOnComplete: 300, - removeOnFail: 600 - } + removeOnFail: 600, + }, } ); @@ -41,39 +41,39 @@ export async function initMQ() { "bulkSnapshotTick", { every: 15 * SECOND, - immediately: true + immediately: true, }, { opts: { removeOnComplete: 60, - removeOnFail: 600 - } + removeOnFail: 600, + }, } ); await SnapshotQueue.upsertJobScheduler("dispatchMilestoneSnapshots", { every: 5 * MINUTE, - immediately: true + immediately: true, }); await SnapshotQueue.upsertJobScheduler("dispatchRegularSnapshots", { every: 30 * MINUTE, - immediately: true + immediately: true, }); await SnapshotQueue.upsertJobScheduler("dispatchArchiveSnapshots", { every: 2 * HOUR, - immediately: false + immediately: false, }); await SnapshotQueue.upsertJobScheduler("scheduleCleanup", { every: 2 * MINUTE, - immediately: true + immediately: true, }); await MiscQueue.upsertJobScheduler("collectQueueMetrics", { every: 3 * SECOND, - immediately: true + immediately: true, }); logger.log("Message queue initialized."); diff --git a/packages/crawler/mq/scheduling.ts b/packages/crawler/mq/scheduling.ts index e267b8a..273c744 100644 --- a/packages/crawler/mq/scheduling.ts +++ b/packages/crawler/mq/scheduling.ts @@ -1,9 +1,9 @@ +import type { Psql } from "@core/db/psql.d"; +import { HOUR, MINUTE } from "@core/lib"; +import { updateETA } from "db/eta"; import { findClosestSnapshot, getLatestSnapshot, hasAtLeast2Snapshots } from "db/snapshotSchedule"; import { truncate } from "utils/truncate"; import { closetMilestone } from "./exec/snapshotTick"; -import { HOUR, MINUTE } from "@core/lib"; -import 
type { Psql } from "@core/db/psql.d"; -import { updateETA } from "db/eta"; const log = (value: number, base: number = 10) => Math.log(value) / Math.log(base); diff --git a/packages/crawler/mq/task/collectSongs.ts b/packages/crawler/mq/task/collectSongs.ts index 8ce4a0d..a86460a 100644 --- a/packages/crawler/mq/task/collectSongs.ts +++ b/packages/crawler/mq/task/collectSongs.ts @@ -1,11 +1,11 @@ import { sql } from "@core/db/dbNew"; -import { aidExistsInSongs, getNotCollectedSongs } from "db/songs"; -import logger from "@core/log"; -import { scheduleSnapshot } from "db/snapshotSchedule"; -import { MINUTE } from "@core/lib"; import type { Psql } from "@core/db/psql.d"; import { db, songs } from "@core/drizzle"; -import { and, eq, sql as drizzleSQL } from "drizzle-orm"; +import { MINUTE } from "@core/lib"; +import logger from "@core/log"; +import { scheduleSnapshot } from "db/snapshotSchedule"; +import { aidExistsInSongs, getNotCollectedSongs } from "db/songs"; +import { and, sql as drizzleSQL, eq } from "drizzle-orm"; export async function collectSongs() { const aids = await getNotCollectedSongs(sql); @@ -51,7 +51,7 @@ export async function insertIntoSongs(aid: number) { ) ON CONFLICT DO NOTHING RETURNING * - ` + `; return data; } diff --git a/packages/crawler/mq/task/getVideoStats.ts b/packages/crawler/mq/task/getVideoStats.ts index cdfe02c..ccf57a4 100644 --- a/packages/crawler/mq/task/getVideoStats.ts +++ b/packages/crawler/mq/task/getVideoStats.ts @@ -1,6 +1,6 @@ -import { getVideoInfo } from "@core/net/getVideoInfo"; -import logger from "@core/log"; import type { Psql } from "@core/db/psql.d"; +import logger from "@core/log"; +import { getVideoInfo } from "@core/net/getVideoInfo"; import { insertVideoSnapshot } from "db/snapshot"; export interface SnapshotNumber { @@ -52,7 +52,7 @@ export async function takeVideoSnapshot( shares, danmakus, replies, - aid + aid, }); logger.log(`Taken snapshot for video ${aid}.`, "net", "fn:insertVideoSnapshot"); @@ -66,6 +66,6 @@ export async function takeVideoSnapshot( coins, shares, favorites, - time + time, }; } diff --git a/packages/crawler/mq/task/queueLatestVideo.ts b/packages/crawler/mq/task/queueLatestVideo.ts index 5aad8d2..804a673 100644 --- a/packages/crawler/mq/task/queueLatestVideo.ts +++ b/packages/crawler/mq/task/queueLatestVideo.ts @@ -1,10 +1,10 @@ -import { getLatestVideoAids } from "net/getLatestVideoAids"; -import { videoExistsInAllData } from "db/bilibili_metadata"; -import { sleep } from "utils/sleep"; +import type { Psql } from "@core/db/psql.d"; import { SECOND } from "@core/lib"; import logger from "@core/log"; +import { videoExistsInAllData } from "db/bilibili_metadata"; import { LatestVideosQueue } from "mq/index"; -import type { Psql } from "@core/db/psql.d"; +import { getLatestVideoAids } from "net/getLatestVideoAids"; +import { sleep } from "utils/sleep"; export async function queueLatestVideos(sql: Psql): Promise { let page = 1; @@ -32,8 +32,8 @@ export async function queueLatestVideos(sql: Psql): Promise { attempts: 100, backoff: { type: "fixed", - delay: SECOND * 5 - } + delay: SECOND * 5, + }, } ); videosFound.add(aid); diff --git a/packages/crawler/mq/task/regularSnapshotInterval.ts b/packages/crawler/mq/task/regularSnapshotInterval.ts index a90a4b6..fc3fe79 100644 --- a/packages/crawler/mq/task/regularSnapshotInterval.ts +++ b/packages/crawler/mq/task/regularSnapshotInterval.ts @@ -1,6 +1,6 @@ -import { findClosestSnapshot, findSnapshotBefore, getLatestSnapshot } from "db/snapshotSchedule"; -import { HOUR } from 
"@core/lib"; import type { Psql } from "@core/db/psql.d"; +import { HOUR } from "@core/lib"; +import { findClosestSnapshot, findSnapshotBefore, getLatestSnapshot } from "db/snapshotSchedule"; export const getRegularSnapshotInterval = async (sql: Psql, aid: number) => { const now = Date.now(); diff --git a/packages/crawler/net/bulkGetVideoStats.ts b/packages/crawler/net/bulkGetVideoStats.ts index aa71ddd..24aeaf5 100644 --- a/packages/crawler/net/bulkGetVideoStats.ts +++ b/packages/crawler/net/bulkGetVideoStats.ts @@ -1,6 +1,6 @@ -import networkDelegate from "@core/net/delegate"; -import type { MediaListInfoData, MediaListInfoResponse } from "@core/net/bilibili.d"; import logger from "@core/log"; +import type { MediaListInfoData, MediaListInfoResponse } from "@core/net/bilibili.d"; +import networkDelegate from "@core/net/delegate"; /* * Bulk fetch video metadata from bilibili API @@ -34,6 +34,6 @@ export async function bulkGetVideoStats(aids: number[]): Promise< } return { data: data.data, - time: time + time: time, }; } diff --git a/packages/crawler/net/getLatestVideoAids.ts b/packages/crawler/net/getLatestVideoAids.ts index 65137e3..319f7f1 100644 --- a/packages/crawler/net/getLatestVideoAids.ts +++ b/packages/crawler/net/getLatestVideoAids.ts @@ -1,5 +1,5 @@ -import type { VideoListResponse } from "@core/net/bilibili.d"; import logger from "@core/log"; +import type { VideoListResponse } from "@core/net/bilibili.d"; import networkDelegate from "@core/net/delegate"; export async function getLatestVideoAids( diff --git a/packages/crawler/src/bullui.ts b/packages/crawler/src/bullui.ts index 714bde9..a304962 100644 --- a/packages/crawler/src/bullui.ts +++ b/packages/crawler/src/bullui.ts @@ -1,8 +1,8 @@ -import express from "express"; import { createBullBoard } from "@bull-board/api"; import { BullMQAdapter } from "@bull-board/api/bullMQAdapter.js"; import { ExpressAdapter } from "@bull-board/express"; -import { ClassifyVideoQueue, LatestVideosQueue, SnapshotQueue, MiscQueue } from "mq/index"; +import express from "express"; +import { ClassifyVideoQueue, LatestVideosQueue, MiscQueue, SnapshotQueue } from "mq/index"; const serverAdapter = new ExpressAdapter(); serverAdapter.setBasePath("/"); @@ -12,9 +12,9 @@ createBullBoard({ new BullMQAdapter(LatestVideosQueue), new BullMQAdapter(ClassifyVideoQueue), new BullMQAdapter(SnapshotQueue), - new BullMQAdapter(MiscQueue) + new BullMQAdapter(MiscQueue), ], - serverAdapter: serverAdapter + serverAdapter: serverAdapter, }); const app = express(); diff --git a/packages/crawler/src/filterWorker.ts b/packages/crawler/src/filterWorker.ts index ae32204..8c05fbb 100644 --- a/packages/crawler/src/filterWorker.ts +++ b/packages/crawler/src/filterWorker.ts @@ -1,10 +1,10 @@ -import { ConnectionOptions, Job, Worker } from "bullmq"; import { redis } from "@core/db/redis"; import logger from "@core/log"; -import { classifyVideosWorker, classifyVideoWorker } from "mq/exec/classifyVideo"; -import { WorkerError } from "mq/schema"; import { lockManager } from "@core/mq/lockManager"; +import { type ConnectionOptions, type Job, Worker } from "bullmq"; import Akari from "ml/akari_api"; +import { classifyVideosWorker, classifyVideoWorker } from "mq/exec/classifyVideo"; +import type { WorkerError } from "mq/schema"; const shutdown = async (signal: string, filterWorker: Worker) => { logger.log(`${signal} Received: Shutting down workers...`, "mq"); diff --git a/packages/crawler/src/worker.ts b/packages/crawler/src/worker.ts index c07e90a..1113ada 100644 --- 
a/packages/crawler/src/worker.ts +++ b/packages/crawler/src/worker.ts @@ -1,4 +1,9 @@ -import { ConnectionOptions, Job, Worker } from "bullmq"; +import { redis } from "@core/db/redis"; +import logger from "@core/log"; +import { lockManager } from "@core/mq/lockManager"; +import { type ConnectionOptions, type Job, Worker } from "bullmq"; +import { collectQueueMetrics } from "mq/exec/collectQueueMetrics"; +import { directSnapshotWorker } from "mq/exec/directSnapshot"; import { archiveSnapshotsWorker, bulkSnapshotTickWorker, @@ -10,14 +15,9 @@ import { scheduleCleanupWorker, snapshotTickWorker, snapshotVideoWorker, - takeBulkSnapshotForVideosWorker + takeBulkSnapshotForVideosWorker, } from "mq/exec/executors"; -import { redis } from "@core/db/redis"; -import logger from "@core/log"; -import { lockManager } from "@core/mq/lockManager"; -import { WorkerError } from "mq/schema"; -import { collectQueueMetrics } from "mq/exec/collectQueueMetrics"; -import { directSnapshotWorker } from "mq/exec/directSnapshot"; +import type { WorkerError } from "mq/schema"; const releaseLockForJob = async (name: string) => { await lockManager.releaseLock(name); @@ -61,7 +61,7 @@ const latestVideoWorker = new Worker( connection: redis as ConnectionOptions, concurrency: 6, removeOnComplete: { count: 1440 }, - removeOnFail: { count: 0 } + removeOnFail: { count: 0 }, } ); diff --git a/packages/palette/src/App.tsx b/packages/palette/src/App.tsx index 3ca7be5..3dd6576 100644 --- a/packages/palette/src/App.tsx +++ b/packages/palette/src/App.tsx @@ -1,15 +1,15 @@ import "virtual:uno.css"; -import { type Oklch } from "culori"; -import { Picker } from "./components/Picker/Picker"; -import { Switch } from "./Switch"; -import { i18nProvider } from "./utils"; -import { useTheme } from "./ThemeContext"; -import { ColorPalette } from "./components/Palette"; -import { Buttons, Paragraph, SearchBar } from "./components/Components"; -import { AnimatePresence, motion } from "motion/react"; -import { Moon, Sun } from "lucide-react"; +import type { Oklch } from "culori"; import { useAtom } from "jotai"; import { atomWithStorage } from "jotai/utils"; +import { Moon, Sun } from "lucide-react"; +import { AnimatePresence, motion } from "motion/react"; +import { Buttons, Paragraph, SearchBar } from "./components/Components"; +import { ColorPalette } from "./components/Palette"; +import { Picker } from "./components/Picker/Picker"; +import { Switch } from "./Switch"; +import { useTheme } from "./ThemeContext"; +import { i18nProvider } from "./utils"; const defaultColor: Oklch = { mode: "oklch", h: 29.2339, c: 0.244572, l: 0.596005 }; @@ -60,16 +60,22 @@ function App() { return (
[JSX markup in this App.tsx hunk was stripped during extraction. What survives shows a formatting reflow: the "CVSA Color Palette Generator" and "Color Selection" headings move from single-line JSX elements onto their own indented lines, with the "{/* Left Column - Color Picker */}" comment left as unchanged context. The diff header for packages/palette/src/Switch.tsx, whose hunks follow immediately below, was lost as well.]
= ({ checked, onChange, disabled = false, className = "", label }) => { +export const Switch: React.FC = ({ + checked, + onChange, + disabled = false, + className = "", + label, +}) => { const handleToggle = () => { if (!disabled) { onChange(!checked); @@ -22,7 +28,9 @@ export const Switch: React.FC = ({ checked, onChange, disabled = fa disabled={disabled} className={`relative flex items-center justify-center w-12 h-6 rounded-full transition-all duration-300 focus:outline-none focus:ring-2 focus:ring-green-500 focus:ring-offset-2 ${ - disabled ? "cursor-not-allowed opacity-50" : "cursor-pointer hover:scale-105" + disabled + ? "cursor-not-allowed opacity-50" + : "cursor-pointer hover:scale-105" } ${checked ? "bg-green-500" : "bg-zinc-300 dark:bg-zinc-600"}`} aria-checked={checked} aria-disabled={disabled} @@ -40,7 +48,9 @@ export const Switch: React.FC = ({ checked, onChange, disabled = fa {label && (