fix: unintended local_files_only when loading the tokenizer

alikia2x (寒寒) 2025-02-22 21:48:17 +08:00
parent f70401846a
commit 46191cfd56
Signed by: alikia2x
GPG Key ID: 56209E0CCD8420C6
2 changed files with 2 additions and 2 deletions


@@ -18,8 +18,7 @@ export async function initializeModels() {
 	}
 	try {
-		const tokenizerConfig = { local_files_only: true };
-		tokenizer = await AutoTokenizer.from_pretrained(tokenizerModel, tokenizerConfig);
+		tokenizer = await AutoTokenizer.from_pretrained(tokenizerModel);
 		const [classifierSession, embeddingSession] = await Promise.all([
 			ort.InferenceSession.create(onnxClassifierPath),
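
For context, a minimal sketch of the behaviour this hunk changes, assuming the tokenizer is loaded with transformers.js (the @huggingface/transformers package); the model ID below is a placeholder, since the real value of tokenizerModel is defined elsewhere in the repository:

import { AutoTokenizer } from "@huggingface/transformers";

// Placeholder model ID for illustration only; the project defines its own `tokenizerModel`.
const tokenizerModel = "Xenova/bert-base-uncased";

// Before the fix: `local_files_only: true` restricts resolution to the local cache,
// so a machine that has never downloaded the tokenizer fails here instead of fetching it.
try {
	await AutoTokenizer.from_pretrained(tokenizerModel, { local_files_only: true });
} catch (err) {
	console.error("No cached copy of the tokenizer:", err);
}

// After the fix: default options fetch from the Hugging Face Hub when needed
// and still reuse a cached copy when one exists.
const tokenizer = await AutoTokenizer.from_pretrained(tokenizerModel);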


@@ -18,6 +18,7 @@ export async function insertLatestVideos(
 	}
 	logger.log(`Latest video in the database: ${new Date(latestVideoTimestamp).toISOString()}`, "net", "fn:insertLatestVideos()")
 	const videoIndex = await getVideoPositionInNewList(latestVideoTimestamp);
+	logger.log(`Position of the video in the latest list: ${videoIndex}`, "net", "fn:insertLatestVideos()")
 	if (videoIndex == null) {
 		logger.error("Cannot locate the video through bisect.", "net", "fn:insertLatestVideos()");
 		return null