update: tokenizer
commit cf6c69e61b (parent 5abe7521ba)
@@ -1,38 +0,0 @@ (deleted file)
type EmbeddingDict = { [key: number]: Float32Array };

function getEmbeddingLayer(buffer: Buffer): EmbeddingDict {
  const dict: EmbeddingDict = {};

  // Each entry is 514 bytes: a uint16 token id followed by 128 little-endian float32 values.
  const entrySize = 514;
  const numEntries = buffer.length / entrySize;

  for (let i = 0; i < numEntries; i++) {
    const offset = i * entrySize;
    const key = buffer.readUInt16LE(offset);
    const floatArray = new Float32Array(128);

    for (let j = 0; j < 128; j++) {
      floatArray[j] = buffer.readFloatLE(offset + 2 + j * 4);
    }

    dict[key] = floatArray;
  }

  return dict;
}

function getEmbedding(tokenIds: number[], embeddingDict: EmbeddingDict, contextSize: number) {
  // Concatenate one 128-dimensional embedding per token, zero-padding out to contextSize tokens.
  let result: number[] = [];
  for (let i = 0; i < contextSize; i++) {
    if (i < tokenIds.length) {
      const tokenId = tokenIds[i];
      result = result.concat(Array.from(embeddingDict[tokenId]));
    } else {
      result = result.concat(new Array(128).fill(0));
    }
  }
  return new Float32Array(result);
}

export { getEmbeddingLayer, getEmbedding };
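For context, a hedged sketch of how these removed helpers were presumably driven (example only, not part of the diff). The module path, binary file name, and token ids are assumptions; the 514-byte record layout (uint16 token id + 128 float32 values) comes from the reader above.

import { readFileSync } from "node:fs";
// Hypothetical module path for the deleted file shown above.
import { getEmbeddingLayer, getEmbedding } from "./embedding";

// Hypothetical binary blob of consecutive 514-byte records.
const buffer = readFileSync("embedding_layer.bin");
const embeddingDict = getEmbeddingLayer(buffer);

// Placeholder token ids; they must exist as keys in the embedding file.
// Pad to a fixed context of 8 tokens -> Float32Array of length 8 * 128 = 1024.
const vector = getEmbedding([101, 2023, 2003], embeddingDict, 8);
console.log(vector.length); // 1024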
@@ -1,56 +1,16 @@
Removed (manual longest-match tokenizer over a TokenDict):

type TokenDict = { [key: string]: number };

function tokenize(query: string, tokenDict: TokenDict): number[] {
  const tokenIds: number[] = [];
  let index = 0;

  // Replace spaces with "▁" and newlines with the byte token "<0x0A>"
  query = "▁" + query.replace(/ /g, "▁");
  query = query.replace(/\n/g, "<0x0A>");

  while (index < query.length) {
    let bestToken = null;
    let bestLength = 0;

    // Step 2: Find the longest token that matches the beginning of the remaining query
    for (const token in tokenDict) {
      if (query.startsWith(token, index) && token.length > bestLength) {
        bestToken = token;
        bestLength = token.length;
      }
    }

    if (bestToken) {
      tokenIds.push(tokenDict[bestToken]);
      index += bestLength;
      continue;
    }

    // Step 3: Handle the case where no token matches
    const char = query[index];
    if (char.charCodeAt(0) <= 127) {
      // If the character is ASCII, and it doesn't match any token, treat it as an unknown token
      console.error(`Unknown token: ${char}`);
      index++;
      continue;
    }

    // If the character is non-ASCII, convert it to a series of bytes and match each byte
    const bytes = new TextEncoder().encode(char);
    for (const byte of bytes) {
      const byteToken = `<0x${byte.toString(16).toUpperCase()}>`;
      if (tokenDict[byteToken] === undefined) {
        console.error(`Unknown byte token: ${byteToken}`);
        index++;
        continue;
      }
      tokenIds.push(tokenDict[byteToken]);
    }
    index++;
  }

  return tokenIds;
}

export default tokenize;
export type { TokenDict };

Added (tokenization delegated to @xenova/transformers, loading model files from the local path):

import { AutoTokenizer, env } from "@xenova/transformers";

env.allowRemoteModels = false;
env.localModelPath = "/transformers/";

async function tokenize(text: string, model: string) {
  const tokenizer = await AutoTokenizer.from_pretrained(model);
  const { input_ids } = await tokenizer(text);
  const tokenIds = [];
  for (let id of input_ids.data) {
    tokenIds.push(parseInt(id));
  }

  return tokenIds;
}

export default tokenize;
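A usage sketch of the new tokenize() (example only, not part of the diff). The exact call site is an assumption, though the model id mirrors the files added below under public/transformers/.

// Assumes the tokenizer files for Qwen/Qwen2.5-3B are served at
// /transformers/Qwen/Qwen2.5-3B/, which is where env.localModelPath points.
const ids = await tokenize("Hello, world!", "Qwen/Qwen2.5-3B");
console.log(ids); // an array of integer token ids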
package.json
@@ -1,63 +1,64 @@
The only change is the added "@xenova/transformers" dependency; the resulting file:

{
  "name": "sparkhome",
  "private": false,
  "version": "5.8.0",
  "type": "module",
  "scripts": {
    "dev": "bun server.ts",
    "build": "bun license-gen && tsc -b && vite build",
    "lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0",
    "preview": "NODE_ENV=production bun server.ts",
    "license-gen": "bunx generate-license-file --input package.json --output lib/license.txt --overwrite",
    "format": "prettier --write ."
  },
  "dependencies": {
    "@iconify/react": "^5.0.1",
    "@nextui-org/react": "^2.4.2",
    "@types/bun": "^1.1.6",
    "@types/express": "^4.17.21",
    "@types/tr46": "^5.0.0",
    "@xenova/transformers": "^2.17.2",
    "cac": "^6.7.14",
    "chalk": "^5.3.0",
    "express": "^4.19.2",
    "fflate": "^0.8.2",
    "framer-motion": "^11.2.12",
    "generate-license-file": "^3.5.1",
    "i18next": "^23.11.5",
    "i18next-browser-languagedetector": "^8.0.0",
    "i18next-icu": "^2.3.0",
    "jest": "^29.7.0",
    "jotai": "^2.8.3",
    "node-nlp": "^4.27.0",
    "react": "^18.3.1",
    "react-dom": "^18.3.1",
    "react-i18next": "^14.1.2",
    "react-router": "^6.23.1",
    "react-router-dom": "^6.23.1",
    "search-engine-autocomplete": "^0.4.3",
    "tr46": "^5.0.0",
    "valid-url": "^1.0.9",
    "validate-color": "^2.2.4",
    "vite-express": "^0.17.0"
  },
  "devDependencies": {
    "@types/react": "^18.3.3",
    "@types/react-dom": "^18.3.0",
    "@types/valid-url": "^1.0.7",
    "@typescript-eslint/eslint-plugin": "^7.13.1",
    "@typescript-eslint/parser": "^7.13.1",
    "@vitejs/plugin-react-swc": "^3.5.0",
    "autoprefixer": "^10.4.19",
    "eslint": "^8.57.0",
    "eslint-plugin-react-hooks": "^4.6.2",
    "eslint-plugin-react-refresh": "^0.4.7",
    "postcss": "^8.4.38",
    "prettier": "^3.3.3",
    "tailwindcss": "^3.4.4",
    "typescript": "^5.2.2",
    "vite": "^5.3.1",
    "vite-plugin-chunk-split": "^0.5.0",
    "vite-plugin-pages": "^0.32.2",
    "vite-tsconfig-paths": "^4.3.2"
  }
}
public/transformers/Qwen/Qwen2.5-3B/config.json (new file)
@@ -0,0 +1,28 @@
{
  "architectures": [
    "Qwen2ForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 151643,
  "eos_token_id": 151643,
  "hidden_act": "silu",
  "hidden_size": 2048,
  "initializer_range": 0.02,
  "intermediate_size": 11008,
  "max_position_embeddings": 32768,
  "max_window_layers": 36,
  "model_type": "qwen2",
  "num_attention_heads": 16,
  "num_hidden_layers": 36,
  "num_key_value_heads": 2,
  "rms_norm_eps": 1e-06,
  "rope_theta": 1000000.0,
  "sliding_window": 32768,
  "tie_word_embeddings": true,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.40.1",
  "use_cache": true,
  "use_mrope": false,
  "use_sliding_window": false,
  "vocab_size": 151936
}
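As a side note, a minimal sketch (example only, not part of the diff) of reading a few of these fields at runtime; it assumes the file is served as a static asset from public/, so it is reachable at /transformers/Qwen/Qwen2.5-3B/config.json, and the field subset and helper name are assumptions.

// Hypothetical helper that fetches the static model config and returns a few fields.
type QwenConfig = {
  hidden_size: number;
  num_hidden_layers: number;
  vocab_size: number;
};

async function loadQwenConfig(): Promise<QwenConfig> {
  const res = await fetch("/transformers/Qwen/Qwen2.5-3B/config.json");
  if (!res.ok) throw new Error(`Failed to load config: ${res.status}`);
  return (await res.json()) as QwenConfig;
}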
public/transformers/Qwen/Qwen2.5-3B/tokenizer.json (new file, 303282 lines)
Diff suppressed because the file is too large.
public/transformers/Qwen/Qwen2.5-3B/tokenizer_config.json (new file)
@@ -0,0 +1,207 @@
{
  "add_bos_token": false,
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "151643": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151644": {
      "content": "<|im_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151645": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151646": {
      "content": "<|object_ref_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151647": {
      "content": "<|object_ref_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151648": {
      "content": "<|box_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151649": {
      "content": "<|box_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151650": {
      "content": "<|quad_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151651": {
      "content": "<|quad_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151652": {
      "content": "<|vision_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151653": {
      "content": "<|vision_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151654": {
      "content": "<|vision_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151655": {
      "content": "<|image_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151656": {
      "content": "<|video_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "151657": {
      "content": "<tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151658": {
      "content": "</tool_call>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151659": {
      "content": "<|fim_prefix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151660": {
      "content": "<|fim_middle|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151661": {
      "content": "<|fim_suffix|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151662": {
      "content": "<|fim_pad|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151663": {
      "content": "<|repo_name|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    },
    "151664": {
      "content": "<|file_sep|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": false
    }
  },
  "additional_special_tokens": [
    "<|im_start|>",
    "<|im_end|>",
    "<|object_ref_start|>",
    "<|object_ref_end|>",
    "<|box_start|>",
    "<|box_end|>",
    "<|quad_start|>",
    "<|quad_end|>",
    "<|vision_start|>",
    "<|vision_end|>",
    "<|vision_pad|>",
    "<|image_pad|>",
    "<|video_pad|>"
  ],
  "bos_token": null,
  "chat_template": "{%- if tools %}\n  {{- '<|im_start|>system\\n' }}\n  {%- if messages[0]['role'] == 'system' %}\n    {{- messages[0]['content'] }}\n  {%- else %}\n    {{- 'You are a helpful assistant.' }}\n  {%- endif %}\n  {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n  {%- for tool in tools %}\n    {{- \"\\n\" }}\n    {{- tool | tojson }}\n  {%- endfor %}\n  {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n  {%- if messages[0]['role'] == 'system' %}\n    {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n  {%- else %}\n    {{- '<|im_start|>system\\nYou are a helpful assistant.<|im_end|>\\n' }}\n  {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n  {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n    {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n  {%- elif message.role == \"assistant\" %}\n    {{- '<|im_start|>' + message.role }}\n    {%- if message.content %}\n      {{- '\\n' + message.content }}\n    {%- endif %}\n    {%- for tool_call in message.tool_calls %}\n      {%- if tool_call.function is defined %}\n        {%- set tool_call = tool_call.function %}\n      {%- endif %}\n      {{- '\\n<tool_call>\\n{\"name\": \"' }}\n      {{- tool_call.name }}\n      {{- '\", \"arguments\": ' }}\n      {{- tool_call.arguments | tojson }}\n      {{- '}\\n</tool_call>' }}\n    {%- endfor %}\n    {{- '<|im_end|>\\n' }}\n  {%- elif message.role == \"tool\" %}\n    {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n      {{- '<|im_start|>user' }}\n    {%- endif %}\n    {{- '\\n<tool_response>\\n' }}\n    {{- message.content }}\n    {{- '\\n</tool_response>' }}\n    {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n      {{- '<|im_end|>\\n' }}\n    {%- endif %}\n  {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n  {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|endoftext|>",
  "errors": "replace",
  "model_max_length": 131072,
  "pad_token": "<|endoftext|>",
  "split_special_tokens": false,
  "tokenizer_class": "Qwen2Tokenizer",
  "unk_token": null
}
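The chat_template above is the ChatML-style Qwen format. For illustration (example only, not part of the diff), a hedged TypeScript sketch of the prompt it produces in the simple case: no tools, a leading system message, and add_generation_prompt enabled; the function name is hypothetical and tool calls / tool responses are not covered.

// Manual rendering of the simple no-tools path of the template above.
type ChatMessage = { role: "system" | "user" | "assistant"; content: string };

function renderChatML(messages: ChatMessage[]): string {
  let out = "";
  for (const m of messages) {
    out += `<|im_start|>${m.role}\n${m.content}<|im_end|>\n`;
  }
  // Equivalent of add_generation_prompt: true
  out += "<|im_start|>assistant\n";
  return out;
}

const prompt = renderChatML([
  { role: "system", content: "You are a helpful assistant." },
  { role: "user", content: "Hello!" },
]);
// prompt ===
//   "<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n" +
//   "<|im_start|>user\nHello!<|im_end|>\n" +
//   "<|im_start|>assistant\n"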
public/transformers/Qwen/Qwen2.5-3B/vocab.json (new file, 1 line)
Diff suppressed because one or more lines are too long.