fix: unable to properly segment raw TLD data
feat: searchbox value changes while selecting a suggestion
parent de7c5990bb
commit f7de462a28
@@ -23,6 +23,7 @@ import { loadVocab } from "lib/nlp/tokenize/loadVocab";
 import BPETokenizer from "lib/nlp/tokenize/BPEtokenizer";
 import energyScore from "lib/nlp/energyScore";
 import bytesToUnicode from "lib/nlp/tokenize/bytesToUnicode";
+import { searchboxLastInputAtom } from "lib/state/searchboxLastInput";
 
 interface EmbeddingLayer {
     [key: number]: Float32Array<ArrayBufferLike>;
@@ -33,6 +34,7 @@ export default function OneSearch() {
     const [embeddingLayer, setEmbeddingLayer] = useState<EmbeddingLayer | null>(null);
     const [NLUsession, setNLUsession] = useState<ort.InferenceSession | null>(null);
     const [tokenizer, setTokenizer] = useState<BPETokenizer | null>(null);
+    const lastInput = useAtomValue(searchboxLastInputAtom);
     const lastRequestTimeRef = useRef(0);
     const selected = useAtomValue(selectedSuggestionAtom);
     const settings = useAtomValue(settingsAtom);
@@ -69,7 +71,7 @@ export default function OneSearch() {
                 // Handle fetch error
                 sendError(error);
             });
-    }, [query]);
+    }, [lastInput]);
 
     function updateSuggestion(data: suggestionItem[]) {
         setFinalSuggetsion((cur: suggestionItem[]) => {
@@ -177,7 +179,7 @@ export default function OneSearch() {
             }
             console.log(data, energyScore(data));
         })();
-    }, [query, engineName]);
+    }, [lastInput, engineName]);
 
     return (
         <SuggestionBox>
@@ -6,12 +6,14 @@ import { selectedSuggestionAtom } from "lib/state/suggestionSelection";
 import handleEnter from "lib/onesearch/handleEnter";
 import { suggestionAtom } from "lib/state/suggestion";
 import { useTranslation } from "react-i18next";
+import { searchboxLastInputAtom } from "lib/state/searchboxLastInput";
 
 export default function Search(props: { onFocus: () => void }) {
     const { t } = useTranslation();
     const settings = useAtomValue(settingsAtom);
     const [query, setQuery] = useAtom(queryAtom);
     const [selectedSuggestion, setSelected] = useAtom(selectedSuggestionAtom);
+    const [_, setLastInput] = useAtom(searchboxLastInputAtom);
     const suggestions = useAtomValue(suggestionAtom);
     const searchBoxRef = useRef<HTMLInputElement>(null);
@@ -25,11 +27,21 @@ export default function Search(props: { onFocus: () => void }) {
         } else if (e.key == "ArrowUp") {
             e.preventDefault();
             const len = suggestions.length;
-            setSelected((selectedSuggestion - 1 + len) % len);
+            const lastSelectedIndex = (selectedSuggestion - 1 + len) % len;
+            const lastSuggeston = suggestions[lastSelectedIndex];
+            setSelected(lastSelectedIndex);
+            if (["QUERY", "NAVIGATION", "default"].includes(lastSuggeston.type)) {
+                setQuery(lastSuggeston.suggestion);
+            }
         } else if (e.key == "ArrowDown") {
             e.preventDefault();
             const len = suggestions.length;
-            setSelected((selectedSuggestion + 1) % len);
+            const nextSelectedIndex = (selectedSuggestion + 1 + len) % len;
+            const nextSuggeston = suggestions[nextSelectedIndex];
+            setSelected(nextSelectedIndex);
+            if (["QUERY", "NAVIGATION", "default"].includes(nextSuggeston.type)) {
+                setQuery(nextSuggeston.suggestion);
+            }
         }
     }
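Note on the hunk above: the ArrowUp and ArrowDown branches are now mirror images of each other (compute a wrap-around index, look up the suggestion, update the selection, and copy the text into the searchbox only for text-like suggestion types). A pure helper could express that once. The sketch below is not part of the commit; the names nextSelection and SuggestionItem are placeholders, with the item shape inferred from the fields the diff touches.

interface SuggestionItem {
    type: string;        // e.g. "QUERY", "NAVIGATION", "default", ...
    suggestion: string;  // text shown and optionally copied into the searchbox
}

// Wrap-around move through the suggestion list; returns the new index and,
// for text-like items, the text that should replace the searchbox value.
function nextSelection(
    items: SuggestionItem[],
    current: number,
    offset: -1 | 1
): { index: number; queryText?: string } {
    const len = items.length;
    if (len === 0) return { index: current };
    const index = (current + offset + len) % len;
    const item = items[index];
    const copyable = ["QUERY", "NAVIGATION", "default"].includes(item.type);
    return { index, queryText: copyable ? item.suggestion : undefined };
}

// Possible use inside handleKeydown (setter names as in the diff):
//   const { index, queryText } = nextSelection(suggestions, selectedSuggestion, e.key === "ArrowUp" ? -1 : 1);
//   setSelected(index);
//   if (queryText !== undefined) setQuery(queryText);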
@@ -48,11 +60,10 @@ export default function Search(props: { onFocus: () => void }) {
                 placeholder={t("search.placeholder")}
                 onFocus={props.onFocus}
                 onKeyDown={handleKeydown}
-                onChange={(e) =>
-                    setQuery(() => {
-                        return e.target.value;
-                    })
-                }
+                onChange={(e) => {
+                    setLastInput(new Date().getTime());
+                    setQuery(() => e.target.value);
+                }}
                 autoComplete="off"
                 autoCorrect="off"
                 autoCapitalize="off"
lib/state/searchboxLastInput.ts (new file, +5 lines)
@@ -0,0 +1,5 @@
+import { atom } from "jotai";
+
+const searchboxLastInputAtom = atom(0);
+
+export { searchboxLastInputAtom };
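For context, the intended data flow around the new atom: the search box stamps the time of the last real keystroke into searchboxLastInputAtom, and the suggestion component depends on that timestamp instead of the query text, so the setQuery() calls made while arrowing through suggestions no longer re-trigger a fetch. A minimal sketch of that wiring follows; only searchboxLastInputAtom comes from this commit, and the component names and bodies are simplified placeholders.

import { atom, useAtom, useAtomValue } from "jotai";
import { useEffect, useState } from "react";

const searchboxLastInputAtom = atom(0);

// Writer side: only real keystrokes bump the timestamp.
function SearchBoxSketch() {
    const [query, setQuery] = useState("");
    const [, setLastInput] = useAtom(searchboxLastInputAtom);
    return (
        <input
            value={query}
            onChange={(e) => {
                setLastInput(new Date().getTime());
                setQuery(e.target.value);
            }}
        />
    );
}

// Reader side: the effect re-runs per keystroke, not per query change,
// so programmatic setQuery() calls (arrow-key selection) do not refetch.
function SuggestionListSketch() {
    const lastInput = useAtomValue(searchboxLastInputAtom);
    useEffect(() => {
        // fetch and render suggestions here
    }, [lastInput]);
    return null;
}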
@@ -1,5 +1,5 @@
 import TLDtxt from "./tlds.txt?raw";
 
 export function getTLD() {
-    return TLDtxt.split("\n").filter((line) => line[0] !== "#");
+    return TLDtxt.split("\r\n").filter((line) => line[0] !== "#");
 }
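The underlying issue here appears to be line endings: if the bundled tlds.txt uses CRLF, splitting on "\n" leaves a trailing "\r" on every entry and breaks TLD matching, while splitting on "\r\n" assumes the file never ships with plain LF. A line-ending-agnostic variant is sketched below; it is not the commit's code, the name getTLDList is hypothetical, and it takes the raw text as a parameter instead of the Vite ?raw import so it stands alone.

// Split on either CRLF or LF, trim stray whitespace, and drop comment
// lines and empty lines so neither "\r" residue nor blank entries leak through.
export function getTLDList(raw: string): string[] {
    return raw
        .split(/\r?\n/)
        .map((line) => line.trim())
        .filter((line) => line.length > 0 && !line.startsWith("#"));
}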
@@ -1,13 +0,0 @@
-import { describe, expect, test } from "bun:test";
-import tokenize from "../lib/nlp/tokenize/tokenizer";
-
-describe("Test if tokenizer works", () => {
-    test("Using without a mirror", async () => {
-        let result = await tokenize("你好,世界!", "Qwen/Qwen2.5-3B", false);
-        expect(result).toEqual([108386, 3837, 99489, 6313]);
-    });
-    test("Using with a mirror", async () => {
-        let result = await tokenize("你好,世界!", "Qwen/Qwen2.5-3B", true);
-        expect(result).toEqual([108386, 3837, 99489, 6313]);
-    });
-});