From 623c0f3b0baf177c419cd07fe8e07fecc16e9fa8 Mon Sep 17 00:00:00 2001
From: PrinOrange
Date: Sat, 6 Jan 2024 11:55:42 +0800
Subject: [PATCH] Disable the tokenizer for search indexing.

---
 lib/search.ts | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/lib/search.ts b/lib/search.ts
index a759c58..6d2033b 100644
--- a/lib/search.ts
+++ b/lib/search.ts
@@ -2,6 +2,10 @@ import minisearch from "minisearch";
 import { cutForSearch } from "nodejs-jieba";
 import { getPostFileContent, sortedPosts } from "./post-process";
+
+// TODO: The jieba tokenizer does not seem to be supported on the Vercel
+// platform, which causes a module-loading error.
+// Finding a better Chinese tokenizer remains an open task.
 
 function tokenizer(str: string) {
   return cutForSearch(str, true);
 }
@@ -10,7 +14,7 @@ function makeSearchIndex() {
   let miniSearch = new minisearch({
     fields: ["id", "title", "tags", "subtitle", "summary", "content"],
     storeFields: ["id", "title", "tags"],
-    tokenize: tokenizer,
+    // tokenize: tokenizer,
     searchOptions: {
       fuzzy: 0.1,
     },
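Since the native `nodejs-jieba` addon fails to load on Vercel, one pure-JavaScript alternative worth evaluating is the built-in `Intl.Segmenter`, which supports word-level segmentation for Chinese and needs no native bindings. The sketch below is not part of the patch and is untested; the `intlTokenizer` name is hypothetical, and segmentation quality will differ from jieba's `cutForSearch`.

```ts
// lib/search.ts (sketch) -- a possible native-free replacement for jieba.
// Assumes Node.js >= 16 built with full ICU, where Intl.Segmenter is available.

// Hypothetical replacement tokenizer; not part of the original patch.
function intlTokenizer(text: string): string[] {
  const segmenter = new Intl.Segmenter("zh", { granularity: "word" });
  const tokens: string[] = [];
  for (const { segment, isWordLike } of segmenter.segment(text)) {
    // Keep only word-like segments, dropping whitespace and punctuation.
    if (isWordLike) tokens.push(segment.toLowerCase());
  }
  return tokens;
}
```

If this proves adequate, the commented-out option could be restored as `tokenize: intlTokenizer`, letting MiniSearch index Chinese posts word by word again instead of falling back to its default tokenizer, which splits on spaces and punctuation and therefore treats a whole Chinese clause as a single token.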