diff --git a/lib/search.ts b/lib/search.ts index a759c58..6d2033b 100644 --- a/lib/search.ts +++ b/lib/search.ts @@ -2,6 +2,10 @@ import minisearch from "minisearch"; import { cutForSearch } from "nodejs-jieba"; import { getPostFileContent, sortedPosts } from "./post-process"; + +// TODO: The jieba tokenizer does not seem to be supported by the Vercel platform, +// which causes a module-loading error. +// Remaining task: find a better Chinese tokenizer. function tokenizer(str: string) { return cutForSearch(str, true); } @@ -10,7 +14,7 @@ function makeSearchIndex() { let miniSearch = new minisearch({ fields: ["id", "title", "tags", "subtitle", "summary", "content"], storeFields: ["id", "title", "tags"], - tokenize: tokenizer, + // tokenize: tokenizer, searchOptions: { fuzzy: 0.1, },