Disable the tokenizer for search indexing.

PrinOrange
2024-01-06 11:55:42 +08:00
parent 4fdba9d2b9
commit 623c0f3b0b


@@ -2,6 +2,10 @@ import minisearch from "minisearch";
 import { cutForSearch } from "nodejs-jieba";
 import { getPostFileContent, sortedPosts } from "./post-process";
+// TODO: The jieba tokenizer does not seem to be supported on the Vercel
+// platform, which causes a module-loading error. Finding a better Chinese
+// tokenizer remains an open task.
 function tokenizer(str: string) {
   return cutForSearch(str, true);
 }
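
One possible direction for the TODO above, not part of this commit: Node's built-in Intl.Segmenter performs dictionary-based word segmentation for Chinese without any native bindings, so it should load on serverless platforms such as Vercel. A minimal sketch, assuming Node 16+ (the name segmenterTokenizer is illustrative):

// Sketch: a jieba-free tokenizer built on Intl.Segmenter.
const segmenter = new Intl.Segmenter("zh", { granularity: "word" });

function segmenterTokenizer(str: string): string[] {
  // Keep only word-like segments; whitespace and punctuation are dropped.
  return Array.from(segmenter.segment(str))
    .filter((seg) => seg.isWordLike)
    .map((seg) => seg.segment);
}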
@@ -10,7 +14,7 @@ function makeSearchIndex() {
   let miniSearch = new minisearch({
     fields: ["id", "title", "tags", "subtitle", "summary", "content"],
     storeFields: ["id", "title", "tags"],
-    tokenize: tokenizer,
+    // tokenize: tokenizer,
     searchOptions: {
       fuzzy: 0.1,
     },
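
With tokenize commented out, minisearch falls back to its default tokenizer, which splits on whitespace and punctuation and therefore treats an unbroken run of Chinese characters as a single token, degrading Chinese search. A minimal sketch of wiring the segmenter-based tokenizer from above back in (assumed, not the committed code):

import minisearch from "minisearch";

let miniSearch = new minisearch({
  fields: ["id", "title", "tags", "subtitle", "summary", "content"],
  storeFields: ["id", "title", "tags"],
  tokenize: segmenterTokenizer, // replaces the jieba-based tokenizer
  searchOptions: {
    fuzzy: 0.1,
  },
});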