Disable the custom tokenizer for search indexing.
@@ -2,6 +2,10 @@ import minisearch from "minisearch";
 import { cutForSearch } from "nodejs-jieba";
 import { getPostFileContent, sortedPosts } from "./post-process";
 
+
+// TODO: The jieba tokenizer does not seem to be supported on the Vercel platform,
+// which causes a module loading error.
+// The remaining task is to find a better Chinese tokenizer.
 function tokenizer(str: string) {
   return cutForSearch(str, true);
 }
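The TODO above asks for a Chinese tokenizer that loads cleanly on Vercel. One possible direction, sketched below under the assumption that the deployment runs on Node.js 16 or later, is the built-in Intl.Segmenter API: it performs word segmentation for Chinese without native bindings, so it should sidestep the module loading error caused by nodejs-jieba. The segmenterTokenizer name is illustrative and not part of this commit.

    // Sketch only: a dependency-free Chinese tokenizer built on Intl.Segmenter,
    // which ships with Node.js 16+ and needs no native modules, so it should
    // load on serverless platforms such as Vercel.
    const segmenter = new Intl.Segmenter("zh", { granularity: "word" });

    function segmenterTokenizer(str: string): string[] {
      // Keep word-like segments only; whitespace and punctuation are dropped.
      return Array.from(segmenter.segment(str))
        .filter((seg) => seg.isWordLike)
        .map((seg) => seg.segment);
    }

Passing such a function as the tokenize option would restore per-word indexing for Chinese content without reintroducing a native dependency.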
@@ -10,7 +14,7 @@ function makeSearchIndex() {
   let miniSearch = new minisearch({
     fields: ["id", "title", "tags", "subtitle", "summary", "content"],
     storeFields: ["id", "title", "tags"],
-    tokenize: tokenizer,
+    // tokenize: tokenizer,
     searchOptions: {
       fuzzy: 0.1,
     },
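With the tokenize option commented out, MiniSearch falls back to its default tokenizer, which splits on spaces and punctuation; contiguous Chinese text is therefore likely to be indexed as long unsegmented tokens, which is presumably why the jieba tokenizer was wired in originally. A minimal usage sketch of the resulting index follows; the document shape is assumed for illustration and is not taken from this repository.

    import minisearch from "minisearch";

    const index = new minisearch({
      fields: ["id", "title", "tags", "subtitle", "summary", "content"],
      storeFields: ["id", "title", "tags"],
      searchOptions: { fuzzy: 0.1 },
    });

    // Hypothetical documents; the real ones come from sortedPosts / getPostFileContent.
    index.addAll([
      { id: "hello-world", title: "Hello World", tags: ["demo"], content: "A first post." },
    ]);

    // Each result carries the stored fields (id, title, tags) plus a relevance score.
    const results = index.search("hello");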