Mirror of https://github.com/zadam/trilium.git (synced 2025-03-01 14:22:32 +01:00)
fix highlighting
This commit is contained in:
parent 08dbf90a8c
commit a8d12f723f
package-lock.json (generated, 490 changed lines)
File diff suppressed because it is too large.
@@ -2,14 +2,22 @@ const parser = require('../src/services/search/parser');
 
 describe("Parser", () => {
     it("fulltext parser without content", () => {
-        const rootExp = parser(["hello", "hi"], [], false);
+        const rootExp = parser({
+            fulltextTokens: ["hello", "hi"],
+            expressionTokens: [],
+            includingNoteContent: false
+        });
 
         expect(rootExp.constructor.name).toEqual("NoteCacheFulltextExp");
         expect(rootExp.tokens).toEqual(["hello", "hi"]);
     });
 
     it("fulltext parser with content", () => {
-        const rootExp = parser(["hello", "hi"], [], true);
+        const rootExp = parser({
+            fulltextTokens: ["hello", "hi"],
+            expressionTokens: [],
+            includingNoteContent: true
+        });
 
         expect(rootExp.constructor.name).toEqual("OrExp");
         const [firstSub, secondSub] = rootExp.subExpressions;
@@ -22,7 +30,11 @@ describe("Parser", () => {
     });
 
     it("simple label comparison", () => {
-        const rootExp = parser([], ["#mylabel", "=", "text"], true);
+        const rootExp = parser({
+            fulltextTokens: [],
+            expressionTokens: ["#mylabel", "=", "text"],
+            includingNoteContent: true
+        });
 
         expect(rootExp.constructor.name).toEqual("FieldComparisonExp");
         expect(rootExp.attributeType).toEqual("label");
@@ -31,7 +43,11 @@ describe("Parser", () => {
     });
 
     it("simple label AND", () => {
-        const rootExp = parser([], ["#first", "=", "text", "AND", "#second", "=", "text"], true);
+        const rootExp = parser({
+            fulltextTokens: [],
+            expressionTokens: ["#first", "=", "text", "AND", "#second", "=", "text"],
+            includingNoteContent: true
+        });
 
         expect(rootExp.constructor.name).toEqual("AndExp");
         const [firstSub, secondSub] = rootExp.subExpressions;
@@ -44,7 +60,11 @@ describe("Parser", () => {
     });
 
     it("simple label AND without explicit AND", () => {
-        const rootExp = parser([], ["#first", "=", "text", "#second", "=", "text"], true);
+        const rootExp = parser({
+            fulltextTokens: [],
+            expressionTokens: ["#first", "=", "text", "#second", "=", "text"],
+            includingNoteContent: true
+        });
 
         expect(rootExp.constructor.name).toEqual("AndExp");
         const [firstSub, secondSub] = rootExp.subExpressions;
@@ -57,7 +77,11 @@ describe("Parser", () => {
     });
 
     it("simple label OR", () => {
-        const rootExp = parser([], ["#first", "=", "text", "OR", "#second", "=", "text"], true);
+        const rootExp = parser({
+            fulltextTokens: [],
+            expressionTokens: ["#first", "=", "text", "OR", "#second", "=", "text"],
+            includingNoteContent: true
+        });
 
         expect(rootExp.constructor.name).toEqual("OrExp");
         const [firstSub, secondSub] = rootExp.subExpressions;
@@ -70,7 +94,11 @@ describe("Parser", () => {
     });
 
     it("fulltext and simple label", () => {
-        const rootExp = parser(["hello"], ["#mylabel", "=", "text"], false);
+        const rootExp = parser({
+            fulltextTokens: ["hello"],
+            expressionTokens: ["#mylabel", "=", "text"],
+            includingNoteContent: false
+        });
 
         expect(rootExp.constructor.name).toEqual("AndExp");
         const [firstSub, secondSub] = rootExp.subExpressions;
@@ -83,7 +111,11 @@ describe("Parser", () => {
     });
 
     it("label sub-expression", () => {
-        const rootExp = parser([], ["#first", "=", "text", "OR", ["#second", "=", "text", "AND", "#third", "=", "text"]], false);
+        const rootExp = parser({
+            fulltextTokens: [],
+            expressionTokens: ["#first", "=", "text", "OR", ["#second", "=", "text", "AND", "#third", "=", "text"]],
+            includingNoteContent: false
+        });
 
         expect(rootExp.constructor.name).toEqual("OrExp");
         const [firstSub, secondSub] = rootExp.subExpressions;
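All of the spec changes above make the same adjustment: parser() is now called with a single named-options object instead of three positional arguments, which keeps call sites readable and lets new options (such as the highlightedTokens collector introduced by this commit) be added without reshuffling parameters. A minimal sketch of the new call shape, reusing values from the specs above (the require path is copied from the spec's hunk header; adjust it to wherever your script lives):

    const parser = require('../src/services/search/parser');

    // New-style call: one options object instead of (fulltextTokens, expressionTokens, includingNoteContent).
    const rootExp = parser({
        fulltextTokens: ["hello"],
        expressionTokens: ["#mylabel", "=", "text"],
        includingNoteContent: false
    });

    // Per the "fulltext and simple label" spec above, this input yields an AndExp root.
    console.log(rootExp.constructor.name); // "AndExp"

The next hunks are from the search parser module itself.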
@@ -7,7 +7,9 @@ const NoteCacheFulltextExp = require('./expressions/note_cache_fulltext');
 const NoteContentFulltextExp = require('./expressions/note_content_fulltext');
 const comparatorBuilder = require('./comparator_builder');
 
-function getFulltext(tokens, includingNoteContent) {
+function getFulltext(tokens, includingNoteContent, highlightedTokens) {
+    highlightedTokens.push(...tokens);
+
     if (tokens.length === 0) {
         return null;
     }
@@ -26,7 +28,7 @@ function isOperator(str) {
     return str.match(/^[=<>*]+$/);
 }
 
-function getExpression(tokens) {
+function getExpression(tokens, highlightedTokens) {
     if (tokens.length === 0) {
         return null;
     }
@@ -42,15 +44,19 @@ function getExpression(tokens) {
         }
 
         if (Array.isArray(token)) {
-            expressions.push(getExpression(token));
+            expressions.push(getExpression(token, highlightedTokens));
         }
         else if (token.startsWith('#') || token.startsWith('@')) {
             const type = token.startsWith('#') ? 'label' : 'relation';
 
+            highlightedTokens.push(token.substr(1));
+
             if (i < tokens.length - 2 && isOperator(tokens[i + 1])) {
                 const operator = tokens[i + 1];
                 const comparedValue = tokens[i + 2];
 
+                highlightedTokens.push(comparedValue);
+
                 const comparator = comparatorBuilder(operator, comparedValue);
 
                 if (!comparator) {
@@ -93,10 +99,12 @@ function getExpression(tokens) {
     }
 }
 
-function parse(fulltextTokens, expressionTokens, includingNoteContent) {
+function parse({fulltextTokens, expressionTokens, includingNoteContent, highlightedTokens}) {
+    highlightedTokens = highlightedTokens || [];
+
     return AndExp.of([
-        getFulltext(fulltextTokens, includingNoteContent),
-        getExpression(expressionTokens)
+        getFulltext(fulltextTokens, includingNoteContent, highlightedTokens),
+        getExpression(expressionTokens, highlightedTokens)
     ]);
 }
 
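The parser hunks above thread a highlightedTokens array through getFulltext() and getExpression() so that, as a side effect of parsing, it collects every fulltext token, each label/relation name with its '#'/'@' prefix stripped, and each value a label is compared against. A hedged usage sketch of what a caller would observe (the require path is assumed for illustration; the diff does not show the file name):

    const parser = require('./src/services/search/parser'); // path assumed for illustration

    const highlightedTokens = [];

    parser({
        fulltextTokens: ["hello"],
        expressionTokens: ["#mylabel", "=", "text"],
        includingNoteContent: false,
        highlightedTokens
    });

    // Based on the pushes added above, the collector should now hold the fulltext token,
    // the label name without '#', and the compared value:
    console.log(highlightedTokens); // expected: ["hello", "mylabel", "text"]

The remaining hunks are from the search service, which now passes this collected array to highlightSearchResults() instead of re-tokenizing the raw query string.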
@@ -42,10 +42,16 @@ async function findNotesWithExpression(expression) {
     return searchResults;
 }
 
-function parseQueryToExpression(query) {
+function parseQueryToExpression(query, highlightedTokens) {
     const {fulltextTokens, expressionTokens} = lexer(query);
     const structuredExpressionTokens = parens(expressionTokens);
-    const expression = parser(fulltextTokens, structuredExpressionTokens, false);
+
+    const expression = parser({
+        fulltextTokens,
+        expressionTokens: structuredExpressionTokens,
+        includingNoteContent: false,
+        highlightedTokens
+    });
 
     return expression;
 }
@@ -55,7 +61,9 @@ async function searchNotesForAutocomplete(query) {
         return [];
     }
 
-    const expression = parseQueryToExpression(query);
+    const highlightedTokens = [];
+
+    const expression = parseQueryToExpression(query, highlightedTokens);
 
     if (!expression) {
         return [];
@@ -65,7 +73,7 @@ async function searchNotesForAutocomplete(query) {
 
     searchResults = searchResults.slice(0, 200);
 
-    highlightSearchResults(searchResults, query);
+    highlightSearchResults(searchResults, highlightedTokens);
 
     return searchResults.map(result => {
         return {
@@ -76,20 +84,14 @@ async function searchNotesForAutocomplete(query) {
     });
 }
 
-function highlightSearchResults(searchResults, query) {
-    let tokens = query
-        .trim() // necessary because even with .split() trailing spaces are tokens which causes havoc
-        .toLowerCase()
-        .split(/[ -]/)
-        .filter(token => token !== '/');
-
+function highlightSearchResults(searchResults, highlightedTokens) {
     // we remove < signs because they can cause trouble in matching and overwriting existing highlighted chunks
     // which would make the resulting HTML string invalid.
     // { and } are used for marking <b> and </b> tag (to avoid matches on single 'b' character)
-    tokens = tokens.map(token => token.replace('/[<\{\}]/g', ''));
+    highlightedTokens = highlightedTokens.map(token => token.replace('/[<\{\}]/g', ''));
 
     // sort by the longest so we first highlight longest matches
-    tokens.sort((a, b) => a.length > b.length ? -1 : 1);
+    highlightedTokens.sort((a, b) => a.length > b.length ? -1 : 1);
 
     for (const result of searchResults) {
         const note = noteCache.notes[result.noteId];
@@ -97,13 +99,13 @@ function highlightSearchResults(searchResults, query) {
         result.highlightedNotePathTitle = result.notePathTitle;
 
         for (const attr of note.attributes) {
-            if (tokens.find(token => attr.name.includes(token) || attr.value.includes(token))) {
+            if (highlightedTokens.find(token => attr.name.includes(token) || attr.value.includes(token))) {
                 result.highlightedNotePathTitle += ` <small>${formatAttribute(attr)}</small>`;
             }
         }
     }
 
-    for (const token of tokens) {
+    for (const token of highlightedTokens) {
        const tokenRegex = new RegExp("(" + utils.escapeRegExp(token) + ")", "gi");
 
        for (const result of searchResults) {
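With highlightSearchResults() now receiving the parser-collected tokens instead of a naive split of the raw query, the highlighting step reduces to escaping each token, matching it case-insensitively, and wrapping matches in markers. The marker-to-<b> conversion itself is not shown in the hunks above, so the following is only an illustrative sketch of that approach using the '{'/'}' placeholder trick described in the comments (escapeRegExp and highlightTitle are hypothetical stand-ins, not the project's functions):

    // Illustrative sketch only: highlight each collected token in a title string,
    // using {...} as placeholder markers for <b>...</b>, as the comments above describe.
    function escapeRegExp(str) {
        return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    }

    function highlightTitle(title, highlightedTokens) {
        // longest tokens first, so a shorter token does not break up an already-marked longer match
        const tokens = [...highlightedTokens].sort((a, b) => b.length - a.length);

        let highlighted = title;

        for (const token of tokens) {
            const tokenRegex = new RegExp("(" + escapeRegExp(token) + ")", "gi");
            highlighted = highlighted.replace(tokenRegex, "{$1}");
        }

        // convert the markers to real tags only at the end, so later regexes
        // cannot match inside the inserted <b> tags themselves
        return highlighted.replace(/{/g, "<b>").replace(/}/g, "</b>");
    }

    // e.g. highlightTitle("hello world", ["hello"]) === "<b>hello</b> world"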