trilium/code/spec/search/lexer.spec.js

const lex = require('../../src/services/search/services/lex');
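
// Spec for the search query lexer: lex() splits a query string into fulltextTokens
// (plain search words) and expressionTokens (attribute names, operators and operands).
// Tokens are lowercased; expression tokens also carry inQuotes/startIndex/endIndex metadata.
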
describe("Lexer fulltext", () => {
it("simple lexing", () => {
expect(lex("hello world").fulltextTokens.map(t => t.token))
.toEqual(["hello", "world"]);
});
it("use quotes to keep words together", () => {
expect(lex("'hello world' my friend").fulltextTokens.map(t => t.token))
.toEqual(["hello world", "my", "friend"]);
expect(lex('"hello world" my friend').fulltextTokens.map(t => t.token))
.toEqual(["hello world", "my", "friend"]);
expect(lex('`hello world` my friend').fulltextTokens.map(t => t.token))
.toEqual(["hello world", "my", "friend"]);
});
it("you can use different quotes and other special characters inside quotes", () => {
expect(lex("'i can use \" or ` or #~=*' without problem").fulltextTokens.map(t => t.token))
.toEqual(["i can use \" or ` or #~=*", "without", "problem"]);
});
it("I can use backslash to escape quotes", () => {
expect(lex("hello \\\"world\\\"").fulltextTokens.map(t => t.token))
.toEqual(["hello", '"world"']);
expect(lex("hello \\\'world\\\'").fulltextTokens.map(t => t.token))
.toEqual(["hello", "'world'"]);
expect(lex("hello \\\`world\\\`").fulltextTokens.map(t => t.token))
.toEqual(["hello", '`world`']);
expect(lex('"hello \\\"world\\\"').fulltextTokens.map(t => t.token))
.toEqual(['hello "world"']);
expect(lex("'hello \\\'world\\\''").fulltextTokens.map(t => t.token))
.toEqual(["hello 'world'"]);
expect(lex("`hello \\\`world\\\``").fulltextTokens.map(t => t.token))
.toEqual(["hello `world`"]);
expect(lex("\\#token").fulltextTokens.map(t => t.token))
.toEqual(["#token"]);
});
it("quote inside a word does not have a special meaning", () => {
const lexResult = lex("d'Artagnan is dead #hero = d'Artagnan");
expect(lexResult.fulltextTokens.map(t => t.token))
.toEqual(["d'artagnan", "is", "dead"]);
expect(lexResult.expressionTokens.map(t => t.token))
.toEqual(['#hero', '=', "d'artagnan"]);
});
it("if quote is not ended then it's just one long token", () => {
expect(lex("'unfinished quote").fulltextTokens.map(t => t.token))
.toEqual(["unfinished quote"]);
});
it("parenthesis and symbols in fulltext section are just normal characters", () => {
expect(lex("what's u=p <b(r*t)h>").fulltextTokens.map(t => t.token))
.toEqual(["what's", "u=p", "<b(r*t)h>"]);
});
it("operator characters in expressions are separate tokens", () => {
expect(lex("# abc+=-def**-+d").expressionTokens.map(t => t.token))
.toEqual(["#", "abc", "+=-", "def", "**-+", "d"]);
});
it("escaping special characters", () => {
expect(lex("hello \\#\\~\\'").fulltextTokens.map(t => t.token))
.toEqual(["hello", "#~'"]);
});
});
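// In the expression part of a query, '#' introduces a label and '~' introduces a relation;
// quoted operands keep their spaces and are flagged with inQuotes: true.
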
describe("Lexer expression", () => {
it("simple attribute existence", () => {
expect(lex("#label ~relation").expressionTokens.map(t => t.token))
.toEqual(["#label", "~relation"]);
});
it("simple label operators", () => {
expect(lex("#label*=*text").expressionTokens.map(t => t.token))
.toEqual(["#label", "*=*", "text"]);
});
it("simple label operator with in quotes", () => {
expect(lex("#label*=*'text'").expressionTokens)
.toEqual([
{token: "#label", inQuotes: false, startIndex: 0, endIndex: 5},
{token: "*=*", inQuotes: false, startIndex: 6, endIndex: 8},
{token: "text", inQuotes: true, startIndex: 10, endIndex: 13}
]);
});
it("simple label operator with param without quotes", () => {
expect(lex("#label*=*text").expressionTokens)
.toEqual([
{token: "#label", inQuotes: false, startIndex: 0, endIndex: 5},
{token: "*=*", inQuotes: false, startIndex: 6, endIndex: 8},
{token: "text", inQuotes: false, startIndex: 9, endIndex: 12}
]);
});
it("simple label operator with empty string param", () => {
expect(lex("#label = ''").expressionTokens)
.toEqual([
{token: "#label", inQuotes: false, startIndex: 0, endIndex: 5},
{token: "=", inQuotes: false, startIndex: 7, endIndex: 7},
// weird case for empty strings which ends up with endIndex < startIndex :-(
{token: "", inQuotes: true, startIndex: 10, endIndex: 9}
]);
});
it("note. prefix also separates fulltext from expression", () => {
expect(lex(`hello fulltext note.labels.capital = Prague`).expressionTokens.map(t => t.token))
.toEqual(["note", ".", "labels", ".", "capital", "=", "prague"]);
});
it("note. prefix in quotes will note start expression", () => {
expect(lex(`hello fulltext "note.txt"`).expressionTokens.map(t => t.token))
.toEqual([]);
expect(lex(`hello fulltext "note.txt"`).fulltextTokens.map(t => t.token))
.toEqual(["hello", "fulltext", "note.txt"]);
});
it("complex expressions with and, or and parenthesis", () => {
expect(lex(`# (#label=text OR #second=text) AND ~relation`).expressionTokens.map(t => t.token))
.toEqual(["#", "(", "#label", "=", "text", "or", "#second", "=", "text", ")", "and", "~relation"]);
});
it("dot separated properties", () => {
expect(lex(`# ~author.title = 'Hugh Howey' AND note.'book title' = 'Silo'`).expressionTokens.map(t => t.token))
.toEqual(["#", "~author", ".", "title", "=", "hugh howey", "and", "note", ".", "book title", "=", "silo"]);
});
it("negation of label and relation", () => {
expect(lex(`#!capital ~!neighbor`).expressionTokens.map(t => t.token))
.toEqual(["#!capital", "~!neighbor"]);
});
it("negation of sub-expression", () => {
expect(lex(`# not(#capital) and note.noteId != "root"`).expressionTokens.map(t => t.token))
.toEqual(["#", "not", "(", "#capital", ")", "and", "note", ".", "noteid", "!=", "root"]);
});
});
describe("Lexer invalid queries and edge cases", () => {
it("concatenated attributes", () => {
expect(lex("#label~relation").expressionTokens.map(t => t.token))
.toEqual(["#label", "~relation"]);
});
it("trailing escape \\", () => {
expect(lex('abc \\').fulltextTokens.map(t => t.token))
.toEqual(["abc", "\\"]);
});
});