Mirror of https://github.com/zadam/trilium.git (synced 2025-03-01 14:22:32 +01:00)
server-ts: Port services/search/services/parse
This commit is contained in: parent 29b3fb3646, commit 15169289f0
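The port follows one recurring pattern throughout the hunks below: CommonJS `require`/`module.exports` pairs become TypeScript's CommonJS-compatible `import X = require(...)` and `export =` forms, values that may be absent gain `?`, and loosely typed parameters get explicit annotations. A minimal sketch of that style with purely illustrative names (not code from the repository):

// Illustrative module, not from trilium: the CommonJS-compatible
// TypeScript syntax this port standardizes on.
import assert = require("assert");   // was: const assert = require('assert');

class ExampleExp {
    private value: string;

    // optional parameter instead of an implicitly-undefined JS argument
    constructor(value: string, fallback?: string) {
        this.value = value || fallback || "";
    }

    check(): void {
        assert.ok(typeof this.value === "string");
    }
}

export = ExampleExp;   // was: module.exports = ExampleExp;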
@@ -1,5 +1,5 @@
 const SearchContext = require('../../src/services/search/search_context');
-const parse = require('../../src/services/search/services/parse.js');
+const parse = require('../../src/services/search/services/parse');
 
 function tokens(toks, cur = 0) {
     return toks.map(arg => {
@@ -11,9 +11,9 @@ class AncestorExp extends Expression {
     private ancestorNoteId: string;
     private ancestorDepthComparator;
 
-    ancestorDepth: string;
+    ancestorDepth?: string;
 
-    constructor(ancestorNoteId: string, ancestorDepth: string) {
+    constructor(ancestorNoteId: string, ancestorDepth?: string) {
         super();
 
         this.ancestorNoteId = ancestorNoteId;
@@ -51,7 +51,7 @@ class AncestorExp extends Expression {
         return depthConformingNoteSet;
     }
 
-    getComparator(depthCondition: string): ((depth: number) => boolean) | null {
+    getComparator(depthCondition?: string): ((depth: number) => boolean) | null {
         if (!depthCondition) {
             return null;
         }
@@ -74,4 +74,4 @@ class AncestorExp extends Expression {
     }
 }
 
-module.exports = AncestorExp;
+export = AncestorExp;
@@ -8,8 +8,8 @@ import TrueExp = require('./true');
 class AndExp extends Expression {
     private subExpressions: Expression[];
 
-    static of(subExpressions: Expression[]) {
-        subExpressions = subExpressions.filter(exp => !!exp);
+    static of(_subExpressions: (Expression | null | undefined)[]) {
+        const subExpressions = _subExpressions.filter((exp) => !!exp) as Expression[];
 
         if (subExpressions.length === 1) {
             return subExpressions[0];
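The `filter(...) as Expression[]` cast above is the quickest way to drop the nullable entries; an equivalent, assertion-free alternative is a user-defined type guard in the filter callback. A sketch with a local stand-in class, not the repository's code:

// Sketch only; this `Expression` is a stand-in, not the repo's class.
class Expression {}

function compact(subExpressions: (Expression | null | undefined)[]): Expression[] {
    // The `exp is Expression` predicate narrows the element type without a cast.
    return subExpressions.filter((exp): exp is Expression => exp != null);
}

// compact([new Expression(), null, undefined]) -> one element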
@@ -11,9 +11,9 @@ class AttributeExistsExp extends Expression {
     private attributeType: string;
     private attributeName: string;
     private isTemplateLabel: boolean;
-    private prefixMatch: string;
+    private prefixMatch: boolean;
 
-    constructor(attributeType: string, attributeName: string, prefixMatch: string) {
+    constructor(attributeType: string, attributeName: string, prefixMatch: boolean) {
         super();
 
         this.attributeType = attributeType;
@@ -26,8 +26,8 @@ function getRegex(str: string): RegExp {
 
 interface ConstructorOpts {
     tokens: string[];
-    raw: boolean;
-    flatText: boolean;
+    raw?: boolean;
+    flatText?: boolean;
 }
 
 type SearchRow = Pick<NoteRow, "noteId" | "type" | "mime" | "content" | "isProtected">;
@@ -5,14 +5,12 @@ import NoteSet = require("../note_set");
 import SearchContext = require("../search_context");
 import Expression = require("./expression");
 
-type Direction = "asc";
-
 interface ValueExtractor {
     extract: (note: BNote) => number | string | null;
 }
 
 interface OrderDefinition {
-    direction: Direction;
+    direction?: string;
     smaller: number;
     larger: number;
     valueExtractor: ValueExtractor;
@@ -22,9 +20,9 @@ class OrderByAndLimitExp extends Expression {
 
     private orderDefinitions: OrderDefinition[];
     private limit: number;
-    private subExpression: Expression | null;
+    subExpression: Expression | null;
 
-    constructor(orderDefinitions: Pick<OrderDefinition, "direction">[], limit: number) {
+    constructor(orderDefinitions: Pick<OrderDefinition, "direction" | "valueExtractor">[], limit?: number) {
         super();
 
         this.orderDefinitions = orderDefinitions as unknown as OrderDefinition[];
@@ -8,7 +8,7 @@ interface SearchParams {
     includeHiddenNotes?: boolean;
     ignoreHoistedNote?: boolean;
     ancestorNoteId?: string;
-    ancestorDepth?: number;
+    ancestorDepth?: string;
     orderBy?: string;
     orderDirection?: string;
     limit?: number;
@@ -23,7 +23,7 @@ class SearchContext {
     includeHiddenNotes: boolean;
     ignoreHoistedNote: boolean;
     ancestorNoteId?: string;
-    ancestorDepth?: number;
+    ancestorDepth?: string;
     orderBy?: string;
     orderDirection?: string;
     limit?: number;
@@ -1,11 +1,9 @@
-interface Token {
-    token: string;
-}
+import { TokenData } from "./types";
 
 /**
  * This will create a recursive object from a list of tokens - tokens between parenthesis are grouped in a single array
  */
-function handleParens(tokens: (Token | Token[])[]) {
+function handleParens(tokens: (TokenData | TokenData[])[]) {
     if (tokens.length === 0) {
         return [];
     }
@@ -45,7 +43,7 @@ function handleParens(tokens: (Token | Token[])[]) {
             ...tokens.slice(0, leftIdx),
             handleParens(tokens.slice(leftIdx + 1, rightIdx)),
             ...tokens.slice(rightIdx + 1)
-        ] as (Token | Token[])[];
+        ] as (TokenData | TokenData[])[];
     }
 }
 
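As the doc comment above says, `handleParens` turns a flat token list into a nested one, grouping everything between matching parentheses into a sub-array. An illustrative stand-alone sketch of that idea (not the repository's implementation), using the `TokenData` shape this commit introduces:

// Illustrative grouping sketch; the real handleParens differs in detail.
interface TokenData {
    token: string;
    inQuotes?: boolean;
    startIndex?: number;
    endIndex?: number;
}

type TokenTree = TokenData | TokenTree[];

function groupParens(tokens: TokenData[]): TokenTree[] {
    const root: TokenTree[] = [];
    const stack: TokenTree[][] = [root];

    for (const t of tokens) {
        if (t.token === "(" && !t.inQuotes) {
            // open a new group and descend into it
            const group: TokenTree[] = [];
            stack[stack.length - 1].push(group);
            stack.push(group);
        } else if (t.token === ")" && !t.inQuotes) {
            // close the current group (ignore unbalanced closers)
            if (stack.length > 1) {
                stack.pop();
            }
        } else {
            stack[stack.length - 1].push(t);
        }
    }

    return root;
}

// groupParens([{token: "#a"}, {token: "("}, {token: "#b"}, {token: ")"}])
//   => [ {token: "#a"}, [ {token: "#b"} ] ]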
@@ -1,9 +1,4 @@
-interface TokenData {
-    token: string;
-    inQuotes: boolean;
-    startIndex: number;
-    endIndex: number;
-}
+import { TokenData } from "./types";
 
 function lex(str: string) {
     str = str.toLowerCase();
@@ -1,28 +1,31 @@
 "use strict";
 
-const dayjs = require("dayjs");
-const AndExp = require('../expressions/and');
-const OrExp = require('../expressions/or');
-const NotExp = require('../expressions/not');
-const ChildOfExp = require('../expressions/child_of');
-const DescendantOfExp = require('../expressions/descendant_of');
-const ParentOfExp = require('../expressions/parent_of');
-const RelationWhereExp = require('../expressions/relation_where');
-const PropertyComparisonExp = require('../expressions/property_comparison');
-const AttributeExistsExp = require('../expressions/attribute_exists');
-const LabelComparisonExp = require('../expressions/label_comparison');
-const NoteFlatTextExp = require('../expressions/note_flat_text');
-const NoteContentFulltextExp = require('../expressions/note_content_fulltext');
-const OrderByAndLimitExp = require('../expressions/order_by_and_limit');
-const AncestorExp = require('../expressions/ancestor');
-const buildComparator = require('./build_comparator');
-const ValueExtractor = require('../value_extractor');
-const utils = require('../../utils');
-const TrueExp = require('../expressions/true');
-const IsHiddenExp = require('../expressions/is_hidden');
+import dayjs = require("dayjs");
+import AndExp = require('../expressions/and');
+import OrExp = require('../expressions/or');
+import NotExp = require('../expressions/not');
+import ChildOfExp = require('../expressions/child_of');
+import DescendantOfExp = require('../expressions/descendant_of');
+import ParentOfExp = require('../expressions/parent_of');
+import RelationWhereExp = require('../expressions/relation_where');
+import PropertyComparisonExp = require('../expressions/property_comparison');
+import AttributeExistsExp = require('../expressions/attribute_exists');
+import LabelComparisonExp = require('../expressions/label_comparison');
+import NoteFlatTextExp = require('../expressions/note_flat_text');
+import NoteContentFulltextExp = require('../expressions/note_content_fulltext');
+import OrderByAndLimitExp = require('../expressions/order_by_and_limit');
+import AncestorExp = require('../expressions/ancestor');
+import buildComparator = require('./build_comparator');
+import ValueExtractor = require('../value_extractor');
+import utils = require('../../utils');
+import TrueExp = require('../expressions/true');
+import IsHiddenExp = require('../expressions/is_hidden');
+import SearchContext = require("../search_context");
+import { TokenData } from "./types";
+import Expression = require("../expressions/expression");
 
-function getFulltext(tokens, searchContext) {
-    tokens = tokens.map(t => utils.removeDiacritic(t.token));
+function getFulltext(_tokens: TokenData[], searchContext: SearchContext) {
+    const tokens: string[] = _tokens.map(t => utils.removeDiacritic(t.token));
 
     searchContext.highlightedTokens.push(...tokens);
 
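`getFulltext` shows a small pattern the port repeats (compare `AndExp.of` above): a parameter cannot change its type through reassignment, so it is renamed with an underscore prefix and a new, differently typed local is derived from it. A stand-alone sketch with illustrative names:

// Illustrative only: the renamed parameter keeps its original type while the
// derived local gets the narrower one.
interface TokenLike { token: string; }

function toPlainTokens(_tokens: TokenLike[]): string[] {
    const tokens: string[] = _tokens.map(t => t.token.toLowerCase());

    return tokens;
}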
@@ -54,7 +57,7 @@ const OPERATORS = [
     "%="
 ];
 
-function isOperator(token) {
+function isOperator(token: TokenData) {
     if (Array.isArray(token)) {
         return false;
     }
@@ -62,20 +65,20 @@ function isOperator(token) {
     return OPERATORS.includes(token.token);
 }
 
-function getExpression(tokens, searchContext, level = 0) {
+function getExpression(tokens: TokenData[], searchContext: SearchContext, level = 0) {
     if (tokens.length === 0) {
         return null;
     }
 
-    const expressions = [];
-    let op = null;
+    const expressions: Expression[] = [];
+    let op: string | null = null;
 
-    let i;
+    let i: number;
 
-    function context(i) {
+    function context(i: number) {
         let {startIndex, endIndex} = tokens[i];
-        startIndex = Math.max(0, startIndex - 20);
-        endIndex = Math.min(searchContext.originalQuery.length, endIndex + 20);
+        startIndex = Math.max(0, (startIndex || 0) - 20);
+        endIndex = Math.min(searchContext.originalQuery.length, (endIndex || Number.MAX_SAFE_INTEGER) + 20);
 
         return `"${startIndex !== 0 ? "..." : ""}${searchContext.originalQuery.substr(startIndex, endIndex - startIndex)}${endIndex !== searchContext.originalQuery.length ? "..." : ""}"`;
     }
@@ -133,7 +136,7 @@ function getExpression(tokens, searchContext, level = 0) {
         return date.format(format);
     }
 
-    function parseNoteProperty() {
+    function parseNoteProperty(): Expression | undefined | null {
         if (tokens[i].token !== '.') {
             searchContext.addError('Expected "." to separate field path');
             return;
@@ -161,19 +164,25 @@ function getExpression(tokens, searchContext, level = 0) {
         if (tokens[i].token === 'parents') {
             i += 1;
 
-            return new ChildOfExp(parseNoteProperty());
+            const expression = parseNoteProperty();
+            if (!expression) { return; }
+            return new ChildOfExp(expression);
         }
 
         if (tokens[i].token === 'children') {
             i += 1;
 
-            return new ParentOfExp(parseNoteProperty());
+            const expression = parseNoteProperty();
+            if (!expression) { return; }
+            return new ParentOfExp(expression);
         }
 
         if (tokens[i].token === 'ancestors') {
             i += 1;
 
-            return new DescendantOfExp(parseNoteProperty());
+            const expression = parseNoteProperty();
+            if (!expression) { return; }
+            return new DescendantOfExp(expression);
         }
 
         if (tokens[i].token === 'labels') {
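This hunk applies the same guard three times: `parseNoteProperty()` is now typed as possibly returning `undefined`/`null` (it bails out after reporting a parse error), so each caller checks the result before wrapping it in another expression. A generic sketch of that pattern, with stand-in classes rather than the repository's:

// Stand-ins for the repo's expression classes, for illustration only.
class Expression {}
class ChildOfExp extends Expression {
    constructor(public subExpression: Expression) { super(); }
}

function wrapChildOf(parseInner: () => Expression | undefined | null): Expression | undefined {
    const expression = parseInner();

    if (!expression) {
        // the inner parser already reported the error; propagate "no expression"
        return;
    }

    return new ChildOfExp(expression);
}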
@@ -219,6 +228,10 @@ function getExpression(tokens, searchContext, level = 0) {
             i += 2;
 
             const comparedValue = resolveConstantOperand();
+            if (!comparedValue) {
+                searchContext.addError(`Unresolved constant operand.`);
+                return;
+            }
 
             return new PropertyComparisonExp(searchContext, propertyName, operator, comparedValue);
         }
@@ -226,7 +239,7 @@ function getExpression(tokens, searchContext, level = 0) {
         searchContext.addError(`Unrecognized note property "${tokens[i].token}" in ${context(i)}`);
     }
 
-    function parseAttribute(name) {
+    function parseAttribute(name: string) {
         const isLabel = name.startsWith('#');
 
         name = name.substr(1);
@@ -239,10 +252,10 @@ function getExpression(tokens, searchContext, level = 0) {
 
         const subExp = isLabel ? parseLabel(name) : parseRelation(name);
 
-        return isNegated ? new NotExp(subExp) : subExp;
+        return subExp && isNegated ? new NotExp(subExp) : subExp;
     }
 
-    function parseLabel(labelName) {
+    function parseLabel(labelName: string) {
         searchContext.highlightedTokens.push(labelName);
 
         if (i < tokens.length - 2 && isOperator(tokens[i + 1])) {
@@ -274,13 +287,15 @@ function getExpression(tokens, searchContext, level = 0) {
         }
     }
 
-    function parseRelation(relationName) {
+    function parseRelation(relationName: string) {
         searchContext.highlightedTokens.push(relationName);
 
         if (i < tokens.length - 2 && tokens[i + 1].token === '.') {
             i += 1;
 
-            return new RelationWhereExp(relationName, parseNoteProperty());
+            const expression = parseNoteProperty();
+            if (!expression) { return; }
+            return new RelationWhereExp(relationName, expression);
         }
         else if (i < tokens.length - 2 && isOperator(tokens[i + 1])) {
             searchContext.addError(`Relation can be compared only with property, e.g. ~relation.title=hello in ${context(i)}`);
@@ -293,7 +308,10 @@ function getExpression(tokens, searchContext, level = 0) {
     }
 
     function parseOrderByAndLimit() {
-        const orderDefinitions = [];
+        const orderDefinitions: {
+            valueExtractor: ValueExtractor,
+            direction: string
+        }[] = [];
         let limit;
 
         if (tokens[i].token === 'orderby') {
@@ -316,8 +334,9 @@ function getExpression(tokens, searchContext, level = 0) {
 
             const valueExtractor = new ValueExtractor(searchContext, propertyPath);
 
-            if (valueExtractor.validate()) {
-                searchContext.addError(valueExtractor.validate());
+            const validationError = valueExtractor.validate();
+            if (validationError) {
+                searchContext.addError(validationError);
             }
 
             orderDefinitions.push({
@@ -348,7 +367,10 @@ function getExpression(tokens, searchContext, level = 0) {
 
     for (i = 0; i < tokens.length; i++) {
         if (Array.isArray(tokens[i])) {
-            expressions.push(getExpression(tokens[i], searchContext, level++));
+            const expression = getExpression(tokens[i] as unknown as TokenData[], searchContext, level++);
+            if (expression) {
+                expressions.push(expression);
+            }
             continue;
         }
 
@@ -359,7 +381,10 @@ function getExpression(tokens, searchContext, level = 0) {
         }
 
         if (token.startsWith('#') || token.startsWith('~')) {
-            expressions.push(parseAttribute(token));
+            const attribute = parseAttribute(token);
+            if (attribute) {
+                expressions.push(attribute);
+            }
         }
         else if (['orderby', 'limit'].includes(token)) {
             if (level !== 0) {
@@ -384,12 +409,17 @@ function getExpression(tokens, searchContext, level = 0) {
                 continue;
             }
 
-            expressions.push(new NotExp(getExpression(tokens[i], searchContext, level++)));
+            const tokenArray = tokens[i] as unknown as TokenData[];
+            const expression = getExpression(tokenArray, searchContext, level++);
+            if (!expression) { return; }
+            expressions.push(new NotExp(expression));
         }
         else if (token === 'note') {
            i++;
 
-            expressions.push(parseNoteProperty());
+            const expression = parseNoteProperty();
+            if (!expression) { return; }
+            expressions.push(expression);
 
            continue;
        }
@@ -416,13 +446,17 @@ function getExpression(tokens, searchContext, level = 0) {
     return getAggregateExpression();
 }
 
-function parse({fulltextTokens, expressionTokens, searchContext}) {
-    let expression;
+function parse({fulltextTokens, expressionTokens, searchContext}: {
+    fulltextTokens: TokenData[],
+    expressionTokens: TokenData[],
+    searchContext: SearchContext
+}) {
+    let expression: Expression | undefined | null;
 
     try {
         expression = getExpression(expressionTokens, searchContext);
     }
-    catch (e) {
+    catch (e: any) {
         searchContext.addError(e.message);
 
         expression = new TrueExp();
@@ -443,13 +477,13 @@ function parse({fulltextTokens, expressionTokens, searchContext}) {
             direction: searchContext.orderDirection
         }], searchContext.limit);
 
-        exp.subExpression = filterExp;
+        (exp as any).subExpression = filterExp;
     }
 
     return exp;
 }
 
-function getAncestorExp({ancestorNoteId, ancestorDepth, includeHiddenNotes}) {
+function getAncestorExp({ancestorNoteId, ancestorDepth, includeHiddenNotes}: SearchContext) {
     if (ancestorNoteId && ancestorNoteId !== 'root') {
         return new AncestorExp(ancestorNoteId, ancestorDepth);
     } else if (!includeHiddenNotes) {
@@ -459,4 +493,4 @@ function getAncestorExp({ancestorNoteId, ancestorDepth, includeHiddenNotes}) {
     }
 }
 
-module.exports = parse;
+export = parse;
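With `export =` in place, the parser is consumed through the matching import-equals form. A usage sketch: only the argument shape comes from the new signature above; the relative paths, the `SearchContext` construction, and the literal tokens are illustrative assumptions (in the real pipeline the expression tokens would first pass through the lexer and `handleParens`, as wired up in the search service below).

// Illustrative usage; paths, constructor arguments and tokens are assumptions.
import parse = require('./parse');
import SearchContext = require('../search_context');

const searchContext = new SearchContext({});

const expression = parse({
    fulltextTokens: [{ token: "hello" }],
    expressionTokens: [{ token: "#book" }],
    searchContext
});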
@@ -3,7 +3,7 @@
 const normalizeString = require("normalize-strings");
 const lex = require('./lex');
 const handleParens = require('./handle_parens');
-const parse = require('./parse.js');
+const parse = require('./parse');
 const SearchResult = require('../search_result');
 const SearchContext = require('../search_context');
 const becca = require('../../../becca/becca');
src/services/search/services/types.ts (new file)
@@ -0,0 +1,6 @@
+export interface TokenData {
+    token: string;
+    inQuotes?: boolean;
+    startIndex?: number;
+    endIndex?: number;
+}
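The optional position fields mean consumers must guard before doing arithmetic on them, which is exactly what the `context(i)` change earlier in this diff does. A small stand-alone sketch; the excerpt helper itself is illustrative, not repository code:

// Illustrative consumer of the new TokenData shape.
import { TokenData } from "./types";

function excerpt(query: string, token: TokenData): string {
    // optional indices default to the start/end of the query
    const start = Math.max(0, (token.startIndex ?? 0) - 20);
    const end = Math.min(query.length, (token.endIndex ?? query.length) + 20);

    return query.substring(start, end);
}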
@@ -134,4 +134,4 @@ class ValueExtractor {
     }
 }
 
-module.exports = ValueExtractor;
+export = ValueExtractor;