import StringQueryParser from 'information-retrieval-boilerplate/node-information-retrieval-boilerplate/src/query/StringQueryParser.js'
StringQueryParser
Extends: QueryParser
Helper functionality for parsing queries represented in different formats into the one this package supports.
Constructor Summary
Public Constructor | ||
public |
|
Method Summary
Public Methods | ||
public |
classifyChar(char: *): string |
|
public |
* lemmatise(string: *) |
|
public |
* lex(string: *) |
|
public |
|
|
public |
parseJoinAllAndOrTokens(tokens: *) |
|
public |
parseJoinAllExactMatchTokens(tokens: *) |
|
public |
parseJoinAllFieldTokens(tokens: *) |
|
public |
parseJoinAllNotTokens(tokens: *) |
|
public |
parseJoinAllRegularTokens(tokens: *) |
|
public |
* tokenise(string: *) Probably the smallest tokeniser ever written |
|
public |
treeToFilters(fresh: *, config: *, tree: *): {"filter": *, "field": *, "values": *} |
Inherited Summary
From class QueryParser | ||
public |
|
|
public |
defaultField: * |
|
public |
|
|
public |
defaultSort: * |
|
public |
|
|
public |
fields: * |
|
public |
sort: * |
|
public |
getDefault(): {"filter": *, "sort": *, "order": *} |
Public Constructors
Public Methods
public * lemmatise(string: *) source
Params:
Name | Type | Attribute | Description |
string | * |
public * lex(string: *) source
Params:
Name | Type | Attribute | Description |
string | * |
public parse(query: *, maxTokns: number): * source
Params:
Name | Type | Attribute | Description |
query | * | ||
maxTokns | number |
|
Return:
* |
public parseJoinAllAndOrTokens(tokens: *) source
Params:
Name | Type | Attribute | Description |
tokens | * |
public parseJoinAllExactMatchTokens(tokens: *) source
Params:
Name | Type | Attribute | Description |
tokens | * |
public parseJoinAllFieldTokens(tokens: *) source
Params:
Name | Type | Attribute | Description |
tokens | * |
public parseJoinAllNotTokens(tokens: *) source
Params:
Name | Type | Attribute | Description |
tokens | * |
public parseJoinAllRegularTokens(tokens: *) source
Params:
Name | Type | Attribute | Description |
tokens | * |
public * tokenise(string: *) source
Probably the smallest tokeniser ever written
Params:
Name | Type | Attribute | Description |
string | * |
public treeToFilters(fresh: *, config: *, tree: *): {"filter": *, "field": *, "values": *} source
Params:
Name | Type | Attribute | Description |
fresh | * | ||
config | * | ||
tree | * |
Return:
{"filter": *, "field": *, "values": *} |