Merge branch 'master' of github.com:redis/node-redis
@@ -434,6 +434,26 @@ describe('AGGREGATE', () => {
);
});
});

it('with PARAMS', () => {
assert.deepEqual(
transformArguments('index', '*', {
PARAMS: {
param: 'value'
}
}),
['FT.AGGREGATE', 'index', '*', 'PARAMS', '2', 'param', 'value']
);
});

it('with DIALECT', () => {
assert.deepEqual(
transformArguments('index', '*', {
DIALECT: 1
}),
['FT.AGGREGATE', 'index', '*', 'DIALECT', '1']
);
});
});

testUtils.testWithClient('client.ft.aggregate', async client => {

@@ -1,6 +1,6 @@
import { RedisCommandArgument, RedisCommandArguments } from '@node-redis/client/dist/lib/commands';
import { pushVerdictArgument, transformTuplesReply } from '@node-redis/client/dist/lib/commands/generic-transformers';
import { PropertyName, pushArgumentsWithLength, pushSortByArguments, SortByProperty } from '.';
import { Params, PropertyName, pushArgumentsWithLength, pushParamsArgs, pushSortByArguments, SortByProperty } from '.';

export enum AggregateSteps {
GROUPBY = 'GROUPBY',
@@ -122,6 +122,8 @@ export interface AggregateOptions {
VERBATIM?: true;
LOAD?: LoadField | Array<LoadField>;
STEPS?: Array<GroupByStep | SortStep | ApplyStep | LimitStep | FilterStep>;
PARAMS?: Params;
DIALECT?: number;
}

export function transformArguments(
@@ -129,17 +131,16 @@ export function transformArguments(
query: string,
options?: AggregateOptions
): RedisCommandArguments {

const args = ['FT.AGGREGATE', index, query];
pushAggregatehOptions(args, options);
return args;
return pushAggregatehOptions(
['FT.AGGREGATE', index, query],
options
);
}

export function pushAggregatehOptions(
args: RedisCommandArguments,
options?: AggregateOptions
): RedisCommandArguments {

if (options?.VERBATIM) {
args.push('VERBATIM');
}
@@ -202,6 +203,12 @@ export function pushAggregatehOptions(
}
}

pushParamsArgs(args, options?.PARAMS);

if (options?.DIALECT) {
args.push('DIALECT', options.DIALECT.toString());
}

return args;
}

@@ -257,7 +264,6 @@ function pushGroupByReducer(args: RedisCommandArguments, reducer: GroupByReducer
}
}
});

break;
}

@@ -1,7 +1,7 @@
import { strict as assert } from 'assert';
import testUtils, { GLOBAL } from '../test-utils';
import { transformArguments } from './CREATE';
import { SchemaFieldTypes, SchemaTextFieldPhonetics, RedisSearchLanguages } from '.';
import { SchemaFieldTypes, SchemaTextFieldPhonetics, RedisSearchLanguages, VectorAlgorithms } from '.';

describe('CREATE', () => {
describe('transformArguments', () => {
@@ -126,6 +126,52 @@ describe('CREATE', () => {
});
});

describe('VECTOR', () => {
it('Flat algorithm', () => {
assert.deepEqual(
transformArguments('index', {
field: {
type: SchemaFieldTypes.VECTOR,
ALGORITHM: VectorAlgorithms.FLAT,
TYPE: 'FLOAT32',
DIM: 2,
DISTANCE_METRIC: 'L2',
INITIAL_CAP: 1000000,
BLOCK_SIZE: 1000
}
}),
[
'FT.CREATE', 'index', 'SCHEMA', 'field', 'VECTOR', 'FLAT', '10', 'TYPE',
'FLOAT32', 'DIM', '2', 'DISTANCE_METRIC', 'L2', 'INITIAL_CAP', '1000000',
'BLOCK_SIZE', '1000'
]
);
});

it('HNSW algorithm', () => {
assert.deepEqual(
transformArguments('index', {
field: {
type: SchemaFieldTypes.VECTOR,
ALGORITHM: VectorAlgorithms.HNSW,
TYPE: 'FLOAT32',
DIM: 2,
DISTANCE_METRIC: 'L2',
INITIAL_CAP: 1000000,
M: 40,
EF_CONSTRUCTION: 250,
EF_RUNTIME: 20
}
}),
[
'FT.CREATE', 'index', 'SCHEMA', 'field', 'VECTOR', 'HNSW', '14', 'TYPE',
'FLOAT32', 'DIM', '2', 'DISTANCE_METRIC', 'L2', 'INITIAL_CAP', '1000000',
'M', '40', 'EF_CONSTRUCTION', '250', 'EF_RUNTIME', '20'
]
);
});
});

describe('with generic options', () => {
it('with AS', () => {
assert.deepEqual(

@@ -2,10 +2,32 @@ import { strict as assert } from 'assert';
import { transformArguments } from './EXPLAIN';

describe('EXPLAIN', () => {
it('transformArguments', () => {
assert.deepEqual(
transformArguments('index', '*'),
['FT.EXPLAIN', 'index', '*']
);
describe('transformArguments', () => {
it('simple', () => {
assert.deepEqual(
transformArguments('index', '*'),
['FT.EXPLAIN', 'index', '*']
);
});

it('with PARAMS', () => {
assert.deepEqual(
transformArguments('index', '*', {
PARAMS: {
param: 'value'
}
}),
['FT.EXPLAIN', 'index', '*', 'PARAMS', '2', 'param', 'value']
);
});

it('with DIALECT', () => {
assert.deepEqual(
transformArguments('index', '*', {
DIALECT: 1
}),
['FT.EXPLAIN', 'index', '*', 'DIALECT', '1']
);
});
});
});

@@ -1,7 +1,26 @@
import { Params, pushParamsArgs } from ".";

export const IS_READ_ONLY = true;

export function transformArguments(index: string, query: string): Array<string> {
return ['FT.EXPLAIN', index, query];
interface ExplainOptions {
PARAMS?: Params;
DIALECT?: number;
}

export function transformArguments(
index: string,
query: string,
options?: ExplainOptions
): Array<string> {
const args = ['FT.EXPLAIN', index, query];

pushParamsArgs(args, options?.PARAMS);

if (options?.DIALECT) {
args.push('DIALECT', options.DIALECT.toString());
}

return args;
}

export declare function transformReply(): string;

@@ -15,32 +15,56 @@ describe('INFO', () => {
await client.ft.create('index', {
field: SchemaFieldTypes.TEXT
});

assert.deepEqual(
await client.ft.info('index'),
{
indexName: 'index',
indexOptions: [],
indexDefinition: {
defaultScore: '1',
keyType: 'HASH',
prefixes: ['']
},
attributes: [[
'identifier',
'field',
'attribute',
'field',
'type',
'TEXT',
'WEIGHT',
'1'
]],
indexDefinition: Object.create(null, {
default_score: {
value: '1',
configurable: true,
enumerable: true
},
key_type: {
value: 'HASH',
configurable: true,
enumerable: true
},
prefixes: {
value: [''],
configurable: true,
enumerable: true
}
}),
attributes: [Object.create(null, {
identifier: {
value: 'field',
configurable: true,
enumerable: true
},
attribute: {
value: 'field',
configurable: true,
enumerable: true
},
type: {
value: 'TEXT',
configurable: true,
enumerable: true
},
WEIGHT: {
value: '1',
configurable: true,
enumerable: true
}
})],
numDocs: '0',
maxDocId: '0',
numTerms: '0',
numRecords: '0',
invertedSzMb: '0',
vectorIndexSzMb: '0',
totalInvertedIndexBlocks: '0',
offsetVectorsSzMb: '0',
docTableSizeMb: '0',
@@ -67,7 +91,8 @@ describe('INFO', () => {
globalTotal: 0,
indexCapacity: 128,
idnexTotal: 0
}
},
stopWords: undefined
}
);
}, GLOBAL.SERVERS.OPEN);

@@ -1,121 +1,118 @@
import { RedisCommandArgument } from '@node-redis/client/dist/lib/commands';
import { transformTuplesReply } from '@node-redis/client/dist/lib/commands/generic-transformers';

export function transformArguments(index: string): Array<string> {
return ['FT.INFO', index];
}

type InfoRawReply = [
_: string,
indexName: string,
_: string,
indexOptions: Array<string>,
_: string,
indexDefinition: [
_: string,
keyType: string,
_: string,
prefixes: Array<string>,
_: string,
defaultScore: string
'index_name',
RedisCommandArgument,
'index_options',
Array<RedisCommandArgument>,
'index_definition',
Array<RedisCommandArgument>,
'attributes',
Array<Array<RedisCommandArgument>>,
'num_docs',
RedisCommandArgument,
'max_doc_id',
RedisCommandArgument,
'num_terms',
RedisCommandArgument,
'num_records',
RedisCommandArgument,
'inverted_sz_mb',
RedisCommandArgument,
'vector_index_sz_mb',
RedisCommandArgument,
'total_inverted_index_blocks',
RedisCommandArgument,
'offset_vectors_sz_mb',
RedisCommandArgument,
'doc_table_size_mb',
RedisCommandArgument,
'sortable_values_size_mb',
RedisCommandArgument,
'key_table_size_mb',
RedisCommandArgument,
'records_per_doc_avg',
RedisCommandArgument,
'bytes_per_record_avg',
RedisCommandArgument,
'offsets_per_term_avg',
RedisCommandArgument,
'offset_bits_per_record_avg',
RedisCommandArgument,
'hash_indexing_failures',
RedisCommandArgument,
'indexing',
RedisCommandArgument,
'percent_indexed',
RedisCommandArgument,
'gc_stats',
[
'bytes_collected',
RedisCommandArgument,
'total_ms_run',
RedisCommandArgument,
'total_cycles',
RedisCommandArgument,
'average_cycle_time_ms',
RedisCommandArgument,
'last_run_time_ms',
RedisCommandArgument,
'gc_numeric_trees_missed',
RedisCommandArgument,
'gc_blocks_denied',
RedisCommandArgument
],
_: string,
attributes: Array<Array<string>>,
_: string,
numDocs: string,
_: string,
maxDocId: string,
_: string,
numTerms: string,
_: string,
numRecords: string,
_: string,
invertedSzMb: string,
_: string,
totalInvertedIndexBlocks: string,
_: string,
offsetVectorsSzMb: string,
_: string,
docTableSizeMb: string,
_: string,
sortableValuesSizeMb: string,
_: string,
keyTableSizeMb: string,
_: string,
recordsPerDocAvg: string,
_: string,
bytesPerRecordAvg: string,
_: string,
offsetsPerTermAvg: string,
_: string,
offsetBitsPerRecordAvg: string,
_: string,
hashIndexingFailures: string,
_: string,
indexing: string,
_: string,
percentIndexed: string,
_: string,
gcStats: [
_: string,
bytesCollected: string,
_: string,
totalMsRun: string,
_: string,
totalCycles: string,
_: string,
averageCycleTimeMs: string,
_: string,
lastRunTimeMs: string,
_: string,
gcNumericTreesMissed: string,
_: string,
gcBlocksDenied: string
'cursor_stats',
[
'global_idle',
number,
'global_total',
number,
'index_capacity',
number,
'index_total',
number
],
_: string,
cursorStats: [
_: string,
globalIdle: number,
_: string,
globalTotal: number,
_: string,
indexCapacity: number,
_: string,
idnexTotal: number
]
'stopwords_list'?,
Array<RedisCommandArgument>?
];

interface InfoReply {
indexName: string;
indexOptions: Array<string>;
indexDefinition: {
keyType: string;
prefixes: Array<string>;
defaultScore: string;
};
attributes: Array<Array<string>>;
numDocs: string;
maxDocId: string;
numTerms: string;
numRecords: string;
invertedSzMb: string;
totalInvertedIndexBlocks: string;
offsetVectorsSzMb: string;
docTableSizeMb: string;
sortableValuesSizeMb: string;
keyTableSizeMb: string;
recordsPerDocAvg: string;
bytesPerRecordAvg: string;
offsetsPerTermAvg: string;
offsetBitsPerRecordAvg: string;
hashIndexingFailures: string;
indexing: string;
percentIndexed: string;
indexName: RedisCommandArgument;
indexOptions: Array<RedisCommandArgument>;
indexDefinition: Record<string, RedisCommandArgument>;
attributes: Array<Record<string, RedisCommandArgument>>;
numDocs: RedisCommandArgument;
maxDocId: RedisCommandArgument;
numTerms: RedisCommandArgument;
numRecords: RedisCommandArgument;
invertedSzMb: RedisCommandArgument;
vectorIndexSzMb: RedisCommandArgument;
totalInvertedIndexBlocks: RedisCommandArgument;
offsetVectorsSzMb: RedisCommandArgument;
docTableSizeMb: RedisCommandArgument;
sortableValuesSizeMb: RedisCommandArgument;
keyTableSizeMb: RedisCommandArgument;
recordsPerDocAvg: RedisCommandArgument;
bytesPerRecordAvg: RedisCommandArgument;
offsetsPerTermAvg: RedisCommandArgument;
offsetBitsPerRecordAvg: RedisCommandArgument;
hashIndexingFailures: RedisCommandArgument;
indexing: RedisCommandArgument;
percentIndexed: RedisCommandArgument;
gcStats: {
bytesCollected: string;
totalMsRun: string;
totalCycles: string;
averageCycleTimeMs: string;
lastRunTimeMs: string;
gcNumericTreesMissed: string;
gcBlocksDenied: string;
bytesCollected: RedisCommandArgument;
totalMsRun: RedisCommandArgument;
totalCycles: RedisCommandArgument;
averageCycleTimeMs: RedisCommandArgument;
lastRunTimeMs: RedisCommandArgument;
gcNumericTreesMissed: RedisCommandArgument;
gcBlocksDenied: RedisCommandArgument;
};
cursorStats: {
globalIdle: number;
@@ -123,49 +120,49 @@ interface InfoReply {
indexCapacity: number;
idnexTotal: number;
};
stopWords: Array<RedisCommandArgument> | undefined;
}

export function transformReply(rawReply: InfoRawReply): InfoReply {
console.log(rawReply);
return {
indexName: rawReply[1],
indexOptions: rawReply[3],
indexDefinition: {
keyType: rawReply[5][1],
prefixes: rawReply[5][3],
defaultScore: rawReply[5][5]
},
attributes: rawReply[7],
indexDefinition: transformTuplesReply(rawReply[5]),
attributes: rawReply[7].map(attribute => transformTuplesReply(attribute)),
numDocs: rawReply[9],
maxDocId: rawReply[11],
numTerms: rawReply[13],
numRecords: rawReply[15],
invertedSzMb: rawReply[17],
totalInvertedIndexBlocks: rawReply[19],
offsetVectorsSzMb: rawReply[21],
docTableSizeMb: rawReply[23],
sortableValuesSizeMb: rawReply[25],
keyTableSizeMb: rawReply[27],
recordsPerDocAvg: rawReply[29],
bytesPerRecordAvg: rawReply[31],
offsetsPerTermAvg: rawReply[33],
offsetBitsPerRecordAvg: rawReply[35],
hashIndexingFailures: rawReply[37],
indexing: rawReply[39],
percentIndexed: rawReply[41],
vectorIndexSzMb: rawReply[19],
totalInvertedIndexBlocks: rawReply[21],
offsetVectorsSzMb: rawReply[23],
docTableSizeMb: rawReply[25],
sortableValuesSizeMb: rawReply[27],
keyTableSizeMb: rawReply[29],
recordsPerDocAvg: rawReply[31],
bytesPerRecordAvg: rawReply[33],
offsetsPerTermAvg: rawReply[35],
offsetBitsPerRecordAvg: rawReply[37],
hashIndexingFailures: rawReply[39],
indexing: rawReply[41],
percentIndexed: rawReply[43],
gcStats: {
bytesCollected: rawReply[43][1],
totalMsRun: rawReply[43][3],
totalCycles: rawReply[43][5],
averageCycleTimeMs: rawReply[43][7],
lastRunTimeMs: rawReply[43][9],
gcNumericTreesMissed: rawReply[43][11],
gcBlocksDenied: rawReply[43][13]
bytesCollected: rawReply[45][1],
totalMsRun: rawReply[45][3],
totalCycles: rawReply[45][5],
averageCycleTimeMs: rawReply[45][7],
lastRunTimeMs: rawReply[45][9],
gcNumericTreesMissed: rawReply[45][11],
gcBlocksDenied: rawReply[45][13]
},
cursorStats: {
globalIdle: rawReply[45][1],
globalTotal: rawReply[45][3],
indexCapacity: rawReply[45][5],
idnexTotal: rawReply[45][7]
}
globalIdle: rawReply[47][1],
globalTotal: rawReply[47][3],
indexCapacity: rawReply[47][5],
idnexTotal: rawReply[47][7]
},
stopWords: rawReply[49]
};
}

@@ -1,5 +1,6 @@
import { SearchOptions, SearchRawReply, transformReply as transformSearchReply } from './SEARCH';
import { pushSearchOptions, ProfileOptions, ProfileRawReply, ProfileReply, transformProfile } from '.';
import { RedisCommandArguments } from '@node-redis/client/dist/lib/commands';

export const IS_READ_ONLY = true;

@@ -7,7 +8,7 @@ export function transformArguments(
index: string,
query: string,
options?: ProfileOptions & SearchOptions
): Array<string> {
): RedisCommandArguments {
const args = ['FT.PROFILE', index, 'SEARCH'];

if (options?.LIMITED) {
@@ -15,8 +16,7 @@ export function transformArguments(
}

args.push('QUERY', query);
pushSearchOptions(args, options)
return args;
return pushSearchOptions(args, options);
}

type ProfileSearchRawReply = ProfileRawReply<SearchRawReply>;

@@ -213,31 +213,98 @@ describe('SEARCH', () => {
['FT.SEARCH', 'index', 'query', 'LIMIT', '0', '1']
);
});

it('with PARAMS', () => {
assert.deepEqual(
transformArguments('index', 'query', {
PARAMS: {
param: 'value'
}
}),
['FT.SEARCH', 'index', 'query', 'PARAMS', '2', 'param', 'value']
);
});

it('with DIALECT', () => {
assert.deepEqual(
transformArguments('index', 'query', {
DIALECT: 1
}),
['FT.SEARCH', 'index', 'query', 'DIALECT', '1']
);
});
});

testUtils.testWithClient('client.ft.search', async client => {
await Promise.all([
client.ft.create('index', {
field: SchemaFieldTypes.NUMERIC
}),
client.hSet('1', 'field', '1')
]);
describe('client.ft.search', () => {
testUtils.testWithClient('DIALECT 1', async client => {
await Promise.all([
client.ft.create('index', {
field: SchemaFieldTypes.NUMERIC
}),
client.hSet('1', 'field', '1')
]);

assert.deepEqual(
await client.ft.search('index', '*'),
{
total: 1,
documents: [{
id: '1',
value: Object.create(null, {
field: {
value: '1',
configurable: true,
enumerable: true
}
})
}]
}
);
}, GLOBAL.SERVERS.OPEN);
assert.deepEqual(
await client.ft.search('index', '*', {
DIALECT: 1
}),
{
total: 1,
documents: [{
id: '1',
value: Object.create(null, {
field: {
value: '1',
configurable: true,
enumerable: true
}
})
}]
}
);
}, GLOBAL.SERVERS.OPEN);

testUtils.testWithClient('DIALECT 2', async client => {
await Promise.all([
client.ft.create('index', {
field: SchemaFieldTypes.NUMERIC
}),
client.hSet('1', 'field', '1'),
client.hSet('2', 'field', '2'),
client.hSet('3', 'field', '3')
]);

assert.deepEqual(
await client.ft.search('index', '@field:[$min $max]', {
PARAMS: {
min: 1,
max: 2
},
DIALECT: 2
}),
{
total: 2,
documents: [{
id: '1',
value: Object.create(null, {
field: {
value: '1',
configurable: true,
enumerable: true
}
})
}, {
id: '2',
value: Object.create(null, {
field: {
value: '2',
configurable: true,
enumerable: true
}
})
}]
}
);
}, GLOBAL.SERVERS.OPEN);
});
});

@@ -1,6 +1,6 @@
import { RedisCommandArguments } from '@node-redis/client/dist/lib/commands';
import { transformTuplesReply } from '@node-redis/client/dist/lib/commands/generic-transformers';
import { pushSearchOptions, RedisSearchLanguages, PropertyName, SortByProperty, SearchReply } from '.';
import { pushSearchOptions, RedisSearchLanguages, Params, PropertyName, SortByProperty, SearchReply } from '.';

export const FIRST_KEY_INDEX = 1;

@@ -54,6 +54,8 @@ export interface SearchOptions {
from: number | string;
size: number | string;
};
PARAMS?: Params;
DIALECT?: number;
}

export function transformArguments(
@@ -61,9 +63,10 @@ export function transformArguments(
query: string,
options?: SearchOptions
): RedisCommandArguments {
const args: RedisCommandArguments = ['FT.SEARCH', index, query];
pushSearchOptions(args, options);
return args;
return pushSearchOptions(
['FT.SEARCH', index, query],
options
);
}

export type SearchRawReply = Array<any>;

@@ -47,6 +47,15 @@ describe('SPELLCHECK', () => {
);
});
});

it('with DIALECT', () => {
assert.deepEqual(
transformArguments('index', 'query', {
DIALECT: 1
}),
['FT.SPELLCHECK', 'index', 'query', 'DIALECT', '1']
);
});
});

testUtils.testWithClient('client.ft.spellCheck', async client => {

@@ -6,6 +6,7 @@ interface SpellCheckTerms {
interface SpellCheckOptions {
DISTANCE?: number;
TERMS?: SpellCheckTerms | Array<SpellCheckTerms>;
DIALECT?: number;
}

export function transformArguments(index: string, query: string, options?: SpellCheckOptions): Array<string> {
@@ -25,6 +26,10 @@ export function transformArguments(index: string, query: string, options?: Spell
}
}

if (options?.DIALECT) {
args.push('DIALECT', options.DIALECT.toString());
}

return args;
}

@@ -28,7 +28,7 @@ import * as SUGLEN from './SUGLEN';
import * as SYNDUMP from './SYNDUMP';
import * as SYNUPDATE from './SYNUPDATE';
import * as TAGVALS from './TAGVALS';
import { RedisCommandArguments } from '@node-redis/client/dist/lib/commands';
import { RedisCommandArgument, RedisCommandArguments } from '@node-redis/client/dist/lib/commands';
import { pushOptionalVerdictArgument, pushVerdictArgument } from '@node-redis/client/dist/lib/commands/generic-transformers';
import { SearchOptions } from './SEARCH';

@@ -172,16 +172,29 @@ export enum SchemaFieldTypes {
TEXT = 'TEXT',
NUMERIC = 'NUMERIC',
GEO = 'GEO',
TAG = 'TAG'
TAG = 'TAG',
VECTOR = 'VECTOR'
}

type CreateSchemaField<T extends SchemaFieldTypes, E = Record<keyof any, any>> = T | ({
type CreateSchemaField<
T extends SchemaFieldTypes,
E = Record<keyof any, any>
> = T | ({
type: T;
AS?: string;
SORTABLE?: true | 'UNF';
NOINDEX?: true;
} & E);

type CreateSchemaCommonField<
T extends SchemaFieldTypes,
E = Record<string, never>
> = CreateSchemaField<
T,
({
SORTABLE?: true | 'UNF';
NOINDEX?: true;
} & E)
>;

export enum SchemaTextFieldPhonetics {
DM_EN = 'dm:en',
DM_FR = 'dm:fr',
@@ -189,27 +202,55 @@ export enum SchemaTextFieldPhonetics {
DM_ES = 'dm:es'
}

type CreateSchemaTextField = CreateSchemaField<SchemaFieldTypes.TEXT, {
type CreateSchemaTextField = CreateSchemaCommonField<SchemaFieldTypes.TEXT, {
NOSTEM?: true;
WEIGHT?: number;
PHONETIC?: SchemaTextFieldPhonetics;
}>;

type CreateSchemaNumericField = CreateSchemaField<SchemaFieldTypes.NUMERIC>;
type CreateSchemaNumericField = CreateSchemaCommonField<SchemaFieldTypes.NUMERIC>;

type CreateSchemaGeoField = CreateSchemaField<SchemaFieldTypes.GEO>;
type CreateSchemaGeoField = CreateSchemaCommonField<SchemaFieldTypes.GEO>;

type CreateSchemaTagField = CreateSchemaField<SchemaFieldTypes.TAG, {
type CreateSchemaTagField = CreateSchemaCommonField<SchemaFieldTypes.TAG, {
SEPARATOR?: string;
CASESENSITIVE?: true;
}>;

export enum VectorAlgorithms {
FLAT = 'FLAT',
HNSW = 'HNSW'
}

type CreateSchemaVectorField<
T extends VectorAlgorithms,
A extends Record<string, unknown>
> = CreateSchemaField<SchemaFieldTypes.VECTOR, {
ALGORITHM: T;
TYPE: string;
DIM: number;
DISTANCE_METRIC: 'L2' | 'IP' | 'COSINE';
INITIAL_CAP?: number;
} & A>;

type CreateSchemaFlatVectorField = CreateSchemaVectorField<VectorAlgorithms.FLAT, {
BLOCK_SIZE?: number;
}>;

type CreateSchemaHNSWVectorField = CreateSchemaVectorField<VectorAlgorithms.HNSW, {
M?: number;
EF_CONSTRUCTION?: number;
EF_RUNTIME?: number;
}>;

export interface RediSearchSchema {
[field: string]:
CreateSchemaTextField |
CreateSchemaNumericField |
CreateSchemaGeoField |
CreateSchemaTagField;
CreateSchemaTagField |
CreateSchemaFlatVectorField |
CreateSchemaHNSWVectorField;
}

export function pushSchema(args: RedisCommandArguments, schema: RediSearchSchema) {
@@ -257,6 +298,47 @@ export function pushSchema(args: RedisCommandArguments, schema: RediSearchSchema
}

break;

case SchemaFieldTypes.VECTOR:
args.push(fieldOptions.ALGORITHM);

pushArgumentsWithLength(args, () => {
args.push(
'TYPE', fieldOptions.TYPE,
'DIM', fieldOptions.DIM.toString(),
'DISTANCE_METRIC', fieldOptions.DISTANCE_METRIC
);

if (fieldOptions.INITIAL_CAP) {
args.push('INITIAL_CAP', fieldOptions.INITIAL_CAP.toString());
}

switch (fieldOptions.ALGORITHM) {
case VectorAlgorithms.FLAT:
if (fieldOptions.BLOCK_SIZE) {
args.push('BLOCK_SIZE', fieldOptions.BLOCK_SIZE.toString());
}

break;

case VectorAlgorithms.HNSW:
if (fieldOptions.M) {
args.push('M', fieldOptions.M.toString());
}

if (fieldOptions.EF_CONSTRUCTION) {
args.push('EF_CONSTRUCTION', fieldOptions.EF_CONSTRUCTION.toString());
}

if (fieldOptions.EF_RUNTIME) {
args.push('EF_RUNTIME', fieldOptions.EF_RUNTIME.toString());
}

break;
}
});

continue; // vector fields do not contain SORTABLE and NOINDEX options
}

if (fieldOptions.SORTABLE) {
@@ -273,11 +355,27 @@ export function pushSchema(args: RedisCommandArguments, schema: RediSearchSchema
}
}

export type Params = Record<string, RedisCommandArgument | number>;

export function pushParamsArgs(
args: RedisCommandArguments,
params?: Params
): RedisCommandArguments {
if (params) {
const enrties = Object.entries(params);
args.push('PARAMS', (enrties.length * 2).toString());
for (const [key, value] of enrties) {
args.push(key, value.toString());
}
}

return args;
}

export function pushSearchOptions(
args: RedisCommandArguments,
options?: SearchOptions
): RedisCommandArguments {

if (options?.VERBATIM) {
args.push('VERBATIM');
}
@@ -381,6 +479,16 @@ export function pushSearchOptions(
);
}

if (options?.PARAMS) {
pushParamsArgs(args, options.PARAMS);
}

if (options?.DIALECT) {
args.push('DIALECT', options.DIALECT.toString());
}

console.log('!@#', args);

return args;
}

@@ -1,5 +1,5 @@
export { default } from './commands';

export { RediSearchSchema, SchemaFieldTypes, SchemaTextFieldPhonetics, SearchReply } from './commands';
export { RediSearchSchema, SchemaFieldTypes, SchemaTextFieldPhonetics, SearchReply, VectorAlgorithms } from './commands';
export { AggregateSteps, AggregateGroupByReducers } from './commands/AGGREGATE';
export { SearchOptions } from './commands/SEARCH';
export { SearchOptions } from './commands/SEARCH';

@@ -4,7 +4,7 @@ import RediSearch from '.';
export default new TestUtils({
dockerImageName: 'redislabs/redisearch',
dockerImageVersionArgument: 'redisearch-version',
defaultDockerVersion: '2.2.7'
defaultDockerVersion: '2.4.3'
});

export const GLOBAL = {
Block a user