2.4.2 cherry picks (#2633)
DvirDukhan committed Mar 8, 2022
1 parent a06b0df commit 0f9410d
Showing 16 changed files with 155 additions and 132 deletions.
14 changes: 7 additions & 7 deletions .circleci/config.yml
@@ -58,7 +58,7 @@ commands:
git submodule update --init deps/readies
./deps/readies/bin/getpy2
./deps/readies/bin/getpy3
- python2 -m pip install awscli
+ python2 -m pip install -q awscli
install-prerequisites:
parameters:
@@ -349,9 +349,9 @@ jobs:
xcode: 12.4.0
steps:
- early-returns
- - run:
- name: Brew upgrade
- command: brew upgrade
+ # - run:
+ # name: Brew upgrade
+ # command: brew upgrade
- run:
name: Set up workspace
command: mkdir -p ~/workspace
@@ -507,10 +507,10 @@ jobs:
- setup-automation
- run:
name: Run QA Automation
- command: ./tests/qa/qatests -t release -m "$CIRCLE_TAG"
+ command: ./tests/qa/qatests -m "$CIRCLE_TAG"
- run:
name: Run QA Automation (RediSearch Light)
- command: ./tests/qa/qatests -t release -m "$CIRCLE_TAG" --light
+ command: ./tests/qa/qatests -m "$CIRCLE_TAG" --light


#----------------------------------------------------------------------------------------------------------------------------------
@@ -593,7 +593,7 @@ workflows:
context: common
matrix:
parameters:
- platform: [focal, bionic, xenial, centos7, ol8, bullseye]
+ platform: [focal, bionic, xenial, centos7, rocky8, bullseye]
- build-arm-platforms:
<<: *on-integ-and-version-tags
context: common
12 changes: 6 additions & 6 deletions coord/src/module.c
@@ -1012,7 +1012,7 @@ static void knnPostProcess(searchReducerCtx *rCtx) {
static void sendSearchResults(RedisModuleCtx *ctx, searchReducerCtx *rCtx) {
// Reverse the top N results

- rCtx->postProcess(rCtx);
+ rCtx->postProcess((struct searchReducerCtx *)rCtx);

searchRequestCtx *req = rCtx->searchCtx;

@@ -1167,21 +1167,21 @@ static int searchResultReducer(struct MRCtx *mc, int count, MRReply **replies) {
heap_init(rCtx.pq, cmp_results, req, num);

// Default result process and post process operations
- rCtx.processReply = processSearchReply;
- rCtx.postProcess = noOpPostProcess;
+ rCtx.processReply = (void (*)(struct redisReply *, struct searchReducerCtx *, RedisModuleCtx *))processSearchReply;
+ rCtx.postProcess = (void (*)(struct searchReducerCtx *))noOpPostProcess;


if(req->specialCases) {
size_t nSpecialCases = array_len(req->specialCases);
for(size_t i =0; i < nSpecialCases; i++) {
if(req->specialCases[i]->specialCaseType == SPECIAL_CASE_KNN) {
specialCaseCtx* knnCtx = req->specialCases[i];
- rCtx.postProcess = knnPostProcess;
+ rCtx.postProcess = (void (*)(struct searchReducerCtx *))knnPostProcess;
rCtx.reduceSpecialCaseCtx = knnCtx;
if(knnCtx->knn.shouldSort) {
knnCtx->knn.pq = rm_malloc(heap_sizeof(knnCtx->knn.k));
heap_init(knnCtx->knn.pq, cmp_scored_results, NULL, knnCtx->knn.k);
- rCtx.processReply = proccessKNNSearchReply;
+ rCtx.processReply =(void (*)(struct redisReply *, struct searchReducerCtx *, RedisModuleCtx *))proccessKNNSearchReply;
rCtx.reduceSpecialCaseCtx = knnCtx;
break;
}
@@ -1191,7 +1191,7 @@ static int searchResultReducer(struct MRCtx *mc, int count, MRReply **replies) {

for (int i = 0; i < count; i++) {
MRReply *reply = (!profile) ? replies[i] : MRReply_ArrayElement(replies[i], 0);
- rCtx.processReply(reply, &rCtx, ctx);
+ rCtx.processReply(reply, (struct searchReducerCtx *)&rCtx, ctx);
}
if (rCtx.cachedResult) {
free(rCtx.cachedResult);
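A note for readers of the hunks above: the added casts make the right-hand sides match the function-pointer types of the `processReply`/`postProcess` fields, presumably to silence incompatible-pointer-type warnings where the callbacks are declared with a different (but compatible) parameter spelling. A minimal sketch, using hypothetical names rather than the repository's actual declarations, of the typedef-based alternative that needs no casts:

```c
/* Forward-declare the context so the callback type can mention it. */
struct reducerCtx;
typedef void (*PostProcessFn)(struct reducerCtx *);

typedef struct reducerCtx {
  PostProcessFn postProcess;  /* callback slot and callbacks share one spelling */
  int nResults;
} reducerCtx;

static void noOpPostProcess(struct reducerCtx *ctx) { (void)ctx; }

int main(void) {
  reducerCtx rctx = { .postProcess = noOpPostProcess, .nResults = 0 };
  rctx.postProcess(&rctx);  /* no cast needed on assignment or call */
  return 0;
}
```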
6 changes: 6 additions & 0 deletions docs/Commands.md
@@ -317,6 +317,7 @@ FT.SEARCH {index} {query} [NOCONTENT] [VERBATIM] [NOSTOPWORDS] [WITHSCORES] [WIT
[PAYLOAD {payload}]
[SORTBY {attribute} [ASC|DESC]]
[LIMIT offset num]
+ [TIMEOUT {milliseconds}]
[PARAMS {nargs} {name} {value} ... ]
```

@@ -454,6 +455,8 @@ FT.SEARCH books-idx "*=>[KNN 10 @title_embedding $query_vec AS title_score]" PAR
!!! tip
`LIMIT 0 0` can be used to count the number of documents in the result set without actually returning them.

+ - **TIMEOUT {milliseconds}**: If set, overrides the module's default `TIMEOUT` configuration for this query.
+
* **PARAMS {nargs} {name} {value}**. Define one or more value parameters. Each parameter has a name and a value. Parameters can be referenced in the query string by a `$`, followed by the parameter name, e.g., `$user`, and each such reference in the search query to a parameter name is substituted by the corresponding parameter value. For example, with parameter definition `PARAMS 4 lon 29.69465 lat 34.95126`, the expression `@loc:[$lon $lat 10 km]` would be evaluated to `@loc:[29.69465 34.95126 10 km]`. Parameters cannot be referenced in the query string where concrete values are not allowed, such as in field names, e.g., `@loc`
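For illustration only (not part of the diff): a possible invocation of the new `TIMEOUT` option against a hypothetical index `myIdx`, reusing the parameter example above. The per-query value takes precedence over the module-level timeout, and behaviour on expiry is typically governed by the module's `ON_TIMEOUT` policy.

```
FT.SEARCH myIdx "hello world" TIMEOUT 100
FT.SEARCH myIdx "@loc:[$lon $lat 10 km]" TIMEOUT 100 PARAMS 4 lon 29.69465 lat 34.95126
```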

#### Complexity
@@ -490,6 +493,7 @@ FT.AGGREGATE {index_name}
[APPLY {expr} AS {alias}] ...
[LIMIT {offset} {num}] ...
[FILTER {expr}] ...
+ [TIMEOUT {milliseconds}]
```

#### Description
@@ -584,6 +588,8 @@ Here, we needed to use `LOAD` to pre-load the @location attribute because it is
* **FILTER {expr}**. Filter the results using predicate expressions relating to values in each result.
They are applied post-query and relate to the current state of the pipeline.

+ - **TIMEOUT {milliseconds}**: If set, overrides the module's default `TIMEOUT` configuration for this query.
+
#### Complexity

Non-deterministic. Depends on the query and aggregations performed, but it is usually linear to the number of results returned.
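For illustration only (not part of the diff): the same per-query override on an aggregation, with a hypothetical index and field name.

```
FT.AGGREGATE myIdx "*" GROUPBY 1 @brand REDUCE COUNT 0 AS count TIMEOUT 500
```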
2 changes: 1 addition & 1 deletion sbin/get-redisjson
@@ -69,7 +69,7 @@ else
nick=ubuntu18.04
fi
elif [[ $dist == centos || $dist == redhat || $dist == fedora || $dist == ol ]]; then
- if [[ $nick == "centos8" || $nick == "ol8" ]]; then
+ if [[ $nick == "centos8" || $nick == "ol8" || $nick == "rocky8" ]]; then
nick="rhel8"
else
nick="rhel7"
5 changes: 3 additions & 2 deletions sbin/pack.sh
@@ -60,10 +60,11 @@ OSNICK=$($READIES/bin/platform --osnick)
[[ $OSNICK == trusty ]] && OSNICK=ubuntu14.04
[[ $OSNICK == xenial ]] && OSNICK=ubuntu16.04
[[ $OSNICK == bionic ]] && OSNICK=ubuntu18.04
- [[ $OSNICK == focal ]] && OSNICK=ubuntu20.04
+ [[ $OSNICK == focal ]] && OSNICK=ubuntu20.04
[[ $OSNICK == centos7 ]] && OSNICK=rhel7
[[ $OSNICK == centos8 ]] && OSNICK=rhel8
- [[ $OSNICK == ol8 ]] && OSNICK=rhel8
+ [[ $OSNICK == ol8 ]] && OSNICK=rhel8
+ [[ $OSNICK == rocky8 ]] && OSNICK=rhel8

PLATFORM="$OS-$OSNICK-$ARCH"

3 changes: 2 additions & 1 deletion sbin/upload-artifacts
@@ -43,7 +43,8 @@ OS=$($READIES/bin/platform --os)
[[ $OSNICK == focal ]] && OSNICK=ubuntu20.04
[[ $OSNICK == centos7 ]] && OSNICK=rhel7
[[ $OSNICK == centos8 ]] && OSNICK=rhel8
- [[ $OSNICK == ol8 ]] && OSNICK=rhel8
+ [[ $OSNICK == ol8 ]] && OSNICK=rhel8
+ [[ $OSNICK == rocky8 ]] && OSNICK=rhel8

PLATFORM="$OS-$OSNICK-$ARCH"

4 changes: 4 additions & 0 deletions src/spec.c
@@ -1491,6 +1491,7 @@ static void FieldSpec_RdbSave(RedisModuleIO *rdb, FieldSpec *f) {
RedisModule_SaveStringBuffer(rdb, &f->tagOpts.tagSep, 1);
}
if (FIELD_IS(f, INDEXFLD_T_VECTOR)) {
+ RedisModule_SaveUnsigned(rdb, f->vectorOpts.expBlobSize);
VecSim_RdbSave(rdb, &f->vectorOpts.vecSimParams);
}
}
@@ -1542,6 +1543,9 @@ static int FieldSpec_RdbLoad(RedisModuleIO *rdb, FieldSpec *f, int encver) {
}
// Load vector specific options
if (encver >= INDEX_VECSIM_VERSION && FIELD_IS(f, INDEXFLD_T_VECTOR)) {
+ if (encver >= INDEX_VECSIM_2_VERSION) {
+ f->vectorOpts.expBlobSize = LoadUnsigned_IOError(rdb, goto fail);
+ }
if (VecSim_RdbLoad(rdb, &f->vectorOpts.vecSimParams) != REDISMODULE_OK) {
goto fail;
}
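The load path above reads the new `expBlobSize` field only when the RDB was written with an encoding version that contains it, so older RDB files keep loading. A minimal sketch of that pattern using the plain Redis module API and hypothetical names — the real code's `LoadUnsigned_IOError` wrapper and its fallback behaviour may differ:

```c
#include "redismodule.h"

#define MY_ENCVER_WITH_BLOBSIZE 20  /* hypothetical: encoding version that introduced the field */

typedef struct {
  unsigned long long expBlobSize;
} MyVectorOpts;

/* Load: read the field only from new-enough encodings; default it otherwise. */
static int MyVectorOpts_RdbLoad(RedisModuleIO *rdb, MyVectorOpts *opts, int encver) {
  if (encver >= MY_ENCVER_WITH_BLOBSIZE) {
    opts->expBlobSize = RedisModule_LoadUnsigned(rdb);
    if (RedisModule_IsIOError(rdb)) return REDISMODULE_ERR;
  } else {
    opts->expBlobSize = 0;  /* older RDBs never stored it; fall back to a default */
  }
  return REDISMODULE_OK;
}

/* Save: always write the field; bumping the encoding version (INDEX_CURRENT_VERSION
 * in spec.h below) is what lets the loader tell old and new files apart. */
static void MyVectorOpts_RdbSave(RedisModuleIO *rdb, const MyVectorOpts *opts) {
  RedisModule_SaveUnsigned(rdb, opts->expBlobSize);
}
```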
3 changes: 2 additions & 1 deletion src/spec.h
@@ -180,7 +180,8 @@ typedef uint16_t FieldSpecDedupeArray[SPEC_MAX_FIELDS];
(Index_StoreFreqs | Index_StoreFieldFlags | Index_StoreTermOffsets | Index_StoreNumeric | \
Index_WideSchema)

- #define INDEX_CURRENT_VERSION 19
+ #define INDEX_CURRENT_VERSION 20
+ #define INDEX_VECSIM_2_VERSION 20
#define INDEX_VECSIM_VERSION 19
#define INDEX_JSON_VERSION 18
#define INDEX_MIN_COMPAT_VERSION 17
2 changes: 1 addition & 1 deletion src/version.h
@@ -6,7 +6,7 @@

#define REDISEARCH_VERSION_MAJOR 2
#define REDISEARCH_VERSION_MINOR 4
- #define REDISEARCH_VERSION_PATCH 1
+ #define REDISEARCH_VERSION_PATCH 2

#ifndef REDISEARCH_MODULE_NAME
#define REDISEARCH_MODULE_NAME "search"
6 changes: 4 additions & 2 deletions tests/pytests/test_replicate.py
@@ -98,6 +98,8 @@ def testDropReplicate():
env.assertTrue(master.execute_command("ping"))
env.assertTrue(slave.execute_command("ping"))

+ env.expect('WAIT', '1', '10000').equal(1) # wait for master and slave to be in sync
+
'''
This test first creates documents
Next, it creates an index so all documents are scanned into the index
@@ -110,10 +112,10 @@ def testDropReplicate():
master.execute_command('HSET', 'doc%d' % j, 't', 'hello%d' % j, 'tg', 'world%d' % j, 'n', j, 'g', geo)

# test for FT.DROPINDEX
- master.execute_command('WAIT', 1, 1000)
+ env.expect('WAIT', '1', '10000').equal(1) # wait for master and slave to be in sync
master.execute_command('FT.CREATE', 'idx', 'SCHEMA', 't', 'TEXT', 'n', 'NUMERIC', 'tg', 'TAG', 'g', 'GEO')
master.execute_command('FT.DROPINDEX', 'idx', 'DD')
- master.execute_command('WAIT', 1, 1000)
+ env.expect('WAIT', '1', '10000').equal(1) # wait for master and slave to be in sync

# check that same docs were deleted by master and slave
master_keys = sorted(master.execute_command('KEYS', '*'))
36 changes: 22 additions & 14 deletions tests/pytests/test_vecsim.py
@@ -297,15 +297,17 @@ def test_with_fields(env):
conn.execute_command('FT.CREATE', 'idx', 'SCHEMA', 'v', 'VECTOR', 'HNSW', '6', 'TYPE', 'FLOAT32', 'DIM', dimension, 'DISTANCE_METRIC', 'L2', 't', 'TEXT')
load_vectors_with_texts_into_redis(conn, 'v', dimension, qty)

- query_data = np.float32(np.random.random((1, dimension)))
- res = env.cmd('FT.SEARCH', 'idx', '*=>[KNN 100 @v $vec_param AS score]',
-     'SORTBY', 'score', 'PARAMS', 2, 'vec_param', query_data.tobytes(),
-     'RETURN', 2, 'score', 't')
- res_nocontent = env.cmd('FT.SEARCH', 'idx', '*=>[KNN 100 @v $vec_param AS score]',
-     'SORTBY', 'score', 'PARAMS', 2, 'vec_param', query_data.tobytes(),
-     'NOCONTENT')
- env.assertEqual(res[1::2], res_nocontent[1:])
- env.assertEqual('t', res[2][2])
+ for _ in env.retry_with_rdb_reload():
+     waitForIndex(env, 'idx')
+     query_data = np.float32(np.random.random((1, dimension)))
+     res = env.cmd('FT.SEARCH', 'idx', '*=>[KNN 100 @v $vec_param AS score]',
+         'SORTBY', 'score', 'PARAMS', 2, 'vec_param', query_data.tobytes(),
+         'RETURN', 2, 'score', 't')
+     res_nocontent = env.cmd('FT.SEARCH', 'idx', '*=>[KNN 100 @v $vec_param AS score]',
+         'SORTBY', 'score', 'PARAMS', 2, 'vec_param', query_data.tobytes(),
+         'NOCONTENT')
+     env.assertEqual(res[1::2], res_nocontent[1:])
+     env.assertEqual('t', res[2][2])


def get_vecsim_memory(env, index_key, field_name):
@@ -736,10 +738,13 @@ def test_single_entry(env):
conn.execute_command('FT.CREATE', 'idx', 'SCHEMA', 'v', 'VECTOR', 'HNSW', '6', 'TYPE', 'FLOAT32', 'DIM', dimension, 'DISTANCE_METRIC', 'L2')
vector = np.random.rand(1, dimension).astype(np.float32)
conn.execute_command('HSET', 0, 'v', vector.tobytes())
- env.expect('FT.SEARCH', 'idx', '*=>[KNN 10 @v $vec_param]',
-     'SORTBY', '__v_score',
-     'RETURN', '0',
-     'PARAMS', 2, 'vec_param', vector.tobytes()).equal([1L, '0'])
+
+ for _ in env.retry_with_rdb_reload():
+     waitForIndex(env, 'idx')
+     env.expect('FT.SEARCH', 'idx', '*=>[KNN 10 @v $vec_param]',
+         'SORTBY', '__v_score',
+         'RETURN', '0',
+         'PARAMS', 2, 'vec_param', vector.tobytes()).equal([1L, '0'])


def test_hybrid_query_adhoc_bf_mode(env):
@@ -760,7 +765,10 @@ def test_hybrid_query_adhoc_bf_mode(env):
query_data = np.float32([100 for j in range(dimension)])

expected_res = [10L, '100', ['__v_score', '0', 't', 'other'], '90', ['__v_score', '12800', 't', 'other'], '80', ['__v_score', '51200', 't', 'other'], '70', ['__v_score', '115200', 't', 'other'], '60', ['__v_score', '204800', 't', 'other'], '50', ['__v_score', '320000', 't', 'other'], '40', ['__v_score', '460800', 't', 'other'], '30', ['__v_score', '627200', 't', 'other'], '20', ['__v_score', '819200', 't', 'other'], '10', ['__v_score', '1036800', 't', 'other']]
- execute_hybrid_query(env, '(other)=>[KNN 10 @v $vec_param]', query_data, 't', batches_mode=False).equal(expected_res)
+
+ for _ in env.retry_with_rdb_reload():
+     waitForIndex(env, 'idx')
+     execute_hybrid_query(env, '(other)=>[KNN 10 @v $vec_param]', query_data, 't', batches_mode=False).equal(expected_res)


def test_wrong_vector_size(env):
6 changes: 4 additions & 2 deletions tests/qa/RS_VERSIONS
@@ -1,6 +1,8 @@
6.2.4-54
100.0.0-2721
6.0.8-32
6.0.12-58
6.0.20-101
6.2.4-54
6.2.8-53
100.0.0-2602
6.2.10-83
6.2.12-11
27 changes: 27 additions & 0 deletions tests/qa/common.json
@@ -0,0 +1,27 @@
{
"service_id": "single_module_test_cycle",
"name": "redisearch automation-testing",
"properties": {
"sut_version": "{{RLEC_VERSION}}",
"email_recipients": "s5i1u4h5a8c8w2d7@redislabs.slack.com",
"sut_environments": [],
"tools_environment": {},
"modules_version": "{{SEARCH_VERSION}}",
"test_names_modules": [
"{{SEARCH_TEST_NAME}}"
],
"global_spot_instances": "ondemand",
"module_download_url": true,
"module_download_urls": {
"{{SEARCH_DOWNLOAD_NAME}}": "http://redismodules.s3.amazonaws.com/{{SEARCH_DIR}}/{{SEARCH_FILE_PREFIX}}.{{RLEC_OS}}-{{RLEC_ARCH}}.{{SEARCH_VERSION}}.zip",
"ReJSON": "http://redismodules.s3.amazonaws.com/{{REJSON_DIR}}/{{REJSON_FILE_PREFIX}}.{{RLEC_OS}}-{{RLEC_ARCH}}.{{REJSON_VERSION}}.zip']"
},
"cycle_environments_setup": [
{
"teardown": true,
"name": "{{RLEC_ENV}}",
"concurrency": 1
}
]
}
}
22 changes: 0 additions & 22 deletions tests/qa/nightly.json

This file was deleted.
