Merge 'test/cql-pytest: clean up tests to run on Cassandra' from Nadav Har'El

To keep our cql-pytest tests "correct", we should strive for them to pass on
Cassandra, unless they test a Scylla-only feature or a deliberate difference
between Scylla and Cassandra, in which case they should be marked
"scylla-only" so that they are skipped when running on Cassandra.

The following small patches fix a few cases where our tests were failing on
Cassandra. In one case this even uncovered a bug in the test itself (a trivial
Python mistake, but still).

Closes #8694

* github.com:scylladb/scylla:
  test/cql-pytest: fix python mistake in an xfailing test
  test/cql-pytest: mark some tests with scylla-only
  test/cql-pytest: clean up test_create_large_static_cells_and_rows
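
For context, the "scylla-only" marking described above is a pytest fixture that a test lists among its parameters; when the suite runs against Cassandra, requesting the fixture skips the test. Below is a minimal sketch of how such a fixture might look, assuming the suite's session-level cql driver fixture; the real test/cql-pytest/conftest.py may detect the server differently.

import pytest

# Sketch of a "scylla_only" fixture: skip the requesting test unless the
# server under test is Scylla. The detection below probes for Scylla-specific
# tables in the "system" keyspace; this is an assumption, not necessarily the
# suite's actual implementation.
@pytest.fixture
def scylla_only(cql):
    names = [row.table_name for row in cql.execute(
        "SELECT table_name FROM system_schema.tables WHERE keyspace_name = 'system'")]
    if not any('scylla' in name for name in names):
        pytest.skip('this test is Scylla-only, skipping on Cassandra')

A test opts in simply by adding scylla_only to its parameter list, which is exactly what the test_cdc.py and test_validation.py changes below do.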
psarna committed May 24, 2021
2 parents 789757a + 5206665 commit 95c6ec1
Showing 4 changed files with 15 additions and 19 deletions.
@@ -1535,20 +1535,20 @@ def testSelectionOfEmptyCollections(cql, test_keyspace):
assert_rows(execute(cql, table, "SELECT m['0'..'1'], s[0..1] FROM %s WHERE k = 0"), [None, None])
assert_rows(execute(cql, table, "SELECT m['0'..'1']['3'..'5'], s[0..1][3..5] FROM %s WHERE k = 0"), [None, None])

assert_rows(execute(cql, table, "SELECT m, s FROM %s WHERE k = 1"), [dict(), {}])
assert_rows(execute(cql, table, "SELECT m, s FROM %s WHERE k = 1"), [dict(), set()])
assert_rows(execute(cql, table, "SELECT m['0'], s[0] FROM %s WHERE k = 1"), [None, None])
assert_rows(execute(cql, table, "SELECT m['0'..'1'], s[0..1] FROM %s WHERE k = 1"), [dict(), {}])
assert_rows(execute(cql, table, "SELECT m['0'..'1']['3'..'5'], s[0..1][3..5] FROM %s WHERE k = 1"), [dict(), {}])
assert_rows(execute(cql, table, "SELECT m['0'..'1'], s[0..1] FROM %s WHERE k = 1"), [dict(), set()])
assert_rows(execute(cql, table, "SELECT m['0'..'1']['3'..'5'], s[0..1][3..5] FROM %s WHERE k = 1"), [dict(), set()])

assert_rows(execute(cql, table, "SELECT m, s FROM %s WHERE k = 2"), [dict(), {}])
assert_rows(execute(cql, table, "SELECT m, s FROM %s WHERE k = 2"), [dict(), set()])
assert_rows(execute(cql, table, "SELECT m['0'], s[0] FROM %s WHERE k = 2"), [None, None])
assert_rows(execute(cql, table, "SELECT m['0'..'1'], s[0..1] FROM %s WHERE k = 2"), [dict(), {}])
assert_rows(execute(cql, table, "SELECT m['0'..'1']['3'..'5'], s[0..1][3..5] FROM %s WHERE k = 2"), [dict(), {}])
assert_rows(execute(cql, table, "SELECT m['0'..'1'], s[0..1] FROM %s WHERE k = 2"), [dict(), set()])
assert_rows(execute(cql, table, "SELECT m['0'..'1']['3'..'5'], s[0..1][3..5] FROM %s WHERE k = 2"), [dict(), set()])

assert_rows(execute(cql, table, "SELECT m, s FROM %s WHERE k = 3"), [{"2": 2}, {2}])
assert_rows(execute(cql, table, "SELECT m['0'], s[0] FROM %s WHERE k = 3"), [None, None])
assert_rows(execute(cql, table, "SELECT m['0'..'1'], s[0..1] FROM %s WHERE k = 3"), [dict(), {}])
assert_rows(execute(cql, table, "SELECT m['0'..'1']['3'..'5'], s[0..1][3..5] FROM %s WHERE k = 3"), [dict(), {}])
assert_rows(execute(cql, table, "SELECT m['0'..'1'], s[0..1] FROM %s WHERE k = 3"), [dict(), set()])
assert_rows(execute(cql, table, "SELECT m['0'..'1']['3'..'5'], s[0..1][3..5] FROM %s WHERE k = 3"), [dict(), set()])

with create_table(cql, test_keyspace, "(k int PRIMARY KEY, m map<text, int>, s set<int>)") as table:
execute(cql, table, "INSERT INTO %s(k) VALUES (0)")
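
The {} to set() changes above are the "trivial Python mistake" mentioned in the commit message: in Python, a bare {} literal is an empty dict, not an empty set, so the expected values in these assertions had the wrong type. A quick illustration:

# {} denotes an empty dict; the empty set has no literal form and must be
# written set(), which is what the corrected assertions use.
print(type({}))       # <class 'dict'>
print(type(set()))    # <class 'set'>
print({} == set())    # False: an empty dict never equals an empty set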
test/cql-pytest/test_cdc.py: 2 changes (1 addition & 1 deletion)
@@ -20,7 +20,7 @@

from util import new_test_table

-def test_cdc_log_entries_use_cdc_streams(cql, test_keyspace):
+def test_cdc_log_entries_use_cdc_streams(scylla_only, cql, test_keyspace):
'''Test that the stream IDs chosen for CDC log entries come from the CDC generation
whose streams are listed in the streams description table. Since this test is executed
on a single-node cluster, there is only one generation.'''
test/cql-pytest/test_large_cells_rows.py: 12 changes (4 additions & 8 deletions)
@@ -18,6 +18,7 @@
from util import new_test_table

import requests
+import nodetool

def test_create_large_static_cells_and_rows(cql, test_keyspace):
'''Test that `large_data_handler` successfully reports large static cells
@@ -26,18 +27,13 @@ def test_create_large_static_cells_and_rows(cql, test_keyspace):
This is a regression test for https://github.com/scylladb/scylla/issues/6780'''
schema = "pk int, ck int, user_ids set<text> static, PRIMARY KEY (pk, ck)"
with new_test_table(cql, test_keyspace, schema) as table:
insert_stmt = cql.prepare(f"INSERT INTO {table} (pk, ck, user_ids) VALUES (?, ?, ?) USING TIMEOUT 5m")
insert_stmt = cql.prepare(f"INSERT INTO {table} (pk, ck, user_ids) VALUES (?, ?, ?)")
# Default large data threshold for cells is 1 mb, for rows it is 10 mb.
# Take 10 mb cell to trigger large data reporting code both for
# static cells and static rows simultaneously.
large_set = {'x' * 1024 * 1024 * 10}
cql.execute(insert_stmt, [1, 1, large_set])

-# REST API endpoint address for test scylla node
-node_address = f'http://{cql.cluster.contact_points[0]}:10000'
-# Execute force flush of test table to persistent storage, which is necessary to trigger
-# `large_data_handler` execution.
-table_without_ks = table[table.find('.') + 1:] # strip keyspace part from the table name
-requests.post(f'{node_address}/storage_service/keyspace_flush/{test_keyspace}', params={'cf' : table_without_ks})
+nodetool.flush(cql, table)
# No need to check that the Scylla server is running here, since the test will
-# fail automatically in case Scylla crashes.
+# fail automatically in case Scylla crashes.
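
The new nodetool.flush(cql, table) call replaces the hand-written REST request removed above. Presumably the shared helper wraps the same /storage_service/keyspace_flush endpoint so that individual tests no longer hardcode the node address and URL; the sketch below shows what such a helper could look like, based only on the code that was removed, and the real test/cql-pytest/nodetool.py may differ (for example, by shelling out to an external nodetool when running against Cassandra).

import requests

# Sketch of a flush() helper that mirrors the REST request the test used to
# issue directly; not necessarily the suite's actual nodetool.py.
def flush(cql, table):
    keyspace, cf = table.split('.', 1)  # table names arrive as "keyspace.table"
    # Scylla's REST API listens on port 10000 of the contacted node.
    node_address = f'http://{cql.cluster.contact_points[0]}:10000'
    response = requests.post(
        f'{node_address}/storage_service/keyspace_flush/{keyspace}',
        params={'cf': cf})
    response.raise_for_status()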
test/cql-pytest/test_validation.py: 4 changes (2 additions & 2 deletions)
@@ -158,7 +158,7 @@ def table1(cql, test_keyspace):
# Note that currently, Scylla's UTF-8 parser is stricter than Cassandra's
# (see comment above listing the relevant cases), so this test, as all tests
# using the bad_utf8 array, will fail on Cassandra.
-def test_validation_utf8_as_blob(cql, table1):
+def test_validation_utf8_as_blob(scylla_only, cql, table1):
cmd = "INSERT INTO {} (k, t) VALUES (1, blobAsText(0x{}))"
for b in good_utf8:
print(b)
@@ -181,7 +181,7 @@ def test_validation_utf8_as_blob(cql, table1):
# Note that currently, Scylla's UTF-8 parser is stricter than Cassandra's
# (see comment above listing the relevant cases), so this test, as all tests
# using the bad_utf8 array, will fail on Cassandra.
-def test_validation_utf8_bound_column(cql, table1):
+def test_validation_utf8_bound_column(scylla_only, cql, table1):
import cassandra.cqltypes
orig_serialize = cassandra.cqltypes.UTF8Type.serialize
def myserialize(ustr, protocol_version):
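
The truncated test body above appears to monkeypatch the driver's UTF8Type.serialize so that raw, possibly invalid UTF-8 byte sequences can be bound to a text column, bypassing the client-side encoding step so that the server's validation is what gets exercised. A rough sketch of the general technique follows; the test's actual code is cut off above and may differ.

import cassandra.cqltypes

# Temporarily replace the driver's UTF-8 serializer so that pre-encoded bytes
# are sent to the server unmodified. This is a sketch of the technique, not
# the test's exact code.
orig_serialize = cassandra.cqltypes.UTF8Type.serialize

def myserialize(ustr, protocol_version):
    # Pass raw bytes through untouched; fall back to the normal encoder otherwise.
    if isinstance(ustr, bytes):
        return ustr
    return orig_serialize(ustr, protocol_version)

cassandra.cqltypes.UTF8Type.serialize = staticmethod(myserialize)
try:
    # ... bind byte strings from the bad_utf8 list here and expect the server
    # to reject the invalid ones ...
    pass
finally:
    cassandra.cqltypes.UTF8Type.serialize = orig_serialize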
