Fix sorting search result by health (#5880)
kozlovsky committed Dec 24, 2020
1 parent 6afcd6e · commit 257a396
Showing 4 changed files with 138 additions and 11 deletions.

File 1 of 4

@@ -2,7 +2,7 @@
from asyncio import get_event_loop

from pony import orm
-from pony.orm import db_session, desc, raw_sql, select
+from pony.orm import db_session, desc, left_join, raw_sql

from tribler_core.modules.metadata_store.orm_bindings.channel_node import LEGACY_ENTRY, TODELETE
from tribler_core.modules.metadata_store.orm_bindings.torrent_metadata import NULL_KEY_SUBST
@@ -52,7 +52,7 @@ def search_keyword(cls, query, lim=100):
"""SELECT rowid FROM ChannelNode WHERE rowid IN (SELECT rowid FROM FtsIndex WHERE FtsIndex MATCH $query
ORDER BY bm25(FtsIndex) LIMIT $lim) GROUP BY infohash"""
)
return cls.select(lambda g: g.rowid in fts_ids)
return left_join(g for g in cls if g.rowid in fts_ids) # pylint: disable=E1135

@classmethod
@db_session
@@ -81,7 +81,7 @@ def get_entries_query(
"""
# Warning! For Pony magic to work, iteration variable name (e.g. 'g') should be the same everywhere!

pony_query = cls.search_keyword(txt_filter, lim=1000) if txt_filter else select(g for g in cls)
pony_query = cls.search_keyword(txt_filter, lim=1000) if txt_filter else left_join(g for g in cls)

if metadata_type is not None:
try:
@@ -120,11 +120,13 @@ def get_entries_query(
        )

        # Sort the query
+        pony_query = pony_query.sort_by("desc(g.rowid)" if sort_desc else "g.rowid")
+
        if sort_by == "HEALTH":
-            pony_query = (
-                pony_query.sort_by("(desc(g.health.seeders), desc(g.health.leechers))")
+            pony_query = pony_query.sort_by(
+                "(desc(g.health.seeders), desc(g.health.leechers))"
                if sort_desc
-                else pony_query.sort_by("(g.health.seeders, g.health.leechers)")
+                else "(g.health.seeders, g.health.leechers)"
            )
        elif sort_by == "size" and not issubclass(cls, db.ChannelMetadata):
            # TODO: optimize this check to skip cases where size field does not matter
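
Note on the change above: the HEALTH sort orders on attributes of a relation (g.health.seeders, g.health.leechers), and channels and collection folders have no health record to join against. Pony ORM's left_join() forces a LEFT JOIN so such entries stay in the result set, whereas the previous select() presumably dropped them or broke the ordering, which appears to be what this commit fixes. A minimal, self-contained sketch of that behaviour, using illustrative Node/Health entities rather than the real Tribler schema:

from pony.orm import Database, Optional, Required, db_session, desc, left_join  # desc is referenced inside the sort_by string below

db = Database()


class Node(db.Entity):
    title = Required(str)
    health = Optional('Health')  # optional one-to-one, like a torrent's health info


class Health(db.Entity):
    node = Required(Node)
    seeders = Required(int, default=0)
    leechers = Required(int, default=0)


db.bind(provider='sqlite', filename=':memory:')
db.generate_mapping(create_tables=True)

with db_session:
    torrent = Node(title='torrent')
    Health(node=torrent, seeders=10, leechers=20)
    Node(title='folder')  # no Health row at all, like a channel or collection folder

    # left_join keeps 'folder' even though it has nothing to join against;
    # its NULL seeders/leechers simply sort after the real values in SQLite.
    query = left_join(n for n in Node)
    query = query.sort_by("(desc(n.health.seeders), desc(n.health.leechers))")
    print([n.title for n in query[:]])  # ['torrent', 'folder']

This mirrors the new test_sort_by_health test later in this commit, where torrents sort ahead of folders when sort_desc=True.
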
File 2 of 4

@@ -27,7 +27,7 @@ async def test_get_channels(enable_chant, enable_api, add_fake_torrents_channels
    json_dict = await do_request(session, 'channels')
    assert len(json_dict['results']) == 10
    # Default channel state should be METAINFO_LOOKUP
-    assert json_dict['results'][0]['state'] == CHANNEL_STATE.METAINFO_LOOKUP.value
+    assert json_dict['results'][-1]['state'] == CHANNEL_STATE.METAINFO_LOOKUP.value

    # We test out different combinations of channels' states and download progress
    # State UPDATING:
@@ -38,7 +38,7 @@ async def test_get_channels(enable_chant, enable_api, add_fake_torrents_channels
        channel.local_version = 123

    json_dict = await do_request(session, 'channels')
-    assert json_dict['results'][0]['progress'] == 0.5
+    assert json_dict['results'][-1]['progress'] == 0.5

    # State DOWNLOADING
    with db_session:
@@ -49,7 +49,7 @@ async def test_get_channels(enable_chant, enable_api, add_fake_torrents_channels
    session.dlmgr.metainfo_requests.get = lambda _: False
    session.dlmgr.download_exists = lambda _: True
    json_dict = await do_request(session, 'channels')
-    assert json_dict['results'][0]['state'] == CHANNEL_STATE.DOWNLOADING.value
+    assert json_dict['results'][-1]['state'] == CHANNEL_STATE.DOWNLOADING.value


@pytest.mark.asyncio
@@ -122,7 +122,6 @@ async def test_get_channel_contents(enable_chant, enable_api, add_fake_torrents_
    with db_session:
        chan = session.mds.ChannelMetadata.select().first()
    json_dict = await do_request(session, 'channels/%s/123' % hexlify(chan.public_key), expected_code=200)
-    print(json_dict)
    assert len(json_dict['results']) == 5
    assert 'status' in json_dict['results'][0]
    assert json_dict['results'][0]['progress'] == 0.5

File 3 of 4

@@ -450,6 +450,132 @@ def test_get_channels(metadata_store):
    assert len(channels) == 5


@db_session
def test_sort_by_health(metadata_store):
    def save():
        metadata_store._db.flush()  # pylint: disable=W0212

    channel = metadata_store.ChannelMetadata.create_channel('channel1')
    save()

    torrent1 = metadata_store.TorrentMetadata(
        origin_id=channel.id_, status=NEW, infohash=random_infohash(), title='torrent1 aaa bbb'
    )
    torrent1.health.set(seeders=10, leechers=20)
    save()

    folder1 = metadata_store.CollectionNode(origin_id=channel.id_, title='folder1 aaa ccc')
    save()

    torrent2 = metadata_store.TorrentMetadata(
        origin_id=channel.id_, status=NEW, infohash=random_infohash(), title='torrent2 bbb ccc'
    )
    torrent2.health.set(seeders=5, leechers=10)
    save()

    folder2 = metadata_store.CollectionNode(origin_id=channel.id_, title='folder2 aaa bbb')
    save()

    torrent3 = metadata_store.TorrentMetadata(
        origin_id=channel.id_, status=NEW, infohash=random_infohash(), title='torrent3 ccc ddd'
    )
    torrent3.health.set(seeders=30, leechers=40)
    save()

    folder2_1 = metadata_store.CollectionNode(
        origin_id=folder2.id_,
        title='folder2_1 aaa bbb',
    )
    save()

    folder2_2 = metadata_store.CollectionNode(
        origin_id=folder2.id_,
        title='folder2_2 bbb ccc',
    )
    save()

    torrent2_1 = metadata_store.TorrentMetadata(
        origin_id=folder2.id_, status=NEW, infohash=random_infohash(), title='torrent2_1 aaa ccc'
    )
    torrent2_1.health.set(seeders=20, leechers=10)
    save()

    # Without FTS search

    objects = metadata_store.MetadataNode.get_entries(channel_pk=channel.public_key, sort_by='HEALTH', sort_desc=True)
    titles = [obj.title.partition(' ')[0] for obj in objects]
    assert titles == [
        'torrent3',
        'torrent2_1',
        'torrent1',
        'torrent2',
        'channel1',
        'folder2_2',
        'folder2_1',
        'folder2',
        'folder1',
    ]

    objects = metadata_store.MetadataNode.get_entries(channel_pk=channel.public_key, sort_by='HEALTH', sort_desc=False)
    titles = [obj.title.partition(' ')[0] for obj in objects]
    assert titles == [
        'folder1',
        'folder2',
        'folder2_1',
        'folder2_2',
        'channel1',
        'torrent2',
        'torrent1',
        'torrent2_1',
        'torrent3',
    ]

    objects = metadata_store.MetadataNode.get_entries(
        origin_id=channel.id_,
        sort_by='HEALTH',
        sort_desc=True,
    )
    titles = [obj.title.partition(' ')[0] for obj in objects]
    assert titles == ['torrent3', 'torrent1', 'torrent2', 'folder2', 'folder1']

    objects = metadata_store.MetadataNode.get_entries(origin_id=channel.id_, sort_by='HEALTH', sort_desc=False)
    titles = [obj.title.partition(' ')[0] for obj in objects]
    assert titles == ['folder1', 'folder2', 'torrent2', 'torrent1', 'torrent3']

    # With FTS search

    objects = metadata_store.MetadataNode.get_entries(
        channel_pk=channel.public_key, txt_filter='aaa', sort_by='HEALTH', sort_desc=True
    )
    titles = [obj.title.partition(' ')[0] for obj in objects]
    # FIXME: does not return folder2 and folder2_1 due to a bug in a full text search query
    assert titles == ['torrent2_1', 'torrent1', 'folder1']

    objects = metadata_store.MetadataNode.get_entries(
        channel_pk=channel.public_key, txt_filter='aaa', sort_by='HEALTH', sort_desc=False
    )
    titles = [obj.title.partition(' ')[0] for obj in objects]
    # FIXME: does not return folder2 and folder2_1 due to a bug in a full text search query
    assert titles == ['folder1', 'torrent1', 'torrent2_1']

    objects = metadata_store.MetadataNode.get_entries(
        origin_id=channel.id_,
        txt_filter='aaa',
        sort_by='HEALTH',
        sort_desc=True,
    )
    titles = [obj.title.partition(' ')[0] for obj in objects]
    # FIXME: does not return folder2 due to a bug in a full text search query
    assert titles == ['torrent1', 'folder1']

    objects = metadata_store.MetadataNode.get_entries(
        origin_id=channel.id_, txt_filter='aaa', sort_by='HEALTH', sort_desc=False
    )
    titles = [obj.title.partition(' ')[0] for obj in objects]
    # FIXME: does not return folder2 due to a bug in a full text search query
    assert titles == ['folder1', 'torrent1']


@db_session
def test_get_channel_name(metadata_store):
"""

File 4 of 4

@@ -225,7 +225,7 @@ def test_get_entries(metadata_store):

    args = dict(channel_pk=channel_pk, hide_xxx=True, exclude_deleted=True, metadata_type=REGULAR_TORRENT)
    torrents = metadata_store.TorrentMetadata.get_entries_query(**args)[:]
-    assert tlist[-5:-2] == list(torrents)
+    assert tlist[-5:-2] == list(torrents)[::-1]

    count = metadata_store.TorrentMetadata.get_entries_count(**args)
    assert count == 3
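
The reversed comparison above, like the switch from results[0] to results[-1] in the endpoint tests earlier, presumably follows from the explicit rowid ordering that get_entries_query now always applies: assuming sort_desc defaults to True, results come back newest-first instead of in insertion order. A tiny illustrative sketch of that inversion (plain Python, not Tribler code):

# rowids in insertion order, oldest first (a stand-in for tlist)
rowids = [1, 2, 3, 4, 5]

# the three rows a filter happens to match, still in insertion order
matching = rowids[-5:-2]                        # [1, 2, 3]

# before this commit the query returned them in implicit (insertion) order;
# with the explicit "desc(g.rowid)" default it returns them newest first
query_result = sorted(matching, reverse=True)   # [3, 2, 1]

# hence the updated assertion pattern: reverse the query output before comparing
assert matching == query_result[::-1]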
