Merge pull request #5596 from ichorid/feature/gui_changes
Channels GUI changes
ichorid committed Oct 24, 2020
2 parents c4ddfe5 + 1bbbe47 commit 08bb535
Showing 34 changed files with 1,272 additions and 1,329 deletions.
10 changes: 10 additions & 0 deletions src/tribler-common/tribler_common/simpledefs.py
@@ -101,3 +101,13 @@ class NTFY(Enum):
EVENTS_START = "events_start"
TRIBLER_EXCEPTION = "tribler_exception"
POPULARITY_COMMUNITY_ADD_UNKNOWN_TORRENT = "PopularityCommunity:added_unknown_torrent"


class CHANNEL_STATE(Enum):
PERSONAL = "Personal"
LEGACY = "Legacy"
COMPLETE = "Complete"
UPDATING = "Updating"
DOWNLOADING = "Downloading"
PREVIEW = "Preview"
METAINFO_LOOKUP = "Searching for metainfo"
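
The new enum centralizes the channel-state strings that were previously hard-coded as literals. A minimal sketch of how consuming code (for instance the GUI, after receiving a channel dict from the REST API) might map the serialized string back to the enum; the helper name and the percentage formatting are illustrative assumptions, not part of this change:

from tribler_common.simpledefs import CHANNEL_STATE

def describe_channel_state(channel_json):
    # channel_json is assumed to be one entry of the 'results' list returned by
    # the /channels endpoint; its 'state' field carries a CHANNEL_STATE value
    state = CHANNEL_STATE(channel_json["state"])
    if state in (CHANNEL_STATE.UPDATING, CHANNEL_STATE.DOWNLOADING):
        # 'progress' is only attached while the channel is still being processed
        return "%s (%.0f%%)" % (state.value, 100 * channel_json.get("progress", 0.0))
    return state.value
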
5 changes: 5 additions & 0 deletions src/tribler-core/tribler_core/conftest.py
@@ -101,6 +101,11 @@ def mock_dlmgr(session, mocker, tmpdir):
session.dlmgr.get_checkpoint_dir = lambda: tmpdir


@pytest.fixture
def mock_dlmgr_get_download(session, mocker, tmpdir, mock_dlmgr):
session.dlmgr.get_download = lambda _: None


@pytest.fixture
async def session(tribler_config):
session = Session(tribler_config)
@@ -633,7 +633,6 @@ async def stream(self, request):
await wait_for(response.write(data), STREAM_PAUSE_TIME)
bytes_done += len(data)


if chunk.resume():
self._logger.debug("Stream %s-%s is resumed, starting sequential buffer", start, stop)
except AsyncTimeoutError:
@@ -277,6 +277,7 @@ async def test_change_hops_error(enable_api, mock_dlmgr, test_download, session)
"""
Testing whether the API returns 400 if we supply both anon_hops and another parameter
"""
session.dlmgr.get_download = lambda _: True
await do_request(session, 'downloads/%s' % test_download.infohash, post_data={"state": "resume", 'anon_hops': 1},
expected_code=400, request_type='PATCH')

@@ -285,14 +285,15 @@ async def download_channel(self, channel):
async def process_channel_dir_threaded(self, channel):
def _process_download():
try:
channel_dirname = self.session.mds.channels_dir / channel.dirname
channel_dirname = self.session.mds.get_channel_dir_path(channel)
self.session.mds.process_channel_dir(
channel_dirname, channel.public_key, channel.id_, external_thread=True
)
self.session.mds._db.disconnect()
except Exception as e:
self._logger.error("Error when processing channel dir download: %s", e)
return
finally:
self.session.mds._db.disconnect()

await get_event_loop().run_in_executor(None, _process_download)
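
For clarity, after this change the worker above roughly reads as follows (a reconstruction from the interleaved old/new lines, not verbatim source). The change is twofold: the channel directory is now resolved through get_channel_dir_path(), and the thread-local Pony database connection is released in a finally block, so it is also disconnected when process_channel_dir() raises:

def _process_download():
    try:
        channel_dirname = self.session.mds.get_channel_dir_path(channel)
        self.session.mds.process_channel_dir(
            channel_dirname, channel.public_key, channel.id_, external_thread=True
        )
    except Exception as e:
        self._logger.error("Error when processing channel dir download: %s", e)
    finally:
        # always drop the worker thread's DB connection, even if processing failed
        self.session.mds._db.disconnect()
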

@@ -11,6 +11,8 @@
from pony import orm
from pony.orm import db_session, desc, raw_sql, select

from tribler_common.simpledefs import CHANNEL_STATE

from tribler_core.modules.metadata_store.discrete_clock import clock
from tribler_core.modules.metadata_store.orm_bindings.channel_node import (
COMMITTED,
@@ -392,16 +394,16 @@ def state(self):
"""
# TODO: optimize this by stopping doing blob comparisons on each call, and instead remember rowid?
if self.is_personal:
return "Personal"
return CHANNEL_STATE.PERSONAL.value
if self.status == LEGACY_ENTRY:
return "Legacy"
return CHANNEL_STATE.LEGACY.value
if self.local_version == self.timestamp:
return "Complete"
return CHANNEL_STATE.COMPLETE.value
if self.local_version > 0:
return "Updating"
return CHANNEL_STATE.UPDATING.value
if self.subscribed:
return "Downloading"
return "Preview"
return CHANNEL_STATE.METAINFO_LOOKUP.value
return CHANNEL_STATE.PREVIEW.value

def to_simple_dict(self, **kwargs):
"""
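
Putting the interleaved removals and additions of the state property together, it now resolves every state through the shared enum; a reconstruction (assuming the pre-existing LEGACY_ENTRY constant and the attributes referenced below):

def state(self):
    if self.is_personal:
        return CHANNEL_STATE.PERSONAL.value
    if self.status == LEGACY_ENTRY:
        return CHANNEL_STATE.LEGACY.value
    if self.local_version == self.timestamp:
        return CHANNEL_STATE.COMPLETE.value
    if self.local_version > 0:
        return CHANNEL_STATE.UPDATING.value
    if self.subscribed:
        return CHANNEL_STATE.METAINFO_LOOKUP.value
    return CHANNEL_STATE.PREVIEW.value

Note that a subscribed channel that has not progressed past version 0 now reports METAINFO_LOOKUP instead of "Downloading"; the REST endpoint below upgrades it to DOWNLOADING once a matching download exists.
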
@@ -13,13 +13,17 @@

from pony.orm import db_session

from tribler_common.simpledefs import CHANNEL_STATE

from tribler_core.modules.libtorrent.torrentdef import TorrentDef
from tribler_core.modules.metadata_store.orm_bindings.channel_node import DIRTY_STATUSES, NEW
from tribler_core.modules.metadata_store.restapi.metadata_endpoint_base import MetadataEndpointBase
from tribler_core.modules.metadata_store.restapi.metadata_schema import ChannelSchema
from tribler_core.modules.metadata_store.serialization import REGULAR_TORRENT
from tribler_core.restapi.rest_endpoint import HTTP_BAD_REQUEST, HTTP_NOT_FOUND, RESTResponse
from tribler_core.restapi.schema import HandledErrorSchema
from tribler_core.utilities import path_util
from tribler_core.utilities.unicode import hexlify
from tribler_core.utilities.utilities import is_infohash, parse_magnetlink


@@ -79,7 +83,31 @@ async def get_channels(self, request):
with db_session:
channels = self.session.mds.ChannelMetadata.get_entries(**sanitized)
total = self.session.mds.ChannelMetadata.get_total_count(**sanitized) if include_total else None
channels_list = [channel.to_simple_dict() for channel in channels]
channels_list = []
for channel in channels:
channel_dict = channel.to_simple_dict()
# Add progress info for those channels that are still being processed
if channel.subscribed:
if channel_dict["state"] == CHANNEL_STATE.UPDATING.value:
try:
progress = self.session.mds.compute_channel_update_progress(channel)
channel_dict["progress"] = progress
except (ZeroDivisionError, FileNotFoundError) as e:
self._logger.error(
"Error %s when calculating channel update progress. Channel data: %s-%i %i/%i",
e,
hexlify(channel.public_key),
channel.id_,
channel.start_timestamp,
channel.local_version,
)
elif channel_dict["state"] == CHANNEL_STATE.METAINFO_LOOKUP.value:
if not self.session.dlmgr.metainfo_requests.get(
bytes(channel.infohash)
) and self.session.dlmgr.download_exists(bytes(channel.infohash)):
channel_dict["state"] = CHANNEL_STATE.DOWNLOADING.value

channels_list.append(channel_dict)
response_dict = {
"results": channels_list,
"first": sanitized["first"],
Expand Down Expand Up @@ -118,6 +146,11 @@ async def get_channel_contents(self, request):
contents = self.session.mds.MetadataNode.get_entries(**sanitized)
contents_list = [c.to_simple_dict() for c in contents]
total = self.session.mds.MetadataNode.get_total_count(**sanitized) if include_total else None
for torrent in contents_list:
if torrent['type'] == REGULAR_TORRENT:
dl = self.session.dlmgr.get_download(unhexlify(torrent['infohash']))
if dl is not None:
torrent['progress'] = dl.get_state().get_progress()
response_dict = {
"results": contents_list,
"first": sanitized['first'],
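
A minimal sketch of how a client could exercise the enriched responses; the base URL and the aiohttp usage are assumptions (authentication and error handling omitted), and <public_key>/<id> stands in for a real channel:

import asyncio

import aiohttp

API = "http://localhost:52194/"  # hypothetical local REST API base URL

async def dump_progress():
    async with aiohttp.ClientSession() as http:
        # channels that are still updating now carry a fractional 'progress'
        async with http.get(API + "channels") as resp:
            for chan in (await resp.json())["results"]:
                print(chan.get("name"), chan.get("state"), chan.get("progress"))
        # torrents inside a channel now carry their download progress as well
        async with http.get(API + "channels/<public_key>/<id>") as resp:
            for entry in (await resp.json())["results"]:
                if "progress" in entry:
                    print(entry.get("name"), entry["progress"])

asyncio.run(dump_progress())
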
@@ -39,6 +39,7 @@ class ChannelSchema(Schema):
updated = Integer()
subscribed = Boolean()
votes = Float()
progress = Float()


class TorrentSchema(Schema):
@@ -60,3 +61,4 @@ class TorrentSchema(Schema):
updated = Integer()
subscribed = Boolean()
votes = Float()
progress = Float()
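
The added progress field only declares the new optional key in the response schema; assuming these are plain marshmallow schemas (as the Float()/Integer() field declarations suggest), a trimmed-down sketch of the field in isolation:

from marshmallow import Schema
from marshmallow.fields import Float

class TorrentProgressSchema(Schema):
    # mirrors only the field added in this commit; the real TorrentSchema declares many more
    progress = Float()

print(TorrentProgressSchema().load({"progress": 0.42}))  # -> {'progress': 0.42}
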
@@ -24,15 +24,14 @@ def add_fake_torrents_channels(session):
infohash=random_infohash(),
id_=123,
sign_with=ext_key,
version=10,
local_version=(ind % 11),
)
for torrent_ind in range(torrents_per_channel):
rand_infohash = random_infohash()
infohashes.append(rand_infohash)
session.mds.TorrentMetadata(
origin_id=channel.id_,
title='torrent%d' % torrent_ind,
infohash=rand_infohash,
sign_with=ext_key,
origin_id=channel.id_, title='torrent%d' % torrent_ind, infohash=rand_infohash, sign_with=ext_key
)


@@ -45,17 +44,13 @@ def my_channel(session):
origin_id=chan.id_, title='torrent%d' % ind, status=NEW, infohash=random_infohash()
)
for ind in range(5, 9):
_ = session.mds.TorrentMetadata(
origin_id=chan.id_, title='torrent%d' % ind, infohash=random_infohash()
)
_ = session.mds.TorrentMetadata(origin_id=chan.id_, title='torrent%d' % ind, infohash=random_infohash())

chan2 = session.mds.ChannelMetadata.create_channel('test2', 'test2')
for ind in range(5):
_ = session.mds.TorrentMetadata(
origin_id=chan2.id_, title='torrentB%d' % ind, status=NEW, infohash=random_infohash()
)
for ind in range(5, 9):
_ = session.mds.TorrentMetadata(
origin_id=chan2.id_, title='torrentB%d' % ind, infohash=random_infohash()
)
_ = session.mds.TorrentMetadata(origin_id=chan2.id_, title='torrentB%d' % ind, infohash=random_infohash())
return chan
@@ -9,6 +9,8 @@

import pytest

from tribler_common.simpledefs import CHANNEL_STATE

from tribler_core.modules.libtorrent.torrentdef import TorrentDef
from tribler_core.modules.metadata_store.serialization import COLLECTION_NODE, REGULAR_TORRENT
from tribler_core.restapi.base_api_test import do_request
@@ -18,22 +20,46 @@


@pytest.mark.asyncio
async def test_get_channels(enable_chant, enable_api, add_fake_torrents_channels, session):
async def test_get_channels(enable_chant, enable_api, add_fake_torrents_channels, mock_dlmgr, session):
"""
Test whether we can query some channels in the database with the REST API
"""
json_dict = await do_request(session, 'channels?sort_by=title')
json_dict = await do_request(session, 'channels')
assert len(json_dict['results']) == 10
# Default channel state should be METAINFO_LOOKUP
assert json_dict['results'][0]['state'] == CHANNEL_STATE.METAINFO_LOOKUP.value

# We test out different combinations of channels' states and download progress
# State UPDATING:
session.mds.compute_channel_update_progress = lambda _: 0.5
with db_session:
channel = session.mds.ChannelMetadata.select().first()
channel.subscribed = True
channel.local_version = 123

json_dict = await do_request(session, 'channels')
assert json_dict['results'][0]['progress'] == 0.5

# State DOWNLOADING
with db_session:
channel = session.mds.ChannelMetadata.select().first()
channel.subscribed = True
channel.local_version = 0

session.dlmgr.metainfo_requests.get = lambda _: False
session.dlmgr.download_exists = lambda _: True
json_dict = await do_request(session, 'channels')
assert json_dict['results'][0]['state'] == CHANNEL_STATE.DOWNLOADING.value


@pytest.mark.asyncio
async def test_get_channels_sort_by_health(enable_chant, enable_api, add_fake_torrents_channels, session):
async def test_get_channels_sort_by_health(enable_chant, enable_api, add_fake_torrents_channels, mock_dlmgr, session):
json_dict = await do_request(session, 'channels?sort_by=health')
assert len(json_dict['results']) == 10


@pytest.mark.asyncio
async def test_get_channels_invalid_sort(enable_chant, enable_api, add_fake_torrents_channels, session):
async def test_get_channels_invalid_sort(enable_chant, enable_api, add_fake_torrents_channels, mock_dlmgr, session):
"""
Test whether we can query some channels in the database with the REST API and an invalid sort parameter
"""
@@ -42,7 +68,7 @@ async def test_get_channels_invalid_sort(enable_chant, enable_api, add_fake_torr


@pytest.mark.asyncio
async def test_get_subscribed_channels(enable_chant, enable_api, add_fake_torrents_channels, session):
async def test_get_subscribed_channels(enable_chant, enable_api, add_fake_torrents_channels, mock_dlmgr, session):
"""
Test whether we can successfully query channels we are subscribed to with the REST API
"""
@@ -51,7 +77,7 @@ async def test_get_subscribed_channels(enable_chant, enable_api, add_fake_torren


@pytest.mark.asyncio
async def test_get_channels_count(enable_chant, enable_api, add_fake_torrents_channels, session):
async def test_get_channels_count(enable_chant, enable_api, add_fake_torrents_channels, mock_dlmgr, session):
"""
Test getting the total number of channels through the API
"""
@@ -68,15 +94,16 @@ async def test_create_channel(enable_chant, enable_api, session):
with db_session:
assert session.mds.ChannelMetadata.get(title="New channel")
await do_request(
session,
'channels/mychannel/0/channels', request_type='POST', post_data={"name": "foobar"}, expected_code=200
session, 'channels/mychannel/0/channels', request_type='POST', post_data={"name": "foobar"}, expected_code=200
)
with db_session:
assert session.mds.ChannelMetadata.get(title="foobar")


@pytest.mark.asyncio
async def test_get_contents_count(enable_chant, enable_api, add_fake_torrents_channels, session):
async def test_get_contents_count(
enable_chant, enable_api, add_fake_torrents_channels, mock_dlmgr_get_download, session
):
"""
Test getting the total number of items in a specific channel
"""
@@ -87,19 +114,22 @@ async def test_get_contents_count(enable_chant, enable_api, add_fake_torrents_ch


@pytest.mark.asyncio
async def test_get_channel_contents(enable_chant, enable_api, add_fake_torrents_channels, session):
async def test_get_channel_contents(enable_chant, enable_api, add_fake_torrents_channels, mock_dlmgr, session):
"""
Test whether we can query torrents from a channel
"""
session.dlmgr.get_download().get_state().get_progress = lambda: 0.5
with db_session:
chan = session.mds.ChannelMetadata.select().first()
json_dict = await do_request(session, 'channels/%s/123' % hexlify(chan.public_key), expected_code=200)
print(json_dict)
assert len(json_dict['results']) == 5
assert 'status' in json_dict['results'][0]
assert json_dict['results'][0]['progress'] == 0.5


@pytest.mark.asyncio
async def test_get_channel_contents_by_type(enable_chant, enable_api, my_channel, session):
async def test_get_channel_contents_by_type(enable_chant, enable_api, my_channel, mock_dlmgr_get_download, session):
"""
Test filtering channel contents by a list of data types
"""
@@ -108,9 +138,9 @@ async def test_get_channel_contents_by_type(enable_chant, enable_api, my_channel

json_dict = await do_request(
session,
'channels/%s/%d?metadata_type=%d&metadata_type=%d' %
(hexlify(my_channel.public_key), my_channel.id_, COLLECTION_NODE, REGULAR_TORRENT),
expected_code=200
'channels/%s/%d?metadata_type=%d&metadata_type=%d'
% (hexlify(my_channel.public_key), my_channel.id_, COLLECTION_NODE, REGULAR_TORRENT),
expected_code=200,
)

assert len(json_dict['results']) == 10
@@ -388,6 +418,7 @@ async def test_add_torrent_from_magnet(enable_chant, enable_api, my_channel, moc
"""
Test whether we can add a torrent to your channel from a magnet link
"""

def fake_get_metainfo(_, **__):
meta_info = TorrentDef.load(TORRENT_UBUNTU_FILE).get_metainfo()
return succeed(meta_info)
@@ -410,6 +441,7 @@ async def test_add_torrent_from_magnet_error(enable_chant, enable_api, my_channe
"""
Test whether an error while adding magnets to your channel results in a proper 500 error
"""

def fake_get_metainfo(*_, **__):
return succeed(None)

@@ -426,7 +458,7 @@ def fake_get_metainfo(*_, **__):


@pytest.mark.asyncio
async def test_get_torrents(enable_chant, enable_api, my_channel, session):
async def test_get_torrents(enable_chant, enable_api, my_channel, mock_dlmgr_get_download, session):
"""
Test whether we can query some torrents in the database with the REST API
"""
Expand All @@ -437,7 +469,7 @@ async def test_get_torrents(enable_chant, enable_api, my_channel, session):


@pytest.mark.asyncio
async def test_get_torrents_ffa_channel(enable_chant, enable_api, my_channel, session):
async def test_get_torrents_ffa_channel(enable_chant, enable_api, my_channel, mock_dlmgr_get_download, session):
"""
Test whether we can query channel contents for unsigned (legacy/FFA) channels
"""
