diff --git a/admin/sql/create_tables.sql b/admin/sql/create_tables.sql index 70efd3e207..4e318efaaa 100644 --- a/admin/sql/create_tables.sql +++ b/admin/sql/create_tables.sql @@ -49,7 +49,7 @@ CREATE TABLE statistics.user ( artists JSONB, releases JSONB, recordings JSONB, - last_updated TIMESTAMP WITH TIME ZONE + last_updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW() ); CREATE TABLE statistics.artist ( @@ -60,7 +60,7 @@ CREATE TABLE statistics.artist ( recordings JSONB, users JSONB, listen_count JSONB, - last_updated TIMESTAMP WITH TIME ZONE + last_updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW() ); ALTER TABLE statistics.artist ADD CONSTRAINT artist_stats_msid_uniq UNIQUE (msid); @@ -71,7 +71,7 @@ CREATE TABLE statistics.release ( recordings JSONB, users JSONB, listen_count JSONB, - last_updated TIMESTAMP WITH TIME ZONE + last_updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW() ); ALTER TABLE statistics.release ADD CONSTRAINT release_stats_msid_uniq UNIQUE (msid); @@ -81,7 +81,7 @@ CREATE TABLE statistics.recording ( name VARCHAR, users_all_time JSONB, listen_count JSONB, - last_updated TIMESTAMP WITH TIME ZONE + last_updated TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW() ); ALTER TABLE statistics.recording ADD CONSTRAINT recording_stats_msid_uniq UNIQUE (msid); diff --git a/admin/sql/updates/2017-08-03-make-stats-updated-not-null.sql b/admin/sql/updates/2017-08-03-make-stats-updated-not-null.sql new file mode 100644 index 0000000000..9a1b30c8b8 --- /dev/null +++ b/admin/sql/updates/2017-08-03-make-stats-updated-not-null.sql @@ -0,0 +1,21 @@ +BEGIN; + +UPDATE statistics.user SET last_updated = to_timestamp(0) WHERE last_updated IS NULL; +UPDATE statistics.artist SET last_updated = to_timestamp(0) WHERE last_updated IS NULL; +UPDATE statistics.release SET last_updated = to_timestamp(0) WHERE last_updated IS NULL; +UPDATE statistics.recording SET last_updated = to_timestamp(0) WHERE last_updated IS NULL; + +ALTER TABLE statistics.user ALTER COLUMN 
last_updated SET NOT NULL; +ALTER TABLE statistics.user ALTER COLUMN last_updated SET DEFAULT NOW(); + +ALTER TABLE statistics.artist ALTER COLUMN last_updated SET NOT NULL; +ALTER TABLE statistics.artist ALTER COLUMN last_updated SET DEFAULT NOW(); + + +ALTER TABLE statistics.release ALTER COLUMN last_updated SET NOT NULL; +ALTER TABLE statistics.release ALTER COLUMN last_updated SET DEFAULT NOW(); + +ALTER TABLE statistics.recording ALTER COLUMN last_updated SET NOT NULL; +ALTER TABLE statistics.recording ALTER COLUMN last_updated SET DEFAULT NOW(); + +COMMIT; diff --git a/listenbrainz/bigquery-writer/bigquery-writer.py b/listenbrainz/bigquery-writer/bigquery-writer.py index 6689f916c8..2133dfc940 100755 --- a/listenbrainz/bigquery-writer/bigquery-writer.py +++ b/listenbrainz/bigquery-writer/bigquery-writer.py @@ -12,10 +12,11 @@ from googleapiclient import discovery from googleapiclient.errors import HttpError +from listenbrainz.bigquery import create_bigquery_object +from listenbrainz.bigquery import NoCredentialsVariableException, NoCredentialsFileException from oauth2client.client import GoogleCredentials REPORT_FREQUENCY = 5000 -APP_CREDENTIALS_FILE = os.environ.get('GOOGLE_APPLICATION_CREDENTIALS') ERROR_RETRY_DELAY = 3 # number of seconds to wait until retrying an operation DUMP_JSON_WITH_ERRORS = True @@ -153,18 +154,11 @@ def start(self): sleep(66666) return - if not APP_CREDENTIALS_FILE: - self.log.error("BiqQueryWriter not started, the GOOGLE_APPLICATION_CREDENTIALS env var is not defined.") + try: + self.bigquery = create_bigquery_object() + except (NoCredentialsFileException, NoCredentialsVariableException): + self.log.error("Credential File not present or invalid! Sleeping...") sleep(1000) - return - - if not os.path.exists(APP_CREDENTIALS_FILE): - self.log.error("BiqQueryWriter not started, %s is missing." 
% APP_CREDENTIALS_FILE)
-            sleep(1000)
-            return
-
-        credentials = GoogleCredentials.get_application_default()
-        self.bigquery = discovery.build('bigquery', 'v2', credentials=credentials)
 
         while True:
             try:
diff --git a/listenbrainz/bigquery.py b/listenbrainz/bigquery.py
new file mode 100644
index 0000000000..a6add0e5f1
--- /dev/null
+++ b/listenbrainz/bigquery.py
@@ -0,0 +1,37 @@
+import logging
+import os
+from googleapiclient import discovery
+import googleapiclient
+from oauth2client.client import GoogleCredentials
+
+# Module-level logger: without this, the logger.error() calls below raise NameError.
+logger = logging.getLogger(__name__)
+
+APP_CREDENTIALS_FILE = os.environ.get('GOOGLE_APPLICATION_CREDENTIALS')
+
+def create_bigquery_object():
+    """ Initiates the connection to Google BigQuery. Returns a BigQuery object. """
+
+    if not APP_CREDENTIALS_FILE:
+        logger.error("The GOOGLE_APPLICATION_CREDENTIALS variable is undefined, cannot connect to BigQuery")
+        raise NoCredentialsVariableException
+
+    if not os.path.exists(APP_CREDENTIALS_FILE):
+        logger.error("The BigQuery credentials file does not exist, cannot connect to BigQuery")
+        raise NoCredentialsFileException
+
+    credentials = GoogleCredentials.get_application_default()
+    return discovery.build('bigquery', 'v2', credentials=credentials)
+
+
+# Exceptions
+class BigQueryException(Exception):
+    pass
+
+
+class NoCredentialsVariableException(BigQueryException):
+    pass
+
+
+class NoCredentialsFileException(BigQueryException):
+    pass
diff --git a/listenbrainz/config.py.sample b/listenbrainz/config.py.sample
index c3391ca29a..62ac99a271 100644
--- a/listenbrainz/config.py.sample
+++ b/listenbrainz/config.py.sample
@@ -54,6 +54,9 @@ BIGQUERY_TABLE_ID = "listen"
 
 # Stats
 STATS_ENTITY_LIMIT = 100 # the number of entities to calculate at max with BQ
+STATS_CALCULATION_LOGIN_TIME = 30 # users must have logged in to LB in the past 30 days for stats to be calculated
+STATS_CALCULATION_INTERVAL = 7 # stats are calculated every 7 days
+
 # Max time in seconds after which the playing_now stream will expire.
PLAYING_NOW_MAX_DURATION = 10 * 60 diff --git a/listenbrainz/db/stats.py b/listenbrainz/db/stats.py new file mode 100644 index 0000000000..649217f8b1 --- /dev/null +++ b/listenbrainz/db/stats.py @@ -0,0 +1,73 @@ +"""This module contains functions to insert and retrieve statistics + calculated from Google BigQuery into the database. +""" + +import sqlalchemy +import ujson +from listenbrainz import db + + +def insert_user_stats(user_id, artists, recordings, releases, artist_count): + """Inserts user stats calculated from Google BigQuery into the database. + + If stats are already present for some user, they are updated to the new + values passed. + + Args: user_id (int): the row id of the user, + artists (dict): the top artists listened to by the user + recordings (dict): the top recordings listened to by the user + releases (dict): the top releases listened to by the user + artist_count (int): the total number of artists listened to by the user + """ + + # put all artist stats into one dict which will then be inserted + # into the artist column of the stats.user table + artist_stats = { + 'count': artist_count, + 'all_time': artists + } + + with db.engine.connect() as connection: + connection.execute(sqlalchemy.text(""" + INSERT INTO statistics.user (user_id, artists, recordings, releases) + VALUES (:user_id, :artists, :recordings, :releases) + ON CONFLICT (user_id) + DO UPDATE SET artists = :artists, + recordings = :recordings, + releases = :releases, + last_updated = NOW() + """), { + 'user_id': user_id, + 'artists': ujson.dumps(artist_stats), + 'recordings': ujson.dumps(recordings), + 'releases': ujson.dumps(releases) + } + ) + + +def get_user_stats(user_id): + """Get user stats for user with given ID. 
+ + Args: user_id (int): the row ID of the user in the DB + + Returns: A dict of the following format + { + "user_id" (int): the id of the user + "artists" (dict): artist stats for the user + "releases" (dict) : release stats for the user + "recordings" (dict): recording stats for the user + "last_updated" (datetime): timestamp when the stats were last updated + } + """ + + with db.engine.connect() as connection: + result = connection.execute(sqlalchemy.text(""" + SELECT user_id, artists, releases, recordings, last_updated + FROM statistics.user + WHERE user_id = :user_id + """), { + 'user_id': user_id + } + ) + row = result.fetchone() + return dict(row) if row else None diff --git a/listenbrainz/db/tests/test_stats.py b/listenbrainz/db/tests/test_stats.py new file mode 100644 index 0000000000..32c2382bc7 --- /dev/null +++ b/listenbrainz/db/tests/test_stats.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +import json +import os +import listenbrainz.db.user as db_user +import listenbrainz.db.stats as db_stats +from listenbrainz.db.testing import DatabaseTestCase + + +class StatsDatabaseTestCase(DatabaseTestCase): + + TEST_DATA_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'testdata') + + def setUp(self): + DatabaseTestCase.setUp(self) + self.user = db_user.get_or_create('stats_user') + + def path_to_data_file(self, filename): + return os.path.join(StatsDatabaseTestCase.TEST_DATA_PATH, filename) + + def test_insert_user_stats(self): + + with open(self.path_to_data_file('user_top_artists.json')) as f: + artists = json.load(f) + with open(self.path_to_data_file('user_top_releases.json')) as f: + releases = json.load(f) + with open(self.path_to_data_file('user_top_recordings.json')) as f: + recordings = json.load(f) + + + db_stats.insert_user_stats( + user_id=self.user['id'], + artists=artists, + recordings=recordings, + releases=releases, + artist_count=2, + ) + + result = db_stats.get_user_stats(user_id=self.user['id']) + 
self.assertDictEqual(result['artists']['all_time'], artists) + self.assertEqual(result['artists']['count'], 2) + self.assertDictEqual(result['releases'], releases) + self.assertDictEqual(result['recordings'], recordings) + self.assertGreater(int(result['last_updated'].strftime('%s')), 0) + diff --git a/listenbrainz/db/tests/test_user.py b/listenbrainz/db/tests/test_user.py index 0664d71a2c..df295e1678 100644 --- a/listenbrainz/db/tests/test_user.py +++ b/listenbrainz/db/tests/test_user.py @@ -1,10 +1,9 @@ # -*- coding: utf-8 -*- -from listenbrainz.db.testing import DatabaseTestCase -import listenbrainz.db.user as db_user -from listenbrainz import db import time import sqlalchemy - +from listenbrainz import db +from listenbrainz.db.testing import DatabaseTestCase +import listenbrainz.db.user as db_user class UserTestCase(DatabaseTestCase): @@ -59,3 +58,34 @@ def test_update_latest_import(self): db_user.update_latest_import(user['musicbrainz_id'], val) user = db_user.get_by_mb_id(user['musicbrainz_id']) self.assertEqual(val, int(user['latest_import'].strftime('%s'))) + + def test_get_recently_logged_in_users(self): + """Tests getting recently logged in users""" + + # create two users, set one's last_login + # to a very old value and one's last_login + # to now and then call get_recently_logged_in_users + user1 = db_user.get_or_create('recentuser1') + with db.engine.connect() as connection: + connection.execute(sqlalchemy.text(""" + UPDATE "user" + SET last_login = to_timestamp(0) + WHERE musicbrainz_id = :musicbrainz_id + """), { + 'musicbrainz_id': 'recentuser1' + }) + + user2 = db_user.get_or_create('recentuser2') + with db.engine.connect() as connection: + connection.execute(sqlalchemy.text(""" + UPDATE "user" + SET last_login = NOW() + WHERE musicbrainz_id = :musicbrainz_id + """), { + 'musicbrainz_id': 'recentuser2' + }) + + recent_users = db_user.get_recently_logged_in_users() + self.assertEqual(len(recent_users), 1) + 
self.assertEqual(recent_users[0]['musicbrainz_id'], 'recentuser2')
+
diff --git a/listenbrainz/db/user.py b/listenbrainz/db/user.py
index 4648e8c3fa..a147af63a9 100644
--- a/listenbrainz/db/user.py
+++ b/listenbrainz/db/user.py
@@ -1,10 +1,10 @@
-
-from listenbrainz import db
 import uuid
 import sqlalchemy
-from listenbrainz.db.exceptions import DatabaseException
 import logging
 import time
+from listenbrainz import db
+from listenbrainz.db.exceptions import DatabaseException
+from listenbrainz import config
 
 logger = logging.getLogger(__name__)
 logger.setLevel(logging.INFO)
@@ -213,3 +213,18 @@ def update_latest_import(musicbrainz_id, ts):
     except sqlalchemy.exc.ProgrammingError as e:
         logger.error(e)
         raise DatabaseException
+
+
+def get_recently_logged_in_users():
+    """Returns a list of users who have logged-in in the
+       last config.STATS_CALCULATION_LOGIN_TIME days
+    """
+    with db.engine.connect() as connection:
+        result = connection.execute(sqlalchemy.text("""
+            SELECT {columns}
+              FROM "user"
+             WHERE last_login >= NOW() - make_interval(days => :x)
+            """.format(columns=','.join(USER_GET_COLUMNS))), {
+                'x': config.STATS_CALCULATION_LOGIN_TIME
+            })
+        return [dict(row) for row in result]
diff --git a/listenbrainz/stats/__init__.py b/listenbrainz/stats/__init__.py
index 98c3ffd9de..3cce86f510 100644
--- a/listenbrainz/stats/__init__.py
+++ b/listenbrainz/stats/__init__.py
@@ -1,9 +1,6 @@
-from googleapiclient import discovery
-import googleapiclient
-from oauth2client.client import GoogleCredentials
 import os
 import logging
-from listenbrainz.stats.exceptions import NoCredentialsVariableException, NoCredentialsFileException
+from listenbrainz.bigquery import create_bigquery_object
 import listenbrainz.config as config
 import time
 
@@ -12,25 +9,14 @@
 logger = logging.getLogger(__name__)
 logger.setLevel(logging.INFO)
 
-APP_CREDENTIALS_FILE = os.environ.get('GOOGLE_APPLICATION_CREDENTIALS')
-
 bigquery = None
 
 
 def init_bigquery_connection():
     """ Initiates the connection to Google
BigQuery """ - if not APP_CREDENTIALS_FILE: - logger.error("The GOOGLE_APPLICATIONS_CREDENTIALS variable is undefined, cannot connect to BigQuery") - raise NoCredentialsVariableException - - if not os.path.exists(APP_CREDENTIALS_FILE): - logger.error("The BigQuery credentials file does not exist, cannot connect to BigQuery") - raise NoCredentialsFileException - global bigquery - credentials = GoogleCredentials.get_application_default() - bigquery = discovery.build('bigquery', 'v2', credentials=credentials) + bigquery = create_bigquery_object() def get_parameters_dict(parameters): diff --git a/listenbrainz/stats/calculate.py b/listenbrainz/stats/calculate.py new file mode 100644 index 0000000000..13502c5baa --- /dev/null +++ b/listenbrainz/stats/calculate.py @@ -0,0 +1,35 @@ +import listenbrainz.stats.user as stats_user +import listenbrainz.db.user as db_user +import listenbrainz.db.stats as db_stats +from listenbrainz import db +from listenbrainz import config +from listenbrainz import stats + + +def calculate_user_stats(): + for user in db_user.get_recently_logged_in_users(): + recordings = stats_user.get_top_recordings(musicbrainz_id=user['musicbrainz_id']) + artists = stats_user.get_top_artists(musicbrainz_id=user['musicbrainz_id']) + releases = stats_user.get_top_releases(musicbrainz_id=user['musicbrainz_id']) + artist_count = stats_user.get_artist_count(musicbrainz_id=user['musicbrainz_id']) + + db_stats.insert_user_stats( + user_id=user['id'], + artists=artists, + recordings=recordings, + releases=releases, + artist_count=artist_count + ) + +def calculate_stats(): + calculate_user_stats() + +if __name__ == '__main__': + print('Connecting to Google BigQuery...') + stats.init_bigquery_connection() + print('Connecting to database...') + db.init_db_connection(config.SQLALCHEMY_DATABASE_URI) + print('Connected!') + print('Calculating statistics using Google BigQuery...') + calculate_stats() + print('Calculations done!') diff --git a/listenbrainz/stats/user.py 
b/listenbrainz/stats/user.py
index 3ef04acc7b..f575560373 100644
--- a/listenbrainz/stats/user.py
+++ b/listenbrainz/stats/user.py
@@ -133,11 +133,49 @@ def get_top_artists(musicbrainz_id, time_interval=None):
 
     return stats.run_query(query, parameters)
 
+
+def get_artist_count(musicbrainz_id, time_interval=None):
+    """ Get artist count for user with given MusicBrainz ID over a particular period of time
+
+        Args: musicbrainz_id (str): the MusicBrainz ID of the user
+              time_interval  (str): the time interval over which artist count should be returned
+                                    (defaults to all time)
+
+        Returns: artist_count (int): total number of artists listened to by the user in that
+                                     period of time
+    """
+
+    filter_clause = ""
+    if time_interval:
+        filter_clause = "AND listened_at >= TIMESTAMP_SUB(CURRENT_TIMESTAMP(), INTERVAL {})".format(time_interval)
+
+    query = """SELECT COUNT(DISTINCT(artist_msid)) as artist_count
+                 FROM {dataset_id}.{table_id}
+                WHERE user_name = @musicbrainz_id
+                {time_filter_clause}
+            """.format(
+                dataset_id=config.BIGQUERY_DATASET_ID,
+                table_id=config.BIGQUERY_TABLE_ID,
+                time_filter_clause=filter_clause,
+            )
+
+    parameters = [
+        {
+            'name': 'musicbrainz_id',
+            'type': 'STRING',
+            'value': musicbrainz_id,
+        }
+    ]
+
+    return stats.run_query(query, parameters)[0]['artist_count']
+
+
+
 def get_top_releases(musicbrainz_id, time_interval=None):
     """ Get top releases for user with given MusicBrainz ID over a particular period of time
 
         Args: musicbrainz_id (str): the MusicBrainz ID of the user
-              time_interval  (str): the time interval over which top artists should be returned
+              time_interval  (str): the time interval over which top releases should be returned
                                     (defaults to all time)
 
         Returns: A sorted list of dicts with the following structure
diff --git a/listenbrainz/testdata/user_top_artists.json b/listenbrainz/testdata/user_top_artists.json
new file mode 100644
index 0000000000..6dfdc2e4f5
--- /dev/null
+++ b/listenbrainz/testdata/user_top_artists.json
@@ -0,0 +1,14 @@
+{
+
"all_time": [ + { + "listen_count": 230, + "artist_msid": "ed6c388e-ea65-431f-8be5-4959239d8c65", + "name": "Kanye West" + }, + { + "listen_count": 229, + "artist_msid": "80ca06c7-07fb-4b3a-a315-ad584cc8eeb0", + "name": "Frank Ocean" + } + ] +} diff --git a/listenbrainz/testdata/user_top_recordings.json b/listenbrainz/testdata/user_top_recordings.json new file mode 100644 index 0000000000..2101d15058 --- /dev/null +++ b/listenbrainz/testdata/user_top_recordings.json @@ -0,0 +1,14 @@ +{ + "all_time": [ + { + "listen_count": 230, + "artist_msid": "59f4d2c6-ca76-4dc7-80de-c3fef6b51211", + "name": "Fade" + }, + { + "listen_count": 229, + "artist_msid": "c2375e2e-e98a-4c6b-a139-3aae0a831ebf", + "name": "Nikes" + } + ] +} diff --git a/listenbrainz/testdata/user_top_releases.json b/listenbrainz/testdata/user_top_releases.json new file mode 100644 index 0000000000..c0306d6282 --- /dev/null +++ b/listenbrainz/testdata/user_top_releases.json @@ -0,0 +1,14 @@ +{ + "all_time": [ + { + "listen_count": 230, + "artist_msid": "19eadc02-1f64-48fe-bb4c-33732b96f7b1", + "name": "The Life Of Pablo" + }, + { + "listen_count": 229, + "artist_msid": "155fa8f8-ef04-471d-ab14-dd21838338d7", + "name": "Blonde" + } + ] +} diff --git a/listenbrainz/webserver/scheduler.py b/listenbrainz/webserver/scheduler.py index 349e6b43ed..6a58c82382 100644 --- a/listenbrainz/webserver/scheduler.py +++ b/listenbrainz/webserver/scheduler.py @@ -1,74 +1,32 @@ -from sqlalchemy import create_engine, text -from sqlalchemy.pool import NullPool -import sqlalchemy.exc +""" This module contains code that triggers jobs we want to run + on regular intervals. 
+""" from apscheduler.schedulers.background import BackgroundScheduler +from listenbrainz import stats +from listenbrainz.stats.calculate import calculate_stats import logging -# Create a cron job to clean postgres database class ScheduledJobs(): - """ Schedule the scripts that need to be run at scheduled intervals """ + """Schedule the scripts that need to be run at regular intervals """ def __init__(self, conf): + self.log = logging.getLogger(__name__) + logging.basicConfig() + self.log.setLevel(logging.INFO) + self.conf = conf self.scheduler = BackgroundScheduler() + stats.init_bigquery_connection() self.add_jobs() self.run() def run(self): + """ Start the scheduler but stop on KeyboardInterrupts and such""" try: self.scheduler.start() except (KeyboardInterrupt, SystemExit): - self.shutdown() + self.scheduler.shutdown() def add_jobs(self): - args = {} - if 'MAX_POSTGRES_LISTEN_HISTORY' in self.conf: - args['max_days'] = int(self.conf['MAX_POSTGRES_LISTEN_HISTORY']) - - self.scheduler.add_job(self._clean_postgres, 'interval', hours=24, \ - kwargs=args) - - def _clean_postgres(self, max_days=90): - """ Clean all the listens that are older than a set no of days - Default: 90 days - """ - - # If max days is set to a negative number, don't throw anything out - if max_days < 0: - return - - seconds = max_days*24*3600 - engine = create_engine(self.conf['SQLALCHEMY_DATABASE_URI'], poolclass=NullPool) - connection = engine.connect() - # query = """ - # WITH max_table as ( - # SELECT user_id, max(extract(epoch from ts)) - %s as mx - # FROM listens - # GROUP BY user_id - # ) - # DELETE FROM listens - # WHERE extract(epoch from ts) < (SELECT mx - # FROM max_table - # WHERE max_table.user_id = listens.user_id) - # RETURNING * - # """ - - query = """ - DELETE FROM listen - WHERE id in ( - SELECT id FROM listen - JOIN ( - SELECT user_id, extract(epoch from max(ts)) as max - FROM listens - GROUP BY user_id - ) max_table on listens.user_id = max_table.user_id AND extract(epoch 
from listens.ts) <= max_table.max - %s - ) RETURNING * - """ - - deleted = connection.execute(query % (seconds)) - log = logging.getLogger(__name__) - log.info('(Scheduled Job) CleanPostgres: ' + str(len(deleted.fetchall())) + " records deleted successfully") - connection.close() - - def shutdown(self): - self.scheduler.shutdown() + """Add the jobs that need to be run to the scheduler""" + self.scheduler.add_job(calculate_stats, 'interval', days=self.conf['STATS_CALCULATION_INTERVAL']) diff --git a/listenbrainz/webserver/templates/user/profile.html b/listenbrainz/webserver/templates/user/profile.html index 9e5fd0ee62..c46f0f92ea 100644 --- a/listenbrainz/webserver/templates/user/profile.html +++ b/listenbrainz/webserver/templates/user/profile.html @@ -11,11 +11,29 @@

{{ user.musicbrainz_id }}

See profile on MusicBrainz - {% if have_listen_count %} -

Listen Count: {{ listen_count }}

- {% else %} - We were not able to get listen count for {{ user.musicbrainz_id }} due to an error. Please reload to try again. - {% endif %} + +

Statistics

+ +
+
+ + + {% if listen_count %} + + + + + {% endif %} + + {% if artist_count %} + + + + + {% endif %} +
Listen count{{ listen_count }}
Artist count{{ artist_count }}
+
+

Recent listens

diff --git a/listenbrainz/webserver/views/user.py b/listenbrainz/webserver/views/user.py index a7f623494d..e0689b2d6e 100644 --- a/listenbrainz/webserver/views/user.py +++ b/listenbrainz/webserver/views/user.py @@ -1,4 +1,3 @@ - from flask import Blueprint, render_template, request, url_for, Response, redirect, flash, current_app, jsonify from flask_login import current_user, login_required from werkzeug.exceptions import NotFound, BadRequest, RequestEntityTooLarge, InternalServerError @@ -9,6 +8,7 @@ from listenbrainz import webserver from listenbrainz.webserver import flash import listenbrainz.db.user as db_user +import listenbrainz.db.stats as db_stats import listenbrainz.config as config from listenbrainz.db.exceptions import DatabaseException from flask import make_response @@ -152,6 +152,12 @@ def profile(user_name): } listens.insert(0, listen) + user_stats = db_stats.get_user_stats(user.id) + try: + artist_count = int(user_stats['artists']['count']) + except (KeyError, TypeError): + artist_count = 0 + return render_template( "user/profile.html", user=user, @@ -161,6 +167,7 @@ def profile(user_name): spotify_uri=_get_spotify_uri_for_listens(listens), have_listen_count=have_listen_count, listen_count=format(int(listen_count), ",d"), + artist_count=format(artist_count, ",d") )