diff --git a/.circleci/config.yml b/.circleci/config.yml index 55023f8892d..d8edfca77f6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -707,7 +707,6 @@ jobs: name: discovery provider tests command: | cd discovery-provider - export audius_ipfs_port=5001 export audius_redis_url=redis://localhost:6379/0 export audius_elasticsearch_url=http://localhost:9200 export audius_delegate_owner_wallet=0x1D9c77BcfBfa66D37390BF2335f0140979a6122B diff --git a/discovery-provider/compose/.env b/discovery-provider/compose/.env index bb4a9e0c0b1..3b83dbecb49 100644 --- a/discovery-provider/compose/.env +++ b/discovery-provider/compose/.env @@ -5,13 +5,10 @@ audius_web3_eth_provider_url=http://audius_ganache_cli_eth_contracts:8545 audius_redis_url=redis://redis-server:6379/00 audius_db_url=postgresql+psycopg2://postgres:postgres@discovery-provider-db:5432/audius_discovery audius_db_url_read_replica=postgresql+psycopg2://postgres:postgres@discovery-provider-db:5432/audius_discovery -audius_ipfs_host=ipfs-node -audius_ipfs_port=5001 -WAIT_HOSTS=discovery-provider-db:5432,redis-server:6379, ipfs-node:5001 +WAIT_HOSTS=discovery-provider-db:5432,redis-server:6379 audius_delegate_owner_wallet=0x1D9c77BcfBfa66D37390BF2335f0140979a6122B audius_delegate_private_key=0x3873ed01bfb13621f9301487cc61326580614a5b99f3c33cf39c6f9da3a19cad audius_discprov_identity_service_url=http://audius-identity-service_identity-service_1:7000 -audius_discprov_user_metadata_service_url=http://cn1_creator-node_1:4000 audius_discprov_blacklist_block_indexing_interval=5 audius_eth_contracts_registry=0x26b9c682a87879Ef035c92E4F978c303eC69a84B audius_solana_track_listen_count_address=4ZN1TTLp76gGCZ7tzXLfV4cahuHaP6oHFuiVWQS3knTp diff --git a/discovery-provider/compose/.test.env b/discovery-provider/compose/.test.env index 1c0562bad64..ea0c57b000a 100644 --- a/discovery-provider/compose/.test.env +++ b/discovery-provider/compose/.test.env @@ -1,5 +1,4 @@ COMPOSE_PROJECT_NAME=dn1 audius_db_port=5432 -audius_ipfs_external_port=5001 audius_redis_port=5379 audius_es_port=9200 diff --git a/discovery-provider/compose/docker-compose.backend.yml b/discovery-provider/compose/docker-compose.backend.yml index 3e87ed06e56..325babfab81 100644 --- a/discovery-provider/compose/docker-compose.backend.yml +++ b/discovery-provider/compose/docker-compose.backend.yml @@ -18,9 +18,8 @@ services: - audius_elasticsearch_search_enabled=true - audius_delegate_owner_wallet=${audius_delegate_owner_wallet} - audius_delegate_private_key=${audius_delegate_private_key} - - audius_ipfs_host=${COMPOSE_PROJECT_NAME}-ipfs-node - audius_discprov_dev_mode=true - - WAIT_HOSTS=${COMPOSE_PROJECT_NAME}_discovery-provider-db_1:5432,${COMPOSE_PROJECT_NAME}_redis-server_1:6379, ${COMPOSE_PROJECT_NAME}-ipfs-node:5001 + - WAIT_HOSTS=${COMPOSE_PROJECT_NAME}_discovery-provider-db_1:5432,${COMPOSE_PROJECT_NAME}_redis-server_1:6379 depends_on: discovery-provider-db: condition: service_healthy diff --git a/discovery-provider/compose/docker-compose.ipfs.yml b/discovery-provider/compose/docker-compose.ipfs.yml deleted file mode 100644 index 03c8113c72f..00000000000 --- a/discovery-provider/compose/docker-compose.ipfs.yml +++ /dev/null @@ -1,16 +0,0 @@ -# Compose file to run just the discovery ipfs container -version: '3' -services: - ipfs-node: - container_name: '${COMPOSE_PROJECT_NAME}-ipfs-node' - image: ipfs/go-ipfs:v0.8.0 - - ports: - - '${audius_ipfs_external_port}:5001' - - networks: - - audius_dev - -networks: - audius_dev: - external: true \ No newline at end of file diff 
--git a/discovery-provider/compose/docker-compose.web-server.yml b/discovery-provider/compose/docker-compose.web-server.yml index 9161c6bf360..58a2f19b110 100644 --- a/discovery-provider/compose/docker-compose.web-server.yml +++ b/discovery-provider/compose/docker-compose.web-server.yml @@ -1,7 +1,7 @@ # Compose file to run the discovery flask server on its own # # The server is pointed at whatever database is set in .env -# and runs no workers, but does need redis and IPFS to be available +# and runs no workers version: '3' services: web-server: @@ -15,10 +15,9 @@ services: - audius_db_run_migrations=false - FLASK_RUN_PORT=${audius_server_port} - audius_redis_url=redis://${COMPOSE_PROJECT_NAME}_redis-server_1:6379/00 - - audius_ipfs_host=${COMPOSE_PROJECT_NAME}-ipfs-node - audius_discprov_dev_mode=true - audius_no_workers=true - - WAIT_HOSTS=${COMPOSE_PROJECT_NAME}_redis-server_1:6379,${COMPOSE_PROJECT_NAME}-ipfs-node:5001 + - WAIT_HOSTS=${COMPOSE_PROJECT_NAME}_redis-server_1:6379 depends_on: - redis-server volumes: diff --git a/discovery-provider/compose/env/shellEnv1.sh b/discovery-provider/compose/env/shellEnv1.sh index cbc51d97bce..c05a502dc9a 100644 --- a/discovery-provider/compose/env/shellEnv1.sh +++ b/discovery-provider/compose/env/shellEnv1.sh @@ -1,6 +1,5 @@ export COMPOSE_PROJECT_NAME="dn1" export audius_server_port="5000" export audius_db_port="5432" -export audius_ipfs_external_port="5200" export audius_redis_port="5380" export audius_es_port="9200" diff --git a/discovery-provider/compose/env/shellEnv2.sh b/discovery-provider/compose/env/shellEnv2.sh index 3464ed745f7..047c627599f 100644 --- a/discovery-provider/compose/env/shellEnv2.sh +++ b/discovery-provider/compose/env/shellEnv2.sh @@ -1,6 +1,5 @@ export COMPOSE_PROJECT_NAME="dn2" export audius_server_port="5001" export audius_db_port="5433" -export audius_ipfs_external_port="5201" export audius_redis_port="5382" export audius_es_port="9201" diff --git a/discovery-provider/compose/env/shellEnv3.sh b/discovery-provider/compose/env/shellEnv3.sh index a1e2d8389cd..4d90b1fc183 100644 --- a/discovery-provider/compose/env/shellEnv3.sh +++ b/discovery-provider/compose/env/shellEnv3.sh @@ -1,6 +1,5 @@ export COMPOSE_PROJECT_NAME="dn3" export audius_server_port="5002" export audius_db_port="5434" -export audius_ipfs_external_port="5202" export audius_redis_port="5383" export audius_es_port="9202" diff --git a/discovery-provider/compose/env/shellEnv4.sh b/discovery-provider/compose/env/shellEnv4.sh index 554b31c2671..ad2034427d7 100644 --- a/discovery-provider/compose/env/shellEnv4.sh +++ b/discovery-provider/compose/env/shellEnv4.sh @@ -1,5 +1,4 @@ export COMPOSE_PROJECT_NAME="dn4" export audius_server_port="5003" export audius_db_port="5435" -export audius_ipfs_external_port="5203" export audius_redis_port="5384" diff --git a/discovery-provider/compose/env/shellEnv5.sh b/discovery-provider/compose/env/shellEnv5.sh index c29afaf35f8..39b460acc9f 100644 --- a/discovery-provider/compose/env/shellEnv5.sh +++ b/discovery-provider/compose/env/shellEnv5.sh @@ -1,5 +1,4 @@ export COMPOSE_PROJECT_NAME="dn5" export audius_server_port="5004" export audius_db_port="5436" -export audius_ipfs_external_port="5204" export audius_redis_port="5385" diff --git a/discovery-provider/compose/env/shellEnv6.sh b/discovery-provider/compose/env/shellEnv6.sh index f6fbbac0bff..6e5f67c2faf 100644 --- a/discovery-provider/compose/env/shellEnv6.sh +++ b/discovery-provider/compose/env/shellEnv6.sh @@ -1,5 +1,4 @@ export COMPOSE_PROJECT_NAME="dn6" export 
audius_server_port="5005" export audius_db_port="5437" -export audius_ipfs_external_port="5205" export audius_redis_port="5386" diff --git a/discovery-provider/compose/env/shellEnv7.sh b/discovery-provider/compose/env/shellEnv7.sh index 2c56a08d4c8..d425ec46c62 100644 --- a/discovery-provider/compose/env/shellEnv7.sh +++ b/discovery-provider/compose/env/shellEnv7.sh @@ -1,5 +1,4 @@ export COMPOSE_PROJECT_NAME="dn7" export audius_server_port="5006" export audius_db_port="5438" -export audius_ipfs_external_port="5206" export audius_redis_port="5387" diff --git a/discovery-provider/compose/env/shellEnv8.sh b/discovery-provider/compose/env/shellEnv8.sh index 3e6e96cbd2b..56606abd749 100644 --- a/discovery-provider/compose/env/shellEnv8.sh +++ b/discovery-provider/compose/env/shellEnv8.sh @@ -1,5 +1,4 @@ export COMPOSE_PROJECT_NAME="dn8" export audius_server_port="5007" export audius_db_port="5439" -export audius_ipfs_external_port="5207" export audius_redis_port="5388" diff --git a/discovery-provider/compose/env/shellEnv9.sh b/discovery-provider/compose/env/shellEnv9.sh index f5f3fe4b413..86118a0885e 100644 --- a/discovery-provider/compose/env/shellEnv9.sh +++ b/discovery-provider/compose/env/shellEnv9.sh @@ -1,5 +1,4 @@ export COMPOSE_PROJECT_NAME="dn9" export audius_server_port="5008" export audius_db_port="5440" -export audius_ipfs_external_port="5208" export audius_redis_port="5389" diff --git a/discovery-provider/compose/env/unsetShellEnv.sh b/discovery-provider/compose/env/unsetShellEnv.sh index ac5876f28e2..69d0fb70723 100644 --- a/discovery-provider/compose/env/unsetShellEnv.sh +++ b/discovery-provider/compose/env/unsetShellEnv.sh @@ -2,6 +2,5 @@ unset COMPOSE_PROJECT_NAME unset audius_server_port unset audius_db_port -unset audius_ipfs_external_port unset audius_redis_port unset audius_es_port diff --git a/discovery-provider/default_config.ini b/discovery-provider/default_config.ini index 5e18da1ba2c..32d8c5ece7b 100644 --- a/discovery-provider/default_config.ini +++ b/discovery-provider/default_config.ini @@ -8,7 +8,6 @@ block_processing_window = 20 block_processing_interval_sec = 1 peer_refresh_interval = 3000 identity_service_url = https://identityservice.test -user_metadata_service_url = '' healthy_block_diff = 100 notifications_max_block_diff = 25 notifications_max_slot_diff = 200 @@ -61,11 +60,6 @@ engine_args_literal = { 'connect_args': {'options': '-c timezone=utc'} } -[ipfs] -host = 127.0.0.1 -port = 5001 -gateway_hosts = https://cloudflare-ipfs.com,https://ipfs.io - [cors] allow_all = false diff --git a/discovery-provider/integration_tests/queries/test_search.py b/discovery-provider/integration_tests/queries/test_search.py index d6e8cff07d2..9ad007f1a7b 100644 --- a/discovery-provider/integration_tests/queries/test_search.py +++ b/discovery-provider/integration_tests/queries/test_search.py @@ -101,6 +101,7 @@ def setup_search(app_module): handle="", wallet="", name="user 1", + creator_node_endpoint="https://cn.io", updated_at=now, created_at=now, ), @@ -112,6 +113,7 @@ def setup_search(app_module): handle="", name="user 2", wallet="", + creator_node_endpoint="https://cn.io", updated_at=now, created_at=now, ), @@ -123,6 +125,7 @@ def setup_search(app_module): handle="", wallet="", name="fdwea", + creator_node_endpoint="https://cn.io", updated_at=now, created_at=now, ), diff --git a/discovery-provider/integration_tests/tasks/entity_manager/test_playlist_entity_manager.py b/discovery-provider/integration_tests/tasks/entity_manager/test_playlist_entity_manager.py index 
52e4eba00a2..f1ac86953f9 100644 --- a/discovery-provider/integration_tests/tasks/entity_manager/test_playlist_entity_manager.py +++ b/discovery-provider/integration_tests/tasks/entity_manager/test_playlist_entity_manager.py @@ -178,7 +178,7 @@ def get_events_side_effect(_, tx_receipt): block_number=0, block_timestamp=1585336422, block_hash=0, - ipfs_metadata=test_metadata, + metadata=test_metadata, ) # validate db records @@ -439,7 +439,7 @@ def get_events_side_effect(_, tx_receipt): block_number=0, block_timestamp=1585336422, block_hash=0, - ipfs_metadata=test_metadata, + metadata=test_metadata, ) # validate db records diff --git a/discovery-provider/integration_tests/tasks/entity_manager/test_social_feature_entity_manager.py b/discovery-provider/integration_tests/tasks/entity_manager/test_social_feature_entity_manager.py index 99cd805a26e..020a10b75ce 100644 --- a/discovery-provider/integration_tests/tasks/entity_manager/test_social_feature_entity_manager.py +++ b/discovery-provider/integration_tests/tasks/entity_manager/test_social_feature_entity_manager.py @@ -158,7 +158,7 @@ def get_events_side_effect(_, tx_receipt): block_number=1, block_timestamp=1585336422, block_hash=0, - ipfs_metadata={}, + metadata={}, ) # Verify follows @@ -326,7 +326,7 @@ def get_events_side_effect(_, tx_receipt): block_number=1, block_timestamp=1585336422, block_hash=0, - ipfs_metadata={}, + metadata={}, ) # Verify follows diff --git a/discovery-provider/integration_tests/tasks/entity_manager/test_track_entity_manager.py b/discovery-provider/integration_tests/tasks/entity_manager/test_track_entity_manager.py index 8776b47cddc..3da624ffaef 100644 --- a/discovery-provider/integration_tests/tasks/entity_manager/test_track_entity_manager.py +++ b/discovery-provider/integration_tests/tasks/entity_manager/test_track_entity_manager.py @@ -259,7 +259,7 @@ def get_events_side_effect(_, tx_receipt): block_number=0, block_timestamp=1585336422, block_hash=0, - ipfs_metadata=test_metadata, + metadata=test_metadata, ) # validate db records @@ -494,7 +494,7 @@ def get_events_side_effect(_, tx_receipt): block_number=0, block_timestamp=1585336422, block_hash=0, - ipfs_metadata={}, + metadata={}, ) # validate db records diff --git a/discovery-provider/integration_tests/tasks/test_index_operations.py b/discovery-provider/integration_tests/tasks/test_index_operations.py index 5c5f4114d01..e7e44d4c831 100644 --- a/discovery-provider/integration_tests/tasks/test_index_operations.py +++ b/discovery-provider/integration_tests/tasks/test_index_operations.py @@ -2,7 +2,6 @@ import secrets from contextlib import contextmanager -import ipfshttpclient import pytest import src.utils.multihash from chance import chance @@ -28,10 +27,6 @@ def seed_contract_data(task, contracts, web3): user_factory_contract = contracts["user_factory_contract"] track_factory_contract = contracts["track_factory_contract"] - ipfs_peer_host = task.shared_config["ipfs"]["host"] - ipfs_peer_port = task.shared_config["ipfs"]["port"] - ipfs = ipfshttpclient.connect(f"/dns/{ipfs_peer_host}/tcp/{ipfs_peer_port}/http") - # Retrieve web3 instance from fixture chain_id = web3.net.version @@ -79,9 +74,7 @@ def seed_contract_data(task, contracts, web3): new_user_args = tx_new_user_info[0].args user_id_from_event = int(new_user_args._userId) - # Add audio file to ipfs node - res = ipfs.add(test_file) - test_audio_file_hash = res["Hash"] + test_audio_file_hash = "Qmb372yhT7bKWxm5VzRpySgFPAhjnQuC6FNStTZww6ULVD" test_track_segments = [{"multihash": test_audio_file_hash, 
"duration": 28060}] # Create track metadata object @@ -113,9 +106,7 @@ def seed_contract_data(task, contracts, web3): with open(track_metadata_json_file, "w") as f: json.dump(track_metadata, f) - # add track metadata to ipfs - metadata_res = ipfs.add(track_metadata_json_file) - metadata_hash = metadata_res["Hash"] + metadata_hash = "QmQgUzCB38dc1Qb1waQcW75PgBB2fXr4gw2m9JBvGYUdka" # get track metadata multihash metadata_decoded = src.utils.multihash.from_b58_string(metadata_hash) @@ -253,13 +244,13 @@ def test_index_operations_metadata_fetch_error( celery_app, celery_app_contracts, mocker ): """ - Confirm indexer throws IndexingError when ipfs metadata fetch throws an error + Confirm indexer throws IndexingError when metadata fetch throws an error """ task = celery_app.celery.tasks["update_discovery_provider"] db = task.db web3 = celery_app_contracts["web3"] - # patch ipfs metadata event to raise an exception + # patch metadata event to raise an exception def fetch_metadata_stub(*_, should_fetch_from_replica_set): raise Exception("Broken fetch") diff --git a/discovery-provider/integration_tests/tasks/test_index_playlists.py b/discovery-provider/integration_tests/tasks/test_index_playlists.py index 95d6f6d87b0..12ad7a71ec9 100644 --- a/discovery-provider/integration_tests/tasks/test_index_playlists.py +++ b/discovery-provider/integration_tests/tasks/test_index_playlists.py @@ -462,7 +462,7 @@ class TestPlaylistTransaction: ], autospec=True, ) - test_ipfs_metadata = {} + test_metadata = {} with db.scoped_session() as session: try: @@ -481,7 +481,7 @@ class TestPlaylistTransaction: test_block_number, test_block_timestamp, block_hash, - test_ipfs_metadata, + test_metadata, ) assert len(updated_playlist_ids_set) == 1 assert ( diff --git a/discovery-provider/integration_tests/tasks/test_index_tracks.py b/discovery-provider/integration_tests/tasks/test_index_tracks.py index 79fdf523b5a..538c94e18b0 100644 --- a/discovery-provider/integration_tests/tasks/test_index_tracks.py +++ b/discovery-provider/integration_tests/tasks/test_index_tracks.py @@ -265,7 +265,7 @@ def test_index_tracks(mock_index_task, app): track_record = parse_track_event( None, # self - not used session, - update_task, # only need the ipfs client for get_metadata + update_task, entry, # Contains the event args used for updating event_type, # String that should one of user_event_types_lookup track_record, # User ORM instance @@ -520,7 +520,7 @@ def test_index_tracks(mock_index_task, app): parse_track_event( None, # self - not used session, - update_task, # only need the ipfs client for get_metadata + update_task, entry, # Contains the event args used for updating event_type, # String that should one of user_event_types_lookup track_record, # User ORM instance @@ -629,7 +629,7 @@ class TestTrackTransaction: autospec=True, ) - test_ipfs_metadata = {} + test_metadata = {} with db.scoped_session() as session, challenge_event_bus.use_scoped_dispatch_queue(): try: @@ -648,7 +648,7 @@ class TestTrackTransaction: test_block_number, test_block_timestamp, block_hash, - test_ipfs_metadata, + test_metadata, ) assert len(updated_track_ids_set) == 1 assert list(updated_track_ids_set)[0] == blessed_track_record.track_id diff --git a/discovery-provider/integration_tests/tasks/test_index_user_replica_set.py b/discovery-provider/integration_tests/tasks/test_index_user_replica_set.py index 28133237098..3a309cee66b 100644 --- a/discovery-provider/integration_tests/tasks/test_index_user_replica_set.py +++ 
b/discovery-provider/integration_tests/tasks/test_index_user_replica_set.py @@ -198,7 +198,7 @@ class TestUserReplicaSetTransaction: autospec=True, ) - test_ipfs_metadata = {} + test_metadata = {} with db.scoped_session() as session: try: @@ -217,7 +217,7 @@ class TestUserReplicaSetTransaction: test_block_number, test_block_timestamp, block_hash, - test_ipfs_metadata, + test_metadata, ) assert len(updated_user_ids_set) == 1 assert list(updated_user_ids_set)[0] == blessed_user_record.user_id diff --git a/discovery-provider/integration_tests/tasks/test_index_users.py b/discovery-provider/integration_tests/tasks/test_index_users.py index 636c49eca7a..dfc7ddb916e 100644 --- a/discovery-provider/integration_tests/tasks/test_index_users.py +++ b/discovery-provider/integration_tests/tasks/test_index_users.py @@ -235,14 +235,14 @@ def test_index_users(bus_mock: mock.MagicMock, app): parse_user_event( None, # self - not used - update_task, # only need the ipfs client for get_metadata + update_task, session, None, # tx_receipt - not used block_number, # not used entry, # Contains the event args used for updating event_type, # String that should one of user_event_types_lookup user_record, # User ORM instance - None, # ipfs_metadata - not used + None, # metadata - not used block_timestamp, # Used to update the user.updated_at field ) @@ -264,14 +264,14 @@ def test_index_users(bus_mock: mock.MagicMock, app): parse_user_event( None, # self - not used - update_task, # only need the ipfs client for get_metadata + update_task, session, None, # tx_receipt - not used block_number, # not used entry, # Contains the event args used for updating event_type, # String that should one of user_event_types_lookup user_record, # User ORM instance - None, # ipfs_metadata - not used + None, # metadata - not used block_timestamp, # Used to update the user.updated_at field ) @@ -286,14 +286,14 @@ def test_index_users(bus_mock: mock.MagicMock, app): parse_user_event( None, # self - not used - update_task, # only need the ipfs client for get_metadata + update_task, session, None, # tx_receipt - not used block_number, # not used entry, # Contains the event args used for updating event_type, # String that should one of user_event_types_lookup user_record, # User ORM instance - None, # ipfs_metadata - not used + None, # metadata - not used block_timestamp, # Used to update the user.updated_at field ) @@ -308,14 +308,14 @@ def test_index_users(bus_mock: mock.MagicMock, app): parse_user_event( None, # self - not used - update_task, # only need the ipfs client for get_metadata + update_task, session, None, # tx_receipt - not used block_number, # not used entry, # Contains the event args used for updating event_type, # String that should one of user_event_types_lookup user_record, # User ORM instance - None, # ipfs_metadata - not used + None, # metadata - not used block_timestamp, # Used to update the user.updated_at field ) @@ -334,14 +334,14 @@ def test_index_users(bus_mock: mock.MagicMock, app): parse_user_event( None, # self - not used - update_task, # only need the ipfs client for get_metadata + update_task, session, None, # tx_receipt - not used block_number, # not used entry, # Contains the event args used for updating event_type, # String that should one of user_event_types_lookup user_record, # User ORM instance - None, # ipfs_metadata - not used + None, # metadata - not used block_timestamp, # Used to update the user.updated_at field ) @@ -359,14 +359,14 @@ def test_index_users(bus_mock: mock.MagicMock, app): 
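Reviewer note on the test_index_users hunks that follow: the `ipfs_metadata` argument threading through `parse_user_event` is now a plain `metadata` dict of CID to parsed JSON, prefetched by the indexer. A minimal sketch of that calling convention, assuming hypothetical names (`fake_cid`, `lookup_metadata`) that are not part of this change:

```python
# Sketch only (hypothetical names): the renamed `metadata` parameter carries
# a prefetched mapping of multihash/CID -> parsed JSON; handlers index into
# it instead of calling out to a metadata client.
from typing import Any, Dict, Optional

fake_cid = "QmExampleMetadataCid"
prefetched: Dict[str, Dict[str, Any]] = {
    fake_cid: {"name": "user 1", "bio": None, "is_deactivated": False}
}

def lookup_metadata(
    metadata: Dict[str, Dict[str, Any]], cid: str
) -> Optional[Dict[str, Any]]:
    # no network call here -- the indexer resolved all CIDs up front
    return metadata.get(cid)

result = lookup_metadata(prefetched, fake_cid)
assert result is not None and result["name"] == "user 1"
```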
parse_user_event( None, # self - not used - update_task, # only need the ipfs client for get_metadata + update_task, session, None, # tx_receipt - not used block_number, # not used entry, # Contains the event args used for updating event_type, # String that should one of user_event_types_lookup user_record, # User ORM instance - None, # ipfs_metadata - not used + None, # metadata - not used block_timestamp, # Used to update the user.updated_at field ) @@ -382,14 +382,14 @@ def test_index_users(bus_mock: mock.MagicMock, app): parse_user_event( None, # self - not used - update_task, # only need the ipfs client for get_metadata + update_task, session, None, # tx_receipt - not used block_number, # not used entry, # Contains the event args used for updating event_type, # String that should one of user_event_types_lookup user_record, # User ORM instance - None, # ipfs_metadata - not used + None, # metadata - not used block_timestamp, # Used to update the user.updated_at field ) @@ -408,14 +408,14 @@ def test_index_users(bus_mock: mock.MagicMock, app): parse_user_event( None, # self - not used - update_task, # only need the ipfs client for get_metadata + update_task, session, None, # tx_receipt - not used block_number, # not used entry, # Contains the event args used for updating event_type, # String that should one of user_event_types_lookup user_record, # User ORM instance - None, # ipfs_metadata - not used + None, # metadata - not used block_timestamp, # Used to update the user.updated_at field ) @@ -430,7 +430,7 @@ def test_index_users(bus_mock: mock.MagicMock, app): parse_user_event( None, # self - not used - update_task, # only need the ipfs client for get_metadata + update_task, session, None, # tx_receipt - not used block_number, # not used @@ -441,31 +441,27 @@ def test_index_users(bus_mock: mock.MagicMock, app): helpers.multihash_digest_to_cid(entry.args._multihashDigest), user_metadata_format, "", - ), # ipfs_metadata + ), # metadata block_timestamp, # Used to update the user.updated_at field ) session.flush() entry_multihash = helpers.multihash_digest_to_cid(entry.args._multihashDigest) - ipfs_metadata = update_task.cid_metadata_client.get_metadata( - entry_multihash, "", "" - ) - - assert user_record.profile_picture == ipfs_metadata["profile_picture"] - assert user_record.cover_photo == ipfs_metadata["cover_photo"] - assert user_record.bio == ipfs_metadata["bio"] - assert user_record.name == ipfs_metadata["name"] - assert user_record.location == ipfs_metadata["location"] - assert ( - user_record.profile_picture_sizes == ipfs_metadata["profile_picture_sizes"] - ) - assert user_record.cover_photo_sizes == ipfs_metadata["cover_photo_sizes"] + metadata = update_task.cid_metadata_client.get_metadata(entry_multihash, "", "") + + assert user_record.profile_picture == metadata["profile_picture"] + assert user_record.cover_photo == metadata["cover_photo"] + assert user_record.bio == metadata["bio"] + assert user_record.name == metadata["name"] + assert user_record.location == metadata["location"] + assert user_record.profile_picture_sizes == metadata["profile_picture_sizes"] + assert user_record.cover_photo_sizes == metadata["cover_photo_sizes"] assert user_record.has_collectibles == True - assert user_record.playlist_library == ipfs_metadata["playlist_library"] + assert user_record.playlist_library == metadata["playlist_library"] assert user_record.is_deactivated == True - ipfs_associated_wallets = ipfs_metadata["associated_wallets"] + metadata_associated_wallets = 
metadata["associated_wallets"] associated_wallets = ( session.query(AssociatedWallet) .filter_by( @@ -477,10 +473,10 @@ def test_index_users(bus_mock: mock.MagicMock, app): .all() ) for associated_wallet in associated_wallets: - assert associated_wallet.wallet in ipfs_associated_wallets - assert len(associated_wallets) == len(ipfs_associated_wallets) + assert associated_wallet.wallet in metadata_associated_wallets + assert len(associated_wallets) == len(metadata_associated_wallets) - ipfs_associated_sol_wallets = ipfs_metadata["associated_sol_wallets"] + metadata_associated_sol_wallets = metadata["associated_sol_wallets"] associated_sol_wallets = ( session.query(AssociatedWallet) .filter_by( @@ -492,8 +488,8 @@ def test_index_users(bus_mock: mock.MagicMock, app): .all() ) for associated_wallet in associated_sol_wallets: - assert associated_wallet.wallet in ipfs_associated_sol_wallets - assert len(associated_sol_wallets) == len(ipfs_associated_sol_wallets) + assert associated_wallet.wallet in metadata_associated_sol_wallets + assert len(associated_sol_wallets) == len(metadata_associated_sol_wallets) user_events = ( session.query(UserEvent) @@ -634,7 +630,7 @@ def __init__(self): ], autospec=True, ) - test_ipfs_metadata: Dict[str, Any] = {} + test_metadata: Dict[str, Any] = {} with db.scoped_session() as session, bus_mock.use_scoped_dispatch_queue(): try: @@ -653,7 +649,7 @@ def __init__(self): test_block_number, test_block_timestamp, block_hash, - test_ipfs_metadata, + test_metadata, ) assert len(updated_user_ids_set) == 1 assert list(updated_user_ids_set)[0] == blessed_user_record.user_id diff --git a/discovery-provider/integration_tests/utils.py b/discovery-provider/integration_tests/utils.py index b662fee6311..0e9037e2097 100644 --- a/discovery-provider/integration_tests/utils.py +++ b/discovery-provider/integration_tests/utils.py @@ -223,6 +223,9 @@ def populate_mock_db(db, entities, block_offset=None): primary_id=user_meta.get("primary_id"), secondary_ids=user_meta.get("secondary_ids"), replica_set_update_signer=user_meta.get("replica_set_update_signer"), + creator_node_endpoint=user_meta.get( + "creator_node_endpoint", "https://cn.io" + ), ) user_bank = UserBankAccount( signature=f"0x{i}", diff --git a/discovery-provider/requirements.txt b/discovery-provider/requirements.txt index fa71db9a367..785f7fb316e 100644 --- a/discovery-provider/requirements.txt +++ b/discovery-provider/requirements.txt @@ -23,7 +23,6 @@ redis==3.2.0 pytest==6.2.5 SQLAlchemy-Utils==0.37.6 chance==0.110 -ipfshttpclient==0.8.0a2 pytest-cov==2.6.0 pytest-dotenv==0.5.2 elasticsearch==8.1.2 diff --git a/discovery-provider/scripts/test.sh b/discovery-provider/scripts/test.sh index 8bead13f920..f1c34631fcd 100755 --- a/discovery-provider/scripts/test.sh +++ b/discovery-provider/scripts/test.sh @@ -2,8 +2,6 @@ # Audius Discovery Provider / Test # Runs configured unit test scripts -# NOTE - the ipfs compose files have been moved from discprov to libs. 
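Reviewer note on the wallet assertions corrected above (the patch as submitted compared each list's length to itself): a membership check per DB row plus a length check amounts to set equality when neither list contains duplicates. A standalone sketch with made-up wallet values, not code from this diff:

```python
# Sketch only: db_wallets / metadata_wallets are made-up stand-ins for the
# values the test reads back from the DB and from user metadata.
db_wallets = ["0xabc", "0xdef"]
metadata_wallets = ["0xdef", "0xabc"]

# the pattern used in the test: every DB row appears in the metadata list,
# and the two lists have the same length
for wallet in db_wallets:
    assert wallet in metadata_wallets
assert len(db_wallets) == len(metadata_wallets)

# equivalent single check when neither side has duplicates
assert set(db_wallets) == set(metadata_wallets)
```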
-# Before running this test locally, bring up ipfs pod with libs/scripts/ipfs.sh source ./scripts/utilities.sh source .test.env @@ -52,7 +50,6 @@ docker-compose \ -f compose/docker-compose.db.yml \ -f compose/docker-compose.redis.yml \ -f compose/docker-compose.elasticsearch.yml \ - -f compose/docker-compose.ipfs.yml \ --env-file compose/.test.env \ stop @@ -60,17 +57,15 @@ docker-compose \ -f compose/docker-compose.db.yml \ -f compose/docker-compose.redis.yml \ -f compose/docker-compose.elasticsearch.yml \ - -f compose/docker-compose.ipfs.yml \ --env-file compose/.test.env \ rm -rf -# Bring up local dependencies - postgres, redis, ipfs +# Bring up local dependencies - postgres, redis docker network create audius_dev docker-compose \ -f compose/docker-compose.db.yml \ -f compose/docker-compose.redis.yml \ -f compose/docker-compose.elasticsearch.yml \ - -f compose/docker-compose.ipfs.yml \ --env-file compose/.test.env \ up -d diff --git a/discovery-provider/scripts/up.sh b/discovery-provider/scripts/up.sh index f473c2eb96b..86be81bde28 100755 --- a/discovery-provider/scripts/up.sh +++ b/discovery-provider/scripts/up.sh @@ -13,13 +13,11 @@ if [[ "$UP" == true || "$RESTART" == true ]]; then -f compose/docker-compose.db.yml \ -f compose/docker-compose.redis.yml \ -f compose/docker-compose.elasticsearch.yml \ - -f compose/docker-compose.backend.yml \ - -f compose/docker-compose.ipfs.yml" + -f compose/docker-compose.backend.yml" elif [[ "$UP_WEB_SERVER" == true ]]; then alias dc="docker-compose \ -f compose/docker-compose.redis.yml \ -f compose/docker-compose.elasticsearch.yml \ - -f compose/docker-compose.ipfs.yml \ -f compose/docker-compose.web-server.yml" else echo '$UP, $RESTART, or $UP_WEB_SERVER must be set to "true"' diff --git a/discovery-provider/src/api/v1/helpers.py b/discovery-provider/src/api/v1/helpers.py index 2bf3a5e9263..1e24be39d33 100644 --- a/discovery-provider/src/api/v1/helpers.py +++ b/discovery-provider/src/api/v1/helpers.py @@ -11,7 +11,6 @@ from src.queries.get_undisbursed_challenges import UndisbursedChallengeResponse from src.queries.query_helpers import SortDirection, SortMethod from src.queries.reactions import ReactionResponse -from src.utils.config import shared_config from src.utils.helpers import decode_string_id, encode_int_id from src.utils.spl_audio import to_wei_string @@ -19,13 +18,11 @@ def make_image(endpoint, cid, width="", height=""): - return f"{endpoint}/ipfs/{cid}/{width}x{height}.jpg" + return f"{endpoint}/content/{cid}/{width}x{height}.jpg" def get_primary_endpoint(user): raw_endpoint = user["creator_node_endpoint"] - if not raw_endpoint: - return shared_config["discprov"]["user_metadata_service_url"] return raw_endpoint.split(",")[0] diff --git a/discovery-provider/src/queries/index_block_stats.py b/discovery-provider/src/queries/index_block_stats.py index 1d4163d7fc5..dbb0b452096 100644 --- a/discovery-provider/src/queries/index_block_stats.py +++ b/discovery-provider/src/queries/index_block_stats.py @@ -5,7 +5,7 @@ from src.utils import redis_connection from src.utils.index_blocks_performance import ( get_add_indexed_block_to_db_ms_stats_since, - get_fetch_ipfs_metadata_ms_stats_since, + get_fetch_metadata_ms_stats_since, get_index_blocks_ms_stats_since, ) @@ -38,19 +38,17 @@ def index_block_stats(): ), "day": get_index_blocks_ms_stats_since(redis, DAY_IN_SECONDS), }, - "fetch_ipfs_metadata_ms": { - "minute": get_fetch_ipfs_metadata_ms_stats_since(redis, MINUTE_IN_SECONDS), - "ten_minutes": get_fetch_ipfs_metadata_ms_stats_since( + 
"fetch_metadata_ms": { + "minute": get_fetch_metadata_ms_stats_since(redis, MINUTE_IN_SECONDS), + "ten_minutes": get_fetch_metadata_ms_stats_since( redis, TEN_MINUTES_IN_SECONDS ), - "hour": get_fetch_ipfs_metadata_ms_stats_since(redis, HOUR_IN_SECONDS), - "six_hour": get_fetch_ipfs_metadata_ms_stats_since( - redis, SIX_HOURS_IN_SECONDS - ), - "twelve_hour": get_fetch_ipfs_metadata_ms_stats_since( + "hour": get_fetch_metadata_ms_stats_since(redis, HOUR_IN_SECONDS), + "six_hour": get_fetch_metadata_ms_stats_since(redis, SIX_HOURS_IN_SECONDS), + "twelve_hour": get_fetch_metadata_ms_stats_since( redis, TWELVE_HOURS_IN_SECONDS ), - "day": get_fetch_ipfs_metadata_ms_stats_since(redis, DAY_IN_SECONDS), + "day": get_fetch_metadata_ms_stats_since(redis, DAY_IN_SECONDS), }, "add_indexed_block_to_db_ms": { "minute": get_add_indexed_block_to_db_ms_stats_since( diff --git a/discovery-provider/src/solana/audius_data_transaction_handlers.py b/discovery-provider/src/solana/audius_data_transaction_handlers.py index ccbcba083d1..9713bec5833 100644 --- a/discovery-provider/src/solana/audius_data_transaction_handlers.py +++ b/discovery-provider/src/solana/audius_data_transaction_handlers.py @@ -502,7 +502,7 @@ def update_user_model_metadata( # update_task.challenge_event_bus, # ) - # reconstructed endpoints from sp IDs in tx not /ipfs response + # reconstructed endpoints from sp IDs in tx not response if "creator_node_endpoint" in metadata_dict: user_record.creator_node_endpoint = metadata_dict["creator_node_endpoint"] diff --git a/discovery-provider/src/tasks/entity_manager/entity_manager.py b/discovery-provider/src/tasks/entity_manager/entity_manager.py index 4fb90c254d7..1d52d924c45 100644 --- a/discovery-provider/src/tasks/entity_manager/entity_manager.py +++ b/discovery-provider/src/tasks/entity_manager/entity_manager.py @@ -54,7 +54,7 @@ def entity_manager_update( block_number: int, block_timestamp, block_hash: str, - ipfs_metadata: Dict, + metadata: Dict, ) -> Tuple[int, Dict[str, Set[(int)]]]: try: challenge_bus: ChallengeEventBus = update_task.challenge_event_bus @@ -107,7 +107,7 @@ def entity_manager_update( new_records, # actions below populate these records existing_records, pending_track_routes, - ipfs_metadata, + metadata, block_timestamp, block_number, event_blockhash, diff --git a/discovery-provider/src/tasks/entity_manager/playlist.py b/discovery-provider/src/tasks/entity_manager/playlist.py index 898b35a79e5..1eca6d9b6b1 100644 --- a/discovery-provider/src/tasks/entity_manager/playlist.py +++ b/discovery-provider/src/tasks/entity_manager/playlist.py @@ -48,7 +48,7 @@ def create_playlist(params: ManageEntityParameters): validate_playlist_tx(params) playlist_id = params.entity_id - metadata = params.ipfs_metadata[params.metadata_cid] + metadata = params.metadata[params.metadata_cid] tracks = metadata["playlist_contents"].get("track_ids", []) tracks_with_index_time = [] last_added_to = None @@ -103,7 +103,7 @@ def update_playlist(params: ManageEntityParameters): # TODO ignore updates on deleted playlists? 
playlist_id = params.entity_id - metadata = params.ipfs_metadata[params.metadata_cid] + metadata = params.metadata[params.metadata_cid] existing_playlist = params.existing_records[EntityType.PLAYLIST][playlist_id] if ( playlist_id in params.new_records[EntityType.PLAYLIST] diff --git a/discovery-provider/src/tasks/entity_manager/track.py b/discovery-provider/src/tasks/entity_manager/track.py index e33f1355245..85bb5a6a05c 100644 --- a/discovery-provider/src/tasks/entity_manager/track.py +++ b/discovery-provider/src/tasks/entity_manager/track.py @@ -126,7 +126,7 @@ def create_track(params: ManageEntityParameters): track_id = params.entity_id owner_id = params.user_id - track_metadata = params.ipfs_metadata[params.metadata_cid] + track_metadata = params.metadata[params.metadata_cid] track_record = Track( track_id=track_id, @@ -159,7 +159,7 @@ def update_track(params: ManageEntityParameters): return # TODO ignore updates on deleted playlists? - track_metadata = params.ipfs_metadata[params.metadata_cid] + track_metadata = params.metadata[params.metadata_cid] track_id = params.entity_id existing_track = params.existing_records[EntityType.TRACK][track_id] existing_track.is_current = False # invalidate diff --git a/discovery-provider/src/tasks/entity_manager/utils.py b/discovery-provider/src/tasks/entity_manager/utils.py index c43f24a9b3a..e98e1bfd0d4 100644 --- a/discovery-provider/src/tasks/entity_manager/utils.py +++ b/discovery-provider/src/tasks/entity_manager/utils.py @@ -80,7 +80,7 @@ def __init__( new_records: RecordDict, existing_records: ExistingRecordDict, pending_track_routes: List[TrackRoute], - ipfs_metadata: Dict[str, Dict[str, Dict]], + metadata: Dict[str, Dict[str, Dict]], block_timestamp: int, block_number: int, event_blockhash: str, @@ -100,7 +100,7 @@ def __init__( self.pending_track_routes = pending_track_routes self.event = event - self.ipfs_metadata = ipfs_metadata + self.metadata = metadata self.block_number = block_number self.event_blockhash = event_blockhash self.txhash = txhash diff --git a/discovery-provider/src/tasks/index.py b/discovery-provider/src/tasks/index.py index 4ab024074ee..7c586758310 100644 --- a/discovery-provider/src/tasks/index.py +++ b/discovery-provider/src/tasks/index.py @@ -44,10 +44,10 @@ from src.utils.constants import CONTRACT_NAMES_ON_CHAIN, CONTRACT_TYPES from src.utils.index_blocks_performance import ( record_add_indexed_block_to_db_ms, - record_fetch_ipfs_metadata_ms, + record_fetch_metadata_ms, record_index_blocks_ms, sweep_old_add_indexed_block_to_db_ms, - sweep_old_fetch_ipfs_metadata_ms, + sweep_old_fetch_metadata_ms, sweep_old_index_blocks_ms, ) from src.utils.indexing_errors import IndexingError @@ -676,7 +676,7 @@ def index_blocks(self, db, blocks_list): """ Fetch JSON metadata """ - fetch_ipfs_metadata_start_time = time.time() + fetch_metadata_start_time = time.time() # pre-fetch cids asynchronously to not have it block in user_state_update # and track_state_update cid_metadata = fetch_cid_metadata( @@ -686,19 +686,19 @@ def index_blocks(self, db, blocks_list): txs_grouped_by_type[ENTITY_MANAGER], ) logger.info( - f"index.py | index_blocks - fetch_ipfs_metadata in {time.time() - fetch_ipfs_metadata_start_time}s" + f"index.py | index_blocks - fetch_metadata in {time.time() - fetch_metadata_start_time}s" ) # Record the time this took in redis duration_ms = round( - (time.time() - fetch_ipfs_metadata_start_time) * 1000 + (time.time() - fetch_metadata_start_time) * 1000 ) - record_fetch_ipfs_metadata_ms(redis, duration_ms) + 
record_fetch_metadata_ms(redis, duration_ms) metric.save_time( - {"scope": "fetch_ipfs_metadata"}, - start_time=fetch_ipfs_metadata_start_time, + {"scope": "fetch_metadata"}, + start_time=fetch_metadata_start_time, ) logger.info( - f"index.py | index_blocks - fetch_ipfs_metadata in {duration_ms}ms" + f"index.py | index_blocks - fetch_metadata in {duration_ms}ms" ) """ @@ -803,7 +803,7 @@ def index_blocks(self, db, blocks_list): # Sweep records older than 30 days every day if block_number % BLOCKS_PER_DAY == 0: sweep_old_index_blocks_ms(redis, 30) - sweep_old_fetch_ipfs_metadata_ms(redis, 30) + sweep_old_fetch_metadata_ms(redis, 30) sweep_old_add_indexed_block_to_db_ms(redis, 30) if num_blocks > 0: diff --git a/discovery-provider/src/tasks/index_network_peers.py b/discovery-provider/src/tasks/index_network_peers.py index 31d34700a85..4986c3481d2 100644 --- a/discovery-provider/src/tasks/index_network_peers.py +++ b/discovery-provider/src/tasks/index_network_peers.py @@ -12,9 +12,6 @@ # The logic here is to ensure a robust connection from an active indexer # to all active entities in the network. # This is to ensure minimal retrieval time within the actual indexing flow itself -# NOTE - The terminology of "peer" in this file overlaps with ipfs swarm peers -# Even though we 'swarm connect' to an ipfs node embedded within our protocol the -# concept is very much distinct. # Query the L1 set of audius protocol contracts and retrieve a list of peer endpoints @@ -53,12 +50,6 @@ def update_network_peers(self): # Combine the set of known peers from ethereum and within local database all_peers = peers_from_ethereum - # Legacy user metadata node is always added to set of known peers - user_metadata_url = update_network_peers.shared_config["discprov"][ - "user_metadata_service_url" - ] - all_peers.add(user_metadata_url) - logger.info(f"index_network_peers.py | All known peers {all_peers}") peers_list = list(all_peers) # Update creator node url list in CID Metadata Client diff --git a/discovery-provider/src/tasks/metadata.py b/discovery-provider/src/tasks/metadata.py index 1ce769cc934..189dbbdea9b 100644 --- a/discovery-provider/src/tasks/metadata.py +++ b/discovery-provider/src/tasks/metadata.py @@ -1,6 +1,6 @@ from typing import Any, Dict -# Required format for track metadata retrieved from IPFS +# Required format for track metadata retrieved from the content system track_metadata_format: Dict[str, Any] = { "owner_id": None, @@ -30,7 +30,7 @@ "premium_conditions": None, } -# Required format for user metadata retrieved from IPFS +# Required format for user metadata retrieved from the content system user_metadata_format = { "profile_picture": None, "profile_picture_sizes": None, diff --git a/discovery-provider/src/tasks/playlists.py b/discovery-provider/src/tasks/playlists.py index 10b7d4fa7fc..a986219c529 100644 --- a/discovery-provider/src/tasks/playlists.py +++ b/discovery-provider/src/tasks/playlists.py @@ -27,7 +27,7 @@ def playlist_state_update( block_number, block_timestamp, block_hash, - _ipfs_metadata, # prefix unused args with underscore to prevent pylint + _metadata, # prefix unused args with underscore to prevent pylint ) -> Tuple[int, Set]: """Return Tuple containing int representing number of Playlist model state changes found in transaction and set of processed playlist IDs.""" blockhash = update_task.web3.toHex(block_hash) @@ -323,7 +323,7 @@ def parse_playlist_event( event_args._playlistImageMultihashDigest ) - # All incoming playlist images are set to ipfs dir in column 
playlist_image_sizes_multihash + # All incoming playlist images are set to the images dir in column playlist_image_sizes_multihash if playlist_record.playlist_image_multihash: logger.info( f"index.py | playlists.py | Processing playlist image \ diff --git a/discovery-provider/src/tasks/social_features.py b/discovery-provider/src/tasks/social_features.py index e6fb977ae50..12551792926 100644 --- a/discovery-provider/src/tasks/social_features.py +++ b/discovery-provider/src/tasks/social_features.py @@ -25,7 +25,7 @@ def social_feature_state_update( block_number, block_timestamp, block_hash, - _ipfs_metadata, # prefix unused args with underscore to prevent pylint + _metadata, # prefix unused args with underscore to prevent pylint ) -> Tuple[int, Set]: """Return Tuple containing int representing number of social feature related state changes in this transaction and empty Set (to align with other _state_update function signatures)""" empty_set: Set[int] = set() diff --git a/discovery-provider/src/tasks/tracks.py b/discovery-provider/src/tasks/tracks.py index 82ec4880e43..2419cdd91b2 100644 --- a/discovery-provider/src/tasks/tracks.py +++ b/discovery-provider/src/tasks/tracks.py @@ -34,7 +34,7 @@ def track_state_update( block_number, block_timestamp, block_hash, - ipfs_metadata, + metadata, ) -> Tuple[int, Set]: """Return tuple containing int representing number of Track model state changes found in transaction and set of processed track IDs.""" begin_track_state_update = datetime.now() @@ -91,7 +91,7 @@ def track_state_update( bytes.fromhex(track_metadata_digest), track_metadata_hash_fn ) cid = multihash.to_b58_string(buf) - track_metadata = ipfs_metadata[cid] + track_metadata = metadata[cid] parsed_track = parse_track_event( self, diff --git a/discovery-provider/src/tasks/user_library.py b/discovery-provider/src/tasks/user_library.py index c5ead69f258..2f01c767ad6 100644 --- a/discovery-provider/src/tasks/user_library.py +++ b/discovery-provider/src/tasks/user_library.py @@ -24,7 +24,7 @@ def user_library_state_update( block_number, block_timestamp, block_hash, - _ipfs_metadata, # prefix unused args with underscore to prevent pylint + _metadata, # prefix unused args with underscore to prevent pylint ) -> Tuple[int, Set]: """Return Tuple containing int representing number of User Library model state changes found in transaction and empty Set (to align with fn signature of other _state_update functions.""" empty_set: Set[int] = set() diff --git a/discovery-provider/src/tasks/user_replica_set.py b/discovery-provider/src/tasks/user_replica_set.py index 2f70ac580e5..3e5113dd55f 100644 --- a/discovery-provider/src/tasks/user_replica_set.py +++ b/discovery-provider/src/tasks/user_replica_set.py @@ -33,7 +33,7 @@ def user_replica_set_state_update( block_number, block_timestamp, block_hash, - _ipfs_metadata, # prefix unused args with underscore to prevent pylint + _metadata, # prefix unused args with underscore to prevent pylint ) -> Tuple[int, Set]: """Return Tuple containing int representing number of User model state changes found in transaction and set of user_id values""" diff --git a/discovery-provider/src/tasks/users.py b/discovery-provider/src/tasks/users.py index 64bf119e9d4..fc88dbe628f 100644 --- a/discovery-provider/src/tasks/users.py +++ b/discovery-provider/src/tasks/users.py @@ -34,7 +34,7 @@ def user_state_update( block_number, block_timestamp, block_hash, - ipfs_metadata, + metadata, ) -> Tuple[int, Set]: """Return tuple containing int representing number of User model state changes 
found in transaction and set of processed user IDs.""" begin_user_state_update = datetime.now() @@ -95,7 +95,7 @@ def user_state_update( block_number, block_timestamp, blockhash, - ipfs_metadata, + metadata, user_ids, skipped_tx_count, ) @@ -137,7 +137,7 @@ def process_user_txs_serial( block_number, block_timestamp, blockhash, - ipfs_metadata, + metadata, user_ids, skipped_tx_count, ): @@ -178,7 +178,7 @@ def process_user_txs_serial( entry, event_type, existing_user_record, - ipfs_metadata[metadata_multihash], + metadata[metadata_multihash], block_timestamp, ) else: @@ -277,7 +277,7 @@ def parse_user_event( entry, event_type, user_record, - ipfs_metadata, + metadata, block_timestamp, ): # type specific field changes @@ -336,79 +336,73 @@ def parse_user_event( # If the multihash is updated, fetch the metadata (if not fetched) and update the associated wallets column if event_type == user_event_types_lookup["update_multihash"]: - # Look up metadata multihash in IPFS and override with metadata fields - if ipfs_metadata: + # Look up metadata multihash and override with metadata fields + if metadata: # Fields also stored on chain - if "profile_picture" in ipfs_metadata and ipfs_metadata["profile_picture"]: - user_record.profile_picture = ipfs_metadata["profile_picture"] + if "profile_picture" in metadata and metadata["profile_picture"]: + user_record.profile_picture = metadata["profile_picture"] - if "cover_photo" in ipfs_metadata and ipfs_metadata["cover_photo"]: - user_record.cover_photo = ipfs_metadata["cover_photo"] + if "cover_photo" in metadata and metadata["cover_photo"]: + user_record.cover_photo = metadata["cover_photo"] - if "bio" in ipfs_metadata and ipfs_metadata["bio"]: - user_record.bio = ipfs_metadata["bio"] + if "bio" in metadata and metadata["bio"]: + user_record.bio = metadata["bio"] - if "name" in ipfs_metadata and ipfs_metadata["name"]: - user_record.name = ipfs_metadata["name"] + if "name" in metadata and metadata["name"]: + user_record.name = metadata["name"] - if "location" in ipfs_metadata and ipfs_metadata["location"]: - user_record.location = ipfs_metadata["location"] + if "location" in metadata and metadata["location"]: + user_record.location = metadata["location"] # Fields with no on-chain counterpart if ( - "profile_picture_sizes" in ipfs_metadata - and ipfs_metadata["profile_picture_sizes"] + "profile_picture_sizes" in metadata + and metadata["profile_picture_sizes"] ): - user_record.profile_picture = ipfs_metadata["profile_picture_sizes"] + user_record.profile_picture = metadata["profile_picture_sizes"] - if ( - "cover_photo_sizes" in ipfs_metadata - and ipfs_metadata["cover_photo_sizes"] - ): - user_record.cover_photo = ipfs_metadata["cover_photo_sizes"] + if "cover_photo_sizes" in metadata and metadata["cover_photo_sizes"]: + user_record.cover_photo = metadata["cover_photo_sizes"] if ( - "collectibles" in ipfs_metadata - and ipfs_metadata["collectibles"] - and isinstance(ipfs_metadata["collectibles"], dict) - and ipfs_metadata["collectibles"].items() + "collectibles" in metadata + and metadata["collectibles"] + and isinstance(metadata["collectibles"], dict) + and metadata["collectibles"].items() ): user_record.has_collectibles = True else: user_record.has_collectibles = False - if "associated_wallets" in ipfs_metadata: + if "associated_wallets" in metadata: update_user_associated_wallets( session, update_task, user_record, - ipfs_metadata["associated_wallets"], + metadata["associated_wallets"], "eth", ) - if "associated_sol_wallets" in ipfs_metadata: + if 
"associated_sol_wallets" in metadata: update_user_associated_wallets( session, update_task, user_record, - ipfs_metadata["associated_sol_wallets"], + metadata["associated_sol_wallets"], "sol", ) - if ( - "playlist_library" in ipfs_metadata - and ipfs_metadata["playlist_library"] - ): - user_record.playlist_library = ipfs_metadata["playlist_library"] + if "playlist_library" in metadata and metadata["playlist_library"]: + user_record.playlist_library = metadata["playlist_library"] - if "is_deactivated" in ipfs_metadata: - user_record.is_deactivated = ipfs_metadata["is_deactivated"] + if "is_deactivated" in metadata: + user_record.is_deactivated = metadata["is_deactivated"] - if "events" in ipfs_metadata and ipfs_metadata["events"]: + if "events" in metadata and metadata["events"]: update_user_events( session, user_record, - ipfs_metadata["events"], + metadata["events"], update_task.challenge_event_bus, ) diff --git a/discovery-provider/src/utils/cid_metadata_client.py b/discovery-provider/src/utils/cid_metadata_client.py index 814ffe5dfd7..baab16d9490 100644 --- a/discovery-provider/src/utils/cid_metadata_client.py +++ b/discovery-provider/src/utils/cid_metadata_client.py @@ -64,7 +64,7 @@ def _get_metadata_from_json(self, default_metadata_fields, resp_json): return metadata async def _get_metadata_async(self, async_session, multihash, gateway_endpoint): - url = gateway_endpoint + "/ipfs/" + multihash + url = gateway_endpoint + "/content/" + multihash # Skip URL if invalid try: validate_url = urlparse(url) diff --git a/discovery-provider/src/utils/helpers.py b/discovery-provider/src/utils/helpers.py index 5b468c71f71..556a3b35556 100644 --- a/discovery-provider/src/utils/helpers.py +++ b/discovery-provider/src/utils/helpers.py @@ -357,24 +357,6 @@ def get_discovery_provider_version(): return data -def get_valid_multiaddr_from_id_json(id_json): - logger = logging.getLogger(__name__) - # js-ipfs api returns lower case keys - if "addresses" in id_json and isinstance(id_json["addresses"], list): - for multiaddr in id_json["addresses"]: - if ("127.0.0.1" not in multiaddr) and ("ip6" not in multiaddr): - logger.warning(f"returning {multiaddr}") - return multiaddr - - # py-ipfs api returns uppercase keys - if "Addresses" in id_json and isinstance(id_json["Addresses"], list): - for multiaddr in id_json["Addresses"]: - if ("127.0.0.1" not in multiaddr) and ("ip6" not in multiaddr): - logger.warning(f"returning {multiaddr}") - return multiaddr - return None - - HASH_MIN_LENGTH = 5 HASH_SALT = "azowernasdfoia" diff --git a/discovery-provider/src/utils/index_blocks_performance.py b/discovery-provider/src/utils/index_blocks_performance.py index 67a13361cfc..21bb56e0763 100644 --- a/discovery-provider/src/utils/index_blocks_performance.py +++ b/discovery-provider/src/utils/index_blocks_performance.py @@ -2,7 +2,7 @@ from datetime import datetime, timedelta INDEX_BLOCKS_SECONDS_REDIS_KEY = "index_blocks:ms" -FETCH_IPFS_METADATA_SECONDS_REDIS_KEY = "fetch_ipfs_metadata:ms" +FETCH_METADATA_SECONDS_REDIS_KEY = "fetch_metadata:ms" ADD_INDEXED_BLOCK_TO_DB_SECONDS_REDIS_KEY = "add_indexed_block_to_db:ms" @@ -33,14 +33,14 @@ def record_index_blocks_ms(redis, index_blocks_duration_ms): ) -def record_fetch_ipfs_metadata_ms(redis, fetch_ipfs_metadata_duration_ms): - """Records that fetch_ipfs_metadata took some number of ms""" +def record_fetch_metadata_ms(redis, fetch_metadata_duration_ms): + """Records that fetch_metadata took some number of ms""" now = round(datetime.now().timestamp()) # Key as ms:date, value as 
date so that we can sort by range (on values) # Zset lets you only query by ranges of the value. redis.zadd( - FETCH_IPFS_METADATA_SECONDS_REDIS_KEY, - {f"{fetch_ipfs_metadata_duration_ms}:{now}": now}, + FETCH_METADATA_SECONDS_REDIS_KEY, + {f"{fetch_metadata_duration_ms}:{now}": now}, ) @@ -68,10 +68,10 @@ def get_index_blocks_ms_stats_since(redis, seconds_ago): } -def get_fetch_ipfs_metadata_ms_stats_since(redis, seconds_ago): - """From seconds ago until now, get the average ms fetch_ipfs_metadata took""" +def get_fetch_metadata_ms_stats_since(redis, seconds_ago): + """From seconds ago until now, get the average ms fetch_metadata took""" ago = round((datetime.now() - timedelta(seconds=seconds_ago)).timestamp()) - res = redis.zrangebyscore(FETCH_IPFS_METADATA_SECONDS_REDIS_KEY, ago, "+inf") + res = redis.zrangebyscore(FETCH_METADATA_SECONDS_REDIS_KEY, ago, "+inf") ms_per_block = list(map(lambda x: int(x.decode("utf-8").split(":")[0]), res)) return { "mean": mean(ms_per_block), @@ -94,10 +94,10 @@ def get_add_indexed_block_to_db_ms_stats_since(redis, seconds_ago): } -def get_average_fetch_ipfs_metadata_ms_since(redis, seconds_ago): - """From seconds ago until now, get the average ms fetch_ipfs_metadata took""" +def get_average_fetch_metadata_ms_since(redis, seconds_ago): + """From seconds ago until now, get the average ms fetch_metadata took""" ago = round((datetime.now() - timedelta(seconds=seconds_ago)).timestamp()) - res = redis.zrangebyscore(FETCH_IPFS_METADATA_SECONDS_REDIS_KEY, ago, "+inf") + res = redis.zrangebyscore(FETCH_METADATA_SECONDS_REDIS_KEY, ago, "+inf") ms_per_block = list(map(lambda x: int(x.decode("utf-8").split(":")[0]), res)) if len(ms_per_block) > 0: return sum(ms_per_block) / len(ms_per_block) @@ -120,10 +120,10 @@ def sweep_old_index_blocks_ms(redis, expire_after_days): redis.zremrangebyscore(INDEX_BLOCKS_SECONDS_REDIS_KEY, 0, timestamp) -def sweep_old_fetch_ipfs_metadata_ms(redis, expire_after_days): - """Sweep old records for fetch ipfs metadata ms after `expire_after_days`""" +def sweep_old_fetch_metadata_ms(redis, expire_after_days): + """Sweep old records for fetch metadata ms after `expire_after_days`""" timestamp = round((datetime.now() - timedelta(days=expire_after_days)).timestamp()) - redis.zremrangebyscore(FETCH_IPFS_METADATA_SECONDS_REDIS_KEY, 0, timestamp) + redis.zremrangebyscore(FETCH_METADATA_SECONDS_REDIS_KEY, 0, timestamp) def sweep_old_add_indexed_block_to_db_ms(redis, expire_after_days):
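Reviewer note on the renamed timing helpers above: they keep one sorted-set member per sample, encoded as `<duration_ms>:<unix_ts>` with the timestamp as the score, so "stats since N seconds ago" becomes a score-range query. A runnable sketch of that pattern against a local Redis; only the key name comes from this diff, the rest assumes redis-py (which requirements.txt already pins) and made-up durations:

```python
# Sketch of the zadd/zrangebyscore pattern used by the renamed helpers.
# Assumes a Redis reachable at localhost; the 123ms sample is illustrative.
from datetime import datetime, timedelta

import redis

FETCH_METADATA_SECONDS_REDIS_KEY = "fetch_metadata:ms"
r = redis.Redis()

# record: member encodes "<duration_ms>:<unix_ts>", score is the timestamp
now = round(datetime.now().timestamp())
r.zadd(FETCH_METADATA_SECONDS_REDIS_KEY, {f"123:{now}": now})

# query: select members scored within the last hour, then strip the
# timestamp suffix to recover the recorded durations
ago = round((datetime.now() - timedelta(hours=1)).timestamp())
members = r.zrangebyscore(FETCH_METADATA_SECONDS_REDIS_KEY, ago, "+inf")
durations = [int(m.decode("utf-8").split(":")[0]) for m in members]
print(sum(durations) / len(durations) if durations else None)
```

Encoding the timestamp into the member string keeps members unique even when two samples share a duration, which is presumably why the value is not stored bare.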