-
-
Notifications
You must be signed in to change notification settings - Fork 495
/
handler.py
2120 lines (1907 loc) · 94.9 KB
/
handler.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
import logging
import os
import shutil
import sqlite3
from collections import defaultdict
from pathlib import Path
from typing import TYPE_CHECKING, Any, Literal, Optional, cast, overload
from gevent.lock import Semaphore
from rotkehlchen.assets.asset import (
Asset,
AssetWithNameAndType,
AssetWithOracles,
CryptoAsset,
CustomAsset,
EvmToken,
Nft,
UnderlyingToken,
)
from rotkehlchen.assets.types import AssetData, AssetType
from rotkehlchen.chain.evm.types import string_to_evm_address
from rotkehlchen.constants.assets import A_ETH, A_ETH2
from rotkehlchen.constants.misc import (
DEFAULT_SQL_VM_INSTRUCTIONS_CB,
GLOBALDB_NAME,
GLOBALDIR_NAME,
NFT_DIRECTIVE,
)
from rotkehlchen.db.drivers.gevent import DBConnection, DBConnectionType, DBCursor
from rotkehlchen.errors.asset import UnknownAsset, UnsupportedAsset, WrongAssetType
from rotkehlchen.errors.misc import DBUpgradeError, InputError
from rotkehlchen.errors.serialization import DeserializationError
from rotkehlchen.history.deserialization import deserialize_price
from rotkehlchen.history.types import HistoricalPrice, HistoricalPriceOracle
from rotkehlchen.logging import RotkehlchenLogsAdapter
from rotkehlchen.types import (
SPAM_PROTOCOL,
ChainID,
ChecksumEvmAddress,
EvmTokenKind,
Location,
LocationAssetMappingDeleteEntry,
LocationAssetMappingUpdateEntry,
Price,
Timestamp,
)
from rotkehlchen.utils.misc import timestamp_to_date, ts_now
from rotkehlchen.utils.serialization import (
deserialize_asset_with_oracles_from_db,
deserialize_generic_asset_from_db,
)
from .migrations.manager import LAST_DATA_MIGRATION, maybe_apply_globaldb_migrations
from .schema import DB_SCRIPT_CREATE_TABLES
from .upgrades.manager import maybe_upgrade_globaldb
from .utils import GLOBAL_DB_VERSION, globaldb_get_setting_value
if TYPE_CHECKING:
from rotkehlchen.db.dbhandler import DBHandler
from rotkehlchen.db.filtering import AssetsFilterQuery, LocationAssetMappingsFilterQuery
logger = logging.getLogger(__name__)
log = RotkehlchenLogsAdapter(logger)

# Shared JOIN clause linking the assets table to its per-type detail tables.
# {dbprefix} is either '' (querying the global DB directly) or 'globaldb.'
# (when the global DB has been ATTACHed to another connection).
_ALL_ASSETS_TABLES_JOINS = """
FROM {dbprefix}assets LEFT JOIN {dbprefix}common_asset_details on {dbprefix}assets.identifier={dbprefix}common_asset_details.identifier
LEFT JOIN {dbprefix}evm_tokens ON evm_tokens.identifier=assets.identifier
LEFT JOIN {dbprefix}custom_assets ON custom_assets.identifier=assets.identifier
"""  # noqa: E501
# Base query: identifier, name, symbol, chain and both type columns for all assets
ALL_ASSETS_TABLES_QUERY = """
SELECT {dbprefix}assets.identifier, name, symbol, chain, assets.type, custom_assets.type """ + _ALL_ASSETS_TABLES_JOINS  # noqa: E501
# Same as ALL_ASSETS_TABLES_QUERY but additionally joins collection information
# through multiasset_mappings/asset_collections and exposes the token protocol
ALL_ASSETS_TABLES_QUERY_WITH_COLLECTIONS = (
    'SELECT {dbprefix}assets.identifier, assets.name, common_asset_details.symbol, chain, assets.type, custom_assets.type, collection_id, asset_collections.name, asset_collections.symbol, protocol' +  # noqa: E501
    _ALL_ASSETS_TABLES_JOINS +
    'LEFT JOIN {dbprefix}multiasset_mappings ON {dbprefix}assets.identifier={dbprefix}multiasset_mappings.asset LEFT JOIN {dbprefix}asset_collections ON {dbprefix}multiasset_mappings.collection_id={dbprefix}asset_collections.id'  # noqa: E501
)
def _initialize_and_check_unfinished_upgrades(
        global_dir: Path,
        db_filename: str,
        sql_vm_instructions_cb: int,
) -> tuple[DBConnection, bool]:
    """
    Checks the database whether there are any not finished upgrades and automatically uses a
    backup if there are any. If no backup found, throws an error.

    - global_dir: directory containing the global DB file
    - db_filename: name of the DB file inside global_dir
    - sql_vm_instructions_cb: connection setting. Check DBConnection for details.

    Returns the DB connection and true if a DB backup was used and False otherwise

    May raise DBUpgradeError if a half-upgraded DB is found without any backup.
    """
    connection = DBConnection(
        path=global_dir / db_filename,
        connection_type=DBConnectionType.GLOBAL,
        sql_vm_instructions_cb=sql_vm_instructions_cb,
    )
    try:
        with connection.read_ctx() as cursor:
            # the setting is written when an upgrade starts and removed when it finishes,
            # so a value != -1 means a previous upgrade was interrupted
            ongoing_upgrade_from_version = globaldb_get_setting_value(
                cursor=cursor,
                name='ongoing_upgrade_from_version',
                default_value=-1,
            )
    except sqlite3.OperationalError:  # pylint: disable=no-member
        ongoing_upgrade_from_version = -1  # Fresh DB
    if ongoing_upgrade_from_version == -1:
        return connection, False  # We are all good

    # Otherwise replace the db with a backup and relogin
    connection.close()
    backup_postfix = f'global_db_v{ongoing_upgrade_from_version}.backup'
    # endswith is clearer and equivalent to the manual slice comparison
    found_backups = [x for x in os.listdir(global_dir) if x.endswith(backup_postfix)]
    if len(found_backups) == 0:
        raise DBUpgradeError(
            'Your global database is in a half-upgraded state and there was no backup '
            'found. Please open an issue on our github or contact us in our discord server.',
        )

    # backup filenames start with a timestamp, so lexicographic max is the latest one
    backup_to_use = max(found_backups)
    shutil.copyfile(
        global_dir / backup_to_use,
        global_dir / db_filename,
    )
    connection = DBConnection(
        path=global_dir / db_filename,
        connection_type=DBConnectionType.GLOBAL,
        sql_vm_instructions_cb=sql_vm_instructions_cb,
    )
    return connection, True
def initialize_globaldb(
        global_dir: Path,
        db_filename: str,
        sql_vm_instructions_cb: int,
) -> tuple[DBConnection, bool]:
    """Initialize globaldb.
    - global_dir: The directory in which to find the global.db to initialize
    - db_filename: The filename of the DB. Almost always: global.db.
    - sql_vm_instructions_cb is a connection setting. Check DBConnection for details.

    Returns the connection and True if a backup had to be used, False otherwise.

    May raise DBSchemaError if GlobalDB's schema is malformed.
    """
    # recover from any interrupted upgrade first, then run pending upgrades
    connection, used_backup = _initialize_and_check_unfinished_upgrades(
        global_dir=global_dir,
        db_filename=db_filename,
        sql_vm_instructions_cb=sql_vm_instructions_cb,
    )
    is_fresh_db = maybe_upgrade_globaldb(
        connection=connection,
        global_dir=global_dir,
        db_filename=db_filename,
    )
    connection.executescript('PRAGMA foreign_keys=on;')
    # switch to WAL mode: https://www.sqlite.org/wal.html
    connection.execute('PRAGMA journal_mode=WAL;')
    if is_fresh_db is True:
        # brand-new DB: create the schema and stamp current version/migration settings
        connection.executescript(DB_SCRIPT_CREATE_TABLES)
        with connection.write_ctx() as cursor:
            cursor.executemany(
                'INSERT OR REPLACE INTO settings(name, value) VALUES(?, ?)',
                [('version', str(GLOBAL_DB_VERSION)), ('last_data_migration', str(LAST_DATA_MIGRATION))],  # noqa: E501
            )
    else:
        # existing DB: apply any pending data migrations
        maybe_apply_globaldb_migrations(connection)
    connection.schema_sanity_check()
    return connection, used_backup
def _initialize_global_db_directory(
        data_dir: Path,
        sql_vm_instructions_cb: int,
) -> tuple[DBConnection, bool]:
    """Make sure the global DB directory and file exist, then initialize the DB.

    Returns the DB connection and True if a DB backup was used, False otherwise.

    May raise DBSchemaError if GlobalDB's schema is malformed.
    """
    global_dir = data_dir / GLOBALDIR_NAME
    global_dir.mkdir(parents=True, exist_ok=True)
    db_path = global_dir / GLOBALDB_NAME
    if not db_path.is_file():
        # no global DB present yet -- seed it from the packaged built-in copy
        packaged_data_dir = Path(__file__).resolve().parent.parent / 'data'
        shutil.copyfile(packaged_data_dir / GLOBALDB_NAME, db_path)
    return initialize_globaldb(
        global_dir=global_dir,
        db_filename=GLOBALDB_NAME,
        sql_vm_instructions_cb=sql_vm_instructions_cb,
    )
class GlobalDBHandler:
    """A singleton class controlling the global DB"""
    # the singleton instance; None until the first instantiation
    __instance: Optional['GlobalDBHandler'] = None
    # directory holding the global DB dir; set during first instantiation
    _data_directory: Path | None = None
    # lazily created connection to the packaged (shipped) global DB
    _packaged_db_conn: DBConnection | None = None
    conn: DBConnection  # connection to the user's global DB
    used_backup: bool  # specifies if the global DB was restored from a backup
    packaged_db_lock: Semaphore  # guards operations that use the packaged DB
def __new__(
        cls,
        data_dir: Path | None = None,
        sql_vm_instructions_cb: int | None = None,
) -> 'GlobalDBHandler':
    """Return the singleton GlobalDBHandler, creating it on the first call.

    The first call must provide data_dir and sql_vm_instructions_cb; if a
    global DB does not exist in that directory the built-in one is copied
    there. Subsequent calls ignore the arguments and return the existing
    instance.

    May raise:
    - DBSchemaError if GlobalDB's schema is malformed
    """
    if GlobalDBHandler.__instance is not None:
        return GlobalDBHandler.__instance

    assert data_dir is not None, 'First instantiation of GlobalDBHandler should have a data_dir'  # noqa: E501
    assert sql_vm_instructions_cb is not None, 'First instantiation of GlobalDBHandler should have a sql_vm_instructions_cb'  # noqa: E501
    instance = object.__new__(cls)
    GlobalDBHandler.__instance = instance
    instance._data_directory = data_dir
    instance.conn, instance.used_backup = _initialize_global_db_directory(
        data_dir,
        sql_vm_instructions_cb,
    )
    instance.packaged_db_lock = Semaphore()
    return instance
def filepath(self) -> Path:
    """Return the full path of the global DB file.

    Must only be called after the global DB has been initialized.
    """
    directory = self._data_directory
    return directory / GLOBALDIR_NAME / GLOBALDB_NAME  # type: ignore [operator]
def cleanup(self) -> None:
    """Close the main global DB connection and the packaged one, if it was opened."""
    self.conn.close()
    packaged_conn = self._packaged_db_conn
    if packaged_conn is not None:
        packaged_conn.close()
def _wal_checkpoint(self) -> None:
    """
    Commit the WAL file into the main DB file, ignoring lock errors.

    Needed before ATTACHing the global DB to another connection so the
    attached copy sees up-to-date data. If two tasks spawned close together
    both try to checkpoint, one of them can find the database locked -- that
    is harmless and does not prevent later queries from executing.
    https://www.sqlite.org/pragma.html#pragma_wal_checkpoint
    """
    try:
        self.conn.execute('PRAGMA wal_checkpoint(PASSIVE);')
    except sqlite3.OperationalError as e:
        err = str(e)
        if 'database table is locked' not in err:
            raise
        log.warning(f'Could not commit globaldb wal file. Ignoring. {err}')
@staticmethod
def packaged_db_conn() -> DBConnection:
    """Return a DBConnection instance for the packaged global db.

    The connection is created lazily on first use and cached on the singleton.
    """
    handler = GlobalDBHandler()
    if handler._packaged_db_conn is not None:
        # mypy does not recognize the initialization as that of a singleton
        return handler._packaged_db_conn  # type: ignore

    packaged_db_path = Path(__file__).resolve().parent.parent / 'data' / GLOBALDB_NAME
    new_conn = DBConnection(
        path=packaged_db_path,
        connection_type=DBConnectionType.GLOBAL,
        sql_vm_instructions_cb=DEFAULT_SQL_VM_INSTRUCTIONS_CB,
    )
    handler._packaged_db_conn = new_conn
    return new_conn
@staticmethod
def get_schema_version() -> int:
    """Return the schema version stored in the global DB settings table."""
    handler = GlobalDBHandler()
    with handler.conn.read_ctx() as cursor:
        return globaldb_get_setting_value(cursor, 'version', GLOBAL_DB_VERSION)
@staticmethod
def get_setting_value(name: str, default_value: int) -> int:
    """Return the value of the named setting, or default_value if unset.

    Typing is always int for now.
    """
    handler = GlobalDBHandler()
    with handler.conn.read_ctx() as cursor:
        return globaldb_get_setting_value(cursor, name, default_value)
@staticmethod
def add_setting_value(name: str, value: Any) -> None:
    """Insert or replace the named setting, storing str(value)."""
    bindings = (name, str(value))
    with GlobalDBHandler().conn.write_ctx() as write_cursor:
        write_cursor.execute(
            'INSERT OR REPLACE INTO settings(name, value) VALUES(?, ?)',
            bindings,
        )
@staticmethod
def add_asset(asset: AssetWithNameAndType) -> None:
    """
    Add an asset in the DB.

    Writes the base row in the assets table first, then the per-type detail
    rows: custom_assets for custom assets, or common_asset_details (plus
    evm_tokens for EVM tokens) for everything else.

    May raise InputError in case of error, meaning asset exists or some constraint hit"""
    try:
        with GlobalDBHandler().conn.write_ctx() as write_cursor:
            write_cursor.execute(
                'INSERT INTO assets(identifier, name, type) '
                'VALUES(?, ?, ?);',
                (
                    asset.identifier,
                    asset.name,
                    asset.asset_type.serialize_for_db(),
                ),
            )
            if asset.asset_type == AssetType.CUSTOM_ASSET:
                write_cursor.execute(
                    'INSERT INTO custom_assets(identifier, type, notes) VALUES(?, ?, ?)',
                    cast(CustomAsset, asset).serialize_for_db(),
                )
                return  # custom assets have no common_asset_details row

            # since the asset is not a custom asset it can only be an asset with oracles
            asset = cast(AssetWithOracles, asset)
            forked, started, swapped_for = None, None, None
            if asset.is_crypto():
                if asset.is_evm_token():
                    asset = cast(EvmToken, asset)
                    # writes the evm_tokens row for this token
                    GlobalDBHandler.add_evm_token_data(write_cursor, asset)
                else:
                    asset = cast(CryptoAsset, asset)
                    forked = asset.forked.identifier if asset.forked else None
                    started = asset.started
                    swapped_for = asset.swapped_for.identifier if asset.swapped_for else None
            # common_asset_details is written for every non-custom asset
            write_cursor.execute(
                'INSERT INTO common_asset_details(identifier, symbol, coingecko, cryptocompare, forked, started, swapped_for)'  # noqa: E501
                'VALUES(?, ?, ?, ?, ?, ?, ?);',
                (
                    asset.identifier,
                    asset.symbol,
                    asset.coingecko,
                    asset.cryptocompare,
                    forked,
                    started,
                    swapped_for,
                ),
            )
    except sqlite3.IntegrityError as e:
        raise InputError(
            f'Failed to add asset {asset.identifier} into the assets table due to {e!s}',
        ) from e
@staticmethod
def retrieve_assets(userdb: 'DBHandler', filter_query: 'AssetsFilterQuery') -> tuple[list[dict[str, Any]], int]:  # noqa: E501
    """
    Returns a tuple that contains a list of assets details and a
    count of those assets that match the filter query.

    Runs on the user DB connection with the global DB ATTACHed, since the
    filter query may reference user DB tables.

    May raise:
    - DeserializationError
    """
    assets_info = []
    underlying_tokens: dict[str, list[dict[str, str]]] = defaultdict(list)
    prepared_filter_query, bindings = filter_query.prepare()
    parent_query = """
    SELECT A.identifier AS identifier, A.type, B.address, B.decimals, A.name, C.symbol,
    C.started, C.forked, C.swapped_for, C.coingecko, C.cryptocompare, B.protocol, B.chain,
    B.token_kind, D.notes, D.type AS custom_asset_type FROM
    globaldb.assets as A
    LEFT JOIN globaldb.common_asset_details AS C ON C.identifier = A.identifier
    LEFT JOIN globaldb.evm_tokens as B ON B.identifier = A.identifier
    LEFT JOIN globaldb.custom_assets as D ON D.identifier = A.identifier
    """
    query = f'SELECT * FROM ({parent_query}) {prepared_filter_query}'
    # Get the identifier of the EVM tokens in the filter and query the underlying tokens where
    # parent_token_entry is in that set of identifiers. We need to join with evm_tokens since
    # the address column is present there. @yabirgb tested using JOIN instead of a subquery
    # and the query increased in complexity since we need to join more tables and the test
    # showed that the query using joins is slower than this one with the subquery.
    # test with subquery took on average 1.17 seconds and with joins 1.53 seconds.
    # The point that prevents this query from being really slow is that the subquery comes
    # filtered from the frontend where we set a limit in the number of results that goes
    # in the range from 10 to 100 and this guarantees that the size of the subquery is small.
    underlying_tokens_query = (
        f'SELECT parent_token_entry, address, token_kind, weight FROM globaldb.underlying_tokens_list '  # noqa: E501
        f'LEFT JOIN globaldb.evm_tokens ON globaldb.underlying_tokens_list.identifier=evm_tokens.identifier '  # noqa: E501
        f'WHERE parent_token_entry IN (SELECT identifier FROM ({query}))'
    )
    with userdb.conn.read_ctx() as cursor:
        globaldb = GlobalDBHandler()
        # flush the WAL so the ATTACHed copy of the global DB is up to date
        globaldb._wal_checkpoint()
        cursor.execute(
            f'ATTACH DATABASE "{globaldb.filepath()!s}" AS globaldb KEY "";',
        )
        try:
            # get all underlying tokens
            for entry in cursor.execute(underlying_tokens_query, bindings):
                underlying_tokens[entry[0]].append(UnderlyingToken.deserialize_from_db((entry[1], entry[2], entry[3])).serialize())  # noqa: E501
            cursor.execute(query, bindings)
            for entry in cursor:
                asset_type = AssetType.deserialize_from_db(entry[1])
                data = {
                    'identifier': entry[0],
                    'asset_type': str(asset_type),
                    'name': entry[4],
                }
                # for evm tokens and crypto assets
                common_data = {
                    'symbol': entry[5],
                    'started': entry[6],
                    'swapped_for': entry[8],
                    'forked': entry[7],
                    'cryptocompare': entry[10],
                    'coingecko': entry[9],
                }
                if asset_type == AssetType.FIAT:
                    data.update({
                        'symbol': entry[5],
                        'started': entry[6],
                    })
                elif asset_type == AssetType.EVM_TOKEN:
                    data.update({
                        'address': entry[2],
                        'evm_chain': ChainID.deserialize_from_db(entry[12]).to_name(),
                        'token_kind': EvmTokenKind.deserialize_from_db(entry[13]).serialize(),
                        'decimals': entry[3],
                        'underlying_tokens': underlying_tokens.get(entry[0], None),
                        'protocol': entry[11],
                    })
                    data.update(common_data)
                elif AssetType.is_crypto_asset(asset_type):
                    data.update(common_data)
                elif asset_type == AssetType.CUSTOM_ASSET:
                    data.update({
                        'notes': entry[14],
                        'custom_asset_type': entry[15],
                    })
                else:
                    raise NotImplementedError(f'Unsupported AssetType {asset_type} found in the DB. Should never happen')  # noqa: E501
                assets_info.append(data)
            # get `entries_found`: rerun the filter without pagination, only counting
            query, bindings = filter_query.prepare(with_pagination=False)
            total_found_query = f'SELECT COUNT(*) FROM ({parent_query}) ' + query
            entries_found = cursor.execute(total_found_query, bindings).fetchone()[0]
        finally:
            cursor.execute('DETACH DATABASE globaldb;')
    return assets_info, entries_found
@staticmethod
def get_assets_mappings(identifiers: list[str]) -> tuple[dict[str, dict], dict[str, dict[str, str]]]:  # noqa: E501
    """
    Given a list of asset identifiers, return a list of asset information
    (id, name, symbol, collection) for those identifiers and a dictionary that maps the
    collection id to their properties.
    """
    result: dict[str, dict[str, Any]] = {}
    asset_collections: dict[str, dict[str, str]] = {}
    identifiers_query = f'assets.identifier IN ({",".join("?" * len(identifiers))})'
    with GlobalDBHandler().conn.read_ctx() as cursor:
        cursor.execute(
            ALL_ASSETS_TABLES_QUERY_WITH_COLLECTIONS.format(dbprefix='') +
            ' WHERE ' + identifiers_query,
            tuple(identifiers),
        )
        for entry in cursor:
            result[entry[0]] = {
                'name': entry[1],
                'symbol': entry[2],
                'asset_type': AssetType.deserialize_from_db(entry[4]).serialize(),
            }
            if entry[3] is not None:  # chain is only set for evm tokens
                result[entry[0]].update({'evm_chain': ChainID.deserialize_from_db(entry[3]).to_name()})  # noqa: E501
            if entry[5] is not None:  # custom asset type
                result[entry[0]].update({'custom_asset_type': entry[5]})
            if entry[6] is not None:  # collection id
                # The collection mapping is keyed by the stringified id. The
                # previous code compared the raw DB value against the str keys,
                # so the membership check never matched and the entry was
                # needlessly rewritten for every asset of the collection.
                collection_key = str(entry[6])
                result[entry[0]].update({'collection_id': collection_key})
                if collection_key not in asset_collections:
                    asset_collections[collection_key] = {
                        'name': entry[7],
                        'symbol': entry[8],
                    }
            if entry[9] == SPAM_PROTOCOL:  # mark assets of the spam protocol
                result[entry[0]].update({'is_spam': True})
    return result, asset_collections
@staticmethod
def search_assets(
        filter_query: 'AssetsFilterQuery',
        db: 'DBHandler',
) -> list[dict[str, Any]]:
    """Returns a list of asset details that match the search query provided.

    Runs on the user DB connection with the global DB ATTACHed, so that the
    user's treat_eth2_as_eth setting can be honoured.
    """
    search_result = []
    globaldb = GlobalDBHandler()
    query, bindings = filter_query.prepare()
    query = ALL_ASSETS_TABLES_QUERY.format(dbprefix='globaldb.') + query
    resolved_eth = A_ETH.resolve_to_crypto_asset()
    # flush the WAL so the ATTACHed copy of the global DB is up to date
    globaldb._wal_checkpoint()
    with db.conn.read_ctx() as cursor:
        treat_eth2_as_eth = db.get_settings(cursor).treat_eth2_as_eth
        cursor.execute(
            f'ATTACH DATABASE "{globaldb.filepath()!s}" AS globaldb KEY "";',
        )
        try:
            cursor.execute(query, bindings)
            found_eth = False
            for entry in cursor:
                # if requested, collapse ETH and ETH2 into a single ETH result
                if treat_eth2_as_eth is True and entry[0] in (A_ETH.identifier, A_ETH2.identifier):  # noqa: E501
                    if found_eth is False:
                        search_result.append({
                            'identifier': resolved_eth.identifier,
                            'name': resolved_eth.name,
                            'symbol': resolved_eth.symbol,
                            'is_custom_asset': False,
                        })
                    found_eth = True
                    continue

                entry_info = {
                    'identifier': entry[0],
                    'name': entry[1],
                    'symbol': entry[2],
                    'is_custom_asset': AssetType.deserialize_from_db(entry[4]) == AssetType.CUSTOM_ASSET,  # noqa: E501
                }
                if entry[3] is not None:  # chain is only set for evm tokens
                    entry_info['evm_chain'] = ChainID.deserialize_from_db(entry[3]).to_name()
                if entry[5] is not None:  # custom asset type
                    entry_info['custom_asset_type'] = entry[5]
                search_result.append(entry_info)
        finally:
            cursor.execute('DETACH DATABASE globaldb;')
    return search_result
@overload
@staticmethod
def get_all_asset_data(
        mapping: Literal[True],
        serialized: bool = False,
        specific_ids: list[str] | None = None,
) -> dict[str, dict[str, Any]]:
    ...

@overload
@staticmethod
def get_all_asset_data(
        mapping: Literal[False],
        serialized: bool = False,
        specific_ids: list[str] | None = None,
) -> list[AssetData]:
    ...

@staticmethod
def get_all_asset_data(
        mapping: bool,
        serialized: bool = False,
        specific_ids: list[str] | None = None,
) -> list[AssetData] | dict[str, dict[str, Any]]:
    """Return all asset data from the DB or all data matching the given ids

    If mapping is True, return them as a Dict of identifier to data
    If mapping is False, return them as a List of AssetData

    serialized only matters when mapping is True: the dict values become
    the serialized form instead of AssetData objects.
    """
    result: list[AssetData] | dict[str, dict[str, Any]]
    if mapping:
        result = {}
    else:
        result = []
    specific_ids_query = ''
    if specific_ids is not None:
        specific_ids_query = f'AND A.identifier in ({",".join("?" * len(specific_ids))})'
    # Two UNIONed branches: EVM tokens (joined with evm_tokens for
    # address/decimals/chain/kind) and all other assets (nulls for those columns)
    querystr = f"""
    SELECT A.identifier, A.type, B.address, B.decimals, A.name, C.symbol, C.started, null, C.swapped_for, C.coingecko, C.cryptocompare, B.protocol, B.chain, B.token_kind FROM assets as A JOIN evm_tokens as B
    ON B.identifier = A.identifier JOIN common_asset_details AS C ON C.identifier = B.identifier WHERE A.type = '{AssetType.EVM_TOKEN.serialize_for_db()}' {specific_ids_query}
    UNION ALL
    SELECT A.identifier, A.type, null, null, A.name, B.symbol, B.started, B.forked, B.swapped_for, B.coingecko, B.cryptocompare, null, null, null from assets as A JOIN common_asset_details as B
    ON B.identifier = A.identifier WHERE A.type != '{AssetType.EVM_TOKEN.serialize_for_db()}' {specific_ids_query};
    """  # noqa: E501
    if specific_ids is not None:
        # the id filter appears once in each half of the UNION
        bindings = (*specific_ids, *specific_ids)
    else:
        bindings = ()
    with GlobalDBHandler().conn.read_ctx() as cursor:
        cursor.execute(querystr, bindings)
        for entry in cursor:
            asset_type = AssetType.deserialize_from_db(entry[1])
            evm_address: ChecksumEvmAddress | None
            if asset_type == AssetType.EVM_TOKEN:
                evm_address = string_to_evm_address(entry[2])
                chain = ChainID.deserialize_from_db(entry[12])
                token_kind = EvmTokenKind.deserialize_from_db(entry[13])
            else:
                evm_address, chain, token_kind = None, None, None
            data = AssetData(
                identifier=entry[0],
                asset_type=asset_type,
                address=evm_address,
                chain_id=chain,
                token_kind=token_kind,
                decimals=entry[3],
                name=entry[4],
                symbol=entry[5],
                started=entry[6],
                forked=entry[7],
                swapped_for=entry[8],
                coingecko=entry[9],
                cryptocompare=entry[10],
                protocol=entry[11],
            )
            if mapping:
                result[entry[0]] = data.serialize() if serialized else data  # type: ignore
            else:
                result.append(data)  # type: ignore
    return result
@staticmethod
def get_asset_data(
        identifier: str,
        form_with_incomplete_data: bool,
) -> AssetData | None:
    """Get all details of a single asset by identifier

    Returns None if identifier can't be matched to an asset.

    form_with_incomplete_data: if False, EVM tokens missing name, symbol or
    decimals are treated as unknown and None is returned for them.
    """
    with GlobalDBHandler().conn.read_ctx() as cursor:
        cursor.execute(
            'SELECT A.identifier, A.type, A.name, B.symbol, B.started, B.swapped_for, '
            'B.coingecko, B.cryptocompare, B.forked from assets AS A JOIN '
            'common_asset_details AS B ON A.identifier = B.identifier WHERE A.identifier=?;',
            (identifier,),
        )
        result = cursor.fetchone()
        if result is None:
            return None

        # Since comparison is case insensitive let's return original identifier
        saved_identifier = result[0]  # get the identifier as saved in the DB.
        db_serialized_type = result[1]
        name = result[2]
        symbol = result[3]
        started = result[4]
        swapped_for = result[5]
        coingecko = result[6]
        cryptocompare = result[7]
        forked = result[8]
        # token-only fields; stay None for non-EVM assets
        decimals = None
        protocol = None
        evm_address = None
        chain = None
        token_kind = None
        try:
            asset_type = AssetType.deserialize_from_db(db_serialized_type)
        except DeserializationError as e:
            log.debug(
                f'Failed to read asset {identifier} from the DB due to '
                f'{e!s}. Skipping',
            )
            return None

        if asset_type == AssetType.EVM_TOKEN:
            # fetch the token-specific columns from evm_tokens
            cursor.execute(
                'SELECT decimals, protocol, address, chain, token_kind from evm_tokens '
                'WHERE identifier=?', (saved_identifier,),
            )
            result = cursor.fetchone()
            if result is None:
                log.error(
                    f'Found token {saved_identifier} in the DB assets table but not '
                    f'in the token details table.',
                )
                return None

            decimals = result[0]
            protocol = result[1]
            evm_address = result[2]
            chain = ChainID.deserialize_from_db(result[3])
            token_kind = EvmTokenKind.deserialize_from_db(result[4])
            missing_basic_data = name is None or symbol is None or decimals is None
            if missing_basic_data and form_with_incomplete_data is False:
                log.debug(
                    f'Considering ethereum token with identifier {saved_identifier} '
                    f'as unknown since its missing either decimals or name or symbol',
                )
                return None

        return AssetData(
            identifier=saved_identifier,
            name=name,
            symbol=symbol,
            asset_type=asset_type,
            started=started,
            forked=forked,
            swapped_for=swapped_for,
            address=evm_address,
            chain_id=chain,
            token_kind=token_kind,
            decimals=decimals,
            coingecko=coingecko,
            cryptocompare=cryptocompare,
            protocol=protocol,
        )
@staticmethod
def fetch_underlying_tokens(
        cursor: DBCursor,
        parent_token_identifier: str,
) -> list[UnderlyingToken] | None:
    """Return the underlying tokens of the given parent token, or None if it has none."""
    cursor.execute(
        'SELECT B.address, B.token_kind, A.weight FROM underlying_tokens_list AS A JOIN evm_tokens as B WHERE A.identifier=B.identifier AND parent_token_entry=?;',  # noqa: E501
        (parent_token_identifier,),
    )
    rows = cursor.fetchall()
    if not rows:
        return None
    return [UnderlyingToken.deserialize_from_db(row) for row in rows]
@staticmethod
def _add_underlying_tokens(
        write_cursor: DBCursor,
        parent_token_identifier: str,
        underlying_tokens: list[UnderlyingToken],
        chain_id: ChainID,
) -> None:
    """Add the underlying tokens for the parent token

    Any underlying token not yet tracked gets placeholder rows created in
    assets, evm_tokens and common_asset_details before the mapping row is
    written in underlying_tokens_list.

    May raise InputError
    """
    for underlying_token in underlying_tokens:
        # make sure underlying token address is tracked if not already there
        asset_id = GlobalDBHandler.get_evm_token_identifier(
            cursor=write_cursor,
            address=underlying_token.address,
            chain_id=chain_id,
        )
        if asset_id is None:
            try:  # underlying token does not exist. Track it
                asset_id = underlying_token.get_identifier(parent_chain=chain_id)
                # insert order matters: assets row first, then the detail tables
                write_cursor.execute(
                    'INSERT INTO assets(identifier, name, type)'
                    'VALUES(?, ?, ?);',
                    (asset_id, None, AssetType.EVM_TOKEN.serialize_for_db()),
                )
                write_cursor.execute(
                    'INSERT INTO evm_tokens(identifier, address, chain, token_kind)'
                    'VALUES(?, ?, ?, ?)',
                    (
                        asset_id,
                        underlying_token.address,
                        chain_id.serialize_for_db(),
                        underlying_token.token_kind.serialize_for_db(),
                    ),
                )
                write_cursor.execute(
                    'INSERT INTO common_asset_details(identifier, symbol, coingecko, cryptocompare, forked, started, swapped_for)'  # noqa: E501
                    'VALUES(?, ?, ?, ?, ?, ?, ?)',
                    (asset_id, None, None, '', None, None, None),
                )
            except sqlite3.IntegrityError as e:
                raise InputError(
                    f'Failed to add underlying tokens for {parent_token_identifier} '
                    f'due to {e!s}',
                ) from e
        try:
            # link the underlying token to its parent with its weight
            write_cursor.execute(
                'INSERT INTO underlying_tokens_list(identifier, weight, parent_token_entry) '
                'VALUES(?, ?, ?)',
                (
                    asset_id,
                    str(underlying_token.weight),
                    parent_token_identifier,
                ),
            )
        except sqlite3.IntegrityError as e:
            raise InputError(
                f'Failed to add underlying tokens for {parent_token_identifier} '
                f'due to {e!s}',
            ) from e
@staticmethod
def get_evm_token_identifier(
        cursor: DBCursor,
        address: ChecksumEvmAddress,
        chain_id: ChainID,
) -> str | None:
    """Returns the asset identifier of an EVM token by address if it can be found"""
    row = cursor.execute(
        'SELECT identifier from evm_tokens WHERE address=? AND chain=?;',
        (address, chain_id.serialize_for_db()),
    ).fetchone()
    return None if row is None else row[0]
@staticmethod
def check_asset_exists(asset: AssetWithOracles) -> list[str] | None:
    """
    Checks if an asset of a given type, symbol and name exists in the DB already

    For non ethereum tokens with no unique identifier like an address this is the
    only way to check if something already exists in the DB.

    If it exists it returns a list of the identifiers of the assets.
    """
    # use the connection's read context manager instead of a raw cursor, for
    # consistency with the rest of the file and to guarantee cursor cleanup
    with GlobalDBHandler().conn.read_ctx() as cursor:
        cursor.execute(
            'SELECT A.identifier from assets AS A JOIN common_asset_details as C '
            'ON A.identifier=C.identifier WHERE A.type=? AND A.name=? AND C.symbol=?;',
            (asset.asset_type.serialize_for_db(), asset.name, asset.symbol),
        )
        result = cursor.fetchall()
    if len(result) == 0:
        return None
    return [x[0] for x in result]
@staticmethod
def get_evm_token(address: ChecksumEvmAddress, chain_id: ChainID) -> EvmToken | None:
    """Gets all details for an evm token by its address and chain.

    Returns None when no token exists for the given address/chain pair, or
    when the stored data cannot be deserialized into an EvmToken.
    """
    with GlobalDBHandler().conn.read_ctx() as cursor:
        cursor.execute(
            'SELECT A.identifier, B.address, B.chain, B.token_kind, B.decimals, C.name, '
            'A.symbol, A.started, A.swapped_for, A.coingecko, A.cryptocompare, B.protocol '
            'FROM evm_tokens AS B JOIN '
            'common_asset_details AS A ON B.identifier = A.identifier '
            'JOIN assets AS C on C.identifier=A.identifier WHERE B.address=? AND B.chain=?;',
            (address, chain_id.serialize_for_db()),
        )
        token_data = cursor.fetchone()
        if token_data is None:
            return None

        underlying = GlobalDBHandler().fetch_underlying_tokens(cursor, token_data[0])
        try:
            return EvmToken.deserialize_from_db(
                entry=token_data,
                underlying_tokens=underlying,
            )
        except UnknownAsset as e:
            log.error(
                f'Found unknown swapped_for asset {e!s} in '
                f'the DB when deserializing an EvmToken',
            )
            return None
@staticmethod
def get_evm_tokens(
        chain_id: ChainID,
        exceptions: set[ChecksumEvmAddress] | None = None,
        protocol: str | None = None,
        ignore_spam: bool = True,
) -> list[EvmToken]:
    """Gets all ethereum tokens from the DB

    Can also accept filtering parameters.
    - List of addresses to ignore via exceptions
    - Protocol for which to return tokens
    - ignore_spam (default True) to filter out "spam protocol" assets

    Tokens whose stored swapped_for asset is unknown are skipped (logged as errors).
    """
    querystr = (
        'SELECT A.identifier, B.address, B.chain, B.token_kind, B.decimals, A.name, '
        'C.symbol, C.started, C.swapped_for, C.coingecko, C.cryptocompare, B.protocol '
        'FROM evm_tokens as B LEFT OUTER JOIN '
        'assets AS A on B.identifier = A.identifier JOIN common_asset_details AS C ON '
        'C.identifier = B.identifier WHERE B.chain = ? '
    )
    bindings_list: list[str | int | ChecksumEvmAddress] = [chain_id.serialize_for_db()]
    querystr_additions = []
    if exceptions is not None:
        placeholders = ','.join('?' * len(exceptions))
        querystr_additions.append(f'B.address NOT IN ({placeholders}) ')
        bindings_list.extend(exceptions)
    if protocol is not None:
        querystr_additions.append('B.protocol=? ')
        bindings_list.append(protocol)
    if ignore_spam:  # NB: != for nullable needs to be accompanied by IS NULL
        # qualify the column with its table alias — the unqualified `protocol`
        # worked only because no other joined table has that column
        querystr_additions.append('(B.protocol!=? OR B.protocol IS NULL) ')
        bindings_list.append(SPAM_PROTOCOL)
    if querystr_additions:
        querystr += 'AND ' + 'AND '.join(querystr_additions)
    querystr += ';'

    tokens = []
    # Single read context for both the main query and the per-token underlying
    # token lookups. Fetch all rows first so we do not interleave a second
    # query on the same cursor while still iterating the result set.
    with GlobalDBHandler().conn.read_ctx() as cursor:
        entries = cursor.execute(querystr, tuple(bindings_list)).fetchall()
        for entry in entries:
            underlying_tokens = GlobalDBHandler().fetch_underlying_tokens(cursor, entry[0])
            try:
                tokens.append(EvmToken.deserialize_from_db(entry, underlying_tokens))
            except UnknownAsset as e:
                log.error(
                    f'Found unknown swapped_for asset {e!s} in '
                    f'the DB when deserializing an EvmToken',
                )

    return tokens
@staticmethod
def get_token_name(address: ChecksumEvmAddress, chain_id: ChainID) -> str | None:
    """Gets address -> name for the token and given chain if existing"""
    with GlobalDBHandler().conn.read_ctx() as cursor:
        row = cursor.execute(
            'SELECT assets.name FROM evm_tokens INNER JOIN assets ON '
            'evm_tokens.identifier = assets.identifier WHERE address = ? and chain = ?',
            (address, chain_id.serialize_for_db()),
        ).fetchone()

    if row is None:  # no such token in the DB
        return None
    return row[0]
@staticmethod
def add_evm_token_data(write_cursor: DBCursor, entry: EvmToken) -> None:
    """Adds ethereum token specific information into the global DB

    Inserts the evm_tokens row for the token and, if present, its underlying
    tokens. The corresponding assets/common_asset_details rows must already
    exist (foreign key on identifier).

    May raise InputError if the token already exists, or if the referenced
    asset identifier is missing from the assets table.
    """
    try:
        write_cursor.execute(
            'INSERT INTO '
            'evm_tokens(identifier, token_kind, chain, address, decimals, protocol) '
            'VALUES(?, ?, ?, ?, ?, ?)',
            (
                entry.identifier,
                entry.token_kind.serialize_for_db(),
                entry.chain_id.serialize_for_db(),
                entry.evm_address,
                entry.decimals,
                entry.protocol,
            ),
        )
    except sqlite3.IntegrityError as e:
        exception_msg = str(e)
        if 'FOREIGN KEY' in exception_msg:
            # should not really happen since API should check for this
            msg = (  # fixed the previously ungrammatical error message
                f'Ethereum token with address {entry.evm_address} can not be put '
                f'in the DB because asset with identifier {entry.identifier} does not exist'
            )
        else:
            msg = f'Ethereum token with identifier {entry.identifier} already exists in the DB'
        raise InputError(msg) from e

    if entry.underlying_tokens is not None:
        GlobalDBHandler._add_underlying_tokens(
            write_cursor=write_cursor,
            parent_token_identifier=entry.identifier,
            underlying_tokens=entry.underlying_tokens,
            chain_id=entry.chain_id,
        )
@staticmethod
def edit_evm_token(entry: EvmToken) -> str:
"""Edits an EVM token entry in the DB
May raise InputError if there is an error during updating
Returns the token's rotki identifier
"""
try:
with GlobalDBHandler().conn.write_ctx() as write_cursor:
write_cursor.execute(
'UPDATE common_asset_details SET symbol=?, coingecko=?, '
'cryptocompare=?, forked=?, started=?, swapped_for=? WHERE identifier=?;',
(
entry.symbol,
entry.coingecko,
entry.cryptocompare,
entry.forked.identifier if entry.forked else None,
entry.started,
entry.swapped_for.identifier if entry.swapped_for else None,
entry.identifier,
),
)
write_cursor.execute(
'UPDATE assets SET name=?, type=? WHERE identifier=?;',
(
entry.name,
AssetType.EVM_TOKEN.serialize_for_db(),
entry.identifier,
),
)