Commit e843db9

Merge branch 'moonbase-stg' into moonbeam-stg
datradito committed Sep 8, 2022
2 parents 4a1bf2a + 461d281 commit e843db9
Showing 21 changed files with 324 additions and 152 deletions.
14 changes: 10 additions & 4 deletions .github/PULL_REQUEST_TEMPLATE.md
```diff
@@ -1,7 +1,13 @@
-### What was wrong?
+### Make sure these boxes are checked! 📦✅

-Closes #
+- [ ] You ran `./run_tests.sh`
+- [ ] You ran `pre-commit run -a`
+- [ ] If you want to add your network to `setup_service.py`, provide a link to your
+[safe-deployments PR](https://github.com/safe-global/safe-deployments/pulls) and check network name
+exists in [safe-eth-py](https://github.com/safe-global/safe-eth-py/blob/master/gnosis/eth/ethereum_network.py)

+### What was wrong? 👾

-### How was it fixed?
+Closes #

-@gnosis/safe-services
+### How was it fixed? 🎯
```
5 changes: 3 additions & 2 deletions config/settings/base.py
```diff
@@ -82,6 +82,7 @@
     # 'django.contrib.humanize', # Handy template tags
 ]
 THIRD_PARTY_APPS = [
+    "django_extensions",
     "corsheaders",
     "rest_framework",
     "drf_yasg",
@@ -384,8 +385,8 @@
     "ETH_EVENTS_BLOCK_PROCESS_LIMIT_MAX", default=0
 )  # Maximum number of blocks to process together when searching for events. 0 == no limit.
 ETH_EVENTS_QUERY_CHUNK_SIZE = env.int(
-    "ETH_EVENTS_QUERY_CHUNK_SIZE", default=5_000
-)  # Number of addresses to use as `getLogs` parameter. `0 == no limit`. By testing `5000` looks like a good default
+    "ETH_EVENTS_QUERY_CHUNK_SIZE", default=1_000
+)  # Number of addresses to use as `getLogs` parameter. `0 == no limit`. By testing `1_000` looks like a good default
 ETH_EVENTS_UPDATED_BLOCK_BEHIND = env.int(
     "ETH_EVENTS_UPDATED_BLOCK_BEHIND", default=24 * 60 * 60 // 15
 )  # Number of blocks to consider an address 'almost updated'.
```
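
Note on `ETH_EVENTS_QUERY_CHUNK_SIZE`: the indexers further down split the monitored addresses into chunks of at most this size before each `getLogs` query, and the default drops from `5_000` to `1_000` addresses per request. A minimal sketch of that splitting, assuming an illustrative `chunk_addresses` helper (the name is not from the codebase):

```python
from typing import Iterator, List, Sequence


def chunk_addresses(
    addresses: Sequence[str], chunk_size: int
) -> Iterator[List[str]]:
    """Yield chunks of at most `chunk_size` addresses; one big chunk if it is 0."""
    if not chunk_size:  # 0 == no limit, send every address in a single query
        yield list(addresses)
        return
    for i in range(0, len(addresses), chunk_size):
        yield list(addresses[i : i + chunk_size])
```

Smaller chunks mean more, but lighter, `getLogs` requests; per the updated comment, `1_000` behaved better in testing.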
2 changes: 1 addition & 1 deletion config/settings/local.py
```diff
@@ -34,7 +34,7 @@
 # django-debug-toolbar
 # ------------------------------------------------------------------------------
 # https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#prerequisites
-INSTALLED_APPS += ["debug_toolbar", "django_extensions"]  # noqa F405
+INSTALLED_APPS += ["debug_toolbar"]  # noqa F405

 # https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#middleware
 MIDDLEWARE += [  # noqa F405
```
1 change: 0 additions & 1 deletion requirements-dev.txt
```diff
@@ -1,6 +1,5 @@
 -r requirements.txt
 -r requirements-test.txt
-django-extensions
 flake8
 ipdb
 ipython
```
6 changes: 3 additions & 3 deletions requirements-test.txt
```diff
@@ -1,10 +1,10 @@
 -r requirements.txt
-coverage==6.4.3
+coverage==6.4.4
 django-stubs==1.12.0
 factory-boy==3.2.1
-faker==13.15.1
+faker==14.2.0
 mypy==0.971
-pytest==7.1.2
+pytest==7.1.3
 pytest-celery==0.0.0
 pytest-django==4.5.2
 pytest-env==0.6.2
```
13 changes: 7 additions & 6 deletions requirements.txt
```diff
@@ -1,34 +1,35 @@
-boto3==1.24.46
+boto3==1.24.66
 cachetools==5.2.0
 celery==5.2.7
-django==4.0.6
+django==4.0.7
 django-cache-memoize==0.1.10
 django-celery-beat==2.3.0
 django-cors-headers==3.13.0
 django-db-geventpool==4.0.1
 django-debug-toolbar
 django-debug-toolbar-force
 django-environ==0.9.0
+django-extensions==3.2.0
 django-filter==22.1
 django-imagekit==4.1.0
 django-model-utils==4.2.0
 django-redis==5.2.0
-django-s3-storage==0.13.8
+django-s3-storage==0.13.9
 django-timezone-field==5.0.0
 djangorestframework==3.13.1
 djangorestframework-camel-case==1.3.0
 docutils==0.19
 drf-yasg[validation]==1.21.3
-firebase-admin==5.2.0
+firebase-admin==5.3.0
 flower==1.2.0
 gunicorn[gevent]==20.1.0
-hexbytes==0.2.2
+hexbytes==0.2.3
 hiredis==2.0.0
 packaging>=21.0
 pillow==9.2.0
 psycogreen==1.0.2
 psycopg2==2.9.3
 redis==4.3.4
 requests==2.28.1
-safe-eth-py[django]==4.3.2
+safe-eth-py[django]==4.4.0
 web3==5.30.0
```
2 changes: 1 addition & 1 deletion safe_transaction_service/__init__.py
```diff
@@ -1,4 +1,4 @@
-__version__ = "4.6.2"
+__version__ = "4.8.1"
 __version_info__ = tuple(
     int(num) if num.isdigit() else num
     for num in __version__.replace("-", ".", 1).split(".")
```
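
Beyond the version bump itself, the `__version_info__` expression above turns the version string into a comparable tuple: numeric components become `int`, and a pre-release suffix after the first `-` stays a string. A standalone illustration (the `version_info` function is ours, for demonstration only):

```python
def version_info(version: str) -> tuple:
    # Turn the first "-" into "." so "4.8.1-rc1" splits into four parts,
    # then convert the purely numeric parts to int.
    return tuple(
        int(num) if num.isdigit() else num
        for num in version.replace("-", ".", 1).split(".")
    )


assert version_info("4.8.1") == (4, 8, 1)
assert version_info("4.8.1-rc1") == (4, 8, 1, "rc1")
```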
23 changes: 10 additions & 13 deletions safe_transaction_service/history/indexers/erc20_events_indexer.py
```diff
@@ -3,7 +3,6 @@
 from logging import getLogger
 from typing import Iterator, List, Sequence

-import gevent
 from cache_memoize import cache_memoize
 from cachetools import cachedmethod
 from eth_abi.exceptions import DecodingError
@@ -80,19 +79,17 @@ def _do_node_query(
         else:
             addresses_chunks = [addresses]

-        jobs = [
-            gevent.spawn(
-                self.ethereum_client.erc20.get_total_transfer_history,
-                addresses_chunk,
-                from_block=from_block_number,
-                to_block=to_block_number,
-            )
-            for addresses_chunk in addresses_chunks
-        ]
-        _ = gevent.joinall(jobs)
         transfer_events = []
-        for job in jobs:
-            transfer_events.extend(job.get())
+        for addresses_chunk in addresses_chunks:
+            with self.auto_adjust_block_limit(from_block_number, to_block_number):
+                transfer_events.extend(
+                    self.ethereum_client.erc20.get_total_transfer_history(
+                        addresses_chunk,
+                        from_block=from_block_number,
+                        to_block=to_block_number,
+                    )
+                )

         return transfer_events

     @cachedmethod(cache=operator.attrgetter("_cache_is_erc20"))
```
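
The shape of this change: the `gevent.spawn`/`joinall` fan-out ran every chunk query concurrently, so no single call's duration said anything about the current block range. Querying the chunks sequentially inside `auto_adjust_block_limit` (defined in `ethereum_indexer.py` below) lets each call be timed so the block limit can adapt. A stripped-down sketch of the pattern, assuming a generic `fetch(chunk, from_block=..., to_block=...)` callable standing in for `get_total_transfer_history`:

```python
def query_chunks_sequentially(indexer, addresses_chunks, from_block, to_block, fetch):
    """Illustrative helper: time each chunk query via the indexer's context manager."""
    results = []
    for chunk in addresses_chunks:
        # Each fetch runs inside the timing context, so a slow (or fast) call
        # shrinks (or grows) `indexer.block_process_limit` for the next run.
        with indexer.auto_adjust_block_limit(from_block, to_block):
            results.extend(fetch(chunk, from_block=from_block, to_block=to_block))
    return results
```

The trade-off is explicit: requests are no longer parallel, but the indexer gains a timing feedback signal per request.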
114 changes: 60 additions & 54 deletions safe_transaction_service/history/indexers/ethereum_indexer.py
```diff
@@ -1,5 +1,6 @@
 import time
 from abc import ABC, abstractmethod
+from contextlib import contextmanager
 from logging import getLogger
 from typing import Any, List, Optional, Sequence, Tuple

@@ -37,14 +38,14 @@ def __init__(
         block_process_limit_max: int = 0,
         blocks_to_reindex_again: int = 0,
         updated_blocks_behind: int = 20,
-        query_chunk_size: Optional[int] = 5000,
+        query_chunk_size: Optional[int] = 1_000,
         block_auto_process_limit: bool = True,
     ):
         """
         :param ethereum_client:
         :param confirmations: Don't index last `confirmations` blocks to prevent from reorgs
-        :param block_process_limit: Number of blocks to scan at a time for relevant data. `0` == `No limit`
+        :param block_process_limit: Maximum number of blocks to scan at a time for relevant data. `0` == `No limit`
         :param block_process_limit_max: Maximum number of blocks to scan at a time for relevant data. `0` == `No limit`
         :param blocks_to_reindex_again: Number of blocks to reindex every time the indexer runs, in case something
             was missed.
         :param updated_blocks_behind: Number of blocks scanned for an address that can be behind and
```
```diff
@@ -261,59 +262,25 @@ def update_monitored_address(

         return updated_addresses

-    def process_addresses(
-        self, addresses: Sequence[str], current_block_number: Optional[int] = None
-    ) -> Tuple[Sequence[Any], int, bool]:
+    @contextmanager
+    def auto_adjust_block_limit(self, from_block_number: int, to_block_number: int):
         """
-        Find and process relevant data for `addresses`, then store and return it
-        :param addresses: Addresses to process
-        :param current_block_number: To prevent fetching it again
-        :return: Tuple with a sequence of `processed data`, `last_block_number` processed
-        and `True` if no more blocks to scan, `False` otherwise
+        Optimize number of elements processed every time (block process limit)
+        based on how fast the block interval is retrieved
         """
-        assert addresses, "Addresses cannot be empty!"
-
-        current_block_number = (
-            current_block_number or self.ethereum_client.current_block_number
-        )
-        parameters = self.get_block_numbers_for_search(addresses, current_block_number)
-        if parameters is None:
-            return [], current_block_number, True
-        from_block_number, to_block_number = parameters
-
-        updated = to_block_number == (current_block_number - self.confirmations)
-
-        # Optimize number of elements processed every time (block process limit)
         # Check that we are processing the `block_process_limit`, if not, measures are not valid
-        if (
+        if not (
             self.block_auto_process_limit
             and (to_block_number - from_block_number) == self.block_process_limit
         ):
-            start = int(time.time())
+            yield
         else:
-            start = None
-
-        try:
-            elements = self.find_relevant_elements(
-                addresses,
-                from_block_number,
-                to_block_number,
-                current_block_number=current_block_number,
-            )
-        except (FindRelevantElementsException, SoftTimeLimitExceeded) as e:
-            self.block_process_limit = 1  # Set back to the very minimum
-            logger.info(
-                "%s: block_process_limit set back to %d",
-                self.__class__.__name__,
-                self.block_process_limit,
-            )
-            raise e
-
-        if start:
+            start = int(time.time())
+            yield
             delta = int(time.time()) - start
             if delta > 30:
-                self.block_process_limit //= 2
+                self.block_process_limit = max(self.block_process_limit // 2, 1)
                 logger.info(
                     "%s: block_process_limit halved to %d",
                     self.__class__.__name__,
@@ -327,32 +294,71 @@ def process_addresses(
                     self.__class__.__name__,
                     self.block_process_limit,
                 )
-            elif delta < 1:
+            elif delta < 2:
                 self.block_process_limit *= 2
                 logger.info(
                     "%s: block_process_limit duplicated to %d",
                     self.__class__.__name__,
                     self.block_process_limit,
                 )
-            elif delta < 3:
+            elif delta < 5:
                 self.block_process_limit += 20
                 logger.info(
                     "%s: block_process_limit increased to %d",
                     self.__class__.__name__,
                     self.block_process_limit,
                 )

-        if (
-            self.block_process_limit_max
-            and self.block_process_limit > self.block_process_limit_max
-        ):
-            self.block_process_limit = self.block_process_limit_max
-            logger.info(
-                "%s: block_process_limit %d is bigger than block_process_limit_max %d, reducing",
-                self.__class__.__name__,
-                self.block_process_limit,
-                self.block_process_limit_max,
-            )
+            if (
+                self.block_process_limit_max
+                and self.block_process_limit > self.block_process_limit_max
+            ):
+                self.block_process_limit = self.block_process_limit_max
+                logger.info(
+                    "%s: block_process_limit %d is bigger than block_process_limit_max %d, reducing",
+                    self.__class__.__name__,
+                    self.block_process_limit,
+                    self.block_process_limit_max,
+                )
+
+    def process_addresses(
+        self, addresses: Sequence[str], current_block_number: Optional[int] = None
+    ) -> Tuple[Sequence[Any], int, bool]:
+        """
+        Find and process relevant data for `addresses`, then store and return it
+
+        :param addresses: Addresses to process
+        :param current_block_number: To prevent fetching it again
+        :return: Tuple with a sequence of `processed data`, `last_block_number` processed
+        and `True` if no more blocks to scan, `False` otherwise
+        """
+        assert addresses, "Addresses cannot be empty!"
+
+        current_block_number = (
+            current_block_number or self.ethereum_client.current_block_number
+        )
+        parameters = self.get_block_numbers_for_search(addresses, current_block_number)
+        if parameters is None:
+            return [], current_block_number, True
+        from_block_number, to_block_number = parameters
+
+        updated = to_block_number == (current_block_number - self.confirmations)
+
+        try:
+            elements = self.find_relevant_elements(
+                addresses,
+                from_block_number,
+                to_block_number,
+                current_block_number=current_block_number,
+            )
+        except (FindRelevantElementsException, SoftTimeLimitExceeded) as e:
+            self.block_process_limit = 1  # Set back to the very minimum
+            logger.info(
+                "%s: block_process_limit set back to %d",
+                self.__class__.__name__,
+                self.block_process_limit,
+            )
+            raise e

         processed_elements = self.process_elements(elements)
```
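
The context manager introduced above is essentially an AIMD-style controller for `block_process_limit`: halve quickly when queries are slow, double or grow additively when they are fast, and cap at `block_process_limit_max`. A self-contained sketch of the same idea, with illustrative names and the thresholds visible in the diff:

```python
import time
from contextlib import contextmanager


class AdaptiveLimiter:
    """Illustrative re-creation of the auto-adjust idea; not the service's code."""

    def __init__(self, limit: int = 100, limit_max: int = 0):
        self.limit = limit
        self.limit_max = limit_max  # 0 == no cap

    @contextmanager
    def measure(self):
        start = time.time()
        yield  # the timed block-range query runs here
        delta = time.time() - start
        if delta > 30:  # far too slow: halve, but never drop below 1
            self.limit = max(self.limit // 2, 1)
        elif delta < 2:  # very fast: double
            self.limit *= 2
        elif delta < 5:  # fast enough: grow additively
            self.limit += 20
        if self.limit_max and self.limit > self.limit_max:
            self.limit = self.limit_max  # respect the configured ceiling


limiter = AdaptiveLimiter(limit=50, limit_max=500)
with limiter.measure():
    time.sleep(0.1)  # stand-in for an RPC call covering `limit` blocks
assert limiter.limit == 100  # a fast call doubled the limit
```

Two details from the real code are worth keeping in mind: measurement is skipped when the processed interval is not exactly `block_process_limit` (timings from partial ranges would mislead the controller), and any `FindRelevantElementsException` or `SoftTimeLimitExceeded` resets the limit to 1 in `process_addresses`.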
20 changes: 9 additions & 11 deletions safe_transaction_service/history/indexers/events_indexer.py
```diff
@@ -5,7 +5,6 @@

 from django.conf import settings

-import gevent
 from eth_typing import ChecksumAddress
 from eth_utils import event_abi_to_log_topic
 from hexbytes import HexBytes
@@ -95,19 +94,18 @@ def _do_node_query(
                 for addresses_chunk in addresses_chunks
             ]

-            jobs = [
-                gevent.spawn(
-                    self.ethereum_client.slow_w3.eth.get_logs, single_parameters
-                )
-                for single_parameters in multiple_parameters
-            ]
-            _ = gevent.joinall(jobs)
             log_receipts = []
-            for job in jobs:
-                log_receipts.extend(job.get())
+            for single_parameters in multiple_parameters:
+                with self.auto_adjust_block_limit(from_block_number, to_block_number):
+                    log_receipts.extend(
+                        self.ethereum_client.slow_w3.eth.get_logs(single_parameters)
+                    )

             return log_receipts
         else:
-            return self.ethereum_client.slow_w3.eth.get_logs(parameters)
+            with self.auto_adjust_block_limit(from_block_number, to_block_number):
+                return self.ethereum_client.slow_w3.eth.get_logs(parameters)

     def _find_elements_using_topics(
         self,
```
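
For reference, each element of `multiple_parameters` here is one `eth_getLogs` filter scoped to a single address chunk over the same block range. A hedged sketch of what such a filter looks like (addresses and topic are truncated and purely illustrative):

```python
from_block, to_block = 3_000_000, 3_001_000
addresses_chunks = [["0xSafeA...", "0xSafeB..."]]  # illustrative address chunk
topics = [["0xddf252ad..."]]  # e.g. the ERC20 `Transfer` topic, truncated here

multiple_parameters = [
    {
        "fromBlock": from_block,
        "toBlock": to_block,
        "address": addresses_chunk,  # eth_getLogs accepts a list of addresses
        "topics": topics,
    }
    for addresses_chunk in addresses_chunks
]
```

Note that the single-filter `else:` branch now also runs under `auto_adjust_block_limit`, so unchunked queries feed the limiter too.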
19 changes: 11 additions & 8 deletions safe_transaction_service/history/indexers/internal_tx_indexer.py
```diff
@@ -119,9 +119,11 @@ def _find_relevant_elements_using_trace_block(
         addresses_set = set(addresses)  # More optimal to use with `in`
         try:
             block_numbers = list(range(from_block_number, to_block_number + 1))
-            blocks_traces: ParityBlockTrace = self.ethereum_client.parity.trace_blocks(
-                block_numbers
-            )
+
+            with self.auto_adjust_block_limit(from_block_number, to_block_number):
+                blocks_traces: ParityBlockTrace = (
+                    self.ethereum_client.parity.trace_blocks(block_numbers)
+                )
             traces: OrderedDict[HexStr, ParityFilterTrace] = OrderedDict()
             relevant_tx_hashes: Set[HexStr] = set()
             for block_number, block_traces in zip(block_numbers, blocks_traces):
@@ -170,11 +172,12 @@ def _find_relevant_elements_using_trace_filter(

         try:
             # We only need to search for traces `to` the provided addresses
-            to_traces = self.ethereum_client.parity.trace_filter(
-                from_block=from_block_number,
-                to_block=to_block_number,
-                to_address=addresses,
-            )
+            with self.auto_adjust_block_limit(from_block_number, to_block_number):
+                to_traces = self.ethereum_client.parity.trace_filter(
+                    from_block=from_block_number,
+                    to_block=to_block_number,
+                    to_address=addresses,
+                )
         except IOError as e:
             raise FindRelevantElementsException(
                 "Request error calling `trace_filter`"
```
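
Both tracing paths now run under the same timing context: `trace_blocks` fetches every trace for an explicit list of block numbers (relevance filtering against `addresses_set` happens client-side), while `trace_filter` asks the node to return only traces sent `to` the monitored addresses. A condensed sketch of the two calls as used above, assuming `indexer` and a safe-eth-py `ethereum_client` bound to a tracing-capable node:

```python
block_numbers = list(range(from_block_number, to_block_number + 1))

with indexer.auto_adjust_block_limit(from_block_number, to_block_number):
    # All traces for the range; filtering is done afterwards in Python
    blocks_traces = ethereum_client.parity.trace_blocks(block_numbers)

with indexer.auto_adjust_block_limit(from_block_number, to_block_number):
    # Node-side filtering: only traces whose recipient is a monitored address
    to_traces = ethereum_client.parity.trace_filter(
        from_block=from_block_number,
        to_block=to_block_number,
        to_address=addresses,
    )
```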
