162 changes: 91 additions & 71 deletions poetry.lock

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions pyproject.toml
@@ -36,6 +36,7 @@ APScheduler = "^3.7.0"
sentry-sdk = "^1.1.0"
pyhumps = "^3.0.2"
aiolimiter = "^1.0.0-beta.1"
tabulate = "^0.8.9"

[tool.poetry.dev-dependencies]
black = "^20.8b1"
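
`tabulate` becomes a runtime dependency here; nothing in the rest of this diff uses it directly, so how it is consumed is not shown. For reference, a minimal sketch of the library's basic call (the rows and headers below are made up for illustration):

    from tabulate import tabulate

    rows = [['1.0', '>=1.0.0, <=1.1.2'], ['1.1', '>=2.0.0']]  # illustrative data only
    print(tabulate(rows, headers=['spec_version', 'dipdup releases'], tablefmt='github'))
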
21 changes: 10 additions & 11 deletions src/demo_registrydao/handlers/on_factory_origination.py
@@ -11,14 +11,13 @@ async def on_factory_origination(
) -> None:
originated_contract = cast(str, registry_origination.data.originated_contract_address)
index_name = f'registry_dao_{originated_contract}'
if index_name not in ctx.config.indexes:
ctx.add_contract(
name=originated_contract,
address=originated_contract,
typename='registry',
)
ctx.add_index(
name=index_name,
template='registry_dao',
values=dict(contract=originated_contract),
)
ctx.add_contract(
name=originated_contract,
address=originated_contract,
typename='registry',
)
ctx.add_index(
name=index_name,
template='registry_dao',
values=dict(contract=originated_contract),
)
5 changes: 5 additions & 0 deletions src/dipdup/__init__.py
@@ -1,2 +1,7 @@
__version__ = '1.1.2'
__spec_version__ = '1.1'
spec_version_mapping = {
'0.1': '<=0.4.3',
'1.0': '>=1.0.0, <=1.1.2',
'1.1': '>=2.0.0',
}
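
`spec_version_mapping` pairs each supported config `spec_version` with the dipdup release range that understands it, and the CLI changes below consult it when the project's spec version is unknown or out of date. A rough sketch of how such a mapping can be turned into a user-facing hint (an illustrative helper, not code from this change):

    from dipdup import __spec_version__, spec_version_mapping

    def migration_hint(project_spec_version: str) -> str:
        # e.g. "project spec 1.0 (>=1.0.0, <=1.1.2) vs current 1.1 (>=2.0.0): run `dipdup migrate`"
        return (
            f'project spec {project_spec_version} ({spec_version_mapping[project_spec_version]}) '
            f'vs current {__spec_version__} ({spec_version_mapping[__spec_version__]}): run `dipdup migrate`'
        )
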
82 changes: 21 additions & 61 deletions src/dipdup/cli.py
@@ -5,13 +5,18 @@
from dataclasses import dataclass
from functools import wraps
from os.path import dirname, join
from typing import List
from typing import List, NoReturn, cast

import click
import sentry_sdk
from fcache.cache import FileCache # type: ignore
from sentry_sdk.integrations.aiohttp import AioHttpIntegration

from dipdup import __spec_version__, __version__, spec_version_mapping
from dipdup.config import DipDupConfig, LoggingConfig
from dipdup.dipdup import DipDup
from dipdup.exceptions import ConfigurationError, DipDupError, MigrationRequiredError
from dipdup import __spec_version__, __version__
from dipdup.config import DipDupConfig, LoggingConfig, PostgresDatabaseConfig
from dipdup.dipdup import DipDup
@@ -21,41 +26,16 @@

_logger = logging.getLogger(__name__)

spec_version_to_version = {
'0.1': 'dipdup v0.4.3 and below',
'1.0': 'dipdup v1.0.0 - v1.1.2',
'1.1': 'dipdup v1.2.0 and above',
}

migration_required_message = """

Migration required!

project spec version: %s (%s)
current spec version: %s (%s)

1. Run `dipdup migrate`
2. Review and commit changes

See https://baking-bad.org/blog/ for additional release information.
"""


def migration_required(from_: str, to: str) -> NoReturn:
_logger.warning(
migration_required_message,
from_,
spec_version_to_version[from_],
to,
spec_version_to_version[to],
)
quit()


def click_async(fn):
def click_command_wrapper(fn):
@wraps(fn)
def wrapper(*args, **kwargs):
return asyncio.run(fn(*args, **kwargs))
try:
return asyncio.run(fn(*args, **kwargs))
except DipDupError as e:
_logger.critical(e.__repr__())
_logger.info(e.format())
quit(e.exit_code)

return wrapper

@@ -72,7 +52,7 @@ class CLIContext:
@click.option('--config', '-c', type=str, multiple=True, help='Path to dipdup YAML config', default=['dipdup.yml'])
@click.option('--logging-config', '-l', type=str, help='Path to logging YAML config', default='logging.yml')
@click.pass_context
@click_async
@click_command_wrapper
async def cli(ctx, config: List[str], logging_config: str):
try:
path = join(os.getcwd(), logging_config)
@@ -83,10 +63,10 @@ async def cli(ctx, config: List[str], logging_config: str):
_logging_config.apply()

_config = DipDupConfig.load(config)
if _config.spec_version not in spec_version_to_version:
raise ConfigurationError('Unknown `spec_version`')
if _config.spec_version not in spec_version_mapping:
raise ConfigurationError(f'Unknown `spec_version`, correct ones: {", ".join(spec_version_mapping)}')
if _config.spec_version != __spec_version__ and ctx.invoked_subcommand != 'migrate':
migration_required(_config.spec_version, __spec_version__)
raise MigrationRequiredError(None, _config.spec_version, __spec_version__)

if _config.sentry:
sentry_sdk.init(
@@ -105,7 +85,7 @@
@click.option('--reindex', is_flag=True, help='Drop database and start indexing from scratch')
@click.option('--oneshot', is_flag=True, help='Synchronize indexes via REST and exit without starting WS connection')
@click.pass_context
@click_async
@click_command_wrapper
async def run(ctx, reindex: bool, oneshot: bool) -> None:
config: DipDupConfig = ctx.obj.config
config.initialize()
@@ -115,7 +95,7 @@ async def run(ctx, reindex: bool, oneshot: bool) -> None:

@cli.command(help='Initialize new dipdup project')
@click.pass_context
@click_async
@click_command_wrapper
async def init(ctx):
config: DipDupConfig = ctx.obj.config
config.pre_initialize()
@@ -125,7 +105,7 @@ async def init(ctx):

@cli.command(help='Migrate project to the new spec version')
@click.pass_context
@click_async
@click_command_wrapper
async def migrate(ctx):
def _bump_spec_version(spec_version: str):
for config_path in ctx.obj.config_paths:
@@ -152,15 +132,15 @@ def _bump_spec_version(spec_version: str):

@cli.command(help='Clear development request cache')
@click.pass_context
@click_async
@click_command_wrapper
async def clear_cache(ctx):
FileCache('dipdup', flag='cs').clear()


@cli.command(help='Configure Hasura GraphQL Engine')
@click.option('--reset', is_flag=True, help='Reset metadata before configuring')
@click.pass_context
@click_async
@click_command_wrapper
async def configure_hasura(ctx, reset: bool):
config: DipDupConfig = ctx.obj.config
url = config.database.connection_string
@@ -175,23 +155,3 @@ async def configure_hasura(ctx, reset: bool):
await hasura.configure(reset)
finally:
await hasura.close_session()


@cli.command(help='Configure Hasura GraphQL Engine')
@click.option('--reset', is_flag=True, help='Reset metadata before configuring')
@click.pass_context
@click_async
async def cache(ctx, reset: bool):
config: DipDupConfig = ctx.obj.config
url = config.database.connection_string
models = f'{config.package}.models'
if not config.hasura:
_logger.error('`hasura` config section is empty')
return
hasura = HasuraManager(config.package, config.hasura, cast(PostgresDatabaseConfig, config.database))

async with tortoise_wrapper(url, models):
try:
await hasura.configure(reset)
finally:
await hasura.close_session()
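
`dipdup.exceptions` itself is not part of the hunks shown here, so its exact class layout is an assumption. The sketch below is only inferred from the call sites above (`e.format()`, `e.exit_code`, `MigrationRequiredError(None, from_, to)`, `HandlerImportError(module=..., obj=...)`) and is not the real module:

    from dipdup import spec_version_mapping


    class DipDupError(Exception):
        exit_code = 1

        def __init__(self, ctx=None) -> None:
            super().__init__()
            self.ctx = ctx

        def format(self) -> str:
            return 'Unhandled DipDup error'


    class MigrationRequiredError(DipDupError):
        def __init__(self, ctx, from_: str, to: str) -> None:
            super().__init__(ctx)
            self.from_, self.to = from_, to

        def format(self) -> str:
            return (
                f'Project spec version {self.from_} ({spec_version_mapping[self.from_]}) does not match '
                f'current spec version {self.to} ({spec_version_mapping[self.to]}); run `dipdup migrate`.'
            )
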
37 changes: 24 additions & 13 deletions src/dipdup/config.py
@@ -734,17 +734,17 @@ def get_tzkt_datasource(self, name: str) -> TzktDatasourceConfig:

def get_rollback_fn(self) -> Type:
try:
module = f'{self.package}.handlers.{ROLLBACK_HANDLER}'
return getattr(importlib.import_module(module), ROLLBACK_HANDLER)
module_name = f'{self.package}.handlers.{ROLLBACK_HANDLER}'
return getattr(importlib.import_module(module_name), ROLLBACK_HANDLER)
except (ModuleNotFoundError, AttributeError) as e:
raise HandlerImportError(f'Module `{module}` not found. Have you forgot to call `init`?') from e
raise HandlerImportError(module=module_name, obj=ROLLBACK_HANDLER) from e

def get_configure_fn(self) -> Type:
try:
module = f'{self.package}.handlers.{CONFIGURE_HANDLER}'
return getattr(importlib.import_module(module), CONFIGURE_HANDLER)
module_name = f'{self.package}.handlers.{CONFIGURE_HANDLER}'
return getattr(importlib.import_module(module_name), CONFIGURE_HANDLER)
except (ModuleNotFoundError, AttributeError) as e:
raise HandlerImportError(f'Module `{module}` not found. Have you forgot to call `init`?') from e
raise HandlerImportError(module=module_name, obj=CONFIGURE_HANDLER) from e

def resolve_static_templates(self) -> None:
_logger.info('Substituting index templates')
@@ -880,20 +880,31 @@ def load(
**YAML(typ='base').load(raw_config),
}

config = cls(**json_config)
try:
config = cls(**json_config)
except Exception as e:
raise ConfigurationError(str(e)) from e
return config

def _initialize_handler_callback(self, handler_config: HandlerConfig) -> None:
_logger.info('Registering handler callback `%s`', handler_config.callback)
handler_module = importlib.import_module(f'{self.package}.handlers.{handler_config.callback}')
callback_fn = getattr(handler_module, handler_config.callback)
handler_config.callback_fn = callback_fn
try:
module_name = f'{self.package}.handlers.{handler_config.callback}'
module = importlib.import_module(module_name)
callback_fn = getattr(module, handler_config.callback)
handler_config.callback_fn = callback_fn
except (ModuleNotFoundError, AttributeError) as e:
raise HandlerImportError(module=module_name, obj=handler_config.callback) from e

def _initialize_job_callback(self, job_config: JobConfig) -> None:
_logger.info('Registering job callback `%s`', job_config.callback)
job_module = importlib.import_module(f'{self.package}.jobs.{job_config.callback}')
callback_fn = getattr(job_module, job_config.callback)
job_config.callback_fn = callback_fn
try:
module_name = f'{self.package}.jobs.{job_config.callback}'
module = importlib.import_module(module_name)
callback_fn = getattr(module, job_config.callback)
job_config.callback_fn = callback_fn
except (ModuleNotFoundError, AttributeError) as e:
raise HandlerImportError(module=module_name, obj=job_config.callback) from e

def _initialize_index(self, index_name: str, index_config: IndexConfigT) -> None:
if index_name in self._initialized:
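
Both callback loaders above resolve `<package>.handlers.<callback>` (or `<package>.jobs.<callback>`) and expect that module to export a function of the same name, exactly the shape of `on_factory_origination` at the top of this diff. A stripped-down illustration of the convention (the `demo` package and `on_transfer` handler are made-up names):

    # demo/handlers/on_transfer.py: module name matches `callback` in the index config

    async def on_transfer(ctx, transfer) -> None:  # ctx is the handler context dipdup passes in
        # _initialize_handler_callback() imports this module and calls getattr(module, 'on_transfer');
        # a missing module or function now surfaces as HandlerImportError rather than a raw
        # ModuleNotFoundError/AttributeError.
        ...
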
36 changes: 31 additions & 5 deletions src/dipdup/context.py
@@ -1,13 +1,21 @@
import os
import sys
from pprint import pformat
from typing import Any, Dict, Optional

from tortoise import Tortoise
from tortoise.transactions import in_transaction

from dipdup.config import ContractConfig, DipDupConfig, PostgresDatabaseConfig, StaticTemplateConfig
from dipdup.config import (
ContractConfig,
DipDupConfig,
OperationHandlerOriginationPatternConfig,
OperationIndexConfig,
PostgresDatabaseConfig,
StaticTemplateConfig,
)
from dipdup.datasources import DatasourceT
from dipdup.exceptions import ConfigurationError
from dipdup.exceptions import ContractAlreadyExistsError, IndexAlreadyExistsError
from dipdup.utils import FormattedLogger


@@ -22,6 +30,9 @@ def __init__(
self.config = config
self._updated: bool = False

def __str__(self) -> str:
return pformat(self.__dict__)

def commit(self) -> None:
"""Spawn indexes after handler execution"""
self._updated = True
@@ -82,8 +93,23 @@ def __init__(
self.datasource = datasource

def add_contract(self, name: str, address: str, typename: Optional[str] = None) -> None:
if name in self.config.contracts:
raise ConfigurationError(f'Contract `{name}` is already exists')
for contract_name, contract_config in self.config.contracts.items():
if name == contract_name or address == contract_config.address:
# NOTE: Origination pattern with `similar_to` field is a special case; it is safe to add a duplicate
is_similar_to = False
for index_config in self.config.indexes.values():
if not isinstance(index_config, OperationIndexConfig):
continue
for handler_config in index_config.handlers:
for pattern_config in handler_config.pattern:
if not isinstance(pattern_config, OperationHandlerOriginationPatternConfig):
continue
if pattern_config.similar_to_contract_config.address == address:
is_similar_to = True

if not is_similar_to:
raise ContractAlreadyExistsError(self, name, address)

self.config.contracts[name] = ContractConfig(
address=address,
typename=typename,
@@ -92,7 +118,7 @@ def add_contract(self, name: str, address: str, typename: Optional[str] = None)

def add_index(self, name: str, template: str, values: Dict[str, Any]) -> None:
if name in self.config.indexes:
raise ConfigurationError(f'Index `{name}` is already exists')
raise IndexAlreadyExistsError(self, name)
self.config.get_template(template)
self.config.indexes[name] = StaticTemplateConfig(
template=template,
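
With the duplicate checks living in the context (and contracts matched by a `similar_to` origination pattern exempted), a factory-style handler such as `on_factory_origination` above no longer needs its own `if index_name not in ctx.config.indexes` guard. Roughly, mirroring that handler (the address is a placeholder):

    originated = '<originated contract address>'  # placeholder
    ctx.add_contract(name=originated, address=originated, typename='registry')
    ctx.add_index(
        name=f'registry_dao_{originated}',
        template='registry_dao',
        values=dict(contract=originated),
    )
    # A genuine clash now raises ContractAlreadyExistsError / IndexAlreadyExistsError
    # instead of a generic ConfigurationError.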