diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2b6c2cec..c45de955 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/psf/black
-    rev: 22.3.0
+    rev: 22.6.0
     hooks:
       - id: black
   - repo: https://github.com/pycqa/isort
diff --git a/README.md b/README.md
index bef70e5f..2334383a 100644
--- a/README.md
+++ b/README.md
@@ -71,7 +71,7 @@ PGSync is written in Python (supporting version 3.7 onwards) and the stack is co
 PGSync leverages the [logical decoding](https://www.postgresql.org/docs/current/logicaldecoding.html) feature of [Postgres](https://www.postgresql.org) (introduced in PostgreSQL 9.4) to capture a continuous stream of change events. This feature needs to be enabled in your [Postgres](https://www.postgresql.org) configuration by setting the following in postgresql.conf:
 
 ```
-> wal_level = logical
+wal_level = logical
 ```
 
 You can select any pivot table to be the root of your document.
diff --git a/bin/parallel_sync b/bin/parallel_sync
index 26f1a668..dd89c90e 100755
--- a/bin/parallel_sync
+++ b/bin/parallel_sync
@@ -95,7 +95,7 @@ def fetch_tasks(doc: dict, block_size: Optional[int] = None) -> Generator:
     pages: dict = {}
     sync: Sync = Sync(doc)
     root: Node = sync.tree.build(sync.nodes)
-    statement: sa.sql.selectable.Select = sa.select(
+    statement: sa.sql.Select = sa.select(
         [
             sa.literal_column("1").label("x"),
             sa.literal_column("1").label("y"),
diff --git a/examples/node/README b/examples/node/README
new file mode 100644
index 00000000..3d5ca7ed
--- /dev/null
+++ b/examples/node/README
@@ -0,0 +1,3 @@
+Demonstrates Adjacency List Relationships
+
+- https://docs.sqlalchemy.org/en/14/orm/self_referential.html
\ No newline at end of file
diff --git a/examples/node/data.py b/examples/node/data.py
new file mode 100644
index 00000000..8822bf88
--- /dev/null
+++ b/examples/node/data.py
@@ -0,0 +1,49 @@
+import json
+from typing import List
+
+import click
+from schema import Node
+import sqlalchemy as sa
+from sqlalchemy.orm import sessionmaker
+
+from pgsync.base import pg_engine, subtransactions
+from pgsync.helper import teardown
+from pgsync.utils import get_config
+
+
+@click.command()
+@click.option(
+    "--config",
+    "-c",
+    help="Schema config",
+    type=click.Path(exists=True),
+)
+def main(config):
+
+    config: str = get_config(config)
+    teardown(drop_db=False, config=config)
+    documents: List[dict] = json.load(open(config))
+    engine: sa.engine.Engine = pg_engine(
+        database=documents[0].get("database", documents[0]["index"])
+    )
+
+    Session = sessionmaker(bind=engine, autoflush=True)
+    session = Session()
+
+    nodes: List[Node] = [
+        Node(id=1, name="Node A"),
+        Node(id=2, name="Node B"),
+        Node(id=3, name="Node C"),
+        Node(id=4, name="Node A_A", node_id=1),
+        Node(id=5, name="Node B_B", node_id=2),
+        Node(id=6, name="Node C_C", node_id=3),
+        Node(id=7, name="Node A_A_A", node_id=4),
+        Node(id=8, name="Node B_B_B", node_id=5),
+        Node(id=9, name="Node C_C_C", node_id=6),
+    ]
+    with subtransactions(session):
+        session.add_all(nodes)
+
+
+if __name__ == "__main__":
+    main()
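A quick sanity check for the hierarchy seeded above, as a minimal sketch (not part of this diff); it assumes examples/node/schema.py further below has already created the `node` database and tables:

```python
# Walk the seeded adjacency list: roots have node_id IS NULL and collect
# their children through the self-referential foreign key.
import sqlalchemy as sa
from schema import Node
from sqlalchemy.orm import sessionmaker

from pgsync.base import pg_engine

engine: sa.engine.Engine = pg_engine(database="node")
session = sessionmaker(bind=engine)()

for root in session.query(Node).filter(Node.node_id.is_(None)):
    for child in root.children:
        print(root.name, "->", child.name)  # e.g. "Node A -> Node A_A"
```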
} + } + ], + "relationship": { + "variant": "object", + "type": "one_to_one" + } + } + ] + } + } +] \ No newline at end of file diff --git a/examples/node/schema.py b/examples/node/schema.py new file mode 100644 index 00000000..44c9dfbc --- /dev/null +++ b/examples/node/schema.py @@ -0,0 +1,46 @@ +import json + +import click +import sqlalchemy as sa +from sqlalchemy.ext.declarative import declarative_base + +from pgsync.base import create_database, pg_engine +from pgsync.helper import teardown +from pgsync.utils import get_config + +Base = declarative_base() + + +class Node(Base): + __tablename__ = "node" + id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) + name = sa.Column(sa.String) + node_id = sa.Column(sa.Integer, sa.ForeignKey("node.id")) + children = sa.orm.relationship("Node", lazy="joined", join_depth=2) + + +def setup(config=None): + for document in json.load(open(config)): + database: str = document.get("database", document["index"]) + create_database(database) + engine: sa.engine.Engine = pg_engine(database=database) + Base.metadata.drop_all(engine) + Base.metadata.create_all(engine) + + +@click.command() +@click.option( + "--config", + "-c", + help="Schema config", + type=click.Path(exists=True), +) +def main(config): + + config = get_config(config) + teardown(config=config) + setup(config) + + +if __name__ == "__main__": + main() diff --git a/examples/poc/schema.json b/examples/poc/schema.json new file mode 100644 index 00000000..d7db6828 --- /dev/null +++ b/examples/poc/schema.json @@ -0,0 +1,11 @@ +[ + { + "database": "testdb", + "index": "test", + "nodes": { + "table": "user", + "schema": "user_profile", + "columns": [] + } + } +] diff --git a/pgsync/base.py b/pgsync/base.py index 93401ff0..a3ed0eb1 100644 --- a/pgsync/base.py +++ b/pgsync/base.py @@ -74,7 +74,7 @@ def process(value): class Base(object): def __init__(self, database: str, verbose: bool = False, *args, **kwargs): """Initialize the base class constructor.""" - self.__engine: sa.engine.base.Engine = pg_engine(database, **kwargs) + self.__engine: sa.engine.Engine = pg_engine(database, **kwargs) self.__schemas: Optional[dict] = None # models is a dict of f'{schema}.{table}' self.models: Dict[str] = {} @@ -149,7 +149,7 @@ def has_permissions(self, username: str, permissions: List[str]) -> bool: ) # Tables... - def model(self, table: str, schema: str) -> sa.sql.selectable.Alias: + def model(self, table: str, schema: str) -> sa.sql.Alias: """Get an SQLAlchemy model representation from a table. 
diff --git a/pgsync/base.py b/pgsync/base.py
index 93401ff0..a3ed0eb1 100644
--- a/pgsync/base.py
+++ b/pgsync/base.py
@@ -74,7 +74,7 @@ def process(value):
 class Base(object):
     def __init__(self, database: str, verbose: bool = False, *args, **kwargs):
         """Initialize the base class constructor."""
-        self.__engine: sa.engine.base.Engine = pg_engine(database, **kwargs)
+        self.__engine: sa.engine.Engine = pg_engine(database, **kwargs)
         self.__schemas: Optional[dict] = None
         # models is a dict of f'{schema}.{table}'
         self.models: Dict[str] = {}
@@ -149,7 +149,7 @@ def has_permissions(self, username: str, permissions: List[str]) -> bool:
         )
 
     # Tables...
-    def model(self, table: str, schema: str) -> sa.sql.selectable.Alias:
+    def model(self, table: str, schema: str) -> sa.sql.Alias:
         """Get an SQLAlchemy model representation from a table.
 
         Args:
@@ -205,7 +205,7 @@ def session(self) -> sessionmaker:
         return Session()
 
     @property
-    def engine(self) -> sa.engine.base.Engine:
+    def engine(self) -> sa.engine.Engine:
         """Get the database engine."""
         return self.__engine
 
@@ -375,9 +375,9 @@ def _logical_slot_changes(
         upto_nchanges: Optional[int] = None,
         limit: Optional[int] = None,
         offset: Optional[int] = None,
-    ) -> sa.sql.selectable.Select:
+    ) -> sa.sql.Select:
         filters: list = []
-        statement: sa.sql.selectable.Select = sa.select(
+        statement: sa.sql.Select = sa.select(
             [sa.column("xid"), sa.column("data")]
         ).select_from(
             func(
@@ -428,7 +428,7 @@ def logical_slot_get_changes(
         To get ALL changes and data in existing replication slot:
         SELECT * FROM PG_LOGICAL_SLOT_GET_CHANGES('testdb', NULL, NULL)
         """
-        statement: sa.sql.selectable.Select = self._logical_slot_changes(
+        statement: sa.sql.Select = self._logical_slot_changes(
             slot_name,
             sa.func.PG_LOGICAL_SLOT_GET_CHANGES,
             txmin=txmin,
@@ -454,7 +454,7 @@ def logical_slot_peek_changes(
 
         SELECT * FROM PG_LOGICAL_SLOT_PEEK_CHANGES('testdb', NULL, 1)
         """
-        statement: sa.sql.selectable.Select = self._logical_slot_changes(
+        statement: sa.sql.Select = self._logical_slot_changes(
             slot_name,
             sa.func.PG_LOGICAL_SLOT_PEEK_CHANGES,
             txmin=txmin,
@@ -474,7 +474,7 @@ def logical_slot_count_changes(
         upto_lsn: Optional[int] = None,
         upto_nchanges: Optional[int] = None,
     ) -> int:
-        statement: sa.sql.selectable.Select = self._logical_slot_changes(
+        statement: sa.sql.Select = self._logical_slot_changes(
             slot_name,
             sa.func.PG_LOGICAL_SLOT_PEEK_CHANGES,
             txmin=txmin,
@@ -525,11 +525,6 @@ def create_triggers(
         join_queries: bool = False,
     ) -> None:
         """Create database triggers."""
-        self.execute(
-            CREATE_TRIGGER_TEMPLATE.replace(
-                MATERIALIZED_VIEW, f"{schema}.{MATERIALIZED_VIEW}"
-            )
-        )
         queries: List[str] = []
         for table in self.tables(schema):
             schema, table = self._get_schema(schema, table)
@@ -550,7 +545,8 @@ def create_triggers(
                 f"{TRIGGER_FUNC}()",
             )
         if join_queries:
-            self.execute("; ".join(queries))
+            if queries:
+                self.execute("; ".join(queries))
         else:
             for query in queries:
                 self.execute(sa.DDL(query))
@@ -574,11 +570,18 @@ def drop_triggers(
                     f'"{schema}"."{table}"'
                 )
         if join_queries:
-            self.execute("; ".join(queries))
+            if queries:
+                self.execute("; ".join(queries))
         else:
             for query in queries:
                 self.execute(sa.DDL(query))
 
+    def create_function(self) -> None:
+        self.execute(CREATE_TRIGGER_TEMPLATE)
+
+    def drop_function(self) -> None:
+        self.execute(f"DROP FUNCTION IF EXISTS {TRIGGER_FUNC}()")
+
     def disable_triggers(self, schema: str) -> None:
         """Disable all pgsync defined triggers in database."""
         for table in self.tables(schema):
@@ -669,7 +672,7 @@ def parse_value(self, type_: str, value: str) -> Optional[str]:
                 raise
         return value
 
-    def parse_logical_slot(self, row):
+    def parse_logical_slot(self, row: str):
        def _parse_logical_slot(data):
 
             while True:
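The new `create_function`/`drop_function` pair hoists the trigger-function DDL out of `create_triggers`, so the plpgsql function is installed once per database and dropped on teardown instead of being re-created per schema. A minimal usage sketch (the database name is illustrative):

```python
from pgsync.base import Base

base = Base(database="node")  # illustrative database name
base.create_function()  # CREATE OR REPLACE FUNCTION {TRIGGER_FUNC}() ...
# ... base.create_triggers(schema) for each schema, then sync ...
base.drop_function()  # DROP FUNCTION IF EXISTS {TRIGGER_FUNC}()
```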
@@ -734,7 +737,7 @@ def _parse_logical_slot(data):
         return payload
 
     # Querying...
-    def execute(self, statement, values=None, options=None):
+    def execute(self, statement: sa.sql.Select, values=None, options=None):
         """Execute a query statement."""
         conn = self.__engine.connect()
         try:
@@ -746,7 +749,9 @@ def execute(self, statement, values=None, options=None):
             logger.exception(f"Exception {e}")
             raise
 
-    def fetchone(self, statement, label=None, literal_binds=False):
+    def fetchone(
+        self, statement: sa.sql.Select, label=None, literal_binds=False
+    ):
         """Fetch one row query."""
         if self.verbose:
             compiled_query(statement, label=label, literal_binds=literal_binds)
@@ -760,7 +765,12 @@ def fetchone(self, statement, label=None, literal_binds=False):
             raise
         return row
 
-    def fetchall(self, statement, label=None, literal_binds=False):
+    def fetchall(
+        self,
+        statement: sa.sql.Select,
+        label: Optional[str] = None,
+        literal_binds: bool = False,
+    ):
         """Fetch all rows from a query statement."""
         if self.verbose:
             compiled_query(statement, label=label, literal_binds=literal_binds)
@@ -776,7 +786,7 @@ def fetchall(self, statement, label=None, literal_binds=False):
 
     def fetchmany(
         self,
-        statement: sa.sql.selectable.Select,
+        statement: sa.sql.Select,
         chunk_size: Optional[int] = None,
     ):
         chunk_size: int = chunk_size or QUERY_CHUNK_SIZE
@@ -788,7 +798,7 @@ def fetchmany(
         for keys, row, *primary_keys in partition:
             yield keys, row, primary_keys
 
-    def fetchcount(self, statement):
+    def fetchcount(self, statement: sa.sql.Subquery) -> int:
         with self.__engine.connect() as conn:
             return conn.execute(
                 statement.original.with_only_columns(
@@ -890,15 +900,15 @@ def get_foreign_keys(node_a: Node, node_b: Node) -> dict:
 
 
 def pg_engine(
-    database,
-    user=None,
-    host=None,
-    password=None,
-    port=None,
-    echo=False,
-    sslmode=None,
-    sslrootcert=None,
-):
+    database: str,
+    user: Optional[str] = None,
+    host: Optional[str] = None,
+    password: Optional[str] = None,
+    port: Optional[str] = None,
+    echo: bool = False,
+    sslmode: Optional[str] = None,
+    sslrootcert: Optional[str] = None,
+) -> sa.engine.Engine:
     connect_args = {}
     sslmode: str = sslmode or PG_SSLMODE
     sslrootcert: str = sslrootcert or PG_SSLROOTCERT
@@ -935,8 +945,8 @@ def pg_engine(
 
 
 def pg_execute(
-    engine: sa.engine.base.Engine,
-    query,
+    engine: sa.engine.Engine,
+    query: str,
     values: Optional[list] = None,
     options: Optional[dict] = None,
 ) -> None:
@@ -952,7 +962,7 @@ def pg_execute(
         raise
 
 
-def create_schema(engine: sa.engine.base.Engine, schema: str) -> None:
+def create_schema(engine: sa.engine.Engine, schema: str) -> None:
     """Create database schema."""
     if schema != DEFAULT_SCHEMA:
         engine.execute(sa.schema.CreateSchema(schema))
@@ -961,7 +971,7 @@ def create_schema(engine: sa.engine.base.Engine, schema: str) -> None:
 def create_database(database: str, echo: bool = False) -> None:
     """Create a database."""
     logger.debug(f"Creating database: {database}")
-    engine: sa.engine.base.Engine = pg_engine(database="postgres", echo=echo)
+    engine: sa.engine.Engine = pg_engine(database="postgres", echo=echo)
     pg_execute(engine, f'CREATE DATABASE "{database}"')
     logger.debug(f"Created database: {database}")
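`pg_engine` is now fully typed and declares its return value as `sa.engine.Engine`; `sslmode` and `sslrootcert` fall back to `PG_SSLMODE`/`PG_SSLROOTCERT` when omitted. A sketch with illustrative connection values (note that `port` is annotated `Optional[str]` in this diff, not `int`):

```python
import sqlalchemy as sa

from pgsync.base import pg_engine

# Every value besides `database` is an illustrative assumption.
engine: sa.engine.Engine = pg_engine(
    database="testdb",
    user="pgsync",
    host="localhost",
    port="5432",
    sslmode="verify-full",
    sslrootcert="/path/to/root.crt",
)
```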
database: {database}") @@ -979,7 +989,7 @@ def create_extension( ) -> None: """Create a database extension.""" logger.debug(f"Creating extension: {extension}") - engine: sa.engine.base.Engine = pg_engine(database=database, echo=echo) + engine: sa.engine.Engine = pg_engine(database=database, echo=echo) pg_execute(engine, f'CREATE EXTENSION IF NOT EXISTS "{extension}"') logger.debug(f"Created extension: {extension}") @@ -987,7 +997,7 @@ def create_extension( def drop_extension(database: str, extension: str, echo: bool = False) -> None: """Drop a database extension.""" logger.debug(f"Dropping extension: {extension}") - engine: sa.engine.base.Engine = pg_engine(database=database, echo=echo) + engine: sa.engine.Engine = pg_engine(database=database, echo=echo) pg_execute(engine, f'DROP EXTENSION IF EXISTS "{extension}"') logger.debug(f"Dropped extension: {extension}") diff --git a/pgsync/node.py b/pgsync/node.py index 353ba48a..c0a7344b 100644 --- a/pgsync/node.py +++ b/pgsync/node.py @@ -103,7 +103,7 @@ def __str__(self): @dataclass class Node(object): - model: sa.sql.selectable.Alias + model: sa.sql.Alias table: str schema: str materialized: bool = False diff --git a/pgsync/sync.py b/pgsync/sync.py index 074888d8..b28cde1f 100644 --- a/pgsync/sync.py +++ b/pgsync/sync.py @@ -22,6 +22,7 @@ from . import __version__ from .base import Base, compiled_query, get_foreign_keys, TupleIdentifierType from .constants import ( + DEFAULT_SCHEMA, DELETE, INSERT, META, @@ -187,7 +188,7 @@ def validate(self, repl_slots: bool = True) -> None: for node in self.root.traverse_breadth_first(): # ensure all base tables have at least one primary_key for table in node.base_tables: - model: sa.sql.selectable.Alias = self.model(table, node.schema) + model: sa.sql.Alias = self.model(table, node.schema) if not model.primary_keys: raise PrimaryKeyNotFoundError( f"No primary key(s) for base table: {table}" @@ -264,6 +265,8 @@ def setup(self) -> None: self.teardown(drop_view=False) + self.create_function() + for schema in self.schemas: tables: Set = set([]) # tables with user defined foreign keys @@ -318,8 +321,11 @@ def teardown(self, drop_view: bool = True) -> None: self.drop_triggers( schema=schema, tables=tables, join_queries=join_queries ) + # constrain views to the public schema only if drop_view: - self.drop_view(schema=schema) + self.drop_view(schema=DEFAULT_SCHEMA) + + self.drop_function() self.drop_replication_slot(self.__name) def get_doc_id(self, primary_keys: List[str], table: str) -> str: diff --git a/pgsync/trigger.py b/pgsync/trigger.py index d726f651..edbb2d21 100644 --- a/pgsync/trigger.py +++ b/pgsync/trigger.py @@ -1,5 +1,5 @@ """PGSync Trigger template.""" -from .constants import MATERIALIZED_VIEW, TRIGGER_FUNC +from .constants import DEFAULT_SCHEMA, MATERIALIZED_VIEW, TRIGGER_FUNC CREATE_TRIGGER_TEMPLATE = f""" CREATE OR REPLACE FUNCTION {TRIGGER_FUNC}() RETURNS TRIGGER AS $$ @@ -20,7 +20,7 @@ SELECT primary_keys INTO _primary_keys - FROM {MATERIALIZED_VIEW} + FROM {DEFAULT_SCHEMA}.{MATERIALIZED_VIEW} WHERE table_name = TG_TABLE_NAME; old_row = ROW_TO_JSON(OLD); @@ -35,7 +35,7 @@ SELECT primary_keys, foreign_keys INTO _primary_keys, _foreign_keys - FROM {MATERIALIZED_VIEW} + FROM {DEFAULT_SCHEMA}.{MATERIALIZED_VIEW} WHERE table_name = TG_TABLE_NAME; new_row = ROW_TO_JSON(NEW); diff --git a/pgsync/view.py b/pgsync/view.py index 1c72d5af..2630444b 100644 --- a/pgsync/view.py +++ b/pgsync/view.py @@ -130,7 +130,7 @@ def _get_constraints( tables: List[str], label: str, constraint_type: str, -) -> 
diff --git a/pgsync/view.py b/pgsync/view.py
index 1c72d5af..2630444b 100644
--- a/pgsync/view.py
+++ b/pgsync/view.py
@@ -130,7 +130,7 @@ def _get_constraints(
     tables: List[str],
     label: str,
     constraint_type: str,
-) -> sa.sql.selectable.Select:
+) -> sa.sql.Select:
     with warnings.catch_warnings():
         warnings.simplefilter("ignore", category=sa.exc.SAWarning)
         table_constraints = model("table_constraints", "information_schema")
@@ -181,7 +181,7 @@ def _get_constraints(
 
 def _primary_keys(
     model: Callable, schema: str, tables: List[str]
-) -> sa.sql.selectable.Select:
+) -> sa.sql.Select:
     return _get_constraints(
         model,
         schema,
@@ -193,7 +193,7 @@ def _primary_keys(
 
 def _foreign_keys(
     model: Callable, schema: str, tables: List[str]
-) -> sa.sql.selectable.Select:
+) -> sa.sql.Select:
     return _get_constraints(
         model,
         schema,
@@ -204,7 +204,7 @@ def _foreign_keys(
 
 
 def create_view(
-    engine: sa.engine.base.Engine,
+    engine: sa.engine.Engine,
     model: Callable,
     fetchall: Callable,
     schema: str,
@@ -308,17 +308,17 @@ def create_view(
         )
         .alias("t")
     )
-    logger.debug(f"Creating view: {schema}.{MATERIALIZED_VIEW}")
-    engine.execute(CreateView(schema, MATERIALIZED_VIEW, statement))
+    logger.debug(f"Creating view: {DEFAULT_SCHEMA}.{MATERIALIZED_VIEW}")
+    engine.execute(CreateView(DEFAULT_SCHEMA, MATERIALIZED_VIEW, statement))
     engine.execute(DropIndex("_idx"))
     engine.execute(
-        CreateIndex("_idx", schema, MATERIALIZED_VIEW, ["table_name"])
+        CreateIndex("_idx", DEFAULT_SCHEMA, MATERIALIZED_VIEW, ["table_name"])
     )
-    logger.debug(f"Created view: {schema}.{MATERIALIZED_VIEW}")
+    logger.debug(f"Created view: {DEFAULT_SCHEMA}.{MATERIALIZED_VIEW}")
 
 
 def is_view(
-    engine: sa.engine.base.Engine,
+    engine: sa.engine.Engine,
     schema: str,
     table: str,
     materialized: bool = True,
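Since `create_view` now pins pgsync's helper view to `DEFAULT_SCHEMA`, one way to confirm where it landed is to ask Postgres directly. A sketch, assuming the view is materialized and that `MATERIALIZED_VIEW` is named `_view`:

```python
import sqlalchemy as sa

from pgsync.base import pg_engine

engine: sa.engine.Engine = pg_engine(database="testdb")  # illustrative name
with engine.connect() as conn:
    rows = conn.execute(
        sa.text(
            "SELECT schemaname, matviewname "
            "FROM pg_matviews WHERE matviewname = :name"
        ),
        {"name": "_view"},  # assumed MATERIALIZED_VIEW value
    ).fetchall()

# Expect exactly one row, in the public schema, no matter how many
# schemas the synced tables live in.
print(rows)
```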
diff --git a/requirements/dev.txt b/requirements/dev.txt
index af826c55..50cdbe68 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -8,14 +8,16 @@ async-timeout==4.0.2
     # via redis
 attrs==21.4.0
     # via pytest
-black==22.3.0
+black==22.6.0
     # via -r requirements/base.in
-boto3==1.24.12
+boto3==1.24.20
     # via -r requirements/base.in
-botocore==1.27.12
+botocore==1.27.20
     # via
     #   boto3
     #   s3transfer
+build==0.8.0
+    # via pip-tools
 bump2version==1.0.1
     # via bumpversion
 bumpversion==0.6.0
@@ -26,7 +28,7 @@ certifi==2022.6.15
     #   requests
 cfgv==3.3.1
     # via pre-commit
-charset-normalizer==2.0.12
+charset-normalizer==2.1.0
     # via requests
 click==8.1.3
     # via
@@ -49,7 +51,7 @@ elasticsearch-dsl==7.4.0
     # via -r requirements/base.in
 environs==9.5.0
     # via -r requirements/base.in
-faker==13.13.0
+faker==13.14.0
     # via -r requirements/base.in
 filelock==3.7.1
     # via virtualenv
@@ -85,7 +87,7 @@ jmespath==1.0.1
     # via
     #   boto3
     #   botocore
-marshmallow==3.16.0
+marshmallow==3.17.0
     # via environs
 mccabe==0.6.1
     # via flake8
@@ -93,10 +95,11 @@ mock==4.0.3
     # via -r requirements/test.in
 mypy-extensions==0.4.3
     # via black
-nodeenv==1.6.0
+nodeenv==1.7.0
     # via pre-commit
 packaging==21.3
     # via
+    #   build
     #   marshmallow
     #   pytest
     #   pytest-sugar
@@ -104,8 +107,8 @@ packaging==21.3
 pathspec==0.9.0
     # via black
 pep517==0.12.0
-    # via pip-tools
-pip-tools==6.6.2
+    # via build
+pip-tools==6.7.0
     # via -r requirements/dev.in
 platformdirs==2.5.2
     # via
@@ -139,7 +142,7 @@ pytest==6.2.5
     #   pytest-sugar
 pytest-cov==3.0.0
     # via -r requirements/test.in
-pytest-mock==3.7.0
+pytest-mock==3.8.1
     # via -r requirements/test.in
 pytest-runner==6.0.0
     # via -r requirements/test.in
@@ -154,9 +157,9 @@ python-dotenv==0.20.0
     # via environs
 pyyaml==6.0
     # via pre-commit
-redis==4.3.3
+redis==4.3.4
     # via -r requirements/base.in
-requests==2.28.0
+requests==2.28.1
     # via requests-aws4auth
 requests-aws4auth==1.1.2
     # via -r requirements/base.in
@@ -170,7 +173,7 @@ six==1.16.0
     #   virtualenv
 snowballstemmer==2.2.0
     # via pydocstyle
-sqlalchemy==1.4.37
+sqlalchemy==1.4.39
     # via -r requirements/base.in
 sqlparse==0.4.2
     # via -r requirements/base.in
@@ -185,8 +188,8 @@ toml==0.10.2
 tomli==2.0.1
     # via
     #   black
+    #   build
     #   coverage
-    #   pep517
 typing-extensions==4.2.0
     # via black
 urllib3==1.26.9
@@ -194,7 +197,7 @@ urllib3==1.26.9
     #   botocore
     #   elasticsearch
     #   requests
-virtualenv==20.14.1
+virtualenv==20.15.1
     # via pre-commit
 wheel==0.37.1
     # via pip-tools
diff --git a/requirements/prod.txt b/requirements/prod.txt
index 466a9c65..adf3e53f 100644
--- a/requirements/prod.txt
+++ b/requirements/prod.txt
@@ -6,11 +6,11 @@
 #
 async-timeout==4.0.2
     # via redis
-black==22.3.0
+black==22.6.0
     # via -r requirements/base.in
-boto3==1.24.12
+boto3==1.24.20
     # via -r requirements/base.in
-botocore==1.27.12
+botocore==1.27.20
     # via
     #   boto3
     #   s3transfer
@@ -22,7 +22,7 @@ certifi==2022.6.15
     # via
     #   elasticsearch
     #   requests
-charset-normalizer==2.0.12
+charset-normalizer==2.1.0
     # via requests
 click==8.1.3
     # via
@@ -38,7 +38,7 @@ elasticsearch-dsl==7.4.0
     # via -r requirements/base.in
 environs==9.5.0
     # via -r requirements/base.in
-faker==13.13.0
+faker==13.14.0
     # via -r requirements/base.in
 greenlet==1.1.2
     # via sqlalchemy
@@ -50,11 +50,11 @@ jmespath==1.0.1
     # via
     #   boto3
     #   botocore
-marshmallow==3.16.0
+marshmallow==3.17.0
     # via environs
 mypy-extensions==0.4.3
     # via black
-newrelic==7.12.0.176
+newrelic==7.14.0.177
     # via -r requirements/prod.in
 packaging==21.3
     # via
@@ -75,9 +75,9 @@ python-dateutil==2.8.2
     #   faker
 python-dotenv==0.20.0
     # via environs
-redis==4.3.3
+redis==4.3.4
     # via -r requirements/base.in
-requests==2.28.0
+requests==2.28.1
     # via requests-aws4auth
 requests-aws4auth==1.1.2
     # via -r requirements/base.in
@@ -88,7 +88,7 @@ six==1.16.0
     #   elasticsearch-dsl
     #   python-dateutil
     #   requests-aws4auth
-sqlalchemy==1.4.37
+sqlalchemy==1.4.39
     # via -r requirements/base.in
 sqlparse==0.4.2
     # via -r requirements/base.in
diff --git a/requirements/test.txt b/requirements/test.txt
index db688c56..21450173 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -8,11 +8,11 @@ async-timeout==4.0.2
     # via redis
 attrs==21.4.0
     # via pytest
-black==22.3.0
+black==22.6.0
     # via -r requirements/base.in
-boto3==1.24.12
+boto3==1.24.20
     # via -r requirements/base.in
-botocore==1.27.12
+botocore==1.27.20
     # via
     #   boto3
     #   s3transfer
@@ -24,7 +24,7 @@ certifi==2022.6.15
     # via
     #   elasticsearch
     #   requests
-charset-normalizer==2.0.12
+charset-normalizer==2.1.0
     # via requests
 click==8.1.3
     # via
@@ -42,7 +42,7 @@ elasticsearch-dsl==7.4.0
     # via -r requirements/base.in
 environs==9.5.0
     # via -r requirements/base.in
-faker==13.13.0
+faker==13.14.0
     # via -r requirements/base.in
 flake8==4.0.1
     # via
@@ -74,7 +74,7 @@ jmespath==1.0.1
     # via
     #   boto3
     #   botocore
-marshmallow==3.16.0
+marshmallow==3.17.0
     # via environs
 mccabe==0.6.1
     # via flake8
@@ -118,7 +118,7 @@ pytest==6.2.5
     #   pytest-sugar
 pytest-cov==3.0.0
     # via -r requirements/test.in
-pytest-mock==3.7.0
+pytest-mock==3.8.1
     # via -r requirements/test.in
 pytest-runner==6.0.0
     # via -r requirements/test.in
@@ -131,9 +131,9 @@ python-dateutil==2.8.2
     #   faker
 python-dotenv==0.20.0
     # via environs
-redis==4.3.3
+redis==4.3.4
     # via -r requirements/base.in
-requests==2.28.0
+requests==2.28.1
     # via requests-aws4auth
 requests-aws4auth==1.1.2
     # via -r requirements/base.in
@@ -146,7 +146,7 @@ six==1.16.0
     #   requests-aws4auth
 snowballstemmer==2.2.0
     # via pydocstyle
-sqlalchemy==1.4.37
+sqlalchemy==1.4.39
     # via -r requirements/base.in
 sqlparse==0.4.2
     # via -r requirements/base.in