diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d6f1c122..befc0c21 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -34,8 +34,8 @@ repos: "--ignore-init-module-imports", ] - - repo: https://github.com/ambv/black - rev: 21.11b1 + - repo: https://github.com/psf/black + rev: "22.3.0" hooks: - id: black diff --git a/README.md b/README.md index a918e8ff..19dbebfb 100644 --- a/README.md +++ b/README.md @@ -10,9 +10,13 @@ As it takes some effort to get the headers. We suggest that you use the storage ```python3 -from storage3 import storage_client +from storage3 import create_client -storage_client = storage_client.SupabaseStorageClient('https://.supabase.co/storage/v1)', {'apiKey': '', 'Authorization': 'Bearer '}) +url = "https://.supabase.co/storage/v1" +key = "" -storage_client.list() +# pass in is_async=True to create an async client +storage_client = create_client(url, {'apiKey': key, 'Authorization': 'Bearer '}, is_async=False) + +storage_client.list_buckets() ``` diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 00000000..0679e33c --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1074 @@ +[[package]] +name = "anyio" +version = "3.5.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] +test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=6.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] +trio = ["trio (>=0.16)"] + +[[package]] +name = "argcomplete" +version = "1.12.3" +description = "Bash tab completion for argparse" +category = "dev" +optional = false +python-versions = "*" + 
+[package.dependencies] +importlib-metadata = {version = ">=0.23,<5", markers = "python_version == \"3.7\""} + +[package.extras] +test = ["coverage", "flake8", "pexpect", "wheel"] + +[[package]] +name = "atomicwrites" +version = "1.4.0" +description = "Atomic file writes." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "attrs" +version = "21.4.0" +description = "Classes Without Boilerplate" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] + +[[package]] +name = "black" +version = "22.3.0" +description = "The uncompromising code formatter." 
+category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2021.10.8" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "cfgv" +version = "3.3.1" +description = "Validate configuration and produce human readable error messages." +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[[package]] +name = "charset-normalizer" +version = "2.0.12" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + +[[package]] +name = "click" +version = "8.1.1" +description = "Composable command line interface toolkit" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[[package]] +name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "commitizen" +version = "2.23.0" +description = "Python commitizen client tool" +category = "dev" +optional = false +python-versions = ">=3.6.2,<4.0.0" + +[package.dependencies] +argcomplete = ">=1.12.1,<2.0.0" +colorama = ">=0.4.1,<0.5.0" +decli = ">=0.5.2,<0.6.0" +jinja2 = ">=2.10.3" +packaging = ">=19,<22" +pyyaml = ">=3.08" +questionary = ">=1.4.0,<2.0.0" +termcolor = ">=1.1,<2.0" +tomlkit = ">=0.5.3,<1.0.0" +typing-extensions = ">=4.0.1,<5.0.0" + +[[package]] +name = "coverage" +version = "6.3.2" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "decli" +version = "0.5.2" +description = "Minimal, easy-to-use, declarative cli tool" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "distlib" +version = "0.3.4" +description = "Distribution utilities" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "filelock" +version = "3.6.0" +description = "A platform independent file lock." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] +testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"] + +[[package]] +name = "flake8" +version = "4.0.1" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +importlib-metadata = {version = "<4.3", markers = "python_version < \"3.8\""} +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.8.0,<2.9.0" +pyflakes = ">=2.4.0,<2.5.0" + +[[package]] +name = "h11" +version = "0.12.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "httpcore" +version = "0.14.7" +description = "A minimal low-level HTTP client." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +anyio = ">=3.0.0,<4.0.0" +certifi = "*" +h11 = ">=0.11,<0.13" +sniffio = ">=1.0.0,<2.0.0" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + +[[package]] +name = "httpx" +version = "0.21.3" +description = "The next generation HTTP client." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +certifi = "*" +charset-normalizer = "*" +httpcore = ">=0.14.0,<0.15.0" +rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +sniffio = "*" + +[package.extras] +brotli = ["brotlicffi", "brotli"] +cli = ["click (>=8.0.0,<9.0.0)", "rich (>=10.0.0,<11.0.0)", "pygments (>=2.0.0,<3.0.0)"] +http2 = ["h2 (>=3,<5)"] + +[[package]] +name = "identify" +version = "2.4.12" +description = "File identification library for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.3" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "importlib-metadata" +version = "4.2.0" +description = "Read metadata from Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "isort" +version = "5.10.1" +description = "A Python utility / library to sort Python imports." 
+category = "dev" +optional = false +python-versions = ">=3.6.1,<4.0" + +[package.extras] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] +requirements_deprecated_finder = ["pipreqs", "pip-api"] +colors = ["colorama (>=0.4.3,<0.5.0)"] +plugins = ["setuptools"] + +[[package]] +name = "jinja2" +version = "3.1.1" +description = "A very fast and expressive template engine." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markupsafe" +version = "2.1.1" +description = "Safely add untrusted strings to HTML/XML markup." +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "nodeenv" +version = "1.6.0" +description = "Node.js virtual environment builder" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "packaging" +version = "21.3" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" + +[[package]] +name = "pathspec" +version = "0.9.0" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[[package]] +name = "platformdirs" +version = "2.5.1" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "2.17.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +toml = "*" +virtualenv = ">=20.0.8" + +[[package]] +name = "prompt-toolkit" +version = "3.0.28" +description = "Library for building powerful interactive command lines in Python" +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pycodestyle" +version = "2.8.0" +description = "Python style guide checker" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pyflakes" +version = "2.4.0" +description = "passive checker of Python programs" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + 
+[[package]] +name = "pyparsing" +version = "3.0.7" +description = "Python parsing module" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "3.0.0" +description = "Pytest plugin for measuring coverage." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] + +[[package]] +name = "python-dotenv" +version = "0.20.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "questionary" +version = "1.10.0" +description = "Python library to build pretty command line user prompts ⭐️" +category = "dev" +optional = false +python-versions = ">=3.6,<4.0" + +[package.dependencies] +prompt_toolkit = ">=2.0,<4.0" + +[package.extras] +docs = ["Sphinx (>=3.3,<4.0)", "sphinx-rtd-theme 
(>=0.5.0,<0.6.0)", "sphinx-autobuild (>=2020.9.1,<2021.0.0)", "sphinx-copybutton (>=0.3.1,<0.4.0)", "sphinx-autodoc-typehints (>=1.11.1,<2.0.0)"] + +[[package]] +name = "rfc3986" +version = "1.5.0" +description = "Validating URI References per RFC 3986" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} + +[package.extras] +idna2008 = ["idna"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "sniffio" +version = "1.2.0" +description = "Sniff out which async library your code is running under" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "termcolor" +version = "1.1.0" +description = "ANSII Color formatting for output in terminal." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "tomlkit" +version = "0.10.1" +description = "Style preserving TOML library" +category = "dev" +optional = false +python-versions = ">=3.6,<4.0" + +[[package]] +name = "typed-ast" +version = "1.5.2" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "typer" +version = "0.4.1" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +click = ">=7.1.1,<9.0.0" + +[package.extras] +all = ["colorama (>=0.4.3,<0.5.0)", "shellingham (>=1.3.0,<2.0.0)"] +dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)"] +doc = ["mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "mdx-include (>=1.4.1,<2.0.0)"] +test = ["shellingham (>=1.3.0,<2.0.0)", "pytest (>=4.4.0,<5.4.0)", "pytest-cov (>=2.10.0,<3.0.0)", "coverage (>=5.2,<6.0)", "pytest-xdist (>=1.32.0,<2.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "mypy (==0.910)", "black (>=22.3.0,<23.0.0)", "isort (>=5.0.6,<6.0.0)"] + +[[package]] +name = "typing-extensions" +version = "4.1.1" +description = "Backported and Experimental Type Hints for Python 3.6+" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "unasync" +version = "0.5.0" +description = "The async transformation code." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[[package]] +name = "unasync-cli" +version = "0.0.9" +description = "Command line interface for unasync" +category = "dev" +optional = false +python-versions = ">=3.6.14,<4.0.0" + +[package.dependencies] +typer = ">=0.4.0,<0.5.0" +unasync = ">=0.5.0,<0.6.0" + +[[package]] +name = "virtualenv" +version = "20.14.0" +description = "Virtual Python Environment builder" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[package.dependencies] +distlib = ">=0.3.1,<1" +filelock = ">=3.2,<4" +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +platformdirs = ">=2,<3" +six = ">=1.9.0,<2" + +[package.extras] +docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", 
"pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] + +[[package]] +name = "wcwidth" +version = "0.2.5" +description = "Measures the displayed width of unicode strings in a terminal" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "zipp" +version = "3.7.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] + +[metadata] +lock-version = "1.1" +python-versions = "^3.7" +content-hash = "b256e7a183723da223f51e2153472792fd5cbebd470b1ce9da8a4322c4ab03a8" + +[metadata.files] +anyio = [ + {file = "anyio-3.5.0-py3-none-any.whl", hash = "sha256:b5fa16c5ff93fa1046f2eeb5bbff2dad4d3514d6cda61d02816dba34fa8c3c2e"}, + {file = "anyio-3.5.0.tar.gz", hash = "sha256:a0aeffe2fb1fdf374a8e4b471444f0f3ac4fb9f5a5b542b48824475e0042a5a6"}, +] +argcomplete = [ + {file = "argcomplete-1.12.3-py2.py3-none-any.whl", hash = "sha256:291f0beca7fd49ce285d2f10e4c1c77e9460cf823eef2de54df0c0fec88b0d81"}, + {file = "argcomplete-1.12.3.tar.gz", hash = "sha256:2c7dbffd8c045ea534921e63b0be6fe65e88599990d8dc408ac8c542b72a5445"}, +] +atomicwrites = [ + {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, + {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, +] +attrs = [ + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = 
"sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, +] +black = [ + {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"}, + {file = "black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"}, + {file = "black-22.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a"}, + {file = "black-22.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968"}, + {file = "black-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d"}, + {file = "black-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce"}, + {file = "black-22.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82"}, + {file = "black-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b"}, + {file = "black-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015"}, + {file = "black-22.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b"}, + {file = "black-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a"}, + {file = "black-22.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163"}, + {file = "black-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464"}, + {file = "black-22.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0"}, + {file = "black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176"}, + {file = "black-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0"}, + {file = "black-22.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20"}, + {file = "black-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a"}, + {file = "black-22.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad"}, + {file = "black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21"}, + {file = "black-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265"}, + {file = "black-22.3.0-py3-none-any.whl", hash = "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"}, + {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"}, +] +certifi = [ + {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, + {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, +] +cfgv = [ + {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, + {file = "cfgv-3.3.1.tar.gz", hash = 
"sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, + {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, +] +click = [ + {file = "click-8.1.1-py3-none-any.whl", hash = "sha256:5e0d195c2067da3136efb897449ec1e9e6c98282fbf30d7f9e164af9be901a6b"}, + {file = "click-8.1.1.tar.gz", hash = "sha256:7ab900e38149c9872376e8f9b5986ddcaf68c0f413cf73678a0bca5547e6f976"}, +] +colorama = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +commitizen = [ + {file = "commitizen-2.23.0-py3-none-any.whl", hash = "sha256:11497f3733f30f7a5408a9118e031bd53344c996d656550289a83fa3b6d511cc"}, + {file = "commitizen-2.23.0.tar.gz", hash = "sha256:5685d44ac235e3da0a02592e11c92aeebcf4864e059a6f5a59382207264fb671"}, +] +coverage = [ + {file = "coverage-6.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b27d894748475fa858f9597c0ee1d4829f44683f3813633aaf94b19cb5453cf"}, + {file = "coverage-6.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37d1141ad6b2466a7b53a22e08fe76994c2d35a5b6b469590424a9953155afac"}, + {file = "coverage-6.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9987b0354b06d4df0f4d3e0ec1ae76d7ce7cbca9a2f98c25041eb79eec766f1"}, + {file = "coverage-6.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:26e2deacd414fc2f97dd9f7676ee3eaecd299ca751412d89f40bc01557a6b1b4"}, + {file = "coverage-6.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4dd8bafa458b5c7d061540f1ee9f18025a68e2d8471b3e858a9dad47c8d41903"}, + {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:46191097ebc381fbf89bdce207a6c107ac4ec0890d8d20f3360345ff5976155c"}, + {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6f89d05e028d274ce4fa1a86887b071ae1755082ef94a6740238cd7a8178804f"}, + {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:58303469e9a272b4abdb9e302a780072c0633cdcc0165db7eec0f9e32f901e05"}, + {file = "coverage-6.3.2-cp310-cp310-win32.whl", hash = "sha256:2fea046bfb455510e05be95e879f0e768d45c10c11509e20e06d8fcaa31d9e39"}, + {file = "coverage-6.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:a2a8b8bcc399edb4347a5ca8b9b87e7524c0967b335fbb08a83c8421489ddee1"}, + {file = "coverage-6.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f1555ea6d6da108e1999b2463ea1003fe03f29213e459145e70edbaf3e004aaa"}, + {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5f4e1edcf57ce94e5475fe09e5afa3e3145081318e5fd1a43a6b4539a97e518"}, + {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a15dc0a14008f1da3d1ebd44bdda3e357dbabdf5a0b5034d38fcde0b5c234b7"}, + {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b7745788866028adeb1e0eca3bf1101109e2dc58456cb49d2d9b99a8c516e6"}, + {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8ce257cac556cb03be4a248d92ed36904a59a4a5ff55a994e92214cde15c5bad"}, + {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b0be84e5a6209858a1d3e8d1806c46214e867ce1b0fd32e4ea03f4bd8b2e3359"}, + {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:acf53bc2cf7282ab9b8ba346746afe703474004d9e566ad164c91a7a59f188a4"}, + {file = 
"coverage-6.3.2-cp37-cp37m-win32.whl", hash = "sha256:8bdde1177f2311ee552f47ae6e5aa7750c0e3291ca6b75f71f7ffe1f1dab3dca"}, + {file = "coverage-6.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b31651d018b23ec463e95cf10070d0b2c548aa950a03d0b559eaa11c7e5a6fa3"}, + {file = "coverage-6.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:07e6db90cd9686c767dcc593dff16c8c09f9814f5e9c51034066cad3373b914d"}, + {file = "coverage-6.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c6dbb42f3ad25760010c45191e9757e7dce981cbfb90e42feef301d71540059"}, + {file = "coverage-6.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c76aeef1b95aff3905fb2ae2d96e319caca5b76fa41d3470b19d4e4a3a313512"}, + {file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cf5cfcb1521dc3255d845d9dca3ff204b3229401994ef8d1984b32746bb45ca"}, + {file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fbbdc8d55990eac1b0919ca69eb5a988a802b854488c34b8f37f3e2025fa90d"}, + {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ec6bc7fe73a938933d4178c9b23c4e0568e43e220aef9472c4f6044bfc6dd0f0"}, + {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9baff2a45ae1f17c8078452e9e5962e518eab705e50a0aa8083733ea7d45f3a6"}, + {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd9e830e9d8d89b20ab1e5af09b32d33e1a08ef4c4e14411e559556fd788e6b2"}, + {file = "coverage-6.3.2-cp38-cp38-win32.whl", hash = "sha256:f7331dbf301b7289013175087636bbaf5b2405e57259dd2c42fdcc9fcc47325e"}, + {file = "coverage-6.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:68353fe7cdf91f109fc7d474461b46e7f1f14e533e911a2a2cbb8b0fc8613cf1"}, + {file = "coverage-6.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b78e5afb39941572209f71866aa0b206c12f0109835aa0d601e41552f9b3e620"}, + {file = 
"coverage-6.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e21876082ed887baed0146fe222f861b5815455ada3b33b890f4105d806128d"}, + {file = "coverage-6.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34626a7eee2a3da12af0507780bb51eb52dca0e1751fd1471d0810539cefb536"}, + {file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ebf730d2381158ecf3dfd4453fbca0613e16eaa547b4170e2450c9707665ce7"}, + {file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd6fe30bd519694b356cbfcaca9bd5c1737cddd20778c6a581ae20dc8c04def2"}, + {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:96f8a1cb43ca1422f36492bebe63312d396491a9165ed3b9231e778d43a7fca4"}, + {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:dd035edafefee4d573140a76fdc785dc38829fe5a455c4bb12bac8c20cfc3d69"}, + {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ca5aeb4344b30d0bec47481536b8ba1181d50dbe783b0e4ad03c95dc1296684"}, + {file = "coverage-6.3.2-cp39-cp39-win32.whl", hash = "sha256:f5fa5803f47e095d7ad8443d28b01d48c0359484fec1b9d8606d0e3282084bc4"}, + {file = "coverage-6.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:9548f10d8be799551eb3a9c74bbf2b4934ddb330e08a73320123c07f95cc2d92"}, + {file = "coverage-6.3.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:18d520c6860515a771708937d2f78f63cc47ab3b80cb78e86573b0a760161faf"}, + {file = "coverage-6.3.2.tar.gz", hash = "sha256:03e2a7826086b91ef345ff18742ee9fc47a6839ccd517061ef8fa1976e652ce9"}, +] +decli = [ + {file = "decli-0.5.2-py3-none-any.whl", hash = "sha256:d3207bc02d0169bf6ed74ccca09ce62edca0eb25b0ebf8bf4ae3fb8333e15ca0"}, + {file = "decli-0.5.2.tar.gz", hash = "sha256:f2cde55034a75c819c630c7655a844c612f2598c42c21299160465df6ad463ad"}, +] +distlib = [ + {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = 
"sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, + {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"}, +] +filelock = [ + {file = "filelock-3.6.0-py3-none-any.whl", hash = "sha256:f8314284bfffbdcfa0ff3d7992b023d4c628ced6feb957351d4c48d059f56bc0"}, + {file = "filelock-3.6.0.tar.gz", hash = "sha256:9cd540a9352e432c7246a48fe4e8712b10acb1df2ad1f30e8c070b82ae1fed85"}, +] +flake8 = [ + {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, + {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, +] +h11 = [ + {file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"}, + {file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"}, +] +httpcore = [ + {file = "httpcore-0.14.7-py3-none-any.whl", hash = "sha256:47d772f754359e56dd9d892d9593b6f9870a37aeb8ba51e9a88b09b3d68cfade"}, + {file = "httpcore-0.14.7.tar.gz", hash = "sha256:7503ec1c0f559066e7e39bc4003fd2ce023d01cf51793e3c173b864eb456ead1"}, +] +httpx = [ + {file = "httpx-0.21.3-py3-none-any.whl", hash = "sha256:df9a0fd43fa79dbab411d83eb1ea6f7a525c96ad92e60c2d7f40388971b25777"}, + {file = "httpx-0.21.3.tar.gz", hash = "sha256:7a3eb67ef0b8abbd6d9402248ef2f84a76080fa1c839f8662e6eb385640e445a"}, +] +identify = [ + {file = "identify-2.4.12-py2.py3-none-any.whl", hash = "sha256:5f06b14366bd1facb88b00540a1de05b69b310cbc2654db3c7e07fa3a4339323"}, + {file = "identify-2.4.12.tar.gz", hash = "sha256:3f3244a559290e7d3deb9e9adc7b33594c1bc85a9dd82e0f1be519bf12a1ec17"}, +] +idna = [ + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = 
"sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, +] +importlib-metadata = [ + {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, + {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, +] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] +isort = [ + {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, + {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, +] +jinja2 = [ + {file = "Jinja2-3.1.1-py3-none-any.whl", hash = "sha256:539835f51a74a69f41b848a9645dbdc35b4f20a3b601e2d9a7e22947b15ff119"}, + {file = "Jinja2-3.1.1.tar.gz", hash = "sha256:640bed4bb501cbd17194b3cace1dc2126f5b619cf068a726b98192a0fde74ae9"}, +] +markupsafe = [ + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = 
"sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +] +mccabe = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = 
"sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] +nodeenv = [ + {file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"}, + {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"}, +] +packaging = [ + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, +] +pathspec = [ + {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, + {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, +] +platformdirs = [ + {file = "platformdirs-2.5.1-py3-none-any.whl", hash = "sha256:bcae7cab893c2d310a711b70b24efb93334febe65f8de776ee320b517471e227"}, + {file = "platformdirs-2.5.1.tar.gz", hash = "sha256:7535e70dfa32e84d4b34996ea99c5e432fa29a708d0f4e394bbcb2a8faa4f16d"}, +] +pluggy = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] +pre-commit = [ + {file = "pre_commit-2.17.0-py2.py3-none-any.whl", hash = "sha256:725fa7459782d7bec5ead072810e47351de01709be838c2ce1726b9591dad616"}, + {file = "pre_commit-2.17.0.tar.gz", hash = "sha256:c1a8040ff15ad3d648c70cc3e55b93e4d2d5b687320955505587fd79bbaed06a"}, +] +prompt-toolkit = [ + {file = "prompt_toolkit-3.0.28-py3-none-any.whl", hash = "sha256:30129d870dcb0b3b6a53efdc9d0a83ea96162ffd28ffe077e94215b233dc670c"}, + {file = "prompt_toolkit-3.0.28.tar.gz", hash = "sha256:9f1cd16b1e86c2968f2519d7fb31dd9d669916f515612c269d14e9ed52b51650"}, +] +py = [ + {file = 
"py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] +pycodestyle = [ + {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, + {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, +] +pyflakes = [ + {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, + {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, +] +pyparsing = [ + {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, + {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, +] +pytest = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] +pytest-cov = [ + {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, + {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, +] +python-dotenv = [ + {file = "python-dotenv-0.20.0.tar.gz", hash = "sha256:b7e3b04a59693c42c36f9ab1cc2acc46fa5df8c78e178fc33a8d4cd05c8d498f"}, + {file = "python_dotenv-0.20.0-py3-none-any.whl", hash = "sha256:d92a187be61fe482e4fd675b6d52200e7be63a12b724abbf931a40ce4fa92938"}, +] +pyyaml = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = 
"PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + 
{file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] +questionary = [ + {file = "questionary-1.10.0-py3-none-any.whl", hash = "sha256:fecfcc8cca110fda9d561cb83f1e97ecbb93c613ff857f655818839dac74ce90"}, + {file = "questionary-1.10.0.tar.gz", hash = "sha256:600d3aefecce26d48d97eee936fdb66e4bc27f934c3ab6dd1e292c4f43946d90"}, +] +rfc3986 = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +sniffio = [ + {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"}, + {file = 
"sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"}, +] +termcolor = [ + {file = "termcolor-1.1.0.tar.gz", hash = "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b"}, +] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] +tomlkit = [ + {file = "tomlkit-0.10.1-py3-none-any.whl", hash = "sha256:3eba517439dcb2f84cf39f4f85fd2c3398309823a3c75ac3e73003638daf7915"}, + {file = "tomlkit-0.10.1.tar.gz", hash = "sha256:3c517894eadef53e9072d343d37e4427b8f0b6200a70b7c9a19b2ebd1f53b951"}, +] +typed-ast = [ + {file = "typed_ast-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:183b183b7771a508395d2cbffd6db67d6ad52958a5fdc99f450d954003900266"}, + {file = "typed_ast-1.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:676d051b1da67a852c0447621fdd11c4e104827417bf216092ec3e286f7da596"}, + {file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc2542e83ac8399752bc16e0b35e038bdb659ba237f4222616b4e83fb9654985"}, + {file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74cac86cc586db8dfda0ce65d8bcd2bf17b58668dfcc3652762f3ef0e6677e76"}, + {file = "typed_ast-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:18fe320f354d6f9ad3147859b6e16649a0781425268c4dde596093177660e71a"}, + {file = "typed_ast-1.5.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:31d8c6b2df19a777bc8826770b872a45a1f30cfefcfd729491baa5237faae837"}, + 
{file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:963a0ccc9a4188524e6e6d39b12c9ca24cc2d45a71cfdd04a26d883c922b4b78"}, + {file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0eb77764ea470f14fcbb89d51bc6bbf5e7623446ac4ed06cbd9ca9495b62e36e"}, + {file = "typed_ast-1.5.2-cp36-cp36m-win_amd64.whl", hash = "sha256:294a6903a4d087db805a7656989f613371915fc45c8cc0ddc5c5a0a8ad9bea4d"}, + {file = "typed_ast-1.5.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:26a432dc219c6b6f38be20a958cbe1abffcc5492821d7e27f08606ef99e0dffd"}, + {file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7407cfcad702f0b6c0e0f3e7ab876cd1d2c13b14ce770e412c0c4b9728a0f88"}, + {file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f30ddd110634c2d7534b2d4e0e22967e88366b0d356b24de87419cc4410c41b7"}, + {file = "typed_ast-1.5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8c08d6625bb258179b6e512f55ad20f9dfef019bbfbe3095247401e053a3ea30"}, + {file = "typed_ast-1.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:90904d889ab8e81a956f2c0935a523cc4e077c7847a836abee832f868d5c26a4"}, + {file = "typed_ast-1.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bbebc31bf11762b63bf61aaae232becb41c5bf6b3461b80a4df7e791fabb3aca"}, + {file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29dd9a3a9d259c9fa19d19738d021632d673f6ed9b35a739f48e5f807f264fb"}, + {file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:58ae097a325e9bb7a684572d20eb3e1809802c5c9ec7108e85da1eb6c1a3331b"}, + {file = "typed_ast-1.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:da0a98d458010bf4fe535f2d1e367a2e2060e105978873c04c04212fb20543f7"}, + {file = 
"typed_ast-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:33b4a19ddc9fc551ebabca9765d54d04600c4a50eda13893dadf67ed81d9a098"}, + {file = "typed_ast-1.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1098df9a0592dd4c8c0ccfc2e98931278a6c6c53cb3a3e2cf7e9ee3b06153344"}, + {file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42c47c3b43fe3a39ddf8de1d40dbbfca60ac8530a36c9b198ea5b9efac75c09e"}, + {file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f290617f74a610849bd8f5514e34ae3d09eafd521dceaa6cf68b3f4414266d4e"}, + {file = "typed_ast-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:df05aa5b241e2e8045f5f4367a9f6187b09c4cdf8578bb219861c4e27c443db5"}, + {file = "typed_ast-1.5.2.tar.gz", hash = "sha256:525a2d4088e70a9f75b08b3f87a51acc9cde640e19cc523c7e41aa355564ae27"}, +] +typer = [ + {file = "typer-0.4.1-py3-none-any.whl", hash = "sha256:e8467f0ebac0c81366c2168d6ad9f888efdfb6d4e1d3d5b4a004f46fa444b5c3"}, + {file = "typer-0.4.1.tar.gz", hash = "sha256:5646aef0d936b2c761a10393f0384ee6b5c7fe0bb3e5cd710b17134ca1d99cff"}, +] +typing-extensions = [ + {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, + {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, +] +unasync = [ + {file = "unasync-0.5.0-py3-none-any.whl", hash = "sha256:8d4536dae85e87b8751dfcc776f7656fd0baf54bb022a7889440dc1b9dc3becb"}, + {file = "unasync-0.5.0.tar.gz", hash = "sha256:b675d87cf56da68bd065d3b7a67ac71df85591978d84c53083c20d79a7e5096d"}, +] +unasync-cli = [ + {file = "unasync-cli-0.0.9.tar.gz", hash = "sha256:ca9d8c57ebb68911f8f8f68f243c7f6d0bb246ee3fd14743bc51c8317e276554"}, + {file = "unasync_cli-0.0.9-py3-none-any.whl", hash = "sha256:f96c42fb2862efa555ce6d6415a5983ceb162aa0e45be701656d20a955c7c540"}, +] 
+virtualenv = [ + {file = "virtualenv-20.14.0-py2.py3-none-any.whl", hash = "sha256:1e8588f35e8b42c6ec6841a13c5e88239de1e6e4e4cedfd3916b306dc826ec66"}, + {file = "virtualenv-20.14.0.tar.gz", hash = "sha256:8e5b402037287126e81ccde9432b95a8be5b19d36584f64957060a3488c11ca8"}, +] +wcwidth = [ + {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, + {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, +] +zipp = [ + {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, + {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, +] diff --git a/pyproject.toml b/pyproject.toml index dee87152..1e2f0de6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "storage3" -version = "0.1.1" +version = "0.2.0" description = "Supabase Storage client for Python." 
authors = ["Joel Lee ", "Leon Fedden ", "Daniel Reinón García ", "Leynier Gutiérrez González "] homepage = "https://github.com/supabase-community/storage-py" @@ -20,18 +20,20 @@ httpx = ">=0.19,<0.22" [tool.poetry.dev-dependencies] pre-commit = "^2.16.0" -black = "^21.11b1" +black = "^22.3.0" pytest = "^6.2.5" flake8 = "^4.0.1" isort = "^5.9.3" pytest-cov = "^3.0.0" commitizen = "^2.20.3" +unasync-cli = "^0.0.9" +python-dotenv = "^0.20.0" [tool.commitizen] name = "cz_conventional_commits" version = "0.1.0" version_files = [ - "storage3/__init__.py", + "storage3/utils.py", "pyproject.toml:version" ] tag_format = "v$version" diff --git a/storage3/__init__.py b/storage3/__init__.py index c55a6d54..a1756de4 100644 --- a/storage3/__init__.py +++ b/storage3/__init__.py @@ -1,4 +1,32 @@ -__version__ = "0.1.1" -from storage3 import lib, storage_client +from __future__ import annotations -__all__ = ["storage_client", "lib"] +from typing import Literal, Union, overload + +from storage3._async import AsyncStorageClient +from storage3._sync import SyncStorageClient +from storage3.utils import __version__ + +__all__ = ["create_client", "__version__"] + + +@overload +def create_client( + url: str, headers: dict[str, str], *, is_async: Literal[True] +) -> AsyncStorageClient: + ... + + +@overload +def create_client( + url: str, headers: dict[str, str], *, is_async: Literal[False] +) -> SyncStorageClient: + ... 
+ + +def create_client( + url: str, headers: dict[str, str], *, is_async: bool +) -> Union[AsyncStorageClient, SyncStorageClient]: + if is_async: + return AsyncStorageClient(url, headers) + else: + return SyncStorageClient(url, headers) diff --git a/storage3/_async/__init__.py b/storage3/_async/__init__.py new file mode 100644 index 00000000..694f552f --- /dev/null +++ b/storage3/_async/__init__.py @@ -0,0 +1 @@ +from .client import AsyncStorageClient as AsyncStorageClient diff --git a/storage3/_async/bucket.py b/storage3/_async/bucket.py new file mode 100644 index 00000000..3e79917e --- /dev/null +++ b/storage3/_async/bucket.py @@ -0,0 +1,105 @@ +from __future__ import annotations + +from typing import Any, Optional + +from httpx import HTTPError, Response + +from ..types import RequestMethod +from ..utils import AsyncClient, StorageException +from .file_api import AsyncBucket + +__all__ = ["AsyncStorageBucketAPI"] + + +class AsyncStorageBucketAPI: + """This class abstracts access to the endpoint to the Get, List, Empty, and Delete operations on a bucket""" + + def __init__(self, url: str, headers: dict[str, str], session: AsyncClient) -> None: + self.url = url + self.headers = headers + self._client = session + + async def _request( + self, + method: RequestMethod, + url: str, + json: Optional[dict[Any, Any]] = None, + ) -> Response: + response = await self._client.request( + method, url, headers=self.headers, json=json + ) + try: + response.raise_for_status() + except HTTPError: + raise StorageException(response.json()) + + return response + + async def list_buckets(self) -> list[AsyncBucket]: + """Retrieves the details of all storage buckets within an existing product.""" + # if the request doesn't error, it is assured to return a list + res = await self._request("GET", f"{self.url}/bucket") + return [ + AsyncBucket( + **bucket, _url=self.url, _headers=self.headers, _client=self._client + ) + for bucket in res.json() + ] + + async def get_bucket(self, id: str) 
-> AsyncBucket: + """Retrieves the details of an existing storage bucket. + + Parameters + ---------- + id + The unique identifier of the bucket you would like to retrieve. + """ + res = await self._request("GET", f"{self.url}/bucket/{id}") + json = res.json() + return AsyncBucket( + **json, _url=self.url, _headers=self.headers, _client=self._client + ) + + async def create_bucket( + self, id: str, name: Optional[str] = None, public: bool = False + ) -> dict[str, str]: + """Creates a new storage bucket. + + Parameters + ---------- + id + A unique identifier for the bucket you are creating. + name + A name for the bucket you are creating. If not passed, the id is used as the name as well. + public + Whether the bucket you are creating should be publicly accessible. Defaults to False. + """ + res = await self._request( + "POST", + f"{self.url}/bucket", + json={"id": id, "name": name or id, "public": public}, + ) + return res.json() + + async def empty_bucket(self, id: str) -> dict[str, str]: + """Removes all objects inside a single bucket. + + Parameters + ---------- + id + The unique identifier of the bucket you would like to empty. + """ + res = await self._request("POST", f"{self.url}/bucket/{id}/empty", json={}) + return res.json() + + async def delete_bucket(self, id: str) -> dict[str, str]: + """Deletes an existing bucket. Note that you cannot delete buckets with existing objects inside. You must first + `empty()` the bucket. + + Parameters + ---------- + id + The unique identifier of the bucket you would like to delete. 
+ """ + res = await self._request("DELETE", f"{self.url}/bucket/{id}", json={}) + return res.json() diff --git a/storage3/_async/client.py b/storage3/_async/client.py new file mode 100644 index 00000000..b49328aa --- /dev/null +++ b/storage3/_async/client.py @@ -0,0 +1,28 @@ +from ..utils import AsyncClient, __version__ +from .bucket import AsyncStorageBucketAPI +from .file_api import AsyncBucketProxy + +__all__ = [ + "AsyncStorageClient", +] + + +class AsyncStorageClient(AsyncStorageBucketAPI): + """Manage storage buckets and files.""" + + def __init__(self, url: str, headers: dict[str, str]) -> None: + super().__init__( + url, + {"User-Agent": f"supabase-py/storage3 v{__version__}", **headers}, + AsyncClient(), + ) + + def from_(self, id: str) -> AsyncBucketProxy: + """Run a storage file operation. + + Parameters + ---------- + id + The unique identifier of the bucket + """ + return AsyncBucketProxy(id, self.url, self.headers, self._client) diff --git a/storage3/_async/file_api.py b/storage3/_async/file_api.py new file mode 100644 index 00000000..8bb6b5cb --- /dev/null +++ b/storage3/_async/file_api.py @@ -0,0 +1,201 @@ +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any, Optional, Union + +from httpx import HTTPError, Response + +from ..constants import DEFAULT_FILE_OPTIONS, DEFAULT_SEARCH_OPTIONS +from ..types import BaseBucket, ListBucketFilesOptions, RequestMethod +from ..utils import AsyncClient, StorageException + +__all__ = ["AsyncBucket"] + + +class AsyncBucketActionsMixin: + """Functions needed to access the file API.""" + + id: str + _url: str + _headers: dict[str, str] + _client: AsyncClient + + async def _request( + self, + method: RequestMethod, + url: str, + headers: Optional[dict[str, Any]] = None, + json: Optional[dict[Any, Any]] = None, + files: Optional[Any] = None, + ) -> Response: + headers = headers or {} + response = await self._client.request( + method, url, headers={**self._headers, **headers}, 
json=json, files=files + ) + try: + response.raise_for_status() + except HTTPError: + raise StorageException(response.json()) + + return response + + async def create_signed_url(self, path: str, expires_in: int) -> dict[str, str]: + """ + Parameters + ---------- + path + file path to be downloaded, including the current file name. + expires_in + number of seconds until the signed URL expires. + """ + path = self._get_final_path(path) + response = await self._request( + "POST", + f"{self._url}/object/sign/{path}", + json={"expiresIn": str(expires_in)}, + ) + data = response.json() + data["signedURL"] = f"{self._url}{data['signedURL']}" + return data + + def get_public_url(self, path: str) -> str: + """ + Parameters + ---------- + path + file path, including the path and file name. For example `folder/image.png`. + """ + _path = self._get_final_path(path) + public_url = f"{self._url}/object/public/{_path}" + return public_url + + async def move(self, from_path: str, to_path: str) -> dict[str, str]: + """ + Moves an existing file, optionally renaming it at the same time. + + Parameters + ---------- + from_path + The original file path, including the current file name. For example `folder/image.png`. + to_path + The new file path, including the new file name. For example `folder/image-copy.png`. + """ + res = await self._request( + "POST", + f"{self._url}/object/move", + json={ + "bucketId": self.id, + "sourceKey": from_path, + "destinationKey": to_path, + }, + ) + return res.json() + + async def remove(self, paths: list) -> dict[str, str]: + """ + Deletes files within the same bucket + + Parameters + ---------- + paths + An array or list of files to be deletes, including the path and file name. For example [`folder/image.png`]. 
+ """ + response = await self._request( + "DELETE", + f"{self._url}/object/{self.id}", + json={"prefixes": paths}, + ) + return response.json() + + async def list( + self, + path: Optional[str] = None, + options: Optional[ListBucketFilesOptions] = None, + ) -> list[dict[str, str]]: + """ + Lists all the files within a bucket. + + Parameters + ---------- + path + The folder path. + options + Search options, including `limit`, `offset`, and `sortBy`. + """ + extra_options = options or {} + body = dict(DEFAULT_SEARCH_OPTIONS, **extra_options) + extra_headers = {"Content-Type": "application/json"} + body["prefix"] = path if path else "" + response = await self._request( + "POST", + f"{self._url}/object/list/{self.id}", + json=body, + headers=extra_headers, + ) + return response.json() + + async def download(self, path: str) -> bytes: + """ + Downloads a file. + + Parameters + ---------- + path + The file path to be downloaded, including the path and file name. For example `folder/image.png`. + """ + _path = self._get_final_path(path) + response = await self._request( + "GET", f"{self._url}/object/{_path}", headers=self._headers + ) + return response.content + + async def upload( + self, path: str, file: Union[str, Path], file_options: Optional[dict] = None + ) -> Response: + """ + Uploads a file to an existing bucket. + + Parameters + ---------- + path + The relative file path including the bucket ID. Should be of the format `bucket/folder/subfolder/filename.png`. The bucket must already exist before attempting to upload. + file + The File object to be stored in the bucket. or a async generator of chunks + file_options + HTTP headers. 
For example `cacheControl` + """ + if file_options is None: + file_options = {} + headers = dict(self._headers, **DEFAULT_FILE_OPTIONS, **file_options) + filename = path.rsplit("/", maxsplit=1)[-1] + files = {"file": (filename, open(file, "rb"), headers["contentType"])} + _path = self._get_final_path(path) + + return await self._request( + "POST", + f"{self._url}/object/{_path}", + files=files, + headers=headers, + ) + + def _get_final_path(self, path: str) -> str: + return f"{self.id}/{path}" + + +# this class is returned by methods that fetch buckets, for example StorageBucketAPI.get_bucket +# adding this mixin on the BaseBucket means that those bucket objects can also be used to +# run methods like `upload` and `download` +@dataclass(repr=False) +class AsyncBucket(BaseBucket, AsyncBucketActionsMixin): + _url: str = field(repr=False) + _headers: dict[str, str] = field(repr=False) + _client: AsyncClient = field(repr=False) + + +@dataclass +class AsyncBucketProxy(AsyncBucketActionsMixin): + # contains the minimum required fields needed to query the file API endpoints + # this object is returned by the `StorageClient.from_`` method + id: str + _url: str + _headers: dict[str, str] + _client: AsyncClient diff --git a/storage3/_sync/__init__.py b/storage3/_sync/__init__.py new file mode 100644 index 00000000..9eedb131 --- /dev/null +++ b/storage3/_sync/__init__.py @@ -0,0 +1 @@ +from .client import SyncStorageClient as SyncStorageClient diff --git a/storage3/_sync/bucket.py b/storage3/_sync/bucket.py new file mode 100644 index 00000000..02ea7b38 --- /dev/null +++ b/storage3/_sync/bucket.py @@ -0,0 +1,103 @@ +from __future__ import annotations + +from typing import Any, Optional + +from httpx import HTTPError, Response + +from ..types import RequestMethod +from ..utils import StorageException, SyncClient +from .file_api import SyncBucket + +__all__ = ["SyncStorageBucketAPI"] + + +class SyncStorageBucketAPI: + """This class abstracts access to the endpoint to the Get, 
List, Empty, and Delete operations on a bucket""" + + def __init__(self, url: str, headers: dict[str, str], session: SyncClient) -> None: + self.url = url + self.headers = headers + self._client = session + + def _request( + self, + method: RequestMethod, + url: str, + json: Optional[dict[Any, Any]] = None, + ) -> Response: + response = self._client.request(method, url, headers=self.headers, json=json) + try: + response.raise_for_status() + except HTTPError: + raise StorageException(response.json()) + + return response + + def list_buckets(self) -> list[SyncBucket]: + """Retrieves the details of all storage buckets within an existing product.""" + # if the request doesn't error, it is assured to return a list + res = self._request("GET", f"{self.url}/bucket") + return [ + SyncBucket( + **bucket, _url=self.url, _headers=self.headers, _client=self._client + ) + for bucket in res.json() + ] + + def get_bucket(self, id: str) -> SyncBucket: + """Retrieves the details of an existing storage bucket. + + Parameters + ---------- + id + The unique identifier of the bucket you would like to retrieve. + """ + res = self._request("GET", f"{self.url}/bucket/{id}") + json = res.json() + return SyncBucket( + **json, _url=self.url, _headers=self.headers, _client=self._client + ) + + def create_bucket( + self, id: str, name: Optional[str] = None, public: bool = False + ) -> dict[str, str]: + """Creates a new storage bucket. + + Parameters + ---------- + id + A unique identifier for the bucket you are creating. + name + A name for the bucket you are creating. If not passed, the id is used as the name as well. + public + Whether the bucket you are creating should be publicly accessible. Defaults to False. + """ + res = self._request( + "POST", + f"{self.url}/bucket", + json={"id": id, "name": name or id, "public": public}, + ) + return res.json() + + def empty_bucket(self, id: str) -> dict[str, str]: + """Removes all objects inside a single bucket. 
+
+        Parameters
+        ----------
+        id
+            The unique identifier of the bucket you would like to empty.
+        """
+        res = self._request("POST", f"{self.url}/bucket/{id}/empty", json={})
+        return res.json()
+
+    def delete_bucket(self, id: str) -> dict[str, str]:
+        """Deletes an existing bucket. Note that you cannot delete buckets with existing objects inside. You must first
+        `empty_bucket()` the bucket.
+
+        Parameters
+        ----------
+        id
+            The unique identifier of the bucket you would like to delete.
+        """
+        res = self._request("DELETE", f"{self.url}/bucket/{id}", json={})
+        return res.json()
diff --git a/storage3/_sync/client.py b/storage3/_sync/client.py
new file mode 100644
index 00000000..bc89b983
--- /dev/null
+++ b/storage3/_sync/client.py
@@ -0,0 +1,28 @@
+from ..utils import SyncClient, __version__
+from .bucket import SyncStorageBucketAPI
+from .file_api import SyncBucketProxy
+
+__all__ = [
+    "SyncStorageClient",
+]
+
+
+class SyncStorageClient(SyncStorageBucketAPI):
+    """Manage storage buckets and files."""
+
+    def __init__(self, url: str, headers: dict[str, str]) -> None:
+        super().__init__(
+            url,
+            {"User-Agent": f"supabase-py/storage3 v{__version__}", **headers},
+            SyncClient(),
+        )
+
+    def from_(self, id: str) -> SyncBucketProxy:
+        """Run a storage file operation.
+ + Parameters + ---------- + id + The unique identifier of the bucket + """ + return SyncBucketProxy(id, self.url, self.headers, self._client) diff --git a/storage3/_sync/file_api.py b/storage3/_sync/file_api.py new file mode 100644 index 00000000..23548849 --- /dev/null +++ b/storage3/_sync/file_api.py @@ -0,0 +1,201 @@ +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any, Optional, Union + +from httpx import HTTPError, Response + +from ..constants import DEFAULT_FILE_OPTIONS, DEFAULT_SEARCH_OPTIONS +from ..types import BaseBucket, ListBucketFilesOptions, RequestMethod +from ..utils import StorageException, SyncClient + +__all__ = ["SyncBucket"] + + +class SyncBucketActionsMixin: + """Functions needed to access the file API.""" + + id: str + _url: str + _headers: dict[str, str] + _client: SyncClient + + def _request( + self, + method: RequestMethod, + url: str, + headers: Optional[dict[str, Any]] = None, + json: Optional[dict[Any, Any]] = None, + files: Optional[Any] = None, + ) -> Response: + headers = headers or {} + response = self._client.request( + method, url, headers={**self._headers, **headers}, json=json, files=files + ) + try: + response.raise_for_status() + except HTTPError: + raise StorageException(response.json()) + + return response + + def create_signed_url(self, path: str, expires_in: int) -> dict[str, str]: + """ + Parameters + ---------- + path + file path to be downloaded, including the current file name. + expires_in + number of seconds until the signed URL expires. + """ + path = self._get_final_path(path) + response = self._request( + "POST", + f"{self._url}/object/sign/{path}", + json={"expiresIn": str(expires_in)}, + ) + data = response.json() + data["signedURL"] = f"{self._url}{data['signedURL']}" + return data + + def get_public_url(self, path: str) -> str: + """ + Parameters + ---------- + path + file path, including the path and file name. For example `folder/image.png`. 
+ """ + _path = self._get_final_path(path) + public_url = f"{self._url}/object/public/{_path}" + return public_url + + def move(self, from_path: str, to_path: str) -> dict[str, str]: + """ + Moves an existing file, optionally renaming it at the same time. + + Parameters + ---------- + from_path + The original file path, including the current file name. For example `folder/image.png`. + to_path + The new file path, including the new file name. For example `folder/image-copy.png`. + """ + res = self._request( + "POST", + f"{self._url}/object/move", + json={ + "bucketId": self.id, + "sourceKey": from_path, + "destinationKey": to_path, + }, + ) + return res.json() + + def remove(self, paths: list) -> dict[str, str]: + """ + Deletes files within the same bucket + + Parameters + ---------- + paths + An array or list of files to be deletes, including the path and file name. For example [`folder/image.png`]. + """ + response = self._request( + "DELETE", + f"{self._url}/object/{self.id}", + json={"prefixes": paths}, + ) + return response.json() + + def list( + self, + path: Optional[str] = None, + options: Optional[ListBucketFilesOptions] = None, + ) -> list[dict[str, str]]: + """ + Lists all the files within a bucket. + + Parameters + ---------- + path + The folder path. + options + Search options, including `limit`, `offset`, and `sortBy`. + """ + extra_options = options or {} + body = dict(DEFAULT_SEARCH_OPTIONS, **extra_options) + extra_headers = {"Content-Type": "application/json"} + body["prefix"] = path if path else "" + response = self._request( + "POST", + f"{self._url}/object/list/{self.id}", + json=body, + headers=extra_headers, + ) + return response.json() + + def download(self, path: str) -> bytes: + """ + Downloads a file. + + Parameters + ---------- + path + The file path to be downloaded, including the path and file name. For example `folder/image.png`. 
+ """ + _path = self._get_final_path(path) + response = self._request( + "GET", f"{self._url}/object/{_path}", headers=self._headers + ) + return response.content + + def upload( + self, path: str, file: Union[str, Path], file_options: Optional[dict] = None + ) -> Response: + """ + Uploads a file to an existing bucket. + + Parameters + ---------- + path + The relative file path including the bucket ID. Should be of the format `bucket/folder/subfolder/filename.png`. The bucket must already exist before attempting to upload. + file + The File object to be stored in the bucket. or a async generator of chunks + file_options + HTTP headers. For example `cacheControl` + """ + if file_options is None: + file_options = {} + headers = dict(self._headers, **DEFAULT_FILE_OPTIONS, **file_options) + filename = path.rsplit("/", maxsplit=1)[-1] + files = {"file": (filename, open(file, "rb"), headers["contentType"])} + _path = self._get_final_path(path) + + return self._request( + "POST", + f"{self._url}/object/{_path}", + files=files, + headers=headers, + ) + + def _get_final_path(self, path: str) -> str: + return f"{self.id}/{path}" + + +# this class is returned by methods that fetch buckets, for example StorageBucketAPI.get_bucket +# adding this mixin on the BaseBucket means that those bucket objects can also be used to +# run methods like `upload` and `download` +@dataclass(repr=False) +class SyncBucket(BaseBucket, SyncBucketActionsMixin): + _url: str = field(repr=False) + _headers: dict[str, str] = field(repr=False) + _client: SyncClient = field(repr=False) + + +@dataclass +class SyncBucketProxy(SyncBucketActionsMixin): + # contains the minimum required fields needed to query the file API endpoints + # this object is returned by the `StorageClient.from_`` method + id: str + _url: str + _headers: dict[str, str] + _client: SyncClient diff --git a/storage3/constants.py b/storage3/constants.py new file mode 100644 index 00000000..25cd37c6 --- /dev/null +++ b/storage3/constants.py 
@@ -0,0 +1,13 @@ +DEFAULT_SEARCH_OPTIONS = { + "limit": 100, + "offset": 0, + "sortBy": { + "column": "name", + "order": "asc", + }, +} +DEFAULT_FILE_OPTIONS = { + "cacheControl": "3600", + "contentType": "text/plain;charset=UTF-8", + "x-upsert": "false", +} diff --git a/storage3/lib/__init__.py b/storage3/lib/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/storage3/lib/storage_bucket_api.py b/storage3/lib/storage_bucket_api.py deleted file mode 100644 index ae6382c4..00000000 --- a/storage3/lib/storage_bucket_api.py +++ /dev/null @@ -1,182 +0,0 @@ -from __future__ import annotations - -from collections.abc import Awaitable -from dataclasses import dataclass -from datetime import datetime -from typing import Any, Dict, List, Optional, Type, Union - -from httpx import AsyncClient, Client, HTTPError - -__all__ = ["Bucket", "StorageBucketAPI"] - -_RequestMethod = str - - -class StorageException(Exception): - """Error raised when an operation on the storage API fails.""" - - -@dataclass -class Bucket: - id: str - name: str - owner: str - public: bool - created_at: datetime - updated_at: datetime - - def __post_init__(self) -> None: - # created_at and updated_at are returned by the API as ISO timestamps - # so we convert them to datetime objects - self.created_at = datetime.fromisoformat(self.created_at) # type: ignore - self.updated_at = datetime.fromisoformat(self.updated_at) # type: ignore - - -ResponseType = Union[ - Dict[ - str, str - ], # response from an endpoint without a custom response_class, example: create_bucket - List[ - Bucket - ], # response from an endpoint which returns a list of objects, example: list_buckets - Bucket, # response from an endpoint which returns a single object, example: get_bucket - None, -] - - -class StorageBucketAPI: - """This class abstracts access to the endpoint to the Get, List, Empty, and Delete operations on a bucket""" - - def __init__( - self, url: str, headers: dict[str, str], is_async: bool = 
False - ) -> None: - self.url = url - self.headers = headers - - self._is_async = is_async - - if is_async: - self._client = AsyncClient(headers=self.headers) - else: - self._client = Client(headers=self.headers) - - def _request( - self, - method: _RequestMethod, - url: str, - json: Optional[dict[Any, Any]] = None, - response_class: Optional[Type] = None, - ) -> Any: - if self._is_async: - return self._async_request(method, url, json, response_class) - else: - return self._sync_request(method, url, json, response_class) - - def _sync_request( - self, - method: _RequestMethod, - url: str, - json: Optional[dict[Any, Any]] = None, - response_class: Optional[Type] = None, - ) -> ResponseType: - if isinstance(self._client, AsyncClient): # only to appease the type checker - return None - - response = self._client.request(method, url, json=json) - try: - response.raise_for_status() - except HTTPError: - raise StorageException(response.json()) - - response_data = response.json() - - if not response_class: - return response_data - - if isinstance(response_data, list): - return [response_class(**item) for item in response_data] - else: - return response_class(**response_data) - - async def _async_request( - self, - method: _RequestMethod, - url: str, - json: Optional[dict[Any, Any]] = None, - response_class: Optional[Type] = None, - ) -> ResponseType: - if isinstance(self._client, Client): # only to appease the type checker - return - - response = await self._client.request(method, url, json=json) - try: - response.raise_for_status() - except HTTPError: - raise StorageException(response.json()) - - response_data = response.json() - - if not response_class: - return response_data - - if isinstance(response_data, list): - return [response_class(**item) for item in response_data] - else: - return response_class(**response_data) - - def list_buckets(self) -> Union[list[Bucket], Awaitable[list[Bucket]], None]: - """Retrieves the details of all storage buckets within an existing 
product.""" - return self._request("GET", f"{self.url}/bucket", response_class=Bucket) - - def get_bucket(self, id: str) -> Union[Bucket, Awaitable[Bucket], None]: - """Retrieves the details of an existing storage bucket. - - Parameters - ---------- - id - The unique identifier of the bucket you would like to retrieve. - """ - return self._request("GET", f"{self.url}/bucket/{id}", response_class=Bucket) - - def create_bucket( - self, id: str, name: str = None, public: bool = False - ) -> Union[dict[str, str], Awaitable[dict[str, str]]]: - """Creates a new storage bucket. - - Parameters - ---------- - id - A unique identifier for the bucket you are creating. - name - A name for the bucket you are creating. If not passed, the id is used as the name as well. - public - Whether the bucket you are creating should be publicly accessible. Defaults to False. - """ - return self._request( - "POST", - f"{self.url}/bucket", - json={"id": id, "name": name or id, "public": public}, - ) - - def empty_bucket(self, id: str) -> Union[dict[str, str], Awaitable[dict[str, str]]]: - """Removes all objects inside a single bucket. - - Parameters - ---------- - id - The unique identifier of the bucket you would like to empty. - """ - return self._request("POST", f"{self.url}/bucket/{id}/empty", json={}) - - def delete_bucket( - self, id: str - ) -> Union[dict[str, str], Awaitable[dict[str, str]]]: - """Deletes an existing bucket. Note that you cannot delete buckets with existing objects inside. You must first - `empty()` the bucket. - - Parameters - ---------- - id - The unique identifier of the bucket you would like to delete. 
- """ - return self._request("DELETE", f"{self.url}/bucket/{id}", json={}) diff --git a/storage3/lib/storage_file_api.py b/storage3/lib/storage_file_api.py deleted file mode 100644 index 9c5d7c39..00000000 --- a/storage3/lib/storage_file_api.py +++ /dev/null @@ -1,208 +0,0 @@ -from typing import Any - -import httpx -from httpx import HTTPError - - -class StorageFileAPI: - DEFAULT_SEARCH_OPTIONS = { - "limit": 100, - "offset": 0, - "sortBy": { - "column": "name", - "order": "asc", - }, - } - DEFAULT_FILE_OPTIONS = { - "cacheControl": "3600", - "contentType": "text/plain;charset=UTF-8", - "x-upsert": "false", - } - - def __init__(self, url: str, headers: dict, bucket_id: str): - """ - Parameters - ---------- - url - base url for all the operation - headers - the base authentication headers - bucket_id - the id of the bucket that we want to access, you can get the list of buckets with the SupabaseStorageClient.list_buckets() - """ - self.url = url - self.headers = headers - self.bucket_id = bucket_id - # self.loop = asyncio.get_event_loop() - # self.replace = replace - - def create_signed_url(self, path: str, expires_in: int): - """ - Parameters - ---------- - path - file path to be downloaded, including the current file name. - expires_in - number of seconds until the signed URL expires. - """ - try: - _path = self._get_final_path(path) - response = httpx.post( - f"{self.url}/object/sign/{_path}", - json={"expiresIn": str(expires_in)}, - headers=self.headers, - ) - data = response.json() - data["signedURL"] = f"{self.url}{data['signedURL']}" - response.raise_for_status() - except HTTPError as http_err: - print(f"HTTP error occurred: {http_err}") # Python 3.6 - except Exception as err: - print(f"Other error occurred: {err}") # Python 3.6 - else: - return data - - def get_public_url(self, path: str): - """ - Parameters - ---------- - path - file path to be downloaded, including the path and file name. For example `folder/image.png`. 
- """ - try: - _path = self._get_final_path(path) - public_url = f"{self.url}/object/public/{_path}" - return public_url - except: - print("Public URL not found") - - def move(self, from_path: str, to_path: str): - """ - Moves an existing file, optionally renaming it at the same time. - Parameters - ---------- - from_path - The original file path, including the current file name. For example `folder/image.png`. - to_path - The new file path, including the new file name. For example `folder/image-copy.png`. - """ - try: - response = httpx.post( - f"{self.url}/object/move", - json={ - "bucketId": self.bucket_id, - "sourceKey": from_path, - "destinationKey": to_path, - }, - headers=self.headers, - ) - response.raise_for_status() - except HTTPError as http_err: - print(f"HTTP error occurred: {http_err}") # Python 3.6 - except Exception as err: - print(f"Other error occurred: {err}") # Python 3.6 - else: - return response.json() - - def remove(self, paths: list): - """ - Deletes files within the same bucket - Parameters - ---------- - paths - An array or list of files to be deletes, including the path and file name. For example [`folder/image.png`]. - """ - try: - response = httpx.request( - "DELETE", - f"{self.url}/object/{self.bucket_id}", - json={"prefixes": paths}, - headers=self.headers, - ) - response.raise_for_status() - except HTTPError as http_err: - print(f"HTTP error occurred: {http_err}") # Python 3.6 - except Exception as err: - raise err # Python 3.6 - else: - return response.json() - - def list(self, path: str = None, options: dict = {}): - """ - Lists all the files within a bucket. - Parameters - ---------- - path - The folder path. - options - Search options, including `limit`, `offset`, and `sortBy`. 
- """ - try: - body = dict(self.DEFAULT_SEARCH_OPTIONS, **options) - headers = dict(self.headers, **{"Content-Type": "application/json"}) - body["prefix"] = path if path else "" - getdata = httpx.post( - f"{self.url}/object/list/{self.bucket_id}", - json=body, - headers=headers, - ) - getdata.raise_for_status() - except HTTPError as http_err: - print(f"HTTP error occurred: {http_err}") # Python 3.6 - except Exception as err: - raise err # Python 3.6 - else: - return getdata.json() - - def download(self, path: str): - """ - Downloads a file. - Parameters - ---------- - path The file path to be downloaded, including the path and file name. For example `folder/image.png`. - """ - try: - _path = self._get_final_path(path) - response = httpx.get(f"{self.url}/object/{_path}", headers=self.headers) - - except HTTPError as http_err: - print(f"HTTP error occurred: {http_err}") # Python 3.6 - except Exception as err: - raise err # Python 3.6 - else: - return response.content - - def upload(self, path: str, file: Any, file_options: dict = None): - """ - Uploads a file to an existing bucket. - Parameters - ---------- - path - The relative file path including the bucket ID. Should be of the format `bucket/folder/subfolder/filename.png`. The bucket must already exist before attempting to upload. - file - The File object to be stored in the bucket. or a async generator of chunks - file_options - HTTP headers. 
For example `cacheControl` - """ - if file_options is None: - file_options = {} - headers = dict(self.headers, **self.DEFAULT_FILE_OPTIONS) - headers.update(file_options) - filename = path.rsplit("/", maxsplit=1)[-1] - files = {"file": (filename, open(file, "rb"), headers["contentType"])} - _path = self._get_final_path(path) - try: - resp = httpx.post( - f"{self.url}/object/{_path}", - files=files, - headers=headers, - ) - except HTTPError as http_err: - print(f"HTTP error occurred: {http_err}") # Python 3.6 - except Exception as err: - raise err # Python 3.6 - else: - return resp - - def _get_final_path(self, path: str): - return f"{self.bucket_id}/{path}" diff --git a/storage3/storage_client.py b/storage3/storage_client.py deleted file mode 100644 index 81f8f4be..00000000 --- a/storage3/storage_client.py +++ /dev/null @@ -1,23 +0,0 @@ -from typing import Dict - -from storage3.lib.storage_bucket_api import StorageBucketAPI -from storage3.lib.storage_file_api import StorageFileAPI - - -class SupabaseStorageClient(StorageBucketAPI): - """ - Manage the storage bucket and files - Examples - -------- - >>> url = storage_file.create_signed_url("something/test2.txt", 80) # signed url - >>> loop.run_until_complete(storage_file.download("something/test2.txt")) # upload or download - >>> loop.run_until_complete(storage_file.upload("something/test2.txt","path_file_upload")) - >>> list_buckets = storage.list_buckets() - >>> list_files = storage_file.list("something") - """ - - def __init__(self, url: str, headers: Dict[str, str]): - super().__init__(url, headers) - - def from_(self, id_: str) -> StorageFileAPI: - return StorageFileAPI(self.url, self.headers, id_) diff --git a/storage3/types.py b/storage3/types.py new file mode 100644 index 00000000..bd0225fa --- /dev/null +++ b/storage3/types.py @@ -0,0 +1,35 @@ +from dataclasses import dataclass +from datetime import datetime +from typing import Literal, TypedDict + +RequestMethod = Literal["GET", "POST", "DELETE", "PUT", 
"HEAD"] + + +@dataclass +class BaseBucket: + """Represents a file storage bucket.""" + + id: str + name: str + owner: str + public: bool + created_at: datetime + updated_at: datetime + + def __post_init__(self) -> None: + # created_at and updated_at are returned by the API as ISO timestamps + # so we convert them to datetime objects + self.created_at = datetime.fromisoformat(self.created_at) # type: ignore + self.updated_at = datetime.fromisoformat(self.updated_at) # type: ignore + + +# used in bucket.list method's option parameter +class _sortByType(TypedDict): + column: str + order: Literal["asc", "desc"] + + +class ListBucketFilesOptions(TypedDict): + limit: int + offset: int + sortBy: _sortByType diff --git a/storage3/utils.py b/storage3/utils.py new file mode 100644 index 00000000..770cacc5 --- /dev/null +++ b/storage3/utils.py @@ -0,0 +1,13 @@ +from httpx import AsyncClient as AsyncClient # noqa: F401 +from httpx import Client as BaseClient + +__version__ = "0.2.0" + + +class SyncClient(BaseClient): + def aclose(self) -> None: + self.close() + + +class StorageException(Exception): + """Error raised when an operation on the storage API fails."""