diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index a8d8d12..6983485 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -9,39 +9,26 @@ on: jobs: test: runs-on: ubuntu-latest - services: - databend: - # image: datafuselabs/databend-query - image: datafuselabs/databend - env: - QUERY_DEFAULT_USER: databend - QUERY_DEFAULT_PASSWORD: databend - MINIO_ENABLED: true - ports: - - 8000:8000 - - 9000:9000 + steps: - name: Checkout uses: actions/checkout@v2 - - name: Setup Python-3.10 - uses: actions/setup-python@v4 - with: - python-version: '3.10' + - name: Install uv + uses: astral-sh/setup-uv@v4 - - name: Pip Install - run: | - make install + - name: Set up Python + run: uv python install - - name: Verify Service Running - run: | - cid=$(docker ps -a | grep databend | cut -d' ' -f1) - docker logs ${cid} - curl -v http://localhost:8000/v1/health + - name: Install the project + run: uv sync --all-extras --dev + + - name: Start databend-server + run: make up - name: Test env: - TEST_DATABEND_DSN: "http://databend:databend@localhost:8000/default" + TEST_DATABEND_DSN: "http://root:@localhost:8000/default" run: | make lint make ci diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 10477ef..2fce113 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -14,19 +14,14 @@ jobs: - name: Checkout Repository uses: actions/checkout@v3 - - name: Set up Python - uses: actions/setup-python@v3 - with: - python-version: 3.9 + - name: Install uv + uses: astral-sh/setup-uv@v4 - - name: Install Dependencies - run: | - python -m pip install --upgrade pip - pip install setuptools wheel twine + - name: Set up Python + run: uv python install - - name: Determine Version Change - run: | - export VERSION=$(cat databend_py/VERSION) + - name: Install the project + run: uv sync --all-extras --dev - name: Release Package and Tag env: @@ -38,7 +33,7 @@ jobs: git config user.email "hantmac@outlook.com" git tag -a "v$VERSION" -m "Release Version $VERSION" git push origin "v$VERSION" - python setup.py sdist bdist_wheel + uv publish echo "show user name:" echo ${{ secrets.TWINE_USERNAME }} twine upload -u ${{ secrets.TWINE_USERNAME }} -p ${{ secrets.TWINE_PASSWORD }} dist/* diff --git a/.gitignore b/.gitignore index ed922ee..b5f2be8 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +upload.csv .envrc # Byte-compiled / optimized / DLL files __pycache__/ diff --git a/Makefile b/Makefile index bc28945..66c4920 100644 --- a/Makefile +++ b/Makefile @@ -1,12 +1,18 @@ +prepare: + mkdir -p data/databend + +up: prepare + docker compose -f docker-compose.yaml up --quiet-pull -d databend --wait + curl -u root: -XPOST "http://localhost:8000/v1/query" -H 'Content-Type: application/json' -d '{"sql": "select version()", "pagination": { "wait_time_secs": 10}}' + +start: up + test: - python tests/test_client.py + uv run pytest . ci: - python tests/test_client.py + uv run pytest . lint: - pyflakes . + uv run ruff check -install: - pip install -r requirements.txt - pip install -e . 
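The CI flow above can be reproduced locally before opening a PR. A minimal sketch, assuming Docker (with the compose plugin) and uv are installed locally and that the databend container from docker-compose.yaml is reachable on localhost:8000 with the root user and an empty password, exactly as ci.yaml assumes:

    # start MinIO + databend-server via docker compose and wait for the health check
    make up
    # create the virtualenv and install runtime + dev dependencies
    uv sync --all-extras --dev
    # point the tests at the local server (same DSN the CI job exports), then lint and test
    export TEST_DATABEND_DSN="http://root:@localhost:8000/default"
    make lint
    make ci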
diff --git a/databend_py/client.py b/databend_py/client.py index 9a4275c..a8252fd 100644 --- a/databend_py/client.py +++ b/databend_py/client.py @@ -15,14 +15,19 @@ class Client(object): """ def __init__(self, *args, **kwargs): - self.settings = (kwargs.pop('settings', None) or {}).copy() - self.result_config = (kwargs.pop('result_config', None) or {}).copy() + self.settings = (kwargs.pop("settings", None) or {}).copy() + self.result_config = (kwargs.pop("result_config", None) or {}).copy() self.connection = Connection(*args, **kwargs) self.query_result_cls = QueryResult self.helper = Helper - self._debug = asbool(self.settings.get('debug', False)) - self._uploader = DataUploader(self, self.connection, self.settings, debug=self._debug, - compress=self.settings.get('compress', False)) + self._debug = asbool(self.settings.get("debug", False)) + self._uploader = DataUploader( + self, + self.connection, + self.settings, + debug=self._debug, + compress=self.settings.get("compress", False), + ) def __enter__(self): return self @@ -34,9 +39,9 @@ def disconnect_connection(self): self.connection.disconnect() def _data_generator(self, raw_data): - while raw_data['next_uri'] is not None: + while raw_data["next_uri"] is not None: try: - raw_data = self._receive_data(raw_data['next_uri']) + raw_data = self._receive_data(raw_data["next_uri"]) yield raw_data except (Exception, KeyboardInterrupt): self.disconnect() @@ -57,7 +62,8 @@ def _receive_result(self, query, query_id=None, with_column_types=False): helper.check_error() gen = self._data_generator(raw_data) result = self.query_result_cls( - gen, raw_data, with_column_types=with_column_types, **self.result_config) + gen, raw_data, with_column_types=with_column_types, **self.result_config + ) return result.get_result() def _iter_receive_result(self, query, query_id=None, with_column_types=False): @@ -67,14 +73,16 @@ def _iter_receive_result(self, query, query_id=None, with_column_types=False): helper.check_error() gen = self._data_generator(raw_data) result = self.query_result_cls( - gen, raw_data, with_column_types=with_column_types, **self.result_config) + gen, raw_data, with_column_types=with_column_types, **self.result_config + ) _, rows = result.get_result() for row in rows: for r in row: yield r - def execute(self, query, params=None, with_column_types=False, - query_id=None, settings=None): + def execute( + self, query, params=None, with_column_types=False, query_id=None, settings=None + ): """ Executes query. :param query: query that will be send to server. 
@@ -112,52 +120,63 @@ def execute(self, query, params=None, with_column_types=False, return [], rv column_types, rv = self._process_ordinary_query( - query, params=params, with_column_types=with_column_types, - query_id=query_id) + query, params=params, with_column_types=with_column_types, query_id=query_id + ) return column_types, rv # params = [(1,),(2,)] or params = [(1,2),(2,3)] def _process_insert_query(self, query, params): insert_rows = 0 if "values" in query: - query = query.split("values")[0] + 'values' + query = query.split("values")[0] + "values" elif "VALUES" in query: - query = query.split("VALUES")[0] + 'VALUES' - if len(query.split(' ')) < 3: + query = query.split("VALUES")[0] + "VALUES" + if len(query.split(" ")) < 3: raise Exception("Not standard insert/replace statement") - table_name = query.split(' ')[2] - batch_size = query.count(',') + 1 + table_name = query.split(" ")[2] + batch_size = query.count(",") + 1 if params is not None and len(params) > 0: if isinstance(params[0], tuple): tuple_ls = params else: - tuple_ls = [tuple(params[i:i + batch_size]) for i in range(0, len(params), batch_size)] + tuple_ls = [ + tuple(params[i : i + batch_size]) + for i in range(0, len(params), batch_size) + ] insert_rows = len(tuple_ls) self._uploader.upload_to_table_by_copy(table_name, tuple_ls) return insert_rows - def _process_ordinary_query(self, query, params=None, with_column_types=False, - query_id=None): + def _process_ordinary_query( + self, query, params=None, with_column_types=False, query_id=None + ): if params is not None: - query = self._substitute_params( - query, params, self.connection.context - ) - return self._receive_result(query, query_id=query_id, with_column_types=with_column_types, ) - - def execute_iter(self, query, params=None, with_column_types=False, - query_id=None, settings=None): + query = self._substitute_params(query, params, self.connection.context) + return self._receive_result( + query, + query_id=query_id, + with_column_types=with_column_types, + ) + + def execute_iter( + self, query, params=None, with_column_types=False, query_id=None, settings=None + ): if params is not None: - query = self._substitute_params( - query, params, self.connection.context - ) - return self._iter_receive_result(query, query_id=query_id, with_column_types=with_column_types) - - def _iter_process_ordinary_query(self, query, with_column_types=False, query_id=None): - return self._iter_receive_result(query, query_id=query_id, with_column_types=with_column_types) + query = self._substitute_params(query, params, self.connection.context) + return self._iter_receive_result( + query, query_id=query_id, with_column_types=with_column_types + ) + + def _iter_process_ordinary_query( + self, query, with_column_types=False, query_id=None + ): + return self._iter_receive_result( + query, query_id=query_id, with_column_types=with_column_types + ) def _substitute_params(self, query, params, context): if not isinstance(params, dict): - raise ValueError('Parameters are expected in dict form') + raise ValueError("Parameters are expected in dict form") escaped = escape_params(params, context) return query % escaped @@ -186,59 +205,59 @@ def from_url(cls, url): continue timeouts = { - 'connect_timeout', - 'read_timeout', - 'send_receive_timeout', - 'sync_request_timeout' + "connect_timeout", + "read_timeout", + "send_receive_timeout", + "sync_request_timeout", } value = value[0] - if name == 'client_name': + if name == "client_name": kwargs[name] = value - elif name == 'tenant': + elif name 
== "tenant": kwargs[name] = value - elif name == 'warehouse': + elif name == "warehouse": kwargs[name] = value - elif name == 'secure': + elif name == "secure": kwargs[name] = asbool(value) - elif name == 'copy_purge': + elif name == "copy_purge": kwargs[name] = asbool(value) settings[name] = asbool(value) - elif name == 'debug': + elif name == "debug": settings[name] = asbool(value) - elif name == 'compress': + elif name == "compress": settings[name] = asbool(value) elif name in timeouts: kwargs[name] = float(value) - elif name == 'persist_cookies': + elif name == "persist_cookies": kwargs[name] = asbool(value) - elif name == 'null_to_none': + elif name == "null_to_none": result_config[name] = asbool(value) else: settings[name] = value # settings={'copy_purge':False} secure = kwargs.get("secure", False) - kwargs['secure'] = secure + kwargs["secure"] = secure host = parsed_url.hostname if parsed_url.port is not None: - kwargs['port'] = parsed_url.port + kwargs["port"] = parsed_url.port - path = parsed_url.path.replace('/', '', 1) + path = parsed_url.path.replace("/", "", 1) if path: - kwargs['database'] = path + kwargs["database"] = path if parsed_url.username is not None: - kwargs['user'] = unquote(parsed_url.username) + kwargs["user"] = unquote(parsed_url.username) if parsed_url.password is not None: - kwargs['password'] = unquote(parsed_url.password) + kwargs["password"] = unquote(parsed_url.password) if settings: - kwargs['settings'] = settings + kwargs["settings"] = settings if result_config: - kwargs['result_config'] = result_config + kwargs["result_config"] = result_config return cls(host, **kwargs) @@ -250,7 +269,9 @@ def insert(self, database_name, table_name, data): data: the data which write into, it's a list of tuple """ # TODO: escape the database & table name - self._uploader.upload_to_table_by_copy("%s.%s" % (database_name, table_name), data) + self._uploader.upload_to_table_by_copy( + "%s.%s" % (database_name, table_name), data + ) def replace(self, database_name, table_name, conflict_keys, data): """ @@ -260,7 +281,9 @@ def replace(self, database_name, table_name, conflict_keys, data): conflict_keys: the key that use to replace into data: the data which write into, it's a list of tuple """ - self._uploader.replace_into_table("%s.%s" % (database_name, table_name), conflict_keys, data) + self._uploader.replace_into_table( + "%s.%s" % (database_name, table_name), conflict_keys, data + ) def upload_to_stage(self, stage_dir, file_name, data): """ diff --git a/databend_py/connection.py b/databend_py/connection.py index 273d033..8d0368a 100644 --- a/databend_py/connection.py +++ b/databend_py/connection.py @@ -11,7 +11,11 @@ from . import log from . 
import defines from .context import Context -from databend_py.errors import WarehouseTimeoutException, UnexpectedException, ServerException +from databend_py.errors import ( + WarehouseTimeoutException, + UnexpectedException, + ServerException, +) from databend_py.retry import retry from databend_py.sdk_info import sdk_info @@ -22,8 +26,16 @@ class ServerInfo(object): - def __init__(self, name, version_major, version_minor, version_patch, - revision, timezone, display_name): + def __init__( + self, + name, + version_major, + version_minor, + version_patch, + revision, + timezone, + display_name, + ): self.name = name self.version_major = version_major self.version_minor = version_minor @@ -38,29 +50,29 @@ def version_tuple(self): return self.version_major, self.version_minor, self.version_patch def __repr__(self): - version = '%s.%s.%s' % ( - self.version_major, self.version_minor, self.version_patch + version = "%s.%s.%s" % ( + self.version_major, + self.version_minor, + self.version_patch, ) items = [ - ('name', self.name), - ('version', version), - ('revision', self.revision), - ('timezone', self.timezone), - ('display_name', self.display_name) + ("name", self.name), + ("version", version), + ("revision", self.revision), + ("timezone", self.timezone), + ("display_name", self.display_name), ] - params = ', '.join('{}={}'.format(key, value) for key, value in items) - return '' % (params) + params = ", ".join("{}={}".format(key, value) for key, value in items) + return "" % (params) def get_error(response): - if response['error'] is None: + if response["error"] is None: return None # Wrap errno into msg, for result check - return ServerException( - response['error']['message'], - response['error']['code']) + return ServerException(response["error"]["message"], response["error"]["code"]) class Connection(object): @@ -74,11 +86,22 @@ class Connection(object): # 'port': 3307, # 'database': 'default' # } - def __init__(self, host, tenant=None, warehouse=None, port=None, user=defines.DEFAULT_USER, - password=defines.DEFAULT_PASSWORD, - connect_timeout=defines.DEFAULT_CONNECT_TIMEOUT, read_timeout=defines.DEFAULT_READ_TIMEOUT, - database=defines.DEFAULT_DATABASE, secure=False, copy_purge=False, session_settings=None, - persist_cookies=False): + def __init__( + self, + host, + tenant=None, + warehouse=None, + port=None, + user=defines.DEFAULT_USER, + password=defines.DEFAULT_PASSWORD, + connect_timeout=defines.DEFAULT_CONNECT_TIMEOUT, + read_timeout=defines.DEFAULT_READ_TIMEOUT, + database=defines.DEFAULT_DATABASE, + secure=False, + copy_purge=False, + session_settings=None, + persist_cookies=False, + ): self.host = host self.port = port self.tenant = tenant @@ -96,9 +119,9 @@ def __init__(self, host, tenant=None, warehouse=None, port=None, user=defines.DE self.query_option = None self.context = Context() self.requests_session = requests.Session() - self.schema = 'http' + self.schema = "http" if self.secure: - self.schema = 'https' + self.schema = "https" e = environs.Env() if os.getenv("ADDITIONAL_HEADERS") is not None: print(os.getenv("ADDITIONAL_HEADERS")) @@ -110,51 +133,72 @@ def default_session(self): return {"database": self.database} def make_headers(self): - headers = {'Content-Type': 'application/json', 'User-Agent': sdk_info(), 'Accept': 'application/json', - 'X-DATABEND-ROUTE': 'warehouse', XDatabendTenantHeader: self.tenant, - XDatabendWarehouseHeader: self.warehouse} + headers = { + "Content-Type": "application/json", + "User-Agent": sdk_info(), + "Accept": "application/json", + 
"X-DATABEND-ROUTE": "warehouse", + XDatabendTenantHeader: self.tenant, + XDatabendWarehouseHeader: self.warehouse, + } if "Authorization" not in self.additional_headers: return { - **headers, **self.additional_headers, - "Authorization": - "Basic " + base64.b64encode("{}:{}".format( - self.user, self.password).encode(encoding="utf-8")).decode() + **headers, + **self.additional_headers, + "Authorization": "Basic " + + base64.b64encode( + "{}:{}".format(self.user, self.password).encode(encoding="utf-8") + ).decode(), } else: return {**headers, **self.additional_headers} def get_description(self): - return '{}:{}'.format(self.host, self.port) + return "{}:{}".format(self.host, self.port) def disconnect(self): self.client_session = dict() @retry(times=10, exceptions=WarehouseTimeoutException) def do_query(self, url, query_sql): - response = self.requests_session.post(url, - data=json.dumps(query_sql), - headers=self.make_headers(), - auth=HTTPBasicAuth(self.user, self.password), - timeout=(self.connect_timeout, self.read_timeout), - verify=True) + response = self.requests_session.post( + url, + data=json.dumps(query_sql), + headers=self.make_headers(), + auth=HTTPBasicAuth(self.user, self.password), + timeout=(self.connect_timeout, self.read_timeout), + verify=True, + ) if response.status_code != 200: try: resp_dict = json.loads(response.content) - if resp_dict and resp_dict.get('error') and "no endpoint" in resp_dict.get('error'): + if ( + resp_dict + and resp_dict.get("error") + and "no endpoint" in resp_dict.get("error") + ): raise WarehouseTimeoutException except ValueError: pass - raise UnexpectedException("Unexpected status code %d when post query, content: %s, headers: %s" % - (response.status_code, response.content, response.headers)) + raise UnexpectedException( + "Unexpected status code %d when post query, content: %s, headers: %s" + % (response.status_code, response.content, response.headers) + ) if response.content: try: resp_dict = json.loads(response.content) except ValueError: - raise UnexpectedException("failed to parse response: %s" % response.content) - if resp_dict and resp_dict.get('error') and "no endpoint" in resp_dict.get('error'): + raise UnexpectedException( + "failed to parse response: %s" % response.content + ) + if ( + resp_dict + and resp_dict.get("error") + and "no endpoint" in resp_dict.get("error") + ): raise WarehouseTimeoutException - if resp_dict and resp_dict.get('error'): + if resp_dict and resp_dict.get("error"): raise UnexpectedException("failed to query: %s" % response.content) if self.persist_cookies: self.cookies = response.cookies @@ -165,14 +209,14 @@ def do_query(self, url, query_sql): def query(self, statement): url = self.format_url() log.logger.debug(f"http sql: {statement}") - query_sql = {'sql': statement, "string_fields": True} + query_sql = {"sql": statement, "string_fields": True} if self.client_session is not None and len(self.client_session) != 0: if "database" not in self.client_session: self.client_session = self.default_session() - query_sql['session'] = self.client_session + query_sql["session"] = self.client_session else: self.client_session = self.default_session() - query_sql['session'] = self.client_session + query_sql["session"] = self.client_session # if XDatabendQueryIDHeader in self.additional_headers: # del self.additional_headers[XDatabendQueryIDHeader] self.additional_headers.update({XDatabendQueryIDHeader: str(uuid.uuid4())}) @@ -182,9 +226,12 @@ def query(self, statement): self.client_session = resp_dict.get("session", 
self.default_session()) if self.additional_headers: self.additional_headers.update( - {XDatabendQueryIDHeader: resp_dict.get(QueryID)}) + {XDatabendQueryIDHeader: resp_dict.get(QueryID)} + ) else: - self.additional_headers = {XDatabendQueryIDHeader: resp_dict.get(QueryID)} + self.additional_headers = { + XDatabendQueryIDHeader: resp_dict.get(QueryID) + } return self.wait_until_has_schema(resp_dict) except Exception as err: log.logger.error( @@ -205,24 +252,30 @@ def reset_session(self): def wait_until_has_schema(self, raw_data_dict): resp_schema = raw_data_dict.get("schema") while resp_schema is not None and len(resp_schema) == 0: - if raw_data_dict['next_uri'] is None: + if raw_data_dict["next_uri"] is None: break - resp = self.next_page(raw_data_dict['next_uri']) + resp = self.next_page(raw_data_dict["next_uri"]) resp_dict = json.loads(resp.content) raw_data_dict = resp_dict resp_schema = raw_data_dict.get("schema") - if resp_schema is not None and (len(resp_schema) != 0 or len(raw_data_dict.get("data")) != 0): + if resp_schema is not None and ( + len(resp_schema) != 0 or len(raw_data_dict.get("data")) != 0 + ): break return raw_data_dict def next_page(self, next_uri): url = "{}://{}:{}{}".format(self.schema, self.host, self.port, next_uri) - response = self.requests_session.get(url=url, headers=self.make_headers(), cookies=self.cookies) + response = self.requests_session.get( + url=url, headers=self.make_headers(), cookies=self.cookies + ) if response.status_code != 200: - raise UnexpectedException("Unexpected status code %d when get %s, content: %s" % - (response.status_code, url, response.content)) + raise UnexpectedException( + "Unexpected status code %d when get %s, content: %s" + % (response.status_code, url, response.content) + ) return response # return a list of response util empty next_uri @@ -236,8 +289,8 @@ def query_with_session(self, statement): session = response.get("session", self.default_session()) if session: self.client_session = session - while response['next_uri'] is not None: - resp = self.next_page(response['next_uri']) + while response["next_uri"] is not None: + resp = self.next_page(response["next_uri"]) response = json.loads(resp.content) log.logger.debug(f"Sql in progress, fetch next_uri content: {response}") self.check_error(response) diff --git a/databend_py/context.py b/databend_py/context.py index 2c4a7bf..44fcab4 100644 --- a/databend_py/context.py +++ b/databend_py/context.py @@ -1,4 +1,3 @@ - class Context(object): def __init__(self): self._server_info = None @@ -31,6 +30,8 @@ def client_settings(self, value): self._client_settings = value.copy() def __repr__(self): - return '' % ( - self._server_info, self._client_settings, self._settings + return "" % ( + self._server_info, + self._client_settings, + self._settings, ) diff --git a/databend_py/datetypes.py b/databend_py/datetypes.py index 3be65af..646dd6b 100644 --- a/databend_py/datetypes.py +++ b/databend_py/datetypes.py @@ -40,6 +40,6 @@ def str_to_bool(s): return bool(s) -if __name__ == '__main__': +if __name__ == "__main__": d = DatabendDataType() - print(d.type_convert_fn("Uint64")('0')) + print(d.type_convert_fn("Uint64")("0")) diff --git a/databend_py/defines.py b/databend_py/defines.py index 7e596a5..4485b04 100644 --- a/databend_py/defines.py +++ b/databend_py/defines.py @@ -1,11 +1,11 @@ -DEFAULT_DATABASE = 'default' -DEFAULT_USER = 'root' -DEFAULT_PASSWORD = '' +DEFAULT_DATABASE = "default" +DEFAULT_USER = "root" +DEFAULT_PASSWORD = "" DEFAULT_SESSION_IDLE_TIME = 30 DEFAULT_CONNECT_TIMEOUT 
= 180 DEFAULT_READ_TIMEOUT = 180 -DBMS_NAME = 'Databend' -CLIENT_NAME = 'databend-py' +DBMS_NAME = "Databend" +CLIENT_NAME = "databend-py" -STRINGS_ENCODING = 'utf-8' +STRINGS_ENCODING = "utf-8" diff --git a/databend_py/errors.py b/databend_py/errors.py index fe9c4c2..954dc7c 100644 --- a/databend_py/errors.py +++ b/databend_py/errors.py @@ -6,8 +6,8 @@ def __init__(self, message=None): super(Error, self).__init__(message) def __str__(self): - message = ' ' + self.message if self.message is not None else '' - return 'Code: {}.{}'.format(self.code, message) + message = " " + self.message if self.message is not None else "" + return "Code: {}.{}".format(self.code, message) class ServerException(Error): @@ -17,7 +17,7 @@ def __init__(self, message, code=None): super(ServerException, self).__init__(message) def __str__(self): - return 'Code: {} {}'.format(self.code, self.message) + return "Code: {} {}".format(self.code, self.message) class WarehouseTimeoutException(Error): @@ -27,7 +27,7 @@ def __init__(self, message, code=None): super(WarehouseTimeoutException, self).__init__(message) def __str__(self): - return 'Provision warehouse timeout: {}'.format(self.message) + return "Provision warehouse timeout: {}".format(self.message) class UnexpectedException(Error): @@ -36,5 +36,5 @@ def __init__(self, message): super(UnexpectedException, self).__init__(message) def __str__(self): - message = ' ' + self.message if self.message is not None else '' - return 'Unexpected: {}'.format(message) \ No newline at end of file + message = " " + self.message if self.message is not None else "" + return "Unexpected: {}".format(message) diff --git a/databend_py/log.py b/databend_py/log.py index 772b57c..f9178f7 100644 --- a/databend_py/log.py +++ b/databend_py/log.py @@ -4,13 +4,13 @@ log_priorities = ( - 'Unknown', - 'Fatal', - 'Critical', - 'Error', - 'Warning', - 'Notice', - 'Information', - 'Debug', - 'Trace' + "Unknown", + "Fatal", + "Critical", + "Error", + "Warning", + "Notice", + "Information", + "Debug", + "Trace", ) diff --git a/databend_py/result.py b/databend_py/result.py index a5b34d0..bea4f16 100644 --- a/databend_py/result.py +++ b/databend_py/result.py @@ -8,8 +8,8 @@ class QueryResult(object): """ def __init__( - self, data_generator, first_data, - with_column_types=False, null_to_none=False): + self, data_generator, first_data, with_column_types=False, null_to_none=False + ): self.data_generator = data_generator self.with_column_types = with_column_types self.first_data = first_data @@ -26,7 +26,7 @@ def store_data(self, raw_data: dict): column_name_ls = [] datas = raw_data.get("data") for field in fields: - column_name_ls.append(field['name']) + column_name_ls.append(field["name"]) for data in datas: self.column_data_dict_list.append(dict(zip(column_name_ls, data))) @@ -35,8 +35,8 @@ def store_columns(self, raw_data: dict): fields = raw_data.get("schema") for field in fields: inner_type = self.extract_type(field["type"]) - column_type = (field['name'], inner_type) - self.column_type_dic[field['name']] = inner_type + column_type = (field["name"], inner_type) + self.column_type_dic[field["name"]] = inner_type self.columns_with_types.append(column_type) def get_result(self): @@ -52,7 +52,7 @@ def get_result(self): for read_data in self.column_data_dict_list: tmp_list = [] for c, d in read_data.items(): - if d == 'NULL': + if d == "NULL": if self.null_to_none: tmp_list.append(None) else: diff --git a/databend_py/retry.py b/databend_py/retry.py index 2325f9d..4ee306f 100644 --- 
a/databend_py/retry.py +++ b/databend_py/retry.py @@ -22,8 +22,8 @@ def newfn(*args, **kwargs): return func(*args, **kwargs) except exceptions: print( - 'Exception thrown when attempting to run %s, attempt ' - '%d of %d' % (func, attempt, times) + "Exception thrown when attempting to run %s, attempt " + "%d of %d" % (func, attempt, times) ) time.sleep(attempt * 10) attempt += 1 @@ -36,10 +36,10 @@ def newfn(*args, **kwargs): @retry(times=3, exceptions=WarehouseTimeoutException) def foo1(): - print('Some code here ....') - print('Oh no, we have exception') - raise WarehouseTimeoutException('Some error') + print("Some code here ....") + print("Oh no, we have exception") + raise WarehouseTimeoutException("Some error") -if __name__ == '__main__': +if __name__ == "__main__": foo1() diff --git a/databend_py/sdk_info.py b/databend_py/sdk_info.py index 5a28243..e10980e 100644 --- a/databend_py/sdk_info.py +++ b/databend_py/sdk_info.py @@ -4,8 +4,8 @@ def sdk_version(): - version_py = os.path.join(here, 'VERSION') - with open(version_py, encoding='utf-8') as f: + version_py = os.path.join(here, "VERSION") + with open(version_py, encoding="utf-8") as f: first_line = f.readline() return first_line.strip() diff --git a/databend_py/uploader.py b/databend_py/uploader.py index 1e98956..f078841 100644 --- a/databend_py/uploader.py +++ b/databend_py/uploader.py @@ -9,7 +9,15 @@ class DataUploader: - def __init__(self, client, connection, settings, default_stage_dir='@~', debug=False, compress=False): + def __init__( + self, + client, + connection, + settings, + default_stage_dir="@~", + debug=False, + compress=False, + ): # TODO: make it depends on Connection instead of Client self.client = client self.connection = connection @@ -24,7 +32,7 @@ def upload_to_table_by_copy(self, table_name, data): stage_path = self._gen_stage_path(self.default_stage_dir) presigned_url, headers = self._execute_presign(stage_path) self._upload_to_presigned_url(presigned_url, headers, data) - self._execute_copy(table_name, stage_path, 'CSV') + self._execute_copy(table_name, stage_path, "CSV") def replace_into_table(self, table_name, conflict_keys, data): """ @@ -38,7 +46,9 @@ def replace_into_table(self, table_name, conflict_keys, data): stage_path = self._gen_stage_path(self.default_stage_dir) presigned_url, headers = self._execute_presign(stage_path) self._upload_to_presigned_url(presigned_url, headers, data) - sql_statement = f"REPLACE INTO {table_name} ON ({','.join(conflict_keys)}) VALUES" + sql_statement = ( + f"REPLACE INTO {table_name} ON ({','.join(conflict_keys)}) VALUES" + ) self._execute_with_attachment(sql_statement, stage_path, "CSV") def upload_to_stage(self, stage_dir, filename, data): @@ -49,37 +59,40 @@ def upload_to_stage(self, stage_dir, filename, data): def _gen_stage_path(self, stage_dir, stage_filename=None): if stage_filename is None: - suffix = '.csv.gz' if self._compress else '.csv' - stage_filename = '%s%s' % (uuid.uuid4(), suffix) - if stage_filename.startswith('/'): + suffix = ".csv.gz" if self._compress else ".csv" + stage_filename = "%s%s" % (uuid.uuid4(), suffix) + if stage_filename.startswith("/"): stage_filename = stage_filename[1:] # TODO: escape the stage_path if it contains special characters - stage_path = '%s/%s' % (stage_dir, stage_filename) + stage_path = "%s/%s" % (stage_dir, stage_filename) return stage_path def _execute_presign(self, stage_path): start_time = time.time() - _, row = self.client.execute('presign upload %s' % stage_path) + _, row = self.client.execute("presign upload %s" 
% stage_path) presigned_url = row[0][2] headers = json.loads(row[0][1]) if self._debug: - print('upload:_execute_presign %s: %s' % (stage_path, time.time() - start_time)) + print( + "upload:_execute_presign %s: %s" + % (stage_path, time.time() - start_time) + ) return presigned_url, headers def _serialize_data(self, data, compress): # In Python3 csv.writer expects a file-like object opened in text mode. In Python2, csv.writer expects a file-like object opened in binary mode. start_time = time.time() buf = io.StringIO() - csvwriter = csv.writer(buf, delimiter=',', quoting=csv.QUOTE_MINIMAL) + csvwriter = csv.writer(buf, delimiter=",", quoting=csv.QUOTE_MINIMAL) csvwriter.writerows(data) - output = buf.getvalue().encode('utf-8') + output = buf.getvalue().encode("utf-8") if compress: buf = io.BytesIO() with gzip.GzipFile(fileobj=buf, mode="wb") as gzwriter: gzwriter.write(output) output = buf.getvalue() if self._debug: - print('upload:_serialize_data %s' % (time.time() - start_time)) + print("upload:_serialize_data %s" % (time.time() - start_time)) return output def _upload_to_presigned_url(self, presigned_url, headers, data): @@ -94,35 +107,43 @@ def _upload_to_presigned_url(self, presigned_url, headers, data): buf_size = len(buf) data_len = len(data) else: - raise Exception('data is not bytes, File, or a list: %s' % type(data)) + raise Exception("data is not bytes, File, or a list: %s" % type(data)) start_time = time.time() try: resp = requests.put(presigned_url, headers=headers, data=buf) resp.raise_for_status() finally: if self._debug: - print('upload:_upload_to_presigned_url len=%d bufsize=%d %s' % ( - data_len, buf_size, time.time() - start_time)) + print( + "upload:_upload_to_presigned_url len=%d bufsize=%d %s" + % (data_len, buf_size, time.time() - start_time) + ) def _execute_copy(self, table_name, stage_path, file_type): start_time = time.time() sql = self._make_copy_statement(table_name, stage_path, file_type) self.client.execute(sql) if self._debug: - print('upload:_execute_copy table=%s %s' % (table_name, time.time() - start_time)) + print( + "upload:_execute_copy table=%s %s" + % (table_name, time.time() - start_time) + ) def _make_copy_statement(self, table_name, stage_path, file_type): # copy options docs: https://databend.rs/doc/sql-commands/dml/dml-copy-into-table#copyoptions copy_options = {} copy_options["PURGE"] = self.settings.get("copy_purge", False) copy_options["FORCE"] = self.settings.get("force", False) - copy_options["SIZE_LIMIT"] = self.settings.get("size_limit", - 0) # TODO: is this correct to set size_limit = 100? + copy_options["SIZE_LIMIT"] = self.settings.get( + "size_limit", 0 + ) # TODO: is this correct to set size_limit = 100? 
copy_options["ON_ERROR"] = self.settings.get("on_error", "abort") - return f"COPY INTO {table_name} FROM {stage_path} " \ - f"FILE_FORMAT = (type = {file_type} RECORD_DELIMITER = '\\r\\n' COMPRESSION = AUTO) " \ - f"PURGE = {copy_options['PURGE']} FORCE = {copy_options['FORCE']} " \ - f"SIZE_LIMIT={copy_options['SIZE_LIMIT']} ON_ERROR = {copy_options['ON_ERROR']}" + return ( + f"COPY INTO {table_name} FROM {stage_path} " + f"FILE_FORMAT = (type = {file_type} RECORD_DELIMITER = '\\r\\n' COMPRESSION = AUTO) " + f"PURGE = {copy_options['PURGE']} FORCE = {copy_options['FORCE']} " + f"SIZE_LIMIT={copy_options['SIZE_LIMIT']} ON_ERROR = {copy_options['ON_ERROR']}" + ) def _execute_with_attachment(self, sql_statement, stage_path, file_type): start_time = time.time() @@ -131,9 +152,14 @@ def _execute_with_attachment(self, sql_statement, stage_path, file_type): try: resp_dict = self.connection.do_query(url, data) - self.client_session = resp_dict.get("session", self.connection.default_session()) + self.client_session = resp_dict.get( + "session", self.connection.default_session() + ) if self._debug: - print('upload:_execute_attachment sql=%s %s' % (sql_statement, time.time() - start_time)) + print( + "upload:_execute_attachment sql=%s %s" + % (sql_statement, time.time() - start_time) + ) except Exception as e: log.logger.error( f"http error on {url}, SQL: {sql_statement} error msg:{str(e)}" @@ -152,7 +178,10 @@ def _make_attachment(self, sql_statement, stage_path, file_type): data = { "sql": sql_statement, - "stage_attachment": {"location": stage_path, "file_format_options": file_format_options, - "copy_options": copy_options} + "stage_attachment": { + "location": stage_path, + "file_format_options": file_format_options, + "copy_options": copy_options, + }, } return data diff --git a/databend_py/util/escape.py b/databend_py/util/escape.py index 764a73e..4aa2671 100644 --- a/databend_py/util/escape.py +++ b/databend_py/util/escape.py @@ -14,7 +14,7 @@ "\a": "\\a", "\v": "\\v", "\\": "\\\\", - "'": "\\'" + "'": "\\'", } @@ -24,27 +24,27 @@ def escape_datetime(item, context): if item.tzinfo is not None: item = item.astimezone(server_tz) - return "'%s'" % item.strftime('%Y-%m-%d %H:%M:%S') + return "'%s'" % item.strftime("%Y-%m-%d %H:%M:%S") def escape_param(item, context): if item is None: - return 'NULL' + return "NULL" elif isinstance(item, datetime): return escape_datetime(item, context) elif isinstance(item, date): - return "'%s'" % item.strftime('%Y-%m-%d') + return "'%s'" % item.strftime("%Y-%m-%d") elif isinstance(item, str): - return "'%s'" % ''.join(escape_chars_map.get(c, c) for c in item) + return "'%s'" % "".join(escape_chars_map.get(c, c) for c in item) elif isinstance(item, list): - return "[%s]" % ', '.join(str(escape_param(x, context)) for x in item) + return "[%s]" % ", ".join(str(escape_param(x, context)) for x in item) elif isinstance(item, tuple): - return "(%s)" % ', '.join(str(escape_param(x, context)) for x in item) + return "(%s)" % ", ".join(str(escape_param(x, context)) for x in item) elif isinstance(item, Enum): return escape_param(item.value, context) diff --git a/databend_py/util/helper.py b/databend_py/util/helper.py index 21446d8..2c7470e 100644 --- a/databend_py/util/helper.py +++ b/databend_py/util/helper.py @@ -8,22 +8,24 @@ def __int__(self, response): super(Helper, self).__init__() def get_result_data(self): - return self.response['data'] + return self.response["data"] def get_fields(self): return self.response["schema"]["fields"] def get_next_uri(self): if 
"next_uri" in self.response: - return self.response['next_uri'] + return self.response["next_uri"] return None def get_error(self): - if self.response['error'] is None: + if self.response["error"] is None: return None - return ServerException(message=self.response['error']['message'], - code=self.response['error'].get('code')) + return ServerException( + message=self.response["error"]["message"], + code=self.response["error"].get("code"), + ) def check_error(self): error = self.get_error() @@ -35,11 +37,11 @@ def chunks(seq, n): # islice is MUCH slower than slice for lists and tuples. if isinstance(seq, (list, tuple)): i = 0 - item = seq[i:i + n] + item = seq[i : i + n] while item: yield list(item) i += n - item = seq[i:i + n] + item = seq[i : i + n] else: it = iter(seq) @@ -59,8 +61,9 @@ def column_chunks(columns, n): for column in columns: if not isinstance(column, (list, tuple)): raise TypeError( - 'Unsupported column type: {}. list or tuple is expected.' - .format(type(column)) + "Unsupported column type: {}. list or tuple is expected.".format( + type(column) + ) ) # create chunk generator for every column @@ -78,10 +81,10 @@ def column_chunks(columns, n): def asbool(obj): if isinstance(obj, str): obj = obj.strip().lower() - if obj in ['true', 'yes', 'on', 'y', 't', '1']: + if obj in ["true", "yes", "on", "y", "t", "1"]: return True - elif obj in ['false', 'no', 'off', 'n', 'f', '0']: + elif obj in ["false", "no", "off", "n", "f", "0"]: return False else: - raise ValueError('String is not true/false: %r' % obj) + raise ValueError("String is not true/false: %r" % obj) return bool(obj) diff --git a/docker-compose.yaml b/docker-compose.yaml new file mode 100644 index 0000000..086dc1f --- /dev/null +++ b/docker-compose.yaml @@ -0,0 +1,24 @@ +services: + minio: + image: docker.io/minio/minio + command: server /data + network_mode: "host" + volumes: + - ./data:/data + databend: + image: docker.io/datafuselabs/databend:nightly + environment: + - QUERY_STORAGE_TYPE=s3 + - QUERY_DATABEND_ENTERPRISE_LICENSE + - AWS_S3_ENDPOINT=http://localhost:9000 + - AWS_ACCESS_KEY_ID=minioadmin + - AWS_SECRET_ACCESS_KEY=minioadmin + network_mode: "host" + depends_on: + - minio + healthcheck: + test: "curl -f localhost:8080/v1/health || exit 1" + interval: 2s + retries: 10 + start_period: 2s + timeout: 1s diff --git a/examples/batch_insert.py b/examples/batch_insert.py index 4f88ae8..55a0f72 100644 --- a/examples/batch_insert.py +++ b/examples/batch_insert.py @@ -3,29 +3,29 @@ def insert(): client = Client.from_url("http://root:root@localhost:8000") - client.execute('DROP TABLE IF EXISTS test_upload') - client.execute('CREATE TABLE if not exists test_upload (x Int32,y VARCHAR)') - client.execute('DESC test_upload') - client.insert("default", "test_upload", [(1, 'a'), (1, 'b')]) - _, upload_res = client.execute('select * from test_upload') + client.execute("DROP TABLE IF EXISTS test_upload") + client.execute("CREATE TABLE if not exists test_upload (x Int32,y VARCHAR)") + client.execute("DESC test_upload") + client.insert("default", "test_upload", [(1, "a"), (1, "b")]) + _, upload_res = client.execute("select * from test_upload") # upload_res is [(1, 'a'), (1, 'b')] def batch_insert(): c = Client.from_url("http://root:root@localhost:8000") - c.execute('DROP TABLE IF EXISTS test') - c.execute('CREATE TABLE if not exists test (x Int32,y VARCHAR)') - c.execute('DESC test') - _, r1 = c.execute('INSERT INTO test (x,y) VALUES (%,%)', [1, 'yy', 2, 'xx']) - _, ss = c.execute('select * from test') + c.execute("DROP TABLE 
IF EXISTS test") + c.execute("CREATE TABLE if not exists test (x Int32,y VARCHAR)") + c.execute("DESC test") + _, r1 = c.execute("INSERT INTO test (x,y) VALUES (%,%)", [1, "yy", 2, "xx"]) + _, ss = c.execute("select * from test") # ss is [(1, 'yy'), (2, 'xx')] def batch_insert_with_tuple(): c = Client.from_url("http://root:root@localhost:8000") - c.execute('DROP TABLE IF EXISTS test') - c.execute('CREATE TABLE if not exists test (x Int32,y VARCHAR)') - c.execute('DESC test') + c.execute("DROP TABLE IF EXISTS test") + c.execute("CREATE TABLE if not exists test (x Int32,y VARCHAR)") + c.execute("DESC test") # data is tuple list - _, r1 = c.execute('INSERT INTO test (x,y) VALUES', [(3, 'aa'), (4, 'bb')]) - _, ss = c.execute('select * from test') + _, r1 = c.execute("INSERT INTO test (x,y) VALUES", [(3, "aa"), (4, "bb")]) + _, ss = c.execute("select * from test") diff --git a/examples/ordinary_query.py b/examples/ordinary_query.py index 702ac3b..1c89c71 100644 --- a/examples/ordinary_query.py +++ b/examples/ordinary_query.py @@ -13,5 +13,5 @@ def ordinary_query(): print(res2) # create table/ drop table - client.execute('DROP TABLE IF EXISTS test') - client.execute('CREATE TABLE if not exists test (x Int32,y VARCHAR)') + client.execute("DROP TABLE IF EXISTS test") + client.execute("CREATE TABLE if not exists test (x Int32,y VARCHAR)") diff --git a/examples/replace_into.py b/examples/replace_into.py index 5c00cc5..3bf8915 100644 --- a/examples/replace_into.py +++ b/examples/replace_into.py @@ -3,10 +3,10 @@ def replace_into(): client = Client.from_url("http://root:root@localhost:8000") - client.execute('DROP TABLE IF EXISTS test_replace') - client.execute('CREATE TABLE if not exists test_replace (x Int32,y VARCHAR)') - client.execute('DESC test_replace') - client.replace("default", "test_replace", ['x'], [(1, 'a'), (2, 'b')]) - client.replace("default", "test_replace", ['x'], [(1, 'c'), (2, 'd')]) - _, upload_res = client.execute('select * from test_replace') + client.execute("DROP TABLE IF EXISTS test_replace") + client.execute("CREATE TABLE if not exists test_replace (x Int32,y VARCHAR)") + client.execute("DESC test_replace") + client.replace("default", "test_replace", ["x"], [(1, "a"), (2, "b")]) + client.replace("default", "test_replace", ["x"], [(1, "c"), (2, "d")]) + _, upload_res = client.execute("select * from test_replace") # upload_res is [(1, 'c\r'), (2, 'd\r')] diff --git a/examples/session_setting.py b/examples/session_setting.py index cc93460..32c3cdf 100644 --- a/examples/session_setting.py +++ b/examples/session_setting.py @@ -4,5 +4,11 @@ def session_settings(): # docs: https://databend.rs/doc/integrations/api/rest#client-side-session session_settings = {"db": "test"} - client = Client(host="localhost", port=8000, user="root", password="root", session_settings=session_settings) + client = Client( + host="localhost", + port=8000, + user="root", + password="root", + session_settings=session_settings, + ) print(client) diff --git a/examples/upload_to_stage.py b/examples/upload_to_stage.py index 4a630e2..0d37d32 100644 --- a/examples/upload_to_stage.py +++ b/examples/upload_to_stage.py @@ -4,16 +4,17 @@ def create_csv(): import csv - with open('upload.csv', 'w', newline='') as file: + + with open("upload.csv", "w", newline="") as file: writer = csv.writer(file) - writer.writerow([1, 'a']) - writer.writerow([1, 'b']) + writer.writerow([1, "a"]) + writer.writerow([1, "b"]) def upload_to_stage(): client = Client.from_url("http://root:root@localhost:8000") # upload [(1, 'a'), (1, 'b')] 
as csv to stage ~ - stage_path = client.upload_to_stage('@~', "upload.csv", [(1, 'a'), (1, 'b')]) + stage_path = client.upload_to_stage("@~", "upload.csv", [(1, "a"), (1, "b")]) print(stage_path) # stage_path is @~/upload.csv @@ -22,7 +23,7 @@ def upload_file_to_stage(): create_csv() client = Client.from_url("http://root:root@localhost:8000") with open("upload.csv", "rb") as f: - stage_path = client.upload_to_stage('@~', "upload.csv", f) + stage_path = client.upload_to_stage("@~", "upload.csv", f) print(stage_path) os.remove("upload.csv") diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..a5d8ddf --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,49 @@ +[project] +name = "databend-py" +version = "0.1.0" +description = "Add your description here" +readme = "README.md" +authors = [ + {name = "Databend Cloud Team"} +] +license = {file = "LICENSE"} +requires-python = ">=3.9" +dependencies = [ + "black>=24.10.0", + "databend-driver>=0.23.2", + "environs>=11.2.1", + "pytz>=2024.2", + "requests>=2.32.3", + "setuptools>=75.6.0", +] + +classifiers = [ + "Development Status :: 4 - Beta", + "Environment :: Console", + "Intended Audience :: Developers", + "Intended Audience :: Information Technology", + "Operating System :: OS Independent", + "Programming Language :: SQL", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: Implementation :: PyPy", + "Topic :: Database", + "Topic :: Software Development", + "Topic :: Software Development :: Libraries", + "Topic :: Software Development :: Libraries :: Application Frameworks", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: Scientific/Engineering :: Information Analysis" +] + +[tool.uv] +dev-dependencies = [ + "pytest>=8.3.4", + "requests>=2.32.3", + "ruff>=0.8.2", +] + +[build-system] +requires = ["setuptools>=42", "wheel"] +build-backend = "setuptools.build_meta" diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index cff5230..0000000 --- a/requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -environs==9.5.0 -pytz==2022.5 -requests==2.28.1 -setuptools==62.3.2 -black==23.3.0 -pyflakes==3.0.1 diff --git a/tests/test_client.py b/tests/test_client.py index da3a3af..b7a7695 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -1,42 +1,47 @@ +import os +import unittest +import types from databend_py import Client -from unittest import TestCase -import types, os def sample_insert_data(): - return [(1, 'a'), (1, 'b')] + return [(1, "a"), (1, "b")] def create_csv(): import csv - with open('upload.csv', 'w', newline='') as file: + + with open("upload.csv", "w", newline="") as file: writer = csv.writer(file) - writer.writerow([1, 'a']) - writer.writerow([1, 'b']) + writer.writerow([1, "a"]) + writer.writerow([1, "b"]) + +class DatabendPyTestCase(unittest.TestCase): + databend_url = None -class DatabendPyTestCase(TestCase): - def __init__(self, databend_url): - super().__init__() - self.databend_url = databend_url + def setUp(self): + self.databend_url = os.getenv("TEST_DATABEND_DSN") def assertHostsEqual(self, client, another, msg=None): self.assertEqual(client.connection.host, another, msg=msg) def test_simple(self): - c = Client.from_url('https://app.databend.com:443?secure=True©_purge=True&debug=True') + c = Client.from_url( + "https://app.databend.com:443?secure=True©_purge=True&debug=True" + ) - 
self.assertHostsEqual(c, 'app.databend.com') - self.assertEqual(c.connection.database, 'default') - self.assertEqual(c.connection.user, 'root') + self.assertHostsEqual(c, "app.databend.com") + self.assertEqual(c.connection.database, "default") + self.assertEqual(c.connection.user, "root") self.assertEqual(c.connection.copy_purge, True) self.assertEqual(c.settings.get("debug"), True) - c = Client.from_url('https://host:443/db') + c = Client.from_url("https://host:443/db") - self.assertHostsEqual(c, 'host') - self.assertEqual(c.connection.database, 'db') - self.assertEqual(c.connection.password, '') + self.assertHostsEqual(c, "host") + self.assertEqual(c.connection.database, "db") + self.assertEqual(c.connection.password, "") c = Client.from_url("databend://localhost:8000/default?secure=true") self.assertEqual(c.connection.schema, "https") @@ -49,23 +54,33 @@ def test_simple(self): self.assertEqual(c.connection.connect_timeout, 180) self.assertEqual(c.connection.read_timeout, 180) - c = Client.from_url("databend://root:root@localhost:8000/default?connect_timeout=30&read_timeout=30") + c = Client.from_url( + "databend://root:root@localhost:8000/default?connect_timeout=30&read_timeout=30" + ) self.assertEqual(c.connection.connect_timeout, 30) self.assertEqual(c.connection.read_timeout, 30) self.assertEqual(c.connection.persist_cookies, False) - c = Client.from_url('https://root:root@localhost:8000?persist_cookies=True&tenant=tn1&warehouse=wh1') + c = Client.from_url( + "https://root:root@localhost:8000?persist_cookies=True&tenant=tn1&warehouse=wh1" + ) self.assertEqual(c.connection.persist_cookies, True) self.assertEqual(c.connection.tenant, "tn1") self.assertEqual(c.connection.warehouse, "wh1") def test_session_settings(self): session_settings = {"db": "database"} - c = Client(host="localhost", port=8000, user="root", password="root", session_settings={"db": "database"}) + c = Client( + host="localhost", + port=8000, + user="root", + password="root", + session_settings={"db": "database"}, + ) self.assertEqual(c.connection.client_session, session_settings) def test_ordinary_query(self): - select_test = ''' + select_test = """ select null as db, name as name, @@ -73,7 +88,7 @@ def test_ordinary_query(self): if(engine = 'VIEW', 'view', 'table') as type from system.tables where database = 'default'; - ''' + """ # if use the host from databend cloud, must set the 'ADDITIONAL_HEADERS': # os.environ['ADDITIONAL_HEADERS'] = 'X-DATABENDCLOUD-TENANT=TENANT,X-DATABENDCLOUD-WAREHOUSE=WAREHOUSE' c = Client.from_url(self.databend_url) @@ -81,32 +96,40 @@ def test_ordinary_query(self): self.assertEqual(r, ([(1,)])) column_types, _ = c.execute(select_test, with_column_types=True) print(column_types) - self.assertEqual(column_types, [('db', 'NULL'), ('name', 'String'), ('schema', 'String'), ('type', 'String')]) + self.assertEqual( + column_types, + [ + ("db", "NULL"), + ("name", "String"), + ("schema", "String"), + ("type", "String"), + ], + ) # test with_column_types=True r = c.execute("select 1", with_column_types=True) - self.assertEqual(r, ([('1', 'UInt8')], [(1,)])) + self.assertEqual(r, ([("1", "UInt8")], [(1,)])) def test_batch_insert(self): c = Client.from_url(self.databend_url) - c.execute('DROP TABLE IF EXISTS test') - c.execute('CREATE TABLE if not exists test (x Int32,y VARCHAR)') - c.execute('DESC test') - _, r1 = c.execute('INSERT INTO test (x,y) VALUES (%,%)', [1, 'yy', 2, 'xx']) + c.execute("DROP TABLE IF EXISTS test") + c.execute("CREATE TABLE if not exists test (x Int32,y VARCHAR)") + 
c.execute("DESC test") + _, r1 = c.execute("INSERT INTO test (x,y) VALUES (%,%)", [1, "yy", 2, "xx"]) self.assertEqual(r1, 2) - _, ss = c.execute('select * from test') + _, ss = c.execute("select * from test") print(ss) - self.assertEqual(ss, [(1, 'yy'), (2, 'xx')]) + self.assertEqual(ss, [(1, "yy"), (2, "xx")]) def test_batch_insert_with_tuple(self): c = Client.from_url(self.databend_url) - c.execute('DROP TABLE IF EXISTS test') - c.execute('CREATE TABLE if not exists test (x Int32,y VARCHAR)') - c.execute('DESC test') - _, r1 = c.execute('INSERT INTO test (x,y) VALUES', [(3, 'aa'), (4, 'bb')]) + c.execute("DROP TABLE IF EXISTS test") + c.execute("CREATE TABLE if not exists test (x Int32,y VARCHAR)") + c.execute("DESC test") + _, r1 = c.execute("INSERT INTO test (x,y) VALUES", [(3, "aa"), (4, "bb")]) self.assertEqual(r1, 2) - _, ss = c.execute('select * from test') - self.assertEqual(ss, [(3, 'aa'), (4, 'bb')]) + _, ss = c.execute("select * from test") + self.assertEqual(ss, [(3, "aa"), (4, "bb")]) def test_iter_query(self): client = Client.from_url(self.databend_url) @@ -119,57 +142,59 @@ def test_iter_query(self): def test_insert(self): client = Client.from_url(self.databend_url) - client.execute('DROP TABLE IF EXISTS test_upload') - client.execute('CREATE TABLE if not exists test_upload (x Int32,y VARCHAR)') - client.execute('DESC test_upload') - client.insert("default", "test_upload", [(1, 'a'), (1, 'b')]) - _, upload_res = client.execute('select * from test_upload') - self.assertEqual(upload_res, [(1, 'a'), (1, 'b')]) + client.execute("DROP TABLE IF EXISTS test_upload") + client.execute("CREATE TABLE if not exists test_upload (x Int32,y VARCHAR)") + client.execute("DESC test_upload") + client.insert("default", "test_upload", [(1, "a"), (1, "b")]) + _, upload_res = client.execute("select * from test_upload") + self.assertEqual(upload_res, [(1, "a"), (1, "b")]) def test_replace(self): client = Client.from_url(self.databend_url) - client.execute('DROP TABLE IF EXISTS test_replace') - client.execute('CREATE TABLE if not exists test_replace (x Int32,y VARCHAR)') - client.execute('DESC test_replace') - client.replace("default", "test_replace", ['x'], [(1, 'a'), (2, 'b')]) - client.replace("default", "test_replace", ['x'], [(1, 'c'), (2, 'd')]) - _, upload_res = client.execute('select * from test_replace') - self.assertEqual(upload_res, [(1, 'c\r'), (2, 'd\r')]) + client.execute("DROP TABLE IF EXISTS test_replace") + client.execute("CREATE TABLE if not exists test_replace (x Int32,y VARCHAR)") + client.execute("DESC test_replace") + client.replace("default", "test_replace", ["x"], [(1, "a"), (2, "b")]) + client.replace("default", "test_replace", ["x"], [(1, "c"), (2, "d")]) + _, upload_res = client.execute("select * from test_replace") + self.assertEqual(upload_res, [(1, "c\r"), (2, "d\r")]) def test_insert_with_compress(self): client = Client.from_url(self.databend_url + "?compress=True&debug=True") self.assertEqual(client._uploader._compress, True) - client.execute('DROP TABLE IF EXISTS test_upload') - client.execute('CREATE TABLE if not exists test_upload (x Int32,y VARCHAR)') - client.execute('DESC test_upload') - client.insert("default", "test_upload", [(1, 'a'), (1, 'b')]) - _, upload_res = client.execute('select * from test_upload') - self.assertEqual(upload_res, [(1, 'a'), (1, 'b')]) + client.execute("DROP TABLE IF EXISTS test_upload") + client.execute("CREATE TABLE if not exists test_upload (x Int32,y VARCHAR)") + client.execute("DESC test_upload") + client.insert("default", 
"test_upload", [(1, "a"), (1, "b")]) + _, upload_res = client.execute("select * from test_upload") + self.assertEqual(upload_res, [(1, "a"), (1, "b")]) def test_upload_to_stage(self): client = Client.from_url(self.databend_url) - stage_path = client.upload_to_stage('@~', "upload.csv", [(1, 'a'), (1, 'b')]) + stage_path = client.upload_to_stage("@~", "upload.csv", [(1, "a"), (1, "b")]) self.assertEqual(stage_path, "@~/upload.csv") def test_upload_file_to_stage(self): create_csv() client = Client.from_url(self.databend_url) with open("upload.csv", "rb") as f: - stage_path = client.upload_to_stage('@~', "upload.csv", f) + stage_path = client.upload_to_stage("@~", "upload.csv", f) print(stage_path) self.assertEqual(stage_path, "@~/upload.csv") os.remove("upload.csv") def test_select_over_paging(self): - expected_column = [('number', 'UInt64')] + expected_column = [("number", "UInt64")] client = Client.from_url(self.databend_url) - columns, data = client.execute('SELECT * FROM numbers(10001)', with_column_types=True) + columns, data = client.execute( + "SELECT * FROM numbers(10001)", with_column_types=True + ) self.assertEqual(expected_column, columns) def tearDown(self): client = Client.from_url(self.databend_url) - client.execute('DROP TABLE IF EXISTS test') + client.execute("DROP TABLE IF EXISTS test") client.disconnect() def test_cookies(self): @@ -201,20 +226,31 @@ def test_null_to_none(self): def test_special_chars(self): client = Client.from_url(self.databend_url) client.execute("create or replace table test_special_chars (x string)") - client.execute("INSERT INTO test_special_chars (x) VALUES", [('ó')]) + client.execute("INSERT INTO test_special_chars (x) VALUES", [("ó")]) _, data = client.execute("select * from test_special_chars") - self.assertEqual(data, [('ó')]) + self.assertEqual(data, [("ó",)]) def test_set_query_id_header(self): - os.environ["ADDITIONAL_HEADERS"] = "X-DATABENDCLOUD-TENANT=TENANT,X-DATABENDCLOUD-WAREHOUSE=WAREHOUSE" + os.environ["ADDITIONAL_HEADERS"] = ( + "X-DATABENDCLOUD-TENANT=TENANT,X-DATABENDCLOUD-WAREHOUSE=WAREHOUSE" + ) client = Client.from_url(self.databend_url) - self.assertEqual("X-DATABENDCLOUD-TENANT" in client.connection.additional_headers, True) - self.assertEqual(client.connection.additional_headers["X-DATABENDCLOUD-TENANT"], "TENANT") + self.assertEqual( + "X-DATABENDCLOUD-TENANT" in client.connection.additional_headers, True + ) + self.assertEqual( + client.connection.additional_headers["X-DATABENDCLOUD-TENANT"], "TENANT" + ) client.execute("select 1") - execute_query_id1 = client.connection.additional_headers["X-Databend-Query-Id"] - self.assertEqual("X-Databend-Query-Id" in client.connection.additional_headers, True) + execute_query_id1 = client.connection.additional_headers["X-DATABEND-QUERY-ID"] + self.assertEqual( + "X-DATABEND-QUERY-ID" in client.connection.additional_headers, True + ) client.execute("select 2") - self.assertNotEqual(execute_query_id1, client.connection.additional_headers["X-Databend-Query-Id"]) + self.assertNotEqual( + execute_query_id1, + client.connection.additional_headers["X-DATABEND-QUERY-ID"], + ) def test_commit(self): client = Client.from_url(self.databend_url) @@ -258,21 +294,5 @@ def test_cast_bool(self): self.assertEqual(data, [(True,), (False,)]) -if __name__ == '__main__': - print("start test......") - # os.environ['TEST_DATABEND_DSN'] = "http://root:@localhost:8000" - dt = DatabendPyTestCase(databend_url=os.getenv("TEST_DATABEND_DSN")) - dt.test_simple() - dt.test_ordinary_query() - dt.test_batch_insert() - 
-    dt.test_iter_query()
-    dt.test_insert()
-    dt.test_replace()
-    dt.test_insert_with_compress()
-    dt.test_upload_to_stage()
-    dt.test_upload_file_to_stage()
-    dt.test_cookies()
-    dt.test_null_to_none()
-    dt.tearDown()
-    dt.test_cast_bool()
-    print("end test.....")
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/test_simple.py b/tests/test_simple.py
new file mode 100644
index 0000000..e63fb2b
--- /dev/null
+++ b/tests/test_simple.py
@@ -0,0 +1,39 @@
+import unittest
+
+
+class Dict(dict):
+    def __init__(self, **kw):
+        super().__init__(**kw)
+
+    def __getattr__(self, key):
+        try:
+            return self[key]
+        except KeyError:
+            raise AttributeError(r"'Dict' object has no attribute '%s'" % key)
+
+    def __setattr__(self, key, value):
+        self[key] = value
+
+
+class TestDict(unittest.TestCase):
+    databend_url = None  # use a class attribute to store databend_url
+
+    @classmethod
+    def setUpClass(cls):
+        cls.databend_url = "test_url"  # set databend_url at the class level
+
+    def test_init(self):
+        d = Dict(a=1, b="test")
+        self.assertEqual(self.databend_url, "test_url")  # read the class attribute
+        self.assertEqual(d.a, 1)
+        self.assertEqual(d.b, "test")
+        self.assertTrue(isinstance(d, dict))
+
+    def test_key(self):
+        d = Dict()
+        d["key"] = "value"
+        self.assertEqual(d.key, "value")
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/uv.lock b/uv.lock
new file mode 100644
index 0000000..ff3501a
--- /dev/null
+++ b/uv.lock
@@ -0,0 +1,441 @@
+version = 1 +requires-python = ">=3.9" + +[[package]] +name = "black" +version = "24.10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "mypy-extensions" }, + { name = "packaging" }, + { name = "pathspec" }, + { name = "platformdirs" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d8/0d/cc2fb42b8c50d80143221515dd7e4766995bd07c56c9a3ed30baf080b6dc/black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875", size = 645813 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/f3/465c0eb5cddf7dbbfe1fecd9b875d1dcf51b88923cd2c1d7e9ab95c6336b/black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812", size = 1623211 }, + { url = "https://files.pythonhosted.org/packages/df/57/b6d2da7d200773fdfcc224ffb87052cf283cec4d7102fab450b4a05996d8/black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea", size = 1457139 }, + { url = "https://files.pythonhosted.org/packages/6e/c5/9023b7673904a5188f9be81f5e129fff69f51f5515655fbd1d5a4e80a47b/black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f", size = 1753774 }, + { url = "https://files.pythonhosted.org/packages/e1/32/df7f18bd0e724e0d9748829765455d6643ec847b3f87e77456fc99d0edab/black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e", size = 1414209 }, + { url = "https://files.pythonhosted.org/packages/c2/cc/7496bb63a9b06a954d3d0ac9fe7a73f3bf1cd92d7a58877c27f4ad1e9d41/black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad", size = 1607468 }, + { url = 
"https://files.pythonhosted.org/packages/2b/e3/69a738fb5ba18b5422f50b4f143544c664d7da40f09c13969b2fd52900e0/black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50", size = 1437270 }, + { url = "https://files.pythonhosted.org/packages/c9/9b/2db8045b45844665c720dcfe292fdaf2e49825810c0103e1191515fc101a/black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392", size = 1737061 }, + { url = "https://files.pythonhosted.org/packages/a3/95/17d4a09a5be5f8c65aa4a361444d95edc45def0de887810f508d3f65db7a/black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175", size = 1423293 }, + { url = "https://files.pythonhosted.org/packages/90/04/bf74c71f592bcd761610bbf67e23e6a3cff824780761f536512437f1e655/black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3", size = 1644256 }, + { url = "https://files.pythonhosted.org/packages/4c/ea/a77bab4cf1887f4b2e0bce5516ea0b3ff7d04ba96af21d65024629afedb6/black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65", size = 1448534 }, + { url = "https://files.pythonhosted.org/packages/4e/3e/443ef8bc1fbda78e61f79157f303893f3fddf19ca3c8989b163eb3469a12/black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f", size = 1761892 }, + { url = "https://files.pythonhosted.org/packages/52/93/eac95ff229049a6901bc84fec6908a5124b8a0b7c26ea766b3b8a5debd22/black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8", size = 1434796 }, + { url = "https://files.pythonhosted.org/packages/d0/a0/a993f58d4ecfba035e61fca4e9f64a2ecae838fc9f33ab798c62173ed75c/black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981", size = 1643986 }, + { url = "https://files.pythonhosted.org/packages/37/d5/602d0ef5dfcace3fb4f79c436762f130abd9ee8d950fa2abdbf8bbc555e0/black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b", size = 1448085 }, + { url = "https://files.pythonhosted.org/packages/47/6d/a3a239e938960df1a662b93d6230d4f3e9b4a22982d060fc38c42f45a56b/black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2", size = 1760928 }, + { url = "https://files.pythonhosted.org/packages/dd/cf/af018e13b0eddfb434df4d9cd1b2b7892bab119f7a20123e93f6910982e8/black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b", size = 1436875 }, + { url = "https://files.pythonhosted.org/packages/fe/02/f408c804e0ee78c367dcea0a01aedde4f1712af93b8b6e60df981e0228c7/black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd", size = 1622516 }, + { url = "https://files.pythonhosted.org/packages/f8/b9/9b706ed2f55bfb28b436225a9c57da35990c9005b90b8c91f03924454ad7/black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f", size = 1456181 }, + { url = "https://files.pythonhosted.org/packages/0a/1c/314d7f17434a5375682ad097f6f4cc0e3f414f3c95a9b1bb4df14a0f11f9/black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800", size = 1752801 }, + { url = "https://files.pythonhosted.org/packages/39/a7/20e5cd9237d28ad0b31438de5d9f01c8b99814576f4c0cda1edd62caf4b0/black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7", size = 1413626 }, + { url = "https://files.pythonhosted.org/packages/8d/a7/4b27c50537ebca8bec139b872861f9d2bf501c5ec51fcf897cb924d9e264/black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d", size = 206898 }, +] + +[[package]] +name = "certifi" +version = "2024.8.30" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/ee/9b19140fe824b367c04c5e1b369942dd754c4c5462d5674002f75c4dedc1/certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9", size = 168507 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/90/3c9ff0512038035f59d279fddeb79f5f1eccd8859f06d6163c58798b9487/certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", size = 167321 }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/4f/e1808dc01273379acc506d18f1504eb2d299bd4131743b9fc54d7be4df1e/charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", size = 106620 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/8b/825cc84cf13a28bfbcba7c416ec22bf85a9584971be15b21dd8300c65b7f/charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6", size = 196363 }, + { url = "https://files.pythonhosted.org/packages/23/81/d7eef6a99e42c77f444fdd7bc894b0ceca6c3a95c51239e74a722039521c/charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b", size = 125639 }, + { url = "https://files.pythonhosted.org/packages/21/67/b4564d81f48042f520c948abac7079356e94b30cb8ffb22e747532cf469d/charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99", size = 120451 }, + { url = "https://files.pythonhosted.org/packages/c2/72/12a7f0943dd71fb5b4e7b55c41327ac0a1663046a868ee4d0d8e9c369b85/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca", size = 140041 }, + { url = "https://files.pythonhosted.org/packages/67/56/fa28c2c3e31217c4c52158537a2cf5d98a6c1e89d31faf476c89391cd16b/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d", size = 150333 }, + { url = 
"https://files.pythonhosted.org/packages/f9/d2/466a9be1f32d89eb1554cf84073a5ed9262047acee1ab39cbaefc19635d2/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7", size = 142921 }, + { url = "https://files.pythonhosted.org/packages/f8/01/344ec40cf5d85c1da3c1f57566c59e0c9b56bcc5566c08804a95a6cc8257/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3", size = 144785 }, + { url = "https://files.pythonhosted.org/packages/73/8b/2102692cb6d7e9f03b9a33a710e0164cadfce312872e3efc7cfe22ed26b4/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907", size = 146631 }, + { url = "https://files.pythonhosted.org/packages/d8/96/cc2c1b5d994119ce9f088a9a0c3ebd489d360a2eb058e2c8049f27092847/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b", size = 140867 }, + { url = "https://files.pythonhosted.org/packages/c9/27/cde291783715b8ec30a61c810d0120411844bc4c23b50189b81188b273db/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912", size = 149273 }, + { url = "https://files.pythonhosted.org/packages/3a/a4/8633b0fc1a2d1834d5393dafecce4a1cc56727bfd82b4dc18fc92f0d3cc3/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95", size = 152437 }, + { url = "https://files.pythonhosted.org/packages/64/ea/69af161062166b5975ccbb0961fd2384853190c70786f288684490913bf5/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e", size = 150087 }, + { url = "https://files.pythonhosted.org/packages/3b/fd/e60a9d9fd967f4ad5a92810138192f825d77b4fa2a557990fd575a47695b/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe", size = 145142 }, + { url = "https://files.pythonhosted.org/packages/6d/02/8cb0988a1e49ac9ce2eed1e07b77ff118f2923e9ebd0ede41ba85f2dcb04/charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc", size = 94701 }, + { url = "https://files.pythonhosted.org/packages/d6/20/f1d4670a8a723c46be695dff449d86d6092916f9e99c53051954ee33a1bc/charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749", size = 102191 }, + { url = "https://files.pythonhosted.org/packages/9c/61/73589dcc7a719582bf56aae309b6103d2762b526bffe189d635a7fcfd998/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c", size = 193339 }, + { url = "https://files.pythonhosted.org/packages/77/d5/8c982d58144de49f59571f940e329ad6e8615e1e82ef84584c5eeb5e1d72/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944", size = 124366 }, + { url = 
"https://files.pythonhosted.org/packages/bf/19/411a64f01ee971bed3231111b69eb56f9331a769072de479eae7de52296d/charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee", size = 118874 }, + { url = "https://files.pythonhosted.org/packages/4c/92/97509850f0d00e9f14a46bc751daabd0ad7765cff29cdfb66c68b6dad57f/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c", size = 138243 }, + { url = "https://files.pythonhosted.org/packages/e2/29/d227805bff72ed6d6cb1ce08eec707f7cfbd9868044893617eb331f16295/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6", size = 148676 }, + { url = "https://files.pythonhosted.org/packages/13/bc/87c2c9f2c144bedfa62f894c3007cd4530ba4b5351acb10dc786428a50f0/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea", size = 141289 }, + { url = "https://files.pythonhosted.org/packages/eb/5b/6f10bad0f6461fa272bfbbdf5d0023b5fb9bc6217c92bf068fa5a99820f5/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc", size = 142585 }, + { url = "https://files.pythonhosted.org/packages/3b/a0/a68980ab8a1f45a36d9745d35049c1af57d27255eff8c907e3add84cf68f/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5", size = 144408 }, + { url = "https://files.pythonhosted.org/packages/d7/a1/493919799446464ed0299c8eef3c3fad0daf1c3cd48bff9263c731b0d9e2/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594", size = 139076 }, + { url = "https://files.pythonhosted.org/packages/fb/9d/9c13753a5a6e0db4a0a6edb1cef7aee39859177b64e1a1e748a6e3ba62c2/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c", size = 146874 }, + { url = "https://files.pythonhosted.org/packages/75/d2/0ab54463d3410709c09266dfb416d032a08f97fd7d60e94b8c6ef54ae14b/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365", size = 150871 }, + { url = "https://files.pythonhosted.org/packages/8d/c9/27e41d481557be53d51e60750b85aa40eaf52b841946b3cdeff363105737/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129", size = 148546 }, + { url = "https://files.pythonhosted.org/packages/ee/44/4f62042ca8cdc0cabf87c0fc00ae27cd8b53ab68be3605ba6d071f742ad3/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236", size = 143048 }, + { url = "https://files.pythonhosted.org/packages/01/f8/38842422988b795220eb8038745d27a675ce066e2ada79516c118f291f07/charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99", size = 94389 }, + { url = 
"https://files.pythonhosted.org/packages/0b/6e/b13bd47fa9023b3699e94abf565b5a2f0b0be6e9ddac9812182596ee62e4/charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27", size = 101752 }, + { url = "https://files.pythonhosted.org/packages/d3/0b/4b7a70987abf9b8196845806198975b6aab4ce016632f817ad758a5aa056/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6", size = 194445 }, + { url = "https://files.pythonhosted.org/packages/50/89/354cc56cf4dd2449715bc9a0f54f3aef3dc700d2d62d1fa5bbea53b13426/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf", size = 125275 }, + { url = "https://files.pythonhosted.org/packages/fa/44/b730e2a2580110ced837ac083d8ad222343c96bb6b66e9e4e706e4d0b6df/charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db", size = 119020 }, + { url = "https://files.pythonhosted.org/packages/9d/e4/9263b8240ed9472a2ae7ddc3e516e71ef46617fe40eaa51221ccd4ad9a27/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1", size = 139128 }, + { url = "https://files.pythonhosted.org/packages/6b/e3/9f73e779315a54334240353eaea75854a9a690f3f580e4bd85d977cb2204/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03", size = 149277 }, + { url = "https://files.pythonhosted.org/packages/1a/cf/f1f50c2f295312edb8a548d3fa56a5c923b146cd3f24114d5adb7e7be558/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284", size = 142174 }, + { url = "https://files.pythonhosted.org/packages/16/92/92a76dc2ff3a12e69ba94e7e05168d37d0345fa08c87e1fe24d0c2a42223/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15", size = 143838 }, + { url = "https://files.pythonhosted.org/packages/a4/01/2117ff2b1dfc61695daf2babe4a874bca328489afa85952440b59819e9d7/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8", size = 146149 }, + { url = "https://files.pythonhosted.org/packages/f6/9b/93a332b8d25b347f6839ca0a61b7f0287b0930216994e8bf67a75d050255/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2", size = 140043 }, + { url = "https://files.pythonhosted.org/packages/ab/f6/7ac4a01adcdecbc7a7587767c776d53d369b8b971382b91211489535acf0/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719", size = 148229 }, + { url = "https://files.pythonhosted.org/packages/9d/be/5708ad18161dee7dc6a0f7e6cf3a88ea6279c3e8484844c0590e50e803ef/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631", size = 151556 }, + { url = 
"https://files.pythonhosted.org/packages/5a/bb/3d8bc22bacb9eb89785e83e6723f9888265f3a0de3b9ce724d66bd49884e/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b", size = 149772 }, + { url = "https://files.pythonhosted.org/packages/f7/fa/d3fc622de05a86f30beea5fc4e9ac46aead4731e73fd9055496732bcc0a4/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565", size = 144800 }, + { url = "https://files.pythonhosted.org/packages/9a/65/bdb9bc496d7d190d725e96816e20e2ae3a6fa42a5cac99c3c3d6ff884118/charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7", size = 94836 }, + { url = "https://files.pythonhosted.org/packages/3e/67/7b72b69d25b89c0b3cea583ee372c43aa24df15f0e0f8d3982c57804984b/charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9", size = 102187 }, + { url = "https://files.pythonhosted.org/packages/f3/89/68a4c86f1a0002810a27f12e9a7b22feb198c59b2f05231349fbce5c06f4/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114", size = 194617 }, + { url = "https://files.pythonhosted.org/packages/4f/cd/8947fe425e2ab0aa57aceb7807af13a0e4162cd21eee42ef5b053447edf5/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed", size = 125310 }, + { url = "https://files.pythonhosted.org/packages/5b/f0/b5263e8668a4ee9becc2b451ed909e9c27058337fda5b8c49588183c267a/charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250", size = 119126 }, + { url = "https://files.pythonhosted.org/packages/ff/6e/e445afe4f7fda27a533f3234b627b3e515a1b9429bc981c9a5e2aa5d97b6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920", size = 139342 }, + { url = "https://files.pythonhosted.org/packages/a1/b2/4af9993b532d93270538ad4926c8e37dc29f2111c36f9c629840c57cd9b3/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64", size = 149383 }, + { url = "https://files.pythonhosted.org/packages/fb/6f/4e78c3b97686b871db9be6f31d64e9264e889f8c9d7ab33c771f847f79b7/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23", size = 142214 }, + { url = "https://files.pythonhosted.org/packages/2b/c9/1c8fe3ce05d30c87eff498592c89015b19fade13df42850aafae09e94f35/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc", size = 144104 }, + { url = "https://files.pythonhosted.org/packages/ee/68/efad5dcb306bf37db7db338338e7bb8ebd8cf38ee5bbd5ceaaaa46f257e6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d", size = 146255 }, + { url = 
"https://files.pythonhosted.org/packages/0c/75/1ed813c3ffd200b1f3e71121c95da3f79e6d2a96120163443b3ad1057505/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88", size = 140251 }, + { url = "https://files.pythonhosted.org/packages/7d/0d/6f32255c1979653b448d3c709583557a4d24ff97ac4f3a5be156b2e6a210/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90", size = 148474 }, + { url = "https://files.pythonhosted.org/packages/ac/a0/c1b5298de4670d997101fef95b97ac440e8c8d8b4efa5a4d1ef44af82f0d/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b", size = 151849 }, + { url = "https://files.pythonhosted.org/packages/04/4f/b3961ba0c664989ba63e30595a3ed0875d6790ff26671e2aae2fdc28a399/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d", size = 149781 }, + { url = "https://files.pythonhosted.org/packages/d8/90/6af4cd042066a4adad58ae25648a12c09c879efa4849c705719ba1b23d8c/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482", size = 144970 }, + { url = "https://files.pythonhosted.org/packages/cc/67/e5e7e0cbfefc4ca79025238b43cdf8a2037854195b37d6417f3d0895c4c2/charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67", size = 94973 }, + { url = "https://files.pythonhosted.org/packages/65/97/fc9bbc54ee13d33dc54a7fcf17b26368b18505500fc01e228c27b5222d80/charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b", size = 102308 }, + { url = "https://files.pythonhosted.org/packages/54/2f/28659eee7f5d003e0f5a3b572765bf76d6e0fe6601ab1f1b1dd4cba7e4f1/charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa", size = 196326 }, + { url = "https://files.pythonhosted.org/packages/d1/18/92869d5c0057baa973a3ee2af71573be7b084b3c3d428fe6463ce71167f8/charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a", size = 125614 }, + { url = "https://files.pythonhosted.org/packages/d6/27/327904c5a54a7796bb9f36810ec4173d2df5d88b401d2b95ef53111d214e/charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0", size = 120450 }, + { url = "https://files.pythonhosted.org/packages/a4/23/65af317914a0308495133b2d654cf67b11bbd6ca16637c4e8a38f80a5a69/charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a", size = 140135 }, + { url = "https://files.pythonhosted.org/packages/f2/41/6190102ad521a8aa888519bb014a74251ac4586cde9b38e790901684f9ab/charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242", size = 150413 }, + { url = 
"https://files.pythonhosted.org/packages/7b/ab/f47b0159a69eab9bd915591106859f49670c75f9a19082505ff16f50efc0/charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b", size = 142992 }, + { url = "https://files.pythonhosted.org/packages/28/89/60f51ad71f63aaaa7e51a2a2ad37919985a341a1d267070f212cdf6c2d22/charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62", size = 144871 }, + { url = "https://files.pythonhosted.org/packages/0c/48/0050550275fea585a6e24460b42465020b53375017d8596c96be57bfabca/charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0", size = 146756 }, + { url = "https://files.pythonhosted.org/packages/dc/b5/47f8ee91455946f745e6c9ddbb0f8f50314d2416dd922b213e7d5551ad09/charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd", size = 141034 }, + { url = "https://files.pythonhosted.org/packages/84/79/5c731059ebab43e80bf61fa51666b9b18167974b82004f18c76378ed31a3/charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be", size = 149434 }, + { url = "https://files.pythonhosted.org/packages/ca/f3/0719cd09fc4dc42066f239cb3c48ced17fc3316afca3e2a30a4756fe49ab/charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d", size = 152443 }, + { url = "https://files.pythonhosted.org/packages/f7/0e/c6357297f1157c8e8227ff337e93fd0a90e498e3d6ab96b2782204ecae48/charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3", size = 150294 }, + { url = "https://files.pythonhosted.org/packages/54/9a/acfa96dc4ea8c928040b15822b59d0863d6e1757fba8bd7de3dc4f761c13/charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742", size = 145314 }, + { url = "https://files.pythonhosted.org/packages/73/1c/b10a63032eaebb8d7bcb8544f12f063f41f5f463778ac61da15d9985e8b6/charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2", size = 94724 }, + { url = "https://files.pythonhosted.org/packages/c5/77/3a78bf28bfaa0863f9cfef278dbeadf55efe064eafff8c7c424ae3c4c1bf/charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca", size = 102159 }, + { url = "https://files.pythonhosted.org/packages/bf/9b/08c0432272d77b04803958a4598a51e2a4b51c06640af8b8f0f908c18bf2/charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079", size = 49446 }, +] + +[[package]] +name = "click" +version = "8.1.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "platform_system == 'Windows'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", size = 336121 } +wheels = [ 
+ { url = "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", size = 97941 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "databend-driver" +version = "0.23.2" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/1c/a5e8e63e47484ac701c42ef5a7cc2a7e122249fc9f5b3bcc07e97c67c03b/databend_driver-0.23.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:58c2df0bbb7e7e46e991f0338a606df483fa5e56ac523fedfde1fa7934bc72f2", size = 5639659 }, + { url = "https://files.pythonhosted.org/packages/75/5a/a6601f10559618b4ef9a62a7e0fb870e6bae313cc508ed8e86d2a4571a35/databend_driver-0.23.2-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:887d0b3ad3d8c999bbbef546dcd7c47c96f6c93e346344b65f449ca0d0177068", size = 5269657 }, + { url = "https://files.pythonhosted.org/packages/df/bb/27d79a2fb52f3d08982502ca743e68da1d5e22fe0f3deafbaadf59ab7b15/databend_driver-0.23.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dc4efa61f72ae82b2c422f194ec87b3eccaf31517d93c8df164980aed15da52", size = 6218513 }, + { url = "https://files.pythonhosted.org/packages/8e/38/b5ca980b1d6c08efe9019408289332ada3a08ce2b8d8fbf166aa42c7de87/databend_driver-0.23.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:961a194fe7d7e4f94041fc1c75df71298c7a6eae0d57a039273ed90da996b6dd", size = 5995034 }, + { url = "https://files.pythonhosted.org/packages/32/8b/9da9c9aa61d9d688192d00b20fcc598cb0e768ceff7457a7567fa3130f33/databend_driver-0.23.2-cp37-abi3-win_amd64.whl", hash = "sha256:2de67cf61d804bf665ca0ce3bfc64491e58e505678499008f73e570a92cab37b", size = 5090985 }, +] + +[[package]] +name = "databend-py" +version = "0.1.0" +source = { editable = "." 
} +dependencies = [ + { name = "black" }, + { name = "databend-driver" }, + { name = "environs" }, + { name = "pytz" }, + { name = "requests" }, + { name = "setuptools" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pytest" }, + { name = "requests" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "black", specifier = ">=24.10.0" }, + { name = "databend-driver", specifier = ">=0.23.2" }, + { name = "environs", specifier = ">=11.2.1" }, + { name = "pytz", specifier = ">=2024.2" }, + { name = "requests", specifier = ">=2.32.3" }, + { name = "setuptools", specifier = ">=75.6.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pytest", specifier = ">=8.3.4" }, + { name = "requests", specifier = ">=2.32.3" }, + { name = "ruff", specifier = ">=0.8.2" }, +] + +[[package]] +name = "environs" +version = "11.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "marshmallow" }, + { name = "python-dotenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/77/08/2b7d9cacf2b27482c9218ee6762336aa47bdb9d07ee26a136d072a328297/environs-11.2.1.tar.gz", hash = "sha256:e068ae3174cef52ba4b95ead22e639056a02465f616e62323e04ae08e86a75a4", size = 27485 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/21/1e0d8de234e9d0c675ea8fd50f9e7ad66fae32c207bc982f1d14f7c0835b/environs-11.2.1-py3-none-any.whl", hash = "sha256:9d2080cf25807a26fc0d4301e2d7b62c64fbf547540f21e3a30cc02bc5fbe948", size = 12923 }, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453 }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, +] + +[[package]] +name = "marshmallow" +version = "3.23.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/6d/30/14d8609f65c8aeddddd3181c06d2c9582da6278f063b27c910bbf9903441/marshmallow-3.23.1.tar.gz", hash = "sha256:3a8dfda6edd8dcdbf216c0ede1d1e78d230a6dc9c5a088f58c4083b974a0d468", size = 177488 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ac/a7/a78ff54e67ef92a3d12126b98eb98ab8abab3de4a8c46d240c87e514d6bb/marshmallow-3.23.1-py3-none-any.whl", hash = "sha256:fece2eb2c941180ea1b7fcbd4a83c51bfdd50093fdd3ad2585ee5e1df2508491", size = 49488 }, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, +] + +[[package]] +name = "packaging" +version = "24.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191 }, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439 }, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = 
"sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, +] + +[[package]] +name = "pytest" +version = "8.3.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/05/35/30e0d83068951d90a01852cb1cef56e5d8a09d20c7f511634cc2f7e0372a/pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761", size = 1445919 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/92/76a1c94d3afee238333bc0a42b82935dd8f9cf8ce9e336ff87ee14d9e1cf/pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6", size = 343083 }, +] + +[[package]] +name = "python-dotenv" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 }, +] + +[[package]] +name = "pytz" +version = "2024.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/31/3c70bf7603cc2dca0f19bdc53b4537a797747a58875b552c8c413d963a3f/pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a", size = 319692 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/c3/005fcca25ce078d2cc29fd559379817424e94885510568bc1bc53d7d5846/pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725", size = 508002 }, +] + +[[package]] +name = "requests" +version = "2.32.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, +] + +[[package]] +name = "ruff" +version = "0.8.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/2b/01245f4f3a727d60bebeacd7ee6d22586c7f62380a2597ddb22c2f45d018/ruff-0.8.2.tar.gz", hash = "sha256:b84f4f414dda8ac7f75075c1fa0b905ac0ff25361f42e6d5da681a465e0f78e5", size = 3349020 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/29/366be70216dba1731a00a41f2f030822b0c96c7c4f3b2c0cdce15cbace74/ruff-0.8.2-py3-none-linux_armv6l.whl", hash = 
"sha256:c49ab4da37e7c457105aadfd2725e24305ff9bc908487a9bf8d548c6dad8bb3d", size = 10530649 }, + { url = "https://files.pythonhosted.org/packages/63/82/a733956540bb388f00df5a3e6a02467b16c0e529132625fe44ce4c5fb9c7/ruff-0.8.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ec016beb69ac16be416c435828be702ee694c0d722505f9c1f35e1b9c0cc1bf5", size = 10274069 }, + { url = "https://files.pythonhosted.org/packages/3d/12/0b3aa14d1d71546c988a28e1b412981c1b80c8a1072e977a2f30c595cc4a/ruff-0.8.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f05cdf8d050b30e2ba55c9b09330b51f9f97d36d4673213679b965d25a785f3c", size = 9909400 }, + { url = "https://files.pythonhosted.org/packages/23/08/f9f08cefb7921784c891c4151cce6ed357ff49e84b84978440cffbc87408/ruff-0.8.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60f578c11feb1d3d257b2fb043ddb47501ab4816e7e221fbb0077f0d5d4e7b6f", size = 10766782 }, + { url = "https://files.pythonhosted.org/packages/e4/71/bf50c321ec179aa420c8ec40adac5ae9cc408d4d37283a485b19a2331ceb/ruff-0.8.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cbd5cf9b0ae8f30eebc7b360171bd50f59ab29d39f06a670b3e4501a36ba5897", size = 10286316 }, + { url = "https://files.pythonhosted.org/packages/f2/83/c82688a2a6117539aea0ce63fdf6c08e60fe0202779361223bcd7f40bd74/ruff-0.8.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b402ddee3d777683de60ff76da801fa7e5e8a71038f57ee53e903afbcefdaa58", size = 11338270 }, + { url = "https://files.pythonhosted.org/packages/7f/d7/bc6a45e5a22e627640388e703160afb1d77c572b1d0fda8b4349f334fc66/ruff-0.8.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:705832cd7d85605cb7858d8a13d75993c8f3ef1397b0831289109e953d833d29", size = 12058579 }, + { url = "https://files.pythonhosted.org/packages/da/3b/64150c93946ec851e6f1707ff586bb460ca671581380c919698d6a9267dc/ruff-0.8.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:32096b41aaf7a5cc095fa45b4167b890e4c8d3fd217603f3634c92a541de7248", size = 11615172 }, + { url = "https://files.pythonhosted.org/packages/e4/9e/cf12b697ea83cfe92ec4509ae414dc4c9b38179cc681a497031f0d0d9a8e/ruff-0.8.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e769083da9439508833cfc7c23e351e1809e67f47c50248250ce1ac52c21fb93", size = 12882398 }, + { url = "https://files.pythonhosted.org/packages/a9/27/96d10863accf76a9c97baceac30b0a52d917eb985a8ac058bd4636aeede0/ruff-0.8.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fe716592ae8a376c2673fdfc1f5c0c193a6d0411f90a496863c99cd9e2ae25d", size = 11176094 }, + { url = "https://files.pythonhosted.org/packages/eb/10/cd2fd77d4a4e7f03c29351be0f53278a393186b540b99df68beb5304fddd/ruff-0.8.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:81c148825277e737493242b44c5388a300584d73d5774defa9245aaef55448b0", size = 10771884 }, + { url = "https://files.pythonhosted.org/packages/71/5d/beabb2ff18870fc4add05fa3a69a4cb1b1d2d6f83f3cf3ae5ab0d52f455d/ruff-0.8.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d261d7850c8367704874847d95febc698a950bf061c9475d4a8b7689adc4f7fa", size = 10382535 }, + { url = "https://files.pythonhosted.org/packages/ae/29/6b3fdf3ad3e35b28d87c25a9ff4c8222ad72485ab783936b2b267250d7a7/ruff-0.8.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1ca4e3a87496dc07d2427b7dd7ffa88a1e597c28dad65ae6433ecb9f2e4f022f", size = 10886995 }, + { url = 
"https://files.pythonhosted.org/packages/e9/dc/859d889b4d9356a1a2cdbc1e4a0dda94052bc5b5300098647e51a58c430b/ruff-0.8.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:729850feed82ef2440aa27946ab39c18cb4a8889c1128a6d589ffa028ddcfc22", size = 11220750 }, + { url = "https://files.pythonhosted.org/packages/0b/08/e8f519f61f1d624264bfd6b8829e4c5f31c3c61193bc3cff1f19dbe7626a/ruff-0.8.2-py3-none-win32.whl", hash = "sha256:ac42caaa0411d6a7d9594363294416e0e48fc1279e1b0e948391695db2b3d5b1", size = 8729396 }, + { url = "https://files.pythonhosted.org/packages/f8/d4/ba1c7ab72aba37a2b71fe48ab95b80546dbad7a7f35ea28cf66fc5cea5f6/ruff-0.8.2-py3-none-win_amd64.whl", hash = "sha256:2aae99ec70abf43372612a838d97bfe77d45146254568d94926e8ed5bbb409ea", size = 9594729 }, + { url = "https://files.pythonhosted.org/packages/23/34/db20e12d3db11b8a2a8874258f0f6d96a9a4d631659d54575840557164c8/ruff-0.8.2-py3-none-win_arm64.whl", hash = "sha256:fb88e2a506b70cfbc2de6fae6681c4f944f7dd5f2fe87233a7233d888bad73e8", size = 9035131 }, +] + +[[package]] +name = "setuptools" +version = "75.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/54/292f26c208734e9a7f067aea4a7e282c080750c4546559b58e2e45413ca0/setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6", size = 1337429 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/21/47d163f615df1d30c094f6c8bbb353619274edccf0327b185cc2493c2c33/setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d", size = 1224032 }, +] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077 }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429 }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067 }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030 }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898 }, + { url = 
"https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894 }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319 }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273 }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310 }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309 }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762 }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453 }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486 }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349 }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159 }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243 }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645 }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584 }, + { url = 
"https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875 }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418 }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708 }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582 }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543 }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691 }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170 }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530 }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666 }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954 }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724 }, + { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383 }, + { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257 }, +] + +[[package]] +name = "typing-extensions" +version = 
"4.12.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/63/22ba4ebfe7430b76388e7cd448d5478814d3032121827c12a2cc287e2260/urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9", size = 300677 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/d9/5f4c13cecde62396b0d3fe530a50ccea91e7dfc1ccf0e09c228841bb5ba8/urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", size = 126338 }, +]