From b65096a4d4fe4bccf7a4c1ca6ce62ce429596930 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Wed, 21 Feb 2024 10:48:32 +0100
Subject: [PATCH] [pre-commit.ci] pre-commit autoupdate (#147)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* [pre-commit.ci] pre-commit autoupdate

updates:
- [github.com/PyCQA/bandit: 1.7.6 → 1.7.7](https://github.com/PyCQA/bandit/compare/1.7.6...1.7.7)
- [github.com/astral-sh/ruff-pre-commit: v0.1.14 → v0.2.2](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.14...v0.2.2)
- [github.com/psf/black: 23.12.1 → 24.2.0](https://github.com/psf/black/compare/23.12.1...24.2.0)

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Specify directory to skip assert_used for Bandit differently

See https://github.com/PyCQA/bandit/issues/1106

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Pieter Gijsbers
---
 .pre-commit-config.yaml           | 6 +++---
 pyproject.toml                    | 2 +-
 src/database/datasets.py          | 1 +
 src/routers/openml/datasets.py    | 1 +
 src/schemas/datasets/dcat.py      | 1 +
 src/schemas/datasets/mldcat_ap.py | 7 ++++---
 6 files changed, 11 insertions(+), 7 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 6790bbf..8fa9729 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -18,7 +18,7 @@ repos:
 #    - id: no-commit-to-branch

   - repo: https://github.com/PyCQA/bandit
-    rev: '1.7.6'
+    rev: '1.7.7'
     hooks:
     - id: bandit
       args: [-c, pyproject.toml]
@@ -33,12 +33,12 @@ repos:
       - pytest

   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.1.14
+    rev: v0.2.2
     hooks:
     - id: ruff
       args: [--fix, --exit-non-zero-on-fix]

   - repo: https://github.com/psf/black
-    rev: 23.12.1
+    rev: 24.2.0
     hooks:
     - id: black
diff --git a/pyproject.toml b/pyproject.toml
index 6edaa05..a747c9f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -39,7 +39,7 @@ docs = [
 "Bug Tracker" = "https://github.com/openml/server-api/issues"

 [tool.bandit.assert_used]
-skips = ["tests/*"]
+skips = ["./tests/*"]

 [tool.black]
 line-length = 100
diff --git a/src/database/datasets.py b/src/database/datasets.py
index 0d268b6..5efddcc 100644
--- a/src/database/datasets.py
+++ b/src/database/datasets.py
@@ -1,4 +1,5 @@
 """ Translation from https://github.com/openml/OpenML/blob/c19c9b99568c0fabb001e639ff6724b9a754bbc9/openml_OS/models/api/v1/Api_data.php#L707"""
+
 import datetime
 from collections import defaultdict
 from typing import Iterable
diff --git a/src/routers/openml/datasets.py b/src/routers/openml/datasets.py
index 23e810f..f9d68d2 100644
--- a/src/routers/openml/datasets.py
+++ b/src/routers/openml/datasets.py
@@ -2,6 +2,7 @@
 We add separate endpoints for old-style JSON responses, so they don't clutter the schema of the
 new API, and are easily removed later.
 """
+
 import http.client
 import re
 from datetime import datetime
diff --git a/src/schemas/datasets/dcat.py b/src/schemas/datasets/dcat.py
index 8887fdc..9b2ece8 100644
--- a/src/schemas/datasets/dcat.py
+++ b/src/schemas/datasets/dcat.py
@@ -12,6 +12,7 @@
 make public sector data better searchable across borders and sectors. This can be achieved by
 the exchange of descriptions of data sets among data portals.
""" + import datetime from abc import ABC from typing import Literal, Union diff --git a/src/schemas/datasets/mldcat_ap.py b/src/schemas/datasets/mldcat_ap.py index 936212d..0d9836c 100644 --- a/src/schemas/datasets/mldcat_ap.py +++ b/src/schemas/datasets/mldcat_ap.py @@ -5,6 +5,7 @@ originally envisaged for the description of a machine learning process, developed in collaboration with OpenML. """ + from __future__ import annotations from abc import ABC @@ -270,9 +271,9 @@ class DataService(JsonLDObject): class JsonLDGraph(BaseModel): context: str | dict[str, HttpUrl] = Field(default_factory=dict, serialization_alias="@context") - graph: list[ - Distribution | DataService | Dataset | Quality | Feature | Agent | MD5Checksum - ] = Field(default_factory=list, serialization_alias="@graph") + graph: list[Distribution | DataService | Dataset | Quality | Feature | Agent | MD5Checksum] = ( + Field(default_factory=list, serialization_alias="@graph") + ) model_config = {"populate_by_name": True, "extra": "forbid"}