diff --git a/src/kagglesdk/__init__.py b/src/kagglesdk/__init__.py index 4de60423..1bb9cb3c 100644 --- a/src/kagglesdk/__init__.py +++ b/src/kagglesdk/__init__.py @@ -1,3 +1,5 @@ +__version__ = "0.1.0" + from kagglesdk.kaggle_client import KaggleClient from kagglesdk.kaggle_creds import KaggleCredentials from kagglesdk.kaggle_env import get_access_token_from_env, KaggleEnv diff --git a/src/kagglesdk/benchmarks/types/benchmark_types.py b/src/kagglesdk/benchmarks/types/benchmark_types.py index a0a8ef0d..2e30e7ab 100644 --- a/src/kagglesdk/benchmarks/types/benchmark_types.py +++ b/src/kagglesdk/benchmarks/types/benchmark_types.py @@ -2,298 +2,12 @@ from kagglesdk.kaggle_object import * from typing import Optional, List -class BenchmarkVersionIdentifier(KaggleObject): - r""" - Identifier for selecting a specific benchmark version. - - Attributes: - version_id_selector (VersionIdSelector) - published_latest_selector (PublishedLatestSelector) - published_number_selector (PublishedNumberSelector) - draft_selector (DraftSelector) - benchmark_slug_selector (BenchmarkSlugSelector) - """ - - def __init__(self): - self._version_id_selector = None - self._published_latest_selector = None - self._published_number_selector = None - self._draft_selector = None - self._benchmark_slug_selector = None - self._freeze() - - @property - def version_id_selector(self) -> Optional['VersionIdSelector']: - return self._version_id_selector or None - - @version_id_selector.setter - def version_id_selector(self, version_id_selector: Optional['VersionIdSelector']): - if version_id_selector is None: - del self.version_id_selector - return - if not isinstance(version_id_selector, VersionIdSelector): - raise TypeError('version_id_selector must be of type VersionIdSelector') - del self.published_latest_selector - del self.published_number_selector - del self.draft_selector - del self.benchmark_slug_selector - self._version_id_selector = version_id_selector - - @property - def published_latest_selector(self) -> Optional['PublishedLatestSelector']: - return self._published_latest_selector or None - - @published_latest_selector.setter - def published_latest_selector(self, published_latest_selector: Optional['PublishedLatestSelector']): - if published_latest_selector is None: - del self.published_latest_selector - return - if not isinstance(published_latest_selector, PublishedLatestSelector): - raise TypeError('published_latest_selector must be of type PublishedLatestSelector') - del self.version_id_selector - del self.published_number_selector - del self.draft_selector - del self.benchmark_slug_selector - self._published_latest_selector = published_latest_selector - - @property - def published_number_selector(self) -> Optional['PublishedNumberSelector']: - return self._published_number_selector or None - - @published_number_selector.setter - def published_number_selector(self, published_number_selector: Optional['PublishedNumberSelector']): - if published_number_selector is None: - del self.published_number_selector - return - if not isinstance(published_number_selector, PublishedNumberSelector): - raise TypeError('published_number_selector must be of type PublishedNumberSelector') - del self.version_id_selector - del self.published_latest_selector - del self.draft_selector - del self.benchmark_slug_selector - self._published_number_selector = published_number_selector - - @property - def draft_selector(self) -> Optional['DraftSelector']: - return self._draft_selector or None - - @draft_selector.setter - def 
draft_selector(self, draft_selector: Optional['DraftSelector']): - if draft_selector is None: - del self.draft_selector - return - if not isinstance(draft_selector, DraftSelector): - raise TypeError('draft_selector must be of type DraftSelector') - del self.version_id_selector - del self.published_latest_selector - del self.published_number_selector - del self.benchmark_slug_selector - self._draft_selector = draft_selector - - @property - def benchmark_slug_selector(self) -> Optional['BenchmarkSlugSelector']: - return self._benchmark_slug_selector or None - - @benchmark_slug_selector.setter - def benchmark_slug_selector(self, benchmark_slug_selector: Optional['BenchmarkSlugSelector']): - if benchmark_slug_selector is None: - del self.benchmark_slug_selector - return - if not isinstance(benchmark_slug_selector, BenchmarkSlugSelector): - raise TypeError('benchmark_slug_selector must be of type BenchmarkSlugSelector') - del self.version_id_selector - del self.published_latest_selector - del self.published_number_selector - del self.draft_selector - self._benchmark_slug_selector = benchmark_slug_selector - - -class DraftSelector(KaggleObject): - r""" - Select by the parent benchmark's single draft version (always exists). - - Attributes: - parent_benchmark_identifier (BenchmarkIdentifier) - """ - - def __init__(self): - self._parent_benchmark_identifier = None - self._freeze() - - @property - def parent_benchmark_identifier(self) -> Optional['BenchmarkIdentifier']: - return self._parent_benchmark_identifier - - @parent_benchmark_identifier.setter - def parent_benchmark_identifier(self, parent_benchmark_identifier: Optional['BenchmarkIdentifier']): - if parent_benchmark_identifier is None: - del self.parent_benchmark_identifier - return - if not isinstance(parent_benchmark_identifier, BenchmarkIdentifier): - raise TypeError('parent_benchmark_identifier must be of type BenchmarkIdentifier') - self._parent_benchmark_identifier = parent_benchmark_identifier - - -class PublishedLatestSelector(KaggleObject): - r""" - Select by the parent benchmark's latest published version, defaults to NULL. - - Attributes: - parent_benchmark_identifier (BenchmarkIdentifier) - """ - - def __init__(self): - self._parent_benchmark_identifier = None - self._freeze() - - @property - def parent_benchmark_identifier(self) -> Optional['BenchmarkIdentifier']: - return self._parent_benchmark_identifier - - @parent_benchmark_identifier.setter - def parent_benchmark_identifier(self, parent_benchmark_identifier: Optional['BenchmarkIdentifier']): - if parent_benchmark_identifier is None: - del self.parent_benchmark_identifier - return - if not isinstance(parent_benchmark_identifier, BenchmarkIdentifier): - raise TypeError('parent_benchmark_identifier must be of type BenchmarkIdentifier') - self._parent_benchmark_identifier = parent_benchmark_identifier - - -class PublishedNumberSelector(KaggleObject): - r""" - Select by the parent benchmark's published version at a particular version - number. Defaults to NULL. 
- - Attributes: - parent_benchmark_identifier (BenchmarkIdentifier) - version_number (int) - """ - - def __init__(self): - self._parent_benchmark_identifier = None - self._version_number = 0 - self._freeze() - - @property - def parent_benchmark_identifier(self) -> Optional['BenchmarkIdentifier']: - return self._parent_benchmark_identifier - - @parent_benchmark_identifier.setter - def parent_benchmark_identifier(self, parent_benchmark_identifier: Optional['BenchmarkIdentifier']): - if parent_benchmark_identifier is None: - del self.parent_benchmark_identifier - return - if not isinstance(parent_benchmark_identifier, BenchmarkIdentifier): - raise TypeError('parent_benchmark_identifier must be of type BenchmarkIdentifier') - self._parent_benchmark_identifier = parent_benchmark_identifier - - @property - def version_number(self) -> int: - return self._version_number - - @version_number.setter - def version_number(self, version_number: int): - if version_number is None: - del self.version_number - return - if not isinstance(version_number, int): - raise TypeError('version_number must be of type int') - self._version_number = version_number - - -class VersionIdSelector(KaggleObject): - r""" - Select by the benchmark version id. Optional parent benchmark id. - - Attributes: - parent_benchmark_identifier (BenchmarkIdentifier) - id (int) - """ - - def __init__(self): - self._parent_benchmark_identifier = None - self._id = 0 - self._freeze() - - @property - def parent_benchmark_identifier(self) -> Optional['BenchmarkIdentifier']: - return self._parent_benchmark_identifier or None - - @parent_benchmark_identifier.setter - def parent_benchmark_identifier(self, parent_benchmark_identifier: Optional[Optional['BenchmarkIdentifier']]): - if parent_benchmark_identifier is None: - del self.parent_benchmark_identifier - return - if not isinstance(parent_benchmark_identifier, BenchmarkIdentifier): - raise TypeError('parent_benchmark_identifier must be of type BenchmarkIdentifier') - self._parent_benchmark_identifier = parent_benchmark_identifier - - @property - def id(self) -> int: - return self._id - - @id.setter - def id(self, id: int): - if id is None: - del self.id - return - if not isinstance(id, int): - raise TypeError('id must be of type int') - self._id = id - - -class BenchmarkIdentifier(KaggleObject): - r""" - Identifier for selecting a specific benchmark. - - Attributes: - id (int) - slug (str) - """ - - def __init__(self): - self._id = None - self._slug = None - self._freeze() - - @property - def id(self) -> int: - return self._id or 0 - - @id.setter - def id(self, id: int): - if id is None: - del self.id - return - if not isinstance(id, int): - raise TypeError('id must be of type int') - del self.slug - self._id = id - - @property - def slug(self) -> str: - return self._slug or "" - - @slug.setter - def slug(self, slug: str): - if slug is None: - del self.slug - return - if not isinstance(slug, str): - raise TypeError('slug must be of type str') - del self.id - self._slug = slug - - class BenchmarkResult(KaggleObject): r""" TODO(bml): Integrate this proto with personal benchmarks trials. Represents the outcome of a benchmark run. All fields are immutable. Attributes: - task_version_id (int) - Convenience fields for this result (for the frontend): numeric_result (NumericResult) boolean_result (bool) custom_additional_results (CustomResult) @@ -304,16 +18,18 @@ class BenchmarkResult(KaggleObject): Numeric result on the public set of the benchmark version. 
evaluation_date (datetime) The date on which evaluation was performed. + task_version_id (int) + Convenience fields for this result (for the frontend): """ def __init__(self): - self._task_version_id = None self._numeric_result = None self._boolean_result = None self._custom_additional_results = [] self._numeric_result_private = None self._numeric_result_public = None self._evaluation_date = None + self._task_version_id = None self._freeze() @property @@ -417,60 +133,6 @@ def evaluation_date(self, evaluation_date: Optional[datetime]): self._evaluation_date = evaluation_date -class BenchmarkSlugSelector(KaggleObject): - r""" - Attributes: - owner_slug (str) - benchmark_slug (str) - version_number (int) - """ - - def __init__(self): - self._owner_slug = "" - self._benchmark_slug = "" - self._version_number = None - self._freeze() - - @property - def owner_slug(self) -> str: - return self._owner_slug - - @owner_slug.setter - def owner_slug(self, owner_slug: str): - if owner_slug is None: - del self.owner_slug - return - if not isinstance(owner_slug, str): - raise TypeError('owner_slug must be of type str') - self._owner_slug = owner_slug - - @property - def benchmark_slug(self) -> str: - return self._benchmark_slug - - @benchmark_slug.setter - def benchmark_slug(self, benchmark_slug: str): - if benchmark_slug is None: - del self.benchmark_slug - return - if not isinstance(benchmark_slug, str): - raise TypeError('benchmark_slug must be of type str') - self._benchmark_slug = benchmark_slug - - @property - def version_number(self) -> int: - return self._version_number or 0 - - @version_number.setter - def version_number(self, version_number: Optional[int]): - if version_number is None: - del self.version_number - return - if not isinstance(version_number, int): - raise TypeError('version_number must be of type int') - self._version_number = version_number - - class CustomResult(KaggleObject): r""" Attributes: @@ -617,51 +279,14 @@ def minus(self, minus: float): self._minus = minus -BenchmarkVersionIdentifier._fields = [ - FieldMetadata("versionIdSelector", "version_id_selector", "_version_id_selector", VersionIdSelector, None, KaggleObjectSerializer(), optional=True), - FieldMetadata("publishedLatestSelector", "published_latest_selector", "_published_latest_selector", PublishedLatestSelector, None, KaggleObjectSerializer(), optional=True), - FieldMetadata("publishedNumberSelector", "published_number_selector", "_published_number_selector", PublishedNumberSelector, None, KaggleObjectSerializer(), optional=True), - FieldMetadata("draftSelector", "draft_selector", "_draft_selector", DraftSelector, None, KaggleObjectSerializer(), optional=True), - FieldMetadata("benchmarkSlugSelector", "benchmark_slug_selector", "_benchmark_slug_selector", BenchmarkSlugSelector, None, KaggleObjectSerializer(), optional=True), -] - -DraftSelector._fields = [ - FieldMetadata("parentBenchmarkIdentifier", "parent_benchmark_identifier", "_parent_benchmark_identifier", BenchmarkIdentifier, None, KaggleObjectSerializer()), -] - -PublishedLatestSelector._fields = [ - FieldMetadata("parentBenchmarkIdentifier", "parent_benchmark_identifier", "_parent_benchmark_identifier", BenchmarkIdentifier, None, KaggleObjectSerializer()), -] - -PublishedNumberSelector._fields = [ - FieldMetadata("parentBenchmarkIdentifier", "parent_benchmark_identifier", "_parent_benchmark_identifier", BenchmarkIdentifier, None, KaggleObjectSerializer()), - FieldMetadata("versionNumber", "version_number", "_version_number", int, 0, 
PredefinedSerializer()), -] - -VersionIdSelector._fields = [ - FieldMetadata("parentBenchmarkIdentifier", "parent_benchmark_identifier", "_parent_benchmark_identifier", BenchmarkIdentifier, None, KaggleObjectSerializer(), optional=True), - FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), -] - -BenchmarkIdentifier._fields = [ - FieldMetadata("id", "id", "_id", int, None, PredefinedSerializer(), optional=True), - FieldMetadata("slug", "slug", "_slug", str, None, PredefinedSerializer(), optional=True), -] - BenchmarkResult._fields = [ - FieldMetadata("taskVersionId", "task_version_id", "_task_version_id", int, None, PredefinedSerializer(), optional=True), FieldMetadata("numericResult", "numeric_result", "_numeric_result", NumericResult, None, KaggleObjectSerializer(), optional=True), FieldMetadata("booleanResult", "boolean_result", "_boolean_result", bool, None, PredefinedSerializer(), optional=True), FieldMetadata("customAdditionalResults", "custom_additional_results", "_custom_additional_results", CustomResult, [], ListSerializer(KaggleObjectSerializer())), FieldMetadata("numericResultPrivate", "numeric_result_private", "_numeric_result_private", NumericResult, None, KaggleObjectSerializer(), optional=True), FieldMetadata("numericResultPublic", "numeric_result_public", "_numeric_result_public", NumericResult, None, KaggleObjectSerializer(), optional=True), FieldMetadata("evaluationDate", "evaluation_date", "_evaluation_date", datetime, None, DateTimeSerializer(), optional=True), -] - -BenchmarkSlugSelector._fields = [ - FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), - FieldMetadata("benchmarkSlug", "benchmark_slug", "_benchmark_slug", str, "", PredefinedSerializer()), - FieldMetadata("versionNumber", "version_number", "_version_number", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("taskVersionId", "task_version_id", "_task_version_id", int, None, PredefinedSerializer(), optional=True), ] CustomResult._fields = [ diff --git a/src/kagglesdk/benchmarks/types/benchmarks_api_service.py b/src/kagglesdk/benchmarks/types/benchmarks_api_service.py index 1cd6fb77..a0887765 100644 --- a/src/kagglesdk/benchmarks/types/benchmarks_api_service.py +++ b/src/kagglesdk/benchmarks/types/benchmarks_api_service.py @@ -1,4 +1,4 @@ -from kagglesdk.benchmarks.types.benchmark_types import BenchmarkResult, BenchmarkVersionIdentifier +from kagglesdk.benchmarks.types.benchmark_types import BenchmarkResult from kagglesdk.kaggle_object import * from typing import List, Optional @@ -156,38 +156,66 @@ def rows(self, rows: Optional[List[Optional['ApiBenchmarkLeaderboard.Leaderboard class ApiGetBenchmarkLeaderboardRequest(KaggleObject): r""" Attributes: - identifier (BenchmarkVersionIdentifier) + owner_slug (str) + benchmark_slug (str) + version_number (int) """ def __init__(self): - self._identifier = None + self._owner_slug = "" + self._benchmark_slug = "" + self._version_number = None self._freeze() @property - def identifier(self) -> Optional['BenchmarkVersionIdentifier']: - return self._identifier + def owner_slug(self) -> str: + return self._owner_slug - @identifier.setter - def identifier(self, identifier: Optional['BenchmarkVersionIdentifier']): - if identifier is None: - del self.identifier + @owner_slug.setter + def owner_slug(self, owner_slug: str): + if owner_slug is None: + del self.owner_slug return - if not isinstance(identifier, BenchmarkVersionIdentifier): - raise TypeError('identifier must be of type 
BenchmarkVersionIdentifier') - self._identifier = identifier + if not isinstance(owner_slug, str): + raise TypeError('owner_slug must be of type str') + self._owner_slug = owner_slug - def endpoint(self): - path = '/api/v1/benchmarks/leaderboard' - return path.format_map(self.to_field_map(self)) + @property + def benchmark_slug(self) -> str: + return self._benchmark_slug + @benchmark_slug.setter + def benchmark_slug(self, benchmark_slug: str): + if benchmark_slug is None: + del self.benchmark_slug + return + if not isinstance(benchmark_slug, str): + raise TypeError('benchmark_slug must be of type str') + self._benchmark_slug = benchmark_slug - @staticmethod - def method(): - return 'POST' + @property + def version_number(self) -> int: + return self._version_number or 0 + + @version_number.setter + def version_number(self, version_number: Optional[int]): + if version_number is None: + del self.version_number + return + if not isinstance(version_number, int): + raise TypeError('version_number must be of type int') + self._version_number = version_number + + def endpoint(self): + if self.version_number: + path = '/api/v1/benchmarks/{owner_slug}/{benchmark_slug}/versions/{version_number}/leaderboard' + else: + path = '/api/v1/benchmarks/{owner_slug}/{benchmark_slug}/leaderboard' + return path.format_map(self.to_field_map(self)) @staticmethod - def body_fields(): - return '*' + def endpoint_path(): + return '/api/v1/benchmarks/{owner_slug}/{benchmark_slug}/leaderboard' ApiBenchmarkLeaderboard.LeaderboardRow._fields = [ @@ -208,6 +236,8 @@ def body_fields(): ] ApiGetBenchmarkLeaderboardRequest._fields = [ - FieldMetadata("identifier", "identifier", "_identifier", BenchmarkVersionIdentifier, None, KaggleObjectSerializer()), + FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), + FieldMetadata("benchmarkSlug", "benchmark_slug", "_benchmark_slug", str, "", PredefinedSerializer()), + FieldMetadata("versionNumber", "version_number", "_version_number", int, None, PredefinedSerializer(), optional=True), ] diff --git a/src/kagglesdk/competitions/types/competition_api_service.py b/src/kagglesdk/competitions/types/competition_api_service.py index 5e0432e4..20ba3122 100644 --- a/src/kagglesdk/competitions/types/competition_api_service.py +++ b/src/kagglesdk/competitions/types/competition_api_service.py @@ -4,6 +4,137 @@ from kagglesdk.kaggle_object import * from typing import Optional, List +class ApiCategory(KaggleObject): + r""" + TODO(erdalsivri): Consider reusing with Kaggle.Sdk.Datasets.ApiCategory. 
+ + Attributes: + ref (str) + name (str) + description (str) + full_path (str) + competition_count (int) + dataset_count (int) + script_count (int) + total_count (int) + """ + + def __init__(self): + self._ref = "" + self._name = None + self._description = None + self._full_path = None + self._competition_count = 0 + self._dataset_count = 0 + self._script_count = 0 + self._total_count = 0 + self._freeze() + + @property + def ref(self) -> str: + return self._ref + + @ref.setter + def ref(self, ref: str): + if ref is None: + del self.ref + return + if not isinstance(ref, str): + raise TypeError('ref must be of type str') + self._ref = ref + + @property + def name(self) -> str: + return self._name or "" + + @name.setter + def name(self, name: Optional[str]): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: Optional[str]): + if description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + @property + def full_path(self) -> str: + return self._full_path or "" + + @full_path.setter + def full_path(self, full_path: Optional[str]): + if full_path is None: + del self.full_path + return + if not isinstance(full_path, str): + raise TypeError('full_path must be of type str') + self._full_path = full_path + + @property + def competition_count(self) -> int: + return self._competition_count + + @competition_count.setter + def competition_count(self, competition_count: int): + if competition_count is None: + del self.competition_count + return + if not isinstance(competition_count, int): + raise TypeError('competition_count must be of type int') + self._competition_count = competition_count + + @property + def dataset_count(self) -> int: + return self._dataset_count + + @dataset_count.setter + def dataset_count(self, dataset_count: int): + if dataset_count is None: + del self.dataset_count + return + if not isinstance(dataset_count, int): + raise TypeError('dataset_count must be of type int') + self._dataset_count = dataset_count + + @property + def script_count(self) -> int: + return self._script_count + + @script_count.setter + def script_count(self, script_count: int): + if script_count is None: + del self.script_count + return + if not isinstance(script_count, int): + raise TypeError('script_count must be of type int') + self._script_count = script_count + + @property + def total_count(self) -> int: + return self._total_count + + @total_count.setter + def total_count(self, total_count: int): + if total_count is None: + del self.total_count + return + if not isinstance(total_count, int): + raise TypeError('total_count must be of type int') + self._total_count = total_count + + class ApiCompetition(KaggleObject): r""" Attributes: @@ -660,6 +791,105 @@ def ref(self, ref: int): self._ref = ref +class ApiDataFile(KaggleObject): + r""" + Attributes: + ref (str) + name (str) + description (str) + total_bytes (int) + url (str) + creation_date (datetime) + """ + + def __init__(self): + self._ref = "" + self._name = None + self._description = None + self._total_bytes = 0 + self._url = None + self._creation_date = None + self._freeze() + + @property + def ref(self) -> str: + return self._ref + + @ref.setter + def ref(self, ref: str): + if ref is None: + del 
self.ref + return + if not isinstance(ref, str): + raise TypeError('ref must be of type str') + self._ref = ref + + @property + def name(self) -> str: + return self._name or "" + + @name.setter + def name(self, name: Optional[str]): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: Optional[str]): + if description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + @property + def total_bytes(self) -> int: + return self._total_bytes + + @total_bytes.setter + def total_bytes(self, total_bytes: int): + if total_bytes is None: + del self.total_bytes + return + if not isinstance(total_bytes, int): + raise TypeError('total_bytes must be of type int') + self._total_bytes = total_bytes + + @property + def url(self) -> str: + return self._url or "" + + @url.setter + def url(self, url: Optional[str]): + if url is None: + del self.url + return + if not isinstance(url, str): + raise TypeError('url must be of type str') + self._url = url + + @property + def creation_date(self) -> datetime: + return self._creation_date + + @creation_date.setter + def creation_date(self, creation_date: datetime): + if creation_date is None: + del self.creation_date + return + if not isinstance(creation_date, datetime): + raise TypeError('creation_date must be of type datetime') + self._creation_date = creation_date + + class ApiDownloadDataFileRequest(KaggleObject): r""" Attributes: @@ -1967,235 +2197,16 @@ def url(self, url: Optional[str]): self._url = url -class ApiCategory(KaggleObject): - r""" - TODO(erdalsivri): Consider reusing with Kaggle.Sdk.Datasets.ApiCategory. 
- - Attributes: - ref (str) - name (str) - description (str) - full_path (str) - competition_count (int) - dataset_count (int) - script_count (int) - total_count (int) - """ - - def __init__(self): - self._ref = "" - self._name = None - self._description = None - self._full_path = None - self._competition_count = 0 - self._dataset_count = 0 - self._script_count = 0 - self._total_count = 0 - self._freeze() - - @property - def ref(self) -> str: - return self._ref - - @ref.setter - def ref(self, ref: str): - if ref is None: - del self.ref - return - if not isinstance(ref, str): - raise TypeError('ref must be of type str') - self._ref = ref - - @property - def name(self) -> str: - return self._name or "" - - @name.setter - def name(self, name: Optional[str]): - if name is None: - del self.name - return - if not isinstance(name, str): - raise TypeError('name must be of type str') - self._name = name - - @property - def description(self) -> str: - return self._description or "" - - @description.setter - def description(self, description: Optional[str]): - if description is None: - del self.description - return - if not isinstance(description, str): - raise TypeError('description must be of type str') - self._description = description - - @property - def full_path(self) -> str: - return self._full_path or "" - - @full_path.setter - def full_path(self, full_path: Optional[str]): - if full_path is None: - del self.full_path - return - if not isinstance(full_path, str): - raise TypeError('full_path must be of type str') - self._full_path = full_path - - @property - def competition_count(self) -> int: - return self._competition_count - - @competition_count.setter - def competition_count(self, competition_count: int): - if competition_count is None: - del self.competition_count - return - if not isinstance(competition_count, int): - raise TypeError('competition_count must be of type int') - self._competition_count = competition_count - - @property - def dataset_count(self) -> int: - return self._dataset_count - - @dataset_count.setter - def dataset_count(self, dataset_count: int): - if dataset_count is None: - del self.dataset_count - return - if not isinstance(dataset_count, int): - raise TypeError('dataset_count must be of type int') - self._dataset_count = dataset_count - - @property - def script_count(self) -> int: - return self._script_count - - @script_count.setter - def script_count(self, script_count: int): - if script_count is None: - del self.script_count - return - if not isinstance(script_count, int): - raise TypeError('script_count must be of type int') - self._script_count = script_count - - @property - def total_count(self) -> int: - return self._total_count - - @total_count.setter - def total_count(self, total_count: int): - if total_count is None: - del self.total_count - return - if not isinstance(total_count, int): - raise TypeError('total_count must be of type int') - self._total_count = total_count - - -class ApiDataFile(KaggleObject): - r""" - Attributes: - ref (str) - name (str) - description (str) - total_bytes (int) - url (str) - creation_date (datetime) - """ - - def __init__(self): - self._ref = "" - self._name = None - self._description = None - self._total_bytes = 0 - self._url = None - self._creation_date = None - self._freeze() - - @property - def ref(self) -> str: - return self._ref - - @ref.setter - def ref(self, ref: str): - if ref is None: - del self.ref - return - if not isinstance(ref, str): - raise TypeError('ref must be of type str') - self._ref = ref - - 
@property - def name(self) -> str: - return self._name or "" - - @name.setter - def name(self, name: Optional[str]): - if name is None: - del self.name - return - if not isinstance(name, str): - raise TypeError('name must be of type str') - self._name = name - - @property - def description(self) -> str: - return self._description or "" - - @description.setter - def description(self, description: Optional[str]): - if description is None: - del self.description - return - if not isinstance(description, str): - raise TypeError('description must be of type str') - self._description = description - - @property - def total_bytes(self) -> int: - return self._total_bytes - - @total_bytes.setter - def total_bytes(self, total_bytes: int): - if total_bytes is None: - del self.total_bytes - return - if not isinstance(total_bytes, int): - raise TypeError('total_bytes must be of type int') - self._total_bytes = total_bytes - - @property - def url(self) -> str: - return self._url or "" - - @url.setter - def url(self, url: Optional[str]): - if url is None: - del self.url - return - if not isinstance(url, str): - raise TypeError('url must be of type str') - self._url = url - - @property - def creation_date(self) -> datetime: - return self._creation_date - - @creation_date.setter - def creation_date(self, creation_date: datetime): - if creation_date is None: - del self.creation_date - return - if not isinstance(creation_date, datetime): - raise TypeError('creation_date must be of type datetime') - self._creation_date = creation_date - +ApiCategory._fields = [ + FieldMetadata("ref", "ref", "_ref", str, "", PredefinedSerializer()), + FieldMetadata("name", "name", "_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("fullPath", "full_path", "_full_path", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("competitionCount", "competition_count", "_competition_count", int, 0, PredefinedSerializer()), + FieldMetadata("datasetCount", "dataset_count", "_dataset_count", int, 0, PredefinedSerializer()), + FieldMetadata("scriptCount", "script_count", "_script_count", int, 0, PredefinedSerializer()), + FieldMetadata("totalCount", "total_count", "_total_count", int, 0, PredefinedSerializer()), +] ApiCompetition._fields = [ FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), @@ -2251,6 +2262,15 @@ def creation_date(self, creation_date: datetime): FieldMetadata("ref", "ref", "_ref", int, 0, PredefinedSerializer()), ] +ApiDataFile._fields = [ + FieldMetadata("ref", "ref", "_ref", str, "", PredefinedSerializer()), + FieldMetadata("name", "name", "_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("totalBytes", "total_bytes", "_total_bytes", int, 0, PredefinedSerializer()), + FieldMetadata("url", "url", "_url", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("creationDate", "creation_date", "_creation_date", datetime, None, DateTimeSerializer()), +] + ApiDownloadDataFileRequest._fields = [ FieldMetadata("competitionName", "competition_name", "_competition_name", str, "", PredefinedSerializer()), FieldMetadata("fileName", "file_name", "_file_name", str, "", PredefinedSerializer()), @@ -2371,23 +2391,3 @@ def creation_date(self, creation_date: datetime): FieldMetadata("url", "url", "_url", str, None, 
PredefinedSerializer(), optional=True), ] -ApiCategory._fields = [ - FieldMetadata("ref", "ref", "_ref", str, "", PredefinedSerializer()), - FieldMetadata("name", "name", "_name", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("fullPath", "full_path", "_full_path", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("competitionCount", "competition_count", "_competition_count", int, 0, PredefinedSerializer()), - FieldMetadata("datasetCount", "dataset_count", "_dataset_count", int, 0, PredefinedSerializer()), - FieldMetadata("scriptCount", "script_count", "_script_count", int, 0, PredefinedSerializer()), - FieldMetadata("totalCount", "total_count", "_total_count", int, 0, PredefinedSerializer()), -] - -ApiDataFile._fields = [ - FieldMetadata("ref", "ref", "_ref", str, "", PredefinedSerializer()), - FieldMetadata("name", "name", "_name", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("totalBytes", "total_bytes", "_total_bytes", int, 0, PredefinedSerializer()), - FieldMetadata("url", "url", "_url", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("creationDate", "creation_date", "_creation_date", datetime, None, DateTimeSerializer()), -] - diff --git a/src/kagglesdk/datasets/databundles/types/databundle_api_types.py b/src/kagglesdk/datasets/databundles/types/databundle_api_types.py index 617108fb..54806a25 100644 --- a/src/kagglesdk/datasets/databundles/types/databundle_api_types.py +++ b/src/kagglesdk/datasets/databundles/types/databundle_api_types.py @@ -2,6 +2,170 @@ from kagglesdk.kaggle_object import * from typing import List, Optional +class ApiColumnSummaryInfo(KaggleObject): + r""" + Attributes: + total_column_count (int) + column_types (ApiColumnTypeSummaryInfo) + """ + + def __init__(self): + self._total_column_count = 0 + self._column_types = [] + self._freeze() + + @property + def total_column_count(self) -> int: + return self._total_column_count + + @total_column_count.setter + def total_column_count(self, total_column_count: int): + if total_column_count is None: + del self.total_column_count + return + if not isinstance(total_column_count, int): + raise TypeError('total_column_count must be of type int') + self._total_column_count = total_column_count + + @property + def column_types(self) -> Optional[List[Optional['ApiColumnTypeSummaryInfo']]]: + return self._column_types + + @column_types.setter + def column_types(self, column_types: Optional[List[Optional['ApiColumnTypeSummaryInfo']]]): + if column_types is None: + del self.column_types + return + if not isinstance(column_types, list): + raise TypeError('column_types must be of type list') + if not all([isinstance(t, ApiColumnTypeSummaryInfo) for t in column_types]): + raise TypeError('column_types must contain only items of type ApiColumnTypeSummaryInfo') + self._column_types = column_types + + +class ApiColumnTypeSummaryInfo(KaggleObject): + r""" + Attributes: + column_type (str) + column_count (int) + """ + + def __init__(self): + self._column_type = None + self._column_count = 0 + self._freeze() + + @property + def column_type(self) -> str: + return self._column_type or "" + + @column_type.setter + def column_type(self, column_type: Optional[str]): + if column_type is None: + del self.column_type + return + if not 
isinstance(column_type, str): + raise TypeError('column_type must be of type str') + self._column_type = column_type + + @property + def column_count(self) -> int: + return self._column_count + + @column_count.setter + def column_count(self, column_count: int): + if column_count is None: + del self.column_count + return + if not isinstance(column_count, int): + raise TypeError('column_count must be of type int') + self._column_count = column_count + + +class ApiDirectory(KaggleObject): + r""" + Attributes: + name (str) + relative_url (str) + total_directories (int) + total_files (int) + total_children (int) + """ + + def __init__(self): + self._name = None + self._relative_url = None + self._total_directories = 0 + self._total_files = 0 + self._total_children = 0 + self._freeze() + + @property + def name(self) -> str: + return self._name or "" + + @name.setter + def name(self, name: Optional[str]): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + @property + def relative_url(self) -> str: + return self._relative_url or "" + + @relative_url.setter + def relative_url(self, relative_url: Optional[str]): + if relative_url is None: + del self.relative_url + return + if not isinstance(relative_url, str): + raise TypeError('relative_url must be of type str') + self._relative_url = relative_url + + @property + def total_directories(self) -> int: + return self._total_directories + + @total_directories.setter + def total_directories(self, total_directories: int): + if total_directories is None: + del self.total_directories + return + if not isinstance(total_directories, int): + raise TypeError('total_directories must be of type int') + self._total_directories = total_directories + + @property + def total_files(self) -> int: + return self._total_files + + @total_files.setter + def total_files(self, total_files: int): + if total_files is None: + del self.total_files + return + if not isinstance(total_files, int): + raise TypeError('total_files must be of type int') + self._total_files = total_files + + @property + def total_children(self) -> int: + return self._total_children + + @total_children.setter + def total_children(self, total_children: int): + if total_children is None: + del self.total_children + return + if not isinstance(total_children, int): + raise TypeError('total_children must be of type int') + self._total_children = total_children + + class ApiDirectoryContent(KaggleObject): r""" Attributes: @@ -189,6 +353,60 @@ def description(self, description: Optional[str]): self._description = description +class ApiFileExtensionSummaryInfo(KaggleObject): + r""" + Attributes: + extension (str) + file_count (int) + total_size (int) + """ + + def __init__(self): + self._extension = "" + self._file_count = 0 + self._total_size = 0 + self._freeze() + + @property + def extension(self) -> str: + return self._extension + + @extension.setter + def extension(self, extension: str): + if extension is None: + del self.extension + return + if not isinstance(extension, str): + raise TypeError('extension must be of type str') + self._extension = extension + + @property + def file_count(self) -> int: + return self._file_count + + @file_count.setter + def file_count(self, file_count: int): + if file_count is None: + del self.file_count + return + if not isinstance(file_count, int): + raise TypeError('file_count must be of type int') + self._file_count = file_count + + @property + def total_size(self) -> int: + return 
self._total_size + + @total_size.setter + def total_size(self, total_size: int): + if total_size is None: + del self.total_size + return + if not isinstance(total_size, int): + raise TypeError('total_size must be of type int') + self._total_size = total_size + + class ApiFilesSummary(KaggleObject): r""" Attributes: @@ -269,223 +487,23 @@ def file_types(self, file_types: Optional[List[Optional['ApiFileExtensionSummary self._file_types = file_types -class ApiColumnSummaryInfo(KaggleObject): - r""" - Attributes: - total_column_count (int) - column_types (ApiColumnTypeSummaryInfo) - """ - - def __init__(self): - self._total_column_count = 0 - self._column_types = [] - self._freeze() - - @property - def total_column_count(self) -> int: - return self._total_column_count - - @total_column_count.setter - def total_column_count(self, total_column_count: int): - if total_column_count is None: - del self.total_column_count - return - if not isinstance(total_column_count, int): - raise TypeError('total_column_count must be of type int') - self._total_column_count = total_column_count - - @property - def column_types(self) -> Optional[List[Optional['ApiColumnTypeSummaryInfo']]]: - return self._column_types - - @column_types.setter - def column_types(self, column_types: Optional[List[Optional['ApiColumnTypeSummaryInfo']]]): - if column_types is None: - del self.column_types - return - if not isinstance(column_types, list): - raise TypeError('column_types must be of type list') - if not all([isinstance(t, ApiColumnTypeSummaryInfo) for t in column_types]): - raise TypeError('column_types must contain only items of type ApiColumnTypeSummaryInfo') - self._column_types = column_types - - -class ApiColumnTypeSummaryInfo(KaggleObject): - r""" - Attributes: - column_type (str) - column_count (int) - """ - - def __init__(self): - self._column_type = None - self._column_count = 0 - self._freeze() - - @property - def column_type(self) -> str: - return self._column_type or "" - - @column_type.setter - def column_type(self, column_type: Optional[str]): - if column_type is None: - del self.column_type - return - if not isinstance(column_type, str): - raise TypeError('column_type must be of type str') - self._column_type = column_type - - @property - def column_count(self) -> int: - return self._column_count - - @column_count.setter - def column_count(self, column_count: int): - if column_count is None: - del self.column_count - return - if not isinstance(column_count, int): - raise TypeError('column_count must be of type int') - self._column_count = column_count - - -class ApiDirectory(KaggleObject): - r""" - Attributes: - name (str) - relative_url (str) - total_directories (int) - total_files (int) - total_children (int) - """ - - def __init__(self): - self._name = None - self._relative_url = None - self._total_directories = 0 - self._total_files = 0 - self._total_children = 0 - self._freeze() - - @property - def name(self) -> str: - return self._name or "" - - @name.setter - def name(self, name: Optional[str]): - if name is None: - del self.name - return - if not isinstance(name, str): - raise TypeError('name must be of type str') - self._name = name - - @property - def relative_url(self) -> str: - return self._relative_url or "" - - @relative_url.setter - def relative_url(self, relative_url: Optional[str]): - if relative_url is None: - del self.relative_url - return - if not isinstance(relative_url, str): - raise TypeError('relative_url must be of type str') - self._relative_url = relative_url - - @property - def 
total_directories(self) -> int: - return self._total_directories - - @total_directories.setter - def total_directories(self, total_directories: int): - if total_directories is None: - del self.total_directories - return - if not isinstance(total_directories, int): - raise TypeError('total_directories must be of type int') - self._total_directories = total_directories - - @property - def total_files(self) -> int: - return self._total_files - - @total_files.setter - def total_files(self, total_files: int): - if total_files is None: - del self.total_files - return - if not isinstance(total_files, int): - raise TypeError('total_files must be of type int') - self._total_files = total_files - - @property - def total_children(self) -> int: - return self._total_children - - @total_children.setter - def total_children(self, total_children: int): - if total_children is None: - del self.total_children - return - if not isinstance(total_children, int): - raise TypeError('total_children must be of type int') - self._total_children = total_children - - -class ApiFileExtensionSummaryInfo(KaggleObject): - r""" - Attributes: - extension (str) - file_count (int) - total_size (int) - """ - - def __init__(self): - self._extension = "" - self._file_count = 0 - self._total_size = 0 - self._freeze() - - @property - def extension(self) -> str: - return self._extension - - @extension.setter - def extension(self, extension: str): - if extension is None: - del self.extension - return - if not isinstance(extension, str): - raise TypeError('extension must be of type str') - self._extension = extension - - @property - def file_count(self) -> int: - return self._file_count - - @file_count.setter - def file_count(self, file_count: int): - if file_count is None: - del self.file_count - return - if not isinstance(file_count, int): - raise TypeError('file_count must be of type int') - self._file_count = file_count - - @property - def total_size(self) -> int: - return self._total_size +ApiColumnSummaryInfo._fields = [ + FieldMetadata("totalColumnCount", "total_column_count", "_total_column_count", int, 0, PredefinedSerializer()), + FieldMetadata("columnTypes", "column_types", "_column_types", ApiColumnTypeSummaryInfo, [], ListSerializer(KaggleObjectSerializer())), +] - @total_size.setter - def total_size(self, total_size: int): - if total_size is None: - del self.total_size - return - if not isinstance(total_size, int): - raise TypeError('total_size must be of type int') - self._total_size = total_size +ApiColumnTypeSummaryInfo._fields = [ + FieldMetadata("columnType", "column_type", "_column_type", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("columnCount", "column_count", "_column_count", int, 0, PredefinedSerializer()), +] +ApiDirectory._fields = [ + FieldMetadata("name", "name", "_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("relativeUrl", "relative_url", "_relative_url", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("totalDirectories", "total_directories", "_total_directories", int, 0, PredefinedSerializer()), + FieldMetadata("totalFiles", "total_files", "_total_files", int, 0, PredefinedSerializer()), + FieldMetadata("totalChildren", "total_children", "_total_children", int, 0, PredefinedSerializer()), +] ApiDirectoryContent._fields = [ FieldMetadata("directories", "directories", "_directories", ApiDirectory, [], ListSerializer(KaggleObjectSerializer())), @@ -504,6 +522,12 @@ def total_size(self, total_size: int): FieldMetadata("description", 
"description", "_description", str, None, PredefinedSerializer(), optional=True), ] +ApiFileExtensionSummaryInfo._fields = [ + FieldMetadata("extension", "extension", "_extension", str, "", PredefinedSerializer()), + FieldMetadata("fileCount", "file_count", "_file_count", int, 0, PredefinedSerializer()), + FieldMetadata("totalSize", "total_size", "_total_size", int, 0, PredefinedSerializer()), +] + ApiFilesSummary._fields = [ FieldMetadata("fileSummaryInfo", "file_summary_info", "_file_summary_info", ApiFileSummaryInfo, None, KaggleObjectSerializer()), FieldMetadata("columnSummaryInfo", "column_summary_info", "_column_summary_info", ApiColumnSummaryInfo, None, KaggleObjectSerializer()), @@ -514,27 +538,3 @@ def total_size(self, total_size: int): FieldMetadata("fileTypes", "file_types", "_file_types", ApiFileExtensionSummaryInfo, [], ListSerializer(KaggleObjectSerializer())), ] -ApiColumnSummaryInfo._fields = [ - FieldMetadata("totalColumnCount", "total_column_count", "_total_column_count", int, 0, PredefinedSerializer()), - FieldMetadata("columnTypes", "column_types", "_column_types", ApiColumnTypeSummaryInfo, [], ListSerializer(KaggleObjectSerializer())), -] - -ApiColumnTypeSummaryInfo._fields = [ - FieldMetadata("columnType", "column_type", "_column_type", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("columnCount", "column_count", "_column_count", int, 0, PredefinedSerializer()), -] - -ApiDirectory._fields = [ - FieldMetadata("name", "name", "_name", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("relativeUrl", "relative_url", "_relative_url", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("totalDirectories", "total_directories", "_total_directories", int, 0, PredefinedSerializer()), - FieldMetadata("totalFiles", "total_files", "_total_files", int, 0, PredefinedSerializer()), - FieldMetadata("totalChildren", "total_children", "_total_children", int, 0, PredefinedSerializer()), -] - -ApiFileExtensionSummaryInfo._fields = [ - FieldMetadata("extension", "extension", "_extension", str, "", PredefinedSerializer()), - FieldMetadata("fileCount", "file_count", "_file_count", int, 0, PredefinedSerializer()), - FieldMetadata("totalSize", "total_size", "_total_size", int, 0, PredefinedSerializer()), -] - diff --git a/src/kagglesdk/datasets/types/dataset_api_service.py b/src/kagglesdk/datasets/types/dataset_api_service.py index 873d6187..262c8e83 100644 --- a/src/kagglesdk/datasets/types/dataset_api_service.py +++ b/src/kagglesdk/datasets/types/dataset_api_service.py @@ -4,6 +4,135 @@ from kagglesdk.kaggle_object import * from typing import Optional, List +class ApiCategory(KaggleObject): + r""" + Attributes: + ref (str) + name (str) + description (str) + full_path (str) + competition_count (int) + dataset_count (int) + script_count (int) + total_count (int) + """ + + def __init__(self): + self._ref = "" + self._name = None + self._description = None + self._full_path = None + self._competition_count = 0 + self._dataset_count = 0 + self._script_count = 0 + self._total_count = 0 + self._freeze() + + @property + def ref(self) -> str: + return self._ref + + @ref.setter + def ref(self, ref: str): + if ref is None: + del self.ref + return + if not isinstance(ref, str): + raise TypeError('ref must be of type str') + self._ref = ref + + @property + def name(self) -> str: + return self._name or "" + + @name.setter + def name(self, name: Optional[str]): + if name is None: + del self.name + return + if not isinstance(name, str): + raise 
TypeError('name must be of type str') + self._name = name + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: Optional[str]): + if description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + @property + def full_path(self) -> str: + return self._full_path or "" + + @full_path.setter + def full_path(self, full_path: Optional[str]): + if full_path is None: + del self.full_path + return + if not isinstance(full_path, str): + raise TypeError('full_path must be of type str') + self._full_path = full_path + + @property + def competition_count(self) -> int: + return self._competition_count + + @competition_count.setter + def competition_count(self, competition_count: int): + if competition_count is None: + del self.competition_count + return + if not isinstance(competition_count, int): + raise TypeError('competition_count must be of type int') + self._competition_count = competition_count + + @property + def dataset_count(self) -> int: + return self._dataset_count + + @dataset_count.setter + def dataset_count(self, dataset_count: int): + if dataset_count is None: + del self.dataset_count + return + if not isinstance(dataset_count, int): + raise TypeError('dataset_count must be of type int') + self._dataset_count = dataset_count + + @property + def script_count(self) -> int: + return self._script_count + + @script_count.setter + def script_count(self, script_count: int): + if script_count is None: + del self.script_count + return + if not isinstance(script_count, int): + raise TypeError('script_count must be of type int') + self._script_count = script_count + + @property + def total_count(self) -> int: + return self._total_count + + @total_count.setter + def total_count(self, total_count: int): + if total_count is None: + del self.total_count + return + if not isinstance(total_count, int): + raise TypeError('total_count must be of type int') + self._total_count = total_count + + class ApiCreateDatasetRequest(KaggleObject): r""" Attributes: @@ -931,6 +1060,90 @@ def thumbnail_image_url(self, thumbnail_image_url: Optional[str]): self._thumbnail_image_url = thumbnail_image_url +class ApiDatasetColumn(KaggleObject): + r""" + Attributes: + order (int) + name (str) + type (str) + original_type (str) + description (str) + """ + + def __init__(self): + self._order = None + self._name = None + self._type = None + self._original_type = None + self._description = None + self._freeze() + + @property + def order(self) -> int: + return self._order or 0 + + @order.setter + def order(self, order: Optional[int]): + if order is None: + del self.order + return + if not isinstance(order, int): + raise TypeError('order must be of type int') + self._order = order + + @property + def name(self) -> str: + return self._name or "" + + @name.setter + def name(self, name: Optional[str]): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + @property + def type(self) -> str: + return self._type or "" + + @type.setter + def type(self, type: Optional[str]): + if type is None: + del self.type + return + if not isinstance(type, str): + raise TypeError('type must be of type str') + self._type = type + + @property + def original_type(self) -> str: + return self._original_type or "" + + @original_type.setter + def original_type(self, 
original_type: Optional[str]): + if original_type is None: + del self.original_type + return + if not isinstance(original_type, str): + raise TypeError('original_type must be of type str') + self._original_type = original_type + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: Optional[str]): + if description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + class ApiDatasetFile(KaggleObject): r""" Attributes: @@ -2586,218 +2799,16 @@ def files(self, files: Optional[List[Optional['ApiDatasetNewFile']]]): self._files = files -class ApiCategory(KaggleObject): - r""" - Attributes: - ref (str) - name (str) - description (str) - full_path (str) - competition_count (int) - dataset_count (int) - script_count (int) - total_count (int) - """ - - def __init__(self): - self._ref = "" - self._name = None - self._description = None - self._full_path = None - self._competition_count = 0 - self._dataset_count = 0 - self._script_count = 0 - self._total_count = 0 - self._freeze() - - @property - def ref(self) -> str: - return self._ref - - @ref.setter - def ref(self, ref: str): - if ref is None: - del self.ref - return - if not isinstance(ref, str): - raise TypeError('ref must be of type str') - self._ref = ref - - @property - def name(self) -> str: - return self._name or "" - - @name.setter - def name(self, name: Optional[str]): - if name is None: - del self.name - return - if not isinstance(name, str): - raise TypeError('name must be of type str') - self._name = name - - @property - def description(self) -> str: - return self._description or "" - - @description.setter - def description(self, description: Optional[str]): - if description is None: - del self.description - return - if not isinstance(description, str): - raise TypeError('description must be of type str') - self._description = description - - @property - def full_path(self) -> str: - return self._full_path or "" - - @full_path.setter - def full_path(self, full_path: Optional[str]): - if full_path is None: - del self.full_path - return - if not isinstance(full_path, str): - raise TypeError('full_path must be of type str') - self._full_path = full_path - - @property - def competition_count(self) -> int: - return self._competition_count - - @competition_count.setter - def competition_count(self, competition_count: int): - if competition_count is None: - del self.competition_count - return - if not isinstance(competition_count, int): - raise TypeError('competition_count must be of type int') - self._competition_count = competition_count - - @property - def dataset_count(self) -> int: - return self._dataset_count - - @dataset_count.setter - def dataset_count(self, dataset_count: int): - if dataset_count is None: - del self.dataset_count - return - if not isinstance(dataset_count, int): - raise TypeError('dataset_count must be of type int') - self._dataset_count = dataset_count - - @property - def script_count(self) -> int: - return self._script_count - - @script_count.setter - def script_count(self, script_count: int): - if script_count is None: - del self.script_count - return - if not isinstance(script_count, int): - raise TypeError('script_count must be of type int') - self._script_count = script_count - - @property - def total_count(self) -> int: - return self._total_count - - @total_count.setter - def total_count(self, total_count: 
int): - if total_count is None: - del self.total_count - return - if not isinstance(total_count, int): - raise TypeError('total_count must be of type int') - self._total_count = total_count - - -class ApiDatasetColumn(KaggleObject): - r""" - Attributes: - order (int) - name (str) - type (str) - original_type (str) - description (str) - """ - - def __init__(self): - self._order = None - self._name = None - self._type = None - self._original_type = None - self._description = None - self._freeze() - - @property - def order(self) -> int: - return self._order or 0 - - @order.setter - def order(self, order: Optional[int]): - if order is None: - del self.order - return - if not isinstance(order, int): - raise TypeError('order must be of type int') - self._order = order - - @property - def name(self) -> str: - return self._name or "" - - @name.setter - def name(self, name: Optional[str]): - if name is None: - del self.name - return - if not isinstance(name, str): - raise TypeError('name must be of type str') - self._name = name - - @property - def type(self) -> str: - return self._type or "" - - @type.setter - def type(self, type: Optional[str]): - if type is None: - del self.type - return - if not isinstance(type, str): - raise TypeError('type must be of type str') - self._type = type - - @property - def original_type(self) -> str: - return self._original_type or "" - - @original_type.setter - def original_type(self, original_type: Optional[str]): - if original_type is None: - del self.original_type - return - if not isinstance(original_type, str): - raise TypeError('original_type must be of type str') - self._original_type = original_type - - @property - def description(self) -> str: - return self._description or "" - - @description.setter - def description(self, description: Optional[str]): - if description is None: - del self.description - return - if not isinstance(description, str): - raise TypeError('description must be of type str') - self._description = description - +ApiCategory._fields = [ + FieldMetadata("ref", "ref", "_ref", str, "", PredefinedSerializer()), + FieldMetadata("name", "name", "_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("fullPath", "full_path", "_full_path", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("competitionCount", "competition_count", "_competition_count", int, 0, PredefinedSerializer()), + FieldMetadata("datasetCount", "dataset_count", "_dataset_count", int, 0, PredefinedSerializer()), + FieldMetadata("scriptCount", "script_count", "_script_count", int, 0, PredefinedSerializer()), + FieldMetadata("totalCount", "total_count", "_total_count", int, 0, PredefinedSerializer()), +] ApiCreateDatasetRequest._fields = [ FieldMetadata("id", "id", "_id", int, None, PredefinedSerializer(), optional=True), @@ -2871,6 +2882,14 @@ def description(self, description: Optional[str]): FieldMetadata("thumbnailImageUrl", "thumbnail_image_url", "_thumbnail_image_url", str, None, PredefinedSerializer(), optional=True), ] +ApiDatasetColumn._fields = [ + FieldMetadata("order", "order", "_order", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("name", "name", "_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("type", "type", "_type", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("originalType", "original_type", "_original_type", str, None, 
PredefinedSerializer(), optional=True), + FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), +] + ApiDatasetFile._fields = [ FieldMetadata("ref", "ref", "_ref", str, "", PredefinedSerializer()), FieldMetadata("datasetRef", "dataset_ref", "_dataset_ref", str, None, PredefinedSerializer(), optional=True), @@ -3026,22 +3045,3 @@ def description(self, description: Optional[str]): FieldMetadata("files", "files", "_files", ApiDatasetNewFile, [], ListSerializer(KaggleObjectSerializer())), ] -ApiCategory._fields = [ - FieldMetadata("ref", "ref", "_ref", str, "", PredefinedSerializer()), - FieldMetadata("name", "name", "_name", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("fullPath", "full_path", "_full_path", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("competitionCount", "competition_count", "_competition_count", int, 0, PredefinedSerializer()), - FieldMetadata("datasetCount", "dataset_count", "_dataset_count", int, 0, PredefinedSerializer()), - FieldMetadata("scriptCount", "script_count", "_script_count", int, 0, PredefinedSerializer()), - FieldMetadata("totalCount", "total_count", "_total_count", int, 0, PredefinedSerializer()), -] - -ApiDatasetColumn._fields = [ - FieldMetadata("order", "order", "_order", int, None, PredefinedSerializer(), optional=True), - FieldMetadata("name", "name", "_name", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("type", "type", "_type", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("originalType", "original_type", "_original_type", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), -] - diff --git a/src/kagglesdk/datasets/types/dataset_enums.py b/src/kagglesdk/datasets/types/dataset_enums.py index e379d826..1d45f49b 100644 --- a/src/kagglesdk/datasets/types/dataset_enums.py +++ b/src/kagglesdk/datasets/types/dataset_enums.py @@ -77,6 +77,7 @@ class DatasetSortBy(enum.Enum): DATASET_SORT_BY_LAST_VIEWED = 6 DATASET_SORT_BY_USABILITY = 7 DATASET_SORT_BY_DOWNLOAD_COUNT = 8 + DATASET_SORT_BY_NOTEBOOK_COUNT = 9 class DatasetViewedGroup(enum.Enum): DATASET_VIEWED_GROUP_UNSPECIFIED = 0 diff --git a/src/kagglesdk/datasets/types/dataset_types.py b/src/kagglesdk/datasets/types/dataset_types.py index 8ad389c8..7724c81c 100644 --- a/src/kagglesdk/datasets/types/dataset_types.py +++ b/src/kagglesdk/datasets/types/dataset_types.py @@ -2,6 +2,62 @@ from kagglesdk.users.types.users_enums import CollaboratorType from typing import Optional, List +class DatasetCollaborator(KaggleObject): + r""" + Attributes: + username (str) + group_slug (str) + role (CollaboratorType) + """ + + def __init__(self): + self._username = None + self._group_slug = None + self._role = CollaboratorType.COLLABORATOR_TYPE_UNSPECIFIED + self._freeze() + + @property + def username(self) -> str: + return self._username or "" + + @username.setter + def username(self, username: str): + if username is None: + del self.username + return + if not isinstance(username, str): + raise TypeError('username must be of type str') + del self.group_slug + self._username = username + + @property + def group_slug(self) -> str: + return self._group_slug or "" + + @group_slug.setter + def group_slug(self, group_slug: str): + if group_slug is None: + del 
self.group_slug + return + if not isinstance(group_slug, str): + raise TypeError('group_slug must be of type str') + del self.username + self._group_slug = group_slug + + @property + def role(self) -> 'CollaboratorType': + return self._role + + @role.setter + def role(self, role: 'CollaboratorType'): + if role is None: + del self.role + return + if not isinstance(role, CollaboratorType): + raise TypeError('role must be of type CollaboratorType') + self._role = role + + class DatasetInfo(KaggleObject): r""" Attributes: @@ -536,61 +592,11 @@ def name(self, name: Optional[str]): self._name = name -class DatasetCollaborator(KaggleObject): - r""" - Attributes: - username (str) - group_slug (str) - role (CollaboratorType) - """ - - def __init__(self): - self._username = None - self._group_slug = None - self._role = CollaboratorType.COLLABORATOR_TYPE_UNSPECIFIED - self._freeze() - - @property - def username(self) -> str: - return self._username or "" - - @username.setter - def username(self, username: str): - if username is None: - del self.username - return - if not isinstance(username, str): - raise TypeError('username must be of type str') - del self.group_slug - self._username = username - - @property - def group_slug(self) -> str: - return self._group_slug or "" - - @group_slug.setter - def group_slug(self, group_slug: str): - if group_slug is None: - del self.group_slug - return - if not isinstance(group_slug, str): - raise TypeError('group_slug must be of type str') - del self.username - self._group_slug = group_slug - - @property - def role(self) -> 'CollaboratorType': - return self._role - - @role.setter - def role(self, role: 'CollaboratorType'): - if role is None: - del self.role - return - if not isinstance(role, CollaboratorType): - raise TypeError('role must be of type CollaboratorType') - self._role = role - +DatasetCollaborator._fields = [ + FieldMetadata("username", "username", "_username", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("groupSlug", "group_slug", "_group_slug", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("role", "role", "_role", CollaboratorType, CollaboratorType.COLLABORATOR_TYPE_UNSPECIFIED, EnumSerializer()), +] DatasetInfo._fields = [ FieldMetadata("datasetId", "dataset_id", "_dataset_id", int, 0, PredefinedSerializer()), @@ -638,9 +644,3 @@ def role(self, role: 'CollaboratorType'): FieldMetadata("name", "name", "_name", str, None, PredefinedSerializer(), optional=True), ] -DatasetCollaborator._fields = [ - FieldMetadata("username", "username", "_username", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("groupSlug", "group_slug", "_group_slug", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("role", "role", "_role", CollaboratorType, CollaboratorType.COLLABORATOR_TYPE_UNSPECIFIED, EnumSerializer()), -] - diff --git a/src/kagglesdk/kaggle_client.py b/src/kagglesdk/kaggle_client.py index dd9765ea..45ccbc08 100644 --- a/src/kagglesdk/kaggle_client.py +++ b/src/kagglesdk/kaggle_client.py @@ -1,42 +1,38 @@ -from kagglesdk.kernels.services.kernels_api_service import KernelsApiClient +from kagglesdk.admin.services.inbox_file_service import InboxFileClient +from kagglesdk.benchmarks.services.benchmarks_api_service import BenchmarksApiClient from kagglesdk.blobs.services.blob_api_service import BlobApiClient +from kagglesdk.common.services.operations_service import OperationsClient +from kagglesdk.competitions.services.competition_api_service import CompetitionApiClient +from 
kagglesdk.datasets.services.dataset_api_service import DatasetApiClient from kagglesdk.education.services.education_api_service import EducationApiClient -from kagglesdk.benchmarks.services.benchmarks_api_service import BenchmarksApiClient +from kagglesdk.kernels.services.kernels_api_service import KernelsApiClient from kagglesdk.models.services.model_api_service import ModelApiClient from kagglesdk.models.services.model_service import ModelClient -from kagglesdk.competitions.services.competition_api_service import CompetitionApiClient -from kagglesdk.datasets.services.dataset_api_service import DatasetApiClient -from kagglesdk.common.services.operations_service import OperationsClient -from kagglesdk.admin.services.inbox_file_service import InboxFileClient +from kagglesdk.search.services.search_api_service import SearchApiClient from kagglesdk.security.services.iam_service import IamClient from kagglesdk.security.services.oauth_service import OAuthClient -from kagglesdk.search.services.search_api_service import SearchApiClient from kagglesdk.users.services.account_service import AccountClient +from kagglesdk.users.services.group_api_service import GroupApiClient from kagglesdk.kaggle_env import KaggleEnv from kagglesdk.kaggle_http_client import KaggleHttpClient class KaggleClient(object): - class Kernels(object): - def __init__(self, http_client: KaggleHttpClient): - self.kernels_api_client = KernelsApiClient(http_client) - - class Blobs(object): - def __init__(self, http_client: KaggleHttpClient): - self.blob_api_client = BlobApiClient(http_client) - - class Education(object): + class Admin(object): def __init__(self, http_client: KaggleHttpClient): - self.education_api_client = EducationApiClient(http_client) + self.inbox_file_client = InboxFileClient(http_client) class Benchmarks(object): def __init__(self, http_client: KaggleHttpClient): self.benchmarks_api_client = BenchmarksApiClient(http_client) - class Models(object): + class Blobs(object): def __init__(self, http_client: KaggleHttpClient): - self.model_api_client = ModelApiClient(http_client) - self.model_client = ModelClient(http_client) + self.blob_api_client = BlobApiClient(http_client) + + class Common(object): + def __init__(self, http_client: KaggleHttpClient): + self.operations_client = OperationsClient(http_client) class Competitions(object): def __init__(self, http_client: KaggleHttpClient): @@ -46,40 +42,46 @@ class Datasets(object): def __init__(self, http_client: KaggleHttpClient): self.dataset_api_client = DatasetApiClient(http_client) - class Common(object): + class Education(object): def __init__(self, http_client: KaggleHttpClient): - self.operations_client = OperationsClient(http_client) + self.education_api_client = EducationApiClient(http_client) - class Admin(object): + class Kernels(object): def __init__(self, http_client: KaggleHttpClient): - self.inbox_file_client = InboxFileClient(http_client) + self.kernels_api_client = KernelsApiClient(http_client) - class Security(object): + class Models(object): def __init__(self, http_client: KaggleHttpClient): - self.iam_client = IamClient(http_client) - self.oauth_client = OAuthClient(http_client) + self.model_api_client = ModelApiClient(http_client) + self.model_client = ModelClient(http_client) class Search(object): def __init__(self, http_client: KaggleHttpClient): self.search_api_client = SearchApiClient(http_client) + class Security(object): + def __init__(self, http_client: KaggleHttpClient): + self.iam_client = IamClient(http_client) + self.oauth_client = 
OAuthClient(http_client) + class Users(object): def __init__(self, http_client: KaggleHttpClient): self.account_client = AccountClient(http_client) + self.group_api_client = GroupApiClient(http_client) def __init__(self, env: KaggleEnv = None, verbose: bool = False, username: str = None, password: str = None, api_token: str = None): self._http_client = http_client = KaggleHttpClient(env, verbose, username=username, password=password, api_token=api_token) - self.kernels = KaggleClient.Kernels(http_client) - self.blobs = KaggleClient.Blobs(http_client) - self.education = KaggleClient.Education(http_client) + self.admin = KaggleClient.Admin(http_client) self.benchmarks = KaggleClient.Benchmarks(http_client) - self.models = KaggleClient.Models(http_client) + self.blobs = KaggleClient.Blobs(http_client) + self.common = KaggleClient.Common(http_client) self.competitions = KaggleClient.Competitions(http_client) self.datasets = KaggleClient.Datasets(http_client) - self.common = KaggleClient.Common(http_client) - self.admin = KaggleClient.Admin(http_client) - self.security = KaggleClient.Security(http_client) + self.education = KaggleClient.Education(http_client) + self.kernels = KaggleClient.Kernels(http_client) + self.models = KaggleClient.Models(http_client) self.search = KaggleClient.Search(http_client) + self.security = KaggleClient.Security(http_client) self.users = KaggleClient.Users(http_client) self.username = username self.password = password diff --git a/src/kagglesdk/kaggle_env.py b/src/kagglesdk/kaggle_env.py index d108f602..a6dd8d2a 100644 --- a/src/kagglesdk/kaggle_env.py +++ b/src/kagglesdk/kaggle_env.py @@ -84,8 +84,21 @@ def get_access_token_from_env(): return (token, KAGGLE_API_V1_TOKEN_PATH) access_token = os.environ.get("KAGGLE_API_TOKEN") - if access_token is not None: - get_logger().debug("Using access token from KAGGLE_API_TOKEN environment variable") + if access_token: + if Path(access_token).exists(): + return (_get_access_token_from_file(access_token), "KAGGLE_API_TOKEN") + get_logger().debug( + "Using access token from KAGGLE_API_TOKEN environment variable" + ) return (access_token, "KAGGLE_API_TOKEN") + access_token = _get_access_token_from_file(os.path.expanduser("~/.kaggle/access_token")) + if access_token: + return (access_token, "access_token") + + # Check ".txt" as well in case Windows users create the file with this extension. 
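# Illustrative sketch (editorial, not part of the patch): the token lookup order that
# the updated get_access_token_from_env() in kaggle_env.py appears to implement.
# The names resolve_access_token and _read_token_file below are hypothetical stand-ins;
# the real module uses its own _get_access_token_from_file() helper and logger.
import os
from pathlib import Path
from typing import Optional, Tuple

def _read_token_file(path: str) -> Optional[str]:
    # Hypothetical helper: return the stripped file contents if the file exists, else None.
    p = Path(os.path.expanduser(path))
    if p.is_file():
        return p.read_text().strip() or None
    return None

def resolve_access_token() -> Tuple[Optional[str], Optional[str]]:
    # 1. KAGGLE_API_TOKEN may hold either the token itself or a path to a token file.
    token = os.environ.get("KAGGLE_API_TOKEN")
    if token:
        if Path(token).exists():
            return _read_token_file(token), "KAGGLE_API_TOKEN"
        return token, "KAGGLE_API_TOKEN"
    # 2. Fall back to ~/.kaggle/access_token, then ~/.kaggle/access_token.txt
    #    (the ".txt" variant covers files created on Windows with an extension).
    for candidate in ("~/.kaggle/access_token", "~/.kaggle/access_token.txt"):
        token = _read_token_file(candidate)
        if token:
            return token, "access_token"
    # 3. Nothing found: the caller falls back to other credential sources.
    return None, None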
+ access_token = _get_access_token_from_file(os.path.expanduser("~/.kaggle/access_token.txt")) + if access_token: + return (access_token, "access_token") + return (None, None) diff --git a/src/kagglesdk/kaggle_http_client.py b/src/kagglesdk/kaggle_http_client.py index 2e938371..67d12471 100644 --- a/src/kagglesdk/kaggle_http_client.py +++ b/src/kagglesdk/kaggle_http_client.py @@ -12,7 +12,6 @@ from kagglesdk.kaggle_env import ( get_endpoint, get_env, - is_in_kaggle_notebook, get_access_token_from_env, KaggleEnv, ) diff --git a/src/kagglesdk/kernels/types/kernels_api_service.py b/src/kagglesdk/kernels/types/kernels_api_service.py index cd238869..7e922bf8 100644 --- a/src/kagglesdk/kernels/types/kernels_api_service.py +++ b/src/kagglesdk/kernels/types/kernels_api_service.py @@ -649,14 +649,14 @@ class ApiKernelMetadata(KaggleObject): kernel_type (str) is_private (bool) enable_gpu (bool) - enable_tpu (bool) enable_internet (bool) category_ids (str) dataset_data_sources (str) kernel_data_sources (str) competition_data_sources (str) - model_data_sources (str) total_votes (int) + model_data_sources (str) + enable_tpu (bool) current_version_number (int) docker_image (str) machine_shape (str) @@ -676,14 +676,14 @@ def __init__(self): self._kernel_type = None self._is_private = None self._enable_gpu = None - self._enable_tpu = None self._enable_internet = None self._category_ids = [] self._dataset_data_sources = [] self._kernel_data_sources = [] self._competition_data_sources = [] - self._model_data_sources = [] self._total_votes = 0 + self._model_data_sources = [] + self._enable_tpu = None self._current_version_number = None self._docker_image = None self._machine_shape = None @@ -978,6 +978,99 @@ def machine_shape(self, machine_shape: Optional[str]): self._machine_shape = machine_shape +class ApiKernelSessionOutputFile(KaggleObject): + r""" + Attributes: + url (str) + file_name (str) + """ + + def __init__(self): + self._url = None + self._file_name = None + self._freeze() + + @property + def url(self) -> str: + return self._url or "" + + @url.setter + def url(self, url: Optional[str]): + if url is None: + del self.url + return + if not isinstance(url, str): + raise TypeError('url must be of type str') + self._url = url + + @property + def file_name(self) -> str: + return self._file_name or "" + + @file_name.setter + def file_name(self, file_name: Optional[str]): + if file_name is None: + del self.file_name + return + if not isinstance(file_name, str): + raise TypeError('file_name must be of type str') + self._file_name = file_name + + +class ApiListKernelFilesItem(KaggleObject): + r""" + Attributes: + name (str) + size (int) + creation_date (str) + """ + + def __init__(self): + self._name = "" + self._size = 0 + self._creation_date = "" + self._freeze() + + @property + def name(self) -> str: + return self._name + + @name.setter + def name(self, name: str): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + @property + def size(self) -> int: + return self._size + + @size.setter + def size(self, size: int): + if size is None: + del self.size + return + if not isinstance(size, int): + raise TypeError('size must be of type int') + self._size = size + + @property + def creation_date(self) -> str: + return self._creation_date + + @creation_date.setter + def creation_date(self, creation_date: str): + if creation_date is None: + del self.creation_date + return + if not isinstance(creation_date, str): + raise 
TypeError('creation_date must be of type str') + self._creation_date = creation_date + + class ApiListKernelFilesRequest(KaggleObject): r""" Attributes: @@ -1555,9 +1648,6 @@ class ApiSaveKernelRequest(KaggleObject): enable_gpu (bool) Whether or not the kernel should run on a GPU. DEPRECATED: use `machine_shape` instead - enable_tpu (bool) - Whether or not the kernel should run on a TPU. - DEPRECATED: use `machine_shape` instead enable_internet (bool) Whether or not the kernel should be able to access the internet. docker_image_pinning_type (str) @@ -1568,6 +1658,9 @@ class ApiSaveKernelRequest(KaggleObject): `{username}/{model-slug}/{framework}/{variation-slug}` Or versioned: `{username}/{model-slug}/{framework}/{variation-slug}/{version-number}` + enable_tpu (bool) + Whether or not the kernel should run on a TPU. + DEPRECATED: use `machine_shape` instead session_timeout_seconds (int) If specified, terminate the kernel session after this many seconds of runtime, which must be lower than the global maximum. @@ -1601,10 +1694,10 @@ def __init__(self): self._category_ids = [] self._is_private = None self._enable_gpu = None - self._enable_tpu = None self._enable_internet = None self._docker_image_pinning_type = None self._model_data_sources = [] + self._enable_tpu = None self._session_timeout_seconds = None self._priority = None self._docker_image = None @@ -1989,6 +2082,7 @@ class ApiSaveKernelResponse(KaggleObject): invalid_competition_sources (str) invalid_kernel_sources (str) invalid_model_sources (str) + kernel_id (int) """ def __init__(self): @@ -2001,6 +2095,7 @@ def __init__(self): self._invalid_competition_sources = [] self._invalid_kernel_sources = [] self._invalid_model_sources = [] + self._kernel_id = 0 self._freeze() @property @@ -2130,6 +2225,19 @@ def invalid_model_sources(self, invalid_model_sources: Optional[List[str]]): raise TypeError('invalid_model_sources must contain only items of type str') self._invalid_model_sources = invalid_model_sources + @property + def kernel_id(self) -> int: + return self._kernel_id + + @kernel_id.setter + def kernel_id(self, kernel_id: int): + if kernel_id is None: + del self.kernel_id + return + if not isinstance(kernel_id, int): + raise TypeError('kernel_id must be of type int') + self._kernel_id = kernel_id + @property def versionNumber(self): return self.version_number @@ -2154,98 +2262,9 @@ def invalidKernelSources(self): def invalidModelSources(self): return self.invalid_model_sources - -class ApiKernelSessionOutputFile(KaggleObject): - r""" - Attributes: - url (str) - file_name (str) - """ - - def __init__(self): - self._url = None - self._file_name = None - self._freeze() - - @property - def url(self) -> str: - return self._url or "" - - @url.setter - def url(self, url: Optional[str]): - if url is None: - del self.url - return - if not isinstance(url, str): - raise TypeError('url must be of type str') - self._url = url - - @property - def file_name(self) -> str: - return self._file_name or "" - - @file_name.setter - def file_name(self, file_name: Optional[str]): - if file_name is None: - del self.file_name - return - if not isinstance(file_name, str): - raise TypeError('file_name must be of type str') - self._file_name = file_name - - -class ApiListKernelFilesItem(KaggleObject): - r""" - Attributes: - name (str) - size (int) - creation_date (str) - """ - - def __init__(self): - self._name = "" - self._size = 0 - self._creation_date = "" - self._freeze() - - @property - def name(self) -> str: - return self._name - - @name.setter - def 
name(self, name: str): - if name is None: - del self.name - return - if not isinstance(name, str): - raise TypeError('name must be of type str') - self._name = name - @property - def size(self) -> int: - return self._size - - @size.setter - def size(self, size: int): - if size is None: - del self.size - return - if not isinstance(size, int): - raise TypeError('size must be of type int') - self._size = size - - @property - def creation_date(self) -> str: - return self._creation_date - - @creation_date.setter - def creation_date(self, creation_date: str): - if creation_date is None: - del self.creation_date - return - if not isinstance(creation_date, str): - raise TypeError('creation_date must be of type str') - self._creation_date = creation_date + def kernelId(self): + return self.kernel_id ApiCancelKernelSessionRequest._fields = [ @@ -2323,19 +2342,30 @@ def creation_date(self, creation_date: str): FieldMetadata("kernelType", "kernel_type", "_kernel_type", str, None, PredefinedSerializer(), optional=True), FieldMetadata("isPrivate", "is_private", "_is_private", bool, None, PredefinedSerializer(), optional=True), FieldMetadata("enableGpu", "enable_gpu", "_enable_gpu", bool, None, PredefinedSerializer(), optional=True), - FieldMetadata("enableTpu", "enable_tpu", "_enable_tpu", bool, None, PredefinedSerializer(), optional=True), FieldMetadata("enableInternet", "enable_internet", "_enable_internet", bool, None, PredefinedSerializer(), optional=True), FieldMetadata("categoryIds", "category_ids", "_category_ids", str, [], ListSerializer(PredefinedSerializer())), FieldMetadata("datasetDataSources", "dataset_data_sources", "_dataset_data_sources", str, [], ListSerializer(PredefinedSerializer())), FieldMetadata("kernelDataSources", "kernel_data_sources", "_kernel_data_sources", str, [], ListSerializer(PredefinedSerializer())), FieldMetadata("competitionDataSources", "competition_data_sources", "_competition_data_sources", str, [], ListSerializer(PredefinedSerializer())), - FieldMetadata("modelDataSources", "model_data_sources", "_model_data_sources", str, [], ListSerializer(PredefinedSerializer())), FieldMetadata("totalVotes", "total_votes", "_total_votes", int, 0, PredefinedSerializer()), + FieldMetadata("modelDataSources", "model_data_sources", "_model_data_sources", str, [], ListSerializer(PredefinedSerializer())), + FieldMetadata("enableTpu", "enable_tpu", "_enable_tpu", bool, None, PredefinedSerializer(), optional=True), FieldMetadata("currentVersionNumber", "current_version_number", "_current_version_number", int, None, PredefinedSerializer(), optional=True), FieldMetadata("dockerImage", "docker_image", "_docker_image", str, None, PredefinedSerializer(), optional=True), FieldMetadata("machineShape", "machine_shape", "_machine_shape", str, None, PredefinedSerializer(), optional=True), ] +ApiKernelSessionOutputFile._fields = [ + FieldMetadata("url", "url", "_url", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("fileName", "file_name", "_file_name", str, None, PredefinedSerializer(), optional=True), +] + +ApiListKernelFilesItem._fields = [ + FieldMetadata("name", "name", "_name", str, "", PredefinedSerializer()), + FieldMetadata("size", "size", "_size", int, 0, PredefinedSerializer()), + FieldMetadata("creationDate", "creation_date", "_creation_date", str, "", PredefinedSerializer()), +] + ApiListKernelFilesRequest._fields = [ FieldMetadata("userName", "user_name", "_user_name", str, "", PredefinedSerializer()), FieldMetadata("kernelSlug", "kernel_slug", "_kernel_slug", str, 
"", PredefinedSerializer()), @@ -2395,10 +2425,10 @@ def creation_date(self, creation_date: str): FieldMetadata("categoryIds", "category_ids", "_category_ids", str, [], ListSerializer(PredefinedSerializer())), FieldMetadata("isPrivate", "is_private", "_is_private", bool, None, PredefinedSerializer(), optional=True), FieldMetadata("enableGpu", "enable_gpu", "_enable_gpu", bool, None, PredefinedSerializer(), optional=True), - FieldMetadata("enableTpu", "enable_tpu", "_enable_tpu", bool, None, PredefinedSerializer(), optional=True), FieldMetadata("enableInternet", "enable_internet", "_enable_internet", bool, None, PredefinedSerializer(), optional=True), FieldMetadata("dockerImagePinningType", "docker_image_pinning_type", "_docker_image_pinning_type", str, None, PredefinedSerializer(), optional=True), FieldMetadata("modelDataSources", "model_data_sources", "_model_data_sources", str, [], ListSerializer(PredefinedSerializer())), + FieldMetadata("enableTpu", "enable_tpu", "_enable_tpu", bool, None, PredefinedSerializer(), optional=True), FieldMetadata("sessionTimeoutSeconds", "session_timeout_seconds", "_session_timeout_seconds", int, None, PredefinedSerializer(), optional=True), FieldMetadata("priority", "priority", "_priority", int, None, PredefinedSerializer(), optional=True), FieldMetadata("dockerImage", "docker_image", "_docker_image", str, None, PredefinedSerializer(), optional=True), @@ -2416,16 +2446,6 @@ def creation_date(self, creation_date: str): FieldMetadata("invalidCompetitionSources", "invalid_competition_sources", "_invalid_competition_sources", str, [], ListSerializer(PredefinedSerializer())), FieldMetadata("invalidKernelSources", "invalid_kernel_sources", "_invalid_kernel_sources", str, [], ListSerializer(PredefinedSerializer())), FieldMetadata("invalidModelSources", "invalid_model_sources", "_invalid_model_sources", str, [], ListSerializer(PredefinedSerializer())), -] - -ApiKernelSessionOutputFile._fields = [ - FieldMetadata("url", "url", "_url", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("fileName", "file_name", "_file_name", str, None, PredefinedSerializer(), optional=True), -] - -ApiListKernelFilesItem._fields = [ - FieldMetadata("name", "name", "_name", str, "", PredefinedSerializer()), - FieldMetadata("size", "size", "_size", int, 0, PredefinedSerializer()), - FieldMetadata("creationDate", "creation_date", "_creation_date", str, "", PredefinedSerializer()), + FieldMetadata("kernelId", "kernel_id", "_kernel_id", int, 0, PredefinedSerializer()), ] diff --git a/src/kagglesdk/models/types/model_api_service.py b/src/kagglesdk/models/types/model_api_service.py index 993b6364..0314ff36 100644 --- a/src/kagglesdk/models/types/model_api_service.py +++ b/src/kagglesdk/models/types/model_api_service.py @@ -83,11 +83,11 @@ class ApiCreateModelInstanceRequestBody(KaggleObject): fine_tunable (bool) training_data (str) files (ApiDatasetNewFile) - directories (ApiUploadDirectoryInfo) license_name (str) model_instance_type (ModelInstanceType) base_model_instance (str) external_base_model_url (str) + directories (ApiUploadDirectoryInfo) sigstore (bool) """ @@ -99,11 +99,11 @@ def __init__(self): self._fine_tunable = None self._training_data = [] self._files = [] - self._directories = [] self._license_name = None self._model_instance_type = None self._base_model_instance = None self._external_base_model_url = None + self._directories = [] self._sigstore = None self._freeze() @@ -601,16 +601,16 @@ class ApiCreateModelResponse(KaggleObject): id (int) ref (str) error 
(str) - error_code (int) url (str) + error_code (int) """ def __init__(self): self._id = None self._ref = None self._error = None - self._error_code = None self._url = None + self._error_code = None self._freeze() @property @@ -1038,6 +1038,168 @@ def endpoint_path(): return '/api/v1/models/{owner_slug}/{model_slug}/{framework}/{instance_slug}/{version_number}/download' +class ApiGatingUserConsent(KaggleObject): + r""" + ApiGatingUserConsent presents GatingUserConsent data for publisher access, + omitting or modifying certain fields as needed such as user_id. + + Attributes: + id (int) + agreement_id (int) + user_name (str) + request_data (str) + request_time (datetime) + review_time (datetime) + review_status (GatingAgreementRequestsReviewStatus) + expiry_status (GatingAgreementRequestsExpiryStatus) + expiry_time (datetime) + publisher_notes (str) + """ + + def __init__(self): + self._id = 0 + self._agreement_id = 0 + self._user_name = "" + self._request_data = None + self._request_time = None + self._review_time = None + self._review_status = GatingAgreementRequestsReviewStatus.GATING_AGREEMENT_REQUESTS_REVIEW_STATUS_UNSPECIFIED + self._expiry_status = GatingAgreementRequestsExpiryStatus.GATING_AGREEMENT_REQUESTS_EXPIRY_STATUS_UNSPECIFIED + self._expiry_time = None + self._publisher_notes = None + self._freeze() + + @property + def id(self) -> int: + return self._id + + @id.setter + def id(self, id: int): + if id is None: + del self.id + return + if not isinstance(id, int): + raise TypeError('id must be of type int') + self._id = id + + @property + def agreement_id(self) -> int: + return self._agreement_id + + @agreement_id.setter + def agreement_id(self, agreement_id: int): + if agreement_id is None: + del self.agreement_id + return + if not isinstance(agreement_id, int): + raise TypeError('agreement_id must be of type int') + self._agreement_id = agreement_id + + @property + def user_name(self) -> str: + return self._user_name + + @user_name.setter + def user_name(self, user_name: str): + if user_name is None: + del self.user_name + return + if not isinstance(user_name, str): + raise TypeError('user_name must be of type str') + self._user_name = user_name + + @property + def request_data(self) -> str: + return self._request_data or "" + + @request_data.setter + def request_data(self, request_data: Optional[str]): + if request_data is None: + del self.request_data + return + if not isinstance(request_data, str): + raise TypeError('request_data must be of type str') + self._request_data = request_data + + @property + def request_time(self) -> datetime: + return self._request_time + + @request_time.setter + def request_time(self, request_time: datetime): + if request_time is None: + del self.request_time + return + if not isinstance(request_time, datetime): + raise TypeError('request_time must be of type datetime') + self._request_time = request_time + + @property + def review_time(self) -> datetime: + return self._review_time or None + + @review_time.setter + def review_time(self, review_time: Optional[datetime]): + if review_time is None: + del self.review_time + return + if not isinstance(review_time, datetime): + raise TypeError('review_time must be of type datetime') + self._review_time = review_time + + @property + def review_status(self) -> 'GatingAgreementRequestsReviewStatus': + return self._review_status + + @review_status.setter + def review_status(self, review_status: 'GatingAgreementRequestsReviewStatus'): + if review_status is None: + del self.review_status + return + if not 
isinstance(review_status, GatingAgreementRequestsReviewStatus): + raise TypeError('review_status must be of type GatingAgreementRequestsReviewStatus') + self._review_status = review_status + + @property + def expiry_status(self) -> 'GatingAgreementRequestsExpiryStatus': + return self._expiry_status + + @expiry_status.setter + def expiry_status(self, expiry_status: 'GatingAgreementRequestsExpiryStatus'): + if expiry_status is None: + del self.expiry_status + return + if not isinstance(expiry_status, GatingAgreementRequestsExpiryStatus): + raise TypeError('expiry_status must be of type GatingAgreementRequestsExpiryStatus') + self._expiry_status = expiry_status + + @property + def expiry_time(self) -> datetime: + return self._expiry_time or None + + @expiry_time.setter + def expiry_time(self, expiry_time: Optional[datetime]): + if expiry_time is None: + del self.expiry_time + return + if not isinstance(expiry_time, datetime): + raise TypeError('expiry_time must be of type datetime') + self._expiry_time = expiry_time + + @property + def publisher_notes(self) -> str: + return self._publisher_notes or "" + + @publisher_notes.setter + def publisher_notes(self, publisher_notes: Optional[str]): + if publisher_notes is None: + del self.publisher_notes + return + if not isinstance(publisher_notes, str): + raise TypeError('publisher_notes must be of type str') + self._publisher_notes = publisher_notes + + class ApiGetModelInstanceRequest(KaggleObject): r""" Attributes: @@ -3279,60 +3441,163 @@ def idToken(self): return self.id_token -class KeysRequest(KaggleObject): - r""" - """ - - pass - def endpoint(self): - path = '/api/v1/models/signing/keys' - return path.format_map(self.to_field_map(self)) - - -class KeysResponse(KaggleObject): +class JWK(KaggleObject): r""" - JWKS specification can be found: - https://openid.net/specs/draft-jones-json-web-key-03.html// - https://datatracker.ietf.org/doc/html/rfc7517 - Attributes: - keys (JWK) + kty (str) + alg (str) + use (str) + kid (str) + n (str) + modulus + e (str) + public exponent """ def __init__(self): - self._keys = [] + self._kty = "" + self._alg = "" + self._use = "" + self._kid = "" + self._n = "" + self._e = "" self._freeze() @property - def keys(self) -> Optional[List[Optional['JWK']]]: - return self._keys + def kty(self) -> str: + return self._kty - @keys.setter - def keys(self, keys: Optional[List[Optional['JWK']]]): - if keys is None: - del self.keys + @kty.setter + def kty(self, kty: str): + if kty is None: + del self.kty return - if not isinstance(keys, list): - raise TypeError('keys must be of type list') - if not all([isinstance(t, JWK) for t in keys]): - raise TypeError('keys must contain only items of type JWK') - self._keys = keys - + if not isinstance(kty, str): + raise TypeError('kty must be of type str') + self._kty = kty -class WellKnowEndpointRequest(KaggleObject): - r""" - """ + @property + def alg(self) -> str: + return self._alg - pass - def endpoint(self): - path = '/api/v1/models/signing/.well-known/openid-configuration' - return path.format_map(self.to_field_map(self)) + @alg.setter + def alg(self, alg: str): + if alg is None: + del self.alg + return + if not isinstance(alg, str): + raise TypeError('alg must be of type str') + self._alg = alg + @property + def use(self) -> str: + return self._use -class WellKnowEndpointResponse(KaggleObject): - r""" - More details about the response can be found at - https://openid.net/specs/openid-connect-discovery-1_0.html#MetadataContents + @use.setter + def use(self, use: str): + if use 
is None: + del self.use + return + if not isinstance(use, str): + raise TypeError('use must be of type str') + self._use = use + + @property + def kid(self) -> str: + return self._kid + + @kid.setter + def kid(self, kid: str): + if kid is None: + del self.kid + return + if not isinstance(kid, str): + raise TypeError('kid must be of type str') + self._kid = kid + + @property + def n(self) -> str: + """modulus""" + return self._n + + @n.setter + def n(self, n: str): + if n is None: + del self.n + return + if not isinstance(n, str): + raise TypeError('n must be of type str') + self._n = n + + @property + def e(self) -> str: + """public exponent""" + return self._e + + @e.setter + def e(self, e: str): + if e is None: + del self.e + return + if not isinstance(e, str): + raise TypeError('e must be of type str') + self._e = e + + +class KeysRequest(KaggleObject): + r""" + """ + + pass + def endpoint(self): + path = '/api/v1/models/signing/keys' + return path.format_map(self.to_field_map(self)) + + +class KeysResponse(KaggleObject): + r""" + JWKS specification can be found: + https://openid.net/specs/draft-jones-json-web-key-03.html// + https://datatracker.ietf.org/doc/html/rfc7517 + + Attributes: + keys (JWK) + """ + + def __init__(self): + self._keys = [] + self._freeze() + + @property + def keys(self) -> Optional[List[Optional['JWK']]]: + return self._keys + + @keys.setter + def keys(self, keys: Optional[List[Optional['JWK']]]): + if keys is None: + del self.keys + return + if not isinstance(keys, list): + raise TypeError('keys must be of type list') + if not all([isinstance(t, JWK) for t in keys]): + raise TypeError('keys must contain only items of type JWK') + self._keys = keys + + +class WellKnowEndpointRequest(KaggleObject): + r""" + """ + + pass + def endpoint(self): + path = '/api/v1/models/signing/.well-known/openid-configuration' + return path.format_map(self.to_field_map(self)) + + +class WellKnowEndpointResponse(KaggleObject): + r""" + More details about the response can be found at + https://openid.net/specs/openid-connect-discovery-1_0.html#MetadataContents Attributes: issuer (str) @@ -3482,271 +3747,6 @@ def subjectTypesSupported(self): return self.subject_types_supported -class ApiGatingUserConsent(KaggleObject): - r""" - ApiGatingUserConsent presents GatingUserConsent data for publisher access, - omitting or modifying certain fields as needed such as user_id. 
- - Attributes: - id (int) - agreement_id (int) - user_name (str) - request_data (str) - request_time (datetime) - review_time (datetime) - review_status (GatingAgreementRequestsReviewStatus) - expiry_status (GatingAgreementRequestsExpiryStatus) - expiry_time (datetime) - publisher_notes (str) - """ - - def __init__(self): - self._id = 0 - self._agreement_id = 0 - self._user_name = "" - self._request_data = None - self._request_time = None - self._review_time = None - self._review_status = GatingAgreementRequestsReviewStatus.GATING_AGREEMENT_REQUESTS_REVIEW_STATUS_UNSPECIFIED - self._expiry_status = GatingAgreementRequestsExpiryStatus.GATING_AGREEMENT_REQUESTS_EXPIRY_STATUS_UNSPECIFIED - self._expiry_time = None - self._publisher_notes = None - self._freeze() - - @property - def id(self) -> int: - return self._id - - @id.setter - def id(self, id: int): - if id is None: - del self.id - return - if not isinstance(id, int): - raise TypeError('id must be of type int') - self._id = id - - @property - def agreement_id(self) -> int: - return self._agreement_id - - @agreement_id.setter - def agreement_id(self, agreement_id: int): - if agreement_id is None: - del self.agreement_id - return - if not isinstance(agreement_id, int): - raise TypeError('agreement_id must be of type int') - self._agreement_id = agreement_id - - @property - def user_name(self) -> str: - return self._user_name - - @user_name.setter - def user_name(self, user_name: str): - if user_name is None: - del self.user_name - return - if not isinstance(user_name, str): - raise TypeError('user_name must be of type str') - self._user_name = user_name - - @property - def request_data(self) -> str: - return self._request_data or "" - - @request_data.setter - def request_data(self, request_data: Optional[str]): - if request_data is None: - del self.request_data - return - if not isinstance(request_data, str): - raise TypeError('request_data must be of type str') - self._request_data = request_data - - @property - def request_time(self) -> datetime: - return self._request_time - - @request_time.setter - def request_time(self, request_time: datetime): - if request_time is None: - del self.request_time - return - if not isinstance(request_time, datetime): - raise TypeError('request_time must be of type datetime') - self._request_time = request_time - - @property - def review_time(self) -> datetime: - return self._review_time or None - - @review_time.setter - def review_time(self, review_time: Optional[datetime]): - if review_time is None: - del self.review_time - return - if not isinstance(review_time, datetime): - raise TypeError('review_time must be of type datetime') - self._review_time = review_time - - @property - def review_status(self) -> 'GatingAgreementRequestsReviewStatus': - return self._review_status - - @review_status.setter - def review_status(self, review_status: 'GatingAgreementRequestsReviewStatus'): - if review_status is None: - del self.review_status - return - if not isinstance(review_status, GatingAgreementRequestsReviewStatus): - raise TypeError('review_status must be of type GatingAgreementRequestsReviewStatus') - self._review_status = review_status - - @property - def expiry_status(self) -> 'GatingAgreementRequestsExpiryStatus': - return self._expiry_status - - @expiry_status.setter - def expiry_status(self, expiry_status: 'GatingAgreementRequestsExpiryStatus'): - if expiry_status is None: - del self.expiry_status - return - if not isinstance(expiry_status, GatingAgreementRequestsExpiryStatus): - raise 
TypeError('expiry_status must be of type GatingAgreementRequestsExpiryStatus') - self._expiry_status = expiry_status - - @property - def expiry_time(self) -> datetime: - return self._expiry_time or None - - @expiry_time.setter - def expiry_time(self, expiry_time: Optional[datetime]): - if expiry_time is None: - del self.expiry_time - return - if not isinstance(expiry_time, datetime): - raise TypeError('expiry_time must be of type datetime') - self._expiry_time = expiry_time - - @property - def publisher_notes(self) -> str: - return self._publisher_notes or "" - - @publisher_notes.setter - def publisher_notes(self, publisher_notes: Optional[str]): - if publisher_notes is None: - del self.publisher_notes - return - if not isinstance(publisher_notes, str): - raise TypeError('publisher_notes must be of type str') - self._publisher_notes = publisher_notes - - -class JWK(KaggleObject): - r""" - Attributes: - kty (str) - alg (str) - use (str) - kid (str) - n (str) - modulus - e (str) - public exponent - """ - - def __init__(self): - self._kty = "" - self._alg = "" - self._use = "" - self._kid = "" - self._n = "" - self._e = "" - self._freeze() - - @property - def kty(self) -> str: - return self._kty - - @kty.setter - def kty(self, kty: str): - if kty is None: - del self.kty - return - if not isinstance(kty, str): - raise TypeError('kty must be of type str') - self._kty = kty - - @property - def alg(self) -> str: - return self._alg - - @alg.setter - def alg(self, alg: str): - if alg is None: - del self.alg - return - if not isinstance(alg, str): - raise TypeError('alg must be of type str') - self._alg = alg - - @property - def use(self) -> str: - return self._use - - @use.setter - def use(self, use: str): - if use is None: - del self.use - return - if not isinstance(use, str): - raise TypeError('use must be of type str') - self._use = use - - @property - def kid(self) -> str: - return self._kid - - @kid.setter - def kid(self, kid: str): - if kid is None: - del self.kid - return - if not isinstance(kid, str): - raise TypeError('kid must be of type str') - self._kid = kid - - @property - def n(self) -> str: - """modulus""" - return self._n - - @n.setter - def n(self, n: str): - if n is None: - del self.n - return - if not isinstance(n, str): - raise TypeError('n must be of type str') - self._n = n - - @property - def e(self) -> str: - """public exponent""" - return self._e - - @e.setter - def e(self, e: str): - if e is None: - del self.e - return - if not isinstance(e, str): - raise TypeError('e must be of type str') - self._e = e - - ApiCreateModelInstanceRequest._fields = [ FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), FieldMetadata("modelSlug", "model_slug", "_model_slug", str, "", PredefinedSerializer()), @@ -3761,11 +3761,11 @@ def e(self, e: str): FieldMetadata("fineTunable", "fine_tunable", "_fine_tunable", bool, None, PredefinedSerializer(), optional=True), FieldMetadata("trainingData", "training_data", "_training_data", str, [], ListSerializer(PredefinedSerializer())), FieldMetadata("files", "files", "_files", ApiDatasetNewFile, [], ListSerializer(KaggleObjectSerializer())), - FieldMetadata("directories", "directories", "_directories", ApiUploadDirectoryInfo, [], ListSerializer(KaggleObjectSerializer())), FieldMetadata("licenseName", "license_name", "_license_name", str, None, PredefinedSerializer(), optional=True), FieldMetadata("modelInstanceType", "model_instance_type", "_model_instance_type", ModelInstanceType, None, EnumSerializer(), 
optional=True), FieldMetadata("baseModelInstance", "base_model_instance", "_base_model_instance", str, None, PredefinedSerializer(), optional=True), FieldMetadata("externalBaseModelUrl", "external_base_model_url", "_external_base_model_url", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("directories", "directories", "_directories", ApiUploadDirectoryInfo, [], ListSerializer(KaggleObjectSerializer())), FieldMetadata("sigstore", "sigstore", "_sigstore", bool, None, PredefinedSerializer(), optional=True), ] @@ -3799,8 +3799,8 @@ def e(self, e: str): FieldMetadata("id", "id", "_id", int, None, PredefinedSerializer(), optional=True), FieldMetadata("ref", "ref", "_ref", str, None, PredefinedSerializer(), optional=True), FieldMetadata("error", "error", "_error", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("errorCode", "error_code", "_error_code", int, None, PredefinedSerializer(), optional=True), FieldMetadata("url", "url", "_url", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("errorCode", "error_code", "_error_code", int, None, PredefinedSerializer(), optional=True), ] ApiDeleteModelInstanceRequest._fields = [ @@ -3836,6 +3836,19 @@ def e(self, e: str): FieldMetadata("path", "path", "_path", str, None, PredefinedSerializer(), optional=True), ] +ApiGatingUserConsent._fields = [ + FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), + FieldMetadata("agreementId", "agreement_id", "_agreement_id", int, 0, PredefinedSerializer()), + FieldMetadata("userName", "user_name", "_user_name", str, "", PredefinedSerializer()), + FieldMetadata("requestData", "request_data", "_request_data", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("requestTime", "request_time", "_request_time", datetime, None, DateTimeSerializer()), + FieldMetadata("reviewTime", "review_time", "_review_time", datetime, None, DateTimeSerializer(), optional=True), + FieldMetadata("reviewStatus", "review_status", "_review_status", GatingAgreementRequestsReviewStatus, GatingAgreementRequestsReviewStatus.GATING_AGREEMENT_REQUESTS_REVIEW_STATUS_UNSPECIFIED, EnumSerializer()), + FieldMetadata("expiryStatus", "expiry_status", "_expiry_status", GatingAgreementRequestsExpiryStatus, GatingAgreementRequestsExpiryStatus.GATING_AGREEMENT_REQUESTS_EXPIRY_STATUS_UNSPECIFIED, EnumSerializer()), + FieldMetadata("expiryTime", "expiry_time", "_expiry_time", datetime, None, DateTimeSerializer(), optional=True), + FieldMetadata("publisherNotes", "publisher_notes", "_publisher_notes", str, None, PredefinedSerializer(), optional=True), +] + ApiGetModelInstanceRequest._fields = [ FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), FieldMetadata("modelSlug", "model_slug", "_model_slug", str, "", PredefinedSerializer()), @@ -4027,6 +4040,15 @@ def e(self, e: str): FieldMetadata("id_token", "id_token", "_id_token", str, "", PredefinedSerializer()), ] +JWK._fields = [ + FieldMetadata("kty", "kty", "_kty", str, "", PredefinedSerializer()), + FieldMetadata("alg", "alg", "_alg", str, "", PredefinedSerializer()), + FieldMetadata("use", "use", "_use", str, "", PredefinedSerializer()), + FieldMetadata("kid", "kid", "_kid", str, "", PredefinedSerializer()), + FieldMetadata("n", "n", "_n", str, "", PredefinedSerializer()), + FieldMetadata("e", "e", "_e", str, "", PredefinedSerializer()), +] + KeysRequest._fields = [] KeysResponse._fields = [ @@ -4045,25 +4067,3 @@ def e(self, e: str): FieldMetadata("subject_types_supported", 
"subject_types_supported", "_subject_types_supported", str, [], ListSerializer(PredefinedSerializer())), ] -ApiGatingUserConsent._fields = [ - FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), - FieldMetadata("agreementId", "agreement_id", "_agreement_id", int, 0, PredefinedSerializer()), - FieldMetadata("userName", "user_name", "_user_name", str, "", PredefinedSerializer()), - FieldMetadata("requestData", "request_data", "_request_data", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("requestTime", "request_time", "_request_time", datetime, None, DateTimeSerializer()), - FieldMetadata("reviewTime", "review_time", "_review_time", datetime, None, DateTimeSerializer(), optional=True), - FieldMetadata("reviewStatus", "review_status", "_review_status", GatingAgreementRequestsReviewStatus, GatingAgreementRequestsReviewStatus.GATING_AGREEMENT_REQUESTS_REVIEW_STATUS_UNSPECIFIED, EnumSerializer()), - FieldMetadata("expiryStatus", "expiry_status", "_expiry_status", GatingAgreementRequestsExpiryStatus, GatingAgreementRequestsExpiryStatus.GATING_AGREEMENT_REQUESTS_EXPIRY_STATUS_UNSPECIFIED, EnumSerializer()), - FieldMetadata("expiryTime", "expiry_time", "_expiry_time", datetime, None, DateTimeSerializer(), optional=True), - FieldMetadata("publisherNotes", "publisher_notes", "_publisher_notes", str, None, PredefinedSerializer(), optional=True), -] - -JWK._fields = [ - FieldMetadata("kty", "kty", "_kty", str, "", PredefinedSerializer()), - FieldMetadata("alg", "alg", "_alg", str, "", PredefinedSerializer()), - FieldMetadata("use", "use", "_use", str, "", PredefinedSerializer()), - FieldMetadata("kid", "kid", "_kid", str, "", PredefinedSerializer()), - FieldMetadata("n", "n", "_n", str, "", PredefinedSerializer()), - FieldMetadata("e", "e", "_e", str, "", PredefinedSerializer()), -] - diff --git a/src/kagglesdk/models/types/model_service.py b/src/kagglesdk/models/types/model_service.py index c68b0000..5af7e475 100644 --- a/src/kagglesdk/models/types/model_service.py +++ b/src/kagglesdk/models/types/model_service.py @@ -93,49 +93,47 @@ def metrics(self, metrics: Optional[List[Optional['ModelMetric']]]): self._metrics = metrics -class ModelMetric(KaggleObject): +class ModelInstanceMetric(KaggleObject): r""" Attributes: - date (str) - views (int) + variation (str) + framework (ModelFramework) downloads (int) notebooks (int) - instances (ModelInstanceMetric) """ def __init__(self): - self._date = "" - self._views = 0 + self._variation = "" + self._framework = ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED self._downloads = 0 self._notebooks = 0 - self._instances = [] self._freeze() @property - def date(self) -> str: - return self._date + def variation(self) -> str: + return self._variation - @date.setter - def date(self, date: str): - if date is None: - del self.date + @variation.setter + def variation(self, variation: str): + if variation is None: + del self.variation return - if not isinstance(date, str): - raise TypeError('date must be of type str') - self._date = date + if not isinstance(variation, str): + raise TypeError('variation must be of type str') + self._variation = variation @property - def views(self) -> int: - return self._views + def framework(self) -> 'ModelFramework': + return self._framework - @views.setter - def views(self, views: int): - if views is None: - del self.views + @framework.setter + def framework(self, framework: 'ModelFramework'): + if framework is None: + del self.framework return - if not isinstance(views, int): - raise TypeError('views must 
be of type int') - self._views = views + if not isinstance(framework, ModelFramework): + raise TypeError('framework must be of type ModelFramework') + self._framework = framework @property def downloads(self) -> int: @@ -163,63 +161,50 @@ def notebooks(self, notebooks: int): raise TypeError('notebooks must be of type int') self._notebooks = notebooks - @property - def instances(self) -> Optional[List[Optional['ModelInstanceMetric']]]: - return self._instances - @instances.setter - def instances(self, instances: Optional[List[Optional['ModelInstanceMetric']]]): - if instances is None: - del self.instances - return - if not isinstance(instances, list): - raise TypeError('instances must be of type list') - if not all([isinstance(t, ModelInstanceMetric) for t in instances]): - raise TypeError('instances must contain only items of type ModelInstanceMetric') - self._instances = instances - - -class ModelInstanceMetric(KaggleObject): +class ModelMetric(KaggleObject): r""" Attributes: - variation (str) - framework (ModelFramework) + date (str) + views (int) downloads (int) notebooks (int) + instances (ModelInstanceMetric) """ def __init__(self): - self._variation = "" - self._framework = ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED + self._date = "" + self._views = 0 self._downloads = 0 self._notebooks = 0 + self._instances = [] self._freeze() @property - def variation(self) -> str: - return self._variation + def date(self) -> str: + return self._date - @variation.setter - def variation(self, variation: str): - if variation is None: - del self.variation + @date.setter + def date(self, date: str): + if date is None: + del self.date return - if not isinstance(variation, str): - raise TypeError('variation must be of type str') - self._variation = variation + if not isinstance(date, str): + raise TypeError('date must be of type str') + self._date = date @property - def framework(self) -> 'ModelFramework': - return self._framework + def views(self) -> int: + return self._views - @framework.setter - def framework(self, framework: 'ModelFramework'): - if framework is None: - del self.framework + @views.setter + def views(self, views: int): + if views is None: + del self.views return - if not isinstance(framework, ModelFramework): - raise TypeError('framework must be of type ModelFramework') - self._framework = framework + if not isinstance(views, int): + raise TypeError('views must be of type int') + self._views = views @property def downloads(self) -> int: @@ -247,6 +232,21 @@ def notebooks(self, notebooks: int): raise TypeError('notebooks must be of type int') self._notebooks = notebooks + @property + def instances(self) -> Optional[List[Optional['ModelInstanceMetric']]]: + return self._instances + + @instances.setter + def instances(self, instances: Optional[List[Optional['ModelInstanceMetric']]]): + if instances is None: + del self.instances + return + if not isinstance(instances, list): + raise TypeError('instances must be of type list') + if not all([isinstance(t, ModelInstanceMetric) for t in instances]): + raise TypeError('instances must contain only items of type ModelInstanceMetric') + self._instances = instances + GetModelMetricsRequest._fields = [ FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), @@ -258,18 +258,18 @@ def notebooks(self, notebooks: int): FieldMetadata("metrics", "metrics", "_metrics", ModelMetric, [], ListSerializer(KaggleObjectSerializer())), ] -ModelMetric._fields = [ - FieldMetadata("date", "date", "_date", str, "", 
PredefinedSerializer()), - FieldMetadata("views", "views", "_views", int, 0, PredefinedSerializer()), +ModelInstanceMetric._fields = [ + FieldMetadata("variation", "variation", "_variation", str, "", PredefinedSerializer()), + FieldMetadata("framework", "framework", "_framework", ModelFramework, ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED, EnumSerializer()), FieldMetadata("downloads", "downloads", "_downloads", int, 0, PredefinedSerializer()), FieldMetadata("notebooks", "notebooks", "_notebooks", int, 0, PredefinedSerializer()), - FieldMetadata("instances", "instances", "_instances", ModelInstanceMetric, [], ListSerializer(KaggleObjectSerializer())), ] -ModelInstanceMetric._fields = [ - FieldMetadata("variation", "variation", "_variation", str, "", PredefinedSerializer()), - FieldMetadata("framework", "framework", "_framework", ModelFramework, ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED, EnumSerializer()), +ModelMetric._fields = [ + FieldMetadata("date", "date", "_date", str, "", PredefinedSerializer()), + FieldMetadata("views", "views", "_views", int, 0, PredefinedSerializer()), FieldMetadata("downloads", "downloads", "_downloads", int, 0, PredefinedSerializer()), FieldMetadata("notebooks", "notebooks", "_notebooks", int, 0, PredefinedSerializer()), + FieldMetadata("instances", "instances", "_instances", ModelInstanceMetric, [], ListSerializer(KaggleObjectSerializer())), ] diff --git a/src/kagglesdk/models/types/model_types.py b/src/kagglesdk/models/types/model_types.py index e2f8c733..d89c686d 100644 --- a/src/kagglesdk/models/types/model_types.py +++ b/src/kagglesdk/models/types/model_types.py @@ -91,19 +91,95 @@ def framework(self, framework: 'ModelFramework'): self._framework = framework +class LicensePost(KaggleObject): + r""" + Attributes: + id (int) + content (str) + """ + + def __init__(self): + self._id = 0 + self._content = "" + self._freeze() + + @property + def id(self) -> int: + return self._id + + @id.setter + def id(self, id: int): + if id is None: + del self.id + return + if not isinstance(id, int): + raise TypeError('id must be of type int') + self._id = id + + @property + def content(self) -> str: + return self._content + + @content.setter + def content(self, content: str): + if content is None: + del self.content + return + if not isinstance(content, str): + raise TypeError('content must be of type str') + self._content = content + + +class ModelActivityTimeSeriesPoint(KaggleObject): + r""" + Attributes: + date (datetime) + count (int) + """ + + def __init__(self): + self._date = None + self._count = 0 + self._freeze() + + @property + def date(self) -> datetime: + return self._date + + @date.setter + def date(self, date: datetime): + if date is None: + del self.date + return + if not isinstance(date, datetime): + raise TypeError('date must be of type datetime') + self._date = date + + @property + def count(self) -> int: + return self._count + + @count.setter + def count(self, count: int): + if count is None: + del self.count + return + if not isinstance(count, int): + raise TypeError('count must be of type int') + self._count = count + + class ModelInstance(KaggleObject): r""" Attributes: id (int) owner_slug (str) model_slug (str) - model_id (int) slug (str) version_id (int) fine_tunable (bool) overview (str) usage (str) - rendered_usage (str) text_representation (str) source_url (str) version_number (int) @@ -112,26 +188,28 @@ class ModelInstance(KaggleObject): finished taking shape version_notes (str) download_url (str) - databundle_id (int) databundle_version_id (int) 
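A minimal usage sketch (not part of the generated file): after this hunk ModelInstanceMetric is declared before ModelMetric, but the nesting is unchanged — a ModelMetric still carries per-day totals plus a list of ModelInstanceMetric entries, and the setters above type-check every assignment. The import path follows the file touched here (src/kagglesdk/models/types/model_service.py); all literal values are hypothetical.

from kagglesdk.models.types.model_service import ModelInstanceMetric, ModelMetric

# Hypothetical per-variation metric; the generated setters reject non-str / non-int values.
instance_metric = ModelInstanceMetric()
instance_metric.variation = "default"
instance_metric.downloads = 42
instance_metric.notebooks = 3

# Hypothetical per-day metric that aggregates the instance-level entries.
day_metric = ModelMetric()
day_metric.date = "2024-01-01"
day_metric.views = 120
day_metric.instances = [instance_metric]  # list items must be ModelInstanceMetric, else TypeError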
- databundle_version_type (DatabundleVersionType) - firestore_path (str) - status (DatabundleVersionStatus) - creation_status (DatabundleVersionCreationStatus) - error_message (str) last_version_id (int) Version ID associated with the most up-to-date version of the ModelInstance source_organization (Owner) training_data (str) metrics (str) license_post (LicensePost) + rendered_usage (str) license (License) + databundle_id (int) + firestore_path (str) + status (DatabundleVersionStatus) + error_message (str) + databundle_version_type (DatabundleVersionType) can_use (bool) + creation_status (DatabundleVersionCreationStatus) uncompressed_storage_uri (str) model_instance_type (ModelInstanceType) base_model_instance_id (int) base_model_instance_information (BaseModelInstanceInformation) external_base_model_url (str) + model_id (int) download_summary (ModelInstanceDownloadSummary) total_uncompressed_bytes (int) sigstore_state (SigstoreState) @@ -144,38 +222,38 @@ def __init__(self): self._id = 0 self._owner_slug = "" self._model_slug = "" - self._model_id = 0 self._slug = "" self._version_id = 0 self._fine_tunable = None self._overview = "" self._usage = "" - self._rendered_usage = "" self._text_representation = None self._source_url = None self._version_number = 0 self._framework = ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED self._version_notes = "" self._download_url = None - self._databundle_id = 0 self._databundle_version_id = 0 - self._databundle_version_type = DatabundleVersionType.DATABUNDLE_VERSION_TYPE_UNSPECIFIED - self._firestore_path = "" - self._status = DatabundleVersionStatus.NOT_YET_PERSISTED - self._creation_status = None - self._error_message = None self._last_version_id = None self._source_organization = None self._training_data = [] self._metrics = None self._license_post = None + self._rendered_usage = "" self._license = None + self._databundle_id = 0 + self._firestore_path = "" + self._status = DatabundleVersionStatus.NOT_YET_PERSISTED + self._error_message = None + self._databundle_version_type = DatabundleVersionType.DATABUNDLE_VERSION_TYPE_UNSPECIFIED self._can_use = None + self._creation_status = None self._uncompressed_storage_uri = None self._model_instance_type = None self._base_model_instance_id = None self._base_model_instance_information = None self._external_base_model_url = None + self._model_id = 0 self._download_summary = None self._total_uncompressed_bytes = None self._sigstore_state = None @@ -781,56 +859,72 @@ def model_instance_id(self, model_instance_id: int): self._model_instance_id = model_instance_id -class ModelInstanceVersionList(KaggleObject): +class ModelInstanceVersion(KaggleObject): r""" Attributes: - versions (ModelInstanceVersion) + id (int) + framework (ModelFramework) + is_tfhub_model (bool) + url (str) + variation_slug (str) + version_number (int) + model_title (str) + thumbnail_url (str) + is_private (bool) + sigstore_state (SigstoreState) """ def __init__(self): - self._versions = [] + self._id = 0 + self._framework = ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED + self._is_tfhub_model = False + self._url = "" + self._variation_slug = "" + self._version_number = 0 + self._model_title = "" + self._thumbnail_url = "" + self._is_private = False + self._sigstore_state = SigstoreState.SIGSTORE_STATE_UNSPECIFIED self._freeze() @property - def versions(self) -> Optional[List[Optional['ModelInstanceVersion']]]: - return self._versions + def id(self) -> int: + return self._id - @versions.setter - def versions(self, versions: 
Optional[List[Optional['ModelInstanceVersion']]]): - if versions is None: - del self.versions + @id.setter + def id(self, id: int): + if id is None: + del self.id return - if not isinstance(versions, list): - raise TypeError('versions must be of type list') - if not all([isinstance(t, ModelInstanceVersion) for t in versions]): - raise TypeError('versions must contain only items of type ModelInstanceVersion') - self._versions = versions - + if not isinstance(id, int): + raise TypeError('id must be of type int') + self._id = id -class ModelLink(KaggleObject): - r""" - Attributes: - type (ModelVersionLinkType) - url (str) - """ + @property + def framework(self) -> 'ModelFramework': + return self._framework - def __init__(self): - self._type = ModelVersionLinkType.MODEL_VERSION_LINK_TYPE_UNSPECIFIED - self._url = "" - self._freeze() + @framework.setter + def framework(self, framework: 'ModelFramework'): + if framework is None: + del self.framework + return + if not isinstance(framework, ModelFramework): + raise TypeError('framework must be of type ModelFramework') + self._framework = framework @property - def type(self) -> 'ModelVersionLinkType': - return self._type + def is_tfhub_model(self) -> bool: + return self._is_tfhub_model - @type.setter - def type(self, type: 'ModelVersionLinkType'): - if type is None: - del self.type + @is_tfhub_model.setter + def is_tfhub_model(self, is_tfhub_model: bool): + if is_tfhub_model is None: + del self.is_tfhub_model return - if not isinstance(type, ModelVersionLinkType): - raise TypeError('type must be of type ModelVersionLinkType') - self._type = type + if not isinstance(is_tfhub_model, bool): + raise TypeError('is_tfhub_model must be of type bool') + self._is_tfhub_model = is_tfhub_model @property def url(self) -> str: @@ -845,55 +939,198 @@ def url(self, url: str): raise TypeError('url must be of type str') self._url = url + @property + def variation_slug(self) -> str: + return self._variation_slug -class Owner(KaggleObject): - r""" - Based off Datasets OwnerDto as the permission model is the same - Separate message since Models don't have max_file_size_bytes. - Consider renaming more generically to apply to Users/Orgs - interchangeably without a strict concept of ownership - - Attributes: - id (int) - image_url (str) - is_organization (bool) - name (str) - profile_url (str) - slug (str) - user_tier (UserAchievementTier) - user_progression_opt_out (bool) - Whether or not the owner is progression opted-out (only for user owners). 
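A small illustrative sketch of the Owner card these hunks reorder (swapping allow_model_gating and user_progression_opt_out only changes field order, not construction). The module path is taken from this file (src/kagglesdk/models/types/model_types.py); values are hypothetical and enum-typed fields are left at their defaults.

from kagglesdk.models.types.model_types import Owner

owner = Owner()
owner.name = "Example Org"   # hypothetical display name
owner.slug = "example-org"   # hypothetical slug
owner.is_organization = True

print(owner.image_url)  # "" — the optional image_url getter falls back to an empty string when unset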
- allow_model_gating (bool) - """ - - def __init__(self): - self._id = 0 - self._image_url = None - self._is_organization = False - self._name = "" - self._profile_url = None - self._slug = "" - self._user_tier = UserAchievementTier.NOVICE - self._user_progression_opt_out = None - self._allow_model_gating = None - self._freeze() + @variation_slug.setter + def variation_slug(self, variation_slug: str): + if variation_slug is None: + del self.variation_slug + return + if not isinstance(variation_slug, str): + raise TypeError('variation_slug must be of type str') + self._variation_slug = variation_slug @property - def id(self) -> int: - return self._id + def version_number(self) -> int: + return self._version_number - @id.setter - def id(self, id: int): - if id is None: - del self.id + @version_number.setter + def version_number(self, version_number: int): + if version_number is None: + del self.version_number return - if not isinstance(id, int): - raise TypeError('id must be of type int') - self._id = id + if not isinstance(version_number, int): + raise TypeError('version_number must be of type int') + self._version_number = version_number @property - def image_url(self) -> str: - return self._image_url or "" + def model_title(self) -> str: + return self._model_title + + @model_title.setter + def model_title(self, model_title: str): + if model_title is None: + del self.model_title + return + if not isinstance(model_title, str): + raise TypeError('model_title must be of type str') + self._model_title = model_title + + @property + def thumbnail_url(self) -> str: + return self._thumbnail_url + + @thumbnail_url.setter + def thumbnail_url(self, thumbnail_url: str): + if thumbnail_url is None: + del self.thumbnail_url + return + if not isinstance(thumbnail_url, str): + raise TypeError('thumbnail_url must be of type str') + self._thumbnail_url = thumbnail_url + + @property + def is_private(self) -> bool: + return self._is_private + + @is_private.setter + def is_private(self, is_private: bool): + if is_private is None: + del self.is_private + return + if not isinstance(is_private, bool): + raise TypeError('is_private must be of type bool') + self._is_private = is_private + + @property + def sigstore_state(self) -> 'SigstoreState': + return self._sigstore_state + + @sigstore_state.setter + def sigstore_state(self, sigstore_state: 'SigstoreState'): + if sigstore_state is None: + del self.sigstore_state + return + if not isinstance(sigstore_state, SigstoreState): + raise TypeError('sigstore_state must be of type SigstoreState') + self._sigstore_state = sigstore_state + + +class ModelInstanceVersionList(KaggleObject): + r""" + Attributes: + versions (ModelInstanceVersion) + """ + + def __init__(self): + self._versions = [] + self._freeze() + + @property + def versions(self) -> Optional[List[Optional['ModelInstanceVersion']]]: + return self._versions + + @versions.setter + def versions(self, versions: Optional[List[Optional['ModelInstanceVersion']]]): + if versions is None: + del self.versions + return + if not isinstance(versions, list): + raise TypeError('versions must be of type list') + if not all([isinstance(t, ModelInstanceVersion) for t in versions]): + raise TypeError('versions must contain only items of type ModelInstanceVersion') + self._versions = versions + + +class ModelLink(KaggleObject): + r""" + Attributes: + type (ModelVersionLinkType) + url (str) + """ + + def __init__(self): + self._type = ModelVersionLinkType.MODEL_VERSION_LINK_TYPE_UNSPECIFIED + self._url = "" + self._freeze() + + 
@property + def type(self) -> 'ModelVersionLinkType': + return self._type + + @type.setter + def type(self, type: 'ModelVersionLinkType'): + if type is None: + del self.type + return + if not isinstance(type, ModelVersionLinkType): + raise TypeError('type must be of type ModelVersionLinkType') + self._type = type + + @property + def url(self) -> str: + return self._url + + @url.setter + def url(self, url: str): + if url is None: + del self.url + return + if not isinstance(url, str): + raise TypeError('url must be of type str') + self._url = url + + +class Owner(KaggleObject): + r""" + Based off Datasets OwnerDto as the permission model is the same + Separate message since Models don't have max_file_size_bytes. + Consider renaming more generically to apply to Users/Orgs + interchangeably without a strict concept of ownership + + Attributes: + id (int) + image_url (str) + is_organization (bool) + name (str) + profile_url (str) + slug (str) + user_tier (UserAchievementTier) + allow_model_gating (bool) + user_progression_opt_out (bool) + Whether or not the owner is progression opted-out (only for user owners). + """ + + def __init__(self): + self._id = 0 + self._image_url = None + self._is_organization = False + self._name = "" + self._profile_url = None + self._slug = "" + self._user_tier = UserAchievementTier.NOVICE + self._allow_model_gating = None + self._user_progression_opt_out = None + self._freeze() + + @property + def id(self) -> int: + return self._id + + @id.setter + def id(self, id: int): + if id is None: + del self.id + return + if not isinstance(id, int): + raise TypeError('id must be of type int') + self._id = id + + @property + def image_url(self) -> str: + return self._image_url or "" @image_url.setter def image_url(self, image_url: Optional[str]): @@ -997,243 +1234,6 @@ def allow_model_gating(self, allow_model_gating: Optional[bool]): self._allow_model_gating = allow_model_gating -class LicensePost(KaggleObject): - r""" - Attributes: - id (int) - content (str) - """ - - def __init__(self): - self._id = 0 - self._content = "" - self._freeze() - - @property - def id(self) -> int: - return self._id - - @id.setter - def id(self, id: int): - if id is None: - del self.id - return - if not isinstance(id, int): - raise TypeError('id must be of type int') - self._id = id - - @property - def content(self) -> str: - return self._content - - @content.setter - def content(self, content: str): - if content is None: - del self.content - return - if not isinstance(content, str): - raise TypeError('content must be of type str') - self._content = content - - -class ModelActivityTimeSeriesPoint(KaggleObject): - r""" - Attributes: - date (datetime) - count (int) - """ - - def __init__(self): - self._date = None - self._count = 0 - self._freeze() - - @property - def date(self) -> datetime: - return self._date - - @date.setter - def date(self, date: datetime): - if date is None: - del self.date - return - if not isinstance(date, datetime): - raise TypeError('date must be of type datetime') - self._date = date - - @property - def count(self) -> int: - return self._count - - @count.setter - def count(self, count: int): - if count is None: - del self.count - return - if not isinstance(count, int): - raise TypeError('count must be of type int') - self._count = count - - -class ModelInstanceVersion(KaggleObject): - r""" - Attributes: - id (int) - framework (ModelFramework) - is_tfhub_model (bool) - url (str) - variation_slug (str) - version_number (int) - model_title (str) - thumbnail_url (str) - 
is_private (bool) - sigstore_state (SigstoreState) - """ - - def __init__(self): - self._id = 0 - self._framework = ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED - self._is_tfhub_model = False - self._url = "" - self._variation_slug = "" - self._version_number = 0 - self._model_title = "" - self._thumbnail_url = "" - self._is_private = False - self._sigstore_state = SigstoreState.SIGSTORE_STATE_UNSPECIFIED - self._freeze() - - @property - def id(self) -> int: - return self._id - - @id.setter - def id(self, id: int): - if id is None: - del self.id - return - if not isinstance(id, int): - raise TypeError('id must be of type int') - self._id = id - - @property - def framework(self) -> 'ModelFramework': - return self._framework - - @framework.setter - def framework(self, framework: 'ModelFramework'): - if framework is None: - del self.framework - return - if not isinstance(framework, ModelFramework): - raise TypeError('framework must be of type ModelFramework') - self._framework = framework - - @property - def is_tfhub_model(self) -> bool: - return self._is_tfhub_model - - @is_tfhub_model.setter - def is_tfhub_model(self, is_tfhub_model: bool): - if is_tfhub_model is None: - del self.is_tfhub_model - return - if not isinstance(is_tfhub_model, bool): - raise TypeError('is_tfhub_model must be of type bool') - self._is_tfhub_model = is_tfhub_model - - @property - def url(self) -> str: - return self._url - - @url.setter - def url(self, url: str): - if url is None: - del self.url - return - if not isinstance(url, str): - raise TypeError('url must be of type str') - self._url = url - - @property - def variation_slug(self) -> str: - return self._variation_slug - - @variation_slug.setter - def variation_slug(self, variation_slug: str): - if variation_slug is None: - del self.variation_slug - return - if not isinstance(variation_slug, str): - raise TypeError('variation_slug must be of type str') - self._variation_slug = variation_slug - - @property - def version_number(self) -> int: - return self._version_number - - @version_number.setter - def version_number(self, version_number: int): - if version_number is None: - del self.version_number - return - if not isinstance(version_number, int): - raise TypeError('version_number must be of type int') - self._version_number = version_number - - @property - def model_title(self) -> str: - return self._model_title - - @model_title.setter - def model_title(self, model_title: str): - if model_title is None: - del self.model_title - return - if not isinstance(model_title, str): - raise TypeError('model_title must be of type str') - self._model_title = model_title - - @property - def thumbnail_url(self) -> str: - return self._thumbnail_url - - @thumbnail_url.setter - def thumbnail_url(self, thumbnail_url: str): - if thumbnail_url is None: - del self.thumbnail_url - return - if not isinstance(thumbnail_url, str): - raise TypeError('thumbnail_url must be of type str') - self._thumbnail_url = thumbnail_url - - @property - def is_private(self) -> bool: - return self._is_private - - @is_private.setter - def is_private(self, is_private: bool): - if is_private is None: - del self.is_private - return - if not isinstance(is_private, bool): - raise TypeError('is_private must be of type bool') - self._is_private = is_private - - @property - def sigstore_state(self) -> 'SigstoreState': - return self._sigstore_state - - @sigstore_state.setter - def sigstore_state(self, sigstore_state: 'SigstoreState'): - if sigstore_state is None: - del self.sigstore_state - return - if not 
isinstance(sigstore_state, SigstoreState): - raise TypeError('sigstore_state must be of type SigstoreState') - self._sigstore_state = sigstore_state - - BaseModelInstanceInformation._fields = [ FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), FieldMetadata("owner", "owner", "_owner", Owner, None, KaggleObjectSerializer()), @@ -1242,42 +1242,52 @@ def sigstore_state(self, sigstore_state: 'SigstoreState'): FieldMetadata("framework", "framework", "_framework", ModelFramework, ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED, EnumSerializer()), ] +LicensePost._fields = [ + FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), + FieldMetadata("content", "content", "_content", str, "", PredefinedSerializer()), +] + +ModelActivityTimeSeriesPoint._fields = [ + FieldMetadata("date", "date", "_date", datetime, None, DateTimeSerializer()), + FieldMetadata("count", "count", "_count", int, 0, PredefinedSerializer()), +] + ModelInstance._fields = [ FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), FieldMetadata("modelSlug", "model_slug", "_model_slug", str, "", PredefinedSerializer()), - FieldMetadata("modelId", "model_id", "_model_id", int, 0, PredefinedSerializer()), FieldMetadata("slug", "slug", "_slug", str, "", PredefinedSerializer()), FieldMetadata("versionId", "version_id", "_version_id", int, 0, PredefinedSerializer()), FieldMetadata("fineTunable", "fine_tunable", "_fine_tunable", bool, None, PredefinedSerializer(), optional=True), FieldMetadata("overview", "overview", "_overview", str, "", PredefinedSerializer()), FieldMetadata("usage", "usage", "_usage", str, "", PredefinedSerializer()), - FieldMetadata("renderedUsage", "rendered_usage", "_rendered_usage", str, "", PredefinedSerializer()), FieldMetadata("textRepresentation", "text_representation", "_text_representation", str, None, PredefinedSerializer(), optional=True), FieldMetadata("sourceUrl", "source_url", "_source_url", str, None, PredefinedSerializer(), optional=True), FieldMetadata("versionNumber", "version_number", "_version_number", int, 0, PredefinedSerializer()), FieldMetadata("framework", "framework", "_framework", ModelFramework, ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED, EnumSerializer()), FieldMetadata("versionNotes", "version_notes", "_version_notes", str, "", PredefinedSerializer()), FieldMetadata("downloadUrl", "download_url", "_download_url", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("databundleId", "databundle_id", "_databundle_id", int, 0, PredefinedSerializer()), FieldMetadata("databundleVersionId", "databundle_version_id", "_databundle_version_id", int, 0, PredefinedSerializer()), - FieldMetadata("databundleVersionType", "databundle_version_type", "_databundle_version_type", DatabundleVersionType, DatabundleVersionType.DATABUNDLE_VERSION_TYPE_UNSPECIFIED, EnumSerializer()), - FieldMetadata("firestorePath", "firestore_path", "_firestore_path", str, "", PredefinedSerializer()), - FieldMetadata("status", "status", "_status", DatabundleVersionStatus, DatabundleVersionStatus.NOT_YET_PERSISTED, EnumSerializer()), - FieldMetadata("creationStatus", "creation_status", "_creation_status", DatabundleVersionCreationStatus, None, KaggleObjectSerializer(), optional=True), - FieldMetadata("errorMessage", "error_message", "_error_message", str, None, PredefinedSerializer(), optional=True), FieldMetadata("lastVersionId", "last_version_id", "_last_version_id", int, None, 
PredefinedSerializer(), optional=True), FieldMetadata("sourceOrganization", "source_organization", "_source_organization", Owner, None, KaggleObjectSerializer(), optional=True), FieldMetadata("trainingData", "training_data", "_training_data", str, [], ListSerializer(PredefinedSerializer())), FieldMetadata("metrics", "metrics", "_metrics", str, None, PredefinedSerializer(), optional=True), FieldMetadata("licensePost", "license_post", "_license_post", LicensePost, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("renderedUsage", "rendered_usage", "_rendered_usage", str, "", PredefinedSerializer()), FieldMetadata("license", "license", "_license", License, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("databundleId", "databundle_id", "_databundle_id", int, 0, PredefinedSerializer()), + FieldMetadata("firestorePath", "firestore_path", "_firestore_path", str, "", PredefinedSerializer()), + FieldMetadata("status", "status", "_status", DatabundleVersionStatus, DatabundleVersionStatus.NOT_YET_PERSISTED, EnumSerializer()), + FieldMetadata("errorMessage", "error_message", "_error_message", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("databundleVersionType", "databundle_version_type", "_databundle_version_type", DatabundleVersionType, DatabundleVersionType.DATABUNDLE_VERSION_TYPE_UNSPECIFIED, EnumSerializer()), FieldMetadata("canUse", "can_use", "_can_use", bool, None, PredefinedSerializer(), optional=True), + FieldMetadata("creationStatus", "creation_status", "_creation_status", DatabundleVersionCreationStatus, None, KaggleObjectSerializer(), optional=True), FieldMetadata("uncompressedStorageUri", "uncompressed_storage_uri", "_uncompressed_storage_uri", str, None, PredefinedSerializer(), optional=True), FieldMetadata("modelInstanceType", "model_instance_type", "_model_instance_type", ModelInstanceType, None, EnumSerializer(), optional=True), FieldMetadata("baseModelInstanceId", "base_model_instance_id", "_base_model_instance_id", int, None, PredefinedSerializer(), optional=True), FieldMetadata("baseModelInstanceInformation", "base_model_instance_information", "_base_model_instance_information", BaseModelInstanceInformation, None, KaggleObjectSerializer(), optional=True), FieldMetadata("externalBaseModelUrl", "external_base_model_url", "_external_base_model_url", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("modelId", "model_id", "_model_id", int, 0, PredefinedSerializer()), FieldMetadata("downloadSummary", "download_summary", "_download_summary", ModelInstanceDownloadSummary, None, KaggleObjectSerializer(), optional=True), FieldMetadata("totalUncompressedBytes", "total_uncompressed_bytes", "_total_uncompressed_bytes", int, None, PredefinedSerializer(), optional=True), FieldMetadata("sigstoreState", "sigstore_state", "_sigstore_state", SigstoreState, None, EnumSerializer(), optional=True), @@ -1292,6 +1302,19 @@ def sigstore_state(self, sigstore_state: 'SigstoreState'): FieldMetadata("modelInstanceId", "model_instance_id", "_model_instance_id", int, 0, PredefinedSerializer()), ] +ModelInstanceVersion._fields = [ + FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), + FieldMetadata("framework", "framework", "_framework", ModelFramework, ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED, EnumSerializer()), + FieldMetadata("isTfhubModel", "is_tfhub_model", "_is_tfhub_model", bool, False, PredefinedSerializer()), + FieldMetadata("url", "url", "_url", str, "", PredefinedSerializer()), + FieldMetadata("variationSlug", 
"variation_slug", "_variation_slug", str, "", PredefinedSerializer()), + FieldMetadata("versionNumber", "version_number", "_version_number", int, 0, PredefinedSerializer()), + FieldMetadata("modelTitle", "model_title", "_model_title", str, "", PredefinedSerializer()), + FieldMetadata("thumbnailUrl", "thumbnail_url", "_thumbnail_url", str, "", PredefinedSerializer()), + FieldMetadata("isPrivate", "is_private", "_is_private", bool, False, PredefinedSerializer()), + FieldMetadata("sigstoreState", "sigstore_state", "_sigstore_state", SigstoreState, SigstoreState.SIGSTORE_STATE_UNSPECIFIED, EnumSerializer()), +] + ModelInstanceVersionList._fields = [ FieldMetadata("versions", "versions", "_versions", ModelInstanceVersion, [], ListSerializer(KaggleObjectSerializer())), ] @@ -1309,30 +1332,7 @@ def sigstore_state(self, sigstore_state: 'SigstoreState'): FieldMetadata("profileUrl", "profile_url", "_profile_url", str, None, PredefinedSerializer(), optional=True), FieldMetadata("slug", "slug", "_slug", str, "", PredefinedSerializer()), FieldMetadata("userTier", "user_tier", "_user_tier", UserAchievementTier, UserAchievementTier.NOVICE, EnumSerializer()), - FieldMetadata("userProgressionOptOut", "user_progression_opt_out", "_user_progression_opt_out", bool, None, PredefinedSerializer(), optional=True), FieldMetadata("allowModelGating", "allow_model_gating", "_allow_model_gating", bool, None, PredefinedSerializer(), optional=True), -] - -LicensePost._fields = [ - FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), - FieldMetadata("content", "content", "_content", str, "", PredefinedSerializer()), -] - -ModelActivityTimeSeriesPoint._fields = [ - FieldMetadata("date", "date", "_date", datetime, None, DateTimeSerializer()), - FieldMetadata("count", "count", "_count", int, 0, PredefinedSerializer()), -] - -ModelInstanceVersion._fields = [ - FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), - FieldMetadata("framework", "framework", "_framework", ModelFramework, ModelFramework.MODEL_FRAMEWORK_UNSPECIFIED, EnumSerializer()), - FieldMetadata("isTfhubModel", "is_tfhub_model", "_is_tfhub_model", bool, False, PredefinedSerializer()), - FieldMetadata("url", "url", "_url", str, "", PredefinedSerializer()), - FieldMetadata("variationSlug", "variation_slug", "_variation_slug", str, "", PredefinedSerializer()), - FieldMetadata("versionNumber", "version_number", "_version_number", int, 0, PredefinedSerializer()), - FieldMetadata("modelTitle", "model_title", "_model_title", str, "", PredefinedSerializer()), - FieldMetadata("thumbnailUrl", "thumbnail_url", "_thumbnail_url", str, "", PredefinedSerializer()), - FieldMetadata("isPrivate", "is_private", "_is_private", bool, False, PredefinedSerializer()), - FieldMetadata("sigstoreState", "sigstore_state", "_sigstore_state", SigstoreState, SigstoreState.SIGSTORE_STATE_UNSPECIFIED, EnumSerializer()), + FieldMetadata("userProgressionOptOut", "user_progression_opt_out", "_user_progression_opt_out", bool, None, PredefinedSerializer(), optional=True), ] diff --git a/src/kagglesdk/search/types/search_api_service.py b/src/kagglesdk/search/types/search_api_service.py index 0e1c6872..06c3c303 100644 --- a/src/kagglesdk/search/types/search_api_service.py +++ b/src/kagglesdk/search/types/search_api_service.py @@ -23,2301 +23,2247 @@ class ApiListType(enum.Enum): API_LIST_TYPE_UNSPECIFIED = 0 API_LIST_TYPE_YOUR_WORK = 1 -class ListEntitiesRequest(KaggleObject): +class ApiOrganizationCard(KaggleObject): r""" + Based on kaggle.users.OrganizationCard + 
Attributes: - filters (ListEntitiesFilters) - Canonical filters to apply to the search - canonical_order_by (ListSearchContentOrderBy) - Canonical order to apply to the results - competitions_order_by (SearchCompetitionsOrderBy) - Competitions order to apply to the results - datasets_order_by (SearchDatasetsOrderBy) - Datasets order to apply to the results - kernels_order_by (SearchKernelsOrderBy) - Kernels order to apply to the results - models_order_by (SearchModelsOrderBy) - Models order to apply to the results - discussions_order_by (SearchDiscussionsOrderBy) - Discussions order to apply to the results - users_order_by (SearchUsersOrderBy) - Users order to apply to the results - page_token (str) - Page token for paging (see aip.dev/158) - page_size (int) - Number of documents per page to return - skip (int) - How many results to skip + name (str) + id (int) + thumbnail_image_url (str) + slug (str) """ def __init__(self): - self._filters = None - self._canonical_order_by = None - self._competitions_order_by = None - self._datasets_order_by = None - self._kernels_order_by = None - self._models_order_by = None - self._discussions_order_by = None - self._users_order_by = None - self._page_token = "" - self._page_size = 0 - self._skip = 0 + self._name = "" + self._id = 0 + self._thumbnail_image_url = "" + self._slug = "" self._freeze() @property - def filters(self) -> Optional['ListEntitiesFilters']: - """Canonical filters to apply to the search""" - return self._filters - - @filters.setter - def filters(self, filters: Optional['ListEntitiesFilters']): - if filters is None: - del self.filters - return - if not isinstance(filters, ListEntitiesFilters): - raise TypeError('filters must be of type ListEntitiesFilters') - self._filters = filters - - @property - def canonical_order_by(self) -> 'ListSearchContentOrderBy': - """Canonical order to apply to the results""" - return self._canonical_order_by or ListSearchContentOrderBy.LIST_SEARCH_CONTENT_ORDER_BY_UNSPECIFIED - - @canonical_order_by.setter - def canonical_order_by(self, canonical_order_by: 'ListSearchContentOrderBy'): - if canonical_order_by is None: - del self.canonical_order_by - return - if not isinstance(canonical_order_by, ListSearchContentOrderBy): - raise TypeError('canonical_order_by must be of type ListSearchContentOrderBy') - del self.competitions_order_by - del self.datasets_order_by - del self.kernels_order_by - del self.models_order_by - del self.discussions_order_by - del self.users_order_by - self._canonical_order_by = canonical_order_by - - @property - def competitions_order_by(self) -> 'SearchCompetitionsOrderBy': - """Competitions order to apply to the results""" - return self._competitions_order_by or SearchCompetitionsOrderBy.SEARCH_COMPETITIONS_ORDER_BY_UNSPECIFIED + def name(self) -> str: + return self._name - @competitions_order_by.setter - def competitions_order_by(self, competitions_order_by: 'SearchCompetitionsOrderBy'): - if competitions_order_by is None: - del self.competitions_order_by + @name.setter + def name(self, name: str): + if name is None: + del self.name return - if not isinstance(competitions_order_by, SearchCompetitionsOrderBy): - raise TypeError('competitions_order_by must be of type SearchCompetitionsOrderBy') - del self.canonical_order_by - del self.datasets_order_by - del self.kernels_order_by - del self.models_order_by - del self.discussions_order_by - del self.users_order_by - self._competitions_order_by = competitions_order_by + if not isinstance(name, str): + raise TypeError('name must be 
of type str') + self._name = name @property - def datasets_order_by(self) -> 'SearchDatasetsOrderBy': - """Datasets order to apply to the results""" - return self._datasets_order_by or SearchDatasetsOrderBy.SEARCH_DATASETS_ORDER_BY_UNSPECIFIED + def id(self) -> int: + return self._id - @datasets_order_by.setter - def datasets_order_by(self, datasets_order_by: 'SearchDatasetsOrderBy'): - if datasets_order_by is None: - del self.datasets_order_by + @id.setter + def id(self, id: int): + if id is None: + del self.id return - if not isinstance(datasets_order_by, SearchDatasetsOrderBy): - raise TypeError('datasets_order_by must be of type SearchDatasetsOrderBy') - del self.canonical_order_by - del self.competitions_order_by - del self.kernels_order_by - del self.models_order_by - del self.discussions_order_by - del self.users_order_by - self._datasets_order_by = datasets_order_by + if not isinstance(id, int): + raise TypeError('id must be of type int') + self._id = id @property - def kernels_order_by(self) -> 'SearchKernelsOrderBy': - """Kernels order to apply to the results""" - return self._kernels_order_by or SearchKernelsOrderBy.SEARCH_KERNELS_ORDER_BY_UNSPECIFIED + def thumbnail_image_url(self) -> str: + return self._thumbnail_image_url - @kernels_order_by.setter - def kernels_order_by(self, kernels_order_by: 'SearchKernelsOrderBy'): - if kernels_order_by is None: - del self.kernels_order_by + @thumbnail_image_url.setter + def thumbnail_image_url(self, thumbnail_image_url: str): + if thumbnail_image_url is None: + del self.thumbnail_image_url return - if not isinstance(kernels_order_by, SearchKernelsOrderBy): - raise TypeError('kernels_order_by must be of type SearchKernelsOrderBy') - del self.canonical_order_by - del self.competitions_order_by - del self.datasets_order_by - del self.models_order_by - del self.discussions_order_by - del self.users_order_by - self._kernels_order_by = kernels_order_by + if not isinstance(thumbnail_image_url, str): + raise TypeError('thumbnail_image_url must be of type str') + self._thumbnail_image_url = thumbnail_image_url @property - def models_order_by(self) -> 'SearchModelsOrderBy': - """Models order to apply to the results""" - return self._models_order_by or SearchModelsOrderBy.MODELS_SEARCH_ORDER_BY_UNSPECIFIED + def slug(self) -> str: + return self._slug - @models_order_by.setter - def models_order_by(self, models_order_by: 'SearchModelsOrderBy'): - if models_order_by is None: - del self.models_order_by + @slug.setter + def slug(self, slug: str): + if slug is None: + del self.slug return - if not isinstance(models_order_by, SearchModelsOrderBy): - raise TypeError('models_order_by must be of type SearchModelsOrderBy') - del self.canonical_order_by - del self.competitions_order_by - del self.datasets_order_by - del self.kernels_order_by - del self.discussions_order_by - del self.users_order_by - self._models_order_by = models_order_by + if not isinstance(slug, str): + raise TypeError('slug must be of type str') + self._slug = slug - @property - def discussions_order_by(self) -> 'SearchDiscussionsOrderBy': - """Discussions order to apply to the results""" - return self._discussions_order_by or SearchDiscussionsOrderBy.SEARCH_DISCUSSIONS_ORDER_BY_UNSPECIFIED - @discussions_order_by.setter - def discussions_order_by(self, discussions_order_by: 'SearchDiscussionsOrderBy'): - if discussions_order_by is None: - del self.discussions_order_by - return - if not isinstance(discussions_order_by, SearchDiscussionsOrderBy): - raise TypeError('discussions_order_by 
must be of type SearchDiscussionsOrderBy') - del self.canonical_order_by - del self.competitions_order_by - del self.datasets_order_by - del self.kernels_order_by - del self.models_order_by - del self.users_order_by - self._discussions_order_by = discussions_order_by +class ApiSearchCompetitionsDocument(KaggleObject): + r""" + Based on kaggle.competitions.SearchCompetitionsDocument - @property - def users_order_by(self) -> 'SearchUsersOrderBy': - """Users order to apply to the results""" - return self._users_order_by or SearchUsersOrderBy.SEARCH_USERS_ORDER_BY_UNSPECIFIED + Attributes: + host_segment (HostSegment) + The host segment of the Competition + deadline (datetime) + The deadline of the Competition + team_count (int) + The total number of teams participating in the Competition + team_rank (int) + The rank of the current user's team on the Competition + is_environment_evaluation (bool) + Whether the Competition has an environment evaluation + prize_type (RewardTypeId) + The prize/award type of the Competition + prize_value (float) + The prize/award value of the Competition + is_launched (bool) + Whether the competition has launched (even if it's ended) + owner_user_has_joined (bool) + Whether the owner user (profile user, then current user) has joined the + competition + is_limited_participation (bool) + Whether the competition is a limited participation competition + only_allow_kernel_submissions (bool) + Whether only kernel submissions are allowed + """ - @users_order_by.setter - def users_order_by(self, users_order_by: 'SearchUsersOrderBy'): - if users_order_by is None: - del self.users_order_by - return - if not isinstance(users_order_by, SearchUsersOrderBy): - raise TypeError('users_order_by must be of type SearchUsersOrderBy') - del self.canonical_order_by - del self.competitions_order_by - del self.datasets_order_by - del self.kernels_order_by - del self.models_order_by - del self.discussions_order_by - self._users_order_by = users_order_by + def __init__(self): + self._host_segment = HostSegment.HOST_SEGMENT_UNSPECIFIED + self._deadline = None + self._team_count = 0 + self._team_rank = None + self._is_environment_evaluation = False + self._prize_type = RewardTypeId.REWARD_TYPE_ID_UNSPECIFIED + self._prize_value = None + self._is_launched = False + self._owner_user_has_joined = False + self._is_limited_participation = False + self._only_allow_kernel_submissions = False + self._freeze() @property - def page_token(self) -> str: - """Page token for paging (see aip.dev/158)""" - return self._page_token + def host_segment(self) -> 'HostSegment': + """The host segment of the Competition""" + return self._host_segment - @page_token.setter - def page_token(self, page_token: str): - if page_token is None: - del self.page_token + @host_segment.setter + def host_segment(self, host_segment: 'HostSegment'): + if host_segment is None: + del self.host_segment return - if not isinstance(page_token, str): - raise TypeError('page_token must be of type str') - self._page_token = page_token + if not isinstance(host_segment, HostSegment): + raise TypeError('host_segment must be of type HostSegment') + self._host_segment = host_segment @property - def page_size(self) -> int: - """Number of documents per page to return""" - return self._page_size + def deadline(self) -> datetime: + """The deadline of the Competition""" + return self._deadline - @page_size.setter - def page_size(self, page_size: int): - if page_size is None: - del self.page_size + @deadline.setter + def deadline(self, deadline: 
datetime): + if deadline is None: + del self.deadline return - if not isinstance(page_size, int): - raise TypeError('page_size must be of type int') - self._page_size = page_size + if not isinstance(deadline, datetime): + raise TypeError('deadline must be of type datetime') + self._deadline = deadline @property - def skip(self) -> int: - """How many results to skip""" - return self._skip + def team_count(self) -> int: + """The total number of teams participating in the Competition""" + return self._team_count - @skip.setter - def skip(self, skip: int): - if skip is None: - del self.skip + @team_count.setter + def team_count(self, team_count: int): + if team_count is None: + del self.team_count return - if not isinstance(skip, int): - raise TypeError('skip must be of type int') - self._skip = skip - - def endpoint(self): - path = '/api/v1/search/list-entities' - return path.format_map(self.to_field_map(self)) - - -class ListEntitiesResponse(KaggleObject): - r""" - Attributes: - documents (ListEntitiesDocument) - The list of documents returned after filtering - total_documents (int) - The total number of documents matching any filters - next_page_token (str) - The token to request the next page - """ - - def __init__(self): - self._documents = [] - self._total_documents = 0 - self._next_page_token = "" - self._freeze() + if not isinstance(team_count, int): + raise TypeError('team_count must be of type int') + self._team_count = team_count @property - def documents(self) -> Optional[List[Optional['ListEntitiesDocument']]]: - """The list of documents returned after filtering""" - return self._documents + def team_rank(self) -> int: + """The rank of the current user's team on the Competition""" + return self._team_rank or 0 - @documents.setter - def documents(self, documents: Optional[List[Optional['ListEntitiesDocument']]]): - if documents is None: - del self.documents + @team_rank.setter + def team_rank(self, team_rank: Optional[int]): + if team_rank is None: + del self.team_rank return - if not isinstance(documents, list): - raise TypeError('documents must be of type list') - if not all([isinstance(t, ListEntitiesDocument) for t in documents]): - raise TypeError('documents must contain only items of type ListEntitiesDocument') - self._documents = documents + if not isinstance(team_rank, int): + raise TypeError('team_rank must be of type int') + self._team_rank = team_rank @property - def total_documents(self) -> int: - """The total number of documents matching any filters""" - return self._total_documents + def is_environment_evaluation(self) -> bool: + """Whether the Competition has an environment evaluation""" + return self._is_environment_evaluation - @total_documents.setter - def total_documents(self, total_documents: int): - if total_documents is None: - del self.total_documents + @is_environment_evaluation.setter + def is_environment_evaluation(self, is_environment_evaluation: bool): + if is_environment_evaluation is None: + del self.is_environment_evaluation return - if not isinstance(total_documents, int): - raise TypeError('total_documents must be of type int') - self._total_documents = total_documents + if not isinstance(is_environment_evaluation, bool): + raise TypeError('is_environment_evaluation must be of type bool') + self._is_environment_evaluation = is_environment_evaluation @property - def next_page_token(self) -> str: - """The token to request the next page""" - return self._next_page_token + def prize_type(self) -> 'RewardTypeId': + """The prize/award type of the 
Competition""" + return self._prize_type - @next_page_token.setter - def next_page_token(self, next_page_token: str): - if next_page_token is None: - del self.next_page_token + @prize_type.setter + def prize_type(self, prize_type: 'RewardTypeId'): + if prize_type is None: + del self.prize_type return - if not isinstance(next_page_token, str): - raise TypeError('next_page_token must be of type str') - self._next_page_token = next_page_token - - @property - def totalDocuments(self): - return self.total_documents - - @property - def nextPageToken(self): - return self.next_page_token - - -class ListEntitiesDocument(KaggleObject): - r""" - Based on kaggle.search.ListSearchContentDocument - - Attributes: - id (int) - The DB ID (i.e. the PK from the table) of the document - document_type (DocumentType) - The type of content of the document - title (str) - The canonical title of the document - image_url (str) - The thumbnail URL of the document - create_time (datetime) - The canonical creation time of the document; May mean different things - between content types - update_time (datetime) - The canonical update time of the document; May be different between content - types - is_private (bool) - Whether the content is marked as private - votes (int) - The total votes (or score, if downvotes are supported) for the document - owner_user (ApiUserAvatar) - owner_organization (ApiOrganizationCard) - competition_document (ApiSearchCompetitionsDocument) - dataset_document (ApiSearchDatasetsDocument) - kernel_document (ApiSearchKernelsDocument) - model_document (ApiSearchModelsDocument) - discussion_document (ApiSearchDiscussionsDocument) - user_document (ApiSearchUsersDocument) - slug (str) - The slug of the document (which may be close to the url) - """ - - def __init__(self): - self._id = 0 - self._document_type = DocumentType.DOCUMENT_TYPE_UNSPECIFIED - self._title = "" - self._image_url = "" - self._create_time = None - self._update_time = None - self._is_private = None - self._votes = None - self._owner_user = None - self._owner_organization = None - self._competition_document = None - self._dataset_document = None - self._kernel_document = None - self._model_document = None - self._discussion_document = None - self._user_document = None - self._slug = None - self._freeze() + if not isinstance(prize_type, RewardTypeId): + raise TypeError('prize_type must be of type RewardTypeId') + self._prize_type = prize_type @property - def id(self) -> int: - """The DB ID (i.e. 
the PK from the table) of the document""" - return self._id + def prize_value(self) -> float: + """The prize/award value of the Competition""" + return self._prize_value or 0.0 - @id.setter - def id(self, id: int): - if id is None: - del self.id + @prize_value.setter + def prize_value(self, prize_value: Optional[float]): + if prize_value is None: + del self.prize_value return - if not isinstance(id, int): - raise TypeError('id must be of type int') - self._id = id + if not isinstance(prize_value, float): + raise TypeError('prize_value must be of type float') + self._prize_value = prize_value @property - def document_type(self) -> 'DocumentType': - """The type of content of the document""" - return self._document_type + def is_launched(self) -> bool: + """Whether the competition has launched (even if it's ended)""" + return self._is_launched - @document_type.setter - def document_type(self, document_type: 'DocumentType'): - if document_type is None: - del self.document_type + @is_launched.setter + def is_launched(self, is_launched: bool): + if is_launched is None: + del self.is_launched return - if not isinstance(document_type, DocumentType): - raise TypeError('document_type must be of type DocumentType') - self._document_type = document_type + if not isinstance(is_launched, bool): + raise TypeError('is_launched must be of type bool') + self._is_launched = is_launched @property - def title(self) -> str: - """The canonical title of the document""" - return self._title + def owner_user_has_joined(self) -> bool: + r""" + Whether the owner user (profile user, then current user) has joined the + competition + """ + return self._owner_user_has_joined - @title.setter - def title(self, title: str): - if title is None: - del self.title + @owner_user_has_joined.setter + def owner_user_has_joined(self, owner_user_has_joined: bool): + if owner_user_has_joined is None: + del self.owner_user_has_joined return - if not isinstance(title, str): - raise TypeError('title must be of type str') - self._title = title + if not isinstance(owner_user_has_joined, bool): + raise TypeError('owner_user_has_joined must be of type bool') + self._owner_user_has_joined = owner_user_has_joined @property - def image_url(self) -> str: - """The thumbnail URL of the document""" - return self._image_url + def is_limited_participation(self) -> bool: + """Whether the competition is a limited participation competition""" + return self._is_limited_participation - @image_url.setter - def image_url(self, image_url: str): - if image_url is None: - del self.image_url + @is_limited_participation.setter + def is_limited_participation(self, is_limited_participation: bool): + if is_limited_participation is None: + del self.is_limited_participation return - if not isinstance(image_url, str): - raise TypeError('image_url must be of type str') - self._image_url = image_url + if not isinstance(is_limited_participation, bool): + raise TypeError('is_limited_participation must be of type bool') + self._is_limited_participation = is_limited_participation @property - def create_time(self) -> datetime: - r""" - The canonical creation time of the document; May mean different things - between content types - """ - return self._create_time + def only_allow_kernel_submissions(self) -> bool: + """Whether only kernel submissions are allowed""" + return self._only_allow_kernel_submissions - @create_time.setter - def create_time(self, create_time: datetime): - if create_time is None: - del self.create_time + @only_allow_kernel_submissions.setter + def 
only_allow_kernel_submissions(self, only_allow_kernel_submissions: bool): + if only_allow_kernel_submissions is None: + del self.only_allow_kernel_submissions return - if not isinstance(create_time, datetime): - raise TypeError('create_time must be of type datetime') - self._create_time = create_time + if not isinstance(only_allow_kernel_submissions, bool): + raise TypeError('only_allow_kernel_submissions must be of type bool') + self._only_allow_kernel_submissions = only_allow_kernel_submissions + + +class ApiSearchCompetitionsFilters(KaggleObject): + r""" + Based on kaggle.competitions.SearchCompetitionsFilters + + Attributes: + role (SearchCompetitionsRole) + The Competition role used to filter the documents + status (SearchCompetitionsStatus) + The Competition status used to filter the documents + profile_visibility (SearchCompetitionsProfileVisibility) + Competition visibility status on user profile + earned_medal (bool) + Whether to return documents that the owner_user_id earned a medal for. + """ + + def __init__(self): + self._role = SearchCompetitionsRole.SEARCH_COMPETITIONS_ROLE_ANY + self._status = SearchCompetitionsStatus.SEARCH_COMPETITIONS_STATUS_ANY + self._profile_visibility = SearchCompetitionsProfileVisibility.SEARCH_COMPETITIONS_PROFILE_VISIBILITY_ANY + self._earned_medal = None + self._freeze() @property - def update_time(self) -> datetime: - r""" - The canonical update time of the document; May be different between content - types - """ - return self._update_time or None + def role(self) -> 'SearchCompetitionsRole': + """The Competition role used to filter the documents""" + return self._role - @update_time.setter - def update_time(self, update_time: Optional[datetime]): - if update_time is None: - del self.update_time + @role.setter + def role(self, role: 'SearchCompetitionsRole'): + if role is None: + del self.role return - if not isinstance(update_time, datetime): - raise TypeError('update_time must be of type datetime') - self._update_time = update_time + if not isinstance(role, SearchCompetitionsRole): + raise TypeError('role must be of type SearchCompetitionsRole') + self._role = role @property - def is_private(self) -> bool: - """Whether the content is marked as private""" - return self._is_private or False + def status(self) -> 'SearchCompetitionsStatus': + """The Competition status used to filter the documents""" + return self._status - @is_private.setter - def is_private(self, is_private: Optional[bool]): - if is_private is None: - del self.is_private + @status.setter + def status(self, status: 'SearchCompetitionsStatus'): + if status is None: + del self.status return - if not isinstance(is_private, bool): - raise TypeError('is_private must be of type bool') - self._is_private = is_private + if not isinstance(status, SearchCompetitionsStatus): + raise TypeError('status must be of type SearchCompetitionsStatus') + self._status = status @property - def votes(self) -> int: - """The total votes (or score, if downvotes are supported) for the document""" - return self._votes or 0 + def profile_visibility(self) -> 'SearchCompetitionsProfileVisibility': + """Competition visibility status on user profile""" + return self._profile_visibility - @votes.setter - def votes(self, votes: Optional[int]): - if votes is None: - del self.votes + @profile_visibility.setter + def profile_visibility(self, profile_visibility: 'SearchCompetitionsProfileVisibility'): + if profile_visibility is None: + del self.profile_visibility return - if not isinstance(votes, int): - raise 
TypeError('votes must be of type int') - self._votes = votes + if not isinstance(profile_visibility, SearchCompetitionsProfileVisibility): + raise TypeError('profile_visibility must be of type SearchCompetitionsProfileVisibility') + self._profile_visibility = profile_visibility @property - def owner_user(self) -> Optional['ApiUserAvatar']: - return self._owner_user or None + def earned_medal(self) -> bool: + """Whether to return documents that the owner_user_id earned a medal for.""" + return self._earned_medal or False - @owner_user.setter - def owner_user(self, owner_user: Optional['ApiUserAvatar']): - if owner_user is None: - del self.owner_user + @earned_medal.setter + def earned_medal(self, earned_medal: Optional[bool]): + if earned_medal is None: + del self.earned_medal return - if not isinstance(owner_user, ApiUserAvatar): - raise TypeError('owner_user must be of type ApiUserAvatar') - del self.owner_organization - self._owner_user = owner_user + if not isinstance(earned_medal, bool): + raise TypeError('earned_medal must be of type bool') + self._earned_medal = earned_medal + + +class ApiSearchDatasetsDocument(KaggleObject): + r""" + Based on kaggle.datasets.SearchDatasetsDocument + + Attributes: + usability_rating (float) + The usability rating of the Dataset + file_count (int) + How many files the Dataset has + file_types (DatasetFileType) + The file types of all the files in the Dataset + size (int) + The size of the Dataset + """ + + def __init__(self): + self._usability_rating = 0.0 + self._file_count = 0 + self._file_types = [] + self._size = 0 + self._freeze() @property - def owner_organization(self) -> Optional['ApiOrganizationCard']: - return self._owner_organization or None + def usability_rating(self) -> float: + """The usability rating of the Dataset""" + return self._usability_rating - @owner_organization.setter - def owner_organization(self, owner_organization: Optional['ApiOrganizationCard']): - if owner_organization is None: - del self.owner_organization + @usability_rating.setter + def usability_rating(self, usability_rating: float): + if usability_rating is None: + del self.usability_rating return - if not isinstance(owner_organization, ApiOrganizationCard): - raise TypeError('owner_organization must be of type ApiOrganizationCard') - del self.owner_user - self._owner_organization = owner_organization + if not isinstance(usability_rating, float): + raise TypeError('usability_rating must be of type float') + self._usability_rating = usability_rating @property - def competition_document(self) -> Optional['ApiSearchCompetitionsDocument']: - return self._competition_document or None + def file_count(self) -> int: + """How many files the Dataset has""" + return self._file_count - @competition_document.setter - def competition_document(self, competition_document: Optional['ApiSearchCompetitionsDocument']): - if competition_document is None: - del self.competition_document + @file_count.setter + def file_count(self, file_count: int): + if file_count is None: + del self.file_count return - if not isinstance(competition_document, ApiSearchCompetitionsDocument): - raise TypeError('competition_document must be of type ApiSearchCompetitionsDocument') - del self.dataset_document - del self.kernel_document - del self.model_document - del self.discussion_document - del self.user_document - self._competition_document = competition_document + if not isinstance(file_count, int): + raise TypeError('file_count must be of type int') + self._file_count = file_count @property - def 
dataset_document(self) -> Optional['ApiSearchDatasetsDocument']: - return self._dataset_document or None + def file_types(self) -> Optional[List['DatasetFileType']]: + """The file types of all the files in the Dataset""" + return self._file_types - @dataset_document.setter - def dataset_document(self, dataset_document: Optional['ApiSearchDatasetsDocument']): - if dataset_document is None: - del self.dataset_document + @file_types.setter + def file_types(self, file_types: Optional[List['DatasetFileType']]): + if file_types is None: + del self.file_types return - if not isinstance(dataset_document, ApiSearchDatasetsDocument): - raise TypeError('dataset_document must be of type ApiSearchDatasetsDocument') - del self.competition_document - del self.kernel_document - del self.model_document - del self.discussion_document - del self.user_document - self._dataset_document = dataset_document + if not isinstance(file_types, list): + raise TypeError('file_types must be of type list') + if not all([isinstance(t, DatasetFileType) for t in file_types]): + raise TypeError('file_types must contain only items of type DatasetFileType') + self._file_types = file_types @property - def kernel_document(self) -> Optional['ApiSearchKernelsDocument']: - return self._kernel_document or None + def size(self) -> int: + """The size of the Dataset""" + return self._size - @kernel_document.setter - def kernel_document(self, kernel_document: Optional['ApiSearchKernelsDocument']): - if kernel_document is None: - del self.kernel_document + @size.setter + def size(self, size: int): + if size is None: + del self.size return - if not isinstance(kernel_document, ApiSearchKernelsDocument): - raise TypeError('kernel_document must be of type ApiSearchKernelsDocument') - del self.competition_document - del self.dataset_document - del self.model_document - del self.discussion_document - del self.user_document - self._kernel_document = kernel_document + if not isinstance(size, int): + raise TypeError('size must be of type int') + self._size = size + + +class ApiSearchDatasetsFilters(KaggleObject): + r""" + Based on kaggle.datasets.SearchDiscussionsFilters + + Attributes: + file_type (DatasetFileTypeGroup) + The file types used to filter the documents + license_group (DatasetLicenseGroup) + The license groups used to filter the documents + size (DatasetSizeGroup) + The dataset size range used to filter the documents + earned_medal (bool) + Whether to return documents that the owner_user_id earned a medal for. 
+ """ + + def __init__(self): + self._file_type = DatasetFileTypeGroup.DATASET_FILE_TYPE_GROUP_ALL + self._license_group = None + self._size = None + self._earned_medal = None + self._freeze() @property - def model_document(self) -> Optional['ApiSearchModelsDocument']: - return self._model_document or None + def file_type(self) -> 'DatasetFileTypeGroup': + """The file types used to filter the documents""" + return self._file_type - @model_document.setter - def model_document(self, model_document: Optional['ApiSearchModelsDocument']): - if model_document is None: - del self.model_document + @file_type.setter + def file_type(self, file_type: 'DatasetFileTypeGroup'): + if file_type is None: + del self.file_type return - if not isinstance(model_document, ApiSearchModelsDocument): - raise TypeError('model_document must be of type ApiSearchModelsDocument') - del self.competition_document - del self.dataset_document - del self.kernel_document - del self.discussion_document - del self.user_document - self._model_document = model_document + if not isinstance(file_type, DatasetFileTypeGroup): + raise TypeError('file_type must be of type DatasetFileTypeGroup') + self._file_type = file_type @property - def discussion_document(self) -> Optional['ApiSearchDiscussionsDocument']: - return self._discussion_document or None + def license_group(self) -> 'DatasetLicenseGroup': + """The license groups used to filter the documents""" + return self._license_group or DatasetLicenseGroup.DATASET_LICENSE_GROUP_ALL - @discussion_document.setter - def discussion_document(self, discussion_document: Optional['ApiSearchDiscussionsDocument']): - if discussion_document is None: - del self.discussion_document + @license_group.setter + def license_group(self, license_group: Optional['DatasetLicenseGroup']): + if license_group is None: + del self.license_group return - if not isinstance(discussion_document, ApiSearchDiscussionsDocument): - raise TypeError('discussion_document must be of type ApiSearchDiscussionsDocument') - del self.competition_document - del self.dataset_document - del self.kernel_document - del self.model_document - del self.user_document - self._discussion_document = discussion_document + if not isinstance(license_group, DatasetLicenseGroup): + raise TypeError('license_group must be of type DatasetLicenseGroup') + self._license_group = license_group @property - def user_document(self) -> Optional['ApiSearchUsersDocument']: - return self._user_document or None + def size(self) -> 'DatasetSizeGroup': + """The dataset size range used to filter the documents""" + return self._size or DatasetSizeGroup.DATASET_SIZE_GROUP_ALL - @user_document.setter - def user_document(self, user_document: Optional['ApiSearchUsersDocument']): - if user_document is None: - del self.user_document + @size.setter + def size(self, size: Optional['DatasetSizeGroup']): + if size is None: + del self.size return - if not isinstance(user_document, ApiSearchUsersDocument): - raise TypeError('user_document must be of type ApiSearchUsersDocument') - del self.competition_document - del self.dataset_document - del self.kernel_document - del self.model_document - del self.discussion_document - self._user_document = user_document + if not isinstance(size, DatasetSizeGroup): + raise TypeError('size must be of type DatasetSizeGroup') + self._size = size @property - def slug(self) -> str: - """The slug of the document (which may be close to the url)""" - return self._slug or "" + def earned_medal(self) -> bool: + """Whether to return documents that 
the owner_user_id earned a medal for.""" + return self._earned_medal or False - @slug.setter - def slug(self, slug: Optional[str]): - if slug is None: - del self.slug + @earned_medal.setter + def earned_medal(self, earned_medal: Optional[bool]): + if earned_medal is None: + del self.earned_medal return - if not isinstance(slug, str): - raise TypeError('slug must be of type str') - self._slug = slug + if not isinstance(earned_medal, bool): + raise TypeError('earned_medal must be of type bool') + self._earned_medal = earned_medal -class ListEntitiesFilters(KaggleObject): +class ApiSearchDiscussionsDocument(KaggleObject): r""" - Based on kaggle.search.ListSearchContentFilters + Based on kaggle.discussions.SearchDiscussionsDocument Attributes: - query (str) - The free-text query the user entered to filter results - list_type (ApiListType) - The type of list being requested - privacy (PrivacyFilter) - The privacy filter to apply - owner_type (OwnerType) - The owner type filter to apply - document_types (DocumentType) - The document type filter to apply - competition_filters (ApiSearchCompetitionsFilters) - The set of Competition filters to filter the documents - dataset_filters (ApiSearchDatasetsFilters) - The set of Dataset filters to filter the documents - discussion_filters (ApiSearchDiscussionsFilters) - The set of Discussion filters to filter the documents - kernel_filters (ApiSearchKernelsFilters) - The set of Kernel filters to filter the documents - model_filters (ApiSearchModelsFilters) - The set of Model filters to filter the documents - user_filters (ApiSearchUsersFilters) - The set of User filters to filter the documents + new_comment_url (str) + message_stripped (str) + The message of the topic/comment, stripped of HTML (at time of index) + message_markdown (str) + The markdown for the message of the topic/comment + forum_name (str) + The name of the parent forum + forum_url (str) + The URL for the parent forum + source_type (SearchDiscussionsSourceType) + The source type of the comment + topic_type (SearchDiscussionsTopicType) + The type of topic returned + type (SearchDiscussionsDocumentType) + The type of document returned + write_up_metadata (WriteUpItemInfo) + If the document is a WriteUp, extra WriteUp-specific data + is provided """ def __init__(self): - self._query = "" - self._list_type = ApiListType.API_LIST_TYPE_UNSPECIFIED - self._privacy = PrivacyFilter.ALL - self._owner_type = OwnerType.OWNER_TYPE_UNSPECIFIED - self._document_types = [] - self._competition_filters = None - self._dataset_filters = None - self._discussion_filters = None - self._kernel_filters = None - self._model_filters = None - self._user_filters = None + self._new_comment_url = None + self._message_stripped = "" + self._message_markdown = None + self._forum_name = "" + self._forum_url = None + self._source_type = SearchDiscussionsSourceType.SEARCH_DISCUSSIONS_SOURCE_TYPE_UNSPECIFIED + self._topic_type = SearchDiscussionsTopicType.SEARCH_DISCUSSIONS_TOPIC_TYPE_UNSPECIFIED + self._type = SearchDiscussionsDocumentType.SEARCH_DISCUSSIONS_DOCUMENT_TYPE_UNSPECIFIED + self._write_up_metadata = None self._freeze() @property - def query(self) -> str: - """The free-text query the user entered to filter results""" - return self._query + def new_comment_url(self) -> str: + return self._new_comment_url or "" + + @new_comment_url.setter + def new_comment_url(self, new_comment_url: Optional[str]): + if new_comment_url is None: + del self.new_comment_url + return + if not isinstance(new_comment_url, str): + raise 
TypeError('new_comment_url must be of type str') + self._new_comment_url = new_comment_url + + @property + def message_stripped(self) -> str: + """The message of the topic/comment, stripped of HTML (at time of index)""" + return self._message_stripped + + @message_stripped.setter + def message_stripped(self, message_stripped: str): + if message_stripped is None: + del self.message_stripped + return + if not isinstance(message_stripped, str): + raise TypeError('message_stripped must be of type str') + self._message_stripped = message_stripped + + @property + def message_markdown(self) -> str: + """The markdown for the message of the topic/comment""" + return self._message_markdown or "" - @query.setter - def query(self, query: str): - if query is None: - del self.query + @message_markdown.setter + def message_markdown(self, message_markdown: Optional[str]): + if message_markdown is None: + del self.message_markdown return - if not isinstance(query, str): - raise TypeError('query must be of type str') - self._query = query + if not isinstance(message_markdown, str): + raise TypeError('message_markdown must be of type str') + self._message_markdown = message_markdown @property - def list_type(self) -> 'ApiListType': - """The type of list being requested""" - return self._list_type + def forum_name(self) -> str: + """The name of the parent forum""" + return self._forum_name - @list_type.setter - def list_type(self, list_type: 'ApiListType'): - if list_type is None: - del self.list_type + @forum_name.setter + def forum_name(self, forum_name: str): + if forum_name is None: + del self.forum_name return - if not isinstance(list_type, ApiListType): - raise TypeError('list_type must be of type ApiListType') - self._list_type = list_type + if not isinstance(forum_name, str): + raise TypeError('forum_name must be of type str') + self._forum_name = forum_name @property - def privacy(self) -> 'PrivacyFilter': - """The privacy filter to apply""" - return self._privacy + def forum_url(self) -> str: + """The URL for the parent forum""" + return self._forum_url or "" - @privacy.setter - def privacy(self, privacy: 'PrivacyFilter'): - if privacy is None: - del self.privacy + @forum_url.setter + def forum_url(self, forum_url: Optional[str]): + if forum_url is None: + del self.forum_url return - if not isinstance(privacy, PrivacyFilter): - raise TypeError('privacy must be of type PrivacyFilter') - self._privacy = privacy + if not isinstance(forum_url, str): + raise TypeError('forum_url must be of type str') + self._forum_url = forum_url @property - def owner_type(self) -> 'OwnerType': - """The owner type filter to apply""" - return self._owner_type + def source_type(self) -> 'SearchDiscussionsSourceType': + """The source type of the comment""" + return self._source_type - @owner_type.setter - def owner_type(self, owner_type: 'OwnerType'): - if owner_type is None: - del self.owner_type + @source_type.setter + def source_type(self, source_type: 'SearchDiscussionsSourceType'): + if source_type is None: + del self.source_type return - if not isinstance(owner_type, OwnerType): - raise TypeError('owner_type must be of type OwnerType') - self._owner_type = owner_type + if not isinstance(source_type, SearchDiscussionsSourceType): + raise TypeError('source_type must be of type SearchDiscussionsSourceType') + self._source_type = source_type @property - def document_types(self) -> Optional[List['DocumentType']]: - """The document type filter to apply""" - return self._document_types + def topic_type(self) -> 
'SearchDiscussionsTopicType': + """The type of topic returned""" + return self._topic_type - @document_types.setter - def document_types(self, document_types: Optional[List['DocumentType']]): - if document_types is None: - del self.document_types + @topic_type.setter + def topic_type(self, topic_type: 'SearchDiscussionsTopicType'): + if topic_type is None: + del self.topic_type return - if not isinstance(document_types, list): - raise TypeError('document_types must be of type list') - if not all([isinstance(t, DocumentType) for t in document_types]): - raise TypeError('document_types must contain only items of type DocumentType') - self._document_types = document_types + if not isinstance(topic_type, SearchDiscussionsTopicType): + raise TypeError('topic_type must be of type SearchDiscussionsTopicType') + self._topic_type = topic_type @property - def competition_filters(self) -> Optional['ApiSearchCompetitionsFilters']: - """The set of Competition filters to filter the documents""" - return self._competition_filters + def type(self) -> 'SearchDiscussionsDocumentType': + """The type of document returned""" + return self._type - @competition_filters.setter - def competition_filters(self, competition_filters: Optional['ApiSearchCompetitionsFilters']): - if competition_filters is None: - del self.competition_filters + @type.setter + def type(self, type: 'SearchDiscussionsDocumentType'): + if type is None: + del self.type return - if not isinstance(competition_filters, ApiSearchCompetitionsFilters): - raise TypeError('competition_filters must be of type ApiSearchCompetitionsFilters') - self._competition_filters = competition_filters + if not isinstance(type, SearchDiscussionsDocumentType): + raise TypeError('type must be of type SearchDiscussionsDocumentType') + self._type = type @property - def dataset_filters(self) -> Optional['ApiSearchDatasetsFilters']: - """The set of Dataset filters to filter the documents""" - return self._dataset_filters + def write_up_metadata(self) -> Optional['WriteUpItemInfo']: + r""" + If the document is a WriteUp, extra WriteUp-specific data + is provided + """ + return self._write_up_metadata or None - @dataset_filters.setter - def dataset_filters(self, dataset_filters: Optional['ApiSearchDatasetsFilters']): - if dataset_filters is None: - del self.dataset_filters + @write_up_metadata.setter + def write_up_metadata(self, write_up_metadata: Optional[Optional['WriteUpItemInfo']]): + if write_up_metadata is None: + del self.write_up_metadata return - if not isinstance(dataset_filters, ApiSearchDatasetsFilters): - raise TypeError('dataset_filters must be of type ApiSearchDatasetsFilters') - self._dataset_filters = dataset_filters + if not isinstance(write_up_metadata, WriteUpItemInfo): + raise TypeError('write_up_metadata must be of type WriteUpItemInfo') + self._write_up_metadata = write_up_metadata + + +class ApiSearchDiscussionsFilters(KaggleObject): + r""" + Based on kaggle.discussions.SearchDiscussionsFilters + + Attributes: + source_type (SearchDiscussionsSourceType) + The discussion source type used to filter the documents + only_new_comments (bool) + Show only topics with new comments + write_up_inclusion_type (WriteUpInclusionType) + Determines whether or not WriteUps should be included + write_up_types (WriteUpType) + Filters on WriteUp type + """ + + def __init__(self): + self._source_type = SearchDiscussionsSourceType.SEARCH_DISCUSSIONS_SOURCE_TYPE_UNSPECIFIED + self._only_new_comments = False + self._write_up_inclusion_type = 
WriteUpInclusionType.WRITE_UP_INCLUSION_TYPE_UNSPECIFIED + self._write_up_types = [] + self._freeze() @property - def discussion_filters(self) -> Optional['ApiSearchDiscussionsFilters']: - """The set of Discussion filters to filter the documents""" - return self._discussion_filters + def source_type(self) -> 'SearchDiscussionsSourceType': + """The discussion source type used to filter the documents""" + return self._source_type - @discussion_filters.setter - def discussion_filters(self, discussion_filters: Optional['ApiSearchDiscussionsFilters']): - if discussion_filters is None: - del self.discussion_filters + @source_type.setter + def source_type(self, source_type: 'SearchDiscussionsSourceType'): + if source_type is None: + del self.source_type return - if not isinstance(discussion_filters, ApiSearchDiscussionsFilters): - raise TypeError('discussion_filters must be of type ApiSearchDiscussionsFilters') - self._discussion_filters = discussion_filters + if not isinstance(source_type, SearchDiscussionsSourceType): + raise TypeError('source_type must be of type SearchDiscussionsSourceType') + self._source_type = source_type @property - def kernel_filters(self) -> Optional['ApiSearchKernelsFilters']: - """The set of Kernel filters to filter the documents""" - return self._kernel_filters + def only_new_comments(self) -> bool: + """Show only topics with new comments""" + return self._only_new_comments - @kernel_filters.setter - def kernel_filters(self, kernel_filters: Optional['ApiSearchKernelsFilters']): - if kernel_filters is None: - del self.kernel_filters + @only_new_comments.setter + def only_new_comments(self, only_new_comments: bool): + if only_new_comments is None: + del self.only_new_comments return - if not isinstance(kernel_filters, ApiSearchKernelsFilters): - raise TypeError('kernel_filters must be of type ApiSearchKernelsFilters') - self._kernel_filters = kernel_filters + if not isinstance(only_new_comments, bool): + raise TypeError('only_new_comments must be of type bool') + self._only_new_comments = only_new_comments @property - def model_filters(self) -> Optional['ApiSearchModelsFilters']: - """The set of Model filters to filter the documents""" - return self._model_filters + def write_up_inclusion_type(self) -> 'WriteUpInclusionType': + """Determines whether or not WriteUps should be included""" + return self._write_up_inclusion_type - @model_filters.setter - def model_filters(self, model_filters: Optional['ApiSearchModelsFilters']): - if model_filters is None: - del self.model_filters + @write_up_inclusion_type.setter + def write_up_inclusion_type(self, write_up_inclusion_type: 'WriteUpInclusionType'): + if write_up_inclusion_type is None: + del self.write_up_inclusion_type return - if not isinstance(model_filters, ApiSearchModelsFilters): - raise TypeError('model_filters must be of type ApiSearchModelsFilters') - self._model_filters = model_filters + if not isinstance(write_up_inclusion_type, WriteUpInclusionType): + raise TypeError('write_up_inclusion_type must be of type WriteUpInclusionType') + self._write_up_inclusion_type = write_up_inclusion_type @property - def user_filters(self) -> Optional['ApiSearchUsersFilters']: - """The set of User filters to filter the documents""" - return self._user_filters + def write_up_types(self) -> Optional[List['WriteUpType']]: + """Filters on WriteUp type""" + return self._write_up_types - @user_filters.setter - def user_filters(self, user_filters: Optional['ApiSearchUsersFilters']): - if user_filters is None: - del self.user_filters + 
@write_up_types.setter + def write_up_types(self, write_up_types: Optional[List['WriteUpType']]): + if write_up_types is None: + del self.write_up_types return - if not isinstance(user_filters, ApiSearchUsersFilters): - raise TypeError('user_filters must be of type ApiSearchUsersFilters') - self._user_filters = user_filters + if not isinstance(write_up_types, list): + raise TypeError('write_up_types must be of type list') + if not all([isinstance(t, WriteUpType) for t in write_up_types]): + raise TypeError('write_up_types must contain only items of type WriteUpType') + self._write_up_types = write_up_types -class ApiOrganizationCard(KaggleObject): +class ApiSearchKernelsDocument(KaggleObject): r""" - Based on kaggle.users.OrganizationCard + Based on kaggle.kernels.SearchKernelsDocument Attributes: - name (str) - id (int) - thumbnail_image_url (str) - slug (str) + session_id (int) + The session ID of the Kernel + has_linked_submission (bool) + Whether the Kernel has a linked submission + datasource_is_private (bool) + Whether the datasource is private + best_public_score (float) + The best public score of the Kernel's submission + is_draft (bool) + Whether the Kernel is a draft """ def __init__(self): - self._name = "" - self._id = 0 - self._thumbnail_image_url = "" - self._slug = "" + self._session_id = None + self._has_linked_submission = False + self._datasource_is_private = False + self._best_public_score = 0.0 + self._is_draft = False self._freeze() @property - def name(self) -> str: - return self._name + def session_id(self) -> int: + """The session ID of the Kernel""" + return self._session_id or 0 - @name.setter - def name(self, name: str): - if name is None: - del self.name + @session_id.setter + def session_id(self, session_id: Optional[int]): + if session_id is None: + del self.session_id return - if not isinstance(name, str): - raise TypeError('name must be of type str') - self._name = name + if not isinstance(session_id, int): + raise TypeError('session_id must be of type int') + self._session_id = session_id @property - def id(self) -> int: - return self._id + def has_linked_submission(self) -> bool: + """Whether the Kernel has a linked submission""" + return self._has_linked_submission - @id.setter - def id(self, id: int): - if id is None: - del self.id + @has_linked_submission.setter + def has_linked_submission(self, has_linked_submission: bool): + if has_linked_submission is None: + del self.has_linked_submission return - if not isinstance(id, int): - raise TypeError('id must be of type int') - self._id = id + if not isinstance(has_linked_submission, bool): + raise TypeError('has_linked_submission must be of type bool') + self._has_linked_submission = has_linked_submission @property - def thumbnail_image_url(self) -> str: - return self._thumbnail_image_url + def datasource_is_private(self) -> bool: + """Whether the datasource is private""" + return self._datasource_is_private - @thumbnail_image_url.setter - def thumbnail_image_url(self, thumbnail_image_url: str): - if thumbnail_image_url is None: - del self.thumbnail_image_url + @datasource_is_private.setter + def datasource_is_private(self, datasource_is_private: bool): + if datasource_is_private is None: + del self.datasource_is_private return - if not isinstance(thumbnail_image_url, str): - raise TypeError('thumbnail_image_url must be of type str') - self._thumbnail_image_url = thumbnail_image_url + if not isinstance(datasource_is_private, bool): + raise TypeError('datasource_is_private must be of type bool') + 
self._datasource_is_private = datasource_is_private @property - def slug(self) -> str: - return self._slug + def best_public_score(self) -> float: + """The best public score of the Kernel's submission""" + return self._best_public_score - @slug.setter - def slug(self, slug: str): - if slug is None: - del self.slug + @best_public_score.setter + def best_public_score(self, best_public_score: float): + if best_public_score is None: + del self.best_public_score return - if not isinstance(slug, str): - raise TypeError('slug must be of type str') - self._slug = slug + if not isinstance(best_public_score, float): + raise TypeError('best_public_score must be of type float') + self._best_public_score = best_public_score + + @property + def is_draft(self) -> bool: + """Whether the Kernel is a draft""" + return self._is_draft + + @is_draft.setter + def is_draft(self, is_draft: bool): + if is_draft is None: + del self.is_draft + return + if not isinstance(is_draft, bool): + raise TypeError('is_draft must be of type bool') + self._is_draft = is_draft -class ApiSearchCompetitionsDocument(KaggleObject): +class ApiSearchKernelsFilters(KaggleObject): r""" - Based on kaggle.competitions.SearchCompetitionsDocument + Based on kaggle.kernels.SearchKernelsFilters Attributes: - host_segment (HostSegment) - The host segment of the Competition - deadline (datetime) - The deadline of the Competition - team_count (int) - The total number of teams participating in the Competition - team_rank (int) - The rank of the current user's team on the Competition - is_environment_evaluation (bool) - Whether the Competition has an environment evaluation - prize_type (RewardTypeId) - The prize/award type of the Competition - prize_value (float) - The prize/award value of the Competition - is_launched (bool) - Whether the competition has launched (even if it's ended) - owner_user_has_joined (bool) - Whether the owner user (profile user, then current user) has joined the - competition - is_limited_participation (bool) - Whether the competition is a limited participation competition - only_allow_kernel_submissions (bool) - Whether only kernel submissions are allowed + language (str) + The Kernel language used to filter documents + earned_medal (bool) + Whether to return documents that the owner_user_id earned a medal for. 
""" def __init__(self): - self._host_segment = HostSegment.HOST_SEGMENT_UNSPECIFIED - self._deadline = None - self._team_count = 0 - self._team_rank = None - self._is_environment_evaluation = False - self._prize_type = RewardTypeId.REWARD_TYPE_ID_UNSPECIFIED - self._prize_value = None - self._is_launched = False - self._owner_user_has_joined = False - self._is_limited_participation = False - self._only_allow_kernel_submissions = False + self._language = None + self._earned_medal = None self._freeze() @property - def host_segment(self) -> 'HostSegment': - """The host segment of the Competition""" - return self._host_segment - - @host_segment.setter - def host_segment(self, host_segment: 'HostSegment'): - if host_segment is None: - del self.host_segment - return - if not isinstance(host_segment, HostSegment): - raise TypeError('host_segment must be of type HostSegment') - self._host_segment = host_segment - - @property - def deadline(self) -> datetime: - """The deadline of the Competition""" - return self._deadline - - @deadline.setter - def deadline(self, deadline: datetime): - if deadline is None: - del self.deadline - return - if not isinstance(deadline, datetime): - raise TypeError('deadline must be of type datetime') - self._deadline = deadline - - @property - def team_count(self) -> int: - """The total number of teams participating in the Competition""" - return self._team_count + def language(self) -> str: + """The Kernel language used to filter documents""" + return self._language or "" - @team_count.setter - def team_count(self, team_count: int): - if team_count is None: - del self.team_count + @language.setter + def language(self, language: Optional[str]): + if language is None: + del self.language return - if not isinstance(team_count, int): - raise TypeError('team_count must be of type int') - self._team_count = team_count + if not isinstance(language, str): + raise TypeError('language must be of type str') + self._language = language @property - def team_rank(self) -> int: - """The rank of the current user's team on the Competition""" - return self._team_rank or 0 + def earned_medal(self) -> bool: + """Whether to return documents that the owner_user_id earned a medal for.""" + return self._earned_medal or False - @team_rank.setter - def team_rank(self, team_rank: Optional[int]): - if team_rank is None: - del self.team_rank + @earned_medal.setter + def earned_medal(self, earned_medal: Optional[bool]): + if earned_medal is None: + del self.earned_medal return - if not isinstance(team_rank, int): - raise TypeError('team_rank must be of type int') - self._team_rank = team_rank + if not isinstance(earned_medal, bool): + raise TypeError('earned_medal must be of type bool') + self._earned_medal = earned_medal - @property - def is_environment_evaluation(self) -> bool: - """Whether the Competition has an environment evaluation""" - return self._is_environment_evaluation - @is_environment_evaluation.setter - def is_environment_evaluation(self, is_environment_evaluation: bool): - if is_environment_evaluation is None: - del self.is_environment_evaluation - return - if not isinstance(is_environment_evaluation, bool): - raise TypeError('is_environment_evaluation must be of type bool') - self._is_environment_evaluation = is_environment_evaluation +class ApiSearchModelsDocument(KaggleObject): + r""" + Based on kaggle.models.SearchModelsDocument - @property - def prize_type(self) -> 'RewardTypeId': - """The prize/award type of the Competition""" - return self._prize_type + Attributes: + 
instance_count (int) + The total number of instances in the Model + notebook_count (int) + The total number of notebooks in the Model + """ - @prize_type.setter - def prize_type(self, prize_type: 'RewardTypeId'): - if prize_type is None: - del self.prize_type - return - if not isinstance(prize_type, RewardTypeId): - raise TypeError('prize_type must be of type RewardTypeId') - self._prize_type = prize_type + def __init__(self): + self._instance_count = 0 + self._notebook_count = 0 + self._freeze() @property - def prize_value(self) -> float: - """The prize/award value of the Competition""" - return self._prize_value or 0.0 + def instance_count(self) -> int: + """The total number of instances in the Model""" + return self._instance_count - @prize_value.setter - def prize_value(self, prize_value: Optional[float]): - if prize_value is None: - del self.prize_value + @instance_count.setter + def instance_count(self, instance_count: int): + if instance_count is None: + del self.instance_count return - if not isinstance(prize_value, float): - raise TypeError('prize_value must be of type float') - self._prize_value = prize_value + if not isinstance(instance_count, int): + raise TypeError('instance_count must be of type int') + self._instance_count = instance_count @property - def is_launched(self) -> bool: - """Whether the competition has launched (even if it's ended)""" - return self._is_launched + def notebook_count(self) -> int: + """The total number of notebooks in the Model""" + return self._notebook_count - @is_launched.setter - def is_launched(self, is_launched: bool): - if is_launched is None: - del self.is_launched + @notebook_count.setter + def notebook_count(self, notebook_count: int): + if notebook_count is None: + del self.notebook_count return - if not isinstance(is_launched, bool): - raise TypeError('is_launched must be of type bool') - self._is_launched = is_launched + if not isinstance(notebook_count, int): + raise TypeError('notebook_count must be of type int') + self._notebook_count = notebook_count - @property - def owner_user_has_joined(self) -> bool: - r""" - Whether the owner user (profile user, then current user) has joined the - competition - """ - return self._owner_user_has_joined - @owner_user_has_joined.setter - def owner_user_has_joined(self, owner_user_has_joined: bool): - if owner_user_has_joined is None: - del self.owner_user_has_joined - return - if not isinstance(owner_user_has_joined, bool): - raise TypeError('owner_user_has_joined must be of type bool') - self._owner_user_has_joined = owner_user_has_joined +class ApiSearchModelsFilters(KaggleObject): + r""" + Based on kaggle.models.SearchModelsFilters - @property - def is_limited_participation(self) -> bool: - """Whether the competition is a limited participation competition""" - return self._is_limited_participation + Attributes: + size (ListSearchContentRangeFilter) + The size of the Model used to filter the documents + """ - @is_limited_participation.setter - def is_limited_participation(self, is_limited_participation: bool): - if is_limited_participation is None: - del self.is_limited_participation - return - if not isinstance(is_limited_participation, bool): - raise TypeError('is_limited_participation must be of type bool') - self._is_limited_participation = is_limited_participation + def __init__(self): + self._size = None + self._freeze() @property - def only_allow_kernel_submissions(self) -> bool: - """Whether only kernel submissions are allowed""" - return self._only_allow_kernel_submissions + def 
size(self) -> Optional['ListSearchContentRangeFilter']: + """The size of the Model used to filter the documents""" + return self._size - @only_allow_kernel_submissions.setter - def only_allow_kernel_submissions(self, only_allow_kernel_submissions: bool): - if only_allow_kernel_submissions is None: - del self.only_allow_kernel_submissions + @size.setter + def size(self, size: Optional['ListSearchContentRangeFilter']): + if size is None: + del self.size return - if not isinstance(only_allow_kernel_submissions, bool): - raise TypeError('only_allow_kernel_submissions must be of type bool') - self._only_allow_kernel_submissions = only_allow_kernel_submissions + if not isinstance(size, ListSearchContentRangeFilter): + raise TypeError('size must be of type ListSearchContentRangeFilter') + self._size = size -class ApiSearchCompetitionsFilters(KaggleObject): +class ApiSearchUsersDocument(KaggleObject): r""" - Based on kaggle.competitions.SearchCompetitionsFilters + Based on kaggle.users.SearchUsersDocument Attributes: - role (SearchCompetitionsRole) - The Competition role used to filter the documents - status (SearchCompetitionsStatus) - The Competition status used to filter the documents - profile_visibility (SearchCompetitionsProfileVisibility) - Competition visibility status on user profile - earned_medal (bool) - Whether to return documents that the owner_user_id earned a medal for. + grandmaster_tier_level (int) + User's GM tier level. Tier levels are awarded starting at GM. All + users who are not GM will have a tier level of 0. + user_location (str) + User location string, if location sharing is opted-in. In the format: + 'city, region, country'. + occupation_organization_name (str) + Occupation organization name as indicated on the user's profile. + competition_ranking (int) + Current ranking for the user in the competition achievement type. + competition_points (int) + Current points for the user in the competition achievement type. + kernel_ranking (int) + Current ranking for the user in the kernel achievement type. + kernel_points (int) + Current points for the user in the kernel achievement type. + dataset_ranking (int) + Current ranking for the user in dataset achievement type. + dataset_points (int) + Current points for the user in the dataset achievement type. """ def __init__(self): - self._role = SearchCompetitionsRole.SEARCH_COMPETITIONS_ROLE_ANY - self._status = SearchCompetitionsStatus.SEARCH_COMPETITIONS_STATUS_ANY - self._profile_visibility = SearchCompetitionsProfileVisibility.SEARCH_COMPETITIONS_PROFILE_VISIBILITY_ANY - self._earned_medal = None + self._grandmaster_tier_level = 0 + self._user_location = None + self._occupation_organization_name = None + self._competition_ranking = None + self._competition_points = None + self._kernel_ranking = None + self._kernel_points = None + self._dataset_ranking = None + self._dataset_points = None self._freeze() @property - def role(self) -> 'SearchCompetitionsRole': - """The Competition role used to filter the documents""" - return self._role + def grandmaster_tier_level(self) -> int: + r""" + User's GM tier level. Tier levels are awarded starting at GM. All + users who are not GM will have a tier level of 0. 
+ """ + return self._grandmaster_tier_level - @role.setter - def role(self, role: 'SearchCompetitionsRole'): - if role is None: - del self.role + @grandmaster_tier_level.setter + def grandmaster_tier_level(self, grandmaster_tier_level: int): + if grandmaster_tier_level is None: + del self.grandmaster_tier_level return - if not isinstance(role, SearchCompetitionsRole): - raise TypeError('role must be of type SearchCompetitionsRole') - self._role = role + if not isinstance(grandmaster_tier_level, int): + raise TypeError('grandmaster_tier_level must be of type int') + self._grandmaster_tier_level = grandmaster_tier_level @property - def status(self) -> 'SearchCompetitionsStatus': - """The Competition status used to filter the documents""" - return self._status + def user_location(self) -> str: + r""" + User location string, if location sharing is opted-in. In the format: + 'city, region, country'. + """ + return self._user_location or "" - @status.setter - def status(self, status: 'SearchCompetitionsStatus'): - if status is None: - del self.status + @user_location.setter + def user_location(self, user_location: Optional[str]): + if user_location is None: + del self.user_location return - if not isinstance(status, SearchCompetitionsStatus): - raise TypeError('status must be of type SearchCompetitionsStatus') - self._status = status + if not isinstance(user_location, str): + raise TypeError('user_location must be of type str') + self._user_location = user_location @property - def profile_visibility(self) -> 'SearchCompetitionsProfileVisibility': - """Competition visibility status on user profile""" - return self._profile_visibility + def occupation_organization_name(self) -> str: + """Occupation organization name as indicated on the user's profile.""" + return self._occupation_organization_name or "" - @profile_visibility.setter - def profile_visibility(self, profile_visibility: 'SearchCompetitionsProfileVisibility'): - if profile_visibility is None: - del self.profile_visibility + @occupation_organization_name.setter + def occupation_organization_name(self, occupation_organization_name: Optional[str]): + if occupation_organization_name is None: + del self.occupation_organization_name return - if not isinstance(profile_visibility, SearchCompetitionsProfileVisibility): - raise TypeError('profile_visibility must be of type SearchCompetitionsProfileVisibility') - self._profile_visibility = profile_visibility + if not isinstance(occupation_organization_name, str): + raise TypeError('occupation_organization_name must be of type str') + self._occupation_organization_name = occupation_organization_name @property - def earned_medal(self) -> bool: - """Whether to return documents that the owner_user_id earned a medal for.""" - return self._earned_medal or False + def competition_ranking(self) -> int: + """Current ranking for the user in the competition achievement type.""" + return self._competition_ranking or 0 - @earned_medal.setter - def earned_medal(self, earned_medal: Optional[bool]): - if earned_medal is None: - del self.earned_medal + @competition_ranking.setter + def competition_ranking(self, competition_ranking: Optional[int]): + if competition_ranking is None: + del self.competition_ranking return - if not isinstance(earned_medal, bool): - raise TypeError('earned_medal must be of type bool') - self._earned_medal = earned_medal - - -class ApiSearchDatasetsDocument(KaggleObject): - r""" - Based on kaggle.datasets.SearchDatasetsDocument - - Attributes: - usability_rating (float) - The usability 
rating of the Dataset - file_count (int) - How many files the Dataset has - file_types (DatasetFileType) - The file types of all the files in the Dataset - size (int) - The size of the Dataset - """ + if not isinstance(competition_ranking, int): + raise TypeError('competition_ranking must be of type int') + self._competition_ranking = competition_ranking - def __init__(self): - self._usability_rating = 0.0 - self._file_count = 0 - self._file_types = [] - self._size = 0 - self._freeze() + @property + def competition_points(self) -> int: + """Current points for the user in the competition achievement type.""" + return self._competition_points or 0 + + @competition_points.setter + def competition_points(self, competition_points: Optional[int]): + if competition_points is None: + del self.competition_points + return + if not isinstance(competition_points, int): + raise TypeError('competition_points must be of type int') + self._competition_points = competition_points @property - def usability_rating(self) -> float: - """The usability rating of the Dataset""" - return self._usability_rating + def kernel_ranking(self) -> int: + """Current ranking for the user in the kernel achievement type.""" + return self._kernel_ranking or 0 - @usability_rating.setter - def usability_rating(self, usability_rating: float): - if usability_rating is None: - del self.usability_rating + @kernel_ranking.setter + def kernel_ranking(self, kernel_ranking: Optional[int]): + if kernel_ranking is None: + del self.kernel_ranking return - if not isinstance(usability_rating, float): - raise TypeError('usability_rating must be of type float') - self._usability_rating = usability_rating + if not isinstance(kernel_ranking, int): + raise TypeError('kernel_ranking must be of type int') + self._kernel_ranking = kernel_ranking @property - def file_count(self) -> int: - """How many files the Dataset has""" - return self._file_count + def kernel_points(self) -> int: + """Current points for the user in the kernel achievement type.""" + return self._kernel_points or 0 - @file_count.setter - def file_count(self, file_count: int): - if file_count is None: - del self.file_count + @kernel_points.setter + def kernel_points(self, kernel_points: Optional[int]): + if kernel_points is None: + del self.kernel_points return - if not isinstance(file_count, int): - raise TypeError('file_count must be of type int') - self._file_count = file_count + if not isinstance(kernel_points, int): + raise TypeError('kernel_points must be of type int') + self._kernel_points = kernel_points @property - def file_types(self) -> Optional[List['DatasetFileType']]: - """The file types of all the files in the Dataset""" - return self._file_types + def dataset_ranking(self) -> int: + """Current ranking for the user in dataset achievement type.""" + return self._dataset_ranking or 0 - @file_types.setter - def file_types(self, file_types: Optional[List['DatasetFileType']]): - if file_types is None: - del self.file_types + @dataset_ranking.setter + def dataset_ranking(self, dataset_ranking: Optional[int]): + if dataset_ranking is None: + del self.dataset_ranking return - if not isinstance(file_types, list): - raise TypeError('file_types must be of type list') - if not all([isinstance(t, DatasetFileType) for t in file_types]): - raise TypeError('file_types must contain only items of type DatasetFileType') - self._file_types = file_types + if not isinstance(dataset_ranking, int): + raise TypeError('dataset_ranking must be of type int') + self._dataset_ranking = 
dataset_ranking @property - def size(self) -> int: - """The size of the Dataset""" - return self._size + def dataset_points(self) -> int: + """Current points for the user in the dataset achievement type.""" + return self._dataset_points or 0 - @size.setter - def size(self, size: int): - if size is None: - del self.size + @dataset_points.setter + def dataset_points(self, dataset_points: Optional[int]): + if dataset_points is None: + del self.dataset_points return - if not isinstance(size, int): - raise TypeError('size must be of type int') - self._size = size + if not isinstance(dataset_points, int): + raise TypeError('dataset_points must be of type int') + self._dataset_points = dataset_points -class ApiSearchDatasetsFilters(KaggleObject): +class ApiSearchUsersFilters(KaggleObject): r""" - Based on kaggle.datasets.SearchDiscussionsFilters + Based on kaggle.users.SearchUsersFilters Attributes: - file_type (DatasetFileTypeGroup) - The file types used to filter the documents - license_group (DatasetLicenseGroup) - The license groups used to filter the documents - size (DatasetSizeGroup) - The dataset size range used to filter the documents - earned_medal (bool) - Whether to return documents that the owner_user_id earned a medal for. + user_locations (str) + Filter to users that have one of the the specified locations. Expects the + format: 'city, region, country' for each. + tier (UserAchievementTier) + Filter to users that have the specified performance tier. + user_ids (int) + Filter to users based on the provided user ids. + require_ranking_for_type (UserAchievementType) + Filter to users that have points for the specified type. + occupation_organization_names (str) + Filter to users that have one of the provided occupation organization names + indicated on their user profile, i.e. http://screen/3N68JKC4hocxWmn. Note: + This is *not* the same thing as a Kaggle Organization, such as + kaggle.com/organizations/google. + grandmaster_level (ListSearchContentRangeFilter) + Filter to users that have the specified range of Grandmaster tier level. """ def __init__(self): - self._file_type = DatasetFileTypeGroup.DATASET_FILE_TYPE_GROUP_ALL - self._license_group = None - self._size = None - self._earned_medal = None + self._user_locations = [] + self._tier = None + self._user_ids = [] + self._require_ranking_for_type = None + self._occupation_organization_names = [] + self._grandmaster_level = None self._freeze() @property - def file_type(self) -> 'DatasetFileTypeGroup': - """The file types used to filter the documents""" - return self._file_type + def user_locations(self) -> Optional[List[str]]: + r""" + Filter to users that have one of the the specified locations. Expects the + format: 'city, region, country' for each. 
+ """ + return self._user_locations - @file_type.setter - def file_type(self, file_type: 'DatasetFileTypeGroup'): - if file_type is None: - del self.file_type + @user_locations.setter + def user_locations(self, user_locations: Optional[List[str]]): + if user_locations is None: + del self.user_locations return - if not isinstance(file_type, DatasetFileTypeGroup): - raise TypeError('file_type must be of type DatasetFileTypeGroup') - self._file_type = file_type + if not isinstance(user_locations, list): + raise TypeError('user_locations must be of type list') + if not all([isinstance(t, str) for t in user_locations]): + raise TypeError('user_locations must contain only items of type str') + self._user_locations = user_locations @property - def license_group(self) -> 'DatasetLicenseGroup': - """The license groups used to filter the documents""" - return self._license_group or DatasetLicenseGroup.DATASET_LICENSE_GROUP_ALL + def tier(self) -> 'UserAchievementTier': + """Filter to users that have the specified performance tier.""" + return self._tier or UserAchievementTier.NOVICE - @license_group.setter - def license_group(self, license_group: Optional['DatasetLicenseGroup']): - if license_group is None: - del self.license_group + @tier.setter + def tier(self, tier: Optional['UserAchievementTier']): + if tier is None: + del self.tier return - if not isinstance(license_group, DatasetLicenseGroup): - raise TypeError('license_group must be of type DatasetLicenseGroup') - self._license_group = license_group + if not isinstance(tier, UserAchievementTier): + raise TypeError('tier must be of type UserAchievementTier') + self._tier = tier @property - def size(self) -> 'DatasetSizeGroup': - """The dataset size range used to filter the documents""" - return self._size or DatasetSizeGroup.DATASET_SIZE_GROUP_ALL + def user_ids(self) -> Optional[List[int]]: + """Filter to users based on the provided user ids.""" + return self._user_ids - @size.setter - def size(self, size: Optional['DatasetSizeGroup']): - if size is None: - del self.size + @user_ids.setter + def user_ids(self, user_ids: Optional[List[int]]): + if user_ids is None: + del self.user_ids return - if not isinstance(size, DatasetSizeGroup): - raise TypeError('size must be of type DatasetSizeGroup') - self._size = size + if not isinstance(user_ids, list): + raise TypeError('user_ids must be of type list') + if not all([isinstance(t, int) for t in user_ids]): + raise TypeError('user_ids must contain only items of type int') + self._user_ids = user_ids @property - def earned_medal(self) -> bool: - """Whether to return documents that the owner_user_id earned a medal for.""" - return self._earned_medal or False + def require_ranking_for_type(self) -> 'UserAchievementType': + """Filter to users that have points for the specified type.""" + return self._require_ranking_for_type or UserAchievementType.USER_ACHIEVEMENT_TYPE_UNSPECIFIED - @earned_medal.setter - def earned_medal(self, earned_medal: Optional[bool]): - if earned_medal is None: - del self.earned_medal + @require_ranking_for_type.setter + def require_ranking_for_type(self, require_ranking_for_type: Optional['UserAchievementType']): + if require_ranking_for_type is None: + del self.require_ranking_for_type + return + if not isinstance(require_ranking_for_type, UserAchievementType): + raise TypeError('require_ranking_for_type must be of type UserAchievementType') + self._require_ranking_for_type = require_ranking_for_type + + @property + def occupation_organization_names(self) -> 
Optional[List[str]]: + r""" + Filter to users that have one of the provided occupation organization names + indicated on their user profile, i.e. http://screen/3N68JKC4hocxWmn. Note: + This is *not* the same thing as a Kaggle Organization, such as + kaggle.com/organizations/google. + """ + return self._occupation_organization_names + + @occupation_organization_names.setter + def occupation_organization_names(self, occupation_organization_names: Optional[List[str]]): + if occupation_organization_names is None: + del self.occupation_organization_names + return + if not isinstance(occupation_organization_names, list): + raise TypeError('occupation_organization_names must be of type list') + if not all([isinstance(t, str) for t in occupation_organization_names]): + raise TypeError('occupation_organization_names must contain only items of type str') + self._occupation_organization_names = occupation_organization_names + + @property + def grandmaster_level(self) -> Optional['ListSearchContentRangeFilter']: + """Filter to users that have the specified range of Grandmaster tier level.""" + return self._grandmaster_level + + @grandmaster_level.setter + def grandmaster_level(self, grandmaster_level: Optional['ListSearchContentRangeFilter']): + if grandmaster_level is None: + del self.grandmaster_level return - if not isinstance(earned_medal, bool): - raise TypeError('earned_medal must be of type bool') - self._earned_medal = earned_medal + if not isinstance(grandmaster_level, ListSearchContentRangeFilter): + raise TypeError('grandmaster_level must be of type ListSearchContentRangeFilter') + self._grandmaster_level = grandmaster_level -class ApiSearchDiscussionsDocument(KaggleObject): +class ApiUserAvatar(KaggleObject): r""" - Based on kaggle.discussions.SearchDiscussionsDocument + Based on kaggle.users.UserAvatar Attributes: - new_comment_url (str) - message_stripped (str) - The message of the topic/comment, stripped of HTML (at time of index) - message_markdown (str) - The markdown for the message of the topic/comment - forum_name (str) - The name of the parent forum - forum_url (str) - The URL for the parent forum - source_type (SearchDiscussionsSourceType) - The source type of the comment - topic_type (SearchDiscussionsTopicType) - The type of topic returned - type (SearchDiscussionsDocumentType) - The type of document returned - write_up_metadata (WriteUpItemInfo) - If the document is a WriteUp, extra WriteUp-specific data - is provided + id (int) + ID for the given user + display_name (str) + Display name for the given user + thumbnail_url (str) + Thumbnail URL for the given user + url (str) + Profile URL for the given user + user_name (str) + User name for the given user + progression_opt_out (bool) + True if the user is opted out of the progression system. 
+ tier (UserAchievementTier) + Tier for the given user """ def __init__(self): - self._new_comment_url = None - self._message_stripped = "" - self._message_markdown = None - self._forum_name = "" - self._forum_url = None - self._source_type = SearchDiscussionsSourceType.SEARCH_DISCUSSIONS_SOURCE_TYPE_UNSPECIFIED - self._topic_type = SearchDiscussionsTopicType.SEARCH_DISCUSSIONS_TOPIC_TYPE_UNSPECIFIED - self._type = SearchDiscussionsDocumentType.SEARCH_DISCUSSIONS_DOCUMENT_TYPE_UNSPECIFIED - self._write_up_metadata = None + self._id = 0 + self._display_name = None + self._thumbnail_url = None + self._url = None + self._user_name = None + self._progression_opt_out = None + self._tier = UserAchievementTier.NOVICE self._freeze() @property - def new_comment_url(self) -> str: - return self._new_comment_url or "" - - @new_comment_url.setter - def new_comment_url(self, new_comment_url: Optional[str]): - if new_comment_url is None: - del self.new_comment_url - return - if not isinstance(new_comment_url, str): - raise TypeError('new_comment_url must be of type str') - self._new_comment_url = new_comment_url - - @property - def message_stripped(self) -> str: - """The message of the topic/comment, stripped of HTML (at time of index)""" - return self._message_stripped - - @message_stripped.setter - def message_stripped(self, message_stripped: str): - if message_stripped is None: - del self.message_stripped - return - if not isinstance(message_stripped, str): - raise TypeError('message_stripped must be of type str') - self._message_stripped = message_stripped - - @property - def message_markdown(self) -> str: - """The markdown for the message of the topic/comment""" - return self._message_markdown or "" + def id(self) -> int: + """ID for the given user""" + return self._id - @message_markdown.setter - def message_markdown(self, message_markdown: Optional[str]): - if message_markdown is None: - del self.message_markdown + @id.setter + def id(self, id: int): + if id is None: + del self.id return - if not isinstance(message_markdown, str): - raise TypeError('message_markdown must be of type str') - self._message_markdown = message_markdown + if not isinstance(id, int): + raise TypeError('id must be of type int') + self._id = id @property - def forum_name(self) -> str: - """The name of the parent forum""" - return self._forum_name + def display_name(self) -> str: + """Display name for the given user""" + return self._display_name or "" - @forum_name.setter - def forum_name(self, forum_name: str): - if forum_name is None: - del self.forum_name + @display_name.setter + def display_name(self, display_name: Optional[str]): + if display_name is None: + del self.display_name return - if not isinstance(forum_name, str): - raise TypeError('forum_name must be of type str') - self._forum_name = forum_name + if not isinstance(display_name, str): + raise TypeError('display_name must be of type str') + self._display_name = display_name @property - def forum_url(self) -> str: - """The URL for the parent forum""" - return self._forum_url or "" + def thumbnail_url(self) -> str: + """Thumbnail URL for the given user""" + return self._thumbnail_url or "" - @forum_url.setter - def forum_url(self, forum_url: Optional[str]): - if forum_url is None: - del self.forum_url + @thumbnail_url.setter + def thumbnail_url(self, thumbnail_url: Optional[str]): + if thumbnail_url is None: + del self.thumbnail_url return - if not isinstance(forum_url, str): - raise TypeError('forum_url must be of type str') - self._forum_url = forum_url + if 
not isinstance(thumbnail_url, str): + raise TypeError('thumbnail_url must be of type str') + self._thumbnail_url = thumbnail_url @property - def source_type(self) -> 'SearchDiscussionsSourceType': - """The source type of the comment""" - return self._source_type + def url(self) -> str: + """Profile URL for the given user""" + return self._url or "" - @source_type.setter - def source_type(self, source_type: 'SearchDiscussionsSourceType'): - if source_type is None: - del self.source_type + @url.setter + def url(self, url: Optional[str]): + if url is None: + del self.url return - if not isinstance(source_type, SearchDiscussionsSourceType): - raise TypeError('source_type must be of type SearchDiscussionsSourceType') - self._source_type = source_type + if not isinstance(url, str): + raise TypeError('url must be of type str') + self._url = url @property - def topic_type(self) -> 'SearchDiscussionsTopicType': - """The type of topic returned""" - return self._topic_type + def user_name(self) -> str: + """User name for the given user""" + return self._user_name or "" - @topic_type.setter - def topic_type(self, topic_type: 'SearchDiscussionsTopicType'): - if topic_type is None: - del self.topic_type + @user_name.setter + def user_name(self, user_name: Optional[str]): + if user_name is None: + del self.user_name return - if not isinstance(topic_type, SearchDiscussionsTopicType): - raise TypeError('topic_type must be of type SearchDiscussionsTopicType') - self._topic_type = topic_type + if not isinstance(user_name, str): + raise TypeError('user_name must be of type str') + self._user_name = user_name @property - def type(self) -> 'SearchDiscussionsDocumentType': - """The type of document returned""" - return self._type + def progression_opt_out(self) -> bool: + """True if the user is opted out of the progression system.""" + return self._progression_opt_out or False - @type.setter - def type(self, type: 'SearchDiscussionsDocumentType'): - if type is None: - del self.type + @progression_opt_out.setter + def progression_opt_out(self, progression_opt_out: Optional[bool]): + if progression_opt_out is None: + del self.progression_opt_out return - if not isinstance(type, SearchDiscussionsDocumentType): - raise TypeError('type must be of type SearchDiscussionsDocumentType') - self._type = type + if not isinstance(progression_opt_out, bool): + raise TypeError('progression_opt_out must be of type bool') + self._progression_opt_out = progression_opt_out @property - def write_up_metadata(self) -> Optional['WriteUpItemInfo']: - r""" - If the document is a WriteUp, extra WriteUp-specific data - is provided - """ - return self._write_up_metadata or None + def tier(self) -> 'UserAchievementTier': + """Tier for the given user""" + return self._tier - @write_up_metadata.setter - def write_up_metadata(self, write_up_metadata: Optional[Optional['WriteUpItemInfo']]): - if write_up_metadata is None: - del self.write_up_metadata + @tier.setter + def tier(self, tier: 'UserAchievementTier'): + if tier is None: + del self.tier return - if not isinstance(write_up_metadata, WriteUpItemInfo): - raise TypeError('write_up_metadata must be of type WriteUpItemInfo') - self._write_up_metadata = write_up_metadata + if not isinstance(tier, UserAchievementTier): + raise TypeError('tier must be of type UserAchievementTier') + self._tier = tier -class ApiSearchDiscussionsFilters(KaggleObject): +class ListEntitiesDocument(KaggleObject): r""" - Based on kaggle.discussions.SearchDiscussionsFilters + Based on 
kaggle.search.ListSearchContentDocument Attributes: - source_type (SearchDiscussionsSourceType) - The discussion source type used to filter the documents - only_new_comments (bool) - Show only topics with new comments - write_up_inclusion_type (WriteUpInclusionType) - Determines whether or not WriteUps should be included - write_up_types (WriteUpType) - Filters on WriteUp type + id (int) + The DB ID (i.e. the PK from the table) of the document + document_type (DocumentType) + The type of content of the document + title (str) + The canonical title of the document + image_url (str) + The thumbnail URL of the document + create_time (datetime) + The canonical creation time of the document; May mean different things + between content types + update_time (datetime) + The canonical update time of the document; May be different between content + types + is_private (bool) + Whether the content is marked as private + votes (int) + The total votes (or score, if downvotes are supported) for the document + owner_user (ApiUserAvatar) + owner_organization (ApiOrganizationCard) + competition_document (ApiSearchCompetitionsDocument) + dataset_document (ApiSearchDatasetsDocument) + kernel_document (ApiSearchKernelsDocument) + model_document (ApiSearchModelsDocument) + discussion_document (ApiSearchDiscussionsDocument) + user_document (ApiSearchUsersDocument) + slug (str) + The slug of the document (which may be close to the url) """ def __init__(self): - self._source_type = SearchDiscussionsSourceType.SEARCH_DISCUSSIONS_SOURCE_TYPE_UNSPECIFIED - self._only_new_comments = False - self._write_up_inclusion_type = WriteUpInclusionType.WRITE_UP_INCLUSION_TYPE_UNSPECIFIED - self._write_up_types = [] + self._id = 0 + self._document_type = DocumentType.DOCUMENT_TYPE_UNSPECIFIED + self._title = "" + self._image_url = "" + self._create_time = None + self._update_time = None + self._is_private = None + self._votes = None + self._owner_user = None + self._owner_organization = None + self._competition_document = None + self._dataset_document = None + self._kernel_document = None + self._model_document = None + self._discussion_document = None + self._user_document = None + self._slug = None self._freeze() @property - def source_type(self) -> 'SearchDiscussionsSourceType': - """The discussion source type used to filter the documents""" - return self._source_type + def id(self) -> int: + """The DB ID (i.e. 
the PK from the table) of the document""" + return self._id - @source_type.setter - def source_type(self, source_type: 'SearchDiscussionsSourceType'): - if source_type is None: - del self.source_type + @id.setter + def id(self, id: int): + if id is None: + del self.id return - if not isinstance(source_type, SearchDiscussionsSourceType): - raise TypeError('source_type must be of type SearchDiscussionsSourceType') - self._source_type = source_type + if not isinstance(id, int): + raise TypeError('id must be of type int') + self._id = id @property - def only_new_comments(self) -> bool: - """Show only topics with new comments""" - return self._only_new_comments + def document_type(self) -> 'DocumentType': + """The type of content of the document""" + return self._document_type - @only_new_comments.setter - def only_new_comments(self, only_new_comments: bool): - if only_new_comments is None: - del self.only_new_comments + @document_type.setter + def document_type(self, document_type: 'DocumentType'): + if document_type is None: + del self.document_type return - if not isinstance(only_new_comments, bool): - raise TypeError('only_new_comments must be of type bool') - self._only_new_comments = only_new_comments + if not isinstance(document_type, DocumentType): + raise TypeError('document_type must be of type DocumentType') + self._document_type = document_type @property - def write_up_inclusion_type(self) -> 'WriteUpInclusionType': - """Determines whether or not WriteUps should be included""" - return self._write_up_inclusion_type + def title(self) -> str: + """The canonical title of the document""" + return self._title - @write_up_inclusion_type.setter - def write_up_inclusion_type(self, write_up_inclusion_type: 'WriteUpInclusionType'): - if write_up_inclusion_type is None: - del self.write_up_inclusion_type + @title.setter + def title(self, title: str): + if title is None: + del self.title return - if not isinstance(write_up_inclusion_type, WriteUpInclusionType): - raise TypeError('write_up_inclusion_type must be of type WriteUpInclusionType') - self._write_up_inclusion_type = write_up_inclusion_type + if not isinstance(title, str): + raise TypeError('title must be of type str') + self._title = title @property - def write_up_types(self) -> Optional[List['WriteUpType']]: - """Filters on WriteUp type""" - return self._write_up_types + def image_url(self) -> str: + """The thumbnail URL of the document""" + return self._image_url - @write_up_types.setter - def write_up_types(self, write_up_types: Optional[List['WriteUpType']]): - if write_up_types is None: - del self.write_up_types + @image_url.setter + def image_url(self, image_url: str): + if image_url is None: + del self.image_url return - if not isinstance(write_up_types, list): - raise TypeError('write_up_types must be of type list') - if not all([isinstance(t, WriteUpType) for t in write_up_types]): - raise TypeError('write_up_types must contain only items of type WriteUpType') - self._write_up_types = write_up_types - - -class ApiSearchKernelsDocument(KaggleObject): - r""" - Based on kaggle.kernels.SearchKernelsDocument - - Attributes: - session_id (int) - The session ID of the Kernel - has_linked_submission (bool) - Whether the Kernel has a linked submission - datasource_is_private (bool) - Whether the datasource is private - best_public_score (float) - The best public score of the Kernel's submission - is_draft (bool) - Whether the Kernel is a draft - """ - - def __init__(self): - self._session_id = None - self._has_linked_submission = False 
- self._datasource_is_private = False - self._best_public_score = 0.0 - self._is_draft = False - self._freeze() + if not isinstance(image_url, str): + raise TypeError('image_url must be of type str') + self._image_url = image_url @property - def session_id(self) -> int: - """The session ID of the Kernel""" - return self._session_id or 0 + def create_time(self) -> datetime: + r""" + The canonical creation time of the document; May mean different things + between content types + """ + return self._create_time - @session_id.setter - def session_id(self, session_id: Optional[int]): - if session_id is None: - del self.session_id + @create_time.setter + def create_time(self, create_time: datetime): + if create_time is None: + del self.create_time return - if not isinstance(session_id, int): - raise TypeError('session_id must be of type int') - self._session_id = session_id + if not isinstance(create_time, datetime): + raise TypeError('create_time must be of type datetime') + self._create_time = create_time @property - def has_linked_submission(self) -> bool: - """Whether the Kernel has a linked submission""" - return self._has_linked_submission + def update_time(self) -> datetime: + r""" + The canonical update time of the document; May be different between content + types + """ + return self._update_time or None - @has_linked_submission.setter - def has_linked_submission(self, has_linked_submission: bool): - if has_linked_submission is None: - del self.has_linked_submission + @update_time.setter + def update_time(self, update_time: Optional[datetime]): + if update_time is None: + del self.update_time return - if not isinstance(has_linked_submission, bool): - raise TypeError('has_linked_submission must be of type bool') - self._has_linked_submission = has_linked_submission + if not isinstance(update_time, datetime): + raise TypeError('update_time must be of type datetime') + self._update_time = update_time @property - def datasource_is_private(self) -> bool: - """Whether the datasource is private""" - return self._datasource_is_private + def is_private(self) -> bool: + """Whether the content is marked as private""" + return self._is_private or False - @datasource_is_private.setter - def datasource_is_private(self, datasource_is_private: bool): - if datasource_is_private is None: - del self.datasource_is_private + @is_private.setter + def is_private(self, is_private: Optional[bool]): + if is_private is None: + del self.is_private return - if not isinstance(datasource_is_private, bool): - raise TypeError('datasource_is_private must be of type bool') - self._datasource_is_private = datasource_is_private + if not isinstance(is_private, bool): + raise TypeError('is_private must be of type bool') + self._is_private = is_private @property - def best_public_score(self) -> float: - """The best public score of the Kernel's submission""" - return self._best_public_score + def votes(self) -> int: + """The total votes (or score, if downvotes are supported) for the document""" + return self._votes or 0 - @best_public_score.setter - def best_public_score(self, best_public_score: float): - if best_public_score is None: - del self.best_public_score + @votes.setter + def votes(self, votes: Optional[int]): + if votes is None: + del self.votes return - if not isinstance(best_public_score, float): - raise TypeError('best_public_score must be of type float') - self._best_public_score = best_public_score + if not isinstance(votes, int): + raise TypeError('votes must be of type int') + self._votes = votes @property - 
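# --- Editor's illustrative note; not part of the generated diff. ---
# The optional fields above follow one pattern: the getter substitutes a default
# ('or 0', 'or False', 'or ""'), and assigning None routes through the setter's
# `del self.<field>` branch, which the KaggleObject base class presumably uses
# to reset the attribute to its default. A minimal sketch, assuming
# ListEntitiesDocument is in scope:
doc = ListEntitiesDocument()
doc.votes = 42        # accepted after the isinstance(votes, int) check
doc.is_private = True
doc.votes = None      # triggers `del self.votes`; the getter then falls back to 0
# doc.votes = "42"    # would raise TypeError('votes must be of type int')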
def is_draft(self) -> bool: - """Whether the Kernel is a draft""" - return self._is_draft + def owner_user(self) -> Optional['ApiUserAvatar']: + return self._owner_user or None - @is_draft.setter - def is_draft(self, is_draft: bool): - if is_draft is None: - del self.is_draft + @owner_user.setter + def owner_user(self, owner_user: Optional['ApiUserAvatar']): + if owner_user is None: + del self.owner_user return - if not isinstance(is_draft, bool): - raise TypeError('is_draft must be of type bool') - self._is_draft = is_draft + if not isinstance(owner_user, ApiUserAvatar): + raise TypeError('owner_user must be of type ApiUserAvatar') + del self.owner_organization + self._owner_user = owner_user + @property + def owner_organization(self) -> Optional['ApiOrganizationCard']: + return self._owner_organization or None -class ApiSearchKernelsFilters(KaggleObject): - r""" - Based on kaggle.kernels.SearchKernelsFilters + @owner_organization.setter + def owner_organization(self, owner_organization: Optional['ApiOrganizationCard']): + if owner_organization is None: + del self.owner_organization + return + if not isinstance(owner_organization, ApiOrganizationCard): + raise TypeError('owner_organization must be of type ApiOrganizationCard') + del self.owner_user + self._owner_organization = owner_organization - Attributes: - language (str) - The Kernel language used to filter documents - earned_medal (bool) - Whether to return documents that the owner_user_id earned a medal for. - """ + @property + def competition_document(self) -> Optional['ApiSearchCompetitionsDocument']: + return self._competition_document or None - def __init__(self): - self._language = None - self._earned_medal = None - self._freeze() + @competition_document.setter + def competition_document(self, competition_document: Optional['ApiSearchCompetitionsDocument']): + if competition_document is None: + del self.competition_document + return + if not isinstance(competition_document, ApiSearchCompetitionsDocument): + raise TypeError('competition_document must be of type ApiSearchCompetitionsDocument') + del self.dataset_document + del self.kernel_document + del self.model_document + del self.discussion_document + del self.user_document + self._competition_document = competition_document @property - def language(self) -> str: - """The Kernel language used to filter documents""" - return self._language or "" + def dataset_document(self) -> Optional['ApiSearchDatasetsDocument']: + return self._dataset_document or None - @language.setter - def language(self, language: Optional[str]): - if language is None: - del self.language + @dataset_document.setter + def dataset_document(self, dataset_document: Optional['ApiSearchDatasetsDocument']): + if dataset_document is None: + del self.dataset_document return - if not isinstance(language, str): - raise TypeError('language must be of type str') - self._language = language + if not isinstance(dataset_document, ApiSearchDatasetsDocument): + raise TypeError('dataset_document must be of type ApiSearchDatasetsDocument') + del self.competition_document + del self.kernel_document + del self.model_document + del self.discussion_document + del self.user_document + self._dataset_document = dataset_document @property - def earned_medal(self) -> bool: - """Whether to return documents that the owner_user_id earned a medal for.""" - return self._earned_medal or False + def kernel_document(self) -> Optional['ApiSearchKernelsDocument']: + return self._kernel_document or None - @earned_medal.setter - def 
earned_medal(self, earned_medal: Optional[bool]): - if earned_medal is None: - del self.earned_medal + @kernel_document.setter + def kernel_document(self, kernel_document: Optional['ApiSearchKernelsDocument']): + if kernel_document is None: + del self.kernel_document return - if not isinstance(earned_medal, bool): - raise TypeError('earned_medal must be of type bool') - self._earned_medal = earned_medal + if not isinstance(kernel_document, ApiSearchKernelsDocument): + raise TypeError('kernel_document must be of type ApiSearchKernelsDocument') + del self.competition_document + del self.dataset_document + del self.model_document + del self.discussion_document + del self.user_document + self._kernel_document = kernel_document + @property + def model_document(self) -> Optional['ApiSearchModelsDocument']: + return self._model_document or None -class ApiSearchModelsDocument(KaggleObject): - r""" - Based on kaggle.models.SearchModelsDocument + @model_document.setter + def model_document(self, model_document: Optional['ApiSearchModelsDocument']): + if model_document is None: + del self.model_document + return + if not isinstance(model_document, ApiSearchModelsDocument): + raise TypeError('model_document must be of type ApiSearchModelsDocument') + del self.competition_document + del self.dataset_document + del self.kernel_document + del self.discussion_document + del self.user_document + self._model_document = model_document - Attributes: - instance_count (int) - The total number of instances in the Model - notebook_count (int) - The total number of notebooks in the Model - """ + @property + def discussion_document(self) -> Optional['ApiSearchDiscussionsDocument']: + return self._discussion_document or None - def __init__(self): - self._instance_count = 0 - self._notebook_count = 0 - self._freeze() + @discussion_document.setter + def discussion_document(self, discussion_document: Optional['ApiSearchDiscussionsDocument']): + if discussion_document is None: + del self.discussion_document + return + if not isinstance(discussion_document, ApiSearchDiscussionsDocument): + raise TypeError('discussion_document must be of type ApiSearchDiscussionsDocument') + del self.competition_document + del self.dataset_document + del self.kernel_document + del self.model_document + del self.user_document + self._discussion_document = discussion_document @property - def instance_count(self) -> int: - """The total number of instances in the Model""" - return self._instance_count + def user_document(self) -> Optional['ApiSearchUsersDocument']: + return self._user_document or None - @instance_count.setter - def instance_count(self, instance_count: int): - if instance_count is None: - del self.instance_count + @user_document.setter + def user_document(self, user_document: Optional['ApiSearchUsersDocument']): + if user_document is None: + del self.user_document return - if not isinstance(instance_count, int): - raise TypeError('instance_count must be of type int') - self._instance_count = instance_count + if not isinstance(user_document, ApiSearchUsersDocument): + raise TypeError('user_document must be of type ApiSearchUsersDocument') + del self.competition_document + del self.dataset_document + del self.kernel_document + del self.model_document + del self.discussion_document + self._user_document = user_document @property - def notebook_count(self) -> int: - """The total number of notebooks in the Model""" - return self._notebook_count + def slug(self) -> str: + """The slug of the document (which may be close to the 
url)""" + return self._slug or "" - @notebook_count.setter - def notebook_count(self, notebook_count: int): - if notebook_count is None: - del self.notebook_count + @slug.setter + def slug(self, slug: Optional[str]): + if slug is None: + del self.slug return - if not isinstance(notebook_count, int): - raise TypeError('notebook_count must be of type int') - self._notebook_count = notebook_count + if not isinstance(slug, str): + raise TypeError('slug must be of type str') + self._slug = slug -class ApiSearchModelsFilters(KaggleObject): +class ListEntitiesFilters(KaggleObject): r""" - Based on kaggle.models.SearchModelsFilters + Based on kaggle.search.ListSearchContentFilters Attributes: - size (ListSearchContentRangeFilter) - The size of the Model used to filter the documents + query (str) + The free-text query the user entered to filter results + list_type (ApiListType) + The type of list being requested + privacy (PrivacyFilter) + The privacy filter to apply + owner_type (OwnerType) + The owner type filter to apply + document_types (DocumentType) + The document type filter to apply + competition_filters (ApiSearchCompetitionsFilters) + The set of Competition filters to filter the documents + dataset_filters (ApiSearchDatasetsFilters) + The set of Dataset filters to filter the documents + discussion_filters (ApiSearchDiscussionsFilters) + The set of Discussion filters to filter the documents + kernel_filters (ApiSearchKernelsFilters) + The set of Kernel filters to filter the documents + model_filters (ApiSearchModelsFilters) + The set of Model filters to filter the documents + user_filters (ApiSearchUsersFilters) + The set of User filters to filter the documents """ def __init__(self): - self._size = None + self._query = "" + self._list_type = ApiListType.API_LIST_TYPE_UNSPECIFIED + self._privacy = PrivacyFilter.ALL + self._owner_type = OwnerType.OWNER_TYPE_UNSPECIFIED + self._document_types = [] + self._competition_filters = None + self._dataset_filters = None + self._discussion_filters = None + self._kernel_filters = None + self._model_filters = None + self._user_filters = None self._freeze() @property - def size(self) -> Optional['ListSearchContentRangeFilter']: - """The size of the Model used to filter the documents""" - return self._size + def query(self) -> str: + """The free-text query the user entered to filter results""" + return self._query - @size.setter - def size(self, size: Optional['ListSearchContentRangeFilter']): - if size is None: - del self.size + @query.setter + def query(self, query: str): + if query is None: + del self.query return - if not isinstance(size, ListSearchContentRangeFilter): - raise TypeError('size must be of type ListSearchContentRangeFilter') - self._size = size - - -class ApiSearchUsersDocument(KaggleObject): - r""" - Based on kaggle.users.SearchUsersDocument - - Attributes: - grandmaster_tier_level (int) - User's GM tier level. Tier levels are awarded starting at GM. All - users who are not GM will have a tier level of 0. - user_location (str) - User location string, if location sharing is opted-in. In the format: - 'city, region, country'. - occupation_organization_name (str) - Occupation organization name as indicated on the user's profile. - competition_ranking (int) - Current ranking for the user in the competition achievement type. - competition_points (int) - Current points for the user in the competition achievement type. - kernel_ranking (int) - Current ranking for the user in the kernel achievement type. 
- kernel_points (int) - Current points for the user in the kernel achievement type. - dataset_ranking (int) - Current ranking for the user in dataset achievement type. - dataset_points (int) - Current points for the user in the dataset achievement type. - """ + if not isinstance(query, str): + raise TypeError('query must be of type str') + self._query = query - def __init__(self): - self._grandmaster_tier_level = 0 - self._user_location = None - self._occupation_organization_name = None - self._competition_ranking = None - self._competition_points = None - self._kernel_ranking = None - self._kernel_points = None - self._dataset_ranking = None - self._dataset_points = None - self._freeze() + @property + def list_type(self) -> 'ApiListType': + """The type of list being requested""" + return self._list_type + + @list_type.setter + def list_type(self, list_type: 'ApiListType'): + if list_type is None: + del self.list_type + return + if not isinstance(list_type, ApiListType): + raise TypeError('list_type must be of type ApiListType') + self._list_type = list_type @property - def grandmaster_tier_level(self) -> int: - r""" - User's GM tier level. Tier levels are awarded starting at GM. All - users who are not GM will have a tier level of 0. - """ - return self._grandmaster_tier_level + def privacy(self) -> 'PrivacyFilter': + """The privacy filter to apply""" + return self._privacy - @grandmaster_tier_level.setter - def grandmaster_tier_level(self, grandmaster_tier_level: int): - if grandmaster_tier_level is None: - del self.grandmaster_tier_level + @privacy.setter + def privacy(self, privacy: 'PrivacyFilter'): + if privacy is None: + del self.privacy return - if not isinstance(grandmaster_tier_level, int): - raise TypeError('grandmaster_tier_level must be of type int') - self._grandmaster_tier_level = grandmaster_tier_level + if not isinstance(privacy, PrivacyFilter): + raise TypeError('privacy must be of type PrivacyFilter') + self._privacy = privacy @property - def user_location(self) -> str: - r""" - User location string, if location sharing is opted-in. In the format: - 'city, region, country'. 
- """ - return self._user_location or "" + def owner_type(self) -> 'OwnerType': + """The owner type filter to apply""" + return self._owner_type - @user_location.setter - def user_location(self, user_location: Optional[str]): - if user_location is None: - del self.user_location + @owner_type.setter + def owner_type(self, owner_type: 'OwnerType'): + if owner_type is None: + del self.owner_type return - if not isinstance(user_location, str): - raise TypeError('user_location must be of type str') - self._user_location = user_location + if not isinstance(owner_type, OwnerType): + raise TypeError('owner_type must be of type OwnerType') + self._owner_type = owner_type @property - def occupation_organization_name(self) -> str: - """Occupation organization name as indicated on the user's profile.""" - return self._occupation_organization_name or "" + def document_types(self) -> Optional[List['DocumentType']]: + """The document type filter to apply""" + return self._document_types - @occupation_organization_name.setter - def occupation_organization_name(self, occupation_organization_name: Optional[str]): - if occupation_organization_name is None: - del self.occupation_organization_name + @document_types.setter + def document_types(self, document_types: Optional[List['DocumentType']]): + if document_types is None: + del self.document_types return - if not isinstance(occupation_organization_name, str): - raise TypeError('occupation_organization_name must be of type str') - self._occupation_organization_name = occupation_organization_name + if not isinstance(document_types, list): + raise TypeError('document_types must be of type list') + if not all([isinstance(t, DocumentType) for t in document_types]): + raise TypeError('document_types must contain only items of type DocumentType') + self._document_types = document_types @property - def competition_ranking(self) -> int: - """Current ranking for the user in the competition achievement type.""" - return self._competition_ranking or 0 + def competition_filters(self) -> Optional['ApiSearchCompetitionsFilters']: + """The set of Competition filters to filter the documents""" + return self._competition_filters - @competition_ranking.setter - def competition_ranking(self, competition_ranking: Optional[int]): - if competition_ranking is None: - del self.competition_ranking + @competition_filters.setter + def competition_filters(self, competition_filters: Optional['ApiSearchCompetitionsFilters']): + if competition_filters is None: + del self.competition_filters return - if not isinstance(competition_ranking, int): - raise TypeError('competition_ranking must be of type int') - self._competition_ranking = competition_ranking + if not isinstance(competition_filters, ApiSearchCompetitionsFilters): + raise TypeError('competition_filters must be of type ApiSearchCompetitionsFilters') + self._competition_filters = competition_filters @property - def competition_points(self) -> int: - """Current points for the user in the competition achievement type.""" - return self._competition_points or 0 + def dataset_filters(self) -> Optional['ApiSearchDatasetsFilters']: + """The set of Dataset filters to filter the documents""" + return self._dataset_filters - @competition_points.setter - def competition_points(self, competition_points: Optional[int]): - if competition_points is None: - del self.competition_points + @dataset_filters.setter + def dataset_filters(self, dataset_filters: Optional['ApiSearchDatasetsFilters']): + if dataset_filters is None: + del 
self.dataset_filters return - if not isinstance(competition_points, int): - raise TypeError('competition_points must be of type int') - self._competition_points = competition_points + if not isinstance(dataset_filters, ApiSearchDatasetsFilters): + raise TypeError('dataset_filters must be of type ApiSearchDatasetsFilters') + self._dataset_filters = dataset_filters @property - def kernel_ranking(self) -> int: - """Current ranking for the user in the kernel achievement type.""" - return self._kernel_ranking or 0 + def discussion_filters(self) -> Optional['ApiSearchDiscussionsFilters']: + """The set of Discussion filters to filter the documents""" + return self._discussion_filters - @kernel_ranking.setter - def kernel_ranking(self, kernel_ranking: Optional[int]): - if kernel_ranking is None: - del self.kernel_ranking + @discussion_filters.setter + def discussion_filters(self, discussion_filters: Optional['ApiSearchDiscussionsFilters']): + if discussion_filters is None: + del self.discussion_filters return - if not isinstance(kernel_ranking, int): - raise TypeError('kernel_ranking must be of type int') - self._kernel_ranking = kernel_ranking + if not isinstance(discussion_filters, ApiSearchDiscussionsFilters): + raise TypeError('discussion_filters must be of type ApiSearchDiscussionsFilters') + self._discussion_filters = discussion_filters @property - def kernel_points(self) -> int: - """Current points for the user in the kernel achievement type.""" - return self._kernel_points or 0 + def kernel_filters(self) -> Optional['ApiSearchKernelsFilters']: + """The set of Kernel filters to filter the documents""" + return self._kernel_filters - @kernel_points.setter - def kernel_points(self, kernel_points: Optional[int]): - if kernel_points is None: - del self.kernel_points + @kernel_filters.setter + def kernel_filters(self, kernel_filters: Optional['ApiSearchKernelsFilters']): + if kernel_filters is None: + del self.kernel_filters return - if not isinstance(kernel_points, int): - raise TypeError('kernel_points must be of type int') - self._kernel_points = kernel_points + if not isinstance(kernel_filters, ApiSearchKernelsFilters): + raise TypeError('kernel_filters must be of type ApiSearchKernelsFilters') + self._kernel_filters = kernel_filters @property - def dataset_ranking(self) -> int: - """Current ranking for the user in dataset achievement type.""" - return self._dataset_ranking or 0 + def model_filters(self) -> Optional['ApiSearchModelsFilters']: + """The set of Model filters to filter the documents""" + return self._model_filters - @dataset_ranking.setter - def dataset_ranking(self, dataset_ranking: Optional[int]): - if dataset_ranking is None: - del self.dataset_ranking + @model_filters.setter + def model_filters(self, model_filters: Optional['ApiSearchModelsFilters']): + if model_filters is None: + del self.model_filters return - if not isinstance(dataset_ranking, int): - raise TypeError('dataset_ranking must be of type int') - self._dataset_ranking = dataset_ranking + if not isinstance(model_filters, ApiSearchModelsFilters): + raise TypeError('model_filters must be of type ApiSearchModelsFilters') + self._model_filters = model_filters @property - def dataset_points(self) -> int: - """Current points for the user in the dataset achievement type.""" - return self._dataset_points or 0 + def user_filters(self) -> Optional['ApiSearchUsersFilters']: + """The set of User filters to filter the documents""" + return self._user_filters - @dataset_points.setter - def dataset_points(self, dataset_points: 
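# --- Editor's illustrative note; not part of the generated diff. ---
# A minimal sketch of composing ListEntitiesFilters. Only values visible in this
# file are used; a concrete DocumentType or ApiListType member would be an
# assumption and is left out. ApiSearchKernelsFilters is assumed to remain
# importable from its new kagglesdk module after this refactor.
filters = ListEntitiesFilters()
filters.query = "titanic"                           # free-text query
filters.privacy = PrivacyFilter.ALL                 # the declared default, set explicitly
filters.kernel_filters = ApiSearchKernelsFilters()  # optional per-type refinement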
Optional[int]): - if dataset_points is None: - del self.dataset_points + @user_filters.setter + def user_filters(self, user_filters: Optional['ApiSearchUsersFilters']): + if user_filters is None: + del self.user_filters return - if not isinstance(dataset_points, int): - raise TypeError('dataset_points must be of type int') - self._dataset_points = dataset_points + if not isinstance(user_filters, ApiSearchUsersFilters): + raise TypeError('user_filters must be of type ApiSearchUsersFilters') + self._user_filters = user_filters -class ApiSearchUsersFilters(KaggleObject): +class ListEntitiesRequest(KaggleObject): r""" - Based on kaggle.users.SearchUsersFilters - Attributes: - user_locations (str) - Filter to users that have one of the the specified locations. Expects the - format: 'city, region, country' for each. - tier (UserAchievementTier) - Filter to users that have the specified performance tier. - user_ids (int) - Filter to users based on the provided user ids. - require_ranking_for_type (UserAchievementType) - Filter to users that have points for the specified type. - occupation_organization_names (str) - Filter to users that have one of the provided occupation organization names - indicated on their user profile, i.e. http://screen/3N68JKC4hocxWmn. Note: - This is *not* the same thing as a Kaggle Organization, such as - kaggle.com/organizations/google. - grandmaster_level (ListSearchContentRangeFilter) - Filter to users that have the specified range of Grandmaster tier level. + filters (ListEntitiesFilters) + Canonical filters to apply to the search + canonical_order_by (ListSearchContentOrderBy) + Canonical order to apply to the results + competitions_order_by (SearchCompetitionsOrderBy) + Competitions order to apply to the results + datasets_order_by (SearchDatasetsOrderBy) + Datasets order to apply to the results + kernels_order_by (SearchKernelsOrderBy) + Kernels order to apply to the results + models_order_by (SearchModelsOrderBy) + Models order to apply to the results + discussions_order_by (SearchDiscussionsOrderBy) + Discussions order to apply to the results + users_order_by (SearchUsersOrderBy) + Users order to apply to the results + page_token (str) + Page token for paging (see aip.dev/158) + page_size (int) + Number of documents per page to return + skip (int) + How many results to skip """ def __init__(self): - self._user_locations = [] - self._tier = None - self._user_ids = [] - self._require_ranking_for_type = None - self._occupation_organization_names = [] - self._grandmaster_level = None + self._filters = None + self._canonical_order_by = None + self._competitions_order_by = None + self._datasets_order_by = None + self._kernels_order_by = None + self._models_order_by = None + self._discussions_order_by = None + self._users_order_by = None + self._page_token = "" + self._page_size = 0 + self._skip = 0 self._freeze() @property - def user_locations(self) -> Optional[List[str]]: - r""" - Filter to users that have one of the the specified locations. Expects the - format: 'city, region, country' for each. 
- """ - return self._user_locations + def filters(self) -> Optional['ListEntitiesFilters']: + """Canonical filters to apply to the search""" + return self._filters - @user_locations.setter - def user_locations(self, user_locations: Optional[List[str]]): - if user_locations is None: - del self.user_locations + @filters.setter + def filters(self, filters: Optional['ListEntitiesFilters']): + if filters is None: + del self.filters return - if not isinstance(user_locations, list): - raise TypeError('user_locations must be of type list') - if not all([isinstance(t, str) for t in user_locations]): - raise TypeError('user_locations must contain only items of type str') - self._user_locations = user_locations + if not isinstance(filters, ListEntitiesFilters): + raise TypeError('filters must be of type ListEntitiesFilters') + self._filters = filters @property - def tier(self) -> 'UserAchievementTier': - """Filter to users that have the specified performance tier.""" - return self._tier or UserAchievementTier.NOVICE + def canonical_order_by(self) -> 'ListSearchContentOrderBy': + """Canonical order to apply to the results""" + return self._canonical_order_by or ListSearchContentOrderBy.LIST_SEARCH_CONTENT_ORDER_BY_UNSPECIFIED - @tier.setter - def tier(self, tier: Optional['UserAchievementTier']): - if tier is None: - del self.tier + @canonical_order_by.setter + def canonical_order_by(self, canonical_order_by: 'ListSearchContentOrderBy'): + if canonical_order_by is None: + del self.canonical_order_by return - if not isinstance(tier, UserAchievementTier): - raise TypeError('tier must be of type UserAchievementTier') - self._tier = tier + if not isinstance(canonical_order_by, ListSearchContentOrderBy): + raise TypeError('canonical_order_by must be of type ListSearchContentOrderBy') + del self.competitions_order_by + del self.datasets_order_by + del self.kernels_order_by + del self.models_order_by + del self.discussions_order_by + del self.users_order_by + self._canonical_order_by = canonical_order_by @property - def user_ids(self) -> Optional[List[int]]: - """Filter to users based on the provided user ids.""" - return self._user_ids + def competitions_order_by(self) -> 'SearchCompetitionsOrderBy': + """Competitions order to apply to the results""" + return self._competitions_order_by or SearchCompetitionsOrderBy.SEARCH_COMPETITIONS_ORDER_BY_UNSPECIFIED - @user_ids.setter - def user_ids(self, user_ids: Optional[List[int]]): - if user_ids is None: - del self.user_ids + @competitions_order_by.setter + def competitions_order_by(self, competitions_order_by: 'SearchCompetitionsOrderBy'): + if competitions_order_by is None: + del self.competitions_order_by return - if not isinstance(user_ids, list): - raise TypeError('user_ids must be of type list') - if not all([isinstance(t, int) for t in user_ids]): - raise TypeError('user_ids must contain only items of type int') - self._user_ids = user_ids + if not isinstance(competitions_order_by, SearchCompetitionsOrderBy): + raise TypeError('competitions_order_by must be of type SearchCompetitionsOrderBy') + del self.canonical_order_by + del self.datasets_order_by + del self.kernels_order_by + del self.models_order_by + del self.discussions_order_by + del self.users_order_by + self._competitions_order_by = competitions_order_by @property - def require_ranking_for_type(self) -> 'UserAchievementType': - """Filter to users that have points for the specified type.""" - return self._require_ranking_for_type or UserAchievementType.USER_ACHIEVEMENT_TYPE_UNSPECIFIED + def 
datasets_order_by(self) -> 'SearchDatasetsOrderBy': + """Datasets order to apply to the results""" + return self._datasets_order_by or SearchDatasetsOrderBy.SEARCH_DATASETS_ORDER_BY_UNSPECIFIED - @require_ranking_for_type.setter - def require_ranking_for_type(self, require_ranking_for_type: Optional['UserAchievementType']): - if require_ranking_for_type is None: - del self.require_ranking_for_type + @datasets_order_by.setter + def datasets_order_by(self, datasets_order_by: 'SearchDatasetsOrderBy'): + if datasets_order_by is None: + del self.datasets_order_by return - if not isinstance(require_ranking_for_type, UserAchievementType): - raise TypeError('require_ranking_for_type must be of type UserAchievementType') - self._require_ranking_for_type = require_ranking_for_type + if not isinstance(datasets_order_by, SearchDatasetsOrderBy): + raise TypeError('datasets_order_by must be of type SearchDatasetsOrderBy') + del self.canonical_order_by + del self.competitions_order_by + del self.kernels_order_by + del self.models_order_by + del self.discussions_order_by + del self.users_order_by + self._datasets_order_by = datasets_order_by @property - def occupation_organization_names(self) -> Optional[List[str]]: - r""" - Filter to users that have one of the provided occupation organization names - indicated on their user profile, i.e. http://screen/3N68JKC4hocxWmn. Note: - This is *not* the same thing as a Kaggle Organization, such as - kaggle.com/organizations/google. - """ - return self._occupation_organization_names + def kernels_order_by(self) -> 'SearchKernelsOrderBy': + """Kernels order to apply to the results""" + return self._kernels_order_by or SearchKernelsOrderBy.SEARCH_KERNELS_ORDER_BY_UNSPECIFIED - @occupation_organization_names.setter - def occupation_organization_names(self, occupation_organization_names: Optional[List[str]]): - if occupation_organization_names is None: - del self.occupation_organization_names + @kernels_order_by.setter + def kernels_order_by(self, kernels_order_by: 'SearchKernelsOrderBy'): + if kernels_order_by is None: + del self.kernels_order_by return - if not isinstance(occupation_organization_names, list): - raise TypeError('occupation_organization_names must be of type list') - if not all([isinstance(t, str) for t in occupation_organization_names]): - raise TypeError('occupation_organization_names must contain only items of type str') - self._occupation_organization_names = occupation_organization_names + if not isinstance(kernels_order_by, SearchKernelsOrderBy): + raise TypeError('kernels_order_by must be of type SearchKernelsOrderBy') + del self.canonical_order_by + del self.competitions_order_by + del self.datasets_order_by + del self.models_order_by + del self.discussions_order_by + del self.users_order_by + self._kernels_order_by = kernels_order_by @property - def grandmaster_level(self) -> Optional['ListSearchContentRangeFilter']: - """Filter to users that have the specified range of Grandmaster tier level.""" - return self._grandmaster_level + def models_order_by(self) -> 'SearchModelsOrderBy': + """Models order to apply to the results""" + return self._models_order_by or SearchModelsOrderBy.MODELS_SEARCH_ORDER_BY_UNSPECIFIED - @grandmaster_level.setter - def grandmaster_level(self, grandmaster_level: Optional['ListSearchContentRangeFilter']): - if grandmaster_level is None: - del self.grandmaster_level + @models_order_by.setter + def models_order_by(self, models_order_by: 'SearchModelsOrderBy'): + if models_order_by is None: + del self.models_order_by 
return - if not isinstance(grandmaster_level, ListSearchContentRangeFilter): - raise TypeError('grandmaster_level must be of type ListSearchContentRangeFilter') - self._grandmaster_level = grandmaster_level - - -class ApiUserAvatar(KaggleObject): - r""" - Based on kaggle.users.UserAvatar + if not isinstance(models_order_by, SearchModelsOrderBy): + raise TypeError('models_order_by must be of type SearchModelsOrderBy') + del self.canonical_order_by + del self.competitions_order_by + del self.datasets_order_by + del self.kernels_order_by + del self.discussions_order_by + del self.users_order_by + self._models_order_by = models_order_by - Attributes: - id (int) - ID for the given user - display_name (str) - Display name for the given user - thumbnail_url (str) - Thumbnail URL for the given user - url (str) - Profile URL for the given user - user_name (str) - User name for the given user - progression_opt_out (bool) - True if the user is opted out of the progression system. - tier (UserAchievementTier) - Tier for the given user - """ + @property + def discussions_order_by(self) -> 'SearchDiscussionsOrderBy': + """Discussions order to apply to the results""" + return self._discussions_order_by or SearchDiscussionsOrderBy.SEARCH_DISCUSSIONS_ORDER_BY_UNSPECIFIED - def __init__(self): - self._id = 0 - self._display_name = None - self._thumbnail_url = None - self._url = None - self._user_name = None - self._progression_opt_out = None - self._tier = UserAchievementTier.NOVICE - self._freeze() + @discussions_order_by.setter + def discussions_order_by(self, discussions_order_by: 'SearchDiscussionsOrderBy'): + if discussions_order_by is None: + del self.discussions_order_by + return + if not isinstance(discussions_order_by, SearchDiscussionsOrderBy): + raise TypeError('discussions_order_by must be of type SearchDiscussionsOrderBy') + del self.canonical_order_by + del self.competitions_order_by + del self.datasets_order_by + del self.kernels_order_by + del self.models_order_by + del self.users_order_by + self._discussions_order_by = discussions_order_by @property - def id(self) -> int: - """ID for the given user""" - return self._id + def users_order_by(self) -> 'SearchUsersOrderBy': + """Users order to apply to the results""" + return self._users_order_by or SearchUsersOrderBy.SEARCH_USERS_ORDER_BY_UNSPECIFIED - @id.setter - def id(self, id: int): - if id is None: - del self.id + @users_order_by.setter + def users_order_by(self, users_order_by: 'SearchUsersOrderBy'): + if users_order_by is None: + del self.users_order_by return - if not isinstance(id, int): - raise TypeError('id must be of type int') - self._id = id + if not isinstance(users_order_by, SearchUsersOrderBy): + raise TypeError('users_order_by must be of type SearchUsersOrderBy') + del self.canonical_order_by + del self.competitions_order_by + del self.datasets_order_by + del self.kernels_order_by + del self.models_order_by + del self.discussions_order_by + self._users_order_by = users_order_by @property - def display_name(self) -> str: - """Display name for the given user""" - return self._display_name or "" + def page_token(self) -> str: + """Page token for paging (see aip.dev/158)""" + return self._page_token - @display_name.setter - def display_name(self, display_name: Optional[str]): - if display_name is None: - del self.display_name + @page_token.setter + def page_token(self, page_token: str): + if page_token is None: + del self.page_token return - if not isinstance(display_name, str): - raise TypeError('display_name must be of type 
str') - self._display_name = display_name + if not isinstance(page_token, str): + raise TypeError('page_token must be of type str') + self._page_token = page_token @property - def thumbnail_url(self) -> str: - """Thumbnail URL for the given user""" - return self._thumbnail_url or "" + def page_size(self) -> int: + """Number of documents per page to return""" + return self._page_size - @thumbnail_url.setter - def thumbnail_url(self, thumbnail_url: Optional[str]): - if thumbnail_url is None: - del self.thumbnail_url + @page_size.setter + def page_size(self, page_size: int): + if page_size is None: + del self.page_size return - if not isinstance(thumbnail_url, str): - raise TypeError('thumbnail_url must be of type str') - self._thumbnail_url = thumbnail_url + if not isinstance(page_size, int): + raise TypeError('page_size must be of type int') + self._page_size = page_size @property - def url(self) -> str: - """Profile URL for the given user""" - return self._url or "" + def skip(self) -> int: + """How many results to skip""" + return self._skip - @url.setter - def url(self, url: Optional[str]): - if url is None: - del self.url + @skip.setter + def skip(self, skip: int): + if skip is None: + del self.skip return - if not isinstance(url, str): - raise TypeError('url must be of type str') - self._url = url + if not isinstance(skip, int): + raise TypeError('skip must be of type int') + self._skip = skip - @property - def user_name(self) -> str: - """User name for the given user""" - return self._user_name or "" + def endpoint(self): + path = '/api/v1/search/list-entities' + return path.format_map(self.to_field_map(self)) - @user_name.setter - def user_name(self, user_name: Optional[str]): - if user_name is None: - del self.user_name - return - if not isinstance(user_name, str): - raise TypeError('user_name must be of type str') - self._user_name = user_name + +class ListEntitiesResponse(KaggleObject): + r""" + Attributes: + documents (ListEntitiesDocument) + The list of documents returned after filtering + total_documents (int) + The total number of documents matching any filters + next_page_token (str) + The token to request the next page + """ + + def __init__(self): + self._documents = [] + self._total_documents = 0 + self._next_page_token = "" + self._freeze() @property - def progression_opt_out(self) -> bool: - """True if the user is opted out of the progression system.""" - return self._progression_opt_out or False + def documents(self) -> Optional[List[Optional['ListEntitiesDocument']]]: + """The list of documents returned after filtering""" + return self._documents - @progression_opt_out.setter - def progression_opt_out(self, progression_opt_out: Optional[bool]): - if progression_opt_out is None: - del self.progression_opt_out + @documents.setter + def documents(self, documents: Optional[List[Optional['ListEntitiesDocument']]]): + if documents is None: + del self.documents return - if not isinstance(progression_opt_out, bool): - raise TypeError('progression_opt_out must be of type bool') - self._progression_opt_out = progression_opt_out + if not isinstance(documents, list): + raise TypeError('documents must be of type list') + if not all([isinstance(t, ListEntitiesDocument) for t in documents]): + raise TypeError('documents must contain only items of type ListEntitiesDocument') + self._documents = documents @property - def tier(self) -> 'UserAchievementTier': - """Tier for the given user""" - return self._tier + def total_documents(self) -> int: + """The total number of documents 
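# --- Editor's illustrative note; not part of the generated diff. ---
# The *_order_by members of ListEntitiesRequest are mutually exclusive: each
# setter above deletes the other order-by attributes, so only the last one
# assigned survives. endpoint() formats a constant path, so format_map() has no
# placeholders to fill. A minimal sketch:
req = ListEntitiesRequest()
req.filters = ListEntitiesFilters()
req.canonical_order_by = ListSearchContentOrderBy.LIST_SEARCH_CONTENT_ORDER_BY_UNSPECIFIED
req.page_size = 20
req.endpoint()   # -> '/api/v1/search/list-entities'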
matching any filters""" + return self._total_documents - @tier.setter - def tier(self, tier: 'UserAchievementTier'): - if tier is None: - del self.tier + @total_documents.setter + def total_documents(self, total_documents: int): + if total_documents is None: + del self.total_documents return - if not isinstance(tier, UserAchievementTier): - raise TypeError('tier must be of type UserAchievementTier') - self._tier = tier + if not isinstance(total_documents, int): + raise TypeError('total_documents must be of type int') + self._total_documents = total_documents + @property + def next_page_token(self) -> str: + """The token to request the next page""" + return self._next_page_token -ListEntitiesRequest._fields = [ - FieldMetadata("filters", "filters", "_filters", ListEntitiesFilters, None, KaggleObjectSerializer()), - FieldMetadata("canonicalOrderBy", "canonical_order_by", "_canonical_order_by", ListSearchContentOrderBy, None, EnumSerializer(), optional=True), - FieldMetadata("competitionsOrderBy", "competitions_order_by", "_competitions_order_by", SearchCompetitionsOrderBy, None, EnumSerializer(), optional=True), - FieldMetadata("datasetsOrderBy", "datasets_order_by", "_datasets_order_by", SearchDatasetsOrderBy, None, EnumSerializer(), optional=True), - FieldMetadata("kernelsOrderBy", "kernels_order_by", "_kernels_order_by", SearchKernelsOrderBy, None, EnumSerializer(), optional=True), - FieldMetadata("modelsOrderBy", "models_order_by", "_models_order_by", SearchModelsOrderBy, None, EnumSerializer(), optional=True), - FieldMetadata("discussionsOrderBy", "discussions_order_by", "_discussions_order_by", SearchDiscussionsOrderBy, None, EnumSerializer(), optional=True), - FieldMetadata("usersOrderBy", "users_order_by", "_users_order_by", SearchUsersOrderBy, None, EnumSerializer(), optional=True), - FieldMetadata("pageToken", "page_token", "_page_token", str, "", PredefinedSerializer()), - FieldMetadata("pageSize", "page_size", "_page_size", int, 0, PredefinedSerializer()), - FieldMetadata("skip", "skip", "_skip", int, 0, PredefinedSerializer()), -] + @next_page_token.setter + def next_page_token(self, next_page_token: str): + if next_page_token is None: + del self.next_page_token + return + if not isinstance(next_page_token, str): + raise TypeError('next_page_token must be of type str') + self._next_page_token = next_page_token -ListEntitiesResponse._fields = [ - FieldMetadata("documents", "documents", "_documents", ListEntitiesDocument, [], ListSerializer(KaggleObjectSerializer())), - FieldMetadata("totalDocuments", "total_documents", "_total_documents", int, 0, PredefinedSerializer()), - FieldMetadata("nextPageToken", "next_page_token", "_next_page_token", str, "", PredefinedSerializer()), -] + @property + def totalDocuments(self): + return self.total_documents -ListEntitiesDocument._fields = [ - FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), - FieldMetadata("documentType", "document_type", "_document_type", DocumentType, DocumentType.DOCUMENT_TYPE_UNSPECIFIED, EnumSerializer()), - FieldMetadata("title", "title", "_title", str, "", PredefinedSerializer()), - FieldMetadata("imageUrl", "image_url", "_image_url", str, "", PredefinedSerializer()), - FieldMetadata("createTime", "create_time", "_create_time", datetime, None, DateTimeSerializer()), - FieldMetadata("updateTime", "update_time", "_update_time", datetime, None, DateTimeSerializer(), optional=True), - FieldMetadata("isPrivate", "is_private", "_is_private", bool, None, PredefinedSerializer(), optional=True), - 
FieldMetadata("votes", "votes", "_votes", int, None, PredefinedSerializer(), optional=True), - FieldMetadata("ownerUser", "owner_user", "_owner_user", ApiUserAvatar, None, KaggleObjectSerializer(), optional=True), - FieldMetadata("ownerOrganization", "owner_organization", "_owner_organization", ApiOrganizationCard, None, KaggleObjectSerializer(), optional=True), - FieldMetadata("competitionDocument", "competition_document", "_competition_document", ApiSearchCompetitionsDocument, None, KaggleObjectSerializer(), optional=True), - FieldMetadata("datasetDocument", "dataset_document", "_dataset_document", ApiSearchDatasetsDocument, None, KaggleObjectSerializer(), optional=True), - FieldMetadata("kernelDocument", "kernel_document", "_kernel_document", ApiSearchKernelsDocument, None, KaggleObjectSerializer(), optional=True), - FieldMetadata("modelDocument", "model_document", "_model_document", ApiSearchModelsDocument, None, KaggleObjectSerializer(), optional=True), - FieldMetadata("discussionDocument", "discussion_document", "_discussion_document", ApiSearchDiscussionsDocument, None, KaggleObjectSerializer(), optional=True), - FieldMetadata("userDocument", "user_document", "_user_document", ApiSearchUsersDocument, None, KaggleObjectSerializer(), optional=True), - FieldMetadata("slug", "slug", "_slug", str, None, PredefinedSerializer(), optional=True), -] + @property + def nextPageToken(self): + return self.next_page_token -ListEntitiesFilters._fields = [ - FieldMetadata("query", "query", "_query", str, "", PredefinedSerializer()), - FieldMetadata("listType", "list_type", "_list_type", ApiListType, ApiListType.API_LIST_TYPE_UNSPECIFIED, EnumSerializer()), - FieldMetadata("privacy", "privacy", "_privacy", PrivacyFilter, PrivacyFilter.ALL, EnumSerializer()), - FieldMetadata("ownerType", "owner_type", "_owner_type", OwnerType, OwnerType.OWNER_TYPE_UNSPECIFIED, EnumSerializer()), - FieldMetadata("documentTypes", "document_types", "_document_types", DocumentType, [], ListSerializer(EnumSerializer())), - FieldMetadata("competitionFilters", "competition_filters", "_competition_filters", ApiSearchCompetitionsFilters, None, KaggleObjectSerializer()), - FieldMetadata("datasetFilters", "dataset_filters", "_dataset_filters", ApiSearchDatasetsFilters, None, KaggleObjectSerializer()), - FieldMetadata("discussionFilters", "discussion_filters", "_discussion_filters", ApiSearchDiscussionsFilters, None, KaggleObjectSerializer()), - FieldMetadata("kernelFilters", "kernel_filters", "_kernel_filters", ApiSearchKernelsFilters, None, KaggleObjectSerializer()), - FieldMetadata("modelFilters", "model_filters", "_model_filters", ApiSearchModelsFilters, None, KaggleObjectSerializer()), - FieldMetadata("userFilters", "user_filters", "_user_filters", ApiSearchUsersFilters, None, KaggleObjectSerializer()), -] ApiOrganizationCard._fields = [ FieldMetadata("name", "name", "_name", str, "", PredefinedSerializer()), @@ -2433,3 +2379,57 @@ def tier(self, tier: 'UserAchievementTier'): FieldMetadata("tier", "tier", "_tier", UserAchievementTier, UserAchievementTier.NOVICE, EnumSerializer()), ] +ListEntitiesDocument._fields = [ + FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), + FieldMetadata("documentType", "document_type", "_document_type", DocumentType, DocumentType.DOCUMENT_TYPE_UNSPECIFIED, EnumSerializer()), + FieldMetadata("title", "title", "_title", str, "", PredefinedSerializer()), + FieldMetadata("imageUrl", "image_url", "_image_url", str, "", PredefinedSerializer()), + FieldMetadata("createTime", 
"create_time", "_create_time", datetime, None, DateTimeSerializer()), + FieldMetadata("updateTime", "update_time", "_update_time", datetime, None, DateTimeSerializer(), optional=True), + FieldMetadata("isPrivate", "is_private", "_is_private", bool, None, PredefinedSerializer(), optional=True), + FieldMetadata("votes", "votes", "_votes", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("ownerUser", "owner_user", "_owner_user", ApiUserAvatar, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("ownerOrganization", "owner_organization", "_owner_organization", ApiOrganizationCard, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("competitionDocument", "competition_document", "_competition_document", ApiSearchCompetitionsDocument, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("datasetDocument", "dataset_document", "_dataset_document", ApiSearchDatasetsDocument, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("kernelDocument", "kernel_document", "_kernel_document", ApiSearchKernelsDocument, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("modelDocument", "model_document", "_model_document", ApiSearchModelsDocument, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("discussionDocument", "discussion_document", "_discussion_document", ApiSearchDiscussionsDocument, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("userDocument", "user_document", "_user_document", ApiSearchUsersDocument, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("slug", "slug", "_slug", str, None, PredefinedSerializer(), optional=True), +] + +ListEntitiesFilters._fields = [ + FieldMetadata("query", "query", "_query", str, "", PredefinedSerializer()), + FieldMetadata("listType", "list_type", "_list_type", ApiListType, ApiListType.API_LIST_TYPE_UNSPECIFIED, EnumSerializer()), + FieldMetadata("privacy", "privacy", "_privacy", PrivacyFilter, PrivacyFilter.ALL, EnumSerializer()), + FieldMetadata("ownerType", "owner_type", "_owner_type", OwnerType, OwnerType.OWNER_TYPE_UNSPECIFIED, EnumSerializer()), + FieldMetadata("documentTypes", "document_types", "_document_types", DocumentType, [], ListSerializer(EnumSerializer())), + FieldMetadata("competitionFilters", "competition_filters", "_competition_filters", ApiSearchCompetitionsFilters, None, KaggleObjectSerializer()), + FieldMetadata("datasetFilters", "dataset_filters", "_dataset_filters", ApiSearchDatasetsFilters, None, KaggleObjectSerializer()), + FieldMetadata("discussionFilters", "discussion_filters", "_discussion_filters", ApiSearchDiscussionsFilters, None, KaggleObjectSerializer()), + FieldMetadata("kernelFilters", "kernel_filters", "_kernel_filters", ApiSearchKernelsFilters, None, KaggleObjectSerializer()), + FieldMetadata("modelFilters", "model_filters", "_model_filters", ApiSearchModelsFilters, None, KaggleObjectSerializer()), + FieldMetadata("userFilters", "user_filters", "_user_filters", ApiSearchUsersFilters, None, KaggleObjectSerializer()), +] + +ListEntitiesRequest._fields = [ + FieldMetadata("filters", "filters", "_filters", ListEntitiesFilters, None, KaggleObjectSerializer()), + FieldMetadata("canonicalOrderBy", "canonical_order_by", "_canonical_order_by", ListSearchContentOrderBy, None, EnumSerializer(), optional=True), + FieldMetadata("competitionsOrderBy", "competitions_order_by", "_competitions_order_by", SearchCompetitionsOrderBy, None, EnumSerializer(), optional=True), + FieldMetadata("datasetsOrderBy", "datasets_order_by", 
"_datasets_order_by", SearchDatasetsOrderBy, None, EnumSerializer(), optional=True), + FieldMetadata("kernelsOrderBy", "kernels_order_by", "_kernels_order_by", SearchKernelsOrderBy, None, EnumSerializer(), optional=True), + FieldMetadata("modelsOrderBy", "models_order_by", "_models_order_by", SearchModelsOrderBy, None, EnumSerializer(), optional=True), + FieldMetadata("discussionsOrderBy", "discussions_order_by", "_discussions_order_by", SearchDiscussionsOrderBy, None, EnumSerializer(), optional=True), + FieldMetadata("usersOrderBy", "users_order_by", "_users_order_by", SearchUsersOrderBy, None, EnumSerializer(), optional=True), + FieldMetadata("pageToken", "page_token", "_page_token", str, "", PredefinedSerializer()), + FieldMetadata("pageSize", "page_size", "_page_size", int, 0, PredefinedSerializer()), + FieldMetadata("skip", "skip", "_skip", int, 0, PredefinedSerializer()), +] + +ListEntitiesResponse._fields = [ + FieldMetadata("documents", "documents", "_documents", ListEntitiesDocument, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("totalDocuments", "total_documents", "_total_documents", int, 0, PredefinedSerializer()), + FieldMetadata("nextPageToken", "next_page_token", "_next_page_token", str, "", PredefinedSerializer()), +] + diff --git a/src/kagglesdk/search/types/search_service.py b/src/kagglesdk/search/types/search_service.py index 1ee0b0eb..ef7c2b1c 100644 --- a/src/kagglesdk/search/types/search_service.py +++ b/src/kagglesdk/search/types/search_service.py @@ -5,136 +5,6 @@ from kagglesdk.users.types.user_avatar import UserAvatar from typing import Optional, List -class WriteUpItemInfo(KaggleObject): - r""" - Attributes: - type (WriteUpType) - Type of WriteUp - subtitle (str) - Subtitle of WriteUp - collaborators (UserAvatar) - List of WriteUp collaborators - competition_info (WriteUpCompetitionInfo) - Competition metadata associated with WriteUp - content_state (ContentState) - Content State of WriteUp - team_name (str) - Name of the team that owns the WriteUp - id (int) - Id of the WriteUp - """ - - def __init__(self): - self._type = WriteUpType.WRITE_UP_TYPE_UNSPECIFIED - self._subtitle = None - self._collaborators = [] - self._competition_info = None - self._content_state = ContentState.CONTENT_STATE_UNSPECIFIED - self._team_name = None - self._id = 0 - self._freeze() - - @property - def type(self) -> 'WriteUpType': - """Type of WriteUp""" - return self._type - - @type.setter - def type(self, type: 'WriteUpType'): - if type is None: - del self.type - return - if not isinstance(type, WriteUpType): - raise TypeError('type must be of type WriteUpType') - self._type = type - - @property - def subtitle(self) -> str: - """Subtitle of WriteUp""" - return self._subtitle or "" - - @subtitle.setter - def subtitle(self, subtitle: Optional[str]): - if subtitle is None: - del self.subtitle - return - if not isinstance(subtitle, str): - raise TypeError('subtitle must be of type str') - self._subtitle = subtitle - - @property - def collaborators(self) -> Optional[List[Optional['UserAvatar']]]: - """List of WriteUp collaborators""" - return self._collaborators - - @collaborators.setter - def collaborators(self, collaborators: Optional[List[Optional['UserAvatar']]]): - if collaborators is None: - del self.collaborators - return - if not isinstance(collaborators, list): - raise TypeError('collaborators must be of type list') - if not all([isinstance(t, UserAvatar) for t in collaborators]): - raise TypeError('collaborators must contain only items of type UserAvatar') - 
self._collaborators = collaborators - - @property - def competition_info(self) -> Optional['WriteUpCompetitionInfo']: - """Competition metadata associated with WriteUp""" - return self._competition_info or None - - @competition_info.setter - def competition_info(self, competition_info: Optional[Optional['WriteUpCompetitionInfo']]): - if competition_info is None: - del self.competition_info - return - if not isinstance(competition_info, WriteUpCompetitionInfo): - raise TypeError('competition_info must be of type WriteUpCompetitionInfo') - self._competition_info = competition_info - - @property - def content_state(self) -> 'ContentState': - """Content State of WriteUp""" - return self._content_state - - @content_state.setter - def content_state(self, content_state: 'ContentState'): - if content_state is None: - del self.content_state - return - if not isinstance(content_state, ContentState): - raise TypeError('content_state must be of type ContentState') - self._content_state = content_state - - @property - def team_name(self) -> str: - """Name of the team that owns the WriteUp""" - return self._team_name or "" - - @team_name.setter - def team_name(self, team_name: Optional[str]): - if team_name is None: - del self.team_name - return - if not isinstance(team_name, str): - raise TypeError('team_name must be of type str') - self._team_name = team_name - - @property - def id(self) -> int: - """Id of the WriteUp""" - return self._id - - @id.setter - def id(self, id: int): - if id is None: - del self.id - return - if not isinstance(id, int): - raise TypeError('id must be of type int') - self._id = id - - class WriteUpCompetitionInfo(KaggleObject): r""" Attributes: @@ -280,15 +150,135 @@ def competition_id(self, competition_id: int): self._competition_id = competition_id -WriteUpItemInfo._fields = [ - FieldMetadata("type", "type", "_type", WriteUpType, WriteUpType.WRITE_UP_TYPE_UNSPECIFIED, EnumSerializer()), - FieldMetadata("subtitle", "subtitle", "_subtitle", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("collaborators", "collaborators", "_collaborators", UserAvatar, [], ListSerializer(KaggleObjectSerializer())), - FieldMetadata("competitionInfo", "competition_info", "_competition_info", WriteUpCompetitionInfo, None, KaggleObjectSerializer(), optional=True), - FieldMetadata("contentState", "content_state", "_content_state", ContentState, ContentState.CONTENT_STATE_UNSPECIFIED, EnumSerializer()), - FieldMetadata("teamName", "team_name", "_team_name", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), -] +class WriteUpItemInfo(KaggleObject): + r""" + Attributes: + type (WriteUpType) + Type of WriteUp + subtitle (str) + Subtitle of WriteUp + collaborators (UserAvatar) + List of WriteUp collaborators + competition_info (WriteUpCompetitionInfo) + Competition metadata associated with WriteUp + content_state (ContentState) + Content State of WriteUp + team_name (str) + Name of the team that owns the WriteUp + id (int) + Id of the WriteUp + """ + + def __init__(self): + self._type = WriteUpType.WRITE_UP_TYPE_UNSPECIFIED + self._subtitle = None + self._collaborators = [] + self._competition_info = None + self._content_state = ContentState.CONTENT_STATE_UNSPECIFIED + self._team_name = None + self._id = 0 + self._freeze() + + @property + def type(self) -> 'WriteUpType': + """Type of WriteUp""" + return self._type + + @type.setter + def type(self, type: 'WriteUpType'): + if type is None: + del self.type + return + if 
not isinstance(type, WriteUpType): + raise TypeError('type must be of type WriteUpType') + self._type = type + + @property + def subtitle(self) -> str: + """Subtitle of WriteUp""" + return self._subtitle or "" + + @subtitle.setter + def subtitle(self, subtitle: Optional[str]): + if subtitle is None: + del self.subtitle + return + if not isinstance(subtitle, str): + raise TypeError('subtitle must be of type str') + self._subtitle = subtitle + + @property + def collaborators(self) -> Optional[List[Optional['UserAvatar']]]: + """List of WriteUp collaborators""" + return self._collaborators + + @collaborators.setter + def collaborators(self, collaborators: Optional[List[Optional['UserAvatar']]]): + if collaborators is None: + del self.collaborators + return + if not isinstance(collaborators, list): + raise TypeError('collaborators must be of type list') + if not all([isinstance(t, UserAvatar) for t in collaborators]): + raise TypeError('collaborators must contain only items of type UserAvatar') + self._collaborators = collaborators + + @property + def competition_info(self) -> Optional['WriteUpCompetitionInfo']: + """Competition metadata associated with WriteUp""" + return self._competition_info or None + + @competition_info.setter + def competition_info(self, competition_info: Optional[Optional['WriteUpCompetitionInfo']]): + if competition_info is None: + del self.competition_info + return + if not isinstance(competition_info, WriteUpCompetitionInfo): + raise TypeError('competition_info must be of type WriteUpCompetitionInfo') + self._competition_info = competition_info + + @property + def content_state(self) -> 'ContentState': + """Content State of WriteUp""" + return self._content_state + + @content_state.setter + def content_state(self, content_state: 'ContentState'): + if content_state is None: + del self.content_state + return + if not isinstance(content_state, ContentState): + raise TypeError('content_state must be of type ContentState') + self._content_state = content_state + + @property + def team_name(self) -> str: + """Name of the team that owns the WriteUp""" + return self._team_name or "" + + @team_name.setter + def team_name(self, team_name: Optional[str]): + if team_name is None: + del self.team_name + return + if not isinstance(team_name, str): + raise TypeError('team_name must be of type str') + self._team_name = team_name + + @property + def id(self) -> int: + """Id of the WriteUp""" + return self._id + + @id.setter + def id(self, id: int): + if id is None: + del self.id + return + if not isinstance(id, int): + raise TypeError('id must be of type int') + self._id = id + WriteUpCompetitionInfo._fields = [ FieldMetadata("competitionTitle", "competition_title", "_competition_title", str, "", PredefinedSerializer()), @@ -301,3 +291,13 @@ def competition_id(self, competition_id: int): FieldMetadata("competitionId", "competition_id", "_competition_id", int, 0, PredefinedSerializer()), ] +WriteUpItemInfo._fields = [ + FieldMetadata("type", "type", "_type", WriteUpType, WriteUpType.WRITE_UP_TYPE_UNSPECIFIED, EnumSerializer()), + FieldMetadata("subtitle", "subtitle", "_subtitle", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("collaborators", "collaborators", "_collaborators", UserAvatar, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("competitionInfo", "competition_info", "_competition_info", WriteUpCompetitionInfo, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("contentState", "content_state", "_content_state", ContentState, 
ContentState.CONTENT_STATE_UNSPECIFIED, EnumSerializer()), + FieldMetadata("teamName", "team_name", "_team_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), +] + diff --git a/src/kagglesdk/security/types/authentication.py b/src/kagglesdk/security/types/authentication.py index e54dc288..4c790ad7 100644 --- a/src/kagglesdk/security/types/authentication.py +++ b/src/kagglesdk/security/types/authentication.py @@ -1,62 +1,6 @@ from kagglesdk.kaggle_object import * from typing import Optional, List -class AuthorizationScope(KaggleObject): - r""" - Attributes: - resource_id (int) - permission (AuthorizationPermissionScope) - role (AuthorizationRoleScope) - """ - - def __init__(self): - self._resource_id = 0 - self._permission = None - self._role = None - self._freeze() - - @property - def resource_id(self) -> int: - return self._resource_id - - @resource_id.setter - def resource_id(self, resource_id: int): - if resource_id is None: - del self.resource_id - return - if not isinstance(resource_id, int): - raise TypeError('resource_id must be of type int') - self._resource_id = resource_id - - @property - def permission(self) -> Optional['AuthorizationPermissionScope']: - return self._permission or None - - @permission.setter - def permission(self, permission: Optional['AuthorizationPermissionScope']): - if permission is None: - del self.permission - return - if not isinstance(permission, AuthorizationPermissionScope): - raise TypeError('permission must be of type AuthorizationPermissionScope') - del self.role - self._permission = permission - - @property - def role(self) -> Optional['AuthorizationRoleScope']: - return self._role or None - - @role.setter - def role(self, role: Optional['AuthorizationRoleScope']): - if role is None: - del self.role - return - if not isinstance(role, AuthorizationRoleScope): - raise TypeError('role must be of type AuthorizationRoleScope') - del self.permission - self._role = role - - class AuthorizationPermissionScope(KaggleObject): r""" Attributes: @@ -152,11 +96,61 @@ def permissions(self, permissions: Optional[List[Optional['AuthorizationPermissi self._permissions = permissions -AuthorizationScope._fields = [ - FieldMetadata("resourceId", "resource_id", "_resource_id", int, 0, PredefinedSerializer()), - FieldMetadata("permission", "permission", "_permission", AuthorizationPermissionScope, None, KaggleObjectSerializer(), optional=True), - FieldMetadata("role", "role", "_role", AuthorizationRoleScope, None, KaggleObjectSerializer(), optional=True), -] +class AuthorizationScope(KaggleObject): + r""" + Attributes: + resource_id (int) + permission (AuthorizationPermissionScope) + role (AuthorizationRoleScope) + """ + + def __init__(self): + self._resource_id = 0 + self._permission = None + self._role = None + self._freeze() + + @property + def resource_id(self) -> int: + return self._resource_id + + @resource_id.setter + def resource_id(self, resource_id: int): + if resource_id is None: + del self.resource_id + return + if not isinstance(resource_id, int): + raise TypeError('resource_id must be of type int') + self._resource_id = resource_id + + @property + def permission(self) -> Optional['AuthorizationPermissionScope']: + return self._permission or None + + @permission.setter + def permission(self, permission: Optional['AuthorizationPermissionScope']): + if permission is None: + del self.permission + return + if not isinstance(permission, AuthorizationPermissionScope): + raise 
TypeError('permission must be of type AuthorizationPermissionScope') + del self.role + self._permission = permission + + @property + def role(self) -> Optional['AuthorizationRoleScope']: + return self._role or None + + @role.setter + def role(self, role: Optional['AuthorizationRoleScope']): + if role is None: + del self.role + return + if not isinstance(role, AuthorizationRoleScope): + raise TypeError('role must be of type AuthorizationRoleScope') + del self.permission + self._role = role + AuthorizationPermissionScope._fields = [ FieldMetadata("name", "name", "_name", str, "", PredefinedSerializer()), @@ -169,3 +163,9 @@ def permissions(self, permissions: Optional[List[Optional['AuthorizationPermissi FieldMetadata("permissions", "permissions", "_permissions", AuthorizationPermissionScope, [], ListSerializer(KaggleObjectSerializer())), ] +AuthorizationScope._fields = [ + FieldMetadata("resourceId", "resource_id", "_resource_id", int, 0, PredefinedSerializer()), + FieldMetadata("permission", "permission", "_permission", AuthorizationPermissionScope, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("role", "role", "_role", AuthorizationRoleScope, None, KaggleObjectSerializer(), optional=True), +] + diff --git a/src/kagglesdk/security/types/iam_service.py b/src/kagglesdk/security/types/iam_service.py index b582560d..09a078ba 100644 --- a/src/kagglesdk/security/types/iam_service.py +++ b/src/kagglesdk/security/types/iam_service.py @@ -42,6 +42,62 @@ def endpoint_path(): return '/api/v1/iam/get/{resource_id}' +class GroupPrincipal(KaggleObject): + r""" + Attributes: + id (int) + slug (str) + avatar (UserManagedGroup) + """ + + def __init__(self): + self._id = None + self._slug = None + self._avatar = None + self._freeze() + + @property + def id(self) -> int: + return self._id or 0 + + @id.setter + def id(self, id: int): + if id is None: + del self.id + return + if not isinstance(id, int): + raise TypeError('id must be of type int') + del self.slug + self._id = id + + @property + def slug(self) -> str: + return self._slug or "" + + @slug.setter + def slug(self, slug: str): + if slug is None: + del self.slug + return + if not isinstance(slug, str): + raise TypeError('slug must be of type str') + del self.id + self._slug = slug + + @property + def avatar(self) -> Optional['UserManagedGroup']: + return self._avatar + + @avatar.setter + def avatar(self, avatar: Optional['UserManagedGroup']): + if avatar is None: + del self.avatar + return + if not isinstance(avatar, UserManagedGroup): + raise TypeError('avatar must be of type UserManagedGroup') + self._avatar = avatar + + class IamPolicy(KaggleObject): r""" Defines an Identity and Access Management (IAM) policy. 
It is used to @@ -165,6 +221,45 @@ def members(self, members: Optional[List[Optional['Principal']]]): self._members = members +class OrganizationPrincipal(KaggleObject): + r""" + Attributes: + id (int) + avatar (Organization) + """ + + def __init__(self): + self._id = 0 + self._avatar = None + self._freeze() + + @property + def id(self) -> int: + return self._id + + @id.setter + def id(self, id: int): + if id is None: + del self.id + return + if not isinstance(id, int): + raise TypeError('id must be of type int') + self._id = id + + @property + def avatar(self) -> Optional['Organization']: + return self._avatar + + @avatar.setter + def avatar(self, avatar: Optional['Organization']): + if avatar is None: + del self.avatar + return + if not isinstance(avatar, Organization): + raise TypeError('avatar must be of type Organization') + self._avatar = avatar + + class Principal(KaggleObject): r""" Represents a principal of an IAM policy binding. @@ -304,56 +399,19 @@ class UserPrincipal(KaggleObject): r""" Attributes: id (int) + user_name (str) avatar (UserAvatar) """ def __init__(self): - self._id = 0 - self._avatar = None - self._freeze() - - @property - def id(self) -> int: - return self._id - - @id.setter - def id(self, id: int): - if id is None: - del self.id - return - if not isinstance(id, int): - raise TypeError('id must be of type int') - self._id = id - - @property - def avatar(self) -> Optional['UserAvatar']: - return self._avatar - - @avatar.setter - def avatar(self, avatar: Optional['UserAvatar']): - if avatar is None: - del self.avatar - return - if not isinstance(avatar, UserAvatar): - raise TypeError('avatar must be of type UserAvatar') - self._avatar = avatar - - -class GroupPrincipal(KaggleObject): - r""" - Attributes: - id (int) - avatar (UserManagedGroup) - """ - - def __init__(self): - self._id = 0 + self._id = None + self._user_name = None self._avatar = None self._freeze() @property def id(self) -> int: - return self._id + return self._id or 0 @id.setter def id(self, id: int): @@ -362,58 +420,34 @@ def id(self, id: int): return if not isinstance(id, int): raise TypeError('id must be of type int') + del self.user_name self._id = id @property - def avatar(self) -> Optional['UserManagedGroup']: - return self._avatar - - @avatar.setter - def avatar(self, avatar: Optional['UserManagedGroup']): - if avatar is None: - del self.avatar - return - if not isinstance(avatar, UserManagedGroup): - raise TypeError('avatar must be of type UserManagedGroup') - self._avatar = avatar - - -class OrganizationPrincipal(KaggleObject): - r""" - Attributes: - id (int) - avatar (Organization) - """ - - def __init__(self): - self._id = 0 - self._avatar = None - self._freeze() - - @property - def id(self) -> int: - return self._id + def user_name(self) -> str: + return self._user_name or "" - @id.setter - def id(self, id: int): - if id is None: - del self.id + @user_name.setter + def user_name(self, user_name: str): + if user_name is None: + del self.user_name return - if not isinstance(id, int): - raise TypeError('id must be of type int') - self._id = id + if not isinstance(user_name, str): + raise TypeError('user_name must be of type str') + del self.id + self._user_name = user_name @property - def avatar(self) -> Optional['Organization']: + def avatar(self) -> Optional['UserAvatar']: return self._avatar @avatar.setter - def avatar(self, avatar: Optional['Organization']): + def avatar(self, avatar: Optional['UserAvatar']): if avatar is None: del self.avatar return - if not isinstance(avatar, 
Organization): - raise TypeError('avatar must be of type Organization') + if not isinstance(avatar, UserAvatar): + raise TypeError('avatar must be of type UserAvatar') self._avatar = avatar @@ -421,6 +455,12 @@ def avatar(self, avatar: Optional['Organization']): FieldMetadata("resourceId", "resource_id", "_resource_id", KaggleResourceId, None, KaggleObjectSerializer()), ] +GroupPrincipal._fields = [ + FieldMetadata("id", "id", "_id", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("slug", "slug", "_slug", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("avatar", "avatar", "_avatar", UserManagedGroup, None, KaggleObjectSerializer()), +] + IamPolicy._fields = [ FieldMetadata("bindings", "bindings", "_bindings", IamPolicyBinding, [], ListSerializer(KaggleObjectSerializer())), FieldMetadata("owner", "owner", "_owner", Principal, None, KaggleObjectSerializer()), @@ -431,6 +471,11 @@ def avatar(self, avatar: Optional['Organization']): FieldMetadata("members", "members", "_members", Principal, [], ListSerializer(KaggleObjectSerializer())), ] +OrganizationPrincipal._fields = [ + FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), + FieldMetadata("avatar", "avatar", "_avatar", Organization, None, KaggleObjectSerializer()), +] + Principal._fields = [ FieldMetadata("user", "user", "_user", UserPrincipal, None, KaggleObjectSerializer(), optional=True), FieldMetadata("group", "group", "_group", GroupPrincipal, None, KaggleObjectSerializer(), optional=True), @@ -444,17 +489,8 @@ def avatar(self, avatar: Optional['Organization']): ] UserPrincipal._fields = [ - FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), + FieldMetadata("id", "id", "_id", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("userName", "user_name", "_user_name", str, None, PredefinedSerializer(), optional=True), FieldMetadata("avatar", "avatar", "_avatar", UserAvatar, None, KaggleObjectSerializer()), ] -GroupPrincipal._fields = [ - FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), - FieldMetadata("avatar", "avatar", "_avatar", UserManagedGroup, None, KaggleObjectSerializer()), -] - -OrganizationPrincipal._fields = [ - FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), - FieldMetadata("avatar", "avatar", "_avatar", Organization, None, KaggleObjectSerializer()), -] - diff --git a/src/kagglesdk/security/types/oauth_service.py b/src/kagglesdk/security/types/oauth_service.py index d4a74a3d..472d7647 100644 --- a/src/kagglesdk/security/types/oauth_service.py +++ b/src/kagglesdk/security/types/oauth_service.py @@ -97,7 +97,9 @@ class ExchangeOAuthTokenResponse(KaggleObject): expires_in (int) Lifetime of the access token in seconds. username (str) - Username of the user who authorized this token. + Username of the user who authorized/owns this token. + user_id (int) + Id the of user who authorized/owns this token. 
""" def __init__(self): @@ -106,6 +108,7 @@ def __init__(self): self._token_type = "" self._expires_in = 0 self._username = "" + self._user_id = 0 self._freeze() @property @@ -169,7 +172,7 @@ def expires_in(self, expires_in: int): @property def username(self) -> str: - """Username of the user who authorized this token.""" + """Username of the user who authorized/owns this token.""" return self._username @username.setter @@ -181,6 +184,20 @@ def username(self, username: str): raise TypeError('username must be of type str') self._username = username + @property + def user_id(self) -> int: + """Id the of user who authorized/owns this token.""" + return self._user_id + + @user_id.setter + def user_id(self, user_id: int): + if user_id is None: + del self.user_id + return + if not isinstance(user_id, int): + raise TypeError('user_id must be of type int') + self._user_id = user_id + @property def accessToken(self): return self.access_token @@ -197,6 +214,10 @@ def tokenType(self): def expiresIn(self): return self.expires_in + @property + def userId(self): + return self.user_id + class IntrospectTokenRequest(KaggleObject): r""" @@ -563,6 +584,7 @@ def endpoint(self): FieldMetadata("tokenType", "token_type", "_token_type", str, "", PredefinedSerializer()), FieldMetadata("expiresIn", "expires_in", "_expires_in", int, 0, PredefinedSerializer()), FieldMetadata("username", "username", "_username", str, "", PredefinedSerializer()), + FieldMetadata("userId", "user_id", "_user_id", int, 0, PredefinedSerializer()), ] IntrospectTokenRequest._fields = [ diff --git a/src/kagglesdk/users/services/group_api_service.py b/src/kagglesdk/users/services/group_api_service.py new file mode 100644 index 00000000..caa62ef6 --- /dev/null +++ b/src/kagglesdk/users/services/group_api_service.py @@ -0,0 +1,19 @@ +from kagglesdk.kaggle_http_client import KaggleHttpClient +from kagglesdk.users.types.group_api_service import ApiListUserManagedGroupMembershipsRequest, ApiListUserManagedGroupMembershipsResponse + +class GroupApiClient(object): + + def __init__(self, client: KaggleHttpClient): + self._client = client + + def list_user_managed_group_memberships(self, request: ApiListUserManagedGroupMembershipsRequest = None) -> ApiListUserManagedGroupMembershipsResponse: + r""" + Args: + request (ApiListUserManagedGroupMembershipsRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiListUserManagedGroupMembershipsRequest() + + return self._client.call("users.GroupApiService", "ListUserManagedGroupMemberships", request, ApiListUserManagedGroupMembershipsResponse) diff --git a/src/kagglesdk/users/types/account_service.py b/src/kagglesdk/users/types/account_service.py index d95e035a..40084a14 100644 --- a/src/kagglesdk/users/types/account_service.py +++ b/src/kagglesdk/users/types/account_service.py @@ -11,6 +11,36 @@ class ApiVersion(enum.Enum): API_VERSION_V2 = 2 """Experimental, admin-only, internal ('/api/i' endpoints).""" +class AuthorizationContext(KaggleObject): + r""" + Attributes: + kernel_session_id (int) + If set, access token is restricted to be used only from the specified + notebook session. + """ + + def __init__(self): + self._kernel_session_id = None + self._freeze() + + @property + def kernel_session_id(self) -> int: + r""" + If set, access token is restricted to be used only from the specified + notebook session. 
+ """ + return self._kernel_session_id or 0 + + @kernel_session_id.setter + def kernel_session_id(self, kernel_session_id: Optional[int]): + if kernel_session_id is None: + del self.kernel_session_id + return + if not isinstance(kernel_session_id, int): + raise TypeError('kernel_session_id must be of type int') + self._kernel_session_id = kernel_session_id + + class ExpireApiTokenRequest(KaggleObject): r""" Attributes: @@ -212,11 +242,15 @@ class GenerateAccessTokenResponse(KaggleObject): Attributes: token (str) expires_in (int) + user_name (str) + user_id (int) """ def __init__(self): self._token = "" self._expires_in = 0 + self._user_name = "" + self._user_id = 0 self._freeze() @property @@ -246,39 +280,47 @@ def expires_in(self, expires_in: int): self._expires_in = expires_in @property - def expiresIn(self): - return self.expires_in + def user_name(self) -> str: + return self._user_name + @user_name.setter + def user_name(self, user_name: str): + if user_name is None: + del self.user_name + return + if not isinstance(user_name, str): + raise TypeError('user_name must be of type str') + self._user_name = user_name -class AuthorizationContext(KaggleObject): - r""" - Attributes: - kernel_session_id (int) - If set, access token is restricted to be used only from the specified - notebook session. - """ + @property + def user_id(self) -> int: + return self._user_id - def __init__(self): - self._kernel_session_id = None - self._freeze() + @user_id.setter + def user_id(self, user_id: int): + if user_id is None: + del self.user_id + return + if not isinstance(user_id, int): + raise TypeError('user_id must be of type int') + self._user_id = user_id @property - def kernel_session_id(self) -> int: - r""" - If set, access token is restricted to be used only from the specified - notebook session. 
- """ - return self._kernel_session_id or 0 + def expiresIn(self): + return self.expires_in + + @property + def userName(self): + return self.user_name + + @property + def userId(self): + return self.user_id - @kernel_session_id.setter - def kernel_session_id(self, kernel_session_id: Optional[int]): - if kernel_session_id is None: - del self.kernel_session_id - return - if not isinstance(kernel_session_id, int): - raise TypeError('kernel_session_id must be of type int') - self._kernel_session_id = kernel_session_id +AuthorizationContext._fields = [ + FieldMetadata("kernelSessionId", "kernel_session_id", "_kernel_session_id", int, None, PredefinedSerializer(), optional=True), +] ExpireApiTokenRequest._fields = [ FieldMetadata("tokenId", "token_id", "_token_id", int, None, PredefinedSerializer(), optional=True), @@ -297,9 +339,7 @@ def kernel_session_id(self, kernel_session_id: Optional[int]): GenerateAccessTokenResponse._fields = [ FieldMetadata("token", "token", "_token", str, "", PredefinedSerializer()), FieldMetadata("expiresIn", "expires_in", "_expires_in", int, 0, PredefinedSerializer()), -] - -AuthorizationContext._fields = [ - FieldMetadata("kernelSessionId", "kernel_session_id", "_kernel_session_id", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("userName", "user_name", "_user_name", str, "", PredefinedSerializer()), + FieldMetadata("userId", "user_id", "_user_id", int, 0, PredefinedSerializer()), ] diff --git a/src/kagglesdk/users/types/group_api_service.py b/src/kagglesdk/users/types/group_api_service.py new file mode 100644 index 00000000..945b7dee --- /dev/null +++ b/src/kagglesdk/users/types/group_api_service.py @@ -0,0 +1,181 @@ +from kagglesdk.kaggle_object import * +from typing import Optional, List + +class ApiListUserManagedGroupMembershipsRequest(KaggleObject): + r""" + Attributes: + page_size (int) + page_token (str) + skip (int) + group_slug (str) + """ + + def __init__(self): + self._page_size = 0 + self._page_token = None + self._skip = None + self._group_slug = "" + self._freeze() + + @property + def page_size(self) -> int: + return self._page_size + + @page_size.setter + def page_size(self, page_size: int): + if page_size is None: + del self.page_size + return + if not isinstance(page_size, int): + raise TypeError('page_size must be of type int') + self._page_size = page_size + + @property + def page_token(self) -> str: + return self._page_token or "" + + @page_token.setter + def page_token(self, page_token: Optional[str]): + if page_token is None: + del self.page_token + return + if not isinstance(page_token, str): + raise TypeError('page_token must be of type str') + self._page_token = page_token + + @property + def skip(self) -> int: + return self._skip or 0 + + @skip.setter + def skip(self, skip: Optional[int]): + if skip is None: + del self.skip + return + if not isinstance(skip, int): + raise TypeError('skip must be of type int') + self._skip = skip + + @property + def group_slug(self) -> str: + return self._group_slug + + @group_slug.setter + def group_slug(self, group_slug: str): + if group_slug is None: + del self.group_slug + return + if not isinstance(group_slug, str): + raise TypeError('group_slug must be of type str') + self._group_slug = group_slug + + def endpoint(self): + path = '/api/v1/groups/{group_slug}/members' + return path.format_map(self.to_field_map(self)) + + @staticmethod + def endpoint_path(): + return '/api/v1/groups/{group_slug}/members' + + +class ApiListUserManagedGroupMembershipsResponse(KaggleObject): + r""" + 
Attributes: + memberships (ApiListUserManagedGroupMembershipsResponse.Membership) + next_page_token (str) + """ + + class Membership(KaggleObject): + r""" + Attributes: + user_id (int) + username (str) + """ + + def __init__(self): + self._user_id = 0 + self._username = "" + self._freeze() + + @property + def user_id(self) -> int: + return self._user_id + + @user_id.setter + def user_id(self, user_id: int): + if user_id is None: + del self.user_id + return + if not isinstance(user_id, int): + raise TypeError('user_id must be of type int') + self._user_id = user_id + + @property + def username(self) -> str: + return self._username + + @username.setter + def username(self, username: str): + if username is None: + del self.username + return + if not isinstance(username, str): + raise TypeError('username must be of type str') + self._username = username + + + def __init__(self): + self._memberships = [] + self._next_page_token = "" + self._freeze() + + @property + def memberships(self) -> Optional[List[Optional['ApiListUserManagedGroupMembershipsResponse.Membership']]]: + return self._memberships + + @memberships.setter + def memberships(self, memberships: Optional[List[Optional['ApiListUserManagedGroupMembershipsResponse.Membership']]]): + if memberships is None: + del self.memberships + return + if not isinstance(memberships, list): + raise TypeError('memberships must be of type list') + if not all([isinstance(t, ApiListUserManagedGroupMembershipsResponse.Membership) for t in memberships]): + raise TypeError('memberships must contain only items of type ApiListUserManagedGroupMembershipsResponse.Membership') + self._memberships = memberships + + @property + def next_page_token(self) -> str: + return self._next_page_token + + @next_page_token.setter + def next_page_token(self, next_page_token: str): + if next_page_token is None: + del self.next_page_token + return + if not isinstance(next_page_token, str): + raise TypeError('next_page_token must be of type str') + self._next_page_token = next_page_token + + @property + def nextPageToken(self): + return self.next_page_token + + +ApiListUserManagedGroupMembershipsRequest._fields = [ + FieldMetadata("pageSize", "page_size", "_page_size", int, 0, PredefinedSerializer()), + FieldMetadata("pageToken", "page_token", "_page_token", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("skip", "skip", "_skip", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("groupSlug", "group_slug", "_group_slug", str, "", PredefinedSerializer()), +] + +ApiListUserManagedGroupMembershipsResponse.Membership._fields = [ + FieldMetadata("userId", "user_id", "_user_id", int, 0, PredefinedSerializer()), + FieldMetadata("username", "username", "_username", str, "", PredefinedSerializer()), +] + +ApiListUserManagedGroupMembershipsResponse._fields = [ + FieldMetadata("memberships", "memberships", "_memberships", ApiListUserManagedGroupMembershipsResponse.Membership, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("nextPageToken", "next_page_token", "_next_page_token", str, "", PredefinedSerializer()), +] + diff --git a/src/kagglesdk/users/types/user_avatar.py b/src/kagglesdk/users/types/user_avatar.py index 326e21af..acec03d8 100644 --- a/src/kagglesdk/users/types/user_avatar.py +++ b/src/kagglesdk/users/types/user_avatar.py @@ -11,18 +11,18 @@ class UserAvatar(KaggleObject): Thumbnail URL for the given user url (str) Profile URL for the given user - profile_url (str) - Alternate name for 'url', to aid in refactoring both are 
provided. - TODO(http://b/402224065) remove once clients have migrated from this. user_name (str) User name for the given user tier (UserAchievementTier) Tier for the given user + id (int) + ID for the given user + profile_url (str) + Alternate name for 'url', to aid in refactoring both are provided. + TODO(http://b/402224065) remove once clients have migrated from this. performance_tier (UserAchievementTier) Alternate name for `tier`, to aid in refactoring both are provided. TODO(http://b/402224065) remove once clients have migrated from this. - id (int) - ID for the given user user_id (int) Alternate name for `id`, to aid in refactoring both are provided. TODO(http://b/402224065) remove once clients have migrated from this. @@ -36,11 +36,11 @@ def __init__(self): self._display_name = None self._thumbnail_url = None self._url = None - self._profile_url = None self._user_name = None self._tier = UserAchievementTier.NOVICE - self._performance_tier = None self._id = 0 + self._profile_url = None + self._performance_tier = None self._user_id = None self._progression_opt_out = None self._is_phone_verified = None @@ -214,11 +214,11 @@ def is_phone_verified(self, is_phone_verified: Optional[bool]): FieldMetadata("displayName", "display_name", "_display_name", str, None, PredefinedSerializer(), optional=True), FieldMetadata("thumbnailUrl", "thumbnail_url", "_thumbnail_url", str, None, PredefinedSerializer(), optional=True), FieldMetadata("url", "url", "_url", str, None, PredefinedSerializer(), optional=True), - FieldMetadata("profileUrl", "profile_url", "_profile_url", str, None, PredefinedSerializer(), optional=True), FieldMetadata("userName", "user_name", "_user_name", str, None, PredefinedSerializer(), optional=True), FieldMetadata("tier", "tier", "_tier", UserAchievementTier, UserAchievementTier.NOVICE, EnumSerializer()), - FieldMetadata("performanceTier", "performance_tier", "_performance_tier", UserAchievementTier, None, EnumSerializer(), optional=True), FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), + FieldMetadata("profileUrl", "profile_url", "_profile_url", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("performanceTier", "performance_tier", "_performance_tier", UserAchievementTier, None, EnumSerializer(), optional=True), FieldMetadata("userId", "user_id", "_user_id", int, None, PredefinedSerializer(), optional=True), FieldMetadata("progressionOptOut", "progression_opt_out", "_progression_opt_out", bool, None, PredefinedSerializer(), optional=True), FieldMetadata("isPhoneVerified", "is_phone_verified", "_is_phone_verified", bool, None, PredefinedSerializer(), optional=True), diff --git a/src/ksdk/LICENSE b/src/ksdk/LICENSE new file mode 100644 index 00000000..3e8809f0 --- /dev/null +++ b/src/ksdk/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018 Kaggle Inc + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/src/ksdk/__init__.py b/src/ksdk/__init__.py new file mode 100644 index 00000000..4de60423 --- /dev/null +++ b/src/ksdk/__init__.py @@ -0,0 +1,4 @@ +from kagglesdk.kaggle_client import KaggleClient +from kagglesdk.kaggle_creds import KaggleCredentials +from kagglesdk.kaggle_env import get_access_token_from_env, KaggleEnv +from kagglesdk.kaggle_oauth import KaggleOAuth diff --git a/src/ksdk/admin/__init__.py b/src/ksdk/admin/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/admin/services/__init__.py b/src/ksdk/admin/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/admin/services/inbox_file_service.py b/src/ksdk/admin/services/inbox_file_service.py new file mode 100644 index 00000000..6a83bf56 --- /dev/null +++ b/src/ksdk/admin/services/inbox_file_service.py @@ -0,0 +1,22 @@ +from kagglesdk.admin.types.inbox_file_service import CreateInboxFileRequest, CreateInboxFileResponse +from kagglesdk.kaggle_http_client import KaggleHttpClient + +class InboxFileClient(object): + """File drop/pickup functionality.""" + + def __init__(self, client: KaggleHttpClient): + self._client = client + + def create_inbox_file(self, request: CreateInboxFileRequest = None) -> CreateInboxFileResponse: + r""" + Creates (aka 'drops') a new file into the inbox. + + Args: + request (CreateInboxFileRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = CreateInboxFileRequest() + + return self._client.call("admin.InboxFileService", "CreateInboxFile", request, CreateInboxFileResponse) diff --git a/src/ksdk/admin/types/__init__.py b/src/ksdk/admin/types/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/admin/types/inbox_file_service.py b/src/ksdk/admin/types/inbox_file_service.py new file mode 100644 index 00000000..a3ecfc26 --- /dev/null +++ b/src/ksdk/admin/types/inbox_file_service.py @@ -0,0 +1,74 @@ +from kagglesdk.kaggle_object import * + +class CreateInboxFileRequest(KaggleObject): + r""" + Attributes: + virtual_directory (str) + Directory name used for tagging the uploaded file. + blob_file_token (str) + Token representing the uploaded file. 
+ """ + + def __init__(self): + self._virtual_directory = "" + self._blob_file_token = "" + self._freeze() + + @property + def virtual_directory(self) -> str: + """Directory name used for tagging the uploaded file.""" + return self._virtual_directory + + @virtual_directory.setter + def virtual_directory(self, virtual_directory: str): + if virtual_directory is None: + del self.virtual_directory + return + if not isinstance(virtual_directory, str): + raise TypeError('virtual_directory must be of type str') + self._virtual_directory = virtual_directory + + @property + def blob_file_token(self) -> str: + """Token representing the uploaded file.""" + return self._blob_file_token + + @blob_file_token.setter + def blob_file_token(self, blob_file_token: str): + if blob_file_token is None: + del self.blob_file_token + return + if not isinstance(blob_file_token, str): + raise TypeError('blob_file_token must be of type str') + self._blob_file_token = blob_file_token + + def endpoint(self): + path = '/api/v1/inbox/files/create' + return path.format_map(self.to_field_map(self)) + + + @staticmethod + def method(): + return 'POST' + + @staticmethod + def body_fields(): + return '*' + + +class CreateInboxFileResponse(KaggleObject): + r""" + NOTE: This is sent to non-admins, so we're intentionally *NOT* sending back + the full InboxFile (with its URL for a direct download). + + """ + + pass + +CreateInboxFileRequest._fields = [ + FieldMetadata("virtualDirectory", "virtual_directory", "_virtual_directory", str, "", PredefinedSerializer()), + FieldMetadata("blobFileToken", "blob_file_token", "_blob_file_token", str, "", PredefinedSerializer()), +] + +CreateInboxFileResponse._fields = [] + diff --git a/src/ksdk/benchmarks/__init__.py b/src/ksdk/benchmarks/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/benchmarks/services/__init__.py b/src/ksdk/benchmarks/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/benchmarks/services/benchmarks_api_service.py b/src/ksdk/benchmarks/services/benchmarks_api_service.py new file mode 100644 index 00000000..dacbb22b --- /dev/null +++ b/src/ksdk/benchmarks/services/benchmarks_api_service.py @@ -0,0 +1,19 @@ +from kagglesdk.benchmarks.types.benchmarks_api_service import ApiBenchmarkLeaderboard, ApiGetBenchmarkLeaderboardRequest +from kagglesdk.kaggle_http_client import KaggleHttpClient + +class BenchmarksApiClient(object): + + def __init__(self, client: KaggleHttpClient): + self._client = client + + def get_benchmark_leaderboard(self, request: ApiGetBenchmarkLeaderboardRequest = None) -> ApiBenchmarkLeaderboard: + r""" + Args: + request (ApiGetBenchmarkLeaderboardRequest): + The request object; initialized to empty instance if not specified. 
+ """ + + if request is None: + request = ApiGetBenchmarkLeaderboardRequest() + + return self._client.call("benchmarks.BenchmarksApiService", "GetBenchmarkLeaderboard", request, ApiBenchmarkLeaderboard) diff --git a/src/ksdk/benchmarks/types/__init__.py b/src/ksdk/benchmarks/types/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/benchmarks/types/benchmark_types.py b/src/ksdk/benchmarks/types/benchmark_types.py new file mode 100644 index 00000000..a0a8ef0d --- /dev/null +++ b/src/ksdk/benchmarks/types/benchmark_types.py @@ -0,0 +1,682 @@ +from datetime import datetime +from kagglesdk.kaggle_object import * +from typing import Optional, List + +class BenchmarkVersionIdentifier(KaggleObject): + r""" + Identifier for selecting a specific benchmark version. + + Attributes: + version_id_selector (VersionIdSelector) + published_latest_selector (PublishedLatestSelector) + published_number_selector (PublishedNumberSelector) + draft_selector (DraftSelector) + benchmark_slug_selector (BenchmarkSlugSelector) + """ + + def __init__(self): + self._version_id_selector = None + self._published_latest_selector = None + self._published_number_selector = None + self._draft_selector = None + self._benchmark_slug_selector = None + self._freeze() + + @property + def version_id_selector(self) -> Optional['VersionIdSelector']: + return self._version_id_selector or None + + @version_id_selector.setter + def version_id_selector(self, version_id_selector: Optional['VersionIdSelector']): + if version_id_selector is None: + del self.version_id_selector + return + if not isinstance(version_id_selector, VersionIdSelector): + raise TypeError('version_id_selector must be of type VersionIdSelector') + del self.published_latest_selector + del self.published_number_selector + del self.draft_selector + del self.benchmark_slug_selector + self._version_id_selector = version_id_selector + + @property + def published_latest_selector(self) -> Optional['PublishedLatestSelector']: + return self._published_latest_selector or None + + @published_latest_selector.setter + def published_latest_selector(self, published_latest_selector: Optional['PublishedLatestSelector']): + if published_latest_selector is None: + del self.published_latest_selector + return + if not isinstance(published_latest_selector, PublishedLatestSelector): + raise TypeError('published_latest_selector must be of type PublishedLatestSelector') + del self.version_id_selector + del self.published_number_selector + del self.draft_selector + del self.benchmark_slug_selector + self._published_latest_selector = published_latest_selector + + @property + def published_number_selector(self) -> Optional['PublishedNumberSelector']: + return self._published_number_selector or None + + @published_number_selector.setter + def published_number_selector(self, published_number_selector: Optional['PublishedNumberSelector']): + if published_number_selector is None: + del self.published_number_selector + return + if not isinstance(published_number_selector, PublishedNumberSelector): + raise TypeError('published_number_selector must be of type PublishedNumberSelector') + del self.version_id_selector + del self.published_latest_selector + del self.draft_selector + del self.benchmark_slug_selector + self._published_number_selector = published_number_selector + + @property + def draft_selector(self) -> Optional['DraftSelector']: + return self._draft_selector or None + + @draft_selector.setter + def draft_selector(self, draft_selector: 
Optional['DraftSelector']): + if draft_selector is None: + del self.draft_selector + return + if not isinstance(draft_selector, DraftSelector): + raise TypeError('draft_selector must be of type DraftSelector') + del self.version_id_selector + del self.published_latest_selector + del self.published_number_selector + del self.benchmark_slug_selector + self._draft_selector = draft_selector + + @property + def benchmark_slug_selector(self) -> Optional['BenchmarkSlugSelector']: + return self._benchmark_slug_selector or None + + @benchmark_slug_selector.setter + def benchmark_slug_selector(self, benchmark_slug_selector: Optional['BenchmarkSlugSelector']): + if benchmark_slug_selector is None: + del self.benchmark_slug_selector + return + if not isinstance(benchmark_slug_selector, BenchmarkSlugSelector): + raise TypeError('benchmark_slug_selector must be of type BenchmarkSlugSelector') + del self.version_id_selector + del self.published_latest_selector + del self.published_number_selector + del self.draft_selector + self._benchmark_slug_selector = benchmark_slug_selector + + +class DraftSelector(KaggleObject): + r""" + Select by the parent benchmark's single draft version (always exists). + + Attributes: + parent_benchmark_identifier (BenchmarkIdentifier) + """ + + def __init__(self): + self._parent_benchmark_identifier = None + self._freeze() + + @property + def parent_benchmark_identifier(self) -> Optional['BenchmarkIdentifier']: + return self._parent_benchmark_identifier + + @parent_benchmark_identifier.setter + def parent_benchmark_identifier(self, parent_benchmark_identifier: Optional['BenchmarkIdentifier']): + if parent_benchmark_identifier is None: + del self.parent_benchmark_identifier + return + if not isinstance(parent_benchmark_identifier, BenchmarkIdentifier): + raise TypeError('parent_benchmark_identifier must be of type BenchmarkIdentifier') + self._parent_benchmark_identifier = parent_benchmark_identifier + + +class PublishedLatestSelector(KaggleObject): + r""" + Select by the parent benchmark's latest published version, defaults to NULL. + + Attributes: + parent_benchmark_identifier (BenchmarkIdentifier) + """ + + def __init__(self): + self._parent_benchmark_identifier = None + self._freeze() + + @property + def parent_benchmark_identifier(self) -> Optional['BenchmarkIdentifier']: + return self._parent_benchmark_identifier + + @parent_benchmark_identifier.setter + def parent_benchmark_identifier(self, parent_benchmark_identifier: Optional['BenchmarkIdentifier']): + if parent_benchmark_identifier is None: + del self.parent_benchmark_identifier + return + if not isinstance(parent_benchmark_identifier, BenchmarkIdentifier): + raise TypeError('parent_benchmark_identifier must be of type BenchmarkIdentifier') + self._parent_benchmark_identifier = parent_benchmark_identifier + + +class PublishedNumberSelector(KaggleObject): + r""" + Select by the parent benchmark's published version at a particular version + number. Defaults to NULL. 
+ + Attributes: + parent_benchmark_identifier (BenchmarkIdentifier) + version_number (int) + """ + + def __init__(self): + self._parent_benchmark_identifier = None + self._version_number = 0 + self._freeze() + + @property + def parent_benchmark_identifier(self) -> Optional['BenchmarkIdentifier']: + return self._parent_benchmark_identifier + + @parent_benchmark_identifier.setter + def parent_benchmark_identifier(self, parent_benchmark_identifier: Optional['BenchmarkIdentifier']): + if parent_benchmark_identifier is None: + del self.parent_benchmark_identifier + return + if not isinstance(parent_benchmark_identifier, BenchmarkIdentifier): + raise TypeError('parent_benchmark_identifier must be of type BenchmarkIdentifier') + self._parent_benchmark_identifier = parent_benchmark_identifier + + @property + def version_number(self) -> int: + return self._version_number + + @version_number.setter + def version_number(self, version_number: int): + if version_number is None: + del self.version_number + return + if not isinstance(version_number, int): + raise TypeError('version_number must be of type int') + self._version_number = version_number + + +class VersionIdSelector(KaggleObject): + r""" + Select by the benchmark version id. Optional parent benchmark id. + + Attributes: + parent_benchmark_identifier (BenchmarkIdentifier) + id (int) + """ + + def __init__(self): + self._parent_benchmark_identifier = None + self._id = 0 + self._freeze() + + @property + def parent_benchmark_identifier(self) -> Optional['BenchmarkIdentifier']: + return self._parent_benchmark_identifier or None + + @parent_benchmark_identifier.setter + def parent_benchmark_identifier(self, parent_benchmark_identifier: Optional[Optional['BenchmarkIdentifier']]): + if parent_benchmark_identifier is None: + del self.parent_benchmark_identifier + return + if not isinstance(parent_benchmark_identifier, BenchmarkIdentifier): + raise TypeError('parent_benchmark_identifier must be of type BenchmarkIdentifier') + self._parent_benchmark_identifier = parent_benchmark_identifier + + @property + def id(self) -> int: + return self._id + + @id.setter + def id(self, id: int): + if id is None: + del self.id + return + if not isinstance(id, int): + raise TypeError('id must be of type int') + self._id = id + + +class BenchmarkIdentifier(KaggleObject): + r""" + Identifier for selecting a specific benchmark. + + Attributes: + id (int) + slug (str) + """ + + def __init__(self): + self._id = None + self._slug = None + self._freeze() + + @property + def id(self) -> int: + return self._id or 0 + + @id.setter + def id(self, id: int): + if id is None: + del self.id + return + if not isinstance(id, int): + raise TypeError('id must be of type int') + del self.slug + self._id = id + + @property + def slug(self) -> str: + return self._slug or "" + + @slug.setter + def slug(self, slug: str): + if slug is None: + del self.slug + return + if not isinstance(slug, str): + raise TypeError('slug must be of type str') + del self.id + self._slug = slug + + +class BenchmarkResult(KaggleObject): + r""" + TODO(bml): Integrate this proto with personal benchmarks trials. + Represents the outcome of a benchmark run. All fields are immutable. + + Attributes: + task_version_id (int) + Convenience fields for this result (for the frontend): + numeric_result (NumericResult) + boolean_result (bool) + custom_additional_results (CustomResult) + Generic additional results. 
These are rendered generically on the frontend: + numeric_result_private (NumericResult) + Numeric result on the private set of the benchmark version. + numeric_result_public (NumericResult) + Numeric result on the public set of the benchmark version. + evaluation_date (datetime) + The date on which evaluation was performed. + """ + + def __init__(self): + self._task_version_id = None + self._numeric_result = None + self._boolean_result = None + self._custom_additional_results = [] + self._numeric_result_private = None + self._numeric_result_public = None + self._evaluation_date = None + self._freeze() + + @property + def task_version_id(self) -> int: + """Convenience fields for this result (for the frontend):""" + return self._task_version_id or 0 + + @task_version_id.setter + def task_version_id(self, task_version_id: Optional[int]): + if task_version_id is None: + del self.task_version_id + return + if not isinstance(task_version_id, int): + raise TypeError('task_version_id must be of type int') + self._task_version_id = task_version_id + + @property + def numeric_result(self) -> Optional['NumericResult']: + return self._numeric_result or None + + @numeric_result.setter + def numeric_result(self, numeric_result: Optional['NumericResult']): + if numeric_result is None: + del self.numeric_result + return + if not isinstance(numeric_result, NumericResult): + raise TypeError('numeric_result must be of type NumericResult') + del self.boolean_result + self._numeric_result = numeric_result + + @property + def boolean_result(self) -> bool: + return self._boolean_result or False + + @boolean_result.setter + def boolean_result(self, boolean_result: bool): + if boolean_result is None: + del self.boolean_result + return + if not isinstance(boolean_result, bool): + raise TypeError('boolean_result must be of type bool') + del self.numeric_result + self._boolean_result = boolean_result + + @property + def custom_additional_results(self) -> Optional[List[Optional['CustomResult']]]: + """Generic additional results. 
These are rendered generically on the frontend:""" + return self._custom_additional_results + + @custom_additional_results.setter + def custom_additional_results(self, custom_additional_results: Optional[List[Optional['CustomResult']]]): + if custom_additional_results is None: + del self.custom_additional_results + return + if not isinstance(custom_additional_results, list): + raise TypeError('custom_additional_results must be of type list') + if not all([isinstance(t, CustomResult) for t in custom_additional_results]): + raise TypeError('custom_additional_results must contain only items of type CustomResult') + self._custom_additional_results = custom_additional_results + + @property + def numeric_result_private(self) -> Optional['NumericResult']: + """Numeric result on the private set of the benchmark version.""" + return self._numeric_result_private or None + + @numeric_result_private.setter + def numeric_result_private(self, numeric_result_private: Optional[Optional['NumericResult']]): + if numeric_result_private is None: + del self.numeric_result_private + return + if not isinstance(numeric_result_private, NumericResult): + raise TypeError('numeric_result_private must be of type NumericResult') + self._numeric_result_private = numeric_result_private + + @property + def numeric_result_public(self) -> Optional['NumericResult']: + """Numeric result on the public set of the benchmark version.""" + return self._numeric_result_public or None + + @numeric_result_public.setter + def numeric_result_public(self, numeric_result_public: Optional[Optional['NumericResult']]): + if numeric_result_public is None: + del self.numeric_result_public + return + if not isinstance(numeric_result_public, NumericResult): + raise TypeError('numeric_result_public must be of type NumericResult') + self._numeric_result_public = numeric_result_public + + @property + def evaluation_date(self) -> datetime: + """The date on which evaluation was performed.""" + return self._evaluation_date or None + + @evaluation_date.setter + def evaluation_date(self, evaluation_date: Optional[datetime]): + if evaluation_date is None: + del self.evaluation_date + return + if not isinstance(evaluation_date, datetime): + raise TypeError('evaluation_date must be of type datetime') + self._evaluation_date = evaluation_date + + +class BenchmarkSlugSelector(KaggleObject): + r""" + Attributes: + owner_slug (str) + benchmark_slug (str) + version_number (int) + """ + + def __init__(self): + self._owner_slug = "" + self._benchmark_slug = "" + self._version_number = None + self._freeze() + + @property + def owner_slug(self) -> str: + return self._owner_slug + + @owner_slug.setter + def owner_slug(self, owner_slug: str): + if owner_slug is None: + del self.owner_slug + return + if not isinstance(owner_slug, str): + raise TypeError('owner_slug must be of type str') + self._owner_slug = owner_slug + + @property + def benchmark_slug(self) -> str: + return self._benchmark_slug + + @benchmark_slug.setter + def benchmark_slug(self, benchmark_slug: str): + if benchmark_slug is None: + del self.benchmark_slug + return + if not isinstance(benchmark_slug, str): + raise TypeError('benchmark_slug must be of type str') + self._benchmark_slug = benchmark_slug + + @property + def version_number(self) -> int: + return self._version_number or 0 + + @version_number.setter + def version_number(self, version_number: Optional[int]): + if version_number is None: + del self.version_number + return + if not isinstance(version_number, int): + raise 
TypeError('version_number must be of type int') + self._version_number = version_number + + +class CustomResult(KaggleObject): + r""" + Attributes: + key (str) + value (str) + """ + + def __init__(self): + self._key = "" + self._value = "" + self._freeze() + + @property + def key(self) -> str: + return self._key + + @key.setter + def key(self, key: str): + if key is None: + del self.key + return + if not isinstance(key, str): + raise TypeError('key must be of type str') + self._key = key + + @property + def value(self) -> str: + return self._value + + @value.setter + def value(self, value: str): + if value is None: + del self.value + return + if not isinstance(value, str): + raise TypeError('value must be of type str') + self._value = value + + +class NumericResult(KaggleObject): + r""" + Attributes: + value (float) + confidence_interval (float) + Note, while we call this the 'confidence interval' - the value we store + here is actually the 'confidence radius', it should always be displayed + as a +- value. + uneven_confidence_interval (UnevenConfidenceInterval) + For asymmetric confidence intervals in which the +/- values differ + If set, prioritized over confidence_interval + """ + + def __init__(self): + self._value = 0.0 + self._confidence_interval = None + self._uneven_confidence_interval = None + self._freeze() + + @property + def value(self) -> float: + return self._value + + @value.setter + def value(self, value: float): + if value is None: + del self.value + return + if not isinstance(value, float): + raise TypeError('value must be of type float') + self._value = value + + @property + def confidence_interval(self) -> float: + r""" + Note, while we call this the 'confidence interval' - the value we store + here is actually the 'confidence radius', it should always be displayed + as a +- value. 
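Because the stored "confidence interval" is really a radius, a small display helper (illustrative only, not part of the generated code) makes the intended rendering concrete; the asymmetric interval, when present, takes precedence:

    from kagglesdk.benchmarks.types.benchmark_types import NumericResult

    def format_numeric_result(result: NumericResult) -> str:
        # Render the stored radius as a +/- value; prefer the uneven interval.
        uneven = result.uneven_confidence_interval
        if uneven is not None:
            return f"{result.value:.3f} (+{uneven.plus:.3f}/-{uneven.minus:.3f})"
        if result.confidence_interval:
            return f"{result.value:.3f} +/- {result.confidence_interval:.3f}"
        return f"{result.value:.3f}"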
+ """ + return self._confidence_interval or 0.0 + + @confidence_interval.setter + def confidence_interval(self, confidence_interval: Optional[float]): + if confidence_interval is None: + del self.confidence_interval + return + if not isinstance(confidence_interval, float): + raise TypeError('confidence_interval must be of type float') + self._confidence_interval = confidence_interval + + @property + def uneven_confidence_interval(self) -> Optional['UnevenConfidenceInterval']: + r""" + For asymmetric confidence intervals in which the +/- values differ + If set, prioritized over confidence_interval + """ + return self._uneven_confidence_interval or None + + @uneven_confidence_interval.setter + def uneven_confidence_interval(self, uneven_confidence_interval: Optional[Optional['UnevenConfidenceInterval']]): + if uneven_confidence_interval is None: + del self.uneven_confidence_interval + return + if not isinstance(uneven_confidence_interval, UnevenConfidenceInterval): + raise TypeError('uneven_confidence_interval must be of type UnevenConfidenceInterval') + self._uneven_confidence_interval = uneven_confidence_interval + + +class UnevenConfidenceInterval(KaggleObject): + r""" + Attributes: + plus (float) + minus (float) + """ + + def __init__(self): + self._plus = 0.0 + self._minus = 0.0 + self._freeze() + + @property + def plus(self) -> float: + return self._plus + + @plus.setter + def plus(self, plus: float): + if plus is None: + del self.plus + return + if not isinstance(plus, float): + raise TypeError('plus must be of type float') + self._plus = plus + + @property + def minus(self) -> float: + return self._minus + + @minus.setter + def minus(self, minus: float): + if minus is None: + del self.minus + return + if not isinstance(minus, float): + raise TypeError('minus must be of type float') + self._minus = minus + + +BenchmarkVersionIdentifier._fields = [ + FieldMetadata("versionIdSelector", "version_id_selector", "_version_id_selector", VersionIdSelector, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("publishedLatestSelector", "published_latest_selector", "_published_latest_selector", PublishedLatestSelector, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("publishedNumberSelector", "published_number_selector", "_published_number_selector", PublishedNumberSelector, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("draftSelector", "draft_selector", "_draft_selector", DraftSelector, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("benchmarkSlugSelector", "benchmark_slug_selector", "_benchmark_slug_selector", BenchmarkSlugSelector, None, KaggleObjectSerializer(), optional=True), +] + +DraftSelector._fields = [ + FieldMetadata("parentBenchmarkIdentifier", "parent_benchmark_identifier", "_parent_benchmark_identifier", BenchmarkIdentifier, None, KaggleObjectSerializer()), +] + +PublishedLatestSelector._fields = [ + FieldMetadata("parentBenchmarkIdentifier", "parent_benchmark_identifier", "_parent_benchmark_identifier", BenchmarkIdentifier, None, KaggleObjectSerializer()), +] + +PublishedNumberSelector._fields = [ + FieldMetadata("parentBenchmarkIdentifier", "parent_benchmark_identifier", "_parent_benchmark_identifier", BenchmarkIdentifier, None, KaggleObjectSerializer()), + FieldMetadata("versionNumber", "version_number", "_version_number", int, 0, PredefinedSerializer()), +] + +VersionIdSelector._fields = [ + FieldMetadata("parentBenchmarkIdentifier", "parent_benchmark_identifier", "_parent_benchmark_identifier", 
BenchmarkIdentifier, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), +] + +BenchmarkIdentifier._fields = [ + FieldMetadata("id", "id", "_id", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("slug", "slug", "_slug", str, None, PredefinedSerializer(), optional=True), +] + +BenchmarkResult._fields = [ + FieldMetadata("taskVersionId", "task_version_id", "_task_version_id", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("numericResult", "numeric_result", "_numeric_result", NumericResult, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("booleanResult", "boolean_result", "_boolean_result", bool, None, PredefinedSerializer(), optional=True), + FieldMetadata("customAdditionalResults", "custom_additional_results", "_custom_additional_results", CustomResult, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("numericResultPrivate", "numeric_result_private", "_numeric_result_private", NumericResult, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("numericResultPublic", "numeric_result_public", "_numeric_result_public", NumericResult, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("evaluationDate", "evaluation_date", "_evaluation_date", datetime, None, DateTimeSerializer(), optional=True), +] + +BenchmarkSlugSelector._fields = [ + FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), + FieldMetadata("benchmarkSlug", "benchmark_slug", "_benchmark_slug", str, "", PredefinedSerializer()), + FieldMetadata("versionNumber", "version_number", "_version_number", int, None, PredefinedSerializer(), optional=True), +] + +CustomResult._fields = [ + FieldMetadata("key", "key", "_key", str, "", PredefinedSerializer()), + FieldMetadata("value", "value", "_value", str, "", PredefinedSerializer()), +] + +NumericResult._fields = [ + FieldMetadata("value", "value", "_value", float, 0.0, PredefinedSerializer()), + FieldMetadata("confidenceInterval", "confidence_interval", "_confidence_interval", float, None, PredefinedSerializer(), optional=True), + FieldMetadata("unevenConfidenceInterval", "uneven_confidence_interval", "_uneven_confidence_interval", UnevenConfidenceInterval, None, KaggleObjectSerializer(), optional=True), +] + +UnevenConfidenceInterval._fields = [ + FieldMetadata("plus", "plus", "_plus", float, 0.0, PredefinedSerializer()), + FieldMetadata("minus", "minus", "_minus", float, 0.0, PredefinedSerializer()), +] + diff --git a/src/ksdk/benchmarks/types/benchmarks_api_service.py b/src/ksdk/benchmarks/types/benchmarks_api_service.py new file mode 100644 index 00000000..1cd6fb77 --- /dev/null +++ b/src/ksdk/benchmarks/types/benchmarks_api_service.py @@ -0,0 +1,213 @@ +from kagglesdk.benchmarks.types.benchmark_types import BenchmarkResult, BenchmarkVersionIdentifier +from kagglesdk.kaggle_object import * +from typing import List, Optional + +class ApiBenchmarkLeaderboard(KaggleObject): + r""" + Attributes: + rows (ApiBenchmarkLeaderboard.LeaderboardRow) + """ + + class LeaderboardRow(KaggleObject): + r""" + Attributes: + model_version_name (str) + model_version_slug (str) + task_results (ApiBenchmarkLeaderboard.TaskResult) + """ + + def __init__(self): + self._model_version_name = "" + self._model_version_slug = "" + self._task_results = [] + self._freeze() + + @property + def model_version_name(self) -> str: + return self._model_version_name + + @model_version_name.setter + def model_version_name(self, 
model_version_name: str): + if model_version_name is None: + del self.model_version_name + return + if not isinstance(model_version_name, str): + raise TypeError('model_version_name must be of type str') + self._model_version_name = model_version_name + + @property + def model_version_slug(self) -> str: + return self._model_version_slug + + @model_version_slug.setter + def model_version_slug(self, model_version_slug: str): + if model_version_slug is None: + del self.model_version_slug + return + if not isinstance(model_version_slug, str): + raise TypeError('model_version_slug must be of type str') + self._model_version_slug = model_version_slug + + @property + def task_results(self) -> Optional[List[Optional['ApiBenchmarkLeaderboard.TaskResult']]]: + return self._task_results + + @task_results.setter + def task_results(self, task_results: Optional[List[Optional['ApiBenchmarkLeaderboard.TaskResult']]]): + if task_results is None: + del self.task_results + return + if not isinstance(task_results, list): + raise TypeError('task_results must be of type list') + if not all([isinstance(t, ApiBenchmarkLeaderboard.TaskResult) for t in task_results]): + raise TypeError('task_results must contain only items of type ApiBenchmarkLeaderboard.TaskResult') + self._task_results = task_results + + + class TaskResult(KaggleObject): + r""" + Attributes: + benchmark_task_name (str) + benchmark_task_slug (str) + task_version (int) + result (BenchmarkResult) + """ + + def __init__(self): + self._benchmark_task_name = "" + self._benchmark_task_slug = "" + self._task_version = 0 + self._result = None + self._freeze() + + @property + def benchmark_task_name(self) -> str: + return self._benchmark_task_name + + @benchmark_task_name.setter + def benchmark_task_name(self, benchmark_task_name: str): + if benchmark_task_name is None: + del self.benchmark_task_name + return + if not isinstance(benchmark_task_name, str): + raise TypeError('benchmark_task_name must be of type str') + self._benchmark_task_name = benchmark_task_name + + @property + def benchmark_task_slug(self) -> str: + return self._benchmark_task_slug + + @benchmark_task_slug.setter + def benchmark_task_slug(self, benchmark_task_slug: str): + if benchmark_task_slug is None: + del self.benchmark_task_slug + return + if not isinstance(benchmark_task_slug, str): + raise TypeError('benchmark_task_slug must be of type str') + self._benchmark_task_slug = benchmark_task_slug + + @property + def task_version(self) -> int: + return self._task_version + + @task_version.setter + def task_version(self, task_version: int): + if task_version is None: + del self.task_version + return + if not isinstance(task_version, int): + raise TypeError('task_version must be of type int') + self._task_version = task_version + + @property + def result(self) -> Optional['BenchmarkResult']: + return self._result + + @result.setter + def result(self, result: Optional['BenchmarkResult']): + if result is None: + del self.result + return + if not isinstance(result, BenchmarkResult): + raise TypeError('result must be of type BenchmarkResult') + self._result = result + + + def __init__(self): + self._rows = [] + self._freeze() + + @property + def rows(self) -> Optional[List[Optional['ApiBenchmarkLeaderboard.LeaderboardRow']]]: + return self._rows + + @rows.setter + def rows(self, rows: Optional[List[Optional['ApiBenchmarkLeaderboard.LeaderboardRow']]]): + if rows is None: + del self.rows + return + if not isinstance(rows, list): + raise TypeError('rows must be of type list') + if not 
all([isinstance(t, ApiBenchmarkLeaderboard.LeaderboardRow) for t in rows]): + raise TypeError('rows must contain only items of type ApiBenchmarkLeaderboard.LeaderboardRow') + self._rows = rows + + +class ApiGetBenchmarkLeaderboardRequest(KaggleObject): + r""" + Attributes: + identifier (BenchmarkVersionIdentifier) + """ + + def __init__(self): + self._identifier = None + self._freeze() + + @property + def identifier(self) -> Optional['BenchmarkVersionIdentifier']: + return self._identifier + + @identifier.setter + def identifier(self, identifier: Optional['BenchmarkVersionIdentifier']): + if identifier is None: + del self.identifier + return + if not isinstance(identifier, BenchmarkVersionIdentifier): + raise TypeError('identifier must be of type BenchmarkVersionIdentifier') + self._identifier = identifier + + def endpoint(self): + path = '/api/v1/benchmarks/leaderboard' + return path.format_map(self.to_field_map(self)) + + + @staticmethod + def method(): + return 'POST' + + @staticmethod + def body_fields(): + return '*' + + +ApiBenchmarkLeaderboard.LeaderboardRow._fields = [ + FieldMetadata("modelVersionName", "model_version_name", "_model_version_name", str, "", PredefinedSerializer()), + FieldMetadata("modelVersionSlug", "model_version_slug", "_model_version_slug", str, "", PredefinedSerializer()), + FieldMetadata("taskResults", "task_results", "_task_results", ApiBenchmarkLeaderboard.TaskResult, [], ListSerializer(KaggleObjectSerializer())), +] + +ApiBenchmarkLeaderboard.TaskResult._fields = [ + FieldMetadata("benchmarkTaskName", "benchmark_task_name", "_benchmark_task_name", str, "", PredefinedSerializer()), + FieldMetadata("benchmarkTaskSlug", "benchmark_task_slug", "_benchmark_task_slug", str, "", PredefinedSerializer()), + FieldMetadata("taskVersion", "task_version", "_task_version", int, 0, PredefinedSerializer()), + FieldMetadata("result", "result", "_result", BenchmarkResult, None, KaggleObjectSerializer()), +] + +ApiBenchmarkLeaderboard._fields = [ + FieldMetadata("rows", "rows", "_rows", ApiBenchmarkLeaderboard.LeaderboardRow, [], ListSerializer(KaggleObjectSerializer())), +] + +ApiGetBenchmarkLeaderboardRequest._fields = [ + FieldMetadata("identifier", "identifier", "_identifier", BenchmarkVersionIdentifier, None, KaggleObjectSerializer()), +] + diff --git a/src/ksdk/blobs/__init__.py b/src/ksdk/blobs/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/blobs/services/__init__.py b/src/ksdk/blobs/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/blobs/services/blob_api_service.py b/src/ksdk/blobs/services/blob_api_service.py new file mode 100644 index 00000000..5258ff4b --- /dev/null +++ b/src/ksdk/blobs/services/blob_api_service.py @@ -0,0 +1,25 @@ +from kagglesdk.blobs.types.blob_api_service import ApiStartBlobUploadRequest, ApiStartBlobUploadResponse +from kagglesdk.kaggle_http_client import KaggleHttpClient + +class BlobApiClient(object): + r""" + Binary Large OBject (BLOB) service used for uploading files to Google Cloud + Storage (GCS). + """ + + def __init__(self, client: KaggleHttpClient): + self._client = client + + def start_blob_upload(self, request: ApiStartBlobUploadRequest = None) -> ApiStartBlobUploadResponse: + r""" + Starts a blob upload (i.e. reserves a spot for the upload on GCS). + + Args: + request (ApiStartBlobUploadRequest): + The request object; initialized to empty instance if not specified. 
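An illustrative sketch of how the reservation call might be used (not part of this diff; the file-path handling, MIME type, and the separate HTTP upload of the bytes to create_url are assumptions):

    import os

    from kagglesdk.blobs.services.blob_api_service import BlobApiClient
    from kagglesdk.blobs.types.blob_api_service import ApiBlobType, ApiStartBlobUploadRequest
    from kagglesdk.kaggle_http_client import KaggleHttpClient

    def reserve_upload(http_client: KaggleHttpClient, path: str) -> str:
        request = ApiStartBlobUploadRequest()
        request.type = ApiBlobType.INBOX  # or DATASET / MODEL
        request.name = os.path.basename(path)
        request.content_type = "application/octet-stream"
        request.content_length = os.path.getsize(path)

        response = BlobApiClient(http_client).start_blob_upload(request)
        # The caller still uploads the file bytes to response.create_url itself
        # (e.g. an ordinary HTTP PUT); this service only reserves the slot.
        # The returned token can then be referenced, e.g. as
        # CreateInboxFileRequest.blob_file_token for inbox uploads.
        return response.token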
+ """ + + if request is None: + request = ApiStartBlobUploadRequest() + + return self._client.call("blobs.BlobApiService", "StartBlobUpload", request, ApiStartBlobUploadResponse) diff --git a/src/ksdk/blobs/types/__init__.py b/src/ksdk/blobs/types/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/blobs/types/blob_api_service.py b/src/ksdk/blobs/types/blob_api_service.py new file mode 100644 index 00000000..75acd722 --- /dev/null +++ b/src/ksdk/blobs/types/blob_api_service.py @@ -0,0 +1,177 @@ +import enum +from kagglesdk.kaggle_object import * +from typing import Optional + +class ApiBlobType(enum.Enum): + API_BLOB_TYPE_UNSPECIFIED = 0 + DATASET = 1 + MODEL = 2 + INBOX = 3 + +class ApiStartBlobUploadRequest(KaggleObject): + r""" + Attributes: + type (ApiBlobType) + The type of the blob. + name (str) + Name (e.g. file name) of the blob. + content_type (str) + Content/MIME type (e.g. 'text/plain'). + content_length (int) + Size in bytes of the blob. + last_modified_epoch_seconds (int) + Optional user-reported time when the blob was last updated/modified. + """ + + def __init__(self): + self._type = ApiBlobType.API_BLOB_TYPE_UNSPECIFIED + self._name = "" + self._content_type = None + self._content_length = 0 + self._last_modified_epoch_seconds = None + self._freeze() + + @property + def type(self) -> 'ApiBlobType': + """The type of the blob.""" + return self._type + + @type.setter + def type(self, type: 'ApiBlobType'): + if type is None: + del self.type + return + if not isinstance(type, ApiBlobType): + raise TypeError('type must be of type ApiBlobType') + self._type = type + + @property + def name(self) -> str: + """Name (e.g. file name) of the blob.""" + return self._name + + @name.setter + def name(self, name: str): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + @property + def content_type(self) -> str: + """Content/MIME type (e.g. 
'text/plain').""" + return self._content_type or "" + + @content_type.setter + def content_type(self, content_type: Optional[str]): + if content_type is None: + del self.content_type + return + if not isinstance(content_type, str): + raise TypeError('content_type must be of type str') + self._content_type = content_type + + @property + def content_length(self) -> int: + """Size in bytes of the blob.""" + return self._content_length + + @content_length.setter + def content_length(self, content_length: int): + if content_length is None: + del self.content_length + return + if not isinstance(content_length, int): + raise TypeError('content_length must be of type int') + self._content_length = content_length + + @property + def last_modified_epoch_seconds(self) -> int: + """Optional user-reported time when the blob was last updated/modified.""" + return self._last_modified_epoch_seconds or 0 + + @last_modified_epoch_seconds.setter + def last_modified_epoch_seconds(self, last_modified_epoch_seconds: Optional[int]): + if last_modified_epoch_seconds is None: + del self.last_modified_epoch_seconds + return + if not isinstance(last_modified_epoch_seconds, int): + raise TypeError('last_modified_epoch_seconds must be of type int') + self._last_modified_epoch_seconds = last_modified_epoch_seconds + + def endpoint(self): + path = '/api/v1/blobs/upload' + return path.format_map(self.to_field_map(self)) + + + @staticmethod + def method(): + return 'POST' + + @staticmethod + def body_fields(): + return '*' + + +class ApiStartBlobUploadResponse(KaggleObject): + r""" + Attributes: + token (str) + Opaque string token used to reference the new blob/file. + create_url (str) + URL to use to start the upload. + """ + + def __init__(self): + self._token = "" + self._create_url = "" + self._freeze() + + @property + def token(self) -> str: + """Opaque string token used to reference the new blob/file.""" + return self._token + + @token.setter + def token(self, token: str): + if token is None: + del self.token + return + if not isinstance(token, str): + raise TypeError('token must be of type str') + self._token = token + + @property + def create_url(self) -> str: + """URL to use to start the upload.""" + return self._create_url + + @create_url.setter + def create_url(self, create_url: str): + if create_url is None: + del self.create_url + return + if not isinstance(create_url, str): + raise TypeError('create_url must be of type str') + self._create_url = create_url + + @property + def createUrl(self): + return self.create_url + + +ApiStartBlobUploadRequest._fields = [ + FieldMetadata("type", "type", "_type", ApiBlobType, ApiBlobType.API_BLOB_TYPE_UNSPECIFIED, EnumSerializer()), + FieldMetadata("name", "name", "_name", str, "", PredefinedSerializer()), + FieldMetadata("contentType", "content_type", "_content_type", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("contentLength", "content_length", "_content_length", int, 0, PredefinedSerializer()), + FieldMetadata("lastModifiedEpochSeconds", "last_modified_epoch_seconds", "_last_modified_epoch_seconds", int, None, PredefinedSerializer(), optional=True), +] + +ApiStartBlobUploadResponse._fields = [ + FieldMetadata("token", "token", "_token", str, "", PredefinedSerializer()), + FieldMetadata("createUrl", "create_url", "_create_url", str, "", PredefinedSerializer()), +] + diff --git a/src/ksdk/common/__init__.py b/src/ksdk/common/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/common/services/__init__.py 
b/src/ksdk/common/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/common/services/operations_service.py b/src/ksdk/common/services/operations_service.py new file mode 100644 index 00000000..42cee6d3 --- /dev/null +++ b/src/ksdk/common/services/operations_service.py @@ -0,0 +1,46 @@ +from kagglesdk.common.types.operations import Operation +from kagglesdk.common.types.operations_service import GetOperationRequest +from kagglesdk.kaggle_http_client import KaggleHttpClient + +class OperationsClient(object): + r""" + Manages long-running operations with an API service. + + When an API method normally takes long time to complete, it can be designed + to return [Operation][google.longrunning.Operation] to the client, and the + client can use this interface to receive the real response asynchronously by + polling the operation resource, or pass the operation resource to another API + (such as Pub/Sub API) to receive the response. Any API service that returns + long-running operations should implement the `Operations` interface so + developers can have a consistent client experience. + """ + + def __init__(self, client: KaggleHttpClient): + self._client = client + + def get_operation(self, request: GetOperationRequest = None, name: str = None) -> Operation: + r""" + Gets the latest state of a long-running operation. Clients can use this + method to poll the operation result at intervals as recommended by the API + service. + + Args: + request (GetOperationRequest): + The request object; initialized to empty instance if not specified. + May not be specified if any of the flattened field params are specified. + name (str) + This corresponds to the ``name`` field on the ``request`` instance; + if ``request`` is provided, this should not be set. + """ + + has_flattened_args = any([name]) + if request is not None and has_flattened_args: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + if request is None: + request = GetOperationRequest() + if name is not None: + request.name = name + + return self._client.call("common.OperationsService", "GetOperation", request, Operation) diff --git a/src/ksdk/common/types/__init__.py b/src/ksdk/common/types/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/common/types/file_download.py b/src/ksdk/common/types/file_download.py new file mode 100644 index 00000000..4c9545dd --- /dev/null +++ b/src/ksdk/common/types/file_download.py @@ -0,0 +1,102 @@ +from kagglesdk.kaggle_object import * +from typing import Optional + +class FileDownload(KaggleObject): + r""" + Standard response object representing a file download. 
+ See http://go/kaggle-proto-handler-file-downloads + Some field names/descriptions borrowed from + google3/gdata/rosy/proto/data.proto + + Attributes: + content_type (str) + MIME type of the data + TODO(aip.dev/143): (-- api-linter: core::0143::standardized-codes=disabled + --) + file_name (str) + Original file name + token (str) + A unique fingerprint for the file/media data + content_length (int) + Size of the data, in bytes (if known) + """ + + def __init__(self): + self._content_type = "" + self._file_name = "" + self._token = "" + self._content_length = None + self._freeze() + + @property + def content_type(self) -> str: + r""" + MIME type of the data + TODO(aip.dev/143): (-- api-linter: core::0143::standardized-codes=disabled + --) + """ + return self._content_type + + @content_type.setter + def content_type(self, content_type: str): + if content_type is None: + del self.content_type + return + if not isinstance(content_type, str): + raise TypeError('content_type must be of type str') + self._content_type = content_type + + @property + def file_name(self) -> str: + """Original file name""" + return self._file_name + + @file_name.setter + def file_name(self, file_name: str): + if file_name is None: + del self.file_name + return + if not isinstance(file_name, str): + raise TypeError('file_name must be of type str') + self._file_name = file_name + + @property + def token(self) -> str: + """A unique fingerprint for the file/media data""" + return self._token + + @token.setter + def token(self, token: str): + if token is None: + del self.token + return + if not isinstance(token, str): + raise TypeError('token must be of type str') + self._token = token + + @property + def content_length(self) -> int: + """Size of the data, in bytes (if known)""" + return self._content_length or 0 + + @content_length.setter + def content_length(self, content_length: Optional[int]): + if content_length is None: + del self.content_length + return + if not isinstance(content_length, int): + raise TypeError('content_length must be of type int') + self._content_length = content_length + + @classmethod + def prepare_from(cls, http_response): + return http_response + + +FileDownload._fields = [ + FieldMetadata("contentType", "content_type", "_content_type", str, "", PredefinedSerializer()), + FieldMetadata("fileName", "file_name", "_file_name", str, "", PredefinedSerializer()), + FieldMetadata("token", "token", "_token", str, "", PredefinedSerializer()), + FieldMetadata("contentLength", "content_length", "_content_length", int, None, PredefinedSerializer(), optional=True), +] + diff --git a/src/ksdk/common/types/http_redirect.py b/src/ksdk/common/types/http_redirect.py new file mode 100644 index 00000000..ae0d83bf --- /dev/null +++ b/src/ksdk/common/types/http_redirect.py @@ -0,0 +1,105 @@ +from datetime import timedelta +from kagglesdk.kaggle_object import * +from typing import Optional + +class HttpRedirect(KaggleObject): + r""" + Represents an HTTP redirect (e.g. 301 or 302) response. + Patterned after ASP.NET MVC's RedirectResult. + + Attributes: + url (str) + Destination URL for the redirect. + permanent (bool) + Should it be an HTTP 301 (permanent) redirect or just temporary (HTTP + 302)?. + bypass_encoding (bool) + When `true`, the `url` is already encoded, so bypass `UriHelper.Encode`. + Otherwise, invoke `UriHelper.Encode` on the `url` before returning to the + client. + expiry (timedelta) + Specifies how long the redirected url can be cached. 
+ """ + + def __init__(self): + self._url = "" + self._permanent = False + self._bypass_encoding = None + self._expiry = None + self._freeze() + + @property + def url(self) -> str: + """Destination URL for the redirect.""" + return self._url + + @url.setter + def url(self, url: str): + if url is None: + del self.url + return + if not isinstance(url, str): + raise TypeError('url must be of type str') + self._url = url + + @property + def permanent(self) -> bool: + r""" + Should it be an HTTP 301 (permanent) redirect or just temporary (HTTP + 302)?. + """ + return self._permanent + + @permanent.setter + def permanent(self, permanent: bool): + if permanent is None: + del self.permanent + return + if not isinstance(permanent, bool): + raise TypeError('permanent must be of type bool') + self._permanent = permanent + + @property + def bypass_encoding(self) -> bool: + r""" + When `true`, the `url` is already encoded, so bypass `UriHelper.Encode`. + Otherwise, invoke `UriHelper.Encode` on the `url` before returning to the + client. + """ + return self._bypass_encoding or False + + @bypass_encoding.setter + def bypass_encoding(self, bypass_encoding: Optional[bool]): + if bypass_encoding is None: + del self.bypass_encoding + return + if not isinstance(bypass_encoding, bool): + raise TypeError('bypass_encoding must be of type bool') + self._bypass_encoding = bypass_encoding + + @property + def expiry(self) -> timedelta: + """Specifies how long the redirected url can be cached.""" + return self._expiry + + @expiry.setter + def expiry(self, expiry: timedelta): + if expiry is None: + del self.expiry + return + if not isinstance(expiry, timedelta): + raise TypeError('expiry must be of type timedelta') + self._expiry = expiry + + @classmethod + def prepare_from(cls, http_response): + return http_response + + +HttpRedirect._fields = [ + FieldMetadata("url", "url", "_url", str, "", PredefinedSerializer()), + FieldMetadata("permanent", "permanent", "_permanent", bool, False, PredefinedSerializer()), + FieldMetadata("bypassEncoding", "bypass_encoding", "_bypass_encoding", bool, None, PredefinedSerializer(), optional=True), + FieldMetadata("expiry", "expiry", "_expiry", timedelta, None, TimeDeltaSerializer()), +] + diff --git a/src/ksdk/common/types/operations.py b/src/ksdk/common/types/operations.py new file mode 100644 index 00000000..30e2768f --- /dev/null +++ b/src/ksdk/common/types/operations.py @@ -0,0 +1,194 @@ +from kagglesdk.kaggle_object import * +from typing import Optional + +class Operation(KaggleObject): + r""" + This resource represents a long-running operation that is the result of a + network API call. + + Attributes: + name (str) + The server-assigned name, which is only unique within the same service that + originally returns it. If you use the default HTTP mapping, the + `name` should be a resource name ending with `operations/{unique_id}`. + metadata (object) + Service-specific metadata associated with the operation. It typically + contains progress information and common metadata such as create time. + Some services might not provide such metadata. Any method that returns a + long-running operation should document the metadata type, if any. + done (bool) + If the value is `false`, it means the operation is still in progress. + If `true`, the operation is completed, and either `error` or `response` is + available. + error (Operation.Status) + The error result of the operation in case of failure or cancellation. + response (object) + The normal, successful response of the operation. 
If the original + method returns no data on success, such as `Delete`, the response is + `google.protobuf.Empty`. If the original method is standard + `Get`/`Create`/`Update`, the response should be the resource. For other + methods, the response should have the type `XxxResponse`, where `Xxx` + is the original method name. For example, if the original method name + is `TakeSnapshot()`, the inferred response type is + `TakeSnapshotResponse`. + """ + + class Status(KaggleObject): + r""" + Attributes: + code (int) + The HTTP status code that corresponds to `google.rpc.Status.code`. + message (str) + This corresponds to `google.rpc.Status.message`. + """ + + def __init__(self): + self._code = 0 + self._message = "" + self._freeze() + + @property + def code(self) -> int: + """The HTTP status code that corresponds to `google.rpc.Status.code`.""" + return self._code + + @code.setter + def code(self, code: int): + if code is None: + del self.code + return + if not isinstance(code, int): + raise TypeError('code must be of type int') + self._code = code + + @property + def message(self) -> str: + """This corresponds to `google.rpc.Status.message`.""" + return self._message + + @message.setter + def message(self, message: str): + if message is None: + del self.message + return + if not isinstance(message, str): + raise TypeError('message must be of type str') + self._message = message + + + def __init__(self): + self._name = "" + self._metadata = None + self._done = False + self._error = None + self._response = None + self._freeze() + + @property + def name(self) -> str: + r""" + The server-assigned name, which is only unique within the same service that + originally returns it. If you use the default HTTP mapping, the + `name` should be a resource name ending with `operations/{unique_id}`. + """ + return self._name + + @name.setter + def name(self, name: str): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + @property + def metadata(self) -> object: + r""" + Service-specific metadata associated with the operation. It typically + contains progress information and common metadata such as create time. + Some services might not provide such metadata. Any method that returns a + long-running operation should document the metadata type, if any. + """ + return self._metadata + + @metadata.setter + def metadata(self, metadata: object): + if metadata is None: + del self.metadata + return + if not isinstance(metadata, object): + raise TypeError('metadata must be of type object') + self._metadata = metadata + + @property + def done(self) -> bool: + r""" + If the value is `false`, it means the operation is still in progress. + If `true`, the operation is completed, and either `error` or `response` is + available. 
+ """ + return self._done + + @done.setter + def done(self, done: bool): + if done is None: + del self.done + return + if not isinstance(done, bool): + raise TypeError('done must be of type bool') + self._done = done + + @property + def error(self) -> Optional['Operation.Status']: + """The error result of the operation in case of failure or cancellation.""" + return self._error or None + + @error.setter + def error(self, error: Optional['Operation.Status']): + if error is None: + del self.error + return + if not isinstance(error, Operation.Status): + raise TypeError('error must be of type Operation.Status') + del self.response + self._error = error + + @property + def response(self) -> object: + r""" + The normal, successful response of the operation. If the original + method returns no data on success, such as `Delete`, the response is + `google.protobuf.Empty`. If the original method is standard + `Get`/`Create`/`Update`, the response should be the resource. For other + methods, the response should have the type `XxxResponse`, where `Xxx` + is the original method name. For example, if the original method name + is `TakeSnapshot()`, the inferred response type is + `TakeSnapshotResponse`. + """ + return self._response or None + + @response.setter + def response(self, response: object): + if response is None: + del self.response + return + if not isinstance(response, object): + raise TypeError('response must be of type object') + del self.error + self._response = response + + +Operation.Status._fields = [ + FieldMetadata("code", "code", "_code", int, 0, PredefinedSerializer()), + FieldMetadata("message", "message", "_message", str, "", PredefinedSerializer()), +] + +Operation._fields = [ + FieldMetadata("name", "name", "_name", str, "", PredefinedSerializer()), + FieldMetadata("metadata", "metadata", "_metadata", object, None, PredefinedSerializer()), + FieldMetadata("done", "done", "_done", bool, False, PredefinedSerializer()), + FieldMetadata("error", "error", "_error", Operation.Status, None, KaggleObjectSerializer(), optional=True), + FieldMetadata("response", "response", "_response", object, None, PredefinedSerializer(), optional=True), +] + diff --git a/src/ksdk/common/types/operations_service.py b/src/ksdk/common/types/operations_service.py new file mode 100644 index 00000000..1b64d620 --- /dev/null +++ b/src/ksdk/common/types/operations_service.py @@ -0,0 +1,48 @@ +from kagglesdk.kaggle_object import * + +class GetOperationRequest(KaggleObject): + r""" + The request message for + [Operations.GetOperation][google.longrunning.Operations.GetOperation]. + + Attributes: + name (str) + The name of the operation resource. 
+ """ + + def __init__(self): + self._name = "" + self._freeze() + + @property + def name(self) -> str: + """The name of the operation resource.""" + return self._name + + @name.setter + def name(self, name: str): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + def endpoint(self): + path = '/api/v1/operations/get' + return path.format_map(self.to_field_map(self)) + + + @staticmethod + def method(): + return 'POST' + + @staticmethod + def body_fields(): + return '*' + + +GetOperationRequest._fields = [ + FieldMetadata("name", "name", "_name", str, "", PredefinedSerializer()), +] + diff --git a/src/ksdk/community/__init__.py b/src/ksdk/community/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/community/types/__init__.py b/src/ksdk/community/types/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/community/types/content_enums.py b/src/ksdk/community/types/content_enums.py new file mode 100644 index 00000000..2bb4425c --- /dev/null +++ b/src/ksdk/community/types/content_enums.py @@ -0,0 +1,44 @@ +import enum + +class ContentState(enum.Enum): + """Keep synced with /Kaggle.Sdk/cloud/kaggle/moderation/sor.proto""" + CONTENT_STATE_UNSPECIFIED = 0 + PENDING_CLASSIFICATION = 1 + r""" + Awaiting abuse classification. This exists as a non-visible state prior to + classification. + """ + PUBLISHED = 2 + r""" + Publicly viewable, although access may be restricted outside of its content + state. + """ + TEMPORARILY_QUARANTINED = 3 + r""" + Quarantined by an admin or by the system. This means that the content is + only visible to the user and admins, however users are able to toggle their + content out of this state. + """ + PERMANENTLY_QUARANTINED = 4 + r""" + Quarantined by an admin or by the system, the user cannot toggle their + content's state back to public. + """ + USER_DELETED = 5 + """Deleted by the user.""" + SYSTEM_DELETED = 6 + """Deleted by an admin or by a system account.""" + PENDING_PERMANENT_DELETE = 7 + """Awaiting hard deletion.""" + DRAFT = 8 + r""" + Initial state of entity that has never been previously published. + Unable to return back to Draft state once published. + State flow chart example: http://screen/8vDypV7HPeuHBFK + """ + UNPUBLISHED = 9 + r""" + Intermediate stage that has either been upgraded from the Draft state or + downgraded from the Published state. + """ + diff --git a/src/ksdk/community/types/organization.py b/src/ksdk/community/types/organization.py new file mode 100644 index 00000000..d4b9fa46 --- /dev/null +++ b/src/ksdk/community/types/organization.py @@ -0,0 +1,410 @@ +from datetime import datetime +import enum +from kagglesdk.community.types.content_enums import ContentState +from kagglesdk.kaggle_object import * +from kagglesdk.users.types.user_avatar import UserAvatar +from typing import Optional, List + +class OrganizationCategory(enum.Enum): + ORGANIZATION_CATEGORY_UNSPECIFIED = 0 + STUDY_GROUP = 1 + """Academic non-organization, e.g. student association""" + COMPANY_OR_NON_PROFIT_OR_GOVERNMENT = 2 + """Indicates a company, non-profit, or government organization""" + RESEARCH_LAB = 3 + r""" + Academic / corporate research-focused organization, e.g. 
university or + medical research + """ + +class OrganizationMembershipType(enum.Enum): + ORGANIZATION_MEMBERSHIP_TYPE_UNSPECIFIED = 0 + MEMBER = 1 + """Regular members of an organization""" + OWNER = 2 + """The current owner of an organization""" + CREATOR = 3 + """The original creator of an organization""" + +class Organization(KaggleObject): + r""" + Attributes: + name (str) + Display name for an organization + thumbnail_image_url (str) + URL for a thumbnail image of an organization + subtitle (str) + Subtitle / tagline for an organization + external_url (str) + External website for an organization + id (int) + The organization's ID + slug (str) + Full slug for the organization + featured_members (UserAvatar) + A subset of the organization's members for displaying on an organization + profile + membership_type (OrganizationMembershipType) + Membership type for the current user for this organization + content_state (ContentState) + The Content State for this Organization, if the current user can view it. + member_count (int) + The total count of members in the organization + dataset_count (int) + The total count of datasets for the organization + competition_count (int) + The total count of competitions for the organization + model_count (int) + The total count of models for the organization + invite_code (str) + This organization's invite code, if the current user can view it. + category (OrganizationCategory) + The category this organization belongs to. Historically organizations did + not have this field, so not every organization is guaranteed to have it + now. + owner_user (UserAvatar) + The current owner of the organization + overview (str) + Organization overview + create_time (datetime) + When the organization was created. + allow_model_gating (bool) + benchmark_count (int) + The total count of benchmarks for the organization + """ + + def __init__(self): + self._name = "" + self._thumbnail_image_url = "" + self._subtitle = None + self._external_url = None + self._id = 0 + self._slug = "" + self._featured_members = [] + self._membership_type = None + self._content_state = None + self._member_count = 0 + self._dataset_count = 0 + self._competition_count = 0 + self._model_count = 0 + self._invite_code = None + self._category = None + self._owner_user = None + self._overview = None + self._create_time = None + self._allow_model_gating = None + self._benchmark_count = 0 + self._freeze() + + @property + def name(self) -> str: + """Display name for an organization""" + return self._name + + @name.setter + def name(self, name: str): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + @property + def thumbnail_image_url(self) -> str: + """URL for a thumbnail image of an organization""" + return self._thumbnail_image_url + + @thumbnail_image_url.setter + def thumbnail_image_url(self, thumbnail_image_url: str): + if thumbnail_image_url is None: + del self.thumbnail_image_url + return + if not isinstance(thumbnail_image_url, str): + raise TypeError('thumbnail_image_url must be of type str') + self._thumbnail_image_url = thumbnail_image_url + + @property + def subtitle(self) -> str: + """Subtitle / tagline for an organization""" + return self._subtitle or "" + + @subtitle.setter + def subtitle(self, subtitle: Optional[str]): + if subtitle is None: + del self.subtitle + return + if not isinstance(subtitle, str): + raise TypeError('subtitle must be of type str') + self._subtitle = subtitle + + 
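For readers unfamiliar with these generated accessors, a tiny illustrative snippet of the pattern used throughout this class (hypothetical values; the import path mirrors the kagglesdk layout used elsewhere in this change, and the behaviour is as implied by the setters shown here):

    from kagglesdk.community.types.organization import Organization

    org = Organization()
    org.name = "Example Org"     # plain assignment goes through the generated setter
    print(org.subtitle)          # "" - unset optional fields fall back to their defaults
    org.subtitle = "A tagline"
    org.subtitle = None          # None is routed to `del`, clearing the field again
    try:
        org.member_count = "10"  # wrong type: the generated setter raises TypeError
    except TypeError as err:
        print(err)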
@property + def external_url(self) -> str: + """External website for an organization""" + return self._external_url or "" + + @external_url.setter + def external_url(self, external_url: Optional[str]): + if external_url is None: + del self.external_url + return + if not isinstance(external_url, str): + raise TypeError('external_url must be of type str') + self._external_url = external_url + + @property + def id(self) -> int: + """The organization's ID""" + return self._id + + @id.setter + def id(self, id: int): + if id is None: + del self.id + return + if not isinstance(id, int): + raise TypeError('id must be of type int') + self._id = id + + @property + def slug(self) -> str: + """Full slug for the organization""" + return self._slug + + @slug.setter + def slug(self, slug: str): + if slug is None: + del self.slug + return + if not isinstance(slug, str): + raise TypeError('slug must be of type str') + self._slug = slug + + @property + def featured_members(self) -> Optional[List[Optional['UserAvatar']]]: + r""" + A subset of the organization's members for displaying on an organization + profile + """ + return self._featured_members + + @featured_members.setter + def featured_members(self, featured_members: Optional[List[Optional['UserAvatar']]]): + if featured_members is None: + del self.featured_members + return + if not isinstance(featured_members, list): + raise TypeError('featured_members must be of type list') + if not all([isinstance(t, UserAvatar) for t in featured_members]): + raise TypeError('featured_members must contain only items of type UserAvatar') + self._featured_members = featured_members + + @property + def membership_type(self) -> 'OrganizationMembershipType': + """Membership type for the current user for this organization""" + return self._membership_type or OrganizationMembershipType.ORGANIZATION_MEMBERSHIP_TYPE_UNSPECIFIED + + @membership_type.setter + def membership_type(self, membership_type: Optional['OrganizationMembershipType']): + if membership_type is None: + del self.membership_type + return + if not isinstance(membership_type, OrganizationMembershipType): + raise TypeError('membership_type must be of type OrganizationMembershipType') + self._membership_type = membership_type + + @property + def content_state(self) -> 'ContentState': + """The Content State for this Organization, if the current user can view it.""" + return self._content_state or ContentState.CONTENT_STATE_UNSPECIFIED + + @content_state.setter + def content_state(self, content_state: Optional['ContentState']): + if content_state is None: + del self.content_state + return + if not isinstance(content_state, ContentState): + raise TypeError('content_state must be of type ContentState') + self._content_state = content_state + + @property + def member_count(self) -> int: + """The total count of members in the organization""" + return self._member_count + + @member_count.setter + def member_count(self, member_count: int): + if member_count is None: + del self.member_count + return + if not isinstance(member_count, int): + raise TypeError('member_count must be of type int') + self._member_count = member_count + + @property + def dataset_count(self) -> int: + """The total count of datasets for the organization""" + return self._dataset_count + + @dataset_count.setter + def dataset_count(self, dataset_count: int): + if dataset_count is None: + del self.dataset_count + return + if not isinstance(dataset_count, int): + raise TypeError('dataset_count must be of type int') + self._dataset_count = 
dataset_count + + @property + def competition_count(self) -> int: + """The total count of competitions for the organization""" + return self._competition_count + + @competition_count.setter + def competition_count(self, competition_count: int): + if competition_count is None: + del self.competition_count + return + if not isinstance(competition_count, int): + raise TypeError('competition_count must be of type int') + self._competition_count = competition_count + + @property + def model_count(self) -> int: + """The total count of models for the organization""" + return self._model_count + + @model_count.setter + def model_count(self, model_count: int): + if model_count is None: + del self.model_count + return + if not isinstance(model_count, int): + raise TypeError('model_count must be of type int') + self._model_count = model_count + + @property + def invite_code(self) -> str: + """This organization's invite code, if the current user can view it.""" + return self._invite_code or "" + + @invite_code.setter + def invite_code(self, invite_code: Optional[str]): + if invite_code is None: + del self.invite_code + return + if not isinstance(invite_code, str): + raise TypeError('invite_code must be of type str') + self._invite_code = invite_code + + @property + def category(self) -> 'OrganizationCategory': + r""" + The category this organization belongs to. Historically organizations did + not have this field, so not every organization is guaranteed to have it + now. + """ + return self._category or OrganizationCategory.ORGANIZATION_CATEGORY_UNSPECIFIED + + @category.setter + def category(self, category: Optional['OrganizationCategory']): + if category is None: + del self.category + return + if not isinstance(category, OrganizationCategory): + raise TypeError('category must be of type OrganizationCategory') + self._category = category + + @property + def owner_user(self) -> Optional['UserAvatar']: + """The current owner of the organization""" + return self._owner_user + + @owner_user.setter + def owner_user(self, owner_user: Optional['UserAvatar']): + if owner_user is None: + del self.owner_user + return + if not isinstance(owner_user, UserAvatar): + raise TypeError('owner_user must be of type UserAvatar') + self._owner_user = owner_user + + @property + def overview(self) -> str: + """Organization overview""" + return self._overview or "" + + @overview.setter + def overview(self, overview: Optional[str]): + if overview is None: + del self.overview + return + if not isinstance(overview, str): + raise TypeError('overview must be of type str') + self._overview = overview + + @property + def create_time(self) -> datetime: + """When the organization was created.""" + return self._create_time + + @create_time.setter + def create_time(self, create_time: datetime): + if create_time is None: + del self.create_time + return + if not isinstance(create_time, datetime): + raise TypeError('create_time must be of type datetime') + self._create_time = create_time + + @property + def allow_model_gating(self) -> bool: + return self._allow_model_gating or False + + @allow_model_gating.setter + def allow_model_gating(self, allow_model_gating: Optional[bool]): + if allow_model_gating is None: + del self.allow_model_gating + return + if not isinstance(allow_model_gating, bool): + raise TypeError('allow_model_gating must be of type bool') + self._allow_model_gating = allow_model_gating + + @property + def benchmark_count(self) -> int: + """The total count of benchmarks for the organization""" + return 
self._benchmark_count + + @benchmark_count.setter + def benchmark_count(self, benchmark_count: int): + if benchmark_count is None: + del self.benchmark_count + return + if not isinstance(benchmark_count, int): + raise TypeError('benchmark_count must be of type int') + self._benchmark_count = benchmark_count + + +Organization._fields = [ + FieldMetadata("name", "name", "_name", str, "", PredefinedSerializer()), + FieldMetadata("thumbnailImageUrl", "thumbnail_image_url", "_thumbnail_image_url", str, "", PredefinedSerializer()), + FieldMetadata("subtitle", "subtitle", "_subtitle", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("externalUrl", "external_url", "_external_url", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), + FieldMetadata("slug", "slug", "_slug", str, "", PredefinedSerializer()), + FieldMetadata("featuredMembers", "featured_members", "_featured_members", UserAvatar, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("membershipType", "membership_type", "_membership_type", OrganizationMembershipType, None, EnumSerializer(), optional=True), + FieldMetadata("contentState", "content_state", "_content_state", ContentState, None, EnumSerializer(), optional=True), + FieldMetadata("memberCount", "member_count", "_member_count", int, 0, PredefinedSerializer()), + FieldMetadata("datasetCount", "dataset_count", "_dataset_count", int, 0, PredefinedSerializer()), + FieldMetadata("competitionCount", "competition_count", "_competition_count", int, 0, PredefinedSerializer()), + FieldMetadata("modelCount", "model_count", "_model_count", int, 0, PredefinedSerializer()), + FieldMetadata("inviteCode", "invite_code", "_invite_code", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("category", "category", "_category", OrganizationCategory, None, EnumSerializer(), optional=True), + FieldMetadata("ownerUser", "owner_user", "_owner_user", UserAvatar, None, KaggleObjectSerializer()), + FieldMetadata("overview", "overview", "_overview", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("createTime", "create_time", "_create_time", datetime, None, DateTimeSerializer()), + FieldMetadata("allowModelGating", "allow_model_gating", "_allow_model_gating", bool, None, PredefinedSerializer(), optional=True), + FieldMetadata("benchmarkCount", "benchmark_count", "_benchmark_count", int, 0, PredefinedSerializer()), +] + diff --git a/src/ksdk/competitions/__init__.py b/src/ksdk/competitions/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/competitions/services/__init__.py b/src/ksdk/competitions/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/competitions/services/competition_api_service.py b/src/ksdk/competitions/services/competition_api_service.py new file mode 100644 index 00000000..5e455ca1 --- /dev/null +++ b/src/ksdk/competitions/services/competition_api_service.py @@ -0,0 +1,178 @@ +from kagglesdk.common.types.file_download import FileDownload +from kagglesdk.common.types.http_redirect import HttpRedirect +from kagglesdk.competitions.types.competition_api_service import ApiCompetition, ApiCreateCodeSubmissionRequest, ApiCreateCodeSubmissionResponse, ApiCreateSubmissionRequest, ApiCreateSubmissionResponse, ApiDownloadDataFileRequest, ApiDownloadDataFilesRequest, ApiDownloadLeaderboardRequest, ApiGetCompetitionDataFilesSummaryRequest, ApiGetCompetitionRequest, ApiGetLeaderboardRequest, 
ApiGetLeaderboardResponse, ApiGetSubmissionRequest, ApiListCompetitionsRequest, ApiListCompetitionsResponse, ApiListDataFilesRequest, ApiListDataFilesResponse, ApiListDataTreeFilesRequest, ApiListSubmissionsRequest, ApiListSubmissionsResponse, ApiStartSubmissionUploadRequest, ApiStartSubmissionUploadResponse, ApiSubmission +from kagglesdk.datasets.databundles.types.databundle_api_types import ApiDirectoryContent, ApiFilesSummary +from kagglesdk.kaggle_http_client import KaggleHttpClient + +class CompetitionApiClient(object): + + def __init__(self, client: KaggleHttpClient): + self._client = client + + def list_competitions(self, request: ApiListCompetitionsRequest = None) -> ApiListCompetitionsResponse: + r""" + Args: + request (ApiListCompetitionsRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiListCompetitionsRequest() + + return self._client.call("competitions.CompetitionApiService", "ListCompetitions", request, ApiListCompetitionsResponse) + + def list_submissions(self, request: ApiListSubmissionsRequest = None) -> ApiListSubmissionsResponse: + r""" + Args: + request (ApiListSubmissionsRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiListSubmissionsRequest() + + return self._client.call("competitions.CompetitionApiService", "ListSubmissions", request, ApiListSubmissionsResponse) + + def list_data_files(self, request: ApiListDataFilesRequest = None) -> ApiListDataFilesResponse: + r""" + Args: + request (ApiListDataFilesRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiListDataFilesRequest() + + return self._client.call("competitions.CompetitionApiService", "ListDataFiles", request, ApiListDataFilesResponse) + + def list_data_tree_files(self, request: ApiListDataTreeFilesRequest = None) -> ApiDirectoryContent: + r""" + Args: + request (ApiListDataTreeFilesRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiListDataTreeFilesRequest() + + return self._client.call("competitions.CompetitionApiService", "ListDataTreeFiles", request, ApiDirectoryContent) + + def get_leaderboard(self, request: ApiGetLeaderboardRequest = None) -> ApiGetLeaderboardResponse: + r""" + Args: + request (ApiGetLeaderboardRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiGetLeaderboardRequest() + + return self._client.call("competitions.CompetitionApiService", "GetLeaderboard", request, ApiGetLeaderboardResponse) + + def download_leaderboard(self, request: ApiDownloadLeaderboardRequest = None) -> FileDownload: + r""" + Args: + request (ApiDownloadLeaderboardRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiDownloadLeaderboardRequest() + + return self._client.call("competitions.CompetitionApiService", "DownloadLeaderboard", request, FileDownload) + + def create_submission(self, request: ApiCreateSubmissionRequest = None) -> ApiCreateSubmissionResponse: + r""" + Args: + request (ApiCreateSubmissionRequest): + The request object; initialized to empty instance if not specified. 
+ """ + + if request is None: + request = ApiCreateSubmissionRequest() + + return self._client.call("competitions.CompetitionApiService", "CreateSubmission", request, ApiCreateSubmissionResponse) + + def create_code_submission(self, request: ApiCreateCodeSubmissionRequest = None) -> ApiCreateCodeSubmissionResponse: + r""" + Args: + request (ApiCreateCodeSubmissionRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiCreateCodeSubmissionRequest() + + return self._client.call("competitions.CompetitionApiService", "CreateCodeSubmission", request, ApiCreateCodeSubmissionResponse) + + def get_submission(self, request: ApiGetSubmissionRequest = None) -> ApiSubmission: + r""" + Args: + request (ApiGetSubmissionRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiGetSubmissionRequest() + + return self._client.call("competitions.CompetitionApiService", "GetSubmission", request, ApiSubmission) + + def start_submission_upload(self, request: ApiStartSubmissionUploadRequest = None) -> ApiStartSubmissionUploadResponse: + r""" + Args: + request (ApiStartSubmissionUploadRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiStartSubmissionUploadRequest() + + return self._client.call("competitions.CompetitionApiService", "StartSubmissionUpload", request, ApiStartSubmissionUploadResponse) + + def download_data_files(self, request: ApiDownloadDataFilesRequest = None) -> HttpRedirect: + r""" + Args: + request (ApiDownloadDataFilesRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiDownloadDataFilesRequest() + + return self._client.call("competitions.CompetitionApiService", "DownloadDataFiles", request, HttpRedirect) + + def download_data_file(self, request: ApiDownloadDataFileRequest = None) -> HttpRedirect: + r""" + Args: + request (ApiDownloadDataFileRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiDownloadDataFileRequest() + + return self._client.call("competitions.CompetitionApiService", "DownloadDataFile", request, HttpRedirect) + + def get_competition(self, request: ApiGetCompetitionRequest = None) -> ApiCompetition: + r""" + Args: + request (ApiGetCompetitionRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiGetCompetitionRequest() + + return self._client.call("competitions.CompetitionApiService", "GetCompetition", request, ApiCompetition) + + def get_competition_data_files_summary(self, request: ApiGetCompetitionDataFilesSummaryRequest = None) -> ApiFilesSummary: + r""" + Args: + request (ApiGetCompetitionDataFilesSummaryRequest): + The request object; initialized to empty instance if not specified. 
+ """ + + if request is None: + request = ApiGetCompetitionDataFilesSummaryRequest() + + return self._client.call("competitions.CompetitionApiService", "GetCompetitionDataFilesSummary", request, ApiFilesSummary) diff --git a/src/ksdk/competitions/types/__init__.py b/src/ksdk/competitions/types/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/competitions/types/competition.py b/src/ksdk/competitions/types/competition.py new file mode 100644 index 00000000..d75e455f --- /dev/null +++ b/src/ksdk/competitions/types/competition.py @@ -0,0 +1,14 @@ +import enum + +class RewardTypeId(enum.Enum): + REWARD_TYPE_ID_UNSPECIFIED = 0 + USD = 1 + KUDOS = 2 + AUD = 3 + EUR = 4 + JOBS = 5 + SWAG = 6 + GBP = 7 + KNOWLEDGE = 8 + PRIZES = 9 + diff --git a/src/ksdk/competitions/types/competition_api_service.py b/src/ksdk/competitions/types/competition_api_service.py new file mode 100644 index 00000000..5e0432e4 --- /dev/null +++ b/src/ksdk/competitions/types/competition_api_service.py @@ -0,0 +1,2393 @@ +from datetime import datetime +from kagglesdk.competitions.types.competition_enums import CompetitionListTab, CompetitionSortBy, HostSegment, SubmissionGroup, SubmissionSortBy +from kagglesdk.competitions.types.submission_status import SubmissionStatus +from kagglesdk.kaggle_object import * +from typing import Optional, List + +class ApiCompetition(KaggleObject): + r""" + Attributes: + id (int) + ref (str) + title (str) + url (str) + description (str) + organization_name (str) + organization_ref (str) + category (str) + reward (str) + tags (ApiCategory) + deadline (datetime) + kernel_count (int) + team_count (int) + user_has_entered (bool) + user_rank (int) + merger_deadline (datetime) + new_entrant_deadline (datetime) + enabled_date (datetime) + max_daily_submissions (int) + max_team_size (int) + evaluation_metric (str) + awards_points (bool) + is_kernels_submissions_only (bool) + submissions_disabled (bool) + thumbnail_image_url (str) + host_name (str) + """ + + def __init__(self): + self._id = 0 + self._ref = "" + self._title = None + self._url = None + self._description = None + self._organization_name = None + self._organization_ref = None + self._category = None + self._reward = None + self._tags = [] + self._deadline = None + self._kernel_count = 0 + self._team_count = 0 + self._user_has_entered = False + self._user_rank = None + self._merger_deadline = None + self._new_entrant_deadline = None + self._enabled_date = None + self._max_daily_submissions = 0 + self._max_team_size = None + self._evaluation_metric = None + self._awards_points = False + self._is_kernels_submissions_only = False + self._submissions_disabled = False + self._thumbnail_image_url = None + self._host_name = "" + self._freeze() + + @property + def id(self) -> int: + return self._id + + @id.setter + def id(self, id: int): + if id is None: + del self.id + return + if not isinstance(id, int): + raise TypeError('id must be of type int') + self._id = id + + @property + def ref(self) -> str: + return self._ref + + @ref.setter + def ref(self, ref: str): + if ref is None: + del self.ref + return + if not isinstance(ref, str): + raise TypeError('ref must be of type str') + self._ref = ref + + @property + def title(self) -> str: + return self._title or "" + + @title.setter + def title(self, title: Optional[str]): + if title is None: + del self.title + return + if not isinstance(title, str): + raise TypeError('title must be of type str') + self._title = title + + @property + def url(self) -> str: + return 
self._url or "" + + @url.setter + def url(self, url: Optional[str]): + if url is None: + del self.url + return + if not isinstance(url, str): + raise TypeError('url must be of type str') + self._url = url + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: Optional[str]): + if description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + @property + def organization_name(self) -> str: + return self._organization_name or "" + + @organization_name.setter + def organization_name(self, organization_name: Optional[str]): + if organization_name is None: + del self.organization_name + return + if not isinstance(organization_name, str): + raise TypeError('organization_name must be of type str') + self._organization_name = organization_name + + @property + def organization_ref(self) -> str: + return self._organization_ref or "" + + @organization_ref.setter + def organization_ref(self, organization_ref: Optional[str]): + if organization_ref is None: + del self.organization_ref + return + if not isinstance(organization_ref, str): + raise TypeError('organization_ref must be of type str') + self._organization_ref = organization_ref + + @property + def category(self) -> str: + return self._category or "" + + @category.setter + def category(self, category: Optional[str]): + if category is None: + del self.category + return + if not isinstance(category, str): + raise TypeError('category must be of type str') + self._category = category + + @property + def reward(self) -> str: + return self._reward or "" + + @reward.setter + def reward(self, reward: Optional[str]): + if reward is None: + del self.reward + return + if not isinstance(reward, str): + raise TypeError('reward must be of type str') + self._reward = reward + + @property + def tags(self) -> Optional[List[Optional['ApiCategory']]]: + return self._tags + + @tags.setter + def tags(self, tags: Optional[List[Optional['ApiCategory']]]): + if tags is None: + del self.tags + return + if not isinstance(tags, list): + raise TypeError('tags must be of type list') + if not all([isinstance(t, ApiCategory) for t in tags]): + raise TypeError('tags must contain only items of type ApiCategory') + self._tags = tags + + @property + def deadline(self) -> datetime: + return self._deadline + + @deadline.setter + def deadline(self, deadline: datetime): + if deadline is None: + del self.deadline + return + if not isinstance(deadline, datetime): + raise TypeError('deadline must be of type datetime') + self._deadline = deadline + + @property + def kernel_count(self) -> int: + return self._kernel_count + + @kernel_count.setter + def kernel_count(self, kernel_count: int): + if kernel_count is None: + del self.kernel_count + return + if not isinstance(kernel_count, int): + raise TypeError('kernel_count must be of type int') + self._kernel_count = kernel_count + + @property + def team_count(self) -> int: + return self._team_count + + @team_count.setter + def team_count(self, team_count: int): + if team_count is None: + del self.team_count + return + if not isinstance(team_count, int): + raise TypeError('team_count must be of type int') + self._team_count = team_count + + @property + def user_has_entered(self) -> bool: + return self._user_has_entered + + @user_has_entered.setter + def user_has_entered(self, user_has_entered: bool): + if user_has_entered is None: + del 
self.user_has_entered + return + if not isinstance(user_has_entered, bool): + raise TypeError('user_has_entered must be of type bool') + self._user_has_entered = user_has_entered + + @property + def user_rank(self) -> int: + return self._user_rank or 0 + + @user_rank.setter + def user_rank(self, user_rank: Optional[int]): + if user_rank is None: + del self.user_rank + return + if not isinstance(user_rank, int): + raise TypeError('user_rank must be of type int') + self._user_rank = user_rank + + @property + def merger_deadline(self) -> datetime: + return self._merger_deadline + + @merger_deadline.setter + def merger_deadline(self, merger_deadline: datetime): + if merger_deadline is None: + del self.merger_deadline + return + if not isinstance(merger_deadline, datetime): + raise TypeError('merger_deadline must be of type datetime') + self._merger_deadline = merger_deadline + + @property + def new_entrant_deadline(self) -> datetime: + return self._new_entrant_deadline + + @new_entrant_deadline.setter + def new_entrant_deadline(self, new_entrant_deadline: datetime): + if new_entrant_deadline is None: + del self.new_entrant_deadline + return + if not isinstance(new_entrant_deadline, datetime): + raise TypeError('new_entrant_deadline must be of type datetime') + self._new_entrant_deadline = new_entrant_deadline + + @property + def enabled_date(self) -> datetime: + return self._enabled_date + + @enabled_date.setter + def enabled_date(self, enabled_date: datetime): + if enabled_date is None: + del self.enabled_date + return + if not isinstance(enabled_date, datetime): + raise TypeError('enabled_date must be of type datetime') + self._enabled_date = enabled_date + + @property + def max_daily_submissions(self) -> int: + return self._max_daily_submissions + + @max_daily_submissions.setter + def max_daily_submissions(self, max_daily_submissions: int): + if max_daily_submissions is None: + del self.max_daily_submissions + return + if not isinstance(max_daily_submissions, int): + raise TypeError('max_daily_submissions must be of type int') + self._max_daily_submissions = max_daily_submissions + + @property + def max_team_size(self) -> int: + return self._max_team_size or 0 + + @max_team_size.setter + def max_team_size(self, max_team_size: Optional[int]): + if max_team_size is None: + del self.max_team_size + return + if not isinstance(max_team_size, int): + raise TypeError('max_team_size must be of type int') + self._max_team_size = max_team_size + + @property + def evaluation_metric(self) -> str: + return self._evaluation_metric or "" + + @evaluation_metric.setter + def evaluation_metric(self, evaluation_metric: Optional[str]): + if evaluation_metric is None: + del self.evaluation_metric + return + if not isinstance(evaluation_metric, str): + raise TypeError('evaluation_metric must be of type str') + self._evaluation_metric = evaluation_metric + + @property + def awards_points(self) -> bool: + return self._awards_points + + @awards_points.setter + def awards_points(self, awards_points: bool): + if awards_points is None: + del self.awards_points + return + if not isinstance(awards_points, bool): + raise TypeError('awards_points must be of type bool') + self._awards_points = awards_points + + @property + def is_kernels_submissions_only(self) -> bool: + return self._is_kernels_submissions_only + + @is_kernels_submissions_only.setter + def is_kernels_submissions_only(self, is_kernels_submissions_only: bool): + if is_kernels_submissions_only is None: + del self.is_kernels_submissions_only + return + if not 
isinstance(is_kernels_submissions_only, bool): + raise TypeError('is_kernels_submissions_only must be of type bool') + self._is_kernels_submissions_only = is_kernels_submissions_only + + @property + def submissions_disabled(self) -> bool: + return self._submissions_disabled + + @submissions_disabled.setter + def submissions_disabled(self, submissions_disabled: bool): + if submissions_disabled is None: + del self.submissions_disabled + return + if not isinstance(submissions_disabled, bool): + raise TypeError('submissions_disabled must be of type bool') + self._submissions_disabled = submissions_disabled + + @property + def thumbnail_image_url(self) -> str: + return self._thumbnail_image_url or "" + + @thumbnail_image_url.setter + def thumbnail_image_url(self, thumbnail_image_url: Optional[str]): + if thumbnail_image_url is None: + del self.thumbnail_image_url + return + if not isinstance(thumbnail_image_url, str): + raise TypeError('thumbnail_image_url must be of type str') + self._thumbnail_image_url = thumbnail_image_url + + @property + def host_name(self) -> str: + return self._host_name + + @host_name.setter + def host_name(self, host_name: str): + if host_name is None: + del self.host_name + return + if not isinstance(host_name, str): + raise TypeError('host_name must be of type str') + self._host_name = host_name + + +class ApiCreateCodeSubmissionRequest(KaggleObject): + r""" + Attributes: + competition_name (str) + kernel_owner (str) + kernel_slug (str) + kernel_version (int) + file_name (str) + submission_description (str) + """ + + def __init__(self): + self._competition_name = "" + self._kernel_owner = "" + self._kernel_slug = "" + self._kernel_version = None + self._file_name = None + self._submission_description = None + self._freeze() + + @property + def competition_name(self) -> str: + return self._competition_name + + @competition_name.setter + def competition_name(self, competition_name: str): + if competition_name is None: + del self.competition_name + return + if not isinstance(competition_name, str): + raise TypeError('competition_name must be of type str') + self._competition_name = competition_name + + @property + def kernel_owner(self) -> str: + return self._kernel_owner + + @kernel_owner.setter + def kernel_owner(self, kernel_owner: str): + if kernel_owner is None: + del self.kernel_owner + return + if not isinstance(kernel_owner, str): + raise TypeError('kernel_owner must be of type str') + self._kernel_owner = kernel_owner + + @property + def kernel_slug(self) -> str: + return self._kernel_slug + + @kernel_slug.setter + def kernel_slug(self, kernel_slug: str): + if kernel_slug is None: + del self.kernel_slug + return + if not isinstance(kernel_slug, str): + raise TypeError('kernel_slug must be of type str') + self._kernel_slug = kernel_slug + + @property + def kernel_version(self) -> int: + return self._kernel_version or 0 + + @kernel_version.setter + def kernel_version(self, kernel_version: Optional[int]): + if kernel_version is None: + del self.kernel_version + return + if not isinstance(kernel_version, int): + raise TypeError('kernel_version must be of type int') + self._kernel_version = kernel_version + + @property + def file_name(self) -> str: + return self._file_name or "" + + @file_name.setter + def file_name(self, file_name: Optional[str]): + if file_name is None: + del self.file_name + return + if not isinstance(file_name, str): + raise TypeError('file_name must be of type str') + self._file_name = file_name + + @property + def submission_description(self) 
-> str: + return self._submission_description or "" + + @submission_description.setter + def submission_description(self, submission_description: Optional[str]): + if submission_description is None: + del self.submission_description + return + if not isinstance(submission_description, str): + raise TypeError('submission_description must be of type str') + self._submission_description = submission_description + + def endpoint(self): + path = '/api/v1/competitions/submissions/submit-notebook/{competition_name}' + return path.format_map(self.to_field_map(self)) + + + @staticmethod + def method(): + return 'POST' + + +class ApiCreateCodeSubmissionResponse(KaggleObject): + r""" + Attributes: + message (str) + ref (int) + """ + + def __init__(self): + self._message = "" + self._ref = 0 + self._freeze() + + @property + def message(self) -> str: + return self._message + + @message.setter + def message(self, message: str): + if message is None: + del self.message + return + if not isinstance(message, str): + raise TypeError('message must be of type str') + self._message = message + + @property + def ref(self) -> int: + return self._ref + + @ref.setter + def ref(self, ref: int): + if ref is None: + del self.ref + return + if not isinstance(ref, int): + raise TypeError('ref must be of type int') + self._ref = ref + + +class ApiCreateSubmissionRequest(KaggleObject): + r""" + Attributes: + competition_name (str) + Competition name. Example: 'titanic'. + blob_file_tokens (str) + Token identifying location of uploaded submission file. + submission_description (str) + Description of competition submission. + """ + + def __init__(self): + self._competition_name = "" + self._blob_file_tokens = "" + self._submission_description = None + self._freeze() + + @property + def competition_name(self) -> str: + """Competition name. 
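Usage sketch for the code-submission request defined above, reusing the `api` wrapper from the earlier sketch; the owner, slug, and version values are placeholders:

code_req = ApiCreateCodeSubmissionRequest()
code_req.competition_name = "titanic"
code_req.kernel_owner = "some-user"          # placeholder notebook owner
code_req.kernel_slug = "titanic-baseline"    # placeholder notebook slug
code_req.kernel_version = 3                  # optional; leaving it unset is assumed to mean the latest version
code_req.submission_description = "notebook submission"
code_resp = api.create_code_submission(code_req)
print(code_resp.message, code_resp.ref)
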
Example: 'titanic'.""" + return self._competition_name + + @competition_name.setter + def competition_name(self, competition_name: str): + if competition_name is None: + del self.competition_name + return + if not isinstance(competition_name, str): + raise TypeError('competition_name must be of type str') + self._competition_name = competition_name + + @property + def blob_file_tokens(self) -> str: + """Token identifying location of uploaded submission file.""" + return self._blob_file_tokens + + @blob_file_tokens.setter + def blob_file_tokens(self, blob_file_tokens: str): + if blob_file_tokens is None: + del self.blob_file_tokens + return + if not isinstance(blob_file_tokens, str): + raise TypeError('blob_file_tokens must be of type str') + self._blob_file_tokens = blob_file_tokens + + @property + def submission_description(self) -> str: + """Description of competition submission.""" + return self._submission_description or "" + + @submission_description.setter + def submission_description(self, submission_description: Optional[str]): + if submission_description is None: + del self.submission_description + return + if not isinstance(submission_description, str): + raise TypeError('submission_description must be of type str') + self._submission_description = submission_description + + def endpoint(self): + path = '/api/v1/competitions/submissions/submit/{competition_name}' + return path.format_map(self.to_field_map(self)) + + + @staticmethod + def method(): + return 'POST' + + +class ApiCreateSubmissionResponse(KaggleObject): + r""" + Attributes: + message (str) + ref (int) + """ + + def __init__(self): + self._message = "" + self._ref = 0 + self._freeze() + + @property + def message(self) -> str: + return self._message + + @message.setter + def message(self, message: str): + if message is None: + del self.message + return + if not isinstance(message, str): + raise TypeError('message must be of type str') + self._message = message + + @property + def ref(self) -> int: + return self._ref + + @ref.setter + def ref(self, ref: int): + if ref is None: + del self.ref + return + if not isinstance(ref, int): + raise TypeError('ref must be of type int') + self._ref = ref + + +class ApiDownloadDataFileRequest(KaggleObject): + r""" + Attributes: + competition_name (str) + Competition name. Example: 'titanic'. + file_name (str) + Name of the file to download. Example: 'train/foo/bar.png'. + """ + + def __init__(self): + self._competition_name = "" + self._file_name = "" + self._freeze() + + @property + def competition_name(self) -> str: + """Competition name. Example: 'titanic'.""" + return self._competition_name + + @competition_name.setter + def competition_name(self, competition_name: str): + if competition_name is None: + del self.competition_name + return + if not isinstance(competition_name, str): + raise TypeError('competition_name must be of type str') + self._competition_name = competition_name + + @property + def file_name(self) -> str: + """Name of the file to download. 
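Usage sketch of the two-step file submission flow, assuming the file bytes are uploaded to `create_url` out of band (that HTTP step is not part of this diff) and that the returned token is what blob_file_tokens expects; ApiStartSubmissionUploadRequest and ApiStartSubmissionUploadResponse are defined further down in this file:

import os

upload_req = ApiStartSubmissionUploadRequest()
upload_req.competition_name = "titanic"
upload_req.file_name = "submission.csv"
upload_req.content_length = os.path.getsize("submission.csv")
upload_req.last_modified_epoch_seconds = int(os.path.getmtime("submission.csv"))
upload = api.start_submission_upload(upload_req)

# ... upload the file contents to upload.create_url here ...

submit_req = ApiCreateSubmissionRequest()
submit_req.competition_name = "titanic"
submit_req.blob_file_tokens = upload.token          # assumption: the upload token is what CreateSubmission expects
submit_req.submission_description = "baseline model"
result = api.create_submission(submit_req)
print(result.message)
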
Example: 'train/foo/bar.png'.""" + return self._file_name + + @file_name.setter + def file_name(self, file_name: str): + if file_name is None: + del self.file_name + return + if not isinstance(file_name, str): + raise TypeError('file_name must be of type str') + self._file_name = file_name + + def endpoint(self): + path = '/api/v1/competitions/data/download/{competition_name}/{file_name}' + return path.format_map(self.to_field_map(self)) + + @staticmethod + def endpoint_path(): + return '/api/v1/competitions/data/download/{competition_name}/{file_name}' + + +class ApiDownloadDataFilesRequest(KaggleObject): + r""" + Attributes: + competition_name (str) + Competition name. Example: 'titanic'. + """ + + def __init__(self): + self._competition_name = "" + self._freeze() + + @property + def competition_name(self) -> str: + """Competition name. Example: 'titanic'.""" + return self._competition_name + + @competition_name.setter + def competition_name(self, competition_name: str): + if competition_name is None: + del self.competition_name + return + if not isinstance(competition_name, str): + raise TypeError('competition_name must be of type str') + self._competition_name = competition_name + + def endpoint(self): + path = '/api/v1/competitions/data/download-all/{competition_name}' + return path.format_map(self.to_field_map(self)) + + @staticmethod + def endpoint_path(): + return '/api/v1/competitions/data/download-all/{competition_name}' + + +class ApiDownloadLeaderboardRequest(KaggleObject): + r""" + Attributes: + competition_name (str) + """ + + def __init__(self): + self._competition_name = "" + self._freeze() + + @property + def competition_name(self) -> str: + return self._competition_name + + @competition_name.setter + def competition_name(self, competition_name: str): + if competition_name is None: + del self.competition_name + return + if not isinstance(competition_name, str): + raise TypeError('competition_name must be of type str') + self._competition_name = competition_name + + def endpoint(self): + path = '/api/v1/competitions/{competition_name}/leaderboard/download' + return path.format_map(self.to_field_map(self)) + + @staticmethod + def endpoint_path(): + return '/api/v1/competitions/{competition_name}/leaderboard/download' + + +class ApiGetCompetitionDataFilesSummaryRequest(KaggleObject): + r""" + Attributes: + competition_name (str) + """ + + def __init__(self): + self._competition_name = "" + self._freeze() + + @property + def competition_name(self) -> str: + return self._competition_name + + @competition_name.setter + def competition_name(self, competition_name: str): + if competition_name is None: + del self.competition_name + return + if not isinstance(competition_name, str): + raise TypeError('competition_name must be of type str') + self._competition_name = competition_name + + def endpoint(self): + path = '/api/v1/competitions/data/summary/{competition_name}' + return path.format_map(self.to_field_map(self)) + + @staticmethod + def endpoint_path(): + return '/api/v1/competitions/data/summary/{competition_name}' + + +class ApiGetCompetitionRequest(KaggleObject): + r""" + Attributes: + competition_name (str) + """ + + def __init__(self): + self._competition_name = "" + self._freeze() + + @property + def competition_name(self) -> str: + return self._competition_name + + @competition_name.setter + def competition_name(self, competition_name: str): + if competition_name is None: + del self.competition_name + return + if not isinstance(competition_name, str): + raise 
TypeError('competition_name must be of type str') + self._competition_name = competition_name + + def endpoint(self): + path = '/api/v1/competitions/get/{competition_name}' + return path.format_map(self.to_field_map(self)) + + @staticmethod + def endpoint_path(): + return '/api/v1/competitions/get/{competition_name}' + + +class ApiGetLeaderboardRequest(KaggleObject): + r""" + Attributes: + competition_name (str) + Competition name. Example: 'titanic'. + override_public (bool) + By default we return the private leaderboard if it's available, otherwise + the public LB. This flag lets you override to get public even if private + is available. + page_size (int) + page_token (str) + """ + + def __init__(self): + self._competition_name = "" + self._override_public = None + self._page_size = None + self._page_token = None + self._freeze() + + @property + def competition_name(self) -> str: + """Competition name. Example: 'titanic'.""" + return self._competition_name + + @competition_name.setter + def competition_name(self, competition_name: str): + if competition_name is None: + del self.competition_name + return + if not isinstance(competition_name, str): + raise TypeError('competition_name must be of type str') + self._competition_name = competition_name + + @property + def override_public(self) -> bool: + r""" + By default we return the private leaderboard if it's available, otherwise + the public LB. This flag lets you override to get public even if private + is available. + """ + return self._override_public or False + + @override_public.setter + def override_public(self, override_public: Optional[bool]): + if override_public is None: + del self.override_public + return + if not isinstance(override_public, bool): + raise TypeError('override_public must be of type bool') + self._override_public = override_public + + @property + def page_size(self) -> int: + return self._page_size or 0 + + @page_size.setter + def page_size(self, page_size: Optional[int]): + if page_size is None: + del self.page_size + return + if not isinstance(page_size, int): + raise TypeError('page_size must be of type int') + self._page_size = page_size + + @property + def page_token(self) -> str: + return self._page_token or "" + + @page_token.setter + def page_token(self, page_token: Optional[str]): + if page_token is None: + del self.page_token + return + if not isinstance(page_token, str): + raise TypeError('page_token must be of type str') + self._page_token = page_token + + def endpoint(self): + path = '/api/v1/competitions/{competition_name}/leaderboard/view' + return path.format_map(self.to_field_map(self)) + + @staticmethod + def endpoint_path(): + return '/api/v1/competitions/{competition_name}/leaderboard/view' + + +class ApiGetLeaderboardResponse(KaggleObject): + r""" + Attributes: + submissions (ApiLeaderboardSubmission) + next_page_token (str) + """ + + def __init__(self): + self._submissions = [] + self._next_page_token = "" + self._freeze() + + @property + def submissions(self) -> Optional[List[Optional['ApiLeaderboardSubmission']]]: + return self._submissions + + @submissions.setter + def submissions(self, submissions: Optional[List[Optional['ApiLeaderboardSubmission']]]): + if submissions is None: + del self.submissions + return + if not isinstance(submissions, list): + raise TypeError('submissions must be of type list') + if not all([isinstance(t, ApiLeaderboardSubmission) for t in submissions]): + raise TypeError('submissions must contain only items of type ApiLeaderboardSubmission') + self._submissions 
= submissions + + @property + def next_page_token(self) -> str: + return self._next_page_token + + @next_page_token.setter + def next_page_token(self, next_page_token: str): + if next_page_token is None: + del self.next_page_token + return + if not isinstance(next_page_token, str): + raise TypeError('next_page_token must be of type str') + self._next_page_token = next_page_token + + @property + def nextPageToken(self): + return self.next_page_token + + +class ApiGetSubmissionRequest(KaggleObject): + r""" + Attributes: + ref (int) + SubmissionId. + """ + + def __init__(self): + self._ref = 0 + self._freeze() + + @property + def ref(self) -> int: + """SubmissionId.""" + return self._ref + + @ref.setter + def ref(self, ref: int): + if ref is None: + del self.ref + return + if not isinstance(ref, int): + raise TypeError('ref must be of type int') + self._ref = ref + + def endpoint(self): + path = '/api/v1/competitions/submissions/get/{ref}' + return path.format_map(self.to_field_map(self)) + + + @staticmethod + def method(): + return 'POST' + + +class ApiLeaderboardSubmission(KaggleObject): + r""" + Attributes: + team_id (int) + team_name (str) + submission_date (datetime) + score (str) + """ + + def __init__(self): + self._team_id = 0 + self._team_name = None + self._submission_date = None + self._score = None + self._freeze() + + @property + def team_id(self) -> int: + return self._team_id + + @team_id.setter + def team_id(self, team_id: int): + if team_id is None: + del self.team_id + return + if not isinstance(team_id, int): + raise TypeError('team_id must be of type int') + self._team_id = team_id + + @property + def team_name(self) -> str: + return self._team_name or "" + + @team_name.setter + def team_name(self, team_name: Optional[str]): + if team_name is None: + del self.team_name + return + if not isinstance(team_name, str): + raise TypeError('team_name must be of type str') + self._team_name = team_name + + @property + def submission_date(self) -> datetime: + return self._submission_date + + @submission_date.setter + def submission_date(self, submission_date: datetime): + if submission_date is None: + del self.submission_date + return + if not isinstance(submission_date, datetime): + raise TypeError('submission_date must be of type datetime') + self._submission_date = submission_date + + @property + def score(self) -> str: + return self._score or "" + + @score.setter + def score(self, score: Optional[str]): + if score is None: + del self.score + return + if not isinstance(score, str): + raise TypeError('score must be of type str') + self._score = score + + +class ApiListCompetitionsRequest(KaggleObject): + r""" + Attributes: + group (CompetitionListTab) + Filter competitions by a particular group (default is 'general'). + One of 'general', 'entered' and 'inClass'. + category (HostSegment) + Filter competitions by a particular category (default is 'all'). + One of 'all', 'featured', 'research', 'recruitment', 'gettingStarted', + 'masters', 'playground'. + sort_by (CompetitionSortBy) + Sort the results (default is 'latestDeadline'). + One of 'grouped', 'prize', 'earliestDeadline', 'latestDeadline', + 'numberOfTeams', 'recentlyCreated'. + search (str) + Filter competitions by search terms. + page (int) + Page number (default is 1). 
+ page_token (str) + page_size (int) + """ + + def __init__(self): + self._group = None + self._category = None + self._sort_by = None + self._search = None + self._page = None + self._page_token = None + self._page_size = None + self._freeze() + + @property + def group(self) -> 'CompetitionListTab': + r""" + Filter competitions by a particular group (default is 'general'). + One of 'general', 'entered' and 'inClass'. + """ + return self._group or CompetitionListTab.COMPETITION_LIST_TAB_GENERAL + + @group.setter + def group(self, group: Optional['CompetitionListTab']): + if group is None: + del self.group + return + if not isinstance(group, CompetitionListTab): + raise TypeError('group must be of type CompetitionListTab') + self._group = group + + @property + def category(self) -> 'HostSegment': + r""" + Filter competitions by a particular category (default is 'all'). + One of 'all', 'featured', 'research', 'recruitment', 'gettingStarted', + 'masters', 'playground'. + """ + return self._category or HostSegment.HOST_SEGMENT_UNSPECIFIED + + @category.setter + def category(self, category: Optional['HostSegment']): + if category is None: + del self.category + return + if not isinstance(category, HostSegment): + raise TypeError('category must be of type HostSegment') + self._category = category + + @property + def sort_by(self) -> 'CompetitionSortBy': + r""" + Sort the results (default is 'latestDeadline'). + One of 'grouped', 'prize', 'earliestDeadline', 'latestDeadline', + 'numberOfTeams', 'recentlyCreated'. + """ + return self._sort_by or CompetitionSortBy.COMPETITION_SORT_BY_GROUPED + + @sort_by.setter + def sort_by(self, sort_by: Optional['CompetitionSortBy']): + if sort_by is None: + del self.sort_by + return + if not isinstance(sort_by, CompetitionSortBy): + raise TypeError('sort_by must be of type CompetitionSortBy') + self._sort_by = sort_by + + @property + def search(self) -> str: + """Filter competitions by search terms.""" + return self._search or "" + + @search.setter + def search(self, search: Optional[str]): + if search is None: + del self.search + return + if not isinstance(search, str): + raise TypeError('search must be of type str') + self._search = search + + @property + def page(self) -> int: + """Page number (default is 1).""" + return self._page or 0 + + @page.setter + def page(self, page: Optional[int]): + if page is None: + del self.page + return + if not isinstance(page, int): + raise TypeError('page must be of type int') + self._page = page + + @property + def page_token(self) -> str: + return self._page_token or "" + + @page_token.setter + def page_token(self, page_token: Optional[str]): + if page_token is None: + del self.page_token + return + if not isinstance(page_token, str): + raise TypeError('page_token must be of type str') + self._page_token = page_token + + @property + def page_size(self) -> int: + return self._page_size or 0 + + @page_size.setter + def page_size(self, page_size: Optional[int]): + if page_size is None: + del self.page_size + return + if not isinstance(page_size, int): + raise TypeError('page_size must be of type int') + self._page_size = page_size + + def endpoint(self): + path = '/api/v1/competitions/list' + return path.format_map(self.to_field_map(self)) + + +class ApiListCompetitionsResponse(KaggleObject): + r""" + Attributes: + competitions (ApiCompetition) + next_page_token (str) + """ + + def __init__(self): + self._competitions = [] + self._next_page_token = "" + self._freeze() + + @property + def competitions(self) -> 
Optional[List[Optional['ApiCompetition']]]: + return self._competitions + + @competitions.setter + def competitions(self, competitions: Optional[List[Optional['ApiCompetition']]]): + if competitions is None: + del self.competitions + return + if not isinstance(competitions, list): + raise TypeError('competitions must be of type list') + if not all([isinstance(t, ApiCompetition) for t in competitions]): + raise TypeError('competitions must contain only items of type ApiCompetition') + self._competitions = competitions + + @property + def next_page_token(self) -> str: + return self._next_page_token + + @next_page_token.setter + def next_page_token(self, next_page_token: str): + if next_page_token is None: + del self.next_page_token + return + if not isinstance(next_page_token, str): + raise TypeError('next_page_token must be of type str') + self._next_page_token = next_page_token + + @property + def nextPageToken(self): + return self.next_page_token + + +class ApiListDataFilesRequest(KaggleObject): + r""" + Attributes: + competition_name (str) + Competition name. Example: 'titanic'. + page_size (int) + page_token (str) + """ + + def __init__(self): + self._competition_name = "" + self._page_size = None + self._page_token = None + self._freeze() + + @property + def competition_name(self) -> str: + """Competition name. Example: 'titanic'.""" + return self._competition_name + + @competition_name.setter + def competition_name(self, competition_name: str): + if competition_name is None: + del self.competition_name + return + if not isinstance(competition_name, str): + raise TypeError('competition_name must be of type str') + self._competition_name = competition_name + + @property + def page_size(self) -> int: + return self._page_size or 0 + + @page_size.setter + def page_size(self, page_size: Optional[int]): + if page_size is None: + del self.page_size + return + if not isinstance(page_size, int): + raise TypeError('page_size must be of type int') + self._page_size = page_size + + @property + def page_token(self) -> str: + return self._page_token or "" + + @page_token.setter + def page_token(self, page_token: Optional[str]): + if page_token is None: + del self.page_token + return + if not isinstance(page_token, str): + raise TypeError('page_token must be of type str') + self._page_token = page_token + + def endpoint(self): + path = '/api/v1/competitions/data/list/{competition_name}' + return path.format_map(self.to_field_map(self)) + + @staticmethod + def endpoint_path(): + return '/api/v1/competitions/data/list/{competition_name}' + + +class ApiListDataFilesResponse(KaggleObject): + r""" + Attributes: + files (ApiDataFile) + next_page_token (str) + children_fetch_time_ms (int) + """ + + def __init__(self): + self._files = [] + self._next_page_token = "" + self._children_fetch_time_ms = 0 + self._freeze() + + @property + def files(self) -> Optional[List[Optional['ApiDataFile']]]: + return self._files + + @files.setter + def files(self, files: Optional[List[Optional['ApiDataFile']]]): + if files is None: + del self.files + return + if not isinstance(files, list): + raise TypeError('files must be of type list') + if not all([isinstance(t, ApiDataFile) for t in files]): + raise TypeError('files must contain only items of type ApiDataFile') + self._files = files + + @property + def next_page_token(self) -> str: + return self._next_page_token + + @next_page_token.setter + def next_page_token(self, next_page_token: str): + if next_page_token is None: + del self.next_page_token + return + if not 
isinstance(next_page_token, str): + raise TypeError('next_page_token must be of type str') + self._next_page_token = next_page_token + + @property + def children_fetch_time_ms(self) -> int: + return self._children_fetch_time_ms + + @children_fetch_time_ms.setter + def children_fetch_time_ms(self, children_fetch_time_ms: int): + if children_fetch_time_ms is None: + del self.children_fetch_time_ms + return + if not isinstance(children_fetch_time_ms, int): + raise TypeError('children_fetch_time_ms must be of type int') + self._children_fetch_time_ms = children_fetch_time_ms + + @property + def nextPageToken(self): + return self.next_page_token + + @property + def childrenFetchTimeMs(self): + return self.children_fetch_time_ms + + +class ApiListDataTreeFilesRequest(KaggleObject): + r""" + Attributes: + competition_name (str) + Competition name. Example: 'titanic'. + path (str) + The path of the directory to list files from. If not provided, the root + directory will be listed. + page_size (int) + page_token (str) + """ + + def __init__(self): + self._competition_name = "" + self._path = None + self._page_size = None + self._page_token = None + self._freeze() + + @property + def competition_name(self) -> str: + """Competition name. Example: 'titanic'.""" + return self._competition_name + + @competition_name.setter + def competition_name(self, competition_name: str): + if competition_name is None: + del self.competition_name + return + if not isinstance(competition_name, str): + raise TypeError('competition_name must be of type str') + self._competition_name = competition_name + + @property + def path(self) -> str: + r""" + The path of the directory to list files from. If not provided, the root + directory will be listed. + """ + return self._path or "" + + @path.setter + def path(self, path: Optional[str]): + if path is None: + del self.path + return + if not isinstance(path, str): + raise TypeError('path must be of type str') + self._path = path + + @property + def page_size(self) -> int: + return self._page_size or 0 + + @page_size.setter + def page_size(self, page_size: Optional[int]): + if page_size is None: + del self.page_size + return + if not isinstance(page_size, int): + raise TypeError('page_size must be of type int') + self._page_size = page_size + + @property + def page_token(self) -> str: + return self._page_token or "" + + @page_token.setter + def page_token(self, page_token: Optional[str]): + if page_token is None: + del self.page_token + return + if not isinstance(page_token, str): + raise TypeError('page_token must be of type str') + self._page_token = page_token + + def endpoint(self): + path = '/api/v1/competitions/{competition_name}/data-tree/list/' + return path.format_map(self.to_field_map(self)) + + @staticmethod + def endpoint_path(): + return '/api/v1/competitions/{competition_name}/data-tree/list/' + + +class ApiListSubmissionsRequest(KaggleObject): + r""" + Attributes: + competition_name (str) + sort_by (SubmissionSortBy) + group (SubmissionGroup) + page (int) + page_token (str) + page_size (int) + """ + + def __init__(self): + self._competition_name = "" + self._sort_by = SubmissionSortBy.SUBMISSION_SORT_BY_DATE + self._group = SubmissionGroup.SUBMISSION_GROUP_ALL + self._page = None + self._page_token = None + self._page_size = None + self._freeze() + + @property + def competition_name(self) -> str: + return self._competition_name + + @competition_name.setter + def competition_name(self, competition_name: str): + if competition_name is None: + del 
self.competition_name + return + if not isinstance(competition_name, str): + raise TypeError('competition_name must be of type str') + self._competition_name = competition_name + + @property + def sort_by(self) -> 'SubmissionSortBy': + return self._sort_by + + @sort_by.setter + def sort_by(self, sort_by: 'SubmissionSortBy'): + if sort_by is None: + del self.sort_by + return + if not isinstance(sort_by, SubmissionSortBy): + raise TypeError('sort_by must be of type SubmissionSortBy') + self._sort_by = sort_by + + @property + def group(self) -> 'SubmissionGroup': + return self._group + + @group.setter + def group(self, group: 'SubmissionGroup'): + if group is None: + del self.group + return + if not isinstance(group, SubmissionGroup): + raise TypeError('group must be of type SubmissionGroup') + self._group = group + + @property + def page(self) -> int: + return self._page or 0 + + @page.setter + def page(self, page: Optional[int]): + if page is None: + del self.page + return + if not isinstance(page, int): + raise TypeError('page must be of type int') + self._page = page + + @property + def page_token(self) -> str: + return self._page_token or "" + + @page_token.setter + def page_token(self, page_token: Optional[str]): + if page_token is None: + del self.page_token + return + if not isinstance(page_token, str): + raise TypeError('page_token must be of type str') + self._page_token = page_token + + @property + def page_size(self) -> int: + return self._page_size or 0 + + @page_size.setter + def page_size(self, page_size: Optional[int]): + if page_size is None: + del self.page_size + return + if not isinstance(page_size, int): + raise TypeError('page_size must be of type int') + self._page_size = page_size + + def endpoint(self): + path = '/api/v1/competitions/submissions/list/{competition_name}' + return path.format_map(self.to_field_map(self)) + + @staticmethod + def endpoint_path(): + return '/api/v1/competitions/submissions/list/{competition_name}' + + +class ApiListSubmissionsResponse(KaggleObject): + r""" + Attributes: + submissions (ApiSubmission) + next_page_token (str) + """ + + def __init__(self): + self._submissions = [] + self._next_page_token = "" + self._freeze() + + @property + def submissions(self) -> Optional[List[Optional['ApiSubmission']]]: + return self._submissions + + @submissions.setter + def submissions(self, submissions: Optional[List[Optional['ApiSubmission']]]): + if submissions is None: + del self.submissions + return + if not isinstance(submissions, list): + raise TypeError('submissions must be of type list') + if not all([isinstance(t, ApiSubmission) for t in submissions]): + raise TypeError('submissions must contain only items of type ApiSubmission') + self._submissions = submissions + + @property + def next_page_token(self) -> str: + return self._next_page_token + + @next_page_token.setter + def next_page_token(self, next_page_token: str): + if next_page_token is None: + del self.next_page_token + return + if not isinstance(next_page_token, str): + raise TypeError('next_page_token must be of type str') + self._next_page_token = next_page_token + + @property + def nextPageToken(self): + return self.next_page_token + + +class ApiStartSubmissionUploadRequest(KaggleObject): + r""" + Attributes: + competition_name (str) + content_length (int) + last_modified_epoch_seconds (int) + file_name (str) + Comes from form upload + """ + + def __init__(self): + self._competition_name = None + self._content_length = 0 + self._last_modified_epoch_seconds = 0 + self._file_name = 
"" + self._freeze() + + @property + def competition_name(self) -> str: + return self._competition_name or "" + + @competition_name.setter + def competition_name(self, competition_name: Optional[str]): + if competition_name is None: + del self.competition_name + return + if not isinstance(competition_name, str): + raise TypeError('competition_name must be of type str') + self._competition_name = competition_name + + @property + def content_length(self) -> int: + return self._content_length + + @content_length.setter + def content_length(self, content_length: int): + if content_length is None: + del self.content_length + return + if not isinstance(content_length, int): + raise TypeError('content_length must be of type int') + self._content_length = content_length + + @property + def last_modified_epoch_seconds(self) -> int: + return self._last_modified_epoch_seconds + + @last_modified_epoch_seconds.setter + def last_modified_epoch_seconds(self, last_modified_epoch_seconds: int): + if last_modified_epoch_seconds is None: + del self.last_modified_epoch_seconds + return + if not isinstance(last_modified_epoch_seconds, int): + raise TypeError('last_modified_epoch_seconds must be of type int') + self._last_modified_epoch_seconds = last_modified_epoch_seconds + + @property + def file_name(self) -> str: + """Comes from form upload""" + return self._file_name + + @file_name.setter + def file_name(self, file_name: str): + if file_name is None: + del self.file_name + return + if not isinstance(file_name, str): + raise TypeError('file_name must be of type str') + self._file_name = file_name + + def endpoint(self): + path = '/api/v1/competitions/submission-url' + return path.format_map(self.to_field_map(self)) + + + @staticmethod + def method(): + return 'POST' + + +class ApiStartSubmissionUploadResponse(KaggleObject): + r""" + Currently identical to StartBlobUploadResponse, but keeping separate since + they could change independently and this is a legacy V1 type. + + Attributes: + token (str) + create_url (str) + """ + + def __init__(self): + self._token = "" + self._create_url = "" + self._freeze() + + @property + def token(self) -> str: + return self._token + + @token.setter + def token(self, token: str): + if token is None: + del self.token + return + if not isinstance(token, str): + raise TypeError('token must be of type str') + self._token = token + + @property + def create_url(self) -> str: + return self._create_url + + @create_url.setter + def create_url(self, create_url: str): + if create_url is None: + del self.create_url + return + if not isinstance(create_url, str): + raise TypeError('create_url must be of type str') + self._create_url = create_url + + @property + def createUrl(self): + return self.create_url + + +class ApiSubmission(KaggleObject): + r""" + Attributes: + ref (int) + total_bytes (int) + date (datetime) + description (str) + error_description (str) + file_name (str) + public_score (str) + private_score (str) + status (SubmissionStatus) + submitted_by (str) + submitted_by_ref (str) + team_name (str) + url (str) + Minor note: ListSubmissions and GetSubmission may differ in setting this + field. 
+ """ + + def __init__(self): + self._ref = 0 + self._total_bytes = None + self._date = None + self._description = None + self._error_description = None + self._file_name = None + self._public_score = None + self._private_score = None + self._status = SubmissionStatus.PENDING + self._submitted_by = None + self._submitted_by_ref = None + self._team_name = None + self._url = None + self._freeze() + + @property + def ref(self) -> int: + return self._ref + + @ref.setter + def ref(self, ref: int): + if ref is None: + del self.ref + return + if not isinstance(ref, int): + raise TypeError('ref must be of type int') + self._ref = ref + + @property + def total_bytes(self) -> int: + return self._total_bytes or 0 + + @total_bytes.setter + def total_bytes(self, total_bytes: Optional[int]): + if total_bytes is None: + del self.total_bytes + return + if not isinstance(total_bytes, int): + raise TypeError('total_bytes must be of type int') + self._total_bytes = total_bytes + + @property + def date(self) -> datetime: + return self._date + + @date.setter + def date(self, date: datetime): + if date is None: + del self.date + return + if not isinstance(date, datetime): + raise TypeError('date must be of type datetime') + self._date = date + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: Optional[str]): + if description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + @property + def error_description(self) -> str: + return self._error_description or "" + + @error_description.setter + def error_description(self, error_description: Optional[str]): + if error_description is None: + del self.error_description + return + if not isinstance(error_description, str): + raise TypeError('error_description must be of type str') + self._error_description = error_description + + @property + def file_name(self) -> str: + return self._file_name or "" + + @file_name.setter + def file_name(self, file_name: Optional[str]): + if file_name is None: + del self.file_name + return + if not isinstance(file_name, str): + raise TypeError('file_name must be of type str') + self._file_name = file_name + + @property + def public_score(self) -> str: + return self._public_score or "" + + @public_score.setter + def public_score(self, public_score: Optional[str]): + if public_score is None: + del self.public_score + return + if not isinstance(public_score, str): + raise TypeError('public_score must be of type str') + self._public_score = public_score + + @property + def private_score(self) -> str: + return self._private_score or "" + + @private_score.setter + def private_score(self, private_score: Optional[str]): + if private_score is None: + del self.private_score + return + if not isinstance(private_score, str): + raise TypeError('private_score must be of type str') + self._private_score = private_score + + @property + def status(self) -> 'SubmissionStatus': + return self._status + + @status.setter + def status(self, status: 'SubmissionStatus'): + if status is None: + del self.status + return + if not isinstance(status, SubmissionStatus): + raise TypeError('status must be of type SubmissionStatus') + self._status = status + + @property + def submitted_by(self) -> str: + return self._submitted_by or "" + + @submitted_by.setter + def submitted_by(self, submitted_by: Optional[str]): + if submitted_by is None: + del self.submitted_by 
+ return + if not isinstance(submitted_by, str): + raise TypeError('submitted_by must be of type str') + self._submitted_by = submitted_by + + @property + def submitted_by_ref(self) -> str: + return self._submitted_by_ref or "" + + @submitted_by_ref.setter + def submitted_by_ref(self, submitted_by_ref: Optional[str]): + if submitted_by_ref is None: + del self.submitted_by_ref + return + if not isinstance(submitted_by_ref, str): + raise TypeError('submitted_by_ref must be of type str') + self._submitted_by_ref = submitted_by_ref + + @property + def team_name(self) -> str: + return self._team_name or "" + + @team_name.setter + def team_name(self, team_name: Optional[str]): + if team_name is None: + del self.team_name + return + if not isinstance(team_name, str): + raise TypeError('team_name must be of type str') + self._team_name = team_name + + @property + def url(self) -> str: + r""" + Minor note: ListSubmissions and GetSubmission may differ in setting this + field. + """ + return self._url or "" + + @url.setter + def url(self, url: Optional[str]): + if url is None: + del self.url + return + if not isinstance(url, str): + raise TypeError('url must be of type str') + self._url = url + + +class ApiCategory(KaggleObject): + r""" + TODO(erdalsivri): Consider reusing with Kaggle.Sdk.Datasets.ApiCategory. + + Attributes: + ref (str) + name (str) + description (str) + full_path (str) + competition_count (int) + dataset_count (int) + script_count (int) + total_count (int) + """ + + def __init__(self): + self._ref = "" + self._name = None + self._description = None + self._full_path = None + self._competition_count = 0 + self._dataset_count = 0 + self._script_count = 0 + self._total_count = 0 + self._freeze() + + @property + def ref(self) -> str: + return self._ref + + @ref.setter + def ref(self, ref: str): + if ref is None: + del self.ref + return + if not isinstance(ref, str): + raise TypeError('ref must be of type str') + self._ref = ref + + @property + def name(self) -> str: + return self._name or "" + + @name.setter + def name(self, name: Optional[str]): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: Optional[str]): + if description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + @property + def full_path(self) -> str: + return self._full_path or "" + + @full_path.setter + def full_path(self, full_path: Optional[str]): + if full_path is None: + del self.full_path + return + if not isinstance(full_path, str): + raise TypeError('full_path must be of type str') + self._full_path = full_path + + @property + def competition_count(self) -> int: + return self._competition_count + + @competition_count.setter + def competition_count(self, competition_count: int): + if competition_count is None: + del self.competition_count + return + if not isinstance(competition_count, int): + raise TypeError('competition_count must be of type int') + self._competition_count = competition_count + + @property + def dataset_count(self) -> int: + return self._dataset_count + + @dataset_count.setter + def dataset_count(self, dataset_count: int): + if dataset_count is None: + del self.dataset_count + return + if not isinstance(dataset_count, int): + raise 
TypeError('dataset_count must be of type int') + self._dataset_count = dataset_count + + @property + def script_count(self) -> int: + return self._script_count + + @script_count.setter + def script_count(self, script_count: int): + if script_count is None: + del self.script_count + return + if not isinstance(script_count, int): + raise TypeError('script_count must be of type int') + self._script_count = script_count + + @property + def total_count(self) -> int: + return self._total_count + + @total_count.setter + def total_count(self, total_count: int): + if total_count is None: + del self.total_count + return + if not isinstance(total_count, int): + raise TypeError('total_count must be of type int') + self._total_count = total_count + + +class ApiDataFile(KaggleObject): + r""" + Attributes: + ref (str) + name (str) + description (str) + total_bytes (int) + url (str) + creation_date (datetime) + """ + + def __init__(self): + self._ref = "" + self._name = None + self._description = None + self._total_bytes = 0 + self._url = None + self._creation_date = None + self._freeze() + + @property + def ref(self) -> str: + return self._ref + + @ref.setter + def ref(self, ref: str): + if ref is None: + del self.ref + return + if not isinstance(ref, str): + raise TypeError('ref must be of type str') + self._ref = ref + + @property + def name(self) -> str: + return self._name or "" + + @name.setter + def name(self, name: Optional[str]): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: Optional[str]): + if description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + @property + def total_bytes(self) -> int: + return self._total_bytes + + @total_bytes.setter + def total_bytes(self, total_bytes: int): + if total_bytes is None: + del self.total_bytes + return + if not isinstance(total_bytes, int): + raise TypeError('total_bytes must be of type int') + self._total_bytes = total_bytes + + @property + def url(self) -> str: + return self._url or "" + + @url.setter + def url(self, url: Optional[str]): + if url is None: + del self.url + return + if not isinstance(url, str): + raise TypeError('url must be of type str') + self._url = url + + @property + def creation_date(self) -> datetime: + return self._creation_date + + @creation_date.setter + def creation_date(self, creation_date: datetime): + if creation_date is None: + del self.creation_date + return + if not isinstance(creation_date, datetime): + raise TypeError('creation_date must be of type datetime') + self._creation_date = creation_date + + +ApiCompetition._fields = [ + FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), + FieldMetadata("ref", "ref", "_ref", str, "", PredefinedSerializer()), + FieldMetadata("title", "title", "_title", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("url", "url", "_url", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("organizationName", "organization_name", "_organization_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("organizationRef", "organization_ref", "_organization_ref", str, 
None, PredefinedSerializer(), optional=True), + FieldMetadata("category", "category", "_category", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("reward", "reward", "_reward", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("tags", "tags", "_tags", ApiCategory, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("deadline", "deadline", "_deadline", datetime, None, DateTimeSerializer()), + FieldMetadata("kernelCount", "kernel_count", "_kernel_count", int, 0, PredefinedSerializer()), + FieldMetadata("teamCount", "team_count", "_team_count", int, 0, PredefinedSerializer()), + FieldMetadata("userHasEntered", "user_has_entered", "_user_has_entered", bool, False, PredefinedSerializer()), + FieldMetadata("userRank", "user_rank", "_user_rank", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("mergerDeadline", "merger_deadline", "_merger_deadline", datetime, None, DateTimeSerializer()), + FieldMetadata("newEntrantDeadline", "new_entrant_deadline", "_new_entrant_deadline", datetime, None, DateTimeSerializer()), + FieldMetadata("enabledDate", "enabled_date", "_enabled_date", datetime, None, DateTimeSerializer()), + FieldMetadata("maxDailySubmissions", "max_daily_submissions", "_max_daily_submissions", int, 0, PredefinedSerializer()), + FieldMetadata("maxTeamSize", "max_team_size", "_max_team_size", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("evaluationMetric", "evaluation_metric", "_evaluation_metric", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("awardsPoints", "awards_points", "_awards_points", bool, False, PredefinedSerializer()), + FieldMetadata("isKernelsSubmissionsOnly", "is_kernels_submissions_only", "_is_kernels_submissions_only", bool, False, PredefinedSerializer()), + FieldMetadata("submissionsDisabled", "submissions_disabled", "_submissions_disabled", bool, False, PredefinedSerializer()), + FieldMetadata("thumbnailImageUrl", "thumbnail_image_url", "_thumbnail_image_url", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("hostName", "host_name", "_host_name", str, "", PredefinedSerializer()), +] + +ApiCreateCodeSubmissionRequest._fields = [ + FieldMetadata("competitionName", "competition_name", "_competition_name", str, "", PredefinedSerializer()), + FieldMetadata("kernelOwner", "kernel_owner", "_kernel_owner", str, "", PredefinedSerializer()), + FieldMetadata("kernelSlug", "kernel_slug", "_kernel_slug", str, "", PredefinedSerializer()), + FieldMetadata("kernelVersion", "kernel_version", "_kernel_version", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("fileName", "file_name", "_file_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("submissionDescription", "submission_description", "_submission_description", str, None, PredefinedSerializer(), optional=True), +] + +ApiCreateCodeSubmissionResponse._fields = [ + FieldMetadata("message", "message", "_message", str, "", PredefinedSerializer()), + FieldMetadata("ref", "ref", "_ref", int, 0, PredefinedSerializer()), +] + +ApiCreateSubmissionRequest._fields = [ + FieldMetadata("competitionName", "competition_name", "_competition_name", str, "", PredefinedSerializer()), + FieldMetadata("blobFileTokens", "blob_file_tokens", "_blob_file_tokens", str, "", PredefinedSerializer()), + FieldMetadata("submissionDescription", "submission_description", "_submission_description", str, None, PredefinedSerializer(), optional=True), +] + +ApiCreateSubmissionResponse._fields = [ + 
FieldMetadata("message", "message", "_message", str, "", PredefinedSerializer()), + FieldMetadata("ref", "ref", "_ref", int, 0, PredefinedSerializer()), +] + +ApiDownloadDataFileRequest._fields = [ + FieldMetadata("competitionName", "competition_name", "_competition_name", str, "", PredefinedSerializer()), + FieldMetadata("fileName", "file_name", "_file_name", str, "", PredefinedSerializer()), +] + +ApiDownloadDataFilesRequest._fields = [ + FieldMetadata("competitionName", "competition_name", "_competition_name", str, "", PredefinedSerializer()), +] + +ApiDownloadLeaderboardRequest._fields = [ + FieldMetadata("competitionName", "competition_name", "_competition_name", str, "", PredefinedSerializer()), +] + +ApiGetCompetitionDataFilesSummaryRequest._fields = [ + FieldMetadata("competitionName", "competition_name", "_competition_name", str, "", PredefinedSerializer()), +] + +ApiGetCompetitionRequest._fields = [ + FieldMetadata("competitionName", "competition_name", "_competition_name", str, "", PredefinedSerializer()), +] + +ApiGetLeaderboardRequest._fields = [ + FieldMetadata("competitionName", "competition_name", "_competition_name", str, "", PredefinedSerializer()), + FieldMetadata("overridePublic", "override_public", "_override_public", bool, None, PredefinedSerializer(), optional=True), + FieldMetadata("pageSize", "page_size", "_page_size", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("pageToken", "page_token", "_page_token", str, None, PredefinedSerializer(), optional=True), +] + +ApiGetLeaderboardResponse._fields = [ + FieldMetadata("submissions", "submissions", "_submissions", ApiLeaderboardSubmission, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("nextPageToken", "next_page_token", "_next_page_token", str, "", PredefinedSerializer()), +] + +ApiGetSubmissionRequest._fields = [ + FieldMetadata("ref", "ref", "_ref", int, 0, PredefinedSerializer()), +] + +ApiLeaderboardSubmission._fields = [ + FieldMetadata("teamId", "team_id", "_team_id", int, 0, PredefinedSerializer()), + FieldMetadata("teamName", "team_name", "_team_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("submissionDate", "submission_date", "_submission_date", datetime, None, DateTimeSerializer()), + FieldMetadata("score", "score", "_score", str, None, PredefinedSerializer(), optional=True), +] + +ApiListCompetitionsRequest._fields = [ + FieldMetadata("group", "group", "_group", CompetitionListTab, None, EnumSerializer(), optional=True), + FieldMetadata("category", "category", "_category", HostSegment, None, EnumSerializer(), optional=True), + FieldMetadata("sortBy", "sort_by", "_sort_by", CompetitionSortBy, None, EnumSerializer(), optional=True), + FieldMetadata("search", "search", "_search", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("page", "page", "_page", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("pageToken", "page_token", "_page_token", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("pageSize", "page_size", "_page_size", int, None, PredefinedSerializer(), optional=True), +] + +ApiListCompetitionsResponse._fields = [ + FieldMetadata("competitions", "competitions", "_competitions", ApiCompetition, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("nextPageToken", "next_page_token", "_next_page_token", str, "", PredefinedSerializer()), +] + +ApiListDataFilesRequest._fields = [ + FieldMetadata("competitionName", "competition_name", "_competition_name", str, "", 
PredefinedSerializer()), + FieldMetadata("pageSize", "page_size", "_page_size", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("pageToken", "page_token", "_page_token", str, None, PredefinedSerializer(), optional=True), +] + +ApiListDataFilesResponse._fields = [ + FieldMetadata("files", "files", "_files", ApiDataFile, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("nextPageToken", "next_page_token", "_next_page_token", str, "", PredefinedSerializer()), + FieldMetadata("childrenFetchTimeMs", "children_fetch_time_ms", "_children_fetch_time_ms", int, 0, PredefinedSerializer()), +] + +ApiListDataTreeFilesRequest._fields = [ + FieldMetadata("competitionName", "competition_name", "_competition_name", str, "", PredefinedSerializer()), + FieldMetadata("path", "path", "_path", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("pageSize", "page_size", "_page_size", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("pageToken", "page_token", "_page_token", str, None, PredefinedSerializer(), optional=True), +] + +ApiListSubmissionsRequest._fields = [ + FieldMetadata("competitionName", "competition_name", "_competition_name", str, "", PredefinedSerializer()), + FieldMetadata("sortBy", "sort_by", "_sort_by", SubmissionSortBy, SubmissionSortBy.SUBMISSION_SORT_BY_DATE, EnumSerializer()), + FieldMetadata("group", "group", "_group", SubmissionGroup, SubmissionGroup.SUBMISSION_GROUP_ALL, EnumSerializer()), + FieldMetadata("page", "page", "_page", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("pageToken", "page_token", "_page_token", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("pageSize", "page_size", "_page_size", int, None, PredefinedSerializer(), optional=True), +] + +ApiListSubmissionsResponse._fields = [ + FieldMetadata("submissions", "submissions", "_submissions", ApiSubmission, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("nextPageToken", "next_page_token", "_next_page_token", str, "", PredefinedSerializer()), +] + +ApiStartSubmissionUploadRequest._fields = [ + FieldMetadata("competitionName", "competition_name", "_competition_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("contentLength", "content_length", "_content_length", int, 0, PredefinedSerializer()), + FieldMetadata("lastModifiedEpochSeconds", "last_modified_epoch_seconds", "_last_modified_epoch_seconds", int, 0, PredefinedSerializer()), + FieldMetadata("fileName", "file_name", "_file_name", str, "", PredefinedSerializer()), +] + +ApiStartSubmissionUploadResponse._fields = [ + FieldMetadata("token", "token", "_token", str, "", PredefinedSerializer()), + FieldMetadata("createUrl", "create_url", "_create_url", str, "", PredefinedSerializer()), +] + +ApiSubmission._fields = [ + FieldMetadata("ref", "ref", "_ref", int, 0, PredefinedSerializer()), + FieldMetadata("totalBytes", "total_bytes", "_total_bytes", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("date", "date", "_date", datetime, None, DateTimeSerializer()), + FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("errorDescription", "error_description", "_error_description", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("fileName", "file_name", "_file_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("publicScore", "public_score", "_public_score", str, None, PredefinedSerializer(), optional=True), + 
FieldMetadata("privateScore", "private_score", "_private_score", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("status", "status", "_status", SubmissionStatus, SubmissionStatus.PENDING, EnumSerializer()), + FieldMetadata("submittedBy", "submitted_by", "_submitted_by", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("submittedByRef", "submitted_by_ref", "_submitted_by_ref", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("teamName", "team_name", "_team_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("url", "url", "_url", str, None, PredefinedSerializer(), optional=True), +] + +ApiCategory._fields = [ + FieldMetadata("ref", "ref", "_ref", str, "", PredefinedSerializer()), + FieldMetadata("name", "name", "_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("fullPath", "full_path", "_full_path", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("competitionCount", "competition_count", "_competition_count", int, 0, PredefinedSerializer()), + FieldMetadata("datasetCount", "dataset_count", "_dataset_count", int, 0, PredefinedSerializer()), + FieldMetadata("scriptCount", "script_count", "_script_count", int, 0, PredefinedSerializer()), + FieldMetadata("totalCount", "total_count", "_total_count", int, 0, PredefinedSerializer()), +] + +ApiDataFile._fields = [ + FieldMetadata("ref", "ref", "_ref", str, "", PredefinedSerializer()), + FieldMetadata("name", "name", "_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("totalBytes", "total_bytes", "_total_bytes", int, 0, PredefinedSerializer()), + FieldMetadata("url", "url", "_url", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("creationDate", "creation_date", "_creation_date", datetime, None, DateTimeSerializer()), +] + diff --git a/src/ksdk/competitions/types/competition_enums.py b/src/ksdk/competitions/types/competition_enums.py new file mode 100644 index 00000000..f315cd64 --- /dev/null +++ b/src/ksdk/competitions/types/competition_enums.py @@ -0,0 +1,53 @@ +import enum + +class CompetitionListTab(enum.Enum): + COMPETITION_LIST_TAB_GENERAL = 0 + """TODO(aip.dev/126): (-- api-linter: core::0126::unspecified=disabled --)""" + COMPETITION_LIST_TAB_ENTERED = 1 + COMPETITION_LIST_TAB_COMMUNITY = 2 + COMPETITION_LIST_TAB_HOSTED = 3 + COMPETITION_LIST_TAB_UNLAUNCHED = 4 + COMPETITION_LIST_TAB_UNLAUNCHED_COMMUNITY = 5 + COMPETITION_LIST_TAB_EVERYTHING = 6 + +class CompetitionSortBy(enum.Enum): + COMPETITION_SORT_BY_GROUPED = 0 + """TODO(aip.dev/126): (-- api-linter: core::0126::unspecified=disabled --)""" + COMPETITION_SORT_BY_BEST = 1 + COMPETITION_SORT_BY_PRIZE = 2 + COMPETITION_SORT_BY_EARLIEST_DEADLINE = 3 + COMPETITION_SORT_BY_LATEST_DEADLINE = 4 + COMPETITION_SORT_BY_NUMBER_OF_TEAMS = 5 + COMPETITION_SORT_BY_RELEVANCE = 6 + COMPETITION_SORT_BY_RECENTLY_CREATED = 7 + +class HostSegment(enum.Enum): + r""" + NOTE: Keep in Sync with Kaggle.Entities.HostSegment until migrated! 
Also keep + the comment in + competition_service.ListCompetitionsRequest.Selector.host_segment_id_filter + up to date + """ + HOST_SEGMENT_UNSPECIFIED = 0 + HOST_SEGMENT_FEATURED = 1 + HOST_SEGMENT_GETTING_STARTED = 5 + HOST_SEGMENT_MASTERS = 6 + HOST_SEGMENT_PLAYGROUND = 8 + HOST_SEGMENT_RECRUITMENT = 3 + HOST_SEGMENT_RESEARCH = 2 + HOST_SEGMENT_COMMUNITY = 10 + HOST_SEGMENT_ANALYTICS = 11 + +class SubmissionGroup(enum.Enum): + SUBMISSION_GROUP_ALL = 0 + """TODO(aip.dev/126): (-- api-linter: core::0126::unspecified=disabled --)""" + SUBMISSION_GROUP_SUCCESSFUL = 1 + SUBMISSION_GROUP_SELECTED = 2 + +class SubmissionSortBy(enum.Enum): + SUBMISSION_SORT_BY_DATE = 0 + """TODO(aip.dev/126): (-- api-linter: core::0126::unspecified=disabled --)""" + SUBMISSION_SORT_BY_NAME = 1 + SUBMISSION_SORT_BY_PRIVATE_SCORE = 2 + SUBMISSION_SORT_BY_PUBLIC_SCORE = 3 + diff --git a/src/ksdk/competitions/types/search_competitions.py b/src/ksdk/competitions/types/search_competitions.py new file mode 100644 index 00000000..cd548102 --- /dev/null +++ b/src/ksdk/competitions/types/search_competitions.py @@ -0,0 +1,27 @@ +import enum + +class SearchCompetitionsOrderBy(enum.Enum): + SEARCH_COMPETITIONS_ORDER_BY_UNSPECIFIED = 0 + SEARCH_COMPETITIONS_ORDER_BY_DEADLINE = 1 + SEARCH_COMPETITIONS_ORDER_BY_PRIZE_VALUE = 2 + SEARCH_COMPETITIONS_ORDER_BY_TEAM_RANK = 3 + SEARCH_COMPETITIONS_ORDER_BY_LAST_SUBMISSION_DATE = 4 + +class SearchCompetitionsProfileVisibility(enum.Enum): + SEARCH_COMPETITIONS_PROFILE_VISIBILITY_ANY = 0 + SEARCH_COMPETITIONS_PROFILE_VISIBILITY_VISIBLE = 1 + SEARCH_COMPETITIONS_PROFILE_VISIBILITY_HIDDEN = 2 + +class SearchCompetitionsRole(enum.Enum): + SEARCH_COMPETITIONS_ROLE_ANY = 0 + SEARCH_COMPETITIONS_ROLE_HOST = 1 + SEARCH_COMPETITIONS_ROLE_PARTICIPANT = 2 + SEARCH_COMPETITIONS_ROLE_PARTICIPANT_ONLY = 3 + """Excludes competitions user hosted, even if they are also a participant""" + +class SearchCompetitionsStatus(enum.Enum): + SEARCH_COMPETITIONS_STATUS_ANY = 0 + SEARCH_COMPETITIONS_STATUS_ACTIVE = 1 + SEARCH_COMPETITIONS_STATUS_COMPLETE = 2 + SEARCH_COMPETITIONS_STATUS_UNLAUNCHED = 3 + diff --git a/src/ksdk/competitions/types/submission_status.py b/src/ksdk/competitions/types/submission_status.py new file mode 100644 index 00000000..14aea36d --- /dev/null +++ b/src/ksdk/competitions/types/submission_status.py @@ -0,0 +1,9 @@ +import enum + +class SubmissionStatus(enum.Enum): + """TODO(aip.dev/216): (-- api-linter: core::0216::synonyms=disabled --)""" + PENDING = 0 + """TODO(aip.dev/126): (-- api-linter: core::0126::unspecified=disabled --)""" + COMPLETE = 1 + ERROR = 2 + diff --git a/src/ksdk/datasets/__init__.py b/src/ksdk/datasets/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/datasets/databundles/__init__.py b/src/ksdk/datasets/databundles/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/datasets/databundles/types/__init__.py b/src/ksdk/datasets/databundles/types/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/datasets/databundles/types/databundle_api_types.py b/src/ksdk/datasets/databundles/types/databundle_api_types.py new file mode 100644 index 00000000..617108fb --- /dev/null +++ b/src/ksdk/datasets/databundles/types/databundle_api_types.py @@ -0,0 +1,540 @@ +from datetime import datetime +from kagglesdk.kaggle_object import * +from typing import List, Optional + +class ApiDirectoryContent(KaggleObject): + r""" + Attributes: + directories (ApiDirectory) + files (ApiFile) + total_children (int) 
+ total_directories (int) + total_files (int) + next_page_token (str) + """ + + def __init__(self): + self._directories = [] + self._files = [] + self._total_children = 0 + self._total_directories = 0 + self._total_files = 0 + self._next_page_token = "" + self._freeze() + + @property + def directories(self) -> Optional[List[Optional['ApiDirectory']]]: + return self._directories + + @directories.setter + def directories(self, directories: Optional[List[Optional['ApiDirectory']]]): + if directories is None: + del self.directories + return + if not isinstance(directories, list): + raise TypeError('directories must be of type list') + if not all([isinstance(t, ApiDirectory) for t in directories]): + raise TypeError('directories must contain only items of type ApiDirectory') + self._directories = directories + + @property + def files(self) -> Optional[List[Optional['ApiFile']]]: + return self._files + + @files.setter + def files(self, files: Optional[List[Optional['ApiFile']]]): + if files is None: + del self.files + return + if not isinstance(files, list): + raise TypeError('files must be of type list') + if not all([isinstance(t, ApiFile) for t in files]): + raise TypeError('files must contain only items of type ApiFile') + self._files = files + + @property + def total_children(self) -> int: + return self._total_children + + @total_children.setter + def total_children(self, total_children: int): + if total_children is None: + del self.total_children + return + if not isinstance(total_children, int): + raise TypeError('total_children must be of type int') + self._total_children = total_children + + @property + def total_directories(self) -> int: + return self._total_directories + + @total_directories.setter + def total_directories(self, total_directories: int): + if total_directories is None: + del self.total_directories + return + if not isinstance(total_directories, int): + raise TypeError('total_directories must be of type int') + self._total_directories = total_directories + + @property + def total_files(self) -> int: + return self._total_files + + @total_files.setter + def total_files(self, total_files: int): + if total_files is None: + del self.total_files + return + if not isinstance(total_files, int): + raise TypeError('total_files must be of type int') + self._total_files = total_files + + @property + def next_page_token(self) -> str: + return self._next_page_token + + @next_page_token.setter + def next_page_token(self, next_page_token: str): + if next_page_token is None: + del self.next_page_token + return + if not isinstance(next_page_token, str): + raise TypeError('next_page_token must be of type str') + self._next_page_token = next_page_token + + +class ApiFile(KaggleObject): + r""" + Attributes: + name (str) + creation_date (datetime) + total_bytes (int) + relative_url (str) + description (str) + """ + + def __init__(self): + self._name = None + self._creation_date = None + self._total_bytes = 0 + self._relative_url = None + self._description = None + self._freeze() + + @property + def name(self) -> str: + return self._name or "" + + @name.setter + def name(self, name: Optional[str]): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + @property + def creation_date(self) -> datetime: + return self._creation_date + + @creation_date.setter + def creation_date(self, creation_date: datetime): + if creation_date is None: + del self.creation_date + return + if not isinstance(creation_date, 
datetime): + raise TypeError('creation_date must be of type datetime') + self._creation_date = creation_date + + @property + def total_bytes(self) -> int: + return self._total_bytes + + @total_bytes.setter + def total_bytes(self, total_bytes: int): + if total_bytes is None: + del self.total_bytes + return + if not isinstance(total_bytes, int): + raise TypeError('total_bytes must be of type int') + self._total_bytes = total_bytes + + @property + def relative_url(self) -> str: + return self._relative_url or "" + + @relative_url.setter + def relative_url(self, relative_url: Optional[str]): + if relative_url is None: + del self.relative_url + return + if not isinstance(relative_url, str): + raise TypeError('relative_url must be of type str') + self._relative_url = relative_url + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: Optional[str]): + if description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + +class ApiFilesSummary(KaggleObject): + r""" + Attributes: + file_summary_info (ApiFileSummaryInfo) + column_summary_info (ApiColumnSummaryInfo) + """ + + def __init__(self): + self._file_summary_info = None + self._column_summary_info = None + self._freeze() + + @property + def file_summary_info(self) -> Optional['ApiFileSummaryInfo']: + return self._file_summary_info + + @file_summary_info.setter + def file_summary_info(self, file_summary_info: Optional['ApiFileSummaryInfo']): + if file_summary_info is None: + del self.file_summary_info + return + if not isinstance(file_summary_info, ApiFileSummaryInfo): + raise TypeError('file_summary_info must be of type ApiFileSummaryInfo') + self._file_summary_info = file_summary_info + + @property + def column_summary_info(self) -> Optional['ApiColumnSummaryInfo']: + return self._column_summary_info + + @column_summary_info.setter + def column_summary_info(self, column_summary_info: Optional['ApiColumnSummaryInfo']): + if column_summary_info is None: + del self.column_summary_info + return + if not isinstance(column_summary_info, ApiColumnSummaryInfo): + raise TypeError('column_summary_info must be of type ApiColumnSummaryInfo') + self._column_summary_info = column_summary_info + + +class ApiFileSummaryInfo(KaggleObject): + r""" + Attributes: + total_file_count (int) + file_types (ApiFileExtensionSummaryInfo) + """ + + def __init__(self): + self._total_file_count = 0 + self._file_types = [] + self._freeze() + + @property + def total_file_count(self) -> int: + return self._total_file_count + + @total_file_count.setter + def total_file_count(self, total_file_count: int): + if total_file_count is None: + del self.total_file_count + return + if not isinstance(total_file_count, int): + raise TypeError('total_file_count must be of type int') + self._total_file_count = total_file_count + + @property + def file_types(self) -> Optional[List[Optional['ApiFileExtensionSummaryInfo']]]: + return self._file_types + + @file_types.setter + def file_types(self, file_types: Optional[List[Optional['ApiFileExtensionSummaryInfo']]]): + if file_types is None: + del self.file_types + return + if not isinstance(file_types, list): + raise TypeError('file_types must be of type list') + if not all([isinstance(t, ApiFileExtensionSummaryInfo) for t in file_types]): + raise TypeError('file_types must contain only items of type ApiFileExtensionSummaryInfo') + 
self._file_types = file_types + + +class ApiColumnSummaryInfo(KaggleObject): + r""" + Attributes: + total_column_count (int) + column_types (ApiColumnTypeSummaryInfo) + """ + + def __init__(self): + self._total_column_count = 0 + self._column_types = [] + self._freeze() + + @property + def total_column_count(self) -> int: + return self._total_column_count + + @total_column_count.setter + def total_column_count(self, total_column_count: int): + if total_column_count is None: + del self.total_column_count + return + if not isinstance(total_column_count, int): + raise TypeError('total_column_count must be of type int') + self._total_column_count = total_column_count + + @property + def column_types(self) -> Optional[List[Optional['ApiColumnTypeSummaryInfo']]]: + return self._column_types + + @column_types.setter + def column_types(self, column_types: Optional[List[Optional['ApiColumnTypeSummaryInfo']]]): + if column_types is None: + del self.column_types + return + if not isinstance(column_types, list): + raise TypeError('column_types must be of type list') + if not all([isinstance(t, ApiColumnTypeSummaryInfo) for t in column_types]): + raise TypeError('column_types must contain only items of type ApiColumnTypeSummaryInfo') + self._column_types = column_types + + +class ApiColumnTypeSummaryInfo(KaggleObject): + r""" + Attributes: + column_type (str) + column_count (int) + """ + + def __init__(self): + self._column_type = None + self._column_count = 0 + self._freeze() + + @property + def column_type(self) -> str: + return self._column_type or "" + + @column_type.setter + def column_type(self, column_type: Optional[str]): + if column_type is None: + del self.column_type + return + if not isinstance(column_type, str): + raise TypeError('column_type must be of type str') + self._column_type = column_type + + @property + def column_count(self) -> int: + return self._column_count + + @column_count.setter + def column_count(self, column_count: int): + if column_count is None: + del self.column_count + return + if not isinstance(column_count, int): + raise TypeError('column_count must be of type int') + self._column_count = column_count + + +class ApiDirectory(KaggleObject): + r""" + Attributes: + name (str) + relative_url (str) + total_directories (int) + total_files (int) + total_children (int) + """ + + def __init__(self): + self._name = None + self._relative_url = None + self._total_directories = 0 + self._total_files = 0 + self._total_children = 0 + self._freeze() + + @property + def name(self) -> str: + return self._name or "" + + @name.setter + def name(self, name: Optional[str]): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + @property + def relative_url(self) -> str: + return self._relative_url or "" + + @relative_url.setter + def relative_url(self, relative_url: Optional[str]): + if relative_url is None: + del self.relative_url + return + if not isinstance(relative_url, str): + raise TypeError('relative_url must be of type str') + self._relative_url = relative_url + + @property + def total_directories(self) -> int: + return self._total_directories + + @total_directories.setter + def total_directories(self, total_directories: int): + if total_directories is None: + del self.total_directories + return + if not isinstance(total_directories, int): + raise TypeError('total_directories must be of type int') + self._total_directories = total_directories + + @property + def total_files(self) -> int: + 
return self._total_files + + @total_files.setter + def total_files(self, total_files: int): + if total_files is None: + del self.total_files + return + if not isinstance(total_files, int): + raise TypeError('total_files must be of type int') + self._total_files = total_files + + @property + def total_children(self) -> int: + return self._total_children + + @total_children.setter + def total_children(self, total_children: int): + if total_children is None: + del self.total_children + return + if not isinstance(total_children, int): + raise TypeError('total_children must be of type int') + self._total_children = total_children + + +class ApiFileExtensionSummaryInfo(KaggleObject): + r""" + Attributes: + extension (str) + file_count (int) + total_size (int) + """ + + def __init__(self): + self._extension = "" + self._file_count = 0 + self._total_size = 0 + self._freeze() + + @property + def extension(self) -> str: + return self._extension + + @extension.setter + def extension(self, extension: str): + if extension is None: + del self.extension + return + if not isinstance(extension, str): + raise TypeError('extension must be of type str') + self._extension = extension + + @property + def file_count(self) -> int: + return self._file_count + + @file_count.setter + def file_count(self, file_count: int): + if file_count is None: + del self.file_count + return + if not isinstance(file_count, int): + raise TypeError('file_count must be of type int') + self._file_count = file_count + + @property + def total_size(self) -> int: + return self._total_size + + @total_size.setter + def total_size(self, total_size: int): + if total_size is None: + del self.total_size + return + if not isinstance(total_size, int): + raise TypeError('total_size must be of type int') + self._total_size = total_size + + +ApiDirectoryContent._fields = [ + FieldMetadata("directories", "directories", "_directories", ApiDirectory, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("files", "files", "_files", ApiFile, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("totalChildren", "total_children", "_total_children", int, 0, PredefinedSerializer()), + FieldMetadata("totalDirectories", "total_directories", "_total_directories", int, 0, PredefinedSerializer()), + FieldMetadata("totalFiles", "total_files", "_total_files", int, 0, PredefinedSerializer()), + FieldMetadata("nextPageToken", "next_page_token", "_next_page_token", str, "", PredefinedSerializer()), +] + +ApiFile._fields = [ + FieldMetadata("name", "name", "_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("creationDate", "creation_date", "_creation_date", datetime, None, DateTimeSerializer()), + FieldMetadata("totalBytes", "total_bytes", "_total_bytes", int, 0, PredefinedSerializer()), + FieldMetadata("relativeUrl", "relative_url", "_relative_url", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), +] + +ApiFilesSummary._fields = [ + FieldMetadata("fileSummaryInfo", "file_summary_info", "_file_summary_info", ApiFileSummaryInfo, None, KaggleObjectSerializer()), + FieldMetadata("columnSummaryInfo", "column_summary_info", "_column_summary_info", ApiColumnSummaryInfo, None, KaggleObjectSerializer()), +] + +ApiFileSummaryInfo._fields = [ + FieldMetadata("totalFileCount", "total_file_count", "_total_file_count", int, 0, PredefinedSerializer()), + FieldMetadata("fileTypes", "file_types", "_file_types", 
ApiFileExtensionSummaryInfo, [], ListSerializer(KaggleObjectSerializer())), +] + +ApiColumnSummaryInfo._fields = [ + FieldMetadata("totalColumnCount", "total_column_count", "_total_column_count", int, 0, PredefinedSerializer()), + FieldMetadata("columnTypes", "column_types", "_column_types", ApiColumnTypeSummaryInfo, [], ListSerializer(KaggleObjectSerializer())), +] + +ApiColumnTypeSummaryInfo._fields = [ + FieldMetadata("columnType", "column_type", "_column_type", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("columnCount", "column_count", "_column_count", int, 0, PredefinedSerializer()), +] + +ApiDirectory._fields = [ + FieldMetadata("name", "name", "_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("relativeUrl", "relative_url", "_relative_url", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("totalDirectories", "total_directories", "_total_directories", int, 0, PredefinedSerializer()), + FieldMetadata("totalFiles", "total_files", "_total_files", int, 0, PredefinedSerializer()), + FieldMetadata("totalChildren", "total_children", "_total_children", int, 0, PredefinedSerializer()), +] + +ApiFileExtensionSummaryInfo._fields = [ + FieldMetadata("extension", "extension", "_extension", str, "", PredefinedSerializer()), + FieldMetadata("fileCount", "file_count", "_file_count", int, 0, PredefinedSerializer()), + FieldMetadata("totalSize", "total_size", "_total_size", int, 0, PredefinedSerializer()), +] + diff --git a/src/ksdk/datasets/services/__init__.py b/src/ksdk/datasets/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/datasets/services/dataset_api_service.py b/src/ksdk/datasets/services/dataset_api_service.py new file mode 100644 index 00000000..09b92ce4 --- /dev/null +++ b/src/ksdk/datasets/services/dataset_api_service.py @@ -0,0 +1,195 @@ +from kagglesdk.common.types.http_redirect import HttpRedirect +from kagglesdk.datasets.databundles.types.databundle_api_types import ApiDirectoryContent, ApiFilesSummary +from kagglesdk.datasets.types.dataset_api_service import ApiCreateDatasetRequest, ApiCreateDatasetResponse, ApiCreateDatasetVersionByIdRequest, ApiCreateDatasetVersionRequest, ApiDataset, ApiDeleteDatasetRequest, ApiDeleteDatasetResponse, ApiDownloadDatasetRawRequest, ApiDownloadDatasetRequest, ApiGetDatasetFilesSummaryRequest, ApiGetDatasetMetadataRequest, ApiGetDatasetMetadataResponse, ApiGetDatasetRequest, ApiGetDatasetStatusRequest, ApiGetDatasetStatusResponse, ApiListDatasetFilesRequest, ApiListDatasetFilesResponse, ApiListDatasetsRequest, ApiListDatasetsResponse, ApiListTreeDatasetFilesRequest, ApiUpdateDatasetMetadataRequest, ApiUpdateDatasetMetadataResponse, ApiUploadDatasetFileRequest, ApiUploadDatasetFileResponse +from kagglesdk.kaggle_http_client import KaggleHttpClient + +class DatasetApiClient(object): + + def __init__(self, client: KaggleHttpClient): + self._client = client + + def list_datasets(self, request: ApiListDatasetsRequest = None) -> ApiListDatasetsResponse: + r""" + Args: + request (ApiListDatasetsRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiListDatasetsRequest() + + return self._client.call("datasets.DatasetApiService", "ListDatasets", request, ApiListDatasetsResponse) + + def get_dataset(self, request: ApiGetDatasetRequest = None) -> ApiDataset: + r""" + Args: + request (ApiGetDatasetRequest): + The request object; initialized to empty instance if not specified. 
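Illustrative sketch (editor's note, not part of the diff): wiring the DatasetApiClient defined in this file to a transport. The KaggleHttpClient import is taken from this file's own imports; how that client is configured (credentials, environment) is outside this diff and left to the caller.

from kagglesdk.kaggle_http_client import KaggleHttpClient

def first_dataset_page(http_client: KaggleHttpClient):
    datasets_api = DatasetApiClient(http_client)
    # Passing no request is allowed: list_datasets substitutes an empty
    # ApiListDatasetsRequest before issuing the call (see the method above).
    return datasets_api.list_datasets()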
+ """ + + if request is None: + request = ApiGetDatasetRequest() + + return self._client.call("datasets.DatasetApiService", "GetDataset", request, ApiDataset) + + def list_dataset_files(self, request: ApiListDatasetFilesRequest = None) -> ApiListDatasetFilesResponse: + r""" + Args: + request (ApiListDatasetFilesRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiListDatasetFilesRequest() + + return self._client.call("datasets.DatasetApiService", "ListDatasetFiles", request, ApiListDatasetFilesResponse) + + def list_tree_dataset_files(self, request: ApiListTreeDatasetFilesRequest = None) -> ApiDirectoryContent: + r""" + Args: + request (ApiListTreeDatasetFilesRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiListTreeDatasetFilesRequest() + + return self._client.call("datasets.DatasetApiService", "ListTreeDatasetFiles", request, ApiDirectoryContent) + + def get_dataset_metadata(self, request: ApiGetDatasetMetadataRequest = None) -> ApiGetDatasetMetadataResponse: + r""" + Args: + request (ApiGetDatasetMetadataRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiGetDatasetMetadataRequest() + + return self._client.call("datasets.DatasetApiService", "GetDatasetMetadata", request, ApiGetDatasetMetadataResponse) + + def update_dataset_metadata(self, request: ApiUpdateDatasetMetadataRequest = None) -> ApiUpdateDatasetMetadataResponse: + r""" + Args: + request (ApiUpdateDatasetMetadataRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiUpdateDatasetMetadataRequest() + + return self._client.call("datasets.DatasetApiService", "UpdateDatasetMetadata", request, ApiUpdateDatasetMetadataResponse) + + def download_dataset(self, request: ApiDownloadDatasetRequest = None) -> HttpRedirect: + r""" + Args: + request (ApiDownloadDatasetRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiDownloadDatasetRequest() + + return self._client.call("datasets.DatasetApiService", "DownloadDataset", request, HttpRedirect) + + def download_dataset_raw(self, request: ApiDownloadDatasetRawRequest = None) -> HttpRedirect: + r""" + Note: This API method has extremely low usage, and can be considered for + deprecation. The existing DownloadDataset RPC, with `raw=true` set on the + request, is equivalent. + + Args: + request (ApiDownloadDatasetRawRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiDownloadDatasetRawRequest() + + return self._client.call("datasets.DatasetApiService", "DownloadDatasetRaw", request, HttpRedirect) + + def create_dataset_version(self, request: ApiCreateDatasetVersionRequest = None) -> ApiCreateDatasetResponse: + r""" + Args: + request (ApiCreateDatasetVersionRequest): + The request object; initialized to empty instance if not specified. 
+ """ + + if request is None: + request = ApiCreateDatasetVersionRequest() + + return self._client.call("datasets.DatasetApiService", "CreateDatasetVersion", request, ApiCreateDatasetResponse) + + def create_dataset_version_by_id(self, request: ApiCreateDatasetVersionByIdRequest = None) -> ApiCreateDatasetResponse: + r""" + Args: + request (ApiCreateDatasetVersionByIdRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiCreateDatasetVersionByIdRequest() + + return self._client.call("datasets.DatasetApiService", "CreateDatasetVersionById", request, ApiCreateDatasetResponse) + + def create_dataset(self, request: ApiCreateDatasetRequest = None) -> ApiCreateDatasetResponse: + r""" + Args: + request (ApiCreateDatasetRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiCreateDatasetRequest() + + return self._client.call("datasets.DatasetApiService", "CreateDataset", request, ApiCreateDatasetResponse) + + def get_dataset_status(self, request: ApiGetDatasetStatusRequest = None) -> ApiGetDatasetStatusResponse: + r""" + Args: + request (ApiGetDatasetStatusRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiGetDatasetStatusRequest() + + return self._client.call("datasets.DatasetApiService", "GetDatasetStatus", request, ApiGetDatasetStatusResponse) + + def get_dataset_files_summary(self, request: ApiGetDatasetFilesSummaryRequest = None) -> ApiFilesSummary: + r""" + Args: + request (ApiGetDatasetFilesSummaryRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiGetDatasetFilesSummaryRequest() + + return self._client.call("datasets.DatasetApiService", "GetDatasetFilesSummary", request, ApiFilesSummary) + + def upload_dataset_file(self, request: ApiUploadDatasetFileRequest = None) -> ApiUploadDatasetFileResponse: + r""" + Deprecated. Use the new unified BlobApiService#StartBlobUpload rpc. + + Args: + request (ApiUploadDatasetFileRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiUploadDatasetFileRequest() + + return self._client.call("datasets.DatasetApiService", "UploadDatasetFile", request, ApiUploadDatasetFileResponse) + + def delete_dataset(self, request: ApiDeleteDatasetRequest = None) -> ApiDeleteDatasetResponse: + r""" + Args: + request (ApiDeleteDatasetRequest): + The request object; initialized to empty instance if not specified. 
+ """ + + if request is None: + request = ApiDeleteDatasetRequest() + + return self._client.call("datasets.DatasetApiService", "DeleteDataset", request, ApiDeleteDatasetResponse) diff --git a/src/ksdk/datasets/types/__init__.py b/src/ksdk/datasets/types/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/datasets/types/dataset_api_service.py b/src/ksdk/datasets/types/dataset_api_service.py new file mode 100644 index 00000000..873d6187 --- /dev/null +++ b/src/ksdk/datasets/types/dataset_api_service.py @@ -0,0 +1,3047 @@ +from datetime import datetime +from kagglesdk.datasets.types.dataset_enums import DatabundleVersionStatus, DatasetFileTypeGroup, DatasetLicenseGroup, DatasetSelectionGroup, DatasetSizeGroup, DatasetSortBy, DatasetViewedGroup +from kagglesdk.datasets.types.dataset_types import DatasetInfo, DatasetSettings +from kagglesdk.kaggle_object import * +from typing import Optional, List + +class ApiCreateDatasetRequest(KaggleObject): + r""" + Attributes: + id (int) + owner_slug (str) + slug (str) + title (str) + license_name (str) + is_private (bool) + files (ApiDatasetNewFile) + subtitle (str) + description (str) + category_ids (str) + directories (ApiUploadDirectoryInfo) + """ + + def __init__(self): + self._id = None + self._owner_slug = None + self._slug = None + self._title = None + self._license_name = None + self._is_private = False + self._files = [] + self._subtitle = None + self._description = None + self._category_ids = [] + self._directories = [] + self._freeze() + + @property + def id(self) -> int: + return self._id or 0 + + @id.setter + def id(self, id: Optional[int]): + if id is None: + del self.id + return + if not isinstance(id, int): + raise TypeError('id must be of type int') + self._id = id + + @property + def owner_slug(self) -> str: + return self._owner_slug or "" + + @owner_slug.setter + def owner_slug(self, owner_slug: Optional[str]): + if owner_slug is None: + del self.owner_slug + return + if not isinstance(owner_slug, str): + raise TypeError('owner_slug must be of type str') + self._owner_slug = owner_slug + + @property + def slug(self) -> str: + return self._slug or "" + + @slug.setter + def slug(self, slug: Optional[str]): + if slug is None: + del self.slug + return + if not isinstance(slug, str): + raise TypeError('slug must be of type str') + self._slug = slug + + @property + def title(self) -> str: + return self._title or "" + + @title.setter + def title(self, title: Optional[str]): + if title is None: + del self.title + return + if not isinstance(title, str): + raise TypeError('title must be of type str') + self._title = title + + @property + def license_name(self) -> str: + return self._license_name or "" + + @license_name.setter + def license_name(self, license_name: Optional[str]): + if license_name is None: + del self.license_name + return + if not isinstance(license_name, str): + raise TypeError('license_name must be of type str') + self._license_name = license_name + + @property + def is_private(self) -> bool: + return self._is_private + + @is_private.setter + def is_private(self, is_private: bool): + if is_private is None: + del self.is_private + return + if not isinstance(is_private, bool): + raise TypeError('is_private must be of type bool') + self._is_private = is_private + + @property + def files(self) -> Optional[List[Optional['ApiDatasetNewFile']]]: + return self._files + + @files.setter + def files(self, files: Optional[List[Optional['ApiDatasetNewFile']]]): + if files is None: + del self.files + return + if 
not isinstance(files, list): + raise TypeError('files must be of type list') + if not all([isinstance(t, ApiDatasetNewFile) for t in files]): + raise TypeError('files must contain only items of type ApiDatasetNewFile') + self._files = files + + @property + def subtitle(self) -> str: + return self._subtitle or "" + + @subtitle.setter + def subtitle(self, subtitle: Optional[str]): + if subtitle is None: + del self.subtitle + return + if not isinstance(subtitle, str): + raise TypeError('subtitle must be of type str') + self._subtitle = subtitle + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: Optional[str]): + if description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + @property + def category_ids(self) -> Optional[List[str]]: + return self._category_ids + + @category_ids.setter + def category_ids(self, category_ids: Optional[List[str]]): + if category_ids is None: + del self.category_ids + return + if not isinstance(category_ids, list): + raise TypeError('category_ids must be of type list') + if not all([isinstance(t, str) for t in category_ids]): + raise TypeError('category_ids must contain only items of type str') + self._category_ids = category_ids + + @property + def directories(self) -> Optional[List[Optional['ApiUploadDirectoryInfo']]]: + return self._directories + + @directories.setter + def directories(self, directories: Optional[List[Optional['ApiUploadDirectoryInfo']]]): + if directories is None: + del self.directories + return + if not isinstance(directories, list): + raise TypeError('directories must be of type list') + if not all([isinstance(t, ApiUploadDirectoryInfo) for t in directories]): + raise TypeError('directories must contain only items of type ApiUploadDirectoryInfo') + self._directories = directories + + def endpoint(self): + path = '/api/v1/datasets/create/new' + return path.format_map(self.to_field_map(self)) + + + @staticmethod + def method(): + return 'POST' + + @staticmethod + def body_fields(): + return '*' + + +class ApiCreateDatasetResponse(KaggleObject): + r""" + Attributes: + ref (str) + url (str) + status (str) + error (str) + invalid_tags (str) + """ + + def __init__(self): + self._ref = None + self._url = None + self._status = None + self._error = None + self._invalid_tags = [] + self._freeze() + + @property + def ref(self) -> str: + return self._ref or "" + + @ref.setter + def ref(self, ref: Optional[str]): + if ref is None: + del self.ref + return + if not isinstance(ref, str): + raise TypeError('ref must be of type str') + self._ref = ref + + @property + def url(self) -> str: + return self._url or "" + + @url.setter + def url(self, url: Optional[str]): + if url is None: + del self.url + return + if not isinstance(url, str): + raise TypeError('url must be of type str') + self._url = url + + @property + def status(self) -> str: + return self._status or "" + + @status.setter + def status(self, status: Optional[str]): + if status is None: + del self.status + return + if not isinstance(status, str): + raise TypeError('status must be of type str') + self._status = status + + @property + def error(self) -> str: + return self._error or "" + + @error.setter + def error(self, error: Optional[str]): + if error is None: + del self.error + return + if not isinstance(error, str): + raise TypeError('error must be of type str') + self._error = error + + 
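# invalid_tags is also exposed below as the read-only camelCase alias invalidTags. +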
@property + def invalid_tags(self) -> Optional[List[str]]: + return self._invalid_tags + + @invalid_tags.setter + def invalid_tags(self, invalid_tags: Optional[List[str]]): + if invalid_tags is None: + del self.invalid_tags + return + if not isinstance(invalid_tags, list): + raise TypeError('invalid_tags must be of type list') + if not all([isinstance(t, str) for t in invalid_tags]): + raise TypeError('invalid_tags must contain only items of type str') + self._invalid_tags = invalid_tags + + @property + def invalidTags(self): + return self.invalid_tags + + +class ApiCreateDatasetVersionByIdRequest(KaggleObject): + r""" + Attributes: + id (int) + body (ApiCreateDatasetVersionRequestBody) + """ + + def __init__(self): + self._id = 0 + self._body = None + self._freeze() + + @property + def id(self) -> int: + return self._id + + @id.setter + def id(self, id: int): + if id is None: + del self.id + return + if not isinstance(id, int): + raise TypeError('id must be of type int') + self._id = id + + @property + def body(self) -> Optional['ApiCreateDatasetVersionRequestBody']: + return self._body + + @body.setter + def body(self, body: Optional['ApiCreateDatasetVersionRequestBody']): + if body is None: + del self.body + return + if not isinstance(body, ApiCreateDatasetVersionRequestBody): + raise TypeError('body must be of type ApiCreateDatasetVersionRequestBody') + self._body = body + + def endpoint(self): + path = '/api/v1/datasets/create/version/{id}' + return path.format_map(self.to_field_map(self)) + + + @staticmethod + def method(): + return 'POST' + + @staticmethod + def body_fields(): + return 'body' + + +class ApiCreateDatasetVersionRequest(KaggleObject): + r""" + Attributes: + owner_slug (str) + dataset_slug (str) + body (ApiCreateDatasetVersionRequestBody) + """ + + def __init__(self): + self._owner_slug = "" + self._dataset_slug = "" + self._body = None + self._freeze() + + @property + def owner_slug(self) -> str: + return self._owner_slug + + @owner_slug.setter + def owner_slug(self, owner_slug: str): + if owner_slug is None: + del self.owner_slug + return + if not isinstance(owner_slug, str): + raise TypeError('owner_slug must be of type str') + self._owner_slug = owner_slug + + @property + def dataset_slug(self) -> str: + return self._dataset_slug + + @dataset_slug.setter + def dataset_slug(self, dataset_slug: str): + if dataset_slug is None: + del self.dataset_slug + return + if not isinstance(dataset_slug, str): + raise TypeError('dataset_slug must be of type str') + self._dataset_slug = dataset_slug + + @property + def body(self) -> Optional['ApiCreateDatasetVersionRequestBody']: + return self._body + + @body.setter + def body(self, body: Optional['ApiCreateDatasetVersionRequestBody']): + if body is None: + del self.body + return + if not isinstance(body, ApiCreateDatasetVersionRequestBody): + raise TypeError('body must be of type ApiCreateDatasetVersionRequestBody') + self._body = body + + def endpoint(self): + path = '/api/v1/datasets/create/version/{owner_slug}/{dataset_slug}' + return path.format_map(self.to_field_map(self)) + + + @staticmethod + def method(): + return 'POST' + + @staticmethod + def body_fields(): + return 'body' + + +class ApiCreateDatasetVersionRequestBody(KaggleObject): + r""" + Attributes: + version_notes (str) + delete_old_versions (bool) + files (ApiDatasetNewFile) + subtitle (str) + description (str) + category_ids (str) + directories (ApiUploadDirectoryInfo) + """ + + def __init__(self): + self._version_notes = None + self._delete_old_versions = False 
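+ # Repeated fields (files, category_ids, directories) default to empty lists; optional scalar fields default to None.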
+ self._files = [] + self._subtitle = None + self._description = None + self._category_ids = [] + self._directories = [] + self._freeze() + + @property + def version_notes(self) -> str: + return self._version_notes or "" + + @version_notes.setter + def version_notes(self, version_notes: Optional[str]): + if version_notes is None: + del self.version_notes + return + if not isinstance(version_notes, str): + raise TypeError('version_notes must be of type str') + self._version_notes = version_notes + + @property + def delete_old_versions(self) -> bool: + return self._delete_old_versions + + @delete_old_versions.setter + def delete_old_versions(self, delete_old_versions: bool): + if delete_old_versions is None: + del self.delete_old_versions + return + if not isinstance(delete_old_versions, bool): + raise TypeError('delete_old_versions must be of type bool') + self._delete_old_versions = delete_old_versions + + @property + def files(self) -> Optional[List[Optional['ApiDatasetNewFile']]]: + return self._files + + @files.setter + def files(self, files: Optional[List[Optional['ApiDatasetNewFile']]]): + if files is None: + del self.files + return + if not isinstance(files, list): + raise TypeError('files must be of type list') + if not all([isinstance(t, ApiDatasetNewFile) for t in files]): + raise TypeError('files must contain only items of type ApiDatasetNewFile') + self._files = files + + @property + def subtitle(self) -> str: + return self._subtitle or "" + + @subtitle.setter + def subtitle(self, subtitle: Optional[str]): + if subtitle is None: + del self.subtitle + return + if not isinstance(subtitle, str): + raise TypeError('subtitle must be of type str') + self._subtitle = subtitle + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: Optional[str]): + if description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + @property + def category_ids(self) -> Optional[List[str]]: + return self._category_ids + + @category_ids.setter + def category_ids(self, category_ids: Optional[List[str]]): + if category_ids is None: + del self.category_ids + return + if not isinstance(category_ids, list): + raise TypeError('category_ids must be of type list') + if not all([isinstance(t, str) for t in category_ids]): + raise TypeError('category_ids must contain only items of type str') + self._category_ids = category_ids + + @property + def directories(self) -> Optional[List[Optional['ApiUploadDirectoryInfo']]]: + return self._directories + + @directories.setter + def directories(self, directories: Optional[List[Optional['ApiUploadDirectoryInfo']]]): + if directories is None: + del self.directories + return + if not isinstance(directories, list): + raise TypeError('directories must be of type list') + if not all([isinstance(t, ApiUploadDirectoryInfo) for t in directories]): + raise TypeError('directories must contain only items of type ApiUploadDirectoryInfo') + self._directories = directories + + +class ApiDataset(KaggleObject): + r""" + Attributes: + id (int) + ref (str) + subtitle (str) + creator_name (str) + creator_url (str) + total_bytes (int) + url (str) + last_updated (datetime) + download_count (int) + is_private (bool) + is_featured (bool) + license_name (str) + description (str) + owner_name (str) + owner_ref (str) + kernel_count (int) + title (str) + topic_count (int) + view_count (int) + 
vote_count (int) + current_version_number (int) + usability_rating (float) + tags (ApiCategory) + files (ApiDatasetFile) + versions (ApiDatasetVersion) + thumbnail_image_url (str) + """ + + def __init__(self): + self._id = 0 + self._ref = "" + self._subtitle = None + self._creator_name = None + self._creator_url = None + self._total_bytes = None + self._url = None + self._last_updated = None + self._download_count = 0 + self._is_private = False + self._is_featured = False + self._license_name = None + self._description = None + self._owner_name = None + self._owner_ref = None + self._kernel_count = 0 + self._title = None + self._topic_count = 0 + self._view_count = 0 + self._vote_count = 0 + self._current_version_number = None + self._usability_rating = None + self._tags = [] + self._files = [] + self._versions = [] + self._thumbnail_image_url = None + self._freeze() + + @property + def id(self) -> int: + return self._id + + @id.setter + def id(self, id: int): + if id is None: + del self.id + return + if not isinstance(id, int): + raise TypeError('id must be of type int') + self._id = id + + @property + def ref(self) -> str: + return self._ref + + @ref.setter + def ref(self, ref: str): + if ref is None: + del self.ref + return + if not isinstance(ref, str): + raise TypeError('ref must be of type str') + self._ref = ref + + @property + def subtitle(self) -> str: + return self._subtitle or "" + + @subtitle.setter + def subtitle(self, subtitle: Optional[str]): + if subtitle is None: + del self.subtitle + return + if not isinstance(subtitle, str): + raise TypeError('subtitle must be of type str') + self._subtitle = subtitle + + @property + def creator_name(self) -> str: + return self._creator_name or "" + + @creator_name.setter + def creator_name(self, creator_name: Optional[str]): + if creator_name is None: + del self.creator_name + return + if not isinstance(creator_name, str): + raise TypeError('creator_name must be of type str') + self._creator_name = creator_name + + @property + def creator_url(self) -> str: + return self._creator_url or "" + + @creator_url.setter + def creator_url(self, creator_url: Optional[str]): + if creator_url is None: + del self.creator_url + return + if not isinstance(creator_url, str): + raise TypeError('creator_url must be of type str') + self._creator_url = creator_url + + @property + def total_bytes(self) -> int: + return self._total_bytes or 0 + + @total_bytes.setter + def total_bytes(self, total_bytes: Optional[int]): + if total_bytes is None: + del self.total_bytes + return + if not isinstance(total_bytes, int): + raise TypeError('total_bytes must be of type int') + self._total_bytes = total_bytes + + @property + def url(self) -> str: + return self._url or "" + + @url.setter + def url(self, url: Optional[str]): + if url is None: + del self.url + return + if not isinstance(url, str): + raise TypeError('url must be of type str') + self._url = url + + @property + def last_updated(self) -> datetime: + return self._last_updated + + @last_updated.setter + def last_updated(self, last_updated: datetime): + if last_updated is None: + del self.last_updated + return + if not isinstance(last_updated, datetime): + raise TypeError('last_updated must be of type datetime') + self._last_updated = last_updated + + @property + def download_count(self) -> int: + return self._download_count + + @download_count.setter + def download_count(self, download_count: int): + if download_count is None: + del self.download_count + return + if not isinstance(download_count, int): + raise 
TypeError('download_count must be of type int') + self._download_count = download_count + + @property + def is_private(self) -> bool: + return self._is_private + + @is_private.setter + def is_private(self, is_private: bool): + if is_private is None: + del self.is_private + return + if not isinstance(is_private, bool): + raise TypeError('is_private must be of type bool') + self._is_private = is_private + + @property + def is_featured(self) -> bool: + return self._is_featured + + @is_featured.setter + def is_featured(self, is_featured: bool): + if is_featured is None: + del self.is_featured + return + if not isinstance(is_featured, bool): + raise TypeError('is_featured must be of type bool') + self._is_featured = is_featured + + @property + def license_name(self) -> str: + return self._license_name or "" + + @license_name.setter + def license_name(self, license_name: Optional[str]): + if license_name is None: + del self.license_name + return + if not isinstance(license_name, str): + raise TypeError('license_name must be of type str') + self._license_name = license_name + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: Optional[str]): + if description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + @property + def owner_name(self) -> str: + return self._owner_name or "" + + @owner_name.setter + def owner_name(self, owner_name: Optional[str]): + if owner_name is None: + del self.owner_name + return + if not isinstance(owner_name, str): + raise TypeError('owner_name must be of type str') + self._owner_name = owner_name + + @property + def owner_ref(self) -> str: + return self._owner_ref or "" + + @owner_ref.setter + def owner_ref(self, owner_ref: Optional[str]): + if owner_ref is None: + del self.owner_ref + return + if not isinstance(owner_ref, str): + raise TypeError('owner_ref must be of type str') + self._owner_ref = owner_ref + + @property + def kernel_count(self) -> int: + return self._kernel_count + + @kernel_count.setter + def kernel_count(self, kernel_count: int): + if kernel_count is None: + del self.kernel_count + return + if not isinstance(kernel_count, int): + raise TypeError('kernel_count must be of type int') + self._kernel_count = kernel_count + + @property + def title(self) -> str: + return self._title or "" + + @title.setter + def title(self, title: Optional[str]): + if title is None: + del self.title + return + if not isinstance(title, str): + raise TypeError('title must be of type str') + self._title = title + + @property + def topic_count(self) -> int: + return self._topic_count + + @topic_count.setter + def topic_count(self, topic_count: int): + if topic_count is None: + del self.topic_count + return + if not isinstance(topic_count, int): + raise TypeError('topic_count must be of type int') + self._topic_count = topic_count + + @property + def view_count(self) -> int: + return self._view_count + + @view_count.setter + def view_count(self, view_count: int): + if view_count is None: + del self.view_count + return + if not isinstance(view_count, int): + raise TypeError('view_count must be of type int') + self._view_count = view_count + + @property + def vote_count(self) -> int: + return self._vote_count + + @vote_count.setter + def vote_count(self, vote_count: int): + if vote_count is None: + del self.vote_count + return + if not isinstance(vote_count, int): + raise 
TypeError('vote_count must be of type int') + self._vote_count = vote_count + + @property + def current_version_number(self) -> int: + return self._current_version_number or 0 + + @current_version_number.setter + def current_version_number(self, current_version_number: Optional[int]): + if current_version_number is None: + del self.current_version_number + return + if not isinstance(current_version_number, int): + raise TypeError('current_version_number must be of type int') + self._current_version_number = current_version_number + + @property + def usability_rating(self) -> float: + return self._usability_rating or 0.0 + + @usability_rating.setter + def usability_rating(self, usability_rating: Optional[float]): + if usability_rating is None: + del self.usability_rating + return + if not isinstance(usability_rating, float): + raise TypeError('usability_rating must be of type float') + self._usability_rating = usability_rating + + @property + def tags(self) -> Optional[List[Optional['ApiCategory']]]: + return self._tags + + @tags.setter + def tags(self, tags: Optional[List[Optional['ApiCategory']]]): + if tags is None: + del self.tags + return + if not isinstance(tags, list): + raise TypeError('tags must be of type list') + if not all([isinstance(t, ApiCategory) for t in tags]): + raise TypeError('tags must contain only items of type ApiCategory') + self._tags = tags + + @property + def files(self) -> Optional[List[Optional['ApiDatasetFile']]]: + return self._files + + @files.setter + def files(self, files: Optional[List[Optional['ApiDatasetFile']]]): + if files is None: + del self.files + return + if not isinstance(files, list): + raise TypeError('files must be of type list') + if not all([isinstance(t, ApiDatasetFile) for t in files]): + raise TypeError('files must contain only items of type ApiDatasetFile') + self._files = files + + @property + def versions(self) -> Optional[List[Optional['ApiDatasetVersion']]]: + return self._versions + + @versions.setter + def versions(self, versions: Optional[List[Optional['ApiDatasetVersion']]]): + if versions is None: + del self.versions + return + if not isinstance(versions, list): + raise TypeError('versions must be of type list') + if not all([isinstance(t, ApiDatasetVersion) for t in versions]): + raise TypeError('versions must contain only items of type ApiDatasetVersion') + self._versions = versions + + @property + def thumbnail_image_url(self) -> str: + return self._thumbnail_image_url or "" + + @thumbnail_image_url.setter + def thumbnail_image_url(self, thumbnail_image_url: Optional[str]): + if thumbnail_image_url is None: + del self.thumbnail_image_url + return + if not isinstance(thumbnail_image_url, str): + raise TypeError('thumbnail_image_url must be of type str') + self._thumbnail_image_url = thumbnail_image_url + + +class ApiDatasetFile(KaggleObject): + r""" + Attributes: + ref (str) + dataset_ref (str) + owner_ref (str) + name (str) + creation_date (datetime) + description (str) + file_type (str) + url (str) + total_bytes (int) + columns (ApiDatasetColumn) + """ + + def __init__(self): + self._ref = "" + self._dataset_ref = None + self._owner_ref = None + self._name = None + self._creation_date = None + self._description = None + self._file_type = None + self._url = None + self._total_bytes = 0 + self._columns = [] + self._freeze() + + @property + def ref(self) -> str: + return self._ref + + @ref.setter + def ref(self, ref: str): + if ref is None: + del self.ref + return + if not isinstance(ref, str): + raise TypeError('ref must be of 
type str') + self._ref = ref + + @property + def dataset_ref(self) -> str: + return self._dataset_ref or "" + + @dataset_ref.setter + def dataset_ref(self, dataset_ref: Optional[str]): + if dataset_ref is None: + del self.dataset_ref + return + if not isinstance(dataset_ref, str): + raise TypeError('dataset_ref must be of type str') + self._dataset_ref = dataset_ref + + @property + def owner_ref(self) -> str: + return self._owner_ref or "" + + @owner_ref.setter + def owner_ref(self, owner_ref: Optional[str]): + if owner_ref is None: + del self.owner_ref + return + if not isinstance(owner_ref, str): + raise TypeError('owner_ref must be of type str') + self._owner_ref = owner_ref + + @property + def name(self) -> str: + return self._name or "" + + @name.setter + def name(self, name: Optional[str]): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + @property + def creation_date(self) -> datetime: + return self._creation_date + + @creation_date.setter + def creation_date(self, creation_date: datetime): + if creation_date is None: + del self.creation_date + return + if not isinstance(creation_date, datetime): + raise TypeError('creation_date must be of type datetime') + self._creation_date = creation_date + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: Optional[str]): + if description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + @property + def file_type(self) -> str: + return self._file_type or "" + + @file_type.setter + def file_type(self, file_type: Optional[str]): + if file_type is None: + del self.file_type + return + if not isinstance(file_type, str): + raise TypeError('file_type must be of type str') + self._file_type = file_type + + @property + def url(self) -> str: + return self._url or "" + + @url.setter + def url(self, url: Optional[str]): + if url is None: + del self.url + return + if not isinstance(url, str): + raise TypeError('url must be of type str') + self._url = url + + @property + def total_bytes(self) -> int: + return self._total_bytes + + @total_bytes.setter + def total_bytes(self, total_bytes: int): + if total_bytes is None: + del self.total_bytes + return + if not isinstance(total_bytes, int): + raise TypeError('total_bytes must be of type int') + self._total_bytes = total_bytes + + @property + def columns(self) -> Optional[List[Optional['ApiDatasetColumn']]]: + return self._columns + + @columns.setter + def columns(self, columns: Optional[List[Optional['ApiDatasetColumn']]]): + if columns is None: + del self.columns + return + if not isinstance(columns, list): + raise TypeError('columns must be of type list') + if not all([isinstance(t, ApiDatasetColumn) for t in columns]): + raise TypeError('columns must contain only items of type ApiDatasetColumn') + self._columns = columns + + +class ApiDatasetNewFile(KaggleObject): + r""" + Attributes: + token (str) + description (str) + columns (ApiDatasetColumn) + """ + + def __init__(self): + self._token = None + self._description = None + self._columns = [] + self._freeze() + + @property + def token(self) -> str: + return self._token or "" + + @token.setter + def token(self, token: Optional[str]): + if token is None: + del self.token + return + if not isinstance(token, str): + raise TypeError('token must be of type str') 
+ self._token = token + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: Optional[str]): + if description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + @property + def columns(self) -> Optional[List[Optional['ApiDatasetColumn']]]: + return self._columns + + @columns.setter + def columns(self, columns: Optional[List[Optional['ApiDatasetColumn']]]): + if columns is None: + del self.columns + return + if not isinstance(columns, list): + raise TypeError('columns must be of type list') + if not all([isinstance(t, ApiDatasetColumn) for t in columns]): + raise TypeError('columns must contain only items of type ApiDatasetColumn') + self._columns = columns + + +class ApiDatasetVersion(KaggleObject): + r""" + Attributes: + version_number (int) + creation_date (datetime) + creator_name (str) + creator_ref (str) + version_notes (str) + status (str) + """ + + def __init__(self): + self._version_number = 0 + self._creation_date = None + self._creator_name = None + self._creator_ref = None + self._version_notes = None + self._status = None + self._freeze() + + @property + def version_number(self) -> int: + return self._version_number + + @version_number.setter + def version_number(self, version_number: int): + if version_number is None: + del self.version_number + return + if not isinstance(version_number, int): + raise TypeError('version_number must be of type int') + self._version_number = version_number + + @property + def creation_date(self) -> datetime: + return self._creation_date + + @creation_date.setter + def creation_date(self, creation_date: datetime): + if creation_date is None: + del self.creation_date + return + if not isinstance(creation_date, datetime): + raise TypeError('creation_date must be of type datetime') + self._creation_date = creation_date + + @property + def creator_name(self) -> str: + return self._creator_name or "" + + @creator_name.setter + def creator_name(self, creator_name: Optional[str]): + if creator_name is None: + del self.creator_name + return + if not isinstance(creator_name, str): + raise TypeError('creator_name must be of type str') + self._creator_name = creator_name + + @property + def creator_ref(self) -> str: + return self._creator_ref or "" + + @creator_ref.setter + def creator_ref(self, creator_ref: Optional[str]): + if creator_ref is None: + del self.creator_ref + return + if not isinstance(creator_ref, str): + raise TypeError('creator_ref must be of type str') + self._creator_ref = creator_ref + + @property + def version_notes(self) -> str: + return self._version_notes or "" + + @version_notes.setter + def version_notes(self, version_notes: Optional[str]): + if version_notes is None: + del self.version_notes + return + if not isinstance(version_notes, str): + raise TypeError('version_notes must be of type str') + self._version_notes = version_notes + + @property + def status(self) -> str: + return self._status or "" + + @status.setter + def status(self, status: Optional[str]): + if status is None: + del self.status + return + if not isinstance(status, str): + raise TypeError('status must be of type str') + self._status = status + + +class ApiDeleteDatasetRequest(KaggleObject): + r""" + Attributes: + owner_slug (str) + dataset_slug (str) + """ + + def __init__(self): + self._owner_slug = "" + self._dataset_slug = "" + self._freeze() + + @property 
+ def owner_slug(self) -> str: + return self._owner_slug + + @owner_slug.setter + def owner_slug(self, owner_slug: str): + if owner_slug is None: + del self.owner_slug + return + if not isinstance(owner_slug, str): + raise TypeError('owner_slug must be of type str') + self._owner_slug = owner_slug + + @property + def dataset_slug(self) -> str: + return self._dataset_slug + + @dataset_slug.setter + def dataset_slug(self, dataset_slug: str): + if dataset_slug is None: + del self.dataset_slug + return + if not isinstance(dataset_slug, str): + raise TypeError('dataset_slug must be of type str') + self._dataset_slug = dataset_slug + + def endpoint(self): + path = '/api/v1/dataset/{owner_slug}/{dataset_slug}/delete' + return path.format_map(self.to_field_map(self)) + + + @staticmethod + def method(): + return 'POST' + + +class ApiDeleteDatasetResponse(KaggleObject): + r""" + Attributes: + error (str) + """ + + def __init__(self): + self._error = None + self._freeze() + + @property + def error(self) -> str: + return self._error or "" + + @error.setter + def error(self, error: Optional[str]): + if error is None: + del self.error + return + if not isinstance(error, str): + raise TypeError('error must be of type str') + self._error = error + + +class ApiDownloadDatasetRawRequest(KaggleObject): + r""" + Attributes: + owner_slug (str) + dataset_slug (str) + file_name (str) + dataset_version_number (int) + """ + + def __init__(self): + self._owner_slug = "" + self._dataset_slug = "" + self._file_name = None + self._dataset_version_number = None + self._freeze() + + @property + def owner_slug(self) -> str: + return self._owner_slug + + @owner_slug.setter + def owner_slug(self, owner_slug: str): + if owner_slug is None: + del self.owner_slug + return + if not isinstance(owner_slug, str): + raise TypeError('owner_slug must be of type str') + self._owner_slug = owner_slug + + @property + def dataset_slug(self) -> str: + return self._dataset_slug + + @dataset_slug.setter + def dataset_slug(self, dataset_slug: str): + if dataset_slug is None: + del self.dataset_slug + return + if not isinstance(dataset_slug, str): + raise TypeError('dataset_slug must be of type str') + self._dataset_slug = dataset_slug + + @property + def file_name(self) -> str: + return self._file_name or "" + + @file_name.setter + def file_name(self, file_name: Optional[str]): + if file_name is None: + del self.file_name + return + if not isinstance(file_name, str): + raise TypeError('file_name must be of type str') + self._file_name = file_name + + @property + def dataset_version_number(self) -> int: + return self._dataset_version_number or 0 + + @dataset_version_number.setter + def dataset_version_number(self, dataset_version_number: Optional[int]): + if dataset_version_number is None: + del self.dataset_version_number + return + if not isinstance(dataset_version_number, int): + raise TypeError('dataset_version_number must be of type int') + self._dataset_version_number = dataset_version_number + + def endpoint(self): + path = '/api/v1/datasets/download-raw/{owner_slug}/{dataset_slug}/{file_name}' + return path.format_map(self.to_field_map(self)) + + @staticmethod + def endpoint_path(): + return '/api/v1/datasets/download-raw/{owner_slug}/{dataset_slug}/{file_name}' + + +class ApiDownloadDatasetRequest(KaggleObject): + r""" + Attributes: + owner_slug (str) + dataset_slug (str) + file_name (str) + dataset_version_number (int) + raw (bool) + hash_link (str) + """ + + def __init__(self): + self._owner_slug = "" + self._dataset_slug = "" + 
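# file_name is optional; when it is left unset, endpoint() below falls back to the whole-dataset download path. +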
self._file_name = None + self._dataset_version_number = None + self._raw = False + self._hash_link = None + self._freeze() + + @property + def owner_slug(self) -> str: + return self._owner_slug + + @owner_slug.setter + def owner_slug(self, owner_slug: str): + if owner_slug is None: + del self.owner_slug + return + if not isinstance(owner_slug, str): + raise TypeError('owner_slug must be of type str') + self._owner_slug = owner_slug + + @property + def dataset_slug(self) -> str: + return self._dataset_slug + + @dataset_slug.setter + def dataset_slug(self, dataset_slug: str): + if dataset_slug is None: + del self.dataset_slug + return + if not isinstance(dataset_slug, str): + raise TypeError('dataset_slug must be of type str') + self._dataset_slug = dataset_slug + + @property + def file_name(self) -> str: + return self._file_name or "" + + @file_name.setter + def file_name(self, file_name: Optional[str]): + if file_name is None: + del self.file_name + return + if not isinstance(file_name, str): + raise TypeError('file_name must be of type str') + self._file_name = file_name + + @property + def dataset_version_number(self) -> int: + return self._dataset_version_number or 0 + + @dataset_version_number.setter + def dataset_version_number(self, dataset_version_number: Optional[int]): + if dataset_version_number is None: + del self.dataset_version_number + return + if not isinstance(dataset_version_number, int): + raise TypeError('dataset_version_number must be of type int') + self._dataset_version_number = dataset_version_number + + @property + def raw(self) -> bool: + return self._raw + + @raw.setter + def raw(self, raw: bool): + if raw is None: + del self.raw + return + if not isinstance(raw, bool): + raise TypeError('raw must be of type bool') + self._raw = raw + + @property + def hash_link(self) -> str: + return self._hash_link or "" + + @hash_link.setter + def hash_link(self, hash_link: Optional[str]): + if hash_link is None: + del self.hash_link + return + if not isinstance(hash_link, str): + raise TypeError('hash_link must be of type str') + self._hash_link = hash_link + + def endpoint(self): + if self.file_name: + path = '/api/v1/datasets/download/{owner_slug}/{dataset_slug}/{file_name}' + else: + path = '/api/v1/datasets/download/{owner_slug}/{dataset_slug}' + return path.format_map(self.to_field_map(self)) + + @staticmethod + def endpoint_path(): + return '/api/v1/datasets/download/{owner_slug}/{dataset_slug}' + + +class ApiGetDatasetFilesSummaryRequest(KaggleObject): + r""" + Attributes: + owner_slug (str) + dataset_slug (str) + dataset_version_number (int) + """ + + def __init__(self): + self._owner_slug = "" + self._dataset_slug = "" + self._dataset_version_number = None + self._freeze() + + @property + def owner_slug(self) -> str: + return self._owner_slug + + @owner_slug.setter + def owner_slug(self, owner_slug: str): + if owner_slug is None: + del self.owner_slug + return + if not isinstance(owner_slug, str): + raise TypeError('owner_slug must be of type str') + self._owner_slug = owner_slug + + @property + def dataset_slug(self) -> str: + return self._dataset_slug + + @dataset_slug.setter + def dataset_slug(self, dataset_slug: str): + if dataset_slug is None: + del self.dataset_slug + return + if not isinstance(dataset_slug, str): + raise TypeError('dataset_slug must be of type str') + self._dataset_slug = dataset_slug + + @property + def dataset_version_number(self) -> int: + return self._dataset_version_number or 0 + + @dataset_version_number.setter + def 
dataset_version_number(self, dataset_version_number: Optional[int]): + if dataset_version_number is None: + del self.dataset_version_number + return + if not isinstance(dataset_version_number, int): + raise TypeError('dataset_version_number must be of type int') + self._dataset_version_number = dataset_version_number + + def endpoint(self): + path = '/api/v1/datasets/files/summary/{owner_slug}/{dataset_slug}' + return path.format_map(self.to_field_map(self)) + + @staticmethod + def endpoint_path(): + return '/api/v1/datasets/files/summary/{owner_slug}/{dataset_slug}' + + +class ApiGetDatasetMetadataRequest(KaggleObject): + r""" + Attributes: + owner_slug (str) + dataset_slug (str) + """ + + def __init__(self): + self._owner_slug = "" + self._dataset_slug = "" + self._freeze() + + @property + def owner_slug(self) -> str: + return self._owner_slug + + @owner_slug.setter + def owner_slug(self, owner_slug: str): + if owner_slug is None: + del self.owner_slug + return + if not isinstance(owner_slug, str): + raise TypeError('owner_slug must be of type str') + self._owner_slug = owner_slug + + @property + def dataset_slug(self) -> str: + return self._dataset_slug + + @dataset_slug.setter + def dataset_slug(self, dataset_slug: str): + if dataset_slug is None: + del self.dataset_slug + return + if not isinstance(dataset_slug, str): + raise TypeError('dataset_slug must be of type str') + self._dataset_slug = dataset_slug + + def endpoint(self): + path = '/api/v1/datasets/metadata/{owner_slug}/{dataset_slug}' + return path.format_map(self.to_field_map(self)) + + @staticmethod + def endpoint_path(): + return '/api/v1/datasets/metadata/{owner_slug}/{dataset_slug}' + + +class ApiGetDatasetMetadataResponse(KaggleObject): + r""" + Attributes: + info (DatasetInfo) + error_message (str) + Required for backwards-compatibility. See + https://github.com/Kaggle/kaggle-api/issues/235 + """ + + def __init__(self): + self._info = None + self._error_message = None + self._freeze() + + @property + def info(self) -> Optional['DatasetInfo']: + return self._info + + @info.setter + def info(self, info: Optional['DatasetInfo']): + if info is None: + del self.info + return + if not isinstance(info, DatasetInfo): + raise TypeError('info must be of type DatasetInfo') + self._info = info + + @property + def error_message(self) -> str: + r""" + Required for backwards-compatibility. 
See + https://github.com/Kaggle/kaggle-api/issues/235 + """ + return self._error_message or "" + + @error_message.setter + def error_message(self, error_message: Optional[str]): + if error_message is None: + del self.error_message + return + if not isinstance(error_message, str): + raise TypeError('error_message must be of type str') + self._error_message = error_message + + @property + def errorMessage(self): + return self.error_message + + +class ApiGetDatasetRequest(KaggleObject): + r""" + Attributes: + owner_slug (str) + dataset_slug (str) + """ + + def __init__(self): + self._owner_slug = "" + self._dataset_slug = "" + self._freeze() + + @property + def owner_slug(self) -> str: + return self._owner_slug + + @owner_slug.setter + def owner_slug(self, owner_slug: str): + if owner_slug is None: + del self.owner_slug + return + if not isinstance(owner_slug, str): + raise TypeError('owner_slug must be of type str') + self._owner_slug = owner_slug + + @property + def dataset_slug(self) -> str: + return self._dataset_slug + + @dataset_slug.setter + def dataset_slug(self, dataset_slug: str): + if dataset_slug is None: + del self.dataset_slug + return + if not isinstance(dataset_slug, str): + raise TypeError('dataset_slug must be of type str') + self._dataset_slug = dataset_slug + + def endpoint(self): + path = '/api/v1/datasets/view/{owner_slug}/{dataset_slug}' + return path.format_map(self.to_field_map(self)) + + @staticmethod + def endpoint_path(): + return '/api/v1/datasets/view/{owner_slug}/{dataset_slug}' + + +class ApiGetDatasetStatusRequest(KaggleObject): + r""" + Attributes: + owner_slug (str) + dataset_slug (str) + """ + + def __init__(self): + self._owner_slug = "" + self._dataset_slug = "" + self._freeze() + + @property + def owner_slug(self) -> str: + return self._owner_slug + + @owner_slug.setter + def owner_slug(self, owner_slug: str): + if owner_slug is None: + del self.owner_slug + return + if not isinstance(owner_slug, str): + raise TypeError('owner_slug must be of type str') + self._owner_slug = owner_slug + + @property + def dataset_slug(self) -> str: + return self._dataset_slug + + @dataset_slug.setter + def dataset_slug(self, dataset_slug: str): + if dataset_slug is None: + del self.dataset_slug + return + if not isinstance(dataset_slug, str): + raise TypeError('dataset_slug must be of type str') + self._dataset_slug = dataset_slug + + def endpoint(self): + path = '/api/v1/datasets/status/{owner_slug}/{dataset_slug}' + return path.format_map(self.to_field_map(self)) + + @staticmethod + def endpoint_path(): + return '/api/v1/datasets/status/{owner_slug}/{dataset_slug}' + + +class ApiGetDatasetStatusResponse(KaggleObject): + r""" + Attributes: + status (DatabundleVersionStatus) + """ + + def __init__(self): + self._status = DatabundleVersionStatus.NOT_YET_PERSISTED + self._freeze() + + @property + def status(self) -> 'DatabundleVersionStatus': + return self._status + + @status.setter + def status(self, status: 'DatabundleVersionStatus'): + if status is None: + del self.status + return + if not isinstance(status, DatabundleVersionStatus): + raise TypeError('status must be of type DatabundleVersionStatus') + self._status = status + + +class ApiListDatasetFilesRequest(KaggleObject): + r""" + Attributes: + owner_slug (str) + dataset_slug (str) + dataset_version_number (int) + page_token (str) + page_size (int) + """ + + def __init__(self): + self._owner_slug = "" + self._dataset_slug = "" + self._dataset_version_number = None + self._page_token = None + self._page_size = None + 
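# page_token/page_size implement token-based paging; a previous response's next_page_token can be passed back as page_token to fetch the next page. +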
self._freeze() + + @property + def owner_slug(self) -> str: + return self._owner_slug + + @owner_slug.setter + def owner_slug(self, owner_slug: str): + if owner_slug is None: + del self.owner_slug + return + if not isinstance(owner_slug, str): + raise TypeError('owner_slug must be of type str') + self._owner_slug = owner_slug + + @property + def dataset_slug(self) -> str: + return self._dataset_slug + + @dataset_slug.setter + def dataset_slug(self, dataset_slug: str): + if dataset_slug is None: + del self.dataset_slug + return + if not isinstance(dataset_slug, str): + raise TypeError('dataset_slug must be of type str') + self._dataset_slug = dataset_slug + + @property + def dataset_version_number(self) -> int: + return self._dataset_version_number or 0 + + @dataset_version_number.setter + def dataset_version_number(self, dataset_version_number: Optional[int]): + if dataset_version_number is None: + del self.dataset_version_number + return + if not isinstance(dataset_version_number, int): + raise TypeError('dataset_version_number must be of type int') + self._dataset_version_number = dataset_version_number + + @property + def page_token(self) -> str: + return self._page_token or "" + + @page_token.setter + def page_token(self, page_token: Optional[str]): + if page_token is None: + del self.page_token + return + if not isinstance(page_token, str): + raise TypeError('page_token must be of type str') + self._page_token = page_token + + @property + def page_size(self) -> int: + return self._page_size or 0 + + @page_size.setter + def page_size(self, page_size: Optional[int]): + if page_size is None: + del self.page_size + return + if not isinstance(page_size, int): + raise TypeError('page_size must be of type int') + self._page_size = page_size + + def endpoint(self): + path = '/api/v1/datasets/list/{owner_slug}/{dataset_slug}' + return path.format_map(self.to_field_map(self)) + + @staticmethod + def endpoint_path(): + return '/api/v1/datasets/list/{owner_slug}/{dataset_slug}' + + +class ApiListDatasetFilesResponse(KaggleObject): + r""" + Attributes: + dataset_files (ApiDatasetFile) + error_message (str) + next_page_token (str) + """ + + def __init__(self): + self._dataset_files = [] + self._error_message = None + self._next_page_token = None + self._freeze() + + @property + def dataset_files(self) -> Optional[List[Optional['ApiDatasetFile']]]: + return self._dataset_files + + @dataset_files.setter + def dataset_files(self, dataset_files: Optional[List[Optional['ApiDatasetFile']]]): + if dataset_files is None: + del self.dataset_files + return + if not isinstance(dataset_files, list): + raise TypeError('dataset_files must be of type list') + if not all([isinstance(t, ApiDatasetFile) for t in dataset_files]): + raise TypeError('dataset_files must contain only items of type ApiDatasetFile') + self._dataset_files = dataset_files + + @property + def error_message(self) -> str: + return self._error_message or "" + + @error_message.setter + def error_message(self, error_message: Optional[str]): + if error_message is None: + del self.error_message + return + if not isinstance(error_message, str): + raise TypeError('error_message must be of type str') + self._error_message = error_message + + @property + def next_page_token(self) -> str: + return self._next_page_token or "" + + @next_page_token.setter + def next_page_token(self, next_page_token: Optional[str]): + if next_page_token is None: + del self.next_page_token + return + if not isinstance(next_page_token, str): + raise TypeError('next_page_token 
must be of type str') + self._next_page_token = next_page_token + + @property + def files(self): + return self.dataset_files + + @property + def errorMessage(self): + return self.error_message + + @property + def nextPageToken(self): + return self.next_page_token + + +class ApiListDatasetsRequest(KaggleObject): + r""" + Attributes: + group (DatasetSelectionGroup) + sort_by (DatasetSortBy) + size (DatasetSizeGroup) + file_type (DatasetFileTypeGroup) + license (DatasetLicenseGroup) + viewed (DatasetViewedGroup) + tag_ids (str) + search (str) + user (str) + min_size (int) + max_size (int) + page (int) + page_token (str) + page_size (int) + """ + + def __init__(self): + self._group = DatasetSelectionGroup.DATASET_SELECTION_GROUP_PUBLIC + self._sort_by = DatasetSortBy.DATASET_SORT_BY_HOTTEST + self._size = DatasetSizeGroup.DATASET_SIZE_GROUP_ALL + self._file_type = DatasetFileTypeGroup.DATASET_FILE_TYPE_GROUP_ALL + self._license = DatasetLicenseGroup.DATASET_LICENSE_GROUP_ALL + self._viewed = DatasetViewedGroup.DATASET_VIEWED_GROUP_UNSPECIFIED + self._tag_ids = None + self._search = None + self._user = None + self._min_size = None + self._max_size = None + self._page = None + self._page_token = None + self._page_size = None + self._freeze() + + @property + def group(self) -> 'DatasetSelectionGroup': + return self._group + + @group.setter + def group(self, group: 'DatasetSelectionGroup'): + if group is None: + del self.group + return + if not isinstance(group, DatasetSelectionGroup): + raise TypeError('group must be of type DatasetSelectionGroup') + self._group = group + + @property + def sort_by(self) -> 'DatasetSortBy': + return self._sort_by + + @sort_by.setter + def sort_by(self, sort_by: 'DatasetSortBy'): + if sort_by is None: + del self.sort_by + return + if not isinstance(sort_by, DatasetSortBy): + raise TypeError('sort_by must be of type DatasetSortBy') + self._sort_by = sort_by + + @property + def size(self) -> 'DatasetSizeGroup': + return self._size + + @size.setter + def size(self, size: 'DatasetSizeGroup'): + if size is None: + del self.size + return + if not isinstance(size, DatasetSizeGroup): + raise TypeError('size must be of type DatasetSizeGroup') + self._size = size + + @property + def file_type(self) -> 'DatasetFileTypeGroup': + return self._file_type + + @file_type.setter + def file_type(self, file_type: 'DatasetFileTypeGroup'): + if file_type is None: + del self.file_type + return + if not isinstance(file_type, DatasetFileTypeGroup): + raise TypeError('file_type must be of type DatasetFileTypeGroup') + self._file_type = file_type + + @property + def license(self) -> 'DatasetLicenseGroup': + return self._license + + @license.setter + def license(self, license: 'DatasetLicenseGroup'): + if license is None: + del self.license + return + if not isinstance(license, DatasetLicenseGroup): + raise TypeError('license must be of type DatasetLicenseGroup') + self._license = license + + @property + def viewed(self) -> 'DatasetViewedGroup': + return self._viewed + + @viewed.setter + def viewed(self, viewed: 'DatasetViewedGroup'): + if viewed is None: + del self.viewed + return + if not isinstance(viewed, DatasetViewedGroup): + raise TypeError('viewed must be of type DatasetViewedGroup') + self._viewed = viewed + + @property + def tag_ids(self) -> str: + return self._tag_ids or "" + + @tag_ids.setter + def tag_ids(self, tag_ids: Optional[str]): + if tag_ids is None: + del self.tag_ids + return + if not isinstance(tag_ids, str): + raise TypeError('tag_ids must be of type str') + 
self._tag_ids = tag_ids + + @property + def search(self) -> str: + return self._search or "" + + @search.setter + def search(self, search: Optional[str]): + if search is None: + del self.search + return + if not isinstance(search, str): + raise TypeError('search must be of type str') + self._search = search + + @property + def user(self) -> str: + return self._user or "" + + @user.setter + def user(self, user: Optional[str]): + if user is None: + del self.user + return + if not isinstance(user, str): + raise TypeError('user must be of type str') + self._user = user + + @property + def min_size(self) -> int: + return self._min_size or 0 + + @min_size.setter + def min_size(self, min_size: Optional[int]): + if min_size is None: + del self.min_size + return + if not isinstance(min_size, int): + raise TypeError('min_size must be of type int') + self._min_size = min_size + + @property + def max_size(self) -> int: + return self._max_size or 0 + + @max_size.setter + def max_size(self, max_size: Optional[int]): + if max_size is None: + del self.max_size + return + if not isinstance(max_size, int): + raise TypeError('max_size must be of type int') + self._max_size = max_size + + @property + def page(self) -> int: + return self._page or 0 + + @page.setter + def page(self, page: Optional[int]): + if page is None: + del self.page + return + if not isinstance(page, int): + raise TypeError('page must be of type int') + self._page = page + + @property + def page_token(self) -> str: + return self._page_token or "" + + @page_token.setter + def page_token(self, page_token: Optional[str]): + if page_token is None: + del self.page_token + return + if not isinstance(page_token, str): + raise TypeError('page_token must be of type str') + self._page_token = page_token + + @property + def page_size(self) -> int: + return self._page_size or 0 + + @page_size.setter + def page_size(self, page_size: Optional[int]): + if page_size is None: + del self.page_size + return + if not isinstance(page_size, int): + raise TypeError('page_size must be of type int') + self._page_size = page_size + + def endpoint(self): + path = '/api/v1/datasets/list' + return path.format_map(self.to_field_map(self)) + + +class ApiListDatasetsResponse(KaggleObject): + r""" + Attributes: + datasets (ApiDataset) + next_page_token (str) + """ + + def __init__(self): + self._datasets = [] + self._next_page_token = "" + self._freeze() + + @property + def datasets(self) -> Optional[List[Optional['ApiDataset']]]: + return self._datasets + + @datasets.setter + def datasets(self, datasets: Optional[List[Optional['ApiDataset']]]): + if datasets is None: + del self.datasets + return + if not isinstance(datasets, list): + raise TypeError('datasets must be of type list') + if not all([isinstance(t, ApiDataset) for t in datasets]): + raise TypeError('datasets must contain only items of type ApiDataset') + self._datasets = datasets + + @property + def next_page_token(self) -> str: + return self._next_page_token + + @next_page_token.setter + def next_page_token(self, next_page_token: str): + if next_page_token is None: + del self.next_page_token + return + if not isinstance(next_page_token, str): + raise TypeError('next_page_token must be of type str') + self._next_page_token = next_page_token + + @property + def nextPageToken(self): + return self.next_page_token + + +class ApiListTreeDatasetFilesRequest(KaggleObject): + r""" + Attributes: + owner_slug (str) + dataset_slug (str) + dataset_version_number (int) + path (str) + The path of the directory to list 
files from. If not provided, the root + directory will be listed. + page_token (str) + page_size (int) + """ + + def __init__(self): + self._owner_slug = "" + self._dataset_slug = "" + self._dataset_version_number = None + self._path = None + self._page_token = None + self._page_size = None + self._freeze() + + @property + def owner_slug(self) -> str: + return self._owner_slug + + @owner_slug.setter + def owner_slug(self, owner_slug: str): + if owner_slug is None: + del self.owner_slug + return + if not isinstance(owner_slug, str): + raise TypeError('owner_slug must be of type str') + self._owner_slug = owner_slug + + @property + def dataset_slug(self) -> str: + return self._dataset_slug + + @dataset_slug.setter + def dataset_slug(self, dataset_slug: str): + if dataset_slug is None: + del self.dataset_slug + return + if not isinstance(dataset_slug, str): + raise TypeError('dataset_slug must be of type str') + self._dataset_slug = dataset_slug + + @property + def dataset_version_number(self) -> int: + return self._dataset_version_number or 0 + + @dataset_version_number.setter + def dataset_version_number(self, dataset_version_number: Optional[int]): + if dataset_version_number is None: + del self.dataset_version_number + return + if not isinstance(dataset_version_number, int): + raise TypeError('dataset_version_number must be of type int') + self._dataset_version_number = dataset_version_number + + @property + def path(self) -> str: + r""" + The path of the directory to list files from. If not provided, the root + directory will be listed. + """ + return self._path or "" + + @path.setter + def path(self, path: Optional[str]): + if path is None: + del self.path + return + if not isinstance(path, str): + raise TypeError('path must be of type str') + self._path = path + + @property + def page_token(self) -> str: + return self._page_token or "" + + @page_token.setter + def page_token(self, page_token: Optional[str]): + if page_token is None: + del self.page_token + return + if not isinstance(page_token, str): + raise TypeError('page_token must be of type str') + self._page_token = page_token + + @property + def page_size(self) -> int: + return self._page_size or 0 + + @page_size.setter + def page_size(self, page_size: Optional[int]): + if page_size is None: + del self.page_size + return + if not isinstance(page_size, int): + raise TypeError('page_size must be of type int') + self._page_size = page_size + + def endpoint(self): + path = '/api/v1/datasets/list-tree/{owner_slug}/{dataset_slug}' + return path.format_map(self.to_field_map(self)) + + @staticmethod + def endpoint_path(): + return '/api/v1/datasets/list-tree/{owner_slug}/{dataset_slug}' + + +class ApiUpdateDatasetMetadataRequest(KaggleObject): + r""" + Attributes: + owner_slug (str) + dataset_slug (str) + settings (DatasetSettings) + """ + + def __init__(self): + self._owner_slug = "" + self._dataset_slug = "" + self._settings = None + self._freeze() + + @property + def owner_slug(self) -> str: + return self._owner_slug + + @owner_slug.setter + def owner_slug(self, owner_slug: str): + if owner_slug is None: + del self.owner_slug + return + if not isinstance(owner_slug, str): + raise TypeError('owner_slug must be of type str') + self._owner_slug = owner_slug + + @property + def dataset_slug(self) -> str: + return self._dataset_slug + + @dataset_slug.setter + def dataset_slug(self, dataset_slug: str): + if dataset_slug is None: + del self.dataset_slug + return + if not isinstance(dataset_slug, str): + raise TypeError('dataset_slug must be 
of type str') + self._dataset_slug = dataset_slug + + @property + def settings(self) -> Optional['DatasetSettings']: + return self._settings + + @settings.setter + def settings(self, settings: Optional['DatasetSettings']): + if settings is None: + del self.settings + return + if not isinstance(settings, DatasetSettings): + raise TypeError('settings must be of type DatasetSettings') + self._settings = settings + + def endpoint(self): + path = '/api/v1/datasets/metadata/{owner_slug}/{dataset_slug}' + return path.format_map(self.to_field_map(self)) + + + @staticmethod + def method(): + return 'POST' + + @staticmethod + def body_fields(): + return 'settings' + + +class ApiUpdateDatasetMetadataResponse(KaggleObject): + r""" + Attributes: + errors (str) + Required for backwards-compatibility. + """ + + def __init__(self): + self._errors = [] + self._freeze() + + @property + def errors(self) -> Optional[List[str]]: + """Required for backwards-compatibility.""" + return self._errors + + @errors.setter + def errors(self, errors: Optional[List[str]]): + if errors is None: + del self.errors + return + if not isinstance(errors, list): + raise TypeError('errors must be of type list') + if not all([isinstance(t, str) for t in errors]): + raise TypeError('errors must contain only items of type str') + self._errors = errors + + +class ApiUploadDatasetFileRequest(KaggleObject): + r""" + Attributes: + file_name (str) + content_length (int) + last_modified_epoch_seconds (int) + """ + + def __init__(self): + self._file_name = "" + self._content_length = 0 + self._last_modified_epoch_seconds = 0 + self._freeze() + + @property + def file_name(self) -> str: + return self._file_name + + @file_name.setter + def file_name(self, file_name: str): + if file_name is None: + del self.file_name + return + if not isinstance(file_name, str): + raise TypeError('file_name must be of type str') + self._file_name = file_name + + @property + def content_length(self) -> int: + return self._content_length + + @content_length.setter + def content_length(self, content_length: int): + if content_length is None: + del self.content_length + return + if not isinstance(content_length, int): + raise TypeError('content_length must be of type int') + self._content_length = content_length + + @property + def last_modified_epoch_seconds(self) -> int: + return self._last_modified_epoch_seconds + + @last_modified_epoch_seconds.setter + def last_modified_epoch_seconds(self, last_modified_epoch_seconds: int): + if last_modified_epoch_seconds is None: + del self.last_modified_epoch_seconds + return + if not isinstance(last_modified_epoch_seconds, int): + raise TypeError('last_modified_epoch_seconds must be of type int') + self._last_modified_epoch_seconds = last_modified_epoch_seconds + + def endpoint(self): + path = '/api/v1/datasets/upload/file/{content_length}/{last_modified_epoch_seconds}' + return path.format_map(self.to_field_map(self)) + + + @staticmethod + def method(): + return 'POST' + + +class ApiUploadDatasetFileResponse(KaggleObject): + r""" + Attributes: + token (str) + Opaque string token used to reference the new BlobFile. 
+ create_url (str) + URL to use to start the upload + """ + + def __init__(self): + self._token = "" + self._create_url = "" + self._freeze() + + @property + def token(self) -> str: + """Opaque string token used to reference the new BlobFile.""" + return self._token + + @token.setter + def token(self, token: str): + if token is None: + del self.token + return + if not isinstance(token, str): + raise TypeError('token must be of type str') + self._token = token + + @property + def create_url(self) -> str: + """URL to use to start the upload""" + return self._create_url + + @create_url.setter + def create_url(self, create_url: str): + if create_url is None: + del self.create_url + return + if not isinstance(create_url, str): + raise TypeError('create_url must be of type str') + self._create_url = create_url + + @property + def createUrl(self): + return self.create_url + + +class ApiUploadDirectoryInfo(KaggleObject): + r""" + Attributes: + name (str) + directories (ApiUploadDirectoryInfo) + files (ApiDatasetNewFile) + """ + + def __init__(self): + self._name = "" + self._directories = [] + self._files = [] + self._freeze() + + @property + def name(self) -> str: + return self._name + + @name.setter + def name(self, name: str): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + @property + def directories(self) -> Optional[List[Optional['ApiUploadDirectoryInfo']]]: + return self._directories + + @directories.setter + def directories(self, directories: Optional[List[Optional['ApiUploadDirectoryInfo']]]): + if directories is None: + del self.directories + return + if not isinstance(directories, list): + raise TypeError('directories must be of type list') + if not all([isinstance(t, ApiUploadDirectoryInfo) for t in directories]): + raise TypeError('directories must contain only items of type ApiUploadDirectoryInfo') + self._directories = directories + + @property + def files(self) -> Optional[List[Optional['ApiDatasetNewFile']]]: + return self._files + + @files.setter + def files(self, files: Optional[List[Optional['ApiDatasetNewFile']]]): + if files is None: + del self.files + return + if not isinstance(files, list): + raise TypeError('files must be of type list') + if not all([isinstance(t, ApiDatasetNewFile) for t in files]): + raise TypeError('files must contain only items of type ApiDatasetNewFile') + self._files = files + + +class ApiCategory(KaggleObject): + r""" + Attributes: + ref (str) + name (str) + description (str) + full_path (str) + competition_count (int) + dataset_count (int) + script_count (int) + total_count (int) + """ + + def __init__(self): + self._ref = "" + self._name = None + self._description = None + self._full_path = None + self._competition_count = 0 + self._dataset_count = 0 + self._script_count = 0 + self._total_count = 0 + self._freeze() + + @property + def ref(self) -> str: + return self._ref + + @ref.setter + def ref(self, ref: str): + if ref is None: + del self.ref + return + if not isinstance(ref, str): + raise TypeError('ref must be of type str') + self._ref = ref + + @property + def name(self) -> str: + return self._name or "" + + @name.setter + def name(self, name: Optional[str]): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: 
Optional[str]): + if description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + @property + def full_path(self) -> str: + return self._full_path or "" + + @full_path.setter + def full_path(self, full_path: Optional[str]): + if full_path is None: + del self.full_path + return + if not isinstance(full_path, str): + raise TypeError('full_path must be of type str') + self._full_path = full_path + + @property + def competition_count(self) -> int: + return self._competition_count + + @competition_count.setter + def competition_count(self, competition_count: int): + if competition_count is None: + del self.competition_count + return + if not isinstance(competition_count, int): + raise TypeError('competition_count must be of type int') + self._competition_count = competition_count + + @property + def dataset_count(self) -> int: + return self._dataset_count + + @dataset_count.setter + def dataset_count(self, dataset_count: int): + if dataset_count is None: + del self.dataset_count + return + if not isinstance(dataset_count, int): + raise TypeError('dataset_count must be of type int') + self._dataset_count = dataset_count + + @property + def script_count(self) -> int: + return self._script_count + + @script_count.setter + def script_count(self, script_count: int): + if script_count is None: + del self.script_count + return + if not isinstance(script_count, int): + raise TypeError('script_count must be of type int') + self._script_count = script_count + + @property + def total_count(self) -> int: + return self._total_count + + @total_count.setter + def total_count(self, total_count: int): + if total_count is None: + del self.total_count + return + if not isinstance(total_count, int): + raise TypeError('total_count must be of type int') + self._total_count = total_count + + +class ApiDatasetColumn(KaggleObject): + r""" + Attributes: + order (int) + name (str) + type (str) + original_type (str) + description (str) + """ + + def __init__(self): + self._order = None + self._name = None + self._type = None + self._original_type = None + self._description = None + self._freeze() + + @property + def order(self) -> int: + return self._order or 0 + + @order.setter + def order(self, order: Optional[int]): + if order is None: + del self.order + return + if not isinstance(order, int): + raise TypeError('order must be of type int') + self._order = order + + @property + def name(self) -> str: + return self._name or "" + + @name.setter + def name(self, name: Optional[str]): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + @property + def type(self) -> str: + return self._type or "" + + @type.setter + def type(self, type: Optional[str]): + if type is None: + del self.type + return + if not isinstance(type, str): + raise TypeError('type must be of type str') + self._type = type + + @property + def original_type(self) -> str: + return self._original_type or "" + + @original_type.setter + def original_type(self, original_type: Optional[str]): + if original_type is None: + del self.original_type + return + if not isinstance(original_type, str): + raise TypeError('original_type must be of type str') + self._original_type = original_type + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: Optional[str]): + if 
description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + +ApiCreateDatasetRequest._fields = [ + FieldMetadata("id", "id", "_id", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("slug", "slug", "_slug", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("title", "title", "_title", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("licenseName", "license_name", "_license_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("isPrivate", "is_private", "_is_private", bool, False, PredefinedSerializer()), + FieldMetadata("files", "files", "_files", ApiDatasetNewFile, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("subtitle", "subtitle", "_subtitle", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("categoryIds", "category_ids", "_category_ids", str, [], ListSerializer(PredefinedSerializer())), + FieldMetadata("directories", "directories", "_directories", ApiUploadDirectoryInfo, [], ListSerializer(KaggleObjectSerializer())), +] + +ApiCreateDatasetResponse._fields = [ + FieldMetadata("ref", "ref", "_ref", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("url", "url", "_url", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("status", "status", "_status", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("error", "error", "_error", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("invalidTags", "invalid_tags", "_invalid_tags", str, [], ListSerializer(PredefinedSerializer())), +] + +ApiCreateDatasetVersionByIdRequest._fields = [ + FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), + FieldMetadata("body", "body", "_body", ApiCreateDatasetVersionRequestBody, None, KaggleObjectSerializer()), +] + +ApiCreateDatasetVersionRequest._fields = [ + FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), + FieldMetadata("datasetSlug", "dataset_slug", "_dataset_slug", str, "", PredefinedSerializer()), + FieldMetadata("body", "body", "_body", ApiCreateDatasetVersionRequestBody, None, KaggleObjectSerializer()), +] + +ApiCreateDatasetVersionRequestBody._fields = [ + FieldMetadata("versionNotes", "version_notes", "_version_notes", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("deleteOldVersions", "delete_old_versions", "_delete_old_versions", bool, False, PredefinedSerializer()), + FieldMetadata("files", "files", "_files", ApiDatasetNewFile, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("subtitle", "subtitle", "_subtitle", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("categoryIds", "category_ids", "_category_ids", str, [], ListSerializer(PredefinedSerializer())), + FieldMetadata("directories", "directories", "_directories", ApiUploadDirectoryInfo, [], ListSerializer(KaggleObjectSerializer())), +] + +ApiDataset._fields = [ + FieldMetadata("id", "id", "_id", int, 0, PredefinedSerializer()), + FieldMetadata("ref", "ref", "_ref", str, "", PredefinedSerializer()), + 
FieldMetadata("subtitle", "subtitle", "_subtitle", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("creatorName", "creator_name", "_creator_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("creatorUrl", "creator_url", "_creator_url", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("totalBytes", "total_bytes", "_total_bytes", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("url", "url", "_url", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("lastUpdated", "last_updated", "_last_updated", datetime, None, DateTimeSerializer()), + FieldMetadata("downloadCount", "download_count", "_download_count", int, 0, PredefinedSerializer()), + FieldMetadata("isPrivate", "is_private", "_is_private", bool, False, PredefinedSerializer()), + FieldMetadata("isFeatured", "is_featured", "_is_featured", bool, False, PredefinedSerializer()), + FieldMetadata("licenseName", "license_name", "_license_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("ownerName", "owner_name", "_owner_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("ownerRef", "owner_ref", "_owner_ref", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("kernelCount", "kernel_count", "_kernel_count", int, 0, PredefinedSerializer()), + FieldMetadata("title", "title", "_title", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("topicCount", "topic_count", "_topic_count", int, 0, PredefinedSerializer()), + FieldMetadata("viewCount", "view_count", "_view_count", int, 0, PredefinedSerializer()), + FieldMetadata("voteCount", "vote_count", "_vote_count", int, 0, PredefinedSerializer()), + FieldMetadata("currentVersionNumber", "current_version_number", "_current_version_number", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("usabilityRating", "usability_rating", "_usability_rating", float, None, PredefinedSerializer(), optional=True), + FieldMetadata("tags", "tags", "_tags", ApiCategory, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("files", "files", "_files", ApiDatasetFile, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("versions", "versions", "_versions", ApiDatasetVersion, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("thumbnailImageUrl", "thumbnail_image_url", "_thumbnail_image_url", str, None, PredefinedSerializer(), optional=True), +] + +ApiDatasetFile._fields = [ + FieldMetadata("ref", "ref", "_ref", str, "", PredefinedSerializer()), + FieldMetadata("datasetRef", "dataset_ref", "_dataset_ref", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("ownerRef", "owner_ref", "_owner_ref", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("name", "name", "_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("creationDate", "creation_date", "_creation_date", datetime, None, DateTimeSerializer()), + FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("fileType", "file_type", "_file_type", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("url", "url", "_url", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("totalBytes", "total_bytes", "_total_bytes", int, 0, PredefinedSerializer()), + FieldMetadata("columns", "columns", "_columns", 
ApiDatasetColumn, [], ListSerializer(KaggleObjectSerializer())), +] + +ApiDatasetNewFile._fields = [ + FieldMetadata("token", "token", "_token", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("columns", "columns", "_columns", ApiDatasetColumn, [], ListSerializer(KaggleObjectSerializer())), +] + +ApiDatasetVersion._fields = [ + FieldMetadata("versionNumber", "version_number", "_version_number", int, 0, PredefinedSerializer()), + FieldMetadata("creationDate", "creation_date", "_creation_date", datetime, None, DateTimeSerializer()), + FieldMetadata("creatorName", "creator_name", "_creator_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("creatorRef", "creator_ref", "_creator_ref", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("versionNotes", "version_notes", "_version_notes", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("status", "status", "_status", str, None, PredefinedSerializer(), optional=True), +] + +ApiDeleteDatasetRequest._fields = [ + FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), + FieldMetadata("datasetSlug", "dataset_slug", "_dataset_slug", str, "", PredefinedSerializer()), +] + +ApiDeleteDatasetResponse._fields = [ + FieldMetadata("error", "error", "_error", str, None, PredefinedSerializer(), optional=True), +] + +ApiDownloadDatasetRawRequest._fields = [ + FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), + FieldMetadata("datasetSlug", "dataset_slug", "_dataset_slug", str, "", PredefinedSerializer()), + FieldMetadata("fileName", "file_name", "_file_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("datasetVersionNumber", "dataset_version_number", "_dataset_version_number", int, None, PredefinedSerializer(), optional=True), +] + +ApiDownloadDatasetRequest._fields = [ + FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), + FieldMetadata("datasetSlug", "dataset_slug", "_dataset_slug", str, "", PredefinedSerializer()), + FieldMetadata("fileName", "file_name", "_file_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("datasetVersionNumber", "dataset_version_number", "_dataset_version_number", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("raw", "raw", "_raw", bool, False, PredefinedSerializer()), + FieldMetadata("hashLink", "hash_link", "_hash_link", str, None, PredefinedSerializer(), optional=True), +] + +ApiGetDatasetFilesSummaryRequest._fields = [ + FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), + FieldMetadata("datasetSlug", "dataset_slug", "_dataset_slug", str, "", PredefinedSerializer()), + FieldMetadata("datasetVersionNumber", "dataset_version_number", "_dataset_version_number", int, None, PredefinedSerializer(), optional=True), +] + +ApiGetDatasetMetadataRequest._fields = [ + FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), + FieldMetadata("datasetSlug", "dataset_slug", "_dataset_slug", str, "", PredefinedSerializer()), +] + +ApiGetDatasetMetadataResponse._fields = [ + FieldMetadata("info", "info", "_info", DatasetInfo, None, KaggleObjectSerializer()), + FieldMetadata("errorMessage", "error_message", "_error_message", str, None, PredefinedSerializer(), optional=True), +] + +ApiGetDatasetRequest._fields = [ + 
FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), + FieldMetadata("datasetSlug", "dataset_slug", "_dataset_slug", str, "", PredefinedSerializer()), +] + +ApiGetDatasetStatusRequest._fields = [ + FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), + FieldMetadata("datasetSlug", "dataset_slug", "_dataset_slug", str, "", PredefinedSerializer()), +] + +ApiGetDatasetStatusResponse._fields = [ + FieldMetadata("status", "status", "_status", DatabundleVersionStatus, DatabundleVersionStatus.NOT_YET_PERSISTED, EnumSerializer()), +] + +ApiListDatasetFilesRequest._fields = [ + FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), + FieldMetadata("datasetSlug", "dataset_slug", "_dataset_slug", str, "", PredefinedSerializer()), + FieldMetadata("datasetVersionNumber", "dataset_version_number", "_dataset_version_number", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("pageToken", "page_token", "_page_token", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("pageSize", "page_size", "_page_size", int, None, PredefinedSerializer(), optional=True), +] + +ApiListDatasetFilesResponse._fields = [ + FieldMetadata("datasetFiles", "dataset_files", "_dataset_files", ApiDatasetFile, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("errorMessage", "error_message", "_error_message", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("nextPageToken", "next_page_token", "_next_page_token", str, None, PredefinedSerializer(), optional=True), +] + +ApiListDatasetsRequest._fields = [ + FieldMetadata("group", "group", "_group", DatasetSelectionGroup, DatasetSelectionGroup.DATASET_SELECTION_GROUP_PUBLIC, EnumSerializer()), + FieldMetadata("sortBy", "sort_by", "_sort_by", DatasetSortBy, DatasetSortBy.DATASET_SORT_BY_HOTTEST, EnumSerializer()), + FieldMetadata("size", "size", "_size", DatasetSizeGroup, DatasetSizeGroup.DATASET_SIZE_GROUP_ALL, EnumSerializer()), + FieldMetadata("fileType", "file_type", "_file_type", DatasetFileTypeGroup, DatasetFileTypeGroup.DATASET_FILE_TYPE_GROUP_ALL, EnumSerializer()), + FieldMetadata("license", "license", "_license", DatasetLicenseGroup, DatasetLicenseGroup.DATASET_LICENSE_GROUP_ALL, EnumSerializer()), + FieldMetadata("viewed", "viewed", "_viewed", DatasetViewedGroup, DatasetViewedGroup.DATASET_VIEWED_GROUP_UNSPECIFIED, EnumSerializer()), + FieldMetadata("tagIds", "tag_ids", "_tag_ids", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("search", "search", "_search", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("user", "user", "_user", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("minSize", "min_size", "_min_size", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("maxSize", "max_size", "_max_size", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("page", "page", "_page", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("pageToken", "page_token", "_page_token", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("pageSize", "page_size", "_page_size", int, None, PredefinedSerializer(), optional=True), +] + +ApiListDatasetsResponse._fields = [ + FieldMetadata("datasets", "datasets", "_datasets", ApiDataset, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("nextPageToken", "next_page_token", "_next_page_token", str, "", PredefinedSerializer()), +] + 
+ApiListTreeDatasetFilesRequest._fields = [ + FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), + FieldMetadata("datasetSlug", "dataset_slug", "_dataset_slug", str, "", PredefinedSerializer()), + FieldMetadata("datasetVersionNumber", "dataset_version_number", "_dataset_version_number", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("path", "path", "_path", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("pageToken", "page_token", "_page_token", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("pageSize", "page_size", "_page_size", int, None, PredefinedSerializer(), optional=True), +] + +ApiUpdateDatasetMetadataRequest._fields = [ + FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), + FieldMetadata("datasetSlug", "dataset_slug", "_dataset_slug", str, "", PredefinedSerializer()), + FieldMetadata("settings", "settings", "_settings", DatasetSettings, None, KaggleObjectSerializer()), +] + +ApiUpdateDatasetMetadataResponse._fields = [ + FieldMetadata("errors", "errors", "_errors", str, [], ListSerializer(PredefinedSerializer())), +] + +ApiUploadDatasetFileRequest._fields = [ + FieldMetadata("fileName", "file_name", "_file_name", str, "", PredefinedSerializer()), + FieldMetadata("contentLength", "content_length", "_content_length", int, 0, PredefinedSerializer()), + FieldMetadata("lastModifiedEpochSeconds", "last_modified_epoch_seconds", "_last_modified_epoch_seconds", int, 0, PredefinedSerializer()), +] + +ApiUploadDatasetFileResponse._fields = [ + FieldMetadata("token", "token", "_token", str, "", PredefinedSerializer()), + FieldMetadata("createUrl", "create_url", "_create_url", str, "", PredefinedSerializer()), +] + +ApiUploadDirectoryInfo._fields = [ + FieldMetadata("name", "name", "_name", str, "", PredefinedSerializer()), + FieldMetadata("directories", "directories", "_directories", ApiUploadDirectoryInfo, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("files", "files", "_files", ApiDatasetNewFile, [], ListSerializer(KaggleObjectSerializer())), +] + +ApiCategory._fields = [ + FieldMetadata("ref", "ref", "_ref", str, "", PredefinedSerializer()), + FieldMetadata("name", "name", "_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("fullPath", "full_path", "_full_path", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("competitionCount", "competition_count", "_competition_count", int, 0, PredefinedSerializer()), + FieldMetadata("datasetCount", "dataset_count", "_dataset_count", int, 0, PredefinedSerializer()), + FieldMetadata("scriptCount", "script_count", "_script_count", int, 0, PredefinedSerializer()), + FieldMetadata("totalCount", "total_count", "_total_count", int, 0, PredefinedSerializer()), +] + +ApiDatasetColumn._fields = [ + FieldMetadata("order", "order", "_order", int, None, PredefinedSerializer(), optional=True), + FieldMetadata("name", "name", "_name", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("type", "type", "_type", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("originalType", "original_type", "_original_type", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), +] + diff --git a/src/ksdk/datasets/types/dataset_enums.py 
b/src/ksdk/datasets/types/dataset_enums.py new file mode 100644 index 00000000..e379d826 --- /dev/null +++ b/src/ksdk/datasets/types/dataset_enums.py @@ -0,0 +1,102 @@ +import enum + +class DatabundleVersionStatus(enum.Enum): + NOT_YET_PERSISTED = 0 + BLOBS_RECEIVED = 1 + BLOBS_DECOMPRESSED = 2 + BLOBS_COPIED_TO_SDS = 3 + INDIVIDUAL_BLOBS_COMPRESSED = 4 + READY = 5 + FAILED = 6 + DELETED = 7 + REPROCESSING = 8 + +class DatasetFileTypeGroup(enum.Enum): + r""" + This enum drives acceptable values from the python API, so avoid changing + enum member names if possible + """ + DATASET_FILE_TYPE_GROUP_ALL = 0 + DATASET_FILE_TYPE_GROUP_CSV = 1 + DATASET_FILE_TYPE_GROUP_SQLITE = 2 + DATASET_FILE_TYPE_GROUP_JSON = 3 + DATASET_FILE_TYPE_GROUP_BIG_QUERY = 4 + DATASET_FILE_TYPE_GROUP_PARQUET = 5 + +class DatasetLicenseGroup(enum.Enum): + r""" + This enum drives acceptable values from the python API, so avoid changing + enum member names if possible + """ + DATASET_LICENSE_GROUP_ALL = 0 + DATASET_LICENSE_GROUP_CC = 1 + DATASET_LICENSE_GROUP_GPL = 2 + DATASET_LICENSE_GROUP_ODB = 3 + DATASET_LICENSE_GROUP_OTHER = 4 + +class DatasetSelectionGroup(enum.Enum): + DATASET_SELECTION_GROUP_PUBLIC = 0 + DATASET_SELECTION_GROUP_MY = 1 + DATASET_SELECTION_GROUP_USER = 2 + DATASET_SELECTION_GROUP_USER_SHARED_WITH_ME = 3 + DATASET_SELECTION_GROUP_UPVOTED = 4 + DATASET_SELECTION_GROUP_MY_PRIVATE = 5 + DATASET_SELECTION_GROUP_MY_PUBLIC = 10 + DATASET_SELECTION_GROUP_ORGANIZATION = 6 + DATASET_SELECTION_GROUP_BOOKMARKED = 11 + DATASET_SELECTION_GROUP_COLLABORATION = 12 + DATASET_SELECTION_GROUP_SHARED_WITH_USER = 13 + DATASET_SELECTION_GROUP_FEATURED = 7 + """Old""" + DATASET_SELECTION_GROUP_ALL = 8 + DATASET_SELECTION_GROUP_UNFEATURED = 9 + +class DatasetSizeGroup(enum.Enum): + r""" + This enum drives acceptable values from the python API, so avoid changing + enum member names if possible + """ + DATASET_SIZE_GROUP_ALL = 0 + DATASET_SIZE_GROUP_SMALL = 1 + DATASET_SIZE_GROUP_MEDIUM = 2 + DATASET_SIZE_GROUP_LARGE = 3 + +class DatasetSortBy(enum.Enum): + r""" + This enum drives acceptable values from the python API, so avoid changing + enum member names if possible + """ + DATASET_SORT_BY_HOTTEST = 0 + DATASET_SORT_BY_VOTES = 1 + DATASET_SORT_BY_UPDATED = 2 + DATASET_SORT_BY_ACTIVE = 3 + """Deprecated""" + DATASET_SORT_BY_PUBLISHED = 4 + DATASET_SORT_BY_RELEVANCE = 5 + """Old world""" + DATASET_SORT_BY_LAST_VIEWED = 6 + DATASET_SORT_BY_USABILITY = 7 + DATASET_SORT_BY_DOWNLOAD_COUNT = 8 + +class DatasetViewedGroup(enum.Enum): + DATASET_VIEWED_GROUP_UNSPECIFIED = 0 + DATASET_VIEWED_GROUP_VIEWED = 1 + +class DatabundleVersionType(enum.Enum): + DATABUNDLE_VERSION_TYPE_UNSPECIFIED = 0 + FILESET = 1 + BIG_QUERY = 2 + REMOTE_URL_FILE_SET = 3 + REMOTE_GIT_REPOSITORY_FILE_SET = 4 + KERNEL_OUTPUT_FILE_SET = 5 + GCS_FILE_SET = 6 + API_MODEL = 7 + REMOTE_HUGGING_FACE_REPOSITORY_FILE_SET = 8 + +class DatasetFileType(enum.Enum): + DATASET_FILE_TYPE_UNSPECIFIED = 0 + DATASET_FILE_TYPE_CSV = 1 + DATASET_FILE_TYPE_JSON = 2 + DATASET_FILE_TYPE_SQLITE = 3 + DATASET_FILE_TYPE_OTHER = 4 + diff --git a/src/ksdk/datasets/types/dataset_service.py b/src/ksdk/datasets/types/dataset_service.py new file mode 100644 index 00000000..b3a18794 --- /dev/null +++ b/src/ksdk/datasets/types/dataset_service.py @@ -0,0 +1,145 @@ +from datetime import datetime +from kagglesdk.datasets.types.dataset_enums import DatabundleVersionStatus +from kagglesdk.kaggle_object import * +from typing import Optional + +class DatabundleVersionCreationStatus(KaggleObject): + 
r""" + Attributes: + status (DatabundleVersionStatus) + creation_percent_complete (float) + creation_exception (str) + creation_last_update (datetime) + creation_step (str) + creation_start (datetime) + user_message (str) + version_number (int) + """ + + def __init__(self): + self._status = DatabundleVersionStatus.NOT_YET_PERSISTED + self._creation_percent_complete = 0.0 + self._creation_exception = None + self._creation_last_update = None + self._creation_step = None + self._creation_start = None + self._user_message = None + self._version_number = None + self._freeze() + + @property + def status(self) -> 'DatabundleVersionStatus': + return self._status + + @status.setter + def status(self, status: 'DatabundleVersionStatus'): + if status is None: + del self.status + return + if not isinstance(status, DatabundleVersionStatus): + raise TypeError('status must be of type DatabundleVersionStatus') + self._status = status + + @property + def creation_percent_complete(self) -> float: + return self._creation_percent_complete + + @creation_percent_complete.setter + def creation_percent_complete(self, creation_percent_complete: float): + if creation_percent_complete is None: + del self.creation_percent_complete + return + if not isinstance(creation_percent_complete, float): + raise TypeError('creation_percent_complete must be of type float') + self._creation_percent_complete = creation_percent_complete + + @property + def creation_exception(self) -> str: + return self._creation_exception or "" + + @creation_exception.setter + def creation_exception(self, creation_exception: Optional[str]): + if creation_exception is None: + del self.creation_exception + return + if not isinstance(creation_exception, str): + raise TypeError('creation_exception must be of type str') + self._creation_exception = creation_exception + + @property + def creation_last_update(self) -> datetime: + return self._creation_last_update + + @creation_last_update.setter + def creation_last_update(self, creation_last_update: datetime): + if creation_last_update is None: + del self.creation_last_update + return + if not isinstance(creation_last_update, datetime): + raise TypeError('creation_last_update must be of type datetime') + self._creation_last_update = creation_last_update + + @property + def creation_step(self) -> str: + return self._creation_step or "" + + @creation_step.setter + def creation_step(self, creation_step: Optional[str]): + if creation_step is None: + del self.creation_step + return + if not isinstance(creation_step, str): + raise TypeError('creation_step must be of type str') + self._creation_step = creation_step + + @property + def creation_start(self) -> datetime: + return self._creation_start + + @creation_start.setter + def creation_start(self, creation_start: datetime): + if creation_start is None: + del self.creation_start + return + if not isinstance(creation_start, datetime): + raise TypeError('creation_start must be of type datetime') + self._creation_start = creation_start + + @property + def user_message(self) -> str: + return self._user_message or "" + + @user_message.setter + def user_message(self, user_message: Optional[str]): + if user_message is None: + del self.user_message + return + if not isinstance(user_message, str): + raise TypeError('user_message must be of type str') + self._user_message = user_message + + @property + def version_number(self) -> int: + return self._version_number or 0 + + @version_number.setter + def version_number(self, version_number: Optional[int]): + if 
version_number is None: + del self.version_number + return + if not isinstance(version_number, int): + raise TypeError('version_number must be of type int') + self._version_number = version_number + + +DatabundleVersionCreationStatus._fields = [ + FieldMetadata("status", "status", "_status", DatabundleVersionStatus, DatabundleVersionStatus.NOT_YET_PERSISTED, EnumSerializer()), + FieldMetadata("creationPercentComplete", "creation_percent_complete", "_creation_percent_complete", float, 0.0, PredefinedSerializer()), + FieldMetadata("creationException", "creation_exception", "_creation_exception", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("creationLastUpdate", "creation_last_update", "_creation_last_update", datetime, None, DateTimeSerializer()), + FieldMetadata("creationStep", "creation_step", "_creation_step", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("creationStart", "creation_start", "_creation_start", datetime, None, DateTimeSerializer()), + FieldMetadata("userMessage", "user_message", "_user_message", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("versionNumber", "version_number", "_version_number", int, None, PredefinedSerializer(), optional=True), +] + diff --git a/src/ksdk/datasets/types/dataset_types.py b/src/ksdk/datasets/types/dataset_types.py new file mode 100644 index 00000000..8ad389c8 --- /dev/null +++ b/src/ksdk/datasets/types/dataset_types.py @@ -0,0 +1,646 @@ +from kagglesdk.kaggle_object import * +from kagglesdk.users.types.users_enums import CollaboratorType +from typing import Optional, List + +class DatasetInfo(KaggleObject): + r""" + Attributes: + dataset_id (int) + dataset_slug (str) + owner_user (str) + usability_rating (float) + total_views (int) + total_votes (int) + total_downloads (int) + title (str) + Copy/paste from DatasetSettings below. Can't use composition because + that'd be a backwards-incompatible change for the Python Api. 
+ subtitle (str) + description (str) + is_private (bool) + keywords (str) + licenses (SettingsLicense) + collaborators (DatasetCollaborator) + data (DatasetSettingsFile) + """ + + def __init__(self): + self._dataset_id = 0 + self._dataset_slug = None + self._owner_user = None + self._usability_rating = None + self._total_views = 0 + self._total_votes = 0 + self._total_downloads = 0 + self._title = None + self._subtitle = None + self._description = None + self._is_private = False + self._keywords = [] + self._licenses = [] + self._collaborators = [] + self._data = [] + self._freeze() + + @property + def dataset_id(self) -> int: + return self._dataset_id + + @dataset_id.setter + def dataset_id(self, dataset_id: int): + if dataset_id is None: + del self.dataset_id + return + if not isinstance(dataset_id, int): + raise TypeError('dataset_id must be of type int') + self._dataset_id = dataset_id + + @property + def dataset_slug(self) -> str: + return self._dataset_slug or "" + + @dataset_slug.setter + def dataset_slug(self, dataset_slug: Optional[str]): + if dataset_slug is None: + del self.dataset_slug + return + if not isinstance(dataset_slug, str): + raise TypeError('dataset_slug must be of type str') + self._dataset_slug = dataset_slug + + @property + def owner_user(self) -> str: + return self._owner_user or "" + + @owner_user.setter + def owner_user(self, owner_user: Optional[str]): + if owner_user is None: + del self.owner_user + return + if not isinstance(owner_user, str): + raise TypeError('owner_user must be of type str') + self._owner_user = owner_user + + @property + def usability_rating(self) -> float: + return self._usability_rating or 0.0 + + @usability_rating.setter + def usability_rating(self, usability_rating: Optional[float]): + if usability_rating is None: + del self.usability_rating + return + if not isinstance(usability_rating, float): + raise TypeError('usability_rating must be of type float') + self._usability_rating = usability_rating + + @property + def total_views(self) -> int: + return self._total_views + + @total_views.setter + def total_views(self, total_views: int): + if total_views is None: + del self.total_views + return + if not isinstance(total_views, int): + raise TypeError('total_views must be of type int') + self._total_views = total_views + + @property + def total_votes(self) -> int: + return self._total_votes + + @total_votes.setter + def total_votes(self, total_votes: int): + if total_votes is None: + del self.total_votes + return + if not isinstance(total_votes, int): + raise TypeError('total_votes must be of type int') + self._total_votes = total_votes + + @property + def total_downloads(self) -> int: + return self._total_downloads + + @total_downloads.setter + def total_downloads(self, total_downloads: int): + if total_downloads is None: + del self.total_downloads + return + if not isinstance(total_downloads, int): + raise TypeError('total_downloads must be of type int') + self._total_downloads = total_downloads + + @property + def title(self) -> str: + r""" + Copy/paste from DatasetSettings below. Can't use composition because + that'd be a backwards-incompatible change for the Python Api. 
+ """ + return self._title or "" + + @title.setter + def title(self, title: Optional[str]): + if title is None: + del self.title + return + if not isinstance(title, str): + raise TypeError('title must be of type str') + self._title = title + + @property + def subtitle(self) -> str: + return self._subtitle or "" + + @subtitle.setter + def subtitle(self, subtitle: Optional[str]): + if subtitle is None: + del self.subtitle + return + if not isinstance(subtitle, str): + raise TypeError('subtitle must be of type str') + self._subtitle = subtitle + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: Optional[str]): + if description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + @property + def is_private(self) -> bool: + return self._is_private + + @is_private.setter + def is_private(self, is_private: bool): + if is_private is None: + del self.is_private + return + if not isinstance(is_private, bool): + raise TypeError('is_private must be of type bool') + self._is_private = is_private + + @property + def keywords(self) -> Optional[List[str]]: + return self._keywords + + @keywords.setter + def keywords(self, keywords: Optional[List[str]]): + if keywords is None: + del self.keywords + return + if not isinstance(keywords, list): + raise TypeError('keywords must be of type list') + if not all([isinstance(t, str) for t in keywords]): + raise TypeError('keywords must contain only items of type str') + self._keywords = keywords + + @property + def licenses(self) -> Optional[List[Optional['SettingsLicense']]]: + return self._licenses + + @licenses.setter + def licenses(self, licenses: Optional[List[Optional['SettingsLicense']]]): + if licenses is None: + del self.licenses + return + if not isinstance(licenses, list): + raise TypeError('licenses must be of type list') + if not all([isinstance(t, SettingsLicense) for t in licenses]): + raise TypeError('licenses must contain only items of type SettingsLicense') + self._licenses = licenses + + @property + def collaborators(self) -> Optional[List[Optional['DatasetCollaborator']]]: + return self._collaborators + + @collaborators.setter + def collaborators(self, collaborators: Optional[List[Optional['DatasetCollaborator']]]): + if collaborators is None: + del self.collaborators + return + if not isinstance(collaborators, list): + raise TypeError('collaborators must be of type list') + if not all([isinstance(t, DatasetCollaborator) for t in collaborators]): + raise TypeError('collaborators must contain only items of type DatasetCollaborator') + self._collaborators = collaborators + + @property + def data(self) -> Optional[List[Optional['DatasetSettingsFile']]]: + return self._data + + @data.setter + def data(self, data: Optional[List[Optional['DatasetSettingsFile']]]): + if data is None: + del self.data + return + if not isinstance(data, list): + raise TypeError('data must be of type list') + if not all([isinstance(t, DatasetSettingsFile) for t in data]): + raise TypeError('data must contain only items of type DatasetSettingsFile') + self._data = data + + +class DatasetSettings(KaggleObject): + r""" + Attributes: + title (str) + subtitle (str) + description (str) + is_private (bool) + keywords (str) + licenses (SettingsLicense) + collaborators (DatasetCollaborator) + data (DatasetSettingsFile) + """ + + def __init__(self): + self._title = None + 
self._subtitle = None + self._description = None + self._is_private = False + self._keywords = [] + self._licenses = [] + self._collaborators = [] + self._data = [] + self._freeze() + + @property + def title(self) -> str: + return self._title or "" + + @title.setter + def title(self, title: Optional[str]): + if title is None: + del self.title + return + if not isinstance(title, str): + raise TypeError('title must be of type str') + self._title = title + + @property + def subtitle(self) -> str: + return self._subtitle or "" + + @subtitle.setter + def subtitle(self, subtitle: Optional[str]): + if subtitle is None: + del self.subtitle + return + if not isinstance(subtitle, str): + raise TypeError('subtitle must be of type str') + self._subtitle = subtitle + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: Optional[str]): + if description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + @property + def is_private(self) -> bool: + return self._is_private + + @is_private.setter + def is_private(self, is_private: bool): + if is_private is None: + del self.is_private + return + if not isinstance(is_private, bool): + raise TypeError('is_private must be of type bool') + self._is_private = is_private + + @property + def keywords(self) -> Optional[List[str]]: + return self._keywords + + @keywords.setter + def keywords(self, keywords: Optional[List[str]]): + if keywords is None: + del self.keywords + return + if not isinstance(keywords, list): + raise TypeError('keywords must be of type list') + if not all([isinstance(t, str) for t in keywords]): + raise TypeError('keywords must contain only items of type str') + self._keywords = keywords + + @property + def licenses(self) -> Optional[List[Optional['SettingsLicense']]]: + return self._licenses + + @licenses.setter + def licenses(self, licenses: Optional[List[Optional['SettingsLicense']]]): + if licenses is None: + del self.licenses + return + if not isinstance(licenses, list): + raise TypeError('licenses must be of type list') + if not all([isinstance(t, SettingsLicense) for t in licenses]): + raise TypeError('licenses must contain only items of type SettingsLicense') + self._licenses = licenses + + @property + def collaborators(self) -> Optional[List[Optional['DatasetCollaborator']]]: + return self._collaborators + + @collaborators.setter + def collaborators(self, collaborators: Optional[List[Optional['DatasetCollaborator']]]): + if collaborators is None: + del self.collaborators + return + if not isinstance(collaborators, list): + raise TypeError('collaborators must be of type list') + if not all([isinstance(t, DatasetCollaborator) for t in collaborators]): + raise TypeError('collaborators must contain only items of type DatasetCollaborator') + self._collaborators = collaborators + + @property + def data(self) -> Optional[List[Optional['DatasetSettingsFile']]]: + return self._data + + @data.setter + def data(self, data: Optional[List[Optional['DatasetSettingsFile']]]): + if data is None: + del self.data + return + if not isinstance(data, list): + raise TypeError('data must be of type list') + if not all([isinstance(t, DatasetSettingsFile) for t in data]): + raise TypeError('data must contain only items of type DatasetSettingsFile') + self._data = data + + +class DatasetSettingsFile(KaggleObject): + r""" + Attributes: + name (str) + description 
(str) + total_bytes (int) + columns (DatasetSettingsFileColumn) + """ + + def __init__(self): + self._name = "" + self._description = None + self._total_bytes = 0 + self._columns = [] + self._freeze() + + @property + def name(self) -> str: + return self._name + + @name.setter + def name(self, name: str): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: Optional[str]): + if description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + @property + def total_bytes(self) -> int: + return self._total_bytes + + @total_bytes.setter + def total_bytes(self, total_bytes: int): + if total_bytes is None: + del self.total_bytes + return + if not isinstance(total_bytes, int): + raise TypeError('total_bytes must be of type int') + self._total_bytes = total_bytes + + @property + def columns(self) -> Optional[List[Optional['DatasetSettingsFileColumn']]]: + return self._columns + + @columns.setter + def columns(self, columns: Optional[List[Optional['DatasetSettingsFileColumn']]]): + if columns is None: + del self.columns + return + if not isinstance(columns, list): + raise TypeError('columns must be of type list') + if not all([isinstance(t, DatasetSettingsFileColumn) for t in columns]): + raise TypeError('columns must contain only items of type DatasetSettingsFileColumn') + self._columns = columns + + +class DatasetSettingsFileColumn(KaggleObject): + r""" + Attributes: + name (str) + description (str) + type (str) + """ + + def __init__(self): + self._name = "" + self._description = None + self._type = None + self._freeze() + + @property + def name(self) -> str: + return self._name + + @name.setter + def name(self, name: str): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + @property + def description(self) -> str: + return self._description or "" + + @description.setter + def description(self, description: Optional[str]): + if description is None: + del self.description + return + if not isinstance(description, str): + raise TypeError('description must be of type str') + self._description = description + + @property + def type(self) -> str: + return self._type or "" + + @type.setter + def type(self, type: Optional[str]): + if type is None: + del self.type + return + if not isinstance(type, str): + raise TypeError('type must be of type str') + self._type = type + + +class SettingsLicense(KaggleObject): + r""" + Attributes: + name (str) + """ + + def __init__(self): + self._name = None + self._freeze() + + @property + def name(self) -> str: + return self._name or "" + + @name.setter + def name(self, name: Optional[str]): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + +class DatasetCollaborator(KaggleObject): + r""" + Attributes: + username (str) + group_slug (str) + role (CollaboratorType) + """ + + def __init__(self): + self._username = None + self._group_slug = None + self._role = CollaboratorType.COLLABORATOR_TYPE_UNSPECIFIED + self._freeze() + + @property + def username(self) -> str: + return self._username or "" + + @username.setter + def username(self, 
username: str): + if username is None: + del self.username + return + if not isinstance(username, str): + raise TypeError('username must be of type str') + del self.group_slug + self._username = username + + @property + def group_slug(self) -> str: + return self._group_slug or "" + + @group_slug.setter + def group_slug(self, group_slug: str): + if group_slug is None: + del self.group_slug + return + if not isinstance(group_slug, str): + raise TypeError('group_slug must be of type str') + del self.username + self._group_slug = group_slug + + @property + def role(self) -> 'CollaboratorType': + return self._role + + @role.setter + def role(self, role: 'CollaboratorType'): + if role is None: + del self.role + return + if not isinstance(role, CollaboratorType): + raise TypeError('role must be of type CollaboratorType') + self._role = role + + +DatasetInfo._fields = [ + FieldMetadata("datasetId", "dataset_id", "_dataset_id", int, 0, PredefinedSerializer()), + FieldMetadata("datasetSlug", "dataset_slug", "_dataset_slug", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("ownerUser", "owner_user", "_owner_user", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("usabilityRating", "usability_rating", "_usability_rating", float, None, PredefinedSerializer(), optional=True), + FieldMetadata("totalViews", "total_views", "_total_views", int, 0, PredefinedSerializer()), + FieldMetadata("totalVotes", "total_votes", "_total_votes", int, 0, PredefinedSerializer()), + FieldMetadata("totalDownloads", "total_downloads", "_total_downloads", int, 0, PredefinedSerializer()), + FieldMetadata("title", "title", "_title", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("subtitle", "subtitle", "_subtitle", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("isPrivate", "is_private", "_is_private", bool, False, PredefinedSerializer()), + FieldMetadata("keywords", "keywords", "_keywords", str, [], ListSerializer(PredefinedSerializer())), + FieldMetadata("licenses", "licenses", "_licenses", SettingsLicense, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("collaborators", "collaborators", "_collaborators", DatasetCollaborator, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("data", "data", "_data", DatasetSettingsFile, [], ListSerializer(KaggleObjectSerializer())), +] + +DatasetSettings._fields = [ + FieldMetadata("title", "title", "_title", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("subtitle", "subtitle", "_subtitle", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("isPrivate", "is_private", "_is_private", bool, False, PredefinedSerializer()), + FieldMetadata("keywords", "keywords", "_keywords", str, [], ListSerializer(PredefinedSerializer())), + FieldMetadata("licenses", "licenses", "_licenses", SettingsLicense, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("collaborators", "collaborators", "_collaborators", DatasetCollaborator, [], ListSerializer(KaggleObjectSerializer())), + FieldMetadata("data", "data", "_data", DatasetSettingsFile, [], ListSerializer(KaggleObjectSerializer())), +] + +DatasetSettingsFile._fields = [ + FieldMetadata("name", "name", "_name", str, "", PredefinedSerializer()), + FieldMetadata("description", "description", 
"_description", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("totalBytes", "total_bytes", "_total_bytes", int, 0, PredefinedSerializer()), + FieldMetadata("columns", "columns", "_columns", DatasetSettingsFileColumn, [], ListSerializer(KaggleObjectSerializer())), +] + +DatasetSettingsFileColumn._fields = [ + FieldMetadata("name", "name", "_name", str, "", PredefinedSerializer()), + FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("type", "type", "_type", str, None, PredefinedSerializer(), optional=True), +] + +SettingsLicense._fields = [ + FieldMetadata("name", "name", "_name", str, None, PredefinedSerializer(), optional=True), +] + +DatasetCollaborator._fields = [ + FieldMetadata("username", "username", "_username", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("groupSlug", "group_slug", "_group_slug", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("role", "role", "_role", CollaboratorType, CollaboratorType.COLLABORATOR_TYPE_UNSPECIFIED, EnumSerializer()), +] + diff --git a/src/ksdk/datasets/types/search_datasets.py b/src/ksdk/datasets/types/search_datasets.py new file mode 100644 index 00000000..a5949df4 --- /dev/null +++ b/src/ksdk/datasets/types/search_datasets.py @@ -0,0 +1,6 @@ +import enum + +class SearchDatasetsOrderBy(enum.Enum): + SEARCH_DATASETS_ORDER_BY_UNSPECIFIED = 0 + SEARCH_DATASETS_ORDER_BY_USABILITY_RATING = 1 + diff --git a/src/ksdk/discussions/__init__.py b/src/ksdk/discussions/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/discussions/types/__init__.py b/src/ksdk/discussions/types/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/discussions/types/search_discussions.py b/src/ksdk/discussions/types/search_discussions.py new file mode 100644 index 00000000..68a5fe20 --- /dev/null +++ b/src/ksdk/discussions/types/search_discussions.py @@ -0,0 +1,42 @@ +import enum + +class SearchDiscussionsOrderBy(enum.Enum): + SEARCH_DISCUSSIONS_ORDER_BY_UNSPECIFIED = 0 + SEARCH_DISCUSSIONS_ORDER_BY_LAST_TOPIC_COMMENT_DATE = 1 + +class SearchDiscussionsDocumentType(enum.Enum): + SEARCH_DISCUSSIONS_DOCUMENT_TYPE_UNSPECIFIED = 0 + SEARCH_DISCUSSIONS_DOCUMENT_TYPE_COMMENT = 1 + SEARCH_DISCUSSIONS_DOCUMENT_TYPE_TOPIC = 2 + SEARCH_DISCUSSIONS_DOCUMENT_TYPE_WRITE_UP = 3 + +class SearchDiscussionsSourceType(enum.Enum): + SEARCH_DISCUSSIONS_SOURCE_TYPE_UNSPECIFIED = 0 + SEARCH_DISCUSSIONS_SOURCE_TYPE_COMPETITION = 1 + SEARCH_DISCUSSIONS_SOURCE_TYPE_DATASET = 2 + SEARCH_DISCUSSIONS_SOURCE_TYPE_KERNEL = 4 + SEARCH_DISCUSSIONS_SOURCE_TYPE_SITE_FORUM = 5 + SEARCH_DISCUSSIONS_SOURCE_TYPE_COMPETITION_SOLUTION = 6 + SEARCH_DISCUSSIONS_SOURCE_TYPE_MODEL = 7 + SEARCH_DISCUSSIONS_SOURCE_TYPE_WRITE_UP = 8 + +class SearchDiscussionsTopicType(enum.Enum): + SEARCH_DISCUSSIONS_TOPIC_TYPE_UNSPECIFIED = 0 + SEARCH_DISCUSSIONS_TOPIC_TYPE_TOPICS = 1 + SEARCH_DISCUSSIONS_TOPIC_TYPE_WRITE_UPS = 2 + +class WriteUpInclusionType(enum.Enum): + WRITE_UP_INCLUSION_TYPE_UNSPECIFIED = 0 + WRITE_UP_INCLUSION_TYPE_EXCLUDE = 1 + r""" + Only ForumTopics will be included, while + WriteUps will be excluded + """ + WRITE_UP_INCLUSION_TYPE_INCLUDE = 2 + """WriteUps and ForumTopics will be included""" + WRITE_UP_INCLUSION_TYPE_ONLY = 3 + r""" + Only WriteUps will be included, while + ForumTopics will be excluded + """ + diff --git a/src/ksdk/discussions/types/writeup_enums.py b/src/ksdk/discussions/types/writeup_enums.py new file 
mode 100644 index 00000000..57785326 --- /dev/null +++ b/src/ksdk/discussions/types/writeup_enums.py @@ -0,0 +1,11 @@ +import enum + +class WriteUpType(enum.Enum): + WRITE_UP_TYPE_UNSPECIFIED = 0 + HACKATHON_PROJECT = 1 + COMPETITION_SOLUTION = 2 + PERSONAL_PROJECT = 3 + KNOWLEDGE = 4 + FORUM_TOPIC = 5 + BLOG = 6 + diff --git a/src/ksdk/education/__init__.py b/src/ksdk/education/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/education/services/__init__.py b/src/ksdk/education/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/education/services/education_api_service.py b/src/ksdk/education/services/education_api_service.py new file mode 100644 index 00000000..0d453228 --- /dev/null +++ b/src/ksdk/education/services/education_api_service.py @@ -0,0 +1,19 @@ +from kagglesdk.education.types.education_api_service import ApiTrackExerciseInteractionRequest, ApiTrackExerciseInteractionResponse +from kagglesdk.kaggle_http_client import KaggleHttpClient + +class EducationApiClient(object): + + def __init__(self, client: KaggleHttpClient): + self._client = client + + def track_exercise_interaction(self, request: ApiTrackExerciseInteractionRequest = None) -> ApiTrackExerciseInteractionResponse: + r""" + Args: + request (ApiTrackExerciseInteractionRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiTrackExerciseInteractionRequest() + + return self._client.call("education.EducationApiService", "TrackExerciseInteraction", request, ApiTrackExerciseInteractionResponse) diff --git a/src/ksdk/education/types/__init__.py b/src/ksdk/education/types/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/ksdk/education/types/education_api_service.py b/src/ksdk/education/types/education_api_service.py new file mode 100644 index 00000000..f365fecd --- /dev/null +++ b/src/ksdk/education/types/education_api_service.py @@ -0,0 +1,248 @@ +from kagglesdk.education.types.education_service import LearnExerciseInteractionType, LearnExerciseOutcomeType, LearnExerciseQuestionType, LearnNudge +from kagglesdk.kaggle_object import * +from typing import Optional + +class ApiTrackExerciseInteractionRequest(KaggleObject): + r""" + This is copied from TrackExerciseInteractionRequest in + education_service.proto, which will eventually be deprecated. In the + meantime, make sure to keep these in sync. + + NOTE: there's one small rename from `fork_parent_script_version_id` to + `fork_parent_kernel_session_id`. 
+ + Attributes: + exception_class (str) + failure_message (str) + interaction_type (LearnExerciseInteractionType) + learn_tools_version (str) + fork_parent_kernel_session_id (int) + outcome_type (LearnExerciseOutcomeType) + question_id (str) + question_type (LearnExerciseQuestionType) + trace (str) + value_towards_completion (float) + """ + + def __init__(self): + self._exception_class = "" + self._failure_message = "" + self._interaction_type = LearnExerciseInteractionType.LEARN_EXERCISE_INTERACTION_TYPE_UNSPECIFIED + self._learn_tools_version = "" + self._fork_parent_kernel_session_id = 0 + self._outcome_type = LearnExerciseOutcomeType.LEARN_EXERCISE_OUTCOME_TYPE_UNSPECIFIED + self._question_id = "" + self._question_type = LearnExerciseQuestionType.LEARN_EXERCISE_QUESTION_TYPE_UNSPECIFIED + self._trace = "" + self._value_towards_completion = None + self._freeze() + + @property + def exception_class(self) -> str: + return self._exception_class + + @exception_class.setter + def exception_class(self, exception_class: str): + if exception_class is None: + del self.exception_class + return + if not isinstance(exception_class, str): + raise TypeError('exception_class must be of type str') + self._exception_class = exception_class + + @property + def failure_message(self) -> str: + return self._failure_message + + @failure_message.setter + def failure_message(self, failure_message: str): + if failure_message is None: + del self.failure_message + return + if not isinstance(failure_message, str): + raise TypeError('failure_message must be of type str') + self._failure_message = failure_message + + @property + def interaction_type(self) -> 'LearnExerciseInteractionType': + return self._interaction_type + + @interaction_type.setter + def interaction_type(self, interaction_type: 'LearnExerciseInteractionType'): + if interaction_type is None: + del self.interaction_type + return + if not isinstance(interaction_type, LearnExerciseInteractionType): + raise TypeError('interaction_type must be of type LearnExerciseInteractionType') + self._interaction_type = interaction_type + + @property + def learn_tools_version(self) -> str: + return self._learn_tools_version + + @learn_tools_version.setter + def learn_tools_version(self, learn_tools_version: str): + if learn_tools_version is None: + del self.learn_tools_version + return + if not isinstance(learn_tools_version, str): + raise TypeError('learn_tools_version must be of type str') + self._learn_tools_version = learn_tools_version + + @property + def fork_parent_kernel_session_id(self) -> int: + return self._fork_parent_kernel_session_id + + @fork_parent_kernel_session_id.setter + def fork_parent_kernel_session_id(self, fork_parent_kernel_session_id: int): + if fork_parent_kernel_session_id is None: + del self.fork_parent_kernel_session_id + return + if not isinstance(fork_parent_kernel_session_id, int): + raise TypeError('fork_parent_kernel_session_id must be of type int') + self._fork_parent_kernel_session_id = fork_parent_kernel_session_id + + @property + def outcome_type(self) -> 'LearnExerciseOutcomeType': + return self._outcome_type + + @outcome_type.setter + def outcome_type(self, outcome_type: 'LearnExerciseOutcomeType'): + if outcome_type is None: + del self.outcome_type + return + if not isinstance(outcome_type, LearnExerciseOutcomeType): + raise TypeError('outcome_type must be of type LearnExerciseOutcomeType') + self._outcome_type = outcome_type + + @property + def question_id(self) -> str: + return self._question_id + + @question_id.setter + 
def question_id(self, question_id: str): + if question_id is None: + del self.question_id + return + if not isinstance(question_id, str): + raise TypeError('question_id must be of type str') + self._question_id = question_id + + @property + def question_type(self) -> 'LearnExerciseQuestionType': + return self._question_type + + @question_type.setter + def question_type(self, question_type: 'LearnExerciseQuestionType'): + if question_type is None: + del self.question_type + return + if not isinstance(question_type, LearnExerciseQuestionType): + raise TypeError('question_type must be of type LearnExerciseQuestionType') + self._question_type = question_type + + @property + def trace(self) -> str: + return self._trace + + @trace.setter + def trace(self, trace: str): + if trace is None: + del self.trace + return + if not isinstance(trace, str): + raise TypeError('trace must be of type str') + self._trace = trace + + @property + def value_towards_completion(self) -> float: + return self._value_towards_completion or 0.0 + + @value_towards_completion.setter + def value_towards_completion(self, value_towards_completion: Optional[float]): + if value_towards_completion is None: + del self.value_towards_completion + return + if not isinstance(value_towards_completion, float): + raise TypeError('value_towards_completion must be of type float') + self._value_towards_completion = value_towards_completion + + def endpoint(self): + path = '/api/v1/learn/track' + return path.format_map(self.to_field_map(self)) + + + @staticmethod + def method(): + return 'POST' + + @staticmethod + def body_fields(): + return '*' + + +class ApiTrackExerciseInteractionResponse(KaggleObject): + r""" + This is copied from TrackExerciseInteractionResponse in + education_service.proto, which will eventually be deprecated. In the + meantime, make sure to keep these in sync. 
+ + Attributes: + nudge (LearnNudge) + show_login_prompt (bool) + """ + + def __init__(self): + self._nudge = None + self._show_login_prompt = False + self._freeze() + + @property + def nudge(self) -> Optional['LearnNudge']: + return self._nudge + + @nudge.setter + def nudge(self, nudge: Optional['LearnNudge']): + if nudge is None: + del self.nudge + return + if not isinstance(nudge, LearnNudge): + raise TypeError('nudge must be of type LearnNudge') + self._nudge = nudge + + @property + def show_login_prompt(self) -> bool: + return self._show_login_prompt + + @show_login_prompt.setter + def show_login_prompt(self, show_login_prompt: bool): + if show_login_prompt is None: + del self.show_login_prompt + return + if not isinstance(show_login_prompt, bool): + raise TypeError('show_login_prompt must be of type bool') + self._show_login_prompt = show_login_prompt + + @property + def showLoginPrompt(self): + return self.show_login_prompt + + +ApiTrackExerciseInteractionRequest._fields = [ + FieldMetadata("exceptionClass", "exception_class", "_exception_class", str, "", PredefinedSerializer()), + FieldMetadata("failureMessage", "failure_message", "_failure_message", str, "", PredefinedSerializer()), + FieldMetadata("interactionType", "interaction_type", "_interaction_type", LearnExerciseInteractionType, LearnExerciseInteractionType.LEARN_EXERCISE_INTERACTION_TYPE_UNSPECIFIED, EnumSerializer()), + FieldMetadata("learnToolsVersion", "learn_tools_version", "_learn_tools_version", str, "", PredefinedSerializer()), + FieldMetadata("forkParentKernelSessionId", "fork_parent_kernel_session_id", "_fork_parent_kernel_session_id", int, 0, PredefinedSerializer()), + FieldMetadata("outcomeType", "outcome_type", "_outcome_type", LearnExerciseOutcomeType, LearnExerciseOutcomeType.LEARN_EXERCISE_OUTCOME_TYPE_UNSPECIFIED, EnumSerializer()), + FieldMetadata("questionId", "question_id", "_question_id", str, "", PredefinedSerializer()), + FieldMetadata("questionType", "question_type", "_question_type", LearnExerciseQuestionType, LearnExerciseQuestionType.LEARN_EXERCISE_QUESTION_TYPE_UNSPECIFIED, EnumSerializer()), + FieldMetadata("trace", "trace", "_trace", str, "", PredefinedSerializer()), + FieldMetadata("valueTowardsCompletion", "value_towards_completion", "_value_towards_completion", float, None, PredefinedSerializer(), optional=True), +] + +ApiTrackExerciseInteractionResponse._fields = [ + FieldMetadata("nudge", "nudge", "_nudge", LearnNudge, None, KaggleObjectSerializer()), + FieldMetadata("showLoginPrompt", "show_login_prompt", "_show_login_prompt", bool, False, PredefinedSerializer()), +] + diff --git a/src/ksdk/education/types/education_service.py b/src/ksdk/education/types/education_service.py new file mode 100644 index 00000000..4f5f2adf --- /dev/null +++ b/src/ksdk/education/types/education_service.py @@ -0,0 +1,139 @@ +import enum +from kagglesdk.kaggle_object import * +from typing import Optional + +class LearnExerciseInteractionType(enum.Enum): + LEARN_EXERCISE_INTERACTION_TYPE_UNSPECIFIED = 0 + CHECK = 1 + HINT = 2 + SOLUTION = 3 + +class LearnExerciseOutcomeType(enum.Enum): + LEARN_EXERCISE_OUTCOME_TYPE_UNSPECIFIED = 0 + PASS = 1 + FAIL = 2 + EXCEPTION = 3 + UNATTEMPTED = 4 + +class LearnExerciseQuestionType(enum.Enum): + LEARN_EXERCISE_QUESTION_TYPE_UNSPECIFIED = 0 + EQUALITY_CHECK_PROBLEM = 1 + CODING_PROBLEM = 2 + FUNCTION_PROBLEM = 3 + THOUGHT_EXPERIMENT = 4 + +class LearnNudgeType(enum.Enum): + COURSE_COMPLETE_NO_BONUS_LESSONS = 0 + COURSE_COMPLETE_WITH_BONUS_LESSONS = 1 + COURSE_INCOMPLETE = 2 
+ DO_EXERCISE = 3 + DO_TUTORIAL = 4 + +class LearnNudge(KaggleObject): + r""" + Attributes: + course_index (int) + course_name (str) + course_slug (str) + next_item_name (str) + next_item_url (str) + next_item_type (LearnNudgeType) + """ + + def __init__(self): + self._course_index = 0 + self._course_name = "" + self._course_slug = "" + self._next_item_name = "" + self._next_item_url = "" + self._next_item_type = LearnNudgeType.COURSE_COMPLETE_NO_BONUS_LESSONS + self._freeze() + + @property + def course_index(self) -> int: + return self._course_index + + @course_index.setter + def course_index(self, course_index: int): + if course_index is None: + del self.course_index + return + if not isinstance(course_index, int): + raise TypeError('course_index must be of type int') + self._course_index = course_index + + @property + def course_name(self) -> str: + return self._course_name + + @course_name.setter + def course_name(self, course_name: str): + if course_name is None: + del self.course_name + return + if not isinstance(course_name, str): + raise TypeError('course_name must be of type str') + self._course_name = course_name + + @property + def course_slug(self) -> str: + return self._course_slug + + @course_slug.setter + def course_slug(self, course_slug: str): + if course_slug is None: + del self.course_slug + return + if not isinstance(course_slug, str): + raise TypeError('course_slug must be of type str') + self._course_slug = course_slug + + @property + def next_item_name(self) -> str: + return self._next_item_name + + @next_item_name.setter + def next_item_name(self, next_item_name: str): + if next_item_name is None: + del self.next_item_name + return + if not isinstance(next_item_name, str): + raise TypeError('next_item_name must be of type str') + self._next_item_name = next_item_name + + @property + def next_item_url(self) -> str: + return self._next_item_url + + @next_item_url.setter + def next_item_url(self, next_item_url: str): + if next_item_url is None: + del self.next_item_url + return + if not isinstance(next_item_url, str): + raise TypeError('next_item_url must be of type str') + self._next_item_url = next_item_url + + @property + def next_item_type(self) -> 'LearnNudgeType': + return self._next_item_type + + @next_item_type.setter + def next_item_type(self, next_item_type: 'LearnNudgeType'): + if next_item_type is None: + del self.next_item_type + return + if not isinstance(next_item_type, LearnNudgeType): + raise TypeError('next_item_type must be of type LearnNudgeType') + self._next_item_type = next_item_type + + +LearnNudge._fields = [ + FieldMetadata("courseIndex", "course_index", "_course_index", int, 0, PredefinedSerializer()), + FieldMetadata("courseName", "course_name", "_course_name", str, "", PredefinedSerializer()), + FieldMetadata("courseSlug", "course_slug", "_course_slug", str, "", PredefinedSerializer()), + FieldMetadata("nextItemName", "next_item_name", "_next_item_name", str, "", PredefinedSerializer()), + FieldMetadata("nextItemUrl", "next_item_url", "_next_item_url", str, "", PredefinedSerializer()), + FieldMetadata("nextItemType", "next_item_type", "_next_item_type", LearnNudgeType, LearnNudgeType.COURSE_COMPLETE_NO_BONUS_LESSONS, EnumSerializer()), +] + diff --git a/src/ksdk/kaggle_client.py b/src/ksdk/kaggle_client.py new file mode 100644 index 00000000..dd9765ea --- /dev/null +++ b/src/ksdk/kaggle_client.py @@ -0,0 +1,99 @@ +from kagglesdk.kernels.services.kernels_api_service import KernelsApiClient +from kagglesdk.blobs.services.blob_api_service 
import BlobApiClient +from kagglesdk.education.services.education_api_service import EducationApiClient +from kagglesdk.benchmarks.services.benchmarks_api_service import BenchmarksApiClient +from kagglesdk.models.services.model_api_service import ModelApiClient +from kagglesdk.models.services.model_service import ModelClient +from kagglesdk.competitions.services.competition_api_service import CompetitionApiClient +from kagglesdk.datasets.services.dataset_api_service import DatasetApiClient +from kagglesdk.common.services.operations_service import OperationsClient +from kagglesdk.admin.services.inbox_file_service import InboxFileClient +from kagglesdk.security.services.iam_service import IamClient +from kagglesdk.security.services.oauth_service import OAuthClient +from kagglesdk.search.services.search_api_service import SearchApiClient +from kagglesdk.users.services.account_service import AccountClient +from kagglesdk.kaggle_env import KaggleEnv +from kagglesdk.kaggle_http_client import KaggleHttpClient + + +class KaggleClient(object): + class Kernels(object): + def __init__(self, http_client: KaggleHttpClient): + self.kernels_api_client = KernelsApiClient(http_client) + + class Blobs(object): + def __init__(self, http_client: KaggleHttpClient): + self.blob_api_client = BlobApiClient(http_client) + + class Education(object): + def __init__(self, http_client: KaggleHttpClient): + self.education_api_client = EducationApiClient(http_client) + + class Benchmarks(object): + def __init__(self, http_client: KaggleHttpClient): + self.benchmarks_api_client = BenchmarksApiClient(http_client) + + class Models(object): + def __init__(self, http_client: KaggleHttpClient): + self.model_api_client = ModelApiClient(http_client) + self.model_client = ModelClient(http_client) + + class Competitions(object): + def __init__(self, http_client: KaggleHttpClient): + self.competition_api_client = CompetitionApiClient(http_client) + + class Datasets(object): + def __init__(self, http_client: KaggleHttpClient): + self.dataset_api_client = DatasetApiClient(http_client) + + class Common(object): + def __init__(self, http_client: KaggleHttpClient): + self.operations_client = OperationsClient(http_client) + + class Admin(object): + def __init__(self, http_client: KaggleHttpClient): + self.inbox_file_client = InboxFileClient(http_client) + + class Security(object): + def __init__(self, http_client: KaggleHttpClient): + self.iam_client = IamClient(http_client) + self.oauth_client = OAuthClient(http_client) + + class Search(object): + def __init__(self, http_client: KaggleHttpClient): + self.search_api_client = SearchApiClient(http_client) + + class Users(object): + def __init__(self, http_client: KaggleHttpClient): + self.account_client = AccountClient(http_client) + + def __init__(self, env: KaggleEnv = None, verbose: bool = False, username: str = None, password: str = None, api_token: str = None): + self._http_client = http_client = KaggleHttpClient(env, verbose, username=username, password=password, api_token=api_token) + self.kernels = KaggleClient.Kernels(http_client) + self.blobs = KaggleClient.Blobs(http_client) + self.education = KaggleClient.Education(http_client) + self.benchmarks = KaggleClient.Benchmarks(http_client) + self.models = KaggleClient.Models(http_client) + self.competitions = KaggleClient.Competitions(http_client) + self.datasets = KaggleClient.Datasets(http_client) + self.common = KaggleClient.Common(http_client) + self.admin = KaggleClient.Admin(http_client) + self.security = 
KaggleClient.Security(http_client) + self.search = KaggleClient.Search(http_client) + self.users = KaggleClient.Users(http_client) + self.username = username + self.password = password + self.api_token = api_token + + def http_client(self) -> str: + return self._http_client + + def _renew_iap_token(self) -> str: + return self.admin.admin_client.renew_iap_token() + + def __enter__(self): + self._http_client.__enter__() + return self + + def __exit__(self, exc_type, exc_value, tb): + self._http_client.__exit__(exc_type, exc_value, tb) diff --git a/src/ksdk/kaggle_creds.py b/src/ksdk/kaggle_creds.py new file mode 100644 index 00000000..3680cf04 --- /dev/null +++ b/src/ksdk/kaggle_creds.py @@ -0,0 +1,148 @@ +import json +import os +from datetime import datetime, timedelta, timezone +from kagglesdk.kaggle_client import KaggleClient +from kagglesdk.security.types.oauth_service import IntrospectTokenRequest +from kagglesdk.users.types.account_service import ( + ApiVersion, + GenerateAccessTokenRequest, + GenerateAccessTokenResponse, + ExpireApiTokenRequest, +) + + +class KaggleCredentials: + DEFAULT_CREDENTIALS_FILE = "~/.kaggle/credentials.json" + DEFAULT_ACCESS_TOKEN_EXPIRATION = timedelta(hours=12) + + def __init__( + self, + client: KaggleClient, + refresh_token: str = None, + access_token: str = None, + access_token_expiration: datetime = None, + username: str = None, + scopes: list[str] = None, + ): + self._client = client + self._refresh_token = refresh_token + self._access_token = access_token + self._access_token_expiration = access_token_expiration + self._username = username + self._scopes = scopes if scopes is not None else [] + + @classmethod + def load(cls, client: KaggleClient, file_path: str = None) -> "KaggleCredentials": + file_path = os.path.expanduser(file_path or KaggleCredentials.DEFAULT_CREDENTIALS_FILE) + if not os.path.exists(file_path): + return None + + try: + with open(file_path, "r") as f: + data = json.load(f) + except (json.JSONDecodeError, KeyError): + return None + + refresh_token = data.get("refresh_token") + if not refresh_token: + return None + + access_token_expiration = data.get("access_token_expiration") + if access_token_expiration: + access_token_expiration = datetime.fromisoformat(access_token_expiration) + else: + access_token_expiration = None + + return cls( + client=client, + refresh_token=refresh_token, + access_token=data.get("access_token"), + access_token_expiration=access_token_expiration, + username=data.get("username"), + scopes=data.get("scopes"), + ) + + def delete(self, file_path=DEFAULT_CREDENTIALS_FILE) -> None: + file_path = os.path.expanduser(file_path) + if os.path.exists(file_path): + os.remove(file_path) + + def save(self, file_path=DEFAULT_CREDENTIALS_FILE) -> None: + if not self._refresh_token: + raise Exception("Missing refresh token") + + file_path = os.path.expanduser(file_path) + dir_name = os.path.dirname(file_path) + if not os.path.exists(dir_name): + os.makedirs(dir_name) + + data = { + "refresh_token": self._refresh_token, + "access_token": self._access_token or "", + "access_token_expiration": ( + self._access_token_expiration.isoformat() if self._access_token_expiration else "" + ), + "username": self._username or "", + "scopes": self._scopes or [], + } + + with open(file_path, "w") as f: + json.dump(data, f, indent=2) + + try: + os.chmod(file_path, 0o600) + except OSError: + pass # Ignore errors, especially on Windows + + def introspect(self) -> str: + request = IntrospectTokenRequest() + request.token = 
self._access_token + response = self._client.security.oauth_client.introspect_token(request) + + if not response.active or not response.username: + raise Exception("Authentication failed.") + + self._username = response.username + return response.username + + def refresh_access_token(self) -> None: + if not self._refresh_token: + raise Exception("Missing refresh token") + + response = self.generate_access_token() + self._access_token_expiration = datetime.now(timezone.utc) + timedelta(seconds=response.expires_in) + self._access_token = response.token + self.save() + + def access_token_has_expired(self) -> bool: + return not self._access_token_expiration or self._access_token_expiration < datetime.now( + timezone.utc + ) - timedelta(minutes=30) + + def get_access_token(self) -> str: + if not self._access_token or self.access_token_has_expired(): + if not self._refresh_token: + return None + self.refresh_access_token() + return self._access_token + + def generate_access_token(self, expiration_duration: timedelta = None) -> GenerateAccessTokenResponse: + if not self._refresh_token: + return None + request = GenerateAccessTokenRequest() + request.refresh_token = self._refresh_token + request.api_version = ApiVersion.API_VERSION_V1 + request.expiration_duration = expiration_duration or KaggleCredentials.DEFAULT_ACCESS_TOKEN_EXPIRATION + return self._client.users.account_client.generate_access_token(request) + + def revoke_token(self, reason: str) -> None: + if not self._refresh_token: + return + request = ExpireApiTokenRequest() + request.token = self._refresh_token + request.reason = reason + self._client.users.account_client.expire_api_token(request) + self.delete() + + def get_username(self) -> str: + return self._username diff --git a/src/ksdk/kaggle_env.py b/src/ksdk/kaggle_env.py new file mode 100644 index 00000000..d108f602 --- /dev/null +++ b/src/ksdk/kaggle_env.py @@ -0,0 +1,91 @@ +import logging +import os +from enum import Enum +from pathlib import Path + +KAGGLE_NOTEBOOK_ENV_VAR_NAME = "KAGGLE_KERNEL_RUN_TYPE" +KAGGLE_DATA_PROXY_URL_ENV_VAR_NAME = "KAGGLE_DATA_PROXY_URL" +KAGGLE_API_V1_TOKEN_PATH = "KAGGLE_API_V1_TOKEN" + + +def get_logger(): + return logging.getLogger(__name__) + + +class KaggleEnv(Enum): + LOCAL = 0 # localhost + STAGING = 1 # staging.kaggle.com + ADMIN = 2 # admin.kaggle.com + QA = 3 # qa.kaggle.com + PROD = 4 # api.kaggle.com + + +_env_to_endpoint = { + KaggleEnv.LOCAL: "http://localhost", + KaggleEnv.STAGING: "https://staging.kaggle.com", + KaggleEnv.ADMIN: "https://admin.kaggle.com", + KaggleEnv.QA: "https://qa.kaggle.com", + KaggleEnv.PROD: "https://api.kaggle.com", +} + + +def get_endpoint(env: KaggleEnv): + return _env_to_endpoint[env] + + +def get_env(): + env = os.getenv("KAGGLE_API_ENVIRONMENT") + if env is None or env == "PROD": + return KaggleEnv.PROD + if env == "LOCALHOST": + return KaggleEnv.LOCAL + if env == "ADMIN": + return KaggleEnv.ADMIN + if env == "STAGING": + return KaggleEnv.STAGING + if env == "QA": + return KaggleEnv.QA + raise Exception(f'Unrecognized value in KAGGLE_API_ENVIRONMENT: "{env}"') + + +def is_in_kaggle_notebook() -> bool: + if os.getenv(KAGGLE_NOTEBOOK_ENV_VAR_NAME) is not None: + if os.getenv(KAGGLE_DATA_PROXY_URL_ENV_VAR_NAME) is None: + # Missing endpoint for the Jwt client + get_logger().warning( + "Can't use the Kaggle Cache. " + f"The '{KAGGLE_DATA_PROXY_URL_ENV_VAR_NAME}' environment variable is not set." 
+ ) + return False + return True + return False + + +def _get_access_token_from_file(path): + if not path: + return None + + token_path = Path(path) + if not token_path.exists(): + return None + + token_value = token_path.read_text().strip() + if not token_value: + return None + + get_logger().debug(f'Using access token from file: "{path}"') + return token_value + + +def get_access_token_from_env(): + if is_in_kaggle_notebook(): + token = _get_access_token_from_file(os.environ.get(KAGGLE_API_V1_TOKEN_PATH)) + if token: + return (token, KAGGLE_API_V1_TOKEN_PATH) + + access_token = os.environ.get("KAGGLE_API_TOKEN") + if access_token is not None: + get_logger().debug("Using access token from KAGGLE_API_TOKEN environment variable") + return (access_token, "KAGGLE_API_TOKEN") + + return (None, None) diff --git a/src/ksdk/kaggle_http_client.py b/src/ksdk/kaggle_http_client.py new file mode 100644 index 00000000..2e938371 --- /dev/null +++ b/src/ksdk/kaggle_http_client.py @@ -0,0 +1,259 @@ +import binascii +import codecs +import json +import os +import urllib.parse +from io import BytesIO +from pathlib import Path + +import requests +from urllib3.fields import RequestField + +from kagglesdk.kaggle_env import ( + get_endpoint, + get_env, + is_in_kaggle_notebook, + get_access_token_from_env, + KaggleEnv, +) +from kagglesdk.kaggle_object import KaggleObject +from typing import Type + +# TODO (http://b/354237483) Generate the client from the existing one. +# This was created from kaggle_api_client.py, prior to recent changes to +# auth handling. The new client requires KAGGLE_API_TOKEN, so it is not +# currently usable by the CLI. + + +def _headers_to_str(headers): + return "\n".join(f"{k}: {v}" for k, v in headers.items()) + + +def _get_apikey_creds(): + apikey_filename = os.path.expanduser("~/.kaggle/kaggle.json") + if not os.path.exists(apikey_filename): + return None + + kaggle_json = None + with open(apikey_filename) as apikey_file: + kaggle_json = apikey_file.read() + + if not kaggle_json or not kaggle_json.strip(): + return None + + api_key_data = json.loads(kaggle_json) + username = api_key_data["username"] + api_key = api_key_data["key"] + return username, api_key + + +class KaggleHttpClient(object): + _xsrf_cookie_name = "XSRF-TOKEN" + _csrf_cookie_name = "CSRF-TOKEN" + _xsrf_cookies = (_xsrf_cookie_name, _csrf_cookie_name) + _xsrf_header_name = "X-XSRF-TOKEN" + + def __init__( + self, + env: KaggleEnv = None, + verbose: bool = False, + username: str = None, + password: str = None, + api_token: str = None, + ): + self._env = env or get_env() + self._signed_in = None + self._endpoint = get_endpoint(self._env) + self._verbose = verbose + self._session = None + self._username = username + self._password = password + self._api_token = api_token + + def call( + self, + service_name: str, + request_name: str, + request: KaggleObject, + response_type: Type[KaggleObject], + ): + self._init_session() + http_request = self._prepare_request(service_name, request_name, request) + + # Merge environment settings into session + settings = self._session.merge_environment_settings(http_request.url, {}, None, None, None) + http_response = self._session.send(http_request, **settings) + + response = self._prepare_response(response_type, http_response) + return response + + def _prepare_request(self, service_name: str, request_name: str, request: KaggleObject): + request_url = self._get_request_url(service_name, request_name) + http_request = requests.Request( + method="POST", + url=request_url, + 
json=request.__class__.to_dict(request), + headers=self._session.headers, + auth=self._session.auth, + ) + prepared_request = http_request.prepare() + self._print_request(prepared_request) + return prepared_request + + def _prepare_response(self, response_type, http_response): + """Extract the kaggle response and raise an exception if it is an error.""" + self._print_response(http_response) + try: + if "application/json" in http_response.headers["Content-Type"]: + resp = http_response.json() + if "code" in resp and resp["code"] >= 400: + raise requests.exceptions.HTTPError(resp["message"], response=http_response) + except KeyError: + pass + http_response.raise_for_status() + if response_type is None: # Method doesn't have a return type + return None + return response_type.prepare_from(http_response) + + def _print_request(self, request): + if not self._verbose: + return + self._print("---------------------Request----------------------") + self._print(f"{request.method} {request.url}\n{_headers_to_str(request.headers)}\n\n{request.body}") + self._print("--------------------------------------------------") + + def _print_response(self, response, body=True): + if not self._verbose: + return + self._print("---------------------Response---------------------") + self._print(f"{response.status_code}\n{_headers_to_str(response.headers)}") + if body: + self._print(f"\n{response.text}") + self._print("--------------------------------------------------") + + def _print(self, message: str): + if self._verbose: + print(message) + + def __enter__(self): + self._init_session() + return self + + def __exit__(self, exc_type, exc_value, tb): + if self._session is not None: + self._session.close() + + def _init_session(self): + if self._session is not None: + return self._session + + self._session = requests.Session() + self._session.headers.update({"User-Agent": "kaggle-api/v1.7.0", "Content-Type": "application/json"}) # Was: V2 + + iap_token = self._get_iap_token_if_required() + if iap_token is not None: + self._session.headers.update( + { + # https://cloud.google.com/iap/docs/authentication-howto#authenticating_from_proxy-authorization_header + "Proxy-Authorization": f"Bearer {iap_token}", + } + ) + + self._try_fill_auth() + # self._fill_xsrf_token(iap_token) # TODO Make this align with original handler. 
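
The `_init_session`/`_try_fill_auth` logic above resolves credentials in this order: an explicitly passed api_token, an access token from the environment (get_access_token_from_env), an explicit username/password pair, and finally the API key stored in ~/.kaggle/kaggle.json. A minimal usage sketch, not part of this diff, that relies on that resolution and exercises the generated education client; the question id and learn-tools version below are hypothetical placeholders:

from kagglesdk.kaggle_client import KaggleClient
from kagglesdk.education.types.education_api_service import ApiTrackExerciseInteractionRequest
from kagglesdk.education.types.education_service import (
    LearnExerciseInteractionType,
    LearnExerciseOutcomeType,
)

# Assumes KAGGLE_API_TOKEN is set or ~/.kaggle/kaggle.json exists; otherwise
# _try_fill_auth leaves the session unauthenticated and the call will fail.
with KaggleClient(verbose=True) as client:
    request = ApiTrackExerciseInteractionRequest()
    request.interaction_type = LearnExerciseInteractionType.CHECK
    request.outcome_type = LearnExerciseOutcomeType.PASS
    request.question_id = "1"              # hypothetical question id
    request.learn_tools_version = "0.3.4"  # hypothetical version string
    # KaggleHttpClient.call routes this via _get_request_url to
    # {endpoint}/v1/education.EducationApiService/TrackExerciseInteraction.
    response = client.education.education_api_client.track_exercise_interaction(request)
    print(response.show_login_prompt)
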
+ + def _get_iap_token_if_required(self): + if self._env not in (KaggleEnv.STAGING, KaggleEnv.ADMIN): + return None + iap_token = os.getenv("KAGGLE_IAP_TOKEN") + if iap_token is None: + raise Exception(f'Must set KAGGLE_IAP_TOKEN to access "{self._endpoint}"') + return iap_token + + def _fill_xsrf_token(self, iap_token): + initial_get_request = requests.Request( + method="GET", + url=self._endpoint, + headers=self._session.headers, + auth=self._session.auth, + ) + prepared_request = initial_get_request.prepare() + self._print_request(prepared_request) + + http_response = self._session.send(prepared_request) + + self._print_response(http_response, body=False) + if iap_token is not None and http_response.status_code in (401, 403): + raise requests.exceptions.HTTPError("IAP token invalid or expired") + http_response.raise_for_status() + + self._session.headers.update( + { + KaggleHttpClient._xsrf_header_name: self._session.cookies[KaggleHttpClient._xsrf_cookie_name], + } + ) + + def build_start_oauth_url( + self, + client_id: str, + redirect_uri: str, + scope: list[str], + state: str, + code_challenge: str, + ) -> str: + params = { + "response_type": "code", + "client_id": client_id, + "redirect_uri": redirect_uri, + "scope": " ".join(scope), + "state": state, + "code_challenge": code_challenge, + "code_challenge_method": "S256", + "response_type": "code", + "response_mode": "query", + } + auth_url = f"{self.get_non_api_endpoint()}/api/v1/oauth2/authorize" + query_string = urllib.parse.urlencode(params, quote_via=urllib.parse.quote_plus) + return f"{auth_url}?{query_string}" + + def get_oauth_default_redirect_url(self) -> str: + return f"{self.get_non_api_endpoint()}/account/api/oauth/token" + + def get_non_api_endpoint(self) -> str: + return "https://www.kaggle.com" if self._env == KaggleEnv.PROD else self._endpoint + + class BearerAuth(requests.auth.AuthBase): + + def __init__(self, token): + self.token = token + + def __call__(self, r): + r.headers["Authorization"] = f"Bearer {self.token}" + return r + + def _try_fill_auth(self): + if self._signed_in is not None: + return + + if self._api_token is None: + (api_token, _) = get_access_token_from_env() + self._api_token = api_token + + if self._api_token is not None: + self._session.auth = KaggleHttpClient.BearerAuth(self._api_token) + self._signed_in = True + return + + if self._username and self._password: + apikey_creds = self._username, self._password + else: + apikey_creds = _get_apikey_creds() + if apikey_creds is not None: + self._session.auth = apikey_creds + self._signed_in = True + return + + self._signed_in = False + + def _get_request_url(self, service_name: str, request_name: str): + return f"{self._endpoint}/v1/{service_name}/{request_name}" diff --git a/src/ksdk/kaggle_oauth.py b/src/ksdk/kaggle_oauth.py new file mode 100644 index 00000000..61340ac9 --- /dev/null +++ b/src/ksdk/kaggle_oauth.py @@ -0,0 +1,200 @@ +import base64 +import hashlib +import http.server +import logging +import os +import platform +import random +import secrets +import socketserver +import uuid +import urllib.parse +import webbrowser +from datetime import datetime, timedelta, timezone +from kagglesdk.kaggle_client import KaggleClient +from kagglesdk.kaggle_creds import KaggleCredentials +from kagglesdk.security.types.oauth_service import ExchangeOAuthTokenRequest + + +class KaggleOAuth: + OAUTH_CLIENT_ID = "kagglesdk" + + def __init__(self, client: KaggleClient): + self._client = client + self._http_client = client.http_client() + self._server_running = 
False + self._creds = None + self._logger = logging.getLogger(__name__) + + class OAuthState: + def __init__(self): + self.state = str(uuid.uuid4()) + self.code_verifier = KaggleOAuth.OAuthState._generate_code_verifier() + self.code_challenge = KaggleOAuth.OAuthState._generate_code_challenge(self.code_verifier) + + def _generate_state(length: int = 32): + return secrets.token_urlsafe(length) + + def _generate_code_verifier(length: int = 64) -> str: + if not 42 <= length <= 128: + raise ValueError("Code verifier length must be between 42 and 128 characters.") + return secrets.token_urlsafe(length) + + def _generate_code_challenge(code_verifier: str) -> str: + code_verifier_bytes = code_verifier.encode("utf-8") + code_challenge_bytes = hashlib.sha256(code_verifier_bytes).digest() + code_challenge_base64 = base64.urlsafe_b64encode(code_challenge_bytes).decode("utf-8") + return code_challenge_base64 + + class OAuthCallbackHandler(http.server.BaseHTTPRequestHandler): + def __init__( + self, + *args, + oauth: "KaggleOAuth" = None, + oauth_state: "KaggleOAuth.OAuthState" = None, + on_success=None, + logger=None, + **kwargs, + ): + self._oauth = oauth + self._oauth_state = oauth_state + self._on_success = on_success + self._logger = logger + super().__init__(*args, **kwargs) + + def do_GET(self): + if self.path == "/favicon.ico": + return + try: + self._handle_oauth_callback() + finally: + self._stop_server() + + def _handle_oauth_callback(self): + parsed_url = urllib.parse.urlparse(self.path) + query_params = urllib.parse.parse_qs(parsed_url.query) + if "code" in query_params and "state" in query_params: + code = query_params["code"][0] + state = query_params["state"][0] + self._logger.debug(f"\nReceived OAuth Callback:") + self._logger.debug(f" code : {code}") + self._logger.debug(f" state: {state}") + if state == self._oauth_state.state: + self.send_response(200) + self.send_header("Content-type", "text/html") + self.end_headers() + self.wfile.write( + b"
You can close this window.
" + ) + self._on_success(code) + else: + self._logger.error(f"Invalid state! Expected: {self._oauth_state.state}, Received: {state}") + self.send_response(400) + self.send_header("Content-type", "text/html") + self.end_headers() + self.wfile.write(b"Invalid callback parameters.
" + ) + + def _stop_server(self): + self._oauth.stop_server() + + @staticmethod + def _can_open_browser(): + if platform.system() in ["Windows", "Darwin"]: + return True # Assume GUI on Windows/Mac + + if "DISPLAY" in os.environ and os.environ["DISPLAY"] != "": + return True # X11 display available + + return False + + def _exchange_oauth_token(self, code: str, scopes: list[str], oauth_state: "KaggleOAuth.OAuthState"): + request = ExchangeOAuthTokenRequest() + request.code = code + request.code_verifier = oauth_state.code_verifier + request.grant_type = "authorization_code" + + response = self._client.security.oauth_client.exchange_oauth_token(request) + self._creds = KaggleCredentials( + client=self._client, + refresh_token=response.refreshToken, + access_token=response.accessToken, + access_token_expiration=datetime.now(timezone.utc) + timedelta(seconds=response.expires_in), + username=response.username, + scopes=scopes, + ) + + def _run_oauth_flow(self, scopes: list[str], no_launch_browser: bool) -> KaggleCredentials: + use_browser = not no_launch_browser and KaggleOAuth._can_open_browser() + redirect_uri = self._http_client.get_oauth_default_redirect_url() + if use_browser: + port = random.randint(8000, 9000) + redirect_uri = f"http://localhost:{port}" + self._logger.debug(f"Will listen for the callback at: {redirect_uri}") + + oauth_state = KaggleOAuth.OAuthState() + oauth_start_url = self._http_client.build_start_oauth_url( + client_id=KaggleOAuth.OAUTH_CLIENT_ID, + redirect_uri=redirect_uri, + scope=scopes, + state=oauth_state.state, + code_challenge=oauth_state.code_challenge, + ) + + if use_browser: + webbrowser.open(oauth_start_url) + print("Your browser has been opened to visit:") + print(f" {oauth_start_url}\n\n") + + def exchange_oauth_token(code: str): + self._exchange_oauth_token(code, scopes, oauth_state) + + def handler_factory(*args, **kwargs): + return KaggleOAuth.OAuthCallbackHandler( + *args, + oauth=self, + oauth_state=oauth_state, + on_success=exchange_oauth_token, + logger=self._logger, + **kwargs, + ) + + self._server_running = True + with socketserver.TCPServer(("127.0.0.1", port), handler_factory) as httpd: + self._logger.debug(f"Listening for callback on port {port}...") + while self._server_running: + httpd.handle_request() + self._logger.debug("OAuth flow completed (or server stopped).") + else: + print("\nGo to the following link in your browser, and complete the sign-in prompts at Kaggle:\n") + print(f" {oauth_start_url}") + print( + "\nOnce finished, enter the verification code provided in your browser: ", + end="", + ) + code = input() + self._exchange_oauth_token(code, scopes, oauth_state) + + return self._creds + + def stop_server(self): + self._server_running = False + + def _ensure_creds_valid(self, creds: KaggleCredentials): + if not creds: + raise Exception("Authentication failed.") + return creds.introspect() + + def authenticate(self, scopes: list[str], no_launch_browser: bool = False) -> KaggleCredentials: + creds = self._run_oauth_flow(scopes, no_launch_browser) + username = self._ensure_creds_valid(creds) + creds.save() + print(f"\nYou are now logged in as [{username}]\n") + return creds diff --git a/src/ksdk/kaggle_object.py b/src/ksdk/kaggle_object.py new file mode 100644 index 00000000..6f050e87 --- /dev/null +++ b/src/ksdk/kaggle_object.py @@ -0,0 +1,344 @@ +import json +import re +from datetime import datetime, timedelta +from google.protobuf.field_mask_pb2 import FieldMask + + +class ObjectSerializer(object): + + def __init__(self, 
to_dict_value, from_dict_value): + self.to_dict_value = to_dict_value + self.from_dict_value = from_dict_value + + +class PredefinedSerializer(ObjectSerializer): + + def __init__(self): + """Predefined objects such as int, float etc are serialized/deserialized directly.""" + ObjectSerializer.__init__(self, lambda cls, v, _: v, lambda cls, v: v) + + +# Adapted from https://stackoverflow.com/questions/1175208/elegant-python-function-to-convert-camelcase-to-snake-case +_pascal_to_upper_snake_case_regex = re.compile("((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))") + + +def _pascal_case_to_upper_snake_case(string): + return _pascal_to_upper_snake_case_regex.sub(r"_\1", string).upper() + + +def _convert(camel_input): + words = re.findall(r"[A-Z]?[a-z]+|[A-Z]{2,}(?=[A-Z][a-z]|\d|\W|$)|\d+", camel_input) + return "_".join(map(str.lower, words)) + + +class EnumSerializer(ObjectSerializer): + + def __init__(self): + """ + Enum objects are serialized using their ".name" field and deserialized by indexing the string in the Enum type. + Example: + class Foo(Enum): + TEST = 1 + foo = Foo.TEST + foo.name # => returns "TEST" + Foo["TEST"] # => returns Foo.TEST enum value. + """ + ObjectSerializer.__init__( + self, + lambda cls, v, _: EnumSerializer._to_str(cls, v), + lambda cls, v: EnumSerializer._from_str(cls, v), + ) + + @staticmethod + def _to_str(cls, v): + # "v" corresponds to an enum instance: Example foo or Foo.Test above. + # "cls" corresponds to the enum type Foo above. + return v.name + + @staticmethod + def _from_str(cls, v): + # "v" corresponds to enum string: Example "TEST" above. + # "cls" corresponds to the enum type Foo above. + try: + return cls[v] + except KeyError: + dct = vars(cls) + n = v.lower() + nn = _convert(v).lower() + enum_prefix = _pascal_case_to_upper_snake_case(cls.__name__).lower() + for key in dct.keys(): + k = key.lower() + if k == n: + return dct[key] + if k.startswith(enum_prefix) and k.endswith(n) or k.endswith(nn): + return dct[key] + raise + + +class ListSerializer(ObjectSerializer): + + def __init__(self, item_serializer: ObjectSerializer): + """ + Lists are serialized based on the type they contain. Since objects are generated from proto files, a list always + contains objects of the same type, which is serialized/deserialized using "item_serializer". + """ + ObjectSerializer.__init__( + self, + lambda cls, l, ignore_defaults: [item_serializer.to_dict_value(cls, v, ignore_defaults) for v in l], + lambda cls, l: [item_serializer.from_dict_value(cls, v) for v in l], + ) + + +class MapSerializer(ObjectSerializer): + + def __init__(self, item_serializer: ObjectSerializer): + """ + Maps are serialized based on type of their values. Since maps keys are always predefined types, we don't need a + serializer for them. 
+ """ + ObjectSerializer.__init__( + self, + lambda cls, d, ignore_defaults: { + k: item_serializer.to_dict_value(cls, v, ignore_defaults) for k, v in d.items() + }, + lambda cls, d: {k: item_serializer.from_dict_value(cls, v) for k, v in d.items()}, + ) + + +class DateTimeSerializer(ObjectSerializer): + + def __init__(self): + """Date times are serialized/deserialized as a string in iso format""" + ObjectSerializer.__init__( + self, + lambda cls, dt, _: DateTimeSerializer._to_str(dt), + lambda _, v: DateTimeSerializer._from_str(v), + ) + + @staticmethod + def _to_str(dt): + return dt.isoformat(timespec="milliseconds") + "Z" + + @staticmethod + def _from_str(v): + v = v.rstrip("Z") + fields = v.rsplit(".", maxsplit=1) + if len(fields) == 1: + return datetime.fromisoformat(v) + (dt, nanos) = fields + millis = nanos[:3] + try: + return datetime.fromisoformat(f"{dt}.{millis}") + except ValueError: + return datetime.fromisoformat(dt) # Python 3.9, 3.10 + + +class TimeDeltaSerializer(ObjectSerializer): + # Scaling factors idea from https://github.com/protocolbuffers/protobuf/blob/master/csharp/src/Google.Protobuf/JsonParser.cs + SUBSECOND_SCALING_FACTORS = [ + 0, + 100_000_000, + 10_000_000, + 1_000_000, + 100_000, + 10_000, + 1_000, + 100, + 10, + 1, + ] + + def __init__(self): + """Time deltas are serialized/deserialized as a string in "