8 changes: 4 additions & 4 deletions README.md
@@ -2426,7 +2426,7 @@ There are type-specific methods, but they can be used in the same way.

```python
dataset_object = client.create_image_dataset_object(
dataset_id="YOUR_DATASET_ID",
dataset_version_id="YOUR_DATASET_VERSION_ID",
name="brushwood_dog.jpg",
file_path="./brushwood_dog.jpg",
)
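# The video and audio variants follow the same pattern. A minimal sketch,
# assuming the create_video_dataset_object / create_audio_dataset_object
# signatures shown later in this diff (file names here are placeholders):
video_object = client.create_video_dataset_object(
    dataset_version_id="YOUR_DATASET_VERSION_ID",
    name="brushwood_dog.mp4",
    file_path="./brushwood_dog.mp4",
)
audio_object = client.create_audio_dataset_object(
    dataset_version_id="YOUR_DATASET_VERSION_ID",
    name="brushwood_dog.mp3",
    file_path="./brushwood_dog.mp3",
)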
@@ -2466,7 +2466,7 @@ The success response is the same as when created.
Get all dataset objects in the dataset. (Up to 1000 objects)

```python
- dataset_objects = client.get_dataset_objects(dataset_id="YOUR_DATASET_ID")
+ dataset_objects = client.get_dataset_objects(dataset_version_id="YOUR_DATASET_VERSION_ID")
```

The success response is the same as when created, but it is an array.
@@ -2475,7 +2475,7 @@ You can filter by keywords.

```python
dataset_objects = client.get_dataset_objects(
dataset_id="YOUR_DATASET_ID", keyword="dog"
dataset_version_id="YOUR_DATASET_VERSION_ID", keyword="dog"
)
```
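Both `offset` and `limit` (up to 1000 per call) are optional here, per the `get_dataset_objects` signature changed later in this diff. A minimal pagination sketch, assuming a dataset version with more than 1000 objects:

```python
# client = fastlabel.Client() as in the setup above.
all_objects = []
offset = 0
page_size = 1000
while True:
    page = client.get_dataset_objects(
        dataset_version_id="YOUR_DATASET_VERSION_ID",
        offset=offset,
        limit=page_size,
    )
    all_objects.extend(page)
    if len(page) < page_size:
        break
    offset += page_size
```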

@@ -2503,7 +2503,7 @@ Get all import histories in the dataset. (Up to 1000 histories)

```python
datasets = client.get_dataset_object_import_histories(
dataset_id="YOUR_DATASET_ID"
dataset_version_id="YOUR_DATASET_VERSION_ID"
)
```
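The `offset` and `limit` parameters are available here as well (the default `limit` is 5 and the maximum 1000, per the client signature later in this diff). A minimal sketch:

```python
# Fetch up to 10 import histories for a dataset version.
histories = client.get_dataset_object_import_histories(
    dataset_version_id="YOUR_DATASET_VERSION_ID",
    limit=10,
)
```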

2 changes: 1 addition & 1 deletion examples/create_dataset.py
@@ -5,6 +5,6 @@
client = fastlabel.Client()

dataset = client.create_dataset(
name="Japanese Dogs", slug="japanese-dogs", type="video"
name="Japanese Dogs", slug="japanese-dogs", type="image"
)
pprint(dataset)
12 changes: 12 additions & 0 deletions examples/create_image_dataset_object.py
@@ -0,0 +1,12 @@
from pprint import pprint

import fastlabel

client = fastlabel.Client()

dataset_object = client.create_image_dataset_object(
dataset_version_id="YOUR_DATASET_VERSION_ID",
name="NAME",
file_path="FILE_PATH",
)
pprint(dataset_object)
5 changes: 5 additions & 0 deletions examples/delete_dataset.py
@@ -0,0 +1,5 @@
import fastlabel

client = fastlabel.Client()

client.delete_dataset(dataset_id="YOUR_DATASET_ID")
11 changes: 11 additions & 0 deletions examples/delete_dataset_objects.py
@@ -0,0 +1,11 @@
import fastlabel

client = fastlabel.Client()

client.delete_dataset_objects(
dataset_id="YOUR_DATASET_ID",
dataset_object_ids=[
"YOUR_DATASET_OBJECT_ID_1",
"YOUR_DATASET_OBJECT_ID_2",
],
)
8 changes: 8 additions & 0 deletions examples/find_dataset.py
@@ -0,0 +1,8 @@
from pprint import pprint

import fastlabel

client = fastlabel.Client()

dataset = client.find_dataset(dataset_id="YOUR_DATASET_ID")
pprint(dataset)
8 changes: 8 additions & 0 deletions examples/find_dataset_object.py
@@ -0,0 +1,8 @@
from pprint import pprint

import fastlabel

client = fastlabel.Client()

dataset_object = client.find_dataset_object(dataset_object_id="YOUR_DATASET_OBJECT_ID")
pprint(dataset_object)
10 changes: 10 additions & 0 deletions examples/get_dataset_object_import_histories.py
@@ -0,0 +1,10 @@
from pprint import pprint

import fastlabel

client = fastlabel.Client()

dataset_object_import_histories = client.get_dataset_object_import_histories(
dataset_version_id="YOUR_DATASET_VERSION_ID"
)
pprint(dataset_object_import_histories)
10 changes: 10 additions & 0 deletions examples/get_dataset_objects.py
@@ -0,0 +1,10 @@
from pprint import pprint

import fastlabel

client = fastlabel.Client()

dataset_objects = client.get_dataset_objects(
dataset_version_id="YOUR_DATASET_VERSION_ID"
)
pprint(dataset_objects)
8 changes: 8 additions & 0 deletions examples/get_datasets.py
@@ -0,0 +1,8 @@
from pprint import pprint

import fastlabel

client = fastlabel.Client()

datasets = client.get_datasets()
pprint(datasets)
8 changes: 8 additions & 0 deletions examples/update_dataset.py
@@ -0,0 +1,8 @@
from pprint import pprint

import fastlabel

client = fastlabel.Client()

dataset = client.update_dataset(dataset_id="YOUR_DATASET_ID", name="New Name")
pprint(dataset)
42 changes: 23 additions & 19 deletions fastlabel/__init__.py
@@ -1096,7 +1096,11 @@ def create_integrated_image_classification_task(
external_approver is slug of external approve user (Optional).
"""
endpoint = "tasks/integrated-image/classification"
payload = {"project": project, "filePath": file_path, "storageType": storage_type}
payload = {
"project": project,
"filePath": file_path,
"storageType": storage_type,
}
attributes = attributes or []
tags = tags or []
if status:
@@ -3780,7 +3784,7 @@ def copy_project(self, project_id: str) -> None:

def find_dataset(self, dataset_id: str) -> dict:
"""
- Find a dataset.
+ Find a dataset with its latest version.
"""
endpoint = "datasets/" + dataset_id
return self.api.get_request(endpoint)
@@ -3793,7 +3797,7 @@ def get_datasets(
limit: int = 100,
) -> list:
"""
- Returns a list of datasets.
+ Returns a list of datasets with their latest versions.

Returns up to 1000 at a time; to get more, set offset as the starting position
to fetch.
@@ -3873,7 +3877,7 @@ def find_dataset_object(self, dataset_object_id: str) -> dict:

def get_dataset_objects(
self,
- dataset_id: str = None,
+ dataset_version_id: str,
keyword: str = None,
offset: int = None,
limit: int = 100,
@@ -3884,7 +3888,7 @@ def get_dataset_objects(
Returns up to 1000 at a time; to get more, set offset as the starting position
to fetch.

- dataset_id is dataset object in dataset (Required).
+ dataset_version_id is the ID of the dataset version that contains the dataset objects (Required).
keyword is a search term matched against dataset object names (Optional).
offset is the starting position number to fetch (Optional).
limit is the max number to fetch (Optional).
@@ -3894,7 +3898,7 @@ def get_dataset_objects(
"Limit must be less than or equal to 1000.", 422
)
endpoint = "dataset-objects"
params = {"datasetId": dataset_id}
params = {"datasetVersionId": dataset_version_id}
if keyword:
params["keyword"] = keyword
if offset:
@@ -3905,14 +3909,14 @@ def get_dataset_objects(

def create_image_dataset_object(
self,
- dataset_id: str,
+ dataset_version_id: str,
name: str,
file_path: str,
) -> dict:
"""
Create an image dataset object.

- dataset_id is dataset object in dataset (Required).
+ dataset_version_id is the ID of the dataset version to add the dataset object to (Required).
name is a unique identifier of the dataset object in your dataset (Required).
file_path is a path to the data. Supported extensions are png, jpg, and jpeg (Required).
"""
@@ -3926,7 +3930,7 @@ def create_image_dataset_object(
raise FastLabelInvalidException("Supported image size is under 20 MB.", 422)

payload = {
"datasetId": dataset_id,
"datasetVersionId": dataset_version_id,
"name": name,
"file": utils.base64_encode(file_path),
"type": "image",
@@ -3935,14 +3939,14 @@ def create_video_dataset_object(

def create_video_dataset_object(
self,
- dataset_id: str,
+ dataset_version_id: str,
name: str,
file_path: str,
) -> dict:
"""
Create a video dataset object.

- dataset_id is dataset object in dataset (Required).
+ dataset_version_id is the ID of the dataset version to add the dataset object to (Required).
name is a unique identifier of the dataset object in your dataset (Required).
file_path is a path to the data. The supported extension is mp4 (Required).
"""
@@ -3956,7 +3960,7 @@ def create_video_dataset_object(
)

payload = {
"datasetId": dataset_id,
"datasetVersionId": dataset_version_id,
"name": name,
"file": utils.base64_encode(file_path),
"type": "video",
@@ -3965,14 +3969,14 @@ def create_audio_dataset_object(

def create_audio_dataset_object(
self,
- dataset_id: str,
+ dataset_version_id: str,
name: str,
file_path: str,
) -> dict:
"""
Create an audio dataset object.

- dataset_id is dataset object in dataset (Required).
+ dataset_version_id is the ID of the dataset version to add the dataset object to (Required).
name is a unique identifier of the dataset object in your dataset (Required).
file_path is a path to the data. Supported extensions are mp3, wav, and w4a (Required).
"""
@@ -3988,7 +3992,7 @@ def create_audio_dataset_object(
)

payload = {
"datasetId": dataset_id,
"datasetVersionId": dataset_version_id,
"name": name,
"file": utils.base64_encode(file_path),
"type": "audio",
@@ -4007,17 +4011,17 @@ def delete_dataset_objects(

def get_dataset_object_import_histories(
self,
- dataset_id: str = None,
+ dataset_version_id: str,
offset: int = None,
limit: int = 5,
) -> list:
"""
- Returns a list of dataset objects.
+ Returns a list of dataset object import histories.

Returns up to 1000 at a time; to get more, set offset as the starting position
to fetch.

- dataset_id is import histories in dataset (Required).
+ dataset_version_id is the ID of the dataset version whose import histories are returned (Required).
offset is the starting position number to fetch (Optional).
limit is the max number to fetch (Optional).
"""
@@ -4026,7 +4030,7 @@ def get_dataset_object_import_histories(
"Limit must be less than or equal to 1000.", 422
)
endpoint = "dataset-objects/imports/histories"
params = {"datasetId": dataset_id}
params = {"datasetVersionId": dataset_version_id}
if offset:
params["offset"] = offset
if limit: