From 763f7c0982caa135a13bdb8f3707c518123f1dee Mon Sep 17 00:00:00 2001
From: nvoxland-al <151186252+nvoxland-al@users.noreply.github.com>
Date: Fri, 26 Jan 2024 19:43:58 +0000
Subject: [PATCH] Update tests to work with controlplane changes (#2752)

Update tests to work with controlplane changes

---------

Co-authored-by: Nathan Voxland
Co-authored-by: activesoull
---
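The tests now authenticate with a pre-issued token read from the environment
instead of logging in with username/password. A minimal sketch of the flow the
updated fixtures assume (the variable name ACTIVELOOP_HUB_TOKEN and the
deeplake.query(..., token=...) call are taken from this patch; the surrounding
script is illustrative, not part of the change):

    import os
    import deeplake

    # A dev token is issued out-of-band and exported before the test run;
    # request_auth_token() is no longer called with username/password.
    token = os.getenv("ACTIVELOOP_HUB_TOKEN")
    assert token is not None, "export ACTIVELOOP_HUB_TOKEN to run cloud tests"

    # The token is passed explicitly rather than relying on CLI login state.
    ds = deeplake.query(
        'SELECT * FROM "hub://activeloop/mnist-train"', token=token
    )
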
 .github/workflows/test-push.yml               |  2 +-
 deeplake/api/tests/test_dataset.py            | 13 ----
 deeplake/cli/test_cli.py                      | 19 ------
 deeplake/client/client.py                     | 12 +---
 deeplake/client/test_client.py                | 13 ----
 deeplake/constants.py                         |  3 +
 deeplake/core/chunk_engine.py                 | 44 ++++++++-----
 deeplake/core/dataset/dataset.py              | 28 +++++----
 deeplake/core/io.py                           |  3 +-
 deeplake/core/meta/dataset_meta.py            |  8 ++-
 deeplake/core/query/test/test_query.py        |  8 ++-
 deeplake/core/sample.py                       |  8 ++-
 deeplake/core/tensor.py                       | 16 +++--
 deeplake/core/tests/test_query.py             | 21 +++----
 .../dataset_handlers/dataset_handler_base.py  |  6 +-
 .../vectorstore/test_deeplake_vectorstore.py  | 63 -------------------
 .../version_control/test_version_control.py   | 18 +++---
 deeplake/enterprise/test_query.py             |  9 +--
 deeplake/integrations/mmdet/mmdet_.py         |  1 -
 deeplake/integrations/mmdet/mmdet_utils.py    |  6 +-
 deeplake/integrations/tf/__init__.py          |  1 +
 .../integrations/tf/datasettotensorflow.py    |  1 +
 .../tf/deeplake_tensorflow_dataset.py         |  1 +
 deeplake/tests/client_fixtures.py             | 16 +++--
 deeplake/util/diff.py                         |  4 +-
 deeplake/util/object_3d/ply_readers.py        | 12 ++--
 deeplake/util/transform.py                    |  8 ++-
 27 files changed, 124 insertions(+), 220 deletions(-)

diff --git a/.github/workflows/test-push.yml b/.github/workflows/test-push.yml
index bf22600a15..1bf9b2438c 100644
--- a/.github/workflows/test-push.yml
+++ b/.github/workflows/test-push.yml
@@ -142,7 +142,7 @@ jobs:
       gcp_sa_credentials_json: ${{ secrets.GCP_SA_CREDENTIALS_JSON }}
       azure_creds_json: ${{ secrets.AZURE_CREDS_JSON }}
       hub_username: ${{ secrets.ACTIVELOOP_HUB_USERNAME }}
-      hub_password: ${{ secrets.ACTIVELOOP_HUB_PASSWORD }}
+      hub_token: ${{ secrets.ACTIVELOOP_HUB_TOKEN }}
       kaggle_username: ${{ secrets.KAGGLE_USERNAME }}
       kaggle_key: ${{ secrets.KAGGLE_KEY }}
       oauth_client_id: ${{ secrets.GDRIVE_CLIENT_ID }}
diff --git a/deeplake/api/tests/test_dataset.py b/deeplake/api/tests/test_dataset.py
index 001a8c3be9..0106067ddf 100644
--- a/deeplake/api/tests/test_dataset.py
+++ b/deeplake/api/tests/test_dataset.py
@@ -95,16 +95,3 @@ def test_persistence_bug(local_ds_generator):

     ds = local_ds_generator()
     np.testing.assert_array_equal(ds[tensor_name].numpy(), np.array([[1], [2]]))
-
-
-def test_dataset_token(local_ds_generator, hub_cloud_dev_credentials):
-    username, password = hub_cloud_dev_credentials
-    CliRunner().invoke(login, f"-u {username} -p {password}")
-    ds = local_ds_generator()
-    token = ds.token
-    token_username = jwt.decode(token, options={"verify_signature": False})["id"]
-    assert token_username == username
-
-    CliRunner().invoke(logout)
-    ds = local_ds_generator()
-    assert ds.token is None
diff --git a/deeplake/cli/test_cli.py b/deeplake/cli/test_cli.py
index 1611944bd4..082404fa82 100644
--- a/deeplake/cli/test_cli.py
+++ b/deeplake/cli/test_cli.py
@@ -5,25 +5,6 @@ import pytest


-@pytest.mark.parametrize("method", ["creds", "token"])
-def test_cli_auth(hub_cloud_dev_credentials, hub_cloud_dev_token, method):
-    username, password = hub_cloud_dev_credentials
-
-    runner = CliRunner()
-
-    if method == "creds":
-        result = runner.invoke(login, f"-u {username} -p {password}")
-    elif method == "token":
-        result = runner.invoke(login, f"-t {hub_cloud_dev_token}")
-
-    assert result.exit_code == 0
-    assert result.output == "Successfully logged in to Activeloop.\n"
-
-    result = runner.invoke(logout)
-    assert result.exit_code == 0
-    assert result.output == "Logged out of Activeloop.\n"
-
-
 def test_bad_token():
     runner = CliRunner()

diff --git a/deeplake/client/client.py b/deeplake/client/client.py
index 416179d288..f2b5fd0fff 100644
--- a/deeplake/client/client.py
+++ b/deeplake/client/client.py
@@ -28,7 +28,6 @@
     HUB_REST_ENDPOINT,
     HUB_REST_ENDPOINT_LOCAL,
     HUB_REST_ENDPOINT_DEV,
-    GET_TOKEN_SUFFIX,
     HUB_REST_ENDPOINT_STAGING,
     REGISTER_USER_SUFFIX,
     DEFAULT_REQUEST_TIMEOUT,
@@ -188,15 +187,10 @@ def request_auth_token(self, username: str, password: str):
             LoginException: If there is an issue retrieving the auth token.
         """
-        json = {"username": username, "password": password}
-        response = self.request("POST", GET_TOKEN_SUFFIX, json=json)
+        if username != "public":
+            raise LoginException("Can only request a token for the public user")

-        try:
-            token_dict = response.json()
-            token = token_dict["token"]
-        except Exception:
-            raise LoginException()
-        return token
+        return "PUBLIC TOKEN " + ("_" * 150)

     def send_register_request(self, username: str, email: str, password: str):
         """Sends a request to backend to register a new user.
diff --git a/deeplake/client/test_client.py b/deeplake/client/test_client.py
index c2c3051fad..7fe9ec8777 100644
--- a/deeplake/client/test_client.py
+++ b/deeplake/client/test_client.py
@@ -15,19 +15,6 @@
 from time import sleep


-@pytest.mark.slow
-def test_client_requests(hub_cloud_dev_credentials):
-    username, password = hub_cloud_dev_credentials
-
-    deeplake_client = DeepLakeBackendClient()
-    deeplake_client.request_auth_token(username, password)
-    with pytest.raises(Exception):
-        # request will fail as username already exists
-        deeplake_client.send_register_request(
-            "activeloop", "abc@d.com", "notactualpassword"
-        )
-
-
 def test_client_utils():
     write_token("abcdefgh")
     assert read_token() == "abcdefgh"
diff --git a/deeplake/constants.py b/deeplake/constants.py
index c669d0df82..edb1056f8e 100644
--- a/deeplake/constants.py
+++ b/deeplake/constants.py
@@ -81,6 +81,9 @@
 # environment variables
 ENV_HUB_DEV_USERNAME = "ACTIVELOOP_HUB_USERNAME"
 ENV_HUB_DEV_PASSWORD = "ACTIVELOOP_HUB_PASSWORD"
+
+ENV_HUB_DEV_TOKEN = "ACTIVELOOP_HUB_TOKEN"
+
 ENV_HUB_DEV_MANAGED_CREDS_KEY = "ACTIVELOOP_HUB_MANAGED_CREDS_KEY"

 ENV_KAGGLE_USERNAME = "KAGGLE_USERNAME"
diff --git a/deeplake/core/chunk_engine.py b/deeplake/core/chunk_engine.py
index a167d39955..4702995aa6 100644
--- a/deeplake/core/chunk_engine.py
+++ b/deeplake/core/chunk_engine.py
@@ -1156,10 +1156,14 @@ def _extend_sequence(self, samples, progressbar, link_callback, ignore_errors):
     def _prepare_samples_for_link_callback(self, samples):
         if not isinstance(samples, np.ndarray):
             samples = [
-                None
-                if is_empty_list(s)
-                or (isinstance(s, deeplake.core.tensor.Tensor) and s.is_empty_tensor)
-                else s
+                (
+                    None
+                    if is_empty_list(s)
+                    or (
+                        isinstance(s, deeplake.core.tensor.Tensor) and s.is_empty_tensor
+                    )
+                    else s
+                )
                 for s in samples
             ]
         return samples
@@ -2536,9 +2540,11 @@ def update_links_and_encoders(idx):
             """Update linked tensors and sample level encoders"""
             self.commit_diff.pop(
                 idx,
-                sample_id_tensor[idx].numpy().item()
-                if sample_id_tensor is not None
-                else None,
+                (
+                    sample_id_tensor[idx].numpy().item()
+                    if sample_id_tensor is not None
+                    else None
+                ),
             )
             if link_callback:
                 link_callback(idx)
@@ -2996,9 +3002,11 @@ def _merge_seq_shape(self, shape, sample_index):
         is_same = np.all(shape == shape[0, :], axis=0)  # type: ignore
         shape = (len(shape),) + (
             tuple(
-                int(shape[0, i])  # type: ignore
-                if is_same[i]  # type: ignore
-                else -1
+                (
+                    int(shape[0, i])  # type: ignore
+                    if is_same[i]  # type: ignore
+                    else -1
+                )
                 for i in range(shape.shape[1])  # type: ignore
             )
             or (1,)
@@ -3165,9 +3173,11 @@ def _apply_deeper_indexing(self, sample_shapes, num_samples, sample_index):
     def _sample_shapes_to_shape(self, sample_shapes, squeeze_dims, sample_ndim):
         is_same = np.all(sample_shapes == sample_shapes[0, :], axis=0)
         shape = [  # type: ignore
-            int(sample_shapes[0, i])
-            if sample_shapes[0, i] != -1 and is_same[i]
-            else None
+            (
+                int(sample_shapes[0, i])
+                if sample_shapes[0, i] != -1 and is_same[i]
+                else None
+            )
             for i in range(sample_ndim)
         ]

@@ -3318,9 +3328,11 @@ def _transform_callback(
         meta = self.tensor_meta
         vs = func(
             samples,
-            factor=tensor.info.downsampling_factor
-            if func == extend_downsample
-            else None,
+            factor=(
+                tensor.info.downsampling_factor
+                if func == extend_downsample
+                else None
+            ),
             compression=meta.sample_compression,
             htype=meta.htype,
             link_creds=self.link_creds,
diff --git a/deeplake/core/dataset/dataset.py b/deeplake/core/dataset/dataset.py
index 5bc2d42b95..107b8095a2 100644
--- a/deeplake/core/dataset/dataset.py
+++ b/deeplake/core/dataset/dataset.py
@@ -289,9 +289,9 @@ def __init__(
             else None
         )
         self._first_load_init()
-        self._initial_autoflush: List[
-            bool
-        ] = []  # This is a stack to support nested with contexts
+        self._initial_autoflush: List[bool] = (
+            []
+        )  # This is a stack to support nested with contexts
         self._indexing_history: List[int] = []

         if not self.read_only:
@@ -4122,16 +4122,18 @@ def _copy(
             token=token,
             overwrite=overwrite,
             public=public,
-            unlink=[
-                t
-                for t in self.tensors
-                if (
-                    self.tensors[t].base_htype != "video"
-                    or deeplake.constants._UNLINK_VIDEOS
-                )
-            ]
-            if unlink
-            else False,
+            unlink=(
+                [
+                    t
+                    for t in self.tensors
+                    if (
+                        self.tensors[t].base_htype != "video"
+                        or deeplake.constants._UNLINK_VIDEOS
+                    )
+                ]
+                if unlink
+                else False
+            ),
             verbose=verbose,
         )
diff --git a/deeplake/core/io.py b/deeplake/core/io.py
index 3e47bca1ea..1a36e7e048 100644
--- a/deeplake/core/io.py
+++ b/deeplake/core/io.py
@@ -100,8 +100,7 @@ def __len__(self):

 class Scheduler(ABC):
     @abstractmethod
-    def schedule(self, jobs: List[IOBlock]) -> List[Schedule]:
-        ...
+    def schedule(self, jobs: List[IOBlock]) -> List[Schedule]: ...


 class SingleThreadScheduler(Scheduler):
diff --git a/deeplake/core/meta/dataset_meta.py b/deeplake/core/meta/dataset_meta.py
index 44deb5f4d0..8b6f7905c3 100644
--- a/deeplake/core/meta/dataset_meta.py
+++ b/deeplake/core/meta/dataset_meta.py
@@ -100,9 +100,11 @@ def rename_group(self, name, new_name):
         self.groups.remove(name)
         self.groups = list(
             map(
-                lambda g: posixpath.join(new_name, relpath(g, name))
-                if (g == name or g.startswith(name + "/"))
-                else g,
+                lambda g: (
+                    posixpath.join(new_name, relpath(g, name))
+                    if (g == name or g.startswith(name + "/"))
+                    else g
+                ),
                 self.groups,
             )
         )
diff --git a/deeplake/core/query/test/test_query.py b/deeplake/core/query/test/test_query.py
index 56384e46ef..6f6d389230 100644
--- a/deeplake/core/query/test/test_query.py
+++ b/deeplake/core/query/test/test_query.py
@@ -466,9 +466,11 @@ def test_link_materialize(local_ds, num_workers):
         ds.create_tensor("abc", htype="link[image]", sample_compression="jpg")
         ds.abc.extend(
             [
-                deeplake.link("https://picsum.photos/20/20")
-                if i % 2
-                else deeplake.link("https://picsum.photos/10/10")
+                (
+                    deeplake.link("https://picsum.photos/20/20")
+                    if i % 2
+                    else deeplake.link("https://picsum.photos/10/10")
+                )
                 for i in range(20)
             ]
         )
diff --git a/deeplake/core/sample.py b/deeplake/core/sample.py
index c4b3058b0f..144ec99ed7 100644
--- a/deeplake/core/sample.py
+++ b/deeplake/core/sample.py
@@ -224,9 +224,11 @@ def _get_dicom_meta(self) -> dict:
             x.keyword: {
                 "name": x.name,
                 "tag": str(x.tag),
-                "value": x.value
-                if isinstance(x.value, (str, int, float))
-                else x.to_json_dict(None, None).get("Value", ""),  # type: ignore
+                "value": (
+                    x.value
+                    if isinstance(x.value, (str, int, float))
+                    else x.to_json_dict(None, None).get("Value", "")  # type: ignore
+                ),
                 "vr": x.VR,
             }
             for x in dcm
diff --git a/deeplake/core/tensor.py b/deeplake/core/tensor.py
index 921fccd01e..94b8ecbba9 100644
--- a/deeplake/core/tensor.py
+++ b/deeplake/core/tensor.py
@@ -1095,9 +1095,11 @@ def _extend_links(self, samples, flat: Optional[bool], progressbar: bool = False
                 func = get_link_transform(func_name)
                 vs = func(
                     samples,
-                    factor=tensor.info.downsampling_factor
-                    if func == extend_downsample
-                    else None,
+                    factor=(
+                        tensor.info.downsampling_factor
+                        if func == extend_downsample
+                        else None
+                    ),
                     compression=self.meta.sample_compression,
                     htype=self.htype,
                     link_creds=self.link_creds,
@@ -1145,9 +1147,11 @@ def _update_links(
                     val = func(
                         new_sample,
                         old_value=tensor[global_sample_index],
-                        factor=tensor.info.downsampling_factor
-                        if func == update_downsample
-                        else None,
+                        factor=(
+                            tensor.info.downsampling_factor
+                            if func == update_downsample
+                            else None
+                        ),
                         compression=self.meta.sample_compression,
                         htype=self.htype,
                         link_creds=self.link_creds,
diff --git a/deeplake/core/tests/test_query.py b/deeplake/core/tests/test_query.py
index 07223112ad..36a389759a 100644
--- a/deeplake/core/tests/test_query.py
+++ b/deeplake/core/tests/test_query.py
@@ -8,18 +8,18 @@

 @requires_libdeeplake
 def test_single_source_query(
-    hub_cloud_dev_credentials,
+    hub_cloud_dev_token,
 ):
-    username, password = hub_cloud_dev_credentials
-    token = DeepLakeBackendClient().request_auth_token(username, password)
-    ds = deeplake.query('SELECT * FROM "hub://activeloop/mnist-train"', token=token)
+    ds = deeplake.query(
+        'SELECT * FROM "hub://activeloop/mnist-train"', token=hub_cloud_dev_token
+    )
     assert len(ds) == 60000
     assert len(ds.tensors) == 2
     assert ds.images.meta.htype == "image"
     assert ds.labels.meta.htype == "class_label"

     ds = deeplake.query(
"hub://activeloop/mnist-train"', token=token + 'SELECT images FROM "hub://activeloop/mnist-train"', token=hub_cloud_dev_token ) assert len(ds) == 60000 assert len(ds.tensors) == 1 @@ -27,19 +27,16 @@ def test_single_source_query( @requires_libdeeplake -def test_multi_source_query(hub_cloud_dev_credentials): - username, password = hub_cloud_dev_credentials - token = DeepLakeBackendClient().request_auth_token(username, password) - +def test_multi_source_query(hub_cloud_dev_token): with pytest.raises(RuntimeError): ds = deeplake.query( 'SELECT * FROM "hub://activeloop/mnist-train" UNION (SELECT * FROM "hub://activeloop/coco-train")', - token=token, + token=hub_cloud_dev_token, ) ds = deeplake.query( 'SELECT * FROM "hub://activeloop/mnist-train" UNION (SELECT images, categories[0] as labels FROM "hub://activeloop/coco-train")', - token=token, + token=hub_cloud_dev_token, ) assert len(ds) == 178287 assert len(ds.tensors) == 2 @@ -48,7 +45,7 @@ def test_multi_source_query(hub_cloud_dev_credentials): ds = deeplake.query( 'SELECT * FROM (SELECT * FROM "hub://activeloop/mnist-train" UNION (SELECT images, labels FROM "hub://activeloop/cifar100-train")) WHERE labels == 0', - token=token, + token=hub_cloud_dev_token, ) assert len(ds) == 6423 assert len(ds.tensors) == 2 diff --git a/deeplake/core/vectorstore/dataset_handlers/dataset_handler_base.py b/deeplake/core/vectorstore/dataset_handlers/dataset_handler_base.py index cc3771ee6f..582474fe18 100644 --- a/deeplake/core/vectorstore/dataset_handlers/dataset_handler_base.py +++ b/deeplake/core/vectorstore/dataset_handlers/dataset_handler_base.py @@ -71,9 +71,9 @@ def __init__( self.bugout_reporting_path, "vs.initialize", { - "tensor_params": "default" - if tensor_params is not None - else tensor_params, + "tensor_params": ( + "default" if tensor_params is not None else tensor_params + ), "embedding_function": True if embedding_function is not None else False, "num_workers": num_workers, "overwrite": overwrite, diff --git a/deeplake/core/vectorstore/test_deeplake_vectorstore.py b/deeplake/core/vectorstore/test_deeplake_vectorstore.py index e30fec8dbd..08facdcb14 100644 --- a/deeplake/core/vectorstore/test_deeplake_vectorstore.py +++ b/deeplake/core/vectorstore/test_deeplake_vectorstore.py @@ -2734,69 +2734,6 @@ def test_exec_option_with_auth(local_path, hub_cloud_path, hub_cloud_dev_token): assert db.dataset_handler.exec_option == "tensor_db" -@requires_libdeeplake -def test_exec_option_cli( - local_path, - hub_cloud_path, - hub_cloud_dev_token, - hub_cloud_dev_credentials, -): - runner = CliRunner() - username, password = hub_cloud_dev_credentials - # Testing exec_option with cli login and logout commands are executed - runner.invoke(login, f"-u {username} -p {password}") - - # local dataset and logged in with cli - db = VectorStore( - path=local_path, - ) - assert db.dataset_handler.exec_option == "compute_engine" - - # hub cloud dataset and logged in with cli - db = VectorStore( - path=hub_cloud_path, - ) - assert db.dataset_handler.exec_option == "compute_engine" - - # hub cloud dataset and logged in with cli - db = VectorStore( - path="mem://abc", - ) - assert db.dataset_handler.exec_option == "python" - - # logging out with cli - runner.invoke(logout) - - # local dataset and logged out with cli - db = VectorStore( - path=local_path, - ) - assert db.dataset_handler.exec_option == "python" - - # Check whether after logging out exec_option changes to python - # logging in with cli token - runner.invoke(login, f"-t {hub_cloud_dev_token}") - db = 
-    db = VectorStore(
-        path=local_path,
-    )
-    assert db.dataset_handler.exec_option == "compute_engine"
-    # logging out with cli
-    runner.invoke(logout)
-    assert db.dataset_handler.exec_option == "python"
-
-    # Check whether after logging out when token specified exec_option doesn't change
-    # logging in with cli token
-    runner.invoke(login, f"-t {hub_cloud_dev_token}")
-    db = VectorStore(
-        path=local_path,
-        token=hub_cloud_dev_token,
-    )
-    assert db.dataset_handler.exec_option == "compute_engine"
-    # logging out with cli
-    runner.invoke(logout)
-    assert db.dataset_handler.exec_option == "compute_engine"
-
-
 @requires_libdeeplake
 @pytest.mark.parametrize(
     "path, creds",
diff --git a/deeplake/core/version_control/test_version_control.py b/deeplake/core/version_control/test_version_control.py
index 6008f92cf2..1009da71d3 100644
--- a/deeplake/core/version_control/test_version_control.py
+++ b/deeplake/core/version_control/test_version_control.py
@@ -1422,9 +1422,9 @@ def test_rename_diff_branch(local_ds, capsys):
     with local_ds:
         local_ds.rename_tensor("abc", "xyz")
         expected_dataset_diff_from_a_on_alt["renamed"]["abc"] = "xyz"
-        expected_tensor_diff_from_a_on_alt[
-            "xyz"
-        ] = expected_tensor_diff_from_a_on_alt.pop("abc")
+        expected_tensor_diff_from_a_on_alt["xyz"] = (
+            expected_tensor_diff_from_a_on_alt.pop("abc")
+        )
         local_ds.xyz.append([4, 5, 6])
         expected_tensor_diff_from_a_on_alt["xyz"]["data_added"] = [1, 2]
@@ -1459,18 +1459,18 @@ with local_ds:
         local_ds.rename_tensor("abc", "efg")
         expected_dataset_diff_from_c_on_alt2["renamed"]["abc"] = "efg"
-        expected_tensor_diff_from_c_on_alt2[
-            "efg"
-        ] = expected_tensor_diff_from_c_on_alt2.pop("abc")
+        expected_tensor_diff_from_c_on_alt2["efg"] = (
+            expected_tensor_diff_from_c_on_alt2.pop("abc")
+        )
         local_ds.efg.append([5, 6, 7])
         expected_tensor_diff_from_c_on_alt2["efg"]["data_added"] = [2, 3]
         local_ds.efg.info["hello"] = "world"
         expected_tensor_diff_from_c_on_alt2["efg"]["info_updated"] = True
         local_ds.rename_tensor("red", "blue")
         expected_dataset_diff_from_c_on_alt2["renamed"]["red"] = "blue"
-        expected_tensor_diff_from_c_on_alt2[
-            "blue"
-        ] = expected_tensor_diff_from_c_on_alt2.pop("red")
+        expected_tensor_diff_from_c_on_alt2["blue"] = (
+            expected_tensor_diff_from_c_on_alt2.pop("red")
+        )

     d = local_ds.commit()
     expected_tensor_diff_from_d_on_alt2 = {
diff --git a/deeplake/enterprise/test_query.py b/deeplake/enterprise/test_query.py
index b27d6ca585..818e3a6bd4 100644
--- a/deeplake/enterprise/test_query.py
+++ b/deeplake/enterprise/test_query.py
@@ -30,11 +30,7 @@ def test_query(hub_cloud_ds):

 @requires_libdeeplake
-def test_query_on_local_datasets(local_ds, hub_cloud_dev_credentials):
-    username, password = hub_cloud_dev_credentials
-    runner = CliRunner()
-    runner.invoke(logout)
-
+def test_query_on_local_datasets(local_ds, hub_cloud_dev_token):
     path = local_ds.path
     ds = deeplake.empty(path, overwrite=True)
     ds.create_tensor("label")
@@ -44,8 +40,7 @@ def test_query_on_local_datasets(local_ds, hub_cloud_dev_credentials):
     with pytest.raises(EmptyTokenException):
         dsv = ds.query("SELECT * WHERE CONTAINS(label, 2)")

-    runner.invoke(login, f"-u {username} -p {password}")
-    ds = deeplake.empty(path, overwrite=True)
+    ds = deeplake.empty(path, overwrite=True, token=hub_cloud_dev_token)
     ds.create_tensor("label")
     for i in range(100):
         ds.label.append(floor(i / 20))
diff --git a/deeplake/integrations/mmdet/mmdet_.py b/deeplake/integrations/mmdet/mmdet_.py
index 5e4e66df8a..398ce99cd7 100644
--- a/deeplake/integrations/mmdet/mmdet_.py
+++ b/deeplake/integrations/mmdet/mmdet_.py
@@ -167,7 +167,6 @@
     >>> mmdet_deeplake.train_detector(model, cfg, distributed=args.distributed, validate=args.validate)
 """

-
 from collections import OrderedDict

 from typing import Callable, Optional, List, Dict
diff --git a/deeplake/integrations/mmdet/mmdet_utils.py b/deeplake/integrations/mmdet/mmdet_utils.py
index 00bf32bc15..d5bb374bbb 100644
--- a/deeplake/integrations/mmdet/mmdet_utils.py
+++ b/deeplake/integrations/mmdet/mmdet_utils.py
@@ -120,9 +120,9 @@ def createDeeplakeIndex(self):
                 "category_id": categories[bbox_index],
                 "bbox": bbox,
                 "area": bbox[2] * bbox[3],
-                "segmentation": mask
-                if masks is not None
-                else None,  # optimize here
+                "segmentation": (
+                    mask if masks is not None else None
+                ),  # optimize here
                 "iscrowd": int(is_crowds[bbox_index]),
             }
diff --git a/deeplake/integrations/tf/__init__.py b/deeplake/integrations/tf/__init__.py
index ea1c09544f..8a61b82884 100644
--- a/deeplake/integrations/tf/__init__.py
+++ b/deeplake/integrations/tf/__init__.py
@@ -1,4 +1,5 @@
 """
 export dataset_to_tensorflow for using in dataset.tensorflow() function
 """
+
 from .datasettotensorflow import dataset_to_tensorflow
diff --git a/deeplake/integrations/tf/datasettotensorflow.py b/deeplake/integrations/tf/datasettotensorflow.py
index 007bd88d36..921b3a97f9 100644
--- a/deeplake/integrations/tf/datasettotensorflow.py
+++ b/deeplake/integrations/tf/datasettotensorflow.py
@@ -1,6 +1,7 @@
 """
 Helper function for exporting tensorflow dataset wrapper
 """
+
 import warnings

 from deeplake.util.exceptions import (
diff --git a/deeplake/integrations/tf/deeplake_tensorflow_dataset.py b/deeplake/integrations/tf/deeplake_tensorflow_dataset.py
index bfe4835a06..f0ac0d0828 100644
--- a/deeplake/integrations/tf/deeplake_tensorflow_dataset.py
+++ b/deeplake/integrations/tf/deeplake_tensorflow_dataset.py
@@ -1,6 +1,7 @@
 """
 Tensorflow dataset wrapper
 """
+
 import abc

 import tensorflow as tf  # type: ignore
diff --git a/deeplake/tests/client_fixtures.py b/deeplake/tests/client_fixtures.py
index d8b06c5f93..e7434d1719 100644
--- a/deeplake/tests/client_fixtures.py
+++ b/deeplake/tests/client_fixtures.py
@@ -6,7 +6,7 @@
     ENV_AZURE_CLIENT_SECRET,
     ENV_AZURE_TENANT_ID,
     ENV_HUB_DEV_USERNAME,
-    ENV_HUB_DEV_PASSWORD,
+    ENV_HUB_DEV_TOKEN,
     ENV_KAGGLE_USERNAME,
     ENV_KAGGLE_KEY,
     KAGGLE_OPT,
@@ -35,24 +35,22 @@ def hub_cloud_dev_credentials(request):
     )

     username = os.getenv(ENV_HUB_DEV_USERNAME)
-    password = os.getenv(ENV_HUB_DEV_PASSWORD)

     assert (
         username is not None
     ), f"Deep Lake dev username was not found in the environment variable '{ENV_HUB_DEV_USERNAME}'. This is necessary for testing deeplake cloud datasets."
-    assert (
-        password is not None
-    ), f"Deep Lake dev password was not found in the environment variable '{ENV_HUB_DEV_PASSWORD}'. This is necessary for testing deeplake cloud datasets."

-    return username, password
+    return username, None


 @pytest.fixture(scope="session")
 def hub_cloud_dev_token(hub_cloud_dev_credentials):
-    username, password = hub_cloud_dev_credentials
+    token = os.getenv(ENV_HUB_DEV_TOKEN)
+
+    assert (
+        token is not None
+    ), f"Deep Lake dev token was not found in the environment variable '{ENV_HUB_DEV_TOKEN}'. This is necessary for testing deeplake cloud datasets."

-    client = DeepLakeBackendClient()
-    token = client.request_auth_token(username, password)
     return token
diff --git a/deeplake/util/diff.py b/deeplake/util/diff.py
index b026914298..29c199ad57 100644
--- a/deeplake/util/diff.py
+++ b/deeplake/util/diff.py
@@ -12,9 +12,7 @@
 )


-def get_changes_and_messages(
-    version_state, storage, id_1, id_2
-) -> Tuple[
+def get_changes_and_messages(version_state, storage, id_1, id_2) -> Tuple[
     List[dict],
     Optional[List[dict]],
     List[dict],
diff --git a/deeplake/util/object_3d/ply_readers.py b/deeplake/util/object_3d/ply_readers.py
index 46343010a3..41f98a38b1 100644
--- a/deeplake/util/object_3d/ply_readers.py
+++ b/deeplake/util/object_3d/ply_readers.py
@@ -41,9 +41,9 @@ def _parse_properties(self, fmt, ext, line, has_texture, meta_data, name):
         self.meta_data["dimensions_names_to_dtype"][line[2].decode()] = ply_dtypes[
             line[1]
         ]
-        self.meta_data["element_name_to_property_dtypes"][name][
-            line[2].decode()
-        ] = ply_dtypes[line[1]]
+        self.meta_data["element_name_to_property_dtypes"][name][line[2].decode()] = (
+            ply_dtypes[line[1]]
+        )

         dimensions_names.append(line[2].decode())

@@ -134,9 +134,9 @@ def _parse_properties(self, fmt, ext, line, has_texture, meta_data, name):
             meta_data["dimensions_names_to_dtype"][line[2].decode()] = ply_dtypes[
                 line[1]
             ]
-            meta_data["element_name_to_property_dtypes"][name][
-                line[2].decode()
-            ] = ply_dtypes[line[1]]
+            meta_data["element_name_to_property_dtypes"][name][line[2].decode()] = (
+                ply_dtypes[line[1]]
+            )
             dimensions_names.append(line[2].decode())
         meta_data["dimensions_names"] += dimensions_names
         return has_texture
diff --git a/deeplake/util/transform.py b/deeplake/util/transform.py
index c349b5d437..f2405475f7 100644
--- a/deeplake/util/transform.py
+++ b/deeplake/util/transform.py
@@ -443,9 +443,11 @@ def create_worker_chunk_engines(
         tiling_threshold = storage_chunk_engine.tiling_threshold
         new_tensor_meta = TensorMeta(
             htype=existing_meta.htype,
-            dtype=np.dtype(existing_meta.typestr)
-            if existing_meta.typestr
-            else existing_meta.dtype,
+            dtype=(
+                np.dtype(existing_meta.typestr)
+                if existing_meta.typestr
+                else existing_meta.dtype
+            ),
             sample_compression=existing_meta.sample_compression,
             chunk_compression=existing_meta.chunk_compression,
             max_chunk_size=chunk_size,