Skip to content

Updated version for next release #1457

Updated version for next release

Updated version for next release #1457

GitHub Actions / JUnit Test Report failed Dec 11, 2023 in 0s

12228 tests run, 10275 passed, 1896 skipped, 57 failed.

Annotations

Check failure on line 770 in deeplake/core/vectorstore/deep_memory/test_deepmemory.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deepmemory.test_deepmemory_evaluate_with_embedding_function_specified_in_constructor_should_not_throw_any_exception

deeplake.util.exceptions.DatasetHandlerError: A dataset already exists at the given path (hub://testingacc2/deepmemory_test_corpus_managed_2_eval_queries). If you want to create a new empty dataset, either specify another path or use overwrite=True. If you want to load the dataset that exists at this path, use deeplake.load() instead.
Raw output
corpus_query_pair_path = ('hub://testingacc2/deepmemory_test_corpus_managed_2', 'hub://testingacc2/deepmemory_test_corpus_managed_2_eval_queries')
hub_cloud_dev_token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTcwMjA1MzcxMywiZXhwIjoxNzA1NjUzNzEzfQ.eyJpZCI6InRlc3RpbmdhY2MyIn0.W82UH88qd7vmfZ8EEYEkw8yM87quWoMzP0AVZ3NlZj0nUH2mx5U4YDMn40yFvPdperNF75r_eOABRjngiDH8Sw'

    @pytest.mark.slow
    @pytest.mark.flaky(reruns=3)
    @pytest.mark.skipif(sys.platform == "win32", reason="Does not run on Windows")
    def test_deepmemory_evaluate_with_embedding_function_specified_in_constructor_should_not_throw_any_exception(
        corpus_query_pair_path,
        hub_cloud_dev_token,
    ):
        corpus, queries = corpus_query_pair_path
    
        db = VectorStore(
            path=corpus,
            runtime={"tensor_db": True},
            token=hub_cloud_dev_token,
            embedding_function=embedding_fn,
        )
    
>       queries_vs = VectorStore(
            path=queries,
            runtime={"tensor_db": True},
            token=hub_cloud_dev_token,
            embedding_function=embedding_fn,
        )

deeplake/core/vectorstore/deep_memory/test_deepmemory.py:770: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
deeplake/core/vectorstore/deeplake_vectorstore.py:114: in __init__
    self.dataset_handler = get_dataset_handler(
deeplake/core/vectorstore/dataset_handlers/dataset_handler.py:13: in get_dataset_handler
    return ClientSideDH(*args, **kwargs)
deeplake/core/vectorstore/dataset_handlers/client_side_dataset_handler.py:66: in __init__
    self.dataset = dataset or dataset_utils.create_or_load_dataset(
deeplake/core/vectorstore/vector_search/dataset/dataset.py:69: in create_or_load_dataset
    return create_dataset(
deeplake/core/vectorstore/vector_search/dataset/dataset.py:189: in create_dataset
    dataset = deeplake.empty(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

path = 'hub://testingacc2/deepmemory_test_corpus_managed_2_eval_queries'
runtime = {'tensor_db': True}, overwrite = False, public = False
memory_cache_size = 2000, local_cache_size = 0, creds = {}
token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTcwMjA1MzcxMywiZXhwIjoxNzA1NjUzNzEzfQ.eyJpZCI6InRlc3RpbmdhY2MyIn0.W82UH88qd7vmfZ8EEYEkw8yM87quWoMzP0AVZ3NlZj0nUH2mx5U4YDMn40yFvPdperNF75r_eOABRjngiDH8Sw'
org_id = None, lock_enabled = True, lock_timeout = 0, verbose = False
index_params = {'additional_params': {'M': 32, 'efConstruction': 600}, 'distance_metric': 'COS', 'threshold': -1}

    @staticmethod
    def empty(
        path: Union[str, pathlib.Path],
        runtime: Optional[dict] = None,
        overwrite: bool = False,
        public: bool = False,
        memory_cache_size: int = DEFAULT_MEMORY_CACHE_SIZE,
        local_cache_size: int = DEFAULT_LOCAL_CACHE_SIZE,
        creds: Optional[Union[Dict, str]] = None,
        token: Optional[str] = None,
        org_id: Optional[str] = None,
        lock_enabled: Optional[bool] = True,
        lock_timeout: Optional[int] = 0,
        verbose: bool = True,
        index_params: Optional[Dict[str, Union[int, str]]] = None,
    ) -> Dataset:
        """Creates an empty dataset
    
        Args:
            path (str, pathlib.Path): - The full path to the dataset. It can be:
                - a Deep Lake cloud path of the form ``hub://org_id/dataset_name``. Requires registration with Deep Lake.
                - an s3 path of the form ``s3://bucketname/path/to/dataset``. Credentials are required in either the environment or passed to the creds argument.
                - a local file system path of the form ``./path/to/dataset`` or ``~/path/to/dataset`` or ``path/to/dataset``.
                - a memory path of the form ``mem://path/to/dataset`` which doesn't save the dataset but keeps it in memory instead. Should be used only for testing as it does not persist.
            runtime (dict): Parameters for creating a dataset in the Deep Lake Tensor Database. Only applicable for paths of the form ``hub://org_id/dataset_name`` and runtime must be ``{"tensor_db": True}``.
            overwrite (bool): If set to ``True`` this overwrites the dataset if it already exists. Defaults to ``False``.
            public (bool): Defines if the dataset will have public access. Applicable only if Deep Lake cloud storage is used and a new Dataset is being created. Defaults to ``False``.
            memory_cache_size (int): The size of the memory cache to be used in MB.
            local_cache_size (int): The size of the local filesystem cache to be used in MB.
            creds (dict, str, optional): The string ``ENV`` or a dictionary containing credentials used to access the dataset at the path.
                - If 'aws_access_key_id', 'aws_secret_access_key', 'aws_session_token' are present, these take precedence over credentials present in the environment or in credentials file. Currently only works with s3 paths.
                - It supports 'aws_access_key_id', 'aws_secret_access_key', 'aws_session_token', 'endpoint_url', 'aws_region', 'profile_name' as keys.
                - If 'ENV' is passed, credentials are fetched from the environment variables. This is also the case when creds is not passed for cloud datasets. For datasets connected to hub cloud, specifying 'ENV' will override the credentials fetched from Activeloop and use local ones.
            token (str, optional): Activeloop token, used for fetching credentials to the dataset at path if it is a Deep Lake dataset. This is optional, tokens are normally autogenerated.
            org_id (str, Optional): Organization id to be used for enabling high-performance features. Only applicable for local datasets.
            verbose (bool): If True, logs will be printed. Defaults to True.
            lock_timeout (int): Number of seconds to wait before throwing a LockException. If None, wait indefinitely
            lock_enabled (bool): If true, the dataset manages a write lock. NOTE: Only set to False if you are managing concurrent access externally.
            index_params: Optional[Dict[str, Union[int, str]]]: Index parameters used while creating vector store, passed down to dataset.
    
        Returns:
            Dataset: Dataset created using the arguments provided.
    
        Raises:
            DatasetHandlerError: If a Dataset already exists at the given path and overwrite is False.
            UserNotLoggedInException: When user is not logged in
            InvalidTokenException: If the specified token is invalid
            TokenPermissionError: When there are permission or other errors related to token
            ValueError: If version is specified in the path
    
        Danger:
            Setting ``overwrite`` to ``True`` will delete all of your data if it exists! Be very careful when setting this parameter.
        """
        path, address = process_dataset_path(path)
    
        if org_id is not None and get_path_type(path) != "local":
            raise ValueError("org_id parameter can only be used with local datasets")
        db_engine = parse_runtime_parameters(path, runtime)["tensor_db"]
    
        if address:
            raise ValueError(
                "deeplake.empty does not accept version address in the dataset path."
            )
    
        verify_dataset_name(path)
    
        if creds is None:
            creds = {}
    
        try:
            storage, cache_chain = get_storage_and_cache_chain(
                path=path,
                db_engine=db_engine,
                read_only=False,
                creds=creds,
                token=token,
                memory_cache_size=memory_cache_size,
                local_cache_size=local_cache_size,
            )
    
            feature_report_path(
                path,
                "empty",
                {
                    "runtime": runtime,
                    "overwrite": overwrite,
                    "lock_enabled": lock_enabled,
                    "lock_timeout": lock_timeout,
                    "index_params": index_params,
                },
                token=token,
            )
        except Exception as e:
            if isinstance(e, UserNotLoggedInException):
                raise UserNotLoggedInException from None
            raise
    
        if overwrite and dataset_exists(cache_chain):
            cache_chain.clear()
        elif dataset_exists(cache_chain):
>           raise DatasetHandlerError(
                f"A dataset already exists at the given path ({path}). If you want to create"
                f" a new empty dataset, either specify another path or use overwrite=True. "
                f"If you want to load the dataset that exists at this path, use deeplake.load() instead."
            )
E           deeplake.util.exceptions.DatasetHandlerError: A dataset already exists at the given path (hub://testingacc2/deepmemory_test_corpus_managed_2_eval_queries). If you want to create a new empty dataset, either specify another path or use overwrite=True. If you want to load the dataset that exists at this path, use deeplake.load() instead.

deeplake/api/dataset.py:457: DatasetHandlerError

Check failure on line 20 in deeplake/cli/test_cli.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_cli.test_cli_auth[creds]

AssertionError: assert 'Encountered ...gain later.\n' == 'Successfully...Activeloop.\n'
  - Successfully logged in to Activeloop.
  + Encountered an error You are over the allowed limits for this operation. Please try again later.
Raw output
hub_cloud_dev_credentials = ('testingacc2', '63Fj@u#wHdxptRDn')
hub_cloud_dev_token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTcwMjA1MzMyMiwiZXhwIjoxNzA1NjUzMzIyfQ.eyJpZCI6InRlc3RpbmdhY2MyIn0.VdKsxFo2FRAgKSnGyeM1qsKrb0KqNolo8LrHdp8mof8bcNnR7zMcEnX7Yrce0nhNmgAt4nw_7OMnybJ_wXkN1Q'
method = 'creds'

    @pytest.mark.parametrize("method", ["creds", "token"])
    def test_cli_auth(hub_cloud_dev_credentials, hub_cloud_dev_token, method):
        username, password = hub_cloud_dev_credentials
    
        runner = CliRunner()
    
        if method == "creds":
            result = runner.invoke(login, f"-u {username} -p {password}")
        elif method == "token":
            result = runner.invoke(login, f"-t {hub_cloud_dev_token}")
    
        assert result.exit_code == 0
>       assert result.output == "Successfully logged in to Activeloop.\n"
E       AssertionError: assert 'Encountered ...gain later.\n' == 'Successfully...Activeloop.\n'
E         - Successfully logged in to Activeloop.
E         + Encountered an error You are over the allowed limits for this operation. Please try again later.

deeplake/cli/test_cli.py:20: AssertionError

Check failure on line 719 in deeplake/core/vectorstore/test_deeplake_vectorstore.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deeplake_vectorstore.test_search_managed

deeplake.util.exceptions.BadGatewayException: Invalid response from Activeloop server.
Raw output
hub_cloud_dev_token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTcwMjA1MzcxOCwiZXhwIjoxNzA1NjUzNzE4fQ.eyJpZCI6InRlc3RpbmdhY2MyIn0.Ufu9a-ypZm-FyrOyvYxxHlUg7QLTbH56yYuOmk_YUatP1las5Xu9zQv-3Ah5CDDpepyUIFQYWpnvnxh2Sgq9hg'

    @requires_libdeeplake
    @pytest.mark.slow
    def test_search_managed(hub_cloud_dev_token):
        """Test whether managed TQL and client-side TQL return the same results"""
        # initialize vector store object:
        vector_store = DeepLakeVectorStore(
            path="hub://testingacc2/vectorstore_test_managed",
            read_only=True,
            token=hub_cloud_dev_token,
        )
    
        # use indra implementation to search the data
        data_ce = vector_store.search(
            embedding=query_embedding,
            exec_option="compute_engine",
        )
    
>       data_db = vector_store.search(
            embedding=query_embedding,
            exec_option="tensor_db",
        )

deeplake/core/vectorstore/test_deeplake_vectorstore.py:719: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
deeplake/core/vectorstore/deeplake_vectorstore.py:313: in search
    return self.dataset_handler.search(
deeplake/core/vectorstore/deep_memory/deep_memory.py:53: in wrapper
    return func(self, *args, **kwargs)
deeplake/core/vectorstore/dataset_handlers/client_side_dataset_handler.py:235: in search
    return vector_search.search(
deeplake/core/vectorstore/vector_search/vector_search.py:57: in search
    return EXEC_OPTION_TO_SEARCH_TYPE[exec_option](
deeplake/core/vectorstore/vector_search/indra/vector_search.py:47: in vector_search
    return vectorstore.indra_search_algorithm(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:209: in search
    return searcher.run(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:57: in run
    view = self._get_view(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:151: in _get_view
    view, data = self.deeplake_dataset.query(
deeplake/core/dataset/dataset.py:2338: in query
    response = client.remote_query(org_id, ds_name, query_string)
deeplake/client/client.py:507: in remote_query
    response = self.request(
deeplake/client/client.py:163: in request
    check_response_status(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

response = <Response [502]>

    def check_response_status(response: requests.Response):
        """Check response status and throw corresponding exception on failure."""
        code = response.status_code
        if code >= 200 and code < 300:
            return
    
        try:
            message = response.json()["description"]
        except Exception:
            message = " "
    
        if code == 400:
            raise BadRequestException(message)
        elif response.status_code == 401:
            raise AuthenticationException
        elif response.status_code == 403:
            raise AuthorizationException(message, response=response)
        elif response.status_code == 404:
            if message != " ":
                raise ResourceNotFoundException(message)
            raise ResourceNotFoundException
        elif response.status_code == 422:
            raise UnprocessableEntityException(message)
        elif response.status_code == 423:
            raise LockedException
        elif response.status_code == 429:
            raise OverLimitException
        elif response.status_code == 502:
>           raise BadGatewayException
E           deeplake.util.exceptions.BadGatewayException: Invalid response from Activeloop server.

deeplake/client/utils.py:101: BadGatewayException

Check failure on line 548 in deeplake/core/vectorstore/deep_memory/test_deepmemory.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deepmemory.test_deepmemory_search

deeplake.util.exceptions.BadGatewayException: Invalid response from Activeloop server.
Raw output
corpus_query_relevances_copy = ('hub://testingacc2/tmp8001_test_deepmemory_test_deepmemory_search', ['0-dimensional biomaterials lack inductive prope...265107', 1]], [['32587939', 1]], ...], 'hub://testingacc2/tmp8001_test_deepmemory_test_deepmemory_search_eval_queries')
testing_relevance_query_deepmemory = ('31715818', [-0.015188165009021759, 0.02033962868154049, -0.012286307290196419, 0.009264647960662842, -0.00939110480248928, 0.00015578352031297982, ...])
hub_cloud_dev_token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTcwMjA1NTg2NywiZXhwIjoxNzA1NjU1ODY3fQ.eyJpZCI6InRlc3RpbmdhY2MyIn0.BgUMKjjrg0Re8JqIahEBnLr6KIiPEPVOLbwAmC4An9PR33ENKPAJWbJXHQQ4qMtL7IM37ECCDjnUynWuPJ2eig'

    @pytest.mark.slow
    @pytest.mark.skipif(sys.platform == "win32", reason="Does not run on Windows")
    def test_deepmemory_search(
        corpus_query_relevances_copy,
        testing_relevance_query_deepmemory,
        hub_cloud_dev_token,
    ):
        corpus, _, _, _ = corpus_query_relevances_copy
        relevance, query_embedding = testing_relevance_query_deepmemory
    
        db = VectorStore(
            path=corpus,
            runtime={"tensor_db": True},
            token=hub_cloud_dev_token,
        )
    
>       output = db.search(
            embedding=query_embedding, deep_memory=True, return_tensors=["id"]
        )

deeplake/core/vectorstore/deep_memory/test_deepmemory.py:548: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
deeplake/core/vectorstore/deeplake_vectorstore.py:313: in search
    return self.dataset_handler.search(
deeplake/core/vectorstore/deep_memory/deep_memory.py:53: in wrapper
    return func(self, *args, **kwargs)
deeplake/core/vectorstore/dataset_handlers/client_side_dataset_handler.py:235: in search
    return vector_search.search(
deeplake/core/vectorstore/vector_search/vector_search.py:57: in search
    return EXEC_OPTION_TO_SEARCH_TYPE[exec_option](
deeplake/core/vectorstore/vector_search/indra/vector_search.py:47: in vector_search
    return vectorstore.indra_search_algorithm(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:209: in search
    return searcher.run(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:57: in run
    view = self._get_view(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:151: in _get_view
    view, data = self.deeplake_dataset.query(
deeplake/core/dataset/dataset.py:2338: in query
    response = client.remote_query(org_id, ds_name, query_string)
deeplake/client/client.py:507: in remote_query
    response = self.request(
deeplake/client/client.py:163: in request
    check_response_status(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

response = <Response [502]>

    def check_response_status(response: requests.Response):
        """Check response status and throw corresponding exception on failure."""
        code = response.status_code
        if code >= 200 and code < 300:
            return
    
        try:
            message = response.json()["description"]
        except Exception:
            message = " "
    
        if code == 400:
            raise BadRequestException(message)
        elif response.status_code == 401:
            raise AuthenticationException
        elif response.status_code == 403:
            raise AuthorizationException(message, response=response)
        elif response.status_code == 404:
            if message != " ":
                raise ResourceNotFoundException(message)
            raise ResourceNotFoundException
        elif response.status_code == 422:
            raise UnprocessableEntityException(message)
        elif response.status_code == 423:
            raise LockedException
        elif response.status_code == 429:
            raise OverLimitException
        elif response.status_code == 502:
>           raise BadGatewayException
E           deeplake.util.exceptions.BadGatewayException: Invalid response from Activeloop server.

deeplake/client/utils.py:101: BadGatewayException

Check failure on line 706 in deeplake/core/vectorstore/deep_memory/test_deepmemory.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deepmemory.test_deepmemory_search_should_contain_correct_answer

deeplake.util.exceptions.BadGatewayException: Invalid response from Activeloop server.
Raw output
corpus_query_relevances_copy = ('hub://testingacc2/tmp8001_test_deepmemory_test_deepmemory_search_should_contain_correct_answer', ['0-dimensional bio...], ...], 'hub://testingacc2/tmp8001_test_deepmemory_test_deepmemory_search_should_contain_correct_answer_eval_queries')
testing_relevance_query_deepmemory = ('31715818', [-0.015188165009021759, 0.02033962868154049, -0.012286307290196419, 0.009264647960662842, -0.00939110480248928, 0.00015578352031297982, ...])
hub_cloud_dev_token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTcwMjA1NTg2NywiZXhwIjoxNzA1NjU1ODY3fQ.eyJpZCI6InRlc3RpbmdhY2MyIn0.BgUMKjjrg0Re8JqIahEBnLr6KIiPEPVOLbwAmC4An9PR33ENKPAJWbJXHQQ4qMtL7IM37ECCDjnUynWuPJ2eig'

    @pytest.mark.slow
    @pytest.mark.skipif(sys.platform == "win32", reason="Does not run on Windows")
    def test_deepmemory_search_should_contain_correct_answer(
        corpus_query_relevances_copy,
        testing_relevance_query_deepmemory,
        hub_cloud_dev_token,
    ):
        corpus, _, _, _ = corpus_query_relevances_copy
        relevance, query_embedding = testing_relevance_query_deepmemory
    
        db = VectorStore(
            path=corpus,
            token=hub_cloud_dev_token,
        )
    
>       output = db.search(
            embedding=query_embedding, deep_memory=True, return_tensors=["id"]
        )

deeplake/core/vectorstore/deep_memory/test_deepmemory.py:706: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
deeplake/core/vectorstore/deeplake_vectorstore.py:313: in search
    return self.dataset_handler.search(
deeplake/core/vectorstore/deep_memory/deep_memory.py:53: in wrapper
    return func(self, *args, **kwargs)
deeplake/core/vectorstore/dataset_handlers/client_side_dataset_handler.py:235: in search
    return vector_search.search(
deeplake/core/vectorstore/vector_search/vector_search.py:57: in search
    return EXEC_OPTION_TO_SEARCH_TYPE[exec_option](
deeplake/core/vectorstore/vector_search/indra/vector_search.py:47: in vector_search
    return vectorstore.indra_search_algorithm(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:209: in search
    return searcher.run(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:57: in run
    view = self._get_view(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:151: in _get_view
    view, data = self.deeplake_dataset.query(
deeplake/core/dataset/dataset.py:2338: in query
    response = client.remote_query(org_id, ds_name, query_string)
deeplake/client/client.py:507: in remote_query
    response = self.request(
deeplake/client/client.py:163: in request
    check_response_status(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

response = <Response [502]>

    def check_response_status(response: requests.Response):
        """Check response status and throw corresponding exception on failure."""
        code = response.status_code
        if code >= 200 and code < 300:
            return
    
        try:
            message = response.json()["description"]
        except Exception:
            message = " "
    
        if code == 400:
            raise BadRequestException(message)
        elif response.status_code == 401:
            raise AuthenticationException
        elif response.status_code == 403:
            raise AuthorizationException(message, response=response)
        elif response.status_code == 404:
            if message != " ":
                raise ResourceNotFoundException(message)
            raise ResourceNotFoundException
        elif response.status_code == 422:
            raise UnprocessableEntityException(message)
        elif response.status_code == 423:
            raise LockedException
        elif response.status_code == 429:
            raise OverLimitException
        elif response.status_code == 502:
>           raise BadGatewayException
E           deeplake.util.exceptions.BadGatewayException: Invalid response from Activeloop server.

deeplake/client/utils.py:101: BadGatewayException

Check failure on line 727 in deeplake/core/vectorstore/deep_memory/test_deepmemory.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deepmemory.test_deeplake_search_should_not_contain_correct_answer

deeplake.util.exceptions.ServerException: Server under maintenance, try again later.
Raw output
corpus_query_relevances_copy = ('hub://testingacc2/tmp8001_test_deepmemory_test_deeplake_search_should_not_contain_correct_answer', ['0-dimensional b... ...], 'hub://testingacc2/tmp8001_test_deepmemory_test_deeplake_search_should_not_contain_correct_answer_eval_queries')
testing_relevance_query_deepmemory = ('31715818', [-0.015188165009021759, 0.02033962868154049, -0.012286307290196419, 0.009264647960662842, -0.00939110480248928, 0.00015578352031297982, ...])
hub_cloud_dev_token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTcwMjA1NTg2NywiZXhwIjoxNzA1NjU1ODY3fQ.eyJpZCI6InRlc3RpbmdhY2MyIn0.BgUMKjjrg0Re8JqIahEBnLr6KIiPEPVOLbwAmC4An9PR33ENKPAJWbJXHQQ4qMtL7IM37ECCDjnUynWuPJ2eig'

    @pytest.mark.slow
    @pytest.mark.skipif(sys.platform == "win32", reason="Does not run on Windows")
    def test_deeplake_search_should_not_contain_correct_answer(
        corpus_query_relevances_copy,
        testing_relevance_query_deepmemory,
        hub_cloud_dev_token,
    ):
        corpus, _, _, _ = corpus_query_relevances_copy
        relevance, query_embedding = testing_relevance_query_deepmemory
    
        db = VectorStore(
            path=corpus,
            token=hub_cloud_dev_token,
        )
>       output = db.search(embedding=query_embedding)

deeplake/core/vectorstore/deep_memory/test_deepmemory.py:727: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
deeplake/core/vectorstore/deeplake_vectorstore.py:313: in search
    return self.dataset_handler.search(
deeplake/core/vectorstore/deep_memory/deep_memory.py:53: in wrapper
    return func(self, *args, **kwargs)
deeplake/core/vectorstore/dataset_handlers/client_side_dataset_handler.py:235: in search
    return vector_search.search(
deeplake/core/vectorstore/vector_search/vector_search.py:57: in search
    return EXEC_OPTION_TO_SEARCH_TYPE[exec_option](
deeplake/core/vectorstore/vector_search/indra/vector_search.py:47: in vector_search
    return vectorstore.indra_search_algorithm(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:209: in search
    return searcher.run(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:57: in run
    view = self._get_view(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:151: in _get_view
    view, data = self.deeplake_dataset.query(
deeplake/core/dataset/dataset.py:2338: in query
    response = client.remote_query(org_id, ds_name, query_string)
deeplake/client/client.py:507: in remote_query
    response = self.request(
deeplake/client/client.py:163: in request
    check_response_status(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

response = <Response [503]>

    def check_response_status(response: requests.Response):
        """Check response status and throw corresponding exception on failure."""
        code = response.status_code
        if code >= 200 and code < 300:
            return
    
        try:
            message = response.json()["description"]
        except Exception:
            message = " "
    
        if code == 400:
            raise BadRequestException(message)
        elif response.status_code == 401:
            raise AuthenticationException
        elif response.status_code == 403:
            raise AuthorizationException(message, response=response)
        elif response.status_code == 404:
            if message != " ":
                raise ResourceNotFoundException(message)
            raise ResourceNotFoundException
        elif response.status_code == 422:
            raise UnprocessableEntityException(message)
        elif response.status_code == 423:
            raise LockedException
        elif response.status_code == 429:
            raise OverLimitException
        elif response.status_code == 502:
            raise BadGatewayException
        elif response.status_code == 504:
            raise GatewayTimeoutException
        elif 500 <= response.status_code < 600:
>           raise ServerException("Server under maintenance, try again later.")
E           deeplake.util.exceptions.ServerException: Server under maintenance, try again later.

deeplake/client/utils.py:105: ServerException

Check failure on line 1310 in deeplake/core/vectorstore/test_deeplake_vectorstore.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deeplake_vectorstore.test_update_embedding_row_ids_and_ids_specified_should_throw_exception

ImportError: High performance features require the libdeeplake package which is not available in Windows OS
Raw output
local_path = './hub_pytest/test_deeplake_vectorstore/test_update_embedding_row_ids_and_ids_specified_should_throw_exception'
vector_store_hash_ids = ['0', '1', '2', '3', '4']
vector_store_row_ids = [0, 1, 2, 3, 4]
hub_cloud_dev_token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTcwMjA1Mzk5NSwiZXhwIjoxNzA1NjUzOTk1fQ.eyJpZCI6InRlc3RpbmdhY2MyIn0.qwwJNEd0ptOQQyxAV35cJvJwmrIK_KKZ3GuxJ_MoZ-_0fHDYyt_oUdaoHTawpblKGCfW-PS8n6yUKZ3eYR4PvQ'

    def test_update_embedding_row_ids_and_ids_specified_should_throw_exception(
        local_path,
        vector_store_hash_ids,
        vector_store_row_ids,
        hub_cloud_dev_token,
    ):
        # specifying both row_ids and ids during update embedding should throw an exception
        # initializing vectorstore and populating it:
>       vector_store = create_and_populate_vs(
            local_path,
            token=hub_cloud_dev_token,
        )

deeplake\core\vectorstore\test_deeplake_vectorstore.py:1336: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
deeplake\core\vectorstore\test_deeplake_vectorstore.py:1310: in create_and_populate_vs
    vector_store = DeepLakeVectorStore(
deeplake\core\vectorstore\deeplake_vectorstore.py:114: in __init__
    self.dataset_handler = get_dataset_handler(
deeplake\core\vectorstore\dataset_handlers\dataset_handler.py:13: in get_dataset_handler
    return ClientSideDH(*args, **kwargs)
deeplake\core\vectorstore\dataset_handlers\client_side_dataset_handler.py:66: in __init__
    self.dataset = dataset or dataset_utils.create_or_load_dataset(
deeplake\core\vectorstore\vector_search\dataset\dataset.py:48: in create_or_load_dataset
    utils.check_indra_installation(
deeplake\core\vectorstore\vector_search\utils.py:143: in check_indra_installation
    raise raise_indra_installation_error(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

indra_import_error = False

    def raise_indra_installation_error(indra_import_error: Optional[Exception] = None):
        if not indra_import_error:
            if os.name == "nt":
>               raise ImportError(
                    "High performance features require the libdeeplake package which is not available in Windows OS"
                )
E               ImportError: High performance features require the libdeeplake package which is not available in Windows OS

deeplake\enterprise\util.py:13: ImportError

Check failure on line 1310 in deeplake/core/vectorstore/test_deeplake_vectorstore.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deeplake_vectorstore.test_update_embedding_row_ids_and_filter_specified_should_throw_exception

ImportError: High performance features require the libdeeplake package which is not available in Windows OS
Raw output
local_path = './hub_pytest/test_deeplake_vectorstore/test_update_embedding_row_ids_and_filter_specified_should_throw_exception'
vector_store_filters = {'metadata': {'a': 1}}
vector_store_row_ids = [0, 1, 2, 3, 4]
hub_cloud_dev_token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTcwMjA1Mzk5NSwiZXhwIjoxNzA1NjUzOTk1fQ.eyJpZCI6InRlc3RpbmdhY2MyIn0.qwwJNEd0ptOQQyxAV35cJvJwmrIK_KKZ3GuxJ_MoZ-_0fHDYyt_oUdaoHTawpblKGCfW-PS8n6yUKZ3eYR4PvQ'

    def test_update_embedding_row_ids_and_filter_specified_should_throw_exception(
        local_path,
        vector_store_filters,
        vector_store_row_ids,
        hub_cloud_dev_token,
    ):
        # specifying both row_ids and filter during update embedding should throw an exception
        # initializing vectorstore and populating it:
>       vector_store = create_and_populate_vs(
            local_path,
            token=hub_cloud_dev_token,
        )

deeplake\core\vectorstore\test_deeplake_vectorstore.py:1359: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
deeplake\core\vectorstore\test_deeplake_vectorstore.py:1310: in create_and_populate_vs
    vector_store = DeepLakeVectorStore(
deeplake\core\vectorstore\deeplake_vectorstore.py:114: in __init__
    self.dataset_handler = get_dataset_handler(
deeplake\core\vectorstore\dataset_handlers\dataset_handler.py:13: in get_dataset_handler
    return ClientSideDH(*args, **kwargs)
deeplake\core\vectorstore\dataset_handlers\client_side_dataset_handler.py:66: in __init__
    self.dataset = dataset or dataset_utils.create_or_load_dataset(
deeplake\core\vectorstore\vector_search\dataset\dataset.py:48: in create_or_load_dataset
    utils.check_indra_installation(
deeplake\core\vectorstore\vector_search\utils.py:143: in check_indra_installation
    raise raise_indra_installation_error(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

indra_import_error = False

    def raise_indra_installation_error(indra_import_error: Optional[Exception] = None):
        if not indra_import_error:
            if os.name == "nt":
>               raise ImportError(
                    "High performance features require the libdeeplake package which is not available in Windows OS"
                )
E               ImportError: High performance features require the libdeeplake package which is not available in Windows OS

deeplake\enterprise\util.py:13: ImportError

Check failure on line 1310 in deeplake/core/vectorstore/test_deeplake_vectorstore.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deeplake_vectorstore.test_vs_commit

ImportError: High performance features require the libdeeplake package which is not available in Windows OS
Raw output
local_path = './hub_pytest/test_deeplake_vectorstore/test_vs_commit'

    def test_vs_commit(local_path):
        # TODO: add index params, when index will support commit
>       db = create_and_populate_vs(
            local_path, number_of_data=NUMBER_OF_DATA, index_params=None
        )

deeplake\core\vectorstore\test_deeplake_vectorstore.py:2847: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
deeplake\core\vectorstore\test_deeplake_vectorstore.py:1310: in create_and_populate_vs
    vector_store = DeepLakeVectorStore(
deeplake\core\vectorstore\deeplake_vectorstore.py:114: in __init__
    self.dataset_handler = get_dataset_handler(
deeplake\core\vectorstore\dataset_handlers\dataset_handler.py:13: in get_dataset_handler
    return ClientSideDH(*args, **kwargs)
deeplake\core\vectorstore\dataset_handlers\client_side_dataset_handler.py:66: in __init__
    self.dataset = dataset or dataset_utils.create_or_load_dataset(
deeplake\core\vectorstore\vector_search\dataset\dataset.py:48: in create_or_load_dataset
    utils.check_indra_installation(
deeplake\core\vectorstore\vector_search\utils.py:143: in check_indra_installation
    raise raise_indra_installation_error(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

indra_import_error = False

    def raise_indra_installation_error(indra_import_error: Optional[Exception] = None):
        if not indra_import_error:
            if os.name == "nt":
>               raise ImportError(
                    "High performance features require the libdeeplake package which is not available in Windows OS"
                )
E               ImportError: High performance features require the libdeeplake package which is not available in Windows OS

deeplake\enterprise\util.py:13: ImportError

Check failure on line 2992 in deeplake/core/vectorstore/test_deeplake_vectorstore.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deeplake_vectorstore.test_returning_tql_for_exec_option_compute_engine_should_return_correct_tql

NotImplementedError: return_tql is not supported for exec_option=python
Raw output
local_path = './hub_pytest/test_deeplake_vectorstore/test_returning_tql_for_exec_option_compute_engine_should_return_correct_tql'
hub_cloud_dev_token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTcwMjA1Mzk5NSwiZXhwIjoxNzA1NjUzOTk1fQ.eyJpZCI6InRlc3RpbmdhY2MyIn0.qwwJNEd0ptOQQyxAV35cJvJwmrIK_KKZ3GuxJ_MoZ-_0fHDYyt_oUdaoHTawpblKGCfW-PS8n6yUKZ3eYR4PvQ'

    def test_returning_tql_for_exec_option_compute_engine_should_return_correct_tql(
        local_path,
        hub_cloud_dev_token,
    ):
        db = VectorStore(
            path=local_path,
            token=hub_cloud_dev_token,
        )
    
        texts, embeddings, ids, metadatas, _ = utils.create_data(
            number_of_data=10, embedding_dim=3
        )
    
        db.add(text=texts, embedding=embeddings, id=ids, metadata=metadatas)
    
        query_embedding = np.zeros(3, dtype=np.float32)
>       output = db.search(embedding=query_embedding, return_tql=True)

deeplake\core\vectorstore\test_deeplake_vectorstore.py:2992: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
deeplake\core\vectorstore\deeplake_vectorstore.py:313: in search
    return self.dataset_handler.search(
deeplake\core\vectorstore\deep_memory\deep_memory.py:53: in wrapper
    return func(self, *args, **kwargs)
deeplake\core\vectorstore\dataset_handlers\client_side_dataset_handler.py:235: in search
    return vector_search.search(
deeplake\core\vectorstore\vector_search\vector_search.py:57: in search
    return EXEC_OPTION_TO_SEARCH_TYPE[exec_option](
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

query = None, query_emb = array([0., 0., 0.], dtype=float32)
exec_option = 'python'
dataset = Dataset(path='./hub_pytest/test_deeplake_vectorstore/test_returning_tql_for_exec_option_compute_engine_should_return_correct_tql', tensors=['text', 'metadata', 'embedding', 'id'])
logger = <Logger deeplake.core.vectorstore.deeplake_vectorstore (INFO)>
filter = None, embedding_tensor = 'embedding', distance_metric = 'COS', k = 4
return_tensors = ['text', 'metadata', 'id'], return_view = False
token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTcwMjA1Mzk5NSwiZXhwIjoxNzA1NjUzOTk1fQ.eyJpZCI6InRlc3RpbmdhY2MyIn0.qwwJNEd0ptOQQyxAV35cJvJwmrIK_KKZ3GuxJ_MoZ-_0fHDYyt_oUdaoHTawpblKGCfW-PS8n6yUKZ3eYR4PvQ'
org_id = None, return_tql = True

    def vector_search(
        query,
        query_emb,
        exec_option,
        dataset,
        logger,
        filter,
        embedding_tensor,
        distance_metric,
        k,
        return_tensors,
        return_view,
        token,
        org_id,
        return_tql,
    ) -> Union[Dict, DeepLakeDataset]:
        if query is not None:
            raise NotImplementedError(
                f"User-specified TQL queries are not supported for exec_option={exec_option} "
            )
    
        if return_tql:
>           raise NotImplementedError(
                f"return_tql is not supported for exec_option={exec_option}"
            )
E           NotImplementedError: return_tql is not supported for exec_option=python

deeplake\core\vectorstore\vector_search\python\vector_search.py:31: NotImplementedError

Check failure on line 58 in deeplake/client/test_client.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_client.test_client_workspace_organizations[creds]

AssertionError: assert 'testingacc2' in ['public']
 +  where ['public'] = <bound method DeepLakeBackendClient.get_user_organizations of <deeplake.client.client.DeepLakeBackendClient object at 0x7fc3c32d9650>>()
 +    where <bound method DeepLakeBackendClient.get_user_organizations of <deeplake.client.client.DeepLakeBackendClient object at 0x7fc3c32d9650>> = <deeplake.client.client.DeepLakeBackendClient object at 0x7fc3c32d9650>.get_user_organizations
Raw output
method = 'creds'
hub_cloud_dev_credentials = ('testingacc2', '63Fj@u#wHdxptRDn')
hub_cloud_dev_token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTcwMjA1MzYwNSwiZXhwIjoxNzA1NjUzNjA1fQ.eyJpZCI6InRlc3RpbmdhY2MyIn0.9sIr6pQgxcU1XquEwYUuUU7baXqe8Qgmt0Gew57xa3xBDM2amkoCAgYuFlaxqMHjsHp3AUApqarBKnNe3ixUqw'

    @pytest.mark.slow
    @pytest.mark.parametrize("method", ["creds", "token"])
    def test_client_workspace_organizations(
        method, hub_cloud_dev_credentials, hub_cloud_dev_token
    ):
        username, password = hub_cloud_dev_credentials
        deeplake_client = DeepLakeBackendClient()
    
        runner = CliRunner()
        result = runner.invoke(logout)
        assert result.exit_code == 0
    
        assert deeplake_client.get_user_organizations() == ["public"]
    
        if method == "creds":
            runner.invoke(login, f"-u {username} -p {password}")
        elif method == "token":
            runner.invoke(login, f"-t {hub_cloud_dev_token}")
    
        deeplake_client = DeepLakeBackendClient()
>       assert username in deeplake_client.get_user_organizations()
E       AssertionError: assert 'testingacc2' in ['public']
E        +  where ['public'] = <bound method DeepLakeBackendClient.get_user_organizations of <deeplake.client.client.DeepLakeBackendClient object at 0x7fc3c32d9650>>()
E        +    where <bound method DeepLakeBackendClient.get_user_organizations of <deeplake.client.client.DeepLakeBackendClient object at 0x7fc3c32d9650>> = <deeplake.client.client.DeepLakeBackendClient object at 0x7fc3c32d9650>.get_user_organizations

deeplake/client/test_client.py:58: AssertionError

Check failure on line 1 in deeplake/core/vectorstore/test_deeplake_vectorstore.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deeplake_vectorstore.test_update_embedding[embedding_fn3-hub_cloud_ds-None-None-None-None-vector_store_query-hub_cloud_dev_token]

failed on setup with "deeplake.util.exceptions.OverLimitException: You are over the allowed limits for this operation."
Raw output
hub_cloud_dev_credentials = ('testingacc2', '63Fj@u#wHdxptRDn')

    @pytest.fixture(scope="session")
    def hub_cloud_dev_token(hub_cloud_dev_credentials):
        username, password = hub_cloud_dev_credentials
    
        client = DeepLakeBackendClient()
>       token = client.request_auth_token(username, password)

deeplake/tests/client_fixtures.py:51: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
deeplake/client/client.py:192: in request_auth_token
    response = self.request("POST", GET_TOKEN_SUFFIX, json=json)
deeplake/client/client.py:163: in request
    check_response_status(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

response = <Response [429]>

    def check_response_status(response: requests.Response):
        """Check response status and throw corresponding exception on failure."""
        code = response.status_code
        if code >= 200 and code < 300:
            return
    
        try:
            message = response.json()["description"]
        except Exception:
            message = " "
    
        if code == 400:
            raise BadRequestException(message)
        elif response.status_code == 401:
            raise AuthenticationException
        elif response.status_code == 403:
            raise AuthorizationException(message, response=response)
        elif response.status_code == 404:
            if message != " ":
                raise ResourceNotFoundException(message)
            raise ResourceNotFoundException
        elif response.status_code == 422:
            raise UnprocessableEntityException(message)
        elif response.status_code == 423:
            raise LockedException
        elif response.status_code == 429:
>           raise OverLimitException
E           deeplake.util.exceptions.OverLimitException: You are over the allowed limits for this operation.

deeplake/client/utils.py:99: OverLimitException

Check failure on line 1 in deeplake/core/vectorstore/test_deeplake_vectorstore.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deeplake_vectorstore.test_update_embedding[None-local_auth_ds-vector_store_hash_ids-None-None-None-None-hub_cloud_dev_token]

failed on setup with "deeplake.util.exceptions.OverLimitException: You are over the allowed limits for this operation."
Raw output
hub_cloud_dev_credentials = ('testingacc2', '63Fj@u#wHdxptRDn')

    @pytest.fixture(scope="session")
    def hub_cloud_dev_token(hub_cloud_dev_credentials):
        username, password = hub_cloud_dev_credentials
    
        client = DeepLakeBackendClient()
>       token = client.request_auth_token(username, password)

deeplake/tests/client_fixtures.py:51: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
deeplake/client/client.py:192: in request_auth_token
    response = self.request("POST", GET_TOKEN_SUFFIX, json=json)
deeplake/client/client.py:163: in request
    check_response_status(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

response = <Response [429]>

    def check_response_status(response: requests.Response):
        """Check response status and throw corresponding exception on failure."""
        code = response.status_code
        if code >= 200 and code < 300:
            return
    
        try:
            message = response.json()["description"]
        except Exception:
            message = " "
    
        if code == 400:
            raise BadRequestException(message)
        elif response.status_code == 401:
            raise AuthenticationException
        elif response.status_code == 403:
            raise AuthorizationException(message, response=response)
        elif response.status_code == 404:
            if message != " ":
                raise ResourceNotFoundException(message)
            raise ResourceNotFoundException
        elif response.status_code == 422:
            raise UnprocessableEntityException(message)
        elif response.status_code == 423:
            raise LockedException
        elif response.status_code == 429:
>           raise OverLimitException
E           deeplake.util.exceptions.OverLimitException: You are over the allowed limits for this operation.

deeplake/client/utils.py:99: OverLimitException

Check failure on line 1 in deeplake/core/vectorstore/test_deeplake_vectorstore.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deeplake_vectorstore.test_update_embedding[None-local_auth_ds-None-vector_store_row_ids-None-None-None-hub_cloud_dev_token]

failed on setup with "deeplake.util.exceptions.OverLimitException: You are over the allowed limits for this operation."
Raw output
hub_cloud_dev_credentials = ('testingacc2', '63Fj@u#wHdxptRDn')

    @pytest.fixture(scope="session")
    def hub_cloud_dev_token(hub_cloud_dev_credentials):
        username, password = hub_cloud_dev_credentials
    
        client = DeepLakeBackendClient()
>       token = client.request_auth_token(username, password)

deeplake/tests/client_fixtures.py:51: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
deeplake/client/client.py:192: in request_auth_token
    response = self.request("POST", GET_TOKEN_SUFFIX, json=json)
deeplake/client/client.py:163: in request
    check_response_status(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

response = <Response [429]>

    def check_response_status(response: requests.Response):
        """Check response status and throw corresponding exception on failure."""
        code = response.status_code
        if code >= 200 and code < 300:
            return
    
        try:
            message = response.json()["description"]
        except Exception:
            message = " "
    
        if code == 400:
            raise BadRequestException(message)
        elif response.status_code == 401:
            raise AuthenticationException
        elif response.status_code == 403:
            raise AuthorizationException(message, response=response)
        elif response.status_code == 404:
            if message != " ":
                raise ResourceNotFoundException(message)
            raise ResourceNotFoundException
        elif response.status_code == 422:
            raise UnprocessableEntityException(message)
        elif response.status_code == 423:
            raise LockedException
        elif response.status_code == 429:
>           raise OverLimitException
E           deeplake.util.exceptions.OverLimitException: You are over the allowed limits for this operation.

deeplake/client/utils.py:99: OverLimitException

Check failure on line 1 in deeplake/core/vectorstore/test_deeplake_vectorstore.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deeplake_vectorstore.test_update_embedding[None-local_auth_ds-None-None-None-vector_store_filter_udf-None-hub_cloud_dev_token]

failed on setup with "deeplake.util.exceptions.OverLimitException: You are over the allowed limits for this operation."
Raw output
hub_cloud_dev_credentials = ('testingacc2', '63Fj@u#wHdxptRDn')

    @pytest.fixture(scope="session")
    def hub_cloud_dev_token(hub_cloud_dev_credentials):
        username, password = hub_cloud_dev_credentials
    
        client = DeepLakeBackendClient()
>       token = client.request_auth_token(username, password)

deeplake/tests/client_fixtures.py:51: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
deeplake/client/client.py:192: in request_auth_token
    response = self.request("POST", GET_TOKEN_SUFFIX, json=json)
deeplake/client/client.py:163: in request
    check_response_status(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

response = <Response [429]>

    def check_response_status(response: requests.Response):
        """Check response status and throw corresponding exception on failure."""
        code = response.status_code
        if code >= 200 and code < 300:
            return
    
        try:
            message = response.json()["description"]
        except Exception:
            message = " "
    
        if code == 400:
            raise BadRequestException(message)
        elif response.status_code == 401:
            raise AuthenticationException
        elif response.status_code == 403:
            raise AuthorizationException(message, response=response)
        elif response.status_code == 404:
            if message != " ":
                raise ResourceNotFoundException(message)
            raise ResourceNotFoundException
        elif response.status_code == 422:
            raise UnprocessableEntityException(message)
        elif response.status_code == 423:
            raise LockedException
        elif response.status_code == 429:
>           raise OverLimitException
E           deeplake.util.exceptions.OverLimitException: You are over the allowed limits for this operation.

deeplake/client/utils.py:99: OverLimitException

Check failure on line 1 in deeplake/core/vectorstore/test_deeplake_vectorstore.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deeplake_vectorstore.test_update_embedding[None-local_auth_ds-None-None-vector_store_filters-None-None-hub_cloud_dev_token]

failed on setup with "deeplake.util.exceptions.OverLimitException: You are over the allowed limits for this operation."
Raw output
hub_cloud_dev_credentials = ('testingacc2', '63Fj@u#wHdxptRDn')

    @pytest.fixture(scope="session")
    def hub_cloud_dev_token(hub_cloud_dev_credentials):
        username, password = hub_cloud_dev_credentials
    
        client = DeepLakeBackendClient()
>       token = client.request_auth_token(username, password)

deeplake/tests/client_fixtures.py:51: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
deeplake/client/client.py:192: in request_auth_token
    response = self.request("POST", GET_TOKEN_SUFFIX, json=json)
deeplake/client/client.py:163: in request
    check_response_status(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

response = <Response [429]>

    def check_response_status(response: requests.Response):
        """Check response status and throw corresponding exception on failure."""
        code = response.status_code
        if code >= 200 and code < 300:
            return
    
        try:
            message = response.json()["description"]
        except Exception:
            message = " "
    
        if code == 400:
            raise BadRequestException(message)
        elif response.status_code == 401:
            raise AuthenticationException
        elif response.status_code == 403:
            raise AuthorizationException(message, response=response)
        elif response.status_code == 404:
            if message != " ":
                raise ResourceNotFoundException(message)
            raise ResourceNotFoundException
        elif response.status_code == 422:
            raise UnprocessableEntityException(message)
        elif response.status_code == 423:
            raise LockedException
        elif response.status_code == 429:
>           raise OverLimitException
E           deeplake.util.exceptions.OverLimitException: You are over the allowed limits for this operation.

deeplake/client/utils.py:99: OverLimitException

Check failure on line 1 in deeplake/core/vectorstore/test_deeplake_vectorstore.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deeplake_vectorstore.test_update_embedding[None-hub_cloud_ds-None-None-None-None-vector_store_query-hub_cloud_dev_token]

failed on setup with "deeplake.util.exceptions.OverLimitException: You are over the allowed limits for this operation."
Raw output
hub_cloud_dev_credentials = ('testingacc2', '63Fj@u#wHdxptRDn')

    @pytest.fixture(scope="session")
    def hub_cloud_dev_token(hub_cloud_dev_credentials):
        username, password = hub_cloud_dev_credentials
    
        client = DeepLakeBackendClient()
>       token = client.request_auth_token(username, password)

deeplake/tests/client_fixtures.py:51: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
deeplake/client/client.py:192: in request_auth_token
    response = self.request("POST", GET_TOKEN_SUFFIX, json=json)
deeplake/client/client.py:163: in request
    check_response_status(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

response = <Response [429]>

    def check_response_status(response: requests.Response):
        """Check response status and throw corresponding exception on failure."""
        code = response.status_code
        if code >= 200 and code < 300:
            return
    
        try:
            message = response.json()["description"]
        except Exception:
            message = " "
    
        if code == 400:
            raise BadRequestException(message)
        elif response.status_code == 401:
            raise AuthenticationException
        elif response.status_code == 403:
            raise AuthorizationException(message, response=response)
        elif response.status_code == 404:
            if message != " ":
                raise ResourceNotFoundException(message)
            raise ResourceNotFoundException
        elif response.status_code == 422:
            raise UnprocessableEntityException(message)
        elif response.status_code == 423:
            raise LockedException
        elif response.status_code == 429:
>           raise OverLimitException
E           deeplake.util.exceptions.OverLimitException: You are over the allowed limits for this operation.

deeplake/client/utils.py:99: OverLimitException

Check failure on line 548 in deeplake/core/vectorstore/deep_memory/test_deepmemory.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deepmemory.test_deepmemory_search

deeplake.util.exceptions.BadGatewayException: Invalid response from Activeloop server.
Raw output
corpus_query_relevances_copy = ('hub://testingacc2/tmp71a9_test_deepmemory_test_deepmemory_search', ['0-dimensional biomaterials lack inductive prope...265107', 1]], [['32587939', 1]], ...], 'hub://testingacc2/tmp71a9_test_deepmemory_test_deepmemory_search_eval_queries')
testing_relevance_query_deepmemory = ('31715818', [-0.015188165009021759, 0.02033962868154049, -0.012286307290196419, 0.009264647960662842, -0.00939110480248928, 0.00015578352031297982, ...])
hub_cloud_dev_token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTcwMjA1NDk0NywiZXhwIjoxNzA1NjU0OTQ3fQ.eyJpZCI6InRlc3RpbmdhY2MyIn0.ZWQ-8eRq5iglFN3ZkYv1cOmCQIsalT_EJYj-Nw7J-sYAbk91czjibnSTPUJPMVsbmfH-WAjnuXDFHkWH1DVa8w'

    @pytest.mark.slow
    @pytest.mark.skipif(sys.platform == "win32", reason="Does not run on Windows")
    def test_deepmemory_search(
        corpus_query_relevances_copy,
        testing_relevance_query_deepmemory,
        hub_cloud_dev_token,
    ):
        corpus, _, _, _ = corpus_query_relevances_copy
        relevance, query_embedding = testing_relevance_query_deepmemory
    
        db = VectorStore(
            path=corpus,
            runtime={"tensor_db": True},
            token=hub_cloud_dev_token,
        )
    
>       output = db.search(
            embedding=query_embedding, deep_memory=True, return_tensors=["id"]
        )

deeplake/core/vectorstore/deep_memory/test_deepmemory.py:548: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
deeplake/core/vectorstore/deeplake_vectorstore.py:313: in search
    return self.dataset_handler.search(
deeplake/core/vectorstore/deep_memory/deep_memory.py:53: in wrapper
    return func(self, *args, **kwargs)
deeplake/core/vectorstore/dataset_handlers/client_side_dataset_handler.py:235: in search
    return vector_search.search(
deeplake/core/vectorstore/vector_search/vector_search.py:57: in search
    return EXEC_OPTION_TO_SEARCH_TYPE[exec_option](
deeplake/core/vectorstore/vector_search/indra/vector_search.py:47: in vector_search
    return vectorstore.indra_search_algorithm(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:209: in search
    return searcher.run(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:57: in run
    view = self._get_view(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:151: in _get_view
    view, data = self.deeplake_dataset.query(
deeplake/core/dataset/dataset.py:2338: in query
    response = client.remote_query(org_id, ds_name, query_string)
deeplake/client/client.py:507: in remote_query
    response = self.request(
deeplake/client/client.py:163: in request
    check_response_status(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

response = <Response [502]>

    def check_response_status(response: requests.Response):
        """Check response status and throw corresponding exception on failure."""
        code = response.status_code
        if code >= 200 and code < 300:
            return
    
        try:
            message = response.json()["description"]
        except Exception:
            message = " "
    
        if code == 400:
            raise BadRequestException(message)
        elif response.status_code == 401:
            raise AuthenticationException
        elif response.status_code == 403:
            raise AuthorizationException(message, response=response)
        elif response.status_code == 404:
            if message != " ":
                raise ResourceNotFoundException(message)
            raise ResourceNotFoundException
        elif response.status_code == 422:
            raise UnprocessableEntityException(message)
        elif response.status_code == 423:
            raise LockedException
        elif response.status_code == 429:
            raise OverLimitException
        elif response.status_code == 502:
>           raise BadGatewayException
E           deeplake.util.exceptions.BadGatewayException: Invalid response from Activeloop server.

deeplake/client/utils.py:101: BadGatewayException

Check failure on line 706 in deeplake/core/vectorstore/deep_memory/test_deepmemory.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deepmemory.test_deepmemory_search_should_contain_correct_answer

deeplake.util.exceptions.ServerException: Server under maintenance, try again later.
Raw output
corpus_query_relevances_copy = ('hub://testingacc2/tmp71a9_test_deepmemory_test_deepmemory_search_should_contain_correct_answer', ['0-dimensional bio...], ...], 'hub://testingacc2/tmp71a9_test_deepmemory_test_deepmemory_search_should_contain_correct_answer_eval_queries')
testing_relevance_query_deepmemory = ('31715818', [-0.015188165009021759, 0.02033962868154049, -0.012286307290196419, 0.009264647960662842, -0.00939110480248928, 0.00015578352031297982, ...])
hub_cloud_dev_token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTcwMjA1NDk0NywiZXhwIjoxNzA1NjU0OTQ3fQ.eyJpZCI6InRlc3RpbmdhY2MyIn0.ZWQ-8eRq5iglFN3ZkYv1cOmCQIsalT_EJYj-Nw7J-sYAbk91czjibnSTPUJPMVsbmfH-WAjnuXDFHkWH1DVa8w'

    @pytest.mark.slow
    @pytest.mark.skipif(sys.platform == "win32", reason="Does not run on Windows")
    def test_deepmemory_search_should_contain_correct_answer(
        corpus_query_relevances_copy,
        testing_relevance_query_deepmemory,
        hub_cloud_dev_token,
    ):
        corpus, _, _, _ = corpus_query_relevances_copy
        relevance, query_embedding = testing_relevance_query_deepmemory
    
        db = VectorStore(
            path=corpus,
            token=hub_cloud_dev_token,
        )
    
>       output = db.search(
            embedding=query_embedding, deep_memory=True, return_tensors=["id"]
        )

deeplake/core/vectorstore/deep_memory/test_deepmemory.py:706: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
deeplake/core/vectorstore/deeplake_vectorstore.py:313: in search
    return self.dataset_handler.search(
deeplake/core/vectorstore/deep_memory/deep_memory.py:53: in wrapper
    return func(self, *args, **kwargs)
deeplake/core/vectorstore/dataset_handlers/client_side_dataset_handler.py:235: in search
    return vector_search.search(
deeplake/core/vectorstore/vector_search/vector_search.py:57: in search
    return EXEC_OPTION_TO_SEARCH_TYPE[exec_option](
deeplake/core/vectorstore/vector_search/indra/vector_search.py:47: in vector_search
    return vectorstore.indra_search_algorithm(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:209: in search
    return searcher.run(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:57: in run
    view = self._get_view(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:151: in _get_view
    view, data = self.deeplake_dataset.query(
deeplake/core/dataset/dataset.py:2338: in query
    response = client.remote_query(org_id, ds_name, query_string)
deeplake/client/client.py:507: in remote_query
    response = self.request(
deeplake/client/client.py:163: in request
    check_response_status(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

response = <Response [503]>

    def check_response_status(response: requests.Response):
        """Check response status and throw corresponding exception on failure."""
        code = response.status_code
        if code >= 200 and code < 300:
            return
    
        try:
            message = response.json()["description"]
        except Exception:
            message = " "
    
        if code == 400:
            raise BadRequestException(message)
        elif response.status_code == 401:
            raise AuthenticationException
        elif response.status_code == 403:
            raise AuthorizationException(message, response=response)
        elif response.status_code == 404:
            if message != " ":
                raise ResourceNotFoundException(message)
            raise ResourceNotFoundException
        elif response.status_code == 422:
            raise UnprocessableEntityException(message)
        elif response.status_code == 423:
            raise LockedException
        elif response.status_code == 429:
            raise OverLimitException
        elif response.status_code == 502:
            raise BadGatewayException
        elif response.status_code == 504:
            raise GatewayTimeoutException
        elif 500 <= response.status_code < 600:
>           raise ServerException("Server under maintenance, try again later.")
E           deeplake.util.exceptions.ServerException: Server under maintenance, try again later.

deeplake/client/utils.py:105: ServerException

Check failure on line 727 in deeplake/core/vectorstore/deep_memory/test_deepmemory.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deepmemory.test_deeplake_search_should_not_contain_correct_answer

deeplake.util.exceptions.ServerException: Server under maintenance, try again later.
Raw output
corpus_query_relevances_copy = ('hub://testingacc2/tmp71a9_test_deepmemory_test_deeplake_search_should_not_contain_correct_answer', ['0-dimensional b... ...], 'hub://testingacc2/tmp71a9_test_deepmemory_test_deeplake_search_should_not_contain_correct_answer_eval_queries')
testing_relevance_query_deepmemory = ('31715818', [-0.015188165009021759, 0.02033962868154049, -0.012286307290196419, 0.009264647960662842, -0.00939110480248928, 0.00015578352031297982, ...])
hub_cloud_dev_token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTcwMjA1NDk0NywiZXhwIjoxNzA1NjU0OTQ3fQ.eyJpZCI6InRlc3RpbmdhY2MyIn0.ZWQ-8eRq5iglFN3ZkYv1cOmCQIsalT_EJYj-Nw7J-sYAbk91czjibnSTPUJPMVsbmfH-WAjnuXDFHkWH1DVa8w'

    @pytest.mark.slow
    @pytest.mark.skipif(sys.platform == "win32", reason="Does not run on Windows")
    def test_deeplake_search_should_not_contain_correct_answer(
        corpus_query_relevances_copy,
        testing_relevance_query_deepmemory,
        hub_cloud_dev_token,
    ):
        corpus, _, _, _ = corpus_query_relevances_copy
        relevance, query_embedding = testing_relevance_query_deepmemory
    
        db = VectorStore(
            path=corpus,
            token=hub_cloud_dev_token,
        )
>       output = db.search(embedding=query_embedding)

deeplake/core/vectorstore/deep_memory/test_deepmemory.py:727: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
deeplake/core/vectorstore/deeplake_vectorstore.py:313: in search
    return self.dataset_handler.search(
deeplake/core/vectorstore/deep_memory/deep_memory.py:53: in wrapper
    return func(self, *args, **kwargs)
deeplake/core/vectorstore/dataset_handlers/client_side_dataset_handler.py:235: in search
    return vector_search.search(
deeplake/core/vectorstore/vector_search/vector_search.py:57: in search
    return EXEC_OPTION_TO_SEARCH_TYPE[exec_option](
deeplake/core/vectorstore/vector_search/indra/vector_search.py:47: in vector_search
    return vectorstore.indra_search_algorithm(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:209: in search
    return searcher.run(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:57: in run
    view = self._get_view(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:151: in _get_view
    view, data = self.deeplake_dataset.query(
deeplake/core/dataset/dataset.py:2338: in query
    response = client.remote_query(org_id, ds_name, query_string)
deeplake/client/client.py:507: in remote_query
    response = self.request(
deeplake/client/client.py:163: in request
    check_response_status(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

response = <Response [503]>

    def check_response_status(response: requests.Response):
        """Check response status and throw corresponding exception on failure."""
        code = response.status_code
        if code >= 200 and code < 300:
            return
    
        try:
            message = response.json()["description"]
        except Exception:
            message = " "
    
        if code == 400:
            raise BadRequestException(message)
        elif response.status_code == 401:
            raise AuthenticationException
        elif response.status_code == 403:
            raise AuthorizationException(message, response=response)
        elif response.status_code == 404:
            if message != " ":
                raise ResourceNotFoundException(message)
            raise ResourceNotFoundException
        elif response.status_code == 422:
            raise UnprocessableEntityException(message)
        elif response.status_code == 423:
            raise LockedException
        elif response.status_code == 429:
            raise OverLimitException
        elif response.status_code == 502:
            raise BadGatewayException
        elif response.status_code == 504:
            raise GatewayTimeoutException
        elif 500 <= response.status_code < 600:
>           raise ServerException("Server under maintenance, try again later.")
E           deeplake.util.exceptions.ServerException: Server under maintenance, try again later.

deeplake/client/utils.py:105: ServerException

Check failure on line 20 in deeplake/cli/test_cli.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_cli.test_cli_auth[creds]

AssertionError: assert 'Encountered ...gain later.\n' == 'Successfully...Activeloop.\n'
  - Successfully logged in to Activeloop.
  + Encountered an error You are over the allowed limits for this operation. Please try again later.
Raw output
hub_cloud_dev_credentials = ('testingacc2', '63Fj@u#wHdxptRDn')
hub_cloud_dev_token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTcwMjA1MzMyNywiZXhwIjoxNzA1NjUzMzI3fQ.eyJpZCI6InRlc3RpbmdhY2MyIn0.rm_VYuJZCXubPhSaLX8YgouprLzq00xRbUQmVF6l5igM0WP0mnDhTT48kIbcyhsJUN_aFr1jd-hwWHSdpLiFhw'
method = 'creds'

    @pytest.mark.parametrize("method", ["creds", "token"])
    def test_cli_auth(hub_cloud_dev_credentials, hub_cloud_dev_token, method):
        username, password = hub_cloud_dev_credentials
    
        runner = CliRunner()
    
        if method == "creds":
            result = runner.invoke(login, f"-u {username} -p {password}")
        elif method == "token":
            result = runner.invoke(login, f"-t {hub_cloud_dev_token}")
    
        assert result.exit_code == 0
>       assert result.output == "Successfully logged in to Activeloop.\n"
E       AssertionError: assert 'Encountered ...gain later.\n' == 'Successfully...Activeloop.\n'
E         - Successfully logged in to Activeloop.
E         + Encountered an error You are over the allowed limits for this operation. Please try again later.

deeplake/cli/test_cli.py:20: AssertionError

Check failure on line 23 in deeplake/client/test_client.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_client.test_client_requests

deeplake.util.exceptions.OverLimitException: You are over the allowed limits for this operation.
Raw output
hub_cloud_dev_credentials = ('testingacc2', '63Fj@u#wHdxptRDn')

    @pytest.mark.slow
    def test_client_requests(hub_cloud_dev_credentials):
        username, password = hub_cloud_dev_credentials
    
        deeplake_client = DeepLakeBackendClient()
>       deeplake_client.request_auth_token(username, password)

deeplake/client/test_client.py:23: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
deeplake/client/client.py:192: in request_auth_token
    response = self.request("POST", GET_TOKEN_SUFFIX, json=json)
deeplake/client/client.py:163: in request
    check_response_status(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

response = <Response [429]>

    def check_response_status(response: requests.Response):
        """Check response status and throw corresponding exception on failure."""
        code = response.status_code
        if code >= 200 and code < 300:
            return
    
        try:
            message = response.json()["description"]
        except Exception:
            message = " "
    
        if code == 400:
            raise BadRequestException(message)
        elif response.status_code == 401:
            raise AuthenticationException
        elif response.status_code == 403:
            raise AuthorizationException(message, response=response)
        elif response.status_code == 404:
            if message != " ":
                raise ResourceNotFoundException(message)
            raise ResourceNotFoundException
        elif response.status_code == 422:
            raise UnprocessableEntityException(message)
        elif response.status_code == 423:
            raise LockedException
        elif response.status_code == 429:
>           raise OverLimitException
E           deeplake.util.exceptions.OverLimitException: You are over the allowed limits for this operation.

deeplake/client/utils.py:99: OverLimitException

Check failure on line 58 in deeplake/client/test_client.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_client.test_client_workspace_organizations[creds]

AssertionError: assert 'testingacc2' in ['public']
 +  where ['public'] = <bound method DeepLakeBackendClient.get_user_organizations of <deeplake.client.client.DeepLakeBackendClient object at 0x7f9d256e6ee0>>()
 +    where <bound method DeepLakeBackendClient.get_user_organizations of <deeplake.client.client.DeepLakeBackendClient object at 0x7f9d256e6ee0>> = <deeplake.client.client.DeepLakeBackendClient object at 0x7f9d256e6ee0>.get_user_organizations
Raw output
method = 'creds'
hub_cloud_dev_credentials = ('testingacc2', '63Fj@u#wHdxptRDn')
hub_cloud_dev_token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTcwMjA1MzY5MiwiZXhwIjoxNzA1NjUzNjkyfQ.eyJpZCI6InRlc3RpbmdhY2MyIn0.KqxD9tv0ywoyjbQtf20Rmb55qdd0K1LF7QyrDvJYTDdxBXzOAnRDIkfW5riA06cZJcyKvJQg7eoW8VPvtRKsEQ'

    @pytest.mark.slow
    @pytest.mark.parametrize("method", ["creds", "token"])
    def test_client_workspace_organizations(
        method, hub_cloud_dev_credentials, hub_cloud_dev_token
    ):
        username, password = hub_cloud_dev_credentials
        deeplake_client = DeepLakeBackendClient()
    
        runner = CliRunner()
        result = runner.invoke(logout)
        assert result.exit_code == 0
    
        assert deeplake_client.get_user_organizations() == ["public"]
    
        if method == "creds":
            runner.invoke(login, f"-u {username} -p {password}")
        elif method == "token":
            runner.invoke(login, f"-t {hub_cloud_dev_token}")
    
        deeplake_client = DeepLakeBackendClient()
>       assert username in deeplake_client.get_user_organizations()
E       AssertionError: assert 'testingacc2' in ['public']
E        +  where ['public'] = <bound method DeepLakeBackendClient.get_user_organizations of <deeplake.client.client.DeepLakeBackendClient object at 0x7f9d256e6ee0>>()
E        +    where <bound method DeepLakeBackendClient.get_user_organizations of <deeplake.client.client.DeepLakeBackendClient object at 0x7f9d256e6ee0>> = <deeplake.client.client.DeepLakeBackendClient object at 0x7f9d256e6ee0>.get_user_organizations

deeplake/client/test_client.py:58: AssertionError

Check failure on line 548 in deeplake/core/vectorstore/deep_memory/test_deepmemory.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deepmemory.test_deepmemory_search

deeplake.util.exceptions.BadGatewayException: Invalid response from Activeloop server.
Raw output
corpus_query_relevances_copy = ('hub://testingacc2/tmpfa35_test_deepmemory_test_deepmemory_search', ['0-dimensional biomaterials lack inductive prope...265107', 1]], [['32587939', 1]], ...], 'hub://testingacc2/tmpfa35_test_deepmemory_test_deepmemory_search_eval_queries')
testing_relevance_query_deepmemory = ('31715818', [-0.015188165009021759, 0.02033962868154049, -0.012286307290196419, 0.009264647960662842, -0.00939110480248928, 0.00015578352031297982, ...])
hub_cloud_dev_token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTcwMjA1NTcyMCwiZXhwIjoxNzA1NjU1NzIwfQ.eyJpZCI6InRlc3RpbmdhY2MyIn0.sy4l6kH5Yi6yawAZ2JWT0UNffsIYa0P2AobTG2vf4qHSBSGHogi67b3R1GUvZseOJpmK-jFCwcc2oxVgk7dVfg'

    @pytest.mark.slow
    @pytest.mark.skipif(sys.platform == "win32", reason="Does not run on Windows")
    def test_deepmemory_search(
        corpus_query_relevances_copy,
        testing_relevance_query_deepmemory,
        hub_cloud_dev_token,
    ):
        corpus, _, _, _ = corpus_query_relevances_copy
        relevance, query_embedding = testing_relevance_query_deepmemory
    
        db = VectorStore(
            path=corpus,
            runtime={"tensor_db": True},
            token=hub_cloud_dev_token,
        )
    
>       output = db.search(
            embedding=query_embedding, deep_memory=True, return_tensors=["id"]
        )

deeplake/core/vectorstore/deep_memory/test_deepmemory.py:548: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
deeplake/core/vectorstore/deeplake_vectorstore.py:313: in search
    return self.dataset_handler.search(
deeplake/core/vectorstore/deep_memory/deep_memory.py:53: in wrapper
    return func(self, *args, **kwargs)
deeplake/core/vectorstore/dataset_handlers/client_side_dataset_handler.py:235: in search
    return vector_search.search(
deeplake/core/vectorstore/vector_search/vector_search.py:57: in search
    return EXEC_OPTION_TO_SEARCH_TYPE[exec_option](
deeplake/core/vectorstore/vector_search/indra/vector_search.py:47: in vector_search
    return vectorstore.indra_search_algorithm(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:209: in search
    return searcher.run(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:57: in run
    view = self._get_view(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:151: in _get_view
    view, data = self.deeplake_dataset.query(
deeplake/core/dataset/dataset.py:2338: in query
    response = client.remote_query(org_id, ds_name, query_string)
deeplake/client/client.py:507: in remote_query
    response = self.request(
deeplake/client/client.py:163: in request
    check_response_status(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

response = <Response [502]>

    def check_response_status(response: requests.Response):
        """Check response status and throw corresponding exception on failure."""
        code = response.status_code
        if code >= 200 and code < 300:
            return
    
        try:
            message = response.json()["description"]
        except Exception:
            message = " "
    
        if code == 400:
            raise BadRequestException(message)
        elif response.status_code == 401:
            raise AuthenticationException
        elif response.status_code == 403:
            raise AuthorizationException(message, response=response)
        elif response.status_code == 404:
            if message != " ":
                raise ResourceNotFoundException(message)
            raise ResourceNotFoundException
        elif response.status_code == 422:
            raise UnprocessableEntityException(message)
        elif response.status_code == 423:
            raise LockedException
        elif response.status_code == 429:
            raise OverLimitException
        elif response.status_code == 502:
>           raise BadGatewayException
E           deeplake.util.exceptions.BadGatewayException: Invalid response from Activeloop server.

deeplake/client/utils.py:101: BadGatewayException

Check failure on line 706 in deeplake/core/vectorstore/deep_memory/test_deepmemory.py

See this annotation in the file changed.

@github-actions github-actions / JUnit Test Report

test_deepmemory.test_deepmemory_search_should_contain_correct_answer

deeplake.util.exceptions.BadGatewayException: Invalid response from Activeloop server.
Raw output
corpus_query_relevances_copy = ('hub://testingacc2/tmpfa35_test_deepmemory_test_deepmemory_search_should_contain_correct_answer', ['0-dimensional bio...], ...], 'hub://testingacc2/tmpfa35_test_deepmemory_test_deepmemory_search_should_contain_correct_answer_eval_queries')
testing_relevance_query_deepmemory = ('31715818', [-0.015188165009021759, 0.02033962868154049, -0.012286307290196419, 0.009264647960662842, -0.00939110480248928, 0.00015578352031297982, ...])
hub_cloud_dev_token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTcwMjA1NTcyMCwiZXhwIjoxNzA1NjU1NzIwfQ.eyJpZCI6InRlc3RpbmdhY2MyIn0.sy4l6kH5Yi6yawAZ2JWT0UNffsIYa0P2AobTG2vf4qHSBSGHogi67b3R1GUvZseOJpmK-jFCwcc2oxVgk7dVfg'

    @pytest.mark.slow
    @pytest.mark.skipif(sys.platform == "win32", reason="Does not run on Windows")
    def test_deepmemory_search_should_contain_correct_answer(
        corpus_query_relevances_copy,
        testing_relevance_query_deepmemory,
        hub_cloud_dev_token,
    ):
        corpus, _, _, _ = corpus_query_relevances_copy
        relevance, query_embedding = testing_relevance_query_deepmemory
    
        db = VectorStore(
            path=corpus,
            token=hub_cloud_dev_token,
        )
    
>       output = db.search(
            embedding=query_embedding, deep_memory=True, return_tensors=["id"]
        )

deeplake/core/vectorstore/deep_memory/test_deepmemory.py:706: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
deeplake/core/vectorstore/deeplake_vectorstore.py:313: in search
    return self.dataset_handler.search(
deeplake/core/vectorstore/deep_memory/deep_memory.py:53: in wrapper
    return func(self, *args, **kwargs)
deeplake/core/vectorstore/dataset_handlers/client_side_dataset_handler.py:235: in search
    return vector_search.search(
deeplake/core/vectorstore/vector_search/vector_search.py:57: in search
    return EXEC_OPTION_TO_SEARCH_TYPE[exec_option](
deeplake/core/vectorstore/vector_search/indra/vector_search.py:47: in vector_search
    return vectorstore.indra_search_algorithm(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:209: in search
    return searcher.run(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:57: in run
    view = self._get_view(
deeplake/core/vectorstore/vector_search/indra/search_algorithm.py:151: in _get_view
    view, data = self.deeplake_dataset.query(
deeplake/core/dataset/dataset.py:2338: in query
    response = client.remote_query(org_id, ds_name, query_string)
deeplake/client/client.py:507: in remote_query
    response = self.request(
deeplake/client/client.py:163: in request
    check_response_status(response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

response = <Response [502]>

    def check_response_status(response: requests.Response):
        """Check response status and throw corresponding exception on failure."""
        code = response.status_code
        if code >= 200 and code < 300:
            return
    
        try:
            message = response.json()["description"]
        except Exception:
            message = " "
    
        if code == 400:
            raise BadRequestException(message)
        elif response.status_code == 401:
            raise AuthenticationException
        elif response.status_code == 403:
            raise AuthorizationException(message, response=response)
        elif response.status_code == 404:
            if message != " ":
                raise ResourceNotFoundException(message)
            raise ResourceNotFoundException
        elif response.status_code == 422:
            raise UnprocessableEntityException(message)
        elif response.status_code == 423:
            raise LockedException
        elif response.status_code == 429:
            raise OverLimitException
        elif response.status_code == 502:
>           raise BadGatewayException
E           deeplake.util.exceptions.BadGatewayException: Invalid response from Activeloop server.

deeplake/client/utils.py:101: BadGatewayException