Merge pull request #2831 from activeloopai/SSLError #1549
GitHub Actions / JUnit Test Report
failed
Apr 24, 2024 in 0s
21991 tests run, 11440 passed, 10549 skipped, 2 failed.
Annotations
Check failure on line 467 in deeplake/core/query/test/test_query.py
github-actions / JUnit Test Report
test_query.test_link_materialize[1]
deeplake.util.exceptions.SampleAppendError: Failed to append a sample to the tensor 'abc'. See more details in the traceback.
Raw output
self = Sample(is_lazy=True, path=https://picsum.photos/20/20)
def _read_from_path(self) -> bytes: # type: ignore
if self._buffer is None:
path_type = get_path_type(self.path)
try:
if path_type == "local":
self._buffer = self._read_from_local()
elif path_type == "gcs":
self._buffer = self._read_from_gcs()
elif path_type == "s3":
self._buffer = self._read_from_s3()
elif path_type == "azure":
self._buffer = self._read_from_azure()
elif path_type == "gdrive":
self._buffer = self._read_from_gdrive()
elif path_type == "http":
> self._buffer = self._read_from_http(timeout=self._timeout)
deeplake\core\sample.py:459:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = Sample(is_lazy=True, path=https://picsum.photos/20/20), timeout = None
def _read_from_http(self, timeout=None) -> bytes:
assert self.path is not None
if "Authorization" in self._creds:
headers = {"Authorization": self._creds["Authorization"]}
else:
headers = {}
result = requests.get(self.path, headers=headers, timeout=timeout)
if result.status_code != 200:
> raise UnableToReadFromUrlError(self.path, result.status_code)
E deeplake.util.exceptions.UnableToReadFromUrlError: Unable to read from url https://picsum.photos/20/20. Status code: 520
deeplake\core\sample.py:525: UnableToReadFromUrlError
The above exception was the direct cause of the following exception:
sample_path = 'https://picsum.photos/20/20', sample_creds_key = None
link_creds = <deeplake.core.link_creds.LinkCreds object at 0x0000017D0ECDDF30>
verify = True
def read_linked_sample(
sample_path: str, sample_creds_key: Optional[str], link_creds, verify: bool
):
provider_type = get_path_type(sample_path)
try:
if provider_type == "local":
return deeplake.read(sample_path, verify=verify)
elif provider_type == "http":
> return _read_http_linked_sample(
link_creds, sample_creds_key, sample_path, verify
)
deeplake\core\linked_sample.py:29:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
deeplake\core\linked_sample.py:50: in wrapper
raise e
deeplake\core\linked_sample.py:43: in wrapper
return f(linked_creds, sample_creds_key, *args, **kwargs)
deeplake\core\linked_sample.py:72: in _read_http_linked_sample
return deeplake.read(sample_path, verify=verify, creds=creds)
deeplake\api\read.py:63: in read
return Sample(
deeplake\core\sample.py:105: in __init__
compressed_bytes = self._read_from_path()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = Sample(is_lazy=True, path=https://picsum.photos/20/20)
def _read_from_path(self) -> bytes: # type: ignore
if self._buffer is None:
path_type = get_path_type(self.path)
try:
if path_type == "local":
self._buffer = self._read_from_local()
elif path_type == "gcs":
self._buffer = self._read_from_gcs()
elif path_type == "s3":
self._buffer = self._read_from_s3()
elif path_type == "azure":
self._buffer = self._read_from_azure()
elif path_type == "gdrive":
self._buffer = self._read_from_gdrive()
elif path_type == "http":
self._buffer = self._read_from_http(timeout=self._timeout)
except Exception as e:
> raise SampleReadError(self.path) from e # type: ignore
E deeplake.util.exceptions.SampleReadError: Unable to read sample from https://picsum.photos/20/20
deeplake\core\sample.py:461: SampleReadError
The above exception was the direct cause of the following exception:
self = <deeplake.core.linked_chunk_engine.LinkedChunkEngine object at 0x0000017D0ECDF160>
samples = [<deeplake.core.linked_sample.LinkedSample object at 0x0000017D0B320C10>, <deeplake.core.linked_sample.LinkedSample ob...nkedSample object at 0x0000017D0ECDCF40>, <deeplake.core.linked_sample.LinkedSample object at 0x0000017D0ECDC910>, ...]
verify = True, ignore_errors = False
def check_each_sample(self, samples, verify=True, ignore_errors=False):
link_creds = self.link_creds
verified_samples = []
skipped = []
for i, sample in enumerate(samples):
try:
if isinstance(sample, deeplake.core.tensor.Tensor) and sample.is_link:
sample = sample._linked_sample()
samples[i] = sample
elif (
not isinstance(sample, (LinkedSample, LinkedTiledSample))
and sample is not None
):
raise TypeError(
f"Expected LinkedSample or LinkedTiledSample, got {type(sample)} instead. Use deeplake.link() to link samples or deeplake.link_tiled() to link multiple images as tiles."
)
path, creds_key = get_path_creds_key(sample)
# verifies existence of creds_key
if verify:
link_creds.get_encoding(creds_key, path)
if sample is None or sample.path == "":
verified_samples.append(sample)
elif isinstance(sample, LinkedTiledSample):
verify_samples = self.verify and verify
sample.set_check_tile_shape(self.link_creds, verify_samples)
sample.set_sample_shape()
verified_samples.append(sample)
else:
try:
_verify = verify and self.verify
verified_samples.append(
> read_linked_sample(
sample.path,
sample.creds_key,
self.link_creds,
verify=_verify,
)
)
deeplake\core\linked_chunk_engine.py:280:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
sample_path = 'https://picsum.photos/20/20', sample_creds_key = None
link_creds = <deeplake.core.link_creds.LinkCreds object at 0x0000017D0ECDDF30>
verify = True
def read_linked_sample(
sample_path: str, sample_creds_key: Optional[str], link_creds, verify: bool
):
provider_type = get_path_type(sample_path)
try:
if provider_type == "local":
return deeplake.read(sample_path, verify=verify)
elif provider_type == "http":
return _read_http_linked_sample(
link_creds, sample_creds_key, sample_path, verify
)
else:
return _read_cloud_linked_sample(
link_creds, sample_creds_key, sample_path, provider_type, verify
)
except Exception as e:
> raise GetDataFromLinkError(sample_path) from e
E deeplake.util.exceptions.GetDataFromLinkError: Unable to get data from link https://picsum.photos/20/20.
deeplake\core\linked_sample.py:37: GetDataFromLinkError
The above exception was the direct cause of the following exception:
self = <deeplake.core.linked_chunk_engine.LinkedChunkEngine object at 0x0000017D0ECDF160>
samples = [<deeplake.core.linked_sample.LinkedSample object at 0x0000017D0B320C10>, <deeplake.core.linked_sample.LinkedSample ob...nkedSample object at 0x0000017D0ECDCF40>, <deeplake.core.linked_sample.LinkedSample object at 0x0000017D0ECDC910>, ...]
progressbar = False
link_callback = <bound method Tensor._extend_links of Tensor(key='abc')>
pg_callback = None, ignore_errors = False, verified_samples = None
def extend(
self,
samples,
progressbar: bool = False,
link_callback: Optional[Callable] = None,
pg_callback=None,
ignore_errors: bool = False,
verified_samples=None,
):
try:
assert not (progressbar and pg_callback)
self.check_link_ready()
if not self.write_initialization_done:
self._write_initialization()
self.write_initialization_done = True
initial_autoflush = self.cache.autoflush
self.cache.autoflush = False
num_samples = self.tensor_length
if self.is_sequence:
self._extend_sequence(
samples,
progressbar,
link_callback,
ignore_errors,
verified_samples,
)
else:
> verified_samples = self._extend(
samples,
progressbar,
pg_callback=pg_callback,
ignore_errors=ignore_errors,
verified_samples=verified_samples,
)
deeplake\core\chunk_engine.py:1235:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
deeplake\core\chunk_engine.py:1092: in _extend
verified_samples = verified_samples or self.check_each_sample(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <deeplake.core.linked_chunk_engine.LinkedChunkEngine object at 0x0000017D0ECDF160>
samples = [<deeplake.core.linked_sample.LinkedSample object at 0x0000017D0B320C10>, <deeplake.core.linked_sample.LinkedSample ob...nkedSample object at 0x0000017D0ECDCF40>, <deeplake.core.linked_sample.LinkedSample object at 0x0000017D0ECDC910>, ...]
verify = True, ignore_errors = False
def check_each_sample(self, samples, verify=True, ignore_errors=False):
link_creds = self.link_creds
verified_samples = []
skipped = []
for i, sample in enumerate(samples):
try:
if isinstance(sample, deeplake.core.tensor.Tensor) and sample.is_link:
sample = sample._linked_sample()
samples[i] = sample
elif (
not isinstance(sample, (LinkedSample, LinkedTiledSample))
and sample is not None
):
raise TypeError(
f"Expected LinkedSample or LinkedTiledSample, got {type(sample)} instead. Use deeplake.link() to link samples or deeplake.link_tiled() to link multiple images as tiles."
)
path, creds_key = get_path_creds_key(sample)
# verifies existence of creds_key
if verify:
link_creds.get_encoding(creds_key, path)
if sample is None or sample.path == "":
verified_samples.append(sample)
elif isinstance(sample, LinkedTiledSample):
verify_samples = self.verify and verify
sample.set_check_tile_shape(self.link_creds, verify_samples)
sample.set_sample_shape()
verified_samples.append(sample)
else:
try:
_verify = verify and self.verify
verified_samples.append(
read_linked_sample(
sample.path,
sample.creds_key,
self.link_creds,
verify=_verify,
)
)
except Exception as e:
> raise BadLinkError(sample.path, sample.creds_key) from e
E deeplake.util.exceptions.BadLinkError: Verification of link failed. Make sure that the link you are trying to append is correct.
E
E Failed link: https://picsum.photos/20/20
E creds_key used: None
E
E            No credentials have been provided to access the link. If the link is not publicly accessible, add access credentials to your dataset and use the appropriate creds_key.
deeplake\core\linked_chunk_engine.py:288: BadLinkError
The above exception was the direct cause of the following exception:
local_ds = Dataset(path='./hub_pytest/test_query/test_link_materialize-1-', tensors=['abc'])
num_workers = 1
@pytest.mark.slow
@pytest.mark.parametrize("num_workers", [1, 2])
def test_link_materialize(local_ds, num_workers):
with local_ds as ds:
ds.create_tensor("abc", htype="link[image]", sample_compression="jpg")
> ds.abc.extend(
[
(
deeplake.link("https://picsum.photos/20/20")
if i % 2
else deeplake.link("https://picsum.photos/10/10")
)
for i in range(20)
]
)
deeplake\core\query\test\test_query.py:467:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
deeplake\util\invalid_view_op.py:22: in inner
return callable(x, *args, **kwargs)
deeplake\core\tensor.py:363: in extend
self._extend(samples, progressbar=progressbar, ignore_errors=ignore_errors)
deeplake\core\tensor.py:313: in _extend
self.chunk_engine.extend(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <deeplake.core.linked_chunk_engine.LinkedChunkEngine object at 0x0000017D0ECDF160>
samples = [<deeplake.core.linked_sample.LinkedSample object at 0x0000017D0B320C10>, <deeplake.core.linked_sample.LinkedSample ob...nkedSample object at 0x0000017D0ECDCF40>, <deeplake.core.linked_sample.LinkedSample object at 0x0000017D0ECDC910>, ...]
progressbar = False
link_callback = <bound method Tensor._extend_links of Tensor(key='abc')>
pg_callback = None, ignore_errors = False, verified_samples = None
def extend(
self,
samples,
progressbar: bool = False,
link_callback: Optional[Callable] = None,
pg_callback=None,
ignore_errors: bool = False,
verified_samples=None,
):
try:
assert not (progressbar and pg_callback)
self.check_link_ready()
if not self.write_initialization_done:
self._write_initialization()
self.write_initialization_done = True
initial_autoflush = self.cache.autoflush
self.cache.autoflush = False
num_samples = self.tensor_length
if self.is_sequence:
self._extend_sequence(
samples,
progressbar,
link_callback,
ignore_errors,
verified_samples,
)
else:
verified_samples = self._extend(
samples,
progressbar,
pg_callback=pg_callback,
ignore_errors=ignore_errors,
verified_samples=verified_samples,
)
if link_callback:
verified_samples = self._prepare_samples_for_link_callback(
verified_samples
)
self._extend_link_callback(
link_callback,
verified_samples,
None,
progressbar,
ignore_errors,
)
self.cache.autoflush = initial_autoflush
self.cache.maybe_flush()
except Exception as e:
self.pop(list(range(num_samples, self.tensor_length)))
> raise SampleAppendError(self.name) from e
E deeplake.util.exceptions.SampleAppendError: Failed to append a sample to the tensor 'abc'. See more details in the traceback.
deeplake\core\chunk_engine.py:1258: SampleAppendError
Check failure on line 983 in deeplake/api/tests/test_api.py
github-actions / JUnit Test Report
test_api.test_dataset_rename[True-azure_ds_generator-azure_path-hub_cloud_dev_token]
azure.core.exceptions.ClientAuthenticationError: ERROR: The command failed with an unexpected error. Here is the traceback:
ERROR: [Errno 13] Permission denied: 'C:\\azureCli\\msal_token_cache.bin.lockfile'
Traceback (most recent call last):
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\knack\cli.py", line 233, in invoke
cmd_result = self.invocation.execute(args)
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\azure\cli\core\commands\__init__.py", line 664, in execute
raise ex
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\azure\cli\core\commands\__init__.py", line 731, in _run_jobs_serially
results.append(self._run_job(expanded_arg, cmd_copy))
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\azure\cli\core\commands\__init__.py", line 701, in _run_job
result = cmd_copy(params)
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\azure\cli\core\commands\__init__.py", line 334, in __call__
return self.handler(*args, **kwargs)
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\azure\cli\core\commands\command_operation.py", line 121, in handler
return op(**command_args)
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\azure\cli\command_modules\profile\custom.py", line 66, in get_access_token
creds, subscription, tenant = profile.get_raw_token(subscription=subscription, resource=resource, scopes=scopes,
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\azure\cli\core\_profile.py", line 383, in get_raw_token
sdk_token = credential.get_token(*scopes)
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\azure\cli\core\auth\msal_authentication.py", line 138, in get_token
result = self.acquire_token_for_client(scopes, **kwargs)
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\application.py", line 2171, in acquire_token_for_client
return _clean_up(self._acquire_token_silent_with_error(
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\application.py", line 1294, in _acquire_token_silent_with_error
result = self._acquire_token_silent_from_cache_and_possibly_refresh_it(
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\application.py", line 1431, in _acquire_token_silent_from_cache_and_possibly_refresh_it
result = self._acquire_token_for_client(
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\application.py", line 2190, in _acquire_token_for_client
response = client.obtain_token_for_client(
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\oauth2cli\oauth2.py", line 752, in obtain_token_for_client
return self._obtain_token("client_credentials", data=data, **kwargs)
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\oauth2cli\oidc.py", line 170, in _obtain_token
ret = super(Client, self)._obtain_token(grant_type, *args, **kwargs)
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\oauth2cli\oauth2.py", line 788, in _obtain_token
(on_obtaining_tokens or self.on_obtaining_tokens)({
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\application.py", line 682, in <lambda>
on_obtaining_tokens=lambda event: self.token_cache.add(dict(
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\token_cache.py", line 374, in add
super(SerializableTokenCache, self).add(event, **kwargs)
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\token_cache.py", line 180, in add
return self.__add(event, now=now)
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\token_cache.py", line 244, in __add
self.modify(self.CredentialType.ACCESS_TOKEN, at, at)
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal_extensions\token_cache.py", line 63, in modify
with CrossPlatLock(self._lock_location):
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal_extensions\cache_lock.py", line 57, in __enter__
if not self._try_to_create_lock_file():
File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal_extensions\cache_lock.py", line 43, in _try_to_create_lock_file
with open(self._lockpath, 'x'): # pylint: disable=unspecified-encoding
PermissionError: [Errno 13] Permission denied: 'C:\\azureCli\\msal_token_cache.bin.lockfile'
To check existing issues, please visit: https://github.com/Azure/azure-cli/issues
Raw output
command = 'az account get-access-token --output json --resource https://storage.azure.com'
timeout = 10
def _run_command(command: str, timeout: int) -> str:
# Ensure executable exists in PATH first. This avoids a subprocess call that would fail anyway.
if shutil.which(EXECUTABLE_NAME) is None:
raise CredentialUnavailableError(message=CLI_NOT_FOUND)
if sys.platform.startswith("win"):
args = ["cmd", "/c", command]
else:
args = ["/bin/sh", "-c", command]
try:
working_directory = get_safe_working_dir()
kwargs: Dict[str, Any] = {
"stderr": subprocess.PIPE,
"stdin": subprocess.DEVNULL,
"cwd": working_directory,
"universal_newlines": True,
"timeout": timeout,
"env": dict(os.environ, AZURE_CORE_NO_COLOR="true"),
}
> return subprocess.check_output(args, **kwargs)
c:\hostedtoolcache\windows\python\3.8.10\x64\lib\site-packages\azure\identity\_credentials\azure_cli.py:199:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
c:\hostedtoolcache\windows\python\3.8.10\x64\lib\subprocess.py:415: in check_output
return run(*popenargs, stdout=PIPE, timeout=timeout, check=True,
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
input = None, capture_output = False, timeout = 10, check = True
popenargs = (['cmd', '/c', 'az account get-access-token --output json --resource https://storage.azure.com'],)
kwargs = {'cwd': 'C:\\Windows', 'env': {'ACTIONS_ID_TOKEN_REQUEST_TOKEN': 'eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsIng1dCI6Ikh5cTRO...wiYXBpX2tleSI6IjU4Y0tLb1p6UE1BbThPU2RpbTRiZ2tBekhWekt1VUE3MFJpNTNyZUpKRTJuaiJ9.', ...}, 'stderr': -1, 'stdin': -3, ...}
process = <subprocess.Popen object at 0x0000023641F45F70>, stdout = ''
stderr = "ERROR: The command failed with an unexpected error. Here is the traceback:\nERROR: [Errno 13] Permission denied: 'C:\...\\\msal_token_cache.bin.lockfile'\nTo check existing issues, please visit: https://github.com/Azure/azure-cli/issues\n"
retcode = 1
def run(*popenargs,
input=None, capture_output=False, timeout=None, check=False, **kwargs):
"""Run command with arguments and return a CompletedProcess instance.
The returned instance will have attributes args, returncode, stdout and
stderr. By default, stdout and stderr are not captured, and those attributes
will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them.
If check is True and the exit code was non-zero, it raises a
CalledProcessError. The CalledProcessError object will have the return code
in the returncode attribute, and output & stderr attributes if those streams
were captured.
If timeout is given, and the process takes too long, a TimeoutExpired
exception will be raised.
There is an optional argument "input", allowing you to
pass bytes or a string to the subprocess's stdin. If you use this argument
you may not also use the Popen constructor's "stdin" argument, as
it will be used internally.
By default, all communication is in bytes, and therefore any "input" should
be bytes, and the stdout and stderr will be bytes. If in text mode, any
"input" should be a string, and stdout and stderr will be strings decoded
according to locale encoding, or by "encoding" if set. Text mode is
triggered by setting any of text, encoding, errors or universal_newlines.
The other arguments are the same as for the Popen constructor.
"""
if input is not None:
if kwargs.get('stdin') is not None:
raise ValueError('stdin and input arguments may not both be used.')
kwargs['stdin'] = PIPE
if capture_output:
if kwargs.get('stdout') is not None or kwargs.get('stderr') is not None:
raise ValueError('stdout and stderr arguments may not be used '
'with capture_output.')
kwargs['stdout'] = PIPE
kwargs['stderr'] = PIPE
with Popen(*popenargs, **kwargs) as process:
try:
stdout, stderr = process.communicate(input, timeout=timeout)
except TimeoutExpired as exc:
process.kill()
if _mswindows:
# Windows accumulates the output in a single blocking
# read() call run on child threads, with the timeout
# being done in a join() on those threads. communicate()
# _after_ kill() is required to collect that and add it
# to the exception.
exc.stdout, exc.stderr = process.communicate()
else:
# POSIX _communicate already populated the output so
# far into the TimeoutExpired exception.
process.wait()
raise
except: # Including KeyboardInterrupt, communicate handled that.
process.kill()
# We don't call process.wait() as .__exit__ does that for us.
raise
retcode = process.poll()
if check and retcode:
> raise CalledProcessError(retcode, process.args,
output=stdout, stderr=stderr)
E subprocess.CalledProcessError: Command '['cmd', '/c', 'az account get-access-token --output json --resource https://storage.azure.com']' returned non-zero exit status 1.
c:\hostedtoolcache\windows\python\3.8.10\x64\lib\subprocess.py:516: CalledProcessError
The above exception was the direct cause of the following exception:
ds_generator = <function azure_ds_generator.<locals>.generate_azure_ds at 0x0000023643375790>
path = 'az://activeloopgen2/deeplake-tests/tmp9be1/test_api/test_dataset_rename-True-azure_ds_generator-azure_path-hub_cloud_dev_token-'
hub_token = None, convert_to_pathlib = True
@pytest.mark.parametrize(
("ds_generator", "path", "hub_token"),
[
("local_ds_generator", "local_path", "hub_cloud_dev_token"),
pytest.param(
"s3_ds_generator", "s3_path", "hub_cloud_dev_token", marks=pytest.mark.slow
),
pytest.param(
"gcs_ds_generator",
"gcs_path",
"hub_cloud_dev_token",
marks=pytest.mark.slow,
),
pytest.param(
"azure_ds_generator",
"azure_path",
"hub_cloud_dev_token",
marks=pytest.mark.slow,
),
pytest.param(
"hub_cloud_ds_generator",
"hub_cloud_path",
"hub_cloud_dev_token",
marks=pytest.mark.slow,
),
],
indirect=True,
)
@pytest.mark.parametrize("convert_to_pathlib", [True, False])
def test_dataset_rename(ds_generator, path, hub_token, convert_to_pathlib):
ds = ds_generator()
ds.create_tensor("abc")
ds.abc.append([1, 2, 3, 4])
new_path = "_".join([path, "renamed"])
ds.path = convert_string_to_pathlib_if_needed(ds.path, convert_to_pathlib)
new_path = convert_string_to_pathlib_if_needed(new_path, convert_to_pathlib)
with pytest.raises(RenameError):
ds.rename("wrongfolder/new_ds")
if str(ds.path).startswith("hub://"):
with pytest.raises(BadRequestException):
ds.rename(ds.path)
else:
with pytest.raises(PathNotEmptyException):
ds.rename(ds.path)
ds = deeplake.rename(ds.path, new_path, token=hub_token)
assert ds.path == str(new_path)
assert_array_equal(ds.abc.numpy(), np.array([[1, 2, 3, 4]]))
ds = deeplake.load(new_path, token=hub_token)
assert_array_equal(ds.abc.numpy(), np.array([[1, 2, 3, 4]]))
> deeplake.delete(new_path, token=hub_token)
deeplake\api\tests\test_api.py:983:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
deeplake\util\spinner.py:151: in inner
return func(*args, **kwargs)
deeplake\api\dataset.py:905: in delete
ds.delete(large_ok=large_ok)
deeplake\util\invalid_view_op.py:22: in inner
return callable(x, *args, **kwargs)
deeplake\core\dataset\dataset.py:2664: in delete
self.storage.clear()
deeplake\core\storage\lru_cache.py:373: in clear
self.next_storage.clear(prefix=prefix)
deeplake\core\storage\azure.py:156: in clear
self.container_client.delete_blobs(*batch)
c:\hostedtoolcache\windows\python\3.8.10\x64\lib\site-packages\azure\core\tracing\decorator.py:78: in wrapper_use_tracer
return func(*args, **kwargs)
c:\hostedtoolcache\windows\python\3.8.10\x64\lib\site-packages\azure\storage\blob\_container_client.py:1535: in delete_blobs
return self._batch_send(*reqs, **options)
c:\hostedtoolcache\windows\python\3.8.10\x64\lib\site-packages\azure\storage\blob\_shared\base_client.py:297: in _batch_send
Pipeline._prepare_multipart_mixed_request(request) # pylint: disable=protected-access
c:\hostedtoolcache\windows\python\3.8.10\x64\lib\site-packages\azure\core\pipeline\_base.py:206: in _prepare_multipart_mixed_request
[ # pylint: disable=expression-not-assigned, unnecessary-comprehension
c:\hostedtoolcache\windows\python\3.8.10\x64\lib\site-packages\azure\core\pipeline\_base.py:206: in <listcomp>
[ # pylint: disable=expression-not-assigned, unnecessary-comprehension
c:\hostedtoolcache\windows\python\3.8.10\x64\lib\concurrent\futures\_base.py:619: in result_iterator
yield fs.pop().result()
c:\hostedtoolcache\windows\python\3.8.10\x64\lib\concurrent\futures\_base.py:437: in result
return self.__get_result()
c:\hostedtoolcache\windows\python\3.8.10\x64\lib\concurrent\futures\_base.py:389: in __get_result
raise self._exception
c:\hostedtoolcache\windows\python\3.8.10\x64\lib\concurrent\futures\thread.py:57: in run
result = self.fn(*self.args, **self.kwargs)
c:\hostedtoolcache\windows\python\3.8.10\x64\lib\site-packages\azure\core\pipeline\_base.py:202: in prepare_requests
_await_result(policy.on_request, pipeline_request)
c:\hostedtoolcache\windows\python\3.8.10\x64\lib\site-packages\azure\core\pipeline\_tools.py:49: in await_result
result = func(*args, **kwargs)
c:\hostedtoolcache\windows\python\3.8.10\x64\lib\site-packages\azure\core\pipeline\policies\_authentication.py:99: in on_request
self._token = self._credential.get_token(*self._scopes)
c:\hostedtoolcache\windows\python\3.8.10\x64\lib\site-packages\azure\identity\_credentials\default.py:219: in get_token
token = self._successful_credential.get_token(*scopes, claims=claims, tenant_id=tenant_id, **kwargs)
c:\hostedtoolcache\windows\python\3.8.10\x64\lib\site-packages\azure\identity\_internal\decorators.py:33: in wrapper
token = fn(*args, **kwargs)
c:\hostedtoolcache\windows\python\3.8.10\x64\lib\site-packages\azure\identity\_credentials\azure_cli.py:112: in get_token
output = _run_command(command, self._process_timeout)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
command = 'az account get-access-token --output json --resource https://storage.azure.com'
timeout = 10
def _run_command(command: str, timeout: int) -> str:
# Ensure executable exists in PATH first. This avoids a subprocess call that would fail anyway.
if shutil.which(EXECUTABLE_NAME) is None:
raise CredentialUnavailableError(message=CLI_NOT_FOUND)
if sys.platform.startswith("win"):
args = ["cmd", "/c", command]
else:
args = ["/bin/sh", "-c", command]
try:
working_directory = get_safe_working_dir()
kwargs: Dict[str, Any] = {
"stderr": subprocess.PIPE,
"stdin": subprocess.DEVNULL,
"cwd": working_directory,
"universal_newlines": True,
"timeout": timeout,
"env": dict(os.environ, AZURE_CORE_NO_COLOR="true"),
}
return subprocess.check_output(args, **kwargs)
except subprocess.CalledProcessError as ex:
# non-zero return from shell
# Fallback check in case the executable is not found while executing subprocess.
if ex.returncode == 127 or ex.stderr.startswith("'az' is not recognized"):
raise CredentialUnavailableError(message=CLI_NOT_FOUND) from ex
if ("az login" in ex.stderr or "az account set" in ex.stderr) and "AADSTS" not in ex.stderr:
raise CredentialUnavailableError(message=NOT_LOGGED_IN) from ex
# return code is from the CLI -> propagate its output
if ex.stderr:
message = sanitize_output(ex.stderr)
else:
message = "Failed to invoke Azure CLI"
if within_dac.get():
raise CredentialUnavailableError(message=message) from ex
> raise ClientAuthenticationError(message=message) from ex
E azure.core.exceptions.ClientAuthenticationError: ERROR: The command failed with an unexpected error. Here is the traceback:
E ERROR: [Errno 13] Permission denied: 'C:\\azureCli\\msal_token_cache.bin.lockfile'
E Traceback (most recent call last):
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\knack\cli.py", line 233, in invoke
E cmd_result = self.invocation.execute(args)
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\azure\cli\core\commands\__init__.py", line 664, in execute
E raise ex
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\azure\cli\core\commands\__init__.py", line 731, in _run_jobs_serially
E results.append(self._run_job(expanded_arg, cmd_copy))
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\azure\cli\core\commands\__init__.py", line 701, in _run_job
E result = cmd_copy(params)
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\azure\cli\core\commands\__init__.py", line 334, in __call__
E return self.handler(*args, **kwargs)
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\azure\cli\core\commands\command_operation.py", line 121, in handler
E return op(**command_args)
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\azure\cli\command_modules\profile\custom.py", line 66, in get_access_token
E creds, subscription, tenant = profile.get_raw_token(subscription=subscription, resource=resource, scopes=scopes,
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\azure\cli\core\_profile.py", line 383, in get_raw_token
E sdk_token = credential.get_token(*scopes)
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\azure\cli\core\auth\msal_authentication.py", line 138, in get_token
E result = self.acquire_token_for_client(scopes, **kwargs)
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\application.py", line 2171, in acquire_token_for_client
E return _clean_up(self._acquire_token_silent_with_error(
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\application.py", line 1294, in _acquire_token_silent_with_error
E result = self._acquire_token_silent_from_cache_and_possibly_refresh_it(
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\application.py", line 1431, in _acquire_token_silent_from_cache_and_possibly_refresh_it
E result = self._acquire_token_for_client(
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\application.py", line 2190, in _acquire_token_for_client
E response = client.obtain_token_for_client(
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\oauth2cli\oauth2.py", line 752, in obtain_token_for_client
E return self._obtain_token("client_credentials", data=data, **kwargs)
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\oauth2cli\oidc.py", line 170, in _obtain_token
E ret = super(Client, self)._obtain_token(grant_type, *args, **kwargs)
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\oauth2cli\oauth2.py", line 788, in _obtain_token
E (on_obtaining_tokens or self.on_obtaining_tokens)({
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\application.py", line 682, in <lambda>
E on_obtaining_tokens=lambda event: self.token_cache.add(dict(
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\token_cache.py", line 374, in add
E super(SerializableTokenCache, self).add(event, **kwargs)
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\token_cache.py", line 180, in add
E return self.__add(event, now=now)
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal\token_cache.py", line 244, in __add
E self.modify(self.CredentialType.ACCESS_TOKEN, at, at)
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal_extensions\token_cache.py", line 63, in modify
E with CrossPlatLock(self._lock_location):
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal_extensions\cache_lock.py", line 57, in __enter__
E if not self._try_to_create_lock_file():
E File "C:\hostedtoolcache\windows\Python\3.8.10\x64\lib\site-packages\msal_extensions\cache_lock.py", line 43, in _try_to_create_lock_file
E with open(self._lockpath, 'x'): # pylint: disable=unspecified-encoding
E PermissionError: [Errno 13] Permission denied: 'C:\\azureCli\\msal_token_cache.bin.lockfile'
E To check existing issues, please visit: https://github.com/Azure/azure-cli/issues
c:\hostedtoolcache\windows\python\3.8.10\x64\lib\site-packages\azure\identity\_credentials\azure_cli.py:215: ClientAuthenticationError
Loading