Merge pull request #2768 from activeloopai/fix_login_references
Removed use of the term "log in" and references to the CLI "activeloop login"
nvoxland-al authored Feb 21, 2024
2 parents d011617 + 05bd882 commit 4e1eb75
Showing 5 changed files with 22 additions and 33 deletions.
22 changes: 11 additions & 11 deletions deeplake/api/dataset.py
@@ -121,7 +121,7 @@ def init(
Args:
path (str, pathlib.Path): - The full path to the dataset. Can be:
- a Deep Lake cloud path of the form ``hub://username/datasetname``. To write to Deep Lake cloud datasets, ensure that you are logged in to Deep Lake (use 'activeloop login' from command line)
- a Deep Lake cloud path of the form ``hub://username/datasetname``. To write to Deep Lake cloud datasets, ensure that you are authenticated to Deep Lake (pass in a token using the 'token' parameter).
- an s3 path of the form ``s3://bucketname/path/to/dataset``. Credentials are required in either the environment or passed to the creds argument.
- a local file system path of the form ``./path/to/dataset`` or ``~/path/to/dataset`` or ``path/to/dataset``.
- a memory path of the form ``mem://path/to/dataset`` which doesn't save the dataset but keeps it in memory instead. Should be used only for testing as it does not persist.
@@ -183,7 +183,7 @@ def init(
Raises:
AgreementError: When agreement is rejected
UserNotLoggedInException: When user is not logged in
UserNotLoggedInException: When user is not authenticated
InvalidTokenException: If the specified token is invalid
TokenPermissionError: When there are permission or other errors related to token
CheckoutError: If version address specified in the path cannot be found
@@ -404,7 +404,7 @@ def empty(
Raises:
DatasetHandlerError: If a Dataset already exists at the given path and overwrite is False.
UserNotLoggedInException: When user is not logged in
UserNotLoggedInException: When user is not authenticated
InvalidTokenException: If the specified token is invalid
TokenPermissionError: When there are permission or other errors related to token
ValueError: If version is specified in the path
@@ -522,7 +522,7 @@ def load(
Args:
path (str, pathlib.Path): - The full path to the dataset. Can be:
- a Deep Lake cloud path of the form ``hub://username/datasetname``. To write to Deep Lake cloud datasets, ensure that you are logged in to Deep Lake (use 'activeloop login' from command line)
- a Deep Lake cloud path of the form ``hub://username/datasetname``. To write to Deep Lake cloud datasets, ensure that you are authenticated to Deep Lake (pass in a token using the 'token' parameter).
- an s3 path of the form ``s3://bucketname/path/to/dataset``. Credentials are required in either the environment or passed to the creds argument.
- a local file system path of the form ``./path/to/dataset`` or ``~/path/to/dataset`` or ``path/to/dataset``.
- a memory path of the form ``mem://path/to/dataset`` which doesn't save the dataset but keeps it in memory instead. Should be used only for testing as it does not persist.
@@ -579,7 +579,7 @@ def load(
Raises:
DatasetHandlerError: If a Dataset does not exist at the given path.
AgreementError: When agreement is rejected
UserNotLoggedInException: When user is not logged in
UserNotLoggedInException: When user is not authenticated
InvalidTokenException: If the specified token is invalid
TokenPermissionError: When there are permission or other errors related to token
CheckoutError: If version address specified in the path cannot be found
@@ -834,7 +834,7 @@ def delete(
Raises:
DatasetHandlerError: If a Dataset does not exist at the given path and ``force = False``.
UserNotLoggedInException: When user is not logged in.
UserNotLoggedInException: When user is not authenticated.
NotImplementedError: When attempting to delete a managed view.
ValueError: If version is specified in the path
@@ -1511,7 +1511,7 @@ def ingest_coco(
annotation_files (str, pathlib.Path, List[str]): Path to JSON annotation files in COCO format.
dest (str, pathlib.Path):
- The full path to the dataset. Can be:
- a Deep Lake cloud path of the form ``hub://org_id/datasetname``. To write to Deep Lake cloud datasets, ensure that you are logged in to Deep Lake (use 'activeloop login' from command line), or pass in a token using the 'token' parameter.
- a Deep Lake cloud path of the form ``hub://org_id/datasetname``. To write to Deep Lake cloud datasets, ensure that you are authenticated to Deep Lake (pass in a token using the 'token' parameter).
- an s3 path of the form ``s3://bucketname/path/to/dataset``. Credentials are required in either the environment or passed to the creds argument.
- a local file system path of the form ``./path/to/dataset`` or ``~/path/to/dataset`` or ``path/to/dataset``.
- a memory path of the form ``mem://path/to/dataset`` which doesn't save the dataset but keeps it in memory instead. Should be used only for testing as it does not persist.
@@ -1627,7 +1627,7 @@ def ingest_yolo(
data_directory (str, pathlib.Path): The path to the directory containing the data (image files and annotation files; see the 'annotations_directory' input for specifying annotations in a separate directory).
dest (str, pathlib.Path):
- The full path to the dataset. Can be:
- a Deep Lake cloud path of the form ``hub://org_id/datasetname``. To write to Deep Lake cloud datasets, ensure that you are logged in to Deep Lake (use 'activeloop login' from command line), or pass in a token using the 'token' parameter.
- a Deep Lake cloud path of the form ``hub://org_id/datasetname``. To write to Deep Lake cloud datasets, ensure that you are authenticated to Deep Lake (pass in a token using the 'token' parameter).
- an s3 path of the form ``s3://bucketname/path/to/dataset``. Credentials are required in either the environment or passed to the creds argument.
- a local file system path of the form ``./path/to/dataset`` or ``~/path/to/dataset`` or ``path/to/dataset``.
- a memory path of the form ``mem://path/to/dataset`` which doesn't save the dataset but keeps it in memory instead. Should be used only for testing as it does not persist.
@@ -1730,7 +1730,7 @@ def ingest_classification(
Args:
src (str, pathlib.Path): Local path to where the unstructured dataset of images is stored or path to csv file.
dest (str, pathlib.Path): - The full path to the dataset. Can be:
- a Deep Lake cloud path of the form ``hub://org_id/datasetname``. To write to Deep Lake cloud datasets, ensure that you are logged in to Deep Lake (use 'activeloop login' from command line)
- a Deep Lake cloud path of the form ``hub://org_id/datasetname``. To write to Deep Lake cloud datasets, ensure that you are authenticated to Deep Lake (pass in a token using the 'token' parameter).
- an s3 path of the form ``s3://bucketname/path/to/dataset``. Credentials are required in either the environment or passed to the creds argument.
- a local file system path of the form ``./path/to/dataset`` or ``~/path/to/dataset`` or ``path/to/dataset``.
- a memory path of the form ``mem://path/to/dataset`` which doesn't save the dataset but keeps it in memory instead. Should be used only for testing as it does not persist.
@@ -1885,7 +1885,7 @@ def ingest_kaggle(
tag (str): Kaggle dataset tag. Example: ``"coloradokb/dandelionimages"`` points to https://www.kaggle.com/coloradokb/dandelionimages
src (str, pathlib.Path): Local path to where the raw Kaggle dataset will be downloaded to.
dest (str, pathlib.Path): - The full path to the dataset. Can be:
- a Deep Lake cloud path of the form ``hub://username/datasetname``. To write to Deep Lake cloud datasets, ensure that you are logged in to Deep Lake (use 'activeloop login' from command line)
- a Deep Lake cloud path of the form ``hub://username/datasetname``. To write to Deep Lake cloud datasets, ensure that you are authenticated to Deep Lake (pass in a token using the 'token' parameter).
- an s3 path of the form ``s3://bucketname/path/to/dataset``. Credentials are required in either the environment or passed to the creds argument.
- a local file system path of the form ``./path/to/dataset`` or ``~/path/to/dataset`` or ``path/to/dataset``.
- a memory path of the form ``mem://path/to/dataset`` which doesn't save the dataset but keeps it in memory instead. Should be used only for testing as it does not persist.
@@ -2010,7 +2010,7 @@ def ingest_dataframe(
src (pd.DataFrame): The pandas dataframe to be converted.
dest (str, pathlib.Path):
- A Dataset or The full path to the dataset. Can be:
- a Deep Lake cloud path of the form ``hub://username/datasetname``. To write to Deep Lake cloud datasets, ensure that you are logged in to Deep Lake (use 'activeloop login' from command line)
- a Deep Lake cloud path of the form ``hub://username/datasetname``. To write to Deep Lake cloud datasets, ensure that you are authenticated to Deep Lake (pass in a token using the 'token' parameter).
- an s3 path of the form ``s3://bucketname/path/to/dataset``. Credentials are required in either the environment or passed to the creds argument.
- a local file system path of the form ``./path/to/dataset`` or ``~/path/to/dataset`` or ``path/to/dataset``.
- a memory path of the form ``mem://path/to/dataset`` which doesn't save the dataset but keeps it in memory instead. Should be used only for testing as it does not persist.
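Per the updated docstrings above, authentication to Deep Lake cloud datasets flows through the 'token' parameter rather than the removed 'activeloop login' CLI step. A minimal sketch of that flow; the org/dataset names and the token value are placeholders, not real resources:

import deeplake

API_TOKEN = "<API token created at https://app.activeloop.ai>"

# Create a new cloud dataset, authenticating via the 'token' parameter.
ds = deeplake.empty("hub://my_org/my_dataset", token=API_TOKEN)

# Load an existing cloud dataset the same way.
ds = deeplake.load("hub://my_org/my_dataset", token=API_TOKEN)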
2 changes: 1 addition & 1 deletion deeplake/auto/unstructured/image_classification.py
@@ -52,7 +52,7 @@ def __init__(self, source: str):
Args:
source (str): The full path to the dataset.
Can be a Deep Lake cloud path of the form hub://username/datasetname. To write to Deep Lake cloud datasets, ensure that you are logged in to Deep Lake (use 'activeloop login' from command line)
Can be a Deep Lake cloud path of the form hub://username/datasetname. To write to Deep Lake cloud datasets, ensure that you are authenticated to Deep Lake (pass in a token using the 'token' parameter).
Can be a s3 path of the form s3://bucketname/path/to/dataset. Credentials are required in either the environment or passed to the creds argument.
Can be a local file system path of the form ./path/to/dataset or ~/path/to/dataset or path/to/dataset.
Can be a memory path of the form mem://path/to/dataset which doesn't save the dataset but keeps it in memory instead. Should be used only for testing as it does not persist.
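The same token-based pattern applies to the ingestion helpers whose docstrings change above. A sketch for ingest_classification, assuming a local source folder laid out with one sub-directory per class; the paths, org name, and dataset name are placeholders:

import deeplake

ds = deeplake.ingest_classification(
    src="./animals",              # local unstructured image-classification data
    dest="hub://my_org/animals",  # Deep Lake cloud destination
    token="<API token>",          # replaces the removed 'activeloop login' step
)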
6 changes: 3 additions & 3 deletions deeplake/client/client.py
@@ -226,11 +226,11 @@ def get_dataset_credentials(
tuple: containing full url to dataset, credentials, mode and expiration time respectively.
Raises:
UserNotLoggedInException: When user is not logged in
UserNotLoggedInException: When user is not authenticated
InvalidTokenException: If the specified token is invalid
TokenPermissionError: when there are permission or other errors related to token
AgreementNotAcceptedError: when user has not accepted the agreement
NotLoggedInAgreementError: when user is not logged in and dataset has agreement which needs to be signed
NotLoggedInAgreementError: when user is not authenticated and dataset has agreement which needs to be signed
"""
import json

@@ -397,7 +397,7 @@ def rename_dataset_entry(self, username, old_name, new_name):
)

def get_user_organizations(self):
"""Get list of user organizations from the backend. If user is not logged in, returns ['public'].
"""Get list of user organizations from the backend. If user is not authenticated, returns ['public'].
Returns:
list: user/organization names
2 changes: 1 addition & 1 deletion deeplake/client/utils.py
@@ -82,7 +82,7 @@ def check_response_status(response: requests.Response):


def get_user_name() -> str:
"""Returns the name of the user currently logged into Hub."""
"""Returns the name of the user currently authenticated."""
path = REPORTING_CONFIG_FILE_PATH
try:
with open(path, "r") as f:
23 changes: 6 additions & 17 deletions deeplake/util/exceptions.py
@@ -200,11 +200,9 @@ def __init__(
class UserNotLoggedInException(Exception):
def __init__(self):
message = (
"You are not logged in and an API token was not found. To complete the operation, you can\n"
"1. Login with your username and password using the `activeloop login` CLI command.\n"
"2. Create an API token at https://app.activeloop.ai and use it in any of the following ways:\n"
"No API token found. To complete the operation, you must "
"create an API token at https://app.activeloop.ai and use it in any of the following ways:\n"
" - Set the environment variable `ACTIVELOOP_TOKEN` to the token value.\n"
" - Use the CLI command `activeloop login -t <token>`.\n"
" - Pass the API token to the `token` parameter of this function.\n"
"Visit https://docs.activeloop.ai/getting-started/using-activeloop-storage for more information."
)
@@ -261,7 +259,7 @@ def __init__(
):
extra = ""
if path.startswith("hub://"):
extra = "Since the path is a `hub://` dataset, if you believe you should have write permissions, try running `activeloop login`."
extra = "Since the path is a `hub://` dataset, if you believe you should have write permissions, ensure you are using the 'token' parameter or the ACTIVELOOP_TOKEN environment variable."

message = f"Dataset at '{path}' doesn't exist, and you have no permissions to create one there. Maybe a typo? {extra}"
super().__init__(message)
@@ -807,20 +805,11 @@ def __init__(self, agreements=None):
class NotLoggedInAgreementError(AgreementError):
def __init__(self):
super().__init__(
"You are not logged in. Please log in to accept the agreement."
)


class NotLoggedInError(AgreementError):
def __init__(self, msg=None):
msg = msg or (
"This dataset includes an agreement that needs to be accepted before you can use it.\n"
"You need to be signed in to accept this agreement.\n"
"You can login using 'activeloop login' on the command line if you have an account or using 'activeloop register' if you don't have one."
"This dataset requires acceptance of a user agreement. "
"You must set an API token in order to agree to the license terms. "
"Visit https://docs.activeloop.ai/getting-started/using-activeloop-storage for more information."
)

super().__init__(msg)


class RenameError(Exception):
def __init__(self, msg="Only name of the dataset can be different in new path."):
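The reworked exception messages point to two ways of supplying credentials: the ACTIVELOOP_TOKEN environment variable or the 'token' argument. A sketch of the environment-variable route with the exception handled explicitly; the dataset path and token value are placeholders:

import os
import deeplake
from deeplake.util.exceptions import UserNotLoggedInException

os.environ["ACTIVELOOP_TOKEN"] = "<API token>"  # alternative to passing token=...

try:
    ds = deeplake.load("hub://my_org/private_dataset")
except UserNotLoggedInException:
    # Raised when no valid token is found in the environment or arguments.
    print("Create an API token at https://app.activeloop.ai and retry.")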
