Add __repr__ to FS classes #323

Merged
merged 2 commits on Aug 31, 2023
7 changes: 6 additions & 1 deletion pfio/v2/fs.py
@@ -8,7 +8,7 @@
from abc import abstractmethod
from io import IOBase
from types import TracebackType
from typing import Any, Callable, Iterator, Optional, Type, Union
from typing import Any, Callable, Dict, Iterator, Optional, Type, Union
from urllib.parse import urlparse

from deprecation import deprecated
@@ -474,3 +474,8 @@ def lazify(init_func, lazy_init=True, recreate_on_fork=True):

'''
pass


def format_repr(cls: Type, data: Dict[str, Any]) -> str:
data_str = ", ".join(f"{name}={value!r}" for name, value in data.items())
return f"{cls.__module__}.{cls.__name__}({data_str})"
10 changes: 9 additions & 1 deletion pfio/v2/hdfs.py
@@ -16,7 +16,7 @@
except ImportError:
has_hdfs = False

from .fs import FS, FileStat, ForkedError
from .fs import FS, FileStat, ForkedError, format_repr

logger = logging.getLogger(__name__)
logger.addHandler(logging.StreamHandler())
@@ -259,6 +259,14 @@ def __getstate__(self):
def __setstate__(self, state):
self.__dict__ = state

def __repr__(self):
return format_repr(
Hdfs,
{
"cwd": self._cwd,
},
)

def _get_principal_name(self):
# get the default principal name from `klist` cache
principal_name = _get_principal_name_from_klist()
10 changes: 9 additions & 1 deletion pfio/v2/local.py
@@ -4,7 +4,7 @@
import shutil
from typing import Optional

from .fs import FS, FileStat
from .fs import FS, FileStat, format_repr


class LocalFileStat(FileStat):
@@ -75,6 +75,14 @@ def cwd(self, value: str):
def _reset(self):
pass

def __repr__(self):
return format_repr(
Local,
{
"cwd": self._cwd,
},
)

def open(self, file_path, mode='r',
buffering=-1, encoding=None, errors=None,
newline=None, closefd=True, opener=None):
12 changes: 11 additions & 1 deletion pfio/v2/s3.py
@@ -7,7 +7,7 @@
import boto3
from botocore.exceptions import ClientError

from .fs import FS, FileStat
from .fs import FS, FileStat, format_repr

DEFAULT_MAX_BUFFER_SIZE = 16 * 1024 * 1024

@@ -383,6 +383,16 @@ def __getstate__(self):
def __setstate__(self, state):
self.__dict__ = state

def __repr__(self) -> str:
return format_repr(
S3,
{
"bucket": self.bucket,
"prefix": self.cwd,
"endpoint": self.endpoint
},
)

def open(self, path, mode='r', **kwargs):
'''Opens an object accessor for read or write

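Usage illustration (not part of the change set): with the S3 __repr__ above, printing an S3 handle shows its bucket, cwd prefix, and endpoint. The bucket and prefix below are placeholders, and the exact prefix formatting may differ:

from pfio.v2 import from_url

# Assumes credentials and a reachable bucket; names are hypothetical.
with from_url("s3://example-bucket/base") as s3:
    print(repr(s3))
    # -> something like pfio.v2.s3.S3(bucket='example-bucket', prefix='base/', endpoint=None)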
12 changes: 11 additions & 1 deletion pfio/v2/zip.py
@@ -7,7 +7,7 @@

from pfio.cache.sparse_file import MPCachedWrapper

from .fs import FS, FileStat
from .fs import FS, FileStat, format_repr

logger = logging.getLogger(__name__)
logger.addHandler(logging.StreamHandler())
@@ -133,6 +133,16 @@ def __getstate__(self):
def __setstate__(self, state):
self.__dict__ = state

def __repr__(self):
return format_repr(
Zip,
{
"file_path": self.file_path,
"mode": self.mode,
"backend": self.backend,
},
)

def open(self, file_path, mode='r',
buffering=-1, encoding=None, errors=None,
newline=None, closefd=True, opener=None):
5 changes: 5 additions & 0 deletions tests/v2_tests/test_hdfs.py
@@ -50,6 +50,11 @@ def tearDown(self):
self.hdfs.remove(self.dirname, recursive=True)
self.hdfs.close()

def test_repr_str(self):
with Hdfs(self.dirname) as fs:
repr(fs)
str(fs)

def test_read_non_exist(self):
non_exist_file = "non_exist_file.txt"

5 changes: 5 additions & 0 deletions tests/v2_tests/test_local.py
@@ -21,6 +21,11 @@ def setUp(self):
def tearDown(self):
self.testdir.cleanup()

def test_repr_str(self):
with Local(self.testdir.name) as fs:
str(fs)
repr(fs)

def test_read_string(self):

with Local() as fs:
6 changes: 6 additions & 0 deletions tests/v2_tests/test_s3.py
@@ -48,6 +48,12 @@ def test_s3_init(s3_fixture):
assert s3.endpoint is None


def test_s3_repr_str(s3_fixture):
with from_url('s3://test-bucket/base', **s3_fixture.aws_kwargs) as s3:
repr(s3)
str(s3)


def test_s3_files(s3_fixture):
with from_url('s3://test-bucket/base',
**s3_fixture.aws_kwargs) as s3:
5 changes: 5 additions & 0 deletions tests/v2_tests/test_zip.py
@@ -114,6 +114,11 @@ def tearDown(self):
self.tmpdir.cleanup()
local.remove(self.zip_file_path)

def test_repr_str(self):
with local.open_zip(self.zip_file_path) as z:
repr(z)
str(z)

def test_read_bytes(self):
with local.open_zip(os.path.abspath(self.zip_file_path)) as z:
with z.open(self.zipped_file_path, "rb") as zipped_file: