Merge pull request #27 from dask/direct_info
Direct info
martindurant committed Apr 11, 2016
2 parents 29ba6ce + 0e0cf39 commit 82eef34
Showing 2 changed files with 11 additions and 9 deletions.
14 changes: 8 additions & 6 deletions s3fs/core.py
@@ -9,7 +9,7 @@
 import boto3
 import boto3.compat
 import boto3.s3.transfer as trans
-from botocore.exceptions import ClientError
+from botocore.exceptions import ClientError, ParamValidationError
 from botocore.client import Config
 
 from .utils import read_block
@@ -415,7 +415,7 @@ def copy(self, path1, path2):
         try:
             self.s3.copy_object(Bucket=buc2, Key=key2,
                                 CopySource='/'.join([buc1, key1]))
-        except ClientError:
+        except (ClientError, ParamValidationError):
             raise IOError('Copy failed', (path1, path2))
         self._ls(path2, refresh=True)
 
@@ -469,7 +469,7 @@ def touch(self, path):
         try:
             self.s3.create_bucket(Bucket=bucket)
             self._ls("", refresh=True)
-        except ClientError:
+        except (ClientError, ParamValidationError):
             raise IOError('Bucket create failed', path)
 
     def read_block(self, fn, offset, length, delimiter=None):
@@ -588,7 +588,7 @@ def __init__(self, s3, path, mode='rb', block_size=5 * 2 ** 20):
                 raise ValueError('Block size must be >=5MB')
             try:
                 self.mpu = s3.s3.create_multipart_upload(Bucket=bucket, Key=key)
-            except ClientError:
+            except (ClientError, ParamValidationError):
                 raise IOError('Open for write failed', path)
             self.forced = False
             if mode == 'ab' and s3.exists(path):
@@ -605,12 +605,14 @@ def __init__(self, s3, path, mode='rb', block_size=5 * 2 ** 20):
         else:
             try:
                 self.size = self.info()['Size']
-            except ClientError:
+            except (ClientError, ParamValidationError):
                 raise IOError("File not accessible", path)
 
     def info(self):
         """ File information about this path """
-        return self.s3.info(self.path)
+        info = self.s3.s3.head_object(Bucket=self.bucket, Key=self.key)
+        info['Size'] = info.get('ContentLength')
+        return info
 
     def tell(self):
         """ Current file location """
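The change is twofold: every boto call that previously caught only ClientError now also catches ParamValidationError, which botocore raises client-side for invalid arguments (such as a malformed bucket name) before any request is sent, and S3File.info() now asks S3 directly via head_object rather than going through the cached listing on the filesystem object. A minimal sketch of the same pattern against a bare boto3 client, with placeholder bucket and key names that are not part of this commit:

    import boto3
    from botocore.exceptions import ClientError, ParamValidationError

    client = boto3.client('s3')

    def head_info(bucket, key):
        """Fetch object metadata directly, mirroring the reworked S3File.info()."""
        try:
            info = client.head_object(Bucket=bucket, Key=key)
        except (ClientError, ParamValidationError):
            # ClientError: the request reached S3 and failed (e.g. missing key).
            # ParamValidationError: boto3 rejected the arguments locally.
            raise IOError('File not accessible', (bucket, key))
        info['Size'] = info.get('ContentLength')  # normalise to the key s3fs exposes
        return info

    # Hypothetical usage:
    # head_info('my-bucket', '2014-01-01.csv')['Size']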
6 changes: 3 additions & 3 deletions s3fs/tests/test_s3fs.py
@@ -316,9 +316,6 @@ def test_errors(s3):
     with pytest.raises((IOError, OSError)):
         s3.mv(test_bucket_name+'/tmp/test/shfoshf/x', 'tmp/test/shfoshf/y')
 
-    #with pytest.raises((IOError, OSError)):
-    #    s3.open('x', 'wb')
-
     with pytest.raises((IOError, OSError)):
         s3.open('x', 'rb')
 
@@ -334,6 +331,9 @@ def test_errors(s3):
         f.close()
         f.read()
 
+    with pytest.raises((IOError, OSError)):
+        s3.mkdir('/')
+
 
 def test_read_small(s3):
     fn = test_bucket_name+'/2014-01-01.csv'
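The added test exercises the new error path end to end: '/' is not a valid bucket name, so boto3 rejects the underlying create_bucket call with ParamValidationError before anything is sent to S3, and the widened except clause turns that into the IOError/OSError the test expects. Roughly, and only as an illustration (the real tests obtain the filesystem from the s3 fixture rather than constructing it inline):

    import pytest
    from s3fs import S3FileSystem

    def check_invalid_bucket_name():
        s3 = S3FileSystem()  # illustrative setup; the test suite supplies this via a fixture
        with pytest.raises((IOError, OSError)):
            # mkdir on a bucket path leads to create_bucket; '/' fails
            # parameter validation and is re-raised as IOError.
            s3.mkdir('/')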
