Skip to content

Commit

Permalink
Test that pagination is working for large directories
Browse files Browse the repository at this point in the history
  • Loading branch information
timj committed Mar 4, 2021
1 parent 16e9568 commit 18967c2
Showing 1 changed file with 31 additions and 0 deletions.
31 changes: 31 additions & 0 deletions tests/test_uri.py
Original file line number Diff line number Diff line change
Expand Up @@ -437,6 +437,37 @@ def testWalk(self):
for got, expect in zip(found, expected):
self.assertEqual(tuple(u.path for u in got), expect)

# Check that pagination works with large numbers of files.  The S3 API
# limits us to 1000 responses per list_objects call, so create more
# files than that to force the listing through multiple pages.
created = set()
counter = 1
n_dir1 = 1100  # deliberately larger than the 1000-item page limit
while counter <= n_dir1:
    new = ButlerURI(self.makeS3Uri(f"test/file{counter:04d}.txt"))
    new.write(f"{counter}".encode())
    created.add(str(new))
    counter += 1
counter = 1
# Put some files in a subdirectory as well, to make sure the search
# descends into a hierarchy rather than only listing the top level.
n_dir2 = 100
while counter <= n_dir2:
    new = ButlerURI(self.makeS3Uri(f"test/subdir/file{counter:04d}.txt"))
    new.write(f"{counter}".encode())
    created.add(str(new))
    counter += 1

# Every created file must be reported despite pagination.
found = ButlerURI.findFileResources([ButlerURI(self.makeS3Uri("test/"))])
self.assertEqual({str(u) for u in found}, created)

# Again with grouping: expect one group per directory, each group
# containing exactly the files created in that directory.
found = list(ButlerURI.findFileResources([ButlerURI(self.makeS3Uri("test/"))], grouped=True))
self.assertEqual(len(found), 2)
dir_1 = list(found[0])
dir_2 = list(found[1])
self.assertEqual(len(dir_1), n_dir1)
self.assertEqual(len(dir_2), n_dir2)

def testWrite(self):
s3write = ButlerURI(self.makeS3Uri("created.txt"))
content = "abcdefghijklmnopqrstuv\n"
Expand Down

0 comments on commit 18967c2

Please sign in to comment.