remove more usage of "future" #1362

Open · wants to merge 2 commits into base: master

24 changes: 10 additions & 14 deletions src/allmydata/dirnode.py
@@ -3,8 +3,6 @@
 Ported to Python 3.
 """
 
-from past.builtins import unicode
-
 import time
 
 from zope.interface import implementer
@@ -39,31 +37,29 @@
 )
 
 NAME = Field.for_types(
-    u"name",
-    # Make sure this works on Python 2; with str, it gets Future str which
-    # breaks Eliot.
-    [unicode],
-    u"The name linking the parent to this node.",
+    "name",
+    [str],
+    "The name linking the parent to this node.",
 )
 
 METADATA = Field.for_types(
-    u"metadata",
+    "metadata",
     [dict],
-    u"Data about a node.",
+    "Data about a node.",
 )
 
 OVERWRITE = Field.for_types(
-    u"overwrite",
+    "overwrite",
     [bool],
-    u"True to replace an existing file of the same name, "
-    u"false to fail with a collision error.",
+    "True to replace an existing file of the same name, "
+    "false to fail with a collision error.",
 )
 
 ADD_FILE = ActionType(
-    u"dirnode:add-file",
+    "dirnode:add-file",
     [NAME, METADATA, OVERWRITE],
     [],
-    u"Add a new file as a child of a directory.",
+    "Add a new file as a child of a directory.",
 )
 
 
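
Context for this hunk (not part of the patch): on Python 3 the built-in `str` is already the unicode text type, so Eliot's `Field.for_types` no longer needs the `past.builtins.unicode` alias, the compatibility comment, or the `u""` literal prefixes. A minimal standalone sketch using the same `eliot` API (the `validate()` calls are only illustrative):

```python
from eliot import Field

# On Python 3, [str] covers exactly what [unicode] covered under
# past.builtins: text values, never bytes.
NAME = Field.for_types(
    "name",
    [str],
    "The name linking the parent to this node.",
)

NAME.validate("example.txt")        # plain text is accepted
try:
    NAME.validate(b"example.txt")   # bytes are rejected by the [str] type list
except Exception as exc:
    print("rejected:", exc)
```
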
12 changes: 5 additions & 7 deletions src/allmydata/mutable/layout.py
@@ -2,8 +2,6 @@
 Ported to Python 3.
 """
 
-from past.utils import old_div
-
 import struct
 from allmydata.mutable.common import NeedMoreDataError, UnknownVersionError, \
     BadShareError
@@ -260,7 +258,7 @@ def __init__(self,
                                                        self._required_shares)
         assert expected_segment_size == segment_size
 
-        self._block_size = old_div(self._segment_size, self._required_shares)
+        self._block_size = self._segment_size // self._required_shares
 
         # This is meant to mimic how SDMF files were built before MDMF
         # entered the picture: we generate each share in its entirety,
@@ -793,7 +791,7 @@ def __init__(self,
         # and also because it provides a useful amount of bounds checking.
         self._num_segments = mathutil.div_ceil(self._data_length,
                                                self._segment_size)
-        self._block_size = old_div(self._segment_size, self._required_shares)
+        self._block_size = self._segment_size // self._required_shares
         # We also calculate the share size, to help us with block
         # constraints later.
         tail_size = self._data_length % self._segment_size
@@ -802,7 +800,7 @@
         else:
             self._tail_block_size = mathutil.next_multiple(tail_size,
                                                            self._required_shares)
-            self._tail_block_size = old_div(self._tail_block_size, self._required_shares)
+            self._tail_block_size = self._tail_block_size // self._required_shares
 
         # We already know where the sharedata starts; right after the end
         # of the header (which is defined as the signable part + the offsets)
@@ -1324,7 +1322,7 @@ def _process_encoding_parameters(self, encoding_parameters):
         self._segment_size = segsize
         self._data_length = datalen
 
-        self._block_size = old_div(self._segment_size, self._required_shares)
+        self._block_size = self._segment_size // self._required_shares
         # We can upload empty files, and need to account for this fact
         # so as to avoid zero-division and zero-modulo errors.
         if datalen > 0:
@@ -1336,7 +1334,7 @@ def _process_encoding_parameters(self, encoding_parameters):
         else:
             self._tail_block_size = mathutil.next_multiple(tail_size,
                                                            self._required_shares)
-            self._tail_block_size = old_div(self._tail_block_size, self._required_shares)
+            self._tail_block_size = self._tail_block_size // self._required_shares
 
         return encoding_parameters
 
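
Context for these hunks: `past.utils.old_div` reproduces Python 2 division, which is floor division when both operands are integers and true division otherwise. Every replaced call site divides two integers, so `//` is an exact drop-in. A standalone check with illustrative values (not taken from the codebase):

```python
# Python-2-style division of two ints is floor division, so `//`
# preserves the old behaviour at these call sites exactly.
segment_size = 131072        # illustrative value
required_shares = 3

block_size = segment_size // required_shares      # what the patch now uses
quotient, remainder = divmod(segment_size, required_shares)

assert block_size == quotient == 43690
assert remainder == 2
assert segment_size / required_shares != block_size   # true division would differ
```
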
7 changes: 1 addition & 6 deletions src/allmydata/storage/immutable.py
@@ -2,9 +2,6 @@
 Ported to Python 3.
 """
 
-
-from future.utils import bytes_to_native_str
-
 import os, stat, struct, time
 
 from collections_extended import RangeMap
@@ -534,9 +531,7 @@ def __init__(self, ss, sharefname, storage_index=None, shnum=None):
 
     def __repr__(self):
         return "<%s %s %s>" % (self.__class__.__name__,
-                               bytes_to_native_str(
-                                   base32.b2a(self.storage_index[:8])[:12]
-                               ),
+                               base32.b2a(self.storage_index[:8])[:12].decode(),
                                self.shnum)
 
     def read(self, offset, length):
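
Context for this hunk: `base32.b2a()` returns ASCII-only bytes, and on Python 3 `future.utils.bytes_to_native_str` simply decoded such bytes to `str`, so calling `.decode()` directly is equivalent in `__repr__`. A standalone sketch using the stdlib encoder as a stand-in for `allmydata.util.base32`:

```python
import base64

storage_index = bytes(range(1, 33))          # illustrative 32-byte storage index

# Stand-in for base32.b2a(storage_index[:8]): lowercase, ASCII-only bytes.
si_b32 = base64.b32encode(storage_index[:8]).lower()

# bytes_to_native_str(x) was effectively x.decode() on Python 3.
abbreviated = si_b32[:12].decode()
assert isinstance(abbreviated, str)
print(abbreviated)                           # a 12-character text abbreviation
```
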
21 changes: 15 additions & 6 deletions src/allmydata/storage/server.py
@@ -3,7 +3,6 @@
 """
 from __future__ import annotations
 
-from future.utils import bytes_to_native_str
 from typing import Iterable, Any
 
 import os, re
@@ -905,7 +904,12 @@ def remote_advise_corrupt_share(self, share_type, storage_index, shnum,
 
         """
 
-def render_corruption_report(share_type, si_s, shnum, reason):
+def render_corruption_report(
+    share_type: bytes,
+    si_s: bytes,
+    shnum: int,
+    reason: bytes
+) -> str:
     """
     Create a string that explains a corruption report using freeform text.
 
@@ -920,13 +924,18 @@ def render_corruption_report(share_type, si_s, shnum, reason):
     report.
     """
     return CORRUPTION_REPORT_FORMAT.format(
-        type=bytes_to_native_str(share_type),
-        storage_index=bytes_to_native_str(si_s),
+        type=share_type.decode(),
+        storage_index=si_s.decode(),
         share_number=shnum,
-        reason=bytes_to_native_str(reason),
+        reason=reason.decode(),
     )
 
-def get_corruption_report_path(base_dir, now, si_s, shnum):
+def get_corruption_report_path(
+    base_dir: str,
+    now: str,
+    si_s: str,
+    shnum: int
+) -> str:
     """
     Determine the path to which a certain corruption report should be written.
 
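
Context for these hunks: on Python 3, `future.utils.bytes_to_native_str(x)` amounted to `x.decode()`, and the new annotations make the bytes-in/str-out contract explicit. A standalone sketch of the same rendering pattern; the template below is hypothetical, since the real `CORRUPTION_REPORT_FORMAT` lives elsewhere in server.py and is not reproduced here:

```python
# Hypothetical template, used only to exercise the decode-based rendering.
REPORT_TEMPLATE = (
    "report: Share Corruption\n"
    "type: {type}\n"
    "storage_index: {storage_index}\n"
    "share_number: {share_number}\n"
    "\n"
    "{reason}\n"
)

def render(share_type: bytes, si_s: bytes, shnum: int, reason: bytes) -> str:
    # .decode() defaults to UTF-8, a superset of the ASCII these values
    # actually carry, matching what bytes_to_native_str did on Python 3.
    return REPORT_TEMPLATE.format(
        type=share_type.decode(),
        storage_index=si_s.decode(),
        share_number=shnum,
        reason=reason.decode(),
    )

print(render(b"immutable", b"aaaabbbbccccdddd", 3, b"hash mismatch in block 2"))
```
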
4 changes: 2 additions & 2 deletions src/allmydata/test/test_encode.py
@@ -2,7 +2,7 @@
 Ported to Python 3.
 """
 
-from past.builtins import chr as byteschr, long
+from past.builtins import chr as byteschr
 
 from zope.interface import implementer
 from twisted.trial import unittest
@@ -99,7 +99,7 @@ def abort(self):
     def get_block_data(self, blocknum, blocksize, size):
         d = self._start()
         def _try(unused=None):
-            assert isinstance(blocknum, (int, long))
+            assert isinstance(blocknum, int)
             if self.mode == "bad block":
                 return flip_bit(self.blocks[blocknum])
             return self.blocks[blocknum]
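
Context for these hunks: Python 3 has a single arbitrary-precision integer type, so the `(int, long)` tuple from `past.builtins` collapses to plain `int`. (`byteschr` stays because Python 3's built-in `chr` returns `str`, not a one-byte `bytes` value.) A trivial standalone check:

```python
# Arbitrarily large block numbers are still plain ints on Python 3; there
# is no separate long type left to test for.
blocknum = 2 ** 80
assert isinstance(blocknum, int)
```
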
6 changes: 2 additions & 4 deletions src/allmydata/test/test_encodingutil.py
@@ -343,8 +343,7 @@ def test_to_filepath(self):
 
         for fp in (nosep_fp, sep_fp):
             self.failUnlessReallyEqual(fp, FilePath(foo_u))
-            if encodingutil.use_unicode_filepath:
-                self.failUnlessReallyEqual(fp.path, foo_u)
+            self.failUnlessReallyEqual(fp.path, foo_u)
 
         if sys.platform == "win32":
             long_u = u'\\\\?\\C:\\foo'
@@ -360,8 +359,7 @@ def test_extend_filepath(self):
         for foo_fp in (foo_bfp, foo_ufp):
             fp = extend_filepath(foo_fp, [u'bar', u'baz'])
             self.failUnlessReallyEqual(fp, FilePath(foo_bar_baz_u))
-            if encodingutil.use_unicode_filepath:
-                self.failUnlessReallyEqual(fp.path, foo_bar_baz_u)
+            self.failUnlessReallyEqual(fp.path, foo_bar_baz_u)
 
     def test_unicode_from_filepath(self):
         foo_bfp = FilePath(win32_other(b'C:\\foo', b'/foo'))
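
Context for these hunks: with Python 2 gone, a `FilePath` built from text always reports a text `.path`, so the `encodingutil.use_unicode_filepath` guard around these assertions is dead and they can run unconditionally. A standalone sketch:

```python
from twisted.python.filepath import FilePath

fp = FilePath(u"/foo")        # illustrative POSIX path

# A FilePath constructed from text keeps a text path on Python 3, which is
# why the use_unicode_filepath condition is no longer needed.
assert isinstance(fp.path, str)
```
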
5 changes: 1 addition & 4 deletions src/allmydata/test/test_log.py
@@ -4,9 +4,6 @@
 Ported to Python 3.
 """
 
-
-from future.utils import native_str
-
 from twisted.trial import unittest
 from twisted.python.failure import Failure
 
@@ -161,4 +158,4 @@ class LoggingObject17(tahoe_log.PrefixingLogMixin):
         obj.log(**{"my": "message"})
         for message in self.messages:
             for k in message[-1].keys():
-                self.assertIsInstance(k, native_str)
+                self.assertIsInstance(k, str)
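
Context for these hunks: `future.utils.native_str` is simply the built-in `str` on Python 3 (it only differed on Python 2, where the native string type was bytes), so the assertion can name `str` directly. Illustratively:

```python
# Keyword-argument names are always text on Python 3, so checking against
# str is equivalent to the old native_str check.
def capture(**kwargs):
    return kwargs

for key in capture(my="message"):
    assert isinstance(key, str)
```
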
10 changes: 5 additions & 5 deletions src/allmydata/test/test_storage.py
@@ -5,7 +5,7 @@
 """
 
 from __future__ import annotations
-from future.utils import native_str, bytes_to_native_str, bchr
+from future.utils import bchr
 from six import ensure_str
 
 from io import (
@@ -109,7 +109,7 @@ def test_storage_index_to_dir(self):
             path = storage_index_to_dir(s)
             parts = os.path.split(path)
             self.assertThat(parts[0], Equals(parts[1][:2]))
-            self.assertThat(path, IsInstance(native_str))
+            self.assertThat(path, IsInstance(str))
 
     def test_get_share_file_mutable(self):
         """A mutable share is identified by get_share_file()."""
@@ -1242,7 +1242,7 @@ def test_advise_corruption(self):
 
         reports = os.listdir(reportdir)
         self.assertThat(reports, HasLength(2))
-        report_si1 = [r for r in reports if bytes_to_native_str(si1_s) in r][0]
+        report_si1 = [r for r in reports if si1_s.decode() in r][0]
         f = open(os.path.join(reportdir, report_si1), "rb")
         report = f.read()
         f.close()
@@ -1809,10 +1809,10 @@ def test_remove(self):
         self.assertThat(readv(b"si1", [], [(0,10)]),
                         Equals({}))
         # and the bucket directory should now be gone
-        si = base32.b2a(b"si1")
+        si = base32.b2a(b"si1").decode()
         # note: this is a detail of the storage server implementation, and
         # may change in the future
-        si = bytes_to_native_str(si) # filesystem paths are native strings
+        # filesystem paths are native strings
         prefix = si[:2]
         prefixdir = os.path.join(self.workdir("test_remove"), "shares", prefix)
         bucketdir = os.path.join(prefixdir, si)
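
Context for these hunks: the test pokes at the on-disk bucket layout, where each bucket lives under a two-character prefix of the base32 storage index, and on Python 3 those path components must be text, hence the single `.decode()` where the storage index is rendered. A standalone sketch of the directory shape implied by the assertions above (the exact layout remains a storage-server implementation detail):

```python
import base64
import os

# Stand-in for allmydata.util.base32.b2a(b"si1"): lowercase base32 text
# (allmydata's encoder omits the '=' padding, hence the rstrip).
si = base64.b32encode(b"si1").lower().decode().rstrip("=")

prefix = si[:2]
workdir = "storage/test_remove"              # illustrative, not the real test workdir
prefixdir = os.path.join(workdir, "shares", prefix)
bucketdir = os.path.join(prefixdir, si)

assert isinstance(bucketdir, str)            # filesystem paths are native (text) strings
print(bucketdir)                             # e.g. storage/test_remove/shares/on/onutc
```
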
4 changes: 2 additions & 2 deletions src/allmydata/test/test_system.py
@@ -3,7 +3,7 @@
 """
 from __future__ import annotations
 
-from past.builtins import chr as byteschr, long
+from past.builtins import chr as byteschr
 from six import ensure_text
 
 import os, re, sys, time, json
@@ -395,7 +395,7 @@ def _uploaded(results):
             # this is really bytes received rather than sent, but it's
             # convenient and basically measures the same thing
             bytes_sent = results.get_ciphertext_fetched()
-            self.failUnless(isinstance(bytes_sent, (int, long)), bytes_sent)
+            self.failUnless(isinstance(bytes_sent, int), bytes_sent)
 
             # We currently don't support resumption of upload if the data is
             # encrypted with a random key. (Because that would require us