
Commit

Merge pull request #3151 from NotAFile/py3-xrange-1
Move more xrange to six
richvdh committed Apr 30, 2018
2 parents: 7b908ae + db75c86 · commit 683149c
Showing 9 changed files with 28 additions and 11 deletions.
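For context: six.moves.range resolves to the lazy xrange built-in on Python 2 and to the standard range built-in on Python 3, so a single import gives the same memory-friendly iteration behaviour on both interpreters. A minimal sketch of the batching pattern this commit converts (illustrative only, not Synapse code):

    # Illustrative sketch, not Synapse code: batching with six.moves.range.
    # On Python 2 this name is xrange (lazy); on Python 3 it is the builtin
    # range, so no full list of indices is ever built.
    from six.moves import range

    items = list("abcdefghij")
    batch_size = 3
    for i in range(0, len(items), batch_size):
        batch = items[i:i + batch_size]
        print(batch)  # ['a', 'b', 'c'], then ['d', 'e', 'f'], ...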
synapse/federation/federation_client.py (3 additions & 1 deletion)

@@ -19,6 +19,8 @@
 import logging
 import random

+from six.moves import range
+
 from twisted.internet import defer

 from synapse.api.constants import Membership

@@ -413,7 +415,7 @@ def random_server_list():

         batch_size = 20
         missing_events = list(missing_events)
-        for i in xrange(0, len(missing_events), batch_size):
+        for i in range(0, len(missing_events), batch_size):
             batch = set(missing_events[i:i + batch_size])

             deferreds = [
synapse/handlers/room_list.py (3 additions & 1 deletion)

@@ -15,6 +15,8 @@

 from twisted.internet import defer

+from six.moves import range
+
 from ._base import BaseHandler

 from synapse.api.constants import (

@@ -200,7 +202,7 @@ def get_order_for_room(room_id):
             step = len(rooms_to_scan) if len(rooms_to_scan) != 0 else 1

         chunk = []
-        for i in xrange(0, len(rooms_to_scan), step):
+        for i in range(0, len(rooms_to_scan), step):
             batch = rooms_to_scan[i:i + step]
             logger.info("Processing %i rooms for result", len(batch))
             yield concurrently_execute(
synapse/storage/registration.py (3 additions & 1 deletion)

@@ -22,6 +22,8 @@
 from synapse.storage._base import SQLBaseStore
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks

+from six.moves import range
+

 class RegistrationWorkerStore(SQLBaseStore):
     @cached()

@@ -469,7 +471,7 @@ def _find_next_generated_user_id(txn):
                 match = regex.search(user_id)
                 if match:
                     found.add(int(match.group(1)))
-            for i in xrange(len(found) + 1):
+            for i in range(len(found) + 1):
                 if i not in found:
                     return i
synapse/storage/schema/delta/30/as_users.py (3 additions & 1 deletion)

@@ -14,6 +14,8 @@
 import logging
 from synapse.config.appservice import load_appservices

+from six.moves import range
+

 logger = logging.getLogger(__name__)

@@ -58,7 +60,7 @@ def run_upgrade(cur, database_engine, config, *args, **kwargs):

     for as_id, user_ids in owned.items():
         n = 100
-        user_chunks = (user_ids[i:i + 100] for i in xrange(0, len(user_ids), n))
+        user_chunks = (user_ids[i:i + 100] for i in range(0, len(user_ids), n))
         for chunk in user_chunks:
             cur.execute(
                 database_engine.convert_param_style(
synapse/storage/stream.py (3 additions & 1 deletion)

@@ -47,6 +47,8 @@
 import abc
 import logging

+from six.moves import range
+

 logger = logging.getLogger(__name__)

@@ -196,7 +198,7 @@ def get_room_events_stream_for_rooms(self, room_ids, from_key, to_key, limit=0,

         results = {}
         room_ids = list(room_ids)
-        for rm_ids in (room_ids[i:i + 20] for i in xrange(0, len(room_ids), 20)):
+        for rm_ids in (room_ids[i:i + 20] for i in range(0, len(room_ids), 20)):
             res = yield make_deferred_yieldable(defer.gatherResults([
                 run_in_background(
                     self.get_room_events_stream_for_room,
synapse/storage/tags.py (3 additions & 1 deletion)

@@ -22,6 +22,8 @@
 import simplejson as json
 import logging

+from six.moves import range
+
 logger = logging.getLogger(__name__)


@@ -98,7 +100,7 @@ def get_tag_content(txn, tag_ids):

         batch_size = 50
         results = []
-        for i in xrange(0, len(tag_ids), batch_size):
+        for i in range(0, len(tag_ids), batch_size):
             tags = yield self.runInteraction(
                 "get_all_updated_tag_content",
                 get_tag_content,
synapse/util/async.py (4 additions & 2 deletions)

@@ -27,6 +27,8 @@

 import logging

+from six.moves import range
+
 logger = logging.getLogger(__name__)


@@ -158,13 +160,13 @@ def concurrently_execute(func, args, limit):
     def _concurrently_execute_inner():
         try:
             while True:
-                yield func(it.next())
+                yield func(next(it))
         except StopIteration:
             pass

     return logcontext.make_deferred_yieldable(defer.gatherResults([
         run_in_background(_concurrently_execute_inner)
-        for _ in xrange(limit)
+        for _ in range(limit)
     ], consumeErrors=True)).addErrback(unwrapFirstError)
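Note that the synapse/util/async.py hunk above also replaces the Python 2-only it.next() method call with the builtin next(it), which behaves the same on both major versions. A tiny illustration (not Synapse code), assuming a plain iterator:

    # Illustrative sketch: builtin next() vs the Python 2-only .next() method.
    it = iter([1, 2, 3])

    print(next(it))  # -> 1; next() exists on Python 2 and Python 3
    # it.next() would also work on Python 2, but on Python 3 the method is
    # named __next__(), so it.next() raises AttributeError there.
    print(next(it))  # -> 2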
synapse/util/stringutils.py (3 additions & 2 deletions)

@@ -15,19 +15,20 @@

 import random
 import string
+from six.moves import range

 _string_with_symbols = (
     string.digits + string.ascii_letters + ".,;:^&*-_+=#~@"
 )


 def random_string(length):
-    return ''.join(random.choice(string.ascii_letters) for _ in xrange(length))
+    return ''.join(random.choice(string.ascii_letters) for _ in range(length))


 def random_string_with_symbols(length):
     return ''.join(
-        random.choice(_string_with_symbols) for _ in xrange(length)
+        random.choice(_string_with_symbols) for _ in range(length)
     )
synapse/util/wheel_timer.py (3 additions & 1 deletion)

@@ -13,6 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+from six.moves import range
+

 class _Entry(object):
     __slots__ = ["end_key", "queue"]

@@ -68,7 +70,7 @@ def insert(self, now, obj, then):
         # Add empty entries between the end of the current list and when we want
         # to insert. This ensures there are no gaps.
         self.entries.extend(
-            _Entry(key) for key in xrange(last_key, then_key + 1)
+            _Entry(key) for key in range(last_key, then_key + 1)
         )

         self.entries[-1].queue.append(obj)
