
Commit

Use print() function in both Python 2 and Python 3 (#459)
cclauss authored and gimite committed Dec 6, 2018
1 parent 917f100 commit d97a20e
Showing 16 changed files with 112 additions and 96 deletions.
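
Every file below follows the same pattern: print_function is imported from __future__ near the top of the module so that print() behaves as a function under Python 2 as well, and each print statement (including the 'print >>sys.stderr, ...' redirection form) is rewritten as a call. A minimal before/after sketch of the conversion (the values here are illustrative, not taken from any one file):

    # Python 2 only:
    #     print
    #     print 'hello'
    #     print >>sys.stderr, 'error code:', 500

    # Works on Python 2 (with the __future__ import) and on Python 3:
    from __future__ import print_function
    import sys

    print()
    print('hello')
    print('error code:', 500, file=sys.stderr)
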
11 changes: 6 additions & 5 deletions tests/load_test/load_test.py
@@ -89,6 +89,7 @@
   test_mode:
     If true, allows running "create" test for an existing repository.
 """
+from __future__ import print_function
 
 import datetime
 import logging
@@ -214,7 +215,7 @@ def save_result(self):
             json.dump(output, f)
 
     def print_stats(self):
-        print
+        print()
         print ('Average request latency (sec):\t%.3f'
                % self.average(self.data['request_latency_seconds']))
         print ('90%%tile request latency (sec):\t%.3f'
@@ -224,8 +225,8 @@ def print_stats(self):
         print ('90%%tile interval between requests (sec):\t%.3f'
                % self.ninety_percentile(self.data['request_interval_seconds']))
 
-        print
-        print 'http_status:'
+        print()
+        print('http_status:')
         http_status_freqs = {}
         for status in self.data['http_statuses']:
             if status in http_status_freqs:
@@ -234,8 +235,8 @@ def print_stats(self):
                 http_status_freqs[status] = 1
         for status, freq in http_status_freqs.iteritems():
             status_str = str(status) if status else 'Error'
-            print ' %s: %d (%.1f%%)' % (
-                status_str, freq, 100.0 * freq / len(self.data['http_statuses']))
+            print(' %s: %d (%.1f%%)' % (
+                status_str, freq, 100.0 * freq / len(self.data['http_statuses'])))
 
     def average(self, deltas):
         if deltas:
21 changes: 11 additions & 10 deletions tests/scrape.py
@@ -26,6 +26,7 @@
 - store and send cookies according to domain and path
 - submit forms with file upload
 """
+from __future__ import print_function
 
 __author__ = 'Ka-Ping Yee <ping@zesty.ca>'
 __date__ = '$Date: 2012/09/22 00:00:00 $'.split()[1].replace('/', '-')
@@ -56,12 +57,12 @@ def request(scheme, method, host, path, headers, data='', verbose=0):
     import socket
     sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
     if verbose >= 3:
-        print >>sys.stderr, 'connect:', host, port
+        print('connect:', host, port, file=sys.stderr)
     sock.connect((host, int(port)))
     file = scheme == 'https' and socket.ssl(sock) or sock.makefile()
     if verbose >= 3:
-        print >>sys.stderr, ('\r\n' + request.rstrip()).replace(
-            '\r\n', '\nrequest: ').lstrip()
+        print(('\r\n' + request.rstrip()).replace(
+            '\r\n', '\nrequest: ').lstrip(), file=sys.stderr)
     file.write(request)
     if hasattr(file, 'flush'):
         file.flush()
@@ -90,7 +91,7 @@ def curl(url, headers={}, data=None, verbose=0):
         command += ' --header ' + shellquote('%s: %s' % (name, value))
     command += ' ' + shellquote(url)
     if verbose >= 3:
-        print >>sys.stderr, 'execute:', command
+        print('execute:', command, file=sys.stderr)
     os.system(command + ' > ' + tempname)
     reply = open(tempname).read()
     os.remove(tempname)
@@ -187,7 +188,7 @@ def fetch(url, data='', agent=None, referrer=None, charset=None, verbose=0,
 
     # Make the HTTP or HTTPS request using Python or cURL.
     if verbose:
-        print >>sys.stderr, '>', method, url
+        print('>', method, url, file=sys.stderr)
     import socket
     if scheme == 'http' or scheme == 'https' and hasattr(socket, 'ssl'):
         if query:
@@ -209,24 +210,24 @@ def fetch(url, data='', agent=None, referrer=None, charset=None, verbose=0,
     head = head.replace('\r\n', '\n').replace('\r', '\n')
     response, head = head.split('\n', 1)
     if verbose >= 3:
-        print >>sys.stderr, 'reply:', response.rstrip()
+        print('reply:', response.rstrip(), file=sys.stderr)
     status = int(response.split()[1])
     message = ' '.join(response.split()[2:])
     for line in head.split('\n'):
         if verbose >= 3:
-            print >>sys.stderr, 'reply:', line.rstrip()
+            print('reply:', line.rstrip(), file=sys.stderr)
         name, value = line.split(': ', 1)
         name = name.lower()
         if name in headers:
             headers[name] += '\n' + value
         else:
             headers[name] = value
     if verbose >= 2:
-        print >>sys.stderr, 'content: %d byte%s\n' % (
-            len(content), content != 1 and 's' or '')
+        print('content: %d byte%s\n' % (
+            len(content), content != 1 and 's' or ''), file=sys.stderr)
     if verbose >= 3:
         for line in content.rstrip('\n').split('\n'):
-            print >>sys.stderr, 'content: ' + repr(line + '\n')
+            print('content: ' + repr(line + '\n'), file=sys.stderr)
 
     # Store any received cookies.
     if 'set-cookie' in headers:
17 changes: 9 additions & 8 deletions tests/server_tests.py
@@ -27,6 +27,7 @@
 Specify -s to see the messages printed by all tests as they run (by default,
 stdout/stderr will be captured and then shown only for failing tests).
 """
+from __future__ import print_function
 
 import os
 import pytest
@@ -104,10 +105,10 @@ def stop(self):
         self.clean_up()
         if self.failed:
             self.flush_output()
-            print >>sys.stderr, '%s failed (status %s).\n' % (
-                self.name, self.process.returncode)
+            print('%s failed (status %s).\n' % (
+                self.name, self.process.returncode), file=sys.stderr)
         else:
-            print >>sys.stderr, '%s stopped.' % self.name
+            print('%s stopped.' % self.name, file=sys.stderr)
 
     def flush_output(self):
         """Flushes the buffered output from this subprocess to stderr."""
@@ -126,7 +127,7 @@ def wait_until_ready(self, timeout=10):
             if not self.ready:
                 self.flush_output() # after each second, show output
         if self.ready:
-            print >>sys.stderr, '%s started.' % self.name
+            print('%s started.' % self.name, file=sys.stderr)
         else:
             raise RuntimeError('%s failed to start.' % self.name)
 
@@ -191,19 +192,19 @@ def __init__(self, port):
     def run(self):
         class MailServer(smtpd.SMTPServer):
             def process_message(self, peer, mailfrom, rcpttos, data):
-                print >>sys.stderr, 'mail from:', mailfrom, 'to:', rcpttos
+                print('mail from:', mailfrom, 'to:', rcpttos, file=sys.stderr)
                 MailThread.messages.append(
                     {'from': mailfrom, 'to': rcpttos, 'data': data})
 
         try:
             server = MailServer(('localhost', self.port), None)
         except Exception, e:
-            print >>sys.stderr, 'SMTP server failed: %s' % e
+            print('SMTP server failed: %s' % e, file=sys.stderr)
             sys.exit(-1)
-        print >>sys.stderr, 'SMTP server started.'
+        print('SMTP server started.', file=sys.stderr)
         while not self.stop_requested:
             smtpd.asyncore.loop(timeout=0.5, count=1)
-        print >>sys.stderr, 'SMTP server stopped.'
+        print('SMTP server stopped.', file=sys.stderr)
 
     def stop(self):
         self.stop_requested = True
3 changes: 2 additions & 1 deletion tests/server_tests_base.py
@@ -15,6 +15,7 @@
 # limitations under the License.
 
 """Test cases for end-to-end testing. Run with the server_tests script."""
+from __future__ import print_function
 
 import calendar
 import datetime
@@ -208,7 +209,7 @@ def log(self, message, *args):
             message = message.encode('utf-8')
         else:
             message = str(message)
-        print >>sys.stderr, '%6.2f:' % (now - last_star), message, args or ''
+        print('%6.2f:' % (now - last_star), message, args or '', file=sys.stderr)
        if message[:1] == '*':
            last_star = now
 
5 changes: 3 additions & 2 deletions tools/batch_delete.py
@@ -17,6 +17,7 @@
 Instead of running this script directly, use the 'batch_delete' shell script,
 which sets up the PYTHONPATH and other necessary environment variables."""
+from __future__ import print_function
 
 import optparse
 
@@ -96,11 +97,11 @@ def main():
             attr_value = str(entity.key().id_or_name())
         else:
             attr_value = getattr(entity, options.output_attr)
-        print '%s %s with %s = %r' % (
+        print('%s %s with %s = %r' % (
             message_prefix,
             type(entity).kind(),
             options.output_attr,
-            attr_value)
+            attr_value))
 
     if options.mode == 'delete':
         db.delete(entities)
23 changes: 12 additions & 11 deletions tools/find_missing_translations.py
@@ -22,6 +22,7 @@
 PO file format:
     http://www.gnu.org/software/hello/manual/gettext/PO-Files.html
 """
+from __future__ import print_function
 
 import optparse
 import os
@@ -101,7 +102,7 @@ def message_to_xmb(message):
         # Remove all but the missing messages.
         for id in missing_ids:
             if not translations[id]:
-                print >>sys.stderr, 'missing id: %s' % id
+                print('missing id: %s' % id, file=sys.stderr)
                 continue
             translations[id].string = '' # remove fuzzy translations
             if 'fuzzy' in translations[id].flags: # remove the fuzzy flag
@@ -120,14 +121,14 @@ def message_to_xmb(message):
         translations = pofile.read_po(open(get_po_filename('en')))
         ids = set(message.id for message in translations)
         if missing_ids:
-            print '%s: %d missing' % (filename, len(missing_ids))
+            print('%s: %d missing' % (filename, len(missing_ids)))
             new_file = open(filename, 'w')
             for id in ids - set(missing_ids):
                 del translations[id]
             pofile.write_po(new_file, translations, no_location=True,
                             omit_header=True, sort_output=True)
             new_file.close()
-            print '\n\n# LANGUAGE = %s\n' % locale
+            print('\n\n# LANGUAGE = %s\n' % locale)
 
 if options.format == 'xmb':
     # Produce one XMB file for each set of locales that have the same
@@ -136,7 +137,7 @@ def message_to_xmb(message):
                            key=lambda t: (len(t), t)):
         filename = '.'.join(locales_by_missing_ids[missing_ids]) + '.xmb'
         if missing_ids:
-            print '%s: %d missing' % (filename, len(missing_ids))
+            print('%s: %d missing' % (filename, len(missing_ids)))
             file = open(filename, 'w')
             file.write('<?xml version="1.0" encoding="UTF-8"?>\n')
             file.write('<messagebundle>\n')
@@ -152,17 +153,17 @@ def message_to_xmb(message):
             locales_by_missing_ids, key=lambda t: (len(t), t)):
         locales = ' '.join(locales_by_missing_ids[missing_ids])
         if missing_ids:
-            print '%s: %d missing' % (locales, len(missing_ids))
+            print('%s: %d missing' % (locales, len(missing_ids)))
             for id in sorted(missing_ids)[:10]:
                 if isinstance(id, tuple):
                     id = id[0]
                 id_repr = repr(id.encode('ascii', 'ignore'))
                 truncated = len(id_repr) > 70
-                print ' %s%s' % (id_repr[:70], truncated and '...' or '')
+                print(' %s%s' % (id_repr[:70], truncated and '...' or ''))
             if len(missing_ids) > 10:
-                print ' ... (%d more)' % (len(missing_ids) - 10)
+                print(' ... (%d more)' % (len(missing_ids) - 10))
         else:
-            print '%s: ok' % locales
+            print('%s: ok' % locales)
 
 if options.format == 'list':
     # List all the missing messages, collecting together the locales
@@ -171,8 +172,8 @@ def message_to_xmb(message):
             locales_by_missing_ids, key=lambda t: (len(t), t)):
         locales = ' '.join(locales_by_missing_ids[missing_ids])
         if missing_ids:
-            print '%s: %d missing' % (locales, len(missing_ids))
+            print('%s: %d missing' % (locales, len(missing_ids)))
             for id in sorted(missing_ids):
-                print ' ' + repr(id.encode('ascii', 'ignore'))
+                print(' ' + repr(id.encode('ascii', 'ignore')))
         else:
-            print '%s: ok' % locales
+            print('%s: ok' % locales)
11 changes: 6 additions & 5 deletions tools/import.py
@@ -14,6 +14,7 @@
 # limitations under the License.
 
 """Unix command-line utility: import CSV files into the datastore."""
+from __future__ import print_function
 
 import remote_api
 
@@ -24,17 +25,17 @@
 SHOW_ERRORS = 5
 
 def import_from_file(host, repo, kind, converter, filename):
-    print '%s: importing %s records from %s' % (host, kind, filename)
+    print('%s: importing %s records from %s' % (host, kind, filename))
     written, skipped, total = importer.import_records(
         repo, source_domain, converter,
         importer.utf8_decoder(csv.DictReader(open(filename))))
     for error, record in skipped[:SHOW_ERRORS]:
-        print ' - %s: %r' % (error, record)
+        print(' - %s: %r' % (error, record))
     if len(skipped) > SHOW_ERRORS:
-        print ' (more errors not shown)'
+        print(' (more errors not shown)')
 
-    print 'wrote %d of %d (skipped %d with errors)' % (
-        written, total, len(skipped))
+    print('wrote %d of %d (skipped %d with errors)' % (
+        written, total, len(skipped)))
 
 if __name__ == '__main__':
     if len(sys.argv) < 6:
9 changes: 5 additions & 4 deletions tools/iterate.py
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+from __future__ import print_function
 import time
 import re
 import sys
@@ -28,14 +29,14 @@ def iterate(query, callback=lambda x: x, batch_size=1000, verbose=True):
         for row in results:
             output = callback(row)
             if output:
-                print output
+                print(output)
             count += 1
         if verbose:
-            print '%s rows processed in %.1fs' % (count, time.time() - rstart)
-            print 'total time: %.1fs' % (time.time() - start)
+            print('%s rows processed in %.1fs' % (count, time.time() - rstart))
+            print('total time: %.1fs' % (time.time() - start))
         results = query.with_cursor(query.cursor()).fetch(batch_size)
     callback()
-    print 'total rows: %s, total time: %.1fs' % (count, time.time() - start)
+    print('total rows: %s, total time: %.1fs' % (count, time.time() - start))
 
 
 def dangling_pic(pic):
13 changes: 7 additions & 6 deletions tools/merge_messages.py
@@ -52,6 +52,7 @@
 - To minimize unnecessary changes from version to version, the target file
   has no "#: filename:line" comments and the messages are sorted by msgid.
 """
+from __future__ import print_function
 
 import babel.messages
 from babel.messages import pofile
@@ -95,7 +96,7 @@ def characters(self, content):
 
 def log(text):
     """Prints out Unicode text."""
-    print text.encode('utf-8')
+    print(text.encode('utf-8'))
 
 
 def log_change(old_message, new_message):
@@ -104,7 +105,7 @@ def log_change(old_message, new_message):
     if new_message.id:
         log('+ msgid "%s"' % str(new_message.id))
     else:
-        print >>sys.stderr, 'no message id: %s' % new_message
+        print('no message id: %s' % new_message, file=sys.stderr)
     log('+ msgstr "%s"' % str(new_message.string.encode('ascii', 'ignore')))
     if new_message.flags:
         log('+ #, %s' % ', '.join(sorted(new_message.flags)))
@@ -166,7 +167,7 @@ def merge_file(source_filename, target_filename, template_filename):
 if __name__ == '__main__':
     args = sys.argv[1:]
     if len(args) not in [1, 2, 3]:
-        print __doc__
+        print(__doc__)
         sys.exit(1)
     args = (args + [None, None])[:3]
     source_path = args[0]
@@ -176,13 +177,13 @@ def merge_file(source_filename, target_filename, template_filename):
     # If a single file is specified, merge it.
     if ((source_path.endswith('.po') or source_path.endswith('.xml')) and
             target_path.endswith('.po')):
-        print target_path
+        print(target_path)
         merge_file(source_path, target_path, template_path)
         sys.exit(0)
 
     # Otherwise, we expect two directories.
     if not os.path.isdir(source_path) or not os.path.isdir(target_path):
-        print __doc__
+        print(__doc__)
         sys.exit(1)
 
     # Find all the source files.
@@ -199,5 +200,5 @@ def find_po_file(key, dir, filenames):
     # Merge them into the target files.
     for locale in sorted(source_filenames.keys()):
         target = os.path.join(target_path, locale, 'LC_MESSAGES', 'django.po')
-        print target
+        print(target)
         merge_file(source_filenames[locale], target, template_path)
