Skip to content

Commit

Permalink
Merge "[WIP] raise ServerError for request.status 104" into nexqt
Browse files Browse the repository at this point in the history
  • Loading branch information
xqt authored and Gerrit Code Review committed Apr 25, 2016
2 parents 19d0288 + b40f1eb commit ce46a21
Show file tree
Hide file tree
Showing 31 changed files with 134 additions and 68 deletions.
4 changes: 2 additions & 2 deletions generate_family_file.py
Expand Up @@ -30,8 +30,8 @@

# Disable user-config checks so the family can be created first,
# and then used when generating the user-config
_orig_no_user_config = os.environ.get('PYWIKIBOT2_NO_USER_CONFIG') # noqa
os.environ['PYWIKIBOT2_NO_USER_CONFIG'] = '2' # noqa
_orig_no_user_config = os.environ.get('PYWIKIBOT2_NO_USER_CONFIG')
os.environ['PYWIKIBOT2_NO_USER_CONFIG'] = '2'

from pywikibot.site_detect import MWSite as Wiki

Expand Down
4 changes: 2 additions & 2 deletions generate_user_files.py
Expand Up @@ -18,8 +18,8 @@
from warnings import warn

# Disable user-config usage as we are creating it here
_orig_no_user_config = os.environ.get('PYWIKIBOT2_NO_USER_CONFIG') # noqa
os.environ['PYWIKIBOT2_NO_USER_CONFIG'] = '2' # noqa
_orig_no_user_config = os.environ.get('PYWIKIBOT2_NO_USER_CONFIG')
os.environ['PYWIKIBOT2_NO_USER_CONFIG'] = '2'

import pywikibot

Expand Down
6 changes: 3 additions & 3 deletions pwb.py
Expand Up @@ -72,13 +72,13 @@ def tryimport_pwb():
"""
global pwb
try:
import pywikibot # noqa
import pywikibot # flake8: disable=F811
pwb = pywikibot
except RuntimeError:
remove_modules()

os.environ['PYWIKIBOT2_NO_USER_CONFIG'] = '2'
import pywikibot # noqa
import pywikibot # flake8: disable=E402
pwb = pywikibot


Expand Down Expand Up @@ -184,7 +184,7 @@ def abspath(path):
if sys.platform == 'win32' and sys.version_info[0] < 3:
_pwb_dir = str(_pwb_dir)
os.environ[str('PYWIKIBOT2_DIR_PWB')] = _pwb_dir
import pywikibot # noqa
import pywikibot # flake8: disable=F401
except RuntimeError as err:
# user-config.py to be created
print("NOTE: 'user-config.py' was not found!")
Expand Down
8 changes: 5 additions & 3 deletions pywikibot/bot.py
Expand Up @@ -84,18 +84,20 @@
from pywikibot import config
from pywikibot import daemonize
from pywikibot import version
from pywikibot.bot_choice import ( # noqa: unused imports
from pywikibot.bot_choice import ( # flake8: disable=F401 (unused imports)
Option, StandardOption, NestedOption, IntegerOption, ContextOption,
ListOption, OutputProxyOption, HighlightContextOption,
ChoiceException, QuitKeyboardInterrupt,
)
from pywikibot.logging import CRITICAL, ERROR, INFO, WARNING # noqa: unused
from pywikibot.logging import ( # flake8: disable=F401
CRITICAL, ERROR, INFO, WARNING,
)
from pywikibot.logging import DEBUG, INPUT, STDOUT, VERBOSE
from pywikibot.logging import (
add_init_routine,
debug, error, exception, log, output, stdout, warning,
)
from pywikibot.logging import critical # noqa: unused
from pywikibot.logging import critical # flake8: disable=F401
from pywikibot.tools import deprecated, deprecated_args, PY2, PYTHON_VERSION
from pywikibot.tools._logging import (
LoggingFormatter as _LoggingFormatter,
Expand Down
2 changes: 1 addition & 1 deletion pywikibot/comms/http.py
Expand Up @@ -93,7 +93,7 @@ def _flush():
message = 'Closing network session.'
if hasattr(sys, 'last_type'):
# we quit because of an exception
print(sys.last_type) # noqa: print
print(sys.last_type) # flake8: disable=T003 (print)
critical(message)
else:
log(message)
Expand Down
2 changes: 1 addition & 1 deletion pywikibot/editor.py
Expand Up @@ -23,7 +23,7 @@
from pywikibot.tools import deprecated

try:
from pywikibot.userinterfaces import gui # noqa
from pywikibot.userinterfaces import gui
except ImportError as e:
gui = e

Expand Down
2 changes: 1 addition & 1 deletion pywikibot/exceptions.py
Expand Up @@ -160,7 +160,7 @@ def __init__(self, page, message=None):

if '%(' in self.message and ')s' in self.message:
super(PageRelatedError, self).__init__(
self.message % self.__dict__) # noqa: H501
self.message % self.__dict__)
else:
super(PageRelatedError, self).__init__(self.message % page)

Expand Down
4 changes: 2 additions & 2 deletions pywikibot/page.py
Expand Up @@ -40,7 +40,7 @@
from html import entities as htmlentitydefs
from urllib.parse import quote_from_bytes, unquote_to_bytes
else:
chr = unichr # noqa
chr = unichr # flake8 F821 (undefined name) disabled by tox.ini
import htmlentitydefs
from urllib import quote as quote_from_bytes, unquote as unquote_to_bytes

Expand All @@ -67,7 +67,7 @@
first_upper, remove_last_args, _NotImplementedWarning,
OrderedDict, Counter,
)
from pywikibot.tools.ip import ip_regexp # noqa & deprecated
from pywikibot.tools.ip import ip_regexp # flake8: disable=F401 (unused import)
from pywikibot.tools.ip import is_IP


Expand Down
4 changes: 3 additions & 1 deletion pywikibot/pagegenerators.py
Expand Up @@ -689,6 +689,7 @@ def intNone(v):
elif arg == '-recentchanges':
rcstart = None
rcend = None
total = None
params = value.split(',') if value else []
if len(params) == 2:
offset = float(params[0])
Expand All @@ -698,12 +699,13 @@ def intNone(v):
elif len(params) > 2:
raise ValueError('More than two parameters passed.')
else:
value = int(value) if value else 60
total = int(value) if value else 60
if len(params) == 2:
ts_time = self.site.server_time()
rcstart = ts_time + timedelta(minutes=-(offset + duration))
rcend = ts_time + timedelta(minutes=-offset)
gen = RecentChangesPageGenerator(namespaces=self.namespaces,
total=total,
start=rcstart,
end=rcend,
site=self.site,
Expand Down
37 changes: 36 additions & 1 deletion pywikibot/proofreadpage.py
Expand Up @@ -30,6 +30,8 @@

import pywikibot

from pywikibot.data.api import Request


class FullHeader(object):

Expand Down Expand Up @@ -420,6 +422,23 @@ def pre_summary(self):
return '/* {0.status} */ '.format(self)


class PurgeRequest(Request):

    """Request subclass that bypasses the check on write rights.

    Workaround for T128994.
    # TODO: remove once bug is fixed.
    """

    def __init__(self, **kwargs):
        """Initializer; temporarily masks the real action.

        The genuine action is hidden from the parent constructor
        (replaced by a harmless placeholder) and restored on the
        instance afterwards.
        """
        real_action = kwargs['parameters']['action']
        kwargs['parameters']['action'] = 'dummy'
        super(PurgeRequest, self).__init__(**kwargs)
        self.action = real_action
        self.update({'action': real_action})


class IndexPage(pywikibot.Page):

"""Index Page page used in Mediawiki ProofreadPage extension."""
Expand Down Expand Up @@ -481,6 +500,22 @@ def _parse_redlink(self, href):
else:
return None

def purge(self):
    """Overwrite purge method.

    Workaround for T128994.
    # TODO: remove once bug is fixed.

    Instead of a proper purge action, use PurgeRequest, which
    skips the check on write rights.
    """
    req = PurgeRequest(site=self.site,
                       parameters={'action': 'purge',
                                   'titles': [self.title()]})
    response = req.submit()
    failure_msg = 'Purge action failed for %s' % self
    assert 'purge' in response, failure_msg
    assert 'purged' in response['purge'][0], failure_msg

def _get_page_mappings(self):
"""Associate label and number for each page linked to the index."""
# Clean cache, if any.
Expand Down Expand Up @@ -517,7 +552,7 @@ def _get_page_mappings(self):
self._soup = BeautifulSoup(self._parsed_text, 'html.parser')
if not self._soup.find_all('a', attrs=attrs):
raise ValueError(
'Missing class="qualityN prp-pagequality-N" or'
'Missing class="qualityN prp-pagequality-N" or '
'class="new" in: %s.'
% self)

Expand Down
4 changes: 2 additions & 2 deletions pywikibot/site.py
Expand Up @@ -5739,7 +5739,7 @@ def create_warnings_list(response):
3)
if isinstance(ignore_warnings, Iterable):
ignored_warnings = ignore_warnings
ignore_warnings = lambda warnings: all( # noqa: E731
ignore_warnings = lambda warnings: all( # flake8: disable=E731
w.code in ignored_warnings for w in warnings)
ignore_all_warnings = not callable(ignore_warnings) and ignore_warnings
if text is None:
Expand Down Expand Up @@ -6816,7 +6816,7 @@ def property_namespace(self):
def _get_baserevid(self, claim, baserevid):
"""Check that claim.on_item is set and matches baserevid if used."""
if not claim.on_item:
issue_deprecation_warning('claim without on_item set', 3)
issue_deprecation_warning('claim without on_item set', None, 3)
if not baserevid:
warn('Neither claim.on_item nor baserevid provided',
UserWarning, 3)
Expand Down
10 changes: 9 additions & 1 deletion pywikibot/site_detect.py
Expand Up @@ -54,6 +54,8 @@ def __init__(self, fromurl):
r = fetch(fromurl)
if r.status == 503:
raise ServerError('Service Unavailable')
if r.status == 104:
raise ServerError('Connection reset by peer')

if fromurl != r.data.url:
pywikibot.log('{0} redirected to {1}'.format(fromurl, r.data.url))
Expand Down Expand Up @@ -104,6 +106,10 @@ def __init__(self, fromurl):
self.version < MediaWikiVersion('1.14')):
raise RuntimeError('Unsupported version: {0}'.format(self.version))

def __repr__(self):
    """Return a representation showing the class name and source URL."""
    return '%s("%s")' % (self.__class__.__name__, self.fromurl)

@property
def langs(self):
"""Build interwikimap."""
Expand Down Expand Up @@ -157,7 +163,9 @@ def _fetch_old_version(self):
def _parse_post_117(self):
"""Parse 1.17+ siteinfo data."""
response = fetch(self.api + '?action=query&meta=siteinfo&format=json')
info = json.loads(response.content)
# remove leading newlines and the Byte Order Mark (BOM), see T128992
content = response.content.strip().lstrip('\uFEFF')
info = json.loads(content)
self.private_wiki = ('error' in info and
info['error']['code'] == 'readapidenied')
if self.private_wiki:
Expand Down
4 changes: 2 additions & 2 deletions pywikibot/tools/__init__.py
Expand Up @@ -147,9 +147,9 @@ def count(start=0, step=1):


else:
from collections import Counter # noqa ; unused
from collections import Counter # flake8: disable=F401 (unused import)
from collections import OrderedDict
from itertools import count # noqa ; unused
from itertools import count # flake8: disable=F401 (unused import)


def empty_iterator():
Expand Down
4 changes: 2 additions & 2 deletions pywikibot/tools/ip.py
Expand Up @@ -15,7 +15,7 @@
from distutils.version import StrictVersion
from warnings import warn

from pywikibot.tools import DeprecatedRegex
from pywikibot.tools import DeprecatedRegex, UnicodeType

_ipaddress_e = _ipaddr_e = _ipaddr_version = None

Expand Down Expand Up @@ -61,7 +61,7 @@

def ip_address_patched(IP):
"""Safe ip_address."""
return orig_ip_address(unicode(IP)) # noqa
return orig_ip_address(UnicodeType(IP))

ip_address = ip_address_patched
except ValueError:
Expand Down
5 changes: 3 additions & 2 deletions pywikibot/userinterfaces/gui.py
Expand Up @@ -36,6 +36,7 @@
import pywikibot

from pywikibot import __url__
from pywikibot.tools import PY2, UnicodeType


class TextEditor(ScrolledText):
Expand Down Expand Up @@ -415,8 +416,8 @@ def pressedOK(self):
# if the editbox contains ASCII characters only, get() will
# return string, otherwise unicode (very annoying). We only want
# it to return unicode, so we work around this.
if sys.version[0] == 2 and isinstance(self.text, str):
self.text = unicode(self.text) # noqa
if PY2 and isinstance(self.text, str):
self.text = UnicodeType(self.text)
self.parent.destroy()

def debug(self, event=None):
Expand Down
2 changes: 1 addition & 1 deletion pywikibot/userinterfaces/terminal_interface_base.py
Expand Up @@ -228,7 +228,7 @@ def _raw_input(self):
if not PY2:
return input()
else:
return raw_input() # noqa
return raw_input()

def input(self, question, password=False, default='', force=False):
"""
Expand Down
8 changes: 4 additions & 4 deletions scripts/flickrripper.py
Expand Up @@ -51,7 +51,7 @@
except ImportError as e:
print('This script requires the python flickrapi module. \n'
'See: http://stuvel.eu/projects/flickrapi') # noqa: print
print(e) # noqa: print
print(e) # flake8: disable=T003 (print)
sys.exit(1)

import pywikibot
Expand Down Expand Up @@ -355,7 +355,7 @@ def getPhotos(flickr, user_id=u'', group_id=u'', photoset_id=u'',
user_id=user_id, tags=tags,
per_page='100', page='1')
pages = photos.find('photos').attrib['pages']
gen = lambda i: flickr.groups_pools_getPhotos( # noqa: E731
gen = lambda i: flickr.groups_pools_getPhotos( # flake8: disable=E731
group_id=group_id, user_id=user_id, tags=tags,
per_page='100', page=i
).find('photos').getchildren()
Expand All @@ -365,7 +365,7 @@ def getPhotos(flickr, user_id=u'', group_id=u'', photoset_id=u'',
photos = flickr.photosets_getPhotos(photoset_id=photoset_id,
per_page='100', page='1')
pages = photos.find('photoset').attrib['pages']
gen = lambda i: flickr.photosets_getPhotos( # noqa: E731
gen = lambda i: flickr.photosets_getPhotos( # flake8: disable=E731
photoset_id=photoset_id, per_page='100', page=i
).find('photoset').getchildren()
# https://www.flickr.com/services/api/flickr.people.getPublicPhotos.html
Expand All @@ -374,7 +374,7 @@ def getPhotos(flickr, user_id=u'', group_id=u'', photoset_id=u'',
photos = flickr.people_getPublicPhotos(user_id=user_id,
per_page='100', page='1')
pages = photos.find('photos').attrib['pages']
gen = lambda i: flickr.people_getPublicPhotos( # noqa: E731
gen = lambda i: flickr.people_getPublicPhotos( # flake8: disable=E731
user_id=user_id, per_page='100', page=i
).find('photos').getchildren()
for i in range(1, int(pages) + 1):
Expand Down
4 changes: 2 additions & 2 deletions scripts/interwiki.py
Expand Up @@ -2332,10 +2332,10 @@ def compareLanguages(old, new, insite):
if not globalvar.summary and \
len(adding) + len(removing) + len(modifying) <= 3:
# Use an extended format for the string linking to all added pages.
fmt = lambda d, site: unicode(d[site]) # noqa: E731
fmt = lambda d, site: unicode(d[site]) # flake8: disable=E731
else:
# Use short format, just the language code
fmt = lambda d, site: site.code # noqa: E731
fmt = lambda d, site: site.code # flake8: disable=E731

mods = mcomment = u''

Expand Down
5 changes: 3 additions & 2 deletions scripts/maintenance/cache.py
Expand Up @@ -76,8 +76,9 @@

from pywikibot.data import api

from pywikibot.page import User # noqa
from pywikibot.site import APISite, DataSite, LoginStatus # noqa
# The following attributes are used by eval()
from pywikibot.page import User # flake8: disable=F401 (unused import)
from pywikibot.site import APISite, DataSite, LoginStatus # flake8: disable=F401


class ParseError(Exception):
Expand Down
10 changes: 8 additions & 2 deletions scripts/nowcommons.py
Expand Up @@ -51,8 +51,8 @@
#
# (C) Wikipedian, 2006-2007
# (C) Siebrand Mazeland, 2007-2008
# (C) xqt, 2010-2014
# (C) Pywikibot team, 2006-2015
# (C) xqt, 2010-2016
# (C) Pywikibot team, 2006-2016
#
# Distributed under the terms of the MIT license.
#
Expand Down Expand Up @@ -451,6 +451,12 @@ def run(self):
except (pywikibot.NoPage, pywikibot.IsRedirectPage) as e:
pywikibot.output(u'%s' % e[0])
continue
else:
self._treat_counter += 1
if not self._treat_counter:
pywikibot.output(
'No transcluded files found for %s.' % self.ncTemplates[0])
self.exit()


def main(*args):
Expand Down

0 comments on commit ce46a21

Please sign in to comment.