diff --git a/readthedocs/builds/models.py b/readthedocs/builds/models.py
index ba439a66707..973097249e1 100644
--- a/readthedocs/builds/models.py
+++ b/readthedocs/builds/models.py
@@ -247,7 +247,7 @@ def clean_build_path(self):
         try:
             path = self.get_build_path()
             if path is not None:
-                log.debug('Removing build path {0} for {1}'.format(path, self))
+                log.debug('Removing build path %s for %s', path, self)
                 rmtree(path)
         except OSError:
             log.exception('Build path cleanup failed')
diff --git a/readthedocs/builds/syncers.py b/readthedocs/builds/syncers.py
index 66ebca0c4ab..d1a4b43c8da 100644
--- a/readthedocs/builds/syncers.py
+++ b/readthedocs/builds/syncers.py
@@ -58,8 +58,7 @@ def copy(cls, path, target, is_file=False, **__):
                 mkdir_cmd = ("ssh %s@%s mkdir -p %s" % (sync_user, server, target))
                 ret = os.system(mkdir_cmd)
                 if ret != 0:
-                    log.info("COPY ERROR to app servers:")
-                    log.info(mkdir_cmd)
+                    log.error("Copy error to app servers: cmd=%s", mkdir_cmd)
                 if is_file:
                     slash = ""
                 else:
@@ -75,8 +74,7 @@ def copy(cls, path, target, is_file=False, **__):
                                 target=target))
                 ret = os.system(sync_cmd)
                 if ret != 0:
-                    log.info("COPY ERROR to app servers.")
-                    log.info(sync_cmd)
+                    log.error("Copy error to app servers: cmd=%s", sync_cmd)
 
 
 class DoubleRemotePuller(object):
@@ -100,8 +98,7 @@ def copy(cls, path, target, host, is_file=False, **__):
             )
             ret = os.system(mkdir_cmd)
             if ret != 0:
-                log.info("MKDIR ERROR to app servers:")
-                log.info(mkdir_cmd)
+                log.error("MkDir error to app servers: cmd=%s", mkdir_cmd)
         # Add a slash when copying directories
         sync_cmd = (
             "ssh {user}@{server} 'rsync -av "
@@ -114,8 +111,7 @@ def copy(cls, path, target, host, is_file=False, **__):
                 target=target))
         ret = os.system(sync_cmd)
         if ret != 0:
-            log.info("COPY ERROR to app servers.")
-            log.info(sync_cmd)
+            log.error("Copy error to app servers: cmd=%s", sync_cmd)
 
 
 class RemotePuller(object):
@@ -142,7 +138,11 @@ def copy(cls, path, target, host, is_file=False, **__):
         )
         ret = os.system(sync_cmd)
         if ret != 0:
-            log.error("COPY ERROR to app servers. Command: [{}] Return: [{}]".format(sync_cmd, ret))
+            log.error(
+                "Copy error to app servers. Command: [%s] Return: [%s]",
+                sync_cmd,
+                ret,
+            )
 
 
 class Syncer(SettingsOverrideObject):
diff --git a/readthedocs/core/management/commands/clean_builds.py b/readthedocs/core/management/commands/clean_builds.py
index 4d80df69969..da365b1a157 100644
--- a/readthedocs/core/management/commands/clean_builds.py
+++ b/readthedocs/core/management/commands/clean_builds.py
@@ -44,14 +44,19 @@ def handle(self, *args, **options):
                 version = Version.objects.get(id=build['version'])
                 latest_build = version.builds.latest('date')
                 if latest_build.date > max_date:
-                    log.warning('{0} is newer than {1}'.format(
-                        latest_build, max_date))
+                    log.warning(
+                        '%s is newer than %s',
+                        latest_build,
+                        max_date,
+                    )
                 path = version.get_build_path()
                 if path is not None:
                     log.info(
-                        ('Found stale build path for {0} '
-                         'at {1}, last used on {2}').format(
-                            version, path, latest_build.date))
+                        'Found stale build path for %s at %s, last used on %s',
+                        version,
+                        path,
+                        latest_build.date,
+                    )
                     if not options['dryrun']:
                         version.clean_build_path()
             except Version.DoesNotExist:
diff --git a/readthedocs/core/management/commands/reindex_elasticsearch.py b/readthedocs/core/management/commands/reindex_elasticsearch.py
index a2bce6df840..b736a1cd426 100644
--- a/readthedocs/core/management/commands/reindex_elasticsearch.py
+++ b/readthedocs/core/management/commands/reindex_elasticsearch.py
@@ -53,4 +53,4 @@ def handle(self, *args, **options):
                 update_search(version.pk, commit,
                               delete_non_commit_files=False)
             except Exception:
-                log.exception('Reindex failed for {}'.format(version))
+                log.exception('Reindex failed for %s', version)
diff --git a/readthedocs/core/signals.py b/readthedocs/core/signals.py
index aa2a30936c0..c054fae293a 100644
--- a/readthedocs/core/signals.py
+++ b/readthedocs/core/signals.py
@@ -57,10 +57,9 @@ def decide_if_cors(sender, request, **kwargs):  # pylint: disable=unused-argumen
             project = Project.objects.get(slug=project_slug)
         except Project.DoesNotExist:
             log.warning(
-                'Invalid project passed to domain. [{project}:{domain}'.format(
-                    project=project_slug,
-                    domain=host,
-                )
+                'Invalid project passed to domain. [%s:%s]',
+                project_slug,
+                host,
             )
             return False
diff --git a/readthedocs/core/views/hooks.py b/readthedocs/core/views/hooks.py
index 4564f7394d6..9a306aaea73 100644
--- a/readthedocs/core/views/hooks.py
+++ b/readthedocs/core/views/hooks.py
@@ -233,12 +233,13 @@ def gitlab_build(request):  # noqa: D205
         log.info(
             'GitLab webhook search: url=%s branches=%s',
             search_url,
-            branches
+            branches,
         )
         projects = get_project_from_url(search_url)
         if projects:
             return _build_url(search_url, projects, branches)
-        log.error('Project match not found: url=%s', search_url)
+
+        log.info('Project match not found: url=%s', search_url)
         return HttpResponseNotFound('Project match not found')
 
     return HttpResponse('Method not allowed, POST is required', status=405)
@@ -294,7 +295,7 @@ def bitbucket_build(request):
         log.info(
             'Bitbucket webhook search: url=%s branches=%s',
             search_url,
-            branches
+            branches,
         )
         log.debug('Bitbucket webhook payload:\n\n%s\n\n', data)
         projects = get_project_from_url(search_url)
@@ -304,10 +305,10 @@ def bitbucket_build(request):
             log.error(
                 'Commit/branch not found url=%s branches=%s',
                 search_url,
-                branches
+                branches,
             )
             return HttpResponseNotFound('Commit/branch not found')
-        log.error('Project match not found: url=%s', search_url)
+        log.info('Project match not found: url=%s', search_url)
         return HttpResponseNotFound('Project match not found')
 
     return HttpResponse('Method not allowed, POST is required', status=405)
diff --git a/readthedocs/doc_builder/environments.py b/readthedocs/doc_builder/environments.py
index 7123cf983cf..4419af5c84e 100644
--- a/readthedocs/doc_builder/environments.py
+++ b/readthedocs/doc_builder/environments.py
@@ -445,12 +445,12 @@ def handle_exception(self, exc_type, exc_value, _):
         a failure and the context will be gracefully exited.
""" if exc_type is not None: - log.error(LOG_TEMPLATE - .format(project=self.project.slug, - version=self.version.slug, - msg=exc_value), - exc_info=True) if not issubclass(exc_type, BuildEnvironmentWarning): + log.error(LOG_TEMPLATE + .format(project=self.project.slug, + version=self.version.slug, + msg=exc_value), + exc_info=True) self.failure = exc_value return True @@ -574,10 +574,9 @@ def update_build(self, state=None): try: api_v2.build(self.build['id']).put(self.build) except HttpClientError as e: - log.error( - "Unable to update build: id=%d error=%s", + log.exception( + "Unable to update build: id=%d", self.build['id'], - e.content, ) except Exception: log.exception("Unknown build exception") diff --git a/readthedocs/oauth/services/base.py b/readthedocs/oauth/services/base.py index bc56e0057e3..ee1a6b480ee 100644 --- a/readthedocs/oauth/services/base.py +++ b/readthedocs/oauth/services/base.py @@ -138,7 +138,7 @@ def paginate(self, url, **kwargs): return results # Catch specific exception related to OAuth except InvalidClientIdError: - log.error('access_token or refresh_token failed: %s', url) + log.warning('access_token or refresh_token failed: %s', url) raise Exception('You should reconnect your account') # Catch exceptions with request or deserializing JSON except (RequestException, ValueError): @@ -149,7 +149,10 @@ def paginate(self, url, **kwargs): except ValueError: debug_data = resp.content log.debug( - 'paginate failed at %s with response: %s', url, debug_data) + 'Paginate failed at %s with response: %s', + url, + debug_data, + ) else: return [] diff --git a/readthedocs/oauth/services/bitbucket.py b/readthedocs/oauth/services/bitbucket.py index 5f997a2b26a..b14d1de84e1 100644 --- a/readthedocs/oauth/services/bitbucket.py +++ b/readthedocs/oauth/services/bitbucket.py @@ -45,8 +45,7 @@ def sync_repositories(self): for repo in repos: self.create_repository(repo) except (TypeError, ValueError) as e: - log.error('Error syncing Bitbucket repositories: %s', - str(e), exc_info=True) + log.exception('Error syncing Bitbucket repositories') raise Exception('Could not sync your Bitbucket repositories, ' 'try reconnecting your account') @@ -80,8 +79,7 @@ def sync_teams(self): for repo in repos: self.create_repository(repo, organization=org) except ValueError as e: - log.error('Error syncing Bitbucket organizations: %s', - str(e), exc_info=True) + log.exception('Error syncing Bitbucket organizations') raise Exception('Could not sync your Bitbucket team repositories, ' 'try reconnecting your account') @@ -220,19 +218,27 @@ def setup_webhook(self, project): recv_data = resp.json() integration.provider_data = recv_data integration.save() - log.info('Bitbucket webhook creation successful for project: %s', - project) + log.info( + 'Bitbucket webhook creation successful for project: %s', + project, + ) return (True, resp) # Catch exceptions with request or deserializing JSON except (RequestException, ValueError): - log.error('Bitbucket webhook creation failed for project: %s', - project, exc_info=True) + log.exception( + 'Bitbucket webhook creation failed for project: %s', + project, + ) else: - log.error('Bitbucket webhook creation failed for project: %s', - project) + log.exception( + 'Bitbucket webhook creation failed for project: %s', + project, + ) try: - log.debug('Bitbucket webhook creation failure response: %s', - resp.json()) + log.debug( + 'Bitbucket webhook creation failure response: %s', + resp.json(), + ) except ValueError: pass return (False, resp) @@ -263,20 +269,29 @@ def 
                 recv_data = resp.json()
                 integration.provider_data = recv_data
                 integration.save()
-                log.info('Bitbucket webhook update successful for project: %s',
-                         project)
+                log.info(
+                    'Bitbucket webhook update successful for project: %s',
+                    project,
+                )
                 return (True, resp)
         # Catch exceptions with request or deserializing JSON
         except (KeyError, RequestException, ValueError):
-            log.error('Bitbucket webhook update failed for project: %s',
-                      project, exc_info=True)
+            log.exception(
+                'Bitbucket webhook update failed for project: %s',
+                project,
+            )
         else:
-            log.error('Bitbucket webhook update failed for project: %s',
-                      project)
+            log.exception(
+                'Bitbucket webhook update failed for project: %s',
+                project,
+            )
             # Response data should always be JSON, still try to log if not though
             try:
                 debug_data = resp.json()
             except ValueError:
                 debug_data = resp.content
-            log.debug('Bitbucket webhook update failure response: %s', debug_data)
+            log.debug(
+                'Bitbucket webhook update failure response: %s',
+                debug_data,
+            )
             return (False, resp)
diff --git a/readthedocs/oauth/services/github.py b/readthedocs/oauth/services/github.py
index 7be4a38c20b..609332bf4af 100644
--- a/readthedocs/oauth/services/github.py
+++ b/readthedocs/oauth/services/github.py
@@ -42,8 +42,7 @@ def sync_repositories(self):
             for repo in repos:
                 self.create_repository(repo)
         except (TypeError, ValueError) as e:
-            log.error('Error syncing GitHub repositories: %s',
-                      str(e), exc_info=True)
+            log.exception('Error syncing GitHub repositories')
             raise Exception('Could not sync your GitHub repositories, '
                             'try reconnecting your account')
@@ -62,8 +61,7 @@ def sync_organizations(self):
                 for repo in org_repos:
                     self.create_repository(repo, organization=org_obj)
         except (TypeError, ValueError) as e:
-            log.error('Error syncing GitHub organizations: %s',
-                      str(e), exc_info=True)
+            log.exception('Error syncing GitHub organizations')
             raise Exception('Could not sync your GitHub organizations, '
                             'try reconnecting your account')
@@ -211,18 +209,25 @@ def setup_webhook(self, project):
                 return (True, resp)
         # Catch exceptions with request or deserializing JSON
         except (RequestException, ValueError):
-            log.error('GitHub webhook creation failed for project: %s',
-                      project, exc_info=True)
+            log.exception(
+                'GitHub webhook creation failed for project: %s',
+                project,
+            )
         else:
-            log.error('GitHub webhook creation failed for project: %s',
-                      project)
-            # Response data should always be JSON, still try to log if not though
+            log.exception(
+                'GitHub webhook creation failed for project: %s',
+                project,
+            )
+            # Response data should always be JSON, still try to log if not
+            # though
             try:
                 debug_data = resp.json()
             except ValueError:
                 debug_data = resp.content
-            log.debug('GitHub webhook creation failure response: %s',
-                      debug_data)
+            log.debug(
+                'GitHub webhook creation failure response: %s',
+                debug_data,
+            )
             return (False, resp)
 
     def update_webhook(self, project, integration):
@@ -251,22 +256,30 @@ def update_webhook(self, project, integration):
                 recv_data = resp.json()
                 integration.provider_data = recv_data
                 integration.save()
-                log.info('GitHub webhook creation successful for project: %s',
-                         project)
+                log.info(
+                    'GitHub webhook creation successful for project: %s',
+                    project,
+                )
                 return (True, resp)
         # Catch exceptions with request or deserializing JSON
         except (RequestException, ValueError):
-            log.error('GitHub webhook update failed for project: %s',
-                      project, exc_info=True)
+            log.exception(
+                'GitHub webhook update failed for project: %s',
+                project,
+            )
         else:
-            log.error('GitHub webhook update failed for project: %s',
-                      project)
+            log.exception(
+                'GitHub webhook update failed for project: %s',
+                project,
+            )
             try:
                 debug_data = resp.json()
             except ValueError:
                 debug_data = resp.content
-            log.debug('GitHub webhook creation failure response: %s',
-                      debug_data)
+            log.debug(
+                'GitHub webhook creation failure response: %s',
+                debug_data,
+            )
             return (False, resp)
 
     @classmethod
diff --git a/readthedocs/oauth/services/gitlab.py b/readthedocs/oauth/services/gitlab.py
index 4919ef88a55..62313c89d66 100644
--- a/readthedocs/oauth/services/gitlab.py
+++ b/readthedocs/oauth/services/gitlab.py
@@ -336,7 +336,10 @@ def update_webhook(self, project, integration):
             log.exception(
                 'GitLab webhook update failed for project: %s', project)
         else:
-            log.error('GitLab webhook update failed for project: %s', project)
+            log.exception(
+                'GitLab webhook update failed for project: %s',
+                project,
+            )
             try:
                 debug_data = resp.json()
             except ValueError:
diff --git a/readthedocs/payments/forms.py b/readthedocs/payments/forms.py
index 0d655ef1584..983dfe638f4 100644
--- a/readthedocs/payments/forms.py
+++ b/readthedocs/payments/forms.py
@@ -166,8 +166,7 @@ def clean(self):
                 forms.ValidationError(str(e)),
             )
         except stripe.error.StripeError as e:
-            log.error('There was a problem communicating with Stripe: %s',
-                      str(e), exc_info=True)
+            log.exception('There was a problem communicating with Stripe')
             raise forms.ValidationError(
                 _('There was a problem communicating with Stripe'))
         return cleaned_data
diff --git a/readthedocs/projects/search_indexes.py b/readthedocs/projects/search_indexes.py
index 5300fea5b9a..93b041e22c2 100644
--- a/readthedocs/projects/search_indexes.py
+++ b/readthedocs/projects/search_indexes.py
@@ -85,8 +85,11 @@ def prepare_text(self, obj):
             with codecs.open(file_path, encoding='utf-8', mode='r') as f:
                 content = f.read()
         except IOError as e:
-            log.info('(Search Index) Unable to index file: %s, error :%s',
-                     file_path, e)
+            log.info(
+                '(Search Index) Unable to index file: %s',
+                file_path,
+                exc_info=True,
+            )
             return
         log.debug('(Search Index) Indexing %s:%s', obj.project, obj.path)
         document_pyquery_path = getattr(settings, 'DOCUMENT_PYQUERY_PATH',
diff --git a/readthedocs/projects/tasks.py b/readthedocs/projects/tasks.py
index 4eec679380b..947ac8904b3 100644
--- a/readthedocs/projects/tasks.py
+++ b/readthedocs/projects/tasks.py
@@ -183,12 +183,15 @@ def run(self, version_pk):  # pylint: disable=arguments-differ
             self.project = self.version.project
             self.sync_repo()
             return True
-        # Catch unhandled errors when syncing
+        except RepositoryError:
+            # Do not log as ERROR handled exceptions
+            log.warning('There was an error with the repository', exc_info=True)
         except Exception:
+            # Catch unhandled errors when syncing
             log.exception(
                 'An unhandled exception was raised during VCS syncing',
             )
-            return False
+        return False
 
 
 class UpdateDocsTask(SyncRepositoryMixin, Task):
@@ -513,7 +516,10 @@ def update_app_instances(self, html=False, localmedia=False, search=False,
                 'built': True,
             })
         except HttpClientError:
-            log.exception('Updating version failed, skipping file sync: version=%s' % self.version)
+            log.exception(
+                'Updating version failed, skipping file sync: version=%s',
+                self.version,
+            )
 
         # Broadcast finalization steps to web application instances
         broadcast(
@@ -898,7 +904,7 @@ def _manage_imported_files(version, path, commit):
                 name=filename,
             )
         except ImportedFile.MultipleObjectsReturned:
-            log.exception('Error creating ImportedFile')
+            log.warning('Error creating ImportedFile')
             continue
         if obj.md5 != md5:
             obj.md5 = md5
diff --git a/readthedocs/projects/views/base.py b/readthedocs/projects/views/base.py
index db6e1195fbe..3db095f9eaf 100644
--- a/readthedocs/projects/views/base.py
+++ b/readthedocs/projects/views/base.py
@@ -93,9 +93,10 @@ class ProjectSpamMixin(object):
 
     def post(self, request, *args, **kwargs):
         if request.user.profile.banned:
-            log.error(
+            log.info(
                 'Rejecting project POST from shadowbanned user %s',
-                request.user)
+                request.user,
+            )
             return HttpResponseRedirect(self.get_failure_url())
         try:
             return super(ProjectSpamMixin, self).post(request, *args, **kwargs)
@@ -104,11 +105,12 @@ def post(self, request, *args, **kwargs):
             if request.user.date_joined > date_maturity:
                 request.user.profile.banned = True
                 request.user.profile.save()
-                log.error(
+                log.info(
                     'Spam detected from new user, shadowbanned user %s',
-                    request.user)
+                    request.user,
+                )
             else:
-                log.error('Spam detected from user %s', request.user)
+                log.info('Spam detected from user %s', request.user)
             return HttpResponseRedirect(self.get_failure_url())
 
     def get_failure_url(self):
diff --git a/readthedocs/projects/views/private.py b/readthedocs/projects/views/private.py
index c9d52748b2b..5ae4d68602e 100644
--- a/readthedocs/projects/views/private.py
+++ b/readthedocs/projects/views/private.py
@@ -295,8 +295,6 @@ def get(self, request, *args, **kwargs):
             messages.success(
                 request, _('Your demo project is currently being imported'))
         else:
-            for (__, msg) in list(form.errors.items()):
-                log.error(msg)
             messages.error(
                 request, _('There was a problem adding the demo project'),
             )
diff --git a/readthedocs/restapi/views/model_views.py b/readthedocs/restapi/views/model_views.py
index 1351b5c5d57..0daf74409d4 100644
--- a/readthedocs/restapi/views/model_views.py
+++ b/readthedocs/restapi/views/model_views.py
@@ -170,7 +170,7 @@ def sync_versions(self, request, **kwargs):  # noqa: D205
             added_versions.update(ret_set)
             deleted_versions = api_utils.delete_versions(project, data)
         except Exception as e:
-            log.exception('Sync Versions Error: %s', e.message)
+            log.exception('Sync Versions Error')
             return Response(
                 {
                     'error': e.message,
diff --git a/readthedocs/rtd_tests/tests/test_privacy.py b/readthedocs/rtd_tests/tests/test_privacy.py
index 5a4870e7b5e..ac8b15ac98d 100644
--- a/readthedocs/rtd_tests/tests/test_privacy.py
+++ b/readthedocs/rtd_tests/tests/test_privacy.py
@@ -32,8 +32,11 @@ def setUp(self):
     def _create_kong(self, privacy_level='private',
                      version_privacy_level='private'):
         self.client.login(username='eric', password='test')
-        log.info(("Making kong with privacy: %s and version privacy: %s"
-                  % (privacy_level, version_privacy_level)))
+        log.info(
+            "Making kong with privacy: %s and version privacy: %s",
+            privacy_level,
+            version_privacy_level,
+        )
         # Create project via project form, simulate import wizard without magic
         form = UpdateProjectForm(
             data={'repo_type': 'git',
diff --git a/readthedocs/rtd_tests/utils.py b/readthedocs/rtd_tests/utils.py
index a2a4cded84f..3651aa72df9 100644
--- a/readthedocs/rtd_tests/utils.py
+++ b/readthedocs/rtd_tests/utils.py
@@ -36,6 +36,7 @@ def make_test_git():
     chdir(directory)
 
     # Initialize and configure
+    # TODO: move the ``log.info`` call inside the ``check_output```
     log.info(check_output(['git', 'init'] + [directory], env=env))
     log.info(check_output(
         ['git', 'config', 'user.email', 'dev@readthedocs.org'],
diff --git a/readthedocs/search/parse_json.py b/readthedocs/search/parse_json.py
index 196caf2bd12..4583822ac5e 100644
--- a/readthedocs/search/parse_json.py
+++ b/readthedocs/search/parse_json.py
@@ -96,7 +96,7 @@ def process_file(filename):
         with codecs.open(filename, encoding='utf-8', mode='r') as f:
             file_contents = f.read()
     except IOError as e:
-        log.info('Unable to index file: %s, error :%s', filename, e)
+        log.info('Unable to index file: %s', filename, exc_info=True)
         return
     data = json.loads(file_contents)
     sections = []
diff --git a/readthedocs/search/utils.py b/readthedocs/search/utils.py
index 57e700a0d83..c5e47b309a7 100644
--- a/readthedocs/search/utils.py
+++ b/readthedocs/search/utils.py
@@ -62,9 +62,15 @@ def valid_mkdocs_json(file_path):
         with codecs.open(file_path, encoding='utf-8', mode='r') as f:
             content = f.read()
     except IOError as e:
-        log.warning('(Search Index) Unable to index file: %s, error: %s', file_path, e)
+        log.warning(
+            '(Search Index) Unable to index file: %s',
+            file_path,
+            exc_info=True,
+        )
         return None
 
+    # TODO: wrap this in a try/except block and use ``exc_info=True`` in the
+    # ``log.warning`` call
     page_json = json.loads(content)
     for to_check in ['url', 'content']:
         if to_check not in page_json:
@@ -80,9 +86,14 @@ def parse_path_from_file(file_path):
         with codecs.open(file_path, encoding='utf-8', mode='r') as f:
             content = f.read()
     except IOError as e:
-        log.warning('(Search Index) Unable to index file: %s, error: %s', file_path, e)
+        log.warning(
+            '(Search Index) Unable to index file: %s',
+            file_path,
+            exc_info=True,
+        )
         return ''
 
+    # TODO: wrap this in a try/except block
     page_json = json.loads(content)
     path = page_json['url']
 
@@ -104,9 +115,14 @@ def parse_content_from_file(file_path):
         with codecs.open(file_path, encoding='utf-8', mode='r') as f:
             content = f.read()
     except IOError as e:
-        log.info('(Search Index) Unable to index file: %s, error :%s', file_path, e)
+        log.info(
+            '(Search Index) Unable to index file: %s',
+            file_path,
+            exc_info=True,
+        )
         return ''
 
+    # TODO: wrap this in a try/except block
     page_json = json.loads(content)
     page_content = page_json['content']
     content = parse_content(page_content)
@@ -137,10 +153,14 @@ def parse_headers_from_file(documentation_type, file_path):
         with codecs.open(file_path, encoding='utf-8', mode='r') as f:
             content = f.read()
     except IOError as e:
-        log.info('(Search Index) Unable to index file: %s, error :%s',
-                 file_path, e)
+        log.info(
+            '(Search Index) Unable to index file: %s',
+            file_path,
+            exc_info=True,
+        )
         return ''
 
+    # TODO: wrap this in a try/except block
     page_json = json.loads(content)
     page_content = page_json['content']
     headers = parse_headers(documentation_type, page_content)
@@ -164,9 +184,14 @@ def parse_sections_from_file(documentation_type, file_path):
         with codecs.open(file_path, encoding='utf-8', mode='r') as f:
             content = f.read()
     except IOError as e:
-        log.info('(Search Index) Unable to index file: %s, error :%s', file_path, e)
+        log.info(
+            '(Search Index) Unable to index file: %s',
+            file_path,
+            exc_info=True,
+        )
         return ''
 
+    # TODO: wrap this in a try/except block
     page_json = json.loads(content)
     page_content = page_json['content']
     sections = parse_sections(documentation_type, page_content)
diff --git a/readthedocs/vcs_support/utils.py b/readthedocs/vcs_support/utils.py
index 0525156c0d2..e41a547534e 100644
--- a/readthedocs/vcs_support/utils.py
+++ b/readthedocs/vcs_support/utils.py
@@ -1,10 +1,13 @@
 """Locking utilities."""
 from __future__ import absolute_import
-from builtins import object
+
+import errno
 import logging
 import os
-import time
 import stat
+import time
+from builtins import object
+
 
 log = logging.getLogger(__name__)
@@ -56,8 +59,14 @@ def __exit__(self, exc, value, tb):
         try:
             log.info("Lock (%s): Releasing", self.name)
             os.remove(self.fpath)
-        except OSError:
-            log.exception("Lock (%s): Failed to release, ignoring...", self.name)
+        except OSError as e:
+            # We want to ignore "No such file or directory" and log any other
+            # type of error.
+            if e.errno != errno.ENOENT:
+                log.exception(
+                    "Lock (%s): Failed to release, ignoring...",
+                    self.name,
+                )
 
 
 class NonBlockingLock(object):
@@ -100,6 +109,12 @@ def __exit__(self, exc_type, exc_val, exc_tb):
         try:
             log.info("Lock (%s): Releasing", self.name)
             os.remove(self.fpath)
-        except (IOError, OSError):
-            log.error("Lock (%s): Failed to release, ignoring...", self.name,
-                      exc_info=True)
+        except (IOError, OSError) as e:
+            # We want to ignore "No such file or directory" and log any other
+            # type of error.
+            if e.errno != errno.ENOENT:
+                log.error(
+                    'Lock (%s): Failed to release, ignoring...',
+                    self.name,
+                    exc_info=True,
+                )