Commit 54309a5
style: format code with Black
This commit fixes the style issues introduced in 9c693cb according to the output
from Black.

Details: None
deepsource-autofix[bot] committed Nov 30, 2023
1 parent 9c693cb commit 54309a5
Showing 30 changed files with 1,118 additions and 927 deletions.
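
Since every change in this commit is mechanical, it can help to see what Black actually does to code like this. The snippet below is a minimal, hypothetical sketch (not part of the commit) that runs a fragment written in the pre-commit style through Black's Python API; black.format_str and black.Mode are part of Black's public API, while the fragment itself is only an illustration.

import black

# A fragment in the pre-commit style: single-quoted strings and a
# short dict spread over several lines.
src = """\
DEFAULT_HOSTNAME = 'bitbucket.org'
KNOWN_REPOS_BY_NAME = {
    'monit': KNOWN_REPO_URLS['mmonit.com']
}
"""

# format_str() returns the source as Black would rewrite it: double quotes
# everywhere, and the short dict collapsed onto one line, matching the
# changes shown in the diffs below.
print(black.format_str(src, mode=black.Mode()))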
24 changes: 11 additions & 13 deletions lastversion/BitBucketRepoSession.py
@@ -5,21 +5,19 @@
 
 
 class BitBucketRepoSession(ProjectHolder):
-    DEFAULT_HOSTNAME = 'bitbucket.org'
+    DEFAULT_HOSTNAME = "bitbucket.org"
     CAN_BE_SELF_HOSTED = True
     KNOWN_REPO_URLS = {
-        'mmonit.com': {
-            'repo': 'tildeslash/monit',
+        "mmonit.com": {
+            "repo": "tildeslash/monit",
             # get URL from the official website because it is a "prepared"
             # source that has the `./configure` script available
-            'release_url_format': "https://mmonit.com/{name}/dist/{name}-"
-                                  "{version}.tar.gz"
+            "release_url_format": "https://mmonit.com/{name}/dist/{name}-"
+            "{version}.tar.gz",
         }
     }
 
-    KNOWN_REPOS_BY_NAME = {
-        'monit': KNOWN_REPO_URLS['mmonit.com']
-    }
+    KNOWN_REPOS_BY_NAME = {"monit": KNOWN_REPO_URLS["mmonit.com"]}
 
     def __init__(self, repo, hostname):
         super(BitBucketRepoSession, self).__init__(repo, hostname)
@@ -33,9 +31,9 @@ def get_latest(self, pre_ok=False, major=None):
             f"https://api.bitbucket.org/2.0/repositories/{self.repo}/downloads"
         )
         data = response.json()
-        release = data['values'][0]
-        version = self.sanitize_version(release['name'], pre_ok, major)
-        release['version'] = version
-        release['tag_name'] = release['name']
-        release['tag_date'] = parser.parse(release['created_on'])
+        release = data["values"][0]
+        version = self.sanitize_version(release["name"], pre_ok, major)
+        release["version"] = version
+        release["tag_name"] = release["name"]
+        release["tag_date"] = parser.parse(release["created_on"])
         return release
35 changes: 16 additions & 19 deletions lastversion/FeedRepoSession.py
@@ -11,10 +11,10 @@
 
 class FeedRepoSession(ProjectHolder):
     KNOWN_REPOS_BY_NAME = {
-        'filezilla': {
-            'repo': 'filezilla',
-            'hostname': 'filezilla-project.org',
-            'only': 'FileZilla Client'
+        "filezilla": {
+            "repo": "filezilla",
+            "hostname": "filezilla-project.org",
+            "only": "FileZilla Client",
         }
     }
     CAN_BE_SELF_HOSTED = True
@@ -26,6 +26,7 @@ def find_feed(self, site):
         """Find the feed for a given site"""
         # noinspection PyPep8Naming
         from bs4 import BeautifulSoup as bs4
+
         raw = self.get(site).text
         result = []
         possible_feeds = []
@@ -49,7 +50,7 @@ def find_feed(self, site):
            if not href:
                continue
            if "xml" in href or "rss" in href or "feed" in href:
-                possible_feeds.append(base + '/' + href.lstrip('/'))
+                possible_feeds.append(base + "/" + href.lstrip("/"))
        for url in list(set(possible_feeds)):
            f = feedparser.parse(url)
            if len(f.entries) > 0 and url not in result:
@@ -59,11 +60,11 @@
     def __init__(self, repo, hostname):
         super(FeedRepoSession, self).__init__(repo, hostname)
         self.home_soup = None
-        feeds = self.find_feed('https://' + hostname + '/')
+        feeds = self.find_feed("https://" + hostname + "/")
         if not feeds:
             return
         self.hostname = hostname
-        log.info('Using feed URL: %s', feeds[0])
+        log.info("Using feed URL: %s", feeds[0])
         self.feed_url = feeds[0]
 
     def is_instance(self):
@@ -79,21 +80,17 @@ def get_latest(self, pre_ok=False, major=None):
         r = self.get(self.feed_url)
         feed = feedparser.parse(r.text)
         for tag in feed.entries:
-            tag_name = tag['title']
+            tag_name = tag["title"]
             version = self.sanitize_version(tag_name, pre_ok, major)
             if not version:
                 continue
-            if not ret or version > ret['version']:
+            if not ret or version > ret["version"]:
                 ret = tag
-                tag['tag_name'] = tag['title']
-                tag['version'] = version
-                if 'published_parsed' in tag:
+                tag["tag_name"] = tag["title"]
+                tag["version"] = version
+                if "published_parsed" in tag:
                     # converting from struct
-                    tag['tag_date'] = datetime.datetime(
-                        *tag['published_parsed'][:6]
-                    )
-                elif 'updated_parsed' in tag:
-                    tag['tag_date'] = datetime.datetime(
-                        *tag['updated_parsed'][:6]
-                    )
+                    tag["tag_date"] = datetime.datetime(*tag["published_parsed"][:6])
+                elif "updated_parsed" in tag:
+                    tag["tag_date"] = datetime.datetime(*tag["updated_parsed"][:6])
         return ret
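
The "# converting from struct" branch above deals with the fact that feedparser exposes entry dates as time.struct_time values. Below is a small, self-contained illustration of that conversion; the date value is made up for the example, not taken from any real feed.

import datetime
import time

# feedparser's published_parsed/updated_parsed fields are time.struct_time;
# the first six fields (year .. second) are enough to build a datetime,
# which is what get_latest() does above.
published_parsed = time.struct_time((2023, 11, 30, 12, 0, 0, 3, 334, -1))
tag_date = datetime.datetime(*published_parsed[:6])
print(tag_date)  # 2023-11-30 12:00:00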
67 changes: 35 additions & 32 deletions lastversion/GitLabRepoSession.py
@@ -13,7 +13,7 @@
 
 
 class GitLabRepoSession(ProjectHolder):
-    DEFAULT_HOSTNAME = 'gitlab.com'
+    DEFAULT_HOSTNAME = "gitlab.com"
     CAN_BE_SELF_HOSTED = True
     # Domains gitlab.example.com
     SUBDOMAIN_INDICATOR = "gitlab"
@@ -25,15 +25,15 @@ def __init__(self, repo, hostname):
         if not self.hostname:
             self.hostname = self.DEFAULT_HOSTNAME
         if self.pa_token:
-            log.info('Using Personal Access token.')
-            self.headers.update({'Private-Token': self.pa_token})
-        self.api_base = f'https://{self.hostname}/api/v4'
-        self.repo_id = self.repo.replace('/', '%2F')
+            log.info("Using Personal Access token.")
+            self.headers.update({"Private-Token": self.pa_token})
+        self.api_base = f"https://{self.hostname}/api/v4"
+        self.repo_id = self.repo.replace("/", "%2F")
         # lazy loaded dict cache of /releases response keyed by tag, only first page
         self.formal_releases_by_tag = None
 
     def repo_query(self, uri):
-        url = f'{self.api_base}/projects/{self.repo_id}{uri}'
+        url = f"{self.api_base}/projects/{self.repo_id}{uri}"
         return self.get(url)
 
     def ensure_formal_releases_fetched(self):
@@ -43,17 +43,17 @@ def ensure_formal_releases_fetched(self):
         """
         if self.formal_releases_by_tag is None:
             self.formal_releases_by_tag = {}
-            r = self.repo_query('/releases')
+            r = self.repo_query("/releases")
             if r.status_code == 200:
                 for release in r.json():
-                    self.formal_releases_by_tag[release['tag_name']] = release
+                    self.formal_releases_by_tag[release["tag_name"]] = release
 
     def get_formal_release_for_tag(self, tag):
         """Get formal release for a given GitLab tag"""
         self.ensure_formal_releases_fetched()
         # no releases in /releases means no
         if self.formal_releases_by_tag and tag not in self.formal_releases_by_tag:
-            r = self.repo_query(f'/releases/{tag}')
+            r = self.repo_query(f"/releases/{tag}")
             if r.status_code == 200:
                 self.formal_releases_by_tag[tag] = r.json()
@@ -64,50 +64,51 @@ def get_latest(self, pre_ok=False, major=None):
         ret = None
 
         # gitlab returns tags by updated in desc order; this is just what we want :)
-        r = self.repo_query('/repository/tags')
+        r = self.repo_query("/repository/tags")
         if r.status_code == 200:
             for t in r.json():
-                tag = t['name']
-                tag_date = parser.parse(t['commit']['created_at'])
+                tag = t["name"]
+                tag_date = parser.parse(t["commit"]["created_at"])
                 version = self.sanitize_version(tag, pre_ok, major)
                 if not version:
                     continue
-                if ret and tag_date + timedelta(days=365) < ret['tag_date']:
-                    log.info('The version %s is newer, but is too old!', version)
+                if ret and tag_date + timedelta(days=365) < ret["tag_date"]:
+                    log.info("The version %s is newer, but is too old!", version)
                     break
-                if not ret or ret and version > ret['version']:
+                if not ret or ret and version > ret["version"]:
                     log.info("Setting version as current selection: %s.", version)
                     ret = t
-                    ret['tag_name'] = tag
-                    ret['tag_date'] = tag_date
-                    ret['version'] = version
-                    ret['type'] = 'tag'
+                    ret["tag_name"] = tag
+                    ret["tag_date"] = tag_date
+                    ret["version"] = version
+                    ret["type"] = "tag"
         if ret:
-            formal_release = self.get_formal_release_for_tag(ret['tag_name'])
+            formal_release = self.get_formal_release_for_tag(ret["tag_name"])
             if formal_release:
                 ret.update(formal_release)
         return ret
 
     def get_assets(self, release, short_urls, assets_filter=None):
         urls = []
-        assets = release.get('assets', []).get('links', [])
+        assets = release.get("assets", []).get("links", [])
         arch_matched_assets = []
-        if not assets_filter and platform.machine() in ['x86_64', 'AMD64']:
+        if not assets_filter and platform.machine() in ["x86_64", "AMD64"]:
             for asset in assets:
-                if 'x86_64' in asset['name']:
+                if "x86_64" in asset["name"]:
                     arch_matched_assets.append(asset)
             if arch_matched_assets:
                 assets = arch_matched_assets
 
         for asset in assets:
-            if assets_filter and not re.search(assets_filter, asset['name']):
+            if assets_filter and not re.search(assets_filter, asset["name"]):
                continue
-            if not assets_filter and asset_does_not_belong_to_machine(asset['name']):
+            if not assets_filter and asset_does_not_belong_to_machine(asset["name"]):
                 log.info(
-                    'Skipping asset %s as it does not belong to this machine.', asset['name']
+                    "Skipping asset %s as it does not belong to this machine.",
+                    asset["name"],
                 )
                 continue
-            urls.append(asset['url'])
+            urls.append(asset["url"])
 
         if not urls:
             download_url = self.release_download_url(release, short_urls)
@@ -118,12 +119,14 @@ def get_assets(self, release, short_urls, assets_filter=None):
     def release_download_url(self, release, shorter=False):
         """Get release download URL."""
         if shorter:
-            log.info('Shorter URLs are not supported for GitLab yet')
+            log.info("Shorter URLs are not supported for GitLab yet")
         # https://gitlab.com/onedr0p/sonarr-episode-prune/-/archive/v3.0.0/sonarr-episode-prune-v3.0.0.tar.gz
-        ext = 'zip' if os.name == 'nt' else 'tar.gz'
-        tag = release['tag_name']
-        url_format = 'https://{}/{}/-/archive/{}/{}-{}.{}'
-        return url_format.format(self.hostname, self.repo, tag, self.repo.split('/')[1], tag, ext)
+        ext = "zip" if os.name == "nt" else "tar.gz"
+        tag = release["tag_name"]
+        url_format = "https://{}/{}/-/archive/{}/{}-{}.{}"
+        return url_format.format(
+            self.hostname, self.repo, tag, self.repo.split("/")[1], tag, ext
+        )
 
     def repo_license(self, tag):
         # TODO implement
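
As a quick sanity check on the URL scheme used by release_download_url above, the reformatted format string still reproduces the example URL from the code comment. The standalone snippet below is illustrative only; it simply repeats that substitution outside the class, with the values taken from the example URL.

hostname = "gitlab.com"
repo = "onedr0p/sonarr-episode-prune"
tag = "v3.0.0"
ext = "tar.gz"  # release_download_url() picks "zip" on Windows (os.name == "nt")

# Same substitution as release_download_url() performs.
url = "https://{}/{}/-/archive/{}/{}-{}.{}".format(
    hostname, repo, tag, repo.split("/")[1], tag, ext
)
print(url)
# https://gitlab.com/onedr0p/sonarr-episode-prune/-/archive/v3.0.0/sonarr-episode-prune-v3.0.0.tar.gz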
(Diffs for the remaining 27 changed files are not shown.)