-
Notifications
You must be signed in to change notification settings - Fork 15
/
consolidate.py
executable file
·150 lines (112 loc) · 5.58 KB
/
consolidate.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
#!/usr/bin/env python3
import asyncio
import dataclasses
import json
from pathlib import Path
from typing import AsyncIterable, Dict, Optional, Tuple
import json5
from docker.registry import DockerRegistry
from extension import Extension
from extension.models import ExtensionMetadata, ExtensionVersion
from logger import Logger
from utils import EnhancedJSONEncoder
MANIFEST_FILE = "manifest.json"
MANIFEST_LOG = "manifest.log"
REPO_ROOT_URL = "https://raw.githubusercontent.com/bluerobotics/BlueOS-Extensions-Repository/master/repos"
@dataclasses.dataclass
class RepositoryEntry(ExtensionMetadata):
    """
    Represents a repository entry in the manifest output.

    Extends ExtensionMetadata with the collection of published versions.

    Attributes:
        versions (Dict[str, ExtensionVersion]): Available extension versions,
            presumably keyed by version string — confirm against ExtensionVersion.
    """

    # Mutable default handled correctly via default_factory (never a shared dict).
    versions: Dict[str, ExtensionVersion] = dataclasses.field(default_factory=dict)
class Consolidator:
    """
    Consolidates the BlueOS extensions repository: walks every
    ``metadata.json`` under the ``repos`` folder and generates a manifest
    file listing all available extensions and their versions.
    """

    @staticmethod
    def repo_folder() -> Path:
        """Return the path of the ``repos`` folder next to this file's parent directory."""
        return Path(__file__).parent.parent.joinpath("repos")

    def fetch_remote_extension_logos(
        self, identifier: str, repository: Path, repositories: Path
    ) -> Tuple[Optional[str], Optional[str]]:
        """
        Build the remote (REPO_ROOT_URL-based) URLs for the extension and
        company logos of a given repository entry.

        Args:
            identifier (str): Extension identifier (used for log messages).
            repository (Path): Path to the extension's metadata.json file.
            repositories (Path): Path to the root repositories folder.

        Returns:
            Tuple[Optional[str], Optional[str]]: Remote extension and company
            logo URLs; None for any logo file that does not exist on disk.
        """
        # Layout is repos/<company>/<extension>/metadata.json, so the company
        # logo lives two levels up and the extension logo one level up.
        company_logo_file = (repository / "../../company_logo.png").resolve()
        extension_logo_file = (repository / "../extension_logo.png").resolve()

        # BUGFIX: the original built relative Path objects unconditionally and
        # then tested their truthiness — a Path is always truthy, so the
        # "unable to find" warning below could never fire and URLs were emitted
        # even for files missing on disk. Check for existence instead.
        company_logo: Optional[Path] = None
        if company_logo_file.exists():
            company_logo = company_logo_file.relative_to(repositories.resolve())

        extension_logo: Optional[Path] = None
        if extension_logo_file.exists():
            extension_logo = extension_logo_file.relative_to(repositories.resolve())
        else:
            Logger.warning(
                identifier, f"Extension logo not found for {identifier}, trying to use company logo as alternative"
            )
            extension_logo = company_logo

        remote_extension_logo = f"{REPO_ROOT_URL}/{extension_logo}" if extension_logo else None
        remote_company_logo = f"{REPO_ROOT_URL}/{company_logo}" if company_logo else None

        if not remote_company_logo or not remote_extension_logo:
            Logger.warning(identifier, f"Unable to find extension or company logo for {identifier}")

        return remote_extension_logo, remote_company_logo

    async def fetch_extensions_metadata(self) -> AsyncIterable[ExtensionMetadata]:
        """
        Yield the metadata for every extension found in the repository.

        Metadata files that cannot be parsed or are missing required fields
        are logged and skipped.

        Yields:
            ExtensionMetadata: Metadata for each valid extension.
        """
        repos = self.repo_folder()
        for metadata_path in repos.glob("**/metadata.json"):
            # Path layout is repos/<company>/<extension>/metadata.json.
            company, extension_name = metadata_path.parts[-3:-1]
            identifier = ".".join([company, extension_name])
            Logger.info(identifier, f"Starting metadata processing for {identifier}")

            # Close the file right after parsing instead of holding it open
            # for the remainder of the loop body, as the original did.
            try:
                with open(metadata_path, "r", encoding="utf-8") as metadata_file:
                    data = json5.load(metadata_file)
            except Exception as error:  # pylint: disable=broad-except
                Logger.error(identifier, f"Skipping {identifier}, unable to parse metadata file, error: {error}")
                continue

            extension_logo, company_logo = self.fetch_remote_extension_logos(identifier, metadata_path, repos)

            try:
                # KeyError here means a required field is missing from metadata.
                metadata = ExtensionMetadata(
                    identifier=identifier,
                    name=data["name"],
                    docker=data["docker"],
                    description=data["description"],
                    website=data["website"],
                    extension_logo=extension_logo,
                    company_logo=company_logo,
                )
            except Exception as error:  # pylint: disable=broad-except
                Logger.error(identifier, f"Skipping {identifier}, invalid metadata file, error: {error}")
                continue

            # BUGFIX: yield moved out of the try block so exceptions raised by
            # the consumer of this generator are not swallowed and mislabeled
            # as "invalid metadata file".
            Logger.info(identifier, f"Finished metadata processing for {identifier}")
            yield metadata

    async def run(self) -> None:
        """
        Consolidate the repository: gather all extension metadata, inflate
        each extension with its available versions, and write the manifest.

        Side effects: writes MANIFEST_FILE and dumps the log to MANIFEST_LOG.
        """
        preview = DockerRegistry.from_preview()

        # Rate-limit reporting is best-effort; a failure must not abort the run.
        try:
            Logger.start_docker_rate_limit(await preview.get_rate_limit())
        except Exception as error:  # pylint: disable=broad-except
            print(f"Unable to fetch initial docker rate limit, error: {error}")

        extensions = [Extension(metadata) async for metadata in self.fetch_extensions_metadata()]
        # Inflate all extensions concurrently.
        await asyncio.gather(*(ext.inflate() for ext in extensions))

        # Only extensions with at least one valid version reach the manifest.
        consolidated_data = [
            RepositoryEntry(**dataclasses.asdict(ext.metadata), versions=ext.sorted_versions)
            for ext in extensions
            if ext.sorted_versions
        ]

        try:
            Logger.final_docker_rate_limit(await preview.get_rate_limit())
        except Exception as error:  # pylint: disable=broad-except
            print(f"Unable to fetch final docker rate limit, error: {error}")

        with open(MANIFEST_FILE, "w", encoding="utf-8") as manifest_file:
            manifest_file.write(json.dumps(consolidated_data, indent=4, cls=EnhancedJSONEncoder))

        Logger.dump(MANIFEST_LOG)
# Guard the entry point so importing this module for reuse or testing does
# not trigger the full consolidation run (network access + file writes).
if __name__ == "__main__":
    consolidator = Consolidator()
    asyncio.run(consolidator.run())