from transifex.api import transifex_api
from .config import PROJECT, LANGUAGE, ORGANISATION

# Module-level memo of API query results, keyed by a caller-chosen string.
_api_cache = {}


def _fetch_from_api(cache_key, api_call_func, *args, **kwargs):
    """Return ``api_call_func(*args, **kwargs)``, memoised under *cache_key*."""
    try:
        return _api_cache[cache_key]
    except KeyError:
        result = api_call_func(*args, **kwargs)
        _api_cache[cache_key] = result
        return result


def get_all_resources():
    """Return every resource of the configured project, with caching."""
    query = _fetch_from_api(
        "all_resources", transifex_api.Resource.filter, project=PROJECT
    )
    return query.all()


def get_resource_language_stats():
    """Return per-resource statistics for the configured language, with caching."""
    query = _fetch_from_api(
        "resource_language_stats",
        transifex_api.ResourceLanguageStats.filter,
        project=PROJECT,
        language=LANGUAGE,
    )
    return query.all()


def get_team_members():
    """Return all team memberships for the organisation/language, with caching."""
    memberships = _fetch_from_api(
        "team_members",
        transifex_api.TeamMembership.filter,
        organization=ORGANISATION,
        language=LANGUAGE,
    )
    # 'user' is side-loaded so callers can read usernames without extra requests.
    return memberships.include("user").all()


def get_resource_translations(resource):
    """Return translations of *resource* in the configured language (cached per resource)."""
    resource_id = resource.id if hasattr(resource, "id") else str(resource)
    query = _fetch_from_api(
        f"resource_translations_{resource_id}",
        transifex_api.ResourceTranslation.filter,
        resource=resource,
        language=LANGUAGE,
    )
    return query.all()
"RESOURCE.md" +TEAM_STATS_MD_PATH = PROJECT_ROOT / "TEAM.md" +CONTRIBUTOR_CHART_DIR = PROJECT_ROOT / "reports" +CONTRIBUTOR_CHART_FILENAME_PREFIX = "contributor_stats_" + +# README Update Configuration +README_PATH = PROJECT_ROOT / "README.md" +README_STATS_START_MARKER = "" +README_STATS_END_MARKER = "" +README_CONTRIBUTORS_HEADER = "مشارکت‌های کاربران" +README_PROGRESS_HEADER = "پیشرفت کلی ترجمه" +README_UPDATED_ON = "به‌روزرسانی" + +CHART_PASTEL_COLORS = [ + "#A6C7E8", # Pastel blue + "#B5EAD7", # Pastel green + "#FFDFD3", # Pastel pink + "#FFF1AC", # Pastel yellow + "#E2D1F9", # Pastel lavender + "#FFD7BA", # Pastel orange + "#FFABAB", # Pastel coral + "#C7F0DB", # Pastel mint + "#FFDAC1", # Pastel peach + "#C7CEEA", # Pastel sky blue +] + +REPORT_HEADERS = { + "resource_stats": { + "file": "File", + "translated": "Translated", + "reviewed": "Reviewed", + "proofread": "Proofread", + "alignment": "|:-----|:-----------:|:-----------:|:-----------:|\n", + }, + "team_stats": { + "user": "User", + "role": "Role", + "translated_count": "Translated Count", + "reviewed_count": "Reviewed Count", + "proofread_count": "Proofread Count", + "alignment": "|:-----|:------:|:------------------:|:-------------------:|:----------------------:|\n", + }, + "contributor_chart": { + "title_base": "User Contributions", + "title_top_n_suffix": " (Top {top_n})", + "xlabel_username": "Username", + "ylabel_total_contributions": "Total Contributions", + }, +} diff --git a/scripts/transifex/main.py b/scripts/transifex/main.py new file mode 100644 index 00000000..104446ac --- /dev/null +++ b/scripts/transifex/main.py @@ -0,0 +1,32 @@ +import argparse +import sys +from .reporting import REPORTERS + + +def main(): + parser = argparse.ArgumentParser( + description="Transifex utility scripts for Python docs (Persian team)." + ) + + valid_commands = list(REPORTERS.keys()) + parser.add_argument( + "command", + choices=valid_commands, + help=f"The command to execute. 
def main():
    """CLI entry point: parse one command name and run its reporter.

    Valid command names are the keys of REPORTERS; argparse's ``choices``
    rejects anything else with exit code 2 before we get here, so the
    lookup below cannot fail and no fallback error branch is needed.
    """
    parser = argparse.ArgumentParser(
        description="Transifex utility scripts for Python docs (Persian team)."
    )

    valid_commands = list(REPORTERS.keys())
    parser.add_argument(
        "command",
        choices=valid_commands,
        help=f"The command to execute. Available commands: {', '.join(valid_commands)}",
    )

    args = parser.parse_args()

    # Instantiate the selected ReportGenerator subclass and run it.
    REPORTERS[args.command]().generate()


if __name__ == "__main__":
    main()
def _get_processed_contributor_data():
    """Fetch and aggregate per-user contribution counts.

    Returns a list of dicts, one per team member::

        {
            "user_id": str,
            "username": str,
            "role": str,
            "translated": int,
            "reviewed": int,
            "proofread": int,
            "total": int,
        }

    Counts are derived by walking every translation of every resource and
    crediting the users referenced by its "translator", "reviewer" and
    "proofreader" relationships. Users not in the team roster are ignored.
    """
    members = tx_client.get_team_members()
    users_details = {}
    for member in members:
        if member.user:
            users_details[member.user.id] = {
                "username": member.user.attributes.get("username", member.user.id),
                "role": member.attributes["role"],
            }
        else:
            # Fallback if user data is somehow missing, using member.id as key.
            users_details[member.id] = {
                "username": "Unknown User",
                "role": member.attributes["role"],
            }

    # One counter table per relationship name; keys are the API relationship
    # names, which double as the result-dict field prefixes below.
    role_counts = {
        "translator": {user_id: 0 for user_id in users_details},
        "reviewer": {user_id: 0 for user_id in users_details},
        "proofreader": {user_id: 0 for user_id in users_details},
    }

    for resource in tx_client.get_all_resources():
        for translation in tx_client.get_resource_translations(resource):
            for relation_name, counts in role_counts.items():
                relation = translation.relationships.get(relation_name)
                # The relationship may be absent or present with null data.
                if relation and relation["data"]:
                    contributor_id = relation["data"]["id"]
                    if contributor_id in counts:
                        counts[contributor_id] += 1

    processed_data = []
    for user_id, details in users_details.items():
        t = role_counts["translator"].get(user_id, 0)
        r = role_counts["reviewer"].get(user_id, 0)
        p = role_counts["proofreader"].get(user_id, 0)
        processed_data.append(
            {
                "user_id": user_id,
                "username": details["username"],
                "role": details["role"],
                "translated": t,
                "reviewed": r,
                "proofread": p,
                "total": t + r + p,
            }
        )
    return processed_data
class ResourceStatsMarkdownReporter(ReportGenerator):
    """Generates a Markdown table of per-resource translation percentages."""

    def generate(self) -> None:
        """Write RESOURCE_STATS_MD_PATH with one row per resource.

        Percentages (translated / reviewed / proofread, by word count) are
        rounded to one decimal; resources with zero total words report 0.0.
        Rows are sorted by reviewed %, then translated %, both descending.
        """
        stats = tx_client.get_resource_language_stats()
        output_path = Path(app_config.RESOURCE_STATS_MD_PATH)

        headers_conf = app_config.REPORT_HEADERS["resource_stats"]
        header_line = f"| {headers_conf['file']} | {headers_conf['translated']} | {headers_conf['reviewed']} | {headers_conf['proofread']} |\n"
        alignment_line = headers_conf["alignment"]

        rows_data = []
        for stat in stats:
            # Relationship id looks like "o:org:p:proj:r:slug"; the slug is
            # the last colon-separated segment.
            resource_id_str = stat.relationships["resource"]["data"]["id"]
            resource_slug = resource_id_str.split(":")[-1]
            file_name = tx_utils.slug_to_file_path(resource_slug)

            total_words = stat.attributes["total_words"]
            if total_words == 0:
                # Avoid division by zero for empty resources.
                translated_pct, reviewed_pct, proofread_pct = 0.0, 0.0, 0.0
            else:
                translated_pct = round(
                    100 * stat.attributes["translated_words"] / total_words, 1
                )
                reviewed_pct = round(
                    100 * stat.attributes["reviewed_words"] / total_words, 1
                )
                proofread_pct = round(
                    100 * stat.attributes["proofread_words"] / total_words, 1
                )
            rows_data.append((file_name, translated_pct, reviewed_pct, proofread_pct))

        # Sort: first by reviewed_pct (desc), then by translated_pct (desc).
        rows_sorted = sorted(
            rows_data,
            key=lambda row: (row[2], row[1]),
            reverse=True,
        )

        with open(output_path, "w", encoding="utf-8") as fo:
            fo.writelines((header_line, alignment_line))
            for file_name, translated_pct, reviewed_pct, proofread_pct in rows_sorted:
                # BUGFIX: was fo.writelines(<single str>), which iterates the
                # string and writes it character by character; write() is the
                # correct call for one string.
                fo.write(
                    f"| {file_name} | {translated_pct}% | {reviewed_pct}% | {proofread_pct}% |\n"
                )
        print(f"Generated resource stats at {output_path}")
class TeamStatsMarkdownReporter(ReportGenerator):
    """Generates a Markdown table of per-member contribution counts."""

    def generate(self) -> None:
        """Write TEAM_STATS_MD_PATH with one row per team member.

        Rows are sorted by total contributions (translated + reviewed +
        proofread), descending.
        """
        contributor_data = _get_processed_contributor_data()

        output_path = Path(app_config.TEAM_STATS_MD_PATH)
        headers_conf = app_config.REPORT_HEADERS["team_stats"]
        header_line = f"| {headers_conf['user']} | {headers_conf['role']} | {headers_conf['translated_count']} | {headers_conf['reviewed_count']} | {headers_conf['proofread_count']} |\n"
        alignment_line = headers_conf["alignment"]

        # Sort by total contributions (descending).
        rows_sorted = sorted(contributor_data, key=lambda x: x["total"], reverse=True)

        with open(output_path, "w", encoding="utf-8") as fo:
            fo.writelines((header_line, alignment_line))
            for contributor in rows_sorted:
                # BUGFIX: was fo.writelines(<single str>), which writes the
                # string one character at a time; write() is correct here.
                fo.write(
                    f"| {contributor['username']} | {contributor['role']} | {contributor['translated']} | {contributor['reviewed']} | {contributor['proofread']} |\n"
                )
        print(f"Generated team stats at {output_path}")
class ContributorChartReporter(ReportGenerator):
    """Generates a bar chart of user contributions as a dated PNG."""

    def __init__(self, top_n=10):
        """top_n: keep only the N highest contributors (None/0 disables the cap)."""
        super().__init__()
        self.top_n = top_n

    def generate(self) -> None:
        """Render and save reports/contributor_stats_<YYYY_MM_DD>.png."""
        contributor_data = _get_processed_contributor_data()
        # Users with zero contributions would only clutter the chart.
        chart_data = [c for c in contributor_data if c["total"] > 0]
        sorted_contributions = sorted(
            chart_data, key=lambda x: x["total"], reverse=True
        )

        if self.top_n and len(sorted_contributions) > self.top_n:
            sorted_contributions = sorted_contributions[: self.top_n]

        if not sorted_contributions:
            print("No contributor data to generate chart.")
            return

        usernames = [item["username"] for item in sorted_contributions]
        totals = [item["total"] for item in sorted_contributions]
        # Cycle through the pastel palette when there are more bars than colors.
        bar_colors = [
            app_config.CHART_PASTEL_COLORS[i % len(app_config.CHART_PASTEL_COLORS)]
            for i in range(len(usernames))
        ]

        plt.figure(figsize=(12, 7))
        bars = plt.bar(usernames, totals, color=bar_colors)

        chart_labels = app_config.REPORT_HEADERS["contributor_chart"]
        chart_title = chart_labels["title_base"]
        if self.top_n:
            chart_title += chart_labels["title_top_n_suffix"].format(top_n=self.top_n)

        plt.title(chart_title)
        plt.xlabel(chart_labels["xlabel_username"])
        plt.ylabel(chart_labels["ylabel_total_contributions"])
        plt.xticks(rotation=45, ha="right")

        # Contribution counts are integers; force integer y-axis ticks.
        # (BUGFIX: the previous `if mticker:` guard was dead code — an
        # imported module object is always truthy.)
        plt.gca().yaxis.set_major_locator(mticker.MaxNLocator(integer=True))
        plt.grid(axis="y", linestyle="--", alpha=0.7)

        # Label each bar with its exact count just above the bar top.
        for bar in bars:
            height = bar.get_height()
            if height > 0:
                plt.text(
                    bar.get_x() + bar.get_width() / 2.0,
                    height + 0.1,
                    f"{int(height)}",
                    ha="center",
                    fontweight="bold",
                )
        plt.tight_layout()

        chart_dir = Path(app_config.CONTRIBUTOR_CHART_DIR)
        chart_dir.mkdir(parents=True, exist_ok=True)

        timestamp = datetime.datetime.now().strftime("%Y_%m_%d")
        filename = (
            chart_dir / f"{app_config.CONTRIBUTOR_CHART_FILENAME_PREFIX}{timestamp}.png"
        )

        plt.savefig(filename)
        plt.close()
        # BUGFIX: the success message was an f-string with no placeholder and
        # never reported where the chart was written.
        print(f"Saved user contributions chart to {filename}")
class ReadmeUpdaterReporter(ReportGenerator):
    """Updates README.md with the latest contributor statistics chart."""

    def _find_latest_chart(self) -> str | None:
        """Return the newest chart as a POSIX path relative to the project root.

        Returns None when no chart image exists yet.
        """
        chart_dir = Path(app_config.CONTRIBUTOR_CHART_DIR)
        pattern = str(
            chart_dir / f"{app_config.CONTRIBUTOR_CHART_FILENAME_PREFIX}*.png"
        )
        files = glob.glob(pattern)
        if not files:
            return None

        # Filenames embed a YYYY_MM_DD timestamp, so lexicographic order is
        # chronological and the last entry is the newest chart.
        latest_file_path = Path(sorted(files)[-1])
        relative_path = latest_file_path.relative_to(app_config.PROJECT_ROOT)
        return relative_path.as_posix()

    def generate(self) -> None:
        """Replace (or append) the marker-delimited stats section in README.md."""
        readme_path = app_config.README_PATH
        print(f"Updating README.md at {readme_path}")
        if not readme_path.exists():
            print(f"Error: README.md not found at {readme_path}")
            return

        latest_chart_path = self._find_latest_chart()
        if not latest_chart_path:
            print("Warning: No contributor chart found to update README.")
            return

        today_display = datetime.datetime.now().strftime("%Y-%m-%d")

        stats_section_content = (
            f"### {app_config.README_CONTRIBUTORS_HEADER}\n"
            f"![{app_config.README_CONTRIBUTORS_HEADER}]({latest_chart_path})\n"
            f"({app_config.README_UPDATED_ON}: {today_display})"
        )

        full_replacement_text = (
            f"{app_config.README_STATS_START_MARKER}\n"
            f"{stats_section_content}\n"
            f"{app_config.README_STATS_END_MARKER}"
        )

        with open(readme_path, "r+", encoding="utf-8") as f:
            content = f.read()

            # Pattern to find the section between the start/end markers.
            pattern = re.compile(
                f"{re.escape(app_config.README_STATS_START_MARKER)}.*?{re.escape(app_config.README_STATS_END_MARKER)}",
                re.DOTALL,
            )

            if pattern.search(content):
                # BUGFIX: pass a callable so backslashes in the generated text
                # are inserted literally instead of being parsed as regex
                # replacement escapes (e.g. "\g<...>").
                updated_content = pattern.sub(
                    lambda _match: full_replacement_text, content
                )
            else:
                print(
                    f"Warning: Markers {app_config.README_STATS_START_MARKER} not found. Appending stats section."
                )
                updated_content = content.rstrip() + "\n\n" + full_replacement_text

            # Rewrite the file in place, truncating any leftover tail.
            f.seek(0)
            f.write(updated_content)
            f.truncate()
        print(
            f"README.md updated successfully at {readme_path} with chart {latest_chart_path}"
        )
class CombinedStatsReporter(ReportGenerator):
    """Runs every individual reporter back to back in a single invocation."""

    def generate(self) -> None:
        """Generate resource stats, team stats, the chart, then update README."""
        print("Generating all reports in a single run...")

        # Order matters: ReadmeUpdaterReporter embeds the chart that
        # ContributorChartReporter writes just before it.
        for reporter_cls in (
            ResourceStatsMarkdownReporter,
            TeamStatsMarkdownReporter,
            ContributorChartReporter,
            ReadmeUpdaterReporter,
        ):
            reporter_cls().generate()

        print("All reports generated successfully!")


# Mapping of CLI command names to their reporter classes (consumed by main.py).
REPORTERS = {
    "recreate-config": TxConfigReporter,
    "recreate-resource-stats": ResourceStatsMarkdownReporter,
    "recreate-team-stats": TeamStatsMarkdownReporter,
    "generate-contributor-chart": ContributorChartReporter,
    "update-readme": ReadmeUpdaterReporter,
    "generate-all-stats": CombinedStatsReporter,
}
+ """ + file_path_str = RESOURCE_NAME_MAP.get(slug, slug) + file_path_str = file_path_str.replace("--", "/") + if re.fullmatch(r"\d+_\d+", file_path_str): + file_path_str = file_path_str.replace("_", ".", 1) + file_path_str += ".po" + return Path(file_path_str) From 1677f2a666ef4c6a801bd90179e0d86167b07255 Mon Sep 17 00:00:00 2001 From: Revisto Date: Sun, 11 May 2025 23:17:56 +0330 Subject: [PATCH 2/5] Add GitHub Action to update translation statistics weekly --- .../workflows/update-translation-stats.yml | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 .github/workflows/update-translation-stats.yml diff --git a/.github/workflows/update-translation-stats.yml b/.github/workflows/update-translation-stats.yml new file mode 100644 index 00000000..08aa7b80 --- /dev/null +++ b/.github/workflows/update-translation-stats.yml @@ -0,0 +1,30 @@ +name: Update Translation Statistics +on: + schedule: + - cron: '0 0 * * 6' + workflow_dispatch: +jobs: + update-stats: + runs-on: ubuntu-latest + steps: + - name: Check out repository + uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.10' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r scripts/transifex/requirements.txt + - name: Generate all stats + run: python -m scripts.transifex.main generate-all-stats + env: + TRANSIFEX_API_TOKEN: ${{ secrets.TRANSIFEX_API_TOKEN }} + - name: Commit and push if changes + run: | + git config --local user.email "action@github.com" + git config --local user.name "GitHub Action" + git add RESOURCE.md TEAM.md reports/ README.md + git commit -m "Update translation statistics [skip ci]" || exit 0 + git push From e7ce3ba72362f446de36ef0955773f84008b714b Mon Sep 17 00:00:00 2001 From: Revisto Date: Mon, 12 May 2025 00:51:48 +0330 Subject: [PATCH 3/5] Remove debug print statements from ReadmeUpdaterReporter class --- scripts/transifex/reporting.py | 3 --- 1 file changed, 3 
deletions(-) diff --git a/scripts/transifex/reporting.py b/scripts/transifex/reporting.py index f7cc0077..d5750ee6 100644 --- a/scripts/transifex/reporting.py +++ b/scripts/transifex/reporting.py @@ -279,10 +279,7 @@ def _find_latest_chart(self) -> str | None: pattern = str( chart_dir / f"{app_config.CONTRIBUTOR_CHART_FILENAME_PREFIX}*.png" ) - print(chart_dir) - print(pattern) files = glob.glob(pattern) - print(files) if not files: return None From 8abc867890571358e06e54e98207ecba3f27dd96 Mon Sep 17 00:00:00 2001 From: Revisto Date: Mon, 12 May 2025 00:53:22 +0330 Subject: [PATCH 4/5] Remove unnecessary comments --- scripts/transifex/reporting.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/scripts/transifex/reporting.py b/scripts/transifex/reporting.py index d5750ee6..b68ae9b0 100644 --- a/scripts/transifex/reporting.py +++ b/scripts/transifex/reporting.py @@ -302,16 +302,10 @@ def generate(self) -> None: today_display = datetime.datetime.now().strftime("%Y-%m-%d") - # Construct the new stats section content - # For now, only contributor chart. Add progress chart later if needed. stats_section_content = ( f"### {app_config.README_CONTRIBUTORS_HEADER}\n" f"![{app_config.README_CONTRIBUTORS_HEADER}]({latest_chart_path})\n" f"({app_config.README_UPDATED_ON}: {today_display})" - # Add progress chart here if you implement it: - # f"\n\n### {app_config.README_PROGRESS_HEADER}\n" - # f"![{app_config.README_PROGRESS_HEADER}]({latest_progress_chart_path})\n" - # f"({app_config.README_UPDATED_ON}: {today_display})" ) full_replacement_text = ( @@ -332,9 +326,6 @@ def generate(self) -> None: if pattern.search(content): updated_content = pattern.sub(full_replacement_text, content) else: - # If markers are not found, append the new section. - # You might want a more sophisticated way to place it, e.g., before a specific header. - # For simplicity, appending if not found. print( f"Warning: Markers {app_config.README_STATS_START_MARKER} not found. 
Appending stats section." ) From a26896fc5fa74b66a9859cb0768fdd80c39dab9f Mon Sep 17 00:00:00 2001 From: Revisto Date: Mon, 12 May 2025 01:01:39 +0330 Subject: [PATCH 5/5] Refactor ReadmeUpdaterReporter to return relative file paths from the project root --- scripts/transifex/reporting.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/scripts/transifex/reporting.py b/scripts/transifex/reporting.py index b68ae9b0..8d630cb1 100644 --- a/scripts/transifex/reporting.py +++ b/scripts/transifex/reporting.py @@ -284,9 +284,10 @@ def _find_latest_chart(self) -> str | None: return None # Sort files by name (timestamp ensures latest is last) - latest_file = sorted(files)[-1] - # Return path relative to README.md (which is at repo root) - return Path(latest_file).as_posix() + latest_file_path = Path(sorted(files)[-1]) + # Return path relative to the project root + relative_path = latest_file_path.relative_to(app_config.PROJECT_ROOT) + return relative_path.as_posix() def generate(self) -> None: readme_path = app_config.README_PATH