Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
319 changes: 319 additions & 0 deletions .github/workflows/wger-catalog-refresh.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,319 @@
---
# Weekly refresh pipeline for the wger exercise catalog.
#
# Rebuilds the SQLite catalog DB from the wger API, diffs it against the
# committed reference DB, builds a manifest + release notes, and publishes the
# data artifacts to a fixed release tag consumed by the app-side catalog
# refresh service.
name: Wger Catalog Refresh

# 'on' is quoted deliberately: unquoted it parses as boolean true in YAML 1.1
# loaders, which confuses generic tooling.
'on':
  workflow_dispatch:
    inputs:
      fail_on_breaking:
        description: "Fail workflow if diff detects breaking changes"
        required: false
        default: true
        type: boolean
      publish_release_assets:
        description: "Publish DB/manifest/build-report to the catalog release channel"
        required: false
        default: true
        type: boolean
  schedule:
    # Every Monday at 05:00 UTC.
    - cron: "0 5 * * 1"

permissions:
  # Needed to create/update the data release and upload its assets.
  contents: write

# Every run publishes to the same fixed release tag (RELEASE_TAG below), so
# overlapping runs would race on asset replacement. Serialize runs; do not
# cancel one mid-publish.
concurrency:
  group: wger-catalog-refresh
  cancel-in-progress: false

jobs:
  refresh-catalog:
    runs-on: ubuntu-latest
    # Guard against a hung remote API call consuming the 6 h default limit.
    timeout-minutes: 30
    env:
      GENERATED_DB_PATH: artifacts/hypertrack_training.db
      BUILD_REPORT_PATH: artifacts/wger_build_report.json
      DIFF_REPORT_PATH: artifacts/wger_diff_report.json
      MANIFEST_PATH: artifacts/wger_catalog_manifest.json
      RELEASE_NOTES_PATH: artifacts/wger_release_notes.md
      REFERENCE_DB_PATH: assets/db/hypertrack_training.db
      RELEASE_TAG: wger-catalog-stable
      RELEASE_NAME: Wger Catalog Data (stable channel)
      RELEASE_CHANNEL: stable
      RELEASE_DOWNLOAD_BASE: https://github.com/${{ github.repository }}/releases/download/wger-catalog-stable/
      RELEASE_PAGE_URL: https://github.com/${{ github.repository }}/releases/tag/wger-catalog-stable
      # github.event.inputs.* values are STRINGS ('true'/'false'), never
      # booleans, so this `&& ||` fallback is safe even when the user selects
      # false: the string 'false' is truthy and short-circuits before the
      # scheduled-run default. Do NOT switch to the typed `inputs.*` context
      # here - a genuine boolean false would incorrectly fall through to 'true'.
      FAIL_ON_BREAKING: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.fail_on_breaking || 'true' }}
      PUBLISH_RELEASE_ASSETS: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.publish_release_assets || 'true' }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          # Quoted so YAML does not read the version as the float 3.11 == 3.1...
          python-version: "3.11"

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install requests

      - name: Validate script syntax
        # Cheap fail-fast before touching the remote API.
        run: |
          python -m py_compile skript/create_wger_exercise_db.py
          python -m py_compile skript/wger_catalog_diff.py

      - name: Generate exercise catalog and build report
        run: |
          mkdir -p artifacts
          python skript/create_wger_exercise_db.py \
            --db-out "$GENERATED_DB_PATH" \
            --report-json-out "$BUILD_REPORT_PATH"

      - name: Diff against committed reference DB (if available)
        id: diff
        # Failure is tolerated here so artifact uploads still run; the final
        # "Enforce safety gate result" step re-raises the failure afterwards.
        continue-on-error: true
        run: |
          if [ -f "$REFERENCE_DB_PATH" ]; then
            echo "reference_db_found=true" >> "$GITHUB_OUTPUT"
            DIFF_CMD="python skript/wger_catalog_diff.py --old \"$REFERENCE_DB_PATH\" --new \"$GENERATED_DB_PATH\" --json-out \"$DIFF_REPORT_PATH\""
            if [ "$FAIL_ON_BREAKING" = "true" ]; then
              DIFF_CMD="$DIFF_CMD --fail-on-breaking"
            fi
            echo "Running diff command: $DIFF_CMD"
            eval "$DIFF_CMD"
          else
            echo "reference_db_found=false" >> "$GITHUB_OUTPUT"
            echo "Reference DB not found at $REFERENCE_DB_PATH. Skipping diff." | tee artifacts/wger_diff_skipped.txt
            python - <<'PY'
          import json
          import os

          # Write a placeholder diff report so downstream steps can rely on the
          # file existing with an explicit "skipped" marker.
          out = os.environ["DIFF_REPORT_PATH"]
          payload = {
              "skipped": True,
              "reason": "reference_db_missing",
              "reference_db_path": os.environ.get("REFERENCE_DB_PATH", ""),
          }
          with open(out, "w", encoding="utf-8") as f:
              json.dump(payload, f, ensure_ascii=False, indent=2)
          PY
          fi

      - name: Build catalog manifest artifact
        run: |
          python - <<'PY'
          import hashlib
          import json
          import math
          import os

          build_path = os.environ["BUILD_REPORT_PATH"]
          diff_path = os.environ["DIFF_REPORT_PATH"]
          manifest_path = os.environ["MANIFEST_PATH"]
          release_base = os.environ["RELEASE_DOWNLOAD_BASE"]

          with open(build_path, "r", encoding="utf-8") as f:
              build_report = json.load(f)

          # The diff report may be absent if the diff step crashed early.
          diff_report = {}
          if os.path.exists(diff_path):
              with open(diff_path, "r", encoding="utf-8") as f:
                  diff_report = json.load(f)

          build = build_report.get("build", {})
          summary = build_report.get("summary", {})

          db_file = os.path.basename(os.environ["GENERATED_DB_PATH"])
          build_report_file = os.path.basename(build_path)
          diff_report_file = os.path.basename(diff_path)

          def sha256_file(path: str) -> str:
              # Stream in 1 MiB chunks so the DB never has to fit in memory.
              h = hashlib.sha256()
              with open(path, "rb") as f:
                  for chunk in iter(lambda: f.read(1024 * 1024), b""):
                      h.update(chunk)
              return h.hexdigest()

          db_sha256 = sha256_file(os.environ["GENERATED_DB_PATH"])
          build_report_sha256 = sha256_file(build_path)
          diff_report_sha256 = sha256_file(diff_path) if os.path.exists(diff_path) else None

          # Safety floor for client-side validation: at least 85% of the count
          # imported this run, never below 50 exercises.
          imported_count = int(summary.get("imported_count", 0) or 0)
          min_exercise_count = max(50, math.floor(imported_count * 0.85))
          manifest = {
              "source_id": "wger_catalog",
              "channel": os.environ.get("RELEASE_CHANNEL", "stable"),
              "release_tag": os.environ.get("RELEASE_TAG", ""),
              "release_page_url": os.environ.get("RELEASE_PAGE_URL", ""),
              "asset_base_url": release_base,
              "version": build.get("db_version", ""),
              "generated_at": build.get("generated_at"),
              "db_file": db_file,
              "db_url": f"{release_base}{db_file}",
              "db_sha256": db_sha256,
              "build_report_file": build_report_file,
              "build_report_url": f"{release_base}{build_report_file}",
              "build_report_sha256": build_report_sha256,
              "diff_report_file": diff_report_file,
              "diff_report_url": f"{release_base}{diff_report_file}",
              "diff_report_sha256": diff_report_sha256,
              "expected_exercise_count": imported_count,
              "min_exercise_count": int(min_exercise_count),
              "safety": {
                  "diff_skipped": bool(diff_report.get("skipped", False)),
                  "diff_removed_count": diff_report.get("summary", {}).get("removed_count"),
                  "diff_added_count": diff_report.get("summary", {}).get("added_count"),
              },
          }

          with open(manifest_path, "w", encoding="utf-8") as f:
              json.dump(manifest, f, ensure_ascii=False, indent=2)
          PY

      - name: Build release notes
        run: |
          python - <<'PY'
          import json
          import os

          build_path = os.environ["BUILD_REPORT_PATH"]
          diff_path = os.environ["DIFF_REPORT_PATH"]
          out_path = os.environ["RELEASE_NOTES_PATH"]

          with open(build_path, "r", encoding="utf-8") as f:
              build = json.load(f)

          diff = {}
          if os.path.exists(diff_path):
              with open(diff_path, "r", encoding="utf-8") as f:
                  diff = json.load(f)

          bmeta = build.get("build", {})
          bsum = build.get("summary", {})
          dsum = diff.get("summary", {}) if isinstance(diff, dict) else {}

          lines = [
              "# Wger Catalog Data Refresh",
              "",
              f"- Version: `{bmeta.get('db_version', 'n/a')}`",
              f"- Generated at: `{bmeta.get('generated_at', 'n/a')}`",
              f"- Imported exercises: `{bsum.get('imported_count', 'n/a')}`",
              f"- Rejected exercises: `{bsum.get('rejected_count', 'n/a')}`",
          ]

          if diff.get("skipped"):
              lines.append("- Diff: skipped (reference DB missing)")
          elif dsum:
              lines.append(f"- Diff removed IDs: `{dsum.get('removed_count', 'n/a')}`")
              lines.append(f"- Diff added IDs: `{dsum.get('added_count', 'n/a')}`")
          else:
              lines.append("- Diff summary unavailable")

          lines.append("")
          lines.append("This is a data-artifact release channel used by app-side catalog refresh.")

          with open(out_path, "w", encoding="utf-8") as f:
              f.write("\n".join(lines) + "\n")
          PY

      - name: Upload generated DB artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: wger-generated-db
          path: ${{ env.GENERATED_DB_PATH }}
          if-no-files-found: error

      - name: Upload build report artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: wger-build-report
          path: ${{ env.BUILD_REPORT_PATH }}
          if-no-files-found: error

      - name: Upload catalog manifest artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: wger-catalog-manifest
          path: ${{ env.MANIFEST_PATH }}
          if-no-files-found: error

      - name: Upload diff report artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: wger-diff-report
          path: ${{ env.DIFF_REPORT_PATH }}
          # The diff step runs with continue-on-error and may crash before
          # writing its report; warn instead of masking the real failure.
          if-no-files-found: warn

      - name: Upload release notes artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: wger-release-notes
          path: ${{ env.RELEASE_NOTES_PATH }}
          if-no-files-found: error

      - name: Publish release assets (catalog channel)
        id: publish_release
        # Publish when enabled AND either no reference DB existed (nothing to
        # gate on), the diff passed, or breaking changes are explicitly allowed.
        if: always() && env.PUBLISH_RELEASE_ASSETS == 'true' && (steps.diff.outputs.reference_db_found != 'true' || steps.diff.outcome == 'success' || env.FAIL_ON_BREAKING != 'true')
        uses: ncipollo/release-action@v1
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          tag: ${{ env.RELEASE_TAG }}
          name: ${{ env.RELEASE_NAME }}
          # Marked prerelease + not-latest so this data channel never shadows
          # real app releases on the repository's releases page.
          prerelease: true
          allowUpdates: true
          replacesArtifacts: true
          makeLatest: 'false'
          artifacts: |
            ${{ env.GENERATED_DB_PATH }}
            ${{ env.BUILD_REPORT_PATH }}
            ${{ env.DIFF_REPORT_PATH }}
            ${{ env.MANIFEST_PATH }}
          bodyFile: ${{ env.RELEASE_NOTES_PATH }}

      - name: Publish run summary
        if: always()
        run: |
          python - <<'PY'
          import json
          import os

          build_path = os.environ["BUILD_REPORT_PATH"]
          diff_path = os.environ["DIFF_REPORT_PATH"]
          release_page_url = os.environ.get("RELEASE_PAGE_URL", "")
          publish_outcome = os.environ.get("PUBLISH_OUTCOME", "skipped")

          lines = ["## Wger Catalog Refresh Summary", ""]

          if os.path.exists(build_path):
              with open(build_path, "r", encoding="utf-8") as f:
                  build = json.load(f)
              bmeta = build.get("build", {})
              summary = build.get("summary", {})
              lines.append(f"- DB version: `{bmeta.get('db_version', 'n/a')}`")
              lines.append(f"- Generated at: `{bmeta.get('generated_at', 'n/a')}`")
              lines.append(f"- Imported: `{summary.get('imported_count', 'n/a')}`")
              lines.append(f"- Rejected: `{summary.get('rejected_count', 'n/a')}`")

          if os.path.exists(diff_path):
              with open(diff_path, "r", encoding="utf-8") as f:
                  diff = json.load(f)
              if diff.get("skipped"):
                  lines.append("- Diff: skipped (reference DB missing)")
              else:
                  dsum = diff.get("summary", {})
                  lines.append(f"- Removed IDs: `{dsum.get('removed_count', 'n/a')}`")
                  lines.append(f"- Added IDs: `{dsum.get('added_count', 'n/a')}`")

          lines.append(f"- Release publication: `{publish_outcome}`")
          lines.append(f"- Catalog release page: {release_page_url}")

          with open(os.environ["GITHUB_STEP_SUMMARY"], "a", encoding="utf-8") as f:
              f.write("\n".join(lines) + "\n")
          PY
        env:
          PUBLISH_OUTCOME: ${{ steps.publish_release.outcome }}

      - name: Enforce safety gate result
        # Re-raise the diff failure (deferred by continue-on-error) only when a
        # reference DB was present and breaking changes were not allowed.
        if: always() && steps.diff.outputs.reference_db_found == 'true' && env.FAIL_ON_BREAKING == 'true' && steps.diff.outcome == 'failure'
        run: |
          echo "Diff safety validation failed under --fail-on-breaking."
          exit 1
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ This README is intentionally implementation-focused and reflects the **current w
- [UI & Widgets](documentation/ui_and_widgets.md)
- [Health Steps Module (Current Implementation)](documentation/health_steps_alpha.md)
- [One-way Health Export (Current Implementation)](documentation/health_export_one_way.md)
- [Wger Catalog Refresh & Distribution](documentation/wger_catalog_refresh_system.md)
- [Shared Analytics Definitions (Legacy Reference)](documentation/analytics_definitions.md)

## What Hypertrack currently supports
Expand Down
18 changes: 18 additions & 0 deletions assets/db/wger_catalog_manifest.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
{
"source_id": "wger_catalog",
"channel": "stable",
"release_tag": "wger-catalog-stable",
"release_page_url": "https://github.com/rfivesix/hypertrack/releases/tag/wger-catalog-stable",
"asset_base_url": "https://github.com/rfivesix/hypertrack/releases/download/wger-catalog-stable/",
"version": "202512260209",
"generated_at": "2025-12-26T02:09:00Z",
"db_file": "hypertrack_training.db",
"db_url": "https://github.com/rfivesix/hypertrack/releases/download/wger-catalog-stable/hypertrack_training.db",
"db_sha256": "77967b5658cb841cdfbef830088167289f3e30000bc64b101fad5de1281fd12a",
"build_report_file": "wger_build_report.json",
"build_report_url": "https://github.com/rfivesix/hypertrack/releases/download/wger-catalog-stable/wger_build_report.json",
"diff_report_file": "wger_diff_report.json",
"diff_report_url": "https://github.com/rfivesix/hypertrack/releases/download/wger-catalog-stable/wger_diff_report.json",
"expected_exercise_count": 789,
"min_exercise_count": 670
}
25 changes: 25 additions & 0 deletions documentation/data_models_and_storage.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,31 @@ Main access paths currently in use:
- `lib/data/product_database_helper.dart`
- Sleep DAOs in `lib/features/sleep/data/persistence/dao/*`

## Exercise catalog source and refresh

Bundled exercise seed data ships as:

- `assets/db/hypertrack_training.db`

Startup import path:

- `lib/screens/app_initializer_screen.dart` -> `BasisDataManager.checkForBasisDataUpdate(...)`

Remote refresh service:

- `lib/services/exercise_catalog_refresh_service.dart`

Remote source configuration is centralized in:

- `lib/config/app_data_sources.dart`

The app checks the release-distributed catalog manifest and can adopt a newer
catalog DB after structural validation. On any remote error, startup falls back
to the bundled asset source.

Tracking state for remote refresh checks is kept in `SharedPreferences` keys
under the `exercise_catalog_*` namespace.

## Core app entities (non-sleep)

The traditional app model classes remain under `lib/models/*` (nutrition, workouts, measurements, supplements, chart/timeline helpers, backup serialization).
Expand Down
2 changes: 2 additions & 0 deletions documentation/overview.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ This document describes the app as implemented in the **current working copy**.
Hypertrack currently implements:

- Workout tracking and analytics
- Exercise catalog refresh via release-distributed wger data artifacts
- Nutrition/fluid logging
- Adaptive nutrition recommendation generation (weekly due-week model with explicit manual apply)
- Measurements
Expand Down Expand Up @@ -122,5 +123,6 @@ Implemented controls include:
- [Statistics module](statistics_module.md)
- [Sleep current state](sleep/sleep_current_state.md)
- [Health export one-way](health_export_one_way.md)
- [Wger catalog refresh & distribution](wger_catalog_refresh_system.md)
- [Architecture](architecture.md)
- [Data models and storage](data_models_and_storage.md)
Loading