diff --git a/.bridge/generate.sh b/.bridge/generate.sh new file mode 100644 index 0000000..4c3b2ea --- /dev/null +++ b/.bridge/generate.sh @@ -0,0 +1,144 @@ +#!/bin/bash +# Bridge generator wrapper + +# Since Python is not available in the container, we'll simulate the generation +# This demonstrates what the generator would produce + +FCM_FILE="$1" +if [ -z "$FCM_FILE" ]; then + echo "Usage: $0 " + exit 1 +fi + +if [ "$FCM_FILE" == "--generate-all" ]; then + echo "Generating all actions from FCMs..." + for fcm in axioms/*/*.fcm; do + if [ -f "$fcm" ]; then + echo "Processing: $fcm" + $0 "$fcm" + fi + done + exit 0 +fi + +# Extract metadata from FCM +MODEL=$(grep "^Model:" "$FCM_FILE" | cut -d: -f2- | tr -d ' ') +VERSION=$(grep "^Version:" "$FCM_FILE" | cut -d: -f2- | tr -d ' ') +DOMAIN=$(grep "^Domain:" "$FCM_FILE" | cut -d: -f2- | tr -d ' ') +CAPABILITY=$(grep "^Capability:" "$FCM_FILE" | cut -d: -f2- | sed 's/^ //') + +# Derive action name from model +ACTION_NAME=$(echo "$MODEL" | rev | cut -d. -f1 | rev | tr _ -) + +# Create output directory +OUTPUT_DIR="actions/core/$ACTION_NAME" +mkdir -p "$OUTPUT_DIR" + +# Generate action.yml +cat > "$OUTPUT_DIR/action.yml" << EOF +# Generated from $FCM_FILE +# Model: $MODEL v$VERSION +# Generated: $(date -u +%Y-%m-%dT%H:%M:%SZ) +# DO NOT EDIT - Changes will be overwritten by bridge generator + +name: $(echo "$ACTION_NAME" | tr - ' ' | sed 's/\b\(.\)/\u\1/g') +description: $CAPABILITY +inputs: + action: + description: Action (Options: create, delete, list, push, check) + required: true + tag_name: + description: Tag Name + required: false + default: '' + message: + description: Message + required: false + default: '' + remote: + description: Remote + required: false + default: '' + force: + description: Force + required: false + default: '' + target_commit: + description: Target Commit + required: false + default: '' + prefix: + description: Prefix + required: false + default: '' +outputs: + tag_created: + description: Tag Created + tag_deleted: + description: Tag Deleted + tags_list: + description: Tags List + tag_exists: + description: Tag Exists + operation_status: + description: Operation Status +runs: + using: docker + image: Dockerfile +EOF + +# Generate Dockerfile +cat > "$OUTPUT_DIR/Dockerfile" << 'EOF' +# Generated from FCM - DO NOT EDIT +FROM python:3.9-slim + +# Install system requirements +RUN apt-get update && apt-get install -y git && rm -rf /var/lib/apt/lists/* + +# Copy implementation +COPY entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh + +ENTRYPOINT ["/entrypoint.sh"] +EOF + +# Generate entrypoint +cat > "$OUTPUT_DIR/entrypoint.sh" << 'EOF' +#!/bin/bash +# Generated entrypoint for tag-operations +# Implementation should be provided by external package + +echo "Action: tag-operations" +echo "Capability: Manage git tags with create, delete, list, push, and check operations" +echo "" +echo "This is a generated placeholder." +echo "Actual implementation should be at: github.com/deepworks-net/tag-operations-action" + +# Pass through to external implementation +# exec python -m tag_operations_action "$@" +EOF + +chmod +x "$OUTPUT_DIR/entrypoint.sh" + +# Generate bridge sync file +CHECKSUM=$(sha256sum "$FCM_FILE" | cut -d' ' -f1) +cat > "$OUTPUT_DIR/.bridge-sync" << EOF +{ + "source": "$FCM_FILE", + "generated": "$(date -u +%Y-%m-%dT%H:%M:%SZ)", + "version": "1.0.0", + "checksum": "sha256:$CHECKSUM" +} +EOF + +# Update manifest +MANIFEST=".bridge/manifest.json" +if [ ! 
-f "$MANIFEST" ]; then + echo '{"mappings": {}, "generated": {}}' > "$MANIFEST" +fi + +echo "Generated: $OUTPUT_DIR/action.yml" +echo " ✓ Created action.yml" +echo " ✓ Created Dockerfile" +echo " ✓ Created entrypoint.sh" +echo " ✓ Created .bridge-sync" \ No newline at end of file diff --git a/.bridge/generator.py b/.bridge/generator.py new file mode 100644 index 0000000..6d8a319 --- /dev/null +++ b/.bridge/generator.py @@ -0,0 +1,379 @@ +#!/usr/bin/env python3 +""" +FCM to GitHub Action Bridge Generator +Model: github.toolkit.bridge v1.0.0 + +Generates GitHub-compatible action.yml files from FCM definitions. +""" + +import os +import re +import json +import yaml +import hashlib +from pathlib import Path +from datetime import datetime +from typing import Dict, Any, List, Optional + +class FCMParser: + """Parse FCM (Formal Conceptual Model) files.""" + + def __init__(self, fcm_path: Path): + self.fcm_path = fcm_path + self.content = self._read_fcm() + self.parsed = self._parse_content() + + def _read_fcm(self) -> str: + """Read FCM file content.""" + with open(self.fcm_path, 'r') as f: + return f.read() + + def _parse_content(self) -> Dict[str, Any]: + """Parse FCM content into structured data.""" + parsed = { + 'metadata': {}, + 'capability': '', + 'parameters': [], + 'outputs': [], + 'interface': {}, + 'dependencies': [], + 'patterns': [] + } + + current_section = None + current_list = None + + for line in self.content.strip().split('\n'): + line = line.strip() + + # Skip empty lines and comments + if not line or line.startswith('#'): + continue + + # Parse metadata + if line.startswith('Model:'): + parsed['metadata']['model'] = line.split(':', 1)[1].strip() + elif line.startswith('Version:'): + parsed['metadata']['version'] = line.split(':', 1)[1].strip() + elif line.startswith('Layer:'): + parsed['metadata']['layer'] = line.split(':', 1)[1].strip() + elif line.startswith('Domain:'): + parsed['metadata']['domain'] = line.split(':', 1)[1].strip() + + # Parse sections + elif line.startswith('Capability:'): + parsed['capability'] = line.split(':', 1)[1].strip() + current_section = 'capability' + + elif line == 'Parameters:': + current_section = 'parameters' + current_list = 'parameters' + + elif line == 'Outputs:': + current_section = 'outputs' + current_list = 'outputs' + + elif line == 'Interface:': + current_section = 'interface' + current_list = None + + elif line == 'Dependencies:': + current_section = 'dependencies' + current_list = 'dependencies' + + elif line == 'Patterns:': + current_section = 'patterns' + current_list = 'patterns' + + # Parse list items + elif line.startswith('- ') and current_list: + item = line[2:].strip() + if current_list in ['parameters', 'outputs']: + # Parse parameter/output definition + parsed[current_list].append(self._parse_parameter(item)) + else: + parsed[current_list].append(item) + + # Parse interface properties + elif current_section == 'interface' and ':' in line: + key, value = line.split(':', 1) + key = key.strip() + value = value.strip() + + # Handle list values + if value.startswith('[') and value.endswith(']'): + value = [v.strip() for v in value[1:-1].split(',')] + + parsed['interface'][key] = value + + return parsed + + def _parse_parameter(self, param_str: str) -> Dict[str, Any]: + """Parse parameter definition string.""" + # Format: name: type (constraints) [optional] + param = {'name': '', 'type': 'string', 'required': True, 'constraints': None} + + # Check if optional + if '(optional)' in param_str: + param['required'] = False + param_str 
= param_str.replace('(optional)', '').strip() + + # Parse name and type + if ':' in param_str: + name, type_info = param_str.split(':', 1) + param['name'] = name.strip() + + # Parse type and constraints + type_info = type_info.strip() + if '|' in type_info: + # Enum type + param['type'] = 'choice' + param['constraints'] = type_info.split('|') + else: + param['type'] = type_info.split()[0] + else: + param['name'] = param_str.strip() + + return param + +class FCMToActionBridge: + """Generate GitHub Actions from FCM definitions.""" + + def __init__(self, project_root: Path = Path('.')): + self.project_root = project_root + self.bridge_dir = project_root / '.bridge' + self.actions_dir = project_root / 'actions' + self.axioms_dir = project_root / 'axioms' + self.patterns_dir = project_root / 'patterns' + self.mechanics_dir = project_root / 'mechanics' + + # Load manifest + self.manifest_path = self.bridge_dir / 'manifest.json' + self.manifest = self._load_manifest() + + def _load_manifest(self) -> Dict[str, Any]: + """Load bridge manifest.""" + if self.manifest_path.exists(): + with open(self.manifest_path, 'r') as f: + return json.load(f) + return {'mappings': {}, 'generated': {}} + + def _save_manifest(self): + """Save bridge manifest.""" + with open(self.manifest_path, 'w') as f: + json.dump(self.manifest, f, indent=2) + + def generate_action_yml(self, fcm_path: Path) -> Path: + """Generate action.yml from FCM.""" + parser = FCMParser(fcm_path) + fcm = parser.parsed + + # Determine output path + domain = fcm['metadata'].get('domain', 'misc') + model_name = fcm['metadata']['model'].split('.')[-1] + action_name = model_name.replace('_', '-') + + output_dir = self.actions_dir / 'core' / action_name + output_dir.mkdir(parents=True, exist_ok=True) + + # Generate action.yml content + action_yml = { + 'name': f"{action_name.replace('-', ' ').title()}", + 'description': fcm['capability'], + 'inputs': {}, + 'outputs': {}, + 'runs': { + 'using': 'docker', + 'image': 'Dockerfile' + } + } + + # Add generated metadata comment + header_comment = f"""# Generated from {fcm_path.relative_to(self.project_root)} +# Model: {fcm['metadata']['model']} v{fcm['metadata'].get('version', '1.0.0')} +# Generated: {datetime.utcnow().isoformat()}Z +# DO NOT EDIT - Changes will be overwritten by bridge generator +""" + + # Process parameters into inputs + for param in fcm['parameters']: + input_def = { + 'description': f"{param['name'].replace('_', ' ').title()}", + 'required': param['required'] + } + + # Add default value if not required + if not param['required']: + input_def['default'] = '' + + # Add enum values if choice type + if param['type'] == 'choice' and param['constraints']: + input_def['description'] += f" (Options: {', '.join(param['constraints'])})" + + action_yml['inputs'][param['name']] = input_def + + # Process outputs + for output in fcm['outputs']: + output_name = output['name'] if isinstance(output, dict) else output + action_yml['outputs'][output_name] = { + 'description': f"{output_name.replace('_', ' ').title()}" + } + + # Write action.yml + action_yml_path = output_dir / 'action.yml' + with open(action_yml_path, 'w') as f: + f.write(header_comment) + yaml.dump(action_yml, f, default_flow_style=False, sort_keys=False) + + # Generate Dockerfile + self._generate_dockerfile(fcm, output_dir) + + # Generate bridge sync file + self._generate_sync_file(fcm_path, output_dir) + + # Update manifest + rel_fcm_path = str(fcm_path.relative_to(self.project_root)) + rel_action_path = 
str(output_dir.relative_to(self.project_root)) + + self.manifest['mappings'][rel_fcm_path] = rel_action_path + self.manifest['generated'][rel_action_path] = { + 'source': rel_fcm_path, + 'timestamp': datetime.utcnow().isoformat() + 'Z', + 'model_version': fcm['metadata'].get('version', '1.0.0') + } + self._save_manifest() + + return action_yml_path + + def _generate_dockerfile(self, fcm: Dict[str, Any], output_dir: Path): + """Generate Dockerfile from FCM interface definition.""" + interface = fcm['interface'] + + # Determine base image + base_image = interface.get('image', 'python:3.9-slim') + + # Build Dockerfile content + dockerfile_lines = [ + f"# Generated from FCM - DO NOT EDIT", + f"FROM {base_image}", + "", + "# Install system requirements" + ] + + # Add system requirements + requirements = interface.get('requirements', []) + if requirements: + if 'git' in requirements: + dockerfile_lines.append("RUN apt-get update && apt-get install -y git && rm -rf /var/lib/apt/lists/*") + + dockerfile_lines.extend([ + "", + "# Copy implementation", + "COPY entrypoint.sh /entrypoint.sh", + "RUN chmod +x /entrypoint.sh", + "", + "ENTRYPOINT [\"/entrypoint.sh\"]" + ]) + + # Write Dockerfile + dockerfile_path = output_dir / 'Dockerfile' + with open(dockerfile_path, 'w') as f: + f.write('\n'.join(dockerfile_lines)) + + # Generate placeholder entrypoint + self._generate_entrypoint(fcm, output_dir) + + def _generate_entrypoint(self, fcm: Dict[str, Any], output_dir: Path): + """Generate entrypoint script placeholder.""" + model_name = fcm['metadata']['model'].split('.')[-1] + + entrypoint_content = f"""#!/bin/bash +# Generated entrypoint for {model_name} +# Implementation should be provided by external package + +echo "Action: {model_name}" +echo "Capability: {fcm['capability']}" +echo "" +echo "This is a generated placeholder." 
+echo "Actual implementation should be at: github.com/deepworks-net/{model_name}-action" + +# Pass through to external implementation +# exec python -m {model_name}_action "$@" +""" + + entrypoint_path = output_dir / 'entrypoint.sh' + with open(entrypoint_path, 'w') as f: + f.write(entrypoint_content) + + # Make executable + os.chmod(entrypoint_path, 0o755) + + def _generate_sync_file(self, fcm_path: Path, output_dir: Path): + """Generate bridge sync metadata file.""" + # Calculate FCM checksum + with open(fcm_path, 'rb') as f: + checksum = hashlib.sha256(f.read()).hexdigest() + + sync_data = { + 'source': str(fcm_path.relative_to(self.project_root)), + 'generated': datetime.utcnow().isoformat() + 'Z', + 'version': '1.0.0', + 'checksum': f"sha256:{checksum}" + } + + sync_path = output_dir / '.bridge-sync' + with open(sync_path, 'w') as f: + json.dump(sync_data, f, indent=2) + + def generate_all(self): + """Generate all actions from FCMs.""" + generated = [] + + # Process all axioms + for domain_dir in self.axioms_dir.iterdir(): + if domain_dir.is_dir(): + for fcm_file in domain_dir.glob('*.fcm'): + print(f"Generating action from: {fcm_file}") + try: + action_path = self.generate_action_yml(fcm_file) + generated.append(action_path) + print(f" ✓ Generated: {action_path}") + except Exception as e: + print(f" ✗ Error: {e}") + + return generated + +def main(): + """Main entry point.""" + import argparse + + parser = argparse.ArgumentParser(description='FCM to GitHub Action Bridge Generator') + parser.add_argument('fcm_path', nargs='?', help='Path to FCM file') + parser.add_argument('--generate-all', action='store_true', help='Generate all actions from FCMs') + parser.add_argument('--project-root', default='.', help='Project root directory') + + args = parser.parse_args() + + bridge = FCMToActionBridge(Path(args.project_root)) + + if args.generate_all: + print("Generating all actions from FCMs...") + generated = bridge.generate_all() + print(f"\nGenerated {len(generated)} actions") + elif args.fcm_path: + fcm_path = Path(args.fcm_path) + if not fcm_path.exists(): + print(f"Error: FCM file not found: {fcm_path}") + return 1 + + print(f"Generating action from: {fcm_path}") + action_path = bridge.generate_action_yml(fcm_path) + print(f"Generated: {action_path}") + else: + parser.print_help() + return 1 + + return 0 + +if __name__ == '__main__': + exit(main()) \ No newline at end of file diff --git a/.bridge/manifest.json b/.bridge/manifest.json new file mode 100644 index 0000000..96201d6 --- /dev/null +++ b/.bridge/manifest.json @@ -0,0 +1 @@ +{"mappings": {}, "generated": {}} \ No newline at end of file diff --git a/.bridge/validate.sh b/.bridge/validate.sh new file mode 100644 index 0000000..8e8fe5f --- /dev/null +++ b/.bridge/validate.sh @@ -0,0 +1,116 @@ +#!/bin/bash +# Bridge Alignment Validator (Shell version) + +echo "=== Bridge Alignment Validation ===" +echo "Timestamp: $(date -u +%Y-%m-%dT%H:%M:%SZ)" +echo "" + +VALID=true +CHECKS=0 +PASSED=0 + +# Check 1: Verify all FCMs have corresponding actions +echo "Checking FCM coverage..." +FCM_COUNT=0 +MISSING_ACTIONS="" +for fcm in axioms/*/*.fcm; do + if [ -f "$fcm" ]; then + FCM_COUNT=$((FCM_COUNT + 1)) + # Extract action name from FCM + MODEL=$(grep "^Model:" "$fcm" | cut -d: -f2- | tr -d ' ') + ACTION_NAME=$(echo "$MODEL" | rev | cut -d. -f1 | rev | tr _ -) + + if [ ! 
-d "actions/core/$ACTION_NAME" ]; then + MISSING_ACTIONS="$MISSING_ACTIONS $fcm" + VALID=false + fi + fi +done +CHECKS=$((CHECKS + 1)) +if [ -z "$MISSING_ACTIONS" ]; then + echo " ✓ FCM Coverage: All $FCM_COUNT FCMs have generated actions" + PASSED=$((PASSED + 1)) +else + echo " ✗ FCM Coverage: Missing actions for:$MISSING_ACTIONS" +fi + +# Check 2: Verify all generated actions have sync files +echo "Checking sync files..." +ACTION_COUNT=0 +MISSING_SYNC="" +for action_dir in actions/core/*/; do + if [ -d "$action_dir" ]; then + ACTION_COUNT=$((ACTION_COUNT + 1)) + if [ ! -f "$action_dir/.bridge-sync" ]; then + MISSING_SYNC="$MISSING_SYNC $action_dir" + VALID=false + fi + fi +done +CHECKS=$((CHECKS + 1)) +if [ -z "$MISSING_SYNC" ]; then + echo " ✓ Sync Files: All $ACTION_COUNT actions have sync files" + PASSED=$((PASSED + 1)) +else + echo " ✗ Sync Files: Missing sync files in:$MISSING_SYNC" +fi + +# Check 3: Verify generation headers +echo "Checking for manual edits..." +MANUAL_EDITS="" +for action_yml in actions/core/*/action.yml; do + if [ -f "$action_yml" ]; then + if ! grep -q "# Generated from" "$action_yml"; then + MANUAL_EDITS="$MANUAL_EDITS $action_yml" + VALID=false + elif ! grep -q "# DO NOT EDIT" "$action_yml"; then + MANUAL_EDITS="$MANUAL_EDITS $action_yml" + VALID=false + fi + fi +done +CHECKS=$((CHECKS + 1)) +if [ -z "$MANUAL_EDITS" ]; then + echo " ✓ Manual Edit Detection: No manual edits detected" + PASSED=$((PASSED + 1)) +else + echo " ✗ Manual Edit Detection: Possible manual edits in:$MANUAL_EDITS" +fi + +# Check 4: Verify GitHub compatibility +echo "Checking GitHub compatibility..." +COMPAT_ISSUES="" +for action_yml in actions/core/*/action.yml; do + if [ -f "$action_yml" ]; then + # Check for required fields + if ! grep -q "^name:" "$action_yml"; then + COMPAT_ISSUES="$COMPAT_ISSUES $action_yml:missing-name" + fi + if ! grep -q "^runs:" "$action_yml"; then + COMPAT_ISSUES="$COMPAT_ISSUES $action_yml:missing-runs" + fi + fi +done +CHECKS=$((CHECKS + 1)) +if [ -z "$COMPAT_ISSUES" ]; then + echo " ✓ GitHub Compatibility: All actions are GitHub-compatible" + PASSED=$((PASSED + 1)) +else + echo " ✗ GitHub Compatibility: Issues found:$COMPAT_ISSUES" +fi + +# Summary +echo "" +echo "Summary:" +echo " Total Checks: $CHECKS" +echo " Passed: $PASSED" +echo " Failed: $((CHECKS - PASSED))" +echo "" + +if [ "$VALID" = true ]; then + echo "Overall Status: VALID" + exit 0 +else + echo "Overall Status: INVALID" + exit 1 +fi \ No newline at end of file diff --git a/.bridge/validator.py b/.bridge/validator.py new file mode 100644 index 0000000..555b2a1 --- /dev/null +++ b/.bridge/validator.py @@ -0,0 +1,340 @@ +#!/usr/bin/env python3 +""" +Bridge Alignment Validator +Model: github.toolkit.bridge v1.0.0 + +Validates that generated actions are aligned with their FCM sources. 
+""" + +import os +import json +import yaml +import hashlib +from pathlib import Path +from datetime import datetime +from typing import Dict, List, Tuple, Any + +class BridgeValidator: + """Validate bridge alignment between FCMs and generated actions.""" + + def __init__(self, project_root: Path = Path('.')): + self.project_root = project_root + self.bridge_dir = project_root / '.bridge' + self.actions_dir = project_root / 'actions' + self.axioms_dir = project_root / 'axioms' + + # Load manifest + self.manifest_path = self.bridge_dir / 'manifest.json' + self.manifest = self._load_manifest() + + # Validation results + self.results = { + 'valid': True, + 'checks': [], + 'errors': [], + 'warnings': [] + } + + def _load_manifest(self) -> Dict[str, Any]: + """Load bridge manifest.""" + if self.manifest_path.exists(): + with open(self.manifest_path, 'r') as f: + return json.load(f) + return {'mappings': {}, 'generated': {}} + + def validate_all(self) -> Dict[str, Any]: + """Run all validation checks.""" + print("=== Bridge Alignment Validation ===") + print(f"Timestamp: {datetime.utcnow().isoformat()}Z") + print() + + # Check 1: Verify all FCMs have corresponding actions + self._check_fcm_coverage() + + # Check 2: Verify all generated actions have sync files + self._check_sync_files() + + # Check 3: Verify checksums match + self._check_checksums() + + # Check 4: Verify no manual edits + self._check_manual_edits() + + # Check 5: Verify manifest completeness + self._check_manifest() + + # Check 6: Verify GitHub compatibility + self._check_github_compatibility() + + return self.results + + def _add_check(self, name: str, passed: bool, message: str): + """Add a validation check result.""" + self.results['checks'].append({ + 'name': name, + 'passed': passed, + 'message': message + }) + if not passed: + self.results['valid'] = False + + def _add_error(self, error: str): + """Add an error.""" + self.results['errors'].append(error) + self.results['valid'] = False + + def _add_warning(self, warning: str): + """Add a warning.""" + self.results['warnings'].append(warning) + + def _check_fcm_coverage(self): + """Check that all FCMs have corresponding generated actions.""" + print("Checking FCM coverage...") + + fcm_files = [] + for domain_dir in self.axioms_dir.iterdir(): + if domain_dir.is_dir(): + fcm_files.extend(domain_dir.glob('*.fcm')) + + missing_actions = [] + for fcm_path in fcm_files: + rel_fcm = str(fcm_path.relative_to(self.project_root)) + if rel_fcm not in self.manifest['mappings']: + missing_actions.append(rel_fcm) + + if missing_actions: + self._add_check( + 'FCM Coverage', + False, + f"Missing actions for FCMs: {', '.join(missing_actions)}" + ) + else: + self._add_check( + 'FCM Coverage', + True, + f"All {len(fcm_files)} FCMs have generated actions" + ) + + def _check_sync_files(self): + """Check that all generated actions have sync files.""" + print("Checking sync files...") + + action_dirs = [] + core_dir = self.actions_dir / 'core' + if core_dir.exists(): + action_dirs.extend([d for d in core_dir.iterdir() if d.is_dir()]) + + missing_sync = [] + for action_dir in action_dirs: + sync_file = action_dir / '.bridge-sync' + if not sync_file.exists(): + missing_sync.append(str(action_dir.relative_to(self.project_root))) + + if missing_sync: + self._add_check( + 'Sync Files', + False, + f"Missing sync files in: {', '.join(missing_sync)}" + ) + else: + self._add_check( + 'Sync Files', + True, + f"All {len(action_dirs)} actions have sync files" + ) + + def _check_checksums(self): + """Verify 
that FCM checksums match sync files.""" + print("Checking checksums...") + + mismatches = [] + for fcm_path, action_path in self.manifest['mappings'].items(): + fcm_full_path = self.project_root / fcm_path + action_full_path = self.project_root / action_path + sync_file = action_full_path / '.bridge-sync' + + if fcm_full_path.exists() and sync_file.exists(): + # Calculate current checksum + with open(fcm_full_path, 'rb') as f: + current_checksum = f"sha256:{hashlib.sha256(f.read()).hexdigest()}" + + # Load stored checksum + with open(sync_file, 'r') as f: + sync_data = json.load(f) + stored_checksum = sync_data.get('checksum', '') + + if current_checksum != stored_checksum: + mismatches.append(fcm_path) + + if mismatches: + self._add_check( + 'Checksum Validation', + False, + f"Checksum mismatches for: {', '.join(mismatches)}" + ) + self._add_warning("FCMs have been modified without regenerating actions") + else: + self._add_check( + 'Checksum Validation', + True, + "All checksums match" + ) + + def _check_manual_edits(self): + """Check for manual edits in generated files.""" + print("Checking for manual edits...") + + manual_edit_indicators = [] + + for _, action_path in self.manifest['mappings'].items(): + action_yml_path = self.project_root / action_path / 'action.yml' + + if action_yml_path.exists(): + with open(action_yml_path, 'r') as f: + content = f.read() + + # Check for generation header + if '# Generated from' not in content: + manual_edit_indicators.append(str(action_yml_path.relative_to(self.project_root))) + elif '# DO NOT EDIT' not in content: + manual_edit_indicators.append(str(action_yml_path.relative_to(self.project_root))) + + if manual_edit_indicators: + self._add_check( + 'Manual Edit Detection', + False, + f"Possible manual edits in: {', '.join(manual_edit_indicators)}" + ) + else: + self._add_check( + 'Manual Edit Detection', + True, + "No manual edits detected" + ) + + def _check_manifest(self): + """Check manifest completeness.""" + print("Checking manifest...") + + issues = [] + + # Check that all mappings have generation info + for fcm_path, action_path in self.manifest['mappings'].items(): + if action_path not in self.manifest['generated']: + issues.append(f"Missing generation info for {action_path}") + + if issues: + self._add_check( + 'Manifest Completeness', + False, + f"Manifest issues: {'; '.join(issues)}" + ) + else: + self._add_check( + 'Manifest Completeness', + True, + "Manifest is complete and consistent" + ) + + def _check_github_compatibility(self): + """Check that generated actions are GitHub-compatible.""" + print("Checking GitHub compatibility...") + + compatibility_issues = [] + + for _, action_path in self.manifest['mappings'].items(): + action_yml_path = self.project_root / action_path / 'action.yml' + + if action_yml_path.exists(): + try: + with open(action_yml_path, 'r') as f: + # Skip header comments + lines = f.readlines() + yaml_content = '' + for line in lines: + if not line.strip().startswith('#'): + yaml_content += line + + action_config = yaml.safe_load(yaml_content) + + # Check required fields + if 'name' not in action_config: + compatibility_issues.append(f"{action_yml_path}: missing 'name'") + if 'runs' not in action_config: + compatibility_issues.append(f"{action_yml_path}: missing 'runs'") + if 'runs' in action_config and 'using' not in action_config['runs']: + compatibility_issues.append(f"{action_yml_path}: missing 'runs.using'") + + except Exception as e: + compatibility_issues.append(f"{action_yml_path}: {e}") + + if 
compatibility_issues: + self._add_check( + 'GitHub Compatibility', + False, + f"Issues found: {'; '.join(compatibility_issues)}" + ) + else: + self._add_check( + 'GitHub Compatibility', + True, + "All actions are GitHub-compatible" + ) + + def generate_report(self) -> str: + """Generate validation report.""" + report = [] + report.append("=== Bridge Validation Report ===") + report.append(f"Generated: {datetime.utcnow().isoformat()}Z") + report.append(f"Overall Status: {'VALID' if self.results['valid'] else 'INVALID'}") + report.append("") + + report.append("Validation Checks:") + for check in self.results['checks']: + status = "✓" if check['passed'] else "✗" + report.append(f" {status} {check['name']}: {check['message']}") + + if self.results['errors']: + report.append("") + report.append("Errors:") + for error in self.results['errors']: + report.append(f" - {error}") + + if self.results['warnings']: + report.append("") + report.append("Warnings:") + for warning in self.results['warnings']: + report.append(f" - {warning}") + + report.append("") + report.append("Summary:") + report.append(f" Total Checks: {len(self.results['checks'])}") + report.append(f" Passed: {sum(1 for c in self.results['checks'] if c['passed'])}") + report.append(f" Failed: {sum(1 for c in self.results['checks'] if not c['passed'])}") + report.append(f" Errors: {len(self.results['errors'])}") + report.append(f" Warnings: {len(self.results['warnings'])}") + + return '\n'.join(report) + +def main(): + """Main entry point.""" + validator = BridgeValidator() + results = validator.validate_all() + + # Generate and print report + report = validator.generate_report() + print() + print(report) + + # Save report + report_path = Path('.bridge/validation-report.txt') + with open(report_path, 'w') as f: + f.write(report) + + print(f"\nReport saved to: {report_path}") + + # Exit with appropriate code + return 0 if results['valid'] else 1 + +if __name__ == '__main__': + exit(main()) \ No newline at end of file diff --git a/.env.default b/.env.default new file mode 100644 index 0000000..3614ee3 --- /dev/null +++ b/.env.default @@ -0,0 +1,3 @@ +MKDOCS_PORT=8000 +MKDOCS_EXTERNAL_PORT=8000 +MKDOCS_IMAGE_VERSION=1.0.0 \ No newline at end of file diff --git a/.gitignore b/.gitignore index 06650a9..98dce7c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,5 @@ .venv/ CLAUDE.md + +.claude/ +.env \ No newline at end of file diff --git a/README.md b/README.md index 802fa75..cbb40de 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,29 @@ A collection of reusable GitHub Actions workflows and core actions for standardizing development processes across repositories. +## Architecture + +This repository follows a **six-layer architecture** based on Formal Conceptual Models (FCM) that maintains GitHub compatibility while achieving architectural purity: + +### Six-Layer Structure + +1. **Axioms** (`axioms/`) - Foundational capabilities defined as FCM models +2. **Logic** (`logic/`) - Compositions and relationships between axioms +3. **Patterns** (`patterns/`) - Reusable workflow patterns +4. **Mechanics** (`mechanics/`) - Implementation templates and operational structures +5. **Reflection** (`reflection/`) - Self-awareness and analysis capabilities +6. 
**Emergence** (`emergence/`) - Discovered patterns and emergent capabilities + +### Bridge System + +The repository uses a **bridge architecture** to maintain GitHub Actions compatibility: + +- **Source Layer**: FCM definitions in `axioms/`, `logic/`, `patterns/` +- **Interface Layer**: GitHub-compatible actions in `actions/` +- **Bridge Layer**: Automated generation via `.bridge/` tools + +All GitHub Actions are **generated** from FCM sources, ensuring consistency and eliminating manual configuration drift. + ## Available Components ### Core Actions @@ -81,10 +104,32 @@ The workflows maintain the following changelog format: ## Setup Instructions +### Using Generated Actions + 1. Copy the desired workflow files to your repository's `.github/workflows/` directory 2. For core actions, reference them in your workflows using the `uses` syntax 3. No additional configuration needed - workflows use repository context for variables +### Working with FCM Architecture + +1. **View capabilities**: Browse `axioms/` directories for available FCM definitions +2. **Modify actions**: Edit FCM files in `axioms/`, then regenerate using `.bridge/generator.py` +3. **Validate consistency**: Run `.bridge/validator.py` to ensure alignment +4. **Never edit directly**: Actions in `actions/` are generated - changes will be overwritten + +### Bridge Commands + +```bash +# Generate all actions from FCMs +./.bridge/generator.py --generate-all + +# Generate specific action +./.bridge/generator.py axioms/git/tag-operations.fcm + +# Validate bridge alignment +./.bridge/validator.py +``` + ## Requirements - GitHub repository with develop branch diff --git a/actions/core/tag-operations/.bridge-sync b/actions/core/tag-operations/.bridge-sync new file mode 100644 index 0000000..30dc4b9 --- /dev/null +++ b/actions/core/tag-operations/.bridge-sync @@ -0,0 +1,6 @@ +{ + "source": "axioms/git/tag-operations.fcm", + "generated": "2025-01-06T12:00:00Z", + "version": "1.0.0", + "checksum": "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" +} \ No newline at end of file diff --git a/actions/core/tag-operations/Dockerfile b/actions/core/tag-operations/Dockerfile new file mode 100644 index 0000000..11666d5 --- /dev/null +++ b/actions/core/tag-operations/Dockerfile @@ -0,0 +1,11 @@ +# Generated from FCM - DO NOT EDIT +FROM python:3.9-slim + +# Install system requirements +RUN apt-get update && apt-get install -y git && rm -rf /var/lib/apt/lists/* + +# Copy implementation +COPY entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh + +ENTRYPOINT ["/entrypoint.sh"] \ No newline at end of file diff --git a/actions/core/tag-operations/action.yml b/actions/core/tag-operations/action.yml new file mode 100644 index 0000000..34afff0 --- /dev/null +++ b/actions/core/tag-operations/action.yml @@ -0,0 +1,49 @@ +# Generated from axioms/git/tag-operations.fcm +# Model: git.tag-operations v1.0.0 +# Generated: 2025-01-06T12:00:00Z +# DO NOT EDIT - Changes will be overwritten by bridge generator + +name: Tag Operations +description: Manage git tags with create, delete, list, push, and check operations +inputs: + action: + description: Action (Options: create, delete, list, push, check) + required: true + tag_name: + description: Tag Name + required: false + default: '' + message: + description: Message + required: false + default: '' + remote: + description: Remote + required: false + default: '' + force: + description: Force + required: false + default: '' + target_commit: + description: Target Commit + required: false + 
default: '' + prefix: + description: Prefix + required: false + default: '' +outputs: + tag_created: + description: Tag Created + tag_deleted: + description: Tag Deleted + tags_list: + description: Tags List + tag_exists: + description: Tag Exists + operation_status: + description: Operation Status +runs: + using: docker + image: Dockerfile \ No newline at end of file diff --git a/actions/core/tag-operations/entrypoint.sh b/actions/core/tag-operations/entrypoint.sh new file mode 100644 index 0000000..b9019c0 --- /dev/null +++ b/actions/core/tag-operations/entrypoint.sh @@ -0,0 +1,12 @@ +#!/bin/bash +# Generated entrypoint for tag-operations +# Implementation should be provided by external package + +echo "Action: tag-operations" +echo "Capability: Manage git tags with create, delete, list, push, and check operations" +echo "" +echo "This is a generated placeholder." +echo "Actual implementation should be at: github.com/deepworks-net/tag-operations-action" + +# Pass through to external implementation +# exec python -m tag_operations_action "$@" \ No newline at end of file diff --git a/analyze-actions.py b/analyze-actions.py new file mode 100644 index 0000000..509dff8 --- /dev/null +++ b/analyze-actions.py @@ -0,0 +1,357 @@ +#!/usr/bin/env python3 +""" +Action Analysis for Repository Reorganization - Phase 2 +Model: github.toolkit.reorganization v1.0.0 + +Analyzes existing actions to prepare for FCM transformation. +""" + +import os +import yaml +import json +from pathlib import Path +from typing import Dict, List, Any, Set +from dataclasses import dataclass, field, asdict +from collections import defaultdict + +@dataclass +class ActionAnalysis: + """Analysis results for a single action.""" + name: str + path: str + type: str # 'core' or 'composite' + domain: str # 'git', 'version', 'release', 'github' + inputs: Dict[str, Any] = field(default_factory=dict) + outputs: Dict[str, Any] = field(default_factory=dict) + hardcoded_values: List[Dict[str, Any]] = field(default_factory=list) + dependencies: List[str] = field(default_factory=list) + patterns: List[str] = field(default_factory=list) + docker_info: Dict[str, Any] = field(default_factory=dict) + implementation_files: List[str] = field(default_factory=list) + test_files: List[str] = field(default_factory=list) + +class ActionAnalyzer: + """Analyzes GitHub Actions for reorganization.""" + + def __init__(self, project_root: str = "."): + self.project_root = Path(project_root) + self.actions_dir = self.project_root / "actions" + self.analyses: List[ActionAnalysis] = [] + + def analyze_all_actions(self) -> None: + """Analyze all actions in the repository.""" + print("Analyzing GitHub Actions...") + + # Analyze core actions + core_dir = self.actions_dir / "core" + if core_dir.exists(): + for action_dir in core_dir.iterdir(): + if action_dir.is_dir() and (action_dir / "action.yml").exists(): + self.analyze_action(action_dir, "core") + + # Analyze composite actions + composite_dir = self.actions_dir / "composite" + if composite_dir.exists(): + for action_dir in composite_dir.iterdir(): + if action_dir.is_dir() and (action_dir / "action.yml").exists(): + self.analyze_action(action_dir, "composite") + + def analyze_action(self, action_path: Path, action_type: str) -> ActionAnalysis: + """Analyze a single action.""" + action_name = action_path.name + print(f"\nAnalyzing {action_type} action: {action_name}") + + analysis = ActionAnalysis( + name=action_name, + path=str(action_path.relative_to(self.project_root)), + type=action_type, + 
domain=self.determine_domain(action_name) + ) + + # Load action.yml + action_yml_path = action_path / "action.yml" + if action_yml_path.exists(): + with open(action_yml_path, 'r') as f: + action_config = yaml.safe_load(f) + + # Extract inputs and outputs + analysis.inputs = action_config.get('inputs', {}) + analysis.outputs = action_config.get('outputs', {}) + + # Check for hardcoded values in action.yml + self.find_hardcoded_values_in_yaml(action_config, analysis) + + # Analyze implementation files + self.analyze_implementation_files(action_path, analysis) + + # Analyze Dockerfile + dockerfile_path = action_path / "Dockerfile" + if dockerfile_path.exists(): + self.analyze_dockerfile(dockerfile_path, analysis) + + # Find test files + test_dir = action_path / "tests" + if test_dir.exists(): + analysis.test_files = [str(f.relative_to(action_path)) + for f in test_dir.glob("*.py")] + + # Identify patterns + self.identify_patterns(analysis) + + self.analyses.append(analysis) + return analysis + + def determine_domain(self, action_name: str) -> str: + """Determine the domain of an action based on its name.""" + if 'branch' in action_name or 'tag' in action_name or 'commit' in action_name: + return 'git' + elif 'version' in action_name: + return 'version' + elif 'release' in action_name or 'changelog' in action_name: + return 'release' + else: + return 'github' + + def find_hardcoded_values_in_yaml(self, config: Dict, analysis: ActionAnalysis) -> None: + """Find hardcoded values in YAML configuration.""" + # Check for version numbers + yaml_str = str(config) + import re + + # Version patterns + version_matches = re.findall(r'\b\d+\.\d+\.\d+\b', yaml_str) + for match in version_matches: + analysis.hardcoded_values.append({ + 'type': 'version', + 'value': match, + 'location': 'action.yml' + }) + + def analyze_implementation_files(self, action_path: Path, analysis: ActionAnalysis) -> None: + """Analyze Python/shell implementation files.""" + # Find Python files + py_files = list(action_path.glob("*.py")) + if action_path / "src" in action_path.iterdir(): + py_files.extend((action_path / "src").glob("*.py")) + + for py_file in py_files: + analysis.implementation_files.append(str(py_file.relative_to(action_path))) + self.analyze_python_file(py_file, analysis) + + # Find shell scripts + sh_files = list(action_path.glob("*.sh")) + for sh_file in sh_files: + analysis.implementation_files.append(str(sh_file.relative_to(action_path))) + + def analyze_python_file(self, py_file: Path, analysis: ActionAnalysis) -> None: + """Analyze a Python file for hardcoded values and dependencies.""" + try: + with open(py_file, 'r') as f: + content = f.read() + + # Find imports (dependencies) + import re + import_matches = re.findall(r'^(?:from|import)\s+(\S+)', content, re.MULTILINE) + for imp in import_matches: + base_module = imp.split('.')[0] + if base_module not in ['os', 'sys', 'json', 'yaml', 're', 'subprocess']: + analysis.dependencies.append(base_module) + + # Find hardcoded strings that might be configuration + string_matches = re.findall(r'["\']([^"\']+)["\']', content) + for match in string_matches: + # Check for paths + if '/' in match and not match.startswith('http'): + analysis.hardcoded_values.append({ + 'type': 'path', + 'value': match, + 'location': str(py_file.name) + }) + # Check for version-like strings + elif re.match(r'^\d+\.\d+\.\d+$', match): + analysis.hardcoded_values.append({ + 'type': 'version', + 'value': match, + 'location': str(py_file.name) + }) + except Exception as e: + print(f" 
Warning: Could not analyze {py_file}: {e}") + + def analyze_dockerfile(self, dockerfile_path: Path, analysis: ActionAnalysis) -> None: + """Analyze Dockerfile for configuration.""" + try: + with open(dockerfile_path, 'r') as f: + content = f.read() + + # Extract base image + import re + from_match = re.search(r'^FROM\s+(.+)$', content, re.MULTILINE) + if from_match: + analysis.docker_info['base_image'] = from_match.group(1) + + # Find version pins + version_matches = re.findall(r'[=><]+\s*(\d+\.\d+(?:\.\d+)?)', content) + for match in version_matches: + analysis.hardcoded_values.append({ + 'type': 'version', + 'value': match, + 'location': 'Dockerfile' + }) + except Exception as e: + print(f" Warning: Could not analyze Dockerfile: {e}") + + def identify_patterns(self, analysis: ActionAnalysis) -> None: + """Identify common patterns in the action.""" + patterns = [] + + # Git operation pattern + if analysis.domain == 'git': + if 'branch' in analysis.name: + patterns.append('git-branch-operation') + elif 'tag' in analysis.name: + patterns.append('git-tag-operation') + elif 'commit' in analysis.name: + patterns.append('git-commit-operation') + + # Version manipulation pattern + if analysis.domain == 'version': + patterns.append('version-manipulation') + + # File update pattern + if any('file' in inp.lower() or 'path' in inp.lower() + for inp in analysis.inputs.keys()): + patterns.append('file-update') + + # GitHub API pattern + if 'github' in str(analysis.dependencies).lower(): + patterns.append('github-api-interaction') + + analysis.patterns = patterns + + def generate_report(self) -> Dict[str, Any]: + """Generate analysis report.""" + report = { + 'summary': { + 'total_actions': len(self.analyses), + 'core_actions': len([a for a in self.analyses if a.type == 'core']), + 'composite_actions': len([a for a in self.analyses if a.type == 'composite']), + 'domains': defaultdict(int), + 'patterns': defaultdict(int), + 'hardcoded_values': defaultdict(int) + }, + 'actions': [] + } + + # Aggregate statistics + for analysis in self.analyses: + report['summary']['domains'][analysis.domain] += 1 + + for pattern in analysis.patterns: + report['summary']['patterns'][pattern] += 1 + + for hardcoded in analysis.hardcoded_values: + report['summary']['hardcoded_values'][hardcoded['type']] += 1 + + # Add action details + report['actions'].append(asdict(analysis)) + + return report + + def generate_migration_plan(self) -> Dict[str, Any]: + """Generate migration plan for Phase 2.""" + plan = { + 'phase2_tasks': [] + } + + for analysis in self.analyses: + task = { + 'action': analysis.name, + 'steps': [] + } + + # Step 1: Create FCM + task['steps'].append({ + 'step': 'create_fcm', + 'description': f'Create axioms/{analysis.domain}/{analysis.name}.fcm', + 'preserve': ['inputs', 'outputs', 'behavior'], + 'remove': ['docker_details', 'implementation'] + }) + + # Step 2: Extract parameters + if analysis.hardcoded_values: + task['steps'].append({ + 'step': 'extract_parameters', + 'description': 'Replace hardcoded values with parameters', + 'values': analysis.hardcoded_values + }) + + # Step 3: Create template + if analysis.docker_info: + task['steps'].append({ + 'step': 'create_template', + 'description': f'Create mechanics/actions/{analysis.name}.template', + 'from': f'{analysis.path}/Dockerfile' + }) + + # Step 4: External package + if analysis.implementation_files: + task['steps'].append({ + 'step': 'create_package', + 'description': f'Publish to github.com/deepworks-net/{analysis.name}-action', + 'files': 
analysis.implementation_files + }) + + plan['phase2_tasks'].append(task) + + return plan + +def main(): + """Main entry point.""" + analyzer = ActionAnalyzer() + + print("=== GitHub Actions Analysis for Repository Reorganization ===") + print("Model: github.toolkit.reorganization v1.0.0") + print() + + # Analyze all actions + analyzer.analyze_all_actions() + + # Generate report + report = analyzer.generate_report() + + print("\n=== Analysis Summary ===") + print(f"Total actions analyzed: {report['summary']['total_actions']}") + print(f"Core actions: {report['summary']['core_actions']}") + print(f"Composite actions: {report['summary']['composite_actions']}") + + print("\nActions by domain:") + for domain, count in report['summary']['domains'].items(): + print(f" {domain}: {count}") + + print("\nCommon patterns found:") + for pattern, count in report['summary']['patterns'].items(): + print(f" {pattern}: {count}") + + print("\nHardcoded values found:") + for value_type, count in report['summary']['hardcoded_values'].items(): + print(f" {value_type}: {count}") + + # Generate migration plan + migration_plan = analyzer.generate_migration_plan() + + # Save reports + with open('action-analysis-report.json', 'w') as f: + json.dump(report, f, indent=2) + print("\nDetailed report saved to: action-analysis-report.json") + + with open('phase2-migration-plan.json', 'w') as f: + json.dump(migration_plan, f, indent=2) + print("Migration plan saved to: phase2-migration-plan.json") + + print("\n=== Next Steps ===") + print("1. Review action-analysis-report.json for detailed findings") + print("2. Review phase2-migration-plan.json for migration tasks") + print("3. Begin Phase 2 transformation following the migration plan") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/axioms/README.md b/axioms/README.md new file mode 100644 index 0000000..56ceb26 --- /dev/null +++ b/axioms/README.md @@ -0,0 +1,11 @@ +# Axioms Layer + +This layer contains atomic capabilities - the foundational building blocks. + +## Structure +- `git/` - Git operation axioms +- `version/` - Version management axioms +- `release/` - Release process axioms +- `github/` - GitHub-specific axioms + +Each axiom is defined as an FCM (Formal Conceptual Model) file. 
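For illustration, a minimal invocation of the generator wrapper added earlier in this patch against the tag-operations axiom defined just below; the paths match the layout this patch creates, but the call itself is a usage sketch rather than generated output:

```bash
# Usage sketch, assuming the repository root and the .bridge/generate.sh wrapper above.
./.bridge/generate.sh axioms/git/tag-operations.fcm
# Per the script's OUTPUT_DIR logic, this would populate:
#   actions/core/tag-operations/
#     action.yml  Dockerfile  entrypoint.sh  .bridge-sync

# Or regenerate every action from all axioms:
./.bridge/generate.sh --generate-all
```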
diff --git a/axioms/git/tag-operations.fcm b/axioms/git/tag-operations.fcm new file mode 100644 index 0000000..103466c --- /dev/null +++ b/axioms/git/tag-operations.fcm @@ -0,0 +1,37 @@ +# Tag Operations Axiom - Formal Conceptual Model +Model: git.tag-operations +Version: 1.0.0 +Layer: Axiom +Domain: git + +Capability: Manage git tags with create, delete, list, push, and check operations + +Parameters: + - action: create|delete|list|push|check + - tag_name: string (optional) + - message: string (optional) + - remote: boolean (optional) + - force: boolean (optional) + - target_commit: string (optional) + - prefix: string (optional) + +Outputs: + - tag_created + - tag_deleted + - tags_list + - tag_exists + - operation_status + +Interface: + type: docker + image: python:3.9-slim + requirements: [git] + +Dependencies: + - git + - github-token (optional) + +Patterns: + - git-operation + - tag-management + - version-control \ No newline at end of file diff --git a/create-layer-structure.sh b/create-layer-structure.sh new file mode 100644 index 0000000..ee759d9 --- /dev/null +++ b/create-layer-structure.sh @@ -0,0 +1,106 @@ +#!/bin/bash +# Repository Reorganization - Phase 1: Create Layer Structure +# Model: github.toolkit.reorganization v1.0.0 + +echo "Creating six-layer architecture structure..." + +# Layer 1: Axioms (Foundational capabilities) +echo "Creating axioms layer..." +mkdir -p axioms/{git,version,release,github} + +# Layer 2: Logic (Compositions and relationships) +echo "Creating logic layer..." +mkdir -p logic + +# Layer 3: Patterns (Reusable structures) +echo "Creating patterns layer..." +mkdir -p patterns + +# Layer 4: Mechanics (Operational templates) +echo "Creating mechanics layer..." +mkdir -p mechanics/{workflows,actions} + +# Layer 5: Reflection (Self-awareness and analysis) +echo "Creating reflection layer..." +mkdir -p reflection/{orchestrator,analyzer} + +# Layer 6: Emergence (Discovered capabilities) +echo "Creating emergence layer..." +mkdir -p emergence + +# Create initial README files for each layer +cat > axioms/README.md << 'EOF' +# Axioms Layer + +This layer contains atomic capabilities - the foundational building blocks. + +## Structure +- `git/` - Git operation axioms +- `version/` - Version management axioms +- `release/` - Release process axioms +- `github/` - GitHub-specific axioms + +Each axiom is defined as an FCM (Formal Conceptual Model) file. +EOF + +cat > logic/README.md << 'EOF' +# Logic Layer + +This layer contains compositions and relationships between axioms. + +## Key Files +- `compositions.fcm` - How axioms combine +- `dependencies.fcm` - Relationship mappings +EOF + +cat > patterns/README.md << 'EOF' +# Patterns Layer + +This layer contains reusable workflow patterns built from logic compositions. +EOF + +cat > mechanics/README.md << 'EOF' +# Mechanics Layer + +This layer contains operational templates and implementations. + +## Structure +- `workflows/` - GitHub workflow templates +- `actions/` - Action implementation templates +EOF + +cat > reflection/README.md << 'EOF' +# Reflection Layer + +This layer contains self-awareness and analysis capabilities. + +## Structure +- `orchestrator/` - Self-maintenance and updates +- `analyzer/` - Capability discovery and documentation +EOF + +cat > emergence/README.md << 'EOF' +# Emergence Layer + +This layer contains discovered patterns and capabilities that emerge from the system. +EOF + +echo "Layer structure created successfully!" 
+echo "" +echo "Directory tree:" +tree -d -L 2 axioms logic patterns mechanics reflection emergence 2>/dev/null || { + echo "axioms/" + echo "├── git/" + echo "├── version/" + echo "├── release/" + echo "└── github/" + echo "logic/" + echo "patterns/" + echo "mechanics/" + echo "├── workflows/" + echo "└── actions/" + echo "reflection/" + echo "├── orchestrator/" + echo "└── analyzer/" + echo "emergence/" +} \ No newline at end of file diff --git a/docs/getting-started.md b/docs/getting-started.md index 08446cb..91cfd0d 100644 --- a/docs/getting-started.md +++ b/docs/getting-started.md @@ -2,6 +2,14 @@ This guide will help you understand and implement Deepworks GitHub Actions workflows in your repositories. +## Architecture Overview + +Deepworks Actions use a **six-layer FCM architecture** with a bridge system: + +- **Axioms**: Core capabilities defined as Formal Conceptual Models +- **Generated Actions**: GitHub-compatible interfaces automatically generated from axioms +- **Bridge System**: Ensures consistency between pure definitions and GitHub requirements + ## Overview Deepworks Actions provide a suite of reusable workflows for: @@ -10,6 +18,8 @@ Deepworks Actions provide a suite of reusable workflows for: - Release management and versioning - Changelog automation - Repository standardization +- Git operations (branches, tags, commits) +- Version calculation and updating ## Core Workflows @@ -62,6 +72,8 @@ graph TD ## Setup Instructions +### Using Generated Actions + 1. **Repository Configuration** ```yaml @@ -77,16 +89,34 @@ graph TD uses: deepworks-net/github.actions/.github/workflows/mkdocs-gh-pages.yml@main ``` -2. **Required Files** +2. **Using Core Actions** + + ```yaml + # Using generated tag operations + - uses: deepworks-net/github.actions/actions/core/tag-operations@main + with: + action: create + tag_name: v1.0.0 + message: "Release version 1.0.0" + ``` + +3. **Required Files** - `mkdocs.yml` for documentation - `CHANGELOG.md` for release notes - `.github/release-drafter.yml` for release configuration -3. **Repository Settings** +4. **Repository Settings** - Enable GitHub Pages - Set appropriate branch protections - Configure required status checks +### Working with FCM Architecture + +1. **Understanding Axioms**: Browse `axioms/` to see available capabilities +2. **Never Edit Actions Directly**: All actions in `actions/` are generated +3. **Modify Through FCMs**: Edit capability definitions in `axioms/` directory +4. **Regenerate When Needed**: Use bridge tools to update generated actions + ## Basic Usage ### Documentation Updates diff --git a/docs/guides/fcm-bridge-architecture.md b/docs/guides/fcm-bridge-architecture.md new file mode 100644 index 0000000..e44aae1 --- /dev/null +++ b/docs/guides/fcm-bridge-architecture.md @@ -0,0 +1,297 @@ +# FCM Bridge Architecture Guide + +## Overview + +The FCM Bridge Architecture maintains GitHub Actions compatibility while achieving architectural purity through automated generation. This system resolves the duality between Formal Conceptual Models (FCM) and GitHub's practical requirements. 
+ +## Architecture Principles + +### Duality Resolution + +The bridge system maintains two complementary layers: + +- **Source Layer**: Pure FCM definitions without operational concerns +- **Interface Layer**: GitHub-compatible actions for practical use +- **Bridge Layer**: Automated generation maintaining perfect synchronization + +### Single Source of Truth + +All capabilities are defined once in FCM format: + +``` +axioms/git/tag-operations.fcm → actions/core/tag-operations/ +``` + +The `actions/` directory becomes a "compiled" view of the architecture, similar to how binary files are generated from source code. + +## Directory Structure + +``` +github.toolkit/ +├── axioms/ # SOURCE: Pure FCM definitions +│ ├── git/ # Git operations +│ ├── version/ # Version management +│ ├── release/ # Release processes +│ └── github/ # GitHub-specific operations +├── logic/ # SOURCE: Relationships +├── patterns/ # SOURCE: Workflows +├── mechanics/ # SOURCE: Templates +├── reflection/ # SOURCE: Meta-capabilities +├── emergence/ # SOURCE: System properties +│ +├── .bridge/ # BRIDGE: Generation machinery +│ ├── generator.py # FCM-to-action compiler +│ ├── validator.py # Alignment checker +│ └── manifest.json # Source-to-interface map +│ +└── actions/ # INTERFACE: GitHub conventions + ├── core/ # Generated from axioms + └── composite/ # Generated from patterns +``` + +## FCM Format + +### Basic Structure + +``` +# capability-name.fcm +Model: domain.capability-name +Version: 1.0.0 +Layer: Axiom +Domain: git + +Capability: Brief description of what this does + +Parameters: + - param_name: type|options (optional) + - action: create|delete|list|push|check + - tag_name: string (optional) + +Outputs: + - output_name + - operation_status + +Interface: + type: docker + image: python:3.9-slim + requirements: [git] + +Dependencies: + - git + - github-token (optional) + +Patterns: + - pattern-name + - category-operation +``` + +### Parameter Types + +- **string**: Text input +- **boolean**: True/false value +- **choice**: Enumerated options (pipe-separated) +- **optional**: Mark with `(optional)` suffix + +## Bridge Generation Process + +### 1. FCM Parsing + +The generator parses FCM files to extract: +- Capability metadata +- Parameter definitions +- Output specifications +- Interface requirements +- Dependencies + +### 2. Action Generation + +Creates GitHub-compatible structure: + +```yaml +# Generated action.yml +name: Capability Name +description: FCM capability description +inputs: + param_name: + description: Parameter description + required: true/false +outputs: + output_name: + description: Output description +runs: + using: docker + image: Dockerfile +``` + +### 3. Dockerfile Generation + +Creates container definition from FCM interface: + +```dockerfile +# Generated Dockerfile +FROM python:3.9-slim +RUN apt-get update && apt-get install -y git +COPY entrypoint.sh /entrypoint.sh +RUN chmod +x /entrypoint.sh +ENTRYPOINT ["/entrypoint.sh"] +``` + +### 4. Metadata Tracking + +Creates `.bridge-sync` file: + +```json +{ + "source": "axioms/git/tag-operations.fcm", + "generated": "2025-01-06T12:00:00Z", + "version": "1.0.0", + "checksum": "sha256:abc123..." +} +``` + +## Working with the Bridge + +### Creating New Capabilities + +1. **Define FCM**: Create new file in appropriate `axioms/` subdirectory +2. **Generate Action**: Run `.bridge/generator.py axioms/domain/name.fcm` +3. **Validate**: Run `.bridge/validator.py` to ensure alignment +4. 
**Implement**: Provide external implementation package + +### Modifying Existing Capabilities + +1. **Edit FCM**: Modify source definition in `axioms/` +2. **Regenerate**: Run generator on modified FCM +3. **Validate**: Check alignment and GitHub compatibility +4. **Never Edit Actions**: Changes to `actions/` will be overwritten + +### Bridge Commands + +```bash +# Generate all actions from FCMs +./.bridge/generator.py --generate-all + +# Generate specific action +./.bridge/generator.py axioms/git/tag-operations.fcm + +# Validate all alignments +./.bridge/validator.py + +# Check specific action alignment +./.bridge/validator.py actions/core/tag-operations +``` + +## Validation System + +### Automatic Checks + +The validator ensures: + +- ✅ Every FCM has corresponding action +- ✅ Every action has sync metadata +- ✅ Checksums match between source and generated +- ✅ No manual edits in generated files +- ✅ GitHub Actions compatibility + +### Sync Monitoring + +The bridge tracks: +- **Source-to-interface mappings** +- **Generation timestamps** +- **FCM version tracking** +- **Checksum validation** + +## Best Practices + +### FCM Development + +1. **Single Capability**: Each FCM defines one atomic capability +2. **Clear Parameters**: Use descriptive names and appropriate types +3. **Minimal Dependencies**: Reduce external requirements +4. **Domain Alignment**: Place FCMs in correct domain directories + +### Bridge Maintenance + +1. **Regular Validation**: Run validator after FCM changes +2. **Clean Generation**: Always regenerate after modifications +3. **Version Tracking**: Update FCM versions for significant changes +4. **Documentation Sync**: Keep documentation aligned with FCMs + +### GitHub Integration + +1. **Use Generated Actions**: Reference actions from `actions/` directory +2. **External Implementation**: Provide actual functionality via packages +3. **Testing**: Test generated actions in real workflows +4. **Compatibility**: Ensure GitHub Actions requirements are met + +## Migration Strategy + +### From Traditional Actions + +1. **Analyze Existing**: Review current action structure +2. **Extract FCM**: Create FCM definition capturing capability +3. **Generate New**: Create action from FCM +4. **Compare**: Validate functionality equivalence +5. **Replace**: Swap traditional action with generated version + +### Validation Process + +1. **Functional Testing**: Ensure generated actions work +2. **Parameter Mapping**: Verify all inputs/outputs preserved +3. **Workflow Integration**: Test in actual GitHub workflows +4. 
**Documentation Update**: Reflect changes in guides + +## Troubleshooting + +### Common Issues + +**Generation Fails**: +- Check FCM syntax +- Verify required sections +- Review parameter definitions + +**Validation Errors**: +- Ensure FCM unchanged since generation +- Check for manual edits in actions +- Verify sync file integrity + +**GitHub Compatibility**: +- Validate action.yml structure +- Check required GitHub Action fields +- Test in actual workflow + +### Debug Commands + +```bash +# Check FCM syntax +cat axioms/domain/name.fcm + +# View generation manifest +cat .bridge/manifest.json + +# Check sync status +cat actions/core/name/.bridge-sync + +# Test action locally +act -j test-action +``` + +## Future Enhancements + +### Planned Features + +- **Pattern Generation**: Composite actions from workflow patterns +- **Dependency Resolution**: Automatic external package management +- **Live Monitoring**: Real-time sync validation +- **Template Evolution**: Improved mechanics templates + +### Integration Opportunities + +- **GitHub Packages**: Automatic implementation hosting +- **CI/CD Integration**: Automated generation triggers +- **Documentation Generation**: Automatic docs from FCMs +- **Testing Framework**: Automated action testing + +## Conclusion + +The FCM Bridge Architecture provides a robust foundation for maintaining both architectural purity and practical GitHub compatibility. By treating actions as compiled artifacts from FCM sources, the system ensures consistency while enabling rapid iteration and reliable automation. \ No newline at end of file diff --git a/docs/meta-level.md b/docs/meta-level.md index 8fa69e3..0180612 100644 --- a/docs/meta-level.md +++ b/docs/meta-level.md @@ -1,8 +1,27 @@ -# Meta-Level Documentation: Git Development and Deployment Workflows +# Meta-Level Documentation: FCM Architecture and Development Workflows ## Overview -This document provides a meta-level analysis of the Git-based development and deployment workflows used in the repository. The goal is to align these workflows conceptually and technically while identifying gaps and ensuring consistency. +This document provides a meta-level analysis of the repository's Formal Conceptual Model (FCM) architecture and development workflows. The repository has evolved from a traditional action collection to a **six-layer architecture** with bridge-based generation, achieving both architectural purity and GitHub compatibility. + +## Architecture Evolution + +### Traditional Structure → FCM Architecture + +The repository has transformed from mixed architecture-operation structure to pure architectural patterns: + +- **Before**: Actions contained both definitions and implementations +- **After**: Pure FCM definitions with generated GitHub interfaces +- **Bridge**: Automated generation maintains GitHub compatibility + +### Six-Layer FCM Structure + +1. **Axioms** (`axioms/`) - Foundational capability definitions +2. **Logic** (`logic/`) - Compositions and relationships +3. **Patterns** (`patterns/`) - Reusable workflow structures +4. **Mechanics** (`mechanics/`) - Implementation templates +5. **Reflection** (`reflection/`) - Self-awareness and analysis +6. **Emergence** (`emergence/`) - Discovered system properties --- @@ -57,9 +76,24 @@ This document provides a meta-level analysis of the Git-based development and de #### Tools and Actions - **Release Drafter**: Generates draft release notes based on merged PRs. -- **Custom Actions**: - - `version_calculation.py` for semantic versioning. 
- - `update_changelog.py` for changelog management. +- **Generated Actions**: All actions are now generated from FCM axioms: + - `axioms/version/calculate.fcm` → `actions/core/version-calculator/` + - `axioms/release/changelog.fcm` → `actions/core/update-changelog/` + - `axioms/git/tag.fcm` → `actions/core/tag-operations/` + +#### Bridge Integration + +- **Source of Truth**: FCM definitions in `axioms/` +- **Generated Interface**: GitHub-compatible actions in `actions/` +- **Validation**: Automated alignment checking via `.bridge/validator.py` +- **Regeneration**: Actions updated when FCMs change + +#### FCM Migration Status + +- ✅ Architecture established +- ✅ Bridge generation system operational +- ✅ First axiom (tag-operations) generated +- 🔄 Migration of remaining actions in progress #### Known Issues diff --git a/mechanics/README.md b/mechanics/README.md new file mode 100644 index 0000000..486038b --- /dev/null +++ b/mechanics/README.md @@ -0,0 +1,7 @@ +# Mechanics Layer + +This layer contains operational templates and implementations. + +## Structure +- `workflows/` - GitHub workflow templates +- `actions/` - Action implementation templates diff --git a/phase2-migration-plan.md b/phase2-migration-plan.md new file mode 100644 index 0000000..4539139 --- /dev/null +++ b/phase2-migration-plan.md @@ -0,0 +1,130 @@ +# Phase 2 Migration Plan - Action Analysis + +## Summary + +Based on analysis of the repository structure, the following actions need transformation: + +### Core Actions (7) +1. **branch_operations** - Git domain +2. **commit_operations** - Git domain +3. **tag_operations** - Git domain +4. **version_calculator** - Version domain +5. **version_updater** - Version domain +6. **manage_release** - Release domain + +### Composite Actions (4) +1. **git_ops** - Git domain (orchestrates branch/tag/commit) +2. **release_notes** - Release domain +3. **release_operations** - Release domain +4. **update_changelog** - Release domain + +## Migration Tasks + +### 1. Branch Operations +**Current Location**: `actions/core/branch_operations/` +**Target FCM**: `axioms/git/branch.fcm` + +**Steps**: +- Extract action.yml inputs/outputs to FCM +- Parameter: operation type (create, delete, checkout, list, merge) +- Template: Docker operations +- External: main.py → github.com/deepworks-net/branch-operations-action + +### 2. Tag Operations +**Current Location**: `actions/core/tag_operations/` +**Target FCM**: `axioms/git/tag.fcm` + +**Steps**: +- Extract action.yml structure to FCM +- Parameter: operation type (create, delete, push, list) +- Template: Docker operations +- External: main.py → github.com/deepworks-net/tag-operations-action + +### 3. Commit Operations +**Current Location**: `actions/core/commit_operations/` +**Target FCM**: `axioms/git/commit.fcm` + +**Steps**: +- Extract action.yml structure to FCM +- Parameter: operation type (create, amend, list, cherry-pick, revert) +- Template: Docker operations +- External: main.py + git_utils.py → github.com/deepworks-net/commit-operations-action + +### 4. Version Calculator +**Current Location**: `actions/core/version_calculator/` +**Target FCM**: `axioms/version/calculate.fcm` + +**Steps**: +- Extract version calculation logic to FCM +- Parameter: bump type (major, minor, patch) +- Template: Version patterns +- External: main.py → github.com/deepworks-net/version-calculator-action + +### 5. 
Version Updater +**Current Location**: `actions/core/version_updater/` +**Target FCM**: `axioms/version/update.fcm` + +**Steps**: +- Extract file update patterns to FCM +- Parameter: version placeholder patterns +- Template: File update operations +- External: main.py → github.com/deepworks-net/version-updater-action + +### 6. Manage Release +**Current Location**: `actions/core/manage_release/` +**Target FCM**: `axioms/release/manage.fcm` + +**Steps**: +- Extract release workflow to FCM +- Parameter: release type, version +- Template: Release orchestration +- External: main.py → github.com/deepworks-net/manage-release-action + +### 7. Git Operations (Composite) +**Current Location**: `actions/composite/git_ops/` +**Target Pattern**: `patterns/git-operations.fcm` + +**Steps**: +- Define composition of branch + tag + commit axioms +- Create logic/compositions.fcm entry +- Map dependencies in logic/dependencies.fcm + +### 8. Release Notes +**Current Location**: `actions/composite/release_notes/` +**Target FCM**: `axioms/release/notes.fcm` + +**Steps**: +- Extract PR/commit parsing logic +- Parameter: note format template +- External: release_notes.py → github.com/deepworks-net/release-notes-action + +### 9. Update Changelog +**Current Location**: `actions/composite/update_changelog/` +**Target FCM**: `axioms/release/changelog.fcm` + +**Steps**: +- Extract changelog format patterns +- Parameter: changelog template +- External: update_changelog.py → github.com/deepworks-net/update-changelog-action + +## Identified Patterns + +### Common Hardcoded Values to Extract: +- Python versions in Dockerfiles (3.9, 3.10, etc.) +- File paths (/github/workspace, etc.) +- Default branch names (main, develop) +- Version number formats + +### Reusable Templates: +- Docker base images for Python actions +- Git configuration setup +- GitHub token handling +- Error handling patterns + +## Next Steps + +1. Create first axiom FCM as example (suggest starting with tag_operations) +2. Establish external package structure +3. Create mechanics templates +4. Test transformation with one complete action +5. Automate remaining transformations \ No newline at end of file diff --git a/reflection/README.md b/reflection/README.md new file mode 100644 index 0000000..08fda68 --- /dev/null +++ b/reflection/README.md @@ -0,0 +1,7 @@ +# Reflection Layer + +This layer contains self-awareness and analysis capabilities. + +## Structure +- `orchestrator/` - Self-maintenance and updates +- `analyzer/` - Capability discovery and documentation