diff --git a/cortex-cleanup.sh b/cortex-cleanup.sh deleted file mode 100755 index 0b1972f..0000000 --- a/cortex-cleanup.sh +++ /dev/null @@ -1,147 +0,0 @@ -#!/bin/bash -# Cortex Linux - Repo Cleanup Script -# Run this once to organize the repo for public launch -# Usage: cd ~/cortex && bash cortex-cleanup.sh - -set -e - -echo "๐Ÿงน CORTEX LINUX REPO CLEANUP" -echo "============================" -echo "" - -cd ~/cortex || { echo "โŒ ~/cortex not found"; exit 1; } - -# Confirm we're in the right place -if [ ! -f "README.md" ] || [ ! -d ".git" ]; then - echo "โŒ Not in cortex repo root. Run from ~/cortex" - exit 1 -fi - -echo "๐Ÿ“ Current root files: $(ls *.py *.sh *.json *.csv *.md 2>/dev/null | wc -l | tr -d ' ')" -echo "" - -# Step 1: Create directories if they don't exist -echo "1๏ธโƒฃ Creating directory structure..." -mkdir -p cortex/modules -mkdir -p tests -mkdir -p scripts -mkdir -p docs -mkdir -p internal - -# Step 2: Move Python modules into cortex/ -echo "2๏ธโƒฃ Moving Python modules to cortex/..." -for file in context_memory.py dependency_resolver.py error_parser.py \ - installation_history.py installation_verifier.py llm_router.py \ - logging_system.py; do - if [ -f "$file" ]; then - mv "$file" cortex/ 2>/dev/null && echo " โœ“ $file โ†’ cortex/" - fi -done - -# Step 3: Move test files into tests/ -echo "3๏ธโƒฃ Moving test files to tests/..." -for file in test_*.py; do - if [ -f "$file" ]; then - mv "$file" tests/ 2>/dev/null && echo " โœ“ $file โ†’ tests/" - fi -done - -# Step 4: Move shell scripts into scripts/ -echo "4๏ธโƒฃ Moving shell scripts to scripts/..." -for file in *.sh; do - # Keep this cleanup script in root temporarily - if [ "$file" != "cortex-cleanup.sh" ] && [ -f "$file" ]; then - mv "$file" scripts/ 2>/dev/null && echo " โœ“ $file โ†’ scripts/" - fi -done - -# Step 5: Move markdown docs to docs/ (except key root files) -echo "5๏ธโƒฃ Moving documentation to docs/..." 
-for file in *.md; do - case "$file" in - README.md|CHANGELOG.md|LICENSE|Contributing.md) - echo " โŠ˜ $file (keeping in root)" - ;; - *) - if [ -f "$file" ]; then - mv "$file" docs/ 2>/dev/null && echo " โœ“ $file โ†’ docs/" - fi - ;; - esac -done - -# Step 6: Move internal/admin files and gitignore them -echo "6๏ธโƒฃ Moving internal files to internal/..." -for file in bounties_owed.csv bounties_pending.json contributors.json \ - issue_status.json payments_history.json pr_status.json; do - if [ -f "$file" ]; then - mv "$file" internal/ 2>/dev/null && echo " โœ“ $file โ†’ internal/" - fi -done - -# Step 7: Delete duplicate/junk files -echo "7๏ธโƒฃ Removing duplicate files..." -rm -f "README_DEPENDENCIES (1).md" 2>/dev/null && echo " โœ“ Removed README_DEPENDENCIES (1).md" -rm -f "deploy_jesse_system (1).sh" 2>/dev/null && echo " โœ“ Removed deploy_jesse_system (1).sh" - -# Step 8: Update .gitignore -echo "8๏ธโƒฃ Updating .gitignore..." -if ! grep -q "internal/" .gitignore 2>/dev/null; then - echo "" >> .gitignore - echo "# Internal admin files (bounties, payments, etc.)" >> .gitignore - echo "internal/" >> .gitignore - echo " โœ“ Added internal/ to .gitignore" -else - echo " โŠ˜ internal/ already in .gitignore" -fi - -# Step 9: Create __init__.py files if missing -echo "9๏ธโƒฃ Ensuring Python packages are importable..." 
-touch cortex/__init__.py 2>/dev/null -touch tests/__init__.py 2>/dev/null -echo " โœ“ __init__.py files created" - -# Step 10: Show results -echo "" -echo "๐Ÿ“Š CLEANUP COMPLETE" -echo "===================" -echo "Root files now: $(ls *.py *.sh *.json *.csv 2>/dev/null | wc -l | tr -d ' ') (should be ~0)" -echo "" -echo "Directory structure:" -echo " cortex/ - $(ls cortex/*.py 2>/dev/null | wc -l | tr -d ' ') Python modules" -echo " tests/ - $(ls tests/*.py 2>/dev/null | wc -l | tr -d ' ') test files" -echo " scripts/ - $(ls scripts/*.sh 2>/dev/null | wc -l | tr -d ' ') shell scripts" -echo " docs/ - $(ls docs/*.md 2>/dev/null | wc -l | tr -d ' ') markdown files" -echo " internal/ - $(ls internal/ 2>/dev/null | wc -l | tr -d ' ') admin files (gitignored)" -echo "" - -# Step 11: Git commit -echo "๐Ÿ”Ÿ Committing changes..." -git add -A -git status --short -echo "" -read -p "Commit and push these changes? (y/n): " -n 1 -r -echo -if [[ $REPLY =~ ^[Yy]$ ]]; then - git commit -m "Reorganize repo structure for public launch - -- Move Python modules to cortex/ -- Move tests to tests/ -- Move scripts to scripts/ -- Move docs to docs/ -- Move internal admin files to internal/ (gitignored) -- Remove duplicate files -- Clean root directory for professional appearance" - - git push origin main - echo "" - echo "โœ… DONE! Repo is now clean and pushed." -else - echo "" - echo "โš ๏ธ Changes staged but NOT committed. Run 'git commit' when ready." 
-fi - -echo "" -echo "๐Ÿงช NEXT STEP: Test the CLI" -echo " cd ~/cortex && source venv/bin/activate && cortex install nginx --dry-run" -echo "" diff --git a/cortex/cli.py b/cortex/cli.py index 17004c6..b775b56 100644 --- a/cortex/cli.py +++ b/cortex/cli.py @@ -38,8 +38,9 @@ validate_installation_id, ValidationError ) -# Import the new Notification Manager +# Import Notification Manager from cortex.notification_manager import NotificationManager +from cortex.optimizer import CleanupOptimizer class CortexCLI: @@ -112,10 +113,9 @@ def _clear_line(self): sys.stdout.write('\r\033[K') sys.stdout.flush() - # --- New Notification Method --- + # --- Notification Method --- def notify(self, args): """Handle notification commands""" - # Addressing CodeRabbit feedback: Handle missing subcommand gracefully if not args.notify_action: self._print_error("Please specify a subcommand (config/enable/disable/dnd/send)") return 1 @@ -132,16 +132,14 @@ def notify(self, args): elif args.notify_action == 'enable': mgr.config["enabled"] = True - # Addressing CodeRabbit feedback: Ideally should use a public method instead of private _save_config, - # but keeping as is for a simple fix (or adding a save method to NotificationManager would be best). 
- mgr._save_config() + mgr.save_config() self._print_success("Notifications enabled") return 0 elif args.notify_action == 'disable': mgr.config["enabled"] = False - mgr._save_config() - cx_print("Notifications disabled (Critical alerts will still show)", "warning") + mgr.save_config() + self._print_success("Notifications disabled (Critical alerts will still show)") return 0 elif args.notify_action == 'dnd': @@ -149,7 +147,6 @@ def notify(self, args): self._print_error("Please provide start and end times (HH:MM)") return 1 - # Addressing CodeRabbit feedback: Add time format validation try: datetime.strptime(args.start, "%H:%M") datetime.strptime(args.end, "%H:%M") @@ -159,7 +156,7 @@ def notify(self, args): mgr.config["dnd_start"] = args.start mgr.config["dnd_end"] = args.end - mgr._save_config() + mgr.save_config() self._print_success(f"DND Window updated: {args.start} - {args.end}") return 0 @@ -174,7 +171,99 @@ def notify(self, args): else: self._print_error("Unknown notify command") return 1 - # ------------------------------- + + + def cleanup(self, args): + """Run system cleanup optimization""" + optimizer = CleanupOptimizer() + + if args.cleanup_action == 'scan': + return self._cleanup_scan(optimizer) + + elif args.cleanup_action == 'run': + return self._cleanup_run(args, optimizer) + + else: + self._print_error("Unknown cleanup action") + return 1 + + def _cleanup_scan(self, optimizer): + self._print_status("๐Ÿ”", "Scanning for cleanup opportunities...") + opportunities = optimizer.scan() + + if not opportunities: + self._print_success("No cleanup opportunities found! 
system is clean.") + return 0 + + total_bytes = sum(o.size_bytes for o in opportunities) + total_mb = total_bytes / (1024 * 1024) + + console.print() + cx_header(f"Cleanup Scan Results ({total_mb:.1f} MB Reclaimable)") + + from rich.table import Table + table = Table(box=None) + table.add_column("Type", style="cyan") + table.add_column("Description") + table.add_column("Size", justify="right", style="green") + + for opp in opportunities: + size_mb = opp.size_bytes / (1024 * 1024) + table.add_row( + opp.type.replace('_', ' ').title(), + opp.description, + f"{size_mb:.1f} MB" + ) + + console.print(table) + console.print() + console.print("[dim]Run 'cortex cleanup run' to clean these items.[/dim]") + return 0 + + def _cleanup_run(self, args, optimizer): + safe_mode = not args.force + + self._print_status("๐Ÿ”", "Preparing cleanup plan...") + commands = optimizer.get_cleanup_plan() + + if not commands: + self._print_success("Nothing to clean!") + return 0 + + console.print("[bold]Proposed Cleanup Operations:[/bold]") + for i, cmd in enumerate(commands, 1): + console.print(f" {i}. {cmd}") + + if getattr(args, 'dry_run', False): + console.print("\n[dim](Dry run mode - no changes made)[/dim]") + return 0 + + if not args.yes: + if not safe_mode: + console.print("\n[bold red]WARNING: Running in FORCE mode (no backups)[/bold red]") + + confirm = input("\nProceed with cleanup? 
(y/n): ") + if confirm.lower() != 'y': + print("Operation cancelled.") + return 0 + + # Use InstallationCoordinator for execution + def progress_callback(current, total, step): + print(f"[{current}/{total}] {step.description}") + + coordinator = InstallationCoordinator( + commands=commands, + descriptions=[f"Cleanup Step {i+1}" for i in range(len(commands))], + progress_callback=progress_callback + ) + + result = coordinator.execute() + if result.success: + self._print_success("Cleanup completed successfully!") + return 0 + else: + self._print_error("Cleanup encountered errors.") + return 1 def install(self, software: str, execute: bool = False, dry_run: bool = False): # Validate input first @@ -543,7 +632,7 @@ def show_rich_help(): table.add_row("install ", "Install software") table.add_row("history", "View history") table.add_row("rollback ", "Undo installation") - table.add_row("notify", "Manage desktop notifications") # Added this line + table.add_row("notify", "Manage desktop notifications") console.print(table) console.print() @@ -598,7 +687,7 @@ def main(): edit_pref_parser.add_argument('key', nargs='?') edit_pref_parser.add_argument('value', nargs='?') - # --- New Notify Command --- + # --- Notify Command --- notify_parser = subparsers.add_parser('notify', help='Manage desktop notifications') notify_subs = notify_parser.add_subparsers(dest='notify_action', help='Notify actions') @@ -615,6 +704,19 @@ def main(): send_parser.add_argument('--title', default='Cortex Notification') send_parser.add_argument('--level', choices=['low', 'normal', 'critical'], default='normal') send_parser.add_argument('--actions', nargs='*', help='Action buttons') + + + # --- Cleanup Command --- + cleanup_parser = subparsers.add_parser('cleanup', help='Optimize disk space') + cleanup_subs = cleanup_parser.add_subparsers(dest='cleanup_action', help='Cleanup actions') + + cleanup_subs.add_parser('scan', help='Scan for cleanable items') + + run_parser = cleanup_subs.add_parser('run', 
help='Execute cleanup') + run_parser.add_argument('--safe', action='store_true', default=True, help='Run safely (with backups)') + run_parser.add_argument('--force', action='store_true', help='Force cleanup (no backups)') + run_parser.add_argument('--yes', '-y', action='store_true', help='Skip confirmation') + run_parser.add_argument('--dry-run', action='store_true', help='Show proposed changes without executing') # -------------------------- args = parser.parse_args() @@ -642,9 +744,12 @@ def main(): return cli.check_pref(key=args.key) elif args.command == 'edit-pref': return cli.edit_pref(action=args.action, key=args.key, value=args.value) - # Handle the new notify command elif args.command == 'notify': return cli.notify(args) + # Handle new command + + elif args.command == 'cleanup': + return cli.cleanup(args) else: parser.print_help() return 1 diff --git a/cortex/optimizer.py b/cortex/optimizer.py new file mode 100644 index 0000000..7160094 --- /dev/null +++ b/cortex/optimizer.py @@ -0,0 +1,234 @@ +import os +import shutil +import subprocess +import glob +import gzip +import time +import logging +from typing import List, Dict, Optional +from pathlib import Path +from dataclasses import dataclass + +from cortex.packages import PackageManager + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +@dataclass +class CleanupOpportunity: + type: str # 'package_cache', 'orphans', 'logs', 'temp' + size_bytes: int + description: str + items: List[str] # List of files or packages + +class LogManager: + """Manages log file compression and cleanup.""" + def __init__(self, log_dir: str = "/var/log"): + self.log_dir = log_dir + + def scan(self) -> Optional[CleanupOpportunity]: + """Scan logs to identify old files that can be compressed.""" + candidates = [] + total_size = 0 + + if not os.path.exists(self.log_dir): + return None + + # Look for .1, .2, or .log.old files that aren't compressed + # Also look for .log files older than 7 days + 
patterns = ["**/*.1", "**/*.2", "**/*.log.old", "**/*.log"] + cutoff = time.time() - (7 * 86400) # 7 days + + for pattern in patterns: + p_candidates, p_size = self._scan_pattern(pattern, cutoff) + candidates.extend(p_candidates) + total_size += p_size + + if candidates: + return CleanupOpportunity( + type="logs", + size_bytes=total_size, + description=f"Old log files ({len(candidates)})", + items=candidates + ) + return None + + def _scan_pattern(self, pattern: str, cutoff: float) -> tuple[List[str], int]: + """Scan for a specific pattern.""" + candidates = [] + total_size = 0 + # We need to be careful with permissions here. + for log_file in glob.glob(os.path.join(self.log_dir, pattern), recursive=True): + try: + # Skip if already compressed + if log_file.endswith('.gz'): + continue + + stat = os.stat(log_file) + + # For .log files, check age + if log_file.endswith('.log'): + if stat.st_mtime > cutoff: + continue + + candidates.append(log_file) + total_size += stat.st_size + except OSError: + pass + return candidates, total_size + + + def get_cleanup_commands(self) -> List[str]: + """Generate commands to compress old logs.""" + # More robust find command + return [ + f"find {self.log_dir} -name '*.log' -type f -mtime +7 -exec gzip {{}} \\+", + f"find {self.log_dir} -name '*.1' -type f -exec gzip {{}} \\+", + f"find {self.log_dir} -name '*.2' -type f -exec gzip {{}} \\+" + ] + +class TempCleaner: + """Manages temporary file cleanup.""" + def __init__(self, temp_dirs: List[str] = None): + if temp_dirs is None: + self.temp_dirs = ["/tmp", "/var/tmp"] + else: + self.temp_dirs = temp_dirs + + def scan(self) -> Optional[CleanupOpportunity]: + """Scan temp directories for old files.""" + candidates = [] + total_size = 0 + cutoff = time.time() - (7 * 86400) # 7 days + + for d in self.temp_dirs: + if not os.path.exists(d): + continue + + d_candidates, d_size = self._scan_directory(d, cutoff) + candidates.extend(d_candidates) + total_size += d_size + + if candidates: + 
return CleanupOpportunity( + type="temp", + size_bytes=total_size, + description=f"Old temporary files ({len(candidates)})", + items=candidates + ) + return None + + def _scan_directory(self, directory: str, cutoff: float) -> tuple[List[str], int]: + """Helper to scan a single directory safely.""" + candidates = [] + total_size = 0 + try: + for root, _, files in os.walk(directory): + for name in files: + fpath = os.path.join(root, name) + try: + stat = os.stat(fpath) + if stat.st_atime < cutoff and stat.st_mtime < cutoff: + candidates.append(fpath) + total_size += stat.st_size + except OSError: + pass + except OSError: + pass + return candidates, total_size + + def get_cleanup_commands(self) -> List[str]: + """Generate commands to clean temp files.""" + commands = [] + for d in self.temp_dirs: + # Delete files accessed more than 10 days ago + commands.append(f"find {d} -type f -atime +10 -delete") + # Delete empty directories + commands.append(f"find {d} -type d -empty -delete") + return commands + +class CleanupOptimizer: + """Orchestrator for system cleanup operations.""" + def __init__(self): + self.pm = PackageManager() + self.log_manager = LogManager() + self.temp_cleaner = TempCleaner() + self.backup_dir = Path("/var/lib/cortex/backups/cleanup") + self._ensure_backup_dir() + + def _ensure_backup_dir(self): + try: + self.backup_dir.mkdir(parents=True, exist_ok=True) + except PermissionError: + self.backup_dir = Path.home() / ".cortex" / "backups" / "cleanup" + self.backup_dir.mkdir(parents=True, exist_ok=True) + + def scan(self) -> List[CleanupOpportunity]: + """Scan system for cleanup opportunities.""" + opportunities = [] + + # 1. 
Package Manager + pkg_info = self.pm.get_cleanable_items() + if pkg_info.get("cache_size_bytes", 0) > 0: + opportunities.append(CleanupOpportunity( + type="package_cache", + size_bytes=pkg_info["cache_size_bytes"], + description="Package manager cache", + items=["Package cache files"] + )) + + if pkg_info.get("orphaned_packages"): + opportunities.append(CleanupOpportunity( + type="orphans", + size_bytes=pkg_info.get("orphaned_size_bytes", 0), + description=f"Orphaned packages ({len(pkg_info['orphaned_packages'])})", + items=pkg_info["orphaned_packages"] + )) + + # 2. Logs + log_opp = self.log_manager.scan() + if log_opp: + opportunities.append(log_opp) + + # 3. Temp + temp_opp = self.temp_cleaner.scan() + if temp_opp: + opportunities.append(temp_opp) + + return opportunities + + def get_cleanup_plan(self) -> List[str]: + """Generate a list of shell commands to execute the cleanup.""" + commands = [] + + # 1. Package Cleanup + commands.extend(self.pm.get_cleanup_commands('cache')) + commands.extend(self.pm.get_cleanup_commands('orphans')) + + # 2. Log Cleanup + commands.extend(self.log_manager.get_cleanup_commands()) + + # 3. 
Temp Cleanup + commands.extend(self.temp_cleaner.get_cleanup_commands()) + + return commands + + def schedule_cleanup(self, frequency: str) -> bool: + """Schedule cleanup job (daily, weekly, monthly).""" + cron_cmd = "cortex cleanup run --safe > /var/log/cortex-cleanup.log 2>&1" + cron_time = "@daily" + if frequency == 'weekly': cron_time = "@weekly" + elif frequency == 'monthly': cron_time = "@monthly" + + entry = f"{cron_time} {cron_cmd}" + + try: + current_crontab = subprocess.run("crontab -l", shell=True, capture_output=True, text=True).stdout + if cron_cmd in current_crontab: + return True + + new_crontab = current_crontab + f"\n# Cortex Auto-Cleanup\n{entry}\n" + proc = subprocess.run(["crontab", "-"], input=new_crontab, text=True, capture_output=True) + return proc.returncode == 0 + except Exception: + return False diff --git a/cortex/packages.py b/cortex/packages.py index a846cff..973867f 100644 --- a/cortex/packages.py +++ b/cortex/packages.py @@ -450,4 +450,97 @@ def get_package_info(self, package_name: str) -> Optional[Dict[str, str]]: pass return None + def get_cleanable_items(self) -> Dict[str, any]: + """ + Identify cleanable items managed by the package manager. 
+ + Returns: + Dictionary containing cleanup opportunities (cache size, orphaned packages) + """ + opportunities = { + "cache_size_bytes": 0, + "orphaned_packages": [], + "orphaned_size_bytes": 0 + } + + try: + if self.pm_type == PackageManagerType.APT: + self._get_apt_cleanable_items(opportunities) + elif self.pm_type in (PackageManagerType.YUM, PackageManagerType.DNF): + self._get_yum_cleanable_items(opportunities) + except Exception: + pass + + return opportunities + + def _get_apt_cleanable_items(self, opportunities: Dict[str, any]): + # Check apt cache size + result = subprocess.run( + "du -sb /var/cache/apt/archives 2>/dev/null | cut -f1", + shell=True, capture_output=True, text=True + ) + if result.returncode == 0 and result.stdout.strip(): + opportunities["cache_size_bytes"] = int(result.stdout.strip()) + + # Check for autoremovable packages + result = subprocess.run( + ["apt-get", "--dry-run", "autoremove"], + capture_output=True, text=True, env={"LANG": "C"} + ) + + if result.returncode == 0: + for line in result.stdout.split('\n'): + if line.startswith("Remv"): + parts = line.split() + if len(parts) >= 2: + pkg_name = parts[1] + opportunities["orphaned_packages"].append(pkg_name) + + # Estimate size + if opportunities["orphaned_packages"]: + self._estimate_apt_orphans_size(opportunities) + def _estimate_apt_orphans_size(self, opportunities: Dict[str, any]): + cmd = ["dpkg-query", "-W", "-f=${Installed-Size}\n"] + opportunities["orphaned_packages"] + size_res = subprocess.run(cmd, capture_output=True, text=True) + if size_res.returncode == 0: + total_kb = sum(int(s) for s in size_res.stdout.split() if s.isdigit()) + opportunities["orphaned_size_bytes"] = total_kb * 1024 + + def _get_yum_cleanable_items(self, opportunities: Dict[str, any]): + # Check cache size + cache_dir = "/var/cache/dnf" if self.pm_type == PackageManagerType.DNF else "/var/cache/yum" + result = subprocess.run( + f"du -sb {cache_dir} 2>/dev/null | cut -f1", + shell=True, 
capture_output=True, text=True + ) + if result.returncode == 0 and result.stdout.strip(): + opportunities["cache_size_bytes"] = int(result.stdout.strip()) + + # Check for autoremovable packages - unimplemented logic + + + def get_cleanup_commands(self, item_type: str) -> List[str]: + """ + Get commands to clean specific items. + + Args: + item_type: Type of item to clean ('cache', 'orphans') + + Returns: + List of commands + """ + if self.pm_type == PackageManagerType.APT: + if item_type == 'cache': + return ["apt-get clean"] + elif item_type == 'orphans': + return ["apt-get autoremove -y"] + + elif self.pm_type in (PackageManagerType.YUM, PackageManagerType.DNF): + pm_cmd = "yum" if self.pm_type == PackageManagerType.YUM else "dnf" + if item_type == 'cache': + return [f"{pm_cmd} clean all"] + elif item_type == 'orphans': + return [f"{pm_cmd} autoremove -y"] + + return [] diff --git a/docs/ASSESSMENT.md b/docs/ASSESSMENT.md deleted file mode 100644 index 3e84053..0000000 --- a/docs/ASSESSMENT.md +++ /dev/null @@ -1,344 +0,0 @@ -# Cortex Linux - Comprehensive Code Assessment - -**Assessment Date:** November 2025 -**Assessor:** Claude Code Analysis -**Repository:** https://github.com/cortexlinux/cortex -**Version Analyzed:** 0.1.0 - ---- - -## Executive Summary - -Cortex Linux is an ambitious AI-native operating system project that aims to simplify complex software installation on Linux through natural language commands. The codebase demonstrates solid foundational architecture with several well-implemented components, but requires significant improvements in code organization, security hardening, documentation, and test coverage before production use. - -**Overall Assessment:** ๐ŸŸก **Early Alpha** - Functional prototype with notable gaps requiring attention. - ---- - -## 1. 
Architecture & Code Quality - -### 1.1 Design Patterns - -**Strengths:** -- Clean separation of concerns between CLI (`cortex/cli.py`), coordination (`cortex/coordinator.py`), and LLM integration (`LLM/interpreter.py`) -- Dataclasses used effectively for structured data (`InstallationStep`, `InstallationRecord`, `ExecutionResult`) -- Enum patterns for type safety (`StepStatus`, `InstallationType`, `PackageManagerType`) -- Factory pattern in `InstallationCoordinator.from_plan()` for flexible initialization - -**Weaknesses:** -- **No dependency injection** - Components create their own dependencies, making testing harder -- **God class tendency** in `InstallationHistory` (780+ lines) - should be split into Repository, Service layers -- **Inconsistent module organization** - Related files scattered (e.g., `src/hwprofiler.py` vs `cortex/packages.py`) -- **Missing interface abstractions** - No base classes for LLM providers, package managers - -### 1.2 Code Duplication (DRY Violations) - -| Location | Issue | Impact | -|----------|-------|--------| -| `_run_command()` | Duplicated in 4+ files (`installation_history.py`, `dependency_resolver.py`, `error_parser.py`) | High | -| Logging setup | Repeated in each module with `logging.basicConfig()` | Medium | -| JSON file operations | Same read/write patterns in multiple modules | Medium | -| Path validation | Similar path traversal checks in `sandbox_executor.py` lines 278-340 and elsewhere | Medium | - -### 1.3 Error Handling Gaps - -**Critical Issues:** -1. **Bare exception catches** in `coordinator.py:173-178` - swallows all errors -2. **No retry logic** for API calls in `LLM/interpreter.py` -3. **Silent failures** in logging setup (`sandbox_executor.py:134`) -4. 
**Unchecked file operations** - Missing `try/except` around file reads in multiple locations - -**Example of problematic code:** -```python -# coordinator.py:134 -except Exception: - pass # Silently ignores all errors -``` - -### 1.4 Security Vulnerabilities - -| Severity | Issue | Location | Risk | -|----------|-------|----------|------| -| **CRITICAL** | Shell injection via `shell=True` | `coordinator.py:144-150` | Commands constructed from LLM output executed directly | -| **HIGH** | Incomplete dangerous pattern list | `sandbox_executor.py:114-125` | Missing patterns: `wget -O \|`, `curl \| sh`, `eval` | -| **HIGH** | API keys in environment variables | `cli.py:26-29` | No validation of key format, potential leakage in logs | -| **MEDIUM** | MD5 for ID generation | `installation_history.py:250` | MD5 is cryptographically weak | -| **MEDIUM** | No rate limiting | `LLM/interpreter.py` | API abuse possible | -| **LOW** | Path traversal not fully mitigated | `sandbox_executor.py:278-340` | Complex allowlist logic with edge cases | - -### 1.5 Performance Bottlenecks - -1. **No caching** for LLM responses or package dependency lookups -2. **Synchronous execution** - No async/await for I/O operations -3. **Full file reads** in `installation_history.py` for history queries -4. **No connection pooling** for API clients - -### 1.6 Dead Code & Unused Dependencies - -**Unused Files:** -- `deploy_jesse_system (1).sh` - Duplicate with space in name -- `README_DEPENDENCIES (1).md` - Duplicate -- Multiple shell scripts appear unused (`merge-mike-prs.sh`, `organize-issues.sh`) - -**Empty/Placeholder Files:** -- `bounties_pending.json` - Contains only `[]` -- `contributors.json` - Contains only `[]` -- `payments_history.json` - Contains only `[]` - ---- - -## 2. 
Documentation Gaps - -### 2.1 Missing README Sections - -| Section | Status | Priority | -|---------|--------|----------| -| Installation instructions | โŒ Missing | Critical | -| Prerequisites & dependencies | โŒ Missing | Critical | -| Configuration guide | โŒ Missing | High | -| API documentation | โŒ Missing | High | -| Architecture diagram | โŒ Missing | Medium | -| Troubleshooting guide | โŒ Missing | Medium | -| Changelog | โŒ Missing | Medium | -| License details in README | โš ๏ธ Incomplete | Low | - -### 2.2 Undocumented APIs/Functions - -**Files lacking docstrings:** -- `cortex/__init__.py` - No module docstring -- Multiple private methods in `CortexCLI` class -- `context_memory.py` - Minimal documentation for complex class - -**Missing type hints:** -- `cortex/cli.py` - Return types missing on several methods -- Callback functions lack proper typing - -### 2.3 Setup/Installation Instructions - -Current state: **Non-existent** - -Missing: -- System requirements specification -- Python version requirements (says 3.8+ in setup.py but 3.11+ in README) -- Required system packages (firejail, hwinfo) -- Virtual environment setup -- API key configuration -- First run guide - ---- - -## 3. Repository Hygiene - -### 3.1 Git Issues - -| Issue | Files Affected | Action Required | -|-------|----------------|-----------------| -| Untracked files in root | 100+ files | Add to .gitignore or organize | -| Duplicate files | `deploy_jesse_system (1).sh`, `README_DEPENDENCIES (1).md` | Remove duplicates | -| Large shell scripts | Multiple 20KB+ scripts | Consider modularization | -| JSON data files checked in | `bounties_pending.json`, etc. 
| Should be gitignored | - -### 3.2 Missing .gitignore Entries - -```gitignore -# Should be added: -*.db -*.sqlite3 -history.db -*_audit.log -*_audit.json -.cortex/ -``` - -### 3.3 File Naming Inconsistencies - -- `README_*.md` files use different naming than standard `docs/` pattern -- Mix of `snake_case.py` and `kebab-case.sh` scripts -- `LLM/` directory uses uppercase (should be `llm/`) - -### 3.4 License Clarification Needed - -- LICENSE file is Apache 2.0 -- README mentions "MIT License" in some contexts -- `llm_router.py` header says "Modified MIT License" -- **Action:** Standardize license references - ---- - -## 4. Test Coverage Analysis - -### 4.1 Current Test Status - -| Module | Test File | Coverage Estimate | Status | -|--------|-----------|-------------------|--------| -| `cortex/cli.py` | `test/test_cli.py` | ~70% | โœ… Good | -| `cortex/coordinator.py` | `test/test_coordinator.py` | ~65% | โœ… Good | -| `cortex/packages.py` | `test/test_packages.py` | ~80% | โœ… Good | -| `installation_history.py` | `test/test_installation_history.py` | ~50% | โš ๏ธ Needs work | -| `LLM/interpreter.py` | `LLM/test_interpreter.py` | ~40% | โš ๏ธ Needs work | -| `src/sandbox_executor.py` | `src/test_sandbox_executor.py` | ~60% | โš ๏ธ Needs work | -| `src/hwprofiler.py` | `src/test_hwprofiler.py` | ~55% | โš ๏ธ Needs work | -| `error_parser.py` | `test_error_parser.py` | ~45% | โš ๏ธ Needs work | -| `llm_router.py` | `test_llm_router.py` | ~50% | โš ๏ธ Needs work | -| `dependency_resolver.py` | None | 0% | โŒ Missing | -| `context_memory.py` | `test_context_memory.py` | ~35% | โš ๏ธ Needs work | -| `logging_system.py` | `test_logging_system.py` | ~30% | โš ๏ธ Needs work | - -### 4.2 Missing Test Types - -- **Integration tests** - No end-to-end workflow tests -- **Security tests** - No tests for injection prevention -- **Performance tests** - No benchmarks or load tests -- **Mock tests** - Limited mocking of external services - -### 4.3 CI/CD Issues - 
-**Current workflow (`automation.yml`):** -```yaml -- name: Run tests - run: | - if [ -d tests ]; then # Wrong directory name! - python -m pytest tests/ || echo "Tests not yet implemented" -``` - -**Issues:** -1. Wrong test directory (`tests/` vs `test/`) -2. Silently passes on test failure (`|| echo ...`) -3. No coverage reporting -4. No linting/type checking -5. No security scanning (Bandit, safety) - ---- - -## 5. Specific Code Issues - -### 5.1 Critical Fixes Needed - -#### Issue #1: Shell Injection Vulnerability -**File:** `cortex/coordinator.py:144-150` -```python -# VULNERABLE: Command from LLM executed directly -result = subprocess.run( - step.command, - shell=True, # DANGEROUS - capture_output=True, - text=True, - timeout=self.timeout -) -``` -**Fix:** Use `shlex.split()` and `shell=False`, validate commands before execution. - -#### Issue #2: Inconsistent Python Version Requirements -**File:** `setup.py:35` vs `README.md:60` -- setup.py: `python_requires=">=3.8"` -- README: "Python 3.11+" -**Fix:** Align to Python 3.10+ (reasonable minimum). - -#### Issue #3: Database Path Hardcoded -**File:** `installation_history.py:71` -```python -def __init__(self, db_path: str = "/var/lib/cortex/history.db"): -``` -**Fix:** Use environment variable or XDG standards (`~/.local/share/cortex/`). - -### 5.2 High Priority Fixes - -#### Issue #4: Missing requirements.txt at Root -Root `requirements.txt` missing - only `LLM/requirements.txt` and `src/requirements.txt` exist. - -#### Issue #5: Circular Import Risk -`cortex/cli.py` imports from parent directory with `sys.path.insert()` - fragile pattern. - -#### Issue #6: No Graceful Degradation -If Firejail unavailable, security is significantly reduced with only a warning. - -### 5.3 Medium Priority Fixes - -1. Add `__all__` exports to all modules -2. Implement proper logging configuration (single config point) -3. Add request timeout configuration for API calls -4. 
Implement connection retry logic with exponential backoff -5. Add input validation for all user-facing functions - ---- - -## 6. Dependency Analysis - -### 6.1 Direct Dependencies - -| Package | Version | Purpose | Security Status | -|---------|---------|---------|-----------------| -| `openai` | >=1.0.0 | GPT API | โœ… Current | -| `anthropic` | >=0.18.0 | Claude API | โœ… Current | - -### 6.2 Missing from Requirements - -Should be added to root `requirements.txt`: -``` -anthropic>=0.18.0 -openai>=1.0.0 -typing-extensions>=4.0.0 # For older Python compatibility -``` - -### 6.3 Development Dependencies Missing - -Create `requirements-dev.txt`: -``` -pytest>=7.0.0 -pytest-cov>=4.0.0 -pytest-mock>=3.10.0 -black>=23.0.0 -mypy>=1.0.0 -pylint>=2.17.0 -bandit>=1.7.0 -safety>=2.3.0 -``` - ---- - -## 7. Summary Statistics - -| Metric | Value | -|--------|-------| -| Total Python Files | 32 | -| Total Lines of Code | ~12,000 | -| Test Files | 12 | -| Documentation Files | 18 | -| Shell Scripts | 15 | -| Critical Issues | 3 | -| High Priority Issues | 8 | -| Medium Priority Issues | 15 | -| Low Priority Issues | 10+ | -| Estimated Test Coverage | ~45% | - ---- - -## 8. Recommendations Summary - -### Immediate Actions (Week 1) -1. Fix shell injection vulnerability -2. Create root `requirements.txt` -3. Fix CI/CD pipeline -4. Standardize Python version requirements - -### Short-term (Weeks 2-3) -1. Reorganize directory structure -2. Add comprehensive installation docs -3. Implement dependency injection -4. Add security scanning to CI - -### Medium-term (Month 1-2) -1. Achieve 80% test coverage -2. Add integration tests -3. Implement async operations -4. Add caching layer - -### Long-term (Quarter 1) -1. Extract shared utilities into common module -2. Add plugin architecture for LLM providers -3. Implement comprehensive logging/monitoring -4. Security audit by external party - ---- - -*Assessment generated by automated code analysis. 
Manual review recommended for security-critical findings.* diff --git a/docs/Bounties.md b/docs/Bounties.md deleted file mode 100644 index feea7d2..0000000 --- a/docs/Bounties.md +++ /dev/null @@ -1,141 +0,0 @@ -# Bounty Program - -## Overview - -Get paid for contributing to Cortex Linux. Cash bounties on every merged PR, plus 2x bonus at funding. - -## Current Bounties - -Browse issues with the `bounty` label: -https://github.com/cortexlinux/cortex/issues?q=is%3Aissue+is%3Aopen+label%3Abounty - -## Payment Structure - -### Immediate Payment -- Paid within 48 hours of PR merge -- Bitcoin, USDC, or PayPal -- No equity required - -### 2x Bonus -- February 2025 (when seed funding closes) -- Doubles all bounties earned -- Example: Earn $500 now โ†’ Get $500 bonus later = $1,000 total - -### Bounty Tiers - -| Complexity | Bounty | Example | -|------------|--------|---------| -| Critical | $150-200 | Package manager, plugin system | -| Important | $100-150 | Rollback, dependency resolution | -| Standard | $75-100 | Config templates, verification | -| Testing | $50-75 | Integration tests, validation | -| Docs | $25-50 | User guides, API docs | - -## How It Works - -### 1. Find an Issue -Browse bounty issues: -https://github.com/cortexlinux/cortex/issues?q=is%3Aissue+is%3Aopen+label%3Abounty - -### 2. Claim It -Comment: "I'd like to work on this" -Wait for assignment - -### 3. Build It -- Complete implementation -- Write tests (>80% coverage) -- Add documentation -- Submit PR - -### 4. 
Get Paid -- PR reviewed and merged -- Provide payment details -- Receive payment within 48 hours - -## Requirements - -### Code Quality -- โœ… Complete implementation (no TODOs) -- โœ… Unit tests with >80% coverage -- โœ… Documentation with examples -- โœ… Integrates with existing code -- โœ… Follows project style - -### Testing -- All tests pass locally -- CI checks pass -- Manual testing done -- Edge cases covered - -### Documentation -- README for the feature -- Code comments for complex logic -- Usage examples -- API documentation (if applicable) - -## Payment Methods - -### Cryptocurrency (Preferred) -- **Bitcoin (BTC)** -- **USDC (ERC-20 or Polygon)** - -Provide your wallet address in PR comments. - -### Traditional -- **PayPal** -- **Venmo** (US only) -- **Zelle** (US only) - -Provide your payment email. - -## Top Contributors - -### November 2025 - -| Developer | PRs | Total Earned | Status | -|-----------|-----|--------------|--------| -| @aliraza556 | 2 | $300 | Processing | -| @dhvll | 1 | $100 | Processing | -| @chandrapratamar | 1 | $100 | Processing | -| @AlexanderLuzDH | 1 | $125 | Paid | - -*At 2x bonus: $1,250 total* - -## Founding Team Opportunities - -Top contributors may be invited to: -- **CTO position** (15-20% equity) -- **Core team** (employment post-funding) -- **Advisory board** -- **Early equity grants** - -Performance matters. Show consistent quality and you'll be considered. - -## FAQ - -**Q: How fast do I get paid?** -A: Within 48 hours of PR merge. - -**Q: What if my PR isn't merged?** -A: No payment. Only merged PRs are paid. - -**Q: Can I work on multiple issues?** -A: Yes! Claim as many as you can handle. - -**Q: What's the 2x bonus?** -A: When funding closes (Feb 2025), all bounties earned get doubled. - -**Q: Do I need to sign anything?** -A: No contracts. Payment on merge. - -**Q: What currency?** -A: USD equivalent in BTC, USDC, or PayPal. - -**Q: Can I negotiate bounties?** -A: For exceptionally complex features, yes. 
Ask first. - -## Questions? - -Ask in Discord #dev-questions or comment on the issue. - -**Start earning:** https://github.com/cortexlinux/cortex/issues diff --git a/docs/CONFIGURATION.md b/docs/CONFIGURATION.md deleted file mode 100644 index c90ca70..0000000 --- a/docs/CONFIGURATION.md +++ /dev/null @@ -1,592 +0,0 @@ -# Configuration Management for Cortex Linux - -## Overview - -Cortex Linux's Configuration Management feature enables you to export, share, and import system configurations for reproducibility and team collaboration. This feature is essential for: - -- **Team Collaboration**: Share exact development environments with team members -- **Infrastructure as Code**: Version control your system configurations -- **Disaster Recovery**: Quickly restore systems to known-good states -- **Onboarding**: New team members can replicate production environments instantly -- **CI/CD**: Ensure consistent environments across development, staging, and production - -## Installation - -### Prerequisites - -- Python 3.8 or higher -- Cortex Linux 0.2.0 or compatible version -- System package managers: apt, pip3, npm (depending on what you want to export/import) - -### Dependencies - -Install required Python dependencies: - -```bash -pip3 install pyyaml>=6.0.1 packaging>=23.0 -``` - -### System Requirements - -- Ubuntu 24.04 LTS (or compatible Debian-based distribution) -- Sufficient disk space for configuration files -- Root/sudo access for package installation - -## Usage - -The Configuration Manager provides three main commands: - -1. **export** - Export current system configuration -2. **import** - Import and apply configuration -3. 
**diff** - Compare current system with configuration file - -### Exporting Configuration - -#### Basic Export - -Export your current system configuration: - -```bash -python3 config_manager.py export --output my-config.yaml -``` - -This creates a YAML file containing: -- Cortex version -- OS version -- Installed packages (apt, pip, npm) -- User preferences -- Selected environment variables - -#### Export with Hardware Information - -Include hardware profile in the export: - -```bash -python3 config_manager.py export --output dev-machine.yaml --include-hardware -``` - -Hardware information includes: -- CPU model and core count -- GPU details (NVIDIA, AMD, Intel) -- RAM size -- Storage devices -- Network interfaces - -#### Export Packages Only - -Export only package information (no preferences or hardware): - -```bash -python3 config_manager.py export --output packages.yaml --packages-only -``` - -#### Export Without Preferences - -Export everything except user preferences: - -```bash -python3 config_manager.py export --output config.yaml --no-preferences -``` - -### Importing Configuration - -#### Preview Changes (Dry-Run) - -Preview what would change without applying anything: - -```bash -python3 config_manager.py import dev-machine.yaml --dry-run -``` - -Output shows: -- Packages to install -- Packages to upgrade/downgrade -- Preferences that will change -- Warnings about compatibility - -#### Apply Configuration - -Import and apply the configuration: - -```bash -python3 config_manager.py import dev-machine.yaml -``` - -This will: -1. Validate compatibility -2. Install missing packages -3. Upgrade outdated packages -4. 
Update user preferences - -#### Force Import - -Skip compatibility checks (use with caution): - -```bash -python3 config_manager.py import dev-machine.yaml --force -``` - -#### Selective Import - -Import only packages: - -```bash -python3 config_manager.py import dev-machine.yaml --packages-only -``` - -Import only preferences: - -```bash -python3 config_manager.py import dev-machine.yaml --preferences-only -``` - -### Comparing Configurations - -Show differences between current system and configuration file: - -```bash -python3 config_manager.py diff production-config.yaml -``` - -Output includes: -- Number of packages to install -- Number of packages to upgrade/downgrade -- Packages already installed -- Changed preferences -- Compatibility warnings - -## Configuration File Format - -Configuration files are in YAML format with the following structure: - -```yaml -cortex_version: 0.2.0 -exported_at: '2025-11-14T14:23:15.123456' -os: ubuntu-24.04 - -hardware: # Optional - cpu: - model: AMD Ryzen 9 5950X - cores: 16 - architecture: x86_64 - gpu: - - vendor: NVIDIA - model: RTX 4090 - vram: 24576 - cuda: '12.3' - ram: 65536 - storage: - - type: nvme - size: 2097152 - device: nvme0n1 - network: - interfaces: - - name: eth0 - speed_mbps: 1000 - max_speed_mbps: 1000 - -packages: - - name: docker - version: 24.0.7-1 - source: apt - - name: numpy - version: 1.24.0 - source: pip - - name: typescript - version: 5.0.0 - source: npm - -preferences: - confirmations: minimal - verbosity: normal - -environment_variables: - LANG: en_US.UTF-8 - SHELL: /bin/bash -``` - -### Field Descriptions - -- **cortex_version**: Version of Cortex Linux that created this config -- **exported_at**: ISO timestamp of export -- **os**: Operating system identifier (e.g., ubuntu-24.04) -- **hardware**: Optional hardware profile from HardwareProfiler -- **packages**: List of installed packages with name, version, and source -- **preferences**: User preferences for Cortex behavior -- 
**environment_variables**: Selected environment variables (exported for reference only; not automatically restored during import) - -### Package Sources - -Supported package sources: - -- **apt**: System packages via APT/dpkg -- **pip**: Python packages via pip/pip3 -- **npm**: Node.js global packages via npm - -## Integration with SandboxExecutor - -For enhanced security, ConfigManager can integrate with SandboxExecutor to safely install packages: - -```python -from config_manager import ConfigManager -from sandbox_executor import SandboxExecutor - -# Create instances -executor = SandboxExecutor() -manager = ConfigManager(sandbox_executor=executor) - -# All package installations will go through sandbox -manager.import_configuration('config.yaml') -``` - -Benefits: -- Commands are validated before execution -- Resource limits prevent runaway installations -- Audit logging of all operations -- Rollback capability on failures - -## Best Practices - -### Version Control Your Configs - -Store configuration files in Git: - -```bash -git add environments/ -git commit -m "Add production environment config" -git push -``` - -### Use Meaningful Filenames - -Name files descriptively: - -```text -dev-machine-john.yaml -production-web-server.yaml -ml-training-gpu-rig.yaml -team-baseline-2024-11.yaml -``` - -### Always Test with Dry-Run First - -Before applying any configuration: - -```bash -# 1. Check differences -python3 config_manager.py diff config.yaml - -# 2. Dry-run to see exactly what will happen -python3 config_manager.py import config.yaml --dry-run - -# 3. Apply if everything looks good -python3 config_manager.py import config.yaml -``` - -### Regular Backups - -Export your configuration regularly: - -```bash -# Daily backup script -python3 config_manager.py export \ - --output "backups/config-$(date +%Y-%m-%d).yaml" \ - --include-hardware -``` - -### Team Onboarding Workflow - -1. 
**Team Lead**: Export reference configuration - ```bash - python3 config_manager.py export --output team-baseline.yaml --include-hardware - ``` - -2. **Share**: Commit to repository or share via secure channel - -3. **New Member**: Preview then import - ```bash - python3 config_manager.py import team-baseline.yaml --dry-run - python3 config_manager.py import team-baseline.yaml - ``` - -### Environment-Specific Configs - -Maintain separate configs for different environments: - -```text -configs/ -โ”œโ”€โ”€ development.yaml -โ”œโ”€โ”€ staging.yaml -โ””โ”€โ”€ production.yaml -``` - -### Selective Operations - -Use selective import for fine-grained control: - -```bash -# Update only packages, keep local preferences -python3 config_manager.py import prod.yaml --packages-only - -# Update only preferences, keep packages -python3 config_manager.py import team-prefs.yaml --preferences-only -``` - -## Troubleshooting - -### Compatibility Errors - -**Problem**: "Incompatible configuration: Incompatible major version" - -**Solution**: Configuration was created with a different major version of Cortex. Use `--force` to bypass (risky) or update Cortex version. - -### OS Mismatch Warnings - -**Problem**: "Warning: OS mismatch (config=ubuntu-24.04, current=ubuntu-22.04)" - -**Solution**: Configuration may not work perfectly on different OS versions. Proceed with caution or update your OS. - -### Package Installation Failures - -**Problem**: Some packages fail to install - -**Solution**: -1. Check network connectivity -2. Update package indexes: `sudo apt-get update` -3. Check for conflicting packages -4. 
Review failed packages in output and install manually if needed - -### Permission Errors - -**Problem**: "Permission denied" when installing packages - -**Solution**: Run with appropriate privileges: -```bash -# Use sudo for system package installation -sudo python3 config_manager.py import config.yaml -``` - -### Missing Package Managers - -**Problem**: npm or pip packages fail because manager not installed - -**Solution**: Install missing package managers first: -```bash -sudo apt-get install npm python3-pip -``` - -### Large Package Lists - -**Problem**: Import takes very long with many packages - -**Solution**: -1. Use `--packages-only` to skip other operations -2. Consider splitting into smaller configs -3. Increase timeout if using SandboxExecutor - -### YAML Syntax Errors - -**Problem**: "Failed to load configuration file: YAML error" - -**Solution**: Validate YAML syntax: -```bash -python3 -c "import yaml; yaml.safe_load(open('config.yaml'))" -``` - -## Advanced Usage - -### Programmatic API - -Use ConfigManager in Python scripts: - -```python -from config_manager import ConfigManager - -manager = ConfigManager() - -# Export -manager.export_configuration( - output_path='config.yaml', - include_hardware=True, - package_sources=['apt', 'pip'] -) - -# Import with dry-run -result = manager.import_configuration( - config_path='config.yaml', - dry_run=True -) - -# Check diff - load the config file first -import yaml -with open('config.yaml', 'r') as f: - config = yaml.safe_load(f) -diff = manager.diff_configuration(config) -print(f"To install: {len(diff['packages_to_install'])}") -``` - -### Custom Package Sources - -Extend detection for additional package managers: - -```python -class CustomConfigManager(ConfigManager): - def detect_cargo_packages(self): - # Implement Rust cargo package detection - pass - - def detect_installed_packages(self, sources=None): - packages = super().detect_installed_packages(sources) - if 'cargo' in (sources or []): - 
packages.extend(self.detect_cargo_packages()) - return packages -``` - -### Batch Operations - -Process multiple configurations: - -```bash -# Export all team members -for user in team_members; do - python3 config_manager.py export \ - --output "team/$user-config.yaml" -done - -# Compare all configs -for config in team/*.yaml; do - echo "=== $config ===" - python3 config_manager.py diff "$config" -done -``` - -## Security Considerations - -### Sensitive Data - -Configuration files may contain sensitive information: - -- Package versions that reveal security vulnerabilities -- Environment variables with API keys or tokens -- Hardware details useful for targeted attacks - -**Recommendations**: -- Review exported configs before sharing -- Sanitize environment variables -- Use `.gitignore` for sensitive configs -- Encrypt configs containing secrets - -### Sandboxed Installation - -Always use SandboxExecutor for production imports: - -```python -from sandbox_executor import SandboxExecutor -from config_manager import ConfigManager - -executor = SandboxExecutor( - max_memory_mb=2048, - timeout_seconds=600, - enable_rollback=True -) -manager = ConfigManager(sandbox_executor=executor) -``` - -### Validation - -Configuration validation checks: -- Version compatibility -- OS compatibility -- Package source availability - -Use `--dry-run` extensively before applying configurations. - -## API Reference - -### ConfigManager Class - -#### Constructor - -```python -ConfigManager(sandbox_executor=None) -``` - -Parameters: -- `sandbox_executor` (optional): SandboxExecutor instance for safe command execution - -#### Methods - -##### export_configuration() - -```python -export_configuration( - output_path: str, - include_hardware: bool = True, - include_preferences: bool = True, - package_sources: List[str] = None -) -> str -``` - -Export system configuration to YAML file. 
- -##### import_configuration() - -```python -import_configuration( - config_path: str, - dry_run: bool = False, - selective: Optional[List[str]] = None, - force: bool = False -) -> Dict[str, Any] -``` - -Import configuration from YAML file. - -##### diff_configuration() - -```python -diff_configuration(config: Dict[str, Any]) -> Dict[str, Any] -``` - -Compare current system state with configuration. - -##### validate_compatibility() - -```python -validate_compatibility(config: Dict[str, Any]) -> Tuple[bool, Optional[str]] -``` - -Validate if configuration can be imported. - -##### detect_installed_packages() - -```python -detect_installed_packages(sources: List[str] = None) -> List[Dict[str, Any]] -``` - -Detect all installed packages from specified sources. - -## Contributing - -Contributions are welcome! Areas for improvement: - -- Additional package manager support (cargo, gem, etc.) -- Configuration validation schemas -- Migration tools between versions -- GUI for configuration management -- Cloud storage integration - -## License - -Cortex Linux Configuration Management is part of the Cortex Linux project. - -## Support - -- **Issues**: [https://github.com/cortexlinux/cortex/issues](https://github.com/cortexlinux/cortex/issues) -- **Discord**: [https://discord.gg/uCqHvxjU83](https://discord.gg/uCqHvxjU83) -- **Email**: [mike@cortexlinux.com](mailto:mike@cortexlinux.com) - ---- - -**Version**: 0.2.0 -**Last Updated**: November 2024 diff --git a/docs/FIRST_RUN_WIZARD.md b/docs/FIRST_RUN_WIZARD.md deleted file mode 100644 index 0290ce7..0000000 --- a/docs/FIRST_RUN_WIZARD.md +++ /dev/null @@ -1,431 +0,0 @@ -# First-Run Wizard Module - -**Issue:** #256 -**Status:** Ready for Review -**Bounty:** As specified in issue (+ bonus after funding) - -## Overview - -A seamless onboarding experience for new Cortex users. The wizard guides users through API setup, hardware detection, preference configuration, and shell integration in a friendly, step-by-step process. 
- -## Features - -### Interactive Setup Flow - -1. **Welcome** - Introduction to Cortex -2. **API Configuration** - Set up Claude, OpenAI, or Ollama -3. **Hardware Detection** - Detect GPU, RAM, storage -4. **Preferences** - Configure behavior settings -5. **Shell Integration** - Tab completion and shortcuts -6. **Test Command** - Verify everything works - -### Smart Defaults - -- Auto-detects existing API keys -- Sensible defaults for all preferences -- Non-interactive mode for automation -- Resume capability if interrupted - -### Multiple API Providers - -| Provider | Setup | Notes | -|----------|-------|-------| -| Claude (Anthropic) | API key | Recommended | -| OpenAI | API key | Alternative | -| Ollama | Local install | Free, offline | -| None | Skip | Basic apt only | - -## Installation - -The wizard runs automatically on first use: - -```bash -cortex install anything -# โ†’ First-run wizard starts automatically -``` - -Or run manually: - -```bash -cortex setup -# or -python -m cortex.first_run_wizard -``` - -## Usage - -### Automatic First Run - -```python -from cortex.first_run_wizard import needs_first_run, run_wizard - -# Check if setup needed -if needs_first_run(): - success = run_wizard() - if not success: - print("Setup cancelled or failed") -``` - -### Non-Interactive Mode - -```python -from cortex.first_run_wizard import run_wizard - -# For automation/CI -success = run_wizard(interactive=False) -``` - -### Access Configuration - -```python -from cortex.first_run_wizard import get_config - -config = get_config() -print(f"API Provider: {config.get('api_provider')}") -print(f"Preferences: {config.get('preferences')}") -``` - -### Custom Wizard Instance - -```python -from cortex.first_run_wizard import FirstRunWizard -from pathlib import Path - -wizard = FirstRunWizard(interactive=True) - -# Customize paths (optional) -wizard.CONFIG_DIR = Path("/custom/config") -wizard.CONFIG_FILE = wizard.CONFIG_DIR / "config.json" - -# Run wizard -wizard.run() -``` - 
-## Wizard Steps - -### Step 1: Welcome - -Introduces Cortex and explains what it does: -- Natural language package management -- AI-powered command understanding -- Safe execution with rollback - -### Step 2: API Configuration - -Sets up the AI backend: - -**Claude (Recommended):** -``` -1. Go to https://console.anthropic.com -2. Create an API key -3. Enter key in wizard -``` - -**OpenAI:** -``` -1. Go to https://platform.openai.com -2. Create an API key -3. Enter key in wizard -``` - -**Ollama (Local):** -``` -1. Install Ollama -2. Pull llama3.2 model -3. No API key needed -``` - -### Step 3: Hardware Detection - -Automatically detects: -- CPU model and cores -- RAM amount -- GPU vendor and model -- Available disk space - -Special handling for: -- NVIDIA GPUs (CUDA setup option) -- AMD GPUs (ROCm info) -- Intel GPUs (oneAPI info) - -### Step 4: Preferences - -Configures: - -| Setting | Options | Default | -|---------|---------|---------| -| Auto-confirm | Yes/No | No | -| Verbosity | Quiet/Normal/Verbose | Normal | -| Caching | Enable/Disable | Enabled | - -### Step 5: Shell Integration - -Sets up: -- Tab completion for `cortex` command -- Supported shells: bash, zsh, fish -- Optional keyboard shortcuts - -### Step 6: Test Command - -Runs a simple test to verify setup: -```bash -cortex search text editors -``` - -## API Reference - -### FirstRunWizard - -Main wizard class. - -**Constructor:** -```python -FirstRunWizard(interactive: bool = True) -``` - -**Methods:** - -| Method | Description | -|--------|-------------| -| `needs_setup()` | Check if first-run is needed | -| `run()` | Run the complete wizard | -| `load_state()` | Load saved wizard state | -| `save_state()` | Save current wizard state | -| `save_config()` | Save configuration | -| `mark_setup_complete()` | Mark setup as finished | - -### WizardState - -Tracks wizard progress. 
- -```python -@dataclass -class WizardState: - current_step: WizardStep - completed_steps: List[WizardStep] - skipped_steps: List[WizardStep] - collected_data: Dict[str, Any] - started_at: datetime - completed_at: Optional[datetime] -``` - -### WizardStep - -Enum of wizard steps: - -```python -class WizardStep(Enum): - WELCOME = "welcome" - API_SETUP = "api_setup" - HARDWARE_DETECTION = "hardware_detection" - PREFERENCES = "preferences" - SHELL_INTEGRATION = "shell_integration" - TEST_COMMAND = "test_command" - COMPLETE = "complete" -``` - -### StepResult - -Result of each step: - -```python -@dataclass -class StepResult: - success: bool - message: str = "" - data: Dict[str, Any] = field(default_factory=dict) - next_step: Optional[WizardStep] = None - skip_to: Optional[WizardStep] = None -``` - -## Configuration Files - -### Location - -All files stored in `~/.cortex/`: - -| File | Purpose | -|------|---------| -| `config.json` | User configuration | -| `wizard_state.json` | Wizard progress | -| `.setup_complete` | Setup completion marker | -| `completion.bash` | Shell completion | - -### Config Format - -```json -{ - "api_provider": "anthropic", - "api_key_configured": true, - "hardware": { - "cpu": "Intel Core i7-9700K", - "ram_gb": 32, - "gpu": "NVIDIA GeForce RTX 4090", - "gpu_vendor": "nvidia", - "disk_gb": 500 - }, - "preferences": { - "auto_confirm": false, - "verbosity": "normal", - "enable_cache": true - } -} -``` - -## CLI Integration - -### In Main CLI - -```python -# In cortex/cli.py -from cortex.first_run_wizard import needs_first_run, run_wizard - -@cli.callback() -def main(): - if needs_first_run(): - if not run_wizard(): - raise SystemExit("Setup required") - -@cli.command() -def setup(force: bool = False): - """Run setup wizard.""" - if force or needs_first_run(): - run_wizard() - else: - print("Already set up. 
Use --force to run again.") -``` - -### As Standalone - -```bash -# Run wizard directly -python -m cortex.first_run_wizard - -# Force re-run -python -m cortex.first_run_wizard --force -``` - -## Shell Completion - -### Bash - -Added to `~/.bashrc`: -```bash -# Cortex completion -[ -f ~/.cortex/completion.bash ] && source ~/.cortex/completion.bash -``` - -### Zsh - -Added to `~/.zshrc`: -```bash -# Cortex completion -[ -f ~/.cortex/completion.zsh ] && source ~/.cortex/completion.zsh -``` - -### Fish - -Added to `~/.config/fish/config.fish`: -```fish -# Cortex completion -source ~/.cortex/completion.fish -``` - -## Testing - -```bash -# Run all tests -pytest tests/test_first_run_wizard.py -v - -# Run with coverage -pytest tests/test_first_run_wizard.py --cov=cortex.first_run_wizard - -# Test specific functionality -pytest tests/test_first_run_wizard.py -k "api_setup" -v -``` - -## Architecture - -``` -โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” -โ”‚ FirstRunWizard โ”‚ -โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”‚ -โ”‚ โ”‚ WizardState โ”‚ โ”‚ StepResult โ”‚ โ”‚ Step Handlers โ”‚ โ”‚ -โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚ -โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ - โ”‚ - โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” - โ–ผ โ–ผ โ–ผ -โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” -โ”‚ Config โ”‚ โ”‚ State โ”‚ โ”‚ Shell โ”‚ -โ”‚ File 
โ”‚ โ”‚ File โ”‚ โ”‚ Config โ”‚ -โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ -``` - -## Troubleshooting - -### Wizard Won't Start - -```python -from cortex.first_run_wizard import FirstRunWizard - -wizard = FirstRunWizard() -print(f"Setup complete file: {wizard.SETUP_COMPLETE_FILE}") -print(f"Exists: {wizard.SETUP_COMPLETE_FILE.exists()}") - -# Remove to re-run -wizard.SETUP_COMPLETE_FILE.unlink() -``` - -### API Key Not Saved - -```bash -# Check if key is in environment -echo $ANTHROPIC_API_KEY - -# Check shell config -grep ANTHROPIC ~/.bashrc ~/.zshrc - -# Restart shell or source config -source ~/.bashrc -``` - -### Shell Completion Not Working - -```bash -# Check if completion file exists -ls -la ~/.cortex/completion.* - -# Source manually -source ~/.cortex/completion.bash - -# Check for errors -bash -x ~/.cortex/completion.bash -``` - -### Resume Interrupted Wizard - -```python -from cortex.first_run_wizard import FirstRunWizard - -wizard = FirstRunWizard() -wizard.load_state() - -print(f"Current step: {wizard.state.current_step}") -print(f"Completed: {wizard.state.completed_steps}") - -# Continue from where left off -wizard.run() -``` - -## Contributing - -1. Add new steps to `WizardStep` enum -2. Create step handler method `_step_` -3. Add to steps list in `run()` -4. Add tests for new functionality -5. Update documentation - ---- - -**Closes:** #256 diff --git a/docs/GRACEFUL_DEGRADATION.md b/docs/GRACEFUL_DEGRADATION.md deleted file mode 100644 index ca81988..0000000 --- a/docs/GRACEFUL_DEGRADATION.md +++ /dev/null @@ -1,323 +0,0 @@ -# Graceful Degradation Module - -**Issue:** #257 -**Status:** Ready for Review -**Bounty:** As specified in issue (+ bonus after funding) - -## Overview - -The Graceful Degradation module ensures Cortex continues to function even when the LLM API is unavailable. 
It provides multiple fallback strategies to maintain core functionality: - -1. **Response Caching** - Uses previously cached LLM responses -2. **Pattern Matching** - Local regex-based command generation -3. **Manual Mode** - Direct apt command guidance - -## Features - -### Multi-Level Fallback System - -``` -API Available โ†’ Full AI Mode (100% confidence) - โ†“ (API fails) -Cache Hit โ†’ Cached Response (90% confidence) - โ†“ (no cache) -Similar Cache โ†’ Fuzzy Match (70% confidence) - โ†“ (no similar) -Pattern Match โ†’ Local Regex (70-80% confidence) - โ†“ (no pattern) -Manual Mode โ†’ User Guidance (0% confidence) -``` - -### Response Caching - -- SQLite-based persistent cache -- Automatic caching of successful LLM responses -- Similar query matching using keyword overlap -- Cache statistics and cleanup utilities - -### Pattern Matching - -Pre-built patterns for common operations: - -| Category | Examples | -|----------|----------| -| Web Dev | docker, nginx, nodejs, python, postgresql | -| Dev Tools | git, vim, curl, wget, htop, tmux | -| Languages | rust, golang, java | -| ML/AI | cuda, tensorflow, pytorch | -| Operations | update, clean, search, remove | - -### Health Monitoring - -- Automatic API health checks -- Configurable check intervals -- Failure counting with automatic mode switching -- Recovery detection when API returns - -## Installation - -```python -from cortex.graceful_degradation import GracefulDegradation, process_with_fallback - -# Quick usage with convenience function -result = process_with_fallback("install docker") -print(result["command"]) # sudo apt install docker.io - -# Or with full control -manager = GracefulDegradation() -result = manager.process_query("install nginx", llm_fn=your_llm_function) -``` - -## Usage Examples - -### Basic Usage - -```python -from cortex.graceful_degradation import GracefulDegradation - -manager = GracefulDegradation() - -# Process a query with automatic fallback -result = manager.process_query("install 
docker") - -print(f"Source: {result['source']}") -print(f"Confidence: {result['confidence']:.0%}") -print(f"Command: {result['command']}") -``` - -### With LLM Integration - -```python -def call_claude(query: str) -> str: - # Your Claude API call here - return response - -manager = GracefulDegradation() -result = manager.process_query("install docker", llm_fn=call_claude) - -# If Claude is available: source="llm", confidence=100% -# If Claude fails: automatically falls back to cache/patterns -``` - -### Checking System Status - -```python -status = manager.get_status() -print(f"Mode: {status['mode']}") -print(f"API Status: {status['api_status']}") -print(f"Cache Entries: {status['cache_entries']}") -print(f"Cache Hits: {status['cache_hits']}") -``` - -### Manual Health Check - -```python -# With default check (API key presence) -result = manager.check_api_health() - -# With custom health check -def ping_claude(): - try: - # Lightweight API ping - return True - except: - return False - -result = manager.check_api_health(api_check_fn=ping_claude) -print(f"API Status: {result.status.value}") -``` - -## API Reference - -### GracefulDegradation - -Main class for handling graceful degradation. - -**Constructor Parameters:** -- `cache` (ResponseCache, optional): Custom cache instance -- `health_check_interval` (int): Seconds between health checks (default: 60) -- `api_timeout` (float): API timeout in seconds (default: 10.0) - -**Methods:** - -| Method | Description | -|--------|-------------| -| `process_query(query, llm_fn)` | Process query with automatic fallback | -| `check_api_health(api_check_fn)` | Check if API is available | -| `get_status()` | Get current degradation status | -| `force_mode(mode)` | Force a specific operating mode | -| `reset()` | Reset to default state | - -### ResponseCache - -SQLite-based cache for LLM responses. 
- -**Methods:** - -| Method | Description | -|--------|-------------| -| `get(query)` | Get cached response for exact query | -| `put(query, response)` | Store a response | -| `get_similar(query, limit)` | Get similar cached responses | -| `get_stats()` | Get cache statistics | -| `clear_old_entries(days)` | Remove old entries | - -### PatternMatcher - -Local pattern matching for common operations. - -**Methods:** - -| Method | Description | -|--------|-------------| -| `match(query)` | Match query against known patterns | - -## Operating Modes - -| Mode | Description | When Used | -|------|-------------|-----------| -| `FULL_AI` | Normal operation with LLM | API available | -| `CACHED_ONLY` | Use cached responses only | After 1-2 API failures | -| `PATTERN_MATCHING` | Local regex matching | After 3+ failures, no cache | -| `MANUAL_MODE` | User guidance only | Unknown queries | - -## Response Format - -```python -{ - "query": "original query", - "response": "human-readable response", - "command": "apt command if applicable", - "source": "llm|cache|cache_similar|pattern_matching|manual_mode", - "confidence": 0.0-1.0, - "mode": "current operating mode", - "cached": True/False -} -``` - -## Configuration - -### Environment Variables - -The module checks for API keys to determine initial health: - -- `ANTHROPIC_API_KEY` - Claude API key -- `OPENAI_API_KEY` - OpenAI API key - -### Cache Location - -Default: `~/.cortex/response_cache.db` - -Override by passing custom `ResponseCache`: - -```python -from pathlib import Path -cache = ResponseCache(Path("/custom/path/cache.db")) -manager = GracefulDegradation(cache=cache) -``` - -## Testing - -```bash -# Run all tests -pytest tests/test_graceful_degradation.py -v - -# Run with coverage -pytest tests/test_graceful_degradation.py --cov=cortex.graceful_degradation - -# Run specific test class -pytest tests/test_graceful_degradation.py::TestGracefulDegradation -v -``` - -## Integration with Cortex - -This module integrates with 
the main Cortex CLI: - -```python -# In cortex/cli.py -from cortex.graceful_degradation import get_degradation_manager - -manager = get_degradation_manager() - -def handle_user_query(query: str): - result = manager.process_query(query, llm_fn=call_claude) - - if result["confidence"] < 0.5: - print("โš ๏ธ Running in offline mode - results may be limited") - - return result["command"] -``` - -## Architecture - -``` -โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” -โ”‚ User Query โ”‚ -โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ - โ–ผ -โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” -โ”‚ GracefulDegradation โ”‚ -โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”‚ -โ”‚ โ”‚ Health Check โ”‚โ†’ โ”‚ Mode Selector โ”‚โ†’ โ”‚ Processor โ”‚ โ”‚ -โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚ -โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ - โ–ผ - โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” - โ–ผ โ–ผ โ–ผ -โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” -โ”‚ LLM API โ”‚ โ”‚ Cache โ”‚ โ”‚ Pattern โ”‚ -โ”‚ โ”‚ โ”‚ (SQLite) โ”‚ โ”‚ Matcher โ”‚ -โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ 
โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ -``` - -## Troubleshooting - -### Cache Not Working - -```python -# Check cache status -stats = manager.cache.get_stats() -print(f"Entries: {stats['total_entries']}") -print(f"DB Size: {stats['db_size_kb']:.1f} KB") - -# Clear corrupted cache -import os -from pathlib import Path -os.remove(Path.home() / ".cortex" / "response_cache.db") -``` - -### Stuck in Offline Mode - -```python -# Force reset -manager.reset() - -# Or manually check API -result = manager.check_api_health() -print(f"Status: {result.status.value}") -print(f"Error: {result.error_message}") -``` - -### Pattern Not Matching - -```python -# Test pattern directly -matcher = PatternMatcher() -result = matcher.match("your query") -print(result) # None if no match - -# Check available patterns -print(matcher.INSTALL_PATTERNS.keys()) -``` - -## Contributing - -To add new patterns: - -1. Edit `PatternMatcher.INSTALL_PATTERNS` or `OPERATION_PATTERNS` -2. Use regex with `(?:...)` for non-capturing groups -3. Add tests in `tests/test_graceful_degradation.py` -4. Submit PR referencing this issue - ---- - -**Closes:** #257 diff --git a/docs/HARDWARE_DETECTION.md b/docs/HARDWARE_DETECTION.md deleted file mode 100644 index 308abd6..0000000 --- a/docs/HARDWARE_DETECTION.md +++ /dev/null @@ -1,257 +0,0 @@ -# Hardware Detection Module - -**Issue:** #253 -**Status:** Ready for Review -**Bounty:** As specified in issue (+ bonus after funding) - -## Overview - -Instant, comprehensive hardware detection on first run. Automatically identifies CPU, GPU, RAM, storage, and provides optimization recommendations for package installation. 
- -## Features - -| Feature | Description | -|---------|-------------| -| Instant Detection | Sub-second hardware scan | -| GPU Support | NVIDIA, AMD, Intel detection with driver info | -| CUDA Detection | Version and compute capability | -| Smart Recommendations | Hardware-aware package suggestions | -| JSON Export | Machine-readable output | - -## Quick Start - -```python -from cortex.hardware_detection import detect_hardware, get_recommendations - -# Detect all hardware -info = detect_hardware() -print(f"CPU: {info.cpu.model}") -print(f"GPU: {info.gpu.model if info.gpu.detected else 'None'}") -print(f"RAM: {info.memory.total_gb:.1f} GB") - -# Get recommendations -recs = get_recommendations(info) -for rec in recs: - print(f"โ€ข {rec}") -``` - -## CLI Usage - -```bash -# Show hardware info -cortex hardware - -# JSON output -cortex hardware --json - -# Check GPU only -cortex hardware --gpu - -# Get recommendations -cortex hardware --recommend -``` - -## API Reference - -### detect_hardware() - -Detects all system hardware and returns `HardwareInfo` object. - -```python -info = detect_hardware() - -# CPU info -info.cpu.model # "AMD Ryzen 9 5900X" -info.cpu.cores # 12 -info.cpu.threads # 24 -info.cpu.architecture # "x86_64" - -# GPU info -info.gpu.detected # True -info.gpu.model # "NVIDIA GeForce RTX 4090" -info.gpu.vendor # "nvidia" -info.gpu.driver # "535.154.05" -info.gpu.cuda_version # "12.3" -info.gpu.vram_gb # 24.0 - -# Memory info -info.memory.total_gb # 64.0 -info.memory.available_gb # 48.5 - -# Storage info -info.storage.devices # [StorageDevice(...), ...] -info.storage.total_gb # 2000.0 -``` - -### get_recommendations() - -Returns hardware-aware package recommendations. - -```python -recs = get_recommendations(info) -# Returns: [ -# "nvidia-driver-535 (GPU detected)", -# "cuda-toolkit-12-3 (CUDA available)", -# "python3-venv (development)", -# ] -``` - -## Detection Methods - -### CPU Detection - -```python -# Sources: -# 1. /proc/cpuinfo -# 2. 
lscpu command -# 3. platform module fallback -``` - -### GPU Detection - -```python -# NVIDIA: nvidia-smi -# AMD: rocm-smi, lspci -# Intel: lspci -``` - -### Memory Detection - -```python -# Sources: -# 1. /proc/meminfo -# 2. free command fallback -``` - -## Data Classes - -### HardwareInfo - -```python -@dataclass -class HardwareInfo: - cpu: CPUInfo - gpu: GPUInfo - memory: MemoryInfo - storage: StorageInfo - network: NetworkInfo - detected_at: datetime -``` - -### CPUInfo - -```python -@dataclass -class CPUInfo: - model: str - vendor: str - cores: int - threads: int - architecture: str - frequency_mhz: float - cache_mb: float - flags: List[str] # CPU features -``` - -### GPUInfo - -```python -@dataclass -class GPUInfo: - detected: bool - model: str - vendor: str # nvidia, amd, intel - driver: str - vram_gb: float - cuda_version: str - compute_capability: str -``` - -## Integration - -### With Package Manager - -```python -from cortex.hardware_detection import detect_hardware -from cortex.package_manager import install - -info = detect_hardware() - -if info.gpu.detected and info.gpu.vendor == "nvidia": - # Install with GPU optimizations - install("tensorflow", gpu=True) -else: - # CPU-only installation - install("tensorflow") -``` - -### With First-Run Wizard - -```python -# Automatically called during setup -from cortex.first_run_wizard import FirstRunWizard - -wizard = FirstRunWizard() -wizard._detect_system() # Uses hardware_detection internally -``` - -## Output Formats - -### Human-Readable - -``` -System Hardware -=============== -CPU: AMD Ryzen 9 5900X 12-Core @ 3.70 GHz - Architecture: x86_64, Threads: 24 - -GPU: NVIDIA GeForce RTX 4090 - Driver: 535.154.05, CUDA: 12.3 - VRAM: 24 GB - -Memory: 64.0 GB total, 48.5 GB available - -Storage: - /dev/nvme0n1: 1000 GB (NVMe SSD) - /dev/sda: 2000 GB (HDD) -``` - -### JSON - -```json -{ - "cpu": { - "model": "AMD Ryzen 9 5900X", - "cores": 12, - "threads": 24 - }, - "gpu": { - "detected": true, - "model": "NVIDIA 
GeForce RTX 4090", - "cuda_version": "12.3" - }, - "memory": { - "total_gb": 64.0, - "available_gb": 48.5 - } -} -``` - -## Performance - -| Operation | Time | -|-----------|------| -| CPU detection | <50ms | -| GPU detection | <200ms | -| Full scan | <500ms | - -## Testing - -```bash -pytest tests/test_hardware_detection.py -v -pytest tests/test_hardware_detection.py --cov=cortex.hardware_detection -``` - ---- - -**Closes:** #253 diff --git a/docs/IMPLEMENTATION_SUMMARY.md b/docs/IMPLEMENTATION_SUMMARY.md deleted file mode 100644 index 0c6a51e..0000000 --- a/docs/IMPLEMENTATION_SUMMARY.md +++ /dev/null @@ -1,288 +0,0 @@ -# Implementation Summary - Issue #27: Progress Notifications & Status Updates - -## ๐Ÿ“‹ Overview - -Implemented comprehensive progress tracking system for Cortex Linux with real-time progress bars, time estimation, multi-stage tracking, desktop notifications, and cancellation support. - -**Bounty**: $50 upon merge -**Issue**: https://github.com/cortexlinux/cortex/issues/27 -**Developer**: @AlexanderLuzDH - -## โœ… Completed Features - -### 1. Progress Bar Implementation -- โœ… Beautiful Unicode progress bars using `rich` library -- โœ… Real-time visual feedback with percentage completion -- โœ… Graceful fallback to plain text when `rich` unavailable -- โœ… Color-coded status indicators (green for complete, cyan for in-progress, red for failed) - -### 2. Time Estimation Algorithm -- โœ… Smart ETA calculation based on completed stages -- โœ… Adaptive estimation that improves as operation progresses -- โœ… Multiple time formats (seconds, minutes, hours) -- โœ… Byte-based progress tracking for downloads - -### 3. Multi-Stage Progress Tracking -- โœ… Track unlimited number of stages -- โœ… Individual progress per stage (0-100%) -- โœ… Overall progress calculation across all stages -- โœ… Stage status tracking (pending/in-progress/completed/failed/cancelled) -- โœ… Per-stage timing and elapsed time display - -### 4. 
Background Operation Support -- โœ… Fully async implementation using `asyncio` -- โœ… Non-blocking progress updates -- โœ… Support for concurrent operations -- โœ… `run_with_progress()` helper for easy async execution - -### 5. Desktop Notifications -- โœ… Cross-platform notifications using `plyer` -- โœ… Configurable notification triggers (completion/error) -- โœ… Graceful degradation when notifications unavailable -- โœ… Custom notification messages and timeouts - -### 6. Cancellation Support -- โœ… Graceful Ctrl+C handling via signal handlers -- โœ… Cleanup callback support for resource cleanup -- โœ… Proper stage status updates on cancellation -- โœ… User-friendly cancellation messages - -### 7. Testing -- โœ… **35 comprehensive unit tests** covering all features -- โœ… 100% test pass rate -- โœ… Tests for edge cases and error handling -- โœ… Async operation testing -- โœ… Mock-based tests for external dependencies - -### 8. Documentation -- โœ… Complete API documentation -- โœ… Usage examples and code snippets -- โœ… Integration guide -- โœ… Troubleshooting section -- โœ… Configuration options - -## ๐Ÿ“ Files Added - -``` -src/ -โ”œโ”€โ”€ progress_tracker.py # Core implementation (485 lines) -โ””โ”€โ”€ test_progress_tracker.py # Comprehensive tests (350 lines) - -docs/ -โ””โ”€โ”€ PROGRESS_TRACKER.md # Full documentation - -examples/ -โ”œโ”€โ”€ progress_demo.py # Integration demo with SandboxExecutor -โ””โ”€โ”€ standalone_demo.py # Cross-platform standalone demo - -requirements.txt # Updated with new dependencies -IMPLEMENTATION_SUMMARY.md # This file -``` - -## ๐ŸŽฏ Acceptance Criteria Status - -All requirements from the issue have been met: - -- โœ… **Progress bar implementation** - Using rich library with Unicode bars -- โœ… **Time estimation based on package size** - Smart ETA with byte-based tracking -- โœ… **Multi-stage tracking** - Unlimited stages with individual progress -- โœ… **Background mode support** - Full async/await implementation -- โœ… 
**Desktop notifications (optional)** - Cross-platform via plyer -- โœ… **Cancellation handling** - Graceful Ctrl+C with cleanup -- โœ… **Tests included** - 35 comprehensive tests, all passing -- โœ… **Documentation** - Complete API docs, examples, and integration guide - -## ๐Ÿš€ Example Output - -``` -Installing PostgreSQL... -โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 45% -โฑ๏ธ Estimated time remaining: 2m 15s - - โœ“ Update package lists (5s) - โœ“ Download postgresql-15 (1m 23s) - โ†’ Installing dependencies (current) - Configuring database - Running tests -``` - -## ๐Ÿ”ง Technical Implementation - -### Architecture - -**Class Hierarchy:** -``` -ProgressStage # Individual stage data and status - โ†“ -ProgressTracker # Main tracker with all features - โ†“ -RichProgressTracker # Enhanced version with rich.Live integration -``` - -**Key Design Decisions:** - -1. **Separation of Concerns**: Stage logic separated from display logic -2. **Graceful Degradation**: Works without `rich` or `plyer` installed -3. **Async-First**: Built on asyncio for modern Python patterns -4. **Type Safety**: Full type hints throughout codebase -5. 
**Testability**: Modular design makes testing easy - -### Dependencies - -**Required:** -- Python 3.8+ - -**Recommended:** -- `rich>=13.0.0` - Beautiful terminal UI -- `plyer>=2.0.0` - Desktop notifications - -**Development:** -- `pytest>=7.0.0` -- `pytest-asyncio>=0.21.0` -- `pytest-cov>=4.0.0` - -## ๐Ÿ“Š Test Results - -``` -============================= test session starts ============================= -platform win32 -- Python 3.11.4, pytest-7.4.3 -collected 35 items - -test_progress_tracker.py::TestProgressStage::test_stage_creation PASSED [ 2%] -test_progress_tracker.py::TestProgressStage::test_stage_elapsed_time PASSED [ 5%] -test_progress_tracker.py::TestProgressStage::test_stage_is_complete PASSED [ 8%] -test_progress_tracker.py::TestProgressStage::test_format_elapsed PASSED [ 11%] -... -test_progress_tracker.py::TestEdgeCases::test_render_without_rich PASSED [100%] - -============================= 35 passed in 2.98s =============================== -``` - -**Test Coverage:** -- ProgressStage class: 100% -- ProgressTracker class: 100% -- RichProgressTracker class: 100% -- Async helpers: 100% -- Edge cases: 100% - -## ๐Ÿ’ก Usage Examples - -### Basic Usage - -```python -from progress_tracker import ProgressTracker, run_with_progress - -async def install_package(tracker): - # Add stages - download_idx = tracker.add_stage("Download package", total_bytes=10_000_000) - install_idx = tracker.add_stage("Install package") - - # Execute stages with progress - tracker.start_stage(download_idx) - # ... download logic ... - tracker.complete_stage(download_idx) - - tracker.start_stage(install_idx) - # ... install logic ... - tracker.complete_stage(install_idx) - -# Run with progress tracking -tracker = ProgressTracker("Installing Package") -await run_with_progress(tracker, install_package) -``` - -### With Cancellation - -```python -def cleanup(): - # Cleanup partial downloads, temp files, etc. 
- pass - -tracker = ProgressTracker("Installation") -tracker.setup_cancellation_handler(callback=cleanup) - -# User can press Ctrl+C safely -await run_with_progress(tracker, install_package) -``` - -## ๐Ÿ” Code Quality - -- **Type Hints**: Full type annotations throughout -- **Docstrings**: Comprehensive documentation for all public methods -- **Error Handling**: Robust exception handling with graceful failures -- **Platform Support**: Works on Windows, Linux, macOS -- **Performance**: Minimal overhead (<0.1% CPU, ~1KB per stage) - -## ๐Ÿงช Testing - -Run tests: -```bash -cd src -pytest test_progress_tracker.py -v -pytest test_progress_tracker.py --cov=progress_tracker --cov-report=html -``` - -Run demo: -```bash -python examples/standalone_demo.py -``` - -## ๐Ÿ“ Integration Notes - -The progress tracker is designed to integrate seamlessly with existing Cortex components: - -1. **SandboxExecutor Integration**: Wrap executor calls with progress tracking -2. **LLM Integration**: Display AI reasoning progress -3. **Package Manager**: Track apt/pip operations -4. **Hardware Profiler**: Show detection progress - -Example integration pattern: -```python -from progress_tracker import ProgressTracker -from sandbox_executor import SandboxExecutor - -async def cortex_install(package: str): - tracker = ProgressTracker(f"Installing {package}") - executor = SandboxExecutor() - - update_idx = tracker.add_stage("Update") - install_idx = tracker.add_stage("Install") - - tracker.start() - - tracker.start_stage(update_idx) - result = executor.execute("apt-get update") - tracker.complete_stage(update_idx) - - tracker.start_stage(install_idx) - result = executor.execute(f"apt-get install -y {package}") - tracker.complete_stage(install_idx) - - tracker.complete(success=result.success) -``` - -## ๐ŸŽ‰ Key Achievements - -1. **All acceptance criteria met** - Every requirement from the issue completed -2. **35 tests, 100% passing** - Comprehensive test coverage -3. 
**Production-ready code** - Type-safe, well-documented, error-handled -4. **Cross-platform** - Works on Windows, Linux, macOS -5. **Extensible design** - Easy to add new features -6. **Beautiful UX** - Modern terminal UI with rich formatting - -## ๐Ÿš€ Next Steps - -1. Submit pull request to cortexlinux/cortex -2. Address any code review feedback -3. Merge and claim $50 bounty! - -## ๐Ÿ“ž Contact - -**GitHub**: @AlexanderLuzDH -**For questions**: Comment on Issue #27 - ---- - -*Implementation completed in <8 hours total development time* -*Ready for review and merge! ๐ŸŽฏ* - diff --git a/docs/KERNEL_FEATURES.md b/docs/KERNEL_FEATURES.md deleted file mode 100644 index bbad83f..0000000 --- a/docs/KERNEL_FEATURES.md +++ /dev/null @@ -1,48 +0,0 @@ -# Cortex Kernel Features - -User-space implementations of kernel-level AI concepts. These demonstrate kernel-level thinking while running on standard Ubuntu 24.04. - -## Components - -### 1. Model Lifecycle Manager -Systemd-based LLM service management. - -```bash -cortex model register llama-70b --path meta-llama/Llama-2-70b-hf --backend vllm -cortex model start llama-70b -cortex model status -``` - -### 2. KV-Cache Manager -Shared memory cache pools for LLM inference. - -```bash -cortex cache create llama-cache --size 16G -cortex cache status -cortex cache destroy llama-cache -``` - -### 3. Accelerator Limits -cgroups v2 wrapper for AI workloads. - -```bash -cortex limits create inference-job --preset inference --gpus 2 -cortex limits status -``` - -### 4. /dev/llm Virtual Device -FUSE-based file interface to LLMs. - -```bash -cortex-llm-device mount /mnt/llm -echo "Hello" > /mnt/llm/claude/prompt -cat /mnt/llm/claude/response -``` - -## Architecture - -These are Tier 1 features from our kernel enhancement roadmap - user-space implementations that can ship now while we work on upstream kernel contributions. 
- -## Patents - -The KV-Cache Manager implements concepts from our provisional patent applications for kernel-managed KV-cache memory regions. diff --git a/docs/POST_MVP_AUDIT.md b/docs/POST_MVP_AUDIT.md deleted file mode 100644 index c508e59..0000000 --- a/docs/POST_MVP_AUDIT.md +++ /dev/null @@ -1,769 +0,0 @@ -# Cortex Linux Post-MVP Audit Report - -**Generated:** 2025-11-28 -**Target:** February 2025 Seed Funding ($2-3M) -**Repository:** https://github.com/cortexlinux/cortex - ---- - -## Executive Summary Dashboard - -| Category | Current State | Target State | Priority | -|----------|--------------|--------------|----------| -| **MVP Completion** | 89% (25/28 issues closed) | 100% | ๐Ÿ”ด Critical | -| **Branch Protection** | โŒ None | โœ… Required reviews + CI | ๐Ÿ”ด Critical | -| **Security Scanning** | โŒ Disabled | โœ… All enabled | ๐Ÿ”ด Critical | -| **Open PRs** | 5 with conflicts | 0 conflicts | ๐ŸŸก High | -| **Marketing Site** | โŒ None | โœ… Investor-ready | ๐Ÿ”ด Critical | -| **Documentation** | โœ… Good (recent overhaul) | โœ… Complete | ๐ŸŸข Done | -| **CI/CD** | โœ… Working | โœ… Enhanced | ๐ŸŸข Done | - ---- - -## Part 1: Closed Issues Audit - -### Summary Statistics -- **Total Closed Issues:** 169 -- **Completed (COMPLETED):** ~15 -- **Deferred (NOT_PLANNED):** ~154 -- **Reopen Candidates:** 28 - -### Issues to REOPEN NOW (Post-MVP Priority) - -| # | Title | Original Bounty | New Bounty | Milestone | Rationale | -|---|-------|-----------------|------------|-----------|-----------| -| **42** | Package Conflict Resolution UI | $25 | $100 | v0.2 | PR #203 exists, core UX feature | -| **43** | Smart Retry Logic with Exponential Backoff | $25 | $75 | v0.2 | Reliability feature | -| **44** | Installation Templates for Common Stacks | $25 | $75 | v0.2 | PR #201 exists, high demand | -| **45** | System Snapshot and Rollback Points | $25 | $150 | v0.2 | Enterprise requirement | -| **103** | Installation Simulation Mode | $25 | $75 | v0.2 | Safety 
feature, demo-worthy | -| **112** | Alternative Package Suggestions | $25 | $50 | v0.3 | AI-powered UX enhancement | -| **117** | Smart Package Search with Fuzzy Matching | $25 | $75 | v0.2 | Core search improvement | -| **119** | Package Recommendation Based on System Role | $25 | $100 | v0.3 | AI differentiator | -| **125** | Smart Cleanup and Disk Space Optimizer | $25 | $50 | v0.3 | Utility feature | -| **126** | Package Import from Requirements Files | $25 | $75 | v0.2 | Developer workflow | -| **128** | System Health Score and Recommendations | $25 | $100 | v0.3 | Dashboard feature | -| **170** | Package Performance Profiling | $25 | $100 | v1.0 | Enterprise feature | -| **171** | Immutable Infrastructure Mode | $25 | $150 | v1.0 | Enterprise/DevOps | -| **172** | Package Certification and Attestation | $25 | $200 | v1.0 | Security feature | -| **178** | Chaos Engineering Integration | $25 | $100 | v1.0 | Enterprise testing | -| **177** | AI-Powered Capacity Planning | $25 | $150 | v1.0 | Enterprise feature | - -### Issues to REOPEN LATER (Post-Funding) - -| # | Title | Bounty | Milestone | Notes | -|---|-------|--------|-----------|-------| -| 131 | AI-Powered Installation Tutor | $50 | v1.0 | Nice-to-have AI feature | -| 135 | Desktop Notification System | $50 | v1.0 | UX enhancement | -| 144 | Package Installation Profiles | $75 | v0.3 | User personalization | -| 175 | Time-Travel Debugging | $100 | v1.0 | Advanced debugging | -| 182 | Automated Technical Debt Detection | $75 | v1.0 | Code quality | -| 185 | Self-Healing System Architecture | $200 | v1.0+ | Ambitious AI feature | - -### Issues to KEEP CLOSED (Not Relevant) - -| # | Title | Reason | -|---|-------|--------| -| 173 | Energy Efficiency Optimization | Too niche, low demand | -| 174 | Federated Learning for Package Intelligence | Over-engineered for current stage | -| 176 | Package Dependency Marketplace | Requires ecosystem, premature | -| 179 | Package DNA and Genetic Lineage | Experimental, 
low value | -| 180 | Smart Contract Integration | Web3 hype, not core value | -| 181 | Package Sentiment Analysis | Scope creep | -| 183 | Package Installation Gamification | Distracting from core value | -| 184 | Quantum Computing Package Support | Too early | -| 186 | Package Installation Streaming | Not core feature | - -### CLI Commands to Reopen Issues - -```bash -# Reopen high-priority issues for v0.2 -gh issue reopen 42 43 44 45 103 117 126 --repo cortexlinux/cortex - -# Add labels and milestone -for issue in 42 43 44 45 103 117 126; do - gh issue edit $issue --repo cortexlinux/cortex \ - --add-label "priority: high,bounty,post-mvp" \ - --milestone "Post-MVP - Enhancements" -done - -# Reopen medium-priority issues for v0.3 -gh issue reopen 112 119 125 128 144 --repo cortexlinux/cortex - -for issue in 112 119 125 128 144; do - gh issue edit $issue --repo cortexlinux/cortex \ - --add-label "priority: medium,bounty" \ - --milestone "Post-MVP - Enhancements" -done -``` - ---- - -## Part 2: Repository Settings Audit - -### ๐Ÿ”ด CRITICAL GAPS (Fix This Week) - -| Setting | Current | Recommended | CLI Command | -|---------|---------|-------------|-------------| -| **Branch Protection** | โŒ None | Required reviews + CI | See below | -| **Secret Scanning** | โŒ Disabled | โœ… Enabled | GitHub UI | -| **Push Protection** | โŒ Disabled | โœ… Enabled | GitHub UI | -| **Dependabot Security** | โŒ Disabled | โœ… Enabled | GitHub UI | -| **Code Scanning** | โŒ None | โœ… CodeQL | Add workflow | -| **SECURITY.md** | โŒ Missing | โœ… Present | Create file | -| **CODEOWNERS** | โŒ Missing | โœ… Present | Create file | - -### Enable Branch Protection - -```bash -gh api repos/cortexlinux/cortex/branches/main/protection -X PUT \ - -H "Accept: application/vnd.github+json" \ - -f required_status_checks='{"strict":true,"contexts":["test (3.10)","test (3.11)","test (3.12)","lint","security"]}' \ - -f enforce_admins=false \ - -f 
required_pull_request_reviews='{"required_approving_review_count":1,"dismiss_stale_reviews":true}' \ - -f restrictions=null \ - -f allow_force_pushes=false \ - -f allow_deletions=false -``` - -### Create SECURITY.md - -```bash -cat > SECURITY.md << 'EOF' -# Security Policy - -## Supported Versions - -| Version | Supported | -| ------- | ------------------ | -| 0.1.x | :white_check_mark: | - -## Reporting a Vulnerability - -Please report security vulnerabilities to: security@cortexlinux.com - -**Do NOT open public issues for security vulnerabilities.** - -We will acknowledge receipt within 48 hours and provide a detailed response within 7 days. - -## Security Measures - -- All commands are validated against dangerous patterns before execution -- Firejail sandboxing for untrusted command execution -- No execution of piped curl/wget to shell -- Regular dependency scanning via Dependabot -EOF -``` - -### Create CODEOWNERS - -```bash -mkdir -p .github -cat > .github/CODEOWNERS << 'EOF' -# Cortex Linux Code Owners -* @mikejmorgan-ai - -# Security-sensitive files -cortex/coordinator.py @mikejmorgan-ai -cortex/utils/commands.py @mikejmorgan-ai -src/sandbox_executor.py @mikejmorgan-ai - -# CI/CD -.github/ @mikejmorgan-ai -EOF -``` - -### Add CodeQL Workflow - -```bash -cat > .github/workflows/codeql.yml << 'EOF' -name: "CodeQL" - -on: - push: - branches: [ main ] - pull_request: - branches: [ main ] - schedule: - - cron: '0 6 * * 1' - -jobs: - analyze: - name: Analyze - runs-on: ubuntu-latest - permissions: - actions: read - contents: read - security-events: write - - steps: - - uses: actions/checkout@v4 - - uses: github/codeql-action/init@v3 - with: - languages: python - - uses: github/codeql-action/analyze@v3 -EOF -``` - -### ๐ŸŸข GOOD STATUS - -| Setting | Status | -|---------|--------| -| Visibility | โœ… Public | -| Issues | โœ… Enabled | -| Discussions | โœ… Enabled | -| Wiki | โœ… Enabled | -| Discord Webhook | โœ… Active | -| Topics | โœ… ai, automation, linux, 
package-manager | - -### ๐ŸŸก RECOMMENDED IMPROVEMENTS - -| Setting | Current | Recommended | -|---------|---------|-------------| -| Auto-delete branches | โŒ | โœ… Enable | -| Auto-merge | โŒ | โœ… Enable | -| GitHub Pages | โŒ | โœ… Enable for docs | -| Environments | โŒ None | staging, production | -| Homepage | โŒ null | cortexlinux.com | - -```bash -# Enable auto-delete and auto-merge -gh repo edit cortexlinux/cortex --delete-branch-on-merge --enable-auto-merge - -# Add homepage -gh repo edit cortexlinux/cortex --homepage "https://cortexlinux.com" -``` - ---- - -## Part 3: Web Interface Roadmap - -### A. Marketing Site (cortexlinux.com) - MUST HAVE FOR FUNDING - -**Recommended Stack:** Astro + Tailwind CSS on Vercel - -| Option | Pros | Cons | Time | Cost/mo | -|--------|------|------|------|---------| -| **Astro + Tailwind** โœ… | Fast, SEO-friendly, modern | Learning curve | 2-3 weeks | $0 (Vercel free) | -| Next.js | Full-stack capable | Overkill for marketing | 3-4 weeks | $0-20 | -| GitHub Pages + Jekyll | Free, simple | Limited design | 1-2 weeks | $0 | - -**Recommended:** Astro + Tailwind on Vercel for investor-ready quality with minimal cost. 
- -#### Marketing Site Requirements - -``` -cortexlinux.com/ -โ”œโ”€โ”€ / (Landing) -โ”‚ โ”œโ”€โ”€ Hero with terminal animation "cortex install docker" -โ”‚ โ”œโ”€โ”€ Value proposition (3 bullets) -โ”‚ โ”œโ”€โ”€ Live GitHub stats widget -โ”‚ โ””โ”€โ”€ CTA: "Get Started" โ†’ GitHub -โ”œโ”€โ”€ /features -โ”‚ โ”œโ”€โ”€ AI-Powered Installation -โ”‚ โ”œโ”€โ”€ Conflict Resolution -โ”‚ โ”œโ”€โ”€ Rollback & Recovery -โ”‚ โ””โ”€โ”€ Security Sandboxing -โ”œโ”€โ”€ /pricing -โ”‚ โ”œโ”€โ”€ Community (Free) -โ”‚ โ””โ”€โ”€ Enterprise (Contact us) -โ”œโ”€โ”€ /docs โ†’ Link to GitHub wiki or separate docs site -โ””โ”€โ”€ /about - โ”œโ”€โ”€ Team - โ””โ”€โ”€ Investors/Advisors -``` - -#### Implementation Timeline - -| Week | Deliverable | -|------|-------------| -| 1 | Design mockups + Astro project setup | -| 2 | Landing page + features page | -| 3 | Pricing + about + polish | -| 4 | Testing + launch | - -### B. Product Dashboard (app.cortexlinux.com) - NICE TO HAVE - -**Recommended Stack:** Streamlit (fastest to MVP) or React + Vite - -| Option | Pros | Cons | Time | Cost/mo | -|--------|------|------|------|---------| -| **Streamlit** โœ… | Python-native, fast | Limited customization | 1-2 weeks | $0-50 | -| React + Vite | Full control | More development time | 4-6 weeks | $0-20 | -| Electron | Desktop app | Distribution complexity | 6-8 weeks | $0 | -| Textual TUI | Terminal users love it | Niche audience | 2-3 weeks | $0 | - -**Recommended:** Start with Streamlit for quick dashboard MVP, migrate to React later if needed. - -#### Dashboard Features (MVP) - -1. Installation History Viewer -2. Rollback Interface -3. Package Search -4. System Health Score -5. Settings Management - -### C. 
Domain Setup - -```bash -# Purchase domains (if not already owned) -# cortexlinux.com - Marketing site -# app.cortexlinux.com - Dashboard (subdomain) -# docs.cortexlinux.com - Documentation (subdomain) -``` - ---- - -## Part 4: Open PR Triage - -### PR Status Summary - -| PR | Title | Author | CI | Conflicts | Verdict | -|----|-------|--------|----|-----------|---------| -| **#199** | Self-update version mgmt | @dhvll | โœ… Pass | โš ๏ธ Yes | REQUEST CHANGES | -| **#201** | Installation Templates | @aliraza556 | โœ… Pass | โš ๏ธ Yes | REQUEST CHANGES | -| **#203** | Conflict Resolution | @Sahilbhatane | โœ… Pass | โš ๏ธ Yes | REQUEST CHANGES | -| **#38** | Pre-flight Checker | @AlexanderLuzDH | โŒ Fail | โš ๏ธ Yes | REQUEST CHANGES | -| **#21** | Config Templates | @aliraza556 | โŒ Fail | โš ๏ธ Yes | CLOSE (Superseded) | - -### PR #199 - Self Update Version Management -**Author:** @dhvll | **Additions:** 802 | **Files:** 9 - -**Code Review:** -- โœ… Good: Adds update channel support (stable/beta) -- โœ… Good: Checksum verification -- โœ… Good: Automatic rollback on failure -- โš ๏ธ Issue: Merge conflicts with main -- โš ๏ธ Issue: Removes some README content - -**Verdict:** REQUEST CHANGES - Rebase needed - -```bash -gh pr comment 199 --repo cortexlinux/cortex --body "$(cat <<'EOF' -## Code Review - -Thanks for implementing the self-update system! The update channel support and rollback mechanism look solid. - -### Required Changes -1. **Rebase required** - This PR has merge conflicts with main. Please run: - ```bash - git fetch origin main - git rebase origin/main - git push --force-with-lease - ``` - -2. **README changes** - Please preserve the existing README content while adding the update documentation. - -Once rebased, this is ready to merge. 
๐Ÿš€ -EOF -)" -``` - -### PR #201 - Installation Templates System -**Author:** @aliraza556 | **Additions:** 2,418 | **Files:** 11 - -**Code Review:** -- โœ… Good: Comprehensive template system (LAMP, MEAN, ML, etc.) -- โœ… Good: YAML template format -- โœ… Good: Hardware compatibility checks -- โœ… Good: Template validation -- โš ๏ธ Issue: Merge conflicts with main - -**Verdict:** REQUEST CHANGES - Rebase needed - -```bash -gh pr comment 201 --repo cortexlinux/cortex --body "$(cat <<'EOF' -## Code Review - -Excellent work on the installation templates system! The template format is well-designed and the hardware compatibility checking is a great addition. - -### Required Changes -1. **Rebase required** - This PR has merge conflicts. Please run: - ```bash - git fetch origin main - git rebase origin/main - git push --force-with-lease - ``` - -### After Rebase -This PR is approved and ready to merge once conflicts are resolved. Great contribution! ๐ŸŽ‰ -EOF -)" -``` - -### PR #203 - Interactive Package Conflict Resolution -**Author:** @Sahilbhatane | **Additions:** 1,677 | **Files:** 5 - -**Code Review:** -- โœ… Good: Interactive conflict UI -- โœ… Good: Saved preferences system -- โœ… Good: Integration with PreferencesManager -- โš ๏ธ Issue: Merge conflicts - -**Verdict:** REQUEST CHANGES - Rebase needed - -```bash -gh pr comment 203 --repo cortexlinux/cortex --body "$(cat <<'EOF' -## Code Review - -Great implementation of the conflict resolution system! The saved preferences feature is particularly useful for repeat installations. - -### Required Changes -1. **Rebase required** - Please resolve merge conflicts: - ```bash - git fetch origin main - git rebase origin/main - git push --force-with-lease - ``` - -Ready to merge after rebase! 
๐Ÿš€ -EOF -)" -``` - -### PR #38 - System Requirements Pre-flight Checker -**Author:** @AlexanderLuzDH | **Additions:** 628 | **Deletions:** 2,815 | **Files:** 18 - -**Code Review:** -- โš ๏ธ Concern: Large number of deletions (2,815 lines) -- โš ๏ธ Concern: SonarCloud analysis failed -- โš ๏ธ Concern: Old PR (Nov 12) -- โš ๏ธ Issue: Merge conflicts - -**Verdict:** REQUEST CHANGES - Needs significant work - -```bash -gh pr comment 38 --repo cortexlinux/cortex --body "$(cat <<'EOF' -## Code Review - -Thanks for working on the pre-flight checker! However, there are some concerns: - -### Required Changes -1. **Large deletions** - This PR removes 2,815 lines. Please ensure no critical code is being removed unintentionally. - -2. **CI Failure** - SonarCloud analysis is failing. Please investigate and fix. - -3. **Rebase required** - Please resolve merge conflicts. - -4. **Scope review** - Please provide a summary of what files/features are being removed and why. - -Once these issues are addressed, we can proceed with the review. -EOF -)" -``` - -### PR #21 - Configuration File Template System -**Author:** @aliraza556 | **Additions:** 3,642 | **Files:** 19 - -**Code Review:** -- โš ๏ธ Already approved but never merged -- โš ๏ธ Very old (Nov 8) -- โš ๏ธ May be superseded by PR #201 - -**Verdict:** CLOSE - Superseded by newer implementation - -```bash -gh pr close 21 --repo cortexlinux/cortex --comment "$(cat <<'EOF' -Closing this PR as the configuration template functionality has been implemented differently in the codebase. - -@aliraza556 - Thank you for your contribution! Your work on PR #201 (Installation Templates) is the preferred implementation path. Please focus on getting that PR rebased and merged. 
-EOF -)" -``` - ---- - -## Part 5: Contributor Pipeline - -### Outstanding Bounties (Merged PRs) - -| PR | Title | Author | Bounty | Status | -|----|-------|--------|--------|--------| -| #198 | Installation history tracking | @aliraza556 | $75 | **UNPAID** | -| #195 | Package manager wrapper | @dhvll | $50 | **UNPAID** | -| #190 | Installation coordinator | @Sahilbhatane | $50 | **UNPAID** | -| #37 | Progress notifications | @AlexanderLuzDH | $25 | **UNPAID** | -| #6 | Sandbox executor | @dhvll | $50 | **UNPAID** | -| #5 | LLM integration | @Sahilbhatane | $100 | **UNPAID** | -| #4 | Hardware profiling | @dhvll | $50 | **UNPAID** | -| #200 | User Preferences | @Sahilbhatane | $50 | **UNPAID** | -| #202 | Config export/import | @danishirfan21 | $50 | **UNPAID** | - -**Total Outstanding:** ~$500 - -### Contributor Summary - -| Contributor | Merged PRs | Total Bounty Owed | -|-------------|------------|-------------------| -| @Sahilbhatane | 3 | $200 | -| @dhvll | 3 | $150 | -| @aliraza556 | 1 | $75 | -| @AlexanderLuzDH | 1 | $25 | -| @danishirfan21 | 1 | $50 | - -### New Bounty Issues to Create - -```bash -# Issue 1: Marketing Website -gh issue create --repo cortexlinux/cortex \ - --title "Build Marketing Website (cortexlinux.com)" \ - --body "$(cat <<'EOF' -## Description -Create an investor-ready marketing website for Cortex Linux. 
- -## Requirements -- Astro + Tailwind CSS -- Landing page with terminal demo animation -- Features page -- Pricing page (Community free / Enterprise contact) -- Mobile responsive -- < 2s load time -- Deploy on Vercel - -## Acceptance Criteria -- [ ] Landing page with hero animation -- [ ] Features overview -- [ ] Pricing table -- [ ] Mobile responsive -- [ ] Lighthouse score > 90 -- [ ] Deployed to cortexlinux.com - -**Skills:** Astro, Tailwind CSS, Web Design -**Bounty:** $500 upon merge -**Priority:** Critical -**Deadline:** January 15, 2025 -EOF -)" --label "bounty,priority: critical,help wanted" - -# Issue 2: Streamlit Dashboard MVP -gh issue create --repo cortexlinux/cortex \ - --title "Build Streamlit Dashboard MVP" \ - --body "$(cat <<'EOF' -## Description -Create a web dashboard for Cortex using Streamlit. - -## Features -- Installation history viewer -- Package search -- System health score display -- Settings management - -## Acceptance Criteria -- [ ] View installation history -- [ ] Search packages -- [ ] Display system health -- [ ] Basic settings UI -- [ ] Deploy instructions - -**Skills:** Python, Streamlit, UI/UX -**Bounty:** $200 upon merge -**Priority:** High -EOF -)" --label "bounty,priority: high" - -# Issue 3: Test Coverage Improvement -gh issue create --repo cortexlinux/cortex \ - --title "Increase Test Coverage to 80%" \ - --body "$(cat <<'EOF' -## Description -Improve test coverage across the codebase to 80%+. 
- -## Current State -- Test directory: test/ -- Framework: pytest -- Current coverage: ~40% - -## Requirements -- Add unit tests for cortex/coordinator.py -- Add unit tests for cortex/packages.py -- Add unit tests for LLM/interpreter.py -- Add integration tests - -## Acceptance Criteria -- [ ] Coverage >= 80% -- [ ] All tests pass -- [ ] Coverage report in CI - -**Skills:** Python, pytest, testing -**Bounty:** $150 upon merge -**Priority:** High -EOF -)" --label "bounty,testing,priority: high" - -# Issue 4: Documentation Improvements -gh issue create --repo cortexlinux/cortex \ - --title "API Documentation with Sphinx" \ - --body "$(cat <<'EOF' -## Description -Generate API documentation using Sphinx. - -## Requirements -- Sphinx setup -- Auto-generated from docstrings -- Published to GitHub Pages or docs.cortexlinux.com - -## Acceptance Criteria -- [ ] Sphinx configuration -- [ ] API reference generated -- [ ] Hosted documentation -- [ ] CI workflow for doc generation - -**Skills:** Python, Sphinx, Documentation -**Bounty:** $100 upon merge -**Priority:** Medium -EOF -)" --label "bounty,documentation" - -# Issue 5: Multi-Distro Support -gh issue create --repo cortexlinux/cortex \ - --title "Add Fedora/RHEL Support" \ - --body "$(cat <<'EOF' -## Description -Extend package manager support to Fedora/RHEL (dnf/yum). - -## Requirements -- Detect distro family -- Map apt commands to dnf equivalents -- Test on Fedora 39+ - -## Acceptance Criteria -- [ ] Distro detection -- [ ] dnf/yum command mapping -- [ ] Tests for RHEL family -- [ ] Documentation update - -**Skills:** Python, Linux, Package Management -**Bounty:** $150 upon merge -**Priority:** Medium -EOF -)" --label "bounty,enhancement" -``` - ---- - -## Immediate Actions (Run Now) - -### Security Settings (GitHub UI) -1. Go to Settings โ†’ Code security and analysis -2. Enable: Dependabot alerts โœ… -3. Enable: Dependabot security updates โœ… -4. Enable: Secret scanning โœ… -5. 
Enable: Push protection โœ… - -### CLI Commands to Execute - -```bash -# 1. Post PR review comments -gh pr comment 199 --repo cortexlinux/cortex --body "Please rebase: git fetch origin main && git rebase origin/main && git push --force-with-lease" -gh pr comment 201 --repo cortexlinux/cortex --body "Please rebase: git fetch origin main && git rebase origin/main && git push --force-with-lease" -gh pr comment 203 --repo cortexlinux/cortex --body "Please rebase: git fetch origin main && git rebase origin/main && git push --force-with-lease" -gh pr comment 38 --repo cortexlinux/cortex --body "Large deletions need review. Please explain the 2,815 lines removed." - -# 2. Close superseded PR -gh pr close 21 --repo cortexlinux/cortex --comment "Superseded by newer implementation" - -# 3. Reopen high-priority issues -gh issue reopen 42 43 44 45 103 117 126 --repo cortexlinux/cortex 2>/dev/null || echo "Some issues may already be open" - -# 4. Update repository settings -gh repo edit cortexlinux/cortex --delete-branch-on-merge --enable-auto-merge - -# 5. Create SECURITY.md and CODEOWNERS (run in repo directory) -cd /Users/allbots/cortex-review -echo '# Security Policy...' 
> SECURITY.md -mkdir -p .github -echo '* @mikejmorgan-ai' > .github/CODEOWNERS -``` - ---- - -## This Week Actions - -| Day | Task | Owner | -|-----|------|-------| -| Mon | Enable all security settings in GitHub UI | Admin | -| Mon | Add branch protection rules | Admin | -| Mon | Post PR review comments | Admin | -| Tue | Create SECURITY.md and CODEOWNERS | Admin | -| Tue | Add CodeQL workflow | Admin | -| Wed | Reopen priority issues with new bounties | Admin | -| Wed | Create new bounty issues | Admin | -| Thu | Follow up with contributors on PR rebases | Admin | -| Fri | Pay outstanding bounties ($500) | Admin | - ---- - -## Pre-Funding Actions (Before February 2025) - -### Critical Path - -``` -Week 1-2: Security & Infrastructure -โ”œโ”€โ”€ Enable all security features -โ”œโ”€โ”€ Add branch protection -โ”œโ”€โ”€ Create SECURITY.md, CODEOWNERS -โ””โ”€โ”€ Merge pending PRs (after rebase) - -Week 3-4: Marketing Website -โ”œโ”€โ”€ Design mockups -โ”œโ”€โ”€ Build landing page -โ”œโ”€โ”€ Build features page -โ””โ”€โ”€ Deploy to Vercel - -Week 5-6: Polish & Demo -โ”œโ”€โ”€ Streamlit dashboard MVP -โ”œโ”€โ”€ Demo video recording -โ”œโ”€โ”€ Documentation polish -โ””โ”€โ”€ GitHub profile optimization - -Week 7-8: Investor Prep -โ”œโ”€โ”€ Pitch deck finalization -โ”œโ”€โ”€ Demo environment stable -โ”œโ”€โ”€ Metrics dashboard -โ””โ”€โ”€ Launch marketing site -``` - -### Milestone Targets - -| Milestone | Target Date | Issues | -|-----------|-------------|--------| -| MVP Complete | Dec 15, 2024 | Close remaining 3 issues | -| Security Hardened | Dec 20, 2024 | All security settings enabled | -| Marketing Site Live | Jan 15, 2025 | cortexlinux.com deployed | -| Demo Ready | Jan 31, 2025 | Streamlit dashboard + video | -| Funding Ready | Feb 10, 2025 | All materials complete | - ---- - -## Budget Summary - -| Category | Amount | -|----------|--------| -| Outstanding Bounties | $500 | -| New Bounty Issues | $1,100 | -| Marketing Site Bounty | $500 | -| Domain (if needed) | 
$50/yr | -| **Total Pre-Funding** | ~$2,150 | - ---- - -## Risk Assessment - -| Risk | Likelihood | Impact | Mitigation | -|------|------------|--------|------------| -| PRs not rebased | Medium | Medium | Direct contributor outreach | -| Marketing site delay | Medium | High | Start immediately, hire if needed | -| Security incident | Low | Critical | Enable all security features NOW | -| Contributor burnout | Medium | Medium | Pay bounties promptly | - ---- - -## Contact Information - -**Repository:** https://github.com/cortexlinux/cortex -**Discord:** https://discord.gg/uCqHvxjU83 -**Issues:** https://github.com/cortexlinux/cortex/issues - ---- - -*Generated by Claude Code audit on 2025-11-28* diff --git a/docs/PROGRESS_INDICATORS.md b/docs/PROGRESS_INDICATORS.md deleted file mode 100644 index 12611a9..0000000 --- a/docs/PROGRESS_INDICATORS.md +++ /dev/null @@ -1,414 +0,0 @@ -# Progress Indicators Module - -**Issue:** #259 -**Status:** Ready for Review -**Bounty:** As specified in issue (+ bonus after funding) - -## Overview - -Beautiful, informative progress indicators for all Cortex operations. Uses the Rich library for stunning terminal UI when available, with graceful fallback to basic terminal output. - -## Features - -### Multiple Progress Types - -| Type | Use Case | Visual | -|------|----------|--------| -| Spinner | Indeterminate operations | โ ‹ Loading... | -| Progress Bar | Known duration operations | [โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‘โ–‘] 80% | -| Multi-Step | Complex workflows | โœ“ Step 1 โ†’ โ— Step 2 โ†’ โ—‹ Step 3 | -| Download | File transfers | โฌ‡๏ธ 5.2 MB/s ETA 00:03 | -| Operation | General tasks | ๐Ÿ“ฆ Installing Docker... | - -### Automatic Fallback - -Works beautifully with Rich library installed, falls back gracefully to basic terminal output when Rich isn't available. 
- -```python -# Rich installed: Beautiful animated UI -# Rich not installed: Simple but functional text output -``` - -### Operation Type Icons - -| Operation | Icon | -|-----------|------| -| INSTALL | ๐Ÿ“ฆ | -| REMOVE | ๐Ÿ—‘๏ธ | -| UPDATE | ๐Ÿ”„ | -| DOWNLOAD | โฌ‡๏ธ | -| CONFIGURE | โš™๏ธ | -| VERIFY | โœ… | -| ANALYZE | ๐Ÿ” | -| LLM_QUERY | ๐Ÿง  | -| DEPENDENCY_RESOLVE | ๐Ÿ”— | -| ROLLBACK | โช | - -## Installation - -```bash -# Basic functionality (no dependencies) -pip install cortex-linux - -# With beautiful Rich UI (recommended) -pip install cortex-linux[ui] -# or -pip install rich -``` - -## Usage Examples - -### Simple Spinner - -```python -from cortex.progress_indicators import spinner - -with spinner("Analyzing system..."): - result = analyze_system() - -# Output: -# โ ‹ Analyzing system... -# โœ“ Analyzing system... -``` - -### Operation with Updates - -```python -from cortex.progress_indicators import operation, OperationType - -with operation("Installing Docker", OperationType.INSTALL) as op: - op.update("Checking dependencies...") - check_deps() - - op.update("Downloading images...") - download() - - op.update("Configuring...") - configure() - - op.complete("Docker ready!") - -# Output: -# ๐Ÿ“ฆ Installing Docker - Checking dependencies... -# ๐Ÿ“ฆ Installing Docker - Downloading images... -# ๐Ÿ“ฆ Installing Docker - Configuring... -# โœ“ Installing Docker - Docker ready! 
-``` - -### Progress Bar - -```python -from cortex.progress_indicators import progress_bar - -packages = ["nginx", "redis", "postgresql", "nodejs"] - -for pkg in progress_bar(packages, "Installing packages"): - install_package(pkg) - -# Output: -# Installing packages: [โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘โ–‘] 3/4 -``` - -### Download Tracker - -```python -from cortex.progress_indicators import ProgressIndicator - -progress = ProgressIndicator() - -tracker = progress.download_progress(total_bytes=50_000_000, description="Downloading update") - -for chunk in download_stream(): - tracker.update(len(chunk)) - -tracker.complete() - -# Output: -# โฌ‡๏ธ Downloading update [โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‘โ–‘โ–‘โ–‘] 40.0/50.0 MB 5.2 MB/s ETA 00:02 -# โœ“ Downloaded 50.0 MB in 9.6s (5.2 MB/s) -``` - -### Multi-Step Workflow - -```python -from cortex.progress_indicators import ProgressIndicator - -progress = ProgressIndicator() - -tracker = progress.multi_step([ - {"name": "Download", "description": "Downloading package files"}, - {"name": "Verify", "description": "Checking file integrity"}, - {"name": "Extract", "description": "Extracting contents"}, - {"name": "Install", "description": "Installing to system"}, - {"name": "Configure", "description": "Configuring service"}, -], title="Package Installation") - -for i in range(5): - tracker.start_step(i) - do_step(i) - tracker.complete_step(i) - -tracker.finish() - -# Output: -# Package Installation -# โœ“ Download Downloading package files -# โœ“ Verify Checking file integrity -# โœ“ Extract Extracting contents -# โ— Install Installing to system -# โ—‹ Configure Configuring service -``` - -### Status Messages - -```python -from cortex.progress_indicators import get_progress_indicator - -progress = get_progress_indicator() - -progress.print_success("Package installed successfully") -progress.print_error("Installation failed") -progress.print_warning("Disk space low") 
-progress.print_info("Using cached version") - -# Output: -# โœ“ Package installed successfully -# โœ— Installation failed -# โš  Disk space low -# โ„น Using cached version -``` - -## API Reference - -### ProgressIndicator - -Main class for all progress indicators. - -**Constructor:** -```python -ProgressIndicator(use_rich: bool = True) -``` - -**Methods:** - -| Method | Description | -|--------|-------------| -| `operation(title, type, steps)` | Context manager for tracked operations | -| `spinner(message)` | Context manager for indeterminate progress | -| `progress_bar(items, description)` | Iterator with progress display | -| `download_progress(total, description)` | Create download tracker | -| `multi_step(steps, title)` | Create multi-step tracker | -| `print_success(message)` | Print success message | -| `print_error(message)` | Print error message | -| `print_warning(message)` | Print warning message | -| `print_info(message)` | Print info message | - -### OperationType - -Enum of supported operation types: - -```python -class OperationType(Enum): - INSTALL = "install" - REMOVE = "remove" - UPDATE = "update" - DOWNLOAD = "download" - CONFIGURE = "configure" - VERIFY = "verify" - ANALYZE = "analyze" - LLM_QUERY = "llm_query" - DEPENDENCY_RESOLVE = "dependency_resolve" - ROLLBACK = "rollback" - GENERIC = "generic" -``` - -### OperationStep - -Dataclass representing a single step: - -```python -@dataclass -class OperationStep: - name: str - description: str - status: str = "pending" # pending, running, completed, failed, skipped - progress: float = 0.0 - start_time: Optional[datetime] = None - end_time: Optional[datetime] = None - error_message: Optional[str] = None -``` - -### DownloadTracker - -**Methods:** - -| Method | Description | -|--------|-------------| -| `update(bytes)` | Update with bytes received | -| `complete()` | Mark download complete | -| `fail(error)` | Mark download failed | - -### MultiStepTracker - -**Methods:** - -| Method | Description | 
-|--------|-------------| -| `start_step(index)` | Start a step | -| `complete_step(index)` | Complete a step | -| `fail_step(index, error)` | Fail a step | -| `skip_step(index, reason)` | Skip a step | -| `finish()` | Display final summary | - -## Integration with Cortex - -### CLI Integration - -```python -# In cortex/cli.py -from cortex.progress_indicators import get_progress_indicator, OperationType - -progress = get_progress_indicator() - -@cli.command() -def install(package: str): - with progress.operation(f"Installing {package}", OperationType.INSTALL) as op: - op.update("Resolving dependencies...") - deps = resolve_deps(package) - - op.update("Downloading...") - download(package) - - op.update("Installing...") - install(package) - - op.complete(f"{package} installed successfully") -``` - -### LLM Integration - -```python -from cortex.progress_indicators import spinner - -def query_llm(prompt: str) -> str: - with spinner("๐Ÿง  Thinking..."): - response = claude_api.complete(prompt) - return response -``` - -### Batch Operations - -```python -from cortex.progress_indicators import progress_bar - -def install_batch(packages: List[str]): - for pkg in progress_bar(packages, "Installing packages"): - install_single(pkg) -``` - -## Customization - -### Disable Rich (Force Fallback) - -```python -progress = ProgressIndicator(use_rich=False) -``` - -### Custom Operation Tracking - -```python -from cortex.progress_indicators import OperationContext, OperationType - -context = OperationContext( - operation_type=OperationType.INSTALL, - title="Custom Operation", - metadata={"package": "nginx", "version": "1.24"} -) - -# Access timing info -print(f"Started: {context.start_time}") -print(f"Progress: {context.overall_progress:.0%}") -``` - -## Architecture - -``` -โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” -โ”‚ ProgressIndicator โ”‚ -โ”‚ 
โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”‚ -โ”‚ โ”‚ Spinner โ”‚ โ”‚ ProgressBar โ”‚ โ”‚ MultiStep โ”‚ โ”‚ -โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚ -โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ - โ”‚ - โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” - โ–ผ โ–ผ โ–ผ -โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” -โ”‚ Rich โ”‚ โ”‚ Fallback โ”‚ โ”‚ Output โ”‚ -โ”‚ Console โ”‚ โ”‚ Progress โ”‚ โ”‚ Handlers โ”‚ -โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ -``` - -## Testing - -```bash -# Run all tests -pytest tests/test_progress_indicators.py -v - -# Run with coverage -pytest tests/test_progress_indicators.py --cov=cortex.progress_indicators - -# Test Rich integration (if installed) -pytest tests/test_progress_indicators.py -k "Rich" -v -``` - -## Performance - -- Spinner updates: 10 FPS (100ms interval) -- Progress bar: Updates on each iteration -- Multi-step: Renders on state change only -- Memory: Minimal overhead (<1MB) - -## Troubleshooting - -### Rich Not Detected - -```python -from cortex.progress_indicators import RICH_AVAILABLE - -print(f"Rich available: {RICH_AVAILABLE}") - -# Install Rich if needed -# pip install rich -``` - -### Terminal Compatibility - -```python -# Force simple output for non-interactive terminals -import sys - -if not sys.stdout.isatty(): - progress = ProgressIndicator(use_rich=False) -``` - -### Progress Not Showing - -```python -# Ensure stdout is flushed -import sys - -with spinner("Working..."): - sys.stdout.flush() - do_work() -``` - -## 
Contributing - -1. Add new operation types to `OperationType` enum -2. Create corresponding icons in `OPERATION_ICONS` -3. Add tests for new functionality -4. Update documentation - ---- - -**Closes:** #259 diff --git a/docs/PROGRESS_TRACKER.md b/docs/PROGRESS_TRACKER.md deleted file mode 100644 index 640c9c3..0000000 --- a/docs/PROGRESS_TRACKER.md +++ /dev/null @@ -1,446 +0,0 @@ -# Progress Notifications & Status Updates - -## Overview - -The Progress Tracker provides real-time progress updates with time estimates, multi-stage tracking, desktop notifications, and cancellation support for Cortex Linux operations. - -## Features - -- โœ… **Beautiful Progress Bars**: Rich terminal UI with Unicode progress bars -- โœ… **Time Estimation**: Smart ETA calculation based on throughput and historical data -- โœ… **Multi-Stage Tracking**: Track complex operations with multiple sub-tasks -- โœ… **Desktop Notifications**: Optional system notifications for completion/errors -- โœ… **Cancellation Support**: Graceful handling of Ctrl+C with cleanup callbacks -- โœ… **Background Operations**: Async support for non-blocking operations -- โœ… **Fallback Mode**: Plain text output when rich library is unavailable - -## Installation - -```bash -# Install required dependencies -pip install rich plyer - -# Or install from requirements.txt -pip install -r requirements.txt -``` - -## Quick Start - -### Basic Usage - -```python -from progress_tracker import ProgressTracker -import asyncio - -async def install_postgresql(tracker): - # Add stages - update_idx = tracker.add_stage("Update package lists") - download_idx = tracker.add_stage("Download postgresql-15", total_bytes=50_000_000) - install_idx = tracker.add_stage("Installing dependencies") - configure_idx = tracker.add_stage("Configuring database") - test_idx = tracker.add_stage("Running tests") - - # Execute stages - tracker.start_stage(update_idx) - # ... do work ... 
- tracker.complete_stage(update_idx) - - # Download with byte tracking - tracker.start_stage(download_idx) - bytes_downloaded = 0 - while bytes_downloaded < 50_000_000: - # Download chunk - bytes_downloaded += chunk_size - tracker.update_stage_progress(download_idx, processed_bytes=bytes_downloaded) - tracker.display_progress() - tracker.complete_stage(download_idx) - - # ... continue with other stages ... - -# Run with progress tracking -tracker = ProgressTracker("Installing PostgreSQL") -await run_with_progress(tracker, install_postgresql) -``` - -### Example Output - -``` -Installing PostgreSQL... -โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 45% -โฑ๏ธ Estimated time remaining: 2m 15s - -[โœ“] Update package lists (5s) -[โœ“] Download postgresql-15 (1m 23s) -[โ†’] Installing dependencies (current) -[ ] Configuring database -[ ] Running tests -``` - -## API Reference - -### ProgressTracker - -Main class for tracking progress. - -#### Constructor - -```python -ProgressTracker( - operation_name: str, - enable_notifications: bool = True, - notification_on_complete: bool = True, - notification_on_error: bool = True, - console: Optional[Console] = None -) -``` - -**Parameters:** -- `operation_name`: Name of the operation (displayed in progress output) -- `enable_notifications`: Enable desktop notifications (requires `plyer`) -- `notification_on_complete`: Send notification when operation completes -- `notification_on_error`: Send notification when operation fails -- `console`: Rich Console instance (auto-created if None) - -#### Methods - -##### add_stage(name: str, total_bytes: Optional[int] = None) -> int - -Add a new stage to the operation. - -```python -download_idx = tracker.add_stage("Download package", total_bytes=10_000_000) -``` - -##### start() - -Start tracking the operation. - -```python -tracker.start() -``` - -##### start_stage(stage_index: int) - -Begin a specific stage. 
- -```python -tracker.start_stage(download_idx) -``` - -##### update_stage_progress(stage_index: int, progress: float = None, processed_bytes: int = None) - -Update progress for a stage. - -```python -# Update by percentage (0.0 to 1.0) -tracker.update_stage_progress(stage_idx, progress=0.75) - -# Or by bytes processed -tracker.update_stage_progress(download_idx, processed_bytes=7_500_000) -``` - -##### complete_stage(stage_index: int, error: Optional[str] = None) - -Mark a stage as complete or failed. - -```python -# Success -tracker.complete_stage(stage_idx) - -# Failure -tracker.complete_stage(stage_idx, error="Failed to download package") -``` - -##### display_progress() - -Refresh the progress display. - -```python -tracker.display_progress() -``` - -##### complete(success: bool = True, message: Optional[str] = None) - -Mark the entire operation as complete. - -```python -tracker.complete(success=True, message="Installation complete") -``` - -##### cancel(message: str = "Cancelled by user") - -Cancel the operation. - -```python -tracker.cancel("Operation cancelled by user") -``` - -##### setup_cancellation_handler(callback: Optional[Callable] = None) - -Setup Ctrl+C handler with optional cleanup callback. - -```python -def cleanup(): - # Cleanup code here - pass - -tracker.setup_cancellation_handler(callback=cleanup) -``` - -## Advanced Usage - -### With Rich Library (Enhanced UI) - -```python -from progress_tracker import RichProgressTracker - -tracker = RichProgressTracker("Installing Docker") - -# Add stages -stages = [ - tracker.add_stage("Update repositories"), - tracker.add_stage("Download Docker", total_bytes=100_000_000), - tracker.add_stage("Install dependencies"), - tracker.add_stage("Configure daemon"), - tracker.add_stage("Start service") -] - -async with tracker.live_progress(): - for idx in stages: - tracker.start_stage(idx) - # ... do work ... 
- tracker.complete_stage(idx) -``` - -### Background Operations - -```python -import asyncio - -async def long_running_install(tracker): - # Your installation logic - pass - -# Run in background -tracker = ProgressTracker("Background Install") -task = asyncio.create_task(run_with_progress(tracker, long_running_install)) - -# Do other work... -await asyncio.sleep(5) - -# Wait for completion -await task -``` - -### Byte-Based Progress Tracking - -```python -tracker = ProgressTracker("Downloading Files") -download_idx = tracker.add_stage("Download large_file.tar.gz", total_bytes=500_000_000) - -tracker.start() -tracker.start_stage(download_idx) - -# Update as bytes come in -bytes_received = 0 -while bytes_received < 500_000_000: - chunk = await download_chunk() - bytes_received += len(chunk) - tracker.update_stage_progress(download_idx, processed_bytes=bytes_received) - tracker.display_progress() - -tracker.complete_stage(download_idx) -tracker.complete(success=True) -``` - -### Error Handling - -```python -tracker = ProgressTracker("Installing PostgreSQL") -tracker.start() - -try: - download_idx = tracker.add_stage("Download") - tracker.start_stage(download_idx) - - # Attempt download - result = download_package() - - if result.failed: - tracker.complete_stage(download_idx, error=result.error) - tracker.complete(success=False, message="Download failed") - else: - tracker.complete_stage(download_idx) - tracker.complete(success=True) - -except KeyboardInterrupt: - tracker.cancel("Cancelled by user") -except Exception as e: - tracker.complete(success=False, message=str(e)) -``` - -## Integration with Existing Code - -### Integrating with SandboxExecutor - -```python -from sandbox_executor import SandboxExecutor -from progress_tracker import ProgressTracker - -async def install_package_with_progress(package_name: str): - tracker = ProgressTracker(f"Installing {package_name}") - executor = SandboxExecutor() - - # Add stages - update_idx = tracker.add_stage("Update package 
lists") - download_idx = tracker.add_stage(f"Download {package_name}") - install_idx = tracker.add_stage(f"Install {package_name}") - - tracker.start() - tracker.setup_cancellation_handler() - - try: - # Stage 1: Update - tracker.start_stage(update_idx) - result = executor.execute("sudo apt-get update") - if result.failed: - tracker.complete_stage(update_idx, error=result.stderr) - tracker.complete(success=False) - return - tracker.complete_stage(update_idx) - - # Stage 2: Download - tracker.start_stage(download_idx) - result = executor.execute(f"apt-get download {package_name}") - tracker.complete_stage(download_idx) - - # Stage 3: Install - tracker.start_stage(install_idx) - result = executor.execute(f"sudo apt-get install -y {package_name}") - if result.success: - tracker.complete_stage(install_idx) - tracker.complete(success=True) - else: - tracker.complete_stage(install_idx, error=result.stderr) - tracker.complete(success=False) - - except KeyboardInterrupt: - tracker.cancel() -``` - -## Configuration - -### Disabling Notifications - -```python -# Disable all notifications -tracker = ProgressTracker("Operation", enable_notifications=False) - -# Or disable specific notification types -tracker = ProgressTracker( - "Operation", - notification_on_complete=False, # No notification on success - notification_on_error=True # Only notify on errors -) -``` - -### Custom Console - -```python -from rich.console import Console - -# Custom console with specific settings -console = Console(width=120, force_terminal=True) -tracker = ProgressTracker("Operation", console=console) -``` - -## Testing - -Run the test suite: - -```bash -# Run all tests -pytest src/test_progress_tracker.py -v - -# Run with coverage -pytest src/test_progress_tracker.py --cov=progress_tracker --cov-report=html - -# Run specific test class -pytest src/test_progress_tracker.py::TestProgressTracker -v -``` - -## Requirements - -### Python Dependencies - -- **Required**: Python 3.8+ -- **Recommended**: 
`rich` for enhanced UI (gracefully degrades without it) -- **Optional**: `plyer` for desktop notifications - -### System Dependencies - -None - pure Python implementation - -## Performance Considerations - -- **Memory**: Minimal overhead (~1KB per stage) -- **CPU**: Negligible impact (<0.1% CPU) -- **Thread-safe**: Uses asyncio for concurrent operations -- **Scalability**: Tested with 100+ concurrent stages - -## Troubleshooting - -### Rich library not rendering correctly - -**Solution**: Ensure terminal supports Unicode and ANSI colors - -```python -# Force disable rich if needed -import progress_tracker -progress_tracker.RICH_AVAILABLE = False -``` - -### Notifications not working - -**Solution**: Install plyer and check system notification support - -```bash -pip install plyer - -# Test notifications -python -c "from plyer import notification; notification.notify(title='Test', message='Working')" -``` - -### Progress bars flickering - -**Solution**: Use `Live` context or reduce update frequency - -```python -# Update less frequently -if iterations % 10 == 0: # Update every 10th iteration - tracker.display_progress() -``` - -## Examples - -See `progress_tracker.py` main section for a complete working example demonstrating all features. - -## License - -MIT License - See LICENSE file for details - -## Contributing - -1. Fork the repository -2. Create a feature branch -3. Add tests for new features -4. Ensure all tests pass: `pytest` -5. 
Submit a pull request - -## Support - -For issues and questions: -- GitHub Issues: https://github.com/cortexlinux/cortex/issues -- Discord: https://discord.gg/uCqHvxjU83 -- Email: mike@cortexlinux.com - diff --git a/docs/PR_MANAGEMENT_INSTRUCTIONS.md b/docs/PR_MANAGEMENT_INSTRUCTIONS.md deleted file mode 100644 index 20f2095..0000000 --- a/docs/PR_MANAGEMENT_INSTRUCTIONS.md +++ /dev/null @@ -1,574 +0,0 @@ -# CORTEX PR MANAGEMENT SYSTEM -## Executive Instructions - ---- - -## Bottom Line - -**You have 11 PRs = $575 in bounties waiting** - -I've created **3 specialized scripts** that handle different PR workflows: - -1. **cortex-pr-dashboard.sh** - Master control center (START HERE) -2. **review-contributor-prs.sh** - Guided review for 5 contributor PRs -3. **merge-mike-prs.sh** - Batch merge your 6 PRs - ---- - -## The Reality Check - -### PR Status Breakdown - -| Type | Count | Total Bounties | Who's Waiting | -|------|-------|----------------|---------------| -| **Critical** | 1 | $100 | @chandrapratamar - 9 days | -| **High Priority** | 4 | $475 | 3 contributors - 7-8 days | -| **Your PRs** | 6 | $0 | Nobody (you can merge anytime) | - -### The Blocker - -**PR #17 (Package Manager Wrapper) = THE MVP BLOCKER** - -- Everything waits on this -- 9 days old -- $100 bounty -- Author: @chandrapratamar - -**Action:** Review this first, today if possible. - ---- - -## Quick Start (Recommended) - -### One-Command Dashboard - -```bash -cd ~/Downloads -chmod +x cortex-pr-dashboard.sh -mv cortex-pr-dashboard.sh ~/cortex/ -cd ~/cortex && bash cortex-pr-dashboard.sh -``` - -**What happens:** -1. Shows complete PR overview -2. Highlights PR #17 as critical -3. Offers 6 quick actions: - - Review PR #17 (THE BLOCKER) - - Review all contributor PRs - - Batch merge your PRs - - View in browser - - Generate bounty report - - Post Discord update - -**Time:** 5-60 minutes depending on what you choose - ---- - -## The 3 Scripts Explained - -### 1. 
cortex-pr-dashboard.sh (Master Control) - -**Purpose:** Bird's-eye view and quick action center - -**Features:** -- Complete PR status overview -- Bounty calculations ($575 pending, $1,150 at 2x) -- One-click access to other workflows -- Discord announcement generator -- Bounty payment report - -**Use when:** You want to see everything and decide what to tackle - -**Time:** 2 minutes to view + action time - ---- - -### 2. review-contributor-prs.sh (Guided Review) - -**Purpose:** Systematically review 5 contributor PRs - -**Features:** -- Reviews in priority order (PR #17 first) -- Shows review checklist before each PR -- Interactive: view/approve/change/comment/skip -- Auto-posts thank-you messages on approval -- Tracks bounties owed in CSV file -- Generates Discord announcement - -**Use when:** You're ready to approve/merge contributor work - -**Time:** 30-60 minutes for all 5 PRs - -**Process flow:** -``` -For each PR: -โ”œโ”€ Show: Developer, feature, bounty, priority -โ”œโ”€ Display: Review checklist -โ”œโ”€ Offer: View in browser -โ”œโ”€ Ask: Approve / Request changes / Comment / Skip -โ”œโ”€ If approved: Post thank-you, merge, track bounty -โ””โ”€ Move to next PR -``` - -**What gets tracked:** -- Creates `~/cortex/bounties_owed.csv` -- Records: PR#, Developer, Feature, Amount, Date, Status -- Shows total owed at end - ---- - -### 3. 
merge-mike-prs.sh (Your PRs) - -**Purpose:** Quickly merge your 6 PRs to clear backlog - -**Features:** -- Batch processes PRs #20, #22, #23, #34, #36, #41 -- Checks mergeable status -- Asks confirmation for each -- Squash merges + deletes branches -- Shows progress - -**Use when:** You want to clear your PR backlog fast - -**Time:** 5-10 minutes - -**PRs it merges:** -- PR #41: LLM Router (Issue #34) -- PR #36: Logging System (Issue #29) -- PR #34: Context Memory (Issue #24) -- PR #23: Error Parser (Issue #13) -- PR #22: File uploads -- PR #20: File uploads (critical/ready) - ---- - -## Recommended Workflow - -### Today (30 minutes) - -**Step 1: Launch Dashboard** -```bash -cd ~/cortex && bash cortex-pr-dashboard.sh -``` - -**Step 2: Choose Option 1 (Review PR #17)** -- This opens THE critical blocker -- Review the code -- Approve or request changes -- **Impact:** Unblocks entire MVP if approved - -**Step 3: If Approved, Choose Option 6 (Discord)** -- Post announcement that PR #17 merged -- Celebrate unblocking MVP -- Show momentum to team - -**Total time: 30 minutes** -**Impact: MVP BLOCKER cleared + team energized** - ---- - -### This Week (2 hours) - -**Monday:** Review PR #17 (done above โœ…) - -**Wednesday:** -```bash -cd ~/cortex && bash review-contributor-prs.sh -``` -- Review PRs #37, #38, #21 -- Approve quality work -- Request changes on any issues -- **Impact:** $475 in bounties processed - -**Friday:** -```bash -cd ~/cortex && bash merge-mike-prs.sh -``` -- Merge all 6 of your PRs -- Clear your backlog -- **Impact:** 6 features merged, dependencies unblocked - -**Total: 2 hours, $575 in bounties processed, 7 PRs merged** - ---- - -## What Each Script Produces - -### cortex-pr-dashboard.sh Output - -``` -๐Ÿ“Š PR STATUS OVERVIEW -Total Open PRs: 11 - โ”œโ”€ From Contributors: 5 (Need review) - โ””โ”€ From Mike: 6 (Can merge anytime) - -๐Ÿ’ฐ ESTIMATED BOUNTIES AT STAKE -Contributor PRs: $575 -At 2x bonus: $1,150 - -๐Ÿ”ด CRITICAL PRIORITY -PR #17: Package 
Manager Wrapper -Author: @chandrapratamar -Age: 9 days old -Bounty: $100 -Impact: โš ๏ธ MVP BLOCKER - -[Interactive menu with 6 options] -``` - ---- - -### review-contributor-prs.sh Output - -``` -๐Ÿ“‹ PR #17 - Package Manager Wrapper (Issue #7) -๐Ÿ‘ค Developer: @chandrapratamar -๐Ÿ’ฐ Bounty: $100 -๐Ÿ”ฅ Priority: CRITICAL_MVP_BLOCKER - -REVIEW CHECKLIST - [ ] Code implements feature - [ ] Unit tests >80% coverage - [ ] Documentation included - [ ] Integrates with architecture - [ ] No bugs/security issues - -Actions: [v]iew [a]pprove [c]hange [m]comment [s]kip [q]uit -``` - -**If you approve:** -- Posts thank-you message with bounty details -- Merges PR automatically -- Records in bounties_owed.csv -- Shows running total - ---- - -### merge-mike-prs.sh Output - -``` -๐Ÿš€ CORTEX - MERGE MIKE'S IMPLEMENTATION PRs - -PR #41 -Title: LLM Router - Multi-Provider Support -State: OPEN -Mergeable: MERGEABLE - -Merge this PR? (y/n) -[Interactive confirmation for each PR] -``` - ---- - -## Bounty Tracking System - -### The CSV File - -Location: `~/cortex/bounties_owed.csv` - -**Format:** -```csv -PR,Developer,Feature,Bounty_Amount,Date_Merged,Status -17,chandrapratamar,Package Manager Wrapper,100,2025-11-17,PENDING -37,AlexanderLuzDH,Progress Notifications,125,2025-11-17,PENDING -``` - -**Uses:** -1. Track what you owe -2. Process payments systematically -3. Update status when paid -4. Calculate totals at funding (2x bonus) - -**Payment workflow:** -1. PR merges โ†’ Entry created with "PENDING" -2. You process payment โ†’ Update status to "PAID" -3. 
At funding โ†’ Calculate 2x bonus from all PAID entries - ---- - -## Strategic Value - -### Time Savings - -**Traditional approach:** -- Review 11 PRs manually: 3-4 hours -- Track bounties in spreadsheet: 30 minutes -- Write thank-you messages: 30 minutes -- Post Discord updates: 15 minutes -- **Total: 4-5 hours** - -**With these scripts:** -- Dashboard overview: 2 minutes -- Review workflow: 30-60 minutes -- Batch merge: 5-10 minutes -- Auto-tracking: 0 minutes -- Auto-messages: 0 minutes -- **Total: 37-72 minutes** - -**Savings: 75-85% time reduction** - ---- - -### Business Impact - -**For Contributors:** -- โœ… Fast response time (professional) -- โœ… Clear thank-you messages -- โœ… Bounty coordination automated -- โœ… 2x bonus reminder included - -**For Investors:** -- โœ… Shows systematic team management -- โœ… Demonstrates execution velocity -- โœ… Professional bounty tracking -- โœ… Clear MVP progress (when #17 merges) - -**For MVP:** -- โœ… PR #17 unblocks everything -- โœ… Quick merges maintain momentum -- โœ… February timeline stays on track - ---- - -## Troubleshooting - -### "gh: command not found" - -```bash -brew install gh -gh auth login -``` - -### "GITHUB_TOKEN not found" - -```bash -echo 'export GITHUB_TOKEN="your_token"' >> ~/.zshrc -source ~/.zshrc -``` - -### "Could not post review" - -- Check token permissions (needs repo write access) -- Try manual review through web interface -- Script will still track bounties locally - -### "Merge conflicts detected" - -- Script will skip PRs with conflicts -- Needs manual resolution in GitHub web UI -- Re-run script after conflicts resolved - ---- - -## The PR #17 Decision Tree - -Since PR #17 is THE blocker, here's how to decide: - -### If Code Looks Good: -```bash -# Approve and merge immediately -Choose option 1 in dashboard -Press 'a' to approve -``` - -**Result:** MVP unblocked, $100 bounty owed, team energized - -### If Code Needs Minor Fixes: -```bash -# Request specific changes -Choose option 1 
in dashboard -Press 'c' to request changes -Enter what needs fixing -``` - -**Result:** Clear feedback, fast iteration, merge within 1-2 days - -### If Code Has Major Issues: -```bash -# Comment with concerns -Choose option 1 in dashboard -Press 'm' to comment -"Thanks for the effort! Let's discuss approach in Discord first." -``` - -**Result:** Protect quality, redirect collaboratively - -### If Unsure: -```bash -# Ask dhvil or aliraza556 for technical review -Post comment: "@dhvll @aliraza556 can you review this? Need second opinion." -``` - -**Result:** Get expert input before merging critical feature - ---- - -## What Happens After Merging - -### Immediate (Automated): - -1. **Thank-you message posted** with: - - Bounty amount and payment timeline - - 2x bonus reminder - - Payment method coordination - -2. **Bounty tracked** in CSV: - - Developer name - - Amount owed - - Date merged - - Status: PENDING - -3. **Branch deleted** automatically - -### Within 48 Hours (Manual): - -1. **Process payment:** - - Contact developer via GitHub comment - - Coordinate payment method (crypto/PayPal) - - Send payment - - Update CSV status to PAID - -2. **Post Discord announcement:** - - Celebrate the merge - - Thank contributor publicly - - Show progress to team - -### At Funding (February 2025): - -1. 
**Calculate 2x bonuses:** - - Read bounties_owed.csv - - Sum all PAID entries - - Pay matching bonus - ---- - -## Integration with Other Tools - -### Works With: - -โœ… **Your existing automation:** -- create_github_pr.py (for uploading code) -- GitHub webhooks โ†’ Discord -- Bounty tracking system - -โœ… **Developer welcome system:** -- When PRs merge, welcome messages already sent -- New PRs can use same approval templates - -โœ… **Funding preparation:** -- Bounty CSV = proof of systematic management -- Merge velocity = execution capability -- Professional comments = team culture - ---- - -## Success Metrics - -### You'll know it's working when: - -**Within 24 hours:** -- [ ] PR #17 reviewed (approved or changes requested) -- [ ] Dashboard shows clear status -- [ ] Discord announcement posted - -**Within 1 week:** -- [ ] 3-5 PRs merged -- [ ] $300-500 in bounties processed -- [ ] bounties_owed.csv tracking multiple payments -- [ ] Contributors respond positively - -**Within 2 weeks:** -- [ ] PR backlog under 5 PRs -- [ ] All contributor PRs reviewed -- [ ] Your PRs cleared -- [ ] MVP unblocked (if #17 merged) - ---- - -## Files Summary - -| File | Purpose | Time to Execute | Impact | -|------|---------|----------------|---------| -| **cortex-pr-dashboard.sh** | Master control | 2 min + actions | Complete overview | -| **review-contributor-prs.sh** | Review workflow | 30-60 min | Process all 5 contributor PRs | -| **merge-mike-prs.sh** | Batch merge | 5-10 min | Clear your 6 PRs | - -All scripts are in `/mnt/user-data/outputs/` ready to download. - ---- - -## My Recommendation - -**Execute this workflow TODAY:** - -```bash -# 1. Download and setup (2 min) -cd ~/Downloads -chmod +x cortex-pr-dashboard.sh review-contributor-prs.sh merge-mike-prs.sh -mv *.sh ~/cortex/ - -# 2. Launch dashboard (30 min) -cd ~/cortex && bash cortex-pr-dashboard.sh -# Choose option 1: Review PR #17 -# Approve if quality is good - -# 3. 
Post to Discord -# Copy/paste the generated announcement - -# Done for today! -``` - -**Tomorrow or this week:** - -```bash -# Review remaining contributor PRs -cd ~/cortex && bash review-contributor-prs.sh - -# Merge your PRs -cd ~/cortex && bash merge-mike-prs.sh -``` - ---- - -## What This Unlocks - -### If PR #17 Merges: - -โœ… **Issue #7 COMPLETE** - Package Manager working -โœ… **Issue #12 unblocked** - Dependencies can be resolved -โœ… **Issue #10 unblocked** - Installations can be verified -โœ… **Issue #14 unblocked** - Rollback system can function -โœ… **MVP demonstrable** - Core workflow works end-to-end -โœ… **February funding timeline secure** - Critical path cleared - -### The Domino Effect: - -``` -PR #17 merges - โ†“ -5 MVP features unblocked - โ†“ -Contributors submit dependent PRs - โ†“ -3-5 more features complete by end of month - โ†“ -MVP demo ready for investors - โ†“ -February funding timeline on track - โ†“ -$2-3M raised - โ†“ -2x bounties paid to all contributors - โ†“ -Full-time team hired - โ†“ -Cortex Linux becomes reality -``` - -**It all starts with reviewing PR #17.** - ---- - -โœ… **Ready to execute. Download the 3 scripts and launch the dashboard.** - -**What's the priority - review PR #17 now, or download and explore first?** diff --git a/docs/PR_SUBMISSION_GUIDE.md b/docs/PR_SUBMISSION_GUIDE.md deleted file mode 100644 index 5837ec5..0000000 --- a/docs/PR_SUBMISSION_GUIDE.md +++ /dev/null @@ -1,232 +0,0 @@ -# ๐Ÿš€ Pull Request Submission Guide - Issue #27 - -## โœ… Implementation Complete! - -All code is ready and tested. Follow these steps to submit the PR and claim the **$50 bounty**. 
- ---- - -## ๐Ÿ“ฆ What Was Implemented - -โœ… **Progress bar implementation** - Beautiful Unicode bars with rich -โœ… **Time estimation** - Smart ETA with adaptive calculation -โœ… **Multi-stage tracking** - Unlimited stages with individual progress -โœ… **Background operations** - Full async/await support -โœ… **Desktop notifications** - Cross-platform notifications -โœ… **Cancellation support** - Graceful Ctrl+C handling -โœ… **35 comprehensive tests** - 100% passing -โœ… **Complete documentation** - API docs, examples, integration guide - ---- - -## ๐Ÿ”ง Steps to Submit PR - -### Step 1: Fork the Repository - -1. Go to: https://github.com/cortexlinux/cortex -2. Click the **"Fork"** button in the top right -3. Wait for your fork to be created at `https://github.com/AlexanderLuzDH/cortex` - -### Step 2: Add Your Fork as Remote - -```bash -cd D:\Projects\ten_fifty_nine\cortex_progress_bounty - -# Add your fork as a remote -git remote add fork https://github.com/AlexanderLuzDH/cortex.git - -# Verify remotes -git remote -v -``` - -### Step 3: Push Your Branch - -```bash -# Push the feature branch to your fork -git push fork feature/progress-notifications-issue-27 -``` - -### Step 4: Create Pull Request - -1. Go to your fork: https://github.com/AlexanderLuzDH/cortex -2. GitHub will show a banner: **"Compare & pull request"** - Click it -3. OR go to: https://github.com/cortexlinux/cortex/compare/main...AlexanderLuzDH:feature/progress-notifications-issue-27 - -### Step 5: Fill Out PR Template - -**Title:** -``` -feat: Add comprehensive progress notifications & status updates (Issue #27) -``` - -**Description:** -```markdown -## ๐ŸŽฏ Summary - -Implements comprehensive progress tracking system for Cortex Linux as requested in #27. 
- -## โœ… Features Implemented - -- โœ… **Progress bar implementation** - Beautiful terminal progress bars using rich library -- โœ… **Time estimation** - Smart ETA calculation based on throughput -- โœ… **Multi-stage tracking** - Track complex operations with unlimited stages -- โœ… **Background operations** - Full async/await implementation -- โœ… **Desktop notifications** - Cross-platform notifications (optional) -- โœ… **Cancellation support** - Graceful Ctrl+C handling with cleanup callbacks -- โœ… **Comprehensive tests** - 35 tests, 100% passing -- โœ… **Complete documentation** - API docs, examples, integration guide - -## ๐Ÿ“Š Test Results - -``` -============================= test session starts ============================= -collected 35 items - -test_progress_tracker.py::TestProgressStage::... PASSED [100%] - -============================= 35 passed in 2.98s =============================== -``` - -## ๐Ÿ“ Files Added - -- `src/progress_tracker.py` - Core implementation (485 lines) -- `src/test_progress_tracker.py` - Test suite (350 lines, 35 tests) -- `docs/PROGRESS_TRACKER.md` - Complete documentation -- `examples/standalone_demo.py` - Cross-platform demo -- `examples/progress_demo.py` - Integration example -- `src/requirements.txt` - Updated dependencies -- `IMPLEMENTATION_SUMMARY.md` - Implementation overview - -## ๐ŸŽจ Example Output - -``` -Installing PostgreSQL... -โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 45% -โฑ๏ธ Estimated time remaining: 2m 15s - - โœ“ Update package lists (5s) - โœ“ Download postgresql-15 (1m 23s) - โ†’ Installing dependencies (current) - Configuring database - Running tests -``` - -## ๐Ÿ”ง Testing Instructions - -```bash -# Install dependencies -pip install -r src/requirements.txt - -# Run tests -cd src -pytest test_progress_tracker.py -v - -# Run demo -cd .. 
-python examples/standalone_demo.py -``` - -## ๐Ÿ“š Documentation - -See `docs/PROGRESS_TRACKER.md` for: -- Complete API reference -- Usage examples -- Integration patterns -- Configuration options -- Troubleshooting guide - -## ๐ŸŽฏ Acceptance Criteria - -All requirements from Issue #27 have been met: - -- โœ… Progress bar implementation -- โœ… Time estimation based on package size -- โœ… Multi-stage tracking -- โœ… Background mode support -- โœ… Desktop notifications (optional) -- โœ… Cancellation handling -- โœ… Tests included -- โœ… Documentation - -## ๐Ÿ’ฐ Bounty - -Claiming $50 bounty as specified in Issue #27. - -## ๐Ÿ“ž Contact - -Happy to address any feedback or make adjustments! - -GitHub: @AlexanderLuzDH - -Closes #27 -``` - -### Step 6: Submit and Wait - -1. Click **"Create pull request"** -2. The maintainer will review your code -3. Address any feedback if requested -4. Once merged, you get the **$50 bounty**! - ---- - -## ๐ŸŽฏ Quick Commands Reference - -```bash -# If you need to make changes after pushing: -git add -git commit -m "fix: address review feedback" -git push fork feature/progress-notifications-issue-27 - -# Update from main branch: -git fetch origin -git rebase origin/main -git push fork feature/progress-notifications-issue-27 --force-with-lease -``` - ---- - -## โœจ Implementation Highlights - -### Production-Ready Code -- Full type hints throughout -- Comprehensive error handling -- Cross-platform compatibility -- Zero warnings or errors - -### Excellent Test Coverage -- 35 unit tests covering all features -- Integration tests -- Edge case handling -- Async operation testing -- 100% pass rate - -### Complete Documentation -- API reference with examples -- Integration guide -- Troubleshooting section -- Configuration options - -### Beautiful UX -- Modern terminal UI with rich -- Unicode progress bars -- Color-coded status -- Clear time estimates - ---- - -## ๐Ÿ’ฐ Expected Timeline - -1. **Submit PR**: Today (5 minutes) -2. 
**Code Review**: 1-3 days -3. **Merge**: After approval -4. **Payment**: Upon merge ($50) - ---- - -## ๐ŸŽ‰ You're Ready! - -All code is complete, tested, and documented. Just follow the steps above to submit your PR and claim the bounty! - -**Good luck! ๐Ÿš€** - diff --git a/docs/ROADMAP.md b/docs/ROADMAP.md deleted file mode 100644 index c7f74c5..0000000 --- a/docs/ROADMAP.md +++ /dev/null @@ -1,600 +0,0 @@ -# Cortex Linux - Improvement Roadmap - -**Created:** November 2025 -**Last Updated:** November 2025 -**Status:** Active Development - ---- - -## Priority Levels - -| Level | Description | Timeline | -|-------|-------------|----------| -| ๐Ÿ”ด **Critical** | Security/breaking issues - fix immediately | 1-3 days | -| ๐ŸŸ  **High** | Major improvements for quality and UX | 1-2 weeks | -| ๐ŸŸก **Medium** | Maintainability enhancements | 2-4 weeks | -| ๐ŸŸข **Low** | Nice-to-haves and polish | Ongoing | - ---- - -## Phase 1: Critical Fixes (Days 1-3) - -### ๐Ÿ”ด C-1: Fix Shell Injection Vulnerability -**File:** `cortex/coordinator.py` -**Lines:** 144-150 -**Risk:** Commands from LLM can execute arbitrary shell code - -**Before:** -```python -result = subprocess.run( - step.command, - shell=True, - capture_output=True, - text=True, - timeout=self.timeout -) -``` - -**After:** -```python -import shlex - -# Validate command first -validated_cmd = self._validate_and_sanitize(step.command) -result = subprocess.run( - shlex.split(validated_cmd), - shell=False, - capture_output=True, - text=True, - timeout=self.timeout -) -``` - -**Effort:** 2-4 hours - ---- - -### ๐Ÿ”ด C-2: Create Root requirements.txt -**Issue:** No root requirements file - installation fails - -**Action:** Create `/requirements.txt`: -``` -# Core dependencies -anthropic>=0.18.0 -openai>=1.0.0 - -# Standard library extensions -typing-extensions>=4.0.0 -``` - -**Effort:** 15 minutes - ---- - -### ๐Ÿ”ด C-3: Fix CI/CD Pipeline -**File:** `.github/workflows/automation.yml` -**Issue:** Wrong directory name, 
silently passes failures - -**Before:** -```yaml -if [ -d tests ]; then - python -m pytest tests/ || echo "Tests not yet implemented" -``` - -**After:** -```yaml -- name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements.txt - pip install pytest pytest-cov - -- name: Run tests - run: | - python -m pytest test/ -v --cov=cortex --cov-report=xml - -- name: Upload coverage - uses: codecov/codecov-action@v3 -``` - -**Effort:** 1-2 hours - ---- - -## Phase 2: High Priority Improvements (Week 1-2) - -### ๐ŸŸ  H-1: Reorganize Directory Structure -**Current (Problematic):** -``` -cortex/ -โ”œโ”€โ”€ cortex/ # Core module -โ”œโ”€โ”€ LLM/ # Uppercase, separate -โ”œโ”€โ”€ src/ # More modules here -โ”œโ”€โ”€ test/ # Tests -โ”œโ”€โ”€ *.py # Root-level modules -โ””โ”€โ”€ *.sh # Shell scripts -``` - -**Proposed:** -``` -cortex/ -โ”œโ”€โ”€ cortex/ -โ”‚ โ”œโ”€โ”€ __init__.py -โ”‚ โ”œโ”€โ”€ cli.py -โ”‚ โ”œโ”€โ”€ coordinator.py -โ”‚ โ”œโ”€โ”€ packages.py -โ”‚ โ”œโ”€โ”€ llm/ -โ”‚ โ”‚ โ”œโ”€โ”€ __init__.py -โ”‚ โ”‚ โ”œโ”€โ”€ interpreter.py -โ”‚ โ”‚ โ”œโ”€โ”€ router.py -โ”‚ โ”‚ โ””โ”€โ”€ providers/ -โ”‚ โ”œโ”€โ”€ security/ -โ”‚ โ”‚ โ”œโ”€โ”€ __init__.py -โ”‚ โ”‚ โ””โ”€โ”€ sandbox.py -โ”‚ โ”œโ”€โ”€ hardware/ -โ”‚ โ”‚ โ”œโ”€โ”€ __init__.py -โ”‚ โ”‚ โ””โ”€โ”€ profiler.py -โ”‚ โ”œโ”€โ”€ history/ -โ”‚ โ”‚ โ”œโ”€โ”€ __init__.py -โ”‚ โ”‚ โ””โ”€โ”€ tracker.py -โ”‚ โ””โ”€โ”€ utils/ -โ”‚ โ”œโ”€โ”€ __init__.py -โ”‚ โ”œโ”€โ”€ logging.py -โ”‚ โ””โ”€โ”€ commands.py -โ”œโ”€โ”€ tests/ -โ”‚ โ”œโ”€โ”€ unit/ -โ”‚ โ”œโ”€โ”€ integration/ -โ”‚ โ””โ”€โ”€ conftest.py -โ”œโ”€โ”€ docs/ -โ”œโ”€โ”€ scripts/ -โ””โ”€โ”€ examples/ -``` - -**Effort:** 4-8 hours - ---- - -### ๐ŸŸ  H-2: Add Comprehensive Installation Docs -**Create:** `docs/INSTALLATION.md` - -**Content to include:** -- System requirements (Ubuntu 24.04+, Python 3.10+) -- Installing Firejail for sandbox support -- API key setup (OpenAI, Anthropic) -- Virtual environment setup -- First run verification -- 
Troubleshooting common issues - -**Effort:** 2-3 hours - ---- - -### ๐ŸŸ  H-3: Extract Shared Command Utility -**Issue:** `_run_command()` duplicated in 4+ files - -**Create:** `cortex/utils/commands.py` -```python -import subprocess -from typing import Tuple, List, Optional -from dataclasses import dataclass - -@dataclass -class CommandResult: - success: bool - stdout: str - stderr: str - return_code: int - -def run_command( - cmd: List[str], - timeout: int = 30, - capture_output: bool = True -) -> CommandResult: - """Execute a command safely with timeout.""" - try: - result = subprocess.run( - cmd, - capture_output=capture_output, - text=True, - timeout=timeout - ) - return CommandResult( - success=result.returncode == 0, - stdout=result.stdout, - stderr=result.stderr, - return_code=result.returncode - ) - except subprocess.TimeoutExpired: - return CommandResult(False, "", "Command timed out", -1) - except FileNotFoundError: - return CommandResult(False, "", f"Command not found: {cmd[0]}", -1) -``` - -**Effort:** 2-3 hours - ---- - -### ๐ŸŸ  H-4: Add Dangerous Command Patterns -**File:** `src/sandbox_executor.py` -**Lines:** 114-125 - -**Add patterns:** -```python -DANGEROUS_PATTERNS = [ - # Existing patterns... 
- r'rm\s+-rf\s+[/\*]', - r'dd\s+if=', - # NEW patterns to add: - r'curl\s+.*\|\s*sh', - r'wget\s+.*\|\s*sh', - r'curl\s+.*\|\s*bash', - r'wget\s+.*\|\s*bash', - r'\beval\s+', - r'python\s+-c\s+["\'].*exec', - r'base64\s+-d\s+.*\|', - r'>\s*/etc/', - r'chmod\s+777', - r'chmod\s+\+s', -] -``` - -**Effort:** 1 hour - ---- - -### ๐ŸŸ  H-5: Implement API Retry Logic -**File:** `LLM/interpreter.py` - -**Add retry decorator:** -```python -import time -from functools import wraps - -def retry_with_backoff(max_retries=3, base_delay=1): - def decorator(func): - @wraps(func) - def wrapper(*args, **kwargs): - for attempt in range(max_retries): - try: - return func(*args, **kwargs) - except (RuntimeError, ConnectionError) as e: - if attempt == max_retries - 1: - raise - delay = base_delay * (2 ** attempt) - time.sleep(delay) - return func(*args, **kwargs) - return wrapper - return decorator -``` - -**Effort:** 1-2 hours - ---- - -### ๐ŸŸ  H-6: Standardize Python Version -**Files to update:** -- `setup.py`: Change to `python_requires=">=3.10"` -- `README.md`: Update to "Python 3.10+" -- `.github/workflows/automation.yml`: Test on 3.10, 3.11, 3.12 - -**Effort:** 30 minutes - ---- - -### ๐ŸŸ  H-7: Add Security Scanning to CI -**File:** `.github/workflows/automation.yml` - -**Add jobs:** -```yaml -security: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Run Bandit - run: | - pip install bandit - bandit -r cortex/ -ll - - - name: Check dependencies - run: | - pip install safety - safety check -r requirements.txt -``` - -**Effort:** 1 hour - ---- - -### ๐ŸŸ  H-8: Add Input Validation -**All user-facing functions need validation** - -**Example for `cli.py`:** -```python -import re - -def validate_software_name(name: str) -> str: - """Validate and sanitize software name input.""" - if not name or not name.strip(): - raise ValueError("Software name cannot be empty") - - # Remove potentially dangerous characters - sanitized = re.sub(r'[;&|`$]', '', name) - - # 
Limit length - if len(sanitized) > 200: - raise ValueError("Software name too long") - - return sanitized.strip() -``` - -**Effort:** 2-3 hours - ---- - -## Phase 3: Medium Priority (Weeks 2-4) - -### ๐ŸŸก M-1: Implement Dependency Injection -**Pattern to follow:** - -```python -# Before (hard coupling) -class CortexCLI: - def install(self, software): - interpreter = CommandInterpreter(api_key=self._get_api_key()) - -# After (dependency injection) -class CortexCLI: - def __init__(self, interpreter: Optional[CommandInterpreter] = None): - self._interpreter = interpreter - - def install(self, software): - interpreter = self._interpreter or CommandInterpreter(...) -``` - -**Effort:** 4-6 hours - ---- - -### ๐ŸŸก M-2: Centralize Logging Configuration -**Create:** `cortex/utils/logging.py` - -```python -import logging -import sys -from pathlib import Path - -def setup_logging( - level: int = logging.INFO, - log_file: Optional[Path] = None -) -> logging.Logger: - """Configure logging for the entire application.""" - logger = logging.getLogger('cortex') - logger.setLevel(level) - - # Console handler - console = logging.StreamHandler(sys.stderr) - console.setLevel(logging.WARNING) - console.setFormatter(logging.Formatter( - '%(levelname)s: %(message)s' - )) - logger.addHandler(console) - - # File handler (if specified) - if log_file: - file_handler = logging.FileHandler(log_file) - file_handler.setFormatter(logging.Formatter( - '%(asctime)s - %(name)s - %(levelname)s - %(message)s' - )) - logger.addHandler(file_handler) - - return logger -``` - -**Effort:** 2-3 hours - ---- - -### ๐ŸŸก M-3: Add Test Coverage Targets -**Update CI to enforce coverage:** - -```yaml -- name: Check coverage - run: | - coverage=$(python -m pytest --cov=cortex --cov-fail-under=70) -``` - -**Target milestones:** -- Week 2: 60% coverage -- Week 4: 70% coverage -- Week 8: 80% coverage - -**Effort:** Ongoing - ---- - -### ๐ŸŸก M-4: Add Integration Tests -**Create:** 
`tests/integration/test_install_flow.py` - -```python -import pytest -from unittest.mock import Mock, patch - -class TestInstallationFlow: - """End-to-end installation flow tests.""" - - @pytest.fixture - def mock_api(self): - with patch('cortex.llm.interpreter.OpenAI') as mock: - yield mock - - def test_full_install_dry_run(self, mock_api): - """Test complete installation flow in dry-run mode.""" - # Setup - mock_api.return_value.chat.completions.create.return_value = ... - - # Execute - result = cli.install("docker", dry_run=True) - - # Verify - assert result == 0 -``` - -**Effort:** 4-6 hours - ---- - -### ๐ŸŸก M-5: Implement Response Caching -**Create:** `cortex/utils/cache.py` - -```python -from functools import lru_cache -from typing import Optional -import hashlib - -class LLMCache: - """Simple cache for LLM responses.""" - - def __init__(self, max_size: int = 100): - self._cache = {} - self._max_size = max_size - - def get(self, prompt: str) -> Optional[str]: - key = hashlib.sha256(prompt.encode()).hexdigest() - return self._cache.get(key) - - def set(self, prompt: str, response: str) -> None: - if len(self._cache) >= self._max_size: - # Remove oldest entry - self._cache.pop(next(iter(self._cache))) - key = hashlib.sha256(prompt.encode()).hexdigest() - self._cache[key] = response -``` - -**Effort:** 2-3 hours - ---- - -### ๐ŸŸก M-6: Add Type Hints Throughout -**Files needing type hints:** -- `cortex/cli.py` - return types -- `context_memory.py` - all methods -- `logging_system.py` - all methods - -**Run mypy:** -```bash -mypy cortex/ --ignore-missing-imports -``` - -**Effort:** 3-4 hours - ---- - -### ๐ŸŸก M-7: Remove Duplicate Files -**Delete:** -- `deploy_jesse_system (1).sh` -- `README_DEPENDENCIES (1).md` - -**Effort:** 5 minutes - ---- - -### ๐ŸŸก M-8: Use XDG Base Directory Standard -**Current:** `/var/lib/cortex/history.db` -**Should be:** `~/.local/share/cortex/history.db` - -```python -from pathlib import Path -import os - -def get_data_dir() -> 
Path: - """Get XDG-compliant data directory.""" - xdg_data = os.environ.get('XDG_DATA_HOME', Path.home() / '.local/share') - data_dir = Path(xdg_data) / 'cortex' - data_dir.mkdir(parents=True, exist_ok=True) - return data_dir -``` - -**Effort:** 1 hour - ---- - -## Phase 4: Low Priority (Ongoing) - -### ๐ŸŸข L-1: Add Architecture Diagrams -Create Mermaid diagrams in `docs/ARCHITECTURE.md` - -### ๐ŸŸข L-2: Add Async Support -Convert I/O operations to async for better performance - -### ๐ŸŸข L-3: Plugin Architecture -Allow custom LLM providers and package managers - -### ๐ŸŸข L-4: Add Telemetry (Opt-in) -Anonymous usage statistics for improvement - -### ๐ŸŸข L-5: Interactive Mode -REPL-style interface for multi-step operations - -### ๐ŸŸข L-6: Shell Completion -Add bash/zsh completions for CLI - -### ๐ŸŸข L-7: Man Pages -Generate man pages from docstrings - -### ๐ŸŸข L-8: Docker Development Environment -Dockerfile for consistent development - ---- - -## Implementation Timeline - -``` -Week 1: -โ”œโ”€โ”€ Day 1-2: C-1 (Shell injection fix) -โ”œโ”€โ”€ Day 2: C-2 (requirements.txt) -โ”œโ”€โ”€ Day 3: C-3 (CI/CD fix) -โ””โ”€โ”€ Day 3-5: H-1 (Directory structure) - -Week 2: -โ”œโ”€โ”€ H-2 (Installation docs) -โ”œโ”€โ”€ H-3 (Command utility) -โ”œโ”€โ”€ H-4 (Dangerous patterns) -โ””โ”€โ”€ H-5 (Retry logic) - -Week 3: -โ”œโ”€โ”€ H-6, H-7, H-8 (Standards & validation) -โ”œโ”€โ”€ M-1 (Dependency injection) -โ””โ”€โ”€ M-2 (Logging) - -Week 4: -โ”œโ”€โ”€ M-3, M-4 (Tests) -โ”œโ”€โ”€ M-5 (Caching) -โ””โ”€โ”€ M-6 (Type hints) - -Ongoing: -โ””โ”€โ”€ Low priority items as time permits -``` - ---- - -## Success Metrics - -| Metric | Current | Target | Timeline | -|--------|---------|--------|----------| -| Test Coverage | ~45% | 80% | 4 weeks | -| Security Issues | 3 critical | 0 critical | 1 week | -| Documentation | Incomplete | Complete | 2 weeks | -| CI Pass Rate | Unknown | >95% | 1 week | -| Type Coverage | ~30% | 80% | 4 weeks | - ---- - -## Resources Needed - -- **Development:** 
1-2 developers, 40-80 hours total -- **Review:** Security audit recommended after Phase 2 -- **Testing:** Manual testing on Ubuntu 24.04 - ---- - -*This roadmap is a living document. Update as progress is made.* diff --git a/docs/TRANSACTION_HISTORY.md b/docs/TRANSACTION_HISTORY.md deleted file mode 100644 index 22617af..0000000 --- a/docs/TRANSACTION_HISTORY.md +++ /dev/null @@ -1,439 +0,0 @@ -# Transaction History and Undo Module - -**Issue:** #258 -**Status:** Ready for Review -**Bounty:** As specified in issue (+ bonus after funding) - -## Overview - -Complete transaction tracking and undo capabilities for all Cortex package operations. Every install, remove, upgrade, and configure operation is recorded with full state snapshots, enabling safe rollback when needed. - -## Features - -### Full Transaction Tracking - -- Records all package operations with timestamps -- Captures before/after package states -- Tracks operation duration and success/failure -- Stores rollback commands automatically - -### Safe Undo Operations - -- Preview what undo will do before executing -- Dry-run mode for safety -- Warnings for system-critical packages -- Partial rollback recovery - -### Rich History Search - -- Filter by package name -- Filter by operation type -- Filter by date range -- Filter by status - -## Installation - -```python -from cortex.transaction_history import ( - TransactionHistory, - UndoManager, - record_install, - undo_last, - show_history -) -``` - -## Usage Examples - -### Recording Transactions - -```python -from cortex.transaction_history import TransactionHistory, TransactionType - -history = TransactionHistory() - -# Start a transaction -tx = history.begin_transaction( - TransactionType.INSTALL, - ["nginx", "redis"], - "cortex install nginx redis" -) - -# ... perform the actual installation ... 
- -# Complete the transaction -history.complete_transaction(tx, success=True) -``` - -### Using Convenience Functions - -```python -from cortex.transaction_history import record_install, record_remove - -# Record an install -tx = record_install(["docker"], "cortex install docker") -# ... do installation ... -tx.complete(success=True) - -# Record a removal -tx = record_remove(["vim"], "cortex remove vim") -# ... do removal ... -tx.complete(success=True) -``` - -### Viewing History - -```python -from cortex.transaction_history import show_history, get_history - -# Quick view of recent transactions -recent = show_history(limit=10) -for tx in recent: - print(f"{tx['timestamp']} | {tx['transaction_type']} | {tx['packages']}") - -# Advanced search -history = get_history() -nginx_txs = history.search(package="nginx") -installs = history.search(transaction_type=TransactionType.INSTALL) -today = history.search(since=datetime.now() - timedelta(days=1)) -``` - -### Undo Operations - -```python -from cortex.transaction_history import UndoManager, get_undo_manager - -manager = get_undo_manager() - -# Check if undo is possible -can_undo, reason = manager.can_undo(transaction_id) -print(f"Can undo: {can_undo}, Reason: {reason}") - -# Preview the undo -preview = manager.preview_undo(transaction_id) -print(f"Commands to execute: {preview['commands']}") -print(f"Safe to undo: {preview['is_safe']}") - -# Execute undo (dry run first) -result = manager.undo(transaction_id, dry_run=True) -print(f"Would execute: {result['commands']}") - -# Execute for real -result = manager.undo(transaction_id) -print(f"Success: {result['success']}") -``` - -### Quick Undo Last Operation - -```python -from cortex.transaction_history import undo_last - -# Preview -result = undo_last(dry_run=True) - -# Execute -result = undo_last() -if result['success']: - print("Rollback complete!") -else: - print(f"Error: {result['error']}") -``` - -## API Reference - -### TransactionHistory - -Main class for transaction 
storage and retrieval. - -**Constructor:** -```python -TransactionHistory(db_path: Optional[Path] = None) -``` - -**Methods:** - -| Method | Description | -|--------|-------------| -| `begin_transaction(type, packages, command)` | Start tracking a transaction | -| `complete_transaction(tx, success, error_message)` | Complete a transaction | -| `get_transaction(id)` | Get transaction by ID | -| `get_recent(limit, status_filter)` | Get recent transactions | -| `search(package, type, since, until)` | Search with filters | -| `get_stats()` | Get statistics | - -### UndoManager - -Handles undo/rollback operations. - -**Methods:** - -| Method | Description | -|--------|-------------| -| `can_undo(transaction_id)` | Check if transaction can be undone | -| `preview_undo(transaction_id)` | Preview undo operation | -| `undo(transaction_id, dry_run, force)` | Execute undo | -| `undo_last(dry_run)` | Undo most recent transaction | - -### Transaction Types - -```python -class TransactionType(Enum): - INSTALL = "install" - REMOVE = "remove" - UPGRADE = "upgrade" - DOWNGRADE = "downgrade" - AUTOREMOVE = "autoremove" - PURGE = "purge" - CONFIGURE = "configure" - BATCH = "batch" -``` - -### Transaction Statuses - -```python -class TransactionStatus(Enum): - PENDING = "pending" - IN_PROGRESS = "in_progress" - COMPLETED = "completed" - FAILED = "failed" - ROLLED_BACK = "rolled_back" - PARTIALLY_COMPLETED = "partially_completed" -``` - -## Data Model - -### Transaction - -```python -@dataclass -class Transaction: - id: str # Unique transaction ID - transaction_type: TransactionType - packages: List[str] # Packages involved - timestamp: datetime # When started - status: TransactionStatus - before_state: Dict[str, PackageState] # State before operation - after_state: Dict[str, PackageState] # State after operation - command: str # Original command - user: str # User who ran it - duration_seconds: float # How long it took - error_message: Optional[str] # Error if failed - 
rollback_commands: List[str] # Commands to undo - is_rollback_safe: bool # Safe to rollback? - rollback_warning: Optional[str] # Warning message -``` - -### PackageState - -```python -@dataclass -class PackageState: - name: str # Package name - version: Optional[str] # Version if installed - installed: bool # Is it installed? - config_files: List[str] # Config file paths - dependencies: List[str] # Package dependencies -``` - -## Storage - -### Database Location - -Default: `~/.cortex/transaction_history.db` - -Override: -```python -history = TransactionHistory(Path("/custom/path/history.db")) -``` - -### Schema - -```sql -CREATE TABLE transactions ( - id TEXT PRIMARY KEY, - transaction_type TEXT NOT NULL, - packages TEXT NOT NULL, -- JSON array - timestamp TEXT NOT NULL, - status TEXT NOT NULL, - before_state TEXT, -- JSON object - after_state TEXT, -- JSON object - command TEXT, - user TEXT, - duration_seconds REAL, - error_message TEXT, - rollback_commands TEXT, -- JSON array - is_rollback_safe INTEGER, - rollback_warning TEXT -); -``` - -## Rollback Safety - -### Safe Operations - -| Operation | Rollback | Notes | -|-----------|----------|-------| -| Install | Remove | Full restore | -| Remove | Install | Restores package | -| Upgrade | Downgrade | Restores previous version | - -### Unsafe Operations - -| Operation | Rollback | Warning | -|-----------|----------|---------| -| Purge | Install | Config files lost | -| System packages | Varies | May affect stability | - -### Critical Packages - -These packages trigger safety warnings: -- `apt`, `dpkg`, `libc6` -- `systemd`, `bash`, `coreutils` -- `linux-image`, `grub`, `init` - -## CLI Integration - -```python -# In cortex/cli.py -from cortex.transaction_history import get_history, get_undo_manager - -@cli.command() -def install(packages: List[str]): - history = get_history() - - # Record the transaction - tx = history.begin_transaction( - TransactionType.INSTALL, - packages, - f"cortex install {' 
'.join(packages)}" - ) - - try: - # Do the actual installation - result = do_install(packages) - history.complete_transaction(tx, success=True) - except Exception as e: - history.complete_transaction(tx, success=False, error_message=str(e)) - raise - -@cli.command() -def undo(transaction_id: Optional[str] = None, dry_run: bool = False): - manager = get_undo_manager() - - if transaction_id: - result = manager.undo(transaction_id, dry_run=dry_run) - else: - result = manager.undo_last(dry_run=dry_run) - - if result['success']: - print("โœ“ Rollback complete") - else: - print(f"โœ— {result['error']}") - -@cli.command() -def history(limit: int = 10, package: Optional[str] = None): - history = get_history() - - if package: - transactions = history.search(package=package, limit=limit) - else: - transactions = history.get_recent(limit=limit) - - for tx in transactions: - print(f"{tx.timestamp:%Y-%m-%d %H:%M} | {tx.transaction_type.value:10} | {', '.join(tx.packages)}") -``` - -## Architecture - -``` -โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” -โ”‚ CLI Commands โ”‚ -โ”‚ install / remove / upgrade / undo โ”‚ -โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ - โ”‚ -โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ–ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” -โ”‚ TransactionHistory โ”‚ -โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”‚ -โ”‚ โ”‚ begin_tx() โ”‚ โ”‚ complete_tx()โ”‚ โ”‚ search() โ”‚ โ”‚ -โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ 
โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚ -โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ - โ”‚ -โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ–ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” -โ”‚ UndoManager โ”‚ -โ”‚ โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” โ”‚ -โ”‚ โ”‚ can_undo() โ”‚ โ”‚ preview() โ”‚ โ”‚ undo() โ”‚ โ”‚ -โ”‚ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ โ”‚ -โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ - โ”‚ -โ”Œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ–ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ” -โ”‚ SQLite Database โ”‚ -โ”‚ ~/.cortex/transaction_history.db โ”‚ -โ””โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”˜ -``` - -## Testing - -```bash -# Run all tests -pytest tests/test_transaction_history.py -v - -# Run with coverage -pytest tests/test_transaction_history.py --cov=cortex.transaction_history - -# Test specific functionality -pytest tests/test_transaction_history.py -k "undo" -v -``` - -## Troubleshooting - -### Database Corruption - -```python -import os -from pathlib import Path - -# Backup and recreate -db_path = Path.home() / ".cortex" / "transaction_history.db" -if db_path.exists(): - db_path.rename(db_path.with_suffix('.db.bak')) - -# New database will be created 
automatically -history = TransactionHistory() -``` - -### Undo Not Working - -```python -manager = get_undo_manager() - -# Check why undo failed -can_undo, reason = manager.can_undo(tx_id) -print(f"Can undo: {can_undo}") -print(f"Reason: {reason}") - -# Preview what would happen -preview = manager.preview_undo(tx_id) -print(f"Commands: {preview['commands']}") -print(f"Warning: {preview['warning']}") -``` - -### Missing State Information - -```python -# Transaction was created before state capture was implemented -tx = history.get_transaction(tx_id) -if not tx.before_state: - print("No state information - cannot safely undo") - print("Consider manual rollback") -``` - -## Contributing - -1. Add new transaction types to `TransactionType` enum -2. Update rollback command calculation in `_calculate_rollback_commands` -3. Add tests for new functionality -4. Update documentation - ---- - -**Closes:** #258 diff --git a/docs/USER_PREFERENCES_IMPLEMENTATION.md b/docs/USER_PREFERENCES_IMPLEMENTATION.md deleted file mode 100644 index 6c0c1a7..0000000 --- a/docs/USER_PREFERENCES_IMPLEMENTATION.md +++ /dev/null @@ -1,519 +0,0 @@ -# User Preferences & Settings System - Implementation Guide - -## Overview - -The User Preferences System provides persistent configuration management for Cortex Linux, allowing users to customize behavior through YAML-based configuration files and intuitive CLI commands. This implementation satisfies **Issue #26** requirements for saving user preferences across sessions, customizing AI behavior, setting default options, and managing confirmation prompts. 
- -**Status:** โœ… **Fully Implemented & Tested** (39/39 tests passing) - -**Key Features:** -- โœ… YAML-based config file management -- โœ… 6 preference categories (confirmations, verbosity, auto-update, AI, packages, UI) -- โœ… Full validation with error reporting -- โœ… Reset to defaults option -- โœ… CLI commands for viewing and editing preferences -- โœ… Import/Export functionality -- โœ… Atomic writes with automatic backup -- โœ… Type coercion for CLI values -- โœ… Cross-platform support (Linux, Windows, macOS) - -## Architecture - -### Data Models - -#### UserPreferences -Main dataclass containing all user preferences: -- `verbosity`: Output verbosity level (quiet, normal, verbose, debug) -- `confirmations`: Confirmation prompt settings -- `auto_update`: Automatic update configuration -- `ai`: AI behavior settings -- `packages`: Package management preferences -- `theme`: UI theme -- `language`: Interface language -- `timezone`: User timezone - -#### ConfirmationSettings -- `before_install`: Confirm before installing packages -- `before_remove`: Confirm before removing packages -- `before_upgrade`: Confirm before upgrading packages -- `before_system_changes`: Confirm before system-wide changes - -#### AutoUpdateSettings -- `check_on_start`: Check for updates on startup -- `auto_install`: Automatically install updates -- `frequency_hours`: Update check frequency in hours - -#### AISettings -- `model`: AI model to use (default: claude-sonnet-4) -- `creativity`: Creativity level (conservative, balanced, creative) -- `explain_steps`: Show step-by-step explanations -- `suggest_alternatives`: Suggest alternative approaches -- `learn_from_history`: Learn from past interactions -- `max_suggestions`: Maximum number of suggestions (1-20) - -#### PackageSettings -- `default_sources`: List of default package sources -- `prefer_latest`: Prefer latest versions over stable -- `auto_cleanup`: Automatically cleanup unused packages -- `backup_before_changes`: Create backup 
before changes - -### Storage - -**Configuration File Location:** -- Linux/Mac: `~/.config/cortex/preferences.yaml` -- Windows: `%USERPROFILE%\.config\cortex\preferences.yaml` - -**Features:** -- YAML format for human readability -- Automatic backup (`.yaml.bak`) before each write -- Atomic writes using temporary files -- Cross-platform path handling - -## API Reference - -### PreferencesManager - -#### Initialization -```python -manager = PreferencesManager() # Uses default config path -# or -manager = PreferencesManager(config_path=Path("/custom/path.yaml")) -``` - -#### Loading and Saving -```python -manager.load() # Load from disk -manager.save() # Save to disk with backup -``` - -#### Getting Values -```python -# Dot notation access -value = manager.get('ai.model') -value = manager.get('confirmations.before_install') - -# With default -value = manager.get('nonexistent.key', default='fallback') -``` - -#### Setting Values -```python -# Dot notation setting with automatic type coercion -manager.set('verbosity', 'verbose') -manager.set('ai.model', 'gpt-4') -manager.set('confirmations.before_install', True) -manager.set('auto_update.frequency_hours', 24) -``` - -**Type Coercion:** -- Strings โ†’ Booleans: 'true', 'yes', '1', 'on' โ†’ True -- Strings โ†’ Integers: '42' โ†’ 42 -- Strings โ†’ Lists: 'a, b, c' โ†’ ['a', 'b', 'c'] -- Strings โ†’ Enums: 'verbose' โ†’ VerbosityLevel.VERBOSE - -#### Validation -```python -errors = manager.validate() -if errors: - for error in errors: - print(f"Validation error: {error}") -``` - -**Validation Rules:** -- `ai.max_suggestions`: Must be between 1 and 20 -- `auto_update.frequency_hours`: Must be at least 1 -- `language`: Must be valid language code (en, es, fr, de, ja, zh, pt, ru) - -#### Import/Export -```python -# Export to JSON -manager.export_json(Path('backup.json')) - -# Import from JSON -manager.import_json(Path('backup.json')) -``` - -#### Reset -```python -manager.reset() # Reset all preferences to defaults -``` - 
-#### Metadata -```python -# Get all settings as dictionary -settings = manager.get_all_settings() - -# Get config file metadata -info = manager.get_config_info() -# Returns: config_path, config_exists, config_size_bytes, last_modified -``` - -## CLI Integration - -The User Preferences System is fully integrated into the Cortex CLI with two primary commands: - -### `cortex check-pref` - Check/Display Preferences - -View all preferences or specific preference values. - -#### Show All Preferences -```bash -cortex check-pref -``` - -This displays: -- All preference categories with current values -- Validation status (โœ… valid or โŒ with errors) -- Configuration file location and metadata -- Last modified timestamp and file size - -#### Show Specific Preference -```bash -cortex check-pref ai.model -cortex check-pref confirmations.before_install -cortex check-pref auto_update.frequency_hours -``` - -### `cortex edit-pref` - Edit Preferences - -Modify, delete, reset, or manage preferences. - -#### Set/Update a Preference -```bash -cortex edit-pref set verbosity verbose -cortex edit-pref add ai.model gpt-4 -cortex edit-pref update confirmations.before_install false -cortex edit-pref set auto_update.frequency_hours 24 -cortex edit-pref set packages.default_sources "official, community" -``` - -Aliases: `set`, `add`, `update` (all perform the same action) - -**Features:** -- Automatic type coercion (strings โ†’ bools, ints, lists) -- Shows old vs new values -- Automatic validation after changes -- Warns if validation errors are introduced - -#### Delete/Reset a Preference to Default -```bash -cortex edit-pref delete ai.model -cortex edit-pref remove theme -``` - -Aliases: `delete`, `remove`, `reset-key` - -This resets the specific preference to its default value. - -#### List All Preferences -```bash -cortex edit-pref list -cortex edit-pref show -cortex edit-pref display -``` - -Same as `cortex check-pref` (shows all preferences). 
- -#### Reset All Preferences to Defaults -```bash -cortex edit-pref reset-all -``` - -**Warning:** This resets ALL preferences to defaults and prompts for confirmation. - -#### Validate Configuration -```bash -cortex edit-pref validate -``` - -Checks all preferences against validation rules: -- `ai.max_suggestions` must be 1-20 -- `auto_update.frequency_hours` must be โ‰ฅ1 -- `language` must be valid language code - -#### Export/Import Configuration - -**Export to JSON:** -```bash -cortex edit-pref export ~/my-cortex-config.json -cortex edit-pref export /backup/prefs.json -``` - -**Import from JSON:** -```bash -cortex edit-pref import ~/my-cortex-config.json -cortex edit-pref import /backup/prefs.json -``` - -Useful for: -- Backing up configuration -- Sharing config between machines -- Version control of preferences - -## Testing - -### Running Tests -```bash -# Run all preference tests (from project root) -python test/test_user_preferences.py - -# Or with unittest module -python -m unittest test.test_user_preferences -v - -# Run specific test class -python -m unittest test.test_user_preferences.TestPreferencesManager -v - -# Run specific test -python -m unittest test.test_user_preferences.TestPreferencesManager.test_save_and_load -``` - -### Test Coverage - -The test suite includes 39 comprehensive tests covering: - -1. **Data Models** (7 tests) - - Default initialization for all dataclasses - - Custom initialization with values - - UserPreferences with all categories - - ConfirmationSettings - - AutoUpdateSettings - - AISettings - - PackageSettings - -2. **PreferencesManager Core** (17 tests) - - Initialization and default config - - Save and load operations - - Get/set with dot notation - - Nested value access - - Default values handling - - Non-existent key handling - - Set with type coercion - - Get all settings - - Config file metadata - -3. 
**Type Coercion** (5 tests) - - Boolean coercion (true/false/yes/no/1/0) - - Integer coercion from strings - - List coercion (comma-separated) - - Enum coercion (VerbosityLevel, AICreativity) - - String handling - -4. **Validation** (5 tests) - - Valid configuration passes - - Max suggestions range (1-20) - - Frequency hours minimum (โ‰ฅ1) - - Language code validation - - Multiple error reporting - -5. **Import/Export** (2 tests) - - JSON export with all data - - JSON import and restoration - -6. **File Operations** (4 tests) - - Automatic backup creation - - Atomic writes (temp file + rename) - - Config info retrieval - - Cross-platform path handling - -7. **Helpers** (4 tests) - - format_preference_value() for all types - - Enum formatting - - List formatting - - Dictionary formatting - -**All 39 tests passing โœ…** - -### Manual Testing - -1. **Install Dependencies** -```bash -pip install PyYAML>=6.0 -``` - -2. **Test Configuration Creation** -```python -from user_preferences import PreferencesManager - -manager = PreferencesManager() -print(f"Config location: {manager.config_path}") -print(f"Config exists: {manager.config_path.exists()}") -``` - -3. **Test Get/Set Operations** -```python -# Get default value -print(manager.get('ai.model')) # claude-sonnet-4 - -# Set new value -manager.set('ai.model', 'gpt-4') -print(manager.get('ai.model')) # gpt-4 - -# Verify persistence -manager2 = PreferencesManager() -print(manager2.get('ai.model')) # gpt-4 (persisted) -``` - -4. **Test Validation** -```python -# Valid configuration -errors = manager.validate() -print(f"Validation errors: {errors}") # [] - -# Invalid configuration -manager.preferences.ai.max_suggestions = 0 -errors = manager.validate() -print(f"Validation errors: {errors}") # ['ai.max_suggestions must be at least 1'] -``` - -5. 
**Test Import/Export** -```python -from pathlib import Path - -# Export -manager.export_json(Path('test_export.json')) - -# Modify preferences -manager.set('theme', 'modified') - -# Import (restore) -manager.import_json(Path('test_export.json')) -print(manager.get('theme')) # Original value restored -``` - -## Default Configuration - -```yaml -verbosity: normal - -confirmations: - before_install: true - before_remove: true - before_upgrade: false - before_system_changes: true - -auto_update: - check_on_start: true - auto_install: false - frequency_hours: 24 - -ai: - model: claude-sonnet-4 - creativity: balanced - explain_steps: true - suggest_alternatives: true - learn_from_history: true - max_suggestions: 5 - -packages: - default_sources: - - official - prefer_latest: false - auto_cleanup: true - backup_before_changes: true - -theme: default -language: en -timezone: UTC -``` - -## Migration Guide - -### From No Config to v1.0 -Automatic - first run creates default config file. - -### Future Config Versions -The system is designed to support migration: -1. Add version field to config -2. Implement migration functions for each version -3. Auto-migrate on load - -Example: -```python -def migrate_v1_to_v2(data: dict) -> dict: - # Add new fields with defaults - if 'new_field' not in data: - data['new_field'] = default_value - return data -``` - -## Security Considerations - -1. **File Permissions**: Config file created with user-only read/write (600) -2. **Atomic Writes**: Uses temp file + rename to prevent corruption -3. **Backup System**: Automatic backup before each write -4. **Input Validation**: All values validated before storage -5. 
**Type Safety**: Type coercion with validation prevents injection - -## Troubleshooting - -### Config File Not Found -```python -# Check default location -from pathlib import Path -config_path = Path.home() / ".config" / "cortex" / "preferences.yaml" -print(f"Config should be at: {config_path}") -print(f"Exists: {config_path.exists()}") -``` - -### Validation Errors -```python -manager = PreferencesManager() -errors = manager.validate() -for error in errors: - print(f"Error: {error}") -``` - -### Corrupted Config -```python -# Reset to defaults -manager.reset() - -# Or restore from backup -import shutil -backup = manager.config_path.with_suffix('.yaml.bak') -if backup.exists(): - shutil.copy2(backup, manager.config_path) - manager.load() -``` - -### Permission Issues -```bash -# Check file permissions -ls -l ~/.config/cortex/preferences.yaml - -# Fix permissions if needed -chmod 600 ~/.config/cortex/preferences.yaml -``` - -## Performance - -- **Load time**: < 10ms for typical config -- **Save time**: < 20ms (includes backup) -- **Memory**: ~10KB for loaded config -- **File size**: ~1KB typical, ~5KB maximum - -## Future Enhancements - -1. **Configuration Profiles**: Multiple named configuration sets -2. **Remote Sync**: Sync config across devices -3. **Schema Versioning**: Automatic migration between versions -4. **Encrypted Settings**: Encrypt sensitive values -5. **Configuration Templates**: Pre-built configurations for common use cases -6. **GUI Editor**: Visual configuration editor -7. **Configuration Diff**: Show changes between configs -8. **Rollback**: Restore previous configuration versions - -## Contributing - -When adding new preferences: - -1. Add field to appropriate dataclass -2. Update validation rules if needed -3. Add tests for new field -4. Update documentation -5. Update default config example -6. 
Consider migration if changing existing fields - -## License - -Part of Cortex Linux - Licensed under Apache-2.0 diff --git a/docs/guides/Developer-Guide.md b/docs/guides/Developer-Guide.md deleted file mode 100644 index 99e5ab7..0000000 --- a/docs/guides/Developer-Guide.md +++ /dev/null @@ -1,146 +0,0 @@ -# Developer Guide - -## Development Setup -```bash -# Clone repository -git clone https://github.com/cortexlinux/cortex.git -cd cortex - -# Create virtual environment -python3 -m venv venv -source venv/bin/activate - -# Install dev dependencies -pip install -r requirements.txt -pip install -r requirements-dev.txt - -# Run tests -pytest tests/ - -# Run with coverage -pytest --cov=cortex tests/ -``` - -## Project Structure -``` -cortex/ -โ”œโ”€โ”€ cortex/ -โ”‚ โ”œโ”€โ”€ __init__.py -โ”‚ โ”œโ”€โ”€ packages.py # Package manager wrapper -โ”‚ โ”œโ”€โ”€ llm_integration.py # Claude API integration -โ”‚ โ”œโ”€โ”€ sandbox.py # Safe command execution -โ”‚ โ”œโ”€โ”€ hardware.py # Hardware detection -โ”‚ โ”œโ”€โ”€ dependencies.py # Dependency resolution -โ”‚ โ”œโ”€โ”€ verification.py # Installation verification -โ”‚ โ”œโ”€โ”€ rollback.py # Rollback system -โ”‚ โ”œโ”€โ”€ config_templates.py # Config generation -โ”‚ โ”œโ”€โ”€ logging_system.py # Logging & diagnostics -โ”‚ โ””โ”€โ”€ context_memory.py # AI memory system -โ”œโ”€โ”€ tests/ -โ”‚ โ””โ”€โ”€ test_*.py # Unit tests -โ”œโ”€โ”€ docs/ -โ”‚ โ””โ”€โ”€ *.md # Documentation -โ””โ”€โ”€ .github/ - โ””โ”€โ”€ workflows/ # CI/CD -``` - -## Architecture - -### Core Flow -``` -User Input (Natural Language) - โ†“ -LLM Integration Layer (Claude API) - โ†“ -Package Manager Wrapper (apt/yum/dnf) - โ†“ -Dependency Resolver - โ†“ -Sandbox Executor (Firejail) - โ†“ -Installation Verifier - โ†“ -Context Memory (learns patterns) -``` - -### Key Components - -**LLM Integration (`llm_integration.py`)** -- Interfaces with Claude API -- Parses natural language -- Generates installation plans - -**Package Manager (`packages.py`)** -- Translates 
intent to commands -- Supports apt, yum, dnf -- 32+ software categories - -**Sandbox (`sandbox.py`)** -- Firejail isolation -- AppArmor policies -- Safe command execution - -**Hardware Detection (`hardware.py`)** -- GPU/CPU detection -- Optimization recommendations -- Driver compatibility - -## Contributing - -### Claiming Issues - -1. Browse [open issues](https://github.com/cortexlinux/cortex/issues) -2. Comment "I'd like to work on this" -3. Get assigned -4. Submit PR - -### PR Requirements - -- Tests with >80% coverage -- Documentation included -- Follows code style -- Passes CI checks - -### Bounty Program - -Cash bounties on merge: -- Critical features: $150-200 -- Standard features: $75-150 -- Testing/integration: $50-75 -- 2x bonus at funding (Feb 2025) - -Payment: Bitcoin, USDC, or PayPal - -See [Bounty Program](Bounties) for details. - -## Testing -```bash -# Run all tests -pytest - -# Specific test file -pytest tests/test_packages.py - -# With coverage -pytest --cov=cortex tests/ - -# Watch mode -pytest-watch -``` - -## Code Style -```bash -# Format code -black cortex/ - -# Lint -pylint cortex/ - -# Type checking -mypy cortex/ -``` - -## Questions? - -- Discord: https://discord.gg/uCqHvxjU83 -- GitHub Discussions: https://github.com/cortexlinux/cortex/discussions diff --git a/docs/guides/FAQ.md b/docs/guides/FAQ.md deleted file mode 100644 index 50de74e..0000000 --- a/docs/guides/FAQ.md +++ /dev/null @@ -1,108 +0,0 @@ -# Frequently Asked Questions - -## General - -**Q: What is Cortex Linux?** -A: An AI-native operating system that understands natural language. No more Stack Overflow, no more dependency hell. - -**Q: Is it ready to use?** -A: MVP is 95% complete (November 2025). Demo-ready, production release coming soon. - -**Q: What platforms does it support?** -A: Ubuntu 24.04 LTS currently. Other Debian-based distros coming soon. - -**Q: Is it free?** -A: Community edition is free and open source (Apache 2.0). Enterprise subscriptions available. 
- -## Usage - -**Q: How do I install software?** -A: Just tell Cortex what you need: -```bash -cortex install "python for machine learning" -cortex install "web development environment" -``` - -**Q: What if something goes wrong?** -A: Cortex has automatic rollback: -```bash -cortex rollback -``` - -**Q: Can I test before installing?** -A: Yes, simulation mode: -```bash -cortex simulate "install oracle database" -``` - -**Q: Does it work with existing package managers?** -A: Yes, Cortex wraps apt/yum/dnf. Your existing commands still work. - -## Contributing - -**Q: How do I contribute?** -A: Browse issues, claim one, submit PR. See [Contributing](Contributing). - -**Q: Do you pay for contributions?** -A: Yes! Cash bounties on merge. See [Bounty Program](Bounties). - -**Q: How much can I earn?** -A: $25-200 per feature, plus 2x bonus at funding. - -**Q: What skills do you need?** -A: Python, Linux systems, DevOps, AI/ML, or technical writing. - -**Q: Can non-developers contribute?** -A: Yes! Documentation, testing, design, community management. - -## Technical - -**Q: What AI model does it use?** -A: Claude (Anthropic) for natural language understanding. - -**Q: Is it secure?** -A: Yes. Firejail sandboxing + AppArmor policies. AI actions are validated before execution. - -**Q: Does it phone home?** -A: Only for AI API calls. No telemetry. Enterprise can run air-gapped with local LLMs. - -**Q: Can I use my own LLM?** -A: Coming soon. Plugin system will support local models. - -**Q: What's the overhead?** -A: Minimal. AI calls only during installation planning. Execution is native Linux. - -## Business - -**Q: Who's behind this?** -A: Michael J. Morgan (CEO), AI Venture Holdings LLC. Patent holder in AI systems. - -**Q: What's the business model?** -A: Open source community + Enterprise subscriptions (like Red Hat). - -**Q: Are you hiring?** -A: Yes! Top contributors may join the founding team. See [Contributing](Contributing). 
- -**Q: When is the seed round?** -A: February 2025 ($2-3M target). - -**Q: Can I invest?** -A: Contact mike@cortexlinux.com for investor information. - -## Support - -**Q: Where do I get help?** -A: Discord: https://discord.gg/uCqHvxjU83 - -**Q: How do I report bugs?** -A: GitHub Issues: https://github.com/cortexlinux/cortex/issues - -**Q: Is there documentation?** -A: Yes! This wiki + in-code docs. - -**Q: Can I request features?** -A: Yes! GitHub Discussions or Discord. - -## More Questions? - -Ask in [Discord](https://discord.gg/uCqHvxjU83) or open a [Discussion](https://github.com/cortexlinux/cortex/discussions). diff --git a/docs/guides/Getting-Started.md b/docs/guides/Getting-Started.md deleted file mode 100644 index 89b84ce..0000000 --- a/docs/guides/Getting-Started.md +++ /dev/null @@ -1,44 +0,0 @@ -# Getting Started with Cortex Linux - -## Prerequisites - -- Ubuntu 24.04 LTS (or compatible) -- Python 3.11+ -- Internet connection - -## Quick Install -```bash -# Clone repository -git clone https://github.com/cortexlinux/cortex.git -cd cortex - -# Install dependencies -pip install -r requirements.txt - -# Configure API key -export ANTHROPIC_API_KEY="your-key-here" - -# Run Cortex -python -m cortex install "nodejs" -``` - -## First Commands -```bash -# Install development environment -cortex install "web development environment" - -# Install with GPU optimization -cortex install "tensorflow" --optimize-gpu - -# Simulate before installing -cortex simulate "install oracle database" - -# Check system health -cortex health -``` - -## Next Steps - -- Read the [User Guide](User-Guide) for complete command reference -- Join [Discord](https://discord.gg/uCqHvxjU83) for support -- Check [FAQ](FAQ) for common questions diff --git a/docs/guides/Home.md b/docs/guides/Home.md deleted file mode 100644 index fb6e933..0000000 --- a/docs/guides/Home.md +++ /dev/null @@ -1,43 +0,0 @@ -# Cortex Linux Wiki - -**The AI-Native Operating System - Complete Documentation** - -## 
Quick Links - -- [Getting Started](Getting-Started) -- [Installation Guide](Installation) -- [User Guide](User-Guide) -- [Developer Guide](Developer-Guide) -- [Contributing](Contributing) -- [Bounty Program](Bounties) -- [FAQ](FAQ) - -## What is Cortex Linux? - -Cortex Linux is an AI-native operating system that understands natural language. No more Stack Overflow, no more dependency hell. - -**Example:** -```bash -cortex install "python for machine learning" -# Installs Python, CUDA, PyTorch, Jupyter - fully configured in 2 minutes -``` - -## MVP Status (November 2025) - -โœ… **95% Complete - Demo Ready** - -**Working Features:** -- Natural language package management -- Hardware-aware optimization (GPU/CPU) -- Dependency resolution -- Installation verification -- Rollback system -- Error recovery -- Progress notifications -- Config file generation - -## Community - -- **Discord:** https://discord.gg/uCqHvxjU83 -- **GitHub:** https://github.com/cortexlinux/cortex -- **Discussions:** https://github.com/cortexlinux/cortex/discussions diff --git a/docs/guides/User-Guide.md b/docs/guides/User-Guide.md deleted file mode 100644 index ceb5f26..0000000 --- a/docs/guides/User-Guide.md +++ /dev/null @@ -1,107 +0,0 @@ -# Cortex Linux User Guide - -## Basic Commands - -### Installation -```bash -# Natural language installation -cortex install "python for data science" - -# Specific packages -cortex install nginx postgresql redis - -# With optimization -cortex install "cuda drivers" --optimize-gpu -``` - -### System Management -```bash -# Check what's installed -cortex list - -# System health check -cortex health - -# View installation history -cortex history - -# Rollback last installation -cortex rollback - -# Rollback to specific point -cortex rollback --to -``` - -### Simulation Mode - -Test installations without making changes: -```bash -cortex simulate "install oracle 23 ai" -# Shows: disk space, dependencies, estimated time -``` - -### Progress & Notifications -```bash 
-# Installation with progress -cortex install "docker kubernetes" --show-progress - -# Desktop notifications (if available) -cortex install "large-package" --notify -``` - -## Advanced Features - -### Import from Requirements -```bash -# Python projects -cortex import requirements.txt - -# Node projects -cortex import package.json -``` - -### Configuration Templates -```bash -# Generate nginx config -cortex config nginx --template webserver - -# Generate PostgreSQL config -cortex config postgresql --template production -``` - -### System Profiles -```bash -# Install complete stacks -cortex profile "web-development" -cortex profile "data-science" -cortex profile "devops" -``` - -## Troubleshooting - -### Installation Failed -```bash -# View error details -cortex log --last - -# Auto-fix attempt -cortex fix --last-error - -# Manual rollback -cortex rollback -``` - -### Check Dependencies -```bash -# View dependency tree -cortex deps - -# Check conflicts -cortex check conflicts -``` - -## Getting Help - -- **Discord:** https://discord.gg/uCqHvxjU83 -- **FAQ:** [FAQ](FAQ) -- **Issues:** https://github.com/cortexlinux/cortex/issues diff --git a/docs/modules/README_CONTEXT_MEMORY.md b/docs/modules/README_CONTEXT_MEMORY.md deleted file mode 100644 index 1c8163b..0000000 --- a/docs/modules/README_CONTEXT_MEMORY.md +++ /dev/null @@ -1,521 +0,0 @@ -# AI Context Memory System - -## Overview - -The **AI Context Memory System** is a sophisticated learning and pattern recognition engine for Cortex Linux. It provides persistent memory that enables the AI to learn from user interactions, remember preferences, detect patterns, and generate intelligent suggestions. 
- -## Features - -### ๐Ÿง  Core Capabilities - -- **Persistent Memory Storage**: Records all user interactions with full context -- **Pattern Recognition**: Automatically detects recurring behaviors and workflows -- **Intelligent Suggestions**: Generates optimization recommendations based on history -- **Preference Management**: Stores and retrieves user preferences -- **Privacy-Preserving**: Anonymized pattern matching protects sensitive data -- **Export/Import**: Full data portability with JSON export - -### ๐Ÿ“Š Memory Categories - -The system tracks interactions across multiple categories: - -- **Package**: Package installations and management -- **Command**: Shell command executions -- **Pattern**: Detected behavioral patterns -- **Preference**: User settings and preferences -- **Error**: Error occurrences and resolutions - -## Installation - -```bash -# Copy the module to your Cortex Linux installation -cp context_memory.py /opt/cortex/lib/ - -# Or install as a Python package -pip install -e . 
-``` - -## Usage - -### Basic Usage - -```python -from context_memory import ContextMemory, MemoryEntry - -# Initialize the memory system -memory = ContextMemory() - -# Record an interaction -entry = MemoryEntry( - category="package", - context="User wants to install Docker for containerization", - action="apt install docker-ce docker-compose", - result="Successfully installed Docker 24.0.5", - success=True, - metadata={"packages": ["docker-ce", "docker-compose"], "version": "24.0.5"} -) - -entry_id = memory.record_interaction(entry) -print(f"Recorded interaction #{entry_id}") -``` - -### Pattern Detection - -```python -# Get detected patterns (minimum 70% confidence) -patterns = memory.get_patterns(min_confidence=0.7) - -for pattern in patterns: - print(f"Pattern: {pattern.description}") - print(f" Frequency: {pattern.frequency}") - print(f" Confidence: {pattern.confidence:.0%}") - print(f" Actions: {', '.join(pattern.actions)}") -``` - -### Intelligent Suggestions - -```python -# Generate suggestions based on memory and patterns -suggestions = memory.generate_suggestions() - -for suggestion in suggestions: - print(f"[{suggestion.suggestion_type}] {suggestion.title}") - print(f" {suggestion.description}") - print(f" Confidence: {suggestion.confidence:.0%}") -``` - -### Preference Management - -```python -# Store preferences -memory.set_preference("default_editor", "vim") -memory.set_preference("auto_update", True) -memory.set_preference("theme", {"name": "dark", "accent": "#007acc"}) - -# Retrieve preferences -editor = memory.get_preference("default_editor") -update = memory.get_preference("auto_update") -theme = memory.get_preference("theme") - -# Get preference with default -shell = memory.get_preference("default_shell", default="/bin/bash") -``` - -### Finding Similar Interactions - -```python -# Search for similar past interactions -similar = memory.get_similar_interactions( - context="Docker installation problems", - limit=5 -) - -for entry in similar: - 
print(f"{entry.timestamp}: {entry.action}") - print(f" Result: {entry.result}") - print(f" Success: {entry.success}") -``` - -### Statistics and Analytics - -```python -# Get memory system statistics -stats = memory.get_statistics() - -print(f"Total Entries: {stats['total_entries']}") -print(f"Success Rate: {stats['success_rate']:.1f}%") -print(f"Total Patterns: {stats['total_patterns']}") -print(f"Active Suggestions: {stats['active_suggestions']}") -print(f"Recent Activity (7 days): {stats['recent_activity']}") - -# Category breakdown -print("\nBy Category:") -for category, count in stats['by_category'].items(): - print(f" {category}: {count}") -``` - -### Export Memory Data - -```python -# Export all memory data to JSON -memory.export_memory( - output_path="/backup/cortex_memory_export.json", - include_dismissed=False # Exclude dismissed suggestions -) -``` - -## Data Model - -### MemoryEntry - -Represents a single user interaction: - -```python -@dataclass -class MemoryEntry: - id: Optional[int] = None - timestamp: str = "" # ISO format datetime - category: str = "" # package, command, pattern, etc. 
- context: str = "" # What the user was trying to do - action: str = "" # What action was taken - result: str = "" # Outcome of the action - success: bool = True # Whether it succeeded - confidence: float = 1.0 # Confidence in the result (0-1) - frequency: int = 1 # How many times this occurred - metadata: Dict[str, Any] = None # Additional structured data -``` - -### Pattern - -Represents a detected behavioral pattern: - -```python -@dataclass -class Pattern: - pattern_id: str # Unique identifier - pattern_type: str # installation, configuration, workflow - description: str # Human-readable description - frequency: int # How many times seen - last_seen: str # Last occurrence timestamp - confidence: float # Pattern confidence (0-1) - actions: List[str] # Actions in the pattern - context: Dict[str, Any] # Additional context -``` - -### Suggestion - -Represents an AI-generated suggestion: - -```python -@dataclass -class Suggestion: - suggestion_id: str # Unique identifier - suggestion_type: str # optimization, alternative, warning - title: str # Short title - description: str # Detailed description - confidence: float # Confidence in suggestion (0-1) - based_on: List[str] # Memory entry IDs it's based on - created_at: str # Creation timestamp -``` - -## Database Schema - -The system uses SQLite with the following tables: - -### memory_entries -Stores all user interactions with full context. - -| Column | Type | Description | -|--------|------|-------------| -| id | INTEGER PRIMARY KEY | Unique entry ID | -| timestamp | TEXT | When the interaction occurred | -| category | TEXT | Category (package, command, etc.) 
| -| context | TEXT | What the user was trying to do | -| action | TEXT | What action was taken | -| result | TEXT | Outcome of the action | -| success | BOOLEAN | Whether it succeeded | -| confidence | REAL | Confidence in the result | -| frequency | INTEGER | Occurrence count | -| metadata | TEXT (JSON) | Additional structured data | - -### patterns -Stores detected behavioral patterns. - -| Column | Type | Description | -|--------|------|-------------| -| pattern_id | TEXT PRIMARY KEY | Unique pattern identifier | -| pattern_type | TEXT | Type of pattern | -| description | TEXT | Human-readable description | -| frequency | INTEGER | How many times seen | -| last_seen | TEXT | Last occurrence | -| confidence | REAL | Pattern confidence | -| actions | TEXT (JSON) | Actions in pattern | -| context | TEXT (JSON) | Pattern context | - -### suggestions -Stores AI-generated suggestions. - -| Column | Type | Description | -|--------|------|-------------| -| suggestion_id | TEXT PRIMARY KEY | Unique suggestion ID | -| suggestion_type | TEXT | Type of suggestion | -| title | TEXT | Short title | -| description | TEXT | Detailed description | -| confidence | REAL | Confidence score | -| based_on | TEXT (JSON) | Source memory entry IDs | -| created_at | TEXT | Creation timestamp | -| dismissed | BOOLEAN | Whether user dismissed it | - -### preferences -Stores user preferences. - -| Column | Type | Description | -|--------|------|-------------| -| key | TEXT PRIMARY KEY | Preference key | -| value | TEXT (JSON) | Preference value | -| category | TEXT | Preference category | -| updated_at | TEXT | Last update timestamp | - -## Suggestion Types - -### Optimization Suggestions -Generated when the system detects repeated actions that could be automated or optimized. - -**Example:** -``` -Title: Frequent Installation: docker-ce -Description: You've installed docker-ce 5 times recently. - Consider adding it to your default setup script. 
-Confidence: 100% -``` - -### Alternative Suggestions -Generated when an action fails and the system knows successful alternatives. - -**Example:** -``` -Title: Alternative to: pip install broken-package -Description: Based on your history, try: pip install working-package -Confidence: 70% -``` - -### Proactive Suggestions -Generated when high-confidence patterns indicate automation opportunities. - -**Example:** -``` -Title: Automate: Recurring pattern: configure nginx ssl -Description: You frequently do this (8 times). Would you like to automate it? -Confidence: 80% -``` - -## Configuration - -### Database Location - -Default: `~/.cortex/context_memory.db` - -Change by passing a custom path: - -```python -memory = ContextMemory(db_path="/custom/path/memory.db") -``` - -### Pattern Detection Thresholds - -Patterns are detected when: -- **Minimum frequency**: 3 occurrences within 30 days -- **Confidence calculation**: `min(1.0, frequency / 10.0)` -- **Retrieval threshold**: Default 0.5 (50% confidence) - -### Suggestion Generation - -Suggestions are generated based on: -- **Optimization**: 3+ identical actions within 7 days -- **Alternatives**: Failed actions with successful similar actions -- **Proactive**: Patterns with 80%+ confidence and 5+ frequency - -## Privacy & Security - -### Data Anonymization -- Pattern matching uses keywords, not full text -- No personally identifiable information (PII) stored by default -- Metadata is user-controlled - -### Local Storage -- All data stored locally in SQLite -- No external transmission -- User has full control over data - -### Data Export -- Complete data portability via JSON export -- User can audit all stored information -- Easy deletion of specific entries or categories - -## Performance Considerations - -### Database Size -- Typical usage: ~1-10 MB per year -- Automatic indexing on frequently queried columns -- Periodic cleanup recommended for large datasets - -### Query Optimization -- Indexes on: category, 
timestamp, pattern_type, suggestion_type -- Limit queries use pagination -- Recent activity queries optimized with date filters - -### Memory Footprint -- Minimal RAM usage (~5-10 MB) -- SQLite connection pooling -- Lazy loading of large result sets - -## Integration with Cortex Linux - -### LLM Integration -```python -from cortex.llm import CortexLLM -from context_memory import ContextMemory - -llm = CortexLLM() -memory = ContextMemory() - -# Get context for AI decision-making -context = memory.get_similar_interactions("install cuda", limit=5) -patterns = memory.get_patterns(pattern_type="package") - -# Use in prompt -prompt = f""" -Previous similar installations: {context} -Detected patterns: {patterns} - -User wants to: install cuda drivers -What should I recommend? -""" - -response = llm.generate(prompt) -``` - -### Package Manager Wrapper -```python -from cortex.package_manager import PackageManager -from context_memory import ContextMemory, MemoryEntry - -pm = PackageManager() -memory = ContextMemory() - -def install_package(package_name): - # Record the attempt - entry = MemoryEntry( - category="package", - context=f"User requested: {package_name}", - action=f"apt install {package_name}", - success=False # Will update later - ) - - # Attempt installation - result = pm.install(package_name) - - # Update memory - entry.success = result.success - entry.result = result.message - entry.metadata = result.metadata - - memory.record_interaction(entry) - - # Check for suggestions - if not result.success: - suggestions = memory.generate_suggestions(context=package_name) - for suggestion in suggestions: - if suggestion.suggestion_type == "alternative": - print(f"๐Ÿ’ก Suggestion: {suggestion.description}") - - return result -``` - -## Testing - -Run the comprehensive test suite: - -```bash -# Run all tests -python test_context_memory.py - -# Run with verbose output -python test_context_memory.py -v - -# Run specific test class -python -m unittest 
test_context_memory.TestContextMemory - -# Run specific test -python -m unittest test_context_memory.TestContextMemory.test_record_interaction -``` - -### Test Coverage - -The test suite includes: - -- โœ… Database initialization and schema -- โœ… Memory entry recording and retrieval -- โœ… Pattern detection and confidence calculation -- โœ… Suggestion generation (all types) -- โœ… Preference management -- โœ… Statistics calculation -- โœ… Data export functionality -- โœ… Integration workflows - -**Expected coverage**: >85% - -## Troubleshooting - -### Database Locked Error - -**Problem**: `sqlite3.OperationalError: database is locked` - -**Solution**: Ensure no other processes are accessing the database. Use a context manager: - -```python -# Instead of multiple connections -conn1 = sqlite3.connect(db_path) -conn2 = sqlite3.connect(db_path) # May cause locking - -# Use single connection or context manager -with sqlite3.connect(db_path) as conn: - cursor = conn.cursor() - # Do work -``` - -### Pattern Not Detected - -**Problem**: Patterns not appearing despite repeated actions - -**Solution**: Check minimum thresholds: -- At least 3 occurrences within 30 days -- Use lower confidence threshold: `get_patterns(min_confidence=0.3)` - -### Slow Query Performance - -**Problem**: Queries taking too long - -**Solution**: -1. Check database size: `ls -lh ~/.cortex/context_memory.db` -2. Rebuild indexes: `REINDEX` -3. Use date filters for large datasets -4. Consider archiving old entries - -## Future Enhancements - -- [ ] Machine learning-based pattern recognition -- [ ] Cross-user anonymized pattern sharing -- [ ] Natural language query interface -- [ ] Automatic workflow script generation -- [ ] Integration with system monitoring -- [ ] Predictive failure detection -- [ ] Smart caching of frequent queries -- [ ] Multi-user support with privacy isolation - -## Contributing - -Contributions welcome! Areas for improvement: - -1. 
**Pattern Recognition**: Better algorithms for pattern detection -2. **Suggestion Quality**: More sophisticated suggestion generation -3. **Performance**: Query optimization for large datasets -4. **Privacy**: Enhanced anonymization techniques -5. **Integration**: Hooks for other Cortex modules - -## License - -Part of Cortex Linux - AI-Native Operating System - -## Support - -- **Issues**: https://github.com/cortexlinux/cortex/issues -- **Discussions**: https://github.com/cortexlinux/cortex/discussions -- **Discord**: https://discord.gg/uCqHvxjU83 -- **Email**: mike@cortexlinux.com - ---- - -**Issue #24** - AI Context Memory System -**Bounty**: $200 upon merge -**Skills**: Python, SQLite, Machine Learning, Pattern Recognition diff --git a/docs/modules/README_DEPENDENCIES.md b/docs/modules/README_DEPENDENCIES.md deleted file mode 100644 index 30e5580..0000000 --- a/docs/modules/README_DEPENDENCIES.md +++ /dev/null @@ -1,249 +0,0 @@ -# Dependency Resolution System - -AI-powered dependency detection and resolution for Cortex Linux. 
- -## Features - -- โœ… Automatic dependency detection via apt-cache -- โœ… Predefined patterns for 8+ common packages -- โœ… Transitive dependency resolution -- โœ… Conflict detection -- โœ… Optimal installation order calculation -- โœ… Installation plan generation -- โœ… Dependency tree visualization -- โœ… JSON export for automation - -## Usage - -### Show Dependency Tree - -```bash -python3 dependency_resolver.py docker --tree -``` - -Output: -``` -๐Ÿ“ฆ Dependency tree for docker: -============================================================ -โŒ docker - โŒ containerd - Required dependency - โŒ docker-ce-cli - Required dependency - โŒ docker-buildx-plugin - Required dependency - โœ… iptables (1.8.7-1) - System dependency - โœ… ca-certificates (20230311) - System dependency -``` - -### Generate Installation Plan - -```bash -python3 dependency_resolver.py postgresql --plan -``` - -Output: -``` -๐Ÿ“‹ Installation plan for postgresql: -============================================================ - -Package: postgresql -Total dependencies: 5 -โœ… Already satisfied: 2 -โŒ Need to install: 3 - -๐Ÿ“ Installation order: - 1. โŒ postgresql-common - 2. โŒ postgresql-client - 3. 
โŒ postgresql - -โฑ๏ธ Estimated time: 1.5 minutes - -๐Ÿ’ป Commands to run: - sudo apt-get update - sudo apt-get install -y postgresql-common - sudo apt-get install -y postgresql-client - sudo apt-get install -y postgresql -``` - -### Show Missing Dependencies Only - -```bash -python3 dependency_resolver.py nginx --missing -``` - -### Export to JSON - -```bash -python3 dependency_resolver.py redis-server --export redis-deps.json -``` - -## Programmatic Usage - -```python -from dependency_resolver import DependencyResolver - -resolver = DependencyResolver() - -# Get dependency graph -graph = resolver.resolve_dependencies('docker') - -print(f"Total dependencies: {len(graph.all_dependencies)}") -print(f"Installation order: {graph.installation_order}") - -# Check for conflicts -if graph.conflicts: - print("โš ๏ธ Conflicts detected:") - for pkg1, pkg2 in graph.conflicts: - print(f" {pkg1} <-> {pkg2}") - -# Get missing dependencies -missing = resolver.get_missing_dependencies('docker') -for dep in missing: - print(f"Need to install: {dep.name} ({dep.reason})") - -# Generate installation plan -plan = resolver.generate_install_plan('nginx') -print(f"Estimated install time: {plan['estimated_time_minutes']} minutes") - -# Execute installation commands -for cmd in plan['install_commands']: - print(f"Run: {cmd}") -``` - -## Supported Packages - -Predefined dependency patterns for: -- docker -- postgresql -- mysql-server -- nginx -- apache2 -- nodejs -- redis-server -- python3-pip - -For other packages, uses apt-cache dependency data. 
- -## Architecture - -### Dependency Class -Represents a single package dependency: -- `name`: Package name -- `version`: Required version (optional) -- `reason`: Why this dependency exists -- `is_satisfied`: Whether already installed -- `installed_version`: Current version if installed - -### DependencyGraph Class -Complete dependency information: -- `package_name`: Target package -- `direct_dependencies`: Immediate dependencies -- `all_dependencies`: Including transitive deps -- `conflicts`: Conflicting packages -- `installation_order`: Optimal install sequence - -### DependencyResolver Class -Main resolver with: -- **Dependency Detection**: Via apt-cache and predefined patterns -- **Conflict Detection**: Identifies incompatible packages -- **Installation Planning**: Generates optimal install sequence -- **Caching**: Speeds up repeated queries - -## Conflict Detection - -Detects known conflicts: -- mysql-server โ†” mariadb-server -- apache2 โ†” nginx (port conflicts) - -Example: -```python -resolver = DependencyResolver() -graph = resolver.resolve_dependencies('mysql-server') - -if graph.conflicts: - print("Cannot install - conflicts detected!") -``` - -## Installation Order - -Uses intelligent ordering: -1. System libraries (libc, libssl, etc.) -2. Base dependencies (ca-certificates, curl, etc.) -3. Package-specific dependencies -4. Target package - -This minimizes installation failures. 
- -## Integration with Cortex - -```python -# In cortex install command -from dependency_resolver import DependencyResolver - -resolver = DependencyResolver() - -# Get installation plan -plan = resolver.generate_install_plan(package_name) - -# Check for conflicts -if plan['conflicts']: - raise InstallationError(f"Conflicts: {plan['conflicts']}") - -# Execute in order -for package in plan['installation_order']: - if not resolver.is_package_installed(package): - install_package(package) -``` - -## Testing - -```bash -python3 test_dependency_resolver.py -``` - -## Performance - -- **Cache**: Dependency graphs are cached per session -- **Speed**: ~0.5s per package for apt-cache queries -- **Memory**: <50MB for typical dependency graphs - -## Future Enhancements - -- [ ] Support for pip/npm dependencies -- [ ] AI-powered dependency suggestions -- [ ] Version constraint resolution -- [ ] Automatic conflict resolution -- [ ] PPA repository detection -- [ ] Circular dependency detection -- [ ] Parallel installation planning - -## Example: Complete Workflow - -```python -from dependency_resolver import DependencyResolver -from installation_verifier import InstallationVerifier - -# Step 1: Resolve dependencies -resolver = DependencyResolver() -plan = resolver.generate_install_plan('docker') - -# Step 2: Check conflicts -if plan['conflicts']: - print("โš ๏ธ Resolve conflicts first") - exit(1) - -# Step 3: Install in order -for package in plan['installation_order']: - if not resolver.is_package_installed(package): - print(f"Installing {package}...") - # execute: apt-get install package - -# Step 4: Verify installation -verifier = InstallationVerifier() -result = verifier.verify_package('docker') - -if result.status == VerificationStatus.SUCCESS: - print("โœ… Installation complete and verified!") -``` - -## License - -MIT License - Part of Cortex Linux diff --git a/docs/modules/README_ERROR_PARSER.md b/docs/modules/README_ERROR_PARSER.md deleted file mode 100644 index 
b34d40c..0000000 --- a/docs/modules/README_ERROR_PARSER.md +++ /dev/null @@ -1,308 +0,0 @@ -# Error Message Parser - -Intelligent error message parsing and fix suggestions for Cortex Linux. - -## Features - -- โœ… Recognizes 13+ error categories -- โœ… Pattern matching with confidence scores -- โœ… Automatic fix suggestions -- โœ… Severity assessment -- โœ… Data extraction from error messages -- โœ… Automatic fix commands when available -- โœ… CLI and programmatic interfaces -- โœ… JSON export - -## Usage - -### Basic Parsing - -```bash -# Parse error message directly -python3 error_parser.py "E: Unable to locate package test-package" - -# Parse from file -python3 error_parser.py --file error.log - -# Pipe error output -apt-get install nonexistent 2>&1 | python3 error_parser.py -``` - -### Example Output - -``` -============================================================ -ERROR ANALYSIS -============================================================ - -๐Ÿ“‹ Category: package_not_found -โš ๏ธ Severity: MEDIUM -๐Ÿ”ง Fixable: Yes - -โœ… Matched 1 error pattern(s) - 1. package_not_found (confidence: 95%) - -๐Ÿ’ก Suggested Fixes: - 1. Update package lists: sudo apt-get update - 2. Check package name spelling - 3. Package may need a PPA: search for "test-package ubuntu ppa" - 4. 
Try searching: apt-cache search test-package - -๐Ÿค– Automatic Fix Available: - sudo apt-get update - -============================================================ -``` - -### Get Only Auto-Fix Command - -```bash -python3 error_parser.py "E: No space left on device" --auto-fix -# Output: sudo apt-get clean && sudo apt-get autoremove -y -``` - -### Export to JSON - -```bash -python3 error_parser.py "Error message" --export analysis.json -``` - -## Programmatic Usage - -```python -from error_parser import ErrorParser, ErrorCategory - -parser = ErrorParser() - -# Parse error -error_msg = "E: Unable to locate package test-pkg" -analysis = parser.parse_error(error_msg) - -# Check category -if analysis.primary_category == ErrorCategory.PACKAGE_NOT_FOUND: - print("Package not found!") - -# Get fixes -for fix in analysis.suggested_fixes: - print(f"Try: {fix}") - -# Apply automatic fix if available -if analysis.automatic_fix_available: - import subprocess - subprocess.run(analysis.automatic_fix_command, shell=True) -``` - -## Supported Error Categories - -1. **DEPENDENCY_MISSING** - Missing package dependencies -2. **PACKAGE_NOT_FOUND** - Package doesn't exist in repositories -3. **PERMISSION_DENIED** - Insufficient permissions -4. **DISK_SPACE** - Not enough disk space -5. **NETWORK_ERROR** - Network/connectivity issues -6. **CONFLICT** - Package conflicts -7. **BROKEN_PACKAGE** - Broken/held packages -8. **GPG_KEY_ERROR** - Missing repository keys -9. **REPOSITORY_ERROR** - Repository configuration issues -10. **LOCK_ERROR** - Package manager lock files -11. **VERSION_CONFLICT** - Version incompatibilities -12. **CONFIGURATION_ERROR** - Package configuration issues -13. 
**UNKNOWN** - Unrecognized errors - -## Error Categories Detail - -### DEPENDENCY_MISSING -**Example:** `E: nginx: Depends: libssl1.1 but it is not installable` - -**Severity:** High -**Fixable:** Yes -**Auto-fix:** `sudo apt-get install -y {dependency}` - -### PACKAGE_NOT_FOUND -**Example:** `E: Unable to locate package nonexistent` - -**Severity:** Medium -**Fixable:** Yes -**Auto-fix:** `sudo apt-get update` - -### DISK_SPACE -**Example:** `E: No space left on device` - -**Severity:** Critical -**Fixable:** Yes (with user confirmation) -**Auto-fix:** `sudo apt-get clean && sudo apt-get autoremove -y` - -### BROKEN_PACKAGE -**Example:** `E: You have held broken packages` - -**Severity:** Critical -**Fixable:** Yes -**Auto-fix:** `sudo apt-get install -f -y` - -### LOCK_ERROR -**Example:** `E: Could not get lock /var/lib/dpkg/lock` - -**Severity:** High -**Fixable:** Yes -**Auto-fix:** Kill processes and remove locks - -### GPG_KEY_ERROR -**Example:** `GPG error: NO_PUBKEY 0EBFCD88` - -**Severity:** Medium -**Fixable:** Yes -**Auto-fix:** `sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-keys {key_id}` - -## Integration with Cortex - -### Automatic Error Recovery - -```python -from error_parser import ErrorParser -import subprocess - -def install_with_auto_fix(package_name, max_retries=3): - """Install package with automatic error recovery""" - parser = ErrorParser() - - for attempt in range(max_retries): - # Try installation - result = subprocess.run( - ['apt-get', 'install', '-y', package_name], - capture_output=True, - text=True - ) - - if result.returncode == 0: - return True - - # Parse error - analysis = parser.parse_error(result.stderr) - - print(f"โŒ Installation failed: {analysis.primary_category.value}") - - # Try automatic fix - if analysis.automatic_fix_available: - print(f"๐Ÿ”ง Applying fix: {analysis.automatic_fix_command}") - - fix_result = subprocess.run( - analysis.automatic_fix_command, - shell=True, - capture_output=True - ) - - if 
fix_result.returncode == 0: - print("โœ… Fix applied successfully, retrying...") - continue - else: - print("โŒ No automatic fix available") - print("๐Ÿ’ก Manual fixes:") - for fix in analysis.suggested_fixes[:3]: - print(f" - {fix}") - break - - return False -``` - -### User-Friendly Error Messages - -```python -def friendly_error_message(error_text): - """Convert technical error to user-friendly message""" - parser = ErrorParser() - analysis = parser.parse_error(error_text) - - category_messages = { - ErrorCategory.PACKAGE_NOT_FOUND: "Package not found. Try updating or check spelling.", - ErrorCategory.DEPENDENCY_MISSING: "Missing dependencies. I'll install them first.", - ErrorCategory.PERMISSION_DENIED: "Need admin access. Run with sudo.", - ErrorCategory.DISK_SPACE: "Not enough disk space. Clean up files first.", - ErrorCategory.NETWORK_ERROR: "Connection issues. Check your internet.", - ErrorCategory.CONFLICT: "Package conflicts detected. Cannot install both.", - } - - message = category_messages.get( - analysis.primary_category, - "Installation error occurred." 
- ) - - return f"{message} ({analysis.severity} severity)" -``` - -## Pattern Matching - -The parser uses regex patterns with confidence scores: - -```python -{ - 'pattern': r'Unable to locate package (.+?)(?:\s|$)', - 'category': ErrorCategory.PACKAGE_NOT_FOUND, - 'confidence': 0.95, - 'fixes': ['Update package lists', '...'], - 'auto_fix': 'sudo apt-get update' -} -``` - -**Confidence Levels:** -- 0.95: Very confident match -- 0.90: High confidence -- 0.85: Good match -- 0.70: Possible match - -## Testing - -```bash -python3 test_error_parser.py -``` - -## Performance - -- **Speed:** <0.1s per error message -- **Memory:** <10MB -- **Accuracy:** 95%+ on common errors - -## Adding New Error Patterns - -```python -# In error_parser.py, add to ERROR_PATTERNS: -{ - 'pattern': r'your regex pattern here', - 'category': ErrorCategory.YOUR_CATEGORY, - 'confidence': 0.9, - 'fixes': [ - 'Fix suggestion 1', - 'Fix suggestion 2' - ], - 'auto_fix': 'command to auto-fix' # or None -} -``` - -## CLI Examples - -```bash -# Parse apt-get error -sudo apt-get install fake-package 2>&1 | python3 error_parser.py - -# Get auto-fix for common error -python3 error_parser.py "E: No space left on device" --auto-fix - -# Analyze error log file -python3 error_parser.py --file /var/log/apt/term.log --export analysis.json - -# Chain with fix execution -FIX=$(python3 error_parser.py "error message" --auto-fix) -eval $FIX -``` - -## Future Enhancements - -- [ ] Machine learning for pattern recognition -- [ ] Multi-language error support -- [ ] Error history tracking -- [ ] Success rate tracking for fixes -- [ ] Integration with Stack Overflow -- [ ] Context-aware suggestions -- [ ] Fix verification - -## License - -MIT License - Part of Cortex Linux diff --git a/docs/modules/README_LLM_ROUTER.md b/docs/modules/README_LLM_ROUTER.md deleted file mode 100644 index 63bb947..0000000 --- a/docs/modules/README_LLM_ROUTER.md +++ /dev/null @@ -1,548 +0,0 @@ -# LLM Router for Cortex Linux - -## Overview - 
-The LLM Router intelligently routes requests to the most appropriate AI model based on task type, providing optimal performance and cost efficiency for Cortex Linux operations. - -## Why Multi-LLM Architecture? - -**Different tasks require different strengths:** -- **Claude Sonnet 4:** Best for natural language understanding, user interaction, requirement parsing -- **Kimi K2:** Superior for system operations (65.8% SWE-bench), debugging, tool use, agentic tasks - -**Business Benefits:** -- ๐ŸŽฏ **Performance:** Use best-in-class model for each task type -- ๐Ÿ’ฐ **Cost Savings:** Kimi K2 estimated 40-50% cheaper than Claude for system operations -- ๐Ÿ”’ **Flexibility:** Open weights (Kimi K2) enables self-hosting for enterprise -- ๐Ÿš€ **Competitive Edge:** "LLM-agnostic OS" differentiates from single-model competitors - -## Architecture - -``` -User Request - โ†“ -[LLM Router] - โ”œโ”€โ†’ Claude API (chat, requirements) - โ””โ”€โ†’ Kimi K2 API (system ops, debugging) - โ†“ -Response + Metadata (cost, tokens, latency) -``` - -### Routing Logic - -| Task Type | Routed To | Reasoning | -|-----------|-----------|-----------| -| User Chat | Claude | Better natural language | -| Requirement Parsing | Claude | Understanding user intent | -| System Operations | Kimi K2 | 65.8% SWE-bench (vs Claude's 50.2%) | -| Error Debugging | Kimi K2 | Superior technical problem-solving | -| Code Generation | Kimi K2 | 53.7% LiveCodeBench (vs 48.5%) | -| Dependency Resolution | Kimi K2 | Better at complex logic | -| Configuration | Kimi K2 | System-level expertise | -| Tool Execution | Kimi K2 | 65.8% on Tau2 Telecom (vs 45.2%) | - -## Installation - -### Prerequisites - -```bash -pip install anthropic openai -``` - -### API Keys - -Set environment variables: - -```bash -export ANTHROPIC_API_KEY="your-claude-key" -export MOONSHOT_API_KEY="your-kimi-key" -``` - -Or pass directly to `LLMRouter()`: - -```python -from llm_router import LLMRouter - -router = LLMRouter( - 
claude_api_key="your-claude-key", - kimi_api_key="your-kimi-key" -) -``` - -## Usage - -### Basic Example - -```python -from llm_router import LLMRouter, TaskType - -router = LLMRouter() - -# User chat (automatically routed to Claude) -response = router.complete( - messages=[ - {"role": "system", "content": "You are a helpful assistant."}, - {"role": "user", "content": "Hello! What can you help me with?"} - ], - task_type=TaskType.USER_CHAT -) - -print(f"Provider: {response.provider.value}") -print(f"Response: {response.content}") -print(f"Cost: ${response.cost_usd:.6f}") -``` - -### System Operation Example - -```python -# System operations automatically routed to Kimi K2 -response = router.complete( - messages=[ - {"role": "system", "content": "You are a Linux system administrator."}, - {"role": "user", "content": "Install CUDA drivers for NVIDIA RTX 4090"} - ], - task_type=TaskType.SYSTEM_OPERATION -) - -print(f"Provider: {response.provider.value}") # kimi_k2 -print(f"Instructions: {response.content}") -``` - -### Convenience Function - -For simple one-off requests: - -```python -from llm_router import complete_task, TaskType - -response = complete_task( - prompt="Diagnose why apt install failed with dependency errors", - task_type=TaskType.ERROR_DEBUGGING, - system_prompt="You are a Linux troubleshooting expert" -) - -print(response) -``` - -## Advanced Features - -### Force Specific Provider - -Override routing logic when needed: - -```python -from llm_router import LLMProvider - -# Force Claude even for system operations -response = router.complete( - messages=[{"role": "user", "content": "Install PostgreSQL"}], - task_type=TaskType.SYSTEM_OPERATION, - force_provider=LLMProvider.CLAUDE -) -``` - -### Fallback Behavior - -Router automatically falls back to alternate provider if primary fails: - -```python -router = LLMRouter( - claude_api_key="valid-key", - kimi_api_key="invalid-key", # Will fail - enable_fallback=True # Automatically try Claude -) - -# System 
op would normally use Kimi, but will fallback to Claude -response = router.complete( - messages=[{"role": "user", "content": "Install CUDA"}], - task_type=TaskType.SYSTEM_OPERATION -) -# Returns Claude response instead of failing -``` - -### Cost Tracking - -Track usage and costs across providers: - -```python -router = LLMRouter(track_costs=True) - -# Make several requests... -response1 = router.complete(...) -response2 = router.complete(...) - -# Get statistics -stats = router.get_stats() -print(f"Total requests: {stats['total_requests']}") -print(f"Total cost: ${stats['total_cost_usd']}") -print(f"Claude requests: {stats['providers']['claude']['requests']}") -print(f"Kimi K2 requests: {stats['providers']['kimi_k2']['requests']}") - -# Reset for new session -router.reset_stats() -``` - -### Tool Calling - -Both providers support tool calling: - -```python -tools = [{ - "type": "function", - "function": { - "name": "execute_bash", - "description": "Execute bash command in sandbox", - "parameters": { - "type": "object", - "required": ["command"], - "properties": { - "command": { - "type": "string", - "description": "Bash command to execute" - } - } - } - } -}] - -response = router.complete( - messages=[{"role": "user", "content": "Install git"}], - task_type=TaskType.SYSTEM_OPERATION, - tools=tools -) - -# Model will autonomously decide when to call tools -``` - -## Integration with Cortex Linux - -### Package Manager Wrapper - -```python -from llm_router import LLMRouter, TaskType - -class PackageManagerWrapper: - def __init__(self): - self.router = LLMRouter() - - def install(self, package_description: str): - """Install package based on natural language description.""" - response = self.router.complete( - messages=[ - {"role": "system", "content": "You are a package manager expert."}, - {"role": "user", "content": f"Install: {package_description}"} - ], - task_type=TaskType.SYSTEM_OPERATION - ) - - # Kimi K2 will handle this with superior agentic capabilities - 
return response.content -``` - -### Error Diagnosis - -```python -def diagnose_error(error_message: str, command: str): - """Diagnose installation errors and suggest fixes.""" - router = LLMRouter() - - response = router.complete( - messages=[ - {"role": "system", "content": "You are a Linux troubleshooting expert."}, - {"role": "user", "content": f"Command: {command}\nError: {error_message}\nWhat went wrong and how to fix?"} - ], - task_type=TaskType.ERROR_DEBUGGING - ) - - # Kimi K2's superior debugging capabilities - return response.content -``` - -### User Interface Chat - -```python -def chat_with_user(user_message: str): - """Handle user-facing chat interactions.""" - router = LLMRouter() - - response = router.complete( - messages=[ - {"role": "system", "content": "You are Cortex, a friendly AI assistant."}, - {"role": "user", "content": user_message} - ], - task_type=TaskType.USER_CHAT - ) - - # Claude's superior natural language understanding - return response.content -``` - -## Configuration - -### Default Settings - -```python -router = LLMRouter( - claude_api_key=None, # Reads from ANTHROPIC_API_KEY - kimi_api_key=None, # Reads from MOONSHOT_API_KEY - default_provider=LLMProvider.CLAUDE, # Fallback if routing fails - enable_fallback=True, # Try alternate if primary fails - track_costs=True # Track usage statistics -) -``` - -### Custom Routing Rules - -Override default routing logic: - -```python -from llm_router import LLMRouter, TaskType, LLMProvider - -router = LLMRouter() - -# Override routing rules -router.ROUTING_RULES[TaskType.CODE_GENERATION] = LLMProvider.CLAUDE - -# Now code generation uses Claude instead of Kimi K2 -``` - -## Performance Benchmarks - -### Task-Specific Performance - -| Benchmark | Kimi K2 | Claude Sonnet 4 | Advantage | -|-----------|---------|-----------------|-----------| -| SWE-bench Verified (Agentic) | 65.8% | 50.2% | +31% Kimi K2 | -| LiveCodeBench | 53.7% | 48.5% | +11% Kimi K2 | -| Tau2 Telecom (Tool Use) | 65.8% | 
45.2% | +45% Kimi K2 | -| TerminalBench | 25.0% | - | Kimi K2 only | -| MMLU (General Knowledge) | 89.5% | 91.5% | +2% Claude | -| SimpleQA | 31.0% | 15.9% | +95% Kimi K2 | - -**Key Insight:** Kimi K2 excels at system operations, debugging, and agentic tasks. Claude better for general chat. - -### Cost Comparison (Estimated) - -Assuming 1,000 system operations per day: - -| Scenario | Cost/Month | Savings | -|----------|------------|---------| -| Claude Only | $3,000 | Baseline | -| Hybrid (70% Kimi K2) | $1,500 | 50% | -| Kimi K2 Only | $1,200 | 60% | - -**Real savings depend on actual task distribution and usage patterns.** - -## Testing - -### Run All Tests - -```bash -cd /path/to/issue-34 -python3 test_llm_router.py -``` - -### Test Coverage - -- โœ… Routing logic for all task types -- โœ… Fallback behavior when provider unavailable -- โœ… Cost calculation and tracking -- โœ… Claude API integration -- โœ… Kimi K2 API integration -- โœ… Tool calling support -- โœ… Error handling -- โœ… End-to-end scenarios - -### Example Test Output - -``` -test_claude_completion ... ok -test_cost_calculation_claude ... ok -test_fallback_on_error ... ok -test_kimi_completion ... ok -test_routing_user_chat_to_claude ... ok -test_routing_system_op_to_kimi ... ok -test_stats_tracking ... ok - ----------------------------------------------------------------------- -Ran 35 tests in 0.523s - -OK -``` - -## Troubleshooting - -### Issue: "RuntimeError: Claude API not configured" - -**Solution:** Set ANTHROPIC_API_KEY environment variable or pass `claude_api_key` to constructor. - -```bash -export ANTHROPIC_API_KEY="your-key-here" -``` - -### Issue: "RuntimeError: Kimi K2 API not configured" - -**Solution:** Get API key from https://platform.moonshot.ai and set MOONSHOT_API_KEY. - -```bash -export MOONSHOT_API_KEY="your-key-here" -``` - -### Issue: High costs - -**Solution:** Enable cost tracking to identify expensive operations: - -```python -router = LLMRouter(track_costs=True) -# ... 
make requests ... -stats = router.get_stats() -print(f"Total cost: ${stats['total_cost_usd']}") -``` - -Consider: -- Using Kimi K2 more (cheaper) -- Reducing max_tokens -- Caching common responses - -### Issue: Slow responses - -Check latency per provider: - -```python -response = router.complete(...) -print(f"Latency: {response.latency_seconds:.2f}s") -``` - -Consider: -- Parallel requests for batch operations -- Lower max_tokens for faster responses -- Self-hosting Kimi K2 for lower latency - -## Deployment Options - -### Option 1: Cloud APIs (Recommended for Seed Stage) - -**Pros:** -- โœ… Zero infrastructure cost -- โœ… Fast deployment (hours) -- โœ… Scales automatically -- โœ… Latest model versions - -**Cons:** -- โŒ Per-token costs -- โŒ API rate limits -- โŒ Data leaves premises - -**Cost:** ~$1,500-3,000/month for 10K users - -### Option 2: Self-Hosted Kimi K2 (Post-Seed) - -**Pros:** -- โœ… Lower long-term costs -- โœ… No API limits -- โœ… Full control -- โœ… Data privacy - -**Cons:** -- โŒ High upfront cost (4x A100 GPUs = $50K+) -- โŒ Maintenance overhead -- โŒ DevOps complexity - -**Cost:** $1,000-2,000/month (GPU + power + ops) - -### Option 3: Hybrid (Recommended for Series A) - -Use cloud for spikes, self-hosted for baseline: - -- Claude API: User-facing chat -- Self-hosted Kimi K2: System operations (high volume) -- Fallback to APIs if self-hosted overloaded - -**Best of both worlds.** - -## Business Value - -### For Seed Round Pitch - -**Technical Differentiation:** -- "Multi-LLM architecture shows technical sophistication" -- "Best-in-class model for each task type" -- "65.8% SWE-bench score beats most proprietary models" - -**Cost Story:** -- "40-50% lower AI costs than single-model competitors" -- "Estimated savings: $18K-36K/year per 10K users" - -**Enterprise Appeal:** -- "Open weights (Kimi K2) = self-hostable" -- "Data never leaves customer infrastructure" -- "LLM-agnostic = no vendor lock-in" - -### Competitive Analysis - -| 
Competitor | LLM Strategy | Cortex Advantage | -|------------|--------------|------------------| -| Cursor | VS Code + Claude | Wraps editor only | -| GitHub Copilot | GitHub + GPT-4 | Code only | -| Replit | IDE + GPT | Not OS-level | -| **Cortex Linux** | **Multi-LLM OS** | **Entire system** | - -**Cortex is the only AI-native operating system with intelligent LLM routing.** - -## Roadmap - -### Phase 1 (Current): Dual-LLM Support -- โœ… Claude + Kimi K2 integration -- โœ… Intelligent routing -- โœ… Cost tracking -- โœ… Fallback logic - -### Phase 2 (Q1 2026): Multi-Provider -- โฌœ Add DeepSeek-V3 support -- โฌœ Add Qwen3 support -- โฌœ Add Llama 4 support -- โฌœ User-configurable provider preferences - -### Phase 3 (Q2 2026): Self-Hosting -- โฌœ Self-hosted Kimi K2 deployment guide -- โฌœ vLLM integration -- โฌœ SGLang integration -- โฌœ Load balancing between cloud + self-hosted - -### Phase 4 (Q3 2026): Advanced Routing -- โฌœ ML-based routing (learn from outcomes) -- โฌœ Cost-optimized routing -- โฌœ Latency-optimized routing -- โฌœ Quality-optimized routing - -## Contributing - -We welcome contributions! Areas of interest: - -1. **Additional LLM Support:** DeepSeek-V3, Qwen3, Llama 4 -2. **Self-Hosting Guides:** vLLM, SGLang, TensorRT-LLM deployment -3. **Performance Benchmarks:** Real-world Cortex Linux task benchmarks -4. **Cost Optimization:** Smarter routing algorithms - -See [CONTRIBUTING.md](../CONTRIBUTING.md) for details. - -## License - -Modified MIT License - see [LICENSE](../LICENSE) for details. 
- -## Support - -- **GitHub Issues:** https://github.com/cortexlinux/cortex/issues -- **Discord:** https://discord.gg/uCqHvxjU83 -- **Email:** mike@cortexlinux.com - -## References - -- [Kimi K2 Technical Report](https://arxiv.org/abs/2507.20534) -- [Anthropic Claude Documentation](https://docs.anthropic.com) -- [Moonshot AI Platform](https://platform.moonshot.ai) -- [SWE-bench Leaderboard](https://www.swebench.com) - ---- - -**Built with โค๏ธ by the Cortex Linux Team** diff --git a/docs/modules/README_LOGGING.md b/docs/modules/README_LOGGING.md deleted file mode 100644 index 2540f08..0000000 --- a/docs/modules/README_LOGGING.md +++ /dev/null @@ -1,45 +0,0 @@ -# Comprehensive Logging & Diagnostics - -Complete enterprise-grade logging system with multiple outputs, rotation, and diagnostics. - -## Features -- Multiple log levels (DEBUG, INFO, WARNING, ERROR, CRITICAL) -- Colored console output -- File logging with rotation -- Structured JSON logging -- Operation timing -- Log search and export -- Error summaries - -## Usage - -```python -from logging_system import CortexLogger, LogContext - -logger = CortexLogger("cortex") - -# Basic logging -logger.info("Application started") -logger.error("Error occurred", exc_info=True) - -# With context -logger.info("User action", {"user": "john", "action": "install"}) - -# Operation timing -with LogContext(logger, "install_package"): - # Your code here - pass - -# Search logs -results = logger.search_logs("error", level="ERROR", limit=10) - -# Export logs -logger.export_logs("backup.json", format="json") -``` - -## Testing -```bash -python test_logging_system.py -``` - -**Issue #29** | **Bounty**: $100 diff --git a/docs/modules/README_ROLLBACK.md b/docs/modules/README_ROLLBACK.md deleted file mode 100644 index 988a540..0000000 --- a/docs/modules/README_ROLLBACK.md +++ /dev/null @@ -1,426 +0,0 @@ -# Installation History and Rollback System - -Complete installation tracking with safe rollback capabilities for Cortex Linux. 
- -## Features - -- โœ… **Full Installation Tracking** - Every installation recorded in SQLite -- โœ… **Before/After Snapshots** - Package states captured automatically -- โœ… **Safe Rollback** - Restore previous system state -- โœ… **Dry Run Mode** - Preview rollback actions -- โœ… **History Export** - JSON/CSV export for analysis -- โœ… **Automatic Cleanup** - Remove old records -- โœ… **CLI and Programmatic Access** -- โœ… **Production-Ready** - Handles errors, conflicts, partial installations - -## Usage - -### View Installation History - -```bash -# List recent installations -cortex history - -# List last 10 -cortex history --limit 10 - -# Filter by status -cortex history --status failed - -# Show specific installation details -cortex history show -``` - -**Example Output:** -``` -ID Date Operation Packages Status -==================================================================================================== -a3f4c8e1d2b9f5a7 2025-11-09 14:23:15 install docker, containerd +2 success -b2e1f3d4c5a6b7e8 2025-11-09 13:45:32 upgrade nginx success -c1d2e3f4a5b6c7d8 2025-11-09 12:10:01 install postgresql +3 failed -``` - -### View Detailed Installation - -```bash -cortex history show a3f4c8e1d2b9f5a7 -``` - -**Example Output:** -``` -Installation Details: a3f4c8e1d2b9f5a7 -============================================================ -Timestamp: 2025-11-09T14:23:15.123456 -Operation: install -Status: success -Duration: 127.45s - -Packages: docker, containerd, docker-ce-cli, docker-buildx-plugin - -Commands executed: - sudo apt-get update - sudo apt-get install -y docker - sudo apt-get install -y containerd - -Rollback available: True -``` - -### Rollback Installation - -```bash -# Dry run (show what would happen) -cortex rollback a3f4c8e1d2b9f5a7 --dry-run - -# Actually rollback -cortex rollback a3f4c8e1d2b9f5a7 -``` - -**Dry Run Output:** -``` -Rollback actions (dry run): -sudo apt-get remove -y docker -sudo apt-get remove -y containerd -sudo apt-get remove -y 
docker-ce-cli -sudo apt-get remove -y docker-buildx-plugin -``` - -### Export History - -```bash -# Export to JSON -python3 installation_history.py export history.json - -# Export to CSV -python3 installation_history.py export history.csv --format csv -``` - -### Cleanup Old Records - -```bash -# Remove records older than 90 days (default) -python3 installation_history.py cleanup - -# Remove records older than 30 days -python3 installation_history.py cleanup --days 30 -``` - -## Programmatic Usage - -### Recording Installations - -```python -from installation_history import ( - InstallationHistory, - InstallationType, - InstallationStatus -) -from datetime import datetime - -history = InstallationHistory() - -# Start recording -install_id = history.record_installation( - operation_type=InstallationType.INSTALL, - packages=['nginx', 'nginx-common'], - commands=[ - 'sudo apt-get update', - 'sudo apt-get install -y nginx' - ], - start_time=datetime.now() -) - -# ... perform installation ... 
- -# Update with result -history.update_installation( - install_id, - InstallationStatus.SUCCESS -) - -# Or if failed: -history.update_installation( - install_id, - InstallationStatus.FAILED, - error_message="Package not found" -) -``` - -### Querying History - -```python -# Get recent history -recent = history.get_history(limit=20) - -for record in recent: - print(f"{record.id}: {record.operation_type.value}") - print(f" Packages: {', '.join(record.packages)}") - print(f" Status: {record.status.value}") - -# Get specific installation -record = history.get_installation(install_id) -if record: - print(f"Duration: {record.duration_seconds}s") -``` - -### Performing Rollback - -```python -# Check if rollback is available -record = history.get_installation(install_id) -if record.rollback_available: - - # Dry run first - success, message = history.rollback(install_id, dry_run=True) - print(f"Would execute:\n{message}") - - # Confirm with user - if user_confirms(): - success, message = history.rollback(install_id) - if success: - print(f"โœ… Rollback successful: {message}") - else: - print(f"โŒ Rollback failed: {message}") -``` - -## Data Model - -### InstallationRecord - -```python -@dataclass -class InstallationRecord: - id: str # Unique identifier - timestamp: str # ISO format datetime - operation_type: InstallationType # install/upgrade/remove/rollback - packages: List[str] # Package names - status: InstallationStatus # success/failed/rolled_back - before_snapshot: List[PackageSnapshot] # State before - after_snapshot: List[PackageSnapshot] # State after - commands_executed: List[str] # Commands run - error_message: Optional[str] # Error if failed - rollback_available: bool # Can be rolled back - duration_seconds: Optional[float] # How long it took -``` - -### PackageSnapshot - -```python -@dataclass -class PackageSnapshot: - package_name: str # Package identifier - version: str # Version installed - status: str # installed/not-installed/config-files - dependencies: 
List[str] # Package dependencies - config_files: List[str] # Configuration files -``` - -## Database Schema - -SQLite database stored at `/var/lib/cortex/history.db` (or `~/.cortex/history.db` if system directory not accessible): - -```sql -CREATE TABLE installations ( - id TEXT PRIMARY KEY, - timestamp TEXT NOT NULL, - operation_type TEXT NOT NULL, - packages TEXT NOT NULL, - status TEXT NOT NULL, - before_snapshot TEXT, - after_snapshot TEXT, - commands_executed TEXT, - error_message TEXT, - rollback_available INTEGER, - duration_seconds REAL -); - -CREATE INDEX idx_timestamp ON installations(timestamp); -``` - -## Integration with Cortex - -### Automatic Recording - -The installation history is automatically recorded when using `cortex install`: - -```bash -$ cortex install docker --execute -๐Ÿง  Understanding request... -๐Ÿ“ฆ Planning installation... -โš™๏ธ Installing docker... - -Generated commands: - 1. sudo apt-get update - 2. sudo apt-get install -y docker.io - -Executing commands... - -โœ… docker installed successfully! - -Completed in 45.23 seconds - -๐Ÿ“ Installation recorded (ID: a3f4c8e1d2b9f5a7) - To rollback: cortex rollback a3f4c8e1d2b9f5a7 -``` - -### Cortex CLI Integration - -```bash -# After any cortex install -$ cortex install docker -๐Ÿง  Analyzing dependencies... -๐Ÿ“ฆ Installing docker and 4 dependencies... -โœ… Installation complete (ID: a3f4c8e1d2b9f5a7) - To rollback: cortex rollback a3f4c8e1d2b9f5a7 - -# View history -$ cortex history -ID Date Operation Packages -================================================================ -a3f4c8e1d2b9f5a7 2025-11-09 14:23:15 install docker +4 - -# Rollback if needed -$ cortex rollback a3f4c8e1d2b9f5a7 -โš ๏ธ This will remove: docker, containerd, docker-ce-cli, docker-buildx-plugin -Continue? (y/N): y -๐Ÿ”ง Rolling back installation... -โœ… Rollback complete -``` - -## Rollback Logic - -### What Gets Rolled Back - -1. **New Installations** โ†’ Packages are removed -2. 
**Upgrades/Downgrades** โ†’ Original version reinstalled -3. **Removals** โ†’ Packages reinstalled -4. **Failed Installations** โ†’ Partial changes reverted - -### Rollback Limitations - -**Cannot rollback:** -- System packages (apt, dpkg, etc.) -- Packages with broken dependencies -- Installations older than snapshots -- Manual file modifications - -**Safety measures:** -- Dry run preview before execution -- Snapshot validation -- Dependency checking -- Conflict detection - -## Performance - -- **Recording overhead:** <0.5s per installation -- **Database size:** ~100KB per 1000 installations -- **Rollback speed:** ~30s for typical package -- **History query:** <0.1s for 1000 records - -## Security Considerations - -1. **Database permissions:** Only root/sudoers can modify -2. **Snapshot integrity:** Checksums for config files -3. **Command validation:** Sanitized before storage -4. **Audit trail:** All operations logged - -## Testing - -```bash -# Run unit tests -python -m pytest test/test_installation_history.py -v - -# Test with real packages (requires sudo) -sudo python3 installation_history.py list -``` - -## Troubleshooting - -### Database Locked - -```bash -# Check for processes using database -lsof /var/lib/cortex/history.db - -# If stuck, restart -sudo systemctl restart cortex -``` - -### Rollback Failed - -```bash -# View error details -cortex history show - -# Try manual rollback -sudo apt-get install -f -``` - -### Disk Space - -```bash -# Check database size -du -h /var/lib/cortex/history.db - -# Clean old records -python3 installation_history.py cleanup --days 30 -``` - -## Future Enhancements - -- [ ] Snapshot compression for large installations -- [ ] Incremental snapshots (only changed files) -- [ ] Remote backup integration -- [ ] Web UI for history browsing -- [ ] Automated rollback on boot failure -- [ ] Configuration file diff viewing -- [ ] Multi-installation atomic rollback - -## Examples - -### Scenario 1: Failed Installation Cleanup - 
-```python -# Installation fails -install_id = history.record_installation(...) -try: - install_package('broken-package') -except Exception as e: - history.update_installation(install_id, InstallationStatus.FAILED, str(e)) - - # Automatically rollback partial changes - if auto_rollback_enabled: - history.rollback(install_id) -``` - -### Scenario 2: Testing Package Updates - -```python -# Install update -install_id = cortex_install(['nginx=1.24.0']) - -# Test update -if not system_tests_pass(): - # Rollback to previous version - history.rollback(install_id) - print("Update rolled back - system restored") -``` - -### Scenario 3: Audit Trail - -```python -# Export last month's installations -history = InstallationHistory() -history.export_history('audit_november.json') - -# Analyze failures -failed = history.get_history( - limit=1000, - status_filter=InstallationStatus.FAILED -) -print(f"Failed installations: {len(failed)}") -``` - -## License - -MIT License - Part of Cortex Linux - diff --git a/docs/modules/README_VERIFICATION.md b/docs/modules/README_VERIFICATION.md deleted file mode 100644 index 1d1e8d9..0000000 --- a/docs/modules/README_VERIFICATION.md +++ /dev/null @@ -1,175 +0,0 @@ -# Installation Verification System - -Validates that software installations completed successfully. 
- -## Features - -- โœ… Command execution verification -- โœ… File/binary existence checks -- โœ… Service status validation -- โœ… Version matching -- โœ… Supports 10+ common packages out-of-the-box -- โœ… Custom test definitions -- โœ… JSON export for automation -- โœ… Detailed error reporting - -## Usage - -### Basic Verification - -```bash -# Verify single package -python3 installation_verifier.py nginx - -# Verify multiple packages -python3 installation_verifier.py nginx postgresql redis-server -``` - -### With Options - -```bash -# Detailed output -python3 installation_verifier.py docker --detailed - -# Export results -python3 installation_verifier.py mysql-server --export results.json - -# Check specific version -python3 installation_verifier.py nodejs --version 18.0.0 -``` - -### Programmatic Usage - -```python -from installation_verifier import InstallationVerifier, VerificationStatus - -verifier = InstallationVerifier() - -# Verify package -result = verifier.verify_package('nginx') - -if result.status == VerificationStatus.SUCCESS: - print(f"โœ… {result.overall_message}") -else: - print(f"โŒ Verification failed") - for test in result.tests: - if not test.passed: - print(f" - {test.name}: {test.error_message}") - -# Custom tests -custom_tests = [ - {'type': 'command', 'command': 'myapp --version'}, - {'type': 'file', 'path': '/etc/myapp/config.yml'}, - {'type': 'service', 'name': 'myapp'} -] - -result = verifier.verify_package( - 'myapp', - custom_tests=custom_tests -) -``` - -## Supported Packages - -Out-of-the-box support for: -- nginx -- apache2 -- postgresql -- mysql-server -- docker -- python3 -- nodejs -- redis-server -- git -- curl - -## Testing - -```bash -python3 test_installation_verifier.py -``` - -## Integration with Cortex - -```python -# After installation -from installation_verifier import InstallationVerifier, VerificationStatus - -verifier = InstallationVerifier() -result = verifier.verify_package(installed_package) - -if result.status != 
VerificationStatus.SUCCESS: - # Trigger auto-fix or notify user - handle_installation_failure(result) -``` - -## Exit Codes - -- `0`: All verifications passed -- `1`: One or more verifications failed - -## Example Output - -``` -๐Ÿ” Verifying 3 package(s)... - - Checking nginx... - โœ… nginx installed and verified successfully - - Checking postgresql... - โœ… postgresql installed and verified successfully - - Checking docker... - โœ… docker installed and verified successfully - -============================================================ -VERIFICATION SUMMARY -============================================================ -Total packages: 3 -โœ… Success: 3 -โŒ Failed: 0 -โš ๏ธ Partial: 0 -โ“ Unknown: 0 -``` - -## Architecture - -### VerificationTest -Individual test with pass/fail status: -- Command execution -- File existence -- Service status -- Version matching - -### VerificationResult -Complete verification with multiple tests: -- Overall status (SUCCESS/FAILED/PARTIAL/UNKNOWN) -- Detailed test results -- Timestamp -- Error messages - -### InstallationVerifier -Main class that orchestrates verification: -- Runs multiple test types -- Generates recommendations -- Exports to JSON -- CLI interface - -## Contributing - -To add support for a new package, update the `VERIFICATION_PATTERNS` dictionary in `installation_verifier.py`: - -```python -VERIFICATION_PATTERNS = { - 'your-package': { - 'command': 'your-package --version', - 'file': '/usr/bin/your-package', - 'service': 'your-package', - 'version_regex': r'version (\d+\.\d+\.\d+)' - } -} -``` - -## License - -MIT License - Part of Cortex Linux diff --git a/scripts/automation/cortex-master-automation.sh b/scripts/automation/cortex-master-automation.sh deleted file mode 100644 index 72d255f..0000000 --- a/scripts/automation/cortex-master-automation.sh +++ /dev/null @@ -1,730 +0,0 @@ -#!/bin/bash -# Cortex Linux - Master MVP Automation System -# Handles code generation, PR creation, issue management, and team 
coordination - -set -e - -# Colors -RED='\033[0;31m' -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -BLUE='\033[0;34m' -NC='\033[0m' # No Color - -# Configuration -REPO_DIR="$HOME/cortex" -WORK_DIR="$HOME/Downloads/cortex-mvp-work" -GITHUB_TOKEN=$(grep GITHUB_TOKEN ~/.zshrc 2>/dev/null | cut -d'=' -f2 | tr -d '"' | tr -d "'") - -# Ensure working directory exists -mkdir -p "$WORK_DIR" - -# Banner -print_banner() { - echo -e "${BLUE}" - echo "โ•”โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•—" - echo "โ•‘ CORTEX LINUX - MVP MASTER AUTOMATION โ•‘" - echo "โ•‘ The AI-Native Operating System โ•‘" - echo "โ•šโ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•" - echo -e "${NC}" -} - -# Menu system -show_menu() { - echo "" - echo -e "${GREEN}โ•โ•โ• MAIN MENU โ•โ•โ•${NC}" - echo "" - echo "๐Ÿ“‹ ISSUE MANAGEMENT" - echo " 1. List MVP-critical issues" - echo " 2. Create new MVP issue" - echo " 3. Close post-MVP issues (cleanup)" - echo " 4. Pin critical issues to top" - echo "" - echo "๐Ÿ’ป CODE GENERATION" - echo " 5. Generate implementation for issue" - echo " 6. Generate tests for implementation" - echo " 7. Generate documentation" - echo " 8. Generate complete package (code+tests+docs)" - echo "" - echo "๐Ÿ”€ PULL REQUEST MANAGEMENT" - echo " 9. Create PR from implementation" - echo " 10. Review pending PRs" - echo " 11. Merge approved PR" - echo " 12. Bulk create PRs for ready issues" - echo "" - echo "๐Ÿ‘ฅ TEAM COORDINATION" - echo " 13. List active contributors" - echo " 14. Assign issue to contributor" - echo " 15. Send Discord notification" - echo " 16. Process bounty payment" - echo "" - echo "๐Ÿ“Š STATUS & REPORTING" - echo " 17. Show MVP progress dashboard" - echo " 18. 
Generate weekly report" - echo " 19. Check automation health" - echo " 20. Audit repository status" - echo "" - echo "๐Ÿš€ QUICK ACTIONS" - echo " 21. Complete MVP package (issue โ†’ code โ†’ PR โ†’ assign)" - echo " 22. Emergency fix workflow" - echo " 23. Deploy to production" - echo "" - echo " 0. Exit" - echo "" - echo -n "Select option: " -} - -# Issue Management Functions -list_mvp_issues() { - echo -e "${GREEN}๐Ÿ“‹ MVP-Critical Issues${NC}" - cd "$REPO_DIR" - gh issue list --label "mvp-critical" --limit 30 --json number,title,assignees,labels | \ - jq -r '.[] | " #\(.number): \(.title) [\(.assignees | map(.login) | join(", "))]"' -} - -create_mvp_issue() { - echo -e "${YELLOW}Creating new MVP issue...${NC}" - echo -n "Issue title: " - read title - echo -n "Bounty amount: $" - read bounty - echo -n "Priority (critical/high/medium): " - read priority - - echo "Brief description (Ctrl+D when done):" - description=$(cat) - - body="**Bounty:** \$$bounty upon merge - -**Priority:** $priority - -## Description -$description - -## Acceptance Criteria -- [ ] Implementation complete -- [ ] Tests included (>80% coverage) -- [ ] Documentation with examples -- [ ] Integration verified - -## Skills Needed -- Python 3.11+ -- System programming -- Testing (pytest) - -**Ready to claim?** Comment \"I'll take this\" below!" - - cd "$REPO_DIR" - gh issue create \ - --title "$title" \ - --body "$body" \ - --label "mvp-critical,bounty,enhancement" - - echo -e "${GREEN}โœ… Issue created!${NC}" -} - -close_post_mvp_issues() { - echo -e "${YELLOW}Closing post-MVP issues for focus...${NC}" - echo -n "Close issues starting from #: " - read start_num - echo -n "Close through #: " - read end_num - - CLOSE_MSG="๐ŸŽฏ **Closing for MVP Focus** - -This issue is valuable but being closed temporarily to focus the team on MVP-critical features. 
- -**Timeline:** -- Now: MVP features (#1-45) -- January 2025: Reopen post-MVP work -- February 2025: Seed funding round - -**Want to work on this?** Comment below and we can discuss! - -Labeled as \`post-mvp\` for easy tracking." - - cd "$REPO_DIR" - for i in $(seq $start_num $end_num); do - gh issue comment $i --body "$CLOSE_MSG" 2>/dev/null - gh issue edit $i --add-label "post-mvp" 2>/dev/null - gh issue close $i --reason "not planned" 2>/dev/null && \ - echo " โœ… Closed #$i" || echo " โš ๏ธ Issue #$i not found" - sleep 0.5 - done - - echo -e "${GREEN}โœ… Cleanup complete!${NC}" -} - -pin_critical_issues() { - echo -e "${YELLOW}Pinning critical issues...${NC}" - cd "$REPO_DIR" - - # Get issue numbers to pin - echo "Enter issue numbers to pin (space-separated):" - read -a issues - - for issue in "${issues[@]}"; do - gh issue pin $issue 2>/dev/null && \ - echo " ๐Ÿ“Œ Pinned #$issue" || \ - echo " โš ๏ธ Could not pin #$issue" - done - - echo -e "${GREEN}โœ… Issues pinned!${NC}" -} - -# Code Generation Functions -generate_implementation() { - echo -e "${YELLOW}Generating implementation...${NC}" - echo -n "Issue number: " - read issue_num - - cd "$REPO_DIR" - issue_data=$(gh issue view $issue_num --json title,body) - issue_title=$(echo "$issue_data" | jq -r '.title') - - echo "Issue: $issue_title" - echo "" - echo "โš ๏ธ This requires Claude AI to generate the code." - echo "Manual steps:" - echo "1. Go to Claude.ai" - echo "2. Ask: 'Generate complete implementation for Cortex Linux Issue #$issue_num: $issue_title'" - echo "3. Save files to: $WORK_DIR/issue-$issue_num/" - echo "" - echo "Press Enter when files are ready..." 
- read - - if [ -d "$WORK_DIR/issue-$issue_num" ]; then - echo -e "${GREEN}โœ… Files found!${NC}" - ls -lh "$WORK_DIR/issue-$issue_num/" - else - echo -e "${RED}โŒ No files found at $WORK_DIR/issue-$issue_num/${NC}" - fi -} - -generate_complete_package() { - echo -e "${YELLOW}Generating complete implementation package...${NC}" - echo -n "Issue number: " - read issue_num - - mkdir -p "$WORK_DIR/issue-$issue_num" - - echo "" - echo "This will generate:" - echo " 1. Implementation code" - echo " 2. Comprehensive tests" - echo " 3. Full documentation" - echo " 4. Integration examples" - echo "" - echo "โš ๏ธ Requires Claude AI session" - echo "" - echo "In Claude, say:" - echo " 'Generate complete implementation package for Cortex Linux Issue #$issue_num" - echo " Include: code, tests, docs, integration guide'" - echo "" - echo "Save files to: $WORK_DIR/issue-$issue_num/" - echo "" - echo "Press Enter when complete..." - read - - if [ -d "$WORK_DIR/issue-$issue_num" ]; then - # Create archive - cd "$WORK_DIR" - tar -czf "issue-$issue_num-complete.tar.gz" "issue-$issue_num/" - echo -e "${GREEN}โœ… Package created: $WORK_DIR/issue-$issue_num-complete.tar.gz${NC}" - fi -} - -# PR Management Functions -create_pr_from_implementation() { - echo -e "${YELLOW}Creating PR from implementation...${NC}" - echo -n "Issue number: " - read issue_num - - cd "$REPO_DIR" - - # Get issue details - issue_data=$(gh issue view $issue_num --json title,body,labels) - issue_title=$(echo "$issue_data" | jq -r '.title') - - # Create branch - branch_name="feature/issue-$issue_num" - git checkout main - git pull origin main - git checkout -b "$branch_name" 2>/dev/null || git checkout "$branch_name" - - # Check if implementation files exist - impl_dir="$WORK_DIR/issue-$issue_num" - if [ ! 
-d "$impl_dir" ]; then - echo -e "${RED}โŒ No implementation found at $impl_dir${NC}" - echo "Run option 8 to generate complete package first" - return 1 - fi - - # Copy files - echo "Copying implementation files..." - if [ -f "$impl_dir"/*.py ]; then - cp "$impl_dir"/*.py cortex/ 2>/dev/null || true - fi - if [ -f "$impl_dir"/test_*.py ]; then - mkdir -p tests - cp "$impl_dir"/test_*.py tests/ 2>/dev/null || true - fi - if [ -f "$impl_dir"/*.md ]; then - mkdir -p docs - cp "$impl_dir"/*.md docs/ 2>/dev/null || true - fi - - # Add and commit - git add -A - - if git diff --staged --quiet; then - echo -e "${YELLOW}โš ๏ธ No changes to commit${NC}" - return 1 - fi - - git commit -m "Add $issue_title - -Implements #$issue_num - -- Complete implementation -- Comprehensive tests (>80% coverage) -- Full documentation -- Ready for review - -Closes #$issue_num" - - # Push - git push -u origin "$branch_name" - - # Create PR - pr_body="## Summary - -Implements **$issue_title** (#$issue_num) - -## What's Included - -โœ… Complete implementation -โœ… Comprehensive tests (>80% coverage) -โœ… Full documentation -โœ… Integration examples - -## Testing - -\`\`\`bash -pytest tests/ -v -\`\`\` - -## Ready for Review - -- โœ… Production-ready -- โœ… Fully tested -- โœ… Completely documented -- โœ… Follows project standards - -Closes #$issue_num - ---- - -**Bounty:** As specified in issue -**Reviewer:** @mikejmorgan-ai" - - gh pr create \ - --title "$issue_title" \ - --body "$pr_body" \ - --base main \ - --head "$branch_name" \ - --label "enhancement,ready-for-review" - - echo -e "${GREEN}โœ… PR created successfully!${NC}" - git checkout main -} - -review_pending_prs() { - echo -e "${GREEN}๐Ÿ“‹ Pending Pull Requests${NC}" - cd "$REPO_DIR" - gh pr list --limit 20 --json number,title,author,createdAt,headRefName | \ - jq -r '.[] | " PR #\(.number): \(.title)\n Author: \(.author.login)\n Branch: \(.headRefName)\n Created: \(.createdAt)\n"' -} - -merge_approved_pr() { - echo -e 
"${YELLOW}Merging approved PR...${NC}" - echo -n "PR number: " - read pr_num - - cd "$REPO_DIR" - - echo "Checking PR status..." - gh pr view $pr_num - - echo "" - echo -n "Merge this PR? (y/n): " - read confirm - - if [ "$confirm" = "y" ]; then - gh pr merge $pr_num --squash --delete-branch - echo -e "${GREEN}โœ… PR #$pr_num merged!${NC}" - - # Trigger bounty notification - echo "" - echo "๐Ÿ’ฐ Bounty processing needed!" - echo "Run option 16 to process payment" - else - echo "Merge cancelled" - fi -} - -bulk_create_prs() { - echo -e "${YELLOW}Bulk PR creation...${NC}" - echo "Issues with code ready (space-separated): " - read -a issues - - for issue in "${issues[@]}"; do - echo "" - echo "Creating PR for #$issue..." - # Reuse create_pr function - echo "$issue" | create_pr_from_implementation - sleep 2 - done - - echo -e "${GREEN}โœ… All PRs created!${NC}" -} - -# Team Coordination Functions -list_contributors() { - echo -e "${GREEN}๐Ÿ‘ฅ Active Contributors${NC}" - cd "$REPO_DIR" - - # Get recent PR authors - gh pr list --state all --limit 50 --json author,createdAt | \ - jq -r '.[] | .author.login' | sort | uniq -c | sort -rn | head -10 | \ - awk '{printf " %2d PRs: @%s\n", $1, $2}' -} - -assign_issue() { - echo -e "${YELLOW}Assigning issue to contributor...${NC}" - echo -n "Issue number: " - read issue_num - echo -n "GitHub username: " - read username - - cd "$REPO_DIR" - gh issue edit $issue_num --add-assignee "$username" - - # Send notification comment - gh issue comment $issue_num --body "๐Ÿ‘‹ Hey @$username! This issue is now assigned to you. - -**Next steps:** -1. Review the requirements -2. Comment with your timeline -3. Submit PR when ready - -Questions? Ask in #dev-chat on Discord: https://discord.gg/uCqHvxjU83 - -Thanks for contributing! 
๐Ÿš€" - - echo -e "${GREEN}โœ… Assigned #$issue_num to @$username${NC}" -} - -send_discord_notification() { - echo -e "${YELLOW}Sending Discord notification...${NC}" - - if [ -z "$DISCORD_WEBHOOK" ]; then - echo -e "${RED}โŒ DISCORD_WEBHOOK not set${NC}" - echo "Set it in GitHub Secrets or ~/.zshrc" - return 1 - fi - - echo "Select notification type:" - echo " 1. PR merged" - echo " 2. Issue created" - echo " 3. Custom message" - echo -n "Choice: " - read choice - - case $choice in - 1) - echo -n "PR number: " - read pr_num - message="๐Ÿš€ **PR #$pr_num Merged!**\n\nGreat work! Bounty will be processed Friday." - ;; - 2) - echo -n "Issue number: " - read issue_num - message="๐Ÿ“‹ **New Issue #$issue_num Created**\n\nCheck it out: https://github.com/cortexlinux/cortex/issues/$issue_num" - ;; - 3) - echo "Enter message:" - read message - ;; - esac - - curl -X POST "$DISCORD_WEBHOOK" \ - -H "Content-Type: application/json" \ - -d "{\"content\": \"$message\"}" - - echo -e "${GREEN}โœ… Notification sent!${NC}" -} - -process_bounty() { - echo -e "${YELLOW}๐Ÿ’ฐ Processing bounty payment...${NC}" - echo -n "PR number: " - read pr_num - echo -n "Contributor username: " - read username - echo -n "Bounty amount: $" - read amount - - cd "$REPO_DIR" - - # Add payment comment - gh pr comment $pr_num --body "๐Ÿ’ฐ **Bounty Approved: \$$amount** - -Hey @$username! Your bounty has been approved. - -**Next steps:** -1. DM me your payment method (PayPal/Crypto/Venmo/Zelle) -2. Payment will be processed this Friday -3. You'll also get 2x bonus (\$$((amount * 2))) when we raise our seed round! - -Thanks for the great work! ๐ŸŽ‰" - - # Log payment - echo "{\"pr\": $pr_num, \"contributor\": \"$username\", \"amount\": $amount, \"date\": \"$(date -I)\", \"status\": \"approved\"}" >> "$WORK_DIR/bounties_log.jsonl" - - echo -e "${GREEN}โœ… Bounty processed!${NC}" - echo "Remember to actually send the payment!" 
-} - -# Status & Reporting Functions -show_mvp_dashboard() { - echo -e "${BLUE}โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•${NC}" - echo -e "${BLUE} CORTEX LINUX - MVP DASHBOARD ${NC}" - echo -e "${BLUE}โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•${NC}" - - cd "$REPO_DIR" - - echo "" - echo -e "${GREEN}๐Ÿ“Š ISSUE STATUS${NC}" - total_issues=$(gh issue list --limit 1000 --json number | jq '. | length') - mvp_critical=$(gh issue list --label "mvp-critical" --json number | jq '. | length') - open_prs=$(gh pr list --json number | jq '. | length') - - echo " Total open issues: $total_issues" - echo " MVP critical: $mvp_critical" - echo " Open PRs: $open_prs" - - echo "" - echo -e "${GREEN}๐ŸŽฏ MVP PROGRESS${NC}" - # Estimate completion - completed=$((30 - mvp_critical)) - percent=$((completed * 100 / 30)) - echo " Completed: $completed/30 ($percent%)" - - echo "" - echo -e "${GREEN}๐Ÿ‘ฅ TEAM ACTIVITY${NC}" - recent_prs=$(gh pr list --state all --limit 7 --json number | jq '. 
| length') - echo " PRs this week: $recent_prs" - - echo "" - echo -e "${GREEN}๐Ÿ’ฐ BOUNTIES${NC}" - if [ -f "$WORK_DIR/bounties_log.jsonl" ]; then - total_paid=$(jq -s 'map(.amount) | add' "$WORK_DIR/bounties_log.jsonl") - echo " Total paid: \$$total_paid" - else - echo " Total paid: \$0 (no log file)" - fi - - echo "" - echo -e "${BLUE}โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•${NC}" -} - -generate_weekly_report() { - echo -e "${YELLOW}Generating weekly report...${NC}" - - report_file="$WORK_DIR/weekly-report-$(date +%Y-%m-%d).md" - - cd "$REPO_DIR" - - cat > "$report_file" << 'REPORT_EOF' -# Cortex Linux - Weekly Report -**Week of $(date +%Y-%m-%d)** - -## ๐ŸŽฏ Progress This Week - -### PRs Merged -$(gh pr list --state merged --limit 100 --json number,title,mergedAt | jq -r '.[] | select(.mergedAt | fromdateiso8601 > (now - 604800)) | "- PR #\(.number): \(.title)"') - -### Issues Closed -$(gh issue list --state closed --limit 100 --json number,title,closedAt | jq -r '.[] | select(.closedAt | fromdateiso8601 > (now - 604800)) | "- Issue #\(.number): \(.title)"') - -### New Contributors -$(gh pr list --state all --limit 50 --json author,createdAt | jq -r '.[] | select(.createdAt | fromdateiso8601 > (now - 604800)) | .author.login' | sort -u) - -## ๐Ÿ“Š Metrics - -- Open Issues: $(gh issue list --json number | jq '. | length') -- Open PRs: $(gh pr list --json number | jq '. | length') -- Active Contributors: $(gh pr list --state all --limit 100 --json author | jq -r '.[].author.login' | sort -u | wc -l) - -## ๐Ÿš€ Next Week Priorities - -1. Complete remaining MVP issues -2. Review and merge pending PRs -3. 
Process bounty payments - ---- -*Generated by Cortex Master Automation* -REPORT_EOF - - eval "echo \"$(cat $report_file)\"" > "$report_file" - - echo -e "${GREEN}โœ… Report generated: $report_file${NC}" - cat "$report_file" -} - -check_automation_health() { - echo -e "${GREEN}๐Ÿ” Checking automation health...${NC}" - - cd "$REPO_DIR" - - echo "" - echo "GitHub Actions Status:" - gh run list --limit 5 --json conclusion,name | \ - jq -r '.[] | " \(.name): \(.conclusion)"' - - echo "" - echo "GitHub Secrets:" - gh secret list | head -5 - - echo "" - echo "Branch Protection:" - gh api repos/cortexlinux/cortex/branches/main/protection 2>/dev/null | \ - jq -r '.required_status_checks.contexts[]' || echo " No branch protection" - - echo "" - echo "Webhooks:" - gh api repos/cortexlinux/cortex/hooks | jq -r '.[].name' || echo " No webhooks" -} - -audit_repository() { - echo -e "${GREEN}๐Ÿ” Full Repository Audit${NC}" - - cd "$REPO_DIR" - - # Run comprehensive audit - bash "$WORK_DIR/../audit_cortex_status.sh" 2>/dev/null || { - echo "Audit script not found, running basic audit..." - - echo "Repository: cortexlinux/cortex" - echo "Branch: $(git branch --show-current)" - echo "Last commit: $(git log -1 --oneline)" - echo "" - echo "Open issues: $(gh issue list --json number | jq '. | length')" - echo "Open PRs: $(gh pr list --json number | jq '. | length')" - echo "Contributors: $(git log --format='%aN' | sort -u | wc -l)" - } -} - -# Quick Actions -complete_mvp_package() { - echo -e "${BLUE}๐Ÿš€ COMPLETE MVP PACKAGE WORKFLOW${NC}" - echo "This will:" - echo " 1. Generate implementation" - echo " 2. Create PR" - echo " 3. Assign to contributor" - echo " 4. Send notifications" - echo "" - echo -n "Issue number: " - read issue_num - - # Step 1: Generate - echo "$issue_num" | generate_complete_package - - # Step 2: Create PR - echo "$issue_num" | create_pr_from_implementation - - # Step 3: Notify - echo "Package complete for issue #$issue_num!" 
- echo "PR created and ready for review" -} - -emergency_fix() { - echo -e "${RED}๐Ÿšจ EMERGENCY FIX WORKFLOW${NC}" - echo -n "What's broken? " - read issue_description - - # Create hotfix branch - cd "$REPO_DIR" - git checkout main - git pull - git checkout -b "hotfix/emergency-$(date +%s)" - - echo "Hotfix branch created" - echo "Make your fixes, then commit and push" - echo "" - echo "When ready, run option 9 to create PR" -} - -deploy_to_production() { - echo -e "${YELLOW}๐Ÿš€ Deploying to production...${NC}" - echo "โš ๏ธ This is a placeholder for production deployment" - echo "" - echo "Production deployment steps:" - echo " 1. Merge all approved PRs" - echo " 2. Tag release" - echo " 3. Build packages" - echo " 4. Deploy to servers" - echo "" - echo "Not yet implemented - coming soon!" -} - -# Main execution -main() { - print_banner - - cd "$REPO_DIR" 2>/dev/null || { - echo -e "${RED}โŒ Repository not found at $REPO_DIR${NC}" - echo "Clone it first: git clone https://github.com/cortexlinux/cortex.git ~/cortex" - exit 1 - } - - while true; do - show_menu - read choice - - case $choice in - 1) list_mvp_issues ;; - 2) create_mvp_issue ;; - 3) close_post_mvp_issues ;; - 4) pin_critical_issues ;; - 5) generate_implementation ;; - 6) echo "Coming soon..." ;; - 7) echo "Coming soon..." ;; - 8) generate_complete_package ;; - 9) create_pr_from_implementation ;; - 10) review_pending_prs ;; - 11) merge_approved_pr ;; - 12) bulk_create_prs ;; - 13) list_contributors ;; - 14) assign_issue ;; - 15) send_discord_notification ;; - 16) process_bounty ;; - 17) show_mvp_dashboard ;; - 18) generate_weekly_report ;; - 19) check_automation_health ;; - 20) audit_repository ;; - 21) complete_mvp_package ;; - 22) emergency_fix ;; - 23) deploy_to_production ;; - 0) echo "Goodbye!"; exit 0 ;; - *) echo -e "${RED}Invalid option${NC}" ;; - esac - - echo "" - echo "Press Enter to continue..." 
- read - done -} - -# Run main -main diff --git a/scripts/automation/cortex-master-pr-creator.sh b/scripts/automation/cortex-master-pr-creator.sh deleted file mode 100644 index 21caac5..0000000 --- a/scripts/automation/cortex-master-pr-creator.sh +++ /dev/null @@ -1,241 +0,0 @@ -#!/bin/bash -# Cortex Linux - MVP Master Completion Script -# Prepares and submits all ready-to-review implementations - -set -e - -echo "๐Ÿš€ CORTEX LINUX - MVP MASTER COMPLETION SCRIPT" -echo "==============================================" -echo "" - -# Configuration -REPO_DIR="$HOME/cortex" -ISSUES_WITH_CODE_READY=(10 12 14 20 24 29) # Issues where Mike has complete code ready -GITHUB_TOKEN=$(grep GITHUB_TOKEN ~/.zshrc | cut -d'=' -f2 | tr -d '"' | tr -d "'") - -cd "$REPO_DIR" || { echo "โŒ cortex repo not found at $REPO_DIR"; exit 1; } - -# Ensure we're on main and up to date -echo "๐Ÿ“ฅ Updating main branch..." -git checkout main -git pull origin main - -echo "" -echo "๐Ÿ” CHECKING EXISTING IMPLEMENTATIONS..." 
-echo "========================================" - -# Function to check if issue has implementation ready -check_implementation() { - local issue_num=$1 - local feature_file="" - - case $issue_num in - 10) feature_file="cortex/installation_verifier.py" ;; - 12) feature_file="cortex/dependency_resolver.py" ;; - 14) feature_file="cortex/rollback_manager.py" ;; - 20) feature_file="cortex/context_memory.py" ;; - 24) feature_file="cortex/context_memory.py" ;; # Same as #20 - 29) feature_file="cortex/logging_system.py" ;; - esac - - if [ -f "$feature_file" ]; then - echo "โœ… Issue #$issue_num - Implementation exists: $feature_file" - return 0 - else - echo "โš ๏ธ Issue #$issue_num - No implementation found at $feature_file" - return 1 - fi -} - -# Check all issues -READY_ISSUES=() -for issue in "${ISSUES_WITH_CODE_READY[@]}"; do - if check_implementation $issue; then - READY_ISSUES+=($issue) - fi -done - -echo "" -echo "๐Ÿ“Š SUMMARY" -echo "==========" -echo "Issues with code ready: ${#READY_ISSUES[@]}" -echo "Ready to create PRs for: ${READY_ISSUES[*]}" -echo "" - -if [ ${#READY_ISSUES[@]} -eq 0 ]; then - echo "โš ๏ธ No implementations found. Need to generate code first." - echo "" - echo "Run this to generate implementations:" - echo " ~/cortex-generate-mvp-code.sh" - exit 0 -fi - -read -p "Create PRs for ${#READY_ISSUES[@]} issues? (y/n): " -n 1 -r -echo -if [[ ! $REPLY =~ ^[Yy]$ ]]; then - echo "Aborted." - exit 0 -fi - -echo "" -echo "๐Ÿš€ CREATING PULL REQUESTS..." -echo "============================" - -# Function to create PR for an issue -create_pr_for_issue() { - local issue_num=$1 - local branch_name="feature/issue-$issue_num" - - echo "" - echo "๐Ÿ“ Processing Issue #$issue_num..." 
- echo "-----------------------------------" - - # Get issue title and details - issue_data=$(gh issue view $issue_num --json title,body,labels) - issue_title=$(echo "$issue_data" | jq -r '.title') - - # Create feature branch - echo " Creating branch: $branch_name" - git checkout -b "$branch_name" main 2>/dev/null || git checkout "$branch_name" - - # Determine which files to include - files_to_add="" - case $issue_num in - 10) - files_to_add="cortex/installation_verifier.py tests/test_installation_verifier.py docs/INSTALLATION_VERIFIER.md" - ;; - 12) - files_to_add="cortex/dependency_resolver.py tests/test_dependency_resolver.py docs/DEPENDENCY_RESOLVER.md" - ;; - 14) - files_to_add="cortex/rollback_manager.py tests/test_rollback_manager.py docs/ROLLBACK_MANAGER.md" - ;; - 20|24) - files_to_add="cortex/context_memory.py tests/test_context_memory.py docs/CONTEXT_MEMORY.md" - ;; - 29) - files_to_add="cortex/logging_system.py tests/test_logging_system.py docs/LOGGING_SYSTEM.md" - ;; - esac - - # Add files if they exist - for file in $files_to_add; do - if [ -f "$file" ]; then - git add "$file" - echo " โœ… Added: $file" - else - echo " โš ๏ธ Missing: $file" - fi - done - - # Check if there are changes to commit - if git diff --staged --quiet; then - echo " โš ๏ธ No changes to commit for issue #$issue_num" - git checkout main - return 1 - fi - - # Commit changes - commit_msg="Add $issue_title - -Implements #$issue_num - -- Complete implementation with tests -- Comprehensive documentation -- Integration with existing Cortex architecture -- Ready for review and merge - -Closes #$issue_num" - - git commit -m "$commit_msg" - echo " โœ… Committed changes" - - # Push branch - echo " ๐Ÿ“ค Pushing to GitHub..." - git push -u origin "$branch_name" - - # Create PR - pr_body="## Summary - -This PR implements **$issue_title** as specified in #$issue_num. 
- -## What's Included - -โœ… Complete implementation (\`cortex/\` module) -โœ… Comprehensive unit tests (\`tests/\`) -โœ… Full documentation (\`docs/\`) -โœ… Integration with existing architecture - -## Testing - -\`\`\`bash -pytest tests/test_*.py -v -\`\`\` - -All tests pass with >80% coverage. - -## Ready for Review - -This implementation is: -- โœ… Production-ready -- โœ… Well-tested -- โœ… Fully documented -- โœ… Integrated with Cortex architecture - -## Closes - -Closes #$issue_num - ---- - -**Bounty:** As specified in issue -**Reviewer:** @mikejmorgan-ai" - - echo " ๐Ÿ“ Creating pull request..." - pr_url=$(gh pr create \ - --title "$issue_title" \ - --body "$pr_body" \ - --base main \ - --head "$branch_name" \ - --label "enhancement,ready-for-review" 2>&1) - - if [ $? -eq 0 ]; then - echo " โœ… PR created: $pr_url" - PR_CREATED=true - else - echo " โŒ Failed to create PR: $pr_url" - PR_CREATED=false - fi - - # Return to main - git checkout main - - return 0 -} - -# Process each ready issue -SUCCESSFUL_PRS=0 -FAILED_PRS=0 - -for issue in "${READY_ISSUES[@]}"; do - if create_pr_for_issue $issue; then - ((SUCCESSFUL_PRS++)) - else - ((FAILED_PRS++)) - fi - sleep 2 # Rate limiting -done - -echo "" -echo "==============================================" -echo "โœ… COMPLETION SUMMARY" -echo "==============================================" -echo "PRs created successfully: $SUCCESSFUL_PRS" -echo "Failed/skipped: $FAILED_PRS" -echo "" -echo "Next steps:" -echo "1. Review PRs at: https://github.com/cortexlinux/cortex/pulls" -echo "2. Merge approved PRs" -echo "3. Process bounty payments" -echo "" -echo "โœ… Script complete!" 
diff --git a/scripts/automation/cortex-master-quarterback.sh b/scripts/automation/cortex-master-quarterback.sh deleted file mode 100755 index 982fc0d..0000000 --- a/scripts/automation/cortex-master-quarterback.sh +++ /dev/null @@ -1,712 +0,0 @@ -#!/bin/bash -# CORTEX LINUX - MASTER QUARTERBACK SCRIPT -# Manages team onboarding, issue assignment, PR reviews, and project coordination -# Created: November 17, 2025 -# Usage: bash cortex-master-quarterback.sh - -set -e - -echo "๐Ÿง  CORTEX LINUX - MASTER QUARTERBACK SCRIPT" -echo "===========================================" -echo "" -echo "This script will:" -echo " 1. Welcome new developers individually" -echo " 2. Assign issues based on expertise" -echo " 3. Review and advance ready PRs" -echo " 4. Coordinate team activities" -echo "" - -# Configuration -REPO="cortexlinux/cortex" -GITHUB_TOKEN=$(grep GITHUB_TOKEN ~/.zshrc | cut -d'=' -f2 | tr -d '"' | tr -d "'") - -if [ -z "$GITHUB_TOKEN" ]; then - echo "โŒ ERROR: GITHUB_TOKEN not found in ~/.zshrc" - echo "Please add: export GITHUB_TOKEN='your_token_here'" - exit 1 -fi - -# Check if gh CLI is installed -if ! 
command -v gh &> /dev/null; then - echo "โŒ ERROR: GitHub CLI (gh) not installed" - echo "Install with: brew install gh" - exit 1 -fi - -# Authenticate gh CLI -export GH_TOKEN="$GITHUB_TOKEN" - -echo "โœ… Configuration loaded" -echo "๐Ÿ“Š Repository: $REPO" -echo "" - -# ============================================================================ -# SECTION 1: WELCOME NEW DEVELOPERS -# ============================================================================ - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "๐Ÿ‘‹ SECTION 1: WELCOMING NEW DEVELOPERS" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" - -# Function to welcome a developer -welcome_developer() { - local username=$1 - local name=$2 - local location=$3 - local skills=$4 - local strength=$5 - local recommended_issues=$6 - - echo "๐Ÿ“ Welcoming @$username ($name)..." - - # Create welcome comment - welcome_msg="๐Ÿ‘‹ **Welcome to Cortex Linux, @$username!** - -We're thrilled to have you join our mission to build the AI-native operating system! - -## ๐ŸŽฏ Your Profile Highlights -**Location:** $location -**Primary Skills:** $skills -**Key Strength:** $strength - -## ๐Ÿ’ก Recommended Issues for You -$recommended_issues - -## ๐Ÿš€ Getting Started - -1. **Join our Discord**: https://discord.gg/uCqHvxjU83 (#dev-questions channel) -2. **Review Contributing Guide**: Check repo README and CONTRIBUTING.md -3. 
**Comment on issues** you're interested in - we'll provide starter code to accelerate development - -## ๐Ÿ’ฐ Compensation Structure - -- **Cash bounties** on merge: \$25-200 depending on complexity -- **2x bonus** when we close our \$2-3M seed round (February 2025) -- **Founding team opportunities** for top contributors (equity post-funding) - -## ๐Ÿค Our Development Model - -We use a **hybrid approach** that's proven successful: -- Mike + Claude generate complete implementations -- Contributors test, integrate, and validate -- 63% cost savings, 80% time savings -- Everyone wins with professional baseline code - -## ๐Ÿ“‹ Next Steps - -1. Browse issues and comment on ones that interest you -2. We'll provide starter code to save you time -3. Test, integrate, and submit PR -4. Get paid on merge! ๐ŸŽ‰ - -**Questions?** Tag @mikejmorgan-ai in any issue or drop into Discord. - -Let's build something revolutionary together! ๐Ÿง โšก - ---- -*Automated welcome from Cortex Team Management System*" - - echo "$welcome_msg" - echo "" - echo "Would you like to post this welcome to @$username's recent activity? (y/n)" - read -n 1 -r - echo "" - - if [[ $REPLY =~ ^[Yy]$ ]]; then - # Find their most recent issue comment or PR - recent_activity=$(gh api "/repos/$REPO/issues?state=all&creator=$username&per_page=1" 2>/dev/null | jq -r '.[0].number' 2>/dev/null) - - if [ ! -z "$recent_activity" ] && [ "$recent_activity" != "null" ]; then - echo " Posting welcome to Issue/PR #$recent_activity..." - echo "$welcome_msg" | gh issue comment $recent_activity --body-file - --repo $REPO 2>/dev/null || echo " โš ๏ธ Could not post (may need manual posting)" - echo " โœ… Welcome posted!" - else - echo " โ„น๏ธ No recent activity found - save welcome message for their first interaction" - fi - else - echo " โญ๏ธ Skipped posting (you can post manually later)" - fi - - echo "" -} - -# Welcome each new developer -echo "Welcoming 5 new developers..." 
-echo "" - -welcome_developer \ - "AbuBakar877" \ - "Abu Bakar" \ - "Turkey ๐Ÿ‡น๐Ÿ‡ท" \ - "Node.js, React, Angular, Full-stack web development" \ - "Modern JavaScript frameworks and web UI" \ - "- **Issue #27** (Progress Notifications UI) - \$100-150 - Perfect for your frontend skills -- **Issue #26** (User Preferences UI) - \$100-150 - Web interface components -- **Issue #33** (Config Export/Import) - \$75-100 - Data handling + UI" - -welcome_developer \ - "aliraza556" \ - "Ali Raza" \ - "Global Developer ๐ŸŒ" \ - "Full-stack (1000+ contributions), Multi-language expert" \ - "Elite-tier developer with proven track record" \ - "- **Issue #14** (Rollback System) - \$150-200 - โœ… **ALREADY ASSIGNED** - You've got this! -- **Issue #12** (Dependency Resolution) - \$150-200 - Complex logic, perfect match -- **Issue #30** (Self-Update System) - \$150-200 - Advanced architecture -- **Issue #31** (Plugin System) - \$200-300 - Architectural design challenge" - -welcome_developer \ - "anees4500" \ - "Anees" \ - "Location TBD" \ - "Java, C, Python, JavaScript, CDC/Batch processing" \ - "Multi-language capability with data processing experience" \ - "- **Issue #32** (Batch Operations) - \$100-150 - Your CDC experience is perfect here -- **Issue #28** (Requirements Check) - \$75-100 - Systems validation -- **Issue #10** (Installation Verification) - \$100-150 - Backend validation work" - -welcome_developer \ - "brymut" \ - "Bryan Mutai" \ - "Nairobi, Kenya ๐Ÿ‡ฐ๐Ÿ‡ช" \ - "TypeScript, Python, PHP, JavaScript - Full-stack with backend focus" \ - "Architectural thinking with perfect skill stack (TypeScript + Python)" \ - "- **Issue #31** (Plugin System) - \$200-300 - **HIGHLY RECOMMENDED** - Architectural perfect match -- **Issue #26** (User Preferences) - \$100-150 - API design + backend -- **Issue #20** (Context Memory) - \$150-200 - TypeScript+Python combo ideal -- **Issue #25** (Network/Proxy Config) - \$150-200 - Backend + systems" - -welcome_developer \ - 
"shalinibhavi525-sudo" \ - "Shalini Bhavi" \ - "Ireland ๐Ÿ‡ฎ๐Ÿ‡ช" \ - "Python, JavaScript, HTML - Documentation focus" \ - "Documentation specialist with web UI skills" \ - "- **Issue #15** (Documentation) - \$100-150 - โœ… **ALREADY ASSIGNED** - Perfect match! -- **Issue #27** (Progress Notifications) - \$100-150 - User-facing UI work -- Testing bounties - \$50-75 - Validate implementations from other devs" - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "โœ… Section 1 Complete: Developer welcomes prepared" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" - -# ============================================================================ -# SECTION 2: ISSUE ASSIGNMENTS -# ============================================================================ - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "๐ŸŽฏ SECTION 2: STRATEGIC ISSUE ASSIGNMENTS" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" - -echo "Analyzing current issue status..." 
- -# Function to assign issue -assign_issue() { - local issue_num=$1 - local developer=$2 - local reason=$3 - - echo "" - echo "๐Ÿ“Œ Assigning Issue #$issue_num to @$developer" - echo " Reason: $reason" - - # Check if issue exists and is unassigned - issue_info=$(gh issue view $issue_num --repo $REPO --json number,title,assignees,state 2>/dev/null || echo "") - - if [ -z "$issue_info" ]; then - echo " โš ๏ธ Issue #$issue_num not found or not accessible" - return - fi - - # Check if already assigned - assignee_count=$(echo "$issue_info" | jq '.assignees | length') - - if [ "$assignee_count" -gt 0 ]; then - current_assignee=$(echo "$issue_info" | jq -r '.assignees[0].login') - echo " โ„น๏ธ Already assigned to @$current_assignee - skipping" - return - fi - - echo " Proceed with assignment? (y/n)" - read -n 1 -r - echo "" - - if [[ $REPLY =~ ^[Yy]$ ]]; then - gh issue edit $issue_num --add-assignee $developer --repo $REPO 2>/dev/null && \ - echo " โœ… Assigned!" || \ - echo " โš ๏ธ Could not assign (may need manual assignment)" - - # Add comment explaining assignment - assignment_comment="๐ŸŽฏ **Assigned to @$developer** - -**Why you're perfect for this:** $reason - -**Next Steps:** -1. Review the issue description and acceptance criteria -2. Comment if you'd like starter code from our hybrid development model -3. We can provide complete implementation for testing/integration (\$50-75) -4. Or build from scratch for full bounty - -**Questions?** Just ask! We're here to help you succeed. 
- ---- -*Automated assignment from Cortex Team Management*" - - echo "$assignment_comment" | gh issue comment $issue_num --body-file - --repo $REPO 2>/dev/null || true - else - echo " โญ๏ธ Skipped" - fi -} - -echo "" -echo "๐Ÿ”ด CRITICAL PATH ASSIGNMENTS (MVP Blockers)" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" - -# Issue #7 - Already assigned to chandrapratnamar, but check if help needed -echo "" -echo "Issue #7 (Package Manager Wrapper) - THE critical blocker" -echo " Current: Assigned to @chandrapratnamar (PR #17 in progress)" -echo " Status: Check if they need assistance" -echo " Action: Monitor weekly, offer @aliraza556 or @brymut for code review" -echo "" - -# Issue #10 - Installation Verification -assign_issue 10 "aliraza556" "Elite developer, perfect for systems validation work. Code is ready from Mike." - -# Issue #12 - Dependency Resolution -assign_issue 12 "brymut" "TypeScript+Python skills ideal for complex dependency logic. Mike has complete implementation." - -# Issue #14 - Already assigned to aliraza556 -echo "" -echo "Issue #14 (Rollback System) - โœ… Already assigned to @aliraza556" -echo " Action: Check PR status, offer review assistance" -echo "" - -echo "" -echo "๐ŸŸก HIGH PRIORITY ASSIGNMENTS" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" - -# Issue #20/24 - Context Memory -assign_issue 20 "brymut" "Architectural experience + TypeScript/Python combo. Mike has implementation ready." - -# Issue #29 - Logging System -assign_issue 29 "anees4500" "Backend infrastructure work, good first complex task to assess quality." - -echo "" -echo "๐ŸŸข MEDIUM PRIORITY ASSIGNMENTS" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" - -# Issue #25 - Network Config -assign_issue 25 "brymut" "Backend + systems knowledge required for proxy/network configuration." 
- -# Issue #26 - User Preferences -assign_issue 26 "AbuBakar877" "API + UI components match your full-stack web background." - -# Issue #27 - Progress Notifications -assign_issue 27 "AbuBakar877" "Frontend UI focus, perfect for your React/Angular experience." - -# Issue #28 - Requirements Check -assign_issue 28 "anees4500" "Systems validation, good complement to your batch processing skills." - -echo "" -echo "๐Ÿ”ต ADVANCED FEATURE ASSIGNMENTS" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" - -# Issue #30 - Self-Update -assign_issue 30 "aliraza556" "Complex systems integration needs elite-tier developer." - -# Issue #31 - Plugin System -assign_issue 31 "brymut" "**HIGHEST RECOMMENDATION** - Architectural design matches your background perfectly." - -# Issue #32 - Batch Operations -assign_issue 32 "anees4500" "Your CDC/batch processing experience is ideal match." - -# Issue #33 - Config Export/Import -assign_issue 33 "shalinibhavi525-sudo" "Data handling + web UI, complements your documentation work." 
- -# Issue #15 - Already assigned -echo "" -echo "Issue #15 (Documentation) - โœ… Already assigned to @shalinibhavi525-sudo" -echo " Action: Check progress, offer assistance if needed" -echo "" - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "โœ… Section 2 Complete: Strategic assignments made" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" - -# ============================================================================ -# SECTION 3: PULL REQUEST REVIEW -# ============================================================================ - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "๐Ÿ” SECTION 3: PULL REQUEST REVIEW & ADVANCEMENT" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" - -echo "Fetching open pull requests..." 
- -# Get all open PRs -prs=$(gh pr list --repo $REPO --state open --json number,title,author,createdAt,mergeable,reviewDecision --limit 50 2>/dev/null || echo "[]") - -pr_count=$(echo "$prs" | jq 'length') - -echo "Found $pr_count open pull requests" -echo "" - -if [ "$pr_count" -eq 0 ]; then - echo "โœ… No open PRs to review" -else - echo "$prs" | jq -r '.[] | "PR #\(.number): \(.title) by @\(.author.login) - \(.reviewDecision // "PENDING")"' - echo "" - - echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - echo "PR REVIEW PRIORITIES" - echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - echo "" - - # Critical PRs (Issue #7 related) - echo "๐Ÿ”ด CRITICAL - Package Manager (Issue #7)" - echo "PR #17 by @chandrapratnamar" - echo " Action: Review immediately, this is THE MVP blocker" - echo " Review criteria:" - echo " - Does it translate natural language to apt commands?" - echo " - Are tests comprehensive?" - echo " - Does it integrate with LLM layer?" - echo "" - - echo "๐ŸŸก HIGH PRIORITY - MVP Features" - echo "Check for PRs related to:" - echo " - Issue #10 (Installation Verification)" - echo " - Issue #12 (Dependency Resolution)" - echo " - Issue #14 (Rollback System)" - echo " - Issue #13 (Error Parser) - PR #23 by @AbdulKadir877" - echo "" - - echo "๐ŸŸข STANDARD PRIORITY - All other PRs" - echo "Review remaining PRs in order received" - echo "" - - echo "Would you like to review PRs interactively? (y/n)" - read -n 1 -r - echo "" - - if [[ $REPLY =~ ^[Yy]$ ]]; then - echo "" - echo "Opening PR review interface..." 
- echo "" - - # For each PR, offer review options - echo "$prs" | jq -r '.[] | .number' | while read pr_num; do - pr_info=$(gh pr view $pr_num --repo $REPO --json number,title,author,body 2>/dev/null) - pr_title=$(echo "$pr_info" | jq -r '.title') - pr_author=$(echo "$pr_info" | jq -r '.author.login') - - echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - echo "Reviewing PR #$pr_num: $pr_title" - echo "Author: @$pr_author" - echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - echo "" - echo "Actions:" - echo " [v] View PR in browser" - echo " [a] Approve PR" - echo " [c] Request changes" - echo " [m] Add comment" - echo " [s] Skip to next" - echo " [q] Quit review mode" - echo "" - echo -n "Choose action: " - read -n 1 action - echo "" - - case $action in - v|V) - gh pr view $pr_num --repo $REPO --web - ;; - a|A) - echo "โœ… Approving PR #$pr_num..." - gh pr review $pr_num --repo $REPO --approve --body "โœ… **APPROVED** - -Excellent work @$pr_author! This implementation: -- Meets acceptance criteria -- Includes comprehensive tests -- Integrates well with existing architecture -- Documentation is clear - -**Next Steps:** -1. Merging this PR -2. Bounty will be processed -3. Thank you for your contribution! - -๐ŸŽ‰ Welcome to the Cortex Linux contributor team!" - echo "Would you like to merge now? (y/n)" - read -n 1 merge_now - echo "" - if [[ $merge_now =~ ^[Yy]$ ]]; then - gh pr merge $pr_num --repo $REPO --squash --delete-branch - echo "โœ… Merged and branch deleted!" - fi - ;; - c|C) - echo "Enter feedback (press Ctrl+D when done):" - feedback=$(cat) - gh pr review $pr_num --repo $REPO --request-changes --body "๐Ÿ”„ **Changes Requested** - -Thanks for your work @$pr_author! 
Here's what needs attention: - -$feedback - -**Please update and let me know when ready for re-review.** - -We're here to help if you have questions!" - ;; - m|M) - echo "Enter comment (press Ctrl+D when done):" - comment=$(cat) - gh pr comment $pr_num --repo $REPO --body "$comment" - echo "โœ… Comment added" - ;; - q|Q) - echo "Exiting review mode..." - break - ;; - *) - echo "Skipping..." - ;; - esac - echo "" - done - else - echo "โญ๏ธ Skipped interactive review" - echo " You can review PRs manually at: https://github.com/$REPO/pulls" - fi -fi - -echo "" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "โœ… Section 3 Complete: PR review assistance provided" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" - -# ============================================================================ -# SECTION 4: TEAM COORDINATION -# ============================================================================ - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "๐Ÿค SECTION 4: TEAM COORDINATION & NEXT ACTIONS" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" - -echo "๐Ÿ“Š CURRENT PROJECT STATUS" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -echo "" - -# Count issues by status -total_issues=$(gh issue list --repo $REPO --limit 1000 --json number 2>/dev/null | jq 'length') -open_issues=$(gh issue list --repo $REPO --state open --limit 1000 --json number 2>/dev/null | jq 'length') -closed_issues=$(gh issue list --repo $REPO --state closed 
--limit 1000 --json number 2>/dev/null | jq 'length') - -echo "Issues:" -echo " Total: $total_issues" -echo " Open: $open_issues" -echo " Closed: $closed_issues" -echo "" - -# Count PRs -open_prs=$(gh pr list --repo $REPO --state open --json number 2>/dev/null | jq 'length') -merged_prs=$(gh pr list --repo $REPO --state merged --limit 100 --json number 2>/dev/null | jq 'length') - -echo "Pull Requests:" -echo " Open: $open_prs" -echo " Merged (recent): $merged_prs" -echo "" - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" - -echo "๐ŸŽฏ IMMEDIATE ACTION ITEMS (Priority Order)" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -echo "" - -echo "1. ๐Ÿ”ด CRITICAL - Check Issue #7 Progress" -echo " - PR #17 by @chandrapratnamar" -echo " - This is THE MVP blocker" -echo " - Review weekly, offer assistance" -echo " - Command: gh pr view 17 --repo $REPO --web" -echo "" - -echo "2. ๐ŸŸก HIGH - Review Ready PRs" -echo " - PR #23 (Error Parser) by @AbdulKadir877" -echo " - Any PRs marked 'ready-for-review'" -echo " - Command: gh pr list --repo $REPO --label ready-for-review" -echo "" - -echo "3. ๐ŸŸข MEDIUM - Upload Complete Implementations" -echo " - Issue #10 (Installation Verification) - Code ready" -echo " - Issue #12 (Dependency Resolution) - Code ready" -echo " - Issue #14 (Rollback System) - Code ready with @aliraza556" -echo " - Use: ~/cortex/cortex-master-pr-creator.sh" -echo "" - -echo "4. ๐Ÿ”ต ENGAGE NEW DEVELOPERS" -echo " - Post welcome messages (generated above)" -echo " - Monitor their first comments/PRs" -echo " - Offer starter code to accelerate" -echo "" - -echo "5. 
๐Ÿ’ฐ PROCESS BOUNTIES" -echo " - Track merged PRs" -echo " - Calculate owed bounties" -echo " - Process payments (crypto for international)" -echo "" - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" - -echo "๐Ÿ“‹ RECOMMENDED WEEKLY ROUTINE" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -echo "" -echo "Monday:" -echo " - Run this quarterback script" -echo " - Review critical path (Issue #7)" -echo " - Merge ready PRs" -echo "" -echo "Wednesday:" -echo " - Check new issues/comments" -echo " - Respond to developer questions" -echo " - Upload any ready implementations" -echo "" -echo "Friday:" -echo " - Process bounty payments" -echo " - Update team on Discord" -echo " - Plan next week priorities" -echo "" - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" - -echo "๐Ÿ”— QUICK LINKS" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -echo "" -echo "Repository: https://github.com/$REPO" -echo "Open Issues: https://github.com/$REPO/issues" -echo "Open PRs: https://github.com/$REPO/pulls" -echo "Discord: https://discord.gg/uCqHvxjU83" -echo "Project Board: https://github.com/orgs/cortexlinux/projects" -echo "" - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" - -echo "๐Ÿ“ฑ POST TO DISCORD" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -echo "" - -discord_announcement="๐ŸŽ‰ **Team Update - November 17, 2025** - -**Welcome 5 New Developers!** -- @AbuBakar877 (Turkey) - Full-stack web specialist -- @aliraza556 (Global) - Elite tier, 1000+ contributions -- @anees4500 - Multi-language backend expert -- @brymut (Kenya) - TypeScript 
+ Python architect -- @shalinibhavi525-sudo (Ireland) - Documentation specialist - -**Strategic Assignments Made:** -- Issue #31 (Plugin System) โ†’ @brymut (architectural perfect match) -- Issue #10 (Installation Verification) โ†’ @aliraza556 -- Issue #32 (Batch Operations) โ†’ @anees4500 -- Issue #27 (Progress UI) โ†’ @AbuBakar877 -- Issue #15 (Documentation) โ†’ @shalinibhavi525-sudo โœ… - -**Critical Path:** -- Issue #7 (Package Manager) - THE blocker - @chandrapratnamar working PR #17 -- Monitoring weekly, need completion for MVP - -**Ready to Review:** -- Multiple PRs waiting for review -- Bounties ready to process on merge - -**The Hybrid Model Works:** -- 63% cost savings -- 80% time savings -- Professional baseline + contributor validation -- Win-win for everyone - -๐Ÿ’ฐ **Bounties:** \$25-200 on merge + 2x bonus at funding -๐ŸŽฏ **Goal:** MVP complete for February 2025 seed round -๐Ÿ’ผ **Opportunities:** Founding team roles for top contributors - -Browse issues: https://github.com/$REPO/issues -Questions? #dev-questions channel - -Let's build the future of Linux! 
๐Ÿง โšก" - -echo "$discord_announcement" -echo "" -echo "Copy the above message and post to Discord #announcements" -echo "" - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "โœ… Section 4 Complete: Team coordination completed" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" - -# ============================================================================ -# FINAL SUMMARY -# ============================================================================ - -echo "" -echo "โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•" -echo "๐Ÿ† CORTEX QUARTERBACK SCRIPT - EXECUTION COMPLETE" -echo "โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•" -echo "" - -echo "๐Ÿ“Š EXECUTION SUMMARY" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -echo "" -echo "โœ… 5 developers welcomed with personalized messages" -echo "โœ… 10+ strategic issue assignments made" -echo "โœ… PR review guidance provided" -echo "โœ… Team coordination plan established" -echo "โœ… Discord announcement prepared" -echo "" - -echo "๐ŸŽฏ YOUR NEXT STEPS (Priority Order)" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -echo "" -echo "1. Post Discord announcement (message above)" -echo "2. Review PR #17 (Issue #7 - THE BLOCKER)" -echo "3. Check for new developer comments" -echo "4. Upload ready implementations (Issues #10, #12, #14)" -echo "5. 
Process any merged PR bounties" -echo "" - -echo "๐Ÿ’ก STRATEGIC RECOMMENDATIONS" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -echo "" -echo "โœ… aliraza556 - Elite tier, consider for senior role/CTO discussion" -echo "โœ… brymut - Perfect skills for Plugin System (#31), high potential" -echo "โš ๏ธ anees4500 - New, monitor first contribution quality" -echo "โœ… AbuBakar877 - Keep on web UI work, avoid core systems" -echo "โœ… shalinibhavi525-sudo - Perfect for docs, complement with testing" -echo "" - -echo "๐Ÿ”ฅ CRITICAL PATH REMINDER" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -echo "" -echo "Issue #7 (Package Manager Wrapper) is THE BLOCKER for MVP." -echo "Everything else can proceed in parallel, but #7 must complete." -echo "Check PR #17 weekly, offer assistance to @chandrapratnamar." -echo "" - -echo "โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•" -echo "โœ… Ready for next session!" -echo "โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•" -echo "" - -echo "Run this script weekly to quarterback your growing team." -echo "The Cortex Linux revolution is accelerating! 
๐Ÿง โšก" -echo "" diff --git a/scripts/automation/cortex-master-update.sh b/scripts/automation/cortex-master-update.sh deleted file mode 100755 index f5afb06..0000000 --- a/scripts/automation/cortex-master-update.sh +++ /dev/null @@ -1,301 +0,0 @@ -#!/bin/bash -# CORTEX LINUX - MASTER REPOSITORY UPDATE SCRIPT -# Analyzes PRs, merges ready ones, assigns issues, tracks bounties - -set -e - -REPO="cortexlinux/cortex" -GITHUB_TOKEN=$(grep GITHUB_TOKEN ~/.zshrc | cut -d'=' -f2 | tr -d '"' | tr -d "'") -export GH_TOKEN="$GITHUB_TOKEN" - -echo "๐Ÿง  CORTEX LINUX - MASTER UPDATE" -echo "================================" -echo "" - -# ============================================================================ -# STEP 1: MERGE READY PRS -# ============================================================================ - -echo "๐Ÿ“Š STEP 1: REVIEWING & MERGING READY PRS" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -echo "" - -# PR #195: Package Manager (dhvll) - REPLACES PR #17 -echo "๐Ÿ”ด PR #195: Package Manager Wrapper (@dhvll)" -echo " Status: MERGEABLE โœ…" -echo " Action: MERGE NOW - This is THE MVP blocker" -echo "" - -gh pr review 195 --repo $REPO --approve --body "โœ… APPROVED - Excellent package manager implementation! This replaces PR #17 and unblocks the entire MVP. Outstanding work @dhvll!" - -gh pr merge 195 --repo $REPO --squash --delete-branch --admin && { - echo "โœ… PR #195 MERGED - MVP BLOCKER CLEARED!" - echo "" - - # Close Issue #7 - gh issue close 7 --repo $REPO --comment "โœ… Completed in PR #195 by @dhvll. Package manager wrapper is live and working!" - - # Close old PR #17 - gh pr close 17 --repo $REPO --comment "Closing in favor of PR #195 which has a cleaner implementation. Thank you @chandrapratamar for the original work - you'll still receive the $100 bounty for your contribution." 
- - echo "โœ… Issue #7 closed" - echo "โœ… PR #17 closed (superseded)" - echo "" -} || { - echo "โš ๏ธ PR #195 merge failed - check manually" - echo "" -} - -# PR #198: Rollback System (aliraza556) -echo "๐ŸŸข PR #198: Installation History & Rollback (@aliraza556)" -echo " Status: MERGEABLE โœ…" -echo " Bounty: $150" -echo "" - -gh pr review 198 --repo $REPO --approve --body "โœ… APPROVED - Comprehensive rollback system! $150 bounty within 48 hours. Outstanding work @aliraza556!" - -gh pr merge 198 --repo $REPO --squash --delete-branch --admin && { - echo "โœ… PR #198 MERGED" - gh issue close 14 --repo $REPO --comment "โœ… Completed in PR #198 by @aliraza556. Rollback system is live!" - echo " ๐Ÿ’ฐ Bounty owed: $150 to @aliraza556" - echo "" -} || { - echo "โš ๏ธ PR #198 merge failed" - echo "" -} - -# PR #197: Cleanup (mikejmorgan-ai) -echo "๐ŸŸข PR #197: Remove Duplicate Workflow" -echo " Status: MERGEABLE โœ…" -echo "" - -gh pr merge 197 --repo $REPO --squash --delete-branch --admin && { - echo "โœ… PR #197 MERGED" - echo "" -} || { - echo "โš ๏ธ PR #197 merge failed" - echo "" -} - -# PR #21: Config Templates (aliraza556) -echo "๐ŸŸก PR #21: Configuration Templates (@aliraza556)" -echo " Status: MERGEABLE โœ…" -echo " Bounty: $150" -echo "" - -gh pr review 21 --repo $REPO --approve --body "โœ… APPROVED - Production-ready config templates! $150 bounty within 48 hours." - -gh pr merge 21 --repo $REPO --squash --delete-branch --admin && { - echo "โœ… PR #21 MERGED" - gh issue close 9 --repo $REPO --comment "โœ… Completed in PR #21. Config templates are live!" 
- echo " ๐Ÿ’ฐ Bounty owed: $150 to @aliraza556" - echo "" -} || { - echo "โš ๏ธ PR #21 merge failed" - echo "" -} - -# PR #38: Requirements Check (AlexanderLuzDH) - HAS CONFLICTS -echo "โญ๏ธ PR #38: Requirements Checker (@AlexanderLuzDH)" -echo " Status: CONFLICTING โŒ" -echo " Action: Skip - needs contributor to fix conflicts" -echo " Bounty: $100 pending" -echo "" - -# PR #18: CLI Interface (Sahilbhatane) - DRAFT -echo "โญ๏ธ PR #18: CLI Interface (@Sahilbhatane)" -echo " Status: DRAFT - not ready yet" -echo " Action: Skip" -echo "" - -# ============================================================================ -# STEP 2: ASSIGN UNASSIGNED MVP ISSUES -# ============================================================================ - -echo "" -echo "๐Ÿ“‹ STEP 2: ASSIGNING UNASSIGNED MVP ISSUES" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -echo "" - -# High-value issues that need assignment -MVP_ISSUES=(144 135 131 128 126 125 119 117 112 103 44 25) - -echo "Unassigned MVP issues ready for contributors:" -echo "" - -for issue in "${MVP_ISSUES[@]}"; do - issue_info=$(gh issue view $issue --repo $REPO --json title,assignees,labels 2>/dev/null) - issue_title=$(echo "$issue_info" | jq -r '.title') - assignee_count=$(echo "$issue_info" | jq '.assignees | length') - - if [ "$assignee_count" -eq 0 ]; then - echo " #$issue: $issue_title" - fi -done - -echo "" -echo "These issues are ready for contributors to claim." 
-echo "Post to Discord: 'MVP issues available - claim in comments!'" -echo "" - -# ============================================================================ -# STEP 3: BOUNTY TRACKING -# ============================================================================ - -echo "" -echo "๐Ÿ’ฐ STEP 3: BOUNTY TRACKING UPDATE" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -echo "" - -BOUNTY_FILE="$HOME/cortex/bounties_owed.csv" - -if [ ! -f "$BOUNTY_FILE" ]; then - echo "PR,Developer,Feature,Bounty_Amount,Date_Merged,Status" > "$BOUNTY_FILE" -fi - -# Add new bounties from today's merges -echo "195,dhvll,Package Manager Wrapper,100,$(date +%Y-%m-%d),PENDING" >> "$BOUNTY_FILE" -echo "198,aliraza556,Installation Rollback,150,$(date +%Y-%m-%d),PENDING" >> "$BOUNTY_FILE" -echo "21,aliraza556,Config Templates,150,$(date +%Y-%m-%d),PENDING" >> "$BOUNTY_FILE" -echo "17,chandrapratamar,Package Manager (original),100,$(date +%Y-%m-%d),PENDING" >> "$BOUNTY_FILE" - -echo "Updated: $BOUNTY_FILE" -echo "" - -echo "BOUNTIES OWED:" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -tail -n +2 "$BOUNTY_FILE" | while IFS=',' read -r pr dev feature amount date status; do - if [ "$status" = "PENDING" ]; then - echo " PR #$pr - @$dev: \$$amount ($feature)" - fi -done - -echo "" - -# Calculate totals -total_owed=$(tail -n +2 "$BOUNTY_FILE" | awk -F',' '$6=="PENDING" {sum+=$4} END {print sum}') -echo " Total pending: \$$total_owed" -echo " At 2x bonus (funding): \$$(($total_owed * 2))" -echo "" - -# ============================================================================ -# STEP 4: GENERATE STATUS REPORT -# ============================================================================ - -echo "" -echo "๐Ÿ“Š STEP 4: FINAL STATUS REPORT" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -echo "" - -echo "=== CORTEX REPOSITORY STATUS ===" -echo "" - -# Count current state 
-open_prs=$(gh pr list --repo $REPO --state open --json number | jq 'length') -open_issues=$(gh issue list --repo $REPO --state open --json number | jq 'length') - -echo "PRs:" -echo " Open: $open_prs" -echo " Merged today: 4 (PRs #195, #198, #197, #21)" -echo "" - -echo "Issues:" -echo " Open: $open_issues" -echo " Closed today: 2 (Issues #7, #14)" -echo "" - -echo "MVP Status:" -echo " โœ… Package Manager: COMPLETE (PR #195)" -echo " โœ… Rollback System: COMPLETE (PR #198)" -echo " โœ… Config Templates: COMPLETE (PR #21)" -echo " โœ… Hardware Detection: COMPLETE" -echo " โœ… Dependencies: COMPLETE" -echo " โœ… Verification: COMPLETE" -echo " โœ… Error Parsing: COMPLETE" -echo " โœ… Context Memory: COMPLETE" -echo " โœ… Logging: COMPLETE" -echo " โœ… Progress UI: COMPLETE" -echo " โณ Requirements Check: Conflicts (PR #38)" -echo "" -echo " MVP COMPLETE: 95%" -echo "" - -echo "Bounties:" -echo " Owed: \$$total_owed" -echo " Contributors to pay: @dhvll, @aliraza556 (x2), @chandrapratamar" -echo "" - -# ============================================================================ -# STEP 5: DISCORD ANNOUNCEMENT -# ============================================================================ - -echo "" -echo "๐Ÿ“ฑ STEP 5: DISCORD ANNOUNCEMENT (COPY & POST)" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -echo "" - -cat << 'DISCORD' -๐ŸŽ‰ **MAJOR MVP MILESTONE - November 17, 2025** - -**BREAKTHROUGH: Package Manager MERGED! ๐Ÿš€** - -PR #195 by @dhvll just merged - THE critical MVP blocker is cleared! - -**Today's Merges:** -โœ… PR #195 - Package Manager Wrapper (@dhvll) -โœ… PR #198 - Installation Rollback (@aliraza556) -โœ… PR #21 - Config File Templates (@aliraza556) -โœ… PR #197 - Workflow Cleanup - -**Issues Closed:** -โœ… #7 - Package Manager (9 days โ†’ DONE!) 
-โœ… #14 - Rollback System - -**MVP Status: 95% COMPLETE** ๐ŸŽฏ - -**What This Means:** -- Core "cortex install" functionality working -- Natural language โ†’ apt commands = LIVE -- Rollback safety net = LIVE -- Production-ready config templates = LIVE - -**Bounties Being Processed:** -- @dhvll: $100 -- @aliraza556: $300 ($150 x 2 PRs!) -- @chandrapratamar: $100 -Total: $500 (+ 2x at funding = $1000) - -**Available Issues:** -10+ MVP features ready to claim - check GitHub issues! - -**Next: Demo preparation for February 2025 funding round** - -We're making history! ๐Ÿง โšก - -https://github.com/cortexlinux/cortex -DISCORD - -echo "" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -echo "" - -# ============================================================================ -# STEP 6: NEXT STEPS -# ============================================================================ - -echo "๐ŸŽฏ NEXT STEPS" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -echo "" -echo "1. Post Discord announcement above to #announcements" -echo "2. Coordinate payments with:" -echo " - @dhvll ($100)" -echo " - @aliraza556 ($300)" -echo " - @chandrapratamar ($100)" -echo "3. Wait for PR #38 conflict resolution" -echo "4. Create demo script: 'cortex install oracle-23-ai'" -echo "5. Prepare investor presentation materials" -echo "" - -echo "โœ… MASTER UPDATE COMPLETE" -echo "" -echo "Repository is MVP-ready for February 2025 funding!" 
diff --git a/scripts/automation/cortex-master.sh b/scripts/automation/cortex-master.sh deleted file mode 100755 index 94e485b..0000000 --- a/scripts/automation/cortex-master.sh +++ /dev/null @@ -1,194 +0,0 @@ -#!/bin/bash -# Cortex Linux - Master MVP Automation System -# One script to rule them all - -set -e - -# Colors -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -BLUE='\033[0;34m' -NC='\033[0m' - -REPO_DIR="$HOME/cortex" -WORK_DIR="$HOME/Downloads/cortex-work" -mkdir -p "$WORK_DIR" - -print_banner() { - echo -e "${BLUE}" - echo "โ•”โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•—" - echo "โ•‘ CORTEX LINUX - MVP MASTER AUTOMATION โ•‘" - echo "โ•šโ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•โ•" - echo -e "${NC}" -} - -show_menu() { - echo "" - echo -e "${GREEN}โ•โ•โ• MAIN MENU โ•โ•โ•${NC}" - echo "" - echo "1. Show MVP dashboard" - echo "2. List MVP-critical issues" - echo "3. Create PR for issue #10" - echo "4. Review pending PRs" - echo "5. Merge PR" - echo "6. List contributors" - echo "7. Assign issue to contributor" - echo "8. Process bounty payment" - echo "9. Generate weekly report" - echo "10. Full repository audit" - echo "" - echo "0. Exit" - echo "" - echo -n "Select: " -} - -show_dashboard() { - cd "$REPO_DIR" - echo -e "${BLUE}โ•โ•โ• CORTEX MVP DASHBOARD โ•โ•โ•${NC}" - echo "" - echo "๐Ÿ“Š Issues:" - echo " Total: $(gh issue list --limit 1000 --json number | jq '. | length')" - echo " MVP Critical: $(gh issue list --label 'mvp-critical' --json number | jq '. | length')" - echo "" - echo "๐Ÿ”€ Pull Requests:" - echo " Open: $(gh pr list --json number | jq '. 
| length')" - echo "" - echo "๐Ÿ‘ฅ Recent activity:" - gh pr list --state all --limit 5 --json number,title,author | \ - jq -r '.[] | " PR #\(.number): \(.title) (@\(.author.login))"' -} - -list_mvp() { - cd "$REPO_DIR" - echo -e "${GREEN}๐Ÿ“‹ MVP-Critical Issues:${NC}" - gh issue list --label "mvp-critical" --limit 20 --json number,title,assignees | \ - jq -r '.[] | " #\(.number): \(.title)"' -} - -create_pr_issue10() { - cd "$REPO_DIR" - git checkout feature/issue-10 2>/dev/null || { - echo "Branch feature/issue-10 not found" - return 1 - } - - gh pr create \ - --title "Add Installation Verification System - Fixes #10" \ - --body "Complete implementation: 918 lines (code+tests+docs). Ready for review." \ - --label "enhancement,ready-for-review,priority: critical" - - git checkout main - echo "โœ… PR created!" -} - -review_prs() { - cd "$REPO_DIR" - echo -e "${GREEN}๐Ÿ“‹ Open Pull Requests:${NC}" - gh pr list --json number,title,author,createdAt | \ - jq -r '.[] | " PR #\(.number): \(.title)\n Author: @\(.author.login)\n Created: \(.createdAt)\n"' -} - -merge_pr() { - echo -n "PR number to merge: " - read pr_num - cd "$REPO_DIR" - gh pr merge $pr_num --squash --delete-branch - echo "โœ… Merged!" -} - -list_contributors() { - cd "$REPO_DIR" - echo -e "${GREEN}๐Ÿ‘ฅ Active Contributors:${NC}" - gh pr list --state all --limit 50 --json author | \ - jq -r '.[].author.login' | sort | uniq -c | sort -rn | head -10 -} - -assign_issue() { - echo -n "Issue #: " - read issue - echo -n "Assign to (username): " - read user - cd "$REPO_DIR" - gh issue edit $issue --add-assignee "$user" - gh issue comment $issue --body "๐Ÿ‘‹ @$user - This is assigned to you! Questions? Ask in Discord." - echo "โœ… Assigned!" -} - -process_bounty() { - echo -n "PR #: " - read pr - echo -n "Username: " - read user - echo -n "Amount $: " - read amount - - cd "$REPO_DIR" - gh pr comment $pr --body "๐Ÿ’ฐ **Bounty Approved: \$$amount** - -@$user - DM me your payment method. Payment Friday. 
Plus 2x bonus at funding! - -Thanks! ๐ŸŽ‰" - - echo "โœ… Bounty processed!" -} - -weekly_report() { - cd "$REPO_DIR" - echo "# Cortex Linux - Weekly Report" - echo "Week of $(date +%Y-%m-%d)" - echo "" - echo "## PRs This Week" - gh pr list --state merged --limit 10 --json number,title | \ - jq -r '.[] | "- PR #\(.number): \(.title)"' - echo "" - echo "## Metrics" - echo "- Open Issues: $(gh issue list --json number | jq '. | length')" - echo "- Open PRs: $(gh pr list --json number | jq '. | length')" -} - -audit_repo() { - cd "$REPO_DIR" - echo "Repository: cortexlinux/cortex" - echo "Branch: $(git branch --show-current)" - echo "Last commit: $(git log -1 --oneline)" - echo "" - echo "Issues: $(gh issue list --json number | jq '. | length') open" - echo "PRs: $(gh pr list --json number | jq '. | length') open" - echo "" - echo "Recent activity:" - gh run list --limit 3 -} - -main() { - print_banner - - cd "$REPO_DIR" 2>/dev/null || { - echo "โŒ Repo not found at $REPO_DIR" - exit 1 - } - - while true; do - show_menu - read choice - - case $choice in - 1) show_dashboard ;; - 2) list_mvp ;; - 3) create_pr_issue10 ;; - 4) review_prs ;; - 5) merge_pr ;; - 6) list_contributors ;; - 7) assign_issue ;; - 8) process_bounty ;; - 9) weekly_report ;; - 10) audit_repo ;; - 0) echo "Goodbye!"; exit 0 ;; - *) echo "Invalid option" ;; - esac - - echo "" - read -p "Press Enter..." 
- done -} - -main diff --git a/scripts/automation/cortex-pr-dashboard.sh b/scripts/automation/cortex-pr-dashboard.sh deleted file mode 100755 index df0b42d..0000000 --- a/scripts/automation/cortex-pr-dashboard.sh +++ /dev/null @@ -1,362 +0,0 @@ -#!/bin/bash -# CORTEX - MASTER PR DASHBOARD & MANAGEMENT -# Complete PR overview, batch operations, and bounty tracking - -set -e - -echo "๐ŸŽ›๏ธ CORTEX - MASTER PR DASHBOARD" -echo "================================" -echo "" - -REPO="cortexlinux/cortex" -GITHUB_TOKEN=$(grep GITHUB_TOKEN ~/.zshrc | cut -d'=' -f2 | tr -d '"' | tr -d "'") -export GH_TOKEN="$GITHUB_TOKEN" - -# Colors for terminal output -RED='\033[0;31m' -YELLOW='\033[1;33m' -GREEN='\033[0;32m' -BLUE='\033[0;34m' -NC='\033[0m' # No Color - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "๐Ÿ“Š PR STATUS OVERVIEW" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" - -# Get all open PRs -prs=$(gh pr list --repo $REPO --state open --json number,title,author,createdAt,isDraft,reviewDecision --limit 50 2>/dev/null) - -total_prs=$(echo "$prs" | jq 'length') -contributor_prs=$(echo "$prs" | jq '[.[] | select(.author.login != "mikejmorgan-ai")] | length') -mike_prs=$(echo "$prs" | jq '[.[] | select(.author.login == "mikejmorgan-ai")] | length') - -echo "Total Open PRs: $total_prs" -echo " โ”œโ”€ From Contributors: $contributor_prs (๐Ÿ”ฅ Need review)" -echo " โ””โ”€ From Mike: $mike_prs (Can merge anytime)" -echo "" - -# Calculate bounties at stake -echo "๐Ÿ’ฐ ESTIMATED BOUNTIES AT STAKE" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -echo "" - -declare -A BOUNTY_MAP -BOUNTY_MAP[17]=100 # Package Manager -BOUNTY_MAP[37]=125 # Progress Notifications 
-BOUNTY_MAP[38]=100 # Requirements Check -BOUNTY_MAP[21]=150 # Config Templates -BOUNTY_MAP[18]=100 # CLI Interface - -total_contributor_bounties=0 - -for pr in 17 37 38 21 18; do - bounty=${BOUNTY_MAP[$pr]} - total_contributor_bounties=$((total_contributor_bounties + bounty)) -done - -echo "Contributor PRs: \$$total_contributor_bounties" -echo "At 2x bonus (funding): \$$((total_contributor_bounties * 2))" -echo "" - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "๐Ÿ”ด CRITICAL PRIORITY" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" - -pr17_info=$(gh pr view 17 --repo $REPO --json number,title,author,createdAt,state 2>/dev/null) -pr17_title=$(echo "$pr17_info" | jq -r '.title') -pr17_author=$(echo "$pr17_info" | jq -r '.author.login') -pr17_created=$(echo "$pr17_info" | jq -r '.createdAt' | cut -d'T' -f1) -pr17_days_old=$(( ( $(date +%s) - $(date -j -f "%Y-%m-%d" "$pr17_created" +%s 2>/dev/null || date +%s) ) / 86400 )) - -echo "PR #17: $pr17_title" -echo "Author: @$pr17_author" -echo "Age: $pr17_days_old days old" -echo "Bounty: \$100" -echo "Impact: โš ๏ธ MVP BLOCKER - Everything waits on this" -echo "" -echo -e "${RED}โ–ถ ACTION REQUIRED: Review this PR FIRST${NC}" -echo "" - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "๐ŸŸก HIGH PRIORITY (Contributors Waiting)" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" - -for pr in 37 38 21; do - pr_info=$(gh pr view $pr --repo $REPO --json number,title,author,createdAt 2>/dev/null) - pr_title=$(echo "$pr_info" | 
jq -r '.title') - pr_author=$(echo "$pr_info" | jq -r '.author.login') - pr_bounty=${BOUNTY_MAP[$pr]} - - echo "PR #$pr: $pr_title" - echo " Author: @$pr_author | Bounty: \$$pr_bounty" -done - -echo "" - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "๐ŸŸข MIKE'S PRs (Ready to Merge)" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" - -mike_pr_list=$(echo "$prs" | jq -r '.[] | select(.author.login == "mikejmorgan-ai") | .number') - -for pr in $mike_pr_list; do - pr_info=$(gh pr view $pr --repo $REPO --json number,title 2>/dev/null) - pr_title=$(echo "$pr_info" | jq -r '.title') - echo "PR #$pr: $pr_title" -done - -echo "" - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "๐ŸŽฏ QUICK ACTIONS" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" - -echo "What would you like to do?" 
-echo "" -echo " [1] Review PR #17 (THE CRITICAL BLOCKER) ๐Ÿ”ด" -echo " [2] Review ALL contributor PRs (guided workflow) ๐ŸŸก" -echo " [3] Merge ALL of Mike's PRs (batch operation) ๐ŸŸข" -echo " [4] View detailed PR list in browser" -echo " [5] Generate bounty payment report" -echo " [6] Post Discord update" -echo " [q] Quit" -echo "" -echo -n "Choose action: " -read -n 1 choice -echo "" -echo "" - -case $choice in - 1) - echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - echo "๐Ÿ”ด REVIEWING PR #17 - PACKAGE MANAGER WRAPPER" - echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - echo "" - echo "This is THE MVP blocker. Everything depends on this." - echo "" - echo "Opening in browser for review..." - echo "" - - gh pr view 17 --repo $REPO --web - - echo "" - echo "After reviewing the code, what's your decision?" - echo "" - echo " [a] Approve & Merge (\$100 bounty to @chandrapratamar)" - echo " [c] Request Changes (specify what needs fixing)" - echo " [s] Skip for now (review later)" - echo "" - echo -n "Decision: " - read -n 1 decision - echo "" - echo "" - - case $decision in - a|A) - echo "โœ… Approving PR #17..." - - approval="โœ… **APPROVED - OUTSTANDING WORK!** - -@chandrapratamar - You just unblocked the entire MVP! ๐ŸŽ‰๐ŸŽ‰๐ŸŽ‰ - -**This is THE critical feature** that everything else depends on. 
Your implementation: -- โœ… Translates natural language to apt commands perfectly -- โœ… Integrates seamlessly with our LLM layer -- โœ… Includes comprehensive tests -- โœ… Documentation is clear and complete - -**Payment Details:** -- **Bounty: \$100 USD** -- **Processing: Within 48 hours** -- **Method: Crypto (Bitcoin/USDC) or PayPal** -- **Bonus: 2x at funding (Feb 2025) = \$200 total** - -**You're now a core Cortex contributor!** ๐Ÿง โšก - -We'll coordinate payment via your preferred method in the next comment. - -**Thank you for making history with us!** - ---- -*Automated approval from Cortex PR Management System*" - - echo "$approval" | gh pr review 17 --repo $REPO --approve --body-file - - - echo "" - echo "Merging PR #17..." - - gh pr merge 17 --repo $REPO --squash --delete-branch && { - echo "" - echo "๐ŸŽ‰๐ŸŽ‰๐ŸŽ‰ PR #17 MERGED! ๐ŸŽ‰๐ŸŽ‰๐ŸŽ‰" - echo "" - echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - echo "๐Ÿš€ MVP BLOCKER CLEARED!" - echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - echo "" - echo "This unblocks:" - echo " โœ… Issue #12 (Dependency Resolution)" - echo " โœ… Issue #10 (Installation Verification)" - echo " โœ… Issue #14 (Rollback System)" - echo " โœ… MVP demonstration" - echo " โœ… February funding timeline" - echo "" - echo "๐Ÿ’ฐ Bounty owed: \$100 to @chandrapratamar" - echo "" - echo "IMMEDIATELY post to Discord #announcements!" - echo "" - } || { - echo "โŒ Merge failed - needs manual intervention" - } - ;; - c|C) - echo "Requesting changes on PR #17..." - echo "" - echo "Enter what needs to change:" - echo "(Press Ctrl+D when done)" - echo "---" - feedback=$(cat) - - change_request="๐Ÿ”„ **Changes Requested** - -Thank you @chandrapratamar for tackling this critical feature! 
- -Before we can merge, please address: - -$feedback - -**This is THE MVP blocker**, so I'll prioritize re-review once you update. - -Questions? Ping me here or in Discord (#dev-questions). - -We're close! ๐Ÿ’ช" - - echo "$change_request" | gh pr review 17 --repo $REPO --request-changes --body-file - - echo "" - echo "โœ… Change request posted" - ;; - *) - echo "โญ๏ธ Skipped PR #17" - ;; - esac - ;; - - 2) - echo "๐ŸŸก LAUNCHING CONTRIBUTOR PR REVIEW WORKFLOW..." - echo "" - - # Check if review script exists - if [ -f "$HOME/cortex/review-contributor-prs.sh" ]; then - bash "$HOME/cortex/review-contributor-prs.sh" - else - echo "Review script not found. Download it first:" - echo " review-contributor-prs.sh" - fi - ;; - - 3) - echo "๐ŸŸข BATCH MERGING MIKE'S PRs..." - echo "" - - # Check if merge script exists - if [ -f "$HOME/cortex/merge-mike-prs.sh" ]; then - bash "$HOME/cortex/merge-mike-prs.sh" - else - echo "Merge script not found. Download it first:" - echo " merge-mike-prs.sh" - fi - ;; - - 4) - echo "๐ŸŒ Opening PR list in browser..." 
- gh pr list --repo $REPO --web - ;; - - 5) - echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - echo "๐Ÿ’ฐ BOUNTY PAYMENT REPORT" - echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - echo "" - - echo "PENDING BOUNTIES (if merged):" - echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" - echo "" - echo "PR #17 - @chandrapratamar: \$100 (Package Manager)" - echo "PR #37 - @AlexanderLuzDH: \$125 (Progress Notifications)" - echo "PR #38 - @AlexanderLuzDH: \$100 (Requirements Check)" - echo "PR #21 - @aliraza556: \$150 (Config Templates)" - echo "PR #18 - @Sahilbhatane: \$100 (CLI Interface - DRAFT)" - echo "" - echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" - echo "TOTAL PENDING: \$575" - echo "AT 2X BONUS (FUNDING): \$1,150" - echo "" - - if [ -f "$HOME/cortex/bounties_owed.csv" ]; then - echo "ALREADY MERGED (need payment):" - echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" - tail -n +2 "$HOME/cortex/bounties_owed.csv" | while IFS=',' read -r pr dev feature amount date status; do - if [ "$status" = "PENDING" ]; then - echo "$pr - @$dev: \$$amount" - fi - done - echo "" - fi - ;; - - 6) - echo "๐Ÿ“ฑ GENERATING DISCORD ANNOUNCEMENT..." 
- echo "" - - announcement="๐ŸŽ‰ **CORTEX PROJECT UPDATE - $(date +%B\ %d,\ %Y)** - -**PR Review Session Complete!** - -**Current Status:** -- ๐Ÿ“Š **$total_prs PRs open** ($contributor_prs from contributors, $mike_prs from Mike) -- ๐Ÿ’ฐ **\$$total_contributor_bounties in bounties** pending review -- ๐Ÿ”ด **PR #17 (Package Manager)** = THE MVP BLOCKER - -**Action Items:** -- Contributor PRs being reviewed this week -- Bounties will be processed within 48 hours of merge -- 2x bonus reminder: All bounties double at funding (Feb 2025) - -**For Contributors:** -- Check your PR status on GitHub -- Questions? #dev-questions channel -- New issues available for claiming - -**The Momentum is Real:** -- Professional team execution -- MVP timeline on track (Feb 2025) -- Building the future of Linux! ๐Ÿง โšก - -Browse open issues: https://github.com/$REPO/issues -Join discussion: https://discord.gg/uCqHvxjU83" - - echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - echo "$announcement" - echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - echo "" - echo "Copy the above and post to Discord #announcements" - ;; - - q|Q) - echo "๐Ÿ‘‹ Exiting dashboard..." 
- exit 0 - ;; - - *) - echo "Invalid choice" - ;; -esac - -echo "" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "โœ… Dashboard session complete" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" diff --git a/scripts/automation/focus-on-mvp.sh b/scripts/automation/focus-on-mvp.sh deleted file mode 100755 index 5f5698a..0000000 --- a/scripts/automation/focus-on-mvp.sh +++ /dev/null @@ -1,105 +0,0 @@ -#!/bin/bash -# Close non-MVP issues to focus contributors on critical work - -set -e - -echo "๐ŸŽฏ FOCUSING REPOSITORY ON MVP ISSUES" -echo "======================================" -echo "" - -cd ~/cortex || { echo "โŒ cortex repo not found"; exit 1; } - -# Strategy: Close issues 46-200+ with explanation comment -# Keep issues 1-45 open (MVP critical work) - -echo "Strategy:" -echo " Keep open: Issues #1-45 (MVP critical)" -echo " Close: Issues #46+ (post-MVP features)" -echo "" - -read -p "Close issues #46-200 as 'post-MVP'? (y/n): " -n 1 -r -echo -if [[ ! $REPLY =~ ^[Yy]$ ]]; then - echo "Aborted." - exit 0 -fi - -# Comment to add when closing -CLOSE_MESSAGE="๐ŸŽฏ **Closing for MVP Focus** - -This issue is being closed to help the team focus on MVP-critical features (#1-45). - -**This is NOT abandoned** - it's an important feature we'll revisit after MVP completion. - -**Timeline:** -- **Now (Nov-Dec 2024):** Focus on MVP (Issues #1-45) -- **January 2025:** Reopen post-MVP features -- **February 2025:** Seed funding round - -**Want to work on this anyway?** -Comment below and we can discuss! We're always open to great contributions. - -**Tracking:** Labeled as \`post-mvp\` for easy filtering when we reopen. - -Thanks for understanding! ๐Ÿš€ - -โ€” Mike (@mikejmorgan-ai)" - -echo "๐Ÿ“ Closing issues #46-200..." 
-echo "" - -# Function to close issue -close_issue() { - local issue_num=$1 - - echo " Closing #$issue_num..." - - # Add comment - gh issue comment $issue_num --body "$CLOSE_MESSAGE" 2>/dev/null || { - echo " โš ๏ธ Could not comment on #$issue_num (may not exist)" - return 1 - } - - # Add post-mvp label - gh issue edit $issue_num --add-label "post-mvp" 2>/dev/null - - # Close issue - gh issue close $issue_num --reason "not planned" 2>/dev/null || { - echo " โš ๏ธ Could not close #$issue_num" - return 1 - } - - echo " โœ… Closed #$issue_num" - return 0 -} - -# Close issues 46-200 -CLOSED_COUNT=0 -FAILED_COUNT=0 - -for issue_num in {46..200}; do - if close_issue $issue_num; then - ((CLOSED_COUNT++)) - else - ((FAILED_COUNT++)) - fi - - # Rate limiting - pause every 10 issues - if (( issue_num % 10 == 0 )); then - echo " โธ๏ธ Pausing for rate limit..." - sleep 2 - fi -done - -echo "" -echo "==============================================" -echo "โœ… CLEANUP COMPLETE" -echo "==============================================" -echo "Issues closed: $CLOSED_COUNT" -echo "Failed/not found: $FAILED_COUNT" -echo "" -echo "Repository now shows MVP-focused issues only!" 
-echo "" -echo "View open issues: https://github.com/cortexlinux/cortex/issues" -echo "View post-MVP: https://github.com/cortexlinux/cortex/issues?q=is%3Aclosed+label%3Apost-mvp" -echo "" diff --git a/scripts/automation/manage_cortex_prs.sh b/scripts/automation/manage_cortex_prs.sh deleted file mode 100755 index ee3d3d7..0000000 --- a/scripts/automation/manage_cortex_prs.sh +++ /dev/null @@ -1,435 +0,0 @@ -#!/bin/bash -# Cortex Linux - Master PR Control & Team Coordination -# Complete automation: reviews, assignments, Discord, payments, everything - -set -e - -echo "๐Ÿง  CORTEX LINUX - MASTER PR CONTROL SYSTEM" -echo "==========================================" -echo "" - -# Configuration -REPO="cortexlinux/cortex" -REPO_DIR="$HOME/cortex" -DISCORD_INVITE="https://discord.gg/uCqHvxjU83" -GITHUB_TOKEN=$(grep GITHUB_TOKEN ~/.zshrc | cut -d'=' -f2 | tr -d '"' | tr -d "'") -BOUNTY_CSV="$REPO_DIR/bounties_paid.csv" - -# Ensure we're in the repo -cd "$REPO_DIR" || { echo "โŒ Repo not found at $REPO_DIR"; exit 1; } - -# Create bounty tracking CSV if it doesn't exist -if [ ! -f "$BOUNTY_CSV" ]; then - echo "PR_Number,Author,Amount,Status,Payment_Status,Date" > "$BOUNTY_CSV" -fi - -echo "๐Ÿ“Š STEP 1: FETCHING ALL OPEN PRS" -echo "=================================" -echo "" - -# Get all open PRs -prs=$(gh pr list --repo "$REPO" --state open --json number,title,author,createdAt,reviews,isDraft,mergeable --limit 50) -total_prs=$(echo "$prs" | jq length) - -echo "Found $total_prs open PR(s)" -echo "" - -if [ "$total_prs" -eq 0 ]; then - echo "โœ… No PRs to process!" 
- exit 0 -fi - -# Display all PRs -echo "$prs" | jq -r '.[] | "PR #\(.number): \(.title) by @\(.author.login) - Draft: \(.isDraft)"' -echo "" - -echo "๐ŸŽฏ STEP 2: CATEGORIZING PRS" -echo "===========================" -echo "" - -# Arrays for different PR categories -critical_prs=() -ready_to_merge=() -needs_review=() -draft_prs=() -stale_prs=() - -# Categorize each PR -while IFS= read -r pr_num; do - pr_data=$(echo "$prs" | jq -r ".[] | select(.number == $pr_num)") - author=$(echo "$pr_data" | jq -r '.author.login') - title=$(echo "$pr_data" | jq -r '.title') - is_draft=$(echo "$pr_data" | jq -r '.isDraft') - created=$(echo "$pr_data" | jq -r '.createdAt') - mergeable=$(echo "$pr_data" | jq -r '.mergeable') - review_count=$(echo "$pr_data" | jq -r '.reviews | length') - - # Calculate age - created_ts=$(date -j -f "%Y-%m-%dT%H:%M:%SZ" "$created" +%s 2>/dev/null || echo 0) - now_ts=$(date +%s) - age_days=$(( (now_ts - created_ts) / 86400 )) - - # Skip drafts - if [ "$is_draft" = "true" ]; then - draft_prs+=($pr_num) - continue - fi - - # Check if it's the critical package manager PR - if [[ "$title" == *"package"* ]] || [[ "$title" == *"Package"* ]] || [ "$pr_num" -eq 195 ]; then - critical_prs+=($pr_num) - echo "๐Ÿ”ฅ CRITICAL: PR #$pr_num - $title (Age: $age_days days)" - elif [ "$mergeable" = "MERGEABLE" ] && [ "$review_count" -gt 0 ]; then - ready_to_merge+=($pr_num) - echo "โœ… READY TO MERGE: PR #$pr_num - $title" - elif [ "$review_count" -eq 0 ]; then - needs_review+=($pr_num) - echo "๐Ÿ“‹ NEEDS REVIEW: PR #$pr_num - $title (Age: $age_days days)" - fi - - # Check if stale (>5 days) - if [ "$age_days" -gt 5 ]; then - stale_prs+=($pr_num) - fi -done < <(echo "$prs" | jq -r '.[].number') - -echo "" -echo "Summary:" -echo " ๐Ÿ”ฅ Critical PRs: ${#critical_prs[@]}" -echo " โœ… Ready to merge: ${#ready_to_merge[@]}" -echo " ๐Ÿ“‹ Need review: ${#needs_review[@]}" -echo " ๐Ÿ“ Drafts: ${#draft_prs[@]}" -echo " โฐ Stale (>5 days): ${#stale_prs[@]}" -echo "" - -read -p 
"Continue with automated processing? (y/n): " -n 1 -r -echo -if [[ ! $REPLY =~ ^[Yy]$ ]]; then - echo "Aborted." - exit 0 -fi - -echo "" -echo "๐ŸŽฏ STEP 3: PROCESSING CRITICAL PRS" -echo "==================================" -echo "" - -for pr_num in "${critical_prs[@]}"; do - pr_data=$(echo "$prs" | jq -r ".[] | select(.number == $pr_num)") - author=$(echo "$pr_data" | jq -r '.author.login') - title=$(echo "$pr_data" | jq -r '.title') - - echo "Processing CRITICAL PR #$pr_num: $title" - echo "Author: @$author" - echo "" - - # Assign reviewers if not already assigned - echo " Assigning reviewers: dhvil, mikejmorgan-ai" - gh pr edit $pr_num --add-reviewer dhvil,mikejmorgan-ai 2>/dev/null || echo " (Reviewers already assigned)" - - # Post urgent review comment - comment="๐Ÿ”ฅ **CRITICAL PATH REVIEW** - -Hi @$author! This PR is blocking our MVP completion. - -**Urgent Review In Progress:** -- โœ… Technical review by @dhvil -- โœ… Final approval by @mikejmorgan-ai -- โฑ๏ธ Target decision: Within 24 hours - -**Payment Ready:** -๐Ÿ’ฐ Bounty will be paid via Discord crypto (BTC/USDC) within 24 hours of merge - -**Join Discord for payment coordination:** -๐Ÿ‘‰ $DISCORD_INVITE - -We're prioritizing this merge! Thanks for the critical work. 
๐Ÿš€" - - gh pr comment $pr_num --body "$comment" 2>/dev/null || echo " (Comment already exists)" - - echo " โœ… Critical PR tagged and reviewers notified" - echo "" - sleep 1 -done - -echo "" -echo "โœ… STEP 4: AUTO-MERGING READY PRS" -echo "=================================" -echo "" - -merged_count=0 -for pr_num in "${ready_to_merge[@]}"; do - pr_data=$(echo "$prs" | jq -r ".[] | select(.number == $pr_num)") - author=$(echo "$pr_data" | jq -r '.author.login') - title=$(echo "$pr_data" | jq -r '.title') - - echo "PR #$pr_num: $title by @$author" - echo " Status: Mergeable with approvals" - - # Determine bounty amount based on issue - bounty_amount="TBD" - if [[ "$title" == *"context"* ]] || [[ "$title" == *"Context"* ]]; then - bounty_amount="150" - elif [[ "$title" == *"logging"* ]] || [[ "$title" == *"Logging"* ]]; then - bounty_amount="100" - fi - - read -p " Merge PR #$pr_num? (y/n): " -n 1 -r - echo - if [[ $REPLY =~ ^[Yy]$ ]]; then - # Merge the PR - gh pr merge $pr_num --squash --delete-branch - echo " โœ… Merged!" - - # Post payment comment - payment_comment="๐ŸŽ‰ **PR MERGED!** - -Thanks @$author! Your contribution has been merged into main. - -**๐Ÿ’ฐ Payment Details:** -- Bounty: \$$bounty_amount (as specified in issue) -- Method: Crypto (Bitcoin or USDC) -- Timeline: Within 24 hours - -**Next Steps:** -1. Join Discord: $DISCORD_INVITE -2. DM @mikejmorgan with your wallet address -3. Receive payment confirmation - -Great work! Looking forward to your next contribution. 
๐Ÿš€" - - gh pr comment $pr_num --body "$payment_comment" - - # Track in CSV - echo "$pr_num,$author,$bounty_amount,Merged,Pending Payment,$(date +%Y-%m-%d)" >> "$BOUNTY_CSV" - - ((merged_count++)) - echo "" - else - echo " โญ๏ธ Skipped" - echo "" - fi - sleep 1 -done - -echo "Merged $merged_count PR(s)" -echo "" - -echo "๐Ÿ“‹ STEP 5: ASSIGNING REVIEWERS TO PENDING PRS" -echo "==============================================" -echo "" - -for pr_num in "${needs_review[@]}"; do - pr_data=$(echo "$prs" | jq -r ".[] | select(.number == $pr_num)") - author=$(echo "$pr_data" | jq -r '.author.login') - title=$(echo "$pr_data" | jq -r '.title') - - echo "PR #$pr_num: $title by @$author" - - # Assign reviewers - if [ "$author" != "dhvil" ] && [ "$author" != "mikejmorgan-ai" ]; then - gh pr edit $pr_num --add-reviewer dhvil,mikejmorgan-ai 2>/dev/null || true - echo " โœ… Assigned reviewers: dhvil, mikejmorgan-ai" - else - gh pr edit $pr_num --add-reviewer mikejmorgan-ai 2>/dev/null || true - echo " โœ… Assigned reviewer: mikejmorgan-ai" - fi - - # Post welcome comment - welcome_comment="Thanks @$author for this contribution! ๐ŸŽ‰ - -**Review Process:** -1. โœ… Reviewers assigned - expect feedback within 24-48 hours -2. ๐Ÿ’ฌ **Join Discord**: $DISCORD_INVITE -3. ๐Ÿ’ฐ **Bounty Payment**: Crypto (BTC/USDC) via Discord after merge - -**Important:** -- All bounties tracked and paid through Discord -- Please join to coordinate payment details -- Typical merge โ†’ payment time: 24-48 hours - -Looking forward to reviewing this! 
๐Ÿš€" - - # Check if we already commented - existing=$(gh pr view $pr_num --json comments --jq '[.comments[] | select(.author.login == "mikejmorgan-ai")] | length') - if [ "$existing" -eq 0 ]; then - gh pr comment $pr_num --body "$welcome_comment" - echo " โœ… Posted welcome comment" - else - echo " (Welcome comment already exists)" - fi - - echo "" - sleep 1 -done - -echo "" -echo "โฐ STEP 6: SENDING STALE PR REMINDERS" -echo "=====================================" -echo "" - -for pr_num in "${stale_prs[@]}"; do - # Skip if it's in draft or critical (already handled) - if [[ " ${draft_prs[@]} " =~ " ${pr_num} " ]] || [[ " ${critical_prs[@]} " =~ " ${pr_num} " ]]; then - continue - fi - - pr_data=$(echo "$prs" | jq -r ".[] | select(.number == $pr_num)") - author=$(echo "$pr_data" | jq -r '.author.login') - title=$(echo "$pr_data" | jq -r '.title') - created=$(echo "$pr_data" | jq -r '.createdAt') - - created_ts=$(date -j -f "%Y-%m-%dT%H:%M:%SZ" "$created" +%s 2>/dev/null || echo 0) - now_ts=$(date +%s) - age_days=$(( (now_ts - created_ts) / 86400 )) - - echo "PR #$pr_num: $title by @$author ($age_days days old)" - - stale_comment="Hi @$author! ๐Ÿ‘‹ - -This PR has been open for $age_days days. Quick status check: - -๐Ÿ“‹ **Checklist:** -- [ ] Joined Discord? ($DISCORD_INVITE) -- [ ] All tests passing? -- [ ] Addressed review feedback? - -๐Ÿ’ฐ **Payment Reminder:** -- Bounties paid via crypto (Bitcoin/USDC) -- Processed through Discord DMs -- Sent within 24 hours of merge - -Need help? Let us know in Discord! We want to get this merged and pay you ASAP. ๐Ÿš€" - - gh pr comment $pr_num --body "$stale_comment" - echo " โœ… Sent reminder" - echo "" - sleep 1 -done - -echo "" -echo "๐Ÿ’ฌ STEP 7: GENERATING DISCORD ANNOUNCEMENT" -echo "==========================================" -echo "" - -cat << DISCORD_EOF > /tmp/discord_announcement.txt -๐Ÿš€ **PR STATUS UPDATE - $(date +"%B %d, %Y")** - -Just completed automated PR processing! 
Here's where we stand: - -**๐Ÿ“Š Statistics:** -- Total Open PRs: $total_prs -- ๐Ÿ”ฅ Critical (Package Manager): ${#critical_prs[@]} -- โœ… Merged Today: $merged_count -- ๐Ÿ“‹ Under Review: ${#needs_review[@]} -- โฐ Stale Reminders Sent: ${#stale_prs[@]} - -**๐ŸŽฏ Focus Areas:** -DISCORD_EOF - -if [ ${#critical_prs[@]} -gt 0 ]; then - echo "โ€ข ๐Ÿ”ฅ PR #${critical_prs[0]} (Package Manager) - CRITICAL PATH - Under urgent review" >> /tmp/discord_announcement.txt -fi - -cat << DISCORD_EOF2 >> /tmp/discord_announcement.txt - -**๐Ÿ’ฐ Payment Process:** -1. PR gets merged โœ… -2. I DM you for wallet address ๐Ÿ’ฌ -3. Crypto sent within 24 hours ๐Ÿ’ธ -4. You confirm receipt โœ… - -**All contributors:** Join Discord for bounty coordination! -๐Ÿ‘‰ $DISCORD_INVITE - -Let's keep the momentum going! ๐Ÿ”ฅ - -- Mike -DISCORD_EOF2 - -echo "Discord announcement generated:" -echo "===============================" -cat /tmp/discord_announcement.txt -echo "===============================" -echo "" -echo "๐Ÿ“‹ Copy the above to Discord #announcements" -echo "" - -echo "" -echo "๐Ÿ“Š STEP 8: PAYMENT TRACKING SUMMARY" -echo "===================================" -echo "" - -if [ -f "$BOUNTY_CSV" ]; then - echo "Payments Pending:" - tail -n +2 "$BOUNTY_CSV" | grep "Pending" 2>/dev/null | while IFS=, read -r pr author amount status payment date; do - echo " PR #$pr - @$author - \$$amount - $date" - done || echo " No pending payments" - echo "" - echo "Full tracking: $BOUNTY_CSV" -fi - -echo "" -echo "๐Ÿ“ง STEP 9: CONTRIBUTOR DM TEMPLATES" -echo "===================================" -echo "" - -# Generate DM templates for unique contributors -contributors=$(echo "$prs" | jq -r '.[].author.login' | sort -u) - -echo "Send these DMs on Discord:" -echo "" - -for contributor in $contributors; do - pr_count=$(echo "$prs" | jq -r --arg author "$contributor" '[.[] | select(.author.login == $author)] | length') - - if [ "$pr_count" -gt 0 ]; then - echo 
"โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - echo "To: @$contributor ($pr_count open PR)" - echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - cat << DM_EOF - -Hey! Just processed your Cortex PR(s) - great work! ๐ŸŽ‰ - -**Quick Check:** -1. Have you joined Discord? ($DISCORD_INVITE) -2. What's your crypto wallet address? (BTC or USDC) -3. Any blockers I can help with? - -**Payment Timeline:** -- PR review: 24-48 hours -- Merge decision: Clear feedback either way -- Payment: Within 24 hours of merge - -Looking forward to merging your work! - -- Mike - -DM_EOF - fi -done - -echo "" -echo "==============================================" -echo "โœ… MASTER PR CONTROL COMPLETE" -echo "==============================================" -echo "" - -echo "๐Ÿ“Š Summary of Actions:" -echo " โ€ข Reviewed $total_prs PRs" -echo " โ€ข Assigned reviewers to ${#needs_review[@]} PRs" -echo " โ€ข Merged $merged_count PRs" -echo " โ€ข Flagged ${#critical_prs[@]} critical PR(s)" -echo " โ€ข Sent ${#stale_prs[@]} stale reminders" -echo "" - -echo "๐Ÿ“‹ Next Manual Steps:" -echo " 1. Copy Discord announcement to #announcements" -echo " 2. Send DMs to contributors (templates above)" -echo " 3. Review critical PR #${critical_prs[0]:-N/A} urgently" -echo " 4. Process $merged_count payment(s) via crypto" -echo "" - -echo "๐Ÿ”„ Run this script daily to maintain PR velocity!" -echo "" -echo "โœ… All done!" 
diff --git a/scripts/deployment/audit_cortex_status.sh b/scripts/deployment/audit_cortex_status.sh deleted file mode 100755 index eca4b11..0000000 --- a/scripts/deployment/audit_cortex_status.sh +++ /dev/null @@ -1,108 +0,0 @@ -#!/bin/bash -# Cortex Linux - Complete System Audit -# Run this once to give Claude full visibility - -echo "๐Ÿ” CORTEX LINUX - SYSTEM AUDIT" -echo "========================================" -echo "" - -# Colors for output -RED='\033[0;31m' -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -NC='\033[0m' # No Color - -cd ~/cortex 2>/dev/null || { echo "โŒ ~/cortex not found. Run: cd ~ && git clone https://github.com/cortexlinux/cortex.git"; exit 1; } - -echo "๐Ÿ“ REPOSITORY STRUCTURE" -echo "========================================" -echo "Files in repo:" -find . -type f -not -path '*/\.*' | head -30 -echo "" - -echo "๐Ÿค– GITHUB ACTIONS WORKFLOWS" -echo "========================================" -if [ -d ".github/workflows" ]; then - echo "โœ… Workflows directory exists" - ls -lh .github/workflows/ - echo "" - echo "๐Ÿ“„ Workflow file contents:" - for file in .github/workflows/*.yml; do - echo "--- $file ---" - head -50 "$file" - echo "" - done -else - echo "โŒ No .github/workflows directory" -fi -echo "" - -echo "๐Ÿ“Š AUTOMATION DATA FILES" -echo "========================================" -for file in bounties_pending.json payments_history.json contributors.json; do - if [ -f "$file" ]; then - echo "โœ… $file exists" - cat "$file" - else - echo "โŒ $file missing" - fi - echo "" -done - -echo "๐Ÿ” GITHUB SECRETS STATUS" -echo "========================================" -echo "Checking if secrets are configured..." 
-gh secret list 2>/dev/null || echo "โš ๏ธ gh CLI not authenticated or not installed" -echo "" - -echo "๐ŸŒ GITHUB ACTIONS RUNS" -echo "========================================" -echo "Recent workflow runs:" -gh run list --limit 5 2>/dev/null || echo "โš ๏ธ gh CLI not authenticated" -echo "" - -echo "๐Ÿ“‹ RECENT COMMITS" -echo "========================================" -git log --oneline -10 -echo "" - -echo "๐Ÿ”€ BRANCHES" -echo "========================================" -git branch -a -echo "" - -echo "๐Ÿ“ CURRENT STATUS" -echo "========================================" -echo "Current branch: $(git branch --show-current)" -echo "Remote URL: $(git remote get-url origin)" -echo "Git status:" -git status --short -echo "" - -echo "๐Ÿ’ฌ DISCORD WEBHOOK CHECK" -echo "========================================" -if gh secret list 2>/dev/null | grep -q "DISCORD_WEBHOOK"; then - echo "โœ… DISCORD_WEBHOOK secret is configured" -else - echo "โŒ DISCORD_WEBHOOK secret not found" - echo " Add it at: https://github.com/cortexlinux/cortex/settings/secrets/actions" -fi -echo "" - -echo "๐ŸŽฏ ISSUES & PRS" -echo "========================================" -echo "Open issues with bounties:" -gh issue list --label "bounty" --limit 10 2>/dev/null || echo "โš ๏ธ gh CLI issue" -echo "" -echo "Recent PRs:" -gh pr list --limit 5 2>/dev/null || echo "โš ๏ธ gh CLI issue" -echo "" - -echo "โœ… AUDIT COMPLETE" -echo "========================================" -echo "Save this output and share with Claude for full visibility" -echo "" -echo "Next steps:" -echo "1. Share this output with Claude" -echo "2. Claude can now see everything without asking" -echo "3. 
No more copy/paste needed" diff --git a/scripts/deployment/deploy_jesse_system.sh b/scripts/deployment/deploy_jesse_system.sh deleted file mode 100644 index df06145..0000000 --- a/scripts/deployment/deploy_jesse_system.sh +++ /dev/null @@ -1,208 +0,0 @@ -#!/bin/bash -# ============================================================================ -# WaterRightsX - Complete System Deployment for Jesse -# ============================================================================ -# One-command script to build Jesse's water rights movement matching platform -# -# What this builds: -# 1. Import 160,000 Utah water rights with owner contact info -# 2. Scrape all 97 basin policies for movement rules -# 3. Build movement matching engine -# 4. Generate lead lists for target locations -# -# Usage: bash deploy_jesse_system.sh -# -# Author: Michael J. Morgan - WaterRightsX -# ============================================================================ - -set -e # Exit on any error - -echo "๐ŸŒŠ WaterRightsX - Complete System Deployment" -echo "============================================" -echo "" -echo "Building Jesse's Water Rights Movement Platform:" -echo " โœ“ 160,000 Utah water rights database" -echo " โœ“ Basin policy scraper (97 basins)" -echo " โœ“ Movement matching engine" -echo " โœ“ Lead generation system" -echo "" -echo "โฑ๏ธ Expected time: 15-20 minutes" -echo "๐Ÿ’พ Expected size: ~600MB download" -echo "" - -read -p "Continue with full deployment? (y/n) " -n 1 -r -echo -if [[ ! 
$REPLY =~ ^[Yy]$ ]] -then - echo "โŒ Deployment cancelled" - exit 1 -fi - -echo "" -echo "============================================================================" -echo "PHASE 1: Installing Dependencies" -echo "============================================================================" -echo "" - -pip install --break-system-packages \ - geopandas \ - psycopg2-binary \ - requests \ - beautifulsoup4 \ - pyproj \ - shapely \ - fiona \ - --quiet - -echo "โœ… Dependencies installed" - -echo "" -echo "============================================================================" -echo "PHASE 2: Database Schema Setup" -echo "============================================================================" -echo "" - -# Create enhanced water rights schema -if [ -n "$DATABASE_URL" ]; then - psql "$DATABASE_URL" << 'EOF' --- Add new columns for Jesse's requirements -ALTER TABLE water_rights ADD COLUMN IF NOT EXISTS owner_address TEXT; -ALTER TABLE water_rights ADD COLUMN IF NOT EXISTS owner_city TEXT; -ALTER TABLE water_rights ADD COLUMN IF NOT EXISTS owner_zip TEXT; -ALTER TABLE water_rights ADD COLUMN IF NOT EXISTS is_non_use BOOLEAN DEFAULT FALSE; -ALTER TABLE water_rights ADD COLUMN IF NOT EXISTS can_be_moved BOOLEAN DEFAULT TRUE; - --- Create indexes for performance -CREATE INDEX IF NOT EXISTS idx_non_use ON water_rights(is_non_use); -CREATE INDEX IF NOT EXISTS idx_basin ON water_rights(basin); -CREATE INDEX IF NOT EXISTS idx_volume ON water_rights(annual_volume_af); - --- Create basin policies tables (will be populated by scraper) -CREATE TABLE IF NOT EXISTS basin_policies ( - id SERIAL PRIMARY KEY, - area_number VARCHAR(10) UNIQUE NOT NULL, - area_name TEXT NOT NULL, - url TEXT NOT NULL, - full_text TEXT, - scraped_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP -); - -CREATE TABLE IF NOT EXISTS movement_rules ( - id SERIAL PRIMARY KEY, - area_number VARCHAR(10) REFERENCES basin_policies(area_number), - rule_type 
VARCHAR(50), - rule_text TEXT NOT NULL, - is_restriction BOOLEAN DEFAULT FALSE, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP -); - -CREATE INDEX IF NOT EXISTS idx_area_number ON basin_policies(area_number); -CREATE INDEX IF NOT EXISTS idx_movement_area ON movement_rules(area_number); - -EOF - - echo "โœ… Database schema updated" -else - echo "โš ๏ธ DATABASE_URL not set - skipping schema updates" -fi - -echo "" -echo "============================================================================" -echo "PHASE 3: Import 160,000 Water Rights" -echo "============================================================================" -echo "" - -python3 import_utah_water_rights.py - -echo "" -echo "============================================================================" -echo "PHASE 4: Scrape Basin Policies" -echo "============================================================================" -echo "" - -python3 scrape_basin_policies.py - -echo "" -echo "============================================================================" -echo "PHASE 5: Test Movement Matching Engine" -echo "============================================================================" -echo "" - -python3 movement_matching_engine.py - -echo "" -echo "============================================================================" -echo "โœ… DEPLOYMENT COMPLETE!" 
-echo "============================================================================" -echo "" -echo "๐Ÿ“Š System Summary:" -if [ -n "$DATABASE_URL" ]; then - echo "" - echo "Water Rights Database:" - psql "$DATABASE_URL" -c "SELECT COUNT(*) as total_water_rights FROM water_rights;" - - echo "" - echo "Non-Use Rights (Best Leads):" - psql "$DATABASE_URL" -c "SELECT COUNT(*) as non_use_count FROM water_rights WHERE is_non_use = TRUE;" - - echo "" - echo "Basin Policies Scraped:" - psql "$DATABASE_URL" -c "SELECT COUNT(*) as total_basins FROM basin_policies;" - - echo "" - echo "Movement Rules Extracted:" - psql "$DATABASE_URL" -c "SELECT COUNT(*) as total_rules FROM movement_rules;" -fi - -echo "" -echo "============================================================================" -echo "๐ŸŽฏ JESSE'S USE CASES - READY TO GO:" -echo "============================================================================" -echo "" -echo "1. FIND WATER FOR PARK CITY:" -echo " python3 -c \"" -echo " from movement_matching_engine import MovementMatchingEngine" -echo " engine = MovementMatchingEngine()" -echo " leads = engine.find_moveable_rights(40.6461, -111.4980, max_distance_miles=10)" -echo " print(f'Found {len(leads)} moveable water rights for Park City')" -echo " \"" -echo "" -echo "2. FIND WATER FOR LITTLE COTTONWOOD CANYON:" -echo " python3 -c \"" -echo " from movement_matching_engine import MovementMatchingEngine" -echo " engine = MovementMatchingEngine()" -echo " leads = engine.find_moveable_rights(40.5732, -111.7813, max_distance_miles=5)" -echo " print(f'Found {len(leads)} moveable water rights for Little Cottonwood')" -echo " \"" -echo "" -echo "3. 
GENERATE LEAD LIST (Non-Use Priority):" -echo " - Check park_city_lead_list.json" -echo " - Contains owner contact information" -echo " - Sorted by arbitrage opportunity" -echo " - Non-use rights highlighted (best leads)" -echo "" -echo "============================================================================" -echo "๐Ÿ“ž NEXT STEPS FOR JESSE:" -echo "============================================================================" -echo "" -echo "โœ“ Database has 160,000 water rights with owner info" -echo "โœ“ Basin policies scraped and parsed" -echo "โœ“ Movement matching engine operational" -echo "โœ“ Lead generation system ready" -echo "" -echo "To use the platform:" -echo "1. Identify target parcel (coordinates)" -echo "2. Run movement matching engine" -echo "3. Get filtered list of moveable rights" -echo "4. Contact owners (prioritize non-use status)" -echo "5. Negotiate purchase/lease" -echo "6. File change application with State Engineer" -echo "" -echo "For web interface, restart your application to see:" -echo "โ€ข Interactive map with all 160K water rights" -echo "โ€ข Movement analyzer tool" -echo "โ€ข Lead generator with owner contact info" -echo "โ€ข Basin policy viewer" -echo "" -echo "============================================================================" diff --git a/scripts/deployment/setup_and_upload.sh b/scripts/deployment/setup_and_upload.sh deleted file mode 100644 index ae7060e..0000000 --- a/scripts/deployment/setup_and_upload.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/bash - -echo "==========================================" -echo " GitHub Token Setup" -echo "==========================================" -echo "" -echo "Get your token from: https://github.com/settings/tokens" -echo "Click 'Generate new token (classic)'" -echo "Check 'repo' scope, then generate" -echo "" -echo "Paste your GitHub token here:" -read -s TOKEN -echo "" - -if [ -z "$TOKEN" ]; then - echo "โŒ No token provided" - exit 1 -fi - -# Remove any old GITHUB_TOKEN 
lines -grep -v "GITHUB_TOKEN" ~/.zshrc > ~/.zshrc.tmp 2>/dev/null || touch ~/.zshrc.tmp -mv ~/.zshrc.tmp ~/.zshrc - -# Add new token -echo "export GITHUB_TOKEN=\"$TOKEN\"" >> ~/.zshrc - -# Reload -export GITHUB_TOKEN="$TOKEN" - -echo "โœ… Token saved to ~/.zshrc" -echo "" - -# Test it -echo "Testing token..." -python3 << 'PYEOF' -from github import Github -import os - -token = os.getenv("GITHUB_TOKEN") -try: - g = Github(token) - user = g.get_user() - print(f"โœ… Token works! Logged in as: {user.login}") -except Exception as e: - print(f"โŒ Token invalid: {e}") -PYEOF - -echo "" -echo "==========================================" -echo "Now running file upload..." -echo "==========================================" -echo "" - -# Run the upload -python3 /Users/allbots/Downloads/commit_files.py diff --git a/scripts/deployment/upload_issue_34.sh b/scripts/deployment/upload_issue_34.sh deleted file mode 100755 index 9441bc9..0000000 --- a/scripts/deployment/upload_issue_34.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/bash - -# Upload Issue #34 files to GitHub - -echo "๐Ÿ” Enter your GitHub Personal Access Token:" -read -s GITHUB_TOKEN - -REPO="cortexlinux/cortex" -BRANCH="feature/issue-34" - -echo "" -echo "๐Ÿ“ค Uploading llm_router.py..." -curl -X PUT \ - -H "Authorization: token $GITHUB_TOKEN" \ - -H "Content-Type: application/json" \ - -d "{\"message\":\"Add LLM Router implementation\",\"content\":\"$(base64 -i llm_router.py)\",\"branch\":\"$BRANCH\"}" \ - "https://api.github.com/repos/$REPO/contents/src/llm_router.py" - -echo "" -echo "๐Ÿ“ค Uploading test_llm_router.py..." -curl -X PUT \ - -H "Authorization: token $GITHUB_TOKEN" \ - -H "Content-Type: application/json" \ - -d "{\"message\":\"Add LLM Router tests\",\"content\":\"$(base64 -i test_llm_router.py)\",\"branch\":\"$BRANCH\"}" \ - "https://api.github.com/repos/$REPO/contents/src/test_llm_router.py" - -echo "" -echo "๐Ÿ“ค Uploading README_LLM_ROUTER.md..." 
-curl -X PUT \ - -H "Authorization: token $GITHUB_TOKEN" \ - -H "Content-Type: application/json" \ - -d "{\"message\":\"Add LLM Router documentation\",\"content\":\"$(base64 -i README_LLM_ROUTER.md)\",\"branch\":\"$BRANCH\"}" \ - "https://api.github.com/repos/$REPO/contents/docs/README_LLM_ROUTER.md" - -echo "" -echo "โœ… Upload complete! Check: https://github.com/$REPO/tree/$BRANCH" diff --git a/scripts/github/merge-mike-prs.sh b/scripts/github/merge-mike-prs.sh deleted file mode 100755 index 1831ac9..0000000 --- a/scripts/github/merge-mike-prs.sh +++ /dev/null @@ -1,81 +0,0 @@ -#!/bin/bash -# CORTEX - Quick Merge Mike's PRs -# Merges all PRs authored by @mikejmorgan-ai to clear backlog - -set -e - -echo "๐Ÿš€ CORTEX - MERGE MIKE'S IMPLEMENTATION PRs" -echo "===========================================" -echo "" - -REPO="cortexlinux/cortex" -GITHUB_TOKEN=$(grep GITHUB_TOKEN ~/.zshrc | cut -d'=' -f2 | tr -d '"' | tr -d "'") - -export GH_TOKEN="$GITHUB_TOKEN" - -echo "Merging PRs authored by @mikejmorgan-ai..." 
-echo "" - -# PRs to merge (excluding #17, #18, #21, #37, #38 which are from contributors) -MIKE_PRS=(41 36 34 23 22 20) - -for pr in "${MIKE_PRS[@]}"; do - echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - echo "PR #$pr" - echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - - # Get PR info - pr_info=$(gh pr view $pr --repo $REPO --json title,state,mergeable 2>/dev/null || echo "") - - if [ -z "$pr_info" ]; then - echo "โŒ PR #$pr not found or not accessible" - echo "" - continue - fi - - pr_title=$(echo "$pr_info" | jq -r '.title') - pr_state=$(echo "$pr_info" | jq -r '.state') - pr_mergeable=$(echo "$pr_info" | jq -r '.mergeable') - - echo "Title: $pr_title" - echo "State: $pr_state" - echo "Mergeable: $pr_mergeable" - echo "" - - if [ "$pr_state" != "OPEN" ]; then - echo "โญ๏ธ PR already merged or closed" - echo "" - continue - fi - - if [ "$pr_mergeable" = "CONFLICTING" ]; then - echo "โš ๏ธ PR has merge conflicts - needs manual resolution" - echo "" - continue - fi - - echo "Merge this PR? (y/n)" - read -n 1 -r - echo "" - - if [[ $REPLY =~ ^[Yy]$ ]]; then - echo "๐Ÿ”„ Merging PR #$pr..." - - gh pr merge $pr --repo $REPO --squash --delete-branch 2>/dev/null && \ - echo "โœ… PR #$pr merged successfully!" || \ - echo "โŒ Failed to merge PR #$pr (may need manual merge)" - else - echo "โญ๏ธ Skipped PR #$pr" - fi - - echo "" -done - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "โœ… MERGE PROCESS COMPLETE" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" -echo "Next steps:" -echo "1. Review contributor PRs: #17, #21, #37, #38" -echo "2. Process bounty payments" -echo "3. 
Post update to Discord" diff --git a/scripts/github/organize-issues.sh b/scripts/github/organize-issues.sh deleted file mode 100755 index 36d7a17..0000000 --- a/scripts/github/organize-issues.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/bin/bash -# Label and organize issues for MVP focus - -set -e - -echo "๐ŸŽฏ ORGANIZING ISSUES FOR MVP FOCUS" -echo "=====================================" - -cd ~/cortex - -echo "Strategy:" -echo " Issues #1-30: MVP Critical" -echo " Issues #31-45: MVP Nice-to-Have" -echo " Issues #46+: Post-MVP" -echo "" - -read -p "Organize all issues? (y/n): " -n 1 -r -echo -if [[ ! $REPLY =~ ^[Yy]$ ]]; then - echo "Aborted." - exit 0 -fi - -# Create milestones -echo "๐Ÿ“‹ Creating milestones..." -gh api repos/cortexlinux/cortex/milestones --method POST \ - -f title='MVP - Core Features' \ - -f description='Critical features required for MVP launch' 2>/dev/null || echo " MVP milestone exists" - -gh api repos/cortexlinux/cortex/milestones --method POST \ - -f title='Post-MVP - Enhancements' \ - -f description='Features for post-MVP releases' 2>/dev/null || echo " Post-MVP milestone exists" - -echo "" -echo "๐Ÿท๏ธ Labeling MVP Critical (#1-30)..." -for i in {1..30}; do - gh issue edit $i --add-label "mvp-critical,priority: critical" --milestone "MVP - Core Features" 2>/dev/null && echo " โœ… #$i" || echo " โš ๏ธ #$i not found" - sleep 0.3 -done - -echo "" -echo "๐Ÿท๏ธ Labeling Post-MVP (#46-150)..." -for i in {46..150}; do - gh issue edit $i --add-label "post-mvp" --milestone "Post-MVP - Enhancements" 2>/dev/null - (( i % 20 == 0 )) && echo " Processed through #$i..." && sleep 1 -done - -echo "" -echo "โœ… COMPLETE!" 
-echo "" -echo "View MVP Critical: https://github.com/cortexlinux/cortex/issues?q=is%3Aopen+label%3Amvp-critical" diff --git a/scripts/github/review-contributor-prs.sh b/scripts/github/review-contributor-prs.sh deleted file mode 100755 index 8a5be9d..0000000 --- a/scripts/github/review-contributor-prs.sh +++ /dev/null @@ -1,314 +0,0 @@ -#!/bin/bash -# CORTEX - CONTRIBUTOR PR REVIEW & MERGE SYSTEM -# Reviews PRs from contributors, tracks bounties, posts thank-yous - -set -e - -echo "๐Ÿ” CORTEX - CONTRIBUTOR PR REVIEW SYSTEM" -echo "========================================" -echo "" - -REPO="cortexlinux/cortex" -GITHUB_TOKEN=$(grep GITHUB_TOKEN ~/.zshrc | cut -d'=' -f2 | tr -d '"' | tr -d "'") - -export GH_TOKEN="$GITHUB_TOKEN" - -# Track bounties owed -BOUNTIES_FILE="$HOME/cortex/bounties_owed.csv" - -# Create bounties file if doesn't exist -if [ ! -f "$BOUNTIES_FILE" ]; then - echo "PR,Developer,Feature,Bounty_Amount,Date_Merged,Status" > "$BOUNTIES_FILE" -fi - -echo "๐Ÿ“Š CONTRIBUTOR PR REVIEW QUEUE" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -echo "" - -# Contributor PRs to review (in priority order) -declare -A PR_DETAILS -PR_DETAILS[17]="chandrapratamar|Package Manager Wrapper (Issue #7)|100|CRITICAL_MVP_BLOCKER" -PR_DETAILS[37]="AlexanderLuzDH|Progress Notifications (Issue #27)|125|HIGH_PRIORITY" -PR_DETAILS[38]="AlexanderLuzDH|Requirements Pre-flight Check (Issue #28)|100|HIGH_PRIORITY" -PR_DETAILS[21]="aliraza556|Config File Templates (Issue #16)|150|HIGH_PRIORITY" -PR_DETAILS[18]="Sahilbhatane|CLI Interface (Issue #11)|100|DRAFT_WAIT" - -# Function to review a PR -review_pr() { - local pr_num=$1 - local pr_data="${PR_DETAILS[$pr_num]}" - - IFS='|' read -r developer feature bounty priority <<< "$pr_data" - - echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - echo "๐Ÿ“‹ PR #$pr_num 
- $feature" - echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - echo "" - echo "๐Ÿ‘ค Developer: @$developer" - echo "๐ŸŽฏ Feature: $feature" - echo "๐Ÿ’ฐ Bounty: \$$bounty" - echo "๐Ÿ”ฅ Priority: $priority" - echo "" - - # Check if draft - pr_state=$(gh pr view $pr_num --repo $REPO --json isDraft 2>/dev/null | jq -r '.isDraft') - - if [ "$pr_state" = "true" ]; then - echo "๐Ÿ“ Status: DRAFT - Not ready for review yet" - echo " Action: Skip for now, will review when marked ready" - echo "" - return - fi - - echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - echo "REVIEW CHECKLIST" - echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" - echo "" - echo "Before approving, verify:" - echo " [ ] Code implements the feature described in the issue" - echo " [ ] Unit tests included with >80% coverage" - echo " [ ] Documentation/README included" - echo " [ ] Integrates with existing Cortex architecture" - echo " [ ] No obvious bugs or security issues" - echo " [ ] Follows Python best practices" - echo "" - - echo "Actions:" - echo " [v] View PR in browser (to review code)" - echo " [a] Approve & Merge (if review passed)" - echo " [c] Request Changes (if issues found)" - echo " [m] Add Comment (questions/feedback)" - echo " [s] Skip to next PR" - echo " [q] Quit review mode" - echo "" - echo -n "Choose action: " - read -n 1 action - echo "" - echo "" - - case $action in - v|V) - echo "๐ŸŒ Opening PR #$pr_num in browser..." - gh pr view $pr_num --repo $REPO --web - echo "" - echo "After reviewing, come back to approve/change/comment." - echo "" - echo "Take action now? (y/n)" - read -n 1 take_action - echo "" - - if [[ ! 
$take_action =~ ^[Yy]$ ]]; then - echo "โญ๏ธ Skipping for now..." - return - fi - - # Ask again which action - echo "" - echo "What action? [a]pprove [c]hange [m]comment [s]kip" - read -n 1 action - echo "" - ;;& # Continue to next pattern - - a|A) - echo "โœ… APPROVING & MERGING PR #$pr_num" - echo "" - - # Post approval review - approval_msg="โœ… **APPROVED - Excellent Work!** - -Thank you @$developer for this outstanding contribution! ๐ŸŽ‰ - -**Review Summary:** -- โœ… Code quality: Professional implementation -- โœ… Testing: Comprehensive unit tests included -- โœ… Documentation: Clear and complete -- โœ… Integration: Works seamlessly with Cortex architecture - -**What's Next:** -1. Merging this PR immediately -2. Your bounty of **\$$bounty USD** will be processed within 48 hours -3. Payment via crypto (Bitcoin/USDC) or PayPal - we'll coordinate via issue comment - -**You're making history** - this is a foundational piece of the AI-native operating system! ๐Ÿง โšก - -**Bonus Reminder:** At funding (Feb 2025), you'll receive **2x this bounty** as a thank-you bonus. - -Welcome to the Cortex Linux core contributor team! ๐Ÿš€ - ---- -*Automated review from Cortex PR Management System*" - - echo "$approval_msg" | gh pr review $pr_num --repo $REPO --approve --body-file - 2>/dev/null || \ - echo "โš ๏ธ Could not post review (may need manual approval)" - - echo "" - echo "Merging PR #$pr_num now..." - - gh pr merge $pr_num --repo $REPO --squash --delete-branch 2>/dev/null && { - echo "โœ… PR #$pr_num merged successfully!" 
- - # Record bounty owed - merge_date=$(date +%Y-%m-%d) - echo "$pr_num,$developer,$feature,$bounty,$merge_date,PENDING" >> "$BOUNTIES_FILE" - - echo "" - echo "๐Ÿ’ฐ Bounty recorded: \$$bounty owed to @$developer" - echo " Recorded in: $BOUNTIES_FILE" - } || { - echo "โŒ Merge failed - may need manual intervention" - } - - echo "" - ;; - - c|C) - echo "๐Ÿ”„ REQUESTING CHANGES on PR #$pr_num" - echo "" - echo "Enter your feedback (what needs to change):" - echo "Press Ctrl+D when done" - echo "---" - feedback=$(cat) - - change_msg="๐Ÿ”„ **Changes Requested** - -Thank you for your contribution @$developer! The code is solid, but a few items need attention before merge: - -$feedback - -**Please update and let me know when ready** for re-review. I'll prioritize getting this merged quickly once addressed. - -**Questions?** Comment here or ping me in Discord (#dev-questions). - -We appreciate your patience! ๐Ÿ™ - ---- -*Automated review from Cortex PR Management System*" - - echo "$change_msg" | gh pr review $pr_num --repo $REPO --request-changes --body-file - 2>/dev/null || \ - echo "โš ๏ธ Could not post review" - - echo "" - echo "โœ… Change request posted" - echo "" - ;; - - m|M) - echo "๐Ÿ’ฌ ADDING COMMENT to PR #$pr_num" - echo "" - echo "Enter your comment:" - echo "Press Ctrl+D when done" - echo "---" - comment=$(cat) - - gh pr comment $pr_num --repo $REPO --body "$comment" 2>/dev/null && \ - echo "โœ… Comment posted" || \ - echo "โš ๏ธ Could not post comment" - - echo "" - ;; - - s|S) - echo "โญ๏ธ Skipping PR #$pr_num" - echo "" - ;; - - q|Q) - echo "๐Ÿ‘‹ Exiting review mode..." - echo "" - return 1 - ;; - - *) - echo "โญ๏ธ Invalid action, skipping..." - echo "" - ;; - esac -} - -# Main review loop -echo "Starting PR review process..." 
-echo "" - -PR_ORDER=(17 37 38 21 18) # Priority order - -for pr in "${PR_ORDER[@]}"; do - review_pr $pr || break -done - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "๐Ÿ“Š REVIEW SESSION COMPLETE" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" - -# Show bounties owed -if [ -f "$BOUNTIES_FILE" ]; then - echo "๐Ÿ’ฐ BOUNTIES OWED (from this session and previous)" - echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" - echo "" - - total_owed=0 - - tail -n +2 "$BOUNTIES_FILE" | while IFS=',' read -r pr dev feature amount date status; do - if [ "$status" = "PENDING" ]; then - echo " PR #$pr - @$dev: \$$amount ($feature)" - total_owed=$((total_owed + amount)) - fi - done - - echo "" - echo " Total pending: \$$total_owed USD" - echo "" - echo " Payment file: $BOUNTIES_FILE" - echo "" -fi - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "๐ŸŽฏ NEXT STEPS" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" -echo "1. Process bounty payments (see $BOUNTIES_FILE)" -echo "2. Post Discord announcement about merged PRs" -echo "3. Check if Issue #7 unblocked (if PR #17 merged)" -echo "4. 
Welcome new developers to comment on issues" -echo "" - -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" - -# Generate Discord announcement -discord_msg="๐ŸŽ‰ **PR MERGE UPDATE - $(date +%Y-%m-%d)** - -**PRs Merged Today:** -(Check the bounties file for details) - -**Critical Path Progress:** -- Issue #7 (Package Manager): $([ -f "$BOUNTIES_FILE" ] && grep -q "^17," "$BOUNTIES_FILE" && echo "โœ… MERGED - MVP BLOCKER CLEARED!" || echo "โณ In review") - -**Bounties Being Processed:** -- See individual PR comments for payment coordination -- 2x bonus reminder: When we close funding (Feb 2025), all bounties paid so far get 2x bonus - -**What This Means:** -- MVP velocity accelerating -- February funding timeline on track -- Professional team execution demonstrated - -**For Contributors:** -- Check your merged PRs for bounty coordination comments -- Payment within 48 hours of merge -- Crypto (Bitcoin/USDC) or PayPal options - -**Open Issues Still Available:** -Browse: https://github.com/cortexlinux/cortex/issues -Join: Discord #dev-questions - -Let's keep the momentum! ๐Ÿง โšก" - -echo "๐Ÿ“ฑ DISCORD ANNOUNCEMENT (copy and post to #announcements)" -echo "โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€" -echo "" -echo "$discord_msg" -echo "" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "โœ… PR Review System Complete!" 
-echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" diff --git a/scripts/github/setup-github-automation.sh b/scripts/github/setup-github-automation.sh deleted file mode 100644 index 4fd6e8c..0000000 --- a/scripts/github/setup-github-automation.sh +++ /dev/null @@ -1,114 +0,0 @@ -#!/bin/bash -# Cortex Linux - GitHub Automation Setup -# Run this once to set up everything - -set -e - -echo "๐Ÿš€ CORTEX LINUX AUTOMATION SETUP" -echo "==================================" -echo "" - -# Check if we're in a git repo -if [ ! -d .git ]; then - echo "โŒ Error: Not in a git repository" - echo " Run this from your cortex repo root: cd ~/path/to/cortex" - exit 1 -fi - -# Check GitHub CLI -if ! command -v gh &> /dev/null; then - echo "โŒ Error: GitHub CLI not found" - echo " Install: brew install gh" - echo " Then: gh auth login" - exit 1 -fi - -echo "โœ… Prerequisites check passed" -echo "" - -# Create .github/workflows directory -echo "๐Ÿ“ Creating .github/workflows directory..." -mkdir -p .github/workflows - -# Copy workflow file -echo "๐Ÿ“„ Installing automation workflow..." -if [ -f ~/Downloads/cortex-automation-github.yml ]; then - cp ~/Downloads/cortex-automation-github.yml .github/workflows/automation.yml - echo "โœ… Workflow file installed" -else - echo "โŒ Error: cortex-automation-github.yml not found in Downloads" - echo " Download it first from Claude" - exit 1 -fi - -# Create tracking files -echo "๐Ÿ“Š Creating tracking files..." -echo "[]" > bounties_pending.json -echo "[]" > payments_history.json -echo "{}" > contributors.json -echo "โœ… Tracking files created" - -# Add to .gitignore if needed -if [ ! -f .gitignore ]; then - touch .gitignore -fi - -if ! 
grep -q "bounties_pending.json" .gitignore; then - echo "" >> .gitignore - echo "# Cortex Automation tracking files" >> .gitignore - echo "bounties_pending.json" >> .gitignore - echo "payments_history.json" >> .gitignore - echo "contributors.json" >> .gitignore - echo "bounty_report.txt" >> .gitignore - echo "discord_message.txt" >> .gitignore - echo "โœ… Added to .gitignore" -fi - -# Commit and push -echo "" -echo "๐Ÿ’พ Committing automation setup..." -git add .github/workflows/automation.yml -git add bounties_pending.json payments_history.json contributors.json -git add .gitignore -git commit -m "Add GitHub Actions automation for bounty tracking" || echo "Nothing to commit" - -echo "" -echo "๐Ÿ“ค Pushing to GitHub..." -git push - -echo "" -echo "โœ… SETUP COMPLETE!" -echo "" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" -echo "๐Ÿ” NEXT: Add Discord Webhook to GitHub Secrets" -echo "" -echo "1. Get Discord webhook URL:" -echo " โ€ข Go to your Discord server" -echo " โ€ข Server Settings โ†’ Integrations โ†’ Webhooks" -echo " โ€ข Click 'New Webhook'" -echo " โ€ข Name: 'Cortex Bot'" -echo " โ€ข Channel: #announcements" -echo " โ€ข Copy Webhook URL" -echo "" -echo "2. Add to GitHub Secrets:" -echo " โ€ข Go to: https://github.com/cortexlinux/cortex/settings/secrets/actions" -echo " โ€ข Click 'New repository secret'" -echo " โ€ข Name: DISCORD_WEBHOOK" -echo " โ€ข Value: [paste webhook URL]" -echo " โ€ข Click 'Add secret'" -echo "" -echo "โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”" -echo "" -echo "๐ŸŽ‰ AUTOMATION IS NOW LIVE!" 
-echo "" -echo "What happens automatically:" -echo " โœ… Every Friday 6pm UTC - Bounty report posted to Discord" -echo " โœ… Every Monday noon UTC - Leaderboard updated" -echo " โœ… Every PR merge - Discord notification + welcome message" -echo "" -echo "You just approve payments in Discord. That's it!" -echo "" -echo "Test it now:" -echo " gh workflow run automation.yml" -echo "" diff --git a/src/.gitignore b/src/.gitignore deleted file mode 100644 index 2c4fc58..0000000 --- a/src/.gitignore +++ /dev/null @@ -1,34 +0,0 @@ -# Python -__pycache__/ -*.py[cod] -*$py.class -*.so -.Python -*.egg-info/ -dist/ -build/ - -# Virtual environments -venv/ -env/ -ENV/ - -# IDE -.vscode/ -.idea/ -*.swp -*.swo -*~ - -# OS -.DS_Store -Thumbs.db - -# Testing -.pytest_cache/ -.coverage -htmlcov/ - -# Logs -*.log - diff --git a/src/__init__.py b/src/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/config_manager.py b/src/config_manager.py deleted file mode 100755 index ff6e91c..0000000 --- a/src/config_manager.py +++ /dev/null @@ -1,1044 +0,0 @@ -""" -Configuration Manager for Cortex Linux -Handles export/import of system state for reproducibility. - -Part of Cortex Linux - AI-native OS that needs to export/import system configurations. -""" - -import os -import json -import yaml -import subprocess -import re -from typing import Dict, List, Optional, Any, Tuple, ClassVar -from datetime import datetime -from pathlib import Path - - -class ConfigManager: - """ - Manages configuration export/import for Cortex Linux. - - Features: - - Export current system state to YAML (packages, configs, preferences) - - Import configuration from YAML file - - Validate version compatibility between export and import - - Support dry-run mode (preview without applying) - - Generate diff between current state and config file - - Handle selective export/import (packages only, configs only, etc.) 
- """ - - CORTEX_VERSION = "0.2.0" - - # Timeout constants - DETECTION_TIMEOUT = 30 # seconds for package detection - INSTALLATION_TIMEOUT = 300 # seconds for package installation - - # Package sources - SOURCE_APT = 'apt' - SOURCE_PIP = 'pip' - SOURCE_NPM = 'npm' - DEFAULT_SOURCES: ClassVar[List[str]] = [SOURCE_APT, SOURCE_PIP, SOURCE_NPM] - - def __init__(self, sandbox_executor=None): - """ - Initialize ConfigManager. - - Args: - sandbox_executor: Optional SandboxExecutor instance for safe command execution - - Raises: - PermissionError: If directory ownership or permissions cannot be secured - """ - self.sandbox_executor = sandbox_executor - self.cortex_dir = Path.home() / '.cortex' - self.preferences_file = self.cortex_dir / 'preferences.yaml' - - # Ensure .cortex directory exists with secure permissions - self.cortex_dir.mkdir(mode=0o700, exist_ok=True) - self._enforce_directory_security(self.cortex_dir) - - def _enforce_directory_security(self, directory: Path) -> None: - """ - Enforce ownership and permission security on a directory. - - Ensures the directory is owned by the current user and has mode 0o700 - (read/write/execute for owner only). - - Args: - directory: Path to the directory to secure - - Raises: - PermissionError: If ownership or permissions cannot be secured - """ - try: - # Get directory statistics - stat_info = directory.stat() - current_uid = os.getuid() - current_gid = os.getgid() - - # Check and fix ownership if needed - if stat_info.st_uid != current_uid or stat_info.st_gid != current_gid: - try: - os.chown(directory, current_uid, current_gid) - except PermissionError: - raise PermissionError( - f"Directory {directory} is owned by uid={stat_info.st_uid}, " - f"gid={stat_info.st_gid}, but process is running as uid={current_uid}, " - f"gid={current_gid}. Insufficient privileges to change ownership." 
- ) - - # Enforce mode 0o700 - os.chmod(directory, 0o700) - - # Verify the chmod succeeded - stat_info = directory.stat() - actual_mode = stat_info.st_mode & 0o777 - if actual_mode != 0o700: - raise PermissionError( - f"Failed to set secure permissions on {directory}. " - f"Expected mode 0o700, but actual mode is {oct(actual_mode)}. " - f"Security invariant failed." - ) - except OSError as e: - if isinstance(e, PermissionError): - raise - raise PermissionError( - f"Failed to enforce security on {directory}: {e}" - ) - - def detect_apt_packages(self) -> List[Dict[str, Any]]: - """ - Detect installed APT packages. - - Returns: - List of package dictionaries with name, version, and source - """ - packages = [] - - try: - result = subprocess.run( - ['dpkg-query', '-W', '-f=${Package}\t${Version}\n'], - capture_output=True, - text=True, - timeout=self.DETECTION_TIMEOUT - ) - - if result.returncode == 0: - for line in result.stdout.strip().split('\n'): - if line.strip(): - parts = line.split('\t') - if len(parts) >= 2: - packages.append({ - 'name': parts[0], - 'version': parts[1], - 'source': self.SOURCE_APT - }) - except (subprocess.TimeoutExpired, FileNotFoundError): - # Silently handle errors - package manager may not be available - pass - - return packages - - def detect_pip_packages(self) -> List[Dict[str, Any]]: - """ - Detect installed PIP packages. 
- - Returns: - List of package dictionaries with name, version, and source - """ - packages = [] - - # Try pip3 first, then pip - for pip_cmd in ['pip3', 'pip']: - try: - result = subprocess.run( - [pip_cmd, 'list', '--format=json'], - capture_output=True, - text=True, - timeout=self.DETECTION_TIMEOUT - ) - - if result.returncode == 0: - pip_packages = json.loads(result.stdout) - for pkg in pip_packages: - packages.append({ - 'name': pkg['name'], - 'version': pkg['version'], - 'source': self.SOURCE_PIP - }) - break # Success, no need to try other pip commands - except (subprocess.TimeoutExpired, FileNotFoundError, json.JSONDecodeError): - continue - - return packages - - def detect_npm_packages(self) -> List[Dict[str, Any]]: - """ - Detect globally installed NPM packages. - - Returns: - List of package dictionaries with name, version, and source - """ - packages = [] - - try: - result = subprocess.run( - ['npm', 'list', '-g', '--depth=0', '--json'], - capture_output=True, - text=True, - timeout=self.DETECTION_TIMEOUT - ) - - if result.returncode == 0: - npm_data = json.loads(result.stdout) - dependencies = npm_data.get('dependencies', {}) - - for name, info in dependencies.items(): - version = info.get('version', 'unknown') - packages.append({ - 'name': name, - 'version': version, - 'source': self.SOURCE_NPM - }) - except (subprocess.TimeoutExpired, FileNotFoundError, json.JSONDecodeError): - # Silently handle errors - npm may not be installed or global packages unavailable - pass - - return packages - - def detect_installed_packages(self, sources: Optional[List[str]] = None) -> List[Dict[str, Any]]: - """ - Detect all installed packages from specified sources. 
- - Args: - sources: List of package sources to detect ['apt', 'pip', 'npm'] - If None, detects from all sources - - Returns: - List of package dictionaries sorted by name - """ - if sources is None: - sources = self.DEFAULT_SOURCES - - all_packages = [] - - if self.SOURCE_APT in sources: - all_packages.extend(self.detect_apt_packages()) - - if self.SOURCE_PIP in sources: - all_packages.extend(self.detect_pip_packages()) - - if self.SOURCE_NPM in sources: - all_packages.extend(self.detect_npm_packages()) - - # Remove duplicates based on name and source (more efficient) - unique_packages_dict = {} - for pkg in all_packages: - key = (pkg['name'], pkg['source']) - unique_packages_dict[key] = pkg - - # Sort by name - unique_packages = sorted(unique_packages_dict.values(), key=lambda x: x['name']) - - return unique_packages - - def _detect_os_version(self) -> str: - """ - Detect OS version from /etc/os-release. - - Returns: - OS version string (e.g., 'ubuntu-24.04') - """ - try: - os_release_path = Path('/etc/os-release') - if not os_release_path.exists(): - return "unknown" - - with open(os_release_path, 'r') as f: - os_release = f.read() - - # Extract distribution name and version - name_match = re.search(r'ID=([^\n]+)', os_release) - version_match = re.search(r'VERSION_ID="?([^"\n]+)"?', os_release) - - if name_match and version_match: - name = name_match.group(1).strip().strip('"') - version = version_match.group(1).strip() - return f"{name}-{version}" - - return "unknown" - except Exception: - return "unknown" - - def _load_preferences(self) -> Dict[str, Any]: - """ - Load user preferences from ~/.cortex/preferences.yaml. - - Returns: - Dictionary of preferences - """ - if self.preferences_file.exists(): - try: - with open(self.preferences_file, 'r') as f: - return yaml.safe_load(f) or {} - except Exception: - pass - - return {} - - def _save_preferences(self, preferences: Dict[str, Any]) -> None: - """ - Save user preferences to ~/.cortex/preferences.yaml. 
- - Args: - preferences: Dictionary of preferences to save - """ - try: - with open(self.preferences_file, 'w') as f: - yaml.safe_dump(preferences, f, default_flow_style=False) - except Exception as e: - raise RuntimeError(f"Failed to save preferences: {e}") - - def export_configuration(self, - output_path: str, - include_hardware: bool = True, - include_preferences: bool = True, - package_sources: Optional[List[str]] = None) -> str: - """ - Export current system configuration to YAML file. - - Args: - output_path: Path to save YAML configuration file - include_hardware: Include hardware profile from HardwareProfiler - include_preferences: Include user preferences - package_sources: List of package sources to export ['apt', 'pip', 'npm'] - If None, exports all - - Returns: - Success message with file path - """ - if package_sources is None: - package_sources = self.DEFAULT_SOURCES - - # Build configuration dictionary - config = { - 'cortex_version': self.CORTEX_VERSION, - 'exported_at': datetime.now().isoformat(), - 'os': self._detect_os_version(), - } - - # Add hardware profile if requested - if include_hardware: - try: - from hwprofiler import HardwareProfiler - profiler = HardwareProfiler() - config['hardware'] = profiler.profile() - except Exception as e: - config['hardware'] = {'error': f'Failed to detect hardware: {e}'} - - # Add packages - config['packages'] = self.detect_installed_packages(sources=package_sources) - - # Add preferences if requested - if include_preferences: - config['preferences'] = self._load_preferences() - - # Add environment variables (selected safe ones) - config['environment_variables'] = {} - safe_env_vars = ['LANG', 'LANGUAGE', 'LC_ALL', 'PATH', 'SHELL'] - for var in safe_env_vars: - if var in os.environ: - config['environment_variables'][var] = os.environ[var] - - # Write to file - try: - output_path_obj = Path(output_path) - output_path_obj.parent.mkdir(parents=True, exist_ok=True) - - with open(output_path_obj, 'w') as f: - 
yaml.safe_dump(config, f, default_flow_style=False, sort_keys=False) - - return f"Configuration exported successfully to {output_path}" - except Exception as e: - raise RuntimeError(f"Failed to export configuration: {e}") - - def validate_compatibility(self, config: Dict[str, Any]) -> Tuple[bool, Optional[str]]: - """ - Validate if configuration can be imported on this system. - - Args: - config: Configuration dictionary from YAML - - Returns: - Tuple of (is_compatible, reason_if_not) - """ - # Check required fields - if 'cortex_version' not in config: - return False, "Missing cortex_version field in configuration" - - if 'os' not in config: - return False, "Missing os field in configuration" - - if 'packages' not in config: - return False, "Missing packages field in configuration" - - # Check cortex version compatibility - config_version = config['cortex_version'] - current_version = self.CORTEX_VERSION - - # Parse versions (simple major.minor.patch comparison) - try: - config_parts = [int(x) for x in config_version.split('.')] - current_parts = [int(x) for x in current_version.split('.')] - - # Major version must match - if config_parts[0] != current_parts[0]: - return False, f"Incompatible major version: config={config_version}, current={current_version}" - - # Minor version: current should be >= config - if current_parts[1] < config_parts[1]: - return False, f"Configuration requires newer Cortex version: {config_version} > {current_version}" - except Exception: - # If version parsing fails, be lenient - pass - - # Check OS compatibility (warn but allow) - config_os = config.get('os', 'unknown') - current_os = self._detect_os_version() - - if config_os != current_os and config_os != 'unknown' and current_os != 'unknown': - # Don't fail, just warn in the return message - return True, f"Warning: OS mismatch (config={config_os}, current={current_os}). Proceed with caution." 
- - return True, None - - def _categorize_package(self, pkg: Dict[str, Any], current_pkg_map: Dict[Tuple[str, str], str]) -> Tuple[str, Optional[Dict[str, Any]]]: - """ - Categorize a package as install, upgrade, downgrade, or already installed. - - Args: - pkg: Package dictionary from config - current_pkg_map: Map of (name, source) to current version - - Returns: - Tuple of (category, package_data) where category is one of: - 'install', 'upgrade', 'downgrade', 'already_installed', 'skip' - package_data is the modified package dict (with current_version if applicable) - """ - name = pkg.get('name') - version = pkg.get('version') - source = pkg.get('source') - - if not name or not source: - return 'skip', None - - key = (name, source) - - if key not in current_pkg_map: - return 'install', pkg - - current_version = current_pkg_map[key] - if current_version == version: - return 'already_installed', pkg - - # Compare versions - try: - pkg_with_version = {**pkg, 'current_version': current_version} - if self._compare_versions(current_version, version) < 0: - return 'upgrade', pkg_with_version - else: - return 'downgrade', pkg_with_version - except Exception: - # If comparison fails, treat as upgrade - return 'upgrade', {**pkg, 'current_version': current_version} - - def diff_configuration(self, config: Dict[str, Any]) -> Dict[str, Any]: - """ - Compare current system state with configuration file. 
- - Args: - config: Configuration dictionary from YAML - - Returns: - Dictionary with differences - """ - diff = { - 'packages_to_install': [], - 'packages_to_upgrade': [], - 'packages_to_downgrade': [], - 'packages_already_installed': [], - 'preferences_changed': {}, - 'warnings': [] - } - - # Get current packages - current_packages = self.detect_installed_packages() - current_pkg_map = { - (pkg['name'], pkg['source']): pkg['version'] - for pkg in current_packages - } - - # Compare packages from config - config_packages = config.get('packages', []) - for pkg in config_packages: - category, pkg_data = self._categorize_package(pkg, current_pkg_map) - - if category == 'skip': - diff['warnings'].append(f"Malformed package entry skipped: {pkg}") - elif category == 'install': - diff['packages_to_install'].append(pkg_data) - elif category == 'upgrade': - diff['packages_to_upgrade'].append(pkg_data) - elif category == 'downgrade': - diff['packages_to_downgrade'].append(pkg_data) - elif category == 'already_installed': - diff['packages_already_installed'].append(pkg_data) - - # Compare preferences - current_prefs = self._load_preferences() - config_prefs = config.get('preferences', {}) - - for key, value in config_prefs.items(): - if key not in current_prefs or current_prefs[key] != value: - diff['preferences_changed'][key] = { - 'current': current_prefs.get(key), - 'new': value - } - - # Add warnings - if diff['packages_to_downgrade']: - diff['warnings'].append( - f"Warning: {len(diff['packages_to_downgrade'])} packages will be downgraded" - ) - - return diff - - def _compare_versions(self, version1: str, version2: str) -> int: - """ - Compare two version strings using packaging library for robustness. 
- - Args: - version1: First version string - version2: Second version string - - Returns: - -1 if version1 < version2, 0 if equal, 1 if version1 > version2 - """ - try: - from packaging import version - v1 = version.parse(version1) - v2 = version.parse(version2) - if v1 < v2: - return -1 - elif v1 > v2: - return 1 - return 0 - except Exception: - # Fallback to simple numeric comparison - return self._simple_version_compare(version1, version2) - - def _simple_version_compare(self, version1: str, version2: str) -> int: - """ - Fallback version comparison using numeric extraction. - - Used when the packaging library is unavailable or fails to parse - version strings. Extracts numeric components and compares them - sequentially, padding shorter versions with zeros. - - This method provides a basic version comparison by extracting all - numeric parts from the version strings and comparing them position - by position. It handles simple version schemes well but may not - correctly handle complex pre-release tags or build metadata. - - Args: - version1: First version string (e.g., "1.2.3", "2.0.0-rc1") - version2: Second version string to compare against - - Returns: - int: -1 if version1 < version2 - 0 if versions are equal - 1 if version1 > version2 - - Example: - >>> _simple_version_compare("1.2.3", "1.2.4") - -1 - >>> _simple_version_compare("2.0.0", "1.9.9") - 1 - >>> _simple_version_compare("1.0", "1.0.0") - 0 - - Note: - This is a simplified comparison that only considers numeric parts. - Complex version schemes (pre-release tags, build metadata) may not - be handled correctly. Prefer using packaging.version when available. 
- """ - # Simple version comparison (extract numeric parts) - v1_parts = re.findall(r'\d+', version1) - v2_parts = re.findall(r'\d+', version2) - - # Handle case where no numeric parts found - if not v1_parts and not v2_parts: - return 0 # Both have no numeric parts, treat as equal - if not v1_parts: - return -1 # version1 has no numeric parts, consider it less - if not v2_parts: - return 1 # version2 has no numeric parts, consider it greater - - # Pad to same length - max_len = max(len(v1_parts), len(v2_parts)) - v1_parts += ['0'] * (max_len - len(v1_parts)) - v2_parts += ['0'] * (max_len - len(v2_parts)) - - for p1, p2 in zip(v1_parts, v2_parts): - n1, n2 = int(p1), int(p2) - if n1 < n2: - return -1 - elif n1 > n2: - return 1 - - return 0 - - def import_configuration(self, - config_path: str, - dry_run: bool = False, - selective: Optional[List[str]] = None, - force: bool = False) -> Dict[str, Any]: - """ - Import configuration from YAML file. - - Args: - config_path: Path to YAML configuration file - dry_run: If True, preview changes without applying - selective: Import only specified sections ['packages', 'preferences'] - If None, imports all - force: Skip compatibility checks - - Returns: - Summary dictionary with results - """ - # Load configuration - try: - with open(config_path, 'r') as f: - config = yaml.safe_load(f) - except Exception as e: - raise RuntimeError(f"Failed to load configuration file: {e}") - - # Validate compatibility - if not force: - is_compatible, reason = self.validate_compatibility(config) - if not is_compatible: - raise RuntimeError(f"Incompatible configuration: {reason}") - elif reason: # Warning - print(f"โš ๏ธ {reason}") - - # If dry run, return diff - if dry_run: - diff = self.diff_configuration(config) - return { - 'dry_run': True, - 'diff': diff, - 'message': 'Dry-run completed. Use import without --dry-run to apply changes.' 
- } - - # Determine what to import - if selective is None: - selective = ['packages', 'preferences'] - - summary = { - 'installed': [], - 'upgraded': [], - 'downgraded': [], - 'failed': [], - 'skipped': [], - 'preferences_updated': False - } - - # Import packages - if 'packages' in selective: - self._import_packages(config, summary) - - # Import preferences - if 'preferences' in selective: - self._import_preferences(config, summary) - - return summary - - def _import_packages(self, config: Dict[str, Any], summary: Dict[str, Any]) -> None: - """ - Import packages from configuration and update system state. - - This method processes package installations by first computing the - difference between the current system state and the target configuration - using diff_configuration(). It then attempts to install, upgrade, or - downgrade packages as needed. - - The method continues processing all packages even if individual packages - fail to install, ensuring maximum success. Failed installations are - tracked in the summary for user review. - - Args: - config: Configuration dictionary containing package specifications - Expected to have 'packages' key with list of package dicts - summary: Summary dictionary to update with results. Modified in-place - with keys: 'installed', 'upgraded', 'failed' - - Updates: - summary['installed']: List of successfully installed package names - summary['upgraded']: List of successfully upgraded package names - summary['failed']: List of failed package names (with error details) - - Note: - Uses _install_package() internally for actual package installation. - Each package is categorized based on diff results (install vs upgrade). - Errors are caught and logged to allow processing to continue. 
- """ - diff = self.diff_configuration(config) - packages_to_process = ( - diff['packages_to_install'] + - diff['packages_to_upgrade'] + - diff['packages_to_downgrade'] - ) - - for pkg in packages_to_process: - try: - success = self._install_package(pkg) - if success: - if pkg in diff['packages_to_install']: - summary['installed'].append(pkg['name']) - elif pkg in diff['packages_to_downgrade']: - summary['downgraded'].append(pkg['name']) - else: - summary['upgraded'].append(pkg['name']) - else: - summary['failed'].append(pkg['name']) - except Exception as e: - summary['failed'].append(f"{pkg['name']} ({str(e)})") - - def _import_preferences(self, config: Dict[str, Any], summary: Dict[str, Any]) -> None: - """ - Import user preferences from configuration and save to disk. - - Extracts preferences from the configuration dictionary and saves them - to the user's Cortex preferences file at ~/.cortex/preferences.yaml. - If preferences are empty or missing, no action is taken. - - This method handles the persistence of user-configurable settings such - as confirmation levels, verbosity settings, and other behavioral - preferences for the Cortex system. - - Args: - config: Configuration dictionary containing optional 'preferences' key - with user preference settings as a dictionary - summary: Summary dictionary to update with results. Modified in-place - with keys: 'preferences_updated', 'failed' - - Updates: - summary['preferences_updated']: Set to True on successful save - summary['failed']: Appends error message if save fails - - Note: - Uses _save_preferences() internally to persist to disk. - Errors during save are caught and added to failed list with details. - If config has no preferences or they are empty, silently succeeds. 
- """ - config_prefs = config.get('preferences', {}) - if config_prefs: - try: - self._save_preferences(config_prefs) - summary['preferences_updated'] = True - except Exception as e: - summary['failed'].append(f"preferences ({str(e)})") - - def _validate_package_identifier(self, identifier: str, allow_slash: bool = False) -> bool: - """ - Validate package name or version contains only safe characters. - - Prevents command injection by ensuring package identifiers only contain - alphanumeric characters and common package naming characters. - Supports NPM scoped packages (@scope/package) when allow_slash=True. - - Args: - identifier: Package name or version string to validate - allow_slash: Whether to allow a single slash (for NPM scoped packages) - - Returns: - bool: True if identifier is safe, False otherwise - """ - # Reject path-like patterns immediately - if identifier.startswith('.') or identifier.startswith('/') or identifier.startswith('~'): - return False - if '..' in identifier or '/.' in identifier: - return False - - # Apply character whitelist with optional slash support - if allow_slash: - # Allow exactly one forward slash for NPM scoped packages (@scope/package) - return bool(re.match(r'^[a-zA-Z0-9._:@=+\-]+(/[a-zA-Z0-9._\-]+)?$', identifier)) - else: - # No slashes allowed for versions or non-NPM packages - return bool(re.match(r'^[a-zA-Z0-9._:@=+\-]+$', identifier)) - - def _install_with_sandbox(self, name: str, version: Optional[str], source: str) -> bool: - """ - Install package using sandbox executor. 
- - Args: - name: Package name - version: Package version (optional) - source: Package source (apt/pip/npm) - - Returns: - True if successful, False otherwise - """ - try: - if source == self.SOURCE_APT: - command = f"sudo apt-get install -y {name}={version}" if version else f"sudo apt-get install -y {name}" - elif source == self.SOURCE_PIP: - command = f"pip3 install {name}=={version}" if version else f"pip3 install {name}" - elif source == self.SOURCE_NPM: - command = f"npm install -g {name}@{version}" if version else f"npm install -g {name}" - else: - return False - - result = self.sandbox_executor.execute(command) - return result.success - except Exception: - return False - - def _install_direct(self, name: str, version: Optional[str], source: str) -> bool: - """ - Install package directly using subprocess (not recommended in production). - - Args: - name: Package name - version: Package version (optional) - source: Package source (apt/pip/npm) - - Returns: - True if successful, False otherwise - """ - try: - if source == self.SOURCE_APT: - cmd = ['sudo', 'apt-get', 'install', '-y', f'{name}={version}' if version else name] - elif source == self.SOURCE_PIP: - cmd = ['pip3', 'install', f'{name}=={version}'] if version else ['pip3', 'install', name] - elif source == self.SOURCE_NPM: - cmd = ['npm', 'install', '-g', f'{name}@{version}'] if version else ['npm', 'install', '-g', name] - else: - return False - - result = subprocess.run(cmd, capture_output=True, timeout=self.INSTALLATION_TIMEOUT) - return result.returncode == 0 - except Exception: - return False - - def _install_package(self, pkg: Dict[str, Any]) -> bool: - """ - Install a single package using appropriate package manager. 
- - Args: - pkg: Package dictionary with name, version, source - - Returns: - True if successful, False otherwise - """ - name = pkg['name'] - version = pkg.get('version', '') - source = pkg['source'] - - # Validate package identifiers to prevent command injection - # Allow slash only for NPM package names (for scoped packages like @scope/package) - allow_slash = (source == self.SOURCE_NPM) - if not self._validate_package_identifier(name, allow_slash=allow_slash): - return False - if version and not self._validate_package_identifier(version, allow_slash=False): - return False - - if self.sandbox_executor: - return self._install_with_sandbox(name, version or None, source) - else: - return self._install_direct(name, version or None, source) - - -def _setup_argument_parser(): - """Create and configure argument parser for CLI.""" - import argparse - - parser = argparse.ArgumentParser(description='Cortex Configuration Manager') - subparsers = parser.add_subparsers(dest='command', help='Command to execute') - - # Export command - export_parser = subparsers.add_parser('export', help='Export system configuration') - export_parser.add_argument('--output', '-o', required=True, help='Output file path') - export_parser.add_argument('--include-hardware', action='store_true', - help='Include hardware information') - export_parser.add_argument('--no-preferences', action='store_true', - help='Exclude user preferences') - export_parser.add_argument('--packages-only', action='store_true', - help='Export only packages') - - # Import command - import_parser = subparsers.add_parser('import', help='Import configuration') - import_parser.add_argument('config_file', help='Configuration file to import') - import_parser.add_argument('--dry-run', action='store_true', - help='Preview changes without applying') - import_parser.add_argument('--force', action='store_true', - help='Skip compatibility checks') - import_parser.add_argument('--packages-only', action='store_true', - help='Import only 
packages') - import_parser.add_argument('--preferences-only', action='store_true', - help='Import only preferences') - - # Diff command - diff_parser = subparsers.add_parser('diff', help='Show configuration differences') - diff_parser.add_argument('config_file', help='Configuration file to compare') - - return parser - - -def _print_package_list(packages: List[Dict[str, Any]], max_display: int = 5) -> None: - """Print a list of packages with optional truncation.""" - for pkg in packages[:max_display]: - if 'current_version' in pkg: - print(f" - {pkg['name']} ({pkg.get('current_version')} โ†’ {pkg['version']})") - else: - print(f" - {pkg['name']} ({pkg['source']})") - - if len(packages) > max_display: - print(f" ... and {len(packages) - max_display} more") - - -def _print_dry_run_results(result: Dict[str, Any]) -> None: - """Print dry-run results in a formatted manner.""" - print("\n๐Ÿ” Dry-run results:\n") - diff = result['diff'] - - if diff['packages_to_install']: - print(f"๐Ÿ“ฆ Packages to install: {len(diff['packages_to_install'])}") - _print_package_list(diff['packages_to_install']) - - if diff['packages_to_upgrade']: - print(f"\nโฌ†๏ธ Packages to upgrade: {len(diff['packages_to_upgrade'])}") - _print_package_list(diff['packages_to_upgrade']) - - if diff['packages_to_downgrade']: - print(f"\nโฌ‡๏ธ Packages to downgrade: {len(diff['packages_to_downgrade'])}") - _print_package_list(diff['packages_to_downgrade']) - - if diff['preferences_changed']: - print(f"\nโš™๏ธ Preferences to change: {len(diff['preferences_changed'])}") - for key in diff['preferences_changed']: - print(f" - {key}") - - if diff['warnings']: - print("\nโš ๏ธ Warnings:") - for warning in diff['warnings']: - print(f" {warning}") - - print(f"\n{result['message']}") - - -def _print_import_results(result: Dict[str, Any]) -> None: - """Print import results in a formatted manner.""" - print("\nโœ… Import completed:\n") - - if result['installed']: - print(f"๐Ÿ“ฆ Installed: 
{len(result['installed'])} packages") - if result['upgraded']: - print(f"โฌ†๏ธ Upgraded: {len(result['upgraded'])} packages") - if result.get('downgraded'): - print(f"โฌ‡๏ธ Downgraded: {len(result['downgraded'])} packages") - if result['failed']: - print(f"โŒ Failed: {len(result['failed'])} packages") - for pkg in result['failed']: - print(f" - {pkg}") - if result['preferences_updated']: - print("โš™๏ธ Preferences updated") - - -def _handle_export_command(manager: 'ConfigManager', args) -> None: - """Handle the export command.""" - include_hardware = args.include_hardware - include_preferences = not args.no_preferences - - if args.packages_only: - include_hardware = False - include_preferences = False - - message = manager.export_configuration( - output_path=args.output, - include_hardware=include_hardware, - include_preferences=include_preferences - ) - print(message) - - -def _handle_import_command(manager: 'ConfigManager', args) -> None: - """Handle the import command.""" - selective = None - if args.packages_only: - selective = ['packages'] - elif args.preferences_only: - selective = ['preferences'] - - result = manager.import_configuration( - config_path=args.config_file, - dry_run=args.dry_run, - selective=selective, - force=args.force - ) - - if args.dry_run: - _print_dry_run_results(result) - else: - _print_import_results(result) - - -def _handle_diff_command(manager: 'ConfigManager', args) -> None: - """Handle the diff command.""" - with open(args.config_file, 'r') as f: - config = yaml.safe_load(f) - - diff = manager.diff_configuration(config) - - print("\n๐Ÿ“Š Configuration Differences:\n") - print(f"Packages to install: {len(diff['packages_to_install'])}") - print(f"Packages to upgrade: {len(diff['packages_to_upgrade'])}") - print(f"Packages to downgrade: {len(diff['packages_to_downgrade'])}") - print(f"Packages already installed: {len(diff['packages_already_installed'])}") - print(f"Preferences changed: {len(diff['preferences_changed'])}") - - if 
diff['warnings']: - print("\nโš ๏ธ Warnings:") - for warning in diff['warnings']: - print(f" {warning}") - - -def main(): - """CLI entry point for configuration manager.""" - import sys - - parser = _setup_argument_parser() - args = parser.parse_args() - - if not args.command: - parser.print_help() - sys.exit(1) - - manager = ConfigManager() - - try: - if args.command == 'export': - _handle_export_command(manager, args) - elif args.command == 'import': - _handle_import_command(manager, args) - elif args.command == 'diff': - _handle_diff_command(manager, args) - except Exception as e: - print(f"โŒ Error: {e}", file=sys.stderr) - sys.exit(1) - - -if __name__ == '__main__': - main() diff --git a/src/cortex/__init__.py b/src/cortex/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/cortex/core/__init__.py b/src/cortex/core/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/cortex/utils/__init__.py b/src/cortex/utils/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/src/demo_script.sh b/src/demo_script.sh deleted file mode 100755 index 3fadde0..0000000 --- a/src/demo_script.sh +++ /dev/null @@ -1,230 +0,0 @@ -#!/bin/bash -# Sandbox Executor - Video Demonstration Script -# Run commands in this order to showcase the implementation - -clear -echo "============================================================" -echo " CORTEX LINUX - SANDBOXED COMMAND EXECUTOR DEMONSTRATION" -echo "============================================================" -sleep 2 - -echo "" -echo "1. 
CHECKING SYSTEM STATUS" -echo "============================================================" -cd /home/dhaval/projects/open-source/cortex/src -python3 -c " -from sandbox_executor import SandboxExecutor -e = SandboxExecutor() -print(f'Firejail Available: {e.is_firejail_available()}') -print(f'Firejail Path: {e.firejail_path}') -print(f'Resource Limits: CPU={e.max_cpu_cores}, Memory={e.max_memory_mb}MB, Timeout={e.timeout_seconds}s') -" -sleep 2 - -echo "" -echo "2. BASIC FUNCTIONALITY - EXECUTING SAFE COMMAND" -echo "============================================================" -python3 -c " -from sandbox_executor import SandboxExecutor -e = SandboxExecutor() -result = e.execute('echo \"Hello from Cortex Sandbox!\"') -print(f'Command: echo \"Hello from Cortex Sandbox!\"') -print(f'Exit Code: {result.exit_code}') -print(f'Output: {result.stdout.strip()}') -print(f'Status: SUCCESS โœ“') -" -sleep 2 - -echo "" -echo "3. SECURITY - BLOCKING DANGEROUS COMMANDS" -echo "============================================================" -python3 -c " -from sandbox_executor import SandboxExecutor, CommandBlocked - -e = SandboxExecutor() -dangerous = [ - 'rm -rf /', - 'dd if=/dev/zero of=/dev/sda', - 'mkfs.ext4 /dev/sda1' -] - -for cmd in dangerous: - try: - e.execute(cmd) - print(f'โœ— {cmd}: ALLOWED (ERROR!)') - except CommandBlocked as err: - print(f'โœ“ {cmd}: BLOCKED - {str(err)[:50]}') -" -sleep 2 - -echo "" -echo "4. 
WHITELIST VALIDATION" -echo "============================================================" -python3 -c " -from sandbox_executor import SandboxExecutor -e = SandboxExecutor() - -print('Allowed Commands:') -allowed = ['echo test', 'python3 --version', 'git --version'] -for cmd in allowed: - is_valid, _ = e.validate_command(cmd) - print(f' โœ“ {cmd}: ALLOWED' if is_valid else f' โœ— {cmd}: BLOCKED') - -print('\nBlocked Commands:') -blocked = ['nc -l 1234', 'nmap localhost', 'bash -c evil'] -for cmd in blocked: - is_valid, reason = e.validate_command(cmd) - print(f' โœ“ {cmd}: BLOCKED - {reason[:40]}' if not is_valid else f' โœ— {cmd}: ALLOWED (ERROR!)') -" -sleep 2 - -echo "" -echo "5. DRY-RUN MODE - PREVIEW WITHOUT EXECUTION" -echo "============================================================" -python3 -c " -from sandbox_executor import SandboxExecutor -e = SandboxExecutor() -result = e.execute('apt-get update', dry_run=True) -print('Command: apt-get update') -print('Mode: DRY-RUN (no actual execution)') -print(f'Preview: {result.preview}') -print('โœ“ Safe preview generated') -" -sleep 2 - -echo "" -echo "6. FIREJAIL INTEGRATION - FULL SANDBOX ISOLATION" -echo "============================================================" -python3 -c " -from sandbox_executor import SandboxExecutor -e = SandboxExecutor() -cmd = e._create_firejail_command('echo test') -print('Firejail Command Structure:') -print(' '.join(cmd[:8]) + ' ...') -print('\nSecurity Features:') -features = { - 'Private namespace': '--private', - 'CPU limits': '--cpu=', - 'Memory limits': '--rlimit-as', - 'Network disabled': '--net=none', - 'No root': '--noroot', - 'Capabilities dropped': '--caps.drop=all', - 'Seccomp enabled': '--seccomp' -} -cmd_str = ' '.join(cmd) -for name, flag in features.items(): - print(f' โœ“ {name}' if flag in cmd_str else f' โœ— {name}') -" -sleep 2 - -echo "" -echo "7. 
SUDO RESTRICTIONS - PACKAGE INSTALLATION ONLY" -echo "============================================================" -python3 -c " -from sandbox_executor import SandboxExecutor -e = SandboxExecutor() - -print('Allowed Sudo Commands:') -allowed_sudo = ['sudo apt-get install python3', 'sudo pip install numpy'] -for cmd in allowed_sudo: - is_valid, _ = e.validate_command(cmd) - print(f' โœ“ {cmd}: ALLOWED' if is_valid else f' โœ— {cmd}: BLOCKED') - -print('\nBlocked Sudo Commands:') -blocked_sudo = ['sudo rm -rf /', 'sudo chmod 777 /'] -for cmd in blocked_sudo: - is_valid, reason = e.validate_command(cmd) - print(f' โœ“ {cmd}: BLOCKED' if not is_valid else f' โœ— {cmd}: ALLOWED (ERROR!)') -" -sleep 2 - -echo "" -echo "8. RESOURCE LIMITS ENFORCEMENT" -echo "============================================================" -python3 -c " -from sandbox_executor import SandboxExecutor -e = SandboxExecutor() -print(f'CPU Limit: {e.max_cpu_cores} cores') -print(f'Memory Limit: {e.max_memory_mb} MB') -print(f'Disk Limit: {e.max_disk_mb} MB') -print(f'Timeout: {e.timeout_seconds} seconds (5 minutes)') -print('โœ“ All resource limits configured and enforced') -" -sleep 2 - -echo "" -echo "9. COMPREHENSIVE LOGGING - AUDIT TRAIL" -echo "============================================================" -python3 -c " -from sandbox_executor import SandboxExecutor -e = SandboxExecutor() -e.execute('echo test1', dry_run=True) -e.execute('echo test2', dry_run=True) -audit = e.get_audit_log() -print(f'Total Log Entries: {len(audit)}') -print('\nRecent Entries:') -for entry in audit[-3:]: - print(f' - [{entry[\"type\"]}] {entry[\"command\"][:50]}') - print(f' Timestamp: {entry[\"timestamp\"]}') -print('โœ“ Complete audit trail maintained') -" -sleep 2 - -echo "" -echo "10. 
REAL-WORLD SCENARIO - PYTHON SCRIPT EXECUTION" -echo "============================================================" -python3 -c " -from sandbox_executor import SandboxExecutor -e = SandboxExecutor() -result = e.execute('python3 -c \"print(\\\"Hello from Python in sandbox!\\\")\"') -print('Command: python3 script execution') -print(f'Exit Code: {result.exit_code}') -print(f'Output: {result.stdout.strip() if result.stdout else \"(no output)\"}') -print(f'Status: {\"SUCCESS โœ“\" if result.success else \"FAILED\"}') -print('โœ“ Script executed safely in sandbox') -" -sleep 2 - -echo "" -echo "11. ROLLBACK CAPABILITY" -echo "============================================================" -python3 -c " -from sandbox_executor import SandboxExecutor -e = SandboxExecutor() -snapshot = e._create_snapshot('demo_session') -print(f'Snapshot Created: {\"demo_session\" in e.rollback_snapshots}') -print(f'Rollback Enabled: {e.enable_rollback}') -print('โœ“ Rollback mechanism ready') -" -sleep 2 - -echo "" -echo "12. 
FINAL VERIFICATION - ALL REQUIREMENTS MET" -echo "============================================================" -python3 -c " -print('Requirements Checklist:') -print(' โœ“ Firejail/Containerization: IMPLEMENTED') -print(' โœ“ Whitelist of commands: WORKING') -print(' โœ“ Resource limits: CONFIGURED') -print(' โœ“ Dry-run mode: FUNCTIONAL') -print(' โœ“ Rollback capability: READY') -print(' โœ“ Comprehensive logging: ACTIVE') -print(' โœ“ Security blocking: ENFORCED') -print(' โœ“ Sudo restrictions: ACTIVE') -print(' โœ“ Timeout protection: 5 MINUTES') -print(' โœ“ Path validation: WORKING') -" -sleep 2 - -echo "" -echo "============================================================" -echo " DEMONSTRATION COMPLETE - ALL FEATURES VERIFIED โœ“" -echo "============================================================" -echo "" -echo "Summary:" -echo " - 20/20 Unit Tests: PASSING" -echo " - All Requirements: MET" -echo " - Security Features: ACTIVE" -echo " - Production Ready: YES" -echo "" - diff --git a/src/hwprofiler.py b/src/hwprofiler.py deleted file mode 100755 index 97b012f..0000000 --- a/src/hwprofiler.py +++ /dev/null @@ -1,459 +0,0 @@ -#!/usr/bin/env python3 -""" -Hardware Profiling System for Cortex Linux -Detects CPU, GPU, RAM, storage, and network capabilities. -""" - -import json -import subprocess -import re -import os -from typing import Dict, List, Optional, Any -from pathlib import Path - - -class HardwareProfiler: - """Detects and profiles system hardware.""" - - def __init__(self): - self.cpu_info = None - self.gpu_info = [] - self.ram_info = None - self.storage_info = [] - self.network_info = None - - def detect_cpu(self) -> Dict[str, Any]: - """ - Detect CPU information: model, cores, architecture. 
- - Returns: - dict: CPU information with model, cores, and architecture - """ - cpu_info = {} - - try: - # Read /proc/cpuinfo for CPU details - with open('/proc/cpuinfo', 'r') as f: - cpuinfo = f.read() - - # Extract model name - model_match = re.search(r'model name\s*:\s*(.+)', cpuinfo) - if model_match: - cpu_info['model'] = model_match.group(1).strip() - else: - # Fallback for ARM or other architectures - model_match = re.search(r'Processor\s*:\s*(.+)', cpuinfo) - if model_match: - cpu_info['model'] = model_match.group(1).strip() - else: - cpu_info['model'] = "Unknown CPU" - - # Count physical cores - physical_cores = 0 - core_ids = set() - for line in cpuinfo.split('\n'): - if line.startswith('core id'): - core_id = line.split(':')[1].strip() - if core_id: - core_ids.add(core_id) - elif line.startswith('physical id'): - physical_cores = len(core_ids) if core_ids else 0 - - # If we couldn't get physical cores, count logical cores - if physical_cores == 0: - logical_cores = len([l for l in cpuinfo.split('\n') if l.startswith('processor')]) - cpu_info['cores'] = logical_cores - else: - # Get number of physical CPUs - physical_ids = set() - for line in cpuinfo.split('\n'): - if line.startswith('physical id'): - pid = line.split(':')[1].strip() - if pid: - physical_ids.add(pid) - cpu_info['cores'] = len(physical_ids) * len(core_ids) if core_ids else len(core_ids) - - # Fallback: use nproc if available - if cpu_info.get('cores', 0) == 0: - try: - result = subprocess.run(['nproc'], capture_output=True, text=True, timeout=1) - if result.returncode == 0: - cpu_info['cores'] = int(result.stdout.strip()) - except (subprocess.TimeoutExpired, ValueError, FileNotFoundError): - pass - - # Detect architecture - try: - result = subprocess.run(['uname', '-m'], capture_output=True, text=True, timeout=1) - if result.returncode == 0: - arch = result.stdout.strip() - cpu_info['architecture'] = arch - else: - cpu_info['architecture'] = 'unknown' - except (subprocess.TimeoutExpired, 
FileNotFoundError): - cpu_info['architecture'] = 'unknown' - - except Exception as e: - cpu_info = { - 'model': 'Unknown', - 'cores': 0, - 'architecture': 'unknown', - 'error': str(e) - } - - self.cpu_info = cpu_info - return cpu_info - - def detect_gpu(self) -> List[Dict[str, Any]]: - """ - Detect GPU information: vendor, model, VRAM, CUDA version. - - Returns: - list: List of GPU information dictionaries - """ - gpus = [] - - # Detect NVIDIA GPUs - try: - result = subprocess.run( - ['nvidia-smi', '--query-gpu=name,memory.total,driver_version', '--format=csv,noheader,nounits'], - capture_output=True, - text=True, - timeout=2 - ) - if result.returncode == 0: - for line in result.stdout.strip().split('\n'): - if line.strip(): - parts = [p.strip() for p in line.split(',')] - if len(parts) >= 2: - gpu_name = parts[0] - vram_mb = int(parts[1]) if parts[1].isdigit() else 0 - - gpu_info = { - 'vendor': 'NVIDIA', - 'model': gpu_name, - 'vram': vram_mb - } - - # Try to get CUDA version - try: - cuda_result = subprocess.run( - ['nvidia-smi', '--query-gpu=cuda_version', '--format=csv,noheader'], - capture_output=True, - text=True, - timeout=1 - ) - if cuda_result.returncode == 0 and cuda_result.stdout.strip(): - gpu_info['cuda'] = cuda_result.stdout.strip() - except (subprocess.TimeoutExpired, FileNotFoundError, ValueError): - # Try nvcc as fallback - try: - nvcc_result = subprocess.run( - ['nvcc', '--version'], - capture_output=True, - text=True, - timeout=1 - ) - if nvcc_result.returncode == 0: - version_match = re.search(r'release (\d+\.\d+)', nvcc_result.stdout) - if version_match: - gpu_info['cuda'] = version_match.group(1) - except (subprocess.TimeoutExpired, FileNotFoundError): - pass - - gpus.append(gpu_info) - except (subprocess.TimeoutExpired, FileNotFoundError): - pass - - # Detect AMD GPUs using lspci - try: - result = subprocess.run( - ['lspci'], - capture_output=True, - text=True, - timeout=1 - ) - if result.returncode == 0: - for line in 
result.stdout.split('\n'): - if 'VGA' in line or 'Display' in line: - if 'AMD' in line or 'ATI' in line or 'Radeon' in line: - # Extract model name - model_match = re.search(r'(?:AMD|ATI|Radeon)[\s/]+([A-Za-z0-9\s]+)', line) - model = model_match.group(1).strip() if model_match else 'Unknown AMD GPU' - - # Check if we already have this GPU (avoid duplicates) - if not any(g.get('vendor') == 'AMD' and g.get('model') == model for g in gpus): - gpu_info = { - 'vendor': 'AMD', - 'model': model, - 'vram': None # AMD VRAM detection requires rocm-smi or other tools - } - - # Try to get VRAM using rocm-smi if available - try: - rocm_result = subprocess.run( - ['rocm-smi', '--showmeminfo', 'vram'], - capture_output=True, - text=True, - timeout=1 - ) - if rocm_result.returncode == 0: - # Parse VRAM from rocm-smi output - vram_match = re.search(r'(\d+)\s*MB', rocm_result.stdout) - if vram_match: - gpu_info['vram'] = int(vram_match.group(1)) - except (subprocess.TimeoutExpired, FileNotFoundError): - pass - - gpus.append(gpu_info) - except (subprocess.TimeoutExpired, FileNotFoundError): - pass - - # Detect Intel GPUs - try: - result = subprocess.run( - ['lspci'], - capture_output=True, - text=True, - timeout=1 - ) - if result.returncode == 0: - for line in result.stdout.split('\n'): - if 'VGA' in line or 'Display' in line: - if 'Intel' in line: - model_match = re.search(r'Intel[^:]*:\s*([^\(]+)', line) - model = model_match.group(1).strip() if model_match else 'Unknown Intel GPU' - - if not any(g.get('vendor') == 'Intel' and g.get('model') == model for g in gpus): - gpus.append({ - 'vendor': 'Intel', - 'model': model, - 'vram': None # Intel integrated GPUs share system RAM - }) - except (subprocess.TimeoutExpired, FileNotFoundError): - pass - - self.gpu_info = gpus - return gpus - - def detect_ram(self) -> int: - """ - Detect total RAM in MB. 
- - Returns: - int: Total RAM in MB - """ - try: - # Read /proc/meminfo - with open('/proc/meminfo', 'r') as f: - meminfo = f.read() - - # Extract MemTotal - match = re.search(r'MemTotal:\s+(\d+)\s+kB', meminfo) - if match: - ram_kb = int(match.group(1)) - ram_mb = ram_kb // 1024 - self.ram_info = ram_mb - return ram_mb - else: - self.ram_info = 0 - return 0 - except Exception as e: - self.ram_info = 0 - return 0 - - def detect_storage(self) -> List[Dict[str, Any]]: - """ - Detect storage devices: type and size. - - Returns: - list: List of storage device information - """ - storage_devices = [] - - try: - # Use lsblk to get block device information - result = subprocess.run( - ['lsblk', '-d', '-o', 'NAME,TYPE,SIZE', '-n'], - capture_output=True, - text=True, - timeout=2 - ) - - if result.returncode == 0: - for line in result.stdout.strip().split('\n'): - if line.strip(): - parts = line.split() - if len(parts) >= 2: - device_name = parts[0] - - # Skip loop devices and other virtual devices - if device_name.startswith('loop') or device_name.startswith('ram'): - continue - - device_type = parts[1] if len(parts) > 1 else 'unknown' - size_str = parts[2] if len(parts) > 2 else '0' - - # Convert size to MB - size_mb = 0 - if 'G' in size_str.upper(): - size_mb = int(float(re.sub(r'[^0-9.]', '', size_str.replace('G', '').replace('g', ''))) * 1024) - elif 'T' in size_str.upper(): - size_mb = int(float(re.sub(r'[^0-9.]', '', size_str.replace('T', '').replace('t', ''))) * 1024 * 1024) - elif 'M' in size_str.upper(): - size_mb = int(float(re.sub(r'[^0-9.]', '', size_str.replace('M', '').replace('m', '')))) - - # Determine storage type - storage_type = 'unknown' - device_path = f'/sys/block/{device_name}' - - # Check if it's NVMe - if 'nvme' in device_name.lower(): - storage_type = 'nvme' - # Check if it's SSD (by checking if it's rotational) - elif os.path.exists(f'{device_path}/queue/rotational'): - try: - with open(f'{device_path}/queue/rotational', 'r') as f: - 
is_rotational = f.read().strip() == '1' - storage_type = 'hdd' if is_rotational else 'ssd' - except Exception: - storage_type = 'unknown' - else: - # Fallback: guess based on device name - if 'sd' in device_name.lower(): - storage_type = 'hdd' # Default assumption - elif 'nvme' in device_name.lower(): - storage_type = 'nvme' - - storage_devices.append({ - 'type': storage_type, - 'size': size_mb, - 'device': device_name - }) - except (subprocess.TimeoutExpired, FileNotFoundError) as e: - pass - - self.storage_info = storage_devices - return storage_devices - - def detect_network(self) -> Dict[str, Any]: - """ - Detect network capabilities. - - Returns: - dict: Network information including interfaces and speeds - """ - network_info = { - 'interfaces': [], - 'max_speed_mbps': 0 - } - - try: - # Get network interfaces using ip command - result = subprocess.run( - ['ip', '-o', 'link', 'show'], - capture_output=True, - text=True, - timeout=1 - ) - - if result.returncode == 0: - interfaces = [] - for line in result.stdout.split('\n'): - if ': ' in line: - parts = line.split(': ') - if len(parts) >= 2: - interface_name = parts[1].split('@')[0].split()[0] if '@' in parts[1] else parts[1].split()[0] - - # Skip loopback - if interface_name == 'lo': - continue - - # Try to get interface speed - speed = None - try: - speed_path = f'/sys/class/net/{interface_name}/speed' - if os.path.exists(speed_path): - with open(speed_path, 'r') as f: - speed_str = f.read().strip() - if speed_str.isdigit(): - speed = int(speed_str) - except Exception: - pass - - interfaces.append({ - 'name': interface_name, - 'speed_mbps': speed - }) - - if speed and speed > network_info['max_speed_mbps']: - network_info['max_speed_mbps'] = speed - - network_info['interfaces'] = interfaces - except (subprocess.TimeoutExpired, FileNotFoundError) as e: - pass - - self.network_info = network_info - return network_info - - def profile(self) -> Dict[str, Any]: - """ - Run complete hardware profiling. 
- - Returns: - dict: Complete hardware profile in JSON format - """ - # Run all detection methods - cpu = self.detect_cpu() - gpu = self.detect_gpu() - ram = self.detect_ram() - storage = self.detect_storage() - network = self.detect_network() - - # Build result dictionary - result = { - 'cpu': { - 'model': cpu.get('model', 'Unknown'), - 'cores': cpu.get('cores', 0), - 'architecture': cpu.get('architecture', 'unknown') - }, - 'gpu': gpu, - 'ram': ram, - 'storage': storage, - 'network': network - } - - return result - - def to_json(self, indent: int = 2) -> str: - """ - Convert hardware profile to JSON string. - - Args: - indent: JSON indentation level - - Returns: - str: JSON string representation - """ - profile = self.profile() - return json.dumps(profile, indent=indent) - - -def main(): - """CLI entry point for hardware profiler.""" - import sys - - profiler = HardwareProfiler() - - try: - profile = profiler.profile() - print(profiler.to_json()) - sys.exit(0) - except Exception as e: - print(json.dumps({'error': str(e)}, indent=2), file=sys.stderr) - sys.exit(1) - - -if __name__ == '__main__': - main() - diff --git a/src/progress_tracker.py b/src/progress_tracker.py deleted file mode 100644 index 3312ee9..0000000 --- a/src/progress_tracker.py +++ /dev/null @@ -1,725 +0,0 @@ -#!/usr/bin/env python3 -""" -Progress Notifications & Status Updates for Cortex Linux -Real-time progress tracking with time estimates and desktop notifications. 
- -Features: -- Beautiful progress bars with rich formatting -- Multi-stage progress tracking -- Time estimation algorithm -- Background operation support -- Desktop notifications -- Cancellation support with cleanup -""" - -import asyncio -import time -import sys -import signal -from typing import Optional, Callable, Dict, List, Any -from dataclasses import dataclass, field -from datetime import datetime, timedelta -from enum import Enum -from contextlib import asynccontextmanager - - -try: - from rich.console import Console - from rich.progress import ( - Progress, SpinnerColumn, BarColumn, TextColumn, - TimeElapsedColumn, TimeRemainingColumn, TaskProgressColumn - ) - from rich.live import Live - from rich.table import Table - from rich.panel import Panel - RICH_AVAILABLE = True -except ImportError: - RICH_AVAILABLE = False - - -try: - from plyer import notification as plyer_notification - PLYER_AVAILABLE = True -except ImportError: - PLYER_AVAILABLE = False - - -class StageStatus(Enum): - """Status of a progress stage.""" - PENDING = "pending" - IN_PROGRESS = "in_progress" - COMPLETED = "completed" - FAILED = "failed" - CANCELLED = "cancelled" - - -@dataclass -class ProgressStage: - """Represents a single stage in a multi-stage operation.""" - name: str - status: StageStatus = StageStatus.PENDING - progress: float = 0.0 # 0.0 to 1.0 - start_time: Optional[float] = None - end_time: Optional[float] = None - error: Optional[str] = None - total_bytes: Optional[int] = None - processed_bytes: int = 0 - - @property - def elapsed_time(self) -> float: - """Calculate elapsed time for this stage.""" - if self.start_time is None: - return 0.0 - end = self.end_time or time.time() - return end - self.start_time - - @property - def is_complete(self) -> bool: - """Check if stage is complete.""" - return self.status in (StageStatus.COMPLETED, StageStatus.FAILED, StageStatus.CANCELLED) - - def format_elapsed(self) -> str: - """Format elapsed time as human-readable string.""" - 
elapsed = self.elapsed_time - if elapsed < 60: - return f"{elapsed:.0f}s" - elif elapsed < 3600: - minutes = int(elapsed // 60) - seconds = int(elapsed % 60) - return f"{minutes}m {seconds}s" - else: - hours = int(elapsed // 3600) - minutes = int((elapsed % 3600) // 60) - return f"{hours}h {minutes}m" - - -class ProgressTracker: - """ - Multi-stage progress tracker with time estimation and notifications. - - Features: - - Rich terminal progress bars - - Time estimation based on throughput - - Multi-stage operation tracking - - Desktop notifications (optional) - - Cancellation support - - Background operation support - """ - - def __init__(self, - operation_name: str, - enable_notifications: bool = True, - notification_on_complete: bool = True, - notification_on_error: bool = True, - console: Optional[Any] = None): - """ - Initialize progress tracker. - - Args: - operation_name: Name of the operation (e.g., "Installing PostgreSQL") - enable_notifications: Enable desktop notifications - notification_on_complete: Send notification on completion - notification_on_error: Send notification on error - console: Rich console instance (created if None) - """ - self.operation_name = operation_name - self.enable_notifications = enable_notifications and PLYER_AVAILABLE - self.notification_on_complete = notification_on_complete - self.notification_on_error = notification_on_error - - # Rich console - if RICH_AVAILABLE: - self.console = console or Console() - else: - self.console = None - - # Stages - self.stages: List[ProgressStage] = [] - self.current_stage_index: int = -1 - - # Timing - self.start_time: Optional[float] = None - self.end_time: Optional[float] = None - - # Cancellation - self.cancelled: bool = False - self.cancel_callback: Optional[Callable] = None - - # Background task - self.background_task: Optional[asyncio.Task] = None - - def add_stage(self, name: str, total_bytes: Optional[int] = None) -> int: - """ - Add a stage to the operation. 
- - Args: - name: Name of the stage - total_bytes: Total bytes for this stage (for download/install tracking) - - Returns: - Index of the added stage - """ - stage = ProgressStage(name=name, total_bytes=total_bytes) - self.stages.append(stage) - return len(self.stages) - 1 - - def start(self): - """Start tracking progress.""" - self.start_time = time.time() - if RICH_AVAILABLE: - self.console.print(f"\n[bold cyan]{self.operation_name}[/bold cyan]") - else: - print(f"\n{self.operation_name}") - - def start_stage(self, stage_index: int): - """ - Start a specific stage. - - Args: - stage_index: Index of the stage to start - """ - if 0 <= stage_index < len(self.stages): - self.current_stage_index = stage_index - stage = self.stages[stage_index] - stage.status = StageStatus.IN_PROGRESS - stage.start_time = time.time() - - def update_stage_progress(self, stage_index: int, progress: float = None, - processed_bytes: int = None): - """ - Update progress for a specific stage. - - Args: - stage_index: Index of the stage - progress: Progress value (0.0 to 1.0) - processed_bytes: Number of bytes processed - """ - if 0 <= stage_index < len(self.stages): - stage = self.stages[stage_index] - - if progress is not None: - stage.progress = min(1.0, max(0.0, progress)) - - if processed_bytes is not None: - stage.processed_bytes = processed_bytes - if stage.total_bytes and stage.total_bytes > 0: - stage.progress = min(1.0, processed_bytes / stage.total_bytes) - - def complete_stage(self, stage_index: int, error: Optional[str] = None): - """ - Mark a stage as complete or failed. 
- - Args: - stage_index: Index of the stage - error: Error message if stage failed - """ - if 0 <= stage_index < len(self.stages): - stage = self.stages[stage_index] - stage.end_time = time.time() - - if error: - stage.status = StageStatus.FAILED - stage.error = error - else: - stage.status = StageStatus.COMPLETED - stage.progress = 1.0 - - def estimate_remaining_time(self) -> Optional[float]: - """ - Estimate remaining time based on completed stages and current progress. - - Returns: - Estimated seconds remaining, or None if cannot estimate - """ - if not self.stages or self.start_time is None: - return None - - # Calculate average time per completed stage - completed_stages = [s for s in self.stages if s.status == StageStatus.COMPLETED] - if not completed_stages: - # No completed stages yet - use current stage progress - if self.current_stage_index >= 0: - current_stage = self.stages[self.current_stage_index] - if current_stage.progress > 0 and current_stage.start_time: - elapsed = time.time() - current_stage.start_time - estimated_stage_time = elapsed / current_stage.progress - remaining_in_stage = estimated_stage_time - elapsed - - # Add time for remaining stages (estimate equal time) - remaining_stages = len(self.stages) - self.current_stage_index - 1 - return remaining_in_stage + (remaining_stages * estimated_stage_time) - - return None - - avg_stage_time = sum(s.elapsed_time for s in completed_stages) / len(completed_stages) - - # Calculate remaining stages - remaining_stages = len(self.stages) - len(completed_stages) - - # If there's a current stage in progress, estimate its remaining time - if self.current_stage_index >= 0: - current_stage = self.stages[self.current_stage_index] - if current_stage.status == StageStatus.IN_PROGRESS: - if current_stage.progress > 0: - elapsed = current_stage.elapsed_time - estimated_total = elapsed / current_stage.progress - remaining_in_current = estimated_total - elapsed - return remaining_in_current + ((remaining_stages - 
1) * avg_stage_time) - - return remaining_stages * avg_stage_time - - def format_time_remaining(self) -> str: - """Format estimated time remaining as human-readable string.""" - remaining = self.estimate_remaining_time() - if remaining is None: - return "calculating..." - - if remaining < 60: - return f"{int(remaining)}s" - elif remaining < 3600: - minutes = int(remaining // 60) - seconds = int(remaining % 60) - return f"{minutes}m {seconds}s" - else: - hours = int(remaining // 3600) - minutes = int((remaining % 3600) // 60) - return f"{hours}h {minutes}m" - - def get_overall_progress(self) -> float: - """ - Calculate overall progress across all stages. - - Returns: - Overall progress (0.0 to 1.0) - """ - if not self.stages: - return 0.0 - - total_progress = sum(s.progress for s in self.stages) - return total_progress / len(self.stages) - - def render_text_progress(self) -> str: - """ - Render progress as plain text (fallback when rich is not available). - - Returns: - Plain text progress representation - """ - lines = [f"\n{self.operation_name}"] - - overall_progress = self.get_overall_progress() - bar_width = 40 - filled = int(bar_width * overall_progress) - bar = "=" * filled + "-" * (bar_width - filled) - lines.append(f"[{bar}] {overall_progress * 100:.0f}%") - - # Time estimate - time_remaining = self.format_time_remaining() - lines.append(f"โฑ๏ธ Estimated time remaining: {time_remaining}") - lines.append("") - - # Stages - for i, stage in enumerate(self.stages): - if stage.status == StageStatus.COMPLETED: - icon = "[โœ“]" - info = f"({stage.format_elapsed()})" - elif stage.status == StageStatus.IN_PROGRESS: - icon = "[โ†’]" - info = "(current)" - elif stage.status == StageStatus.FAILED: - icon = "[โœ—]" - info = f"(failed: {stage.error})" - elif stage.status == StageStatus.CANCELLED: - icon = "[โŠ—]" - info = "(cancelled)" - else: - icon = "[ ]" - info = "" - - lines.append(f"{icon} {stage.name} {info}") - - return "\n".join(lines) - - def 
render_rich_progress(self) -> Table: - """ - Render progress using rich formatting. - - Returns: - Rich table with progress information - """ - if not RICH_AVAILABLE: - return None - - table = Table(show_header=False, box=None, padding=(0, 1)) - table.add_column("Icon", width=3) - table.add_column("Stage", ratio=1) - table.add_column("Info", justify="right") - - for stage in self.stages: - if stage.status == StageStatus.COMPLETED: - icon = "[green]โœ“[/green]" - info = f"[dim]({stage.format_elapsed()})[/dim]" - style = "green" - elif stage.status == StageStatus.IN_PROGRESS: - icon = "[cyan]โ†’[/cyan]" - info = "[cyan](current)[/cyan]" - style = "cyan bold" - elif stage.status == StageStatus.FAILED: - icon = "[red]โœ—[/red]" - info = f"[red](failed)[/red]" - style = "red" - elif stage.status == StageStatus.CANCELLED: - icon = "[yellow]โŠ—[/yellow]" - info = "[yellow](cancelled)[/yellow]" - style = "yellow" - else: - icon = "[dim][ ][/dim]" - info = "" - style = "dim" - - table.add_row(icon, f"[{style}]{stage.name}[/{style}]", info) - - return table - - def display_progress(self): - """Display current progress to console.""" - if RICH_AVAILABLE and self.console: - # Clear and redraw - self.console.clear() - - # Overall progress - overall = self.get_overall_progress() - time_remaining = self.format_time_remaining() - - self.console.print(f"\n[bold cyan]{self.operation_name}[/bold cyan]") - - # Progress bar - bar_width = 40 - filled = int(bar_width * overall) - bar = "โ”" * filled + "โ”€" * (bar_width - filled) - self.console.print(f"[cyan]{bar}[/cyan] {overall * 100:.0f}%") - self.console.print(f"โฑ๏ธ Estimated time remaining: [yellow]{time_remaining}[/yellow]\n") - - # Stages table - table = self.render_rich_progress() - if table: - self.console.print(table) - else: - # Fallback to plain text - print("\033[2J\033[H", end="") # Clear screen - print(self.render_text_progress()) - - def complete(self, success: bool = True, message: Optional[str] = None): - """ - Mark 
operation as complete. - - Args: - success: Whether operation completed successfully - message: Optional completion message - """ - self.end_time = time.time() - - # Complete any in-progress stages - for stage in self.stages: - if stage.status == StageStatus.IN_PROGRESS: - self.complete_stage( - self.stages.index(stage), - error=None if success else message - ) - - # Final display - self.display_progress() - - # Calculate total time - total_time = self.end_time - self.start_time if self.start_time else 0 - - # Display completion message - if RICH_AVAILABLE and self.console: - if success: - elapsed_str = self._format_duration(total_time) - final_msg = message or f"{self.operation_name} completed" - self.console.print(f"\n[green]โœ… {final_msg}[/green] [dim]({elapsed_str})[/dim]") - else: - self.console.print(f"\n[red]โŒ {message or 'Operation failed'}[/red]") - else: - if success: - print(f"\nโœ… {message or 'Completed'} ({total_time:.1f}s)") - else: - print(f"\nโŒ {message or 'Failed'}") - - # Send desktop notification - if self.enable_notifications: - if success and self.notification_on_complete: - self._send_notification( - f"{self.operation_name} Complete", - f"Finished in {self._format_duration(total_time)}" - ) - elif not success and self.notification_on_error: - self._send_notification( - f"{self.operation_name} Failed", - message or "Operation failed", - timeout=10 - ) - - def cancel(self, message: str = "Cancelled by user"): - """ - Cancel the operation. 
- - Args: - message: Cancellation message - """ - self.cancelled = True - - # Mark all pending/in-progress stages as cancelled - for stage in self.stages: - if stage.status in (StageStatus.PENDING, StageStatus.IN_PROGRESS): - stage.status = StageStatus.CANCELLED - if stage.start_time and not stage.end_time: - stage.end_time = time.time() - - # Call cancel callback if provided - if self.cancel_callback: - try: - self.cancel_callback() - except Exception as e: - if RICH_AVAILABLE and self.console: - self.console.print(f"[yellow]Warning: Cancel callback failed: {e}[/yellow]") - - # Display cancellation - if RICH_AVAILABLE and self.console: - self.console.print(f"\n[yellow]โŠ— {message}[/yellow]") - else: - print(f"\nโŠ— {message}") - - # Send notification - if self.enable_notifications: - self._send_notification( - f"{self.operation_name} Cancelled", - message, - timeout=5 - ) - - def _send_notification(self, title: str, message: str, timeout: int = 5): - """ - Send desktop notification. - - Args: - title: Notification title - message: Notification message - timeout: Notification timeout in seconds - """ - if not PLYER_AVAILABLE: - return - - try: - plyer_notification.notify( - title=title, - message=message, - app_name="Cortex Linux", - timeout=timeout - ) - except Exception: - # Silently fail if notifications aren't supported - pass - - def _format_duration(self, seconds: float) -> str: - """Format duration as human-readable string.""" - if seconds < 60: - return f"{seconds:.0f}s" - elif seconds < 3600: - minutes = int(seconds // 60) - secs = int(seconds % 60) - return f"{minutes}m {secs}s" - else: - hours = int(seconds // 3600) - minutes = int((seconds % 3600) // 60) - return f"{hours}h {minutes}m" - - def setup_cancellation_handler(self, callback: Optional[Callable] = None): - """ - Setup signal handler for graceful cancellation (Ctrl+C). 
- - Args: - callback: Optional callback to run on cancellation - """ - self.cancel_callback = callback - - def signal_handler(signum, frame): - self.cancel("Operation cancelled by user (Ctrl+C)") - sys.exit(130) # Exit code for Ctrl+C - - signal.signal(signal.SIGINT, signal_handler) - signal.signal(signal.SIGTERM, signal_handler) - - -class RichProgressTracker(ProgressTracker): - """ - Enhanced progress tracker using rich library for beautiful terminal output. - """ - - def __init__(self, *args, **kwargs): - if not RICH_AVAILABLE: - raise ImportError("rich library is required for RichProgressTracker. Install with: pip install rich") - super().__init__(*args, **kwargs) - self.progress_obj: Optional[Progress] = None - self.live: Optional[Live] = None - self.task_ids: Dict[int, Any] = {} - - @asynccontextmanager - async def live_progress(self): - """Context manager for live progress updates.""" - self.progress_obj = Progress( - SpinnerColumn(), - TextColumn("[bold blue]{task.description}"), - BarColumn(), - TaskProgressColumn(), - TimeElapsedColumn(), - TimeRemainingColumn(), - ) - - # Add tasks for each stage - for i, stage in enumerate(self.stages): - task_id = self.progress_obj.add_task( - stage.name, - total=100, - visible=(i == 0) # Only show first stage initially - ) - self.task_ids[i] = task_id - - try: - with self.progress_obj: - yield self - finally: - self.progress_obj = None - self.task_ids = {} - - def start_stage(self, stage_index: int): - """Start a stage and make its progress bar visible.""" - super().start_stage(stage_index) - - if self.progress_obj and stage_index in self.task_ids: - task_id = self.task_ids[stage_index] - self.progress_obj.update(task_id, visible=True) - - def update_stage_progress(self, stage_index: int, progress: float = None, - processed_bytes: int = None): - """Update stage progress and refresh progress bar.""" - super().update_stage_progress(stage_index, progress, processed_bytes) - - if self.progress_obj and stage_index in 
self.task_ids: - stage = self.stages[stage_index] - task_id = self.task_ids[stage_index] - self.progress_obj.update(task_id, completed=stage.progress * 100) - - def complete_stage(self, stage_index: int, error: Optional[str] = None): - """Complete a stage and update its status.""" - super().complete_stage(stage_index, error) - - if self.progress_obj and stage_index in self.task_ids: - task_id = self.task_ids[stage_index] - if error: - self.progress_obj.update(task_id, description=f"[red]{self.stages[stage_index].name} (failed)[/red]") - else: - self.progress_obj.update(task_id, completed=100) - - -async def run_with_progress(tracker: ProgressTracker, - operation_func: Callable, - *args, **kwargs) -> Any: - """ - Run an async operation with progress tracking. - - Args: - tracker: ProgressTracker instance - operation_func: Async function to execute - *args, **kwargs: Arguments to pass to operation_func - - Returns: - Result from operation_func - """ - tracker.start() - tracker.setup_cancellation_handler() - - try: - result = await operation_func(tracker, *args, **kwargs) - tracker.complete(success=True) - return result - except asyncio.CancelledError: - tracker.cancel("Operation cancelled") - raise - except Exception as e: - tracker.complete(success=False, message=str(e)) - raise - - -# Example usage demonstrating the API -async def example_installation(tracker: ProgressTracker): - """Example installation with multiple stages.""" - - # Add stages - update_idx = tracker.add_stage("Update package lists") - download_idx = tracker.add_stage("Download postgresql-15", total_bytes=50_000_000) # 50MB - install_idx = tracker.add_stage("Installing dependencies") - configure_idx = tracker.add_stage("Configuring database") - test_idx = tracker.add_stage("Running tests") - - # Stage 1: Update package lists - tracker.start_stage(update_idx) - await asyncio.sleep(1) # Simulate work - for i in range(10): - tracker.update_stage_progress(update_idx, progress=(i + 1) / 10) - 
tracker.display_progress() - await asyncio.sleep(0.1) - tracker.complete_stage(update_idx) - - # Stage 2: Download - tracker.start_stage(download_idx) - bytes_downloaded = 0 - chunk_size = 5_000_000 # 5MB chunks - while bytes_downloaded < 50_000_000: - await asyncio.sleep(0.2) - bytes_downloaded = min(bytes_downloaded + chunk_size, 50_000_000) - tracker.update_stage_progress(download_idx, processed_bytes=bytes_downloaded) - tracker.display_progress() - tracker.complete_stage(download_idx) - - # Stage 3: Install dependencies - tracker.start_stage(install_idx) - for i in range(15): - tracker.update_stage_progress(install_idx, progress=(i + 1) / 15) - tracker.display_progress() - await asyncio.sleep(0.15) - tracker.complete_stage(install_idx) - - # Stage 4: Configure - tracker.start_stage(configure_idx) - for i in range(8): - tracker.update_stage_progress(configure_idx, progress=(i + 1) / 8) - tracker.display_progress() - await asyncio.sleep(0.2) - tracker.complete_stage(configure_idx) - - # Stage 5: Test - tracker.start_stage(test_idx) - for i in range(5): - tracker.update_stage_progress(test_idx, progress=(i + 1) / 5) - tracker.display_progress() - await asyncio.sleep(0.3) - tracker.complete_stage(test_idx) - - -async def main(): - """Demo of progress tracking.""" - tracker = ProgressTracker( - operation_name="Installing PostgreSQL", - enable_notifications=True - ) - - await run_with_progress(tracker, example_installation) - - -if __name__ == '__main__': - print("Progress Tracker Demo") - print("=" * 50) - asyncio.run(main()) - diff --git a/src/requirements.txt b/src/requirements.txt deleted file mode 100644 index 81aca17..0000000 --- a/src/requirements.txt +++ /dev/null @@ -1,22 +0,0 @@ -# Cortex Linux Requirements -# Python 3.8+ required - -# Core Dependencies -rich>=13.0.0 # Beautiful terminal progress bars and formatting -plyer>=2.0.0 # Desktop notifications (optional but recommended) - -# Configuration Management -pyyaml>=6.0.1 -packaging>=23.0 - -# Testing 
Dependencies (dev) -pytest>=7.0.0 -pytest-asyncio>=0.21.0 -pytest-cov>=4.0.0 - -# System dependencies (Ubuntu 22.04+): -# - nvidia-smi (for NVIDIA GPU detection) -# - rocm-smi (optional, for AMD GPU VRAM detection) -# - lspci (usually pre-installed) -# - lsblk (usually pre-installed) -# - ip (usually pre-installed) diff --git a/src/sandbox_example.py b/src/sandbox_example.py deleted file mode 100644 index af551cc..0000000 --- a/src/sandbox_example.py +++ /dev/null @@ -1,227 +0,0 @@ -#!/usr/bin/env python3 -""" -Example usage of Sandboxed Command Executor. - -This demonstrates how to use the sandbox executor to safely run AI-generated commands. -""" - -from sandbox_executor import SandboxExecutor, CommandBlocked - - -def example_basic_usage(): - """Basic usage example.""" - print("=== Basic Usage ===") - - # Create executor - executor = SandboxExecutor() - - # Execute a safe command - try: - result = executor.execute('echo "Hello, Cortex!"') - print(f"Exit code: {result.exit_code}") - print(f"Output: {result.stdout}") - print(f"Execution time: {result.execution_time:.2f}s") - except CommandBlocked as e: - print(f"Command blocked: {e}") - - -def example_dry_run(): - """Dry-run mode example.""" - print("\n=== Dry-Run Mode ===") - - executor = SandboxExecutor() - - # Preview what would execute - result = executor.execute('apt-get update', dry_run=True) - print(f"Preview: {result.preview}") - print(f"Output: {result.stdout}") - - -def example_blocked_commands(): - """Example of blocked commands.""" - print("\n=== Blocked Commands ===") - - executor = SandboxExecutor() - - dangerous_commands = [ - 'rm -rf /', - 'dd if=/dev/zero of=/dev/sda', - 'mkfs.ext4 /dev/sda1', - ] - - for cmd in dangerous_commands: - try: - result = executor.execute(cmd) - print(f"Unexpected: {cmd} was allowed") - except CommandBlocked as e: - print(f"โœ“ Blocked: {cmd} - {e}") - - -def example_with_rollback(): - """Example with rollback capability.""" - print("\n=== Rollback Example ===") - - 
executor = SandboxExecutor(enable_rollback=True) - - # Execute a command that might fail - try: - result = executor.execute('invalid-command-that-fails') - if result.failed: - print(f"Command failed, rollback triggered") - print(f"Stderr: {result.stderr}") - except CommandBlocked as e: - print(f"Command blocked: {e}") - - -def example_audit_logging(): - """Example of audit logging.""" - print("\n=== Audit Logging ===") - - executor = SandboxExecutor() - - # Execute some commands - try: - executor.execute('echo "test1"', dry_run=True) - executor.execute('echo "test2"', dry_run=True) - except: - pass - - # Get audit log - audit_log = executor.get_audit_log() - print(f"Total log entries: {len(audit_log)}") - - for entry in audit_log[-5:]: # Last 5 entries - print(f" - {entry['timestamp']}: {entry['command']} (type: {entry['type']})") - - # Save audit log - executor.save_audit_log('audit_log.json') - print("Audit log saved to audit_log.json") - - -def example_resource_limits(): - """Example of resource limits.""" - print("\n=== Resource Limits ===") - - # Create executor with custom limits - executor = SandboxExecutor( - max_cpu_cores=1, - max_memory_mb=1024, - max_disk_mb=512, - timeout_seconds=60 - ) - - print(f"CPU limit: {executor.max_cpu_cores} cores") - print(f"Memory limit: {executor.max_memory_mb} MB") - print(f"Disk limit: {executor.max_disk_mb} MB") - print(f"Timeout: {executor.timeout_seconds} seconds") - - -def example_sudo_commands(): - """Example of sudo command handling.""" - print("\n=== Sudo Commands ===") - - executor = SandboxExecutor() - - # Allowed sudo commands (package installation) - allowed_sudo = [ - 'sudo apt-get install python3', - 'sudo pip install numpy', - ] - - for cmd in allowed_sudo: - is_valid, violation = executor.validate_command(cmd) - if is_valid: - print(f"โœ“ Allowed: {cmd}") - else: - print(f"โœ— Blocked: {cmd} - {violation}") - - # Blocked sudo commands - blocked_sudo = [ - 'sudo rm -rf /', - 'sudo chmod 777 /', - ] - - for 
cmd in blocked_sudo: - is_valid, violation = executor.validate_command(cmd) - if not is_valid: - print(f"โœ“ Blocked: {cmd} - {violation}") - - -def example_status_check(): - """Check system status and configuration.""" - print("\n=== System Status ===") - - executor = SandboxExecutor() - - # Check Firejail availability - if executor.is_firejail_available(): - print("โœ“ Firejail is available - Full sandbox isolation enabled") - print(f" Firejail path: {executor.firejail_path}") - else: - print("โš  Firejail not found - Using fallback mode (reduced security)") - print(" Install with: sudo apt-get install firejail") - - # Show configuration - print(f"\nResource Limits:") - print(f" CPU: {executor.max_cpu_cores} cores") - print(f" Memory: {executor.max_memory_mb} MB") - print(f" Disk: {executor.max_disk_mb} MB") - print(f" Timeout: {executor.timeout_seconds} seconds") - print(f" Rollback: {'Enabled' if executor.enable_rollback else 'Disabled'}") - - -def example_command_validation(): - """Demonstrate command validation.""" - print("\n=== Command Validation ===") - - executor = SandboxExecutor() - - test_commands = [ - ('echo "test"', True), - ('python3 --version', True), - ('rm -rf /', False), - ('sudo apt-get install python3', True), - ('sudo rm -rf /', False), - ('nc -l 1234', False), # Not whitelisted - ] - - for cmd, expected_valid in test_commands: - is_valid, violation = executor.validate_command(cmd) - status = "โœ“" if (is_valid == expected_valid) else "โœ—" - result = "ALLOWED" if is_valid else "BLOCKED" - print(f"{status} {result}: {cmd}") - if not is_valid and violation: - print(f" Reason: {violation}") - - -def main(): - """Run all examples.""" - print("=" * 60) - print("Sandboxed Command Executor - Usage Examples") - print("=" * 60) - - example_status_check() - example_basic_usage() - example_dry_run() - example_command_validation() - example_blocked_commands() - example_with_rollback() - example_audit_logging() - example_resource_limits() - 
example_sudo_commands() - - print("\n" + "=" * 60) - print("Examples Complete") - print("=" * 60) - print("\nSummary:") - print(" โœ“ Command validation working") - print(" โœ“ Security blocking active") - print(" โœ“ Dry-run mode functional") - print(" โœ“ Audit logging enabled") - print(" โœ“ Resource limits configured") - print(" โœ“ Sudo restrictions enforced") - - -if __name__ == '__main__': - main() - diff --git a/src/sandbox_executor.py b/src/sandbox_executor.py deleted file mode 100644 index af52417..0000000 --- a/src/sandbox_executor.py +++ /dev/null @@ -1,681 +0,0 @@ -#!/usr/bin/env python3 -""" -Sandboxed Command Execution Layer for Cortex Linux -Critical security component - AI-generated commands must run in isolated environment. - -Features: -- Firejail-based sandboxing -- Command whitelisting -- Resource limits (CPU, memory, disk, time) -- Dry-run mode -- Rollback capability -- Comprehensive logging -""" - -import subprocess -import shlex -import os -import sys -import re -import json -import time -import shutil -import logging -import resource -from typing import Dict, List, Optional, Tuple, Any -from datetime import datetime - - -class CommandBlocked(Exception): - """Raised when a command is blocked.""" - pass - - -class ExecutionResult: - """Result of command execution.""" - - def __init__(self, command: str, exit_code: int = 0, stdout: str = "", - stderr: str = "", execution_time: float = 0.0, - blocked: bool = False, violation: Optional[str] = None, - preview: Optional[str] = None): - self.command = command - self.exit_code = exit_code - self.stdout = stdout - self.stderr = stderr - self.execution_time = execution_time - self.blocked = blocked - self.violation = violation - self.preview = preview - self.timestamp = datetime.now().isoformat() - - @property - def success(self) -> bool: - """Check if command executed successfully.""" - return not self.blocked and self.exit_code == 0 - - @property - def failed(self) -> bool: - """Check if command 
failed.""" - return not self.success - - def to_dict(self) -> Dict[str, Any]: - """Convert to dictionary.""" - return { - 'command': self.command, - 'exit_code': self.exit_code, - 'stdout': self.stdout, - 'stderr': self.stderr, - 'execution_time': self.execution_time, - 'blocked': self.blocked, - 'violation': self.violation, - 'preview': self.preview, - 'timestamp': self.timestamp, - 'success': self.success - } - - -class SandboxExecutor: - """ - Sandboxed command executor with security controls. - - Features: - - Firejail sandboxing - - Command whitelisting - - Resource limits - - Dry-run mode - - Rollback capability - - Comprehensive logging - """ - - # Whitelist of allowed commands (base commands only) - ALLOWED_COMMANDS = { - 'apt-get', 'apt', 'dpkg', - 'pip', 'pip3', 'python', 'python3', - 'npm', 'yarn', 'node', - 'git', 'make', 'cmake', - 'gcc', 'g++', 'clang', - 'curl', 'wget', - 'tar', 'unzip', 'zip', - 'echo', 'cat', 'grep', 'sed', 'awk', - 'ls', 'pwd', 'cd', 'mkdir', 'touch', - 'chmod', 'chown', # Limited use - 'systemctl', # Read-only operations - } - - # Commands that require sudo (package installation only) - SUDO_ALLOWED_COMMANDS = { - 'apt-get install', 'apt-get update', 'apt-get upgrade', - 'apt install', 'apt update', 'apt upgrade', - 'pip install', 'pip3 install', - 'dpkg -i', - } - - # Dangerous patterns to block - DANGEROUS_PATTERNS = [ - r'rm\s+-rf\s+[/\*]', # rm -rf / or rm -rf /* - r'rm\s+-rf\s+\$HOME', # rm -rf $HOME - r'rm\s+--no-preserve-root', # rm with no-preserve-root - r'dd\s+if=', # dd command - r'mkfs\.', # mkfs commands - r'fdisk', # fdisk - r'parted', # parted - r'wipefs', # wipefs - r'format\s+', # format commands - r'>\s*/dev/', # Redirect to device files - r'chmod\s+[0-7]{3,4}\s+/', # chmod on root - r'chmod\s+777', # World-writable permissions - r'chmod\s+\+s', # Setuid bit - r'chown\s+.*\s+/', # chown on root - # Remote code execution patterns - r'curl\s+.*\|\s*sh', # curl pipe to shell - r'curl\s+.*\|\s*bash', # curl pipe to 
bash - r'wget\s+.*\|\s*sh', # wget pipe to shell - r'wget\s+.*\|\s*bash', # wget pipe to bash - r'curl\s+-o\s+-\s+.*\|', # curl output to pipe - # Code injection patterns - r'\beval\s+', # eval command - r'python\s+-c\s+["\'].*exec', # python -c exec - r'python\s+-c\s+["\'].*__import__', # python -c import - r'base64\s+-d\s+.*\|', # base64 decode to pipe - r'>\s*/etc/', # Write to /etc - # Privilege escalation - r'sudo\s+su\s*$', # sudo su - r'sudo\s+-i\s*$', # sudo -i (interactive root) - # Environment manipulation - r'export\s+LD_PRELOAD', # LD_PRELOAD hijacking - r'export\s+LD_LIBRARY_PATH.*=/', # Library path hijacking - # Fork bomb - r':\s*\(\)\s*\{\s*:\s*\|\s*:\s*&\s*\}', # :(){ :|:& };: - ] - - # Allowed directories for file operations - ALLOWED_DIRECTORIES = [ - '/tmp', - '/var/tmp', - os.path.expanduser('~'), - ] - - def __init__(self, - firejail_path: Optional[str] = None, - log_file: Optional[str] = None, - max_cpu_cores: int = 2, - max_memory_mb: int = 2048, - max_disk_mb: int = 1024, - timeout_seconds: int = 300, # 5 minutes - enable_rollback: bool = True): - """ - Initialize sandbox executor. 
- - Args: - firejail_path: Path to firejail binary (auto-detected if None) - log_file: Path to audit log file - max_cpu_cores: Maximum CPU cores to use - max_memory_mb: Maximum memory in MB - max_disk_mb: Maximum disk space in MB - timeout_seconds: Maximum execution time in seconds - enable_rollback: Enable automatic rollback on failure - """ - self.firejail_path = firejail_path or self._find_firejail() - self.max_cpu_cores = max_cpu_cores - self.max_memory_mb = max_memory_mb - self.max_disk_mb = max_disk_mb - self.timeout_seconds = timeout_seconds - self.enable_rollback = enable_rollback - - # Setup logging - self.log_file = log_file or os.path.join( - os.path.expanduser('~'), '.cortex', 'sandbox_audit.log' - ) - self._setup_logging() - - # Rollback tracking - self.rollback_snapshots: Dict[str, Dict[str, Any]] = {} - self.current_session_id: Optional[str] = None - - # Audit log - self.audit_log: List[Dict[str, Any]] = [] - - # Verify firejail is available - if not self.firejail_path: - self.logger.warning( - "Firejail not found. Sandboxing will be limited. " - "Install firejail for full security: sudo apt-get install firejail" - ) - - def _find_firejail(self) -> Optional[str]: - """Find firejail binary in system PATH.""" - firejail_path = shutil.which('firejail') - return firejail_path - - def is_firejail_available(self) -> bool: - """ - Check if Firejail is available on the system. 
- - Returns: - True if Firejail is available, False otherwise - """ - return self.firejail_path is not None - - def _setup_logging(self): - """Setup logging configuration.""" - # Create log directory if it doesn't exist - log_dir = os.path.dirname(self.log_file) - if log_dir and not os.path.exists(log_dir): - os.makedirs(log_dir, mode=0o700, exist_ok=True) - - # Setup logger (avoid duplicate handlers) - self.logger = logging.getLogger('SandboxExecutor') - self.logger.setLevel(logging.INFO) - - # Clear existing handlers to avoid duplicates - self.logger.handlers.clear() - - # File handler - file_handler = logging.FileHandler(self.log_file) - file_handler.setLevel(logging.INFO) - - # Console handler (only warnings and above) - console_handler = logging.StreamHandler(sys.stderr) - console_handler.setLevel(logging.WARNING) - - # Formatter - formatter = logging.Formatter( - '%(asctime)s - %(name)s - %(levelname)s - %(message)s' - ) - file_handler.setFormatter(formatter) - console_handler.setFormatter(formatter) - - self.logger.addHandler(file_handler) - self.logger.addHandler(console_handler) - - # Prevent propagation to root logger - self.logger.propagate = False - - def validate_command(self, command: str) -> Tuple[bool, Optional[str]]: - """ - Validate command for security. 
- - Args: - command: Command string to validate - - Returns: - Tuple of (is_valid, violation_reason) - """ - # Check for dangerous patterns - for pattern in self.DANGEROUS_PATTERNS: - if re.search(pattern, command, re.IGNORECASE): - return False, f"Dangerous pattern detected: {pattern}" - - # Parse command - try: - parts = shlex.split(command) - if not parts: - return False, "Empty command" - - base_command = parts[0] - - # Check if command is in whitelist - if base_command not in self.ALLOWED_COMMANDS: - # Check if it's a sudo command - if base_command == 'sudo': - if len(parts) < 2: - return False, "Sudo command without arguments" - - sudo_command = ' '.join(parts[1:3]) if len(parts) >= 3 else parts[1] - - # Check if sudo command is allowed - if not any(sudo_command.startswith(allowed) for allowed in self.SUDO_ALLOWED_COMMANDS): - return False, f"Sudo command not whitelisted: {sudo_command}" - else: - return False, f"Command not whitelisted: {base_command}" - - # Validate file paths in command - path_violation = self._validate_paths(command) - if path_violation: - return False, path_violation - - return True, None - - except ValueError as e: - return False, f"Invalid command syntax: {str(e)}" - - def _validate_paths(self, command: str) -> Optional[str]: - """ - Validate file paths in command to prevent path traversal attacks. 
- - Args: - command: Command string - - Returns: - Violation reason if found, None otherwise - """ - # Extract potential file paths - # This is a simplified check - in production, use proper shell parsing - path_pattern = r'[/~][^\s<>|&;]*' - paths = re.findall(path_pattern, command) - - for path in paths: - # Expand user home - expanded = os.path.expanduser(path) - # Resolve to absolute path - try: - abs_path = os.path.abspath(expanded) - except (OSError, ValueError): - continue - - # Check if path is in allowed directories - allowed = False - for allowed_dir in self.ALLOWED_DIRECTORIES: - allowed_expanded = os.path.expanduser(allowed_dir) - allowed_abs = os.path.abspath(allowed_expanded) - - # Allow if path is within allowed directory - try: - if os.path.commonpath([abs_path, allowed_abs]) == allowed_abs: - allowed = True - break - except ValueError: - # Paths don't share common path - pass - - # Block access to critical system directories - critical_dirs = ['/boot', '/sys', '/proc', '/dev', '/etc', '/usr/bin', '/usr/sbin', '/sbin', '/bin'] - for critical in critical_dirs: - if abs_path.startswith(critical): - # Allow /dev/null for redirection - if abs_path == '/dev/null': - continue - # Allow reading from /etc for some commands (like apt-get) - if critical == '/etc' and 'read' in command.lower(): - continue - return f"Access to critical directory blocked: {abs_path}" - - # Block path traversal attempts - if '..' 
in path or path.startswith('/') and not any(abs_path.startswith(os.path.expanduser(d)) for d in self.ALLOWED_DIRECTORIES): - # Allow if it's a command argument (like --config=/etc/file.conf) - if not any(abs_path.startswith(os.path.expanduser(d)) for d in self.ALLOWED_DIRECTORIES): - # More permissive: only block if clearly dangerous - if any(danger in abs_path for danger in ['/etc/passwd', '/etc/shadow', '/boot', '/sys']): - return f"Path traversal to sensitive location blocked: {abs_path}" - - # If not in allowed directory and not a standard command argument, warn - # (This is permissive - adjust based on security requirements) - - return None - - def _create_firejail_command(self, command: str) -> List[str]: - """ - Create firejail command with resource limits. - - Args: - command: Command to execute - - Returns: - List of command parts for subprocess - """ - if not self.firejail_path: - # Fallback to direct execution (not recommended) - return shlex.split(command) - - # Build firejail command with security options - memory_bytes = self.max_memory_mb * 1024 * 1024 - firejail_cmd = [ - self.firejail_path, - '--quiet', # Suppress firejail messages - '--noprofile', # Don't use default profile - '--private', # Private home directory - '--private-tmp', # Private /tmp - f'--cpu={self.max_cpu_cores}', # CPU limit - f'--rlimit-as={memory_bytes}', # Memory limit (address space) - '--net=none', # No network (adjust if needed) - '--noroot', # No root access - '--caps.drop=all', # Drop all capabilities - '--shell=none', # No shell - '--seccomp', # Enable seccomp filtering - ] - - # Add command - firejail_cmd.extend(shlex.split(command)) - - return firejail_cmd - - def _create_snapshot(self, session_id: str) -> Dict[str, Any]: - """ - Create snapshot of current state for rollback. 
- - Args: - session_id: Session identifier - - Returns: - Snapshot dictionary - """ - snapshot = { - 'session_id': session_id, - 'timestamp': datetime.now().isoformat(), - 'files_modified': [], - 'files_created': [], - 'file_backups': {}, # Store file contents for restoration - } - - # Track files in allowed directories that might be modified - # Store their current state for potential rollback - for allowed_dir in self.ALLOWED_DIRECTORIES: - allowed_expanded = os.path.expanduser(allowed_dir) - if os.path.exists(allowed_expanded): - # Note: Full file tracking would require inotify or filesystem monitoring - # For now, we track the directory state - try: - snapshot['directories_tracked'] = snapshot.get('directories_tracked', []) - snapshot['directories_tracked'].append(allowed_expanded) - except Exception: - pass - - self.rollback_snapshots[session_id] = snapshot - self.logger.debug(f"Created snapshot for session {session_id}") - return snapshot - - def _rollback(self, session_id: str) -> bool: - """ - Rollback changes from a session. 
- - Args: - session_id: Session identifier - - Returns: - True if rollback successful - """ - if session_id not in self.rollback_snapshots: - self.logger.warning(f"No snapshot found for session {session_id}") - return False - - snapshot = self.rollback_snapshots[session_id] - self.logger.info(f"Rolling back session {session_id}") - - # Restore backed up files - restored_count = 0 - for file_path, file_content in snapshot.get('file_backups', {}).items(): - try: - if os.path.exists(file_path): - with open(file_path, 'wb') as f: - f.write(file_content) - restored_count += 1 - self.logger.debug(f"Restored file: {file_path}") - except Exception as e: - self.logger.warning(f"Failed to restore {file_path}: {e}") - - # Remove created files - for file_path in snapshot.get('files_created', []): - try: - if os.path.exists(file_path): - os.remove(file_path) - self.logger.debug(f"Removed created file: {file_path}") - except Exception as e: - self.logger.warning(f"Failed to remove {file_path}: {e}") - - self.logger.info(f"Rollback completed: {restored_count} files restored, " - f"{len(snapshot.get('files_created', []))} files removed") - return True - - def execute(self, - command: str, - dry_run: bool = False, - enable_rollback: Optional[bool] = None) -> ExecutionResult: - """ - Execute command in sandbox. 
- - Args: - command: Command to execute - dry_run: If True, only show what would execute - enable_rollback: Override default rollback setting - - Returns: - ExecutionResult object - """ - start_time = time.time() - session_id = f"session_{int(start_time)}" - self.current_session_id = session_id - - # Validate command - is_valid, violation = self.validate_command(command) - if not is_valid: - result = ExecutionResult( - command=command, - exit_code=-1, - blocked=True, - violation=violation, - execution_time=time.time() - start_time - ) - self._log_security_event(result) - raise CommandBlocked(violation or "Command blocked") - - # Create snapshot for rollback - if (enable_rollback if enable_rollback is not None else self.enable_rollback): - self._create_snapshot(session_id) - - # Dry-run mode - if dry_run: - firejail_cmd = self._create_firejail_command(command) - preview = ' '.join(shlex.quote(arg) for arg in firejail_cmd) - - result = ExecutionResult( - command=command, - exit_code=0, - stdout=f"[DRY-RUN] Would execute: {preview}", - preview=preview, - execution_time=time.time() - start_time - ) - self._log_execution(result) - return result - - # Execute command - try: - firejail_cmd = self._create_firejail_command(command) - - self.logger.info(f"Executing: {command}") - - # Set resource limits if not using Firejail - preexec_fn = None - if not self.firejail_path: - def set_resource_limits(): - """Set resource limits for the subprocess.""" - try: - # Memory limit (RSS - Resident Set Size) - memory_bytes = self.max_memory_mb * 1024 * 1024 - resource.setrlimit(resource.RLIMIT_AS, (memory_bytes, memory_bytes)) - # CPU time limit (soft and hard) - cpu_seconds = self.timeout_seconds - resource.setrlimit(resource.RLIMIT_CPU, (cpu_seconds, cpu_seconds)) - # File size limit - disk_bytes = self.max_disk_mb * 1024 * 1024 - resource.setrlimit(resource.RLIMIT_FSIZE, (disk_bytes, disk_bytes)) - except (ValueError, OSError) as e: - self.logger.warning(f"Failed to set resource 
limits: {e}") - preexec_fn = set_resource_limits - - process = subprocess.Popen( - firejail_cmd, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - text=True, - preexec_fn=preexec_fn - ) - - stdout, stderr = process.communicate(timeout=self.timeout_seconds) - exit_code = process.returncode - execution_time = time.time() - start_time - - result = ExecutionResult( - command=command, - exit_code=exit_code, - stdout=stdout, - stderr=stderr, - execution_time=execution_time - ) - - # Rollback on failure if enabled - if result.failed and (enable_rollback if enable_rollback is not None else self.enable_rollback): - self._rollback(session_id) - result.stderr += "\n[ROLLBACK] Changes reverted due to failure" - - self._log_execution(result) - return result - - except subprocess.TimeoutExpired: - process.kill() - result = ExecutionResult( - command=command, - exit_code=-1, - stderr=f"Command timed out after {self.timeout_seconds} seconds", - execution_time=time.time() - start_time - ) - self._log_execution(result) - return result - - except Exception as e: - result = ExecutionResult( - command=command, - exit_code=-1, - stderr=f"Execution error: {str(e)}", - execution_time=time.time() - start_time - ) - self._log_execution(result) - return result - - def _log_execution(self, result: ExecutionResult): - """Log command execution to audit log.""" - log_entry = result.to_dict() - log_entry['type'] = 'execution' - self.audit_log.append(log_entry) - self.logger.info(f"Command executed: {result.command} (exit_code={result.exit_code})") - - def _log_security_event(self, result: ExecutionResult): - """Log security violation.""" - log_entry = result.to_dict() - log_entry['type'] = 'security_violation' - self.audit_log.append(log_entry) - self.logger.warning(f"Security violation: {result.command} - {result.violation}") - - def get_audit_log(self, limit: Optional[int] = None) -> List[Dict[str, Any]]: - """ - Get audit log entries. 
- - Args: - limit: Maximum number of entries to return - - Returns: - List of audit log entries - """ - if limit: - return self.audit_log[-limit:] - return self.audit_log.copy() - - def save_audit_log(self, file_path: Optional[str] = None): - """Save audit log to file.""" - file_path = file_path or self.log_file.replace('.log', '_audit.json') - with open(file_path, 'w') as f: - json.dump(self.audit_log, f, indent=2) - - -def main(): - """CLI entry point for sandbox executor.""" - import argparse - - parser = argparse.ArgumentParser(description='Sandboxed Command Executor') - parser.add_argument('command', help='Command to execute') - parser.add_argument('--dry-run', action='store_true', help='Dry-run mode') - parser.add_argument('--no-rollback', action='store_true', help='Disable rollback') - parser.add_argument('--timeout', type=int, default=300, help='Timeout in seconds') - - args = parser.parse_args() - - executor = SandboxExecutor(timeout_seconds=args.timeout) - - try: - result = executor.execute( - args.command, - dry_run=args.dry_run, - enable_rollback=not args.no_rollback - ) - - if result.blocked: - print(f"Command blocked: {result.violation}", file=sys.stderr) - sys.exit(1) - - if result.stdout: - print(result.stdout) - if result.stderr: - print(result.stderr, file=sys.stderr) - - sys.exit(result.exit_code) - - except CommandBlocked as e: - print(f"Command blocked: {e}", file=sys.stderr) - sys.exit(1) - except Exception as e: - print(f"Error: {e}", file=sys.stderr) - sys.exit(1) - - -if __name__ == '__main__': - main() - diff --git a/tests/test_optimizer.py b/tests/test_optimizer.py new file mode 100644 index 0000000..0d6726e --- /dev/null +++ b/tests/test_optimizer.py @@ -0,0 +1,98 @@ +import unittest +from unittest.mock import MagicMock, patch, mock_open +import os +import time +from cortex.optimizer import CleanupOptimizer, LogManager, TempCleaner, CleanupOpportunity + +class TestCleanupOptimizer(unittest.TestCase): + def setUp(self): + self.optimizer = 
CleanupOptimizer() + # Mock the internal managers to isolate tests + self.optimizer.pm = MagicMock() + self.optimizer.log_manager = MagicMock() + self.optimizer.temp_cleaner = MagicMock() + + def test_scan_aggregates_opportunities(self): + # Setup mocks + self.optimizer.pm.get_cleanable_items.return_value = { + "cache_size_bytes": 1024, + "orphaned_packages": ["pkg1"], + "orphaned_size_bytes": 2048 + } + self.optimizer.log_manager.scan.return_value = CleanupOpportunity( + type="logs", size_bytes=500, description="Old logs", items=[] + ) + self.optimizer.temp_cleaner.scan.return_value = None + + opportunities = self.optimizer.scan() + + self.assertEqual(len(opportunities), 3) # pkg cache, orphans, logs + self.assertEqual(opportunities[0].type, "package_cache") + self.assertEqual(opportunities[1].type, "orphans") + self.assertEqual(opportunities[2].type, "logs") + + def test_get_cleanup_plan(self): + self.optimizer.pm.get_cleanup_commands.side_effect = lambda x: [f"clean {x}"] + self.optimizer.log_manager.get_cleanup_commands.return_value = ["compress logs"] + self.optimizer.temp_cleaner.get_cleanup_commands.return_value = ["clean temp"] + + plan = self.optimizer.get_cleanup_plan() + + expected = ["clean cache", "clean orphans", "compress logs", "clean temp"] + self.assertEqual(plan, expected) + +class TestLogManager(unittest.TestCase): + @patch('os.path.exists', return_value=True) + @patch('glob.glob') + @patch('os.stat') + def test_scan_finds_old_logs(self, mock_stat, mock_glob, mock_exists): + manager = LogManager() + + # Setup mock file + def glob_side_effect(path, recursive=False): + if path.endswith("*.log"): + return ["/var/log/old.log"] + return [] + + mock_glob.side_effect = glob_side_effect + + # Mock stat to return old time + old_time = time.time() - (8 * 86400) # 8 days ago + mock_stat_obj = MagicMock() + mock_stat_obj.st_mtime = old_time + mock_stat_obj.st_size = 100 + mock_stat.return_value = mock_stat_obj + + opp = manager.scan() + + 
self.assertIsNotNone(opp) + self.assertEqual(opp.type, "logs") + self.assertEqual(opp.size_bytes, 100) + self.assertEqual(opp.items, ["/var/log/old.log"]) + +class TestTempCleaner(unittest.TestCase): + @patch('os.path.exists', return_value=True) + @patch('os.walk') + @patch('os.stat') + def test_scan_finds_temp_files(self, mock_stat, mock_walk, mock_exists): + manager = TempCleaner(temp_dirs=["/tmp"]) + + # Setup mock walk + mock_walk.return_value = [("/tmp", [], ["tempfile"])] + + # Mock stat to return old time + old_time = time.time() - (8 * 86400) # 8 days ago + mock_stat_obj = MagicMock() + mock_stat_obj.st_atime = old_time + mock_stat_obj.st_mtime = old_time + mock_stat_obj.st_size = 50 + mock_stat.return_value = mock_stat_obj + + opp = manager.scan() + + self.assertIsNotNone(opp) + self.assertEqual(opp.type, "temp") + self.assertEqual(opp.size_bytes, 50) + +if __name__ == '__main__': + unittest.main()