Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
119 changes: 114 additions & 5 deletions dumpyarabot/arq_jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, Any, Optional
from urllib.parse import urlsplit, urlunsplit

from rich.console import Console

Expand All @@ -33,13 +34,63 @@
re.compile(r'(token[=:]\s*)\S+', re.IGNORECASE),
re.compile(r'(password[=:]\s*)\S+', re.IGNORECASE),
]
_URL_PATTERN = re.compile(r'https?://[^\s<>"\']+', re.IGNORECASE)


def _sanitize_traceback(tb_str: str) -> str:
    """Remove sensitive tokens, credentials and URL secrets from log text.

    Args:
        tb_str: Raw traceback (or any free-form log) string.

    Returns:
        The same text with ``token=…``/``password=…`` values replaced by
        ``[REDACTED]`` and every embedded URL stripped of credentials and
        query parameters.
    """
    for pattern in _SENSITIVE_PATTERNS:
        tb_str = pattern.sub(r'\1[REDACTED]', tb_str)
    # Sanitize embedded URLs. Trailing punctuation is stripped so sentence
    # punctuation adjacent to a URL is not treated as part of it.
    # NOTE(review): the stripped punctuation is dropped from the output, not
    # re-appended — presumably acceptable for logs; confirm if exact text matters.
    return _URL_PATTERN.sub(
        lambda match: _sanitize_url_for_log(match.group(0).rstrip(".,;:!?)\"]}'")),
        tb_str,
    )


def _sanitize_text(value: Any) -> str:
    """Stringify an arbitrary value and scrub any secrets it may contain."""
    as_text = str(value)
    return _sanitize_traceback(as_text)


def _derive_last_successful_step(progress_history: list[Dict[str, Any]], failed_step: Optional[str] = None) -> Optional[str]:
"""Return the most recent successful step before a failed step, if known."""
if not progress_history:
return None

if failed_step:
for entry in reversed(progress_history):
message = entry.get("message")
if message and message != failed_step:
return message
return None

latest = progress_history[-1].get("message")
return latest if latest else None


def _sanitize_url_for_log(url_value: Any) -> str:
"""Redact credentials and query parameters from logged URLs."""
url = str(url_value or "unknown")
try:
parts = urlsplit(url)
except ValueError:
return url

try:
hostname = parts.hostname or ""
port = parts.port
username = parts.username
except ValueError:
return url

netloc = hostname
if port:
netloc = f"{netloc}:{port}"
if username:
netloc = f"[REDACTED]@{netloc}"

sanitized = urlunsplit((parts.scheme, netloc, parts.path, "", ""))
return sanitized or url


class PeriodicTimerUpdate:
Expand Down Expand Up @@ -135,6 +186,44 @@ async def _send_status_update(
)


def _build_failure_log_text(job_data: Dict[str, Any]) -> str:
    """Assemble a plain-text failure log from job metadata.

    Args:
        job_data: Job record containing ``job_id``, ``worker_id``,
            ``dump_args`` (with the download ``url``) and ``metadata``
            (timestamps, ``progress_history`` and ``error_context``).

    Returns:
        A newline-joined, secret-sanitized log suitable for attaching as a
        text file to a failure notification.
    """
    lines = ["=== DUMPYARABOT JOB FAILURE LOG ==="]

    lines.append(f"Job ID: {job_data.get('job_id', 'unknown')}")
    lines.append(f"Worker: {job_data.get('worker_id', 'unknown')}")
    url = (job_data.get("dump_args") or {}).get("url", "unknown")
    lines.append(f"URL: {_sanitize_url_for_log(url)}")

    metadata = job_data.get("metadata") or {}
    lines.append(f"Started: {metadata.get('start_time', 'unknown')}")
    lines.append(f"Failed: {metadata.get('end_time', 'unknown')}")

    lines.append("\n=== PROGRESS HISTORY ===")
    for entry in metadata.get("progress_history") or []:
        ts = entry.get("timestamp", "")
        msg = entry.get("message", "")
        # Percentage may be missing, None, or a non-numeric value.
        try:
            pct_display = f"{float(entry.get('percentage', 0) or 0):.0f}%"
        except (TypeError, ValueError):
            pct_display = "?%"
        lines.append(f"[{ts}] ({pct_display}) {_sanitize_text(msg)}")

    error_ctx = metadata.get("error_context") or {}
    if error_ctx:
        lines.append("\n=== ERROR CONTEXT ===")
        lines.append(f"Failed at: {_sanitize_text(error_ctx.get('current_step', 'unknown'))}")
        if error_ctx.get('last_successful_step'):
            lines.append(f"Last successful: {_sanitize_text(error_ctx['last_successful_step'])}")
        lines.append(f"Error message: {_sanitize_text(error_ctx.get('message', 'unknown'))}")
        tb = error_ctx.get("traceback")
        if tb:
            lines.append("\n=== TRACEBACK (sanitized) ===")
            lines.append(_sanitize_traceback(tb))

    return "\n".join(lines)


async def _send_failure_notification(job_data: Dict[str, Any], error_details: str) -> None:
"""Send a failure notification using existing message queue - PRESERVING ALL TELEGRAM FEATURES."""

Expand All @@ -143,7 +232,8 @@ async def _send_failure_notification(job_data: Dict[str, Any], error_details: st
metadata = job_data.get("metadata") or {}
progress_history = metadata.get("progress_history") or []
last_progress = progress_history[-1] if progress_history else {}
last_step = last_progress.get("message", "Unknown step")
error_ctx = metadata.get("error_context") or {}
last_step = error_ctx.get("current_step") or last_progress.get("message", "Unknown step")
last_pct = last_progress.get("percentage", 0.0)

failure_progress = {
Expand Down Expand Up @@ -201,6 +291,22 @@ async def _send_failure_notification(job_data: Dict[str, Any], error_details: st

console.print(f"[green]Sent failure notification for job {job_data.get('job_id', 'unknown')}[/green]")

# Send failure log as a text file for debugging
try:
log_text = _build_failure_log_text(job_data)
log_bytes = log_text.encode("utf-8")
job_id_short = str(job_data.get("job_id", "unknown"))[:8]
filename = f"dump_failure_{job_id_short}.txt"
target_chat = primary_allowed_chat if is_moderated_request and primary_allowed_chat is not None else chat_id
await message_queue.send_document(
chat_id=target_chat,
content=log_bytes,
filename=filename,
caption="Failure log",
)
except Exception as log_err:
console.print(f"[yellow]Could not queue failure log file: {log_err}[/yellow]")

except Exception as e:
console.print(f"[red]Failed to send failure notification: {e}[/red]")
console.print_exception()
Expand Down Expand Up @@ -458,8 +564,11 @@ async def _on_download_progress(dp: DownloadProgress) -> None:
"end_time": datetime.now(timezone.utc).isoformat(),
"error_context": {
"message": str(e),
"current_step": progress.get("current_step", "Unknown step"),
"last_successful_step": progress_history[-1]["message"] if progress_history else "None",
"current_step": progress_history[-1].get("message", "Unknown step") if progress_history else "Unknown step",
"last_successful_step": _derive_last_successful_step(
progress_history,
progress_history[-1].get("message") if progress_history else None,
),
"failure_time": datetime.now(timezone.utc).isoformat(),
"traceback": _sanitize_traceback(traceback.format_exc())
}
Expand Down Expand Up @@ -488,7 +597,7 @@ async def _on_download_progress(dp: DownloadProgress) -> None:
"error_context": {
"message": f"Critical error: {str(e)}",
"current_step": "Critical failure",
"last_successful_step": progress_history[-1]["message"] if progress_history else "None",
"last_successful_step": _derive_last_successful_step(progress_history) or "None",
"failure_time": datetime.now(timezone.utc).isoformat(),
"traceback": _sanitize_traceback(traceback.format_exc())
}
Expand Down
47 changes: 32 additions & 15 deletions dumpyarabot/firmware_extractor.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,9 +165,16 @@ async def _extract_fsg_partition(self):
safe_remove_file(fsg_file)
console.print("[green]Successfully extracted fsg.mbn[/green]")

async def process_boot_images(self) -> None:
"""Process boot images (boot.img, vendor_boot.img, etc.)."""
boot_images = ["init_boot.img", "vendor_kernel_boot.img", "vendor_boot.img", "boot.img", "dtbo.img"]
async def process_boot_images(self) -> None:
"""Process boot images (boot.img, vendor_boot.img, etc.)."""
boot_images = [
"init_boot.img",
"vendor_kernel_boot.img",
"vendor_boot.img",
"boot.img",
"recovery.img",
"dtbo.img",
]

# Move boot images to work directory root if they're in subdirectories
for image_name in boot_images:
Expand All @@ -191,12 +198,14 @@ async def _process_single_boot_image(self, image_path: Path):

console.print(f"[blue]Processing {image_name}...[/blue]")

if image_name == "boot.img":
await self._process_boot_img(image_path, output_dir)
elif image_name in ["vendor_boot.img", "vendor_kernel_boot.img", "init_boot.img"]:
await self._process_vendor_boot_img(image_path, output_dir)
elif image_name == "dtbo.img":
await self._process_dtbo_img(image_path, output_dir)
if image_name == "boot.img":
await self._process_boot_img(image_path, output_dir)
elif image_name == "recovery.img":
await self._process_recovery_img(image_path, output_dir)
elif image_name in ["vendor_boot.img", "vendor_kernel_boot.img", "init_boot.img"]:
await self._process_vendor_boot_img(image_path, output_dir)
elif image_name == "dtbo.img":
await self._process_dtbo_img(image_path, output_dir)

async def _process_boot_img(self, image_path: Path, output_dir: Path):
"""Process boot.img with comprehensive analysis."""
Expand All @@ -218,16 +227,25 @@ async def _process_boot_img(self, image_path: Path, output_dir: Path):
# Extract and process device tree blobs
await self._extract_device_trees(image_path, output_dir)

async def _process_vendor_boot_img(self, image_path: Path, output_dir: Path):
"""Process vendor_boot.img or similar images."""
output_dir.mkdir(exist_ok=True)
async def _process_vendor_boot_img(self, image_path: Path, output_dir: Path):
"""Process vendor_boot.img or similar images."""
output_dir.mkdir(exist_ok=True)

# Extract contents if using alternative dumper
if self.firmware_extractor_path.exists():
await self._unpack_boot_image(image_path, output_dir)

# Extract device tree blobs
await self._extract_device_trees(image_path, output_dir)
# Extract device tree blobs
await self._extract_device_trees(image_path, output_dir)

async def _process_recovery_img(self, image_path: Path, output_dir: Path):
    """Process recovery.img by unpacking the image and extracting its ramdisk."""
    output_dir.mkdir(exist_ok=True)

    # Unpack only when the external firmware-extractor tool is available;
    # without it, fall through to device-tree extraction alone.
    if self.firmware_extractor_path.exists():
        await self._unpack_boot_image(image_path, output_dir)

    # Device-tree extraction runs unconditionally, mirroring the other
    # boot-image handlers in this class.
    await self._extract_device_trees(image_path, output_dir)

async def _process_dtbo_img(self, image_path: Path, output_dir: Path):
"""Process dtbo.img."""
Expand Down Expand Up @@ -447,4 +465,3 @@ async def _process_oppo_images(self):
console.print(f"[green]Extracted {img_file.name}[/green]")
else:
console.print(f"[yellow]Failed to extract {img_file.name}[/yellow]")

12 changes: 6 additions & 6 deletions dumpyarabot/handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ async def dump(
else:
initial_text = f" *Firmware Dump Queued*\n\n *URL:* `{url}`\n"

initial_text += f"🆔 *Job ID:* `{job.job_id}`\n"
initial_text += f"*Job ID:* `{job.job_id}`\n"

Comment thread
deadman96385 marked this conversation as resolved.
# Format options
options_list = []
Expand All @@ -118,7 +118,7 @@ async def dump(

initial_text += f"\n{generate_progress_bar(None)}\n"
initial_text += " Queued for processing...\n\n"
initial_text += "*Elapsed:* 0s\n"
initial_text += "*Elapsed:* 0s\n"
initial_text += " *Worker:* Waiting for assignment...\n"

# Send initial message directly to get real Telegram message ID
Expand Down Expand Up @@ -231,17 +231,17 @@ async def cancel_dump(update: Update, context: ContextTypes.DEFAULT_TYPE) -> Non

if cancelled:
escaped_job_id = escape_markdown(job_id)
response_message = f" *Job cancelled successfully*\n\n🆔 *Job ID:* `{escaped_job_id}`\n\nThe dump job has been removed from the queue or stopped if it was in progress."
response_message = f" *Job cancelled successfully*\n\n*Job ID:* `{escaped_job_id}`\n\nThe dump job has been removed from the queue or stopped if it was in progress."
console.print(f"[green]Successfully cancelled job {job_id}[/green]")
else:
escaped_job_id = escape_markdown(job_id)
response_message = f" *Job not found*\n\n🆔 *Job ID:* `{escaped_job_id}`\n\nThe job was not found in the queue or may have already completed."
response_message = f" *Job not found*\n\n*Job ID:* `{escaped_job_id}`\n\nThe job was not found in the queue or may have already completed."
except Exception as e:
console.print(f"[red]Error processing cancel request: {e}[/red]")
console.print_exception()
escaped_job_id = escape_markdown(job_id)
escaped_error = escape_markdown(str(e))
response_message = f" *Error cancelling job*\n\n🆔 *Job ID:* `{escaped_job_id}`\n\nError: {escaped_error}"
response_message = f" *Error cancelling job*\n\n*Job ID:* `{escaped_job_id}`\n\nError: {escaped_error}"

await message_queue.send_reply(
chat_id=chat.id,
Expand Down Expand Up @@ -407,7 +407,7 @@ async def restart(update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
"• Reload configuration and code\n"
"• Clear in-memory state\n"
"• Restart with latest changes\n\n"
"*This confirmation will expire in 30 seconds*"
"*This confirmation will expire in 30 seconds*"
)

# Convert keyboard to dict for queue serialization
Expand Down
10 changes: 2 additions & 8 deletions dumpyarabot/message_formatting.py
Original file line number Diff line number Diff line change
Expand Up @@ -387,13 +387,6 @@ def format_build_summary_info(
Returns:
Formatted build summary
"""
# Format result with emoji
result_emoji = {
"SUCCESS": "",
"FAILURE": "",
"UNSTABLE": "",
"ABORTED": "⏹",
}.get(result, "")

# Build summary parts
escaped_job_name = escape_markdown(job_name)
Expand All @@ -402,7 +395,7 @@ def format_build_summary_info(
summary_parts = [
f"*Job:* `{escaped_job_name}`",
f"*Build:* `#{escaped_build_number}`",
f"*Result:* {result_emoji} {result or 'Unknown'}"
f"*Result:* {result or 'Unknown'}"
]
Comment thread
deadman96385 marked this conversation as resolved.

if timestamp_str:
Expand Down Expand Up @@ -682,3 +675,4 @@ def format_time_ago(timestamp) -> str:
return f"{seconds // 3600}h ago"
else:
return f"{seconds // 86400}d ago"

Loading