Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions src/conductor/cli/run.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,13 @@ def generate_log_path(workflow_name: str) -> Path:
Returns:
Path to the auto-generated log file.
"""
import secrets

timestamp = time.strftime("%Y%m%d-%H%M%S")
# Append random suffix to avoid filename collisions
# when multiple runs start in the same second
suffix = secrets.token_hex(4)
timestamp = f"{timestamp}-{suffix}"
path = Path(tempfile.gettempdir()) / "conductor" / f"conductor-{workflow_name}-{timestamp}.log"
path.parent.mkdir(parents=True, exist_ok=True)
return path
Expand Down
6 changes: 6 additions & 0 deletions src/conductor/engine/checkpoint.py
Original file line number Diff line number Diff line change
Expand Up @@ -167,7 +167,13 @@ def save_checkpoint(
workflow_hash = "sha256:unknown"

# Build checkpoint dict
import secrets

timestamp = time.strftime("%Y%m%d-%H%M%S")
# Append random suffix to avoid filename collisions
# when multiple runs start in the same second
suffix = secrets.token_hex(4)
timestamp = f"{timestamp}-{suffix}"
created_at = datetime.now(UTC).isoformat()
workflow_name = workflow_path.stem

Expand Down
6 changes: 6 additions & 0 deletions src/conductor/engine/event_log.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,13 @@ class EventLogSubscriber:
"""

def __init__(self, workflow_name: str) -> None:
import secrets

ts = time.strftime("%Y%m%d-%H%M%S")
# Append random suffix to avoid filename collisions
# when multiple runs start in the same second
suffix = secrets.token_hex(4)
ts = f"{ts}-{suffix}"
self._path = (
Path(tempfile.gettempdir())
/ "conductor"
Expand Down
20 changes: 20 additions & 0 deletions tests/test_engine/test_event_log.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,26 @@ def test_safe_after_close(self, tmp_path, monkeypatch):
sub.on_event(WorkflowEvent(type="late", timestamp=time.time(), data={}))
sub.close() # Double close should be safe

def test_filenames_unique_for_simultaneous_starts(self, tmp_path, monkeypatch):
    """Subscribers for the same workflow, created back-to-back, must not share a log path."""
    monkeypatch.setenv("TMPDIR", str(tmp_path))
    subscribers = [EventLogSubscriber("same-workflow") for _ in range(3)]
    paths = []
    seen = set()
    for subscriber in subscribers:
        paths.append(subscriber.path)
        seen.add(subscriber.path)
    # Duplicates collapse in the set, so a size mismatch reveals a collision
    # even when all subscribers were created within the same clock second.
    assert len(seen) == len(paths), f"Expected unique paths, got {paths}"
    for subscriber in subscribers:
        subscriber.close()

def test_filename_contains_random_suffix(self, tmp_path, monkeypatch):
    """The generated log filename must end in a timestamp plus an 8-hex-char suffix."""
    import re

    monkeypatch.setenv("TMPDIR", str(tmp_path))
    subscriber = EventLogSubscriber("ts-test")
    # Expected tail: YYYYMMDD-HHMMSS-<secrets.token_hex(4)>.events.jsonl
    tail_pattern = re.compile(r"\d{8}-\d{6}-[0-9a-f]{8}\.events\.jsonl$")
    assert tail_pattern.search(subscriber.path.name), (
        f"Filename lacks random suffix: {subscriber.path.name}"
    )
    subscriber.close()

def test_integrates_with_emitter(self, tmp_path, monkeypatch):
from conductor.events import WorkflowEventEmitter

Expand Down
Loading