17 changes: 8 additions & 9 deletions src/sampleworks/utils/guidance_script_utils.py
@@ -1,6 +1,7 @@
from __future__ import annotations

import argparse
import json
import os
import pickle
import traceback
@@ -387,6 +388,9 @@ def run_guidance(
log_path = getattr(args, "log_path", None) or os.path.join(args.output_dir, "run.log")
os.makedirs(os.path.dirname(log_path) or ".", exist_ok=True)

# just in case log_path does not go to args.output_dir, make sure the latter exists
os.makedirs(args.output_dir, exist_ok=True)

# separate logs for each guidance run
handle = logger.add(
log_path, level="INFO", filter=lambda rec: rec["extra"].get("special", False) is True
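
The per-run logging above relies on loguru's filterable handlers: a handle is added for one run and removed afterwards so each run gets its own file. A minimal sketch of that pattern, assuming the run body logs through logger.bind(special=True) and that the handle is removed once the run finishes (details outside this hunk):

    import os
    from loguru import logger

    def run_with_own_log(output_dir: str, log_path: str | None = None) -> None:
        log_path = log_path or os.path.join(output_dir, "run.log")
        os.makedirs(os.path.dirname(log_path) or ".", exist_ok=True)
        # log_path may point outside output_dir, so create output_dir separately
        os.makedirs(output_dir, exist_ok=True)

        # only records tagged with extra["special"] reach this file
        handle = logger.add(
            log_path,
            level="INFO",
            filter=lambda rec: rec["extra"].get("special", False) is True,
        )
        try:
            logger.bind(special=True).info("run started")
            # ... perform the guidance run here ...
        finally:
            logger.remove(handle)  # detach so the next run writes its own file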
@@ -578,17 +582,12 @@ def run_guidance_job_queue(job_queue_path: str) -> list[JobResult]:
job_results = []
for i, job in enumerate(job_queue):
logger.info(f"Running job {i + 1}/{len(job_queue)}: {job}")
# TODO: I think it is safe now to re-use the wrapper, it might save us some time.
# The model wrapper can persist state across runs, so we need to re-initialize it each run.
# if job.model_checkpoint is None:
# raise ValueError(
# "Running guidance requires that you specify a model checkpoint, not None"
# )
# device, model_wrapper = get_model_and_device(
# str(device), job.model_checkpoint, job.model, job.method, model_wrapper.model
# )

job_result = run_guidance(job, job.guidance_type, model_wrapper, device)
# write out the job parameters to a JSON file in the same directory as the refined.cif file
with open(Path(job_result.output_dir) / "job_metadata.json", "w") as fp:
json.dump(job.__dict__, fp)

job_results.append(job_result)
torch.cuda.empty_cache() # just in case

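The queue loop writes each job's parameters next to that job's outputs via json.dump(job.__dict__, fp), which assumes every field on the job object is JSON-serializable. A minimal sketch of writing and reading that metadata back (helper names and the default=str fallback are assumptions, not part of the PR):

    import json
    from pathlib import Path

    def write_job_metadata(output_dir: str, job_params: dict) -> Path:
        # mirrors the loop's json.dump(job.__dict__, fp); default=str guards
        # against non-serializable fields such as Path or enum values
        path = Path(output_dir) / "job_metadata.json"
        with open(path, "w") as fp:
            json.dump(job_params, fp, default=str)
        return path

    def load_job_metadata(output_dir: str) -> dict:
        with open(Path(output_dir) / "job_metadata.json") as fp:
            return json.load(fp)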