Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
42 changes: 17 additions & 25 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -56,43 +56,34 @@ phi login

## Quick start

### Single-sequence / single-structure jobs
### Try the tutorial

```bash
# Structure prediction (ESMFold)
phi folding --fasta sequences.fasta

# Complex structure prediction (AlphaFold2 multimer)
phi complex_folding --fasta binder_target.fasta
The fastest way to get started — downloads five example PD-L1 binder structures
and walks you through the full pipeline:

# Sequence design via inverse folding (ProteinMPNN)
phi inverse_folding --pdb design.pdb --num-sequences 20
```bash
phi tutorial
```

### Batch scoring workflow
This fetches the example files, prints step-by-step instructions, and leaves
you ready to run `phi filter`.

### Scoring your own structures

```bash
# 1. Upload a directory of PDB/CIF files
# 1. Upload PDB/CIF files
phi upload ./designs/

# Output:
# dataset_id d7c3a1b2-...
# Dashboard: https://design.dynotx.com/dashboard/datasets/d7c3a1b2-...
# Run a job against this dataset:
# phi folding --dataset-id d7c3a1b2-...
# phi complex_folding --dataset-id d7c3a1b2-...
# phi inverse_folding --dataset-id d7c3a1b2-...
# phi filter --dataset-id d7c3a1b2-... --preset default --wait

# 2. Run the full filter pipeline (inverse folding → folding → complex folding → score)
phi filter --dataset-id d7c3a1b2-... --preset default --wait
# 2. Run the full filter pipeline
phi filter --preset default --wait

# 3. Download results (structures, scores CSV, raw score JSONs)
# 3. View scores and download results
phi scores
phi download --out ./results/
```

After each command, `phi` prints the active dataset and job IDs and a link to
the dashboard:
After each command, `phi` prints the active dataset and a link to the
dashboard:

```
Active: dataset [d7c3a1b2-...] · job [cb4553f5-...]
Expand All @@ -105,6 +96,7 @@ Dashboard: https://design.dynotx.com/dashboard/datasets/d7c3a1b2-...

| Command | Alias | Description |
|---|---|---|
| `phi tutorial` | — | Download example structures and print a step-by-step walkthrough |
| `phi login` | — | Verify API key and print identity |
| `phi upload` | — | Upload PDB/CIF files or a directory |
| `phi fetch` | — | Download a structure from RCSB PDB or AlphaFold DB, crop, and optionally upload |
Expand Down
2 changes: 2 additions & 0 deletions src/phi/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,13 +16,15 @@
)
from phi.commands.research import cmd_notes, cmd_research
from phi.commands.structure import cmd_fetch
from phi.commands.tutorial import cmd_tutorial
from phi.config import _load_state
from phi.display import _C_BLUE, _die, console
from phi.parser import build_parser
from phi.types import PhiApiError

COMMANDS = {
"login": cmd_login,
"tutorial": cmd_tutorial,
"upload": cmd_upload,
"ingest-session": cmd_ingest_session,
"datasets": cmd_datasets,
Expand Down
12 changes: 12 additions & 0 deletions src/phi/commands/models.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,21 @@
from __future__ import annotations

import argparse
import os
from pathlib import Path

from phi.api import _require_key, _submit
from phi.display import _die, _print_status, _print_submission, console
from phi.download import _download_job, _read_fasta
from phi.polling import _poll

# Design (backbone-generation) commands are hidden behind an opt-in env flag.
_DESIGN_ENABLED = os.environ.get("DYNO_ENABLE_DESIGN", "").lower() in {"1", "true", "yes"}


def _require_design_flag() -> None:
    """Exit with a friendly message unless design commands are enabled via env."""
    if _DESIGN_ENABLED:
        return
    _die("This command is not yet available.")


def _run_model_job(job_type: str, params: dict, args: argparse.Namespace) -> None:
from phi.config import POLL_INTERVAL as _INTERVAL
Expand Down Expand Up @@ -92,6 +102,7 @@ def cmd_boltz(args: argparse.Namespace) -> None:


def cmd_rfdiffusion3(args: argparse.Namespace) -> None:
_require_design_flag()
params: dict = {
"num_designs": args.num_designs,
"inference_steps": args.steps,
Expand Down Expand Up @@ -129,6 +140,7 @@ def cmd_rfdiffusion3(args: argparse.Namespace) -> None:


def cmd_boltzgen(args: argparse.Namespace) -> None:
_require_design_flag()
params: dict = {
"protocol": args.protocol,
"num_designs": args.num_designs,
Expand Down
8 changes: 2 additions & 6 deletions src/phi/commands/research.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,8 +111,8 @@ def cmd_notes(args: argparse.Namespace) -> None:
return

content: str = data.get("content") or ""
gcs_url: str | None = data.get("gcs_url")
gcs_uri: str | None = data.get("gcs_uri")
data.get("gcs_url")
data.get("gcs_uri")

if args.out:
out = Path(args.out)
Expand All @@ -124,8 +124,6 @@ def cmd_notes(args: argparse.Namespace) -> None:
dest.parent.mkdir(parents=True, exist_ok=True)
dest.write_text(content, encoding="utf-8")
console.print(f"[{_C_SAND}]Notes saved[/] → {dest}")
if gcs_url:
console.print(f"[dim]Download URL:[/] {gcs_url}")
return

if args.json:
Expand All @@ -141,5 +139,3 @@ def cmd_notes(args: argparse.Namespace) -> None:
padding=(1, 2),
)
)
if gcs_uri:
console.print(f"[dim]Storage URI:[/] {gcs_uri}")
6 changes: 2 additions & 4 deletions src/phi/commands/structure.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,14 +153,12 @@ def _fetch_and_upload(
("Source ", source_url),
("File ", str(out_path)),
("Dataset ", dataset_id),
("GCS URI ", gcs_uri),
]:
console.print(f" [bold]{label}[/bold] {value}")
console.print()
console.print(" [dim]Next steps:[/dim]")
console.print(
f" [dim] phi design --target-pdb-gcs {gcs_uri} --hotspots <A45,A67> --num-designs 50[/dim]"
)
console.print(f" [dim] phi upload {out_path}[/dim]")
console.print(" [dim] phi filter --preset default --wait[/dim]")
console.rule()


Expand Down
88 changes: 88 additions & 0 deletions src/phi/commands/tutorial.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
from __future__ import annotations

import argparse
import urllib.request
from pathlib import Path

from phi.api import _request
from phi.config import _save_state, _ssl_context
from phi.display import _C_BLUE, _C_SAND, _die, console


def cmd_tutorial(args: argparse.Namespace) -> None:
    """Download the example tutorial dataset and print a scoring walkthrough.

    Fetches the tutorial manifest from the API, downloads every listed file
    into ``args.out``, caches the manifest's ``dataset_id`` so ``phi filter``
    needs no extra flags, and prints step-by-step instructions.

    Exits via ``_die`` on a network/auth failure, an empty manifest, or any
    failed file download.
    """
    out = Path(args.out)

    # ── 1. Fetch manifest (standard Clerk JWT auth, same as all endpoints) ───
    console.print("[dim]Fetching tutorial dataset …[/]")
    try:
        manifest = _request("GET", "/tutorial")
    except Exception as exc:  # broad on purpose: any transport error is fatal here
        _die(
            f"Could not reach the tutorial endpoint: {exc}\n"
            " Check your connection and API key, then try again."
        )

    files: list[dict] = manifest.get("files", [])
    dataset_id: str | None = manifest.get("dataset_id")
    message: str | None = manifest.get("message")

    if not files:
        _die("No tutorial files returned by the API.")

    # ── 2. Download each file (plain HTTP — signed URLs are self-authenticating)
    out.mkdir(parents=True, exist_ok=True)
    console.print(f" Downloading {len(files)} file(s) to [{_C_BLUE}]{out}/[/] …\n")

    for entry in files:
        filename: str = entry["filename"]
        url: str = entry["url"]
        dest = out / filename
        # Filenames in the manifest may contain subdirectories — create them.
        dest.parent.mkdir(parents=True, exist_ok=True)
        try:
            req = urllib.request.Request(url)
            with urllib.request.urlopen(req, context=_ssl_context()) as resp:
                dest.write_bytes(resp.read())
            # BUG FIX: report the actual file, not a "(unknown)" placeholder.
            console.print(f" [bold {_C_SAND}]✓[/] {filename}")
        except Exception as exc:
            _die(f"Failed to download {filename}: {exc}")

    # ── 3. Cache dataset_id so phi filter needs zero extra flags ─────────────
    if dataset_id:
        _save_state({"dataset_id": dataset_id})
        console.print(
            f"\n[dim]dataset_id [{_C_BLUE}]{dataset_id}[/] cached — "
            f"run [bold]phi filter[/] to start scoring.[/]"
        )

    # ── 4. Print step-by-step guide ──────────────────────────────────────────
    if message:
        console.print(f"\n[dim]{message}[/]")

    # The upload step is redundant when the server already created a dataset.
    if dataset_id:
        upload_step = "[dim] (skipped — dataset already ready)[/]"
    else:
        upload_step = f" [{_C_SAND}]phi upload {out}/[/]"

    console.print(f"""
[bold]── Tutorial: PD-L1 binder scoring pipeline ──────────────────[/]

You have {len(files)} example binder structures in [{_C_BLUE}]{out}/[/].

[bold]Step 1 — Upload[/]
{upload_step}

[bold]Step 2 — Run the filter pipeline[/]
 [{_C_SAND}]phi filter --preset default --wait[/]

 Runs: ProteinMPNN → ESMFold → AlphaFold2 → score
 Typical runtime: 10–30 min for {len(files)} structures.

[bold]Step 3 — View scores[/]
 [{_C_SAND}]phi scores[/]

[bold]Step 4 — Download results[/]
 [{_C_SAND}]phi download --out ./results[/]

[bold]Dashboard[/]
 [{_C_BLUE}]https://design.dynotx.com/dashboard[/]
""")
3 changes: 2 additions & 1 deletion src/phi/display.py
Original file line number Diff line number Diff line change
Expand Up @@ -272,7 +272,8 @@ def _print_status(s: dict) -> None:
table.add_column("filename", style=_C_BLUE, no_wrap=True)
table.add_column("type", style="dim")
for f in files[:10]:
fname = f.get("filename") or f.get("gcs_url", "?")
raw = f.get("filename") or f.get("gcs_url", "?")
fname = raw.split("/")[-1] if raw.startswith("gs://") else raw
ftype = f.get("artifact_type", "")
table.add_row(fname, ftype)
if len(files) > 10:
Expand Down
30 changes: 21 additions & 9 deletions src/phi/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,26 +4,27 @@
from phi.config import _FILTER_PRESETS, POLL_INTERVAL

_CLI_EPILOG = """\
Quick start:
phi tutorial # download example structures + print step-by-step guide
phi filter --preset default --wait
phi scores
phi download --out ./results

Fetch and prepare target structures:
phi fetch --pdb 4ZQK --chain A --residues 56-290 --out target.pdb
phi fetch --uniprot Q9NZQ7 --trim-low-confidence 70 --upload

Design (backbone generation):
phi design --target-pdb target.pdb --hotspots A45,A67 --num-designs 50
phi design --length 80 --num-designs 20
phi boltzgen --yaml design.yaml --protocol protein-anything --num-designs 10

Validation (fold + score):
phi esmfold --fasta sequences.fasta
phi alphafold --fasta complex.fasta
phi proteinmpnn --pdb design.pdb --num-sequences 20
phi esm2 --fasta sequences.fasta
phi boltz --fasta complex.fasta

Batch filter pipeline (100-50,000 designs):
phi upload --dir ./designs/ --file-type pdb
phi filter --dataset-id <id> --preset default --wait
phi download --out ./results
Batch filter pipeline:
phi upload ./designs/
phi filter --preset default --wait
phi download --out ./results

Dataset management:
phi datasets # list your datasets
Expand Down Expand Up @@ -253,6 +254,17 @@ def build_parser() -> argparse.ArgumentParser:
)
sub = root.add_subparsers(dest="command", required=True)

p = sub.add_parser(
"tutorial",
help="Download example structures and print a step-by-step scoring walkthrough",
)
p.add_argument(
"--out",
metavar="DIR",
default="examples",
help="Directory to download example files into (default: ./examples)",
)

p = sub.add_parser("login", help="Verify API key and print connection + identity details")
p.add_argument("--json", action="store_true")

Expand Down
Loading