2 changes: 2 additions & 0 deletions cmd/vectorize.go
@@ -111,8 +111,10 @@ func VectorizeOutputs(ioPath string, toolCid string, outputDir string) (map[stri
}
}

cidPath := fmt.Sprintf("%s/%s", fileOutput.IPFS, fileOutput.FilePath)
ov.FilePaths = append(ov.FilePaths, absoluteFilePath)
ov.CIDs = append(ov.CIDs, fileOutput.IPFS)
ov.CidPaths = append(ov.CidPaths, cidPath)
outputMap[key] = ov
}
}
1 change: 1 addition & 0 deletions internal/ipwl/io.go
@@ -157,4 +157,5 @@ func PrintIOGraphStatus(ioList []IO) {
type OutputValues struct {
FilePaths []string `json:"filePaths"`
CIDs []string `json:"cids"`
CidPaths []string `json:"cidPaths"`
}
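Taken together, the two Go changes above give every vectorized output a `cidPaths` list alongside the existing `filePaths` and `cids`, where each entry joins the output's CID with its file path. A minimal sketch of how a caller might consume it, assuming the EquiBind example from `python/dev/example.py` below (the `best_docked_small_molecule` key comes from that example):

```
vectors = plex_vectorize(io_file_path, CoreTools.EQUIBIND.value, plex_path=plex_path)
best = vectors['best_docked_small_molecule']
for local_path, cid, cid_path in zip(best['filePaths'], best['cids'], best['cidPaths']):
    print(local_path)  # absolute path on the local filesystem
    print(cid)         # bare IPFS CID of the output
    print(cid_path)    # "<cid>/<file path>", as built in VectorizeOutputs
```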
7 changes: 7 additions & 0 deletions python/README.md
@@ -23,3 +23,10 @@ export PLAT_NAME=win_amd64
python setup.py bdist_wheel --plat-name win_amd64
twine upload dist/*
```

# Dev Setup
```
cd python/dev
pip install -e ../
python example.py
```
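`pip install -e ../` installs the package in editable mode, so changes under `python/src/plex` are picked up without reinstalling. A minimal import check before running the example, only to verify the install (it does not exercise the plex binary):

```
from plex import CoreTools, ScatteringMethod
print(CoreTools.EQUIBIND.value, ScatteringMethod.CROSS_PRODUCT.value)
```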
30 changes: 30 additions & 0 deletions python/dev/example.py
@@ -0,0 +1,30 @@
import os

from plex import CoreTools, ScatteringMethod, plex_init, plex_run, plex_vectorize, plex_mint

plex_dir = os.path.dirname(os.path.dirname(os.getcwd()))
plex_path = os.path.join(plex_dir, "plex")
jobs_dir = os.path.join(plex_dir, "jobs")
test_data_dir = os.path.join(plex_dir, "testdata")

print(f"Using plex_path, {plex_path}, if this looks incorrect then make sure you are running from the python/dev directory")

small_molecules = [f"{test_data_dir}/binding/abl/ZINC000003986735.sdf", f"{test_data_dir}/binding/abl/ZINC000019632618.sdf"]
proteins = [f"{test_data_dir}/binding/abl/7n9g.pdb"]

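# Initialize an IO JSON for EquiBind, pairing every small molecule with every protein (cross product)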
initial_io_cid = plex_init(
CoreTools.EQUIBIND.value,
ScatteringMethod.CROSS_PRODUCT.value,
plex_path=plex_path,
small_molecule=small_molecules,
protein=proteins)

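# Run the job; results are written under jobs_dir, and the completed IO JSON CID and local path are returned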
completed_io_cid, io_file_path = plex_run(initial_io_cid, output_dir=jobs_dir, plex_path=plex_path)

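# Vectorize the completed IO into per-output lists of local file paths and CIDs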
vectors = plex_vectorize(io_file_path, CoreTools.EQUIBIND.value, plex_path=plex_path)

print(vectors)
print(vectors['best_docked_small_molecule']['filePaths'])
print(vectors['best_docked_small_molecule']['cids'])

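# Mint an NFT referencing the completed IO JSON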
plex_mint(completed_io_cid, plex_path=plex_path)
125 changes: 125 additions & 0 deletions python/dev/rfdiffusion.py
@@ -0,0 +1,125 @@

import os
import json
import requests

from tempfile import TemporaryDirectory, NamedTemporaryFile

from plex import CoreTools, ScatteringMethod, plex_init, plex_run, plex_vectorize, plex_mint, plex_upload

plex_dir = os.path.dirname(os.path.dirname(os.getcwd()))
plex_path = os.path.join(plex_dir, "plex")
jobs_dir = os.path.join(plex_dir, "jobs")


def move_from_file_url_to_ipfs(url: str) -> tuple:
# Create a temporary directory using the `with` statement, so it's automatically cleaned up when we're done
with TemporaryDirectory() as tmp_dir:
# Get the file name from the url
file_name = url.split('/')[-1]

# Create the path to save the file
save_path = os.path.join(tmp_dir, file_name)

# Send an HTTP request to the url
response = requests.get(url, stream=True)

if response.status_code == 200:
# If the request is successful, open the file in write mode and download the file
with open(save_path, 'wb') as f:
for chunk in response.iter_content(chunk_size=1024):
if chunk:
f.write(chunk)
print(f"File downloaded successfully and saved as {save_path}")
cid = plex_upload(save_path, plex_path=plex_path)
else:
print(f"Failed to download file. HTTP Response Code: {response.status_code}")
cid = ""
return cid, file_name

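# Download the example 6vja structure used by the RFdiffusion tool and pin it to IPFS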
url = 'https://raw.githubusercontent.com/labdao/plex/447-add-rfdiffusion-to-plex/tools/rfdiffusion/6vja_stripped.pdb'
protein_6vja_cid, protein_6vja_file_name = move_from_file_url_to_ipfs(url)

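# Tool definition for hotspot-conditioned protein binder backbone design with RFdiffusion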
hotspot_rfdiffusion_tool = {
"class": "CommandLineTool",
"name": "rfdiffusion_hotspot",
"description": "design protein binders; generally useful for conditional generation of protein backbones",
"baseCommand": ["/bin/bash", "-c"],
"arguments": [
"source activate SE3nv && python3 /app/scripts/run_inference.py 'contigmap.contigs=[$(inputs.motif.default) $(inputs.binder_length_min.default)-$(inputs.binder_length_max.default)]' $(inputs.hotspot.default) inference.input_pdb=$(inputs.protein.filepath) inference.output_prefix=/outputs/$(inputs.protein.basename)_backbone inference.num_designs=$(inputs.number_of_designs.default) denoiser.noise_scale_ca=0 denoiser.noise_scale_frame=0;"
],
"dockerPull": "public.ecr.aws/p7l9w5o7/rfdiffusion:latest@sha256:0a6ff53004958ee5e770b0b25cd7f270eaf9fc285f6e91f17ad4024d2cc4ea91",
"gpuBool": True,
"networkBool": False,
"inputs": {
"protein": {
"type": "File",
"item": "",
"glob": ["*.pdb"]
},
"motif": {
"type": "string",
"item": "",
"default": "D46-200/0"
},
"hotspot": {
"type": "string",
"item": "",
"default": "'ppi.hotspot_res=[D170, D171, D172, D173, D174, D76, D161]'"
},
"binder_length_min": {
"type": "int",
"item": "",
"default": "50"
},
"binder_length_max": {
"type": "int",
"item": "",
"default": "100"
},
"number_of_designs": {
"type": "int",
"item": "",
"default": "10"
}
},
"outputs": {
"designed_backbones": {
"type": "Array",
"item": "File",
"glob": ["*_backbone_*.pdb"]
},
"first_designed_backbone": {
"type": "File",
"item": "",
"glob": ["*_backbone_0.pdb"]
}
}
}

with NamedTemporaryFile(suffix=".json", delete=False, mode='w') as temp_file:
# Use json.dump to write the dictionary to the file
json.dump(hotspot_rfdiffusion_tool, temp_file)
print(f"Temporary file saved as {temp_file.name}")

# flush data to disk
temp_file.flush()
hotspot_rfdiffusion_tool_cid = plex_upload(temp_file.name, wrap_file=False, plex_path=plex_path)
print(f"Tool saved to IPFS as {hotspot_rfdiffusion_tool_cid}")

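# Initialize the IO JSON with the uploaded tool CID and the CID-addressed protein input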
initial_io_cid = plex_init(
hotspot_rfdiffusion_tool_cid,
plex_path=plex_path,
protein=[f'{protein_6vja_cid}/{protein_6vja_file_name}'],
)

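# CACHE short-circuits the run with a hard-coded completed IO CID; set CACHE = False to run the GPU job end to end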
CACHE = True
if CACHE:
completed_io_cid = 'QmbHneUZZpNh24is1uTnCpMYKRWStgEqAaYL4aDjN6tNzQ'
io_cid_file_path = ''
else:
completed_io_cid, io_cid_file_path = plex_run(initial_io_cid, plex_path=plex_path)

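# Vectorize the completed IO; cidPaths is the field added in this PR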
vectors = plex_vectorize(completed_io_cid, hotspot_rfdiffusion_tool_cid, plex_path=plex_path)

print(vectors['first_designed_backbone']['cidPaths'])
4 changes: 2 additions & 2 deletions python/setup.py
@@ -16,7 +16,7 @@ def run(self):
# Retrieve platform from environment variable
plat_name = os.environ['PLAT_NAME']

current_binary_version = "0.8.2"
current_binary_version = "0.8.3"
# map plat_name to go_bin_url
urls = {
"darwin_x86_64": f"https://github.com/labdao/plex/releases/download/v{current_binary_version}/plex_{current_binary_version}_darwin_amd64.tar.gz",
@@ -54,7 +54,7 @@ def download_and_extract(self, go_bin_url, temp_dir):

setup(
name="PlexLabExchange",
version="0.8.16",
version="0.8.17",
packages=find_packages(where='src'), # tell setuptools to look in the 'src' directory for packages
package_dir={'': 'src'}, # tell setuptools that the packages are under the 'src' directory
cmdclass={
69 changes: 42 additions & 27 deletions python/src/plex/__init__.py
@@ -7,8 +7,9 @@


class ScatteringMethod(Enum):
DOT_PRODUCT = 'dotproduct'
CROSS_PRODUCT = 'cross_product'
# values are camel case to match GoLang CLI
DOT_PRODUCT = 'dotProduct'
CROSS_PRODUCT = 'crossProduct'


class CoreTools(Enum):
@@ -35,35 +36,39 @@ def __init__(self, message):
super().__init__(f"{self.message}\n{self.github_issue_message}")


def plex_init(toolpath: str, scatteringMethod="dotProduct", plex_path="plex", **kwargs):
def plex_init(tool_path: str, scattering_method=ScatteringMethod.DOT_PRODUCT.value, plex_path="plex", **kwargs):
cwd = os.getcwd()
plex_work_dir = os.environ.get("PLEX_WORK_DIR", os.path.dirname(os.path.dirname(cwd)))

# Convert kwargs dictionary to a JSON string
inputs = json.dumps(kwargs)

cmd = [plex_path, "init", "-t", toolpath, "-i", inputs, f"--scatteringMethod={scatteringMethod}"]
cmd = [plex_path, "init", "-t", tool_path, "-i", inputs, f"--scatteringMethod={scattering_method}"]

print(' '.join(cmd))

io_json_cid = ""
with subprocess.Popen(cmd, stdout=subprocess.PIPE, bufsize=1, universal_newlines=True, cwd=plex_work_dir) as p:
with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=1, universal_newlines=True, cwd=plex_work_dir) as p:
for line in p.stdout:
if "Pinned IO JSON CID:" in line:
parts = line.split()
io_json_cid = parts[-1]
print(line, end='')
for line in p.stderr:
print(line, end='')

if io_json_cid == "":
raise PlexError("Failed to initialize IO JSON CID")

return io_json_cid


def plex_vectorize(io_path: str, tool_cid: str, outputDir="", plex_path="plex"):
def plex_vectorize(io_path: str, tool_cid: str, output_dir="", plex_path="plex"):
cwd = os.getcwd()
plex_work_dir = os.environ.get("PLEX_WORK_DIR", os.path.dirname(os.path.dirname(cwd)))

cmd = [plex_path, "vectorize", "-i", io_path, "-t", tool_cid, "-o", outputDir]
with subprocess.Popen(cmd, stdout=subprocess.PIPE, bufsize=1, universal_newlines=True, cwd=plex_work_dir) as p:
cmd = [plex_path, "vectorize", "-i", io_path, "-t", tool_cid, "-o", output_dir]
with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=1, universal_newlines=True, cwd=plex_work_dir) as p:
outvects = ""
for line in p.stdout:
if "Output Vectors were saved at:" in line:
@@ -73,40 +78,44 @@ def plex_vectorize(io_path: str, tool_cid: str, outputDir="", plex_path="plex"):
outvects = json.load(f)
os.remove(io_vector_outpath)
print(line, end='')
for line in p.stderr:
print(line, end='')

if outvects == "":
raise PlexError("Failed to vectorize IO JSON CID")

return outvects


def plex_upload(filePath: str, wrapFile=True, plex_path="plex"):
cmd = [plex_path, "upload", "-p", filePath]
def plex_upload(file_path: str, wrap_file=True, plex_path="plex"):
cmd = [plex_path, "upload", "-p", file_path]

if not wrapFile:
if not wrap_file:
cmd.append("-w=false")

with subprocess.Popen(cmd, stdout=subprocess.PIPE, bufsize=1, universal_newlines=True) as p:
with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=1, universal_newlines=True) as p:
file_cid = ""
for line in p.stdout:
if "Uploaded CID:" in line:
parts = line.split()
file_cid = parts[-1]
print(line, end='')
for line in p.stderr:
print(line, end='')

if file_cid == "":
raise PlexError("Failed to upload file to IPFS")

return file_cid


def plex_create(toolpath: str, inputDir: str, layers=2, outputDir="", verbose=False, showAnimation=False, concurrency="1", annotations=[], plex_path="plex"):
def plex_create(tool_path: str, input_dir: str, layers=2, output_dir="", verbose=False, show_animation=False, concurrency="1", annotations=[], plex_path="plex"):
cwd = os.getcwd()
plex_work_dir = os.environ.get("PLEX_WORK_DIR", os.path.dirname(os.path.dirname(cwd)))
cmd = [plex_path, "create", "-t", toolpath, "-i", inputDir, f"--layers={layers}"]
cmd = [plex_path, "create", "-t", tool_path, "-i", input_dir, f"--layers={layers}"]

if outputDir:
cmd.append(f"-o={outputDir}")
if output_dir:
cmd.append(f"-o={output_dir}")

if verbose:
cmd.append("-v=true")
@@ -117,31 +126,33 @@ def plex_create(toolpath: str, inputDir: str, layers=2, outputDir="", verbose=Fa
if annotations:
cmd.append(f"--annotations={annotations.join(',')}")

if not showAnimation: # default is true in the CLI
if not show_animation: # default is true in the CLI
cmd.append("--showAnimation=false")

with subprocess.Popen(cmd, stdout=subprocess.PIPE, bufsize=1, universal_newlines=True, cwd=plex_work_dir) as p:
with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=1, universal_newlines=True, cwd=plex_work_dir) as p:
io_json_cid = ""
for line in p.stdout:
if "Initial IO JSON file CID:" in line:
parts = line.split()
io_json_cid = parts[-1]
print(line, end='')
for line in p.stderr:
print(line, end='')

if io_json_cid == "":
raise PlexError("Failed to create IO JSON CID")

return io_json_cid


def plex_run(io_json_cid: str, outputDir="", verbose=False, showAnimation=False, concurrency="1", annotations=[], plex_path="plex"):
def plex_run(io_json_cid: str, output_dir="", verbose=False, show_animation=False, concurrency="1", annotations=[], plex_path="plex"):
cwd = os.getcwd()
# plex_work_dir = os.environ.get("PLEX_WORK_DIR", os.path.dirname(os.path.dirname(cwd)))
plex_work_dir = os.environ.get("PLEX_WORK_DIR", os.path.dirname(cwd))
cmd = [plex_path, "run", "-i", io_json_cid]

if outputDir:
cmd.append(f"-o={outputDir}")
if output_dir:
cmd.append(f"-o={output_dir}")

if verbose:
cmd.append("-v=true")
@@ -152,10 +163,10 @@ def plex_run(io_json_cid: str, outputDir="", verbose=False, showAnimation=False,
if annotations:
cmd.append(f"--annotations={annotations.join(',')}")

if not showAnimation: # default is true in the CLI
if not show_animation: # default is true in the CLI
cmd.append("--showAnimation=false")

with subprocess.Popen(cmd, stdout=subprocess.PIPE, bufsize=1, universal_newlines=True, cwd=plex_work_dir) as p:
with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=1, universal_newlines=True, cwd=plex_work_dir) as p:
io_json_cid = ""
io_json_local_filepath = ""
for line in p.stdout:
@@ -166,24 +177,28 @@ def plex_run(io_json_cid: str, outputDir="", verbose=False, showAnimation=False,
parts = line.split()
io_json_local_filepath = parts[-1]
print(line, end='')
for line in p.stderr:
print(line, end='')

if io_json_cid == "" or io_json_local_filepath == "":
raise PlexError("Failed to run IO JSON CID")

return io_json_cid, io_json_local_filepath


def plex_mint(io_json_cid: str, imageCid="", plex_path="plex"):
def plex_mint(io_json_cid: str, image_cid="", plex_path="plex"):
cwd = os.getcwd()
plex_work_dir = os.environ.get("PLEX_WORK_DIR", os.path.dirname(os.path.dirname(cwd)))
cmd = [plex_path, "mint", "-i", io_json_cid]

if imageCid:
cmd.append(f"-imageCid={imageCid}")
if image_cid:
cmd.append(f"-imageCid={image_cid}")

with subprocess.Popen(cmd, stdout=subprocess.PIPE, bufsize=1, universal_newlines=True, cwd=plex_work_dir) as p:
with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=1, universal_newlines=True, cwd=plex_work_dir) as p:
for line in p.stdout:
print(line, end='')
for line in p.stderr:
print(line, end='')


def print_io_graph_status(io_graph):
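Since the wrapper signatures now use snake_case, existing callers need to rename their keyword arguments. A minimal sketch of the updated calls, with file paths and the output directory as placeholders taken from the example scripts:

```
from plex import CoreTools, ScatteringMethod, plex_init, plex_run

io_cid = plex_init(
    CoreTools.EQUIBIND.value,
    scattering_method=ScatteringMethod.CROSS_PRODUCT.value,  # was scatteringMethod="cross_product"
    small_molecule=["testdata/binding/abl/ZINC000003986735.sdf"],
    protein=["testdata/binding/abl/7n9g.pdb"],
)
completed_cid, io_path = plex_run(io_cid, output_dir="jobs")  # was outputDir="jobs"
```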