Skip to content

Commit

Permalink
Fixing quick start (#477)
Browse files Browse the repository at this point in the history
  • Loading branch information
Hedingber committed Oct 15, 2020
1 parent e47027a commit 388a0dd
Show file tree
Hide file tree
Showing 5 changed files with 39 additions and 18 deletions.
1 change: 1 addition & 0 deletions dockerfiles/jupyter/Dockerfile
Expand Up @@ -29,6 +29,7 @@ ENV MLRUN_DBPATH=http://localhost:8080 \
JUPYTER_ENABLE_LAB=yes \
MLRUN_HTTPDB__DATA_VOLUME=$HOME/data \
MLRUN_HTTPDB__DSN='sqlite:////home/jovyan/data/mlrun.db?check_same_thread=false' \
MLRUN_HTTPDB__LOGS_PATH=$HOME/data/logs \
MLRUN_PVC_MOUNT=nfsvol:/home/jovyan/data

# run the mlrun db (api) and the notebook in parallel
Expand Down
9 changes: 4 additions & 5 deletions docs/quick-start.md
Expand Up @@ -21,15 +21,14 @@ MLRun requires separate containers for the API and the dashboard (UI).

To install and run MLRun locally using Docker:
``` sh
MLRUN_IP=localhost
SHARED_DIR=/home/me/data
# On Windows, use host.docker.internal for MLRUN_IP
SHARED_DIR=~/mlrun-data

docker pull mlrun/mlrun-ui:0.5.2
docker pull mlrun/jupyter:0.5.2

docker run -it -p 4000:80 --rm -d --name mlrun-ui -e MLRUN_API_PROXY_URL=http://${MLRUN_IP}:8080 mlrun/mlrun-ui:0.5.2
docker run -it -p 8080:8080 -p 8888:8888 --rm -d --name jupy -v $(SHARED_DIR}:/home/jovyan/data mlrun/jupyter:0.5.2
docker network create mlrun-network
docker run -it -p 8080:8080 -p 8888:8888 --rm -d --network mlrun-network --name jupyter -v ${SHARED_DIR}:/home/jovyan/data mlrun/jupyter:0.5.2
docker run -it -p 4000:80 --rm -d --network mlrun-network --name mlrun-ui -e MLRUN_API_PROXY_URL=http://jupyter:8080 mlrun/mlrun-ui:0.5.2
```

When using Docker, MLRun can only use local runs.
Expand Down
27 changes: 15 additions & 12 deletions mlrun/api/api/endpoints/workflows.py
@@ -1,6 +1,7 @@
from fastapi import APIRouter

from mlrun.run import list_piplines
from mlrun.k8s_utils import get_k8s_helper
from mlrun.run import list_pipelines

router = APIRouter()

Expand All @@ -15,16 +16,18 @@ def list_workflows(
full: bool = False,
page_size: int = 10,
):
total_size, next_page_token, runs = list_piplines(
full=full,
page_token=page_token,
page_size=page_size,
sort_by=sort_by,
experiment_id=experiment_id,
namespace=namespace,
)
total_size, next_page_token, runs = None, None, None
if get_k8s_helper(silent=True).is_running_inside_kubernetes_cluster():
total_size, next_page_token, runs = list_pipelines(
full=full,
page_token=page_token,
page_size=page_size,
sort_by=sort_by,
experiment_id=experiment_id,
namespace=namespace,
)
return {
"runs": runs,
"total_size": total_size,
"next_page_token": next_page_token,
"runs": runs or [],
"total_size": total_size or 0,
"next_page_token": next_page_token or None,
}
2 changes: 1 addition & 1 deletion mlrun/run.py
Expand Up @@ -888,7 +888,7 @@ def get_pipeline(run_id, namespace=None):
return resp


def list_piplines(
def list_pipelines(
full=False,
page_token="",
page_size=10,
Expand Down
18 changes: 18 additions & 0 deletions tests/api/api/test_workflows.py
@@ -0,0 +1,18 @@
import deepdiff
from http import HTTPStatus

from fastapi.testclient import TestClient
from sqlalchemy.orm import Session


def test_list_workflows(db: Session, client: TestClient) -> None:
    """Outside a Kubernetes cluster the workflows endpoint returns an empty listing.

    The handler only queries the pipelines backend when running inside a
    cluster; in the test environment it should fall back to empty defaults.
    """
    response = client.get("/api/workflows")
    assert response.status_code == HTTPStatus.OK.value

    # Expected fallback payload when no pipelines backend is reachable.
    expected_response = {
        "runs": [],
        "total_size": 0,
        "next_page_token": None,
    }
    # DeepDiff returns an empty mapping when the two structures match.
    diff = deepdiff.DeepDiff(expected_response, response.json(), ignore_order=True)
    assert diff == {}

0 comments on commit 388a0dd

Please sign in to comment.