Skip to content

Commit

Permalink
Add host commandline argument for UI and sklearn server (#27)
Browse files — browse the repository at this point in the history
* Add host argument.

* Add host argument.

* Clarify the help text.
Loading branch information…
mdagost authored and aarondav committed Jun 16, 2018
1 parent 5e8be77 commit 1bb2a15
Show file tree
Hide file tree
Showing 2 changed files with 12 additions and 4 deletions.
8 changes: 6 additions & 2 deletions mlflow/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,12 +124,16 @@ def run(uri, entry_point, version, param_list, experiment_id, mode, cluster_spec
@click.option("--file-store-path", default=None,
              help="The root of the backing file store for experiment and run data. Defaults to %s."
                   % file_store._default_root_dir())
@click.option("--host", default="127.0.0.1",
              help="The networking interface on which the UI server listens. Defaults to "
                   "127.0.0.1. Use 0.0.0.0 to bind to all addresses, which is useful for running "
                   "inside of docker.")
def ui(file_store_path, host):
    """
    Run the MLflow tracking UI.

    By default the UI is served at http://127.0.0.1:5000; pass --host 0.0.0.0
    to expose it on all network interfaces (e.g. from inside a container).
    """
    # Point the request handlers at the backing file store before serving.
    server.handlers.store = FileStore(file_store_path)
    server.app.run(host)


cli.add_command(mlflow.sklearn.commands)
Expand Down
8 changes: 6 additions & 2 deletions mlflow/sklearn.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,11 @@ def commands():
@click.argument("model_path")
@click.option("--run_id", "-r", metavar="RUN_ID", help="Run ID to look for the model in.")
@click.option("--port", "-p", default=5000, help="Server port. [default: 5000]")
def serve_model(model_path, run_id=None, port=None):
@click.option("--host", default="127.0.0.1",
help="The networking interface on which the prediction server listens. Defaults to "
"127.0.0.1. Use 0.0.0.0 to bind to all addresses, which is useful for running "
"inside of docker.")
def serve_model(model_path, run_id=None, port=None, host="127.0.0.1"):
"""
Serve a SciKit-Learn model saved with MLflow.
Expand All @@ -96,4 +100,4 @@ def predict(): # noqa
result = json.dumps({"predictions": predictions.tolist()})
return flask.Response(status=200, response=result + "\n", mimetype='application/json')

app.run(port=port)
app.run(host, port=port)

0 comments on commit 1bb2a15

Please sign in to comment.