diff --git a/mlflow/cli.py b/mlflow/cli.py
index 34ea842c9121c..e0961ea90c630 100644
--- a/mlflow/cli.py
+++ b/mlflow/cli.py
@@ -124,12 +124,16 @@ def run(uri, entry_point, version, param_list, experiment_id, mode, cluster_spec
 @click.option("--file-store-path", default=None,
               help="The root of the backing file store for experiment and run data. Defaults to %s."
                    % file_store._default_root_dir())
-def ui(file_store_path):
+@click.option("--host", default="127.0.0.1",
+              help="The networking interface on which the UI server listens. Defaults to "
+                   "127.0.0.1. Use 0.0.0.0 to bind to all addresses, which is useful for running "
+                   "inside of docker.")
+def ui(file_store_path, host):
     """
     Run the MLflow tracking UI. The UI is served at http://localhost:5000.
     """
     server.handlers.store = FileStore(file_store_path)
-    server.app.run("0.0.0.0")
+    server.app.run(host)
 
 
 cli.add_command(mlflow.sklearn.commands)
diff --git a/mlflow/sklearn.py b/mlflow/sklearn.py
index a5cca8485bc70..f61a61b56cf3b 100644
--- a/mlflow/sklearn.py
+++ b/mlflow/sklearn.py
@@ -76,7 +76,11 @@ def commands():
 @click.argument("model_path")
 @click.option("--run_id", "-r", metavar="RUN_ID", help="Run ID to look for the model in.")
 @click.option("--port", "-p", default=5000, help="Server port. [default: 5000]")
-def serve_model(model_path, run_id=None, port=None):
+@click.option("--host", default="127.0.0.1",
+              help="The networking interface on which the prediction server listens. Defaults to "
+                   "127.0.0.1. Use 0.0.0.0 to bind to all addresses, which is useful for running "
+                   "inside of docker.")
+def serve_model(model_path, run_id=None, port=None, host="127.0.0.1"):
     """
     Serve a SciKit-Learn model saved with MLflow.
 
@@ -96,4 +100,4 @@ def predict(): # noqa
         result = json.dumps({"predictions": predictions.tolist()})
         return flask.Response(status=200, response=result + "\n", mimetype='application/json')
 
-    app.run(port=port)
+    app.run(host, port=port)