playbook changes
amesar committed Apr 25, 2019
1 parent f0829cc commit 3d4791f
Showing 4 changed files with 308 additions and 45 deletions.
26 changes: 13 additions & 13 deletions hello_world/playbook.ipynb
@@ -9,7 +9,7 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
@@ -19,7 +19,7 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 2,
"metadata": {},
"outputs": [
{
@@ -30,8 +30,8 @@
"Tracking URI: http://localhost:5000\n",
"experiment_name: hello_world\n",
"experiment_id: 1\n",
"runId: daac922bf6474045bda1f3e36736a517\n",
"artifact_uri: /Users/ander/work/mlflow/local_mlrun/mlruns/1/daac922bf6474045bda1f3e36736a517/artifacts\n",
"runId: f12c8d9e6d56450280943ec814cdb32e\n",
"artifact_uri: /Users/ander/work/mlflow/local_mlrun/mlruns/1/f12c8d9e6d56450280943ec814cdb32e/artifacts\n",
"alpha: 0.1\n",
"log_artifact: False\n",
"run_origin: \n"
@@ -44,7 +44,7 @@
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": 3,
"metadata": {},
"outputs": [
{
@@ -55,8 +55,8 @@
"Tracking URI: http://localhost:5000\n",
"experiment_name: hello_world\n",
"experiment_id: 1\n",
"runId: fdb6552fbe7b4c25b7d1cd7a813581db\n",
"artifact_uri: /Users/ander/work/mlflow/local_mlrun/mlruns/1/fdb6552fbe7b4c25b7d1cd7a813581db/artifacts\n",
"runId: 34b98f61e9f94ada81d6b8be892c8f65\n",
"artifact_uri: /Users/ander/work/mlflow/local_mlrun/mlruns/1/34b98f61e9f94ada81d6b8be892c8f65/artifacts\n",
"alpha: 0.1\n",
"log_artifact: True\n",
"run_origin: \n"
@@ -69,25 +69,25 @@
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"2019/04/24 21:18:05 INFO mlflow.projects: === Created directory /var/folders/_9/tbkxzw0116v2cp_zq4f1_1cm0000gp/T/tmpa03tjmqq for downloading remote URIs passed to arguments of type 'path' ===\n",
"2019/04/24 21:18:05 INFO mlflow.projects: === Running command 'source activate mlflow-aacce47b0cb7984f4aead56265692d3969388f30 && python hello_world.py --alpha .01 --run_origin LocalRun --log_artifact True' in run with ID '86dbabcbf73946a1a877b869c78c56b9' === \n",
"2019/04/25 13:42:50 INFO mlflow.projects: === Created directory /var/folders/_9/tbkxzw0116v2cp_zq4f1_1cm0000gp/T/tmpdihQqS for downloading remote URIs passed to arguments of type 'path' ===\n",
"2019/04/25 13:42:50 INFO mlflow.projects: === Running command 'source activate mlflow-aacce47b0cb7984f4aead56265692d3969388f30 && python hello_world.py --alpha .01 --run_origin LocalRun --log_artifact True' in run with ID '91bdea4b8e7f47379688d98bfeb424a0' === \n",
"MLflow Version: 0.9.1\n",
"Tracking URI: http://localhost:5000\n",
"experiment_name: hello_world\n",
"experiment_id: 1\n",
"runId: 86dbabcbf73946a1a877b869c78c56b9\n",
"artifact_uri: /Users/ander/work/mlflow/local_mlrun/mlruns/0/86dbabcbf73946a1a877b869c78c56b9/artifacts\n",
"runId: 91bdea4b8e7f47379688d98bfeb424a0\n",
"artifact_uri: /Users/ander/work/mlflow/local_mlrun/mlruns/0/91bdea4b8e7f47379688d98bfeb424a0/artifacts\n",
"alpha: 0.01\n",
"log_artifact: True\n",
"run_origin: LocalRun\n",
"2019/04/24 21:18:10 INFO mlflow.projects: === Run (ID '86dbabcbf73946a1a877b869c78c56b9') succeeded ===\n"
"2019/04/25 13:42:51 INFO mlflow.projects: === Run (ID '91bdea4b8e7f47379688d98bfeb424a0') succeeded ===\n"
]
}
],
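The hello_world hunks above only refresh execution counts, run IDs, and timestamps from re-running the notebook against a local tracking server. For orientation, here is a minimal sketch of a hello_world-style script (an assumption for illustration, not the repo's actual hello_world.py; the "rmse" metric and "info.txt" artifact are hypothetical) that would produce runs like the output shown:

    # Hypothetical hello_world-style MLflow script (illustrative only, not the repo's file).
    # Assumes mlflow 0.9.x is installed and a tracking server is running at http://localhost:5000.
    import sys
    import mlflow

    if __name__ == "__main__":
        alpha = float(sys.argv[1]) if len(sys.argv) > 1 else 0.1
        log_artifact_flag = len(sys.argv) > 2 and sys.argv[2] == "True"
        mlflow.set_tracking_uri("http://localhost:5000")
        mlflow.set_experiment("hello_world")
        with mlflow.start_run() as run:
            print("runId: %s" % run.info.run_uuid)
            mlflow.log_param("alpha", alpha)
            mlflow.set_tag("run_origin", "LocalRun")
            if log_artifact_flag:
                # Write a small file and attach it to the run as an artifact.
                with open("info.txt", "w") as f:
                    f.write("hello world")
                mlflow.log_artifact("info.txt")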
66 changes: 61 additions & 5 deletions pyspark/playbook.ipynb
@@ -24,6 +24,13 @@
"! mlflow --version"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Train"
]
},
{
"cell_type": "code",
"execution_count": 24,
@@ -172,25 +179,74 @@
" -P max_depth=3 -P max_bins=24 \\\n",
" --experiment-id=2"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Predict"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"19/04/25 14:01:33 WARN Utils: Your hostname, C02VD1RGHTDD resolves to a loopback address: 127.0.0.1; using 10.64.185.74 instead (on interface en0)\n",
"19/04/25 14:01:33 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address\n",
"19/04/25 14:01:34 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable\n",
"MLflow Version: 0.9.1\n",
"Tracking URI: /Users/ander/git/andre/clean/mlflow-spark-summit-2019/pyspark/mlruns\n",
"run_id: 6ca69795529e491983d217181ab2dae9\n",
"data_path: ../data/sample_libsvm_data.txt\n",
"Traceback (most recent call last):\n",
" File \"/Users/ander/git/andre/clean/mlflow-spark-summit-2019/pyspark/predict.py\", line 20, in <module>\n",
" model = mlflow_spark.load_model(\"spark-model\", run_id=run_id)\n",
" File \"/Users/ander/venvs/mlflow-venv/lib/python2.7/site-packages/mlflow/spark.py\", line 348, in load_model\n",
" path = mlflow.tracking.utils._get_model_log_dir(model_name=path, run_id=run_id)\n",
" File \"/Users/ander/venvs/mlflow-venv/lib/python2.7/site-packages/mlflow/tracking/utils.py\", line 279, in _get_model_log_dir\n",
" run = store.get_run(run_id)\n",
" File \"/Users/ander/venvs/mlflow-venv/lib/python2.7/site-packages/mlflow/store/file_store.py\", line 368, in get_run\n",
" run_info = self._get_run_info(run_uuid)\n",
" File \"/Users/ander/venvs/mlflow-venv/lib/python2.7/site-packages/mlflow/store/file_store.py\", line 384, in _get_run_info\n",
" databricks_pb2.RESOURCE_DOES_NOT_EXIST)\n",
"mlflow.exceptions.MlflowException: Run '6ca69795529e491983d217181ab2dae9' not found\n"
]
}
],
"source": [
"! spark-submit --master local[2] predict.py 6ca69795529e491983d217181ab2dae9"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"display_name": "Python 2",
"language": "python",
"name": "python3"
"name": "python2"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.8"
"pygments_lexer": "ipython2",
"version": "2.7.14"
}
},
"nbformat": 4,
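The new Predict cell above fails with "MlflowException: Run '6ca69795529e491983d217181ab2dae9' not found" because that run ID is not in the tracking store the script resolves. Independent of that failure, here is a minimal sketch of a predict.py-shaped script (an assumption for illustration, not the repo's actual predict.py) that loads the Spark model logged for a run and scores the sample libsvm data, matching the load_model call visible in the traceback:

    # Hypothetical predict.py-style sketch (illustrative only, not the repo's file).
    # Assumes the run logged a Spark model under the artifact path "spark-model"
    # and that the active tracking store actually contains the given run ID.
    import sys
    import mlflow.spark
    from pyspark.sql import SparkSession

    if __name__ == "__main__":
        run_id = sys.argv[1]
        data_path = sys.argv[2] if len(sys.argv) > 2 else "../data/sample_libsvm_data.txt"
        spark = SparkSession.builder.appName("predict").getOrCreate()
        data = spark.read.format("libsvm").load(data_path)
        model = mlflow.spark.load_model("spark-model", run_id=run_id)  # mlflow 0.9.x signature
        predictions = model.transform(data)
        predictions.select("prediction", "label", "features").show(10)

It would be invoked as in the cell above: spark-submit --master local[2] predict.py <run_id>.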
108 changes: 87 additions & 21 deletions scala_spark/playbook.ipynb
@@ -9,29 +9,19 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"mlflow, version 0.9.1\r\n"
]
}
],
"outputs": [],
"source": [
"! mlflow --version"
"import os\n",
"os.environ[\"MLFLOW_TRACKING_URI\"] = \"http://localhost:5000\""
]
},
{
"cell_type": "code",
"execution_count": 5,
"cell_type": "markdown",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"os.environ[\"MLFLOW_TRACKING_URI\"] = \"http://localhost:5000\""
"### Train"
]
},
{
@@ -125,25 +115,101 @@
" --dataPath ../data/sample_libsvm_data.txt \\\n",
" --modelPath model_sample --maxDepth 5 --maxBins 5"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Predict"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"19/04/25 14:05:42 WARN Utils: Your hostname, C02VD1RGHTDD resolves to a loopback address: 127.0.0.1; using 10.64.185.74 instead (on interface en0)\n",
"19/04/25 14:05:42 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address\n",
"19/04/25 14:05:42 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable\n",
"Options:\n",
" dataPath: ../data/sample_libsvm_data.txt\n",
" tracking URI: null\n",
" token: null\n",
" runId: 3418ce2004454821b63e2fcff64177f4\n",
"MLFLOW_TRACKING_URI: http://localhost:5000\n",
"==== Spark ML\n",
"+----------+-----+--------------------+\n",
"|prediction|label| features|\n",
"+----------+-----+--------------------+\n",
"| 0.0| 0.0|(692,[127,128,129...|\n",
"| 1.0| 1.0|(692,[158,159,160...|\n",
"| 1.0| 1.0|(692,[124,125,126...|\n",
"| 1.0| 1.0|(692,[152,153,154...|\n",
"| 1.0| 1.0|(692,[151,152,153...|\n",
"| 0.0| 0.0|(692,[129,130,131...|\n",
"| 1.0| 1.0|(692,[158,159,160...|\n",
"| 1.0| 1.0|(692,[99,100,101,...|\n",
"| 0.0| 0.0|(692,[154,155,156...|\n",
"| 0.0| 0.0|(692,[127,128,129...|\n",
"+----------+-----+--------------------+\n",
"only showing top 10 rows\n",
"\n",
"==== MLeap\n",
"+----------+-----+--------------------+\n",
"|prediction|label| features|\n",
"+----------+-----+--------------------+\n",
"| 0.0| 0.0|(692,[127,128,129...|\n",
"| 0.0| 1.0|(692,[158,159,160...|\n",
"| 0.0| 1.0|(692,[124,125,126...|\n",
"| 0.0| 1.0|(692,[152,153,154...|\n",
"| 0.0| 1.0|(692,[151,152,153...|\n",
"| 0.0| 0.0|(692,[129,130,131...|\n",
"| 0.0| 1.0|(692,[158,159,160...|\n",
"| 0.0| 1.0|(692,[99,100,101,...|\n",
"| 0.0| 0.0|(692,[154,155,156...|\n",
"| 0.0| 0.0|(692,[127,128,129...|\n",
"+----------+-----+--------------------+\n",
"only showing top 10 rows\n",
"\n"
]
}
],
"source": [
"! spark-submit --class org.andre.mlflow.examples.decisiontree.PredictDecisionTree \\\n",
" --master local[2] target/mlflow-spark-examples-1.0-SNAPSHOT.jar \\\n",
" --dataPath ../data/sample_libsvm_data.txt \\\n",
" --runId 3418ce2004454821b63e2fcff64177f4"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"display_name": "Python 2",
"language": "python",
"name": "python3"
"name": "python2"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.8"
"pygments_lexer": "ipython2",
"version": "2.7.14"
}
},
"nbformat": 4,
