From 127c70a9f806c6f412e56c2d766b4b65d53d342e Mon Sep 17 00:00:00 2001 From: Ilya Matiach Date: Tue, 2 Nov 2021 18:55:42 -0400 Subject: [PATCH 01/40] docs: add explanation dashboard integration example notebook (#1236) --- ...rpretability - Explanation Dashboard.ipynb | 430 ++++++++++++++++++ 1 file changed, 430 insertions(+) create mode 100644 notebooks/Interpretability - Explanation Dashboard.ipynb diff --git a/notebooks/Interpretability - Explanation Dashboard.ipynb b/notebooks/Interpretability - Explanation Dashboard.ipynb new file mode 100644 index 0000000000..2f045172ad --- /dev/null +++ b/notebooks/Interpretability - Explanation Dashboard.ipynb @@ -0,0 +1,430 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "4a463c67-7543-42d2-a116-e70e8451b09b", + "showTitle": false, + "title": "" + } + }, + "source": [ + "## Interpretability - Explanation Dashboard\n", + "\n", + "In this example, similar to the \"Interpretability - Tabular SHAP explainer\" notebook, we use Kernel SHAP to explain a tabular classification model built from the Adults Census dataset and then visualize the explanation in the ExplanationDashboard from https://github.com/microsoft/responsible-ai-widgets.\n", + "\n", + "First we import the packages and define some UDFs we will need later." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "bf0fdfc2-97b2-48e4-b3d9-794b0cb3da67", + "showTitle": false, + "title": "" + }, + "collapsed": true + }, + "outputs": [], + "source": [ + "import pyspark\n", + "from synapse.ml.explainers import *\n", + "from pyspark.ml import Pipeline\n", + "from pyspark.ml.classification import LogisticRegression\n", + "from pyspark.ml.feature import StringIndexer, OneHotEncoder, VectorAssembler\n", + "from pyspark.sql.types import *\n", + "from pyspark.sql.functions import *\n", + "import pandas as pd\n", + "\n", + "vec_access = udf(lambda v, i: float(v[i]), FloatType())\n", + "vec2array = udf(lambda vec: vec.toArray().tolist(), ArrayType(FloatType()))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "ae47e1f9-0672-47ed-94de-10970e1b14b5", + "showTitle": false, + "title": "" + } + }, + "source": [ + "Now let's read the data and train a simple binary classification model." 
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "application/vnd.databricks.v1+cell": {
+ "inputWidgets": {},
+ "nuid": "58807448-d8e0-4818-adc8-27536d561fb3",
+ "showTitle": false,
+ "title": ""
+ },
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "df = spark.read.parquet(\"wasbs://publicwasb@mmlspark.blob.core.windows.net/AdultCensusIncome.parquet\")\n",
+ "\n",
+ "labelIndexer = StringIndexer(inputCol=\"income\", outputCol=\"label\", stringOrderType=\"alphabetAsc\").fit(df)\n",
+ "print(\"Label index assignment: \" + str(set(zip(labelIndexer.labels, [0, 1]))))\n",
+ "\n",
+ "training = labelIndexer.transform(df)\n",
+ "display(training)\n",
+ "categorical_features = [\n",
+ " \"workclass\",\n",
+ " \"education\",\n",
+ " \"marital-status\",\n",
+ " \"occupation\",\n",
+ " \"relationship\",\n",
+ " \"race\",\n",
+ " \"sex\",\n",
+ " \"native-country\",\n",
+ "]\n",
+ "categorical_features_idx = [col + \"_idx\" for col in categorical_features]\n",
+ "categorical_features_enc = [col + \"_enc\" for col in categorical_features]\n",
+ "numeric_features = [\"age\", \"education-num\", \"capital-gain\", \"capital-loss\", \"hours-per-week\"]\n",
+ "\n",
+ "strIndexer = StringIndexer(inputCols=categorical_features, outputCols=categorical_features_idx)\n",
+ "onehotEnc = OneHotEncoder(inputCols=categorical_features_idx, outputCols=categorical_features_enc)\n",
+ "vectAssem = VectorAssembler(inputCols=categorical_features_enc + numeric_features, outputCol=\"features\")\n",
+ "lr = LogisticRegression(featuresCol=\"features\", labelCol=\"label\", weightCol=\"fnlwgt\")\n",
+ "pipeline = Pipeline(stages=[strIndexer, onehotEnc, vectAssem, lr])\n",
+ "model = pipeline.fit(training)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "application/vnd.databricks.v1+cell": {
+ "inputWidgets": {},
+ "nuid": "f617f9a4-7e67-43f8-8fa9-92680b635b3d",
+ "showTitle": false,
+ "title": ""
+ }
+ },
+ "source": [
+ "After the model is trained, we randomly select some observations to be explained."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "application/vnd.databricks.v1+cell": {
+ "inputWidgets": {},
+ "nuid": "f55757a6-6204-4f64-a91e-65bfbacf62bc",
+ "showTitle": false,
+ "title": ""
+ },
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "explain_instances = model.transform(training).orderBy(rand()).limit(5).repartition(200).cache()\n",
+ "display(explain_instances)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "application/vnd.databricks.v1+cell": {
+ "inputWidgets": {},
+ "nuid": "48a0c8ee-8e36-4bd3-9a04-eded6d2c8894",
+ "showTitle": false,
+ "title": ""
+ }
+ },
+ "source": [
+ "We create a TabularSHAP explainer, set the input columns to all the features the model takes, and specify the model and the target output column we are trying to explain. In this case, we are trying to explain the \"probability\" output, which is a vector of length 2, and we are only looking at the class 1 probability. Set targetClasses to `[0, 1]` if you want to explain the class 0 and class 1 probabilities at the same time. Finally, we sample 100 rows from the training data as background data, which is used for integrating out features in Kernel SHAP."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "application/vnd.databricks.v1+cell": {
+ "inputWidgets": {},
+ "nuid": "7e097552-e617-4e1c-a085-b66eca5bcb69",
+ "showTitle": false,
+ "title": ""
+ },
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "shap = TabularSHAP(\n",
+ " inputCols=categorical_features + numeric_features,\n",
+ " outputCol=\"shapValues\",\n",
+ " numSamples=5000,\n",
+ " model=model,\n",
+ " targetCol=\"probability\",\n",
+ " targetClasses=[1],\n",
+ " backgroundData=training.orderBy(rand()).limit(100).cache(),\n",
+ ")\n",
+ "\n",
+ "shap_df = shap.transform(explain_instances)\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "application/vnd.databricks.v1+cell": {
+ "inputWidgets": {},
+ "nuid": "6933b52b-7d46-4210-810a-f984b76dd4a2",
+ "showTitle": false,
+ "title": ""
+ }
+ },
+ "source": [
+ "Once we have the resulting dataframe, we extract the class 1 probability of the model output, the SHAP values for the target class, the original features, and the true label. Then we convert it to a pandas dataframe for visualization.\n",
+ "For each observation, the first element in the SHAP values vector is the base value (the mean output of the background dataset), and each of the following elements is the SHAP value for the corresponding feature."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "application/vnd.databricks.v1+cell": {
+ "inputWidgets": {},
+ "nuid": "05e01f98-e44c-46c9-a8ae-26ba892f85b3",
+ "showTitle": false,
+ "title": ""
+ },
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "shaps = (\n",
+ " shap_df.withColumn(\"probability\", vec_access(col(\"probability\"), lit(1)))\n",
+ " .withColumn(\"shapValues\", vec2array(col(\"shapValues\").getItem(0)))\n",
+ " .select([\"shapValues\", \"probability\", \"label\"] + categorical_features + numeric_features)\n",
+ ")\n",
+ "\n",
+ "shaps_local = shaps.toPandas()\n",
+ "shaps_local.sort_values(\"probability\", ascending=False, inplace=True, ignore_index=True)\n",
+ "pd.set_option(\"display.max_colwidth\", None)\n",
+ "shaps_local"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "application/vnd.databricks.v1+cell": {
+ "inputWidgets": {},
+ "nuid": "f9317a27-900a-4d1d-9e9f-9fe906eae75c",
+ "showTitle": false,
+ "title": ""
+ }
+ },
+ "source": [
+ "We can visualize the explanation in the [interpret-community format](https://github.com/interpretml/interpret-community) using the ExplanationDashboard from https://github.com/microsoft/responsible-ai-widgets/"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "application/vnd.databricks.v1+cell": {
+ "inputWidgets": {},
+ "nuid": "c9b4c03e-eac8-4314-a6c2-0a451525e6a4",
+ "showTitle": false,
+ "title": ""
+ },
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "import pandas as pd\n",
+ "import numpy as np\n",
+ "\n",
+ "features = categorical_features + numeric_features\n",
+ "features_with_base = [\"Base\"] + features\n",
+ "\n",
+ "rows = shaps_local.shape[0]\n",
+ "\n",
+ "local_importance_values = shaps_local[['shapValues']]\n",
+ "eval_data = shaps_local[features]\n",
+ "true_y = np.array(shaps_local[['label']])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "list_local_importance_values = local_importance_values.values.tolist()\n",
+ "converted_importance_values = []\n",
+ "bias = []\n",
+ "for classarray in list_local_importance_values:\n",
+ "
for rowarray in classarray:\n", + " converted_list = rowarray.tolist()\n", + " bias.append(converted_list[0])\n", + " # remove the bias from local importance values\n", + " del converted_list[0]\n", + " converted_importance_values.append(converted_list)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "When running Synapse Analytics, please follow instructions here [Package management - Azure Synapse Analytics | Microsoft Docs](https://docs.microsoft.com/en-us/azure/synapse-analytics/spark/apache-spark-azure-portal-add-libraries) to install [\"raiwidgets\"](https://pypi.org/project/raiwidgets/) and [\"interpret-community\"](https://pypi.org/project/interpret-community/) packages." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "!pip install --upgrade raiwidgets" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "!pip install --upgrade interpret-community" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "from interpret_community.adapter import ExplanationAdapter\n", + "adapter = ExplanationAdapter(features, classification=True)\n", + "global_explanation = adapter.create_global(converted_importance_values, eval_data, expected_values=bias)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# view the global importance values\n", + "global_explanation.global_importance_values" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# view the local importance values\n", + "global_explanation.local_importance_values" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "class wrapper(object):\n", + " def __init__(self, model):\n", + " self.model = model\n", + " \n", + " def predict(self, data):\n", + " sparkdata = spark.createDataFrame(data)\n", + " return model.transform(sparkdata).select('prediction').toPandas().values.flatten().tolist()\n", + " \n", + " def predict_proba(self, data):\n", + " sparkdata = spark.createDataFrame(data)\n", + " prediction = model.transform(sparkdata).select('probability').toPandas().values.flatten().tolist()\n", + " proba_list = [vector.values.tolist() for vector in prediction]\n", + " return proba_list" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# view the explanation in the ExplanationDashboard\n", + "from raiwidgets import ExplanationDashboard\n", + "ExplanationDashboard(global_explanation, wrapper(model), dataset=eval_data, true_y=true_y)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "8f22fceb-0fc0-4a86-a0ca-2a7b47b4795a", + "showTitle": false, + "title": "" + } + }, + "source": [ + "Your results will look like:\n", + "\n", + "" + ] + } + ], + "metadata": { + "application/vnd.databricks.v1+notebook": { + "dashboards": [], + "language": "python", + "notebookMetadata": { + "pythonIndentUnit": 2 + }, + "notebookName": "Interpretability - Tabular SHAP explainer", + "notebookOrigID": 4343954975413564, + "widgets": {} + }, + "kernelspec": { + 
"display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.9" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} From 81f5f80bc68918840c51023a0ba8a3cbae55a814 Mon Sep 17 00:00:00 2001 From: Serena Ruan <82044803+serena-ruan@users.noreply.github.com> Date: Wed, 3 Nov 2021 11:10:20 +0800 Subject: [PATCH 02/40] chore: release synapseml 0.9.2 (#1237) --- README.md | 26 +++---- notebooks/CognitiveServices - Overview.ipynb | 72 +++++++++---------- ...CyberML - Anomalous Access Detection.ipynb | 2 +- notebooks/Regression - Auto Imports.ipynb | 10 +-- ...on - Flight Delays with DataCleaning.ipynb | 6 +- website/docs/about.md | 4 +- .../documentation/estimators/_LightGBM.md | 6 +- website/docs/documentation/estimators/_VW.md | 4 +- .../documentation/estimators/core/_AutoML.md | 4 +- .../estimators/core/_Featurize.md | 10 +-- .../estimators/core/_IsolationForest.md | 2 +- .../docs/documentation/estimators/core/_NN.md | 4 +- .../estimators/core/_Recommendation.md | 4 +- .../documentation/estimators/core/_Stages.md | 6 +- .../documentation/estimators/core/_Train.md | 4 +- .../documentation/transformers/_OpenCV.md | 4 +- .../docs/documentation/transformers/_VW.md | 8 +-- .../cognitive/_AnomalyDetection.md | 6 +- .../transformers/cognitive/_AzureSearch.md | 2 +- .../cognitive/_BingImageSearch.md | 2 +- .../transformers/cognitive/_ComputerVision.md | 16 ++--- .../transformers/cognitive/_Face.md | 10 +-- .../transformers/cognitive/_FormRecognizer.md | 16 ++--- .../transformers/cognitive/_SpeechToText.md | 4 +- .../transformers/cognitive/_TextAnalytics.md | 12 ++-- .../transformers/cognitive/_Translator.md | 14 ++-- .../transformers/core/_Explainers.md | 16 ++--- .../transformers/core/_Featurize.md | 8 +-- .../documentation/transformers/core/_IO.md | 14 ++-- .../documentation/transformers/core/_Image.md | 6 +- .../transformers/core/_Stages.md | 34 ++++----- .../core/_SuperpixelTransformer.md | 2 +- .../documentation/transformers/core/_Train.md | 4 +- .../transformers/deep_learning/_ONNXModel.md | 2 +- .../CyberML - Anomalous Access Detection.md | 2 +- .../regression/Regression - Auto Imports.md | 10 +-- ...ssion - Flight Delays with DataCleaning.md | 6 +- .../features/CognitiveServices - Overview.md | 72 +++++++++---------- website/docs/getting_started/installation.md | 18 ++--- website/docs/reference/R-setup.md | 6 +- website/docs/reference/cyber.md | 24 +++---- website/docs/reference/docker.md | 8 +-- website/src/pages/index.js | 14 ++-- 43 files changed, 252 insertions(+), 252 deletions(-) diff --git a/README.md b/README.md index ae9f798b95..b02ed23ea7 100644 --- a/README.md +++ b/README.md @@ -4,9 +4,9 @@ [![Build Status](https://msdata.visualstudio.com/A365/_apis/build/status/microsoft.SynapseML?branchName=master)](https://msdata.visualstudio.com/A365/_build/latest?definitionId=17563&branchName=master) [![codecov](https://codecov.io/gh/Microsoft/SynapseML/branch/master/graph/badge.svg)](https://codecov.io/gh/Microsoft/SynapseML) [![Gitter](https://badges.gitter.im/Microsoft/MMLSpark.svg)](https://gitter.im/Microsoft/MMLSpark?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) -[![Release Notes](https://img.shields.io/badge/release-notes-blue)](https://github.com/Microsoft/SynapseML/releases) [![Scala 
Docs](https://img.shields.io/static/v1?label=api%20docs&message=scala&color=blue&logo=scala)](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/index.html#package) [![PySpark Docs](https://img.shields.io/static/v1?label=api%20docs&message=python&color=blue&logo=python)](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/index.html) [![Academic Paper](https://img.shields.io/badge/academic-paper-7fdcf7)](https://arxiv.org/abs/1810.08744) +[![Release Notes](https://img.shields.io/badge/release-notes-blue)](https://github.com/Microsoft/SynapseML/releases) [![Scala Docs](https://img.shields.io/static/v1?label=api%20docs&message=scala&color=blue&logo=scala)](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/index.html#package) [![PySpark Docs](https://img.shields.io/static/v1?label=api%20docs&message=python&color=blue&logo=python)](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/index.html) [![Academic Paper](https://img.shields.io/badge/academic-paper-7fdcf7)](https://arxiv.org/abs/1810.08744) -[![Version](https://img.shields.io/badge/version-0.9.1-blue)](https://github.com/Microsoft/SynapseML/releases) [![Snapshot Version](https://mmlspark.blob.core.windows.net/icons/badges/master_version3.svg)](#sbt) +[![Version](https://img.shields.io/badge/version-0.9.2-blue)](https://github.com/Microsoft/SynapseML/releases) [![Snapshot Version](https://mmlspark.blob.core.windows.net/icons/badges/master_version3.svg)](#sbt) SynapseML is an ecosystem of tools aimed towards expanding the distributed computing framework @@ -24,8 +24,8 @@ sub-millisecond latency web services, backed by your Spark cluster. SynapseML requires Scala 2.12, Spark 3.0+, and Python 3.6+. See the API documentation [for -Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/index.html#package) and [for -PySpark](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/index.html). +Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/index.html#package) and [for +PySpark](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/index.html).
Table of Contents @@ -149,7 +149,7 @@ the above example, or from python: ```python import pyspark spark = pyspark.sql.SparkSession.builder.appName("MyApp") \ - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") \ + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") \ .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") \ .getOrCreate() import synapse.ml @@ -162,7 +162,7 @@ your `build.sbt`: ```scala resolvers += "SynapseML" at "https://mmlspark.azureedge.net/maven" -libraryDependencies += "com.microsoft.azure" % "synapseml" % "0.9.1" +libraryDependencies += "com.microsoft.azure" % "synapseml" % "0.9.2" ``` @@ -172,9 +172,9 @@ SynapseML can be conveniently installed on existing Spark clusters via the `--packages` option, examples: ```bash -spark-shell --packages com.microsoft.azure:synapseml:0.9.1 -pyspark --packages com.microsoft.azure:synapseml:0.9.1 -spark-submit --packages com.microsoft.azure:synapseml:0.9.1 MyApp.jar +spark-shell --packages com.microsoft.azure:synapseml:0.9.2 +pyspark --packages com.microsoft.azure:synapseml:0.9.2 +spark-submit --packages com.microsoft.azure:synapseml:0.9.2 MyApp.jar ``` This can be used in other Spark contexts too. For example, you can use SynapseML @@ -189,7 +189,7 @@ cloud](http://community.cloud.databricks.com), create a new [library from Maven coordinates](https://docs.databricks.com/user-guide/libraries.html#libraries-from-maven-pypi-or-spark-packages) in your workspace. -For the coordinates use: `com.microsoft.azure:synapseml:0.9.1` +For the coordinates use: `com.microsoft.azure:synapseml:0.9.2` with the resolver: `https://mmlspark.azureedge.net/maven`. Ensure this library is attached to your target cluster(s). @@ -197,7 +197,7 @@ Finally, ensure that your Spark cluster has at least Spark 3.12 and Scala 2.12. You can use SynapseML in both your Scala and PySpark notebooks. 
To get started with our example notebooks import the following databricks archive: -`https://mmlspark.blob.core.windows.net/dbcs/SynapseMLExamplesv0.9.1.dbc` +`https://mmlspark.blob.core.windows.net/dbcs/SynapseMLExamplesv0.9.2.dbc` ### Apache Livy and HDInsight @@ -210,7 +210,7 @@ Excluding certain packages from the library may be necessary due to current issu { "name": "synapseml", "conf": { - "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.1", + "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.2", "spark.jars.repositories": "https://mmlspark.azureedge.net/maven", "spark.jars.excludes": "org.scala-lang:scala-reflect,org.apache.spark:spark-tags_2.12,org.scalactic:scalactic_2.12,org.scalatest:scalatest_2.12" } @@ -224,7 +224,7 @@ In Azure Synapse, "spark.yarn.user.classpath.first" should be set to "true" to o { "name": "synapseml", "conf": { - "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.1", + "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.2", "spark.jars.repositories": "https://mmlspark.azureedge.net/maven", "spark.jars.excludes": "org.scala-lang:scala-reflect,org.apache.spark:spark-tags_2.12,org.scalactic:scalactic_2.12,org.scalatest:scalatest_2.12", "spark.yarn.user.classpath.first": "true" diff --git a/notebooks/CognitiveServices - Overview.ipynb b/notebooks/CognitiveServices - Overview.ipynb index 7e4e76bb27..dcd0975e0e 100644 --- a/notebooks/CognitiveServices - Overview.ipynb +++ b/notebooks/CognitiveServices - Overview.ipynb @@ -30,60 +30,60 @@ "\n", "### Vision\n", "[**Computer Vision**](https://azure.microsoft.com/en-us/services/cognitive-services/computer-vision/)\n", - "- Describe: provides description of an image in human readable language ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DescribeImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DescribeImage))\n", - "- Analyze (color, image type, face, adult/racy content): analyzes visual features of an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeImage))\n", - "- OCR: reads text from an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/OCR.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.OCR))\n", - "- Recognize Text: reads text from an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/RecognizeText.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.RecognizeText))\n", - "- Thumbnail: generates a thumbnail of user-specified size from the image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/GenerateThumbnails.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.GenerateThumbnails))\n", - "- Recognize domain-specific content: recognizes domain-specific content (celebrity, landmark) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/RecognizeDomainSpecificContent.html), 
[Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.RecognizeDomainSpecificContent))\n", - "- Tag: identifies list of words that are relevant to the in0put image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/TagImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.TagImage))\n", + "- Describe: provides description of an image in human readable language ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DescribeImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DescribeImage))\n", + "- Analyze (color, image type, face, adult/racy content): analyzes visual features of an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeImage))\n", + "- OCR: reads text from an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/OCR.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.OCR))\n", + "- Recognize Text: reads text from an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/RecognizeText.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.RecognizeText))\n", + "- Thumbnail: generates a thumbnail of user-specified size from the image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/GenerateThumbnails.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.GenerateThumbnails))\n", + "- Recognize domain-specific content: recognizes domain-specific content (celebrity, landmark) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/RecognizeDomainSpecificContent.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.RecognizeDomainSpecificContent))\n", + "- Tag: identifies list of words that are relevant to the in0put image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/TagImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.TagImage))\n", "\n", "[**Face**](https://azure.microsoft.com/en-us/services/cognitive-services/face/)\n", - "- Detect: detects human faces in an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DetectFace.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectFace))\n", - "- Verify: verifies whether two faces belong to a same person, or a face belongs to a person ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/VerifyFaces.html), 
[Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.VerifyFaces))\n", - "- Identify: finds the closest matches of the specific query person face from a person group ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/IdentifyFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.IdentifyFaces))\n", - "- Find similar: finds similar faces to the query face in a face list ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/FindSimilarFace.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.FindSimilarFace))\n", - "- Group: divides a group of faces into disjoint groups based on similarity ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/GroupFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.GroupFaces))\n", + "- Detect: detects human faces in an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DetectFace.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectFace))\n", + "- Verify: verifies whether two faces belong to a same person, or a face belongs to a person ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/VerifyFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.VerifyFaces))\n", + "- Identify: finds the closest matches of the specific query person face from a person group ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/IdentifyFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.IdentifyFaces))\n", + "- Find similar: finds similar faces to the query face in a face list ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/FindSimilarFace.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.FindSimilarFace))\n", + "- Group: divides a group of faces into disjoint groups based on similarity ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/GroupFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.GroupFaces))\n", "\n", "### Speech\n", "[**Speech Services**](https://azure.microsoft.com/en-us/services/cognitive-services/speech-services/)\n", - "- Speech-to-text: transcribes audio streams ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/SpeechToText.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.SpeechToText))\n", + "- Speech-to-text: transcribes audio streams ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/SpeechToText.html), 
[Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.SpeechToText))\n", "\n", "### Language\n", "[**Text Analytics**](https://azure.microsoft.com/en-us/services/cognitive-services/text-analytics/)\n", - "- Language detection: detects language of the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/LanguageDetector.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.LanguageDetector))\n", - "- Key phrase extraction: identifies the key talking points in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/KeyPhraseExtractor.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.KeyPhraseExtractor))\n", - "- Named entity recognition: identifies known entities and general named entities in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/NER.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.NER))\n", - "- Sentiment analysis: returns a score betwee 0 and 1 indicating the sentiment in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/TextSentiment.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.TextSentiment))\n", + "- Language detection: detects language of the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/LanguageDetector.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.LanguageDetector))\n", + "- Key phrase extraction: identifies the key talking points in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/KeyPhraseExtractor.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.KeyPhraseExtractor))\n", + "- Named entity recognition: identifies known entities and general named entities in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/NER.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.NER))\n", + "- Sentiment analysis: returns a score betwee 0 and 1 indicating the sentiment in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/TextSentiment.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.TextSentiment))\n", "\n", "[**Translator**](https://azure.microsoft.com/en-us/services/cognitive-services/translator/)\n", - "- Translate: Translates text. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/Translate.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Translate))\n", - "- Transliterate: Converts text in one language from one script to another script. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/Transliterate.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Transliterate))\n", - "- Detect: Identifies the language of a piece of text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/Detect.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Detect))\n", - "- BreakSentence: Identifies the positioning of sentence boundaries in a piece of text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/BreakSentence.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.BreakSentence))\n", - "- Dictionary Lookup: Provides alternative translations for a word and a small number of idiomatic phrases. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DictionaryLookup.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DictionaryLookup))\n", - "- Dictionary Examples: Provides examples that show how terms in the dictionary are used in context. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DictionaryExamples.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DictionaryExamples))\n", - "- Document Translation: Translates documents across all supported languages and dialects while preserving document structure and data format. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DocumentTranslator.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DocumentTranslator))\n", + "- Translate: Translates text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/Translate.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Translate))\n", + "- Transliterate: Converts text in one language from one script to another script. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/Transliterate.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Transliterate))\n", + "- Detect: Identifies the language of a piece of text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/Detect.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Detect))\n", + "- BreakSentence: Identifies the positioning of sentence boundaries in a piece of text. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/BreakSentence.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.BreakSentence))\n", + "- Dictionary Lookup: Provides alternative translations for a word and a small number of idiomatic phrases. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DictionaryLookup.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DictionaryLookup))\n", + "- Dictionary Examples: Provides examples that show how terms in the dictionary are used in context. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DictionaryExamples.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DictionaryExamples))\n", + "- Document Translation: Translates documents across all supported languages and dialects while preserving document structure and data format. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DocumentTranslator.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DocumentTranslator))\n", "\n", "### Azure Form Recognizer\n", "[**Form Recognizer**](https://azure.microsoft.com/en-us/services/form-recognizer/)\n", - "- Analyze Layout: Extract text and layout information from a given document. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeLayout.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeLayout))\n", - "- Analyze Receipts: Detects and extracts data from receipts using optical character recognition (OCR) and our receipt model, enabling you to easily extract structured data from receipts such as merchant name, merchant phone number, transaction date, transaction total, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeReceipts.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeReceipts))\n", - "- Analyze Business Cards: Detects and extracts data from business cards using optical character recognition (OCR) and our business card model, enabling you to easily extract structured data from business cards such as contact names, company names, phone numbers, emails, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeBusinessCards.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeBusinessCards))\n", - "- Analyze Invoices: Detects and extracts data from invoices using optical character recognition (OCR) and our invoice understanding deep learning models, enabling you to easily extract structured data from invoices such as customer, vendor, invoice ID, invoice due date, total, invoice amount due, tax amount, ship to, bill to, line items and more. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeInvoices.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeInvoices))\n", - "- Analyze ID Documents: Detects and extracts data from identification documents using optical character recognition (OCR) and our ID document model, enabling you to easily extract structured data from ID documents such as first name, last name, date of birth, document number, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeIDDocuments.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeIDDocuments))\n", - "- Analyze Custom Form: Extracts information from forms (PDFs and images) into structured data based on a model created from a set of representative training forms. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeCustomModel.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeCustomModel))\n", - "- Get Custom Model: Get detailed information about a custom model. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/GetCustomModel.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/ListCustomModels.html))\n", - "- List Custom Models: Get information about all custom models. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/ListCustomModels.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.ListCustomModels))\n", + "- Analyze Layout: Extract text and layout information from a given document. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeLayout.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeLayout))\n", + "- Analyze Receipts: Detects and extracts data from receipts using optical character recognition (OCR) and our receipt model, enabling you to easily extract structured data from receipts such as merchant name, merchant phone number, transaction date, transaction total, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeReceipts.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeReceipts))\n", + "- Analyze Business Cards: Detects and extracts data from business cards using optical character recognition (OCR) and our business card model, enabling you to easily extract structured data from business cards such as contact names, company names, phone numbers, emails, and more. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeBusinessCards.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeBusinessCards))\n", + "- Analyze Invoices: Detects and extracts data from invoices using optical character recognition (OCR) and our invoice understanding deep learning models, enabling you to easily extract structured data from invoices such as customer, vendor, invoice ID, invoice due date, total, invoice amount due, tax amount, ship to, bill to, line items and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeInvoices.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeInvoices))\n", + "- Analyze ID Documents: Detects and extracts data from identification documents using optical character recognition (OCR) and our ID document model, enabling you to easily extract structured data from ID documents such as first name, last name, date of birth, document number, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeIDDocuments.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeIDDocuments))\n", + "- Analyze Custom Form: Extracts information from forms (PDFs and images) into structured data based on a model created from a set of representative training forms. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeCustomModel.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeCustomModel))\n", + "- Get Custom Model: Get detailed information about a custom model. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/GetCustomModel.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/ListCustomModels.html))\n", + "- List Custom Models: Get information about all custom models. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/ListCustomModels.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.ListCustomModels))\n", "\n", "### Decision\n", "[**Anomaly Detector**](https://azure.microsoft.com/en-us/services/cognitive-services/anomaly-detector/)\n", - "- Anomaly status of latest point: generates a model using preceding points and determines whether the latest point is anomalous ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DetectLastAnomaly.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectLastAnomaly))\n", - "- Find anomalies: generates a model using an entire series and finds anomalies in the series ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DetectAnomalies.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectAnomalies))\n", + "- Anomaly status of latest point: generates a model using preceding points and determines whether the latest point is anomalous ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DetectLastAnomaly.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectLastAnomaly))\n", + "- Find anomalies: generates a model using an entire series and finds anomalies in the series ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DetectAnomalies.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectAnomalies))\n", "\n", "### Search\n", - "- [Bing Image search](https://azure.microsoft.com/en-us/services/cognitive-services/bing-image-search-api/) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/BingImageSearch.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.BingImageSearch))\n", - "- [Azure Cognitive search](https://docs.microsoft.com/en-us/azure/search/search-what-is-azure-search) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/index.html#com.microsoft.azure.synapse.ml.cognitive.AzureSearchWriter$), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AzureSearchWriter))\n" + "- [Bing Image search](https://azure.microsoft.com/en-us/services/cognitive-services/bing-image-search-api/) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/BingImageSearch.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.BingImageSearch))\n", + "- [Azure Cognitive search](https://docs.microsoft.com/en-us/azure/search/search-what-is-azure-search) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/index.html#com.microsoft.azure.synapse.ml.cognitive.AzureSearchWriter$), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AzureSearchWriter))\n" ], "metadata": {} }, diff --git 
a/notebooks/CyberML - Anomalous Access Detection.ipynb b/notebooks/CyberML - Anomalous Access Detection.ipynb index 966bfe1db4..04980812d9 100644 --- a/notebooks/CyberML - Anomalous Access Detection.ipynb +++ b/notebooks/CyberML - Anomalous Access Detection.ipynb @@ -34,7 +34,7 @@ "# Create an Azure Databricks cluster and install the following libs\n", "\n", "1. In Cluster Libraries install from library source Maven:\n", - "Coordinates: com.microsoft.azure:synapseml:0.9.1\n", + "Coordinates: com.microsoft.azure:synapseml:0.9.2\n", "Repository: https://mmlspark.azureedge.net/maven\n", "\n", "2. In Cluster Libraries install from PyPI the library called plotly" diff --git a/notebooks/Regression - Auto Imports.ipynb b/notebooks/Regression - Auto Imports.ipynb index 3180ef5bc6..f6be7fe319 100644 --- a/notebooks/Regression - Auto Imports.ipynb +++ b/notebooks/Regression - Auto Imports.ipynb @@ -15,15 +15,15 @@ "\n", "This sample demonstrates the use of several members of the synapseml library:\n", "- [`TrainRegressor`\n", - " ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.train.html?#module-synapse.ml.train.TrainRegressor)\n", + " ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.train.html?#module-synapse.ml.train.TrainRegressor)\n", "- [`SummarizeData`\n", - " ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.stages.html?#module-synapse.ml.stages.SummarizeData)\n", + " ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.stages.html?#module-synapse.ml.stages.SummarizeData)\n", "- [`CleanMissingData`\n", - " ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.featurize.html?#module-synapse.ml.featurize.CleanMissingData)\n", + " ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.featurize.html?#module-synapse.ml.featurize.CleanMissingData)\n", "- [`ComputeModelStatistics`\n", - " ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.train.html?#module-synapse.ml.train.ComputeModelStatistics)\n", + " ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.train.html?#module-synapse.ml.train.ComputeModelStatistics)\n", "- [`FindBestModel`\n", - " ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.automl.html?#module-synapse.ml.automl.FindBestModel)\n", + " ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.automl.html?#module-synapse.ml.automl.FindBestModel)\n", "\n", "First, import the pandas package so that we can read and parse the datafile\n", "using `pandas.read_csv()`" diff --git a/notebooks/Regression - Flight Delays with DataCleaning.ipynb b/notebooks/Regression - Flight Delays with DataCleaning.ipynb index 7892f6a378..1d397e83a4 100644 --- a/notebooks/Regression - Flight Delays with DataCleaning.ipynb +++ b/notebooks/Regression - Flight Delays with DataCleaning.ipynb @@ -16,11 +16,11 @@ "\n", "This sample demonstrates how to use the following APIs:\n", "- [`TrainRegressor`\n", - " ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.train.html?#module-synapse.ml.train.TrainRegressor)\n", + " ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.train.html?#module-synapse.ml.train.TrainRegressor)\n", "- [`ComputePerInstanceStatistics`\n", - " ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.train.html?#module-synapse.ml.train.ComputePerInstanceStatistics)\n", + " 
](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.train.html?#module-synapse.ml.train.ComputePerInstanceStatistics)\n", "- [`DataConversion`\n", - " ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.featurize.html?#module-synapse.ml.featurize.DataConversion)\n", + " ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.featurize.html?#module-synapse.ml.featurize.DataConversion)\n", "\n", "First, import the pandas package" ] diff --git a/website/docs/about.md b/website/docs/about.md index da2951d9b3..61a1098d08 100644 --- a/website/docs/about.md +++ b/website/docs/about.md @@ -25,8 +25,8 @@ sub-millisecond latency web services, backed by your Spark cluster. SynapseML requires Scala 2.12, Spark 3.0+, and Python 3.6+. See the API documentation [for -Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/index.html#package) and [for -PySpark](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/index.html). +Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/index.html#package) and [for +PySpark](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/index.html). import Link from '@docusaurus/Link'; diff --git a/website/docs/documentation/estimators/_LightGBM.md b/website/docs/documentation/estimators/_LightGBM.md index a9927b744f..3fdb32209f 100644 --- a/website/docs/documentation/estimators/_LightGBM.md +++ b/website/docs/documentation/estimators/_LightGBM.md @@ -10,7 +10,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -96,7 +96,7 @@ from IPython.display import display from pyspark.sql.functions import * spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -168,7 +168,7 @@ from IPython.display import display from pyspark.sql.functions import * spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/estimators/_VW.md b/website/docs/documentation/estimators/_VW.md index 2e1348fef7..5c658d789d 100644 --- a/website/docs/documentation/estimators/_VW.md +++ b/website/docs/documentation/estimators/_VW.md @@ -10,7 +10,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -93,7 +93,7 @@ from IPython.display import display from pyspark.sql.functions import * spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", 
"https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/estimators/core/_AutoML.md b/website/docs/documentation/estimators/core/_AutoML.md index b4d092ca52..0b7d131423 100644 --- a/website/docs/documentation/estimators/core/_AutoML.md +++ b/website/docs/documentation/estimators/core/_AutoML.md @@ -10,7 +10,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -152,7 +152,7 @@ from IPython.display import display from pyspark.sql.functions import * spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/estimators/core/_Featurize.md b/website/docs/documentation/estimators/core/_Featurize.md index 92157a5d34..60092a70da 100644 --- a/website/docs/documentation/estimators/core/_Featurize.md +++ b/website/docs/documentation/estimators/core/_Featurize.md @@ -10,7 +10,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -121,7 +121,7 @@ from IPython.display import display from pyspark.sql.functions import * spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -198,7 +198,7 @@ from IPython.display import display from pyspark.sql.functions import * spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -286,7 +286,7 @@ from IPython.display import display from pyspark.sql.functions import * spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -361,7 +361,7 @@ from IPython.display import display from pyspark.sql.functions import * spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/estimators/core/_IsolationForest.md b/website/docs/documentation/estimators/core/_IsolationForest.md index ae84a4936f..8d66c167f2 100644 --- a/website/docs/documentation/estimators/core/_IsolationForest.md +++ 
b/website/docs/documentation/estimators/core/_IsolationForest.md @@ -10,7 +10,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/estimators/core/_NN.md b/website/docs/documentation/estimators/core/_NN.md index 02092cdcf5..5aa5dcae50 100644 --- a/website/docs/documentation/estimators/core/_NN.md +++ b/website/docs/documentation/estimators/core/_NN.md @@ -10,7 +10,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -85,7 +85,7 @@ from IPython.display import display from pyspark.sql.functions import * spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/estimators/core/_Recommendation.md b/website/docs/documentation/estimators/core/_Recommendation.md index 992ad410ca..9135ee55bf 100644 --- a/website/docs/documentation/estimators/core/_Recommendation.md +++ b/website/docs/documentation/estimators/core/_Recommendation.md @@ -10,7 +10,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -253,7 +253,7 @@ from IPython.display import display from pyspark.sql.functions import * spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/estimators/core/_Stages.md b/website/docs/documentation/estimators/core/_Stages.md index c1c910ad58..a9c2822e1e 100644 --- a/website/docs/documentation/estimators/core/_Stages.md +++ b/website/docs/documentation/estimators/core/_Stages.md @@ -10,7 +10,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -109,7 +109,7 @@ from IPython.display import display from pyspark.sql.functions import * spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -195,7 +195,7 @@ from IPython.display 
import display from pyspark.sql.functions import * spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/estimators/core/_Train.md b/website/docs/documentation/estimators/core/_Train.md index 0cda3a70b0..7cb0e687a0 100644 --- a/website/docs/documentation/estimators/core/_Train.md +++ b/website/docs/documentation/estimators/core/_Train.md @@ -10,7 +10,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -121,7 +121,7 @@ from IPython.display import display from pyspark.sql.functions import * spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/transformers/_OpenCV.md b/website/docs/documentation/transformers/_OpenCV.md index 3864bfe6d7..22f8e85360 100644 --- a/website/docs/documentation/transformers/_OpenCV.md +++ b/website/docs/documentation/transformers/_OpenCV.md @@ -10,7 +10,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -96,7 +96,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/transformers/_VW.md b/website/docs/documentation/transformers/_VW.md index f647433eb9..deb56683a8 100644 --- a/website/docs/documentation/transformers/_VW.md +++ b/website/docs/documentation/transformers/_VW.md @@ -10,7 +10,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -120,7 +120,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -186,7 +186,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + 
.config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -254,7 +254,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/transformers/cognitive/_AnomalyDetection.md b/website/docs/documentation/transformers/cognitive/_AnomalyDetection.md index 185783181d..0f5e9a12a1 100644 --- a/website/docs/documentation/transformers/cognitive/_AnomalyDetection.md +++ b/website/docs/documentation/transformers/cognitive/_AnomalyDetection.md @@ -11,7 +11,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -145,7 +145,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -263,7 +263,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/transformers/cognitive/_AzureSearch.md b/website/docs/documentation/transformers/cognitive/_AzureSearch.md index bedacee18b..adbc038426 100644 --- a/website/docs/documentation/transformers/cognitive/_AzureSearch.md +++ b/website/docs/documentation/transformers/cognitive/_AzureSearch.md @@ -14,7 +14,7 @@ os.environ["PYSPARK_DRIVER_PYTHON"] = "jupyter" os.environ["PYSPARK_DRIVER_PYTHON_OPTS"] = "notebook" spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/transformers/cognitive/_BingImageSearch.md b/website/docs/documentation/transformers/cognitive/_BingImageSearch.md index 7855fbd15f..a80fe56213 100644 --- a/website/docs/documentation/transformers/cognitive/_BingImageSearch.md +++ b/website/docs/documentation/transformers/cognitive/_BingImageSearch.md @@ -14,7 +14,7 @@ os.environ["PYSPARK_DRIVER_PYTHON"] = "jupyter" os.environ["PYSPARK_DRIVER_PYTHON_OPTS"] = "notebook" spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", 
"com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/transformers/cognitive/_ComputerVision.md b/website/docs/documentation/transformers/cognitive/_ComputerVision.md index 671d7267da..03e87b1323 100644 --- a/website/docs/documentation/transformers/cognitive/_ComputerVision.md +++ b/website/docs/documentation/transformers/cognitive/_ComputerVision.md @@ -11,7 +11,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -108,7 +108,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -200,7 +200,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -289,7 +289,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -376,7 +376,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -459,7 +459,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -546,7 +546,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -627,7 +627,7 @@ from IPython.display import display from pyspark.sql.functions import col, 
collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/transformers/cognitive/_Face.md b/website/docs/documentation/transformers/cognitive/_Face.md index 625cf6f5c9..33f4e0cf28 100644 --- a/website/docs/documentation/transformers/cognitive/_Face.md +++ b/website/docs/documentation/transformers/cognitive/_Face.md @@ -11,7 +11,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -113,7 +113,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -225,7 +225,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -335,7 +335,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -410,7 +410,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/transformers/cognitive/_FormRecognizer.md b/website/docs/documentation/transformers/cognitive/_FormRecognizer.md index de5dd48ed9..655d7bb646 100644 --- a/website/docs/documentation/transformers/cognitive/_FormRecognizer.md +++ b/website/docs/documentation/transformers/cognitive/_FormRecognizer.md @@ -11,7 +11,7 @@ from IPython.display import display from pyspark.sql.functions import col, flatten, regexp_replace, explode, create_map, lit spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -113,7 
+113,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -198,7 +198,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -281,7 +281,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct, explode spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -367,7 +367,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct, explode spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -453,7 +453,7 @@ from IPython.display import display from pyspark.sql.functions import * spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -546,7 +546,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -633,7 +633,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/transformers/cognitive/_SpeechToText.md b/website/docs/documentation/transformers/cognitive/_SpeechToText.md index 068c9ea148..56b6af5225 100644 --- a/website/docs/documentation/transformers/cognitive/_SpeechToText.md +++ b/website/docs/documentation/transformers/cognitive/_SpeechToText.md @@ -11,7 +11,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + 
.config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -114,7 +114,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/transformers/cognitive/_TextAnalytics.md b/website/docs/documentation/transformers/cognitive/_TextAnalytics.md index 6d11f26d86..7f3c9503de 100644 --- a/website/docs/documentation/transformers/cognitive/_TextAnalytics.md +++ b/website/docs/documentation/transformers/cognitive/_TextAnalytics.md @@ -11,7 +11,7 @@ from IPython.display import display from pyspark.sql.functions import col spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -108,7 +108,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -195,7 +195,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -284,7 +284,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -368,7 +368,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -455,7 +455,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/transformers/cognitive/_Translator.md 
b/website/docs/documentation/transformers/cognitive/_Translator.md index 557041141d..3e68c3d9e9 100644 --- a/website/docs/documentation/transformers/cognitive/_Translator.md +++ b/website/docs/documentation/transformers/cognitive/_Translator.md @@ -11,7 +11,7 @@ from IPython.display import display from pyspark.sql.functions import col, flatten spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -115,7 +115,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -209,7 +209,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -295,7 +295,7 @@ from IPython.display import display from pyspark.sql.functions import * spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -381,7 +381,7 @@ from IPython.display import display from pyspark.sql.functions import * spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -473,7 +473,7 @@ from IPython.display import display from pyspark.sql.functions import * spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -561,7 +561,7 @@ from IPython.display import display from pyspark.sql.functions import col, collect_list, lit, sort_array, struct spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/transformers/core/_Explainers.md b/website/docs/documentation/transformers/core/_Explainers.md index bfc381e17f..573250e81a 100644 --- a/website/docs/documentation/transformers/core/_Explainers.md +++ b/website/docs/documentation/transformers/core/_Explainers.md @@ -10,7 +10,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", 
"com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -104,7 +104,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -189,7 +189,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -281,7 +281,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -371,7 +371,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -451,7 +451,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -529,7 +529,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -629,7 +629,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/transformers/core/_Featurize.md b/website/docs/documentation/transformers/core/_Featurize.md index dd8f7b0fcb..bd61b8fb56 100644 --- a/website/docs/documentation/transformers/core/_Featurize.md +++ b/website/docs/documentation/transformers/core/_Featurize.md @@ -10,7 +10,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -100,7 +100,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", 
"com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -183,7 +183,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -279,7 +279,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/transformers/core/_IO.md b/website/docs/documentation/transformers/core/_IO.md index 91cd9e8b02..8a7e38c526 100644 --- a/website/docs/documentation/transformers/core/_IO.md +++ b/website/docs/documentation/transformers/core/_IO.md @@ -10,7 +10,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -95,7 +95,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -166,7 +166,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -229,7 +229,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -294,7 +294,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -355,7 +355,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -417,7 +417,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", 
"com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/transformers/core/_Image.md b/website/docs/documentation/transformers/core/_Image.md index a96de7be85..6e8984a230 100644 --- a/website/docs/documentation/transformers/core/_Image.md +++ b/website/docs/documentation/transformers/core/_Image.md @@ -10,7 +10,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -98,7 +98,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -187,7 +187,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/transformers/core/_Stages.md b/website/docs/documentation/transformers/core/_Stages.md index 82b57e7561..4a97eae2e5 100644 --- a/website/docs/documentation/transformers/core/_Stages.md +++ b/website/docs/documentation/transformers/core/_Stages.md @@ -10,7 +10,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -113,7 +113,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -186,7 +186,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -267,7 +267,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -340,7 +340,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", 
"com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -424,7 +424,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -490,7 +490,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -551,7 +551,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -620,7 +620,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -689,7 +689,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -762,7 +762,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -853,7 +853,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -926,7 +926,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -1015,7 +1015,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -1088,7 +1088,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", 
"com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -1181,7 +1181,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -1266,7 +1266,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/transformers/core/_SuperpixelTransformer.md b/website/docs/documentation/transformers/core/_SuperpixelTransformer.md index ae2583711b..622ffea5e0 100644 --- a/website/docs/documentation/transformers/core/_SuperpixelTransformer.md +++ b/website/docs/documentation/transformers/core/_SuperpixelTransformer.md @@ -10,7 +10,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/transformers/core/_Train.md b/website/docs/documentation/transformers/core/_Train.md index 5077cfaa31..e00aa728cf 100644 --- a/website/docs/documentation/transformers/core/_Train.md +++ b/website/docs/documentation/transformers/core/_Train.md @@ -10,7 +10,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) @@ -100,7 +100,7 @@ import json from IPython.display import display spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/docs/documentation/transformers/deep_learning/_ONNXModel.md b/website/docs/documentation/transformers/deep_learning/_ONNXModel.md index 9e696c891e..e10f0fb789 100644 --- a/website/docs/documentation/transformers/deep_learning/_ONNXModel.md +++ b/website/docs/documentation/transformers/deep_learning/_ONNXModel.md @@ -7,7 +7,7 @@ import DocTable from "@theme/DocumentationTable"; import pyspark import os spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) ``` diff --git a/website/docs/examples/CyberML - Anomalous Access Detection.md b/website/docs/examples/CyberML - Anomalous Access Detection.md index 97723b1daf..196df99174 100644 --- 
a/website/docs/examples/CyberML - Anomalous Access Detection.md +++ b/website/docs/examples/CyberML - Anomalous Access Detection.md @@ -28,7 +28,7 @@ Note: the data does NOT contain information about departments, this information # Create an Azure Databricks cluster and install the following libs 1. In Cluster Libraries install from library source Maven: -Coordinates: com.microsoft.azure:synapseml:0.9.1 +Coordinates: com.microsoft.azure:synapseml:0.9.2 Repository: https://mmlspark.azureedge.net/maven 2. In Cluster Libraries install from PyPI the library called plotly diff --git a/website/docs/examples/regression/Regression - Auto Imports.md b/website/docs/examples/regression/Regression - Auto Imports.md index f6a5976d5a..407bae96ef 100644 --- a/website/docs/examples/regression/Regression - Auto Imports.md +++ b/website/docs/examples/regression/Regression - Auto Imports.md @@ -14,15 +14,15 @@ and evaluating the model on the Automobile Imports data set. This sample demonstrates the use of several members of the synapseml library: - [`TrainRegressor` - ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.train.html?#module-synapse.ml.train.TrainRegressor) + ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.train.html?#module-synapse.ml.train.TrainRegressor) - [`SummarizeData` - ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.stages.html?#module-synapse.ml.stages.SummarizeData) + ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.stages.html?#module-synapse.ml.stages.SummarizeData) - [`CleanMissingData` - ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.featurize.html?#module-synapse.ml.featurize.CleanMissingData) + ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.featurize.html?#module-synapse.ml.featurize.CleanMissingData) - [`ComputeModelStatistics` - ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.train.html?#module-synapse.ml.train.ComputeModelStatistics) + ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.train.html?#module-synapse.ml.train.ComputeModelStatistics) - [`FindBestModel` - ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.automl.html?#module-synapse.ml.automl.FindBestModel) + ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.automl.html?#module-synapse.ml.automl.FindBestModel) First, import the pandas package so that we can read and parse the datafile using `pandas.read_csv()` diff --git a/website/docs/examples/regression/Regression - Flight Delays with DataCleaning.md b/website/docs/examples/regression/Regression - Flight Delays with DataCleaning.md index 48b6738345..d86ed446f9 100644 --- a/website/docs/examples/regression/Regression - Flight Delays with DataCleaning.md +++ b/website/docs/examples/regression/Regression - Flight Delays with DataCleaning.md @@ -15,11 +15,11 @@ instead of iterating over the columns and applying the `StringIndexer`. 
This sample demonstrates how to use the following APIs: - [`TrainRegressor` - ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.train.html?#module-synapse.ml.train.TrainRegressor) + ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.train.html?#module-synapse.ml.train.TrainRegressor) - [`ComputePerInstanceStatistics` - ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.train.html?#module-synapse.ml.train.ComputePerInstanceStatistics) + ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.train.html?#module-synapse.ml.train.ComputePerInstanceStatistics) - [`DataConversion` - ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.featurize.html?#module-synapse.ml.featurize.DataConversion) + ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.featurize.html?#module-synapse.ml.featurize.DataConversion) First, import the pandas package diff --git a/website/docs/features/CognitiveServices - Overview.md b/website/docs/features/CognitiveServices - Overview.md index 59694c4598..cb607de9c2 100644 --- a/website/docs/features/CognitiveServices - Overview.md +++ b/website/docs/features/CognitiveServices - Overview.md @@ -13,60 +13,60 @@ status: stable ### Vision [**Computer Vision**](https://azure.microsoft.com/en-us/services/cognitive-services/computer-vision/) -- Describe: provides description of an image in human readable language ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DescribeImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DescribeImage)) -- Analyze (color, image type, face, adult/racy content): analyzes visual features of an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeImage)) -- OCR: reads text from an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/OCR.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.OCR)) -- Recognize Text: reads text from an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/RecognizeText.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.RecognizeText)) -- Thumbnail: generates a thumbnail of user-specified size from the image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/GenerateThumbnails.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.GenerateThumbnails)) -- Recognize domain-specific content: recognizes domain-specific content (celebrity, landmark) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/RecognizeDomainSpecificContent.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.RecognizeDomainSpecificContent)) -- Tag: identifies list of words that are relevant to the in0put image 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/TagImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.TagImage)) +- Describe: provides description of an image in human readable language ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DescribeImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DescribeImage)) +- Analyze (color, image type, face, adult/racy content): analyzes visual features of an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeImage)) +- OCR: reads text from an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/OCR.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.OCR)) +- Recognize Text: reads text from an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/RecognizeText.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.RecognizeText)) +- Thumbnail: generates a thumbnail of user-specified size from the image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/GenerateThumbnails.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.GenerateThumbnails)) +- Recognize domain-specific content: recognizes domain-specific content (celebrity, landmark) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/RecognizeDomainSpecificContent.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.RecognizeDomainSpecificContent)) +- Tag: identifies list of words that are relevant to the in0put image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/TagImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.TagImage)) [**Face**](https://azure.microsoft.com/en-us/services/cognitive-services/face/) -- Detect: detects human faces in an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DetectFace.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectFace)) -- Verify: verifies whether two faces belong to a same person, or a face belongs to a person ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/VerifyFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.VerifyFaces)) -- Identify: finds the closest matches of the specific query person face from a person group ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/IdentifyFaces.html), 
[Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.IdentifyFaces)) -- Find similar: finds similar faces to the query face in a face list ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/FindSimilarFace.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.FindSimilarFace)) -- Group: divides a group of faces into disjoint groups based on similarity ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/GroupFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.GroupFaces)) +- Detect: detects human faces in an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DetectFace.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectFace)) +- Verify: verifies whether two faces belong to a same person, or a face belongs to a person ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/VerifyFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.VerifyFaces)) +- Identify: finds the closest matches of the specific query person face from a person group ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/IdentifyFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.IdentifyFaces)) +- Find similar: finds similar faces to the query face in a face list ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/FindSimilarFace.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.FindSimilarFace)) +- Group: divides a group of faces into disjoint groups based on similarity ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/GroupFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.GroupFaces)) ### Speech [**Speech Services**](https://azure.microsoft.com/en-us/services/cognitive-services/speech-services/) -- Speech-to-text: transcribes audio streams ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/SpeechToText.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.SpeechToText)) +- Speech-to-text: transcribes audio streams ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/SpeechToText.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.SpeechToText)) ### Language [**Text Analytics**](https://azure.microsoft.com/en-us/services/cognitive-services/text-analytics/) -- Language detection: detects language of the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/LanguageDetector.html), 
[Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.LanguageDetector)) -- Key phrase extraction: identifies the key talking points in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/KeyPhraseExtractor.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.KeyPhraseExtractor)) -- Named entity recognition: identifies known entities and general named entities in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/NER.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.NER)) -- Sentiment analysis: returns a score betwee 0 and 1 indicating the sentiment in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/TextSentiment.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.TextSentiment)) +- Language detection: detects language of the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/LanguageDetector.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.LanguageDetector)) +- Key phrase extraction: identifies the key talking points in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/KeyPhraseExtractor.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.KeyPhraseExtractor)) +- Named entity recognition: identifies known entities and general named entities in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/NER.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.NER)) +- Sentiment analysis: returns a score betwee 0 and 1 indicating the sentiment in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/TextSentiment.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.TextSentiment)) [**Translator**](https://azure.microsoft.com/en-us/services/cognitive-services/translator/) -- Translate: Translates text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/Translate.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Translate)) -- Transliterate: Converts text in one language from one script to another script. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/Transliterate.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Transliterate)) -- Detect: Identifies the language of a piece of text. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/Detect.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Detect)) -- BreakSentence: Identifies the positioning of sentence boundaries in a piece of text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/BreakSentence.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.BreakSentence)) -- Dictionary Lookup: Provides alternative translations for a word and a small number of idiomatic phrases. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DictionaryLookup.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DictionaryLookup)) -- Dictionary Examples: Provides examples that show how terms in the dictionary are used in context. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DictionaryExamples.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DictionaryExamples)) -- Document Translation: Translates documents across all supported languages and dialects while preserving document structure and data format. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DocumentTranslator.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DocumentTranslator)) +- Translate: Translates text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/Translate.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Translate)) +- Transliterate: Converts text in one language from one script to another script. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/Transliterate.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Transliterate)) +- Detect: Identifies the language of a piece of text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/Detect.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Detect)) +- BreakSentence: Identifies the positioning of sentence boundaries in a piece of text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/BreakSentence.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.BreakSentence)) +- Dictionary Lookup: Provides alternative translations for a word and a small number of idiomatic phrases. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DictionaryLookup.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DictionaryLookup)) +- Dictionary Examples: Provides examples that show how terms in the dictionary are used in context. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DictionaryExamples.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DictionaryExamples)) +- Document Translation: Translates documents across all supported languages and dialects while preserving document structure and data format. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DocumentTranslator.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DocumentTranslator)) ### Azure Form Recognizer [**Form Recognizer**](https://azure.microsoft.com/en-us/services/form-recognizer/) -- Analyze Layout: Extract text and layout information from a given document. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeLayout.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeLayout)) -- Analyze Receipts: Detects and extracts data from receipts using optical character recognition (OCR) and our receipt model, enabling you to easily extract structured data from receipts such as merchant name, merchant phone number, transaction date, transaction total, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeReceipts.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeReceipts)) -- Analyze Business Cards: Detects and extracts data from business cards using optical character recognition (OCR) and our business card model, enabling you to easily extract structured data from business cards such as contact names, company names, phone numbers, emails, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeBusinessCards.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeBusinessCards)) -- Analyze Invoices: Detects and extracts data from invoices using optical character recognition (OCR) and our invoice understanding deep learning models, enabling you to easily extract structured data from invoices such as customer, vendor, invoice ID, invoice due date, total, invoice amount due, tax amount, ship to, bill to, line items and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeInvoices.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeInvoices)) -- Analyze ID Documents: Detects and extracts data from identification documents using optical character recognition (OCR) and our ID document model, enabling you to easily extract structured data from ID documents such as first name, last name, date of birth, document number, and more. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeIDDocuments.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeIDDocuments)) -- Analyze Custom Form: Extracts information from forms (PDFs and images) into structured data based on a model created from a set of representative training forms. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeCustomModel.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeCustomModel)) -- Get Custom Model: Get detailed information about a custom model. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/GetCustomModel.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/ListCustomModels.html)) -- List Custom Models: Get information about all custom models. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/ListCustomModels.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.ListCustomModels)) +- Analyze Layout: Extract text and layout information from a given document. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeLayout.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeLayout)) +- Analyze Receipts: Detects and extracts data from receipts using optical character recognition (OCR) and our receipt model, enabling you to easily extract structured data from receipts such as merchant name, merchant phone number, transaction date, transaction total, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeReceipts.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeReceipts)) +- Analyze Business Cards: Detects and extracts data from business cards using optical character recognition (OCR) and our business card model, enabling you to easily extract structured data from business cards such as contact names, company names, phone numbers, emails, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeBusinessCards.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeBusinessCards)) +- Analyze Invoices: Detects and extracts data from invoices using optical character recognition (OCR) and our invoice understanding deep learning models, enabling you to easily extract structured data from invoices such as customer, vendor, invoice ID, invoice due date, total, invoice amount due, tax amount, ship to, bill to, line items and more. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeInvoices.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeInvoices)) +- Analyze ID Documents: Detects and extracts data from identification documents using optical character recognition (OCR) and our ID document model, enabling you to easily extract structured data from ID documents such as first name, last name, date of birth, document number, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeIDDocuments.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeIDDocuments)) +- Analyze Custom Form: Extracts information from forms (PDFs and images) into structured data based on a model created from a set of representative training forms. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeCustomModel.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeCustomModel)) +- Get Custom Model: Get detailed information about a custom model. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/GetCustomModel.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/ListCustomModels.html)) +- List Custom Models: Get information about all custom models. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/ListCustomModels.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.ListCustomModels)) ### Decision [**Anomaly Detector**](https://azure.microsoft.com/en-us/services/cognitive-services/anomaly-detector/) -- Anomaly status of latest point: generates a model using preceding points and determines whether the latest point is anomalous ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DetectLastAnomaly.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectLastAnomaly)) -- Find anomalies: generates a model using an entire series and finds anomalies in the series ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DetectAnomalies.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectAnomalies)) +- Anomaly status of latest point: generates a model using preceding points and determines whether the latest point is anomalous ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DetectLastAnomaly.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectLastAnomaly)) +- Find anomalies: generates a model using an entire series and finds anomalies in the series ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DetectAnomalies.html), 
[Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectAnomalies)) ### Search -- [Bing Image search](https://azure.microsoft.com/en-us/services/cognitive-services/bing-image-search-api/) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/BingImageSearch.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.BingImageSearch)) -- [Azure Cognitive search](https://docs.microsoft.com/en-us/azure/search/search-what-is-azure-search) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/index.html#com.microsoft.azure.synapse.ml.cognitive.AzureSearchWriter$), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AzureSearchWriter)) +- [Bing Image search](https://azure.microsoft.com/en-us/services/cognitive-services/bing-image-search-api/) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/BingImageSearch.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.BingImageSearch)) +- [Azure Cognitive search](https://docs.microsoft.com/en-us/azure/search/search-what-is-azure-search) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/index.html#com.microsoft.azure.synapse.ml.cognitive.AzureSearchWriter$), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AzureSearchWriter)) ## Prerequisites diff --git a/website/docs/getting_started/installation.md b/website/docs/getting_started/installation.md index 0a9660391f..9e227bf505 100644 --- a/website/docs/getting_started/installation.md +++ b/website/docs/getting_started/installation.md @@ -12,7 +12,7 @@ the above example, or from python: ```python import pyspark spark = pyspark.sql.SparkSession.builder.appName("MyApp") \ - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") \ + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") \ .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") \ .getOrCreate() import synapse.ml @@ -25,7 +25,7 @@ your `build.sbt`: ```scala resolvers += "SynapseML" at "https://mmlspark.azureedge.net/maven" -libraryDependencies += "com.microsoft.azure" %% "synapseml" % "0.9.1" +libraryDependencies += "com.microsoft.azure" %% "synapseml" % "0.9.2" ``` @@ -35,9 +35,9 @@ SynapseML can be conveniently installed on existing Spark clusters via the `--packages` option, examples: ```bash -spark-shell --packages com.microsoft.azure:synapseml:0.9.1 -pyspark --packages com.microsoft.azure:synapseml:0.9.1 -spark-submit --packages com.microsoft.azure:synapseml:0.9.1 MyApp.jar +spark-shell --packages com.microsoft.azure:synapseml:0.9.2 +pyspark --packages com.microsoft.azure:synapseml:0.9.2 +spark-submit --packages com.microsoft.azure:synapseml:0.9.2 MyApp.jar ``` This can be used in other Spark contexts too. For example, you can use SynapseML @@ -52,7 +52,7 @@ cloud](http://community.cloud.databricks.com), create a new [library from Maven coordinates](https://docs.databricks.com/user-guide/libraries.html#libraries-from-maven-pypi-or-spark-packages) in your workspace. 
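(Editorial aside, not part of this patch: to make the transformer list above concrete, here is a minimal PySpark sketch of calling one of the Text Analytics transformers once the `synapseml` package is attached to the cluster. The subscription key, region, and column names are placeholders rather than values from this repository; the Databricks instructions continue with the exact Maven coordinates right after this aside.)

```python
# Illustrative sketch only (not part of this patch). The key, region, and
# column names below are placeholders.
from pyspark.sql import SparkSession
from synapse.ml.cognitive import TextSentiment

spark = SparkSession.builder.getOrCreate()
df = spark.createDataFrame(
    [("SynapseML makes Spark pipelines easy!",), ("This failed to run.",)],
    ["text"],
)

sentiment = (
    TextSentiment()
    .setSubscriptionKey("<your-text-analytics-key>")  # placeholder key
    .setLocation("eastus")                            # placeholder region
    .setTextCol("text")
    .setOutputCol("sentiment")
    .setErrorCol("error")
)

sentiment.transform(df).show(truncate=False)
```

Most of the Cognitive Services transformers listed above follow this same transform-only pattern; only the input/output columns and the service key change.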
-For the coordinates use: `com.microsoft.azure:synapseml:0.9.1` +For the coordinates use: `com.microsoft.azure:synapseml:0.9.2` with the resolver: `https://mmlspark.azureedge.net/maven`. Ensure this library is attached to your target cluster(s). @@ -60,7 +60,7 @@ Finally, ensure that your Spark cluster has at least Spark 3.12 and Scala 2.12. You can use SynapseML in both your Scala and PySpark notebooks. To get started with our example notebooks import the following databricks archive: -`https://mmlspark.blob.core.windows.net/dbcs/SynapseMLExamplesv0.9.1.dbc` +`https://mmlspark.blob.core.windows.net/dbcs/SynapseMLExamplesv0.9.2.dbc` ### Apache Livy and HDInsight @@ -73,7 +73,7 @@ Excluding certain packages from the library may be necessary due to current issu { "name": "synapseml", "conf": { - "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.1", + "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.2", "spark.jars.repositories": "https://mmlspark.azureedge.net/maven", "spark.jars.excludes": "org.scala-lang:scala-reflect,org.apache.spark:spark-tags_2.12,org.scalactic:scalactic_2.12,org.scalatest:scalatest_2.12" } @@ -87,7 +87,7 @@ In Azure Synapse, "spark.yarn.user.classpath.first" should be set to "true" to o { "name": "synapseml", "conf": { - "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.1", + "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.2", "spark.jars.repositories": "https://mmlspark.azureedge.net/maven", "spark.jars.excludes": "org.scala-lang:scala-reflect,org.apache.spark:spark-tags_2.12,org.scalactic:scalactic_2.12,org.scalatest:scalatest_2.12", "spark.yarn.user.classpath.first": "true" diff --git a/website/docs/reference/R-setup.md b/website/docs/reference/R-setup.md index fbc7f8aa61..539273f689 100644 --- a/website/docs/reference/R-setup.md +++ b/website/docs/reference/R-setup.md @@ -18,7 +18,7 @@ To install the current SynapseML package for R use: ```R ... -devtools::install_url("https://mmlspark.azureedge.net/rrr/synapseml-0.9.1.zip") +devtools::install_url("https://mmlspark.azureedge.net/rrr/synapseml-0.9.2.zip") ... ``` @@ -31,7 +31,7 @@ It will take some time to install all dependencies. Then, run: library(sparklyr) library(dplyr) config <- spark_config() -config$sparklyr.defaultPackages <- "com.microsoft.azure:synapseml:0.9.1" +config$sparklyr.defaultPackages <- "com.microsoft.azure:synapseml:0.9.2" sc <- spark_connect(master = "local", config = config) ... ``` @@ -91,7 +91,7 @@ and then use spark_connect with method = "databricks": ```R install.packages("devtools") -devtools::install_url("https://mmlspark.azureedge.net/rrr/synapseml-0.9.1.zip") +devtools::install_url("https://mmlspark.azureedge.net/rrr/synapseml-0.9.2.zip") library(sparklyr) library(dplyr) sc <- spark_connect(method = "databricks") diff --git a/website/docs/reference/cyber.md b/website/docs/reference/cyber.md index cf2a98faf2..3570e04f7a 100644 --- a/website/docs/reference/cyber.md +++ b/website/docs/reference/cyber.md @@ -18,50 +18,50 @@ sidebar_label: CyberML (i.e., it returns a sample from the complement set). ## feature engineering: [indexers.py](https://github.com/microsoft/SynapseML/blob/master/core/src/main/python/synapse/ml/cyber/feature/indexers.py) -1. [IdIndexer](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.IdIndexer) +1. 
[IdIndexer](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.IdIndexer) is a SparkML [Estimator](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Estimator.html). Given a dataframe, it creates an IdIndexerModel (described next) for categorical features which contains the information to map each partition and column seen in the given dataframe to an id. for each partition or one consecutive range for all partition and column values. -2. [IdIndexerModel](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.IdIndexerModel) +2. [IdIndexerModel](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.IdIndexerModel) is a SparkML [Transformer](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Transformer.html). Given a dataframe maps each partition and column field to a consecutive integer id. Partitions or column values not encountered in the estimator are mapped to 0. The model can operate in two modes, either create consecutive integer id independently -3. [MultiIndexer](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.MultiIndexer) +3. [MultiIndexer](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.MultiIndexer) is a SparkML [Estimator](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Estimator.html). Uses multiple IdIndexer to generate a MultiIndexerModel (described next) for categorical features which contains multiple IdIndexers for multiple partitions and columns. -4. [MultiIndexerModel](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.MultiIndexerModel) +4. [MultiIndexerModel](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.MultiIndexerModel) is a SparkML [Transformer](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Transformer.html). Given a dataframe maps each partition and column field to a consecutive integer id. Partitions or column values not encountered in the estimator are mapped to 0. The model can operate in two modes, either create consecutive integer id independently ## feature engineering: [scalers.py](https://github.com/microsoft/SynapseML/blob/master/core/src/main/python/synapse/ml/cyber/feature/scalers.py) -1. [StandardScalarScaler](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.StandardScalarScaler) +1. [StandardScalarScaler](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.StandardScalarScaler) is a SparkML [Estimator](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Estimator.html). Given a dataframe it creates a StandardScalarScalerModel (described next) which normalizes any given dataframe according to the mean and standard deviation calculated on the dataframe given to the estimator. -2. [StandardScalarScalerModel](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.StandardScalarScalerModel) +2. 
[StandardScalarScalerModel](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.StandardScalarScalerModel) is a SparkML [Transformer](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Transformer.html). Given a dataframe with a value column x, the transformer changes its value as follows: x'=(x-mean)/stddev, i.e., if the transformer is given the same dataframe the estimator was given then the value column will have a mean of 0.0 and a standard deviation of 1.0. -3. [LinearScalarScaler](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.LinearScalarScaler) +3. [LinearScalarScaler](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.LinearScalarScaler) is a SparkML [Estimator](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Estimator.html). Given a dataframe it creates a LinearScalarScalerModel (described next) which normalizes any given dataframe according to the minimum and maximum values calculated on the dataframe given to the estimator. -4. [LinearScalarScalerModel](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.LinearScalarScalerModel) +4. [LinearScalarScalerModel](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.LinearScalarScalerModel) is a SparkML [Transformer](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Transformer.html). Given a dataframe with a value column x, the transformer changes its value such that if the transformer is given the same dataframe the estimator was given then the value column will be scaled linearly to the given ranges. ## access anomalies: [collaborative_filtering.py](https://github.com/microsoft/SynapseML/blob/master/core/src/main/python/synapse/ml/cyber/anomaly/collaborative_filtering.py) -1. [AccessAnomaly](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.AccessAnomaly) +1. [AccessAnomaly](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.AccessAnomaly) is a SparkML [Estimator](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Estimator.html). Given a dataframe the estimator generates an AccessAnomalyModel (next described) which can detect anomalous access of users to resources in such a way where the access @@ -69,14 +69,14 @@ sidebar_label: CyberML a resource from Finance. This is based solely on access patterns rather than explicit features. Internally this is based on Collaborative Filtering as implemented in Spark using Matrix Factorization with Alternating Least Squares. -2. [AccessAnomalyModel](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.AccessAnomalyModel) +2. [AccessAnomalyModel](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.AccessAnomalyModel) is a SparkML [Transformer](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Transformer.html). 
Given a dataframe the transformer computes a value between (-inf, inf) where positive values indicate an anomaly score. Anomaly scores are computed to have a mean of 1.0 and a standard deviation of 1.0 over the original dataframe given to the estimator. -3. [ModelNormalizeTransformer](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.ModelNormalizeTransformer) +3. [ModelNormalizeTransformer](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.ModelNormalizeTransformer) is a SparkML [Transformer](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Transformer.html). This is a transformer used internally by AccessAnomaly to normalize a model to generate anomaly scores with mean 0.0 and standard deviation of 1.0. -4. [AccessAnomalyConfig](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.AccessAnomalyConfig) +4. [AccessAnomalyConfig](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.AccessAnomalyConfig) contains the default values for AccessAnomaly. diff --git a/website/docs/reference/docker.md b/website/docs/reference/docker.md index 43e87a50d2..7880e61569 100644 --- a/website/docs/reference/docker.md +++ b/website/docs/reference/docker.md @@ -32,7 +32,7 @@ You can now select one of the sample notebooks and run it, or create your own. In the above, `mcr.microsoft.com/mmlspark/release` specifies the project and image name that you want to run. There is another component implicit here which is the _tsag_ (= version) that you want to use — specifying it explicitly looks like -`mcr.microsoft.com/mmlspark/release:0.9.1` for the `0.9.1` tag. +`mcr.microsoft.com/mmlspark/release:0.9.2` for the `0.9.2` tag. Leaving `mcr.microsoft.com/mmlspark/release` by itself has an implicit `latest` tag, so it is equivalent to `mcr.microsoft.com/mmlspark/release:latest`. The `latest` tag is identical to the @@ -48,7 +48,7 @@ that you will probably want to use can look as follows: docker run -it --rm \ -p 127.0.0.1:80:8888 \ -v ~/myfiles:/notebooks/myfiles \ - mcr.microsoft.com/mmlspark/release:0.9.1 + mcr.microsoft.com/mmlspark/release:0.9.2 ``` In this example, backslashes are used to break things up for readability; you @@ -58,7 +58,7 @@ path and line breaks looks a little different: docker run -it --rm ` -p 127.0.0.1:80:8888 ` -v C:\myfiles:/notebooks/myfiles ` - mcr.microsoft.com/mmlspark/release:0.9.1 + mcr.microsoft.com/mmlspark/release:0.9.2 Let's break this command and go over the meaning of each part: @@ -141,7 +141,7 @@ Let's break this command and go over the meaning of each part: model.write().overwrite().save('myfiles/myTrainedModel.mml') ``` -- **`mcr.microsoft.com/mmlspark/release:0.9.1`** +- **`mcr.microsoft.com/mmlspark/release:0.9.2`** Finally, this specifies an explicit version tag for the image that we want to run. diff --git a/website/src/pages/index.js b/website/src/pages/index.js index fa4afd8b3c..0be749c07e 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -236,9 +236,9 @@ function Home() { MMLSpark can be conveniently installed on existing Spark clusters via the --packages option, examples: This can be used in other Spark contexts too. 
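(A brief aside returning to the CyberML classes documented in `cyber.md` above; the website homepage text resumes below. The sketch is hypothetical and not taken from this patch: the constructor argument names, the output column, and the toy input data are assumptions made for illustration.)

```python
# Hypothetical sketch (not from this patch): fitting the AccessAnomaly estimator
# described above. Argument names, columns, and data are illustrative assumptions.
from pyspark.sql import SparkSession
from synapse.ml.cyber.anomaly.collaborative_filtering import AccessAnomaly

spark = SparkSession.builder.getOrCreate()

# One row per (tenant, user, resource) access event, with a likelihood/count value.
access_df = spark.createDataFrame(
    [
        ("tenant0", "alice", "finance-db", 30.0),
        ("tenant0", "alice", "hr-portal", 2.0),
        ("tenant0", "bob", "hr-portal", 25.0),
    ],
    ["tenant", "user", "res", "likelihood"],
)

access_anomaly = AccessAnomaly(
    tenantCol="tenant",      # assumed parameter names
    userCol="user",
    resCol="res",
    likelihoodCol="likelihood",
)

model = access_anomaly.fit(access_df)   # AccessAnomalyModel (a Transformer)
scored = model.transform(access_df)     # appends an anomaly-score column
scored.show(truncate=False)
```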
For example, you @@ -266,7 +266,7 @@ spark-submit --packages com.microsoft.azure:synapseml:0.9.1 MyApp.jar`}

For the coordinates use: with the resolver: @@ -284,7 +284,7 @@ spark-submit --packages com.microsoft.azure:synapseml:0.9.1 MyApp.jar`} To get started with our example notebooks import the following databricks archive: @@ -322,7 +322,7 @@ spark-submit --packages com.microsoft.azure:synapseml:0.9.1 MyApp.jar`} From 5733b850297abd3265781619ee6081c14b9b96ba Mon Sep 17 00:00:00 2001 From: Jason Wang Date: Wed, 3 Nov 2021 00:45:01 -0700 Subject: [PATCH 03/40] fix: performance issue in interpretability notebooks (#1238) --- notebooks/Interpretability - Explanation Dashboard.ipynb | 2 +- notebooks/Interpretability - Tabular SHAP explainer.ipynb | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/notebooks/Interpretability - Explanation Dashboard.ipynb b/notebooks/Interpretability - Explanation Dashboard.ipynb index 2f045172ad..a86960cf0d 100644 --- a/notebooks/Interpretability - Explanation Dashboard.ipynb +++ b/notebooks/Interpretability - Explanation Dashboard.ipynb @@ -169,7 +169,7 @@ " model=model,\n", " targetCol=\"probability\",\n", " targetClasses=[1],\n", - " backgroundData=training.orderBy(rand()).limit(100).cache(),\n", + " backgroundData=broadcast(training.orderBy(rand()).limit(100).cache()),\n", ")\n", "\n", "shap_df = shap.transform(explain_instances)\n" diff --git a/notebooks/Interpretability - Tabular SHAP explainer.ipynb b/notebooks/Interpretability - Tabular SHAP explainer.ipynb index 8bde934744..0f72094d9c 100644 --- a/notebooks/Interpretability - Tabular SHAP explainer.ipynb +++ b/notebooks/Interpretability - Tabular SHAP explainer.ipynb @@ -165,7 +165,7 @@ " model=model,\n", " targetCol=\"probability\",\n", " targetClasses=[1],\n", - " backgroundData=training.orderBy(rand()).limit(100).cache(),\n", + " backgroundData=broadcast(training.orderBy(rand()).limit(100).cache()),\n", ")\n", "\n", "shap_df = shap.transform(explain_instances)\n" From 8adff5a4dd900cc84069cfd924e2590c9ca611f4 Mon Sep 17 00:00:00 2001 From: Ilya Matiach Date: Wed, 3 Nov 2021 14:39:33 -0400 Subject: [PATCH 04/40] fix: min_data_in_leaf missing from dataset parameters in lightgbm (#1239) Co-authored-by: Mark Hamilton --- .../com/microsoft/azure/synapse/ml/lightgbm/LightGBMBase.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMBase.scala b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMBase.scala index f815c3f7e0..08f30a7c06 100644 --- a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMBase.scala +++ b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMBase.scala @@ -267,6 +267,7 @@ trait LightGBMBase[TrainedModel <: Model[TrainedModel]] extends Estimator[Traine def getDatasetParams(categoricalIndexes: Array[Int], numThreads: Int): String = { val datasetParams = s"max_bin=$getMaxBin is_pre_partition=True " + s"bin_construct_sample_cnt=$getBinSampleCount " + + s"min_data_in_leaf=$getMinDataInLeaf " + s"num_threads=$numThreads " + (if (categoricalIndexes.isEmpty) "" else s"categorical_feature=${categoricalIndexes.mkString(",")}") From 6f601ff1b5274e783afe1e3ffef19f273818b95c Mon Sep 17 00:00:00 2001 From: ms-kashyap <64443771+ms-kashyap@users.noreply.github.com> Date: Wed, 3 Nov 2021 17:22:59 -0400 Subject: [PATCH 05/40] docs: Adding document and notebook for Data Balance Analysis (#1226) * [DataBalanceAnalysis] Add doc and sample notebook * Clear outputs in sample notebook * Address jasowang PR comments * [DataBalanceAnalysis] Update notebook and doc 
* [Databricks E2E Tests] Upgrade DBR from 8.3 to 9.1 LTS * [Databricks E2E Tests] Revert DBR from 9.1 LTS to 8.3 Co-authored-by: Patel, Kashyap M --- ...lance Analysis - Adult Census Income.ipynb | 663 ++++++++++++++++++ .../exploratory/Data Balance Analysis.md | 194 +++++ 2 files changed, 857 insertions(+) create mode 100644 notebooks/Data Balance Analysis - Adult Census Income.ipynb create mode 100644 website/docs/features/exploratory/Data Balance Analysis.md diff --git a/notebooks/Data Balance Analysis - Adult Census Income.ipynb b/notebooks/Data Balance Analysis - Adult Census Income.ipynb new file mode 100644 index 0000000000..ba4cd7c427 --- /dev/null +++ b/notebooks/Data Balance Analysis - Adult Census Income.ipynb @@ -0,0 +1,663 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "f4e01a16-20fa-446a-9e3d-b560907b9ab2", + "showTitle": false, + "title": "" + } + }, + "source": [ + "## Data Balance Analysis using the Adult Census Income dataset\n", + "\n", + "In this example, we will conduct Data Balance Analysis (which consists on running three groups of measures) on the Adult Census Income dataset to determine how well features and feature values are represented in the dataset.\n", + "\n", + "This dataset can be used to predict whether annual income exceeds $50,000/year or not based on demographic data from the 1994 U.S. Census. The dataset we're reading contains 32,561 rows and 14 columns/features.\n", + "\n", + "[More info on the dataset here](https://archive.ics.uci.edu/ml/datasets/Adult)\n", + "\n", + "---\n", + "\n", + "Data Balance Analysis is relevant for overall understanding of datasets, but it becomes essential when thinking about building Machine Learning services out of such datasets. Having a well balanced data representation is critical when developing models in a responsible way, specially in terms of fairness. \n", + "It is unfortunately all too easy to build an ML model that produces biased results for subsets of an overall population, by training or testing the model on biased ground truth data. There are multiple case studies of biased models assisting in granting loans, healthcare, recruitment opportunities and many other decision making tasks. In most of these examples, the data from which these models are trained was the common issue. These findings emphasize how important it is for model creators and auditors to analyze data balance: to measure training data across sub-populations and ensure the data has good coverage and a balanced representation of labels across sensitive categories and category combinations, and to check that test data is representative of the target population.\n", + "\n", + "In summary, Data Balance Analysis, used as a step for building ML models has the following benefits:\n", + "* **Reduces risks for unbalanced models (facilitate service fairness) and reduces costs of ML building** by identifying early on data representation gaps that prompt data scientists to seek mitigation steps (collect more data, follow a specific sampling mechanism, create synthetic data, etc.) before proceeding to train their models. \n", + "* **Enables easy e2e debugging of ML systems** in combination with [Fairlearn](https://fairlearn.org/) by providing a clear view if for an unbalanced model the issue is tied to the data or the model. 
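(Editorial aside, not part of the notebook cell above: the imbalance these measures quantify can be previewed with a simple per-group positive-rate query, and the demographic parity gap defined later in this notebook is just the difference between two such rates. The column names and toy data below are placeholders.)

```python
# Illustrative aside (not part of this notebook): per-group positive rates on
# toy data. "sex" and "label" stand in for a sensitive feature and a 0/1 outcome.
import pyspark.sql.functions as F
from pyspark.sql import SparkSession

spark = SparkSession.builder.getOrCreate()

toy = spark.createDataFrame(
    [("Male", 1), ("Male", 0), ("Male", 1), ("Female", 0), ("Female", 1)],
    ["sex", "label"],
)

rates = toy.groupBy("sex").agg(F.mean("label").alias("positive_rate"))
rates.show()
# positive_rate is 2/3 for "Male" and 1/2 for "Female", so the demographic
# parity gap on this toy data is 2/3 - 1/2 ~= 0.17.
```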
\n", + "\n", + "---\n", + "\n", + "Note: If you are running this notebook in a Spark environment such as Azure Synapse or Databricks, then you can easily visualize the imbalance measures using the built-in plotting features.\n", + "\n", + "Python dependencies:\n", + "* matplotlib==3.2.2\n", + "* numpy==1.19.2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "a51d55f3-8f47-47e6-8698-4b78e65f034d", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "import matplotlib\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import pyspark.sql.functions as F" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "5c7332a8-b256-4c57-a593-ab338f7ca623", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "df = spark.read.parquet(\"wasbs://publicwasb@mmlspark.blob.core.windows.net/AdultCensusIncome.parquet\")\n", + "display(df)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "267c342b-2770-4dff-aae3-aa75af24adef", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "# Convert the \"income\" column from {<=50K, >50K} to {0, 1} to represent our binary classification label column\n", + "label_col = \"income\"\n", + "df = df.withColumn(label_col, F.when(F.col(label_col).contains(\"<=50K\"), F.lit(0)).otherwise(F.lit(1)))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "5af3f65c-5f1b-4e11-9bc9-ffa2b00116ae", + "showTitle": false, + "title": "" + } + }, + "source": [ + "### Perform preliminary analysis on columns of interest" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "819cb707-a3fd-45c0-a3d9-96e54d4a7e6f", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "display(df.groupBy(\"race\").count())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "dd78d2b8-fefa-458b-bc23-629f7e763414", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "display(df.groupBy(\"sex\").count())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "0d14030f-1fd8-4c1e-8742-7ad7d2dea4d2", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "# Choose columns/features to do data balance analysis on\n", + "cols_of_interest = [\"race\", \"sex\"]\n", + "display(df.select(cols_of_interest + [label_col]))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "ae54d20f-f04a-4ffd-a442-e995955d922e", + "showTitle": false, + "title": "" + } + }, + "source": [ + "### Calculate Feature Balance Measures\n", + "\n", + "Feature Balance Measures allow us to see whether each combination of sensitive feature is receiving the positive outcome (true prediction) at equal rates.\n", + "\n", + "In this context, we define a feature balance measure, also referred to as the parity, for label y as the absolute 
difference between the association metrics of two different sensitive classes \\\\([x_A, x_B]\\\\), with respect to the association metric \\\\(A(x_i, y)\\\\). That is:\n", + "\n", + "$$parity(y \\vert x_A, x_B, A(\\cdot)) \\coloneqq A(x_A, y) - A(x_B, y) $$\n", + "\n", + "Using the dataset, we can see if the various sexes and races are receiving >50k income at equal or unequal rates.\n", + "\n", + "Note: Many of these metrics were influenced by this paper [Measuring Model Biases in the Absence of Ground Truth](https://arxiv.org/abs/2103.03417).\n", + "\n", + "Measure | Family | Description | Interpretation/Formula | Reference\n", + "- | - | - | - | -\n", + "Demographic Parity | Fairness | Proportion of each segment of a protected class (e.g. gender) should receive the positive outcome at equal rates. | As close to 0 means better parity. \\\\(DP = P(Y \\vert A = \"Male\") - P(Y \\vert A = \"Female\")\\\\). Y = Positive label rate. | [Link](https://en.wikipedia.org/wiki/Fairness_%28machine_learning%29)\n", + "Pointwise Mutual Information (PMI), normalized PMI | Entropy | The PMI of a pair of feature values (ex: Gender=Male and Gender=Female) quantifies the discrepancy between the probability of their coincidence given their joint distribution and their individual distributions (assuming independence). | Range (normalized) [-1, 1]. -1 for no co-occurences. 0 for co-occurences at random. 1 for complete co-occurences. | [Link](https://en.wikipedia.org/wiki/Pointwise_mutual_information)\n", + "Sorensen-Dice Coefficient (SDC) | Intersection-over-Union | Used to gauge the similarity of two samples. Related to F1 score. | Equals twice the number of elements common to both sets divided by the sum of the number of elements in each set. | [Link](https://en.wikipedia.org/wiki/S%C3%B8rensen%E2%80%93Dice_coefficient)\n", + "Jaccard Index | Intersection-over-Union | Similar to SDC, guages the similarity and diversity of sample sets. | Equals the size of the intersection divided by the size of the union of the sample sets. | [Link](https://en.wikipedia.org/wiki/Jaccard_index)\n", + "Kendall Rank Correlation | Correlation and Statistical Tests | Used to measure the ordinal association between two measured quantities. | High when observations have a similar rank and low when observations have a dissimilar rank between the two variables. | [Link](https://en.wikipedia.org/wiki/Kendall_rank_correlation_coefficient)\n", + "Log-Likelihood Ratio | Correlation and Statistical Tests | Calculates the degree to which data supports one variable versus another. Log of the likelihood ratio, which gives the probability of correctly predicting the label in ratio to probability of incorrectly predicting label. | If likelihoods are similar, it should be close to 0. | [Link](https://en.wikipedia.org/wiki/Likelihood_function#Likelihood_ratio)\n", + "t-test | Correlation and Statistical Tests | Used to compare the means of two groups (pairwise). | Value looked up in t-Distribution tell if statistically significant or not. 
| [Link](https://en.wikipedia.org/wiki/Student's_t-test)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "5dd892b3-b2e6-4fcb-8829-9c058fa4fd5e", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "from synapse.ml.exploratory import FeatureBalanceMeasure\n", + "\n", + "feature_balance_measures = (\n", + " FeatureBalanceMeasure()\n", + " .setSensitiveCols(cols_of_interest)\n", + " .setLabelCol(label_col)\n", + " .setVerbose(True)\n", + " .transform(df)\n", + ")\n", + "\n", + "# Sort by Demographic Parity descending for all features\n", + "display(feature_balance_measures.sort(F.abs(\"FeatureBalanceMeasure.dp\").desc()))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "46e1a9a7-97c7-437e-bead-eaf4c3b9e0d6", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "# Drill down to feature == \"sex\"\n", + "display(feature_balance_measures.filter(F.col(\"FeatureName\") == \"sex\").sort(F.abs(\"FeatureBalanceMeasure.dp\").desc()))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "d4bd77a1-3c10-4e16-9892-4ac920fb4432", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "# Drill down to feature == \"race\"\n", + "display(feature_balance_measures.filter(F.col(\"FeatureName\") == \"race\").sort(F.abs(\"FeatureBalanceMeasure.dp\").desc()))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "aaec9b6a-06c6-4afb-86c8-a7fbc3df92d7", + "showTitle": false, + "title": "" + } + }, + "source": [ + "#### Visualize Feature Balance Measures" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "969e62cd-bb6c-4417-9046-dd8aa6d0fa9e", + "showTitle": false, + "title": "" + } + }, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAWwAAAEYCAYAAAB4LMxuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOydZ3hVxdaA3wUBQSCAoJAEaYpCCAQIHaRLCQgGUUFpluv1qvAJlmsBRRBFRRD0Xr1WVEQUhAihBURUkJaEqlhAQUkCKkqxUBLW92PPSU6SU3KSQE5w3ueZ5+w9de2ZfdaePTN7lqgqFovFYgl+ShW3ABaLxWLJH1ZhWywWSwnBKmyLxWIpIViFbbFYLCUEq7AtFoulhGAVtsVisZQQrML+GyIiE0Rkto/wL0Sky1kUyZMMv4tI/bNQTnkRWSwiR0Rk3pkur6g5m20lImtE5NaijmvJP39bhS0ie0XkLxE5JiKHReRzEbldRP62deJCVRur6ppA0ohIXRFRo2h/N/X7QCFkqKiq35m8Z4nI4wXNyw+DgBpANVW9NnegebidMtfkuk/anSFZAqYgbeWOOHwnIl8WoVi5yxgpImv9xFkjIsfN//GoiCSLyAMicl4A5aiIXFp4iYOjHE/83ZXTVapaCagDTAH+DbxWvCL5R0RKF7cMPqiiqhWBIcAjItI7kMQiEnJmxPJKHeAbVc3wEec9c03VgY+BEtcT90En4CKgvoi0KmZZ7jL/xzDgHmAwsFREpHjFCh7+7gobAFU9oqqLgOuBESISBSAi54nIVBH5QUQOishLIlLehHURkf0icr+I/CQi6SJytYjEisg3IvKriDzkKsPk9ZyIpBn3nHvvweSTbsJudX+Kmx7miyKyVET+ALqKSF8R2WJ6Iz+KyAS3vFy93dtMfukicm+uyy4rIm+ZHs0XItLSLf1eEelhjkuLyEMissfETRaRi/NRp+uBL4AoEWktIutNDzVdRF4QkbJu5amI3Cki3wLfuvldKiK3ATcC95te7mIRuU9EPnAvT0RmisgMT7KISCPTgztsrrW/8X8MeAS43uR9i59rygDeASJE5EKTh79raywiK839cNB1T4hIKdOD3CMih0TkfRG5wISVE5HZxv+wiGwWkRpers29rSaYfDy2qxdGAB8CS82xe95XishX4gwXvQCIW1iOYTW3ey4kVx6NgJeAdqaOD/uRB1X9w7w19AfaAX1NXl7rWkQ+Ncm3mXKuF5GqIpIgIj+LyG/muJabbCPFebs4JiLfi8iNbmE3i8guk26FiNTxVo6/6ylSVPVv6YC9QA8P/j8A/zLH04FFwAVAJWAx8KQJ6wJk4PzhywD/AH4G5pi4jYG/gHom/kRgA05v5kLgc2CSCesNHDBpzgdmAwpcasJnAUeADjgP2XKm/CbmvClwELjaxK9r0r8LVDDxfnZdLzABOA7EAqWBJ4ENnuoGuA/YAVyO84eNxhk+yF1vrjJDTLwOwJ9AdyAGaGvC6gK7gLvd0iqw0tRzeTc/9+t/3C1+GPAHTm8ek+9PQIwHucoAu4GHgLJAN+AYcLlbXcz2cZ9khZv0U4BfgBDj5/XazH2QjtNbLGfO25iw/8O5H2oB5wH/A941Yf/EudfON+0TA4T6u4/9tauHtOcDR038a8x1lTVh1U09DTJ1OAbnfr/VU725t785X+MWdySw1s//MSt+Lv9Pgaf81XXue8acVzPXdb6p+3lAvAmrYK7ddR+EAY3N8QBzzzQyZY0DPvdWzlnVW8VRaDA4vCvsDcDDOErnD+ASt7B2wPfmuAuOQi5tziuZhmzjFj+ZbCW6B4h1C+sF7DXHr2MeBOb8UvIqrLf8XM9zwHRz7PrzNHQLfxp4zRxPAFa5hUUCf3mqG+BrYEA+6tNV5mHgN/NnGu0l7t3AQrdzBbrliuNVYRu/ZcA/zHE/4EsvZV2B8zAs5eb3LjDBrS78KeyT5roygUNAFx/xs64NZ1hoi5d4u4DubudhwCkcBXEzzgO9aSD3sb929ZB2KM6DPATngXIEiDNhw8n5EBdgP2dfYc8FXgngPvKqSIFmwG/muIJp02swnYRc99YtbuelcDofdfJTzpl0dkgkLxHArzi94POBZPMKdhhYbvxdHFLVTHP8l/k96Bb+F1DRHIcD+9zC9hk/V9iPbmHuxx79RKSNiHxsXveOALfj9Iq8pXEvDxwl5uJPoFzu11nDxTgPm/xSXVWrqmojVZ1pZL3MvI4eEJGjwBN+ZM0Pb+IoHMzv217ihQM/quppN799OO2cX95X1So4k5M7cXp6gN9r81V3dYCFbvfWLpwHQg1zLSuAueIMaT0tImXyKWt+2xWcIZD3VTVDVY8DH5A9LJLjnlRHUwXaRkWB6/+Y3/soCxE5X0T+JyL7TPxPgSoiUlpV/8AZAr0dSBeRJSLS0CStA8xwa5tfcR5YgdwzZwSrsN0QZ9IlAliL83r4F85rUhXjKqsz+VQQ0nBuBBe1jR84r8213MI8jRHn3lZxDs5wzcWqWhlnnDD35Ix7Pu7lBcKPwCUFSOfOi8BXQANVDcUZnsgtq69tIz2FxQNNxZlv6IcztuyJNOBiybn6pzaQmh/Bcwih+gtwGzBBRMKMt69r+xHwtjTxR6CP271VRVXLqWqqqp5S1cdUNRJob65veKDy+sKM5XYDhhoFeABn+CNWRKrj3JMXu8UXct5Pf+B0aFzU9FFcgbYEFWeuJAb4zHjl5z5y5x6cobw2Jn4nV9YAqrpCVa/Eebv5CnjFhP8I/DNX25RX1c8Lch1FiVXYgIiEikg/nNev2aq6w/TIXgGmi8hFJl6EiPQqYDHvAuNE5ELzh3gEZ6wa4H3gJnEmx84Hxucjv0rAr6p6XERaAzd4iDPe9DIaAzcB7xVA7leBSSLSQByaiki1APOohDNe+LvpxfwrwPQHyaX4TI9wPs6Da5Oq/uAl7Uacnub9IlJGnDXLV+G0dcCo6tc4vd/7jZeva0sAwkTkbnEmnSuJSBsT9hIw2W0y60IRGWCOu4pIE3FWAx3FGSpxf0MoCoYB3+AotGbGXYYz7DEEWAI0FpGBpoc+mpxKeSvQSURqi0hl4EEfZR0EaonbZKwvzD3bGWcydBPOhCj4v49y3yeVcDpdh8WZ0H3UrYwaIjJARCoAJ4Dfya7jl4AHzf8GEaksIu5LPvPcj2eLv7vCXiwix3CeqA8D03AUm4t/40w+bDCvVKtwbvCC8DiQBGzHmcRLMX6o6jJgJs6Ssd044+jg3EjeuAOYaOR/BEfp5+YTk99HwFRVTSyA3NNM3ok4f5bXgPIB5nEvzgPlGM5DMNAHx2tApHlFjXfzfxNnQtXbcAiqehJHQffBeWv6LzBcVb8KUAZ3ngFuMw9yr9emqseAK035B3BWwHQ1wTNw3pASTRtuAFzKvCbOw+gozlDJJ76usYCMAP6rqgfcHY6yGmHeJq7FmWQ9BDQA1rld20pzrdtx5moSfJS1GmfF0AER+cVHvBdMXRzEmZP5AOjtNpzl7z6aALxp7pPrTB7lcdp9A8
6QpotSwFicN7Bfgc6YB4CqLgSewhmSOoozDNbHRzlnDTGD6JYgQpylUDuB89T3+mBv6esC3wNlCpK+pCAitXFeZWuq6tHilsdiOdP83XvYQYOIxJnX5qo4T/fF57KyLSxmTHosMNcqa8vfBauwg4d/4qwl3oOzWiDQcd6/DWbc8SjOcMOjfqJbLIjI6+J84LbTS7iI8/HVbhHZLiIt3MJGiMi3xo3wlP5sYYdELBbLOY+IdMKZWHxLVaM8hMcCo3A+ImoDzFDVNmayMgloibPaJRnnA63fzprwbtgetsViOedR1U8x67m9MABHmauqbsBZrx2G84HbSlX91SjplThfJhcLZ3ujHctZoHr16lq7du1ileFUUS9CC5BgeHMsW7r49yw6ebr46+GLbVt/UdUL/cf0TqnQWkrGca/h+tehL3A+y3fxsqq+HEAREeT8MGi/8fPmXyxYhX0OUrt2bdatW+c/4hkk7ffinS89mVn8iqp2aH4/TjxzpP5+qrhF4PIalff5j+WHjOOEXN7fa/CprW8cV1V/G12VeOyQiMViCX5EKBVS1qsrAlLJ+SVnLePnzb9YsArbYrEEP1LqTCvsRcBws1qkLXBEVdNxvmrtKc5WrVWBnsavWLBDIhaLJegRQEoX3G6HiLyLs8NmdRHZj7MctAyAqr6E8/l7LM6XwX9ivnhW1V9FZBKw2WQ1UVV9TV6eUazCtlgswY8IpQvRk1bVIX7CFbjTS9jrOFsgFztWYVssluBHBCkVzJbxzg5WYVsslqBHEEqFFP+qm+LGKmyLxRL82B42YBW2xWIpCYhQqkyRrAYp0ViFbbFYgh4x67D/7liFbbFYSgBCKTskYhW2xWIJfmwP28F+6fg3JzExkejoaKKiopg6dWqe8BMnTjBs2DCioqLo1KkT+/YVfluIT1av5Mp2LejWOpqXZk7LE75p/Tr6d7+Cy8OqsmxxfJ7wY8eO0iG6IRMeuKfAMny2ehWxHWPo1a4ZrzyfV4ak9eu45soraFLrAlYkZMuwcd2nxPXomOWa1b2IVct8WcfyzMrERJo3i6Zpkyie9VLvw4cPo2mTKLp0zq73Q4cO0adPb2pcdCFjx44JuFx3Pl29il7tY7iyTTNe9tAOm9evI67HFUSGX8DyXO3QKKwqA7p1ZEC3jtw+bHCh5MgvUqq0V/d3ocgVtog0ExEVEb9bEIrIqyISWUTlrhGRr0Vkm4isE5GC2l50z/N3P+F7jUHdQuOvrDNBZmYmY8aMIT4+npSUFObNm8euXbtyxJk1axZVqlRh586djBo1inHjxhW6zAn/vofX3v2A5Ws3k7BgPt9+ndO8YnhELZ6e+SJXDbzWYx7PTXmc1u3aF0qGxx+6h/+9M5/Fn2xiafwH7M4lQ1itWjwx40X6xuWUoU2HTixctZaFq9byxrxFlCtfng6duwVc/tixY1iwMJ6kZM/1/uabTr1v37GTO+8axfjxTr2XK1eO8eMfYfITTxTgynPKMPGBe3h1znyWfLaJhIUe6iCiFk/OeJF+HtqhXLnyfLh6LR+uXstLbxfInnFgnPlP00sEZ6KHPQRYa359oqq3quqXRVj2jaoajWOc9ZkizDeoMPsdFLrtkpKSuOSSS6hXrx5ly5Zl0KBBJCTk7C0uWbKEoUOHAhAXF8eaNWsKtXXptpQk6tSrT+26Tpl9465h1fIlOeLUql2Hho2jKFUq7yXu3LaFX37+iY5duhdYhh1bkqldtz4X13Fk6DNgIKtX5JQh4uI6XB7pWQYXiQkfckXXKyl//vkBlZ+UlET9+jnrfUnuek9Ywo035q33ChUq0L59e8qdVy6gMnOzPSWZOvXqc7GrHa4eyEcBtMNZR5xP0725vwtF2hIiIjiWlkcCV4pIOeNfQUSWmN7vThG53vivEZGW5vhFEUkSkS9E5DG3PPeKyGMikiIiO4x5e398ClwqInVF5DOTNkVEsrplIvJvk982EZni57rCRORTEdlq5L/CQ5x4EUk28t/m5v+7iEw25WwQkRrGv56IrDcyPJ4rr/tEZLMxVfSY8atr3iDewjHQ676DWIFIS0sjIiJ7a9+IiAjS0tK8xgkJCSE0NJRDhw4VuMyDB9IJi6iVdV4zLJyD6Wk+UmRz+vRpnnj0YR6YMLnA5TsypFHT7bprhkXw04H0gPNZ9uEH9I0bFHC6tLQ0atXKVe/peevdFSckJITKhaz33Bw8kEbN8GwZaoRHcDCAOjhx4jgDe3bmuj7dWbU08CGhQBHbwwaKftKxPfC9qu4RkTVAX4ypeiBNVfsCiEhlD2kfNhutlAY+EpGmqrrdhP2iqi1E5A4cU/e3+pHjKmAHjo3EK1X1uIg0AN4FWopIHxwLE21U9U9jBsgXNwArVHWykc9Tl+pmI395YLOIfKCqh4AKwAZVfVhEngb+ATwOzABeVNW3RCRrDwMR6Qk0AFrj7HmzyJg3+sH4jzAWMXJgHhK3AVx8caF1eVAy+41X6NK9J2HhxbZ/fBY/HzzAN7u+pEMhevolmY+Td1IjLJwf937PiEH9uSwyktp165/RMkuH2DUSRf2uMwRwDWjNJXtYZAdOj/spEblCVY94SHudiKQAW4DGgPvY9gLzmwzU9VH+OyKyFeiAo9jLAK+IyA5gnluePYA3VPVPcHbk8nNdm4GbRGQC0ERVj3mIM1pEtgEbcHq/DYz/ScDVBXGXvwPOAwTgbbd8ehq3BUgBGrrltc+TsjbX8LKqtlTVltWr529YPTw8nNTU7K19U1NTCQ8P9xonIyODo0ePUq1atXzl74kaNcNIT92fdX4gPY0aYeE+UmSzdfMm3n79ZTrHRDFlwsMsfH8uT08K3AZvjZrhHHC77gPpqVxUMyygPJYvWkiPPv0oUybwz6XDw8PZvz9XvYflrXdXnIyMDI4Ust5zU6NmOAfSsmU4mJZKjQDqwNVmF9etR+v2Hflyx3Y/KQqHiCClvLt8pO9t3lB3i8gDHsKnmzforSLyjYgcdgvLdAtbVMSXFhBFprBNz/Ma4BER2Qs8D/QWkUqq+g3QAkdxPy4ij+RKWw9HwXZX1abAEsB9kO6E+c3EvBWIyApTga+6xbtRVZup6tWq+iMwBjgIROMY0fT67iQiF7s1yu3uYcYeXCecjctnicjwXGm74DwE2pkx9C1u8p/S7EHfLPldWXsSBXjSXEczVb1UVV8zYX94k78gxMTEsHv3bvbu3cvJkyeZP38+ffv2zREnNjaW2bNnA7Bw4UI6d+6MM/JVMJo2j2Hfd9/x4z6nzCULP6B7r9h8pZ320mt8tuVLPkneyQMTJhN33WDuH/+Y/4S5iGrWgn3f72H/D44Myz5cQNd8yuBiSfx8YgswHAJOve/Zk7PeY3PXe99Y3nmn6Oo9N02at2Dvd3uy2yF+Ad3yWQdHDv/GyRPOX/LXQ4dI2bSBSy/Lz0hl4ShdupRX5wujm/4D9MHptA3JvdhBVce4/nM4umuBW/Bfbv9H72ZvzgJF+Y7RHdiuqr1cHiLyJhAnIquAX1V1tnly5R7SCMVRRkfMGG8fYI2vwtzL8UFlYL+qn
hbHPL1rdmIlzoPlHdeQiFHwzTxlIiJ1TD6viMh5OA+ft3KV85vJqyHQNh+yrQMGA7OBG938VwCTjGy/i0gEcEbsPIWEhDBt2jT69+9PZmYmw4cPJzIykokTJ9KiRQv69evHyJEjueWWW4iKiqJq1aq89dZb/jP2U+ajU57hpuvjyMzM5NobhnFZw0Y8N+Vxopq1oEfvWLZvSeZfI2/k6JHDrE5cxoynn2D5Z5uK6KodGR5+Yir/GDKQ05mZxA0eSoPLG/H805NpHN2cbr1i2bE1mdE3D+Xo4cN8vHIZLzzzJIs/2QhA6o/7OJCWSqt2HQtc/rPPTuPqAU69DzP1PmmSU+99+/ZjxIiR3HrrLTRt4tT7rDez6z2yUUOOHTvGyZMnSVi8mA8XLaZRo0YBy/DIk1O5dfBAMjMzuWbIUBo0bMSMpyYTFd2c7qYd7rrJ1EHiMp5/5kmWfLqRPd9+w6P33o2UKoWePs0/Ro3h0svPsMIW8tWT9kJrYLeqfgcgInNxhkS9LXgYgrNfdtAhRWWsVETeADaazcBdfv2BfwHP4azaOI2jfP6lqklmnPteczwLZwz8R+AIsEhVZ5neektV/cVMUE5V1S4eys/Ky82vAc4YugLLgTtVtaIJewAYjjNksVRVH/KQ5++qWtEo+/uM7L8Dw1X1e5dswDEgHme442ugCjBBVde48jD5DQL6qepI81YxB6gIfAjc7Rbv/8h+qP0ODMXpnSeoapTvloAWLVqotelobTpC0Nh0TC6svcXyNRpo/RvzrhV38eX0/vuAX9y8sozwmv9db1W91ZwPw5m/uit3PqZztgGopaqZxi8D2ApkAFNUNe/HAWeJIlPYluDBKmyrsF2cMwq7ZgO9ZOh0r+FfPHuV1zICVNj/xlHWo9z8IlQ1VUTqA6txhm73FOZ6CkoQLLC0WCwW34hA6ZBSXp0fAjGkO5jsxQAAqGqq+f0OZ6i2eUGuoSiwCttisZQIpJR354fNQAPz7UNZHKWcZ7WHmX+qCqx386tq5q0Q56vmDngf+z7j2IWNFosl6BERv6tBvKGqGSJyF86EfmngdVX9QkQmAkmq6lLeg4G5mnOcuBHwPxE5jdPBnVLEX2cHhFXYFoulRFDK/9CHV1R1KY5ldHe/R3KdT/CQ7nOgSYELLmKswrZYLMGPQKkiXIdeUrEK22KxBD2OEV475WYVtsViCX4EShX8w5lzBquwLRZL0CNAqdJWYVuFbbFYgh/bwwaswrZYLCUCoVQBl/WdS1iFfQ4iGScpc2hvscoQXq1usZZ/6nSxFg9A2d8Kb/+ysERUrVPcIhQJri8d/+5YhW2xWIIeEShth0SswrZYLMGPIJS1PWyrsC0WSwngHOthi8gp4DiejZhUUFWPloWtwrZYLEFPKYHzzq0e9g5VbeEpwJhK9Mg5VQMWi+XcRHB62N6c3/T+bTqOFJGf3cwE3uoWNkJEvjVuRBFdki9T717DbA/bYrEEPSJC2RCPowT5Seuy6XglsB/YLCKLPOy6915uowYicgGOubCWOMMXySbtbwUSJptTInKhqv6cq7zq+DAJaHvYFosl6BGBsiGlvDo/ZNl0VNWTgMumY37oBaxU1V+Nkl4J9C7whWTzFjDbWLEBsoyRvwu87S2R7WFbLJagR/A79FFdRJLczrNsOgIROLZiXewH2njI4xoR6QR8A4wxhrk9pY0IVP7cqOp0EakEbDRvAODYvH0B8GoLzSpsi8US9IhAWd9fOv5SSLuRi4F3VfWEiPwTeBPoVoj8/KKqE4GJInKhOf/ZTxI7JGKxWEoGhZh09GvTUVUPqeoJc/oqEJPftAVBHP4hIvOB/wJXi/g3dmYVtsViCXpKiXBeSCmvzg9+bTqKSJjbaX9glzleAfQ0th2rAj2NX2F5HIgF/odj0eZCYIq/RFZh/81QVe5+ZDINO/ai+ZVXk7LDs3m68U89R73W3ahyeYzH8AVLEylzcSRJ23YWSp7ExESio6OJiopi6tSpecJPnDjBsGHDiIqKolOnTuzbVzT7c6xamUhM82iaNY1i2rOeyx05fBjNmkbRrUt2uclJm+nYrg0d27WhQ9s2LF70YYHKV1XGjJ9Mow69aNFjAFt2fOEx3vinnqN+q65UvSxnO7z89lyad+9Py55xdIm7kS+/2R2wDCsTE2neLJqmTaJ41kvdDx8+jKZNoujSObsODh06RJ8+valx0YWMHTsm4HILSkF72KqaAbhsOu4C3nfZdBSR/ibaaBH5QkS2AaOBkSbtr8AkHKW/GZho/ArLVcC1qroS+EtVnwA6+0tkFXYRIyJ7RWSHWcu5Q0QGuIX9XsA8Z4nIoKKQb/nHn7L7+33s+mw5Lz71GHc99JjHeH2v7Mrni9/zGHbs9z94/rW3ad28aaFkyczMZMyYMcTHx5OSksK8efPYtWtXjjizZs2iSpUq7Ny5k1GjRjFu3LhClekq956xY5i/IJ5NSSl8MG8eX+Uq9603nXK3bt/JHXeO4tHxTrmNIhuz5rN1rF2/kQ/i47l79GgyMjIClmH5aqcdvlxr2uHBiR7j9evRhXUJedth8NX92PLRIpISF3LPv27h/seeCqj8zMxMxo4dw4KF8SQle677N00dbN+xkzvvGsV4UwflypVj/PhHmPzEEwGVWRhKFW6VCKq6VFUvU9VLVHWy8XvEZYBXVR9U1caqGq2qXVX1K7e0r6vqpca9UUSXJOZB4pw4Pf/z/CWyCvvM0FVVmwGDgJnFLYw7ixJXM/SaAYgIbVtEc+ToMdIP5p3raNsimrAaF3rM49GpM7nvjlspd57f+8snSUlJXHLJJdSrV4+yZcsyaNAgEhIScsRZsmQJQ4cOBSAuLo41a9aQ06h14CQnJVG/fna5AwcNYsmSnOUuXbKEG250yr06Lo5PTLnnn38+ISHOXP3x4yeQAtoZXJy4mhsHOe3QJqYZh48eJf3gT3nitYlpRliNi/L4h1aqmHX8x59/BSxHUq46GDRoEEty133CEm68MW/dV6hQgfbt21PuvHIBlVkYnHXYBVfYQchPItLAHIcC63DWivukRF5pCSIUyLPAXkQqishHIpLioRc+XES2i8g2EcmzHlNEJpked4G+Ikg78BO1wmtmnUeE1SD1wMF8p0/Z8SX70w4Q293v25t/WdLSiIjIXiEVERFBWlqa1zghISGEhoZy6NChwpdbK2e56bnKTXeLExISQmjlUH415SZt3kSbljG0b9OK6TNmZCnwgGQ4cJCL3dqhVlhN0g7kVdi+eHHWOzTs0JOHJk9l2sSHAis/LY1aueogLT1v3ddyq4PKRVD3haEwXzoGIVeTPXn5T2CQqr7iL5Fd1ndm+FicLk994DoP4ceBOFU9ar5s2iAii4BIYBzQXlV/MV9ZZSEizwCVgJs0VzdTRG4DbgOoHeE+f1J0nD59mvsmPsVr087eq3Aw0rJVazYmJfP1V19x+z//wZU9e1Gu3Nnrbbr418gb+dfIG3l3YQJPznyJ15/zO2dVYnENiZxDtABcb0angLoiUldVP/GV6JyqgSCiq6pG4cz+viAi
FXOFC/CEiGwHVuEsxK+Bs+5znqr+AlkTHi7GA5VV9fbcytrEfVlVW6pqy+oX5NDz/HfWHGJ6xRHTK46aF13I/rQDWWGp6QeJqFkjXxd17Pc/+OLrb+lx3QgubdeDjVu2MfDmOws88RgeHk5qavYKqdTUVMLDw73GycjI4OjRo1SrVq1A5eXIc3/OcsNylRvmFicjI4OjR45yQa5yL2/YkAoVKvLll54nDHPz4qx3aNkzjpY9nXb40a0d9qcfILxm3qGP/HD9gFgWrfgooDTh4eHsz1UH4WF5636/Wx0cKYK6LzByzvWw73FzDwELAc8TSm5YhX0GUdU9wEGcnrM7N+Is44kxY90HAX9dtM1ATO5ed364Y+QNJK9YSPKKhQzo1Z3ZH3yIqrIhZRuhlSp5HavOTeXQShzY/jm7169i9/pVtGkezYLX/0PL6KhARQIgJiaG3UH2ni4AACAASURBVLt3s3fvXk6ePMn8+fPp27dvjjixsbHMnj0bgIULF9K5c+cCjxu7aBETw5492eUumD+f2Ni85c55xyk3fuFCOply9+7dmzXJ+MMPP/DtN19Tp3b+rLr8a+SNJCUuJClxIf17d+ed+U47bEzeSuVKlTyOVXvj2+/2Zh0v/egTLq0XmGWZmFx1MH/+fGJz133fWN55p2jrvqAIQplSpby6koaq9ndzvYCGwGF/6eyQyBlERC4C6gG516JVBn5S1VMi0hVw/dtWAwtFZJqqHhKRC9x62ctxliUtEZGeqnqsIDL16daJZas/pWHH3pQvX45Xn52cFRbTK47kFQsBeGDyVObGL+HPv45Tt1VXbh5yDY+MvctbtgUiJCSEadOm0b9/fzIzMxk+fDiRkZFMnDiRFi1a0K9fP0aOHMktt9xCVFQUVatW5a233iqScqc+O42BVzvlDh02nEaRkUyeNJHmLVoQ27cfw0aM5LZbb6FZU6fc12c55W5Y/znTn32WMmVCkFKleHb6c1SrXj1gGfp068zy1Z/SqGMvypcrx6tuw0wte8aRlGja4fFneC9+CX/+9Rf1WnbhpiGDeOSeu3hx1hw+Wvs5ZULKULVyKK9NfzLgOnj22WlcPcCpg2Gm7idNcuq+b99+jBgxkltvvYWmTZw6mPVmdt1HNmrIsWPHOHnyJAmLF/PhosU0atQo4HrILwKc40bTDwN+ez5S2Bl3S05EZC9wDMgEygDPqurrJux3Va1oxq0XAxWBJKAt0EdV95rtG+8z6beo6kgRmQUkqOp8EbkZGAbEqupfnmSIaRqlG5fOO6PX6Y9T1qYj5Q8Xv03Hk0Fg07FihfOTC/nZOPUim+qEt5d4DR/ZsnahyzibiMhqnOcQOCMdlwBvq+qDvtLZHnYRo6p1fYRVNL+/AO28xHkTZx8Dd7+RbsevA68XgagWS4lBgDIlc6zaG/e6HZ+H8wXlt/4SWYVtsViCHxFKnUMKW1VzW5VZLyIbgTm+0lmFbbFYgp5zrYctIu4fMpTCWVHmd22oVdgWiyXoEZwNoM4h7nE7zgD24nxM4xOrsC0WS9AjAmV874ftJ730BmYApYFXVXVKrvCxwK04yvNn4GZV3WfCMoEdJuoPqtqfQlLQPEreAkaLxfK3pLR4d75ws+nYB+ebiCEikvvbiC1AS1VtCswHnnYL+0tVmxlXaGVtZKotIvEi8pMx/rtIRPwu6bEK22KxBD0iQpnSpbw6P/i16aiqH6vqn+Z0A46hgjPJGzgPhjCgJvA++Vj9ZRW2xWIJepwxbO/OD4HaZbwFWOZ2Xk5EkkRkg4j4HWfOJ9VUdbaqZho3G/D73b8dw7ZYLEGPs0rEZ//SlxHe/JcjMhRoSU5jAnVUNdVYOF8tIjvMthOF4WcRGQnMNufDcMbOfWIVtsViCXqcSUefXWlfRnjzZZdRRHoADwOd3ew7oqqp5vc7EVkDNAcKq7BvwpkEdY2VrzN+PrEK22KxlACE0gVf1pdl0xFHUQ8GbsiRu0hzHPuKvVX1Jzf/qsCfxpp6daADOSckC4Sq7geuCTSdHcO2WCxBj6uH7c35Ip82HZ/B2dtnnjHv5zLS2whIMrYePwamqKpnQ6j5ug7pKCLlReRSEVksIr8Yt9gMufjE9rDPQYTTSMbx4pUh82Sxll+mdNliLR9AMk74j3SGKa2B25sMRgr74YyqLgWW5vJ7xO24h5d0n+N8hVhUvKCqzURkNjCN7NUqg4B38LLHkAvbw7ZYLCWC0iJeXQnCJWw5VX1fVU8b9z5wvr/EVmFbLJagR3CGRby5EsRvInI9sEJEbhORUBGpJCL/BBb5S2yHRCwWS/AjUIgv04OJm4FncZYOVgamuoUJjilAr1iFbbFYgh4p3CqRoEFVvwPiCpreKmyLxVIiOId2V829vWoW/qymW4VtsViCHsem4zmksXNur1oBZ7+TLUAnX4mswrZYLCWCc0lf5971T0RqA8/7S2cVtsViCXpEzrkedg5U9QcRuVxESqtqprd4VmFbLJYSwTk2hl0W6A0ccY1bq2pDf+mswrZYLEGPM4Zd3FIUKfHAKaCqiKwGngPeUFWfK0iswrZYLMGPQOlzqYvtbNnaWETOAzaq6gQR8Ws04dxYim7JN6rK3ROe5vIu/Wne+zpSdu7yGG/cMy9Qt30fKjfukMN/7KSpxMQOJiZ2MI26Xk21pj4ntb3KMPa++2kc3ZxWbduzZetWj/FStmylZZv2NI5uztj77kdVAXhs0uO0atueNu070m9AHGnp6QHL4E5iYiLR0dFERUUxderUPOEnTpxg2LBhREVF0alTJ/bt21eo8sDVDk/RsPNVNO99rdd2GP/M89Rr14sqkXm3mJiXsIKmPQYSfeVAho1+oEAyjL33Pho3jaZVm3Y+2mELLVu3pXHTaMbee19WOzz48Diim8fQqk07rht8A4cPHw5YhkAQH64E8rWINHRt4yoi5ciH1XSrsIsQEaksIm+JyG4R2WOOK5uwuiJyg1vckSLywtmWcdmadXy79we++vhDXnxyHHeOe9JjvH49OrE+/q08/tPG30vy0rkkL53LnSMGE9e7W8AyrEhcyZ4937FzawovzJzB6DH3eIw3esxY/vP8DHZuTWHPnu9IXLkKgDH/N5rNGz5n4+dr6dO7F09OKfhul5mZmYwZM4b4+HhSUlKYN28eu3blVJ6zZs2iSpUq7Ny5k1GjRjFu3LgCl+di+Zq17P7+B3atWcSLT4znrocne4zXt3tnPv9wdh7/b7/fx1P/fZ1PPpjFtpULePaR+wOWYUViInv27GHntq288PwMRt89xmO80XeP4T8vzGTntq3s2bOHxJUrAejerSvJmzeyeeN6GjS4lGeenRawDPlFcHrY3pzf9CK9ReRr89/M83QTkfNE5D0TvlFE6rqFPWj8vxaRXkV0SVWALSLyMVAHZwvYGf4SWYVdtLwGfKeql6rqJcD3wKsmrC659uAtDMawaMAsXrmGYQP7ISK0bd6UI0ePkf5TXkMXbZs3JeyiC33mNXfxcq6/qnfAMiQsWcoNQwYjIrRp3Yojh4+QfuBAjjjpBw5w7Ogx2rR
uhYhww5DBLE5YAkBoaGhWvD//+BMpxOqBpKQkLrnkEurVq0fZsmUZNGgQCQkJOeIsWbKEoUOHAhAXF8eaNWuyepkFZVHiGoa62qFFU44c89IOLTy3w2tzF/Cv4ddTtbJTFxdVvyBgGRISlnLDkCGmHVpz5Iivdmht2mEIixc77dCje3dCQpxR1datWpGamscmQJFRGBNh+TTCewvwm6peCkwHnjJpI3H2z26MM0n434L+93IxAegFPGp+O6nqqz5TYBV2kSEilwIxwCQ374lASxG5BJgCXGH22nV1ZcJFZLmIfCsiT7vl1VNE1otIiojME5GKxn+viDwlIinAtQWRM/XgT9QKq5F1HhF2EakH/FomysO+/Wns/TGNbu1bBZw2LS2dWhHZJvUiIsJJS0vPEyciIjw7TnjOOI8+NolLGzZm7vvzGP/wQwHLkF1OGhE5ZIkgLS3Na5yQkBBCQ0M5dOhQgcsESDv4E7XCa2aXW7MGqQd+8pEiJ99+t49vv99Hp2tG0OHqYaxYsy5wGdLTqFUre9g0Itz3tYOpn/SccQDeevttevW8MmAZ8o/3nfrysdzPrxFec/6mOZ4PdBenJzAAmKuqJ1T1e2C3ya9QqOqnbm6Tqv6Wn3RWYRcdkcBW9zWU5ngrztP5AeAzVW2mqtNNlGbA9Tj77V4vIhcbqxbjgB6q2gJIAsa6lXNIVVuo6lz3ws3OX0kikvTzoXy1faF4LyGRa/p0p3TpouhsBM5jj45n91dfMPi6a3np5YBN95V4MjIz2f39D3w091VmPz+F2x+cyOEjR4tFlqeefobSpUMYfP31Z64QHzv1GX1d3XX/G3ebW+r8GOHNimMMHhzBMYobqAFf35chclREjhl3WkQyjDstIsf8pbcKu3j5SFWPqOpx4Eucsay2OMp/nYhsBUYYfxfvecpIVV9W1Zaq2vLCalVzhP33rfeyJgrDLryQ/ekHs8JS038ioqbvoQ9PvL94Bdf3z/9wyEsvv0Kb9h1p074jNWvWYL/b63Nqahrh4WE54oeHh5Gamt2TS03LGwfg+uuvJf7DxQHLn11OeI5X+dTUVMLDw73GycjI4OjRo1Sr5tfAdR7++9ZcYvpcR0yf66h5UXX2p2UPP6QeOEhEzYvynVdEzRr069GZMmXKUO/iCBrUq8O3e3/wm+6l/71Mm3YdaNOuAzVr1mT//v3ZMqT5vnYw9ROWHeft2e+wdPlyZr3+aqGGpvwhqsjpTK8OY9PRzQXlU1xVQ1W1Eo45ssFAWeNuAF7xl94q7KLjS6CZiGTVqTluZsI84W6SJBNnmaUAK01PvJmqRqrqLW7x/ghUsDuGX581Udi/ZxfeXpCAqrJhy3ZCK1X0O1adm6/2fM9vR47SrkXTfKe5/bZ/sPHztWz8fC1X9evLnHfnoqps3LSZ0MqhhNWsmSN+WM2aVAqtxMZNm1FV5rw7l359YwHYvTvb/mnCkqVcdlmDgOR3JyYmht27d7N3715OnjzJ/Pnz6du3b444sbGxzJ7tTPwtXLiQzp07F0g53TF8MMnL3id52fsM6NmV2a52SAm8HQb07MonGxwj4b/8+hvffr+P+rX9rgrj9n/exsb169i4fp1ph3dNO2wiNNRXO2wy7fAu/fo57ZC4ciXTpj/H/Pfe4/zz/e69X0gUOZ3h1fkhP0Z4s+KISAjO1qeH8pm2IPTKZcBgLtDTXyKrsIsIVd2Ns3mL+xKCcUCKCTsGVMpHVhuADmZMHBGpICKXFZWcsV07Uv/iCC7vMoDbH3ycFyY9mBUWEzs46/jfTz5HnXa9+fOv49Rp15vHnnspK+y9xSu47qpeBe5R9e7Vk3p169I4ujl3jvo/Zkx7NiusTfuOWcczpj3LHXeNpnF0c+rVq5c1Rjru0QnEtG5Hq7bt+eij1Ux9ekqB5ABnTHratGn079+f5s2bM3DgQCIjI5k4cWLW5OPIkSP59ddfiYqKYubMmUyaNMlPrv7p0/UK6tWOoGHnq7j9wYk8Pyl7HD6mz3VZxw88OZ26bXvy51/Hqdu2JxOnvwhAz87tqVa1Ck17DKTHkH8w5cExVKtaJSAZevfq5bRD02juvGs0M6Znr/Jo0y57OeeM6dO4485RNG4abdrB0Stj7rmXY7//Tr/+A2jTrgOjRt9doLrIN6renW+yjPCaLwwHk9dYwCKct1lwzHWtVmdmeREw2KwiqQc0ADYVwdUcFJFxRqZ6IjIe8Ls+VQo7223JxlhYfp5su2zrgbtU9bCIlMExAloNmAX8BrRU1btM2gRgqqquEZFuOLPU55l8xqnqIhHZa9L84kuOlk0jdeOid4r24gLkVHW/9kTPKBoENh3L/PRNcYtARjG3A0D5iqHJqtqyMHnENG+m69es8hp+XpULfZYhIrE4XxOWBl5X1ckiMhFIMv+tcsDbQHPgV2Cw2bsaEXkYx/BABnC3qi4rzLWYPKsBjwAdcd6q1wKPqarP2WyrsM9BrMK2CtvFOaWwP070Gn5e1RqFLqMkYD9Nt1gswY8q+B+rLjGY/UO8jimqaldP/lZhWyyWEoDC6dPFLURRcq/b8XnAQJyFB+/7SmQVtsViKRHkYzVIiUFVU3J5rReRjarqc1MYq7AtFkvwowqnve7rX+Iwk44uSuN8JV3ZXzqrsC0WS4lA9JwaEtmMM4atOKtP9uHsZ+ITq7AtFksJ4NyadFTVAi3fsQrbYrEEP6pwDvWwRaSzJ39V/UREYlQ12VO4VdgWiyXoEUAyz50eNuBpE3gBPgGGAVZhWyyWkkq+PkEvMahqfx9hXr/xtwrbYrEEP3puLesTkQo4ew25NhFfBUxSVZ+bu1mFfQ6iIeU4dVGR7RdVINL/LN7xxpOZp4q1fIDaxdwGAGm/nytK7twaw8axgPMrzn74AHfi7EN0s69EVmFbLJbgRxXO0ENYRC7A2We+LrAXuC63BRgRaQa8CITifJE4WVXfM2GzgM44Rg8ARqqqZ4vG2cSoahO387Eist2frHZ7VYvFUgJQNOOUV1dIHsAxJtIA+Mic5+ZPYLiqumw7Pici7vvZ3ue2h70/ZQ3O2uvc+H2FsArbYrEEP4rzpaM3Vzjc7Tm+CVydp3jVb1T1W3OcBvwEBG6qKZvH3BW+iFQGHvOXyCpsi8US9KgqeuqUV4dvm47+qKGqLuMBB4AaviKLSGscs1573Lwni8h2EZkuIud5Sep+PfGqetjt/IiqLvSXzo5hWyyWEoDfvUR+8WPAYBVQ00PQwzlKUVUR8bp+UETCcAwdjFDNmgV9EEfRlwVeBv4NTPQlbEGxCttisQQ/qoUaq1bVHt7CROSgiISparpRyD95iRcKLAEeVtUNbnm7eucnROQNcm6dWqTYIRGLxVICUPR0pldXSNztOY4APswdwdiCXAi8parzc4WFmV/BGf/eWViBvGF72BaLJfhRhcKvBvHGFOB9EbkFZ9e86wBEpCVwu6reavw6AdVEZKRJ51q+946IXIjzaflW4HZ/BVqLMxaL5dxFFT118gxlrYeA7h78k4BbzfFsYLaX9N0KUKxr2ESBOcCNbufvek
tkFbbFYgl+zjEDBu4WZ0Tkr1znx72lswrbYrGUAAo36Rjk7BWR+3DG0jsBx7xFtJOOFosl+DmzH84UN6OAFsAHQD987CdiFfbfDFVl7L330rhJU1q1bsOWLZ6/ok3ZsoWWrVrTuElTxt57L2q2tvxgwQJatGzJ+RUrkZyS245o/vjko5V0b9uCrq2ieXHGtDzhmz5fx1XdrqBBzaosXRSf5Z/64w9c1e0K+nbpQK+OrXln1msFKh/gs9Wr6N0hhp5tm/Hy83ll2Lx+HQOvvILGERewfHF8jrC0/T9y8/VXE3tFK/pe0Zr9P+wrsBwuEhMTiY6OJioqiqlTp+YJP3HiBMOGDSMqKopOnTqxb1/hy/xk9UqubNeCbq2jeWmmh3ZYv47+3a/g8rCqLMtVBwDHjh2lQ3RDJjzgaWvnokX1jH6aftYRkUtFZJqIPIrTo74JiFXVAa4vKj1hFXYRICL/JyLPuZ3/zyzUd52PEpGZIlJXRDwu+RGRiSLSwxzfLSLnnwlZV6xIZM/uPezcvo0XXnie0Xd73np39P/dzX/+8wI7t29jz+49JCauBKBxZCRz58yhY8cOBSo/MzOTRx+4hzfmfsCKdZtZvHA+3379VY444bVq8fTzL9L/mmtz+F9Yoybzl61iyZp1LFi+mpdmTufggXQCJTMzk4kP3sMrc+aT8Okmliz8gN25ZAiLqMWTM16kX9y1edL/e9Tt3HLHaJZ+tpn3l6+mWvXCfKHsyDNmzBji4+NJSUlh3rx57Nq1K0ecWbNmUaVKFXbu3MmoUaMYN25cocuc8O97eO3dD1i+djMJCzy0Q0Qtnp75IlcNzFsHAM9NeZzW7doXSo78o+daD/sD4EegAvACzj4iHic13bEKu2hYB7jfudFAZREpbc7bA5/7ykBVH1FVl5K/GzgjCjthSQI33DAEEaFN69YcOXKE9PQDOeKkpx/g2LGjtGndGhHhhhuGsDhhMQANGzbksssKvm3otpQk6tStT+269Shbtiz9rr6GlcuW5IhTq3YdGjWOopTkvD3Lli3Leec5X/2ePHmC06cLtt3m9i3J1K5Xn4vrODLEXj2Qj1bkleHyyCikVE4Zdn/9FZmZGXTo7CwMqFChIuXPL1xTJSUlcckll1CvniPPoEGDSEhIyBFnyZIlDB06FIC4uDjWrFmT9dZTELalJFGnXnY79I27hlXL89ZBw8ZRlCqVV03s3LaFX37+iY5d8iyuODP4/zS9pHFaVaer6v1AM1U9ST7+81ZhFw1bgctEpLzZxOUv4+faPrE9jlIHKC0ir4jIFyKSKCLlwdmiUUQGichoIBz4WEQ+NmE9RWS9iKSIyDwRqVhQQdPS0qlVq1bWeUR4OGnpaTnjpKcRER6RHScigrS0wHuynjiQnk5YRHb5YeHhHMxVvi/SUvfTp3M7OjSL5J+j7qZGzbCAZTiYnkaY2/XVDIvgYHr+rm/vd7upFFqZUTffSFyPjjz92DgyMwvXw0tLSyMiInd9p3mNExISQmhoKIcOHSpwmQcP5GyHmmH5b4fTp0/zxKMP88CEyQUuP2BU0YyTXl0JZLmI3GQ6dZkicml+ElmFXQSoagawBWgFtAU2AhuA9iISAYiq/miiNwD+Y7ZpPAxckyuvmUAa0FVVu4pIdRzLFD1UtQWQBIzNLYOI3Oba+ObnX345I9cZDIRH1GLZJ+v5eNNWFrw3h59/8vgV8RkjIyOD5I3ruf/Rx5m3fA0//rCXhe+9c1ZlKG5mv/EKXbr3zPHQOxvo6dNeXQnkTuBVnM5dA5y113f5S2SX9RUdn+P0pMsD64FvgYeAn8k5HPK92365yTibpvuiLRAJrHO+fKWsyT8HqvoyzsYzxLRokeNd+aX//Y833pgFQExMDPv3788KS01LIzwsPEde4WHhpKalZsdJTSU8PPCerCdqhoWRnppdfnpaGjVylZ8fatQM47KGkWze8Dmx/fPshuk7bVg46W7XdyA9lRph+bu+muERNGzchIvr1AOgR+9+bEveDDcEJEIOwsPDSU3NXd/hHuPUqlWLjIwMjh49SrVq1QpcZo2aOdvhQHr+22Hr5k1s3ried2a9yp9//M7Jk6c4v0JF7h/vd3fQAqOnldMnzxXrOaCqoQVJZ3vYRYdrHLsdjkLdhaNoc49fn3A7zsT/Q1OAlW6bo0eq6i2BCHb7P//Jxg3r2bhhPVdd1Y85c95FVdm4aROhoaGEheXcxCwsrCaVKoWycdMmVJU5c96lX99+gRTplabNY9j7/Xf8uG8vJ0+eJCH+A3r0js1X2vS0VI7/9RcARw7/RtLG9dS/tEHAMjRp1oJ93+1hv5FhafwCuvXMnwxNmrXg2NEj/GreYjas/ZRLLmsYsAzuxMTEsHv3bvbudeSZP38+ffv2zREnNjaW2bOdOamFCxfSuXNnzAO8QDRtHsO+77LbYcnCD+jeK391MO2l1/hsy5d8kryTByZMJu66wWdUWTvoOdXDFpHOnpy/dFZhFx3rcXrDF6rqT+rMCP2Mszn6Op8p83IMqGSONwAdXGNcIlJBRAo869e7Vy/q1atL4yZNufPOu5jx3PSssDZt22Udz3huOnfccSeNmzSlXv169OrVE4APFy3ikgaXsXHjJgYOvIar+g8IqPyQkBAmPPkMI66Lo2eHlvTtH8dlDRsxfcrjrFq+FIBtW5Jp37QhSxfHM+7e/6NXx9YA7P7ma+J6dyO2S3sGD4jlH3eOpmFk44DrICQkhPFPTOWWIQPpe0Ur+vS/mgYNGzHzqcmsXuHIsGNLMp2bN2LF4ngevf9u+nVqA0Dp0qW5/9FJjLy2P1d1aYeqcu3QEb6Ky5c806ZNo3///jRv3pyBAwcSGRnJxIkTsyYfR44cya+//kpUVBQzZ85k0qRJhS7z0SnPcNP1cfTq0JLYAU47POfWDtu3JNMhuiHLFscz/t7/o/cVrQtVZqFQOH0yw6srgdzj5h4BEgC/jSqFmWm25EREvgC2qOpQcz4BZ8+AKqqaISJ1gQRVjTLh9wIVVXWCsQuXoKrzRWQUznhWmhnH7gY8Bbg2Rh+nqou8yRHTooWuW/vZGbnG/FL8RniL/76uHVqmuEUICiO8l14Umuxrr+r80Kx2Tf3o/mFew6uPmlroMooTEakNPK+qPntAdgy7CDETie7nE4AJbud7gSi386luxyPdjp/HsaDsOl+NM6Fpsfw9UeX0qTPz8MmPEV4TLxPYYU5/UNX+xr8eMBeohjMvNcws08s3qvqDiFwuIqVV1euyIzskYrFYgh41CtubKyT5McIL8JfbXFJ/N/+ngOmqeinwGxDQHJMLVW3oS1mDVdgWi6WEcDrztFdXSPwa4fWGMVrQDXAZNchXehE5KiLHzK/LHTNha72ls0MiFosl+PG/rK+6iCS5nb9slrrmh/wa4S1nysgApqhqPM4wyGHzLQbAfsDvAnVfy/pUtaO3MKuwLRZL0KPgb/ne2TDCW0dVU0WkPrBaRHYAR3xL7lWeEKAp2avBwLF88yDOtxoed/eyCttisQQ/WrgPZ4rCCK+qpprf70RkDdAcZ
xOnKiISYnrZtYBUT+lzsQJnSNp97+uGOMv85uCYKsuDVdgWiyX4UQq82Vc+cBnhnYJ3I7xVgT9V9YTZLqID8LTpkX8MDMJZKeIxvQcuUNXmucpIUdWrfCWyk44WiyXoUVVOnzzl1RWSKcCVIvIt0MOcIyItReRVE6cRkCQi24CPccawvzRh/wbGishunDHt/GzUPsuD31v+EtketsViCX7O4DrsfBrh/Zzs3Tdzx/sOyNdnoCIyTFXfVtUZbn7lcXroccBzXhNje9gWi6VEoGjmaa+uBPG4iIQDiEgLEfkPsA3HRNgof4ltD9tisQQ9qkrmGephn2XG4KwwUSAUGA2M9vfBjAursM9FMk4Q8st3xSpCWPX6xVp+RhC8PJY5tLe4RSC8Wt3iFqFoUEpaT9ojqroAWCAiV+LYcXwKaCkir/uy5ejCKmyLxRL0qCqZJ0uk7UaPqOpKYKWxUHUj8I6InFDVK3ylswrbYrEEP+oYMTjXUNUjwH+B/4qIx0lNd6zCtlgsQY8q51QP2xOqusNfHKuwLRZL8KOKBsEe58WNVdgWiyX4+Rv0sPODVdgWiyXocZb1lfxVIoXFKmyLxVIiKIJ9r0s8VmFbLJagR0/D6ZNWYRf/1wUWi8XiFz1jFmdE5AIRWSki35rfqh7idBWRrW7uuIhcbcJmicj3bmHNCiWQD6zCtlgsQY8qZJ467dUVEr82HVX1Y5c9RxyTYH8CkcWKHQAAIABJREFUiW5R7nOz97i1sAJ5wypsi8US/OgZ3fwpUJuOg4BlqvpnYQsOFKuw/2aoKnc/OoWGnfrSvNc1pOz40mO88U/PpF7bK6nSqE0O/zfnfUhY887E9LmWmD7X8tq7HxRIhrH33kvjJk1p1boNW7Z47pCkbNlCy1atadykKWPvvRdVZx3uBwsW0KJlS86vWInklJSAywdYmZhI82bRRDeJ4tmpU/OEnzhxghHDhxHdJIqunTuxb59jAGT1Rx9xRYf2tGnViis6tOeTNWsKVL6qcvcjk2nYsRfNr7zaezs89Rz1WnejyuUxHsMXLE2kzMWRJG3bWSA5XCQmJhIdHU1UVBRTvdTHsGHDiIqKolOn7Po4Wzgfzpz26jA2Hd3cbQFkn1+bji4GA+/m8pssIttFZLqInBdA2QERsMIWkWYioiLS+0wI5KG8uiIS0N0oIiNF5AVzfLuIDC8iWSaISGqusawqgchtNkWfWRTyFITlH69l9/f72PVJAi8++Qh3jXvcY7y+PTrz+YdzPIZd268XycvmkbxsHrcMuSZgGVasSGTP7j3s3L6NF154ntF33+0x3uj/u5v//OcFdm7fxp7de0hMXAlA48hI5s6ZQ8eOHQIuGyAzM5N7xo5hwcJ4NienMH/ePL7atStHnLfenEWVKlXYtmMnd941ikfGjwOgWrVqvD9/Phs3b+Z/L7/CP269pUAyLP/4U6cdPlvOi089xl0PPeYxXt8ru/L54vc8hh37/Q+ef+1tWjdvWiAZXGRmZjJmzBji4+NJSUlh3rx57MpVH7NmOfWxc+dORo0axbhx4wpVZsCoknkq06vD2HR0czkM8IrIKhHZ6cENyFmMKo4JSY8YE2JNcEx8uXgQx7xXK+ACHIMGZ4SC9LCHAGvNb5FjjFMWGar6kqr6teQQANPdxqqaqerhAOVJUtXRRShPQCxa+TFDr7kKEaFti2iOHD1G+sGf88Rr2yKasBoXnhEZEpYkcMMNQxAR2rRuzZEjR0hPP5AjTnr6AY4dO0qb1q0REW64YQiLExYD0LBhQy677LICl5+UlET9+pdQr149ypYtyzWDBpGQkJAjzpKEJdxw41AAro6LY82aNagq0c2aERYWDkCjyEiOHz/OiRMn/r+9846Tqrz6+PdHEUTFrrBYQMSGiIrG8qpEQCEiIGA31jf2Hhux1xRjsMaosSZI8iYqiiUajRpNVNQYFWtEopGiURG70n7vH+cZGdZdWNiZnS3P9/O5n51b5j5n78yce+55TllsGcb/+WG+P3JYvT6Hcy65glOO+gHt29XPoHv22Wfp3n3+9di9putx7718//txPYYXXY8Gw+C5rnVZ5NvtAbY3rmG5C3gvKeKCQq6xp2NiT2Cc7W/a3Nie7uBr4Cbq2MxgSVgshS1JwB7AQURLnfZpe1dJr6XZ0n9JulXSAEl/TzOv30nHLSPpRklPS/pn4e6WLOLxkh4mnP61jX+QpDsk3Z/Oe3HRvoPT2E8T/dYK28+VdHJ6faikZyS9IOl2SR3S9pslXSHpCUmTJe2+mNelZ/qfnk+PRT2q7V8n/b9bSvqupHuKZLtF0uOS3pY0QtLFkiam/7FtOq5/ev/EdP2W+Bc67d3/skbV/ObRXTqtztT3Fvb9/Dbj/vQQmw0cyV5H/JB3pr276DdUl2HadNZYY435MlRVMW36tAWPmT6NLlVd5h/TpQvTpk2nFEyfNo0uayx47unVx582jTXSMW3atGH5jh358MMPFzjmrjvvpHfvTWm3BArzW59D59WZ+u57dX7/cxNfYcq0d9mlf9/FHvtbskybRpcu1a/1t69H4Zg2bdrQsYbrUU4KtURqW+pJoacjLLon4z5Uc4cUKXsR/u/6+acWwuJa2NsSLdjfBB4FBhftWxf4BfFosAGwL7AdcDJwejrmDOBh298BdgR+LmmZtG9zYHfbi/oGbgrsRTyW7CVpzXTBziMU9XbARrW89w7bW9ruDbwKFD/Pdk7v3ZXU060WTixyhzySth0BXJ5mkLcAphQOlrQ+0Vn5INvP1HC+7sSs81BgDPCI7V7Al8DgdFO8GdgrbW8DHFn9JJIOK/jvPpjx0ULErx+7DujLpL/fzz8fuJ3+22/DIT88o2xjNWZefeUVzj7rTC6/8soGH3vevHmccv7PuPisUxt87MpRu3VdghojdenpiKSuwJrAX6u9/1ZJE4GJwCpAzX7GErC4CnsfojMw6W+xW+Tftifange8TITJmPgnuqZjdgZGSXqeUPjtgbXSvgdtz6iDDH+x/bHtr4BXgLWBrYBHbb9vexZQs9MPNk7W7ESiBm3Pon132p6XGmsubNKh2CWyY9r2JHC6pNOAtW1/mbavStyt97P9Qi3n+1N6vJoItAbuT9sL12194tr+K22/Bdih+klsX1fw362y0oJhpFff8vtvJgk7rbYKU4qs4qnvvkeX1VdbyL+7ICuvuALt2i0FwP/uPYLnXnp1Ee8Irrn2Wrbaehu22nobOnXqxJQp39zTmDptGlXJzVCgqnMVU6dNnX/M1KlUVXWus5wLo3NVFVOnLHjuztXHr6piSjpmzpw5fPzJJ6y88srp+Cnss8/eXPvr61lnnbo3arj65rH0GTicPgOH02m1VRf8HKa/R5dOi5rrCj797HNefv0NBux5IOtuM4AJ/3yBEYccvcQTj1VVVUydWv1af/t6FI6ZM2cOnxRdj4bAhtnz5tW61O/c/tB2f9s9kutkRtr+rO0fFB33lu0uSccVv7+f7V7JxfJ925/VS6CFUGeFLak1MBI4W9JbwJXAIEnLpUOKHXnzitbnMT+jUsDI
IoW3lu3CL/7zNM5WRRbs0BpEKR5nLouXrXkzcEyyVM8jbhg1nVdJlosKsizspLbHEhbyl8B9kvqlXR8D/yEs99r4Op1jHjDb8x2DxdetXhx14N7fTBIO27kfY26/G9s89dwLdFxuucXyVRf7We9+8FE2WLdbnd53xOGHM+GpJ5nw1JMMGbIrY8f+DttMePppOnbsSOfOnRY4vnPnTiy3XEcmPP00thk79nfsOnjXOsu5MPr06cObb07irbfeYtasWdx+220MHjx4gWN2GbwLY28dA8Cd48bRt29fJDFz5kx2HzGS884/n2222Waxxj3qoH35xwPj+McD4xg2sD9jbr9riT6H5Tsux7svPsGkJx9i0pMPsdVmvbnjxl+yRe+NF0ueAn369GHSpPnX47aarscuuzBmTFyPcUXXo6EwMNe1Ly2FxbGw+wMv2l7TdlfbaxOP+sMX4xwPAMcmXw+SNqt+gO0JRQp9fB3POwHoK2nl5Pfdo5bjlgOmp2P2W9RJbZ9RFCxfK5LWASbbvoKwqAvT9rOI63OApH3r+L9U53Wgq6R10/r+fPuRrM58r9/2dFtrDTbYYTBHjDqPKy+c79Lo8735l23Uj0fTdasBfPHlV3TdagDnX3o1AFfdPJbeA4az+aDdueqmsdxwyeI//Q0aOJBu3brSs9cmHH30MVx+2aXf7Ntq6/lK8PLLLuWoo46mZ69N6LZONwYO3BmAu8aPp3uP9Zgw4WlGjBjJkKHDWBzatGnDJb8YzW7DhrLF5psxYuQINtxoIy684HzuvTcm2w448CBmzJhB714bc9WVV3De+RcAcN211zB58pv87Cc/Ydutt2Lbrbfi/f8u3hwAwPf67UC3tdZkg+0GccSpZ3PlRWd9s6/PwPk/qVEXXULXLXeMz2HLHTl/9FWLPdaiaNOmDaNHj2bo0KFsttlmjBgxgo022ojzzz//m8nHgw6K67HxxhtzxRVXcMEFF5RcjoVhYNY817q0FFTXmV5JNwETbF9TtG0o4U89ErjH9sZp+81p/bbk97nH9saKdu6XEb7wVsSj/q6SDgK2sH1MDeMWv3+B49Lk3SW2H5V0MBFeMxN4Hphl+xhJ5wKf2b5E0pHAqcD7hJJfzvZBxfKm835me9kaZDkXODS9v8BuRFzm/sBsIo5zX6LBZkHuFYAHgQuAT4CT0//9jWzVx60md3/gEsLifgY4Ms1I10ifTXp6wj2/r213gzAn93Sk/Yy3Ki0CsxtBT8cOHTr8w/YW9TlHtzZL+5zlu9a6/+AZr9V7jKZAnRV2pumQFXZW2AWai8Lu2qa9z+rYtdb9P/jo9RahsHO1vkwm0+gJl0ilpag8WWFnMplGjw1zszcgK+xMJtP4KUw6tnSyws5kMo2eQlhfSycr7Ewm0+ixs4UNWWFnMpkmgIHZ2YedFXYmk2kKOE86khV2JpNpAswju0Qgd5zJZDJNgYXUEanvZKSkPSS9LGmepFqTbyQNkvS6pEmSRhVt7yZpQtr+f5KWqp9EtZMVdiaTafSUuZbIS8AI4LHaDkjF734JfI8o37yPpEIZ558RVTzXBT5iwbLNJSUr7Ewm0+iJsD7XutTr3Partl9fxGHfASbZnpxKOP8eGJYK2fUDbkvH1aWJ7xKTfdjNkOcmvvJB27U3qW+X1FWAD0ohTxMdP8tQuvHXrq8Q7zPrgav99ioLOaS9pGeL1q9ztb6O9aQL8E7R+hSiDv/KwEzbc4q2d6FMZIXdDLFd72aMkp6tZDGdSo+fZWgc4xewXa+m35IeAjrVsOuM1NexSZAVdiaTafbYHlDPU0wl2oMVWCNt+xBYQVKbZGUXtpeF7MPOZDKZRfMM0CNFhCxF1MAfnzpEPQIUGncvqolvvcgKO1MbpfT/NcXxIcvQGMYvO5KGS5oCbAPcK+mBtL1K0n0AyXo+huia9SrwB9svp1OcBvxQ0iTCp31D2WTNDQwymUymaZAt7Ewmk2kiZIWdWYBCg+RMJtP4yAo78w2SlCZRkLRSpeVpDEjqXGkZKk2+iTcessLOfEORsj4UuEZSRcM+y1mTYSFjquj1YcCJktpXSoZKUH1854muRkOOw84sgKS9gZ2BU4uytxpq7A62v0ivhwMbAxcUW/7lpuimNTSNf5ntrxpi7DRu8VPOSGAWMMv2Aw0lQ9H4ewA9gH8DE2xPbigZMjWTLewWTg3W3IbASGCFtL91A8nREzhF0iZpU3dgOjSMhVe4DpJaJ8v+HGAAUdmzwShSlicRYWTdgXMlDWlIOSQdB5wCtAW2Bi5Nn1GmgmSF3YKpZs11Scr5XOBC4HpJa9ue20BKexawHjBS0jrAssBnBTmLZS71wNUs+OVTcZ/tgP8AJ5d6vNpkKPyV1AnY2vaOwPLA+8B9kjo0kCytiaeL/WyfR3wfHgP2k7RUpV02LZmssFswRcr6ROAqIkniQODnwHjgVknr2J5bLhkKP37bbxAJCFVECcs1gX6SNgMGSuovadVyWNtF1+EI4AZJPwaGECU3t5R0aanHLEbS8kX/VxdCQbeW9Ctgc2DP9BkMkdSjDOPXpIDXBg4GsP0+MBGosj0r+7QrR1bYLZzkqx1KKKcNgS1tf0pYVY8D10pqU27LVtJ+RJbYT4HehKLqSqQAH5iW5UotQ5Es3wf2AU4FNgP6J3/6TsQN42dlGrcVUVv5mDTJeWNSzq8C/YGTbH8l6WDgdNJTRykp+gyGSRoAtANOADaQdHw6bEVgFUll+wwyiyZnOrYwJPUH1rZ9Y1o/BJgNLE34rofa/jq5Q96WtIrtspb3TBb+HsDhtidKWgs4E3gLGGf7VUmty2zpH0rUi9iMUNy7EP7rZdPfVWy/VeIxuwMzCHfQJEDAprbfTb78kcSN9M/ERPDeRenQpRi/+IZ5ADCKuOavAA8n2a4FXidcJHvafqlU42cWnxwl0vL4mvBPz7b9W2AyMBr4OPlMCxNe60g6vgGUdRWhHIfY/jBVPfuPpJ8DF6RjJtmeXU45iJvWQ8CLtvulcY8k6kFfaLuklm2yVPcnupjMA64gbhT/C1xk+0XgRUmPpLdcZfvfJRy/WFmvAKwDfBf4Cjga6AvcDmwBrArMsf3fUo2fWTKywm4hpEdvbP9N0g8IV8fntu+Q9BwwRdIwYpJrP+CAcoT11RCi15qoU7w6UaqyEJXxPhEl0baUylrS6rbfS693A9YCxhIV1jYFlkmTfoOAI4B9y+Q3/1TSRUAvYCBwI6G0/yZpWds/krQn8Ibtf5Zy7GrK+lQiCmRz4HHbf5E0FtgX+AFwq+2/lnL8zJKTXSItDEk/BNYlwvaGAgcQj7/7AjsCnwKjy/HoW01RdABm254t6ULi8fsO229J2h8YTkQpfFnC8dcgomDGETemU4E30+vzgC+Ja7I18STyo1Jfh+quHUXT1xOB1wiFvRRREe41YFtgkO3XSilD0dg7E6F7RwEHAYcC29t+XVI3wh3z22xZNx6ywm5BpB/h3cAw229K2h64F9jf9l1pYnEp21+XYexiZX0SYVl2Bg4nir4PJuKeHwWGAbvZfqXEMlQRSmi
9tOxte6ak0wkf7dXpCaQN0CqF95UFSX2Bd2xPTp/L2USCSiEi5X+AV2z/p4Rj9gG62b5NUm/i5vWR7UPS/jMI5b2z7Zc1vyh/ppGQo0SaMTVEdnwCvAy8B2D7cUJBjJO0u4OSK+s0VkFZH0P4rA8HViNcEZ8S/uqzgScJq7JkylrSismynQY8n5auhM8Y2z8GXgTOkrSN7TmlVtaSekkak14fANwMXCXpPGAOYeGvRUSCLG/7/hIr61ZE9MffJK1h+wXiyWpFRVYpti9Kct0pqS1QtknezJKRfdjNmCIl2Q2Ymib1AG4iojIgHr2vAsoy+58syV1tn5I2rUS4YY4hIiNeJqz8PW3/qQzjtyVcC1tJmkFY9VcTrodN043qNts/lTSLSJYptQzrAB8RraTuBN4lLPoNiHjvY4ArgZ8APyRcM6UcX7bnAU9IWg34raTf2L4yKfLvSsL2ONtnSBrdAJO8mSUgu0SaIZJapR9oIcX4WKKN0VO2b1R01JhNKKf+wADb79R6wvrJ0pmIKb7G9qi0bQPgGmBH25b0EuEO2L1M7pgViQiQNZnvo12LmOzrA/zN9phSj5vGXhW4CLgPeILwU29me/20/zuEO2hlQmG/V2o3RMFvLul/CJ99H2JC8TZiwvUYYsJ1nO17apgYzjQSsoXdDClS1gOIR/8RxETj9yQtbXtgmnBaBbi8HMpa0jJAO9vTJW0ETEiK4DSiSel/gWGSliUU2Y9Lqawl9QI6AB0Jv3jBFXK0pLNS6OAdhJtgU0l3ORKGSs2nRMTLjrbvTKGC90i6xfaBtp9OPvMdiUnYkilrSVsDH9ielCzry9M4DxLujqOIqJyrCBfVM5Cr8zVqbOelmS3E3ER34sd4ddrWkZjUuxY4p8zj7wr8Cfgr4eqAcEW8A1yc1o8DxhAumY1KPP5uhKvlBuBp4BZgh3RNLgeuS8f1BvYkfMalvgabE1mjEFmCLwIXpPWVCd/9jUXHty+DDGcQSTA90voThXGAZYgnjEeBvSr9nc1LHT/TSguQlxJ9kMm9VW3b/kQixA5pfWliwm80kblXDjl2Jiy1nYjQvAeAjmlfZ8INc0Zabw2sVuLxt0w3gV5F235B1EnZmogO+XVS5M8Da5XhGqxKTOi9Qli0rYgypQ8RmaQQTzcPA7+q7fMrxXeByF6ckMa/MMlScIWuDPQD1qj09zcvdVuyD7sZUC1kbhdiYu9R21Mk7Uv4i3ez/bCkpYkf7BdlkKM3UX9kiO2/pvWbgKeA521fl5JSJgOX2D67DDIMIXzyx6cElM/SxNqlhGIaKakd4SZ61lF0quQoanCcBNxJpHYXamqvClxv+wNJqxAW75QSjlv8XVjR9keSfkS4P7oQTxjrAR8QTzwXugHrfWfqR/ZhNwOKfqDHElmKDxI1lI+zPVbSHOAhSd+1/VgZRXmFiDYZJOlNwjd6N2HhnZUU6OgUtdKxTDKsSPjtScq6EEt8vKSnJG1r+wngd6UeWNKGQE9H1MnlyY+/CnFdhhHRKh8QTyB/cYnT/muIde+dwih/ShSNOpX4bvyKKKQ1MyvrJkalTfy8LPkCdC96vQMRCdKOmPX/F/AXIkkGwj2xfhllaZ3+LkU86n8IHFW0vy/wN2ClMl+TFQiFeHTRtoLfdgyhUMsxbnvCon4IuDJt25Tw1a+T1n8BzCT8620prRukVdHrw9O1rkrrHZJ8RxEROz0q9Z3NS/2WnDjTBFHQDhgv6acADst5fyKudzfb6xF+2usk7eSIsX29XDI5NTpwJJwMTGNvVHRIF+BjShxjXEwKZ5xJJOFsm0IacZQnHQ70JBRmyXFYqtcAhwD/I+nXwPqE73i7dMxJRPz7YNuznbRpfVGUyL0iuX4garOMBrpKOgG4h6iLMpaYiM3Zi02U7BJpojhKoA4G7pA01/YZDp/1msQjOERkwgTKlBRTg0wFpT1bUUjqz5IuSzIcBxzqEtYGqWH8QuGoxwmldL6kgUQI4ZZEbZKpZRz/c+DzFFt9JrAJkRyzq6SZtsfbfrCUY6bwzLOBM4v+/6lEbZhViQiZ24lEndm2Lynl+JmGJU86NjGqJzVI6kKE0N1l+6wUe3suEf+7LrCH7UkNLGMhUWMporXUGkR9ipLWBqmDHMsSiUGfAZNsv90AYxb+91aEwu4HXAL8ETjQJfQZS+pHTOruZfuplFE5zPal6fX7jqqAgwmlPsS5kFOTJivsJkS1SaWjgHm2r0lZe/cSmWsXEIpiJ2B8Od0gi5C1oLjaAKuX07KtZfxWRRZng1LDTXVv4Bnbb5ZwjKWJDMrORNZiB6IK4V22f56OaU/USzmZUOq5+UATJyvsJkSREiyUw9zLqah9SgEfT4TznbKQ0zQYKnOXmMZOuVO8UzbnHkR52EFEqOSvi/YvR8SBv2R7crnkyDQc2YfdBFC09frM9gRJKxE1m08APpR0EDG5dXfafluqX/FBOZVFXWjJyhrKn+LtaKc2j2iW+xoRFQSAoj9kT+Bk50JOzYZsYTcBJB1OVJjbNintc4gwvk+J5IfPCf3wI+Uaxi0OST2J+HsTGZ19iA70hzpajWWaCVlhNxEUzXJHE4r6NeJRd6LtaZIOJHyVuwFfV9qyzpQfSSJirwsTnKsS8febEZmMJW8Akak82SXSyCn4QR1lUdsQBZUG2X5A0lJJkZ9IdE/JWWvNkGqTza1tz03rcyV9lyjZuiURvgdwUqUmmzPlJSvsRo5tJwvKjlocc4j45gG2n0llM/fK1lTzpVpkUHdJn9k+J332pwPnOkrTPi/ppewSa75kl0gjo6bIgqLokLVtv51cIDcBm2YfZcsgJSKdQ8RT/xh4zvZBRd+JioUxZhqOnJreiKj26DsiZekVMgh3BG6UtIHtW4jIgLL0X8w0LhSNKEYS9bTvIfzUW0i6vigZKFteLYCssBsRRcr6ROAU4O20viyR/HC17deSYr8l+ymbJ2lCsZgViASZbSR1SeGSmwI7S7oWcpeYlkJ2iTQyJHUlQrP2ctQybmV7nqTlbX/c0pNRmjvVnrI2IVLqv5C0PVHA6UHgzyk6qBXQNSfFtBzypGOFqcFn/QWRZtwN+KjIL1kFfJyVdfOmSFkfR7Qve1rSJ8DFRKecQ4D2ku60/S7RDCLTQsgukQpSzZpqL6lDKs7zBNEYtmvatx9wWko1zjRDUgZr4fXuwO5ED84uRNu1S4g637cSiTFlq3qYabxkl0iFqKasf0j4JFcmuoK0IpIg1iIsqJ3IiRDNFkk9gMHAtba/lDSIKIm7W1rOJIp6TSHK1M51GVq8ZRo/2SVSIYqU9QlEl/HhwB1Ep5B+RHeQHYHViaI+/66QqJkykiaUpxB1q3tIWsH2/SlJqg/RdX6GpHeJUgTLJFdIpgWSFXYDI2knoiPL58AfgHeJOhCHAu8R1tTjRDbjQ5WSM1N+ksvrTOA3th+TNIrwT8vRxLgH0QvzH0TnmuFZWbdssg+7AUmPuhcD04mJxVuAF4jPYQjRA/GXRK2Q30haWvPbPmWaH62J3pt7S9qI+G68DwxN68OJcL6hwBG2p1
dM0kyjIPuwG4gUojWeUMr3SWoLXEs0HXiSaNA6hogO6QFcZXtKpeTNlI9q8xeHEJOLEH7qacDxRI3rsbb/IWkpR6/MTAsnW28Nx3TCmuqd/JSzgeWAZYhQvunAXsCPgDFZWTdfqoXuHQE8S7gnTyQmmq8kelIOl7R0VtaZAtnCbgCKkl86E0kxzwIrAesQfsk5iv6HrYEOtj+soLiZBiC1+LoGuNj2y5I2JLrHdAd+RjTSbWv7gwqKmWlkZAu7AUjKulXyQR4O9CIiQQ5Lyrq17Vm2v8zKunlSPd3c0T2+HXB0Wn8VeIqoE3IE8FVW1pnqZIXdQCSl3db2NOAw4A3gEElVOXuxeVPNZ903TT5DNNGdLanQg7M98CJwYSqXmsksQHaJlIGaSl0WWndJ+g6wFPA6MeH4Z+AnuTRm86NgVRcp6yOI+HqAx4AbgE7AsYS1XUXEXU9seGkzTYGssMuIpM2BD4D3bH8tqS9wKXCa7QcldSL8lO9UVNBMWSiO7pDUj+gEMzgp8huJGPxfA28RvuuZtt+vlLyZxk92iZSQYj+lpKOBu4DzgOskLQN0Bc5PyrqN7Xezsm6eSFoXeEzSmmnTIKCXpC2TxX0ikcV6OrCm7Teyss4siqywS0jRo29/YG1ge+BcYAbwK+CPtu9U7mzeEphMZKz+VtKqRJeYPwD7S9rE9kzgJKIJRe7FmakT2SVSQjS/e/VEorLaEOKmuDrRgGBDYHfbn1VMyEyDkb4PFxCd7kcQYZtHEg0JbrH9XE0t4TKZ2sgWdj2pFq5l2+8Rlde6AwfbnmN7KjAa+CeRwZZpxiiRJpLPIiztO4C5wNXAbCIdvV0Fxcw0QbKFXQ+qhWvtA6wL/N32w5K2AP4P+LHtG9IxuVtMM6Xad6Gt7dlFCVNtgTOAvkQ2K8C8HGedWVyyhV0Pin6gRxP1q6cC10s6lSjqtAdwiaQD0vFZWTdTir4LxwJXpXmKeZI2BfoTMdcvAjcDH2RlnVntabzyAAAEWElEQVQSssJeAiT1lLR6er0RYTntTCQ+fEQ0IxhFuED6Eh1kMs0cSTsTtc1/lGLutwLuAz5PtWNOAA7MMfeZJSUr7MUk+az3AeamONtXiGSILYGRtvsA44iU46Nsv2h7UuUkzjQEKaZ+KLABUToXopDTwbYfh7DCc+hepj5khb0YJJ+kbZ9J/BhvlrRmerxdEShYTrOIzjG3V0jUTJmpFnPfNjUWuAx4ADhBUifbf7T9QGESsmLCZpoNedKxjlSbVFqeaIL6W+C/RIztF8DvgKWB1YjwvVcrJG6mgZB0IrAeUX3xLGAVIpxTwC9zYlSmlGSFXQeqKevjiE7Wo4j6D9cDnxHxtp8TMbcv2n6rMtJmGgpJ+wMHEmGcrwO/tz0q1Ys5gEiYOi9PNmdKRVbYi4GkQ4GDCet5WtrWBriKaEZwQvZRthwknQ48DGxBKO3dUs0YEb7sD23/t5IyZpoXuQlvHZDUGjCwHXAF0ErS8cDWRA3jY4Bfkq9ns6WmCoxESvlPgQ+BoSn2+iygle3zGlzITLMnW9i1IKkX8Intt4u27UuEZn0E/BX4D7CD7cMqI2WmoUkJUnOJWiFTgAeBy4kyudsBpwH7pOihTKakZIVdA5JWIpJgXgReIZT0J7YtaX2iXOpMSXsQBXwG2v64chJnykW1+YthhPvremJi8Uzg38A5xCRjR6J07ksVEjfTzMkKuxYk/QT4hIj4WA/4B/CA7b8nv/VewNnACNsvV07STLmopqx7AJsArzl6MO5E1IcZZfveVOhpuXzjzpSTrLBrIVnPpxMlUtsRvsp9gFMJf/ZkYLLtNyomZKZsVFPWxwKHEB3ufw1cY/vTpLRvAE63PaZy0mZaCnmSrBZs/1HSNsBwwle5PRFnuxJhcY9LyRKZZkiRsh5JlBrYBdgP6AHsIOmR1IjiAGIuI5MpO9nCroGCdSVpCFG/eEPgONt3S+pA/J6/rKyUmXKSXBwrEpPL79j+Xtp+EhGy9yfgfttfVE7KTEsjp6bXQFFB+XuIiIAnbN+d9n2RlXXzRFIvSWtDdLm3/SHwA2D9Qmdz278A3gZ2JCYaM5kGI1vYtVBUy7gn0Zfx7Byq1XypJTJolu0vU+biL4kWbxcXjrc9o2ICZ1ok2YddC0VJEp8RiRHvVVCcTJmxPUPSZcyPDLoVeE7S/bafkHQYMEbSHNujs7LOVIJsYdcBSUtnN0jzZyGRQaOAaUSvzlnFyVSZTEOSFXYdyI1SWw6SCr03pxCd7q8lwvl6EhPPuVZMpmJkl0gdyMq6+VN0U36Eb0cGLQ2Qn7IylSZb2JlMEanS3niiFMF+lZYnkykmh/VlMolCRyFSrfPUrzOTaTRkhZ3JJHJkUKaxk10imUwN5MigTGMkW9iZTM18VWkBMpnqZAs7k8lkmgjZws5kMpkmQlbYmUwm00TICjuTyWSaCFlhZzKZTBMhK+xMJpNpImSFnclkMk2ErLAzmUymifD/GeZtkZPGab8AAAAASUVORK5CYII=" + }, + "metadata": { + "application/vnd.databricks.v1+output": { + "addedWidgets": {}, + "arguments": {}, + "data": "/plots/20a813a2-9400-4146-8101-3af317e3089d.png", + "datasetInfos": [], + "metadata": {}, + "removedWidgets": [], + "type": "image" + } + }, + "output_type": "display_data" + } + ], + "source": [ + "races = [row[\"race\"] for row in df.groupBy(\"race\").count().select(\"race\").collect()]\n", + "dp_rows = feature_balance_measures.filter(F.col(\"FeatureName\") == \"race\").select(\"ClassA\", \"ClassB\", \"FeatureBalanceMeasure.dp\").collect()\n", + "race_dp_values = [(row[\"ClassA\"], row[\"ClassB\"], row[\"dp\"]) for row in dp_rows]\n", + "\n", + "race_dp_array = np.zeros((len(races), len(races)))\n", + "for class_a, class_b, dp_value in race_dp_values:\n", + " i, j = races.index(class_a), races.index(class_b)\n", + " dp_value = round(dp_value, 2)\n", + " race_dp_array[i, j] = dp_value\n", + " race_dp_array[j, i] = -1 * dp_value\n", + "\n", + "colormap = \"RdBu\"\n", + "dp_min, dp_max = -1.0, 1.0\n", 
+ "\n", + "fig, ax = plt.subplots()\n", + "im = ax.imshow(race_dp_array, vmin=dp_min, vmax=dp_max, cmap=colormap)\n", + "\n", + "cbar = ax.figure.colorbar(im, ax=ax)\n", + "cbar.ax.set_ylabel(\"Demographic Parity\", rotation=-90, va=\"bottom\")\n", + "\n", + "ax.set_xticks(np.arange(len(races)))\n", + "ax.set_yticks(np.arange(len(races)))\n", + "ax.set_xticklabels(races)\n", + "ax.set_yticklabels(races)\n", + "\n", + "plt.setp(ax.get_xticklabels(), rotation=45, ha=\"right\", rotation_mode=\"anchor\")\n", + "\n", + "for i in range(len(races)):\n", + " for j in range(len(races)):\n", + " text = ax.text(j, i, race_dp_array[i, j], ha=\"center\", va=\"center\", color=\"k\")\n", + " \n", + "ax.set_title(\"Demographic Parity of Races in Adult Dataset\")\n", + "fig.tight_layout()\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "63884ff1-6fcf-491d-9c2a-46f0fa4bbc58", + "showTitle": false, + "title": "" + } + }, + "source": [ + "#### Interpret Feature Balance Measures\n", + "\n", + "Demographic Parity:\n", + "* When it is positive, it means that ClassA sees the positive outcome more than ClassB.\n", + "* When it is negative, it means that ClassB sees the positive outcome more than ClassA.\n", + "\n", + "---\n", + "\n", + "From the results, we can tell the following:\n", + "\n", + "For Sex:\n", + "* DP(Male, Female) = 0.1963 shows \"Male\" observations are associated with \">50k\" income label more often than \"Female\" observations.\n", + "\n", + "For Race:\n", + "* DP(Other, Asian-Pac-Islander) = -0.1734 shows \"Other\" observations are associated with \">50k\" income label less than \"Asian-Pac-Islander\" observations.\n", + "* DP(White, Other) = 0.1636 shows \"White\" observations are associated with \">50k\" income label more often than \"Other\" observations.\n", + "* DP(Asian-Pac-Islander, Amer-Indian-Eskimo) = 0.1494 shows \"Asian-Pac-Islander\" observations are associated with \">50k\" income label more often than \"Amer-Indian-Eskimo\" observations.\n", + "\n", + "Again, you can take mitigation steps to upsample/downsample your data to be less biased towards certain features and feature values.\n", + "\n", + "Built-in mitigation steps are coming soon." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "471830e4-f77d-4567-8475-8dd398f3fae4", + "showTitle": false, + "title": "" + } + }, + "source": [ + "### Calculate Distribution Balance Measures\n", + "\n", + "Distribution Balance Measures allow us to compare our data with a reference distribution (i.e. uniform distribution). 
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "application/vnd.databricks.v1+cell": {
+     "inputWidgets": {},
+     "nuid": "471830e4-f77d-4567-8475-8dd398f3fae4",
+     "showTitle": false,
+     "title": ""
+    }
+   },
+   "source": [
+    "### Calculate Distribution Balance Measures\n",
+    "\n",
+    "Distribution Balance Measures allow us to compare our data with a reference distribution (i.e. the uniform distribution). They are calculated per sensitive column and do not use the label column.\n",
+    "\n",
+    "For example, let's assume we have a dataset with 9 rows and a Gender column, and we observe that:\n",
+    "* \"Male\" appears 4 times\n",
+    "* \"Female\" appears 3 times\n",
+    "* \"Other\" appears 2 times\n",
+    "\n",
+    "Assuming the uniform distribution as the reference:\n",
+    "$$ReferenceCount \\coloneqq \\frac{numRows}{numFeatureValues}$$\n",
+    "$$ReferenceProbability \\coloneqq \\frac{1}{numFeatureValues}$$\n",
+    "\n",
+    "Feature Value | Observed Count | Reference Count | Observed Probability | Reference Probability\n",
+    "- | - | - | - | -\n",
+    "Male | 4 | 9/3 = 3 | 4/9 = 0.44 | 3/9 = 0.33\n",
+    "Female | 3 | 9/3 = 3 | 3/9 = 0.33 | 3/9 = 0.33\n",
+    "Other | 2 | 9/3 = 3 | 2/9 = 0.22 | 3/9 = 0.33\n",
+    "\n",
+    "We can use distance measures to find out how far apart the observed and reference distributions of these feature values are. Some of these distance measures include:\n",
+    "\n",
+    "Measure | Description | Interpretation | Reference\n",
+    "- | - | - | -\n",
+    "KL Divergence | Measure of how one probability distribution is different from a second, reference probability distribution. It is the information gained when one revises one's beliefs from the prior probability distribution Q to the posterior probability distribution P; in other words, it is the amount of information lost when Q is used to approximate P. | Non-negative. 0 means P = Q. | [Link](https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence)\n",
+    "JS Distance | Measure of the similarity between two probability distributions. A symmetrized and smoothed version of the Kullback–Leibler (KL) divergence; the square root of the JS divergence. | Range [0, 1]. 0 means the observed distribution matches the reference distribution exactly. | [Link](https://en.wikipedia.org/wiki/Jensen%E2%80%93Shannon_divergence)\n",
+    "Wasserstein Distance | Also known as the earth mover’s distance, since it can be seen as the minimum amount of “work” required to transform u into v, where “work” is measured as the amount of distribution weight that must be moved, multiplied by the distance it has to be moved. | Non-negative. 0 means P = Q. | [Link](https://en.wikipedia.org/wiki/Wasserstein_metric)\n",
+    "Infinity Norm Distance | The distance between two vectors is the greatest of their differences along any coordinate dimension. Also called Chebyshev distance or chessboard distance. | Non-negative. 0 means the distributions are the same. | [Link](https://en.wikipedia.org/wiki/Chebyshev_distance)\n",
+    "Total Variation Distance | Equal to half the L1 (Manhattan) distance between the two distributions: take the difference between the two proportions in each category, add up the absolute values of all the differences, and then divide the sum by 2. | Non-negative. 0 means the distributions are the same. | [Link](https://en.wikipedia.org/wiki/Total_variation_distance_of_probability_measures)\n",
+    "Chi-Squared Test | Tests the null hypothesis that the categorical data has the given expected frequencies in each category. | A small p-value is evidence against the null hypothesis, i.e. that the difference between observed and expected frequencies is unlikely to be due to chance alone. | [Link](https://en.wikipedia.org/wiki/Chi-squared_test)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "application/vnd.databricks.v1+cell": {
+     "inputWidgets": {},
+     "nuid": "61a36af1-9b38-45a9-89b5-39b2d14093c4",
+     "showTitle": false,
+     "title": ""
+    }
+   },
+   "outputs": [],
+   "source": [
+    "from synapse.ml.exploratory import DistributionBalanceMeasure\n",
+    "\n",
+    "distribution_balance_measures = (\n",
+    "    DistributionBalanceMeasure()\n",
+    "    .setSensitiveCols(cols_of_interest)\n",
+    "    .transform(df)\n",
+    ")\n",
+    "\n",
+    "# Sort by JS Distance descending\n",
+    "display(distribution_balance_measures.sort(F.abs(\"DistributionBalanceMeasure.js_dist\").desc()))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "application/vnd.databricks.v1+cell": {
+     "inputWidgets": {},
+     "nuid": "ad2c4353-664d-4117-a629-45f66e92a4bd",
+     "showTitle": false,
+     "title": ""
+    }
+   },
+   "source": [
+    "#### Visualize Distribution Balance Measures"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "application/vnd.databricks.v1+cell": {
+     "inputWidgets": {},
+     "nuid": "32a10ac1-4d12-496c-97ea-3b52e5f61d15",
+     "showTitle": false,
+     "title": ""
+    }
+   },
+   "outputs": [
+    {
+     "data": {
+      "image/png": "<base64-encoded PNG output omitted: plot of the distribution balance measures; the remainder of the encoded payload continues below>
JIjBs3jjlz5lC9enUaNWoUd2ooKw8j1khlO5k7PT1dFy5cuM/3tx79WjmmpihZ9/WNWtyGYRgVjYgsUtX0aD/HVHOGYRhGTDHVnGEQ3ZEyVL7RsmkOjIrERkSGYRhGTDFBZBiGYcQUE0SGYRhGTDFBZBiGYcSUMgsiEakTzYQYhmEYVZM9CiIROVZElgLL/fXhIvJ/ZYlcRE4TkRUislpERpcQ5kIRWSoiX4nIlL1KvWEYhlHpKYv59j+BPsBMAFX9XESO39NNIpIAjAd6AznAAhGZqapLA2HaA38BeqrqZhHZ897yhmEYxgFFmVRzqpod5pRfhtuOAlar6hpV3QlkAGeHhbkSGK+qm/1zfsQwDMOoUpRFEGWLyLGAikh1EbkZWFaG+5KAoADL8W5BDgUOFZEPReQTETktUkQicpWILBSRhevXry/Dow3DMIzKQlkE0TBgOE6I5AJp/ro8SATaA72AgcCTItIwPJCqTlDVdFVNb9asWTk92jAMw4gH9jhHpKobgD/uQ9y5QMvAdbJ3C5IDfKqqu4C1IrISJ5gW7MPzDMMwjErIHgWRiDwLFNuiW1Wv2MOtC4D2ItIGJ4AGABeHhXkFNxJ6VkSa4lR1a8qQbsMwDOMAoSxWc68GftcCzgW+29NNqponItcCbwIJwDOq+pWI3AUsVNWZ3u9Ubx6eD4xU1Y17mwnDMAyj8rLHOSJVfSnw9zxwIVCm8ylU9XVVPVRV26nqPd5tjBdCqONGVe2kql1VNWN/MmOUzuzZs+nQoQMpKSncd999xfwnTpxIs2bNSEtLIy0tjaeeeqrA77TTTqNhw4aceeaZRe5Zu3YtPXr0ICUlhYsuuoidO3cW8X/ppZcQEfbnDCnDMA5s9mWLn/aArfepZOTn5zN8+HDeeOMNli5dytSpU1m6dGmxcBdddBGZmZlkZmYydOjQAveRI0cyefLkYuFvueUWbrjhBlavXk2jRo14+umnC/y2b9/OI488Qo8ePaKTKcMwDgjKsrPCdhHZFvoPzAJuiX7SjPJk/vz5pKSk0LZtW2rUqMGAAQOYMWNGme8/+eSTqVevXhE3VeXdd9+lf//+AAwaNIhXXnmlwP/222/nlltuoVatWuWTCcMwDkjKopqrp6r1A/8PVdWXKiJxRvmRm5tLy5aFRozJycnk5oYbMTpVWmpqKv379yc7O3wdc1E2btxIw4YNSUxMLBbnZ599RnZ2Nn372iFohmGUTonGCiJyRGk3qupn5Z8cI5acddZZDBw4kJo1a/LEE08waNAg3n333b2OZ/fu3dx4441MnDix/BNpGMYBR2lWcw+V4qfASeWcFiOKJCUlFRnh5OTkkJRUdKOLJk2aFPweOnQoo0aNKjXOJk2asGXLFvLy8khMTCyIc/v27SxZsoRevXoB8P3339OvXz9mzpxJenqZ7FwMw6hClCiIVPXEikyIEV26d+/OqlWrWLt2LUlJSWRkZDBlStHNztetW0fz5s0BmDlzJh07diw1ThHhxBNPZPr06QwYMIBJkyZx9tln06BBAzZs2FAQrlevXjz44IMmhAzDiEhZ1hEhIl2ATrh1RACo6nPRSpRR/iQmJjJu3Dj69OlDfn4+V1xxBZ07d2bMmDGkp6fTr18/Hn30UWbOnEliYiKNGzcuolo77rjjWL58OTt27CA5OZmnn36aPn36MHbsWAYMGMBtt91Gt27dGDJkSOwyaRhGpURUixwEFPQAACAASURBVG2aUDSAyN9we8F1Al4HTgf+p6r9o566CKSnp+v+rElpPfq1ckxNUbLus4n5yko06wVUvrph34kBICKLVDXqqoyyrCPqD5wMfK+qlwOHAw2imirDMAyjylAW1dyvqrpbRPJEpD7wI0U3MzUqKTYKMAwjHijNfHs8MBWY749meBJYBOwAPq6Y5BmGYRgHOqWNiFYCDwAtgJ9wQqk3UF9Vv6iAtBmGYRhVgBLniFT1EVU9Bjge2Ag8A8wGzhWR9hWUPsMwDOMApyxb/HyjqmNVtRvu7KBzgOVRT5lhGIZRJSjLpqeJInKWiDwPvAGsAM6LesoMwzCMKkFpxgq9cSOgM4D5QAZwlar+VEFpMwzDMKoApRkr/AWYAtykqpsrKD2GYRhGFaO0veZsU1PDMAwj6uzLCa2GYRiGUW6YIDIMwzBiigkiwzAMI6aYIDIMwzBiigkiwzAMI6aYIDIMwzBiSlQFkYicJiIrRGS1iIwuJdz5IqIiYmdJG4ZhVDGiJohEJAEYjzvRtRMwUEQ6RQhXD/gz8Gm00mIYhmHEL9EcER0FrFbVNaq6E7dF0NkRwv0dGAv8GsW0GIZhGHFKNAVREpAduM7xbgWIyBFAS1WN7lGhhmEYRtwSM2MFEakGPAzcVIawV4nIQhFZuH79+ugnzjAMw6gwoimIcoGWgetk7xaiHtAFmCsiWcDRwMxIBguqOkFV01U1vVmzZlFMsmEYhlHRRFMQLQDai0gbEakBDABmhjxVdauqNlXV1qraGvgE6KeqC6OYJsMwDCPOiJogUtU84FrgTWAZ8KKqfiUid4lIv2g91zAMw6hclHYe0X6jqq8Dr4e5jSkhbK9opsUwDMOIT2xnBcMwDCOmmCAyDMMwYooJIsMwDCOmmCAyDMMwYooJIsMwDCOmmCAyDMMwYooJIsMwDCOmmCAyDMMwYooJIsMwjACzZ8+mQ4cOpKSkcN999xXzf/jhh+nUqROpqamcfPLJfPPNNwV+3377LaeeeiodO3akU6dOZGVlATBu3DhSUlIQETZs2FAQ/oEHHiAtLY20tDS6dOlCQkICmzZtinoe4w0TRIZhGJ78/HyGDx/OG2+8wdKlS5k6dSpLly4tEqZbt24sXLiQL774gv79+zNq1KgCv8suu4yRI0eybNky5s+fz8EHHwxAz549mTNnDq1atSoS18iRI8nMzCQzM5N7772XE044gcaNG0c/o3GGCSLDMAzP/PnzSUlJoW3bttSoUYMBAwYwY8aMImFOPPFE6tSpA8DRRx9NTk4OAEuXLiUvL4/evXsDULdu3YJw3bp1o3Xr1qU+e+rUqQwcOLCcc1Q5MEFkGIbhyc3NpWXLwtNrkpOTyc3NLTH8008/zemnnw7AypUradiwIeeddx7dunVj5MiR5Ofnl+m5P//8M7Nnz+b888/fvwxUUkwQGYZh7AP/+c9/WLhwISNHjgQgLy+PDz74gAcffJAFCxawZs0aJk6cWKa4Zs2aRc+ePaukWg5MEBmGYRSQlJREdnZ2wXVOTg5JSUnFws2ZM4d77rmHmTNnUrNmTcCNntLS0mjbti2JiYmcc845fPbZZ2V6bkZGRpVVy4EJIsMwjAK6d+/OqlWrWLt2LTt37iQjI4N+/Yoen7Z48WKuvvpqZs6cWWCMELp3y5YtrF+/HoB3332XTp067fGZW7du5f333+fss88u38xUIkwQGYZheBITExk3bhx9+vShY8eOXHjhhXTu3JkxY8Ywc6Y7YHrkyJHs2LGDCy64gLS0tAJBlZCQwIMPPsjJJ59M165dUVWuvPJKAB599FGSk5PJyckhN
TWVoUOHFjzz5Zdf5tRTT+Wggw6q+AzHCaKqsU7DXpGenq4LF+77aeKtR79WjqkpStZ9faMWdzSIZllA5SoPK4ui2HdiAIjIIlVNj/ZzbERkGIZhxJSoHhVuGIZR2bHRcvSxEZFhGIYRU0wQGYZhGDHFBJFhGIYRU0wQGYZhGDHFBJFhRIFf1iwi98mryX3iSrZ+Mq2Y/7x58zjiiCNITExk+vTpRfxKOkrg3Xff5YgjjqBLly4MGjSIvLw8AJ5//nlSU1Pp2rUrxx57LJ9//nnU87c3WFkUYmURGRNEhlHO6O58Nr39GAdfcCcthv4fPy19v9hRAocccggTJ07k4osvLnZ/pKMEdu/ezaBBg8jIyGDJkiW0atWKSZMmAdCmTRvef/99vvzyS26//XauuuqqCslnWbCyKCRSWezc8G2RMFWlLMKJqiASkdNEZIWIrBaR0RH8bxSRpSLyhYi8IyKtIsVjGJWJnetWktiwOdUb/h5JqM5BHY8vdpRA69atSU1NpVq1op9gSUcJbNy4kRo1anDooYcC0Lt3b1566SUAjj32WBo1agQUPZYgHrCyKCRSWfyy6pMiYapKWYQTNUEkIgnAeOB0oBMwUETCN15aDKSraiowHbg/WukxjIoib/tGEus3K7hOqNe01KMEgpR0lEDTpk3Jy8sjtKvI9OnTi2zOGSJ4LEE8YGVRSKSyyN+xsUz3HmhlEU40R0RHAatVdY2q7gQygCK7+qnqe6r6s7/8BEiOYnoMI+4p6SgBESEjI4MbbriBo446inr16pGQkFDk3vfee4+nn36asWPHxij15YuVRSEHellEUxAlAUHRnOPdSmII8EYkDxG5SkQWisjC0M62hhGvJNZrQt62wnqav31DxKMEIlHaUQLHHHMMH3zwAfPnz+f4448vUMcAfPHFFwwdOpQZM2bQpEmT8s3QfmBlUUikskioW7b0HWhlEU5cGCuIyCVAOvBAJH9VnaCq6aqa3qxZs0hBDCNuqNH8UPI2f8euLd+j+bv4adm8YkcJlERpRwn8+OOPAPz222+MHTuWYcOGAc6a6rzzzmPy5MlFGqF4wMqikEhlUTulR5nuPdDKIpxoCqJcoGXgOtm7FUFETgFuBfqp6m9RTI9hVAhSLYHGvYfx44tj+O6pP3HQYccVO0pgwYIFJCcnM23aNK6++mo6d+4MlH6UwAMPPEDHjh1JTU3lrLPO4qSTTgLgrrvuYuPGjVxzzTWkpaWRnh71zZLLjJVFIZHKokazVlWyLMKJ2jEQIpIIrAROxgmgBcDFqvpVIEw3nJHCaaq6qizx2jEQ5Ud5l8Uvaxax6Z0JsHs3dQ8/lc1zJxbxnzdvHtdffz1ffPEFGRkZ9O/fH4DMzEz+9Kc/sW3bNhISErj11lu56KKLABg3bhz/+te/+Prrr1m/fj1NmzYF3Mf3/PPPA05/vmzZMtavX7/PRy3bxpZFse+kkKpcNyr9MRCqmgdcC7wJLANeVNWvROQuEQmNzR8A6gLTRCRTRGZGKz1GdNmf9SJ16tThueee46uvvmL27Nlcf/31bNmyBYCePXsyZ84cWrUqatk/cuRIMjMzyczM5N577+WEE07YZyFkGEZsieoxEKr6OvB6mNuYwO9Tovl8o+IIrpEACtaLBI9Kbt26NUCxNRJB/XWLFi04+OCDWb9+PQ0bNqRbt257fPbUqVMZOHBgOeQiitzRIMrxb41u/OWJlUVRolkelaQs4sJYwaj87M96kSDz589n586dtGvXrkzhf/75Z2bPns3555+/188yDCM+sIPxjLhh3bp1XHrppUyaNKnYqKkkZs2aRc+ePU0tZxiVGBsRGeXC/qwXAdi2bRt9+/blnnvu4eijjy7zfRkZGfGvljMMo1RMEBnlwv6sF9m5cyfnnnsul112WYElXVnYunUr77//PmefffaeAxuGEbeYIDLKhf1ZL/Liiy8yb948Jk6cSFpaGmlpaWRmZgLw6KOPkpycTE5ODqmpqQwdOrTgmS+//DKnnnoqBx10UMVn2DCMcsPmiIxyo3a77iS1617E7a677ir43b1794g7AF9yySVccsklEeMcMWIEI0aMiOg3ePBgBg8evO8JNgwjLrARkWEYhhFTbERkRA9bH2EYRhmwEdF+Ejz697777ivm/9tvv3HRRReRkpJCjx49Co73zcrKonbt2gVzIqGNCsEt0OzatSupqamcdtppbNiwoUicDz30ECJSzN0wDKMyYoJoPwjf1mbq1KnFtrV5+umnadSoEatXr+aGG27glltuKfBr165dwTY1jz/+OOD2Tfvzn//Me++9xxdffEFqairjxo0ruCc7O5u33nqLQw45pGIyaRiGEWVMEO0H4Uf/DhgwoNgxyDNmzGDQoEEA9O/fn3feeYfSNppVVVSVn376CVVl27ZttGjRosD/hhtu4P7770dEopMpwzCMCsYE0X4Qvq1NcnJysW1tcnNzadnSnYaRmJhIgwYN2LjRHQ+8du1aunXrxgknnMAHH3wAQPXq1Xnsscfo2rUrLVq0YOnSpQwZMgRwQi0pKYnDDz+8IrJnGIZRIZggihHNmzfn22+/ZfHixTz88MNcfPHFbNu2jV27dvHYY4+xePFivvvuO1JTU7n33nv5+eef+cc//lHEHNowDONAwATRfhC+rU1OTk6xbW2SkpLIznYnpufl5bF161aaNGlCzZo1C47uPfLII2nXrh0rV64sWMjZrl07RIQLL7yQjz76iK+//pq1a9dy+OGH07p1a3JycjjiiCP4/vvvKyi3hmEY0cEE0X4Qvq1NRkZGsW1t+vXrx6RJkwCYPn06J510EiLC+vXryc/PB2DNmjWsWrWKtm3bkpSUxNKlSwuOBH777bfp2LEjXbt25ccffyQrK4usrCySk5P57LPP+P3vf1+xmTYMwyhnbB3RfhDc1gbdzVU3X1uwrU16ejr9+vVjyJAhXHrppaSkpNC4cWMyMjIAd1rpmDFjqF69OtWqVePxxx8v2EH6b3/7G8cffzzVq1enVatWTJw4MYa5NAzDiC4miPaT4LY2t97qjvwNzuPUqlWLadOmFbvv/PPPL/EMnWHDhhVZVxSJ0HokwzCMyo6p5gzDMIyYYiOi8sSOQDYMw9hrbERkGIZhxBQTRIZhGEZMMUFkGIZhxBQTRIZhGEZMMUFkGIZhxBQTRIZhGEZMiaogEpHTRGSFiKwWkdER/GuKyAve/1MRaR3N9BiGYRjxR9QEkYgkAOOB04FOwEAR6RQWbAiwWVVTgH8CY6OVHsMwDCM+ieaI6ChgtaquUdWdQAZwdliYs4FJ/vd04GSxE98MwzCqFFLaaaH7FbFIf+A0VR3qry8FeqjqtYEwS3yYHH/9tQ+zISyuq4Cr/GUHYEVUEr3/NAU27DFU1cHKoxAri0KsLIoSz+XRSlWb7TnY/lEptvhR1QnAhFinY0+IyEJVTY91OuIFK49CrCwKsbIoipVHdFVzuUDLwHWyd4sYRkQSgQbAxiimyTAMw4gzoimIFgDtRaSNiNQABgAzw8LMBAb53/2BdzVaukLDMAwj
Lomaak5V80TkWuBNIAF4RlW/EpG7gIWqOhN4GpgsIquBTThhVZmJe/VhBWPlUYiVRSFWFkWp8uURNWMFwzAMwygLtrOCYRiGEVNMEBmGYRgxxQSRYRiGEVNMEBn7hYjUE5Eon5FeeRCROn4pQpXHdklxiMjBItIh1umIZ0wQ7SV+D73Q7yr9oYlIfaAP0NVf14ttimKLiBwKXKKqebFOSywRkeoichTQKtZpiRXiCLWvvwNu9+5Hxi5V8Yv13PYSVc0HEJH2qroq1umpSEIflqru9v+3iUh3oKcXQveLyAtVpSH2nRINlQewBsjwjfARwNmqmh2zBFYgvlNWTVXzVXWXiJwMbBORHUCOqr4T4yRWCP4bUb8eMmSSXAvoKyKrgFdEZK2qbopZIuMQGxGVgIhUC/RoQm4iIjeJyEfAeBG5VkTaxSiJFYKIdBWRc0SkmqruVtXdXh13mYh0AfKBxsAjqvr8gS6EROQUEWkOrlPiy6OBiJwIdAd+xu2HeLWqZofXoQMJn28B3/IWdtKqAQ2BB4ErgV9jl8roIyKNRGSSiNTx34iKSDsReVBEzgOaA/OBt1V1pAmh4hywH8n+Emh0G4jICX4epAVwENALuAG4GLgkhsmMCiLSVESGiEhjQIDPfVkkiMhfgMXAKbiFyv8EpgJ5/t4DTl0pIh1E5Fh/2QbY6d2bi8jTwCLgVNyI6Hbc1lXLoHD0eKAgIgf5Tlp/4AKgtndvISIPi8hbwGDgJeA1YJSqfhizBEcREXlRRE5U1c3AaOAX734Z8DLwG7DCL96/GddRKaLeNxymmsNVjFBvLuDWBPgb0Bf3UX0GpAN/xDVGacB7wOSKTW30CJTDwcCxwC+qOkVEzhKRzsC7wJn+/KjgfZuBTiKSpKrh+wlWWkQk0Y/w+gPNRSQLmAicKyIvAW2B+mHl8YOI/BU4HnjdxyMHwtZVInIEcDXwgqpO90YZB/kR0D9wu+L/SVW/9uFXAseLyBpV/T5mCS9HfF6r+XqRBQzHtQOHAc8BvYGOwN3AW6q6RUSqA18D20XkFFWdE5PExzEmiCgy73MITp+9GzgcSFLVAtWb1/Em4irY5QH3Rr5XVKkJCOM8YC3eCAH4A/Cbqr4qIojI28BXuN7wG7htnP4MdPMGDFtUdV3Fpr78CagZ5wPn40bEu3AdlFXAFlyen8I1SjuBp4DZwAki8h7QQlW/rszCKJR2Vf3MdzoOw3VKHgOWAo/ihPJi4GAROQg3OnwDuByo4eOpVtlHiD79u8Ud8rkMlz9w5dBcRJrh6stNQD//PXwP3Am8ClznR0RNVHVKhWcgTqlSqrngvE/g/+/EHWn+GW7kM8oHTwe+9GGqe7dvcJu5tvHuvUXkcQob7EqNiPxeRF4HnscdbJgmIgcDbwFNRCQFd5jhaB9mJXAfTmjNAx7CncpbNwbJL3dE5DgRmYtTq5wA9FTV9cCHwAmq+hXunKwXge9w9eAfwLO4neRX4Aw4qlVWIQRu/kdEjhGRf+Is4bqISE3gf8AhQB3gYeBo4FzgHmCuqn6AmzO7Q0RmA91ikoFyRkQycKOfZkAtETlfVX/AqWivVNWXgX6qeglOACmubjyJU98NwXVcDE+VEkSBeZ/a/n8i8ApwGXAFcCZwtogcA2zFdQbrq+ouf/9PuCF3CxGZD4zE9QI/iUV+ykJozsYbWuxJN30m8J2qdsf1crfh5j4W4EYCx6nqBlVdhOvxbgA+Baqr6ovA8ap6UmWwJizjXNYfgVmq2geYA/Twvf0ZwHEi0kxV31XVt4D/4gTPYj8ZPQY4VlXPr+yjAK+SGwss8X9/wM13vI0zTU5X1VdUdaCqjlLVswD1nZi/A8txmx4vik0OyoZEMFCKEOYInDA5RlXvxwng6733dJwqHyBBRNL8dSqwXFW348z7L1TVj6KSiUrKASmIfKNbLG9+BPMi8I6IDMap2abhVExrfK/mFZylzzTgUGCUiDQWkRtF5E/AMnWnzJ6lqqeq6hPqjkKPO0SkPc7YoIhVU4RwoUa5FlDT//4ENxI6SlW34XTc7USkvjdY+Ag4GRirqr/4Z/xQUtnHE2VRk3kVSyKw2jv9C9ezTceVSz3gMBFJEpFJuBFhG5yQQlV/VNUcXx5xacCxp85JIN0dgO2q+jRwPzALJ2S/wxlmdBZn4HKEiDwhIpk4de1mVf1BVe/3HZW4JFQGoY5qCWFCZVEX6O5N1BNwGoHDxRkzvYGbM+sOtMN1WmsCA1Q1y4+M88oi8KoaB2Rh+EZ3N7gGRURqi0ht3ETiA7ie7jk4Pe6rOEEUWow5Fdfj+xk3F9AQZ/3TFXdeUr5vyH7Y04ccB9wAdAYQkZYi8ncReUxEWgQDBRrlXGCriBysqjtwRxiniluouRhnxNAUeEpVO6jqZaq6NDyueBsBhAtHr2pKEZHzQmUR4T3+hjNN3y3OaOFrnKA51Y+Qs3CCeDPwgKqmquqQ8El5Xx5xqZYLdk7891FkpBhItwCZvl7swqkh00SkIU491xNogvtWfgDOVdXbQpoEH29ctTUicokULksIlcGhIvKMiNwlIo2C4UNloarzcPNgXf199XHm6Vf6OcXZwCGq+omqnqmqt6rqN77NCK2/K1HgVVUqvbGChC2y9G4tceq2zrg83ojTZZ+I+4guxE00z1fVlSKSDxwpIutU9VsR2QYMUdXxIjLaN8oFBCql4hqruERVrxGRo/xH9RpOffQUsL6EWxbhyuYGEXkANwG9Cafbn4Y7R6pgTYgUX9AZN0hgYty/JxW36LYVrgM2GadKuwk395MfuFfULdZdjK8rIrIEZ8TRyo+WbgN2qOrPOHVVvJdHJMvQNsAI3O4Y74vI9ar6W8A/NHLcgFu2cBKQgVPZpuPUtm/gOiffq+oKnBFDgUALfCsxLxPfVogvh0+BXHUq+pa4fLyEGwEfAtwqImP8+w3dH7KifAIYLSKhObL38SdNq+rosGcmALvjtTMSN6hqpfvDn6MU5tYAZzKbgKsYd+PW+KwHzsD1ZrNwqoXksHtvAf4D1PPXh+I+PAmEScCZbcY8/3tZVrtxOuzbgR6+nBqG8hJelkAn4N+4xvVWnHVPqWUfT38l1I3jcXOAy3HC9r9AY++3HDg9Uhy4UfKlvj4twpnqNo11HstYDnX8/4h1FmeGPQs3l1UDmAuMLiFsTeAiXOM9FaeGHAG0j1T+8VZHSqgT1XFr4I711zuAG/3vdJywOT1YhoF6UQdnvPKyL8caYXFXunYi1n+VakQkhav7NeCWBFyD21LlR9ww+SdVvc37d8NNFi7F6fbXq9PdN8VZPM3BCaE++AVpqroy/NlawvxKvBLoAU/GWTN9ibNm+hYnlJ8G/hMsSwBVXSoiN2ugZxzmH3c9u2BvP5Q+EUnGWbt9hLPkexVnkn8MrvHtBryDs/67FHgjNAIIxaFucnmyiLyjbj4k7vEjkR44c/OR6nr8tXB5vAg3B/oibg7wj8AqVd0pIg8C14vIwxo25+nrwgsikoufB1M3b1jkucGyiyVeA7Al8B5DdeIc3Pt/SlVXedXiSbg6Mgl
XPwBycMY4PYE3tOjIGuBXVX0f10EJPbNgax+Ng9FfZSOu9LaRCNPth+Z9unsBA277jGG4RnUQ8HvgSxFp6/3n4uaGFPg/4Fhxq7/fwg2rc1U1V1Wf0QNre5rQxzAJ6KGq96jqKbhefQbQXkRC6zuKzHWFhJCIJAbnDOKV8E6COIOUccAmdZPkE4C6Pl9rcQsQ033wZ4GjRKRxoMEKL4/vvHtCPJaHiNQQkW4iUsvn4TNVHSmFu6IPxs2JjsWthZqM65gtwJvaq+qruNFysU05xe8mrqr/U9XJ6tSW1cK+zZgLICgQxH/FjVoQkVo+rRNxSw+ycWt5+lDYHtTGaQH6Aqib5/sKZ6ae7ONJKGEaoHrILV7KoDISV4Io0kce9tKPEZE3cZYq14nIUNyisleB9j7Yd7gPqqe//hynajtKVTOB84AxqnqEqg7TwE4A8djI7CuB3uC7uLUOfbxwHoUT3J/7nnCoJxuasO3u50BQ1bx4+rjCGz/vVktEhovIk77HC26kcyhu3Qa4+a0ufuT0Dc4CsJU48+scYCNOfUeE8ggZe4Q29Iyn8gjV1+rA6cCpvuHsKCLDcabF4EZI41T1bVX9K5ACJOG+nbYiEtol+wv8Ak3f8IasyQo6aIG5n7iYcC9BIKYAc0TkU9w2RJ1w2o4rcUYVpwIdVHUBTi15nKouB9aL2xsOXIflzUC8oX0FE0Skv4hc7N0LDDKMfSduBJG4RWHFPnIR6egbmvo4tcAEVT0Zt67lWu82C+gCoKoLceq2S8WZ1T6Dm4AMCSpR1U983AkRKvEBQ6BX/wJuHqwv0Ai4SFX/CwUWZO1F5E4R+RC4jjgzwAj2RMM6Jgm4fJ0ALMS98xtwPf7a+D3hfAdkK4VrPL7BjQRCo+pTVPUVH1bF7S33dxH5BLjGC7CYN7pQrGceqq+JOEH6DPA4bn5zBdDMh/0dsCsguD7DjZDe8X6h0eF9OFVlqOHN98+7UETeFpGj4+EbCY5YtXBtYH1xVnDH4FRr7XGWbJNxyxIuxKngEoC+qvqoj+5D3Po5cN/JyT7er1T1Md9RQdx+kxNwKs3DcBoVo7zQGE5Q4dQAKf73GOB3/ndH3B5e4AwN5vnfoQ0mP8P1drt59w44vXfPQNxH4TYlTcDNBY2NZV5jVL6hydUk4IcS/B7AWTr1AWrGOs17yE8qbj3PDf66MU6Xnxh456/ijDGmA9cE7r0LmON/18VtvVOkPHx8L+H2iDuFsEnoGOY7Dbebd9CtPm7ep0XgPc7DG5fgrLiex2kGrsLNgx7s/e4DBuDUV9cAqRGe2c3f/ylu4fYhsS6HCGkM1eF/40ZzTwOdvdsXgfbhSJyK+pSwMm3u/aYSZpwU9pzzcGrNw0sKY3/7+S5jVIGq+/9PAs/73/VwJtd1gZnA9d69Ga4nUx036TwWqBWIq4W/91/A3wLu1bwgmoZTzxX72KrKnxcyr1FoKVYNSPC/46WxPRg3lwVhVke4jsbzvjG9BdeLvdw3HrOAEwNxTMCtA+uHMzcPxdEI6LOHNCTirc1i/ecFY+gdtQG6+N81fcO7CLfX20Tc4mpwu2GM8L/r4SzbHsapnx7CCdm5/q9JCc8NWYgdjzMAimnDi+tIhlt2tgf+gtvvry1uJNg2LMwDwNOB6z8BmTjB+ypufuzIEso9JOCK/Le/KL7nmD7cVfYFvrJVx02wN8AZEXyEU7mkeSFTCzfZOAm3LqgHbhQ0yTesBwcrk/9/054an6rwh1vYem6s01FK+vrgNoa8qQT/zjhrv8v89SDf0ByF22x1knevg7MKa+Pr1KtAg2CdqIx/Pl8X43Y+B7czeiJORbQQeNW7D8btjB26rwfwAdDaX5+J2xUgPP64Nzem0By9NW7kNxU3Kvw9zuBkMW47oX/iLOFa4vZCrO/LqxpOBfkv4LzKWAYH8l+owa5QRKQvTpisweluL1fV/4rbcPNjVf27iIzAbZORjdvDrJ+4jRY7A0NxvaIZwGRVAZ7bgQAADLFJREFU3VrhmTD2GT9XUbDSXESuxS0W/R/OinFzWPhauHVhu1V1lLiFmKNxo7yPcT3813EN7xpguLp9ASsFJVhjNcSphK7CWXf9Dmdwc4GI1MVpB1riNt+cgFO/1sKpp97E7QySiZuof09V14Q/U+Nk3guKLTYtOG4BZ3J+HU6oTMN1Lh4Dmqnq+b4u/R7XeT0IVwdOxBlv/A2nYk3EzYuuDX9mPJVBlaa8JRtOlfanEvxCgu8JClUIzwNv+t9nAV/733VxPby3ccKofiCeuJ7LsL8S60aosQmvD9NxO128hbNmKlZvgNNwKtta3u0u3E7XdXBqt8uBXhHiToh1vveifGr4/01wa76WUTjnkYJTMTfAGeZMDJRFFnCb/30pThBdhleBh5dlrPNZhnJo7f/3wJnhN8et//kGt9lqb5xKtnZYudXHdVDG+uu6OIFVrB7GOo/2V/QvGlZzvwMGiF/nEzSJVlUVka64Hsx073wNbtPAxqo6C6gjIl1UdYeqTsQJoSkELLm0cJ1LPO/zZuAWEYbqghYeo5wkIjcBp/k1Kq/h1vaMVrdNTBHUtR7LcFvLhCycFuBM9Wuq6mZVfVZV5/pnFhy7oJVgIbIvo2nADBE5U1U34lRuuyjcjmmN/zsN18DWAPqIyNW40cIJAOrW+fRR1efUmxbHm2WoRFiPJe6E12dE5HOctgSccUp13MLkR3Cj33U4i8BNOIEE0EBEbsGVWVv8YZW+DVkftLLz7jYKijP2a2cFX8GfBP6rqq95599wjcRpOL2t4BaThvgKp8Ov49dsbBWRn3A9uUdwVjojcCoJVPWKkp5fGRoZg0xcnUDc5ql/xo2at+Mst3ap6rPiDho7TkS+1siq1vW4RZjtAXynpQhSuPNG3DU0ofVJgev2uM5VPZwaaRwujxNEZCvuG/ofbn7jFXUmyi/jjhE4y68XugtnuDFUVb8Ne15wt4m4Kg8tehBle1V9B6eCW4rbZmeLD9oUZ6n2PHCHup0u8KrJZcBA3Ch5I87oaWwJz1PibEmCUZT9GhH5Cp6NWzSGiKTiKs1q/r+984+1sq7j+OutUpo2QVeNYopCSWgiorNZFAuxtrZYU0M05/LXXFF/NNZcTsUVUASpzcwtRaycoW4qGKAgNQlMDJQrkoq/NjQyNZviug3p0x/v73Pvw+lcsjyXc+7189oYnOc853nOczjn+Xw/v94fz25RNPR9lMersf7ZYZJG4dXN1LLbhbhclPrr1GHqvcnbIyKexxVO4JDT2cDPIuJ8rNU1ufSIrcPVcR/q41D/BBZExNz6xgaPu+033MaVfkXxBOsLv6uwkvcmXAk4Fud3DsGl2i/gUFRd6WAVcISk4RFxR0SMi4ivV0aowfNp+4230ROpbT9GVje5G4ffwAZ3NHBu8RAn4KKErXj0yhtys/WPyv7LcBVl5WlX/T55rxiAtOI/bDFwvCx18W3cD3QnvnFMht1+INWKcAH+gt2Jk63zcT4I4JXGFXGUruYWvNekPXxQ0tkR8SC+mR5btt+PvaOjccHBweXfQE/XfN
W4uKtZSLYTQk0Vkqb09X4kXQLMljS8bFqLIwPg/MckvML/IvBJeQBfFzBW0lHQIzV0QkRsr35T9TBXp/1GwlTez1iVURM4F7wuIsZHxJyy7TIcYtwPFxsswXnDpcC3JK3BRQo7gF0R0VWLwtTPmfeKAUgrRE+fwMnS2dg7ug/3OmzGIYdVFAMUveKB24BrJP0mIp6uH6yTbixJy7iC0jOG80FTACLiIUnTsALyg5KeA94jaUhE7Gzwps/EvSJz6NXR6xiKtzNP0tyIuK0KE9Yqs5bgCtFLsSKIcJgarHKwJjw8bRrOoZ6FF3n74DlRAEREd73aqxM8H9hN+qcefhyFPeCv4mvdiEusnwWuLobpNeCtiJiP7yXVa4/Apfe3SdoAHBgRXY3nzPvF4OAdG6IScliME4TrsTtdKdPOlDS0ivmWL2vPIKrKCKmmXPtO30/SeUTEk7JszlDc1zJV0oTw6OitwPvLTemK2H0u0Gdx7uBEvDK+uRyv7d8TNcyZCU/evAkblZ5ppDWDsaWEldZKOgFr4f2t7PZH4FOSNuLcxww8K+t1bLh3o5NW/JUxqD4HSQdExD/kCcin4qKkMdgLvktSV0QsltSNizGGAXMkbcOfwxzsFf+eMh03PJSwOl/P594J34OkNbRqDMRv8WrnBjyf4wwcengZhx2W11Yvlat+ZJTehk76YSX9xnyc+/ihpJex+sEGLMlfn+SpsrgZiUM4NwIz2v0dqb2vRlHU4TicvBPnLS6UdHBjeLl4MW9KugrnQ4+gtyLuJvx7GRNFA3CgUH0muLrtMpzrOg2HXJdjbbZbcDXbUzh3vCEi7gYo+cHJWB+uu7zm4mjoJaudryM8wKS1tCSpV2LXT+O6/tNxOO69WKZnedmnLq75B2B6ySsl7w5+icNN4DLta6BXvbgxtBMRz0fEzIjY0AFGaDSWDOq58Uo6R9IK7LFcKeno4uE/g6sBG3NZ1TVUg+UmAk8Ww/ZWRGypjFA5/oBIuJdQ4ozwDKPD8aTj0VhuaV+sntIdEScBs3C4foSkIyWtx/eKF7Ec04sRsSgiXlMTpfVk8NLKwXi3YDkewjNgepBLVb+P3fAFwJzoY/BaMjiJiL+ot5/o1SbPd0yYpTGEjENIF0g6FDfQLsEqBt+NiI2S7sGlxhfhSrDpuGm7mQHdGRErJR0WpdKrft5ayKljPo//whbcDzYRfy5HAZ/AvU6fx8VIY8q+47HnNBYParworIz+H7R78ZHsXfpN4kc12RJ5QNeuiNjRLydLBgydnGBulnAv28/EMjov4PzNg9hQXISrPbcBkyPiIEmHYDWQaRHxdC2k1yNho9Lj08mfxf+CpDPwjKvLsVr+aqxw8C8cqp+Dy9AfBq6LiHUNr98t35a8+2ipIerrh5wknUoxEKdgaalnyrYPYMHcYXhFvxk4Hytgn1f2ORyPU5hVijG2Ad+JiFvlxtPHIuLyqgKwdr5BYXwaKQVL+wMrI+JaSStxs+4pOEz/VmPeLEkqWhqDzUqWZKBRQkAjcOXWF2TJnB9gCZ2lOKczDlgBDJN0YnnpybhX7o0ScnwDj2EHN2vPK8evcmCflnQ98Dv19hINJhbgardq+OA3gK+EZXZeDSuoZN4naUorc0RJMlA5HmvY7cI5npHA6SVp/lHcYLkU50Mm4RDTJqx+vQYrIMyk9MGEx5Ejq8VfgEN4a/H01K7BmP+IiPWSvoZV1ImIp5rsM+iuO2kNbRkDkSSdhKQxWHJnAVYzuAL4abj3ZzwwFzdmTsCezrNYE/FqrJW2uY/jHoSN1bNZnJMkfZMeUfKuJyKekHQHTqyfh8Nyp2AP6FGskTckIu6T9GFgR0RU6vGboXlTdinO+dPeu5L2M1hzYEn/kh5RkgCSPoIT7WOLh3QNlq6ahEdRz89myiTpH9IQJUlB0s1YEeAVLFf1EvB4RGxp2C8neyZJC0lDlCSFEnabjnXe1tS2Z1tCkvQjaYiSpA/SACXJ3iENUZI0kKG3JNm7pCFKkiRJ2kp2OSdJkiRtJQ1RkiRJ0lbSECVJkiRtJQ1RkjRBUkj6Ve3xfpJeLrOHkiRpIWmIkqQ5bwLHSDqgPJ6CJ4nudSSlFFcyqElDlCR9s4zesQbT8UgIACQdKGmhpPWSHpE0tWwfKWmNpI3lz8ll+3BJD0h6VNLmMtEUSTtqxzxd0qLy70WSrpf0EDBP0ihJKyRtKMevpp4myYAnV1pJ0je/Bi4v4bhjsebcxPLcpcDqiDhP0lBgvaRVwF+BKRHRXUZI3AqcAJwF3BsRs8tE0ve9jfOPAE4u01zvBy6OiK2STgKuAz7XwmtNkraRhihJ+iAiuiSNxN7QsoanTwW+JGlmebw/cBjwZ+BaScfh+UYfK88/DCyUNAS4KyIefRtv4fZihA7Cg/huL2IP4KmnSTIoSEOUJHtmCTAfq3AfWtsu4LSIeLK+s6RZWCx1HA59dwNExAOSPoNDfYsk/TgifgHUO8r3bzj3m+XvfYC/R8RxrbigJOk0MkeUJHtmIXBlRDzWsP1e4JuVHl0ZoAdwMLC9SASdA+xbnj8ceCkifg7cgKfCArwk6eNlntGXm72BiHgdeE7SGeVYkjSuZVeYJG0mDVGS7IGIeCEiftLkqe8BQ4AuSY+Xx+DczbmSNgFj6PVqJgGbJD0CTMPzjgAuAe4B1gHb9/BWzgbOL8d9HJj6f19UknQYqTWXJEmStJX0iJIkSZK2koYoSZIkaStpiJIkSZK2koYoSZIkaStpiJIkSZK2koYoSZIkaStpiJIkSZK28m+CQww8b0oV8QAAAABJRU5ErkJggg==" + }, + "metadata": { + "application/vnd.databricks.v1+output": { + "addedWidgets": {}, + "arguments": {}, + "data": "/plots/290402f0-7a7d-4689-865d-2fd43896e913.png", + "datasetInfos": [], + "metadata": {}, + "removedWidgets": [], + "type": "image" + } + }, + "output_type": "display_data" + } + ], + "source": [ + "distribution_rows = distribution_balance_measures.collect()\n", + "race_row = [row for row in distribution_rows if row[\"FeatureName\"] == \"race\"][0][\"DistributionBalanceMeasure\"]\n", + "sex_row = [row for row in distribution_rows if row[\"FeatureName\"] == \"sex\"][0][\"DistributionBalanceMeasure\"]\n", + "\n", + "measures_of_interest = [\"kl_divergence\", \"js_dist\", \"inf_norm_dist\", \"total_variation_dist\", 
\"wasserstein_dist\"]\n", + "race_measures = [round(race_row[measure], 4) for measure in measures_of_interest]\n", + "sex_measures = [round(sex_row[measure], 4) for measure in measures_of_interest]\n", + "\n", + "x = np.arange(len(measures_of_interest))\n", + "width = 0.35\n", + "\n", + "fig, ax = plt.subplots()\n", + "rects1 = ax.bar(x - width/2, race_measures, width, label=\"Race\")\n", + "rects2 = ax.bar(x + width/2, sex_measures, width, label=\"Sex\")\n", + "\n", + "ax.set_xlabel(\"Measure\")\n", + "ax.set_ylabel(\"Value\")\n", + "ax.set_title(\"Distribution Balance Measures of Sex and Race in Adult Dataset\")\n", + "ax.set_xticks(x)\n", + "ax.set_xticklabels(measures_of_interest)\n", + "ax.legend()\n", + "\n", + "plt.setp(ax.get_xticklabels(), rotation=20, ha=\"right\", rotation_mode=\"default\")\n", + "\n", + "def autolabel(rects):\n", + " for rect in rects:\n", + " height = rect.get_height()\n", + " ax.annotate('{}'.format(height),\n", + " xy=(rect.get_x() + rect.get_width() / 2, height),\n", + " xytext=(0, 1), # 1 point vertical offset\n", + " textcoords=\"offset points\",\n", + " ha='center', va='bottom')\n", + "\n", + "autolabel(rects1)\n", + "autolabel(rects2)\n", + "\n", + "fig.tight_layout()\n", + "\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "2498f850-584c-4e7f-a55e-a79fd27bda2e", + "showTitle": false, + "title": "" + } + }, + "source": [ + "#### Interpret Distribution Balance Measures\n", + "\n", + "Race has a JS Distance of 0.5104 while Sex has a JS Distance of 0.1217.\n", + "\n", + "Knowing that JS Distance is between [0, 1] where 0 means perfectly balanced distribution, we can tell that:\n", + "* There is a larger disparity between various races than various sexes in our dataset.\n", + "* Race is nowhere close to a perfectly balanced distribution (i.e. some races are seen ALOT more than others in our dataset).\n", + "* Sex is fairly close to a perfectly balanced distribution." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "2d848302-5693-4329-b4a4-da428ae431ed", + "showTitle": false, + "title": "" + } + }, + "source": [ + "### Calculate Aggregate Balance Measures\n", + "\n", + "Aggregate Balance Measures allow us to obtain a higher notion of inequality. They are calculated on the global set of sensitive columns and don't use the label column.\n", + "\n", + "These measures look at distribution of records across all combinations of sensitive columns. For example, if Sex and Race are sensitive columns, it shall try to quantify imbalance across all combinations - (Male, Black), (Female, White), (Male, Asian-Pac-Islander), etc.\n", + "\n", + "Measure | Description | Interpretation | Reference\n", + "- | - | - | -\n", + "Atkinson Index | It presents the percentage of total income that a given society would have to forego in order to have more equal shares of income between its citizens. This measure depends on the degree of society aversion to inequality (a theoretical parameter decided by the researcher), where a higher value entails greater social utility or willingness by individuals to accept smaller incomes in exchange for a more equal distribution. An important feature of the Atkinson index is that it can be decomposed into within-group and between-group inequality. | Range [0, 1]. 0 if perfect equality. 1 means maximum inequality. 
In our case, it is the proportion of records for a sensitive columns’ combination. | [Link](https://en.wikipedia.org/wiki/Atkinson_index)\n", + "Theil T Index | GE(1) = Theil's T and is more sensitive to differences at the top of the distribution. The Theil index is a statistic used to measure economic inequality. The Theil index measures an entropic \"distance\" the population is away from the \"ideal\" egalitarian state of everyone having the same income. | If everyone has the same income, then T_T equals 0. If one person has all the income, then T_T gives the result (ln N). 0 means equal income and larger values mean higher level of disproportion. | [Link](https://en.wikipedia.org/wiki/Theil_index)\n", + "Theil L Index | GE(0) = Theil's L and is more sensitive to differences at the lower end of the distribution. Logarithm of (mean income)/(income i), over all the incomes included in the summation. It is also referred to as the mean log deviation measure. Because a transfer from a larger income to a smaller one will change the smaller income's ratio more than it changes the larger income's ratio, the transfer-principle is satisfied by this index. | Same interpretation as Theil T Index. | [Link](https://en.wikipedia.org/wiki/Theil_index)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "d08ea1fd-8a70-407e-a9ad-9b6cac5c65bf", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "from synapse.ml.exploratory import AggregateBalanceMeasure\n", + "\n", + "aggregate_balance_measures = (\n", + " AggregateBalanceMeasure()\n", + " .setSensitiveCols(cols_of_interest)\n", + " .transform(df)\n", + ")\n", + "\n", + "display(aggregate_balance_measures)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "0666d394-be89-470c-bc31-a9cd28f86264", + "showTitle": false, + "title": "" + } + }, + "source": [ + "#### Interpret Aggregate Balance Measures\n", + "\n", + "An Atkinson Index of 0.7779 lets us know that 77.79% of data points need to be foregone to have a more equal share among our features.\n", + "\n", + "It lets us know that our dataset is leaning towards maximum inequality, and we should take actionable steps to:\n", + "* Upsample data points where the feature value is barely observed.\n", + "* Downsample data points where the feature value is observed much more than others." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "1a1fb46f-8c88-44fe-9177-76f09e07202e", + "showTitle": false, + "title": "" + } + }, + "source": [ + "### Summary\n", + "\n", + "Throughout the course of this sample notebook, we have:\n", + "1. Chosen \"Race\" and \"Sex\" as columns of interest in the Adult Census Income dataset.\n", + "2. Done preliminary analysis on our dataset. \n", + "3. 
Ran the 3 groups of measures that compose our **Data Balance Analysis**:\n", + " * **Feature Balance Measures**\n", + " * Calculated Feature Balance Measures to see that the highest Demographic Parity is in \"Sex\": Males see >50k income much more than Females.\n", + " * Visualized Demographic Parity of Races to see that Asian-Pac-Islander sees >50k income much more than Other, in addition to other race combinations.\n", + " * **Distribution Balance Measures** \n", + " * Calculated Distribution Balance Measures to see that \"Sex\" is much closer to a perfectly balanced distribution than \"Race\".\n", + " * Visualized various distribution balance measures to compare their values for \"Race\" and \"Sex\".\n", + " * **Aggregate Balance Measures**\n", + " * Calculated Aggregate Balance Measures to see that we need to forego 77.79% of data points to have a perfectly balanced dataset. We identified that our dataset is leaning towards maximum inequality, and we should take actionable steps to:\n", + " * Upsample data points where the feature value is barely observed.\n", + " * Downsample data points where the feature value is observed much more than others.\n", + " \n", + "**In conclusion:**\n", + "* These measures provide an indicator of disparity on the data, allowing for users to explore potential mitigations before proceeding to train. \n", + "* Users can use these measures to set thresholds on their level of \"tolerance\" for data representation.\n", + "* Production pipelines can use these measures as baseline for models that require frequent retraining on new data. \n", + "* These measures can also be saved as key metadata for the model/service built and added as part of model cards or transparency notes helping drive overall accountability for the ML service built and its performance across different demographics or sensitive attributes." + ] + } + ], + "metadata": { + "application/vnd.databricks.v1+notebook": { + "dashboards": [], + "language": "python", + "notebookMetadata": { + "pythonIndentUnit": 2 + }, + "notebookName": "AdultCensusIncome Tutorial", + "notebookOrigID": 4073163981188018, + "widgets": {} + }, + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/website/docs/features/exploratory/Data Balance Analysis.md b/website/docs/features/exploratory/Data Balance Analysis.md new file mode 100644 index 0000000000..e8fc593b16 --- /dev/null +++ b/website/docs/features/exploratory/Data Balance Analysis.md @@ -0,0 +1,194 @@ +--- +title: Data Balance Analysis on Spark +description: Learn how to do Data Balance Analysis on Spark to determine how well features and feature values are represented in your dataset. +--- + +# Data Balance Analysis on Spark + +## Context + +Data Balance Analysis is relevant for overall understanding of datasets, but it becomes essential when thinking about building Machine Learning services out of such datasets. Having a well balanced data representation is critical when developing models in a responsible way, specially in terms of fairness. +It is unfortunately all too easy to build an ML model that produces biased results for subsets of an overall population, by training or testing the model on biased ground truth data. There are multiple case studies of biased models assisting in granting loans, healthcare, recruitment opportunities and many other decision making tasks. In most of these examples, the data from which these models are trained was the common issue. 
These findings emphasize how important it is for model creators and auditors to analyze data balance: to measure training data across sub-populations and ensure the data has good coverage and a balanced representation of labels across sensitive categories and category combinations, and to check that test data is representative of the target population. + +In summary, Data Balance Analysis, used as a step for building ML models has the following benefits: + +* **Reduces risks for unbalanced models (facilitate service fairness) and reduces costs of ML building** by identifying early on data representation gaps that prompt data scientists to seek mitigation steps (collect more data, follow a specific sampling mechanism, create synthetic data, etc.) before proceeding to train their models. +* **Enables easy e2e debugging of ML systems** in combination with [Fairlearn](https://fairlearn.org/) by providing a clear view if for an unbalanced model the issue is tied to the data or the model. + +## Examples + +* [Data Balance Analysis - Adult Census Income](../notebooks/Data%20Balance%20Analysis%20-%20Adult%20Census%20Income.ipynb) + +## Usage + +Data Balance Analysis currently supports three transformers in the `synapse.ml.exploratory` namespace: + +* FeatureBalanceMeasure - supervised (requires label column) +* DistributionBalanceMeasure - unsupervised (doesn't require label column) +* AggregateBalanceMeasure - unsupervised (doesn't require label column) + +1. Import all three transformers. + + For example: + + ```python + from synapse.ml.exploratory import AggregateBalanceMeasure, DistributionBalanceMeasure, FeatureBalanceMeasure + ``` + +2. Load your dataset, define features of interest, and ensure that the label column is binary. The `FeatureBalanceMeasure` transformer currently only supports binary labels, but support for numerical labels will be added soon. + + For example: + + ```python + import pyspark.sql.functions as F + + features = ["race", "sex"] + label = "income" + + df = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/AdultCensusIncome.parquet") + + # Convert the "income" column from {<=50K, >50K} to {0, 1} to represent our binary classification label column + df = df.withColumn(label, F.when(F.col(label).contains("<=50K"), F.lit(0)).otherwise(F.lit(1))) + ``` + +3. Create a `FeatureBalanceMeasure` transformer and call `setSensitiveCols` to set the list of sensitive features and call `setLabelCol` to set the binary label column. Then, call the `transform` method with your dataset and visualize the resulting dataframe. + + For example: + + ```python + feature_balance_measures = ( + FeatureBalanceMeasure() + .setSensitiveCols(features) + .setLabelCol(label) + .transform(df) + ) + feature_balance_measures.show(truncate=False) + ``` + +4. Create a `DistributionBalanceMeasure` transformer and and call `setSensitiveCols` to set the list of sensitive features. Then, call the `transform` method with your dataset and visualize the resulting dataframe. + + For example: + + ```python + distribution_balance_measures = ( + DistributionBalanceMeasure() + .setSensitiveCols(features) + .transform(df) + ) + distribution_balance_measures.show(truncate=False) + ``` + +5. Create a `AggregateBalanceMeasure` transformer and and call `setSensitiveCols` to set the list of sensitive features. Then, call the `transform` method with your dataset and visualize the resulting dataframe. 
+ + For example: + + ```python + aggregate_balance_measures = ( + AggregateBalanceMeasure() + .setSensitiveCols(features) + .transform(df) + ) + aggregate_balance_measures.show(truncate=False) + ``` + +Note: If you are running this notebook in a Spark environment such as Azure Synapse or Databricks, then you can easily visualize the imbalance measures by calling the built-in plotting features `display()`. + +## Measure Explanations + +### Feature Balance Measures + +Feature Balance Measures allow us to see whether each combination of sensitive feature is receiving the positive outcome (true prediction) at balanced probability. + +In this context, we define a feature balance measure, also referred to as the parity, for label y as the difference between the association metrics of two different sensitive classes \\([x_A, x_B]\\), with respect to the association metric \\(A(x_i, y)\\). That is: + +$$parity(y \vert x_A, x_B, A(\cdot)) \coloneqq A(x_A, y) - A(x_B, y) $$ + +Using the dataset, we can see if the various sexes and races are receiving >50k income at equal or unequal rates. + +Note: Many of these metrics were influenced by this paper [Measuring Model Biases in the Absence of Ground Truth](https://arxiv.org/abs/2103.03417). + +Association Metric | Family | Description | Interpretation/Formula | Reference +| - | - | - | - | - +Demographic Parity | Fairness | Proportion of each segment of a protected class (e.g. gender) should receive the positive outcome at equal rates. | As close to 0 means better parity. \\(DP = P(Y \vert A = "Male") - P(Y \vert A = "Female")\\). Y = Positive label rate. | [Link](https://en.wikipedia.org/wiki/Fairness_%28machine_learning%29) +Pointwise Mutual Information (PMI), normalized PMI | Entropy | The PMI of a pair of feature values (ex: Gender=Male and Gender=Female) quantifies the discrepancy between the probability of their coincidence given their joint distribution and their individual distributions (assuming independence). | Range (normalized) [-1, 1]. -1 for no co-occurences. 0 for co-occurences at random. 1 for complete co-occurences. | [Link](https://en.wikipedia.org/wiki/Pointwise_mutual_information) +Sorensen-Dice Coefficient (SDC) | Intersection-over-Union | Used to gauge the similarity of two samples. Related to F1 score. | Equals twice the number of elements common to both sets divided by the sum of the number of elements in each set. | [Link](https://en.wikipedia.org/wiki/S%C3%B8rensen%E2%80%93Dice_coefficient) +Jaccard Index | Intersection-over-Union | Similar to SDC, guages the similarity and diversity of sample sets. | Equals the size of the intersection divided by the size of the union of the sample sets. | [Link](https://en.wikipedia.org/wiki/Jaccard_index) +Kendall Rank Correlation | Correlation and Statistical Tests | Used to measure the ordinal association between two measured quantities. | High when observations have a similar rank and low when observations have a dissimilar rank between the two variables. | [Link](https://en.wikipedia.org/wiki/Kendall_rank_correlation_coefficient) +Log-Likelihood Ratio | Correlation and Statistical Tests | Calculates the degree to which data supports one variable versus another. Log of the likelihood ratio, which gives the probability of correctly predicting the label in ratio to probability of incorrectly predicting label. | If likelihoods are similar, it should be close to 0. 
| [Link](https://en.wikipedia.org/wiki/Likelihood_function#Likelihood_ratio) +t-test | Correlation and Statistical Tests | Used to compare the means of two groups (pairwise). | Value looked up in t-Distribution tell if statistically significant or not. | [Link](https://en.wikipedia.org/wiki/Student's_t-test) + +### Distribution Balance Measures + +Distribution Balance Measures allow us to compare our data with a reference distribution (currently only uniform distribution is supported as a reference distribution). They are calculated per sensitive column and do not depend on the label column. + +For example, let's assume we have a dataset with 9 rows and a Gender column, and we observe that: + +* "Male" appears 4 times +* "Female" appears 3 times +* "Other" appears 2 times + +Assuming the uniform distribution: +$$ReferenceCount \coloneqq \frac{numRows}{numFeatureValues}$$ +$$ReferenceProbability \coloneqq \frac{1}{numFeatureValues}$$ + +Feature Value | Observed Count | Reference Count | Observed Probability | Reference Probabiliy +| - | - | - | - | - +Male | 4 | 9/3 = 3 | 4/9 = 0.44 | 3/9 = 0.33 +Female | 3 | 9/3 = 3 | 3/9 = 0.33 | 3/9 = 0.33 +Other | 2 | 9/3 = 3 | 2/9 = 0.22 | 3/9 = 0.33 + +We can use distance measures to find out how far our observed and reference distributions of these feature values are. Some of these distance measures include: + +Measure | Description | Interpretation | Reference +| - | - | - | - +KL Divergence | Measure of how one probability distribution is different from a second, reference probability distribution. Measure of the information gained when one revises one's beliefs from the prior probability distribution Q to the posterior probability distribution P. In other words, it is the amount of information lost when Q is used to approximate P. | Non-negative. 0 means P = Q. | [Link](https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence) +JS Distance | Measuring the similarity between two probability distributions. Symmetrized and smoothed version of the Kullback–Leibler (KL) divergence. Square root of JS Divergence. | Range [0, 1]. 0 means perfectly same to balanced distribution. | [Link](https://en.wikipedia.org/wiki/Jensen%E2%80%93Shannon_divergence) +Wasserstein Distance | This distance is also known as the earth mover’s distance, since it can be seen as the minimum amount of “work” required to transform u into v, where “work” is measured as the amount of distribution weight that must be moved, multiplied by the distance it has to be moved. | Non-negative. 0 means P = Q. | [Link](https://en.wikipedia.org/wiki/Wasserstein_metric) +Infinity Norm Distance | Distance between two vectors is the greatest of their differences along any coordinate dimension. Also called Chebyshev distance or chessboard distance. | Non-negative. 0 means same distribution. | [Link](https://en.wikipedia.org/wiki/Chebyshev_distance) +Total Variation Distance | It is equal to half the L1 (Manhattan) distance between the two distributions. Take the difference between the two proportions in each category, add up the absolute values of all the differences, and then divide the sum by 2. | Non-negative. 0 means same distribution. | [Link](https://en.wikipedia.org/wiki/Total_variation_distance_of_probability_measures) +Chi-Squared Test | The chi-square test tests the null hypothesis that the categorical data has the given frequencies given expected frequencies in each category. 
| p-value gives evidence against null-hypothesis that difference in observed and expected frequencies is by random chance. | [Link](https://en.wikipedia.org/wiki/Chi-squared_test) + +### Aggregate Balance Measures + +Aggregate Balance Measures allow us to obtain a higher notion of inequality. They are calculated on the set of all sensitive columns and do not depend on the label column. + +These measures look at distribution of records across all combinations of sensitive columns. For example, if Sex and Race are specified as sensitive features, it then tries to quantify imbalance across all combinations of the two specified features - (Male, Black), (Female, White), (Male, Asian-Pac-Islander), etc. + +Measure | Description | Interpretation | Reference +| - | - | - | - +Atkinson Index | It presents the percentage of total income that a given society would have to forego in order to have more equal shares of income between its citizens. This measure depends on the degree of society aversion to inequality (a theoretical parameter decided by the researcher), where a higher value entails greater social utility or willingness by individuals to accept smaller incomes in exchange for a more equal distribution. An important feature of the Atkinson index is that it can be decomposed into within-group and between-group inequality. | Range [0, 1]. 0 if perfect equality. 1 means maximum inequality. In our case, it is the proportion of records for a sensitive columns’ combination. | [Link](https://en.wikipedia.org/wiki/Atkinson_index) +Theil T Index | GE(1) = Theil's T and is more sensitive to differences at the top of the distribution. The Theil index is a statistic used to measure economic inequality. The Theil index measures an entropic "distance" the population is away from the "ideal" egalitarian state of everyone having the same income. | If everyone has the same income, then T_T equals 0. If one person has all the income, then T_T gives the result (ln N). 0 means equal income and larger values mean higher level of disproportion. | [Link](https://en.wikipedia.org/wiki/Theil_index) +Theil L Index | GE(0) = Theil's L and is more sensitive to differences at the lower end of the distribution. Logarithm of (mean income)/(income i), over all the incomes included in the summation. It is also referred to as the mean log deviation measure. Because a transfer from a larger income to a smaller one will change the smaller income's ratio more than it changes the larger income's ratio, the transfer-principle is satisfied by this index. | Same interpretation as Theil T Index. | [Link](https://en.wikipedia.org/wiki/Theil_index) + +## Mitigation + +It will not be a stretch to say that every real-world dataset has caveats, biases, and imbalances. Data collection is costly. Data Imbalance mitigation or de-biasing data is an area of research. There are many techniques available at various stages of ML lifecycle i.e., during pre-processing, in-processing, and post processing. Here we outline a couple of pre-processing techniques - + +### Resampling + +This involves under-sampling from majority class and over-sampling from minority class. Most naïve way to over-sample would be duplicate records and under-sample would be to remove records at random. + +* Caveats: + + 1. Under-sampling may remove valuable information. + 2. Over-sampling may cause overfitting and poor generalization on test set. 
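+
+As a minimal sketch of the naive approach described above (assuming the `df` and binary `label` column from the Usage section, and treating `1` as the minority class), random under- and over-sampling can be done directly in PySpark:
+
+```python
+# Hypothetical illustration: naive resampling by label class.
+majority_df = df.filter(df[label] == 0)
+minority_df = df.filter(df[label] == 1)
+ratio = majority_df.count() / minority_df.count()
+
+# Under-sample: randomly drop majority-class records down to roughly the minority-class size.
+undersampled = minority_df.union(
+    majority_df.sample(withReplacement=False, fraction=1.0 / ratio, seed=42)
+)
+
+# Over-sample: duplicate minority-class records (sampling with replacement) up to roughly the majority-class size.
+oversampled = majority_df.union(
+    minority_df.sample(withReplacement=True, fraction=ratio, seed=42)
+)
+```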
+ +![Bar chart undersampling and oversampling](https://mmlspark.blob.core.windows.net/graphics/exploratory/DataBalanceAnalysis_SamplingBar.png) + +There are smarter techniques to under-sample and over-sample in literature and implemented in Python’s [imbalanced-learn](https://imbalanced-learn.org/stable/) package. + +For example, we can cluster the records of the majority class, and do the under-sampling by removing records from each cluster, thus seeking to preserve information. + +One technique of under-sampling is use of Tomek Links. Tomek links are pairs of very close instances but of opposite classes. Removing the instances of the majority class of each pair increases the space between the two classes, facilitating the classification process. A similar way to under-sample majority class is using Near-Miss. It first calculates the distance between all the points in the larger class with the points in the smaller class. When two points belonging to different classes are very close to each other in the distribution, this algorithm eliminates the datapoint of the larger class thereby trying to balance the distribution. + +![Tomek Links](https://mmlspark.blob.core.windows.net/graphics/exploratory/DataBalanceAnalysis_TomekLinks.png) + +In over-sampling, instead of creating exact copies of the minority class records, we can introduce small variations into those copies, creating more diverse synthetic samples. This technique is called SMOTE (Synthetic Minority Oversampling Technique). It randomly picks a point from the minority class and computes the k-nearest neighbors for this point. The synthetic points are added between the chosen point and its neighbors. + +![Synthetic Samples](https://mmlspark.blob.core.windows.net/graphics/exploratory/DataBalanceAnalysis_SyntheticSamples.png) + +### Reweighting + +There is an expected and observed value in each table cell. The weight is essentially expected / observed value. This is easy to extend to multiple features with more than 2 groups. The weights are then incorporated in loss function of model training. 
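+
+As a rough sketch (assuming the `df`, `label`, and `features` variables from the Usage section, and picking a single sensitive column for illustration), each cell's weight can be computed as the expected count under independence divided by the observed count:
+
+```python
+import pyspark.sql.functions as F
+
+sensitive = features[0]  # e.g. "race"; hypothetical choice for illustration
+n = df.count()
+
+# Observed count per (sensitive value, label) cell, plus marginal totals.
+observed = df.groupBy(sensitive, label).count()
+feature_totals = df.groupBy(sensitive).agg(F.count("*").alias("n_feature"))
+label_totals = df.groupBy(label).agg(F.count("*").alias("n_label"))
+
+# weight = expected count under independence (n_feature * n_label / n) / observed count
+weights = (
+    observed.join(feature_totals, sensitive)
+    .join(label_totals, label)
+    .withColumn("weight", (F.col("n_feature") * F.col("n_label") / F.lit(float(n))) / F.col("count"))
+    .select(sensitive, label, "weight")
+)
+
+df_weighted = df.join(weights, on=[sensitive, label], how="left")
+```
+
+The resulting `weight` column can then be supplied to an estimator that supports instance weights, for example via the `weightCol` parameter of Spark ML's `LogisticRegression`.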
+ +![Reweighting](https://mmlspark.blob.core.windows.net/graphics/exploratory/DataBalanceAnalysis_Reweight.png) From ae4d058d5fd41311c99df8ae873a6784b6162fac Mon Sep 17 00:00:00 2001 From: ms-kashyap <64443771+ms-kashyap@users.noreply.github.com> Date: Wed, 3 Nov 2021 17:43:47 -0400 Subject: [PATCH 06/40] fix: Broken link to notebook in Data Balance Analysis doc (#1240) * [DataBalanceAnalysis] Add doc and sample notebook * Clear outputs in sample notebook * Address jasowang PR comments * [DataBalanceAnalysis] Update notebook and doc * [Databricks E2E Tests] Upgrade DBR from 8.3 to 9.1 LTS * [Databricks E2E Tests] Revert DBR from 9.1 LTS to 8.3 * Fix broken link Co-authored-by: Patel, Kashyap M --- website/docs/features/exploratory/Data Balance Analysis.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/features/exploratory/Data Balance Analysis.md b/website/docs/features/exploratory/Data Balance Analysis.md index e8fc593b16..af1eb0e4dd 100644 --- a/website/docs/features/exploratory/Data Balance Analysis.md +++ b/website/docs/features/exploratory/Data Balance Analysis.md @@ -17,7 +17,7 @@ In summary, Data Balance Analysis, used as a step for building ML models has the ## Examples -* [Data Balance Analysis - Adult Census Income](../notebooks/Data%20Balance%20Analysis%20-%20Adult%20Census%20Income.ipynb) +* [Data Balance Analysis - Adult Census Income](https://github.com/microsoft/SynapseML/blob/master/notebooks/Data%20Balance%20Analysis%20-%20Adult%20Census%20Income.ipynb) ## Usage From 5429d80b146642126985747b264b180734e35e35 Mon Sep 17 00:00:00 2001 From: Serena Ruan <82044803+serena-ruan@users.noreply.github.com> Date: Fri, 5 Nov 2021 00:01:37 +0800 Subject: [PATCH 07/40] fix: fix publish to maven path and pypi env typo (#1242) * fix: fix publish Pypi & build publish path errors * fix inconsistency in variable name of secrets * fix synapseml blob maven path * fix var name * fix testR install spark path --- build.sbt | 17 ++++++++---- .../azure/synapse/ml/codegen/CodeGen.scala | 4 ++- .../azure/synapse/ml/codegen/TestGen.scala | 12 +++++---- .../ml/nbtest/DatabricksUtilities.scala | 3 ++- pipeline.yaml | 10 +++---- project/BlobMavenPlugin.scala | 26 +++++++++---------- project/Secrets.scala | 4 +-- tools/tests/run_r_tests.R | 4 +-- 8 files changed, 45 insertions(+), 35 deletions(-) diff --git a/build.sbt b/build.sbt index b6b1f40fdf..7c2d995ed4 100644 --- a/build.sbt +++ b/build.sbt @@ -259,6 +259,7 @@ val settings = Seq( assemblyOption in assembly := (assemblyOption in assembly).value.copy(includeScala = false), autoAPIMappings := true, pomPostProcess := pomPostFunc, + sbtPlugin := false ) ThisBuild / publishMavenStyle := true @@ -361,10 +362,16 @@ testWebsiteDocs := { ) } -sonatypeProjectHosting := Some( +ThisBuild / sonatypeProjectHosting := Some( GitHubHosting("Azure", "SynapseML", "mmlspark-support@microsot.com")) -homepage := Some(url("https://github.com/Microsoft/SynapseML")) -developers := List( +ThisBuild / homepage := Some(url("https://github.com/Microsoft/SynapseML")) +ThisBuild / scmInfo := Some( + ScmInfo( + url("https://github.com/Azure/SynapseML"), + "scm:git@github.com:Azure/SynapseML.git" + ) +) +ThisBuild / developers := List( Developer("mhamilton723", "Mark Hamilton", "mmlspark-support@microsoft.com", url("https://github.com/mhamilton723")), Developer("imatiach-msft", "Ilya Matiach", @@ -373,9 +380,9 @@ developers := List( "mmlspark-support@microsoft.com", url("https://github.com/drdarshan")) ) -licenses += ("MIT", 
url("https://github.com/Microsoft/SynapseML/blob/master/LICENSE")) +ThisBuild / licenses += ("MIT", url("https://github.com/Microsoft/SynapseML/blob/master/LICENSE")) -credentials += Credentials("Sonatype Nexus Repository Manager", +ThisBuild / credentials += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", Secrets.nexusUsername, Secrets.nexusPassword) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/codegen/CodeGen.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/codegen/CodeGen.scala index 7dba32e973..dffc33aa25 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/codegen/CodeGen.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/codegen/CodeGen.scala @@ -5,6 +5,7 @@ package com.microsoft.azure.synapse.ml.codegen import java.io.File import CodegenConfigProtocol._ +import com.microsoft.azure.synapse.ml.build.BuildInfo import com.microsoft.azure.synapse.ml.core.env.FileUtilities._ import org.apache.commons.io.FileUtils import org.apache.commons.io.FilenameUtils._ @@ -87,13 +88,14 @@ object CodeGen { |Config/testthat/edition: 3 |""".stripMargin) + val scalaVersion = BuildInfo.scalaVersion.split(".".toCharArray).dropRight(1).mkString(".") writeFile(new File(conf.rSrcDir, "package_register.R"), s"""|#' @import sparklyr |spark_dependencies <- function(spark_version, scala_version, ...) { | spark_dependency( | jars = c(), | packages = c( - | "com.microsoft.azure:${conf.name}:${conf.version}" + | "com.microsoft.azure:${conf.name}_${scalaVersion}:${conf.version}" | ), | repositories = c("https://mmlspark.azureedge.net/maven") | ) diff --git a/core/src/test/scala/com/microsoft/azure/synapse/ml/codegen/TestGen.scala b/core/src/test/scala/com/microsoft/azure/synapse/ml/codegen/TestGen.scala index 6badac698c..3bb582e25d 100644 --- a/core/src/test/scala/com/microsoft/azure/synapse/ml/codegen/TestGen.scala +++ b/core/src/test/scala/com/microsoft/azure/synapse/ml/codegen/TestGen.scala @@ -8,6 +8,7 @@ import com.microsoft.azure.synapse.ml.core.test.fuzzing.PyTestFuzzing import java.io.File import CodegenConfigProtocol._ +import com.microsoft.azure.synapse.ml.build.BuildInfo import com.microsoft.azure.synapse.ml.core.env.FileUtilities._ import com.microsoft.azure.synapse.ml.core.utils.JarLoadingUtils.instantiateServices import org.apache.commons.io.FileUtils @@ -30,8 +31,8 @@ object TestGen { } private def makeInitFiles(conf: CodegenConfig, packageFolder: String = ""): Unit = { - val dir = new File(new File(conf.pyTestDir, "synapsemltest"), packageFolder) - if (!dir.exists()){ + val dir = new File(new File(conf.pyTestDir, "synapsemltest"), packageFolder) + if (!dir.exists()) { dir.mkdirs() } writeFile(new File(dir, "__init__.py"), "") @@ -46,7 +47,8 @@ object TestGen { if (!conf.pySrcDir.exists()) { conf.pySrcDir.mkdir() } - writeFile(join(conf.pyTestDir,"synapsemltest", "spark.py"), + val scalaVersion = BuildInfo.scalaVersion.split(".".toCharArray).dropRight(1).mkString(".") + writeFile(join(conf.pyTestDir, "synapsemltest", "spark.py"), s""" |# Copyright (C) Microsoft Corporation. All rights reserved. |# Licensed under the MIT License. See LICENSE in project root for information. 
@@ -59,7 +61,7 @@ object TestGen { |spark = (SparkSession.builder | .master("local[*]") | .appName("PysparkTests") - | .config("spark.jars.packages", "com.microsoft.azure:synapseml:" + __spark_package_version__) + | .config("spark.jars.packages", "com.microsoft.azure:synapseml_$scalaVersion:" + __spark_package_version__) | .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") | .config("spark.executor.heartbeatInterval", "60s") | .config("spark.sql.shuffle.partitions", 10) @@ -79,7 +81,7 @@ object TestGen { generatePythonTests(conf) TestBase.stopSparkSession() generatePyPackageData(conf) - if (toDir(conf.pyTestOverrideDir).exists()){ + if (toDir(conf.pyTestOverrideDir).exists()) { FileUtils.copyDirectoryToDirectory(toDir(conf.pyTestOverrideDir), toDir(conf.pyTestDir)) } makeInitFiles(conf) diff --git a/core/src/test/scala/com/microsoft/azure/synapse/ml/nbtest/DatabricksUtilities.scala b/core/src/test/scala/com/microsoft/azure/synapse/ml/nbtest/DatabricksUtilities.scala index 119b7a581f..e2bd6b811b 100644 --- a/core/src/test/scala/com/microsoft/azure/synapse/ml/nbtest/DatabricksUtilities.scala +++ b/core/src/test/scala/com/microsoft/azure/synapse/ml/nbtest/DatabricksUtilities.scala @@ -40,9 +40,10 @@ object DatabricksUtilities extends HasHttpClient { lazy val ClusterName = s"mmlspark-build-${LocalDateTime.now()}" val Folder = s"/SynapseMLBuild/build_${BuildInfo.version}" + val ScalaVersion: String = BuildInfo.scalaVersion.split(".".toCharArray).dropRight(1).mkString(".") // SynapseML info - val Version = s"com.microsoft.azure:synapseml:${BuildInfo.version}" + val Version = s"com.microsoft.azure:synapseml_$ScalaVersion:${BuildInfo.version}" val Repository = "https://mmlspark.azureedge.net/maven" val Libraries: String = List( diff --git a/pipeline.yaml b/pipeline.yaml index d9525e38bd..dd26efc639 100644 --- a/pipeline.yaml +++ b/pipeline.yaml @@ -65,7 +65,7 @@ jobs: sbt release displayName: Publish Artifacts env: - STORAGE_KEY: $(storage-key) + STORAGE-KEY: $(storage-key) NEXUS-UN: $(nexus-un) NEXUS-PW: $(nexus-pw) PGP-PRIVATE: $(pgp-private) @@ -75,7 +75,7 @@ jobs: condition: and(succeeded(), eq(variables['Build.SourceBranch'], 'refs/heads/master')) displayName: Publish Badges env: - STORAGE_KEY: $(storage-key) + STORAGE-KEY: $(storage-key) NEXUS-UN: $(nexus-un) NEXUS-PW: $(nexus-pw) PGP-PRIVATE: $(pgp-private) @@ -103,7 +103,7 @@ jobs: sbt publishBlob displayName: Publish Blob Artifacts env: - STORAGE_KEY: $(storage-key) + STORAGE-KEY: $(storage-key) NEXUS-UN: $(nexus-un) NEXUS-PW: $(nexus-pw) PGP-PRIVATE: $(pgp-private) @@ -145,7 +145,7 @@ jobs: sbt publishBlob displayName: Publish Blob Artifacts env: - STORAGE_KEY: $(storage-key) + STORAGE-KEY: $(storage-key) NEXUS-UN: $(nexus-un) NEXUS-PW: $(nexus-pw) PGP-PRIVATE: $(pgp-private) @@ -278,7 +278,7 @@ jobs: sbt publishPypi condition: startsWith(variables['tag'], 'v') env: - STORAGE_KEY: $(storage-key) + STORAGE-KEY: $(storage-key) NEXUS-UN: $(nexus-un) NEXUS-PW: $(nexus-pw) PGP-PRIVATE: $(pgp-private) diff --git a/project/BlobMavenPlugin.scala b/project/BlobMavenPlugin.scala index 7008c69964..b8a3cccb86 100644 --- a/project/BlobMavenPlugin.scala +++ b/project/BlobMavenPlugin.scala @@ -22,27 +22,25 @@ object BlobMavenPlugin extends AutoPlugin { override lazy val projectSettings: Seq[Setting[_]] = Seq( publishBlob := { publishM2.value - //TODO make this more general - 1.0 is a hack and not sure of a way to get this with sbt keys - val sourceArtifactName = s"${moduleName.value}_${scalaBinaryVersion.value}_1.0" - val 
destArtifactName = s"${moduleName.value}" + val artifactName = s"${moduleName.value}_${scalaBinaryVersion.value}" val repositoryDir = new File(new URI(Resolver.mavenLocal.root)) val orgDirs = organization.value.split(".".toCharArray.head) - val localPackageFolder = join(repositoryDir, orgDirs ++ Seq(sourceArtifactName, version.value):_*).toString - val blobMavenFolder = (orgDirs ++ Seq(destArtifactName, version.value)).mkString("/") + val localPackageFolder = join(repositoryDir, orgDirs ++ Seq(artifactName, version.value): _*).toString + val blobMavenFolder = (orgDirs ++ Seq(artifactName, version.value)).mkString("/") uploadToBlob(localPackageFolder, blobMavenFolder, "maven") println(blobArtifactInfo.value) }, blobArtifactInfo := { s""" - |SynapseML Build and Release Information - |--------------- - | - |### Maven Coordinates - | `${organization.value}:${moduleName.value}:${version.value}` - | - |### Maven Resolver - | `https://mmlspark.azureedge.net/maven` - |""".stripMargin + |SynapseML Build and Release Information + |--------------- + | + |### Maven Coordinates + | `${organization.value}:${moduleName.value}_${scalaBinaryVersion.value}:${version.value}` + | + |### Maven Resolver + | `https://mmlspark.azureedge.net/maven` + |""".stripMargin } ) } \ No newline at end of file diff --git a/project/Secrets.scala b/project/Secrets.scala index 4e3d0c1e07..8eca220e6e 100644 --- a/project/Secrets.scala +++ b/project/Secrets.scala @@ -58,7 +58,7 @@ object Secrets { lazy val pgpPrivate: String = new String(Base64.getDecoder.decode( sys.env.getOrElse("PGP-PRIVATE", getSecret("pgp-private")).getBytes("UTF-8"))) lazy val pgpPassword: String = sys.env.getOrElse("PGP-PW", getSecret("pgp-pw")) - lazy val storageKey: String = sys.env.getOrElse("STORAGE_KEY", getSecret("storage-key")) - lazy val pypiApiToken: String = sys.env.getOrElse("PYPI_API_TOKEN", getSecret("pypi-api-token")) + lazy val storageKey: String = sys.env.getOrElse("STORAGE-KEY", getSecret("storage-key")) + lazy val pypiApiToken: String = sys.env.getOrElse("PYPI-API-TOKEN", getSecret("pypi-api-token")) } diff --git a/tools/tests/run_r_tests.R b/tools/tests/run_r_tests.R index 847ea3ea46..199b3486ee 100644 --- a/tools/tests/run_r_tests.R +++ b/tools/tests/run_r_tests.R @@ -3,9 +3,9 @@ tryCatch({ spark_install_find(version = "3.1.2") }, error=function(err) { - spark_install_tar("../../../../../../../../spark-3.1.2-bin-hadoop3.2.tgz") + spark_install_tar("../../../../../../../spark-3.1.2-bin-hadoop3.2.tgz") } ) -options("testthat.output_file" = "../../../../../r-test-results.xml") +options("testthat.output_file" = "../../../../r-test-results.xml") devtools::test(reporter = JunitReporter$new()) \ No newline at end of file From ba41290fc22b0b6d59d090e1023a7b08369d93a7 Mon Sep 17 00:00:00 2001 From: Mark Hamilton Date: Thu, 4 Nov 2021 23:04:24 +0000 Subject: [PATCH 08/40] fix: enable backwards compatibility foe old namespace (#1244) --- core/src/main/python/mmlspark/__init__.py | 5 +++++ .../com/microsoft/azure/synapse/ml/codegen/CodeGen.scala | 3 ++- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 core/src/main/python/mmlspark/__init__.py diff --git a/core/src/main/python/mmlspark/__init__.py b/core/src/main/python/mmlspark/__init__.py new file mode 100644 index 0000000000..4c807c6adc --- /dev/null +++ b/core/src/main/python/mmlspark/__init__.py @@ -0,0 +1,5 @@ +import sys +import warnings +warnings.warn("The mmlspark namespace has been deprecated. 
Please change import statements to import from synapse.ml") +import synapse.ml +sys.modules['mmlspark'] = synapse.ml \ No newline at end of file diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/codegen/CodeGen.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/codegen/CodeGen.scala index dffc33aa25..9347a35c72 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/codegen/CodeGen.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/codegen/CodeGen.scala @@ -136,6 +136,7 @@ object CodeGen { if (!conf.pySrcDir.exists()) { conf.pySrcDir.mkdir() } + val extraPackage = if (conf.name.endsWith("core")){" + [\"mmlspark\"]"}else{""} writeFile(join(conf.pySrcDir, "setup.py"), s""" |# Copyright (C) Microsoft Corporation. All rights reserved. @@ -153,7 +154,7 @@ object CodeGen { | long_description="SynapseML contains Microsoft's open source " | + "contributions to the Apache Spark ecosystem", | license="MIT", - | packages=find_namespace_packages(include=['synapse.ml.*']), + | packages=find_namespace_packages(include=['synapse.ml.*']) ${extraPackage}, | url="https://github.com/Microsoft/SynapseML", | author="Microsoft", | author_email="mmlspark-support@microsoft.com", From 5a921cca0277b6175c1a29b75e90e1b761a3e8fc Mon Sep 17 00:00:00 2001 From: ms-kashyap <64443771+ms-kashyap@users.noreply.github.com> Date: Sat, 6 Nov 2021 13:21:25 -0400 Subject: [PATCH 09/40] docs: Introduce Responsible AI section on website (Interpretability + DataBalanceAnalysis) (#1241) * [DataBalanceAnalysis] Add doc and sample notebook * Clear outputs in sample notebook * Address jasowang PR comments * [DataBalanceAnalysis] Update notebook and doc * [Databricks E2E Tests] Upgrade DBR from 8.3 to 9.1 LTS * [Databricks E2E Tests] Revert DBR from 9.1 LTS to 8.3 * Fix broken link * [website] Model Interpretability -> Responsible AI * Get latest from upstream/master * Host DataBalanceAnalysis-AdultCensusIncome cell outputs in blob instead of inline, use Interpretability-Image Explainers as outstanding notebook in features/responsible_ai/ * Replace ModelInterpretability-SnowLeopardDetection with Interpretability-Image Explainers as outstanding Responsible AI notebook Co-authored-by: Patel, Kashyap M --- ...lance Analysis - Adult Census Income.ipynb | 663 ------------------ ...alanceAnalysis - Adult Census Income.ipynb | 641 +++++++++++++++++ website/docs/examples/about.md | 8 +- ...taBalanceAnalysis - Adult Census Income.md | 339 +++++++++ ...nterpretability - Explanation Dashboard.md | 191 +++++ ...terpretability - Tabular SHAP explainer.md | 2 +- .../Interpretability - Text Explainers.md | 0 ...terpretability - Snow Leopard Detection.md | 0 website/docs/features/onnx/about.md | 2 +- .../Data Balance Analysis.md | 12 +- .../Interpretability - Image Explainers.md | 0 .../Model Interpretation on Spark.md} | 8 +- website/notebookconvert.py | 32 +- website/sidebars.js | 16 +- website/src/pages/index.js | 2 +- website/src/plugins/examples/index.js | 15 +- 16 files changed, 1228 insertions(+), 703 deletions(-) delete mode 100644 notebooks/Data Balance Analysis - Adult Census Income.ipynb create mode 100644 notebooks/DataBalanceAnalysis - Adult Census Income.ipynb create mode 100644 website/docs/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.md create mode 100644 website/docs/examples/responsible_ai/Interpretability - Explanation Dashboard.md rename website/docs/examples/{model_interpretability => responsible_ai}/Interpretability - Tabular SHAP explainer.md (98%) rename 
website/docs/examples/{model_interpretability => responsible_ai}/Interpretability - Text Explainers.md (100%) rename website/docs/{features/model_interpretability => examples/responsible_ai}/ModelInterpretability - Snow Leopard Detection.md (100%) rename website/docs/features/{exploratory => responsible_ai}/Data Balance Analysis.md (96%) rename website/docs/{examples/model_interpretability => features/responsible_ai}/Interpretability - Image Explainers.md (100%) rename website/docs/features/{model_interpretability/about.md => responsible_ai/Model Interpretation on Spark.md} (97%) diff --git a/notebooks/Data Balance Analysis - Adult Census Income.ipynb b/notebooks/Data Balance Analysis - Adult Census Income.ipynb deleted file mode 100644 index ba4cd7c427..0000000000 --- a/notebooks/Data Balance Analysis - Adult Census Income.ipynb +++ /dev/null @@ -1,663 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "f4e01a16-20fa-446a-9e3d-b560907b9ab2", - "showTitle": false, - "title": "" - } - }, - "source": [ - "## Data Balance Analysis using the Adult Census Income dataset\n", - "\n", - "In this example, we will conduct Data Balance Analysis (which consists on running three groups of measures) on the Adult Census Income dataset to determine how well features and feature values are represented in the dataset.\n", - "\n", - "This dataset can be used to predict whether annual income exceeds $50,000/year or not based on demographic data from the 1994 U.S. Census. The dataset we're reading contains 32,561 rows and 14 columns/features.\n", - "\n", - "[More info on the dataset here](https://archive.ics.uci.edu/ml/datasets/Adult)\n", - "\n", - "---\n", - "\n", - "Data Balance Analysis is relevant for overall understanding of datasets, but it becomes essential when thinking about building Machine Learning services out of such datasets. Having a well balanced data representation is critical when developing models in a responsible way, specially in terms of fairness. \n", - "It is unfortunately all too easy to build an ML model that produces biased results for subsets of an overall population, by training or testing the model on biased ground truth data. There are multiple case studies of biased models assisting in granting loans, healthcare, recruitment opportunities and many other decision making tasks. In most of these examples, the data from which these models are trained was the common issue. These findings emphasize how important it is for model creators and auditors to analyze data balance: to measure training data across sub-populations and ensure the data has good coverage and a balanced representation of labels across sensitive categories and category combinations, and to check that test data is representative of the target population.\n", - "\n", - "In summary, Data Balance Analysis, used as a step for building ML models has the following benefits:\n", - "* **Reduces risks for unbalanced models (facilitate service fairness) and reduces costs of ML building** by identifying early on data representation gaps that prompt data scientists to seek mitigation steps (collect more data, follow a specific sampling mechanism, create synthetic data, etc.) before proceeding to train their models. \n", - "* **Enables easy e2e debugging of ML systems** in combination with [Fairlearn](https://fairlearn.org/) by providing a clear view if for an unbalanced model the issue is tied to the data or the model. 
\n", - "\n", - "---\n", - "\n", - "Note: If you are running this notebook in a Spark environment such as Azure Synapse or Databricks, then you can easily visualize the imbalance measures using the built-in plotting features.\n", - "\n", - "Python dependencies:\n", - "* matplotlib==3.2.2\n", - "* numpy==1.19.2" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "a51d55f3-8f47-47e6-8698-4b78e65f034d", - "showTitle": false, - "title": "" - } - }, - "outputs": [], - "source": [ - "import matplotlib\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import pyspark.sql.functions as F" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "5c7332a8-b256-4c57-a593-ab338f7ca623", - "showTitle": false, - "title": "" - } - }, - "outputs": [], - "source": [ - "df = spark.read.parquet(\"wasbs://publicwasb@mmlspark.blob.core.windows.net/AdultCensusIncome.parquet\")\n", - "display(df)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "267c342b-2770-4dff-aae3-aa75af24adef", - "showTitle": false, - "title": "" - } - }, - "outputs": [], - "source": [ - "# Convert the \"income\" column from {<=50K, >50K} to {0, 1} to represent our binary classification label column\n", - "label_col = \"income\"\n", - "df = df.withColumn(label_col, F.when(F.col(label_col).contains(\"<=50K\"), F.lit(0)).otherwise(F.lit(1)))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "5af3f65c-5f1b-4e11-9bc9-ffa2b00116ae", - "showTitle": false, - "title": "" - } - }, - "source": [ - "### Perform preliminary analysis on columns of interest" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "819cb707-a3fd-45c0-a3d9-96e54d4a7e6f", - "showTitle": false, - "title": "" - } - }, - "outputs": [], - "source": [ - "display(df.groupBy(\"race\").count())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "dd78d2b8-fefa-458b-bc23-629f7e763414", - "showTitle": false, - "title": "" - } - }, - "outputs": [], - "source": [ - "display(df.groupBy(\"sex\").count())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "0d14030f-1fd8-4c1e-8742-7ad7d2dea4d2", - "showTitle": false, - "title": "" - } - }, - "outputs": [], - "source": [ - "# Choose columns/features to do data balance analysis on\n", - "cols_of_interest = [\"race\", \"sex\"]\n", - "display(df.select(cols_of_interest + [label_col]))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "ae54d20f-f04a-4ffd-a442-e995955d922e", - "showTitle": false, - "title": "" - } - }, - "source": [ - "### Calculate Feature Balance Measures\n", - "\n", - "Feature Balance Measures allow us to see whether each combination of sensitive feature is receiving the positive outcome (true prediction) at equal rates.\n", - "\n", - "In this context, we define a feature balance measure, also referred to as the parity, for label y as the absolute 
difference between the association metrics of two different sensitive classes \\\\([x_A, x_B]\\\\), with respect to the association metric \\\\(A(x_i, y)\\\\). That is:\n", - "\n", - "$$parity(y \\vert x_A, x_B, A(\\cdot)) \\coloneqq A(x_A, y) - A(x_B, y) $$\n", - "\n", - "Using the dataset, we can see if the various sexes and races are receiving >50k income at equal or unequal rates.\n", - "\n", - "Note: Many of these metrics were influenced by this paper [Measuring Model Biases in the Absence of Ground Truth](https://arxiv.org/abs/2103.03417).\n", - "\n", - "Measure | Family | Description | Interpretation/Formula | Reference\n", - "- | - | - | - | -\n", - "Demographic Parity | Fairness | Proportion of each segment of a protected class (e.g. gender) should receive the positive outcome at equal rates. | The closer to 0, the better the parity. \\\\(DP = P(Y \\vert A = \"Male\") - P(Y \\vert A = \"Female\")\\\\). Y = Positive label rate. | [Link](https://en.wikipedia.org/wiki/Fairness_%28machine_learning%29)\n", - "Pointwise Mutual Information (PMI), normalized PMI | Entropy | The PMI of a pair of feature values (e.g. Gender=Male and Gender=Female) quantifies the discrepancy between the probability of their coincidence given their joint distribution and their individual distributions (assuming independence). | Range (normalized) [-1, 1]. -1 for no co-occurrences. 0 for co-occurrences at random. 1 for complete co-occurrences. | [Link](https://en.wikipedia.org/wiki/Pointwise_mutual_information)\n", - "Sorensen-Dice Coefficient (SDC) | Intersection-over-Union | Used to gauge the similarity of two samples. Related to F1 score. | Equals twice the number of elements common to both sets divided by the sum of the number of elements in each set. | [Link](https://en.wikipedia.org/wiki/S%C3%B8rensen%E2%80%93Dice_coefficient)\n", - "Jaccard Index | Intersection-over-Union | Similar to SDC, gauges the similarity and diversity of sample sets. | Equals the size of the intersection divided by the size of the union of the sample sets. | [Link](https://en.wikipedia.org/wiki/Jaccard_index)\n", - "Kendall Rank Correlation | Correlation and Statistical Tests | Used to measure the ordinal association between two measured quantities. | High when observations have a similar rank and low when observations have a dissimilar rank between the two variables. | [Link](https://en.wikipedia.org/wiki/Kendall_rank_correlation_coefficient)\n", - "Log-Likelihood Ratio | Correlation and Statistical Tests | Calculates the degree to which the data supports one variable versus another. Log of the likelihood ratio, which gives the probability of correctly predicting the label in ratio to the probability of incorrectly predicting the label. | If the likelihoods are similar, it should be close to 0. | [Link](https://en.wikipedia.org/wiki/Likelihood_function#Likelihood_ratio)\n", - "t-test | Correlation and Statistical Tests | Used to compare the means of two groups (pairwise). | The value, looked up in a t-distribution, tells whether the difference is statistically significant. 
| [Link](https://en.wikipedia.org/wiki/Student's_t-test)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "5dd892b3-b2e6-4fcb-8829-9c058fa4fd5e", - "showTitle": false, - "title": "" - } - }, - "outputs": [], - "source": [ - "from synapse.ml.exploratory import FeatureBalanceMeasure\n", - "\n", - "feature_balance_measures = (\n", - " FeatureBalanceMeasure()\n", - " .setSensitiveCols(cols_of_interest)\n", - " .setLabelCol(label_col)\n", - " .setVerbose(True)\n", - " .transform(df)\n", - ")\n", - "\n", - "# Sort by Demographic Parity descending for all features\n", - "display(feature_balance_measures.sort(F.abs(\"FeatureBalanceMeasure.dp\").desc()))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "46e1a9a7-97c7-437e-bead-eaf4c3b9e0d6", - "showTitle": false, - "title": "" - } - }, - "outputs": [], - "source": [ - "# Drill down to feature == \"sex\"\n", - "display(feature_balance_measures.filter(F.col(\"FeatureName\") == \"sex\").sort(F.abs(\"FeatureBalanceMeasure.dp\").desc()))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "d4bd77a1-3c10-4e16-9892-4ac920fb4432", - "showTitle": false, - "title": "" - } - }, - "outputs": [], - "source": [ - "# Drill down to feature == \"race\"\n", - "display(feature_balance_measures.filter(F.col(\"FeatureName\") == \"race\").sort(F.abs(\"FeatureBalanceMeasure.dp\").desc()))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "aaec9b6a-06c6-4afb-86c8-a7fbc3df92d7", - "showTitle": false, - "title": "" - } - }, - "source": [ - "#### Visualize Feature Balance Measures" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "969e62cd-bb6c-4417-9046-dd8aa6d0fa9e", - "showTitle": false, - "title": "" - } - }, - "outputs": [ - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAWwAAAEYCAYAAAB4LMxuAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOydZ3hVxdaA3wUBQSCAoJAEaYpCCAQIHaRLCQgGUUFpluv1qvAJlmsBRRBFRRD0Xr1WVEQUhAihBURUkJaEqlhAQUkCKkqxUBLW92PPSU6SU3KSQE5w3ueZ5+w9de2ZfdaePTN7lqgqFovFYgl+ShW3ABaLxWLJH1ZhWywWSwnBKmyLxWIpIViFbbFYLCUEq7AtFoulhGAVtsVisZQQrML+GyIiE0Rkto/wL0Sky1kUyZMMv4tI/bNQTnkRWSwiR0Rk3pkur6g5m20lImtE5NaijmvJP39bhS0ie0XkLxE5JiKHReRzEbldRP62deJCVRur6ppA0ohIXRFRo2h/N/X7QCFkqKiq35m8Z4nI4wXNyw+DgBpANVW9NnegebidMtfkuk/anSFZAqYgbeWOOHwnIl8WoVi5yxgpImv9xFkjIsfN//GoiCSLyAMicl4A5aiIXFp4iYOjHE/83ZXTVapaCagDTAH+DbxWvCL5R0RKF7cMPqiiqhWBIcAjItI7kMQiEnJmxPJKHeAbVc3wEec9c03VgY+BEtcT90En4CKgvoi0KmZZ7jL/xzDgHmAwsFREpHjFCh7+7gobAFU9oqqLgOuBESISBSAi54nIVBH5QUQOishLIlLehHURkf0icr+I/CQi6SJytYjEisg3IvKriDzkKsPk9ZyIpBn3nHvvweSTbsJudX+Kmx7miyKyVET+ALqKSF8R2WJ6Iz+KyAS3vFy93dtMfukicm+uyy4rIm+ZHs0XItLSLf1eEelhjkuLyEMissfETRaRi/NRp+uBL4AoEWktIutNDzVdRF4QkbJu5amI3Cki3wLfuvldKiK3ATcC95te7mIRuU9EPnAvT0RmisgMT7KISCPTgztsrrW/8X8MeAS43uR9i59rygDeASJE5EKTh79raywiK839cNB1T4hIKdOD3CMih0TkfRG5wISVE5HZxv+wiGwWkRpers29rSaYfDy2qxdGAB8CS82xe95XishX4gwXvQCIW1iOYTW3ey4kVx6NgJeAdqaOD/uRB1X9w7w19AfaAX1NXl7rWkQ+Ncm3mXKuF5GqIpIgIj+LyG/muJabbCPFebs4JiLfi8iNbmE3i8guk26FiNTxVo6/6ylSVPVv6YC9QA8P/j8A/zLH04FFwAVAJWAx8KQJ6wJk4PzhywD/AH4G5pi4jYG/gHom/kRgA05v5kLgc2CSCesNHDBpzgdmAwpcasJnAUeADjgP2XKm/CbmvClwELjaxK9r0r8LVDDxfnZdLzABOA7EAqWBJ4ENnuoGuA/YAVyO84eNxhk+yF1vrjJDTLwOwJ9AdyAGaGvC6gK7gLvd0iqw0tRzeTc/9+t/3C1+GPAHTm8ek+9PQIwHucoAu4GHgLJAN+AYcLlbXcz2cZ9khZv0U4BfgBDj5/XazH2QjtNbLGfO25iw/8O5H2oB5wH/A941Yf/EudfON+0TA4T6u4/9tauHtOcDR038a8x1lTVh1U09DTJ1OAbnfr/VU725t785X+MWdySw1s//MSt+Lv9Pgaf81XXue8acVzPXdb6p+3lAvAmrYK7ddR+EAY3N8QBzzzQyZY0DPvdWzlnVW8VRaDA4vCvsDcDDOErnD+ASt7B2wPfmuAuOQi5tziuZhmzjFj+ZbCW6B4h1C+sF7DXHr2MeBOb8UvIqrLf8XM9zwHRz7PrzNHQLfxp4zRxPAFa5hUUCf3mqG+BrYEA+6tNV5mHgN/NnGu0l7t3AQrdzBbrliuNVYRu/ZcA/zHE/4EsvZV2B8zAs5eb3LjDBrS78KeyT5roygUNAFx/xs64NZ1hoi5d4u4DubudhwCkcBXEzzgO9aSD3sb929ZB2KM6DPATngXIEiDNhw8n5EBdgP2dfYc8FXgngPvKqSIFmwG/muIJp02swnYRc99YtbuelcDofdfJTzpl0dkgkLxHArzi94POBZPMKdhhYbvxdHFLVTHP8l/k96Bb+F1DRHIcD+9zC9hk/V9iPbmHuxx79RKSNiHxsXveOALfj9Iq8pXEvDxwl5uJPoFzu11nDxTgPm/xSXVWrqmojVZ1pZL3MvI4eEJGjwBN+ZM0Pb+IoHMzv217ihQM/quppN799OO2cX95X1So4k5M7cXp6gN9r81V3dYCFbvfWLpwHQg1zLSuAueIMaT0tImXyKWt+2xWcIZD3VTVDVY8DH5A9LJLjnlRHUwXaRkWB6/+Y3/soCxE5X0T+JyL7TPxPgSoiUlpV/8AZAr0dSBeRJSLS0CStA8xwa5tfcR5YgdwzZwSrsN0QZ9IlAliL83r4F85rUhXjKqsz+VQQ0nBuBBe1jR84r8213MI8jRHn3lZxDs5wzcWqWhlnnDD35Ix7Pu7lBcKPwCUFSOfOi8BXQANVDcUZnsgtq69tIz2FxQNNxZlv6IcztuyJNOBiybn6pzaQmh/Bcwih+gtwGzBBRMKMt69r+xHwtjTxR6CP271VRVXLqWqqqp5S1cdUNRJob65veKDy+sKM5XYDhhoFeABn+CNWRKrj3JMXu8UXct5Pf+B0aFzU9FFcgbYEFWeuJAb4zHjl5z5y5x6cobw2Jn4nV9YAqrpCVa/Eebv5CnjFhP8I/DNX25RX1c8Lch1FiVXYgIiEikg/nNev2aq6w/TIXgGmi8hFJl6EiPQqYDHvAuNE5ELzh3gEZ6wa4H3gJnEmx84Hxucjv0rAr6p6XERaAzd4iDPe9DIaAzcB7xVA7leBSSLSQByaiki1APOohDNe+LvpxfwrwPQHyaX4TI9wPs6Da5Oq/uAl7Uacnub9IlJGnDXLV+G0dcCo6tc4vd/7jZeva0sAwkTkbnEmnSuJSBsT9hIw2W0y60IRGWCOu4pIE3FWAx3FGSpxf0MoCoYB3+AotGbGXYYz7DEEWAI0FpGBpoc+mpxKeSvQSURqi0hl4EEfZR0EaonbZKwvzD3bGWcydBPOhCj4v49y3yeVcDpdh8WZ0H3UrYwaIjJARCoAJ4Dfya7jl4AHzf8GEaksIu5LPvPcj2eLv7vCXiwix3CeqA8D03AUm4t/40w+bDCvVKtwbvCC8DiQBGzHmcRLMX6o6jJgJs6Ssd044+jg3EjeuAOYaOR/BEfp5+YTk99HwFRVTSyA3NNM3ok4f5bXgPIB5nEvzgPlGM5DMNAHx2tApHlFjXfzfxNnQtXbcAiqehJHQffBeWv6LzBcVb8KUAZ3ngFuMw9yr9emqseAK035B3BWwHQ1wTNw3pASTRtuAFzKvCbOw+gozlDJJ76usYCMAP6rqgfcHY6yGmHeJq7FmWQ9BDQA1rld20pzrdtx5moSfJS1GmfF0AER+cVHvBdMXRzEmZP5AOjtNpzl7z6aALxp7pPrTB7lcdp9A8
6QpotSwFicN7Bfgc6YB4CqLgSewhmSOoozDNbHRzlnDTGD6JYgQpylUDuB89T3+mBv6esC3wNlCpK+pCAitXFeZWuq6tHilsdiOdP83XvYQYOIxJnX5qo4T/fF57KyLSxmTHosMNcqa8vfBauwg4d/4qwl3oOzWiDQcd6/DWbc8SjOcMOjfqJbLIjI6+J84LbTS7iI8/HVbhHZLiIt3MJGiMi3xo3wlP5sYYdELBbLOY+IdMKZWHxLVaM8hMcCo3A+ImoDzFDVNmayMgloibPaJRnnA63fzprwbtgetsViOedR1U8x67m9MABHmauqbsBZrx2G84HbSlX91SjplThfJhcLZ3ujHctZoHr16lq7du1ileFUUS9CC5BgeHMsW7r49yw6ebr46+GLbVt/UdUL/cf0TqnQWkrGca/h+tehL3A+y3fxsqq+HEAREeT8MGi/8fPmXyxYhX0OUrt2bdatW+c/4hkk7ffinS89mVn8iqp2aH4/TjxzpP5+qrhF4PIalff5j+WHjOOEXN7fa/CprW8cV1V/G12VeOyQiMViCX5EKBVS1qsrAlLJ+SVnLePnzb9YsArbYrEEP1LqTCvsRcBws1qkLXBEVdNxvmrtKc5WrVWBnsavWLBDIhaLJegRQEoX3G6HiLyLs8NmdRHZj7MctAyAqr6E8/l7LM6XwX9ivnhW1V9FZBKw2WQ1UVV9TV6eUazCtlgswY8IpQvRk1bVIX7CFbjTS9jrOFsgFztWYVssluBHBCkVzJbxzg5WYVsslqBHEEqFFP+qm+LGKmyLxRL82B42YBW2xWIpCYhQqkyRrAYp0ViFbbFYgh4x67D/7liFbbFYSgBCKTskYhW2xWIJfmwP28F+6fg3JzExkejoaKKiopg6dWqe8BMnTjBs2DCioqLo1KkT+/YVfluIT1av5Mp2LejWOpqXZk7LE75p/Tr6d7+Cy8OqsmxxfJ7wY8eO0iG6IRMeuKfAMny2ehWxHWPo1a4ZrzyfV4ak9eu45soraFLrAlYkZMuwcd2nxPXomOWa1b2IVct8WcfyzMrERJo3i6Zpkyie9VLvw4cPo2mTKLp0zq73Q4cO0adPb2pcdCFjx44JuFx3Pl29il7tY7iyTTNe9tAOm9evI67HFUSGX8DyXO3QKKwqA7p1ZEC3jtw+bHCh5MgvUqq0V/d3ocgVtog0ExEVEb9bEIrIqyISWUTlrhGRr0Vkm4isE5GC2l50z/N3P+F7jUHdQuOvrDNBZmYmY8aMIT4+npSUFObNm8euXbtyxJk1axZVqlRh586djBo1inHjxhW6zAn/vofX3v2A5Ws3k7BgPt9+ndO8YnhELZ6e+SJXDbzWYx7PTXmc1u3aF0qGxx+6h/+9M5/Fn2xiafwH7M4lQ1itWjwx40X6xuWUoU2HTixctZaFq9byxrxFlCtfng6duwVc/tixY1iwMJ6kZM/1/uabTr1v37GTO+8axfjxTr2XK1eO8eMfYfITTxTgynPKMPGBe3h1znyWfLaJhIUe6iCiFk/OeJF+HtqhXLnyfLh6LR+uXstLbxfInnFgnPlP00sEZ6KHPQRYa359oqq3quqXRVj2jaoajWOc9ZkizDeoMPsdFLrtkpKSuOSSS6hXrx5ly5Zl0KBBJCTk7C0uWbKEoUOHAhAXF8eaNWsKtXXptpQk6tSrT+26Tpl9465h1fIlOeLUql2Hho2jKFUq7yXu3LaFX37+iY5duhdYhh1bkqldtz4X13Fk6DNgIKtX5JQh4uI6XB7pWQYXiQkfckXXKyl//vkBlZ+UlET9+jnrfUnuek9Ywo035q33ChUq0L59e8qdVy6gMnOzPSWZOvXqc7GrHa4eyEcBtMNZR5xP0725vwtF2hIiIjiWlkcCV4pIOeNfQUSWmN7vThG53vivEZGW5vhFEUkSkS9E5DG3PPeKyGMikiIiO4x5e398ClwqInVF5DOTNkVEsrplIvJvk982EZni57rCRORTEdlq5L/CQ5x4EUk28t/m5v+7iEw25WwQkRrGv56IrDcyPJ4rr/tEZLMxVfSY8atr3iDewjHQ676DWIFIS0sjIiJ7a9+IiAjS0tK8xgkJCSE0NJRDhw4VuMyDB9IJi6iVdV4zLJyD6Wk+UmRz+vRpnnj0YR6YMLnA5TsypFHT7bprhkXw04H0gPNZ9uEH9I0bFHC6tLQ0atXKVe/peevdFSckJITKhaz33Bw8kEbN8GwZaoRHcDCAOjhx4jgDe3bmuj7dWbU08CGhQBHbwwaKftKxPfC9qu4RkTVAX4ypeiBNVfsCiEhlD2kfNhutlAY+EpGmqrrdhP2iqi1E5A4cU/e3+pHjKmAHjo3EK1X1uIg0AN4FWopIHxwLE21U9U9jBsgXNwArVHWykc9Tl+pmI395YLOIfKCqh4AKwAZVfVhEngb+ATwOzABeVNW3RCRrDwMR6Qk0AFrj7HmzyJg3+sH4jzAWMXJgHhK3AVx8caF1eVAy+41X6NK9J2HhxbZ/fBY/HzzAN7u+pEMhevolmY+Td1IjLJwf937PiEH9uSwyktp165/RMkuH2DUSRf2uMwRwDWjNJXtYZAdOj/spEblCVY94SHudiKQAW4DGgPvY9gLzmwzU9VH+OyKyFeiAo9jLAK+IyA5gnluePYA3VPVPcHbk8nNdm4GbRGQC0ERVj3mIM1pEtgEbcHq/DYz/ScDVBXGXvwPOAwTgbbd8ehq3BUgBGrrltc+TsjbX8LKqtlTVltWr529YPTw8nNTU7K19U1NTCQ8P9xonIyODo0ePUq1atXzl74kaNcNIT92fdX4gPY0aYeE+UmSzdfMm3n79ZTrHRDFlwsMsfH8uT08K3AZvjZrhHHC77gPpqVxUMyygPJYvWkiPPv0oUybwz6XDw8PZvz9XvYflrXdXnIyMDI4Ust5zU6NmOAfSsmU4mJZKjQDqwNVmF9etR+v2Hflyx3Y/KQqHiCClvLt8pO9t3lB3i8gDHsKnmzforSLyjYgcdgvLdAtbVMSXFhBFprBNz/Ma4BER2Qs8D/QWkUqq+g3QAkdxPy4ij+RKWw9HwXZX1abAEsB9kO6E+c3EvBWIyApTga+6xbtRVZup6tWq+iMwBjgIROMY0fT67iQiF7s1yu3uYcYeXCecjctnicjwXGm74DwE2pkx9C1u8p/S7EHfLPldWXsSBXjSXEczVb1UVV8zYX94k78gxMTEsHv3bvbu3cvJkyeZP38+ffv2zREnNjaW2bNnA7Bw4UI6d+6MM/JVMJo2j2Hfd9/x4z6nzCULP6B7r9h8pZ320mt8tuVLPkneyQMTJhN33WDuH/+Y/4S5iGrWgn3f72H/D44Myz5cQNd8yuBiSfx8YgswHAJOve/Zk7PeY3PXe99Y3nmn6Oo9N02at2Dvd3uy2yF+Ad3yWQdHDv/GyRPOX/LXQ4dI2bSBSy/Lz0hl4ShdupRX5wujm/4D9MHptA3JvdhBVce4/nM4umuBW/Bfbv9H72ZvzgJF+Y7RHdiuqr1cHiLyJhAnIquAX1V1tnly5R7SCMVRRkfMGG8fYI2vwtzL8UFlYL+qn
hbHPL1rdmIlzoPlHdeQiFHwzTxlIiJ1TD6viMh5OA+ft3KV85vJqyHQNh+yrQMGA7OBG938VwCTjGy/i0gEcEbsPIWEhDBt2jT69+9PZmYmw4cPJzIykokTJ9KiRQv69evHyJEjueWWW4iKiqJq1aq89dZb/jP2U+ajU57hpuvjyMzM5NobhnFZw0Y8N+Vxopq1oEfvWLZvSeZfI2/k6JHDrE5cxoynn2D5Z5uK6KodGR5+Yir/GDKQ05mZxA0eSoPLG/H805NpHN2cbr1i2bE1mdE3D+Xo4cN8vHIZLzzzJIs/2QhA6o/7OJCWSqt2HQtc/rPPTuPqAU69DzP1PmmSU+99+/ZjxIiR3HrrLTRt4tT7rDez6z2yUUOOHTvGyZMnSVi8mA8XLaZRo0YBy/DIk1O5dfBAMjMzuWbIUBo0bMSMpyYTFd2c7qYd7rrJ1EHiMp5/5kmWfLqRPd9+w6P33o2UKoWePs0/Ro3h0svPsMIW8tWT9kJrYLeqfgcgInNxhkS9LXgYgrNfdtAhRWWsVETeADaazcBdfv2BfwHP4azaOI2jfP6lqklmnPteczwLZwz8R+AIsEhVZ5neektV/cVMUE5V1S4eys/Ky82vAc4YugLLgTtVtaIJewAYjjNksVRVH/KQ5++qWtEo+/uM7L8Dw1X1e5dswDEgHme442ugCjBBVde48jD5DQL6qepI81YxB6gIfAjc7Rbv/8h+qP0ODMXpnSeoapTvloAWLVqotelobTpC0Nh0TC6svcXyNRpo/RvzrhV38eX0/vuAX9y8sozwmv9db1W91ZwPw5m/uit3PqZztgGopaqZxi8D2ApkAFNUNe/HAWeJIlPYluDBKmyrsF2cMwq7ZgO9ZOh0r+FfPHuV1zICVNj/xlHWo9z8IlQ1VUTqA6txhm73FOZ6CkoQLLC0WCwW34hA6ZBSXp0fAjGkO5jsxQAAqGqq+f0OZ6i2eUGuoSiwCttisZQIpJR354fNQAPz7UNZHKWcZ7WHmX+qCqx386tq5q0Q56vmDngf+z7j2IWNFosl6BERv6tBvKGqGSJyF86EfmngdVX9QkQmAkmq6lLeg4G5mnOcuBHwPxE5jdPBnVLEX2cHhFXYFoulRFDK/9CHV1R1KY5ldHe/R3KdT/CQ7nOgSYELLmKswrZYLMGPQKkiXIdeUrEK22KxBD2OEV475WYVtsViCX4EShX8w5lzBquwLRZL0CNAqdJWYVuFbbFYgh/bwwaswrZYLCUCoVQBl/WdS1iFfQ4iGScpc2hvscoQXq1usZZ/6nSxFg9A2d8Kb/+ysERUrVPcIhQJri8d/+5YhW2xWIIeEShth0SswrZYLMGPIJS1PWyrsC0WSwngHOthi8gp4DiejZhUUFWPloWtwrZYLEFPKYHzzq0e9g5VbeEpwJhK9Mg5VQMWi+XcRHB62N6c3/T+bTqOFJGf3cwE3uoWNkJEvjVuRBFdki9T717DbA/bYrEEPSJC2RCPowT5Seuy6XglsB/YLCKLPOy6915uowYicgGOubCWOMMXySbtbwUSJptTInKhqv6cq7zq+DAJaHvYFosl6BGBsiGlvDo/ZNl0VNWTgMumY37oBaxU1V+Nkl4J9C7whWTzFjDbWLEBsoyRvwu87S2R7WFbLJagR/A79FFdRJLczrNsOgIROLZiXewH2njI4xoR6QR8A4wxhrk9pY0IVP7cqOp0EakEbDRvAODYvH0B8GoLzSpsi8US9IhAWd9fOv5SSLuRi4F3VfWEiPwTeBPoVoj8/KKqE4GJInKhOf/ZTxI7JGKxWEoGhZh09GvTUVUPqeoJc/oqEJPftAVBHP4hIvOB/wJXi/g3dmYVtsViCXpKiXBeSCmvzg9+bTqKSJjbaX9glzleAfQ0th2rAj2NX2F5HIgF/odj0eZCYIq/RFZh/81QVe5+ZDINO/ai+ZVXk7LDs3m68U89R73W3ahyeYzH8AVLEylzcSRJ23YWSp7ExESio6OJiopi6tSpecJPnDjBsGHDiIqKolOnTuzbVzT7c6xamUhM82iaNY1i2rOeyx05fBjNmkbRrUt2uclJm+nYrg0d27WhQ9s2LF70YYHKV1XGjJ9Mow69aNFjAFt2fOEx3vinnqN+q65UvSxnO7z89lyad+9Py55xdIm7kS+/2R2wDCsTE2neLJqmTaJ41kvdDx8+jKZNoujSObsODh06RJ8+valx0YWMHTsm4HILSkF72KqaAbhsOu4C3nfZdBSR/ibaaBH5QkS2AaOBkSbtr8AkHKW/GZho/ArLVcC1qroS+EtVnwA6+0tkFXYRIyJ7RWSHWcu5Q0QGuIX9XsA8Z4nIoKKQb/nHn7L7+33s+mw5Lz71GHc99JjHeH2v7Mrni9/zGHbs9z94/rW3ad28aaFkyczMZMyYMcTHx5OSksK8efPYtWtXjjizZs2iSpUq7Ny5k1GjRjFu3LhClekq956xY5i/IJ5NSSl8MG8eX+Uq9603nXK3bt/JHXeO4tHxTrmNIhuz5rN1rF2/kQ/i47l79GgyMjIClmH5aqcdvlxr2uHBiR7j9evRhXUJedth8NX92PLRIpISF3LPv27h/seeCqj8zMxMxo4dw4KF8SQle677N00dbN+xkzvvGsV4UwflypVj/PhHmPzEEwGVWRhKFW6VCKq6VFUvU9VLVHWy8XvEZYBXVR9U1caqGq2qXVX1K7e0r6vqpca9UUSXJOZB4pw4Pf/z/CWyCvvM0FVVmwGDgJnFLYw7ixJXM/SaAYgIbVtEc+ToMdIP5p3raNsimrAaF3rM49GpM7nvjlspd57f+8snSUlJXHLJJdSrV4+yZcsyaNAgEhIScsRZsmQJQ4cOBSAuLo41a9aQ06h14CQnJVG/fna5AwcNYsmSnOUuXbKEG250yr06Lo5PTLnnn38+ISHOXP3x4yeQAtoZXJy4mhsHOe3QJqYZh48eJf3gT3nitYlpRliNi/L4h1aqmHX8x59/BSxHUq46GDRoEEty133CEm68MW/dV6hQgfbt21PuvHIBlVkYnHXYBVfYQchPItLAHIcC63DWivukRF5pCSIUyLPAXkQqishHIpLioRc+XES2i8g2EcmzHlNEJpked4G+Ikg78BO1wmtmnUeE1SD1wMF8p0/Z8SX70w4Q293v25t/WdLSiIjIXiEVERFBWlqa1zghISGEhoZy6NChwpdbK2e56bnKTXeLExISQmjlUH415SZt3kSbljG0b9OK6TNmZCnwgGQ4cJCL3dqhVlhN0g7kVdi+eHHWOzTs0JOHJk9l2sSHAis/LY1aueogLT1v3ddyq4PKRVD3haEwXzoGIVeTPXn5T2CQqr7iL5Fd1ndm+FicLk994DoP4ceBOFU9ar5s2iAii4BIYBzQXlV/MV9ZZSEizwCVgJs0VzdTRG4DbgOoHeE+f1J0nD59mvsmPsVr087eq3Aw0rJVazYmJfP1V19x+z//wZU9e1Gu3Nnrbbr418gb+dfIG3l3YQJPznyJ15/zO2dVYnENiZxDtABcb0angLoiUldVP/GV6JyqgSCiq6pG4cz+viAi
FXOFC/CEiGwHVuEsxK+Bs+5znqr+AlkTHi7GA5VV9fbcytrEfVlVW6pqy+oX5NDz/HfWHGJ6xRHTK46aF13I/rQDWWGp6QeJqFkjXxd17Pc/+OLrb+lx3QgubdeDjVu2MfDmOws88RgeHk5qavYKqdTUVMLDw73GycjI4OjRo1SrVq1A5eXIc3/OcsNylRvmFicjI4OjR45yQa5yL2/YkAoVKvLll54nDHPz4qx3aNkzjpY9nXb40a0d9qcfILxm3qGP/HD9gFgWrfgooDTh4eHsz1UH4WF5636/Wx0cKYK6LzByzvWw73FzDwELAc8TSm5YhX0GUdU9wEGcnrM7N+Is44kxY90HAX9dtM1ATO5ed364Y+QNJK9YSPKKhQzo1Z3ZH3yIqrIhZRuhlSp5HavOTeXQShzY/jm7169i9/pVtGkezYLX/0PL6KhARQIgJiaG3UH2ni4AACAASURBVLt3s3fvXk6ePMn8+fPp27dvjjixsbHMnj0bgIULF9K5c+cCjxu7aBETw5492eUumD+f2Ni85c55xyk3fuFCOply9+7dmzXJ+MMPP/DtN19Tp3b+rLr8a+SNJCUuJClxIf17d+ed+U47bEzeSuVKlTyOVXvj2+/2Zh0v/egTLq0XmGWZmFx1MH/+fGJz133fWN55p2jrvqAIQplSpby6koaq9ndzvYCGwGF/6eyQyBlERC4C6gG516JVBn5S1VMi0hVw/dtWAwtFZJqqHhKRC9x62ctxliUtEZGeqnqsIDL16daJZas/pWHH3pQvX45Xn52cFRbTK47kFQsBeGDyVObGL+HPv45Tt1VXbh5yDY+MvctbtgUiJCSEadOm0b9/fzIzMxk+fDiRkZFMnDiRFi1a0K9fP0aOHMktt9xCVFQUVatW5a233iqScqc+O42BVzvlDh02nEaRkUyeNJHmLVoQ27cfw0aM5LZbb6FZU6fc12c55W5Y/znTn32WMmVCkFKleHb6c1SrXj1gGfp068zy1Z/SqGMvypcrx6tuw0wte8aRlGja4fFneC9+CX/+9Rf1WnbhpiGDeOSeu3hx1hw+Wvs5ZULKULVyKK9NfzLgOnj22WlcPcCpg2Gm7idNcuq+b99+jBgxkltvvYWmTZw6mPVmdt1HNmrIsWPHOHnyJAmLF/PhosU0atQo4HrILwKc40bTDwN+ez5S2Bl3S05EZC9wDMgEygDPqurrJux3Va1oxq0XAxWBJKAt0EdV95rtG+8z6beo6kgRmQUkqOp8EbkZGAbEqupfnmSIaRqlG5fOO6PX6Y9T1qYj5Q8Xv03Hk0Fg07FihfOTC/nZOPUim+qEt5d4DR/ZsnahyzibiMhqnOcQOCMdlwBvq+qDvtLZHnYRo6p1fYRVNL+/AO28xHkTZx8Dd7+RbsevA68XgagWS4lBgDIlc6zaG/e6HZ+H8wXlt/4SWYVtsViCHxFKnUMKW1VzW5VZLyIbgTm+0lmFbbFYgp5zrYctIu4fMpTCWVHmd22oVdgWiyXoEZwNoM4h7nE7zgD24nxM4xOrsC0WS9AjAmV874ftJ730BmYApYFXVXVKrvCxwK04yvNn4GZV3WfCMoEdJuoPqtqfQlLQPEreAkaLxfK3pLR4d75ws+nYB+ebiCEikvvbiC1AS1VtCswHnnYL+0tVmxlXaGVtZKotIvEi8pMx/rtIRPwu6bEK22KxBD0iQpnSpbw6P/i16aiqH6vqn+Z0A46hgjPJGzgPhjCgJvA++Vj9ZRW2xWIJepwxbO/OD4HaZbwFWOZ2Xk5EkkRkg4j4HWfOJ9VUdbaqZho3G/D73b8dw7ZYLEGPs0rEZ//SlxHe/JcjMhRoSU5jAnVUNdVYOF8tIjvMthOF4WcRGQnMNufDcMbOfWIVtsViCXqcSUefXWlfRnjzZZdRRHoADwOd3ew7oqqp5vc7EVkDNAcKq7BvwpkEdY2VrzN+PrEK22KxlACE0gVf1pdl0xFHUQ8GbsiRu0hzHPuKvVX1Jzf/qsCfxpp6daADOSckC4Sq7geuCTSdHcO2WCxBj6uH7c35Ip82HZ/B2dtnnjHv5zLS2whIMrYePwamqKpnQ6j5ug7pKCLlReRSEVksIr8Yt9gMufjE9rDPQYTTSMbx4pUh82Sxll+mdNliLR9AMk74j3SGKa2B25sMRgr74YyqLgWW5vJ7xO24h5d0n+N8hVhUvKCqzURkNjCN7NUqg4B38LLHkAvbw7ZYLCWC0iJeXQnCJWw5VX1fVU8b9z5wvr/EVmFbLJagR3CGRby5EsRvInI9sEJEbhORUBGpJCL/BBb5S2yHRCwWS/AjUIgv04OJm4FncZYOVgamuoUJjilAr1iFbbFYgh4p3CqRoEFVvwPiCpreKmyLxVIiOId2V829vWoW/qymW4VtsViCHsem4zmksXNur1oBZ7+TLUAnX4mswrZYLCWCc0lf5971T0RqA8/7S2cVtsViCXpEzrkedg5U9QcRuVxESqtqprd4VmFbLJYSwTk2hl0W6A0ccY1bq2pDf+mswrZYLEGPM4Zd3FIUKfHAKaCqiKwGngPeUFWfK0iswrZYLMGPQOlzqYvtbNnaWETOAzaq6gQR8Ws04dxYim7JN6rK3ROe5vIu/Wne+zpSdu7yGG/cMy9Qt30fKjfukMN/7KSpxMQOJiZ2MI26Xk21pj4ntb3KMPa++2kc3ZxWbduzZetWj/FStmylZZv2NI5uztj77kdVAXhs0uO0atueNu070m9AHGnp6QHL4E5iYiLR0dFERUUxderUPOEnTpxg2LBhREVF0alTJ/bt21eo8sDVDk/RsPNVNO99rdd2GP/M89Rr14sqkXm3mJiXsIKmPQYSfeVAho1+oEAyjL33Pho3jaZVm3Y+2mELLVu3pXHTaMbee19WOzz48Diim8fQqk07rht8A4cPHw5YhkAQH64E8rWINHRt4yoi5ciH1XSrsIsQEaksIm+JyG4R2WOOK5uwuiJyg1vckSLywtmWcdmadXy79we++vhDXnxyHHeOe9JjvH49OrE+/q08/tPG30vy0rkkL53LnSMGE9e7W8AyrEhcyZ4937FzawovzJzB6DH3eIw3esxY/vP8DHZuTWHPnu9IXLkKgDH/N5rNGz5n4+dr6dO7F09OKfhul5mZmYwZM4b4+HhSUlKYN28eu3blVJ6zZs2iSpUq7Ny5k1GjRjFu3LgCl+di+Zq17P7+B3atWcSLT4znrocne4zXt3tnPv9wdh7/b7/fx1P/fZ1PPpjFtpULePaR+wOWYUViInv27GHntq288PwMRt89xmO80XeP4T8vzGTntq3s2bOHxJUrAejerSvJmzeyeeN6GjS4lGeenRawDPlFcHrY3pzf9CK9ReRr89/M83QTkfNE5D0TvlFE6rqFPWj8vxaRXkV0SVWALSLyMVAHZwvYGf4SWYVdtLwGfKeql6rqJcD3wKsmrC659uAtDMawaMAsXrmGYQP7ISK0bd6UI0ePkf5TXkMXbZs3JeyiC33mNXfxcq6/qnfAMiQsWcoNQwYjIrRp3Yojh4+QfuBAjjjpBw5w7Ogx2rR
uhYhww5DBLE5YAkBoaGhWvD//+BMpxOqBpKQkLrnkEurVq0fZsmUZNGgQCQkJOeIsWbKEoUOHAhAXF8eaNWuyepkFZVHiGoa62qFFU44c89IOLTy3w2tzF/Cv4ddTtbJTFxdVvyBgGRISlnLDkCGmHVpz5Iivdmht2mEIixc77dCje3dCQpxR1datWpGamscmQJFRGBNh+TTCewvwm6peCkwHnjJpI3H2z26MM0n434L+93IxAegFPGp+O6nqqz5TYBV2kSEilwIxwCQ374lASxG5BJgCXGH22nV1ZcJFZLmIfCsiT7vl1VNE1otIiojME5GKxn+viDwlIinAtQWRM/XgT9QKq5F1HhF2EakH/FomysO+/Wns/TGNbu1bBZw2LS2dWhHZJvUiIsJJS0vPEyciIjw7TnjOOI8+NolLGzZm7vvzGP/wQwHLkF1OGhE5ZIkgLS3Na5yQkBBCQ0M5dOhQgcsESDv4E7XCa2aXW7MGqQd+8pEiJ99+t49vv99Hp2tG0OHqYaxYsy5wGdLTqFUre9g0Itz3tYOpn/SccQDeevttevW8MmAZ8o/3nfrysdzPrxFec/6mOZ4PdBenJzAAmKuqJ1T1e2C3ya9QqOqnbm6Tqv6Wn3RWYRcdkcBW9zWU5ngrztP5AeAzVW2mqtNNlGbA9Tj77V4vIhcbqxbjgB6q2gJIAsa6lXNIVVuo6lz3ws3OX0kikvTzoXy1faF4LyGRa/p0p3TpouhsBM5jj45n91dfMPi6a3np5YBN95V4MjIz2f39D3w091VmPz+F2x+cyOEjR4tFlqeefobSpUMYfP31Z64QHzv1GX1d3XX/G3ebW+r8GOHNimMMHhzBMYobqAFf35chclREjhl3WkQyjDstIsf8pbcKu3j5SFWPqOpx4Eucsay2OMp/nYhsBUYYfxfvecpIVV9W1Zaq2vLCalVzhP33rfeyJgrDLryQ/ekHs8JS038ioqbvoQ9PvL94Bdf3z/9wyEsvv0Kb9h1p074jNWvWYL/b63Nqahrh4WE54oeHh5Gamt2TS03LGwfg+uuvJf7DxQHLn11OeI5X+dTUVMLDw73GycjI4OjRo1Sr5tfAdR7++9ZcYvpcR0yf66h5UXX2p2UPP6QeOEhEzYvynVdEzRr069GZMmXKUO/iCBrUq8O3e3/wm+6l/71Mm3YdaNOuAzVr1mT//v3ZMqT5vnYw9ROWHeft2e+wdPlyZr3+aqGGpvwhqsjpTK8OY9PRzQXlU1xVQ1W1Eo45ssFAWeNuAF7xl94q7KLjS6CZiGTVqTluZsI84W6SJBNnmaUAK01PvJmqRqrqLW7x/ghUsDuGX581Udi/ZxfeXpCAqrJhy3ZCK1X0O1adm6/2fM9vR47SrkXTfKe5/bZ/sPHztWz8fC1X9evLnHfnoqps3LSZ0MqhhNWsmSN+WM2aVAqtxMZNm1FV5rw7l359YwHYvTvb/mnCkqVcdlmDgOR3JyYmht27d7N3715OnjzJ/Pnz6du3b444sbGxzJ7tTPwtXLiQzp07F0g53TF8MMnL3id52fsM6NmV2a52SAm8HQb07MonGxwj4b/8+hvffr+P+rX9rgrj9n/exsb169i4fp1ph3dNO2wiNNRXO2wy7fAu/fo57ZC4ciXTpj/H/Pfe4/zz/e69X0gUOZ3h1fkhP0Z4s+KISAjO1qeH8pm2IPTKZcBgLtDTXyKrsIsIVd2Ns3mL+xKCcUCKCTsGVMpHVhuADmZMHBGpICKXFZWcsV07Uv/iCC7vMoDbH3ycFyY9mBUWEzs46/jfTz5HnXa9+fOv49Rp15vHnnspK+y9xSu47qpeBe5R9e7Vk3p169I4ujl3jvo/Zkx7NiusTfuOWcczpj3LHXeNpnF0c+rVq5c1Rjru0QnEtG5Hq7bt+eij1Ux9ekqB5ABnTHratGn079+f5s2bM3DgQCIjI5k4cWLW5OPIkSP59ddfiYqKYubMmUyaNMlPrv7p0/UK6tWOoGHnq7j9wYk8Pyl7HD6mz3VZxw88OZ26bXvy51/Hqdu2JxOnvwhAz87tqVa1Ck17DKTHkH8w5cExVKtaJSAZevfq5bRD02juvGs0M6Znr/Jo0y57OeeM6dO4485RNG4abdrB0Stj7rmXY7//Tr/+A2jTrgOjRt9doLrIN6renW+yjPCaLwwHk9dYwCKct1lwzHWtVmdmeREw2KwiqQc0ADYVwdUcFJFxRqZ6IjIe8Ls+VQo7223JxlhYfp5su2zrgbtU9bCIlMExAloNmAX8BrRU1btM2gRgqqquEZFuOLPU55l8xqnqIhHZa9L84kuOlk0jdeOid4r24gLkVHW/9kTPKBoENh3L/PRNcYtARjG3A0D5iqHJqtqyMHnENG+m69es8hp+XpULfZYhIrE4XxOWBl5X1ckiMhFIMv+tcsDbQHPgV2Cw2bsaEXkYx/BABnC3qi4rzLWYPKsBjwAdcd6q1wKPqarP2WyrsM9BrMK2CtvFOaWwP070Gn5e1RqFLqMkYD9Nt1gswY8q+B+rLjGY/UO8jimqaldP/lZhWyyWEoDC6dPFLURRcq/b8XnAQJyFB+/7SmQVtsViKRHkYzVIiUFVU3J5rReRjarqc1MYq7AtFkvwowqnve7rX+Iwk44uSuN8JV3ZXzqrsC0WS4lA9JwaEtmMM4atOKtP9uHsZ+ITq7AtFksJ4NyadFTVAi3fsQrbYrEEP6pwDvWwRaSzJ39V/UREYlQ12VO4VdgWiyXoEUAyz50eNuBpE3gBPgGGAVZhWyyWkkq+PkEvMahqfx9hXr/xtwrbYrEEP3puLesTkQo4ew25NhFfBUxSVZ+bu1mFfQ6iIeU4dVGR7RdVINL/LN7xxpOZp4q1fIDaxdwGAGm/nytK7twaw8axgPMrzn74AHfi7EN0s69EVmFbLJbgRxXO0ENYRC7A2We+LrAXuC63BRgRaQa8CITifJE4WVXfM2GzgM44Rg8ARqqqZ4vG2cSoahO387Eist2frHZ7VYvFUgJQNOOUV1dIHsAxJtIA+Mic5+ZPYLiqumw7Pici7vvZ3ue2h70/ZQ3O2uvc+H2FsArbYrEEP4rzpaM3Vzjc7Tm+CVydp3jVb1T1W3OcBvwEBG6qKZvH3BW+iFQGHvOXyCpsi8US9KgqeuqUV4dvm47+qKGqLuMBB4AaviKLSGscs1573Lwni8h2EZkuIud5Sep+PfGqetjt/IiqLvSXzo5hWyyWEoDfvUR+8WPAYBVQ00PQwzlKUVUR8bp+UETCcAwdjFDNmgV9EEfRlwVeBv4NTPQlbEGxCttisQQ/qoUaq1bVHt7CROSgiISparpRyD95iRcKLAEeVtUNbnm7eucnROQNcm6dWqTYIRGLxVICUPR0pldXSNztOY4APswdwdiCXAi8parzc4WFmV/BGf/eWViBvGF72BaLJfhRhcKvBvHGFOB9EbkFZ9e86wBEpCVwu6reavw6AdVEZKRJ51q+946IXIjzaflW4HZ/BVqLMxaL5dxFFT118gxlrYeA7h78k4BbzfFsYLaX9N0KUKxr2ESBOcCNbufvek
tkFbbFYgl+zjEDBu4WZ0Tkr1znx72lswrbYrGUAAo36Rjk7BWR+3DG0jsBx7xFtJOOFosl+DmzH84UN6OAFsAHQD987CdiFfbfDFVl7L330rhJU1q1bsOWLZ6/ok3ZsoWWrVrTuElTxt57L2q2tvxgwQJatGzJ+RUrkZyS245o/vjko5V0b9uCrq2ieXHGtDzhmz5fx1XdrqBBzaosXRSf5Z/64w9c1e0K+nbpQK+OrXln1msFKh/gs9Wr6N0hhp5tm/Hy83ll2Lx+HQOvvILGERewfHF8jrC0/T9y8/VXE3tFK/pe0Zr9P+wrsBwuEhMTiY6OJioqiqlTp+YJP3HiBMOGDSMqKopOnTqxb1/hy/xk9UqubNeCbq2jeWmmh3ZYv47+3a/g8rCqLMtVBwDHjh2lQ3RDJjzgaWvnokX1jH6aftYRkUtFZJqIPIrTo74JiFXVAa4vKj1hFXYRICL/JyLPuZ3/zyzUd52PEpGZIlJXRDwu+RGRiSLSwxzfLSLnnwlZV6xIZM/uPezcvo0XXnie0Xd73np39P/dzX/+8wI7t29jz+49JCauBKBxZCRz58yhY8cOBSo/MzOTRx+4hzfmfsCKdZtZvHA+3379VY444bVq8fTzL9L/mmtz+F9Yoybzl61iyZp1LFi+mpdmTufggXQCJTMzk4kP3sMrc+aT8Okmliz8gN25ZAiLqMWTM16kX9y1edL/e9Tt3HLHaJZ+tpn3l6+mWvXCfKHsyDNmzBji4+NJSUlh3rx57Nq1K0ecWbNmUaVKFXbu3MmoUaMYN25cocuc8O97eO3dD1i+djMJCzy0Q0Qtnp75IlcNzFsHAM9NeZzW7doXSo78o+daD/sD4EegAvACzj4iHic13bEKu2hYB7jfudFAZREpbc7bA5/7ykBVH1FVl5K/GzgjCjthSQI33DAEEaFN69YcOXKE9PQDOeKkpx/g2LGjtGndGhHhhhuGsDhhMQANGzbksssKvm3otpQk6tStT+269Shbtiz9rr6GlcuW5IhTq3YdGjWOopTkvD3Lli3Leec5X/2ePHmC06cLtt3m9i3J1K5Xn4vrODLEXj2Qj1bkleHyyCikVE4Zdn/9FZmZGXTo7CwMqFChIuXPL1xTJSUlcckll1CvniPPoEGDSEhIyBFnyZIlDB06FIC4uDjWrFmT9dZTELalJFGnXnY79I27hlXL89ZBw8ZRlCqVV03s3LaFX37+iY5d8iyuODP4/zS9pHFaVaer6v1AM1U9ST7+81ZhFw1bgctEpLzZxOUv4+faPrE9jlIHKC0ir4jIFyKSKCLlwdmiUUQGichoIBz4WEQ+NmE9RWS9iKSIyDwRqVhQQdPS0qlVq1bWeUR4OGnpaTnjpKcRER6RHScigrS0wHuynjiQnk5YRHb5YeHhHMxVvi/SUvfTp3M7OjSL5J+j7qZGzbCAZTiYnkaY2/XVDIvgYHr+rm/vd7upFFqZUTffSFyPjjz92DgyMwvXw0tLSyMiInd9p3mNExISQmhoKIcOHSpwmQcP5GyHmmH5b4fTp0/zxKMP88CEyQUuP2BU0YyTXl0JZLmI3GQ6dZkicml+ElmFXQSoagawBWgFtAU2AhuA9iISAYiq/miiNwD+Y7ZpPAxckyuvmUAa0FVVu4pIdRzLFD1UtQWQBIzNLYOI3Oba+ObnX345I9cZDIRH1GLZJ+v5eNNWFrw3h59/8vgV8RkjIyOD5I3ruf/Rx5m3fA0//rCXhe+9c1ZlKG5mv/EKXbr3zPHQOxvo6dNeXQnkTuBVnM5dA5y113f5S2SX9RUdn+P0pMsD64FvgYeAn8k5HPK92365yTibpvuiLRAJrHO+fKWsyT8HqvoyzsYzxLRokeNd+aX//Y833pgFQExMDPv3788KS01LIzwsPEde4WHhpKalZsdJTSU8PPCerCdqhoWRnppdfnpaGjVylZ8fatQM47KGkWze8Dmx/fPshuk7bVg46W7XdyA9lRph+bu+muERNGzchIvr1AOgR+9+bEveDDcEJEIOwsPDSU3NXd/hHuPUqlWLjIwMjh49SrVq1QpcZo2aOdvhQHr+22Hr5k1s3ried2a9yp9//M7Jk6c4v0JF7h/vd3fQAqOnldMnzxXrOaCqoQVJZ3vYRYdrHLsdjkLdhaNoc49fn3A7zsT/Q1OAlW6bo0eq6i2BCHb7P//Jxg3r2bhhPVdd1Y85c95FVdm4aROhoaGEheXcxCwsrCaVKoWycdMmVJU5c96lX99+gRTplabNY9j7/Xf8uG8vJ0+eJCH+A3r0js1X2vS0VI7/9RcARw7/RtLG9dS/tEHAMjRp1oJ93+1hv5FhafwCuvXMnwxNmrXg2NEj/GreYjas/ZRLLmsYsAzuxMTEsHv3bvbudeSZP38+ffv2zREnNjaW2bOdOamFCxfSuXNnzAO8QDRtHsO+77LbYcnCD+jeK391MO2l1/hsy5d8kryTByZMJu66wWdUWTvoOdXDFpHOnpy/dFZhFx3rcXrDF6rqT+rMCP2Mszn6Op8p83IMqGSONwAdXGNcIlJBRAo869e7Vy/q1atL4yZNufPOu5jx3PSssDZt22Udz3huOnfccSeNmzSlXv169OrVE4APFy3ikgaXsXHjJgYOvIar+g8IqPyQkBAmPPkMI66Lo2eHlvTtH8dlDRsxfcrjrFq+FIBtW5Jp37QhSxfHM+7e/6NXx9YA7P7ma+J6dyO2S3sGD4jlH3eOpmFk44DrICQkhPFPTOWWIQPpe0Ur+vS/mgYNGzHzqcmsXuHIsGNLMp2bN2LF4ngevf9u+nVqA0Dp0qW5/9FJjLy2P1d1aYeqcu3QEb6Ky5c806ZNo3///jRv3pyBAwcSGRnJxIkTsyYfR44cya+//kpUVBQzZ85k0qRJhS7z0SnPcNP1cfTq0JLYAU47POfWDtu3JNMhuiHLFscz/t7/o/cVrQtVZqFQOH0yw6srgdzj5h4BEgC/jSqFmWm25EREvgC2qOpQcz4BZ8+AKqqaISJ1gQRVjTLh9wIVVXWCsQuXoKrzRWQUznhWmhnH7gY8Bbg2Rh+nqou8yRHTooWuW/vZGbnG/FL8RniL/76uHVqmuEUICiO8l14Umuxrr+r80Kx2Tf3o/mFew6uPmlroMooTEakNPK+qPntAdgy7CDETie7nE4AJbud7gSi386luxyPdjp/HsaDsOl+NM6Fpsfw9UeX0qTPz8MmPEV4TLxPYYU5/UNX+xr8eMBeohjMvNcws08s3qvqDiFwuIqVV1euyIzskYrFYgh41CtubKyT5McIL8JfbXFJ/N/+ngOmqeinwGxDQHJMLVW3oS1mDVdgWi6WEcDrztFdXSPwa4fWGMVrQDXAZNchXehE5KiLHzK/LHTNha72ls0MiFosl+PG/rK+6iCS5nb9slrrmh/wa4S1nysgApqhqPM4wyGHzLQbAfsDvAnVfy/pUtaO3MKuwLRZL0KPgb/ne2TDCW0dVU0WkPrBaRHYAR3xL7lWeEKAp2avBwLF88yDOtxoed/eyCttisQQ/WrgPZ4rCCK+qpprf70RkDdAcZ
xOnKiISYnrZtYBUT+lzsQJnSNp97+uGOMv85uCYKsuDVdgWiyX4UQq82Vc+cBnhnYJ3I7xVgT9V9YTZLqID8LTpkX8MDMJZKeIxvQcuUNXmucpIUdWrfCWyk44WiyXoUVVOnzzl1RWSKcCVIvIt0MOcIyItReRVE6cRkCQi24CPccawvzRh/wbGishunDHt/GzUPsuD31v+EtketsViCX7O4DrsfBrh/Zzs3Tdzx/sOyNdnoCIyTFXfVtUZbn7lcXroccBzXhNje9gWi6VEoGjmaa+uBPG4iIQDiEgLEfkPsA3HRNgof4ltD9tisQQ9qkrmGephn2XG4KwwUSAUGA2M9vfBjAursM9FMk4Q8st3xSpCWPX6xVp+RhC8PJY5tLe4RSC8Wt3iFqFoUEpaT9ojqroAWCAiV+LYcXwKaCkir/uy5ejCKmyLxRL0qCqZJ0uk7UaPqOpKYKWxUHUj8I6InFDVK3ylswrbYrEEP+oYMTjXUNUjwH+B/4qIx0lNd6zCtlgsQY8q51QP2xOqusNfHKuwLRZL8KOKBsEe58WNVdgWiyX4+Rv0sPODVdgWiyXocZb1lfxVIoXFKmyLxVIiKIJ9r0s8VmFbLJagR0/D6ZNWYRf/1wUWi8XiFz1jFmdE5AIRWSki35rfqh7idBWRrW7uuIhcbcJmicj3bmHNCiWQD6zCtlgsQY8qZJ467dUVEr82HVX1Y5c9RxyTYH8CkcWKHQAAIABJREFUiW5R7nOz97i1sAJ5wypsi8US/OgZ3fwpUJuOg4BlqvpnYQsOFKuw/2aoKnc/OoWGnfrSvNc1pOz40mO88U/PpF7bK6nSqE0O/zfnfUhY887E9LmWmD7X8tq7HxRIhrH33kvjJk1p1boNW7Z47pCkbNlCy1atadykKWPvvRdVZx3uBwsW0KJlS86vWInklJSAywdYmZhI82bRRDeJ4tmpU/OEnzhxghHDhxHdJIqunTuxb59jAGT1Rx9xRYf2tGnViis6tOeTNWsKVL6qcvcjk2nYsRfNr7zaezs89Rz1WnejyuUxHsMXLE2kzMWRJG3bWSA5XCQmJhIdHU1UVBRTvdTHsGHDiIqKolOn7Po4Wzgfzpz26jA2Hd3cbQFkn1+bji4GA+/m8pssIttFZLqInBdA2QERsMIWkWYioiLS+0wI5KG8uiIS0N0oIiNF5AVzfLuIDC8iWSaISGqusawqgchtNkWfWRTyFITlH69l9/f72PVJAi8++Qh3jXvcY7y+PTrz+YdzPIZd268XycvmkbxsHrcMuSZgGVasSGTP7j3s3L6NF154ntF33+0x3uj/u5v//OcFdm7fxp7de0hMXAlA48hI5s6ZQ8eOHQIuGyAzM5N7xo5hwcJ4NienMH/ePL7atStHnLfenEWVKlXYtmMnd941ikfGjwOgWrVqvD9/Phs3b+Z/L7/CP269pUAyLP/4U6cdPlvOi089xl0PPeYxXt8ru/L54vc8hh37/Q+ef+1tWjdvWiAZXGRmZjJmzBji4+NJSUlh3rx57MpVH7NmOfWxc+dORo0axbhx4wpVZsCoknkq06vD2HR0czkM8IrIKhHZ6cENyFmMKo4JSY8YE2JNcEx8uXgQx7xXK+ACHIMGZ4SC9LCHAGvNb5FjjFMWGar6kqr6teQQANPdxqqaqerhAOVJUtXRRShPQCxa+TFDr7kKEaFti2iOHD1G+sGf88Rr2yKasBoXnhEZEpYkcMMNQxAR2rRuzZEjR0hPP5AjTnr6AY4dO0qb1q0REW64YQiLExYD0LBhQy677LICl5+UlET9+pdQr149ypYtyzWDBpGQkJAjzpKEJdxw41AAro6LY82aNagq0c2aERYWDkCjyEiOHz/OiRMn/r+9846Tqrz6+PdHEUTFrrBYQMSGiIrG8qpEQCEiIGA31jf2Hhux1xRjsMaosSZI8iYqiiUajRpNVNQYFWtEopGiURG70n7vH+cZGdZdWNiZnS3P9/O5n51b5j5n78yce+55TllsGcb/+WG+P3JYvT6Hcy65glOO+gHt29XPoHv22Wfp3n3+9di9putx7718//txPYYXXY8Gw+C5rnVZ5NvtAbY3rmG5C3gvKeKCQq6xp2NiT2Cc7W/a3Nie7uBr4Cbq2MxgSVgshS1JwB7AQURLnfZpe1dJr6XZ0n9JulXSAEl/TzOv30nHLSPpRklPS/pn4e6WLOLxkh4mnP61jX+QpDsk3Z/Oe3HRvoPT2E8T/dYK28+VdHJ6faikZyS9IOl2SR3S9pslXSHpCUmTJe2+mNelZ/qfnk+PRT2q7V8n/b9bSvqupHuKZLtF0uOS3pY0QtLFkiam/7FtOq5/ev/EdP2W+Bc67d3/skbV/ObRXTqtztT3Fvb9/Dbj/vQQmw0cyV5H/JB3pr276DdUl2HadNZYY435MlRVMW36tAWPmT6NLlVd5h/TpQvTpk2nFEyfNo0uayx47unVx582jTXSMW3atGH5jh358MMPFzjmrjvvpHfvTWm3BArzW59D59WZ+u57dX7/cxNfYcq0d9mlf9/FHvtbskybRpcu1a/1t69H4Zg2bdrQsYbrUU4KtURqW+pJoacjLLon4z5Uc4cUKXsR/u/6+acWwuJa2NsSLdjfBB4FBhftWxf4BfFosAGwL7AdcDJwejrmDOBh298BdgR+LmmZtG9zYHfbi/oGbgrsRTyW7CVpzXTBziMU9XbARrW89w7bW9ruDbwKFD/Pdk7v3ZXU060WTixyhzySth0BXJ5mkLcAphQOlrQ+0Vn5INvP1HC+7sSs81BgDPCI7V7Al8DgdFO8GdgrbW8DHFn9JJIOK/jvPpjx0ULErx+7DujLpL/fzz8fuJ3+22/DIT88o2xjNWZefeUVzj7rTC6/8soGH3vevHmccv7PuPisUxt87MpRu3VdghojdenpiKSuwJrAX6u9/1ZJE4GJwCpAzX7GErC4CnsfojMw6W+xW+Tftifange8TITJmPgnuqZjdgZGSXqeUPjtgbXSvgdtz6iDDH+x/bHtr4BXgLWBrYBHbb9vexZQs9MPNk7W7ESiBm3Pon132p6XGmsubNKh2CWyY9r2JHC6pNOAtW1/mbavStyt97P9Qi3n+1N6vJoItAbuT9sL12194tr+K22/Bdih+klsX1fw362y0oJhpFff8vtvJgk7rbYKU4qs4qnvvkeX1VdbyL+7ICuvuALt2i0FwP/uPYLnXnp1Ee8Irrn2Wrbaehu22nobOnXqxJQp39zTmDptGlXJzVCgqnMVU6dNnX/M1KlUVXWus5wLo3NVFVOnLHjuztXHr6piSjpmzpw5fPzJJ6y88srp+Cnss8/eXPvr61lnnbo3arj65rH0GTicPgOH02m1VRf8HKa/R5dOi5rrCj797HNefv0NBux5IOtuM4AJ/3yBEYccvcQTj1VVVUydWv1af/t6FI6ZM2cOnxRdj4bAhtnz5tW61O/c/tB2f9s9kutkRtr+rO0fFB33lu0uSccVv7+f7V7JxfJ925/VS6CFUGeFLak1MBI4W9JbwJXAIEnLpUOKHXnzitbnMT+jUsDI
IoW3lu3CL/7zNM5WRRbs0BpEKR5nLouXrXkzcEyyVM8jbhg1nVdJlosKsizspLbHEhbyl8B9kvqlXR8D/yEs99r4Op1jHjDb8x2DxdetXhx14N7fTBIO27kfY26/G9s89dwLdFxuucXyVRf7We9+8FE2WLdbnd53xOGHM+GpJ5nw1JMMGbIrY8f+DttMePppOnbsSOfOnRY4vnPnTiy3XEcmPP00thk79nfsOnjXOsu5MPr06cObb07irbfeYtasWdx+220MHjx4gWN2GbwLY28dA8Cd48bRt29fJDFz5kx2HzGS884/n2222Waxxj3qoH35xwPj+McD4xg2sD9jbr9riT6H5Tsux7svPsGkJx9i0pMPsdVmvbnjxl+yRe+NF0ueAn369GHSpPnX47aarscuuzBmTFyPcUXXo6EwMNe1Ly2FxbGw+wMv2l7TdlfbaxOP+sMX4xwPAMcmXw+SNqt+gO0JRQp9fB3POwHoK2nl5Pfdo5bjlgOmp2P2W9RJbZ9RFCxfK5LWASbbvoKwqAvT9rOI63OApH3r+L9U53Wgq6R10/r+fPuRrM58r9/2dFtrDTbYYTBHjDqPKy+c79Lo8735l23Uj0fTdasBfPHlV3TdagDnX3o1AFfdPJbeA4az+aDdueqmsdxwyeI//Q0aOJBu3brSs9cmHH30MVx+2aXf7Ntq6/lK8PLLLuWoo46mZ69N6LZONwYO3BmAu8aPp3uP9Zgw4WlGjBjJkKHDWBzatGnDJb8YzW7DhrLF5psxYuQINtxoIy684HzuvTcm2w448CBmzJhB714bc9WVV3De+RcAcN211zB58pv87Cc/Ydutt2Lbrbfi/f8u3hwAwPf67UC3tdZkg+0GccSpZ3PlRWd9s6/PwPk/qVEXXULXLXeMz2HLHTl/9FWLPdaiaNOmDaNHj2bo0KFsttlmjBgxgo022ojzzz//m8nHgw6K67HxxhtzxRVXcMEFF5RcjoVhYNY817q0FFTXmV5JNwETbF9TtG0o4U89ErjH9sZp+81p/bbk97nH9saKdu6XEb7wVsSj/q6SDgK2sH1MDeMWv3+B49Lk3SW2H5V0MBFeMxN4Hphl+xhJ5wKf2b5E0pHAqcD7hJJfzvZBxfKm835me9kaZDkXODS9v8BuRFzm/sBsIo5zX6LBZkHuFYAHgQuAT4CT0//9jWzVx60md3/gEsLifgY4Ms1I10ifTXp6wj2/r213gzAn93Sk/Yy3Ki0CsxtBT8cOHTr8w/YW9TlHtzZL+5zlu9a6/+AZr9V7jKZAnRV2pumQFXZW2AWai8Lu2qa9z+rYtdb9P/jo9RahsHO1vkwm0+gJl0ilpag8WWFnMplGjw1zszcgK+xMJtP4KUw6tnSyws5kMo2eQlhfSycr7Ewm0+ixs4UNWWFnMpkmgIHZ2YedFXYmk2kKOE86khV2JpNpAswju0Qgd5zJZDJNgYXUEanvZKSkPSS9LGmepFqTbyQNkvS6pEmSRhVt7yZpQtr+f5KWqp9EtZMVdiaTafSUuZbIS8AI4LHaDkjF734JfI8o37yPpEIZ558RVTzXBT5iwbLNJSUr7Ewm0+iJsD7XutTr3Partl9fxGHfASbZnpxKOP8eGJYK2fUDbkvH1aWJ7xKTfdjNkOcmvvJB27U3qW+X1FWAD0ohTxMdP8tQuvHXrq8Q7zPrgav99ioLOaS9pGeL1q9ztb6O9aQL8E7R+hSiDv/KwEzbc4q2d6FMZIXdDLFd72aMkp6tZDGdSo+fZWgc4xewXa+m35IeAjrVsOuM1NexSZAVdiaTafbYHlDPU0wl2oMVWCNt+xBYQVKbZGUXtpeF7MPOZDKZRfMM0CNFhCxF1MAfnzpEPQIUGncvqolvvcgKO1MbpfT/NcXxIcvQGMYvO5KGS5oCbAPcK+mBtL1K0n0AyXo+huia9SrwB9svp1OcBvxQ0iTCp31D2WTNDQwymUymaZAt7Ewmk2kiZIWdWYBCg+RMJtP4yAo78w2SlCZRkLRSpeVpDEjqXGkZKk2+iTcessLOfEORsj4UuEZSRcM+y1mTYSFjquj1YcCJktpXSoZKUH1854muRkOOw84sgKS9gZ2BU4uytxpq7A62v0ivhwMbAxcUW/7lpuimNTSNf5ntrxpi7DRu8VPOSGAWMMv2Aw0lQ9H4ewA9gH8DE2xPbigZMjWTLewWTg3W3IbASGCFtL91A8nREzhF0iZpU3dgOjSMhVe4DpJaJ8v+HGAAUdmzwShSlicRYWTdgXMlDWlIOSQdB5wCtAW2Bi5Nn1GmgmSF3YKpZs11Scr5XOBC4HpJa9ue20BKexawHjBS0jrAssBnBTmLZS71wNUs+OVTcZ/tgP8AJ5d6vNpkKPyV1AnY2vaOwPLA+8B9kjo0kCytiaeL/WyfR3wfHgP2k7RUpV02LZmssFswRcr6ROAqIkniQODnwHjgVknr2J5bLhkKP37bbxAJCFVECcs1gX6SNgMGSuovadVyWNtF1+EI4AZJPwaGECU3t5R0aanHLEbS8kX/VxdCQbeW9Ctgc2DP9BkMkdSjDOPXpIDXBg4GsP0+MBGosj0r+7QrR1bYLZzkqx1KKKcNgS1tf0pYVY8D10pqU27LVtJ+RJbYT4HehKLqSqQAH5iW5UotQ5Es3wf2AU4FNgP6J3/6TsQN42dlGrcVUVv5mDTJeWNSzq8C/YGTbH8l6WDgdNJTRykp+gyGSRoAtANOADaQdHw6bEVgFUll+wwyiyZnOrYwJPUH1rZ9Y1o/BJgNLE34rofa/jq5Q96WtIrtspb3TBb+HsDhtidKWgs4E3gLGGf7VUmty2zpH0rUi9iMUNy7EP7rZdPfVWy/VeIxuwMzCHfQJEDAprbfTb78kcSN9M/ERPDeRenQpRi/+IZ5ADCKuOavAA8n2a4FXidcJHvafqlU42cWnxwl0vL4mvBPz7b9W2AyMBr4OPlMCxNe60g6vgGUdRWhHIfY/jBVPfuPpJ8DF6RjJtmeXU45iJvWQ8CLtvulcY8k6kFfaLuklm2yVPcnupjMA64gbhT/C1xk+0XgRUmPpLdcZfvfJRy/WFmvAKwDfBf4Cjga6AvcDmwBrArMsf3fUo2fWTKywm4hpEdvbP9N0g8IV8fntu+Q9BwwRdIwYpJrP+CAcoT11RCi15qoU7w6UaqyEJXxPhEl0baUylrS6rbfS693A9YCxhIV1jYFlkmTfoOAI4B9y+Q3/1TSRUAvYCBwI6G0/yZpWds/krQn8Ibtf5Zy7GrK+lQiCmRz4HHbf5E0FtgX+AFwq+2/lnL8zJKTXSItDEk/BNYlwvaGAgcQj7/7AjsCnwKjy/HoW01RdABm254t6ULi8fsO229J2h8YTkQpfFnC8dcgomDGETemU4E30+vzgC+Ja7I18STyo1Jfh+quHUXT1xOB1wiFvRRREe41YFtgkO3XSilD0dg7E6F7RwEHAYcC29t+XVI3wh3z22xZNx6ywm5BpB/h3cAw229K2h64F9jf9l1pYnEp21+XYexiZX0SYVl2Bg4nir4PJuKeHwWGAbvZfqXEMlQRSmi
9tOxte6ak0wkf7dXpCaQN0CqF95UFSX2Bd2xPTp/L2USCSiEi5X+AV2z/p4Rj9gG62b5NUm/i5vWR7UPS/jMI5b2z7Zc1vyh/ppGQo0SaMTVEdnwCvAy8B2D7cUJBjJO0u4OSK+s0VkFZH0P4rA8HViNcEZ8S/uqzgScJq7JkylrSismynQY8n5auhM8Y2z8GXgTOkrSN7TmlVtaSekkak14fANwMXCXpPGAOYeGvRUSCLG/7/hIr61ZE9MffJK1h+wXiyWpFRVYpti9Kct0pqS1QtknezJKRfdjNmCIl2Q2Ymib1AG4iojIgHr2vAsoy+58syV1tn5I2rUS4YY4hIiNeJqz8PW3/qQzjtyVcC1tJmkFY9VcTrodN043qNts/lTSLSJYptQzrAB8RraTuBN4lLPoNiHjvY4ArgZ8APyRcM6UcX7bnAU9IWg34raTf2L4yKfLvSsL2ONtnSBrdAJO8mSUgu0SaIZJapR9oIcX4WKKN0VO2b1R01JhNKKf+wADb79R6wvrJ0pmIKb7G9qi0bQPgGmBH25b0EuEO2L1M7pgViQiQNZnvo12LmOzrA/zN9phSj5vGXhW4CLgPeILwU29me/20/zuEO2hlQmG/V2o3RMFvLul/CJ99H2JC8TZiwvUYYsJ1nO17apgYzjQSsoXdDClS1gOIR/8RxETj9yQtbXtgmnBaBbi8HMpa0jJAO9vTJW0ETEiK4DSiSel/gWGSliUU2Y9Lqawl9QI6AB0Jv3jBFXK0pLNS6OAdhJtgU0l3ORKGSs2nRMTLjrbvTKGC90i6xfaBtp9OPvMdiUnYkilrSVsDH9ielCzry9M4DxLujqOIqJyrCBfVM5Cr8zVqbOelmS3E3ER34sd4ddrWkZjUuxY4p8zj7wr8Cfgr4eqAcEW8A1yc1o8DxhAumY1KPP5uhKvlBuBp4BZgh3RNLgeuS8f1BvYkfMalvgabE1mjEFmCLwIXpPWVCd/9jUXHty+DDGcQSTA90voThXGAZYgnjEeBvSr9nc1LHT/TSguQlxJ9kMm9VW3b/kQixA5pfWliwm80kblXDjl2Jiy1nYjQvAeAjmlfZ8INc0Zabw2sVuLxt0w3gV5F235B1EnZmogO+XVS5M8Da5XhGqxKTOi9Qli0rYgypQ8RmaQQTzcPA7+q7fMrxXeByF6ckMa/MMlScIWuDPQD1qj09zcvdVuyD7sZUC1kbhdiYu9R21Mk7Uv4i3ez/bCkpYkf7BdlkKM3UX9kiO2/pvWbgKeA521fl5JSJgOX2D67DDIMIXzyx6cElM/SxNqlhGIaKakd4SZ61lF0quQoanCcBNxJpHYXamqvClxv+wNJqxAW75QSjlv8XVjR9keSfkS4P7oQTxjrAR8QTzwXugHrfWfqR/ZhNwOKfqDHElmKDxI1lI+zPVbSHOAhSd+1/VgZRXmFiDYZJOlNwjd6N2HhnZUU6OgUtdKxTDKsSPjtScq6EEt8vKSnJG1r+wngd6UeWNKGQE9H1MnlyY+/CnFdhhHRKh8QTyB/cYnT/muIde+dwih/ShSNOpX4bvyKKKQ1MyvrJkalTfy8LPkCdC96vQMRCdKOmPX/F/AXIkkGwj2xfhllaZ3+LkU86n8IHFW0vy/wN2ClMl+TFQiFeHTRtoLfdgyhUMsxbnvCon4IuDJt25Tw1a+T1n8BzCT8620prRukVdHrw9O1rkrrHZJ8RxEROz0q9Z3NS/2WnDjTBFHQDhgv6acADst5fyKudzfb6xF+2usk7eSIsX29XDI5NTpwJJwMTGNvVHRIF+BjShxjXEwKZ5xJJOFsm0IacZQnHQ70JBRmyXFYqtcAhwD/I+nXwPqE73i7dMxJRPz7YNuznbRpfVGUyL0iuX4garOMBrpKOgG4h6iLMpaYiM3Zi02U7BJpojhKoA4G7pA01/YZDp/1msQjOERkwgTKlBRTg0wFpT1bUUjqz5IuSzIcBxzqEtYGqWH8QuGoxwmldL6kgUQI4ZZEbZKpZRz/c+DzFFt9JrAJkRyzq6SZtsfbfrCUY6bwzLOBM4v+/6lEbZhViQiZ24lEndm2Lynl+JmGJU86NjGqJzVI6kKE0N1l+6wUe3suEf+7LrCH7UkNLGMhUWMporXUGkR9ipLWBqmDHMsSiUGfAZNsv90AYxb+91aEwu4HXAL8ETjQJfQZS+pHTOruZfuplFE5zPal6fX7jqqAgwmlPsS5kFOTJivsJkS1SaWjgHm2r0lZe/cSmWsXEIpiJ2B8Od0gi5C1oLjaAKuX07KtZfxWRRZng1LDTXVv4Bnbb5ZwjKWJDMrORNZiB6IK4V22f56OaU/USzmZUOq5+UATJyvsJkSREiyUw9zLqah9SgEfT4TznbKQ0zQYKnOXmMZOuVO8UzbnHkR52EFEqOSvi/YvR8SBv2R7crnkyDQc2YfdBFC09frM9gRJKxE1m08APpR0EDG5dXfafluqX/FBOZVFXWjJyhrKn+LtaKc2j2iW+xoRFQSAoj9kT+Bk50JOzYZsYTcBJB1OVJjbNintc4gwvk+J5IfPCf3wI+Uaxi0OST2J+HsTGZ19iA70hzpajWWaCVlhNxEUzXJHE4r6NeJRd6LtaZIOJHyVuwFfV9qyzpQfSSJirwsTnKsS8febEZmMJW8Akak82SXSyCn4QR1lUdsQBZUG2X5A0lJJkZ9IdE/JWWvNkGqTza1tz03rcyV9lyjZuiURvgdwUqUmmzPlJSvsRo5tJwvKjlocc4j45gG2n0llM/fK1lTzpVpkUHdJn9k+J332pwPnOkrTPi/ppewSa75kl0gjo6bIgqLokLVtv51cIDcBm2YfZcsgJSKdQ8RT/xh4zvZBRd+JioUxZhqOnJreiKj26DsiZekVMgh3BG6UtIHtW4jIgLL0X8w0LhSNKEYS9bTvIfzUW0i6vigZKFteLYCssBsRRcr6ROAU4O20viyR/HC17deSYr8l+ymbJ2lCsZgViASZbSR1SeGSmwI7S7oWcpeYlkJ2iTQyJHUlQrP2ctQybmV7nqTlbX/c0pNRmjvVnrI2IVLqv5C0PVHA6UHgzyk6qBXQNSfFtBzypGOFqcFn/QWRZtwN+KjIL1kFfJyVdfOmSFkfR7Qve1rSJ8DFRKecQ4D2ku60/S7RDCLTQsgukQpSzZpqL6lDKs7zBNEYtmvatx9wWko1zjRDUgZr4fXuwO5ED84uRNu1S4g637cSiTFlq3qYabxkl0iFqKasf0j4JFcmuoK0IpIg1iIsqJ3IiRDNFkk9gMHAtba/lDSIKIm7W1rOJIp6TSHK1M51GVq8ZRo/2SVSIYqU9QlEl/HhwB1Ep5B+RHeQHYHViaI+/66QqJkykiaUpxB1q3tIWsH2/SlJqg/RdX6GpHeJUgTLJFdIpgWSFXYDI2knoiPL58AfgHeJOhCHAu8R1tTjRDbjQ5WSM1N+ksvrTOA3th+TNIrwT8vRxLgH0QvzH0TnmuFZWbdssg+7AUmPuhcD04mJxVuAF4jPYQjRA/GXRK2Q30haWvPbPmWaH62J3pt7S9qI+G68DwxN68OJcL6hwBG2p1
dM0kyjIPuwG4gUojWeUMr3SWoLXEs0HXiSaNA6hogO6QFcZXtKpeTNlI9q8xeHEJOLEH7qacDxRI3rsbb/IWkpR6/MTAsnW28Nx3TCmuqd/JSzgeWAZYhQvunAXsCPgDFZWTdfqoXuHQE8S7gnTyQmmq8kelIOl7R0VtaZAtnCbgCKkl86E0kxzwIrAesQfsk5iv6HrYEOtj+soLiZBiC1+LoGuNj2y5I2JLrHdAd+RjTSbWv7gwqKmWlkZAu7AUjKulXyQR4O9CIiQQ5Lyrq17Vm2v8zKunlSPd3c0T2+HXB0Wn8VeIqoE3IE8FVW1pnqZIXdQCSl3db2NOAw4A3gEElVOXuxeVPNZ903TT5DNNGdLanQg7M98CJwYSqXmsksQHaJlIGaSl0WWndJ+g6wFPA6MeH4Z+AnuTRm86NgVRcp6yOI+HqAx4AbgE7AsYS1XUXEXU9seGkzTYGssMuIpM2BD4D3bH8tqS9wKXCa7QcldSL8lO9UVNBMWSiO7pDUj+gEMzgp8huJGPxfA28RvuuZtt+vlLyZxk92iZSQYj+lpKOBu4DzgOskLQN0Bc5PyrqN7Xezsm6eSFoXeEzSmmnTIKCXpC2TxX0ikcV6OrCm7Teyss4siqywS0jRo29/YG1ge+BcYAbwK+CPtu9U7mzeEphMZKz+VtKqRJeYPwD7S9rE9kzgJKIJRe7FmakT2SVSQjS/e/VEorLaEOKmuDrRgGBDYHfbn1VMyEyDkb4PFxCd7kcQYZtHEg0JbrH9XE0t4TKZ2sgWdj2pFq5l2+8Rlde6AwfbnmN7KjAa+CeRwZZpxiiRJpLPIiztO4C5wNXAbCIdvV0Fxcw0QbKFXQ+qhWvtA6wL/N32w5K2AP4P+LHtG9IxuVtMM6Xad6Gt7dlFCVNtgTOAvkQ2K8C8HGedWVyyhV0Pin6gRxP1q6cC10s6lSjqtAdwiaQD0vFZWTdTir4LxwJXpXmKeZI2BfoTMdcvAjcDH2RlnVntabzyAAAEWElEQVQSssJeAiT1lLR6er0RYTntTCQ+fEQ0IxhFuED6Eh1kMs0cSTsTtc1/lGLutwLuAz5PtWNOAA7MMfeZJSUr7MUk+az3AeamONtXiGSILYGRtvsA44iU46Nsv2h7UuUkzjQEKaZ+KLABUToXopDTwbYfh7DCc+hepj5khb0YJJ+kbZ9J/BhvlrRmerxdEShYTrOIzjG3V0jUTJmpFnPfNjUWuAx4ADhBUifbf7T9QGESsmLCZpoNedKxjlSbVFqeaIL6W+C/RIztF8DvgKWB1YjwvVcrJG6mgZB0IrAeUX3xLGAVIpxTwC9zYlSmlGSFXQeqKevjiE7Wo4j6D9cDnxHxtp8TMbcv2n6rMtJmGgpJ+wMHEmGcrwO/tz0q1Ys5gEiYOi9PNmdKRVbYi4GkQ4GDCet5WtrWBriKaEZwQvZRthwknQ48DGxBKO3dUs0YEb7sD23/t5IyZpoXuQlvHZDUGjCwHXAF0ErS8cDWRA3jY4Bfkq9ns6WmCoxESvlPgQ+BoSn2+iygle3zGlzITLMnW9i1IKkX8Intt4u27UuEZn0E/BX4D7CD7cMqI2WmoUkJUnOJWiFTgAeBy4kyudsBpwH7pOihTKakZIVdA5JWIpJgXgReIZT0J7YtaX2iXOpMSXsQBXwG2v64chJnykW1+YthhPvremJi8Uzg38A5xCRjR6J07ksVEjfTzMkKuxYk/QT4hIj4WA/4B/CA7b8nv/VewNnACNsvV07STLmopqx7AJsArzl6MO5E1IcZZfveVOhpuXzjzpSTrLBrIVnPpxMlUtsRvsp9gFMJf/ZkYLLtNyomZKZsVFPWxwKHEB3ufw1cY/vTpLRvAE63PaZy0mZaCnmSrBZs/1HSNsBwwle5PRFnuxJhcY9LyRKZZkiRsh5JlBrYBdgP6AHsIOmR1IjiAGIuI5MpO9nCroGCdSVpCFG/eEPgONt3S+pA/J6/rKyUmXKSXBwrEpPL79j+Xtp+EhGy9yfgfttfVE7KTEsjp6bXQFFB+XuIiIAnbN+d9n2RlXXzRFIvSWtDdLm3/SHwA2D9Qmdz278A3gZ2JCYaM5kGI1vYtVBUy7gn0Zfx7Byq1XypJTJolu0vU+biL4kWbxcXjrc9o2ICZ1ok2YddC0VJEp8RiRHvVVCcTJmxPUPSZcyPDLoVeE7S/bafkHQYMEbSHNujs7LOVIJsYdcBSUtnN0jzZyGRQaOAaUSvzlnFyVSZTEOSFXYdyI1SWw6SCr03pxCd7q8lwvl6EhPPuVZMpmJkl0gdyMq6+VN0U36Eb0cGLQ2Qn7IylSZb2JlMEanS3niiFMF+lZYnkykmh/VlMolCRyFSrfPUrzOTaTRkhZ3JJHJkUKaxk10imUwN5MigTGMkW9iZTM18VWkBMpnqZAs7k8lkmgjZws5kMpkmQlbYmUwm00TICjuTyWSaCFlhZzKZTBMhK+xMJpNpImSFnclkMk2ErLAzmUymifD/GeZtkZPGab8AAAAASUVORK5CYII=" - }, - "metadata": { - "application/vnd.databricks.v1+output": { - "addedWidgets": {}, - "arguments": {}, - "data": "/plots/20a813a2-9400-4146-8101-3af317e3089d.png", - "datasetInfos": [], - "metadata": {}, - "removedWidgets": [], - "type": "image" - } - }, - "output_type": "display_data" - } - ], - "source": [ - "races = [row[\"race\"] for row in df.groupBy(\"race\").count().select(\"race\").collect()]\n", - "dp_rows = feature_balance_measures.filter(F.col(\"FeatureName\") == \"race\").select(\"ClassA\", \"ClassB\", \"FeatureBalanceMeasure.dp\").collect()\n", - "race_dp_values = [(row[\"ClassA\"], row[\"ClassB\"], row[\"dp\"]) for row in dp_rows]\n", - "\n", - "race_dp_array = np.zeros((len(races), len(races)))\n", - "for class_a, class_b, dp_value in race_dp_values:\n", - " i, j = races.index(class_a), races.index(class_b)\n", - " dp_value = round(dp_value, 2)\n", - " race_dp_array[i, j] = dp_value\n", - " race_dp_array[j, i] = -1 * dp_value\n", - "\n", - "colormap = \"RdBu\"\n", - "dp_min, dp_max = -1.0, 1.0\n", 
- "\n", - "fig, ax = plt.subplots()\n", - "im = ax.imshow(race_dp_array, vmin=dp_min, vmax=dp_max, cmap=colormap)\n", - "\n", - "cbar = ax.figure.colorbar(im, ax=ax)\n", - "cbar.ax.set_ylabel(\"Demographic Parity\", rotation=-90, va=\"bottom\")\n", - "\n", - "ax.set_xticks(np.arange(len(races)))\n", - "ax.set_yticks(np.arange(len(races)))\n", - "ax.set_xticklabels(races)\n", - "ax.set_yticklabels(races)\n", - "\n", - "plt.setp(ax.get_xticklabels(), rotation=45, ha=\"right\", rotation_mode=\"anchor\")\n", - "\n", - "for i in range(len(races)):\n", - " for j in range(len(races)):\n", - " text = ax.text(j, i, race_dp_array[i, j], ha=\"center\", va=\"center\", color=\"k\")\n", - " \n", - "ax.set_title(\"Demographic Parity of Races in Adult Dataset\")\n", - "fig.tight_layout()\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "63884ff1-6fcf-491d-9c2a-46f0fa4bbc58", - "showTitle": false, - "title": "" - } - }, - "source": [ - "#### Interpret Feature Balance Measures\n", - "\n", - "Demographic Parity:\n", - "* When it is positive, it means that ClassA sees the positive outcome more than ClassB.\n", - "* When it is negative, it means that ClassB sees the positive outcome more than ClassA.\n", - "\n", - "---\n", - "\n", - "From the results, we can tell the following:\n", - "\n", - "For Sex:\n", - "* DP(Male, Female) = 0.1963 shows \"Male\" observations are associated with \">50k\" income label more often than \"Female\" observations.\n", - "\n", - "For Race:\n", - "* DP(Other, Asian-Pac-Islander) = -0.1734 shows \"Other\" observations are associated with \">50k\" income label less than \"Asian-Pac-Islander\" observations.\n", - "* DP(White, Other) = 0.1636 shows \"White\" observations are associated with \">50k\" income label more often than \"Other\" observations.\n", - "* DP(Asian-Pac-Islander, Amer-Indian-Eskimo) = 0.1494 shows \"Asian-Pac-Islander\" observations are associated with \">50k\" income label more often than \"Amer-Indian-Eskimo\" observations.\n", - "\n", - "Again, you can take mitigation steps to upsample/downsample your data to be less biased towards certain features and feature values.\n", - "\n", - "Built-in mitigation steps are coming soon." - ] - }, - { - "cell_type": "markdown", - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "471830e4-f77d-4567-8475-8dd398f3fae4", - "showTitle": false, - "title": "" - } - }, - "source": [ - "### Calculate Distribution Balance Measures\n", - "\n", - "Distribution Balance Measures allow us to compare our data with a reference distribution (i.e. uniform distribution). 
- { - "cell_type": "markdown", - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "471830e4-f77d-4567-8475-8dd398f3fae4", - "showTitle": false, - "title": "" - } - }, - "source": [ - "### Calculate Distribution Balance Measures\n", - "\n", - "Distribution Balance Measures allow us to compare our data with a reference distribution (i.e. the uniform distribution). They are calculated per sensitive column and don't use the label column.\n", - "\n", - "For example, let's assume we have a dataset with 9 rows and a Gender column, and we observe that:\n", - "* \"Male\" appears 4 times\n", - "* \"Female\" appears 3 times\n", - "* \"Other\" appears 2 times\n", - "\n", - "Assuming the uniform distribution:\n", - "$$ReferenceCount \\coloneqq \\frac{numRows}{numFeatureValues}$$\n", - "$$ReferenceProbability \\coloneqq \\frac{1}{numFeatureValues}$$\n", - "\n", - "Feature Value | Observed Count | Reference Count | Observed Probability | Reference Probability\n", - "- | - | - | - | -\n", - "Male | 4 | 9/3 = 3 | 4/9 = 0.44 | 3/9 = 0.33\n", - "Female | 3 | 9/3 = 3 | 3/9 = 0.33 | 3/9 = 0.33\n", - "Other | 2 | 9/3 = 3 | 2/9 = 0.22 | 3/9 = 0.33\n", - "\n", - "We can use distance measures to find out how far apart our observed and reference distributions of these feature values are (a short hand computation of a few of them follows this table). Some of these distance measures include:\n", - "\n", - "Measure | Description | Interpretation | Reference\n", - "- | - | - | -\n", - "KL Divergence | Measure of how one probability distribution is different from a second, reference probability distribution. Measure of the information gained when one revises one's beliefs from the prior probability distribution Q to the posterior probability distribution P. In other words, it is the amount of information lost when Q is used to approximate P. | Non-negative. 0 means P = Q. | [Link](https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence)\n", - "JS Distance | Measures the similarity between two probability distributions. Symmetrized and smoothed version of the Kullback–Leibler (KL) divergence. Square root of JS Divergence. | Range [0, 1]. 0 means the observed distribution matches the reference distribution exactly. | [Link](https://en.wikipedia.org/wiki/Jensen%E2%80%93Shannon_divergence)\n", - "Wasserstein Distance | This distance is also known as the earth mover’s distance, since it can be seen as the minimum amount of “work” required to transform u into v, where “work” is measured as the amount of distribution weight that must be moved, multiplied by the distance it has to be moved. | Non-negative. 0 means P = Q. | [Link](https://en.wikipedia.org/wiki/Wasserstein_metric)\n", - "Infinity Norm Distance | The distance between two vectors is the greatest of their differences along any coordinate dimension. Also called Chebyshev distance or chessboard distance. | Non-negative. 0 means same distribution. | [Link](https://en.wikipedia.org/wiki/Chebyshev_distance)\n", - "Total Variation Distance | Equal to half the L1 (Manhattan) distance between the two distributions: take the difference between the two proportions in each category, add up the absolute values of all the differences, and then divide the sum by 2. | Non-negative. 0 means same distribution. | [Link](https://en.wikipedia.org/wiki/Total_variation_distance_of_probability_measures)\n", - "Chi-Squared Test | The chi-squared test tests the null hypothesis that the categorical data has the expected frequencies in each category. | A small p-value gives evidence against the null hypothesis that the observed frequencies match the expected frequencies. | [Link](https://en.wikipedia.org/wiki/Chi-squared_test)"
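To make the uniform-reference idea above concrete, the sketch below recomputes the observed and reference probabilities from the 9-row Gender example and evaluates a few of the tabulated distances by hand. It is plain NumPy, independent of `synapse.ml`, and the numbers are purely illustrative.

```python
import numpy as np

# Observed counts from the worked example above: Male = 4, Female = 3, Other = 2.
counts = np.array([4.0, 3.0, 2.0])
observed = counts / counts.sum()                      # [0.44, 0.33, 0.22]
reference = np.full(len(counts), 1.0 / len(counts))   # uniform reference: [0.33, 0.33, 0.33]

# KL divergence D(observed || reference): non-negative, 0 only when the distributions match.
kl_divergence = float(np.sum(observed * np.log(observed / reference)))

# Total variation distance: half of the L1 distance between the two distributions.
total_variation = 0.5 * float(np.abs(observed - reference).sum())

# Infinity-norm (Chebyshev) distance: the largest per-category gap.
inf_norm_distance = float(np.abs(observed - reference).max())

print(kl_divergence, total_variation, inf_norm_distance)
```

These are the same per-column quantities that `DistributionBalanceMeasure`, used in the next cell, computes for each sensitive column.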
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "61a36af1-9b38-45a9-89b5-39b2d14093c4", - "showTitle": false, - "title": "" - } - }, - "outputs": [], - "source": [ - "from synapse.ml.exploratory import DistributionBalanceMeasure\n", - "\n", - "distribution_balance_measures = (\n", - " DistributionBalanceMeasure()\n", - " .setSensitiveCols(cols_of_interest)\n", - " .transform(df)\n", - ")\n", - "\n", - "# Sort by JS Distance descending\n", - "display(distribution_balance_measures.sort(F.abs(\"DistributionBalanceMeasure.js_dist\").desc()))" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "ad2c4353-664d-4117-a629-45f66e92a4bd", - "showTitle": false, - "title": "" - } - }, - "source": [ - "#### Visualize Distribution Balance Measures" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "32a10ac1-4d12-496c-97ea-3b52e5f61d15", - "showTitle": false, - "title": "" - } - }, - "outputs": [ - { - "data": { - "image/png": "<base64-encoded PNG output elided (distribution balance measures visualization)>"
TWVoUOHFjzz5Zdf5tRTT+Wggw6q+AzHCaKqsU7DXpGenq4LF+77aeKtR79WjqkpStZ9faMWdzSIZllA5SoPK4ui2HdiAIjIIlVNj/ZzbERkGIZhxJSoHhVuGIZR2bHRcvSxEZFhGIYRU0wQGYZhGDHFBJFhGIYRU0wQGYZhGDHFBJFhRIFf1iwi98mryX3iSrZ+Mq2Y/7x58zjiiCNITExk+vTpRfxKOkrg3Xff5YgjjqBLly4MGjSIvLw8AJ5//nlSU1Pp2rUrxx57LJ9//nnU87c3WFkUYmURGRNEhlHO6O58Nr39GAdfcCcthv4fPy19v9hRAocccggTJ07k4osvLnZ/pKMEdu/ezaBBg8jIyGDJkiW0atWKSZMmAdCmTRvef/99vvzyS26//XauuuqqCslnWbCyKCRSWezc8G2RMFWlLMKJqiASkdNEZIWIrBaR0RH8bxSRpSLyhYi8IyKtIsVjGJWJnetWktiwOdUb/h5JqM5BHY8vdpRA69atSU1NpVq1op9gSUcJbNy4kRo1anDooYcC0Lt3b1566SUAjj32WBo1agQUPZYgHrCyKCRSWfyy6pMiYapKWYQTNUEkIgnAeOB0oBMwUETCN15aDKSraiowHbg/WukxjIoib/tGEus3K7hOqNe01KMEgpR0lEDTpk3Jy8sjtKvI9OnTi2zOGSJ4LEE8YGVRSKSyyN+xsUz3HmhlEU40R0RHAatVdY2q7gQygCK7+qnqe6r6s7/8BEiOYnoMI+4p6SgBESEjI4MbbriBo446inr16pGQkFDk3vfee4+nn36asWPHxij15YuVRSEHellEUxAlAUHRnOPdSmII8EYkDxG5SkQWisjC0M62hhGvJNZrQt62wnqav31DxKMEIlHaUQLHHHMMH3zwAfPnz+f4448vUMcAfPHFFwwdOpQZM2bQpEmT8s3QfmBlUUikskioW7b0HWhlEU5cGCuIyCVAOvBAJH9VnaCq6aqa3qxZs0hBDCNuqNH8UPI2f8euLd+j+bv4adm8YkcJlERpRwn8+OOPAPz222+MHTuWYcOGAc6a6rzzzmPy5MlFGqF4wMqikEhlUTulR5nuPdDKIpxoCqJcoGXgOtm7FUFETgFuBfqp6m9RTI9hVAhSLYHGvYfx44tj+O6pP3HQYccVO0pgwYIFJCcnM23aNK6++mo6d+4MlH6UwAMPPEDHjh1JTU3lrLPO4qSTTgLgrrvuYuPGjVxzzTWkpaWRnh71zZLLjJVFIZHKokazVlWyLMKJ2jEQIpIIrAROxgmgBcDFqvpVIEw3nJHCaaq6qizx2jEQ5Ud5l8Uvaxax6Z0JsHs3dQ8/lc1zJxbxnzdvHtdffz1ffPEFGRkZ9O/fH4DMzEz+9Kc/sW3bNhISErj11lu56KKLABg3bhz/+te/+Prrr1m/fj1NmzYF3Mf3/PPPA05/vmzZMtavX7/PRy3bxpZFse+kkKpcNyr9MRCqmgdcC7wJLANeVNWvROQuEQmNzR8A6gLTRCRTRGZGKz1GdNmf9SJ16tThueee46uvvmL27Nlcf/31bNmyBYCePXsyZ84cWrUqatk/cuRIMjMzyczM5N577+WEE07YZyFkGEZsieoxEKr6OvB6mNuYwO9Tovl8o+IIrpEACtaLBI9Kbt26NUCxNRJB/XWLFi04+OCDWb9+PQ0bNqRbt257fPbUqVMZOHBgOeQiitzRIMrxb41u/OWJlUVRolkelaQs4sJYwaj87M96kSDz589n586dtGvXrkzhf/75Z2bPns3555+/188yDCM+sIPxjLhh3bp1XHrppUyaNKnYqKkkZs2aRc+ePU0tZxiVGBsRGeXC/qwXAdi2bRt9+/blnnvu4eijjy7zfRkZGfGvljMMo1RMEBnlwv6sF9m5cyfnnnsul112WYElXVnYunUr77//PmefffaeAxuGEbeYIDLKhf1ZL/Liiy8yb948Jk6cSFpaGmlpaWRmZgLw6KOPkpycTE5ODqmpqQwdOrTgmS+//DKnnnoqBx10UMVn2DCMcsPmiIxyo3a77iS1617E7a677ir43b1794g7AF9yySVccsklEeMcMWIEI0aMiOg3ePBgBg8evO8JNgwjLrARkWEYhhFTbERkRA9bH2EYRhmwEdF+Ejz697777ivm/9tvv3HRRReRkpJCjx49Co73zcrKonbt2gVzIqGNCsEt0OzatSupqamcdtppbNiwoUicDz30ECJSzN0wDKMyYoJoPwjf1mbq1KnFtrV5+umnadSoEatXr+aGG27glltuKfBr165dwTY1jz/+OOD2Tfvzn//Me++9xxdffEFqairjxo0ruCc7O5u33nqLQw45pGIyaRiGEWVMEO0H4Uf/DhgwoNgxyDNmzGDQoEEA9O/fn3feeYfSNppVVVSVn376CVVl27ZttGjRosD/hhtu4P7770dEopMpwzCMCsYE0X4Qvq1NcnJysW1tcnNzadnSnYaRmJhIgwYN2LjRHQ+8du1aunXrxgknnMAHH3wAQPXq1Xnsscfo2rUrLVq0YOnSpQwZMgRwQi0pKYnDDz+8IrJnGIZRIZggihHNmzfn22+/ZfHixTz88MNcfPHFbNu2jV27dvHYY4+xePFivvvuO1JTU7n33nv5+eef+cc//lHEHNowDONAwATRfhC+rU1OTk6xbW2SkpLIznYnpufl5bF161aaNGlCzZo1C47uPfLII2nXrh0rV64sWMjZrl07RIQLL7yQjz76iK+//pq1a9dy+OGH07p1a3JycjjiiCP4/vvvKyi3hmEY0cEE0X4Qvq1NRkZGsW1t+vXrx6RJkwCYPn06J510EiLC+vXryc/PB2DNmjWsWrWKtm3bkpSUxNKlSwuOBH777bfp2LEjXbt25ccffyQrK4usrCySk5P57LPP+P3vf1+xmTYMwyhnbB3RfhDc1gbdzVU3X1uwrU16ejr9+vVjyJAhXHrppaSkpNC4cWMyMjIAd1rpmDFjqF69OtWqVePxxx8v2EH6b3/7G8cffzzVq1enVatWTJw4MYa5NAzDiC4miPaT4LY2t97qjvwNzuPUqlWLadOmFbvv/PPPL/EMnWHDhhVZVxSJ0HokwzCMyo6p5gzDMIyYYiOi8sSOQDYMw9hrbERkGIZhxBQTRIZhGEZMMUFkGIZhxBQTRIZhGEZMMUFkGIZhxBQTRIZhGEZMiaogEpHTRGSFiKwWkdER/GuKyAve/1MRaR3N9BiGYRjxR9QEkYgkAOOB04FOwEAR6RQWbAiwWVVTgH8CY6OVHsMwDCM+ieaI6ChgtaquUdWdQAZwdliYs4FJ/vd04GSxE98MwzCqFFLaaaH7FbFIf+A0VR3qry8FeqjqtYEwS3yYHH/9tQ+zISyuq4Cr/GUHYEVUEr3/NAU27DFU1cHKoxAri0KsLIoSz+XRSlWb7TnY/lEptvhR1QnAhFinY0+IyEJVTY91OuIFK49CrCwKsbIoipVHdFVzuUDLwHWyd4sYRkQSgQbAxiimyTAMw4gzoimIFgDtRaSNiNQABgAzw8LMBAb53/2BdzVaukLDMAwj
Lomaak5V80TkWuBNIAF4RlW/EpG7gIWqOhN4GpgsIquBTThhVZmJe/VhBWPlUYiVRSFWFkWp8uURNWMFwzAMwygLtrOCYRiGEVNMEBmGYRgxxQSRYRiGEVNMEBn7hYjUE5Eon5FeeRCROn4pQpXHdklxiMjBItIh1umIZ0wQ7SV+D73Q7yr9oYlIfaAP0NVf14ttimKLiBwKXKKqebFOSywRkeoichTQKtZpiRXiCLWvvwNu9+5Hxi5V8Yv13PYSVc0HEJH2qroq1umpSEIflqru9v+3iUh3oKcXQveLyAtVpSH2nRINlQewBsjwjfARwNmqmh2zBFYgvlNWTVXzVXWXiJwMbBORHUCOqr4T4yRWCP4bUb8eMmSSXAvoKyKrgFdEZK2qbopZIuMQGxGVgIhUC/RoQm4iIjeJyEfAeBG5VkTaxSiJFYKIdBWRc0SkmqruVtXdXh13mYh0AfKBxsAjqvr8gS6EROQUEWkOrlPiy6OBiJwIdAd+xu2HeLWqZofXoQMJn28B3/IWdtKqAQ2BB4ErgV9jl8roIyKNRGSSiNTx34iKSDsReVBEzgOaA/OBt1V1pAmh4hywH8n+Emh0G4jICX4epAVwENALuAG4GLgkhsmMCiLSVESGiEhjQIDPfVkkiMhfgMXAKbiFyv8EpgJ5/t4DTl0pIh1E5Fh/2QbY6d2bi8jTwCLgVNyI6Hbc1lXLoHD0eKAgIgf5Tlp/4AKgtndvISIPi8hbwGDgJeA1YJSqfhizBEcREXlRRE5U1c3AaOAX734Z8DLwG7DCL96/GddRKaLeNxymmsNVjFBvLuDWBPgb0Bf3UX0GpAN/xDVGacB7wOSKTW30CJTDwcCxwC+qOkVEzhKRzsC7wJn+/KjgfZuBTiKSpKrh+wlWWkQk0Y/w+gPNRSQLmAicKyIvAW2B+mHl8YOI/BU4HnjdxyMHwtZVInIEcDXwgqpO90YZB/kR0D9wu+L/SVW/9uFXAseLyBpV/T5mCS9HfF6r+XqRBQzHtQOHAc8BvYGOwN3AW6q6RUSqA18D20XkFFWdE5PExzEmiCgy73MITp+9GzgcSFLVAtWb1/Em4irY5QH3Rr5XVKkJCOM8YC3eCAH4A/Cbqr4qIojI28BXuN7wG7htnP4MdPMGDFtUdV3Fpr78CagZ5wPn40bEu3AdlFXAFlyen8I1SjuBp4DZwAki8h7QQlW/rszCKJR2Vf3MdzoOw3VKHgOWAo/ihPJi4GAROQg3OnwDuByo4eOpVtlHiD79u8Ud8rkMlz9w5dBcRJrh6stNQD//PXwP3Am8ClznR0RNVHVKhWcgTqlSqrngvE/g/+/EHWn+GW7kM8oHTwe+9GGqe7dvcJu5tvHuvUXkcQob7EqNiPxeRF4HnscdbJgmIgcDbwFNRCQFd5jhaB9mJXAfTmjNAx7CncpbNwbJL3dE5DgRmYtTq5wA9FTV9cCHwAmq+hXunKwXge9w9eAfwLO4neRX4Aw4qlVWIQRu/kdEjhGRf+Is4bqISE3gf8AhQB3gYeBo4FzgHmCuqn6AmzO7Q0RmA91ikoFyRkQycKOfZkAtETlfVX/AqWivVNWXgX6qeglOACmubjyJU98NwXVcDE+VEkSBeZ/a/n8i8ApwGXAFcCZwtogcA2zFdQbrq+ouf/9PuCF3CxGZD4zE9QI/iUV+ykJozsYbWuxJN30m8J2qdsf1crfh5j4W4EYCx6nqBlVdhOvxbgA+Baqr6ovA8ap6UmWwJizjXNYfgVmq2geYA/Twvf0ZwHEi0kxV31XVt4D/4gTPYj8ZPQY4VlXPr+yjAK+SGwss8X9/wM13vI0zTU5X1VdUdaCqjlLVswD1nZi/A8txmx4vik0OyoZEMFCKEOYInDA5RlXvxwng6733dJwqHyBBRNL8dSqwXFW348z7L1TVj6KSiUrKASmIfKNbLG9+BPMi8I6IDMap2abhVExrfK/mFZylzzTgUGCUiDQWkRtF5E/AMnWnzJ6lqqeq6hPqjkKPO0SkPc7YoIhVU4RwoUa5FlDT//4ENxI6SlW34XTc7USkvjdY+Ag4GRirqr/4Z/xQUtnHE2VRk3kVSyKw2jv9C9ezTceVSz3gMBFJEpFJuBFhG5yQQlV/VNUcXx5xacCxp85JIN0dgO2q+jRwPzALJ2S/wxlmdBZn4HKEiDwhIpk4de1mVf1BVe/3HZW4JFQGoY5qCWFCZVEX6O5N1BNwGoHDxRkzvYGbM+sOtMN1WmsCA1Q1y4+M88oi8KoaB2Rh+EZ3N7gGRURqi0ht3ETiA7ie7jk4Pe6rOEEUWow5Fdfj+xk3F9AQZ/3TFXdeUr5vyH7Y04ccB9wAdAYQkZYi8ncReUxEWgQDBRrlXGCriBysqjtwRxiniluouRhnxNAUeEpVO6jqZaq6NDyueBsBhAtHr2pKEZHzQmUR4T3+hjNN3y3OaOFrnKA51Y+Qs3CCeDPwgKqmquqQ8El5Xx5xqZYLdk7891FkpBhItwCZvl7swqkh00SkIU491xNogvtWfgDOVdXbQpoEH29ctTUicokULksIlcGhIvKMiNwlIo2C4UNloarzcPNgXf199XHm6Vf6OcXZwCGq+omqnqmqt6rqN77NCK2/K1HgVVUqvbGChC2y9G4tceq2zrg83ojTZZ+I+4guxE00z1fVlSKSDxwpIutU9VsR2QYMUdXxIjLaN8oFBCql4hqruERVrxGRo/xH9RpOffQUsL6EWxbhyuYGEXkANwG9Cafbn4Y7R6pgTYgUX9AZN0hgYty/JxW36LYVrgM2GadKuwk395MfuFfULdZdjK8rIrIEZ8TRyo+WbgN2qOrPOHVVvJdHJMvQNsAI3O4Y74vI9ar6W8A/NHLcgFu2cBKQgVPZpuPUtm/gOiffq+oKnBFDgUALfCsxLxPfVogvh0+BXHUq+pa4fLyEGwEfAtwqImP8+w3dH7KifAIYLSKhObL38SdNq+rosGcmALvjtTMSN6hqpfvDn6MU5tYAZzKbgKsYd+PW+KwHzsD1ZrNwqoXksHtvAf4D1PPXh+I+PAmEScCZbcY8/3tZVrtxOuzbgR6+nBqG8hJelkAn4N+4xvVWnHVPqWUfT38l1I3jcXOAy3HC9r9AY++3HDg9Uhy4UfKlvj4twpnqNo11HstYDnX8/4h1FmeGPQs3l1UDmAuMLiFsTeAiXOM9FaeGHAG0j1T+8VZHSqgT1XFr4I711zuAG/3vdJywOT1YhoF6UQdnvPKyL8caYXFXunYi1n+VakQkhav7NeCWBFyD21LlR9ww+SdVvc37d8NNFi7F6fbXq9PdN8VZPM3BCaE++AVpqroy/NlawvxKvBLoAU/GWTN9ibNm+hYnlJ8G/hMsSwBVXSoiN2ugZxzmH3c9u2BvP5Q+EUnGWbt9hLPkexVnkn8MrvHtBryDs/67FHgjNAIIxaFucnmyiLyjbj4k7vEjkR44c/OR6nr8tXB5vAg3B/oibg7wj8AqVd0pIg8C14vIwxo25+nrwgsikoufB1M3b1jkucGyiyVeA7Al8B5DdeIc3Pt/SlVXedXiSbg6Mgl
XPwBycMY4PYE3tOjIGuBXVX0f10EJPbNgax+Ng9FfZSOu9LaRCNPth+Z9unsBA277jGG4RnUQ8HvgSxFp6/3n4uaGFPg/4Fhxq7/fwg2rc1U1V1Wf0QNre5rQxzAJ6KGq96jqKbhefQbQXkRC6zuKzHWFhJCIJAbnDOKV8E6COIOUccAmdZPkE4C6Pl9rcQsQ033wZ4GjRKRxoMEKL4/vvHtCPJaHiNQQkW4iUsvn4TNVHSmFu6IPxs2JjsWthZqM65gtwJvaq+qruNFysU05xe8mrqr/U9XJ6tSW1cK+zZgLICgQxH/FjVoQkVo+rRNxSw+ycWt5+lDYHtTGaQH6Aqib5/sKZ6ae7ONJKGEaoHrILV7KoDISV4Io0kce9tKPEZE3cZYq14nIUNyisleB9j7Yd7gPqqe//hynajtKVTOB84AxqnqEqg7TwE4A8djI7CuB3uC7uLUOfbxwHoUT3J/7nnCoJxuasO3u50BQ1bx4+rjCGz/vVktEhovIk77HC26kcyhu3Qa4+a0ufuT0Dc4CsJU48+scYCNOfUeE8ggZe4Q29Iyn8gjV1+rA6cCpvuHsKCLDcabF4EZI41T1bVX9K5ACJOG+nbYiEtol+wv8Ak3f8IasyQo6aIG5n7iYcC9BIKYAc0TkU9w2RJ1w2o4rcUYVpwIdVHUBTi15nKouB9aL2xsOXIflzUC8oX0FE0Skv4hc7N0LDDKMfSduBJG4RWHFPnIR6egbmvo4tcAEVT0Zt67lWu82C+gCoKoLceq2S8WZ1T6Dm4AMCSpR1U983AkRKvEBQ6BX/wJuHqwv0Ai4SFX/CwUWZO1F5E4R+RC4jjgzwAj2RMM6Jgm4fJ0ALMS98xtwPf7a+D3hfAdkK4VrPL7BjQRCo+pTVPUVH1bF7S33dxH5BLjGC7CYN7pQrGceqq+JOEH6DPA4bn5zBdDMh/0dsCsguD7DjZDe8X6h0eF9OFVlqOHN98+7UETeFpGj4+EbCY5YtXBtYH1xVnDH4FRr7XGWbJNxyxIuxKngEoC+qvqoj+5D3Po5cN/JyT7er1T1Md9RQdx+kxNwKs3DcBoVo7zQGE5Q4dQAKf73GOB3/ndH3B5e4AwN5vnfoQ0mP8P1drt59w44vXfPQNxH4TYlTcDNBY2NZV5jVL6hydUk4IcS/B7AWTr1AWrGOs17yE8qbj3PDf66MU6Xnxh456/ijDGmA9cE7r0LmON/18VtvVOkPHx8L+H2iDuFsEnoGOY7Dbebd9CtPm7ep0XgPc7DG5fgrLiex2kGrsLNgx7s/e4DBuDUV9cAqRGe2c3f/ylu4fYhsS6HCGkM1eF/40ZzTwOdvdsXgfbhSJyK+pSwMm3u/aYSZpwU9pzzcGrNw0sKY3/7+S5jVIGq+/9PAs/73/VwJtd1gZnA9d69Ga4nUx036TwWqBWIq4W/91/A3wLu1bwgmoZTzxX72KrKnxcyr1FoKVYNSPC/46WxPRg3lwVhVke4jsbzvjG9BdeLvdw3HrOAEwNxTMCtA+uHMzcPxdEI6LOHNCTirc1i/ecFY+gdtQG6+N81fcO7CLfX20Tc4mpwu2GM8L/r4SzbHsapnx7CCdm5/q9JCc8NWYgdjzMAimnDi+tIhlt2tgf+gtvvry1uJNg2LMwDwNOB6z8BmTjB+ypufuzIEso9JOCK/Le/KL7nmD7cVfYFvrJVx02wN8AZEXyEU7mkeSFTCzfZOAm3LqgHbhQ0yTesBwcrk/9/054an6rwh1vYem6s01FK+vrgNoa8qQT/zjhrv8v89SDf0ByF22x1knevg7MKa+Pr1KtAg2CdqIx/Pl8X43Y+B7czeiJORbQQeNW7D8btjB26rwfwAdDaX5+J2xUgPP64Nzem0By9NW7kNxU3Kvw9zuBkMW47oX/iLOFa4vZCrO/LqxpOBfkv4LzKWAYH8l+owa5QRKQvTpisweluL1fV/4rbcPNjVf27iIzAbZORjdvDrJ+4jRY7A0NxvaIZwGRVAZ7bgQAADLFJREFU3VrhmTD2GT9XUbDSXESuxS0W/R/OinFzWPhauHVhu1V1lLiFmKNxo7yPcT3813EN7xpguLp9ASsFJVhjNcSphK7CWXf9Dmdwc4GI1MVpB1riNt+cgFO/1sKpp97E7QySiZuof09V14Q/U+Nk3guKLTYtOG4BZ3J+HU6oTMN1Lh4Dmqnq+b4u/R7XeT0IVwdOxBlv/A2nYk3EzYuuDX9mPJVBlaa8JRtOlfanEvxCgu8JClUIzwNv+t9nAV/733VxPby3ccKofiCeuJ7LsL8S60aosQmvD9NxO128hbNmKlZvgNNwKtta3u0u3E7XdXBqt8uBXhHiToh1vveifGr4/01wa76WUTjnkYJTMTfAGeZMDJRFFnCb/30pThBdhleBh5dlrPNZhnJo7f/3wJnhN8et//kGt9lqb5xKtnZYudXHdVDG+uu6OIFVrB7GOo/2V/QvGlZzvwMGiF/nEzSJVlUVka64Hsx073wNbtPAxqo6C6gjIl1UdYeqTsQJoSkELLm0cJ1LPO/zZuAWEYbqghYeo5wkIjcBp/k1Kq/h1vaMVrdNTBHUtR7LcFvLhCycFuBM9Wuq6mZVfVZV5/pnFhy7oJVgIbIvo2nADBE5U1U34lRuuyjcjmmN/zsN18DWAPqIyNW40cIJAOrW+fRR1efUmxbHm2WoRFiPJe6E12dE5HOctgSccUp13MLkR3Cj33U4i8BNOIEE0EBEbsGVWVv8YZW+DVkftLLz7jYKijP2a2cFX8GfBP6rqq95599wjcRpOL2t4BaThvgKp8Ov49dsbBWRn3A9uUdwVjojcCoJVPWKkp5fGRoZg0xcnUDc5ql/xo2at+Mst3ap6rPiDho7TkS+1siq1vW4RZjtAXynpQhSuPNG3DU0ofVJgev2uM5VPZwaaRwujxNEZCvuG/ofbn7jFXUmyi/jjhE4y68XugtnuDFUVb8Ne15wt4m4Kg8tehBle1V9B6eCW4rbZmeLD9oUZ6n2PHCHup0u8KrJZcBA3Ch5I87oaWwJz1PibEmCUZT9GhH5Cp6NWzSGiKTiKs1q/r+984+1sq7j+OutUpo2QVeNYopCSWgiorNZFAuxtrZYU0M05/LXXFF/NNZcTsUVUASpzcwtRaycoW4qGKAgNQlMDJQrkoq/NjQyNZviug3p0x/v73Pvw+lcsjyXc+7189oYnOc853nOczjn+Xw/v94fz25RNPR9lMersf7ZYZJG4dXN1LLbhbhclPrr1GHqvcnbIyKexxVO4JDT2cDPIuJ8rNU1ufSIrcPVcR/q41D/BBZExNz6xgaPu+033MaVfkXxBOsLv6uwkvcmXAk4Fud3DsGl2i/gUFRd6WAVcISk4RFxR0SMi4ivV0aowfNp+4230ROpbT9GVje5G4ffwAZ3NHBu8RAn4KKErXj0yhtys/WPyv7LcBVl5WlX/T55rxiAtOI/bDFwvCx18W3cD3QnvnFMht1+INWKcAH+gt2Jk63zcT4I4JXGFXGUruYWvNekPXxQ0tkR8SC+mR5btt+PvaOjccHBweXfQE/XfN
W4uKtZSLYTQk0Vkqb09X4kXQLMljS8bFqLIwPg/MckvML/IvBJeQBfFzBW0lHQIzV0QkRsr35T9TBXp/1GwlTez1iVURM4F7wuIsZHxJyy7TIcYtwPFxsswXnDpcC3JK3BRQo7gF0R0VWLwtTPmfeKAUgrRE+fwMnS2dg7ug/3OmzGIYdVFAMUveKB24BrJP0mIp6uH6yTbixJy7iC0jOG80FTACLiIUnTsALyg5KeA94jaUhE7Gzwps/EvSJz6NXR6xiKtzNP0tyIuK0KE9Yqs5bgCtFLsSKIcJgarHKwJjw8bRrOoZ6FF3n74DlRAEREd73aqxM8H9hN+qcefhyFPeCv4mvdiEusnwWuLobpNeCtiJiP7yXVa4/Apfe3SdoAHBgRXY3nzPvF4OAdG6IScliME4TrsTtdKdPOlDS0ivmWL2vPIKrKCKmmXPtO30/SeUTEk7JszlDc1zJV0oTw6OitwPvLTemK2H0u0Gdx7uBEvDK+uRyv7d8TNcyZCU/evAkblZ5ppDWDsaWEldZKOgFr4f2t7PZH4FOSNuLcxww8K+t1bLh3o5NW/JUxqD4HSQdExD/kCcin4qKkMdgLvktSV0QsltSNizGGAXMkbcOfwxzsFf+eMh03PJSwOl/P594J34OkNbRqDMRv8WrnBjyf4wwcengZhx2W11Yvlat+ZJTehk76YSX9xnyc+/ihpJex+sEGLMlfn+SpsrgZiUM4NwIz2v0dqb2vRlHU4TicvBPnLS6UdHBjeLl4MW9KugrnQ4+gtyLuJvx7GRNFA3CgUH0muLrtMpzrOg2HXJdjbbZbcDXbUzh3vCEi7gYo+cHJWB+uu7zm4mjoJaudryM8wKS1tCSpV2LXT+O6/tNxOO69WKZnedmnLq75B2B6ySsl7w5+icNN4DLta6BXvbgxtBMRz0fEzIjY0AFGaDSWDOq58Uo6R9IK7LFcKeno4uE/g6sBG3NZ1TVUg+UmAk8Ww/ZWRGypjFA5/oBIuJdQ4ozwDKPD8aTj0VhuaV+sntIdEScBs3C4foSkIyWtx/eKF7Ec04sRsSgiXlMTpfVk8NLKwXi3YDkewjNgepBLVb+P3fAFwJzoY/BaMjiJiL+ot5/o1SbPd0yYpTGEjENIF0g6FDfQLsEqBt+NiI2S7sGlxhfhSrDpuGm7mQHdGRErJR0WpdKrft5ayKljPo//whbcDzYRfy5HAZ/AvU6fx8VIY8q+47HnNBYParworIz+H7R78ZHsXfpN4kc12RJ5QNeuiNjRLydLBgydnGBulnAv28/EMjov4PzNg9hQXISrPbcBkyPiIEmHYDWQaRHxdC2k1yNho9Lj08mfxf+CpDPwjKvLsVr+aqxw8C8cqp+Dy9AfBq6LiHUNr98t35a8+2ipIerrh5wknUoxEKdgaalnyrYPYMHcYXhFvxk4Hytgn1f2ORyPU5hVijG2Ad+JiFvlxtPHIuLyqgKwdr5BYXwaKQVL+wMrI+JaSStxs+4pOEz/VmPeLEkqWhqDzUqWZKBRQkAjcOXWF2TJnB9gCZ2lOKczDlgBDJN0YnnpybhX7o0ScnwDj2EHN2vPK8evcmCflnQ98Dv19hINJhbgardq+OA3gK+EZXZeDSuoZN4naUorc0RJMlA5HmvY7cI5npHA6SVp/lHcYLkU50Mm4RDTJqx+vQYrIMyk9MGEx5Ejq8VfgEN4a/H01K7BmP+IiPWSvoZV1ImIp5rsM+iuO2kNbRkDkSSdhKQxWHJnAVYzuAL4abj3ZzwwFzdmTsCezrNYE/FqrJW2uY/jHoSN1bNZnJMkfZMeUfKuJyKekHQHTqyfh8Nyp2AP6FGskTckIu6T9GFgR0RU6vGboXlTdinO+dPeu5L2M1hzYEn/kh5RkgCSPoIT7WOLh3QNlq6ahEdRz89myiTpH9IQJUlB0s1YEeAVLFf1EvB4RGxp2C8neyZJC0lDlCSFEnabjnXe1tS2Z1tCkvQjaYiSpA/SACXJ3iENUZI0kKG3JNm7pCFKkiRJ2kp2OSdJkiRtJQ1RkiRJ0lbSECVJkiRtJQ1RkjRBUkj6Ve3xfpJeLrOHkiRpIWmIkqQ5bwLHSDqgPJ6CJ4nudSSlFFcyqElDlCR9s4zesQbT8UgIACQdKGmhpPWSHpE0tWwfKWmNpI3lz8ll+3BJD0h6VNLmMtEUSTtqxzxd0qLy70WSrpf0EDBP0ihJKyRtKMevpp4myYAnV1pJ0je/Bi4v4bhjsebcxPLcpcDqiDhP0lBgvaRVwF+BKRHRXUZI3AqcAJwF3BsRs8tE0ve9jfOPAE4u01zvBy6OiK2STgKuAz7XwmtNkraRhihJ+iAiuiSNxN7QsoanTwW+JGlmebw/cBjwZ+BaScfh+UYfK88/DCyUNAS4KyIefRtv4fZihA7Cg/huL2IP4KmnSTIoSEOUJHtmCTAfq3AfWtsu4LSIeLK+s6RZWCx1HA59dwNExAOSPoNDfYsk/TgifgHUO8r3bzj3m+XvfYC/R8RxrbigJOk0MkeUJHtmIXBlRDzWsP1e4JuVHl0ZoAdwMLC9SASdA+xbnj8ceCkifg7cgKfCArwk6eNlntGXm72BiHgdeE7SGeVYkjSuZVeYJG0mDVGS7IGIeCEiftLkqe8BQ4AuSY+Xx+DczbmSNgFj6PVqJgGbJD0CTMPzjgAuAe4B1gHb9/BWzgbOL8d9HJj6f19UknQYqTWXJEmStJX0iJIkSZK2koYoSZIkaStpiJIkSZK2koYoSZIkaStpiJIkSZK2koYoSZIkaStpiJIkSZK28m+CQww8b0oV8QAAAABJRU5ErkJggg==" - }, - "metadata": { - "application/vnd.databricks.v1+output": { - "addedWidgets": {}, - "arguments": {}, - "data": "/plots/290402f0-7a7d-4689-865d-2fd43896e913.png", - "datasetInfos": [], - "metadata": {}, - "removedWidgets": [], - "type": "image" - } - }, - "output_type": "display_data" - } - ], - "source": [ - "distribution_rows = distribution_balance_measures.collect()\n", - "race_row = [row for row in distribution_rows if row[\"FeatureName\"] == \"race\"][0][\"DistributionBalanceMeasure\"]\n", - "sex_row = [row for row in distribution_rows if row[\"FeatureName\"] == \"sex\"][0][\"DistributionBalanceMeasure\"]\n", - "\n", - "measures_of_interest = [\"kl_divergence\", \"js_dist\", \"inf_norm_dist\", \"total_variation_dist\", 
\"wasserstein_dist\"]\n", - "race_measures = [round(race_row[measure], 4) for measure in measures_of_interest]\n", - "sex_measures = [round(sex_row[measure], 4) for measure in measures_of_interest]\n", - "\n", - "x = np.arange(len(measures_of_interest))\n", - "width = 0.35\n", - "\n", - "fig, ax = plt.subplots()\n", - "rects1 = ax.bar(x - width/2, race_measures, width, label=\"Race\")\n", - "rects2 = ax.bar(x + width/2, sex_measures, width, label=\"Sex\")\n", - "\n", - "ax.set_xlabel(\"Measure\")\n", - "ax.set_ylabel(\"Value\")\n", - "ax.set_title(\"Distribution Balance Measures of Sex and Race in Adult Dataset\")\n", - "ax.set_xticks(x)\n", - "ax.set_xticklabels(measures_of_interest)\n", - "ax.legend()\n", - "\n", - "plt.setp(ax.get_xticklabels(), rotation=20, ha=\"right\", rotation_mode=\"default\")\n", - "\n", - "def autolabel(rects):\n", - " for rect in rects:\n", - " height = rect.get_height()\n", - " ax.annotate('{}'.format(height),\n", - " xy=(rect.get_x() + rect.get_width() / 2, height),\n", - " xytext=(0, 1), # 1 point vertical offset\n", - " textcoords=\"offset points\",\n", - " ha='center', va='bottom')\n", - "\n", - "autolabel(rects1)\n", - "autolabel(rects2)\n", - "\n", - "fig.tight_layout()\n", - "\n", - "plt.show()" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "2498f850-584c-4e7f-a55e-a79fd27bda2e", - "showTitle": false, - "title": "" - } - }, - "source": [ - "#### Interpret Distribution Balance Measures\n", - "\n", - "Race has a JS Distance of 0.5104 while Sex has a JS Distance of 0.1217.\n", - "\n", - "Knowing that JS Distance is between [0, 1] where 0 means perfectly balanced distribution, we can tell that:\n", - "* There is a larger disparity between various races than various sexes in our dataset.\n", - "* Race is nowhere close to a perfectly balanced distribution (i.e. some races are seen ALOT more than others in our dataset).\n", - "* Sex is fairly close to a perfectly balanced distribution." - ] - }, - { - "cell_type": "markdown", - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "2d848302-5693-4329-b4a4-da428ae431ed", - "showTitle": false, - "title": "" - } - }, - "source": [ - "### Calculate Aggregate Balance Measures\n", - "\n", - "Aggregate Balance Measures allow us to obtain a higher notion of inequality. They are calculated on the global set of sensitive columns and don't use the label column.\n", - "\n", - "These measures look at distribution of records across all combinations of sensitive columns. For example, if Sex and Race are sensitive columns, it shall try to quantify imbalance across all combinations - (Male, Black), (Female, White), (Male, Asian-Pac-Islander), etc.\n", - "\n", - "Measure | Description | Interpretation | Reference\n", - "- | - | - | -\n", - "Atkinson Index | It presents the percentage of total income that a given society would have to forego in order to have more equal shares of income between its citizens. This measure depends on the degree of society aversion to inequality (a theoretical parameter decided by the researcher), where a higher value entails greater social utility or willingness by individuals to accept smaller incomes in exchange for a more equal distribution. An important feature of the Atkinson index is that it can be decomposed into within-group and between-group inequality. | Range [0, 1]. 0 if perfect equality. 1 means maximum inequality. 
In our case, it is the proportion of records for a sensitive columns’ combination. | [Link](https://en.wikipedia.org/wiki/Atkinson_index)\n", - "Theil T Index | GE(1) = Theil's T and is more sensitive to differences at the top of the distribution. The Theil index is a statistic used to measure economic inequality. The Theil index measures an entropic \"distance\" the population is away from the \"ideal\" egalitarian state of everyone having the same income. | If everyone has the same income, then T_T equals 0. If one person has all the income, then T_T gives the result (ln N). 0 means equal income and larger values mean higher level of disproportion. | [Link](https://en.wikipedia.org/wiki/Theil_index)\n", - "Theil L Index | GE(0) = Theil's L and is more sensitive to differences at the lower end of the distribution. Logarithm of (mean income)/(income i), over all the incomes included in the summation. It is also referred to as the mean log deviation measure. Because a transfer from a larger income to a smaller one will change the smaller income's ratio more than it changes the larger income's ratio, the transfer-principle is satisfied by this index. | Same interpretation as Theil T Index. | [Link](https://en.wikipedia.org/wiki/Theil_index)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "d08ea1fd-8a70-407e-a9ad-9b6cac5c65bf", - "showTitle": false, - "title": "" - } - }, - "outputs": [], - "source": [ - "from synapse.ml.exploratory import AggregateBalanceMeasure\n", - "\n", - "aggregate_balance_measures = (\n", - " AggregateBalanceMeasure()\n", - " .setSensitiveCols(cols_of_interest)\n", - " .transform(df)\n", - ")\n", - "\n", - "display(aggregate_balance_measures)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "0666d394-be89-470c-bc31-a9cd28f86264", - "showTitle": false, - "title": "" - } - }, - "source": [ - "#### Interpret Aggregate Balance Measures\n", - "\n", - "An Atkinson Index of 0.7779 lets us know that 77.79% of data points need to be foregone to have a more equal share among our features.\n", - "\n", - "It lets us know that our dataset is leaning towards maximum inequality, and we should take actionable steps to:\n", - "* Upsample data points where the feature value is barely observed.\n", - "* Downsample data points where the feature value is observed much more than others." - ] - }, - { - "cell_type": "markdown", - "metadata": { - "application/vnd.databricks.v1+cell": { - "inputWidgets": {}, - "nuid": "1a1fb46f-8c88-44fe-9177-76f09e07202e", - "showTitle": false, - "title": "" - } - }, - "source": [ - "### Summary\n", - "\n", - "Throughout the course of this sample notebook, we have:\n", - "1. Chosen \"Race\" and \"Sex\" as columns of interest in the Adult Census Income dataset.\n", - "2. Done preliminary analysis on our dataset. \n", - "3. 
Ran the 3 groups of measures that compose our **Data Balance Analysis**:\n", - " * **Feature Balance Measures**\n", - " * Calculated Feature Balance Measures to see that the highest Demographic Parity is in \"Sex\": Males see >50k income much more than Females.\n", - " * Visualized Demographic Parity of Races to see that Asian-Pac-Islander sees >50k income much more than Other, in addition to other race combinations.\n", - " * **Distribution Balance Measures** \n", - " * Calculated Distribution Balance Measures to see that \"Sex\" is much closer to a perfectly balanced distribution than \"Race\".\n", - " * Visualized various distribution balance measures to compare their values for \"Race\" and \"Sex\".\n", - " * **Aggregate Balance Measures**\n", - " * Calculated Aggregate Balance Measures to see that we need to forego 77.79% of data points to have a perfectly balanced dataset. We identified that our dataset is leaning towards maximum inequality, and we should take actionable steps to:\n", - " * Upsample data points where the feature value is barely observed.\n", - " * Downsample data points where the feature value is observed much more than others.\n", - " \n", - "**In conclusion:**\n", - "* These measures provide an indicator of disparity on the data, allowing for users to explore potential mitigations before proceeding to train. \n", - "* Users can use these measures to set thresholds on their level of \"tolerance\" for data representation.\n", - "* Production pipelines can use these measures as baseline for models that require frequent retraining on new data. \n", - "* These measures can also be saved as key metadata for the model/service built and added as part of model cards or transparency notes helping drive overall accountability for the ML service built and its performance across different demographics or sensitive attributes." - ] - } - ], - "metadata": { - "application/vnd.databricks.v1+notebook": { - "dashboards": [], - "language": "python", - "notebookMetadata": { - "pythonIndentUnit": 2 - }, - "notebookName": "AdultCensusIncome Tutorial", - "notebookOrigID": 4073163981188018, - "widgets": {} - }, - "language_info": { - "name": "python" - } - }, - "nbformat": 4, - "nbformat_minor": 0 -} diff --git a/notebooks/DataBalanceAnalysis - Adult Census Income.ipynb b/notebooks/DataBalanceAnalysis - Adult Census Income.ipynb new file mode 100644 index 0000000000..6138008538 --- /dev/null +++ b/notebooks/DataBalanceAnalysis - Adult Census Income.ipynb @@ -0,0 +1,641 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "f4e01a16-20fa-446a-9e3d-b560907b9ab2", + "showTitle": false, + "title": "" + } + }, + "source": [ + "## Data Balance Analysis using the Adult Census Income dataset\n", + "\n", + "In this example, we will conduct Data Balance Analysis (which consists on running three groups of measures) on the Adult Census Income dataset to determine how well features and feature values are represented in the dataset.\n", + "\n", + "This dataset can be used to predict whether annual income exceeds $50,000/year or not based on demographic data from the 1994 U.S. Census. 
The dataset we're reading contains 32,561 rows and 14 columns/features.\n", + "\n", + "[More info on the dataset here](https://archive.ics.uci.edu/ml/datasets/Adult)\n", + "\n", + "---\n", + "\n", + "Data Balance Analysis is relevant for overall understanding of datasets, but it becomes essential when thinking about building Machine Learning services out of such datasets. Having a well balanced data representation is critical when developing models in a responsible way, specially in terms of fairness. \n", + "It is unfortunately all too easy to build an ML model that produces biased results for subsets of an overall population, by training or testing the model on biased ground truth data. There are multiple case studies of biased models assisting in granting loans, healthcare, recruitment opportunities and many other decision making tasks. In most of these examples, the data from which these models are trained was the common issue. These findings emphasize how important it is for model creators and auditors to analyze data balance: to measure training data across sub-populations and ensure the data has good coverage and a balanced representation of labels across sensitive categories and category combinations, and to check that test data is representative of the target population.\n", + "\n", + "In summary, Data Balance Analysis, used as a step for building ML models has the following benefits:\n", + "* **Reduces risks for unbalanced models (facilitate service fairness) and reduces costs of ML building** by identifying early on data representation gaps that prompt data scientists to seek mitigation steps (collect more data, follow a specific sampling mechanism, create synthetic data, etc.) before proceeding to train their models. \n", + "* **Enables easy e2e debugging of ML systems** in combination with [Fairlearn](https://fairlearn.org/) by providing a clear view if for an unbalanced model the issue is tied to the data or the model. 
\n", + "\n", + "---\n", + "\n", + "Note: If you are running this notebook in a Spark environment such as Azure Synapse or Databricks, then you can easily visualize the imbalance measures using the built-in plotting features.\n", + "\n", + "Python dependencies:\n", + "* matplotlib==3.2.2\n", + "* numpy==1.19.2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "a51d55f3-8f47-47e6-8698-4b78e65f034d", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "import matplotlib\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import pyspark.sql.functions as F" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "5c7332a8-b256-4c57-a593-ab338f7ca623", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "df = spark.read.parquet(\"wasbs://publicwasb@mmlspark.blob.core.windows.net/AdultCensusIncome.parquet\")\n", + "display(df)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "267c342b-2770-4dff-aae3-aa75af24adef", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "# Convert the \"income\" column from {<=50K, >50K} to {0, 1} to represent our binary classification label column\n", + "label_col = \"income\"\n", + "df = df.withColumn(label_col, F.when(F.col(label_col).contains(\"<=50K\"), F.lit(0)).otherwise(F.lit(1)))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "5af3f65c-5f1b-4e11-9bc9-ffa2b00116ae", + "showTitle": false, + "title": "" + } + }, + "source": [ + "### Perform preliminary analysis on columns of interest" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "819cb707-a3fd-45c0-a3d9-96e54d4a7e6f", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "display(df.groupBy(\"race\").count())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "dd78d2b8-fefa-458b-bc23-629f7e763414", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "display(df.groupBy(\"sex\").count())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "0d14030f-1fd8-4c1e-8742-7ad7d2dea4d2", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "# Choose columns/features to do data balance analysis on\n", + "cols_of_interest = [\"race\", \"sex\"]\n", + "display(df.select(cols_of_interest + [label_col]))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "ae54d20f-f04a-4ffd-a442-e995955d922e", + "showTitle": false, + "title": "" + } + }, + "source": [ + "### Calculate Feature Balance Measures\n", + "\n", + "Feature Balance Measures allow us to see whether each combination of sensitive feature is receiving the positive outcome (true prediction) at equal rates.\n", + "\n", + "In this context, we define a feature balance measure, also referred to as the parity, for label y as the absolute 
difference between the association metrics of two different sensitive classes \\\\([x_A, x_B]\\\\), with respect to the association metric \\\\(A(x_i, y)\\\\). That is:\n", + "\n", + "$$parity(y \\vert x_A, x_B, A(\\cdot)) \\coloneqq A(x_A, y) - A(x_B, y) $$\n", + "\n", + "Using the dataset, we can see if the various sexes and races are receiving >50k income at equal or unequal rates.\n", + "\n", + "Note: Many of these metrics were influenced by this paper [Measuring Model Biases in the Absence of Ground Truth](https://arxiv.org/abs/2103.03417).\n", + "\n", + "Measure | Family | Description | Interpretation/Formula | Reference\n", + "- | - | - | - | -\n", + "Demographic Parity | Fairness | Proportion of each segment of a protected class (e.g. gender) should receive the positive outcome at equal rates. | As close to 0 means better parity. \\\\(DP = P(Y \\vert A = \"Male\") - P(Y \\vert A = \"Female\")\\\\). Y = Positive label rate. | [Link](https://en.wikipedia.org/wiki/Fairness_%28machine_learning%29)\n", + "Pointwise Mutual Information (PMI), normalized PMI | Entropy | The PMI of a pair of feature values (ex: Gender=Male and Gender=Female) quantifies the discrepancy between the probability of their coincidence given their joint distribution and their individual distributions (assuming independence). | Range (normalized) [-1, 1]. -1 for no co-occurences. 0 for co-occurences at random. 1 for complete co-occurences. | [Link](https://en.wikipedia.org/wiki/Pointwise_mutual_information)\n", + "Sorensen-Dice Coefficient (SDC) | Intersection-over-Union | Used to gauge the similarity of two samples. Related to F1 score. | Equals twice the number of elements common to both sets divided by the sum of the number of elements in each set. | [Link](https://en.wikipedia.org/wiki/S%C3%B8rensen%E2%80%93Dice_coefficient)\n", + "Jaccard Index | Intersection-over-Union | Similar to SDC, guages the similarity and diversity of sample sets. | Equals the size of the intersection divided by the size of the union of the sample sets. | [Link](https://en.wikipedia.org/wiki/Jaccard_index)\n", + "Kendall Rank Correlation | Correlation and Statistical Tests | Used to measure the ordinal association between two measured quantities. | High when observations have a similar rank and low when observations have a dissimilar rank between the two variables. | [Link](https://en.wikipedia.org/wiki/Kendall_rank_correlation_coefficient)\n", + "Log-Likelihood Ratio | Correlation and Statistical Tests | Calculates the degree to which data supports one variable versus another. Log of the likelihood ratio, which gives the probability of correctly predicting the label in ratio to probability of incorrectly predicting label. | If likelihoods are similar, it should be close to 0. | [Link](https://en.wikipedia.org/wiki/Likelihood_function#Likelihood_ratio)\n", + "t-test | Correlation and Statistical Tests | Used to compare the means of two groups (pairwise). | Value looked up in t-Distribution tell if statistically significant or not. 
| [Link](https://en.wikipedia.org/wiki/Student's_t-test)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "5dd892b3-b2e6-4fcb-8829-9c058fa4fd5e", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "from synapse.ml.exploratory import FeatureBalanceMeasure\n", + "\n", + "feature_balance_measures = (\n", + " FeatureBalanceMeasure()\n", + " .setSensitiveCols(cols_of_interest)\n", + " .setLabelCol(label_col)\n", + " .setVerbose(True)\n", + " .transform(df)\n", + ")\n", + "\n", + "# Sort by Demographic Parity descending for all features\n", + "display(feature_balance_measures.sort(F.abs(\"FeatureBalanceMeasure.dp\").desc()))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "46e1a9a7-97c7-437e-bead-eaf4c3b9e0d6", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "# Drill down to feature == \"sex\"\n", + "display(feature_balance_measures.filter(F.col(\"FeatureName\") == \"sex\").sort(F.abs(\"FeatureBalanceMeasure.dp\").desc()))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "d4bd77a1-3c10-4e16-9892-4ac920fb4432", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "# Drill down to feature == \"race\"\n", + "display(feature_balance_measures.filter(F.col(\"FeatureName\") == \"race\").sort(F.abs(\"FeatureBalanceMeasure.dp\").desc()))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "aaec9b6a-06c6-4afb-86c8-a7fbc3df92d7", + "showTitle": false, + "title": "" + } + }, + "source": [ + "#### Visualize Feature Balance Measures" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "969e62cd-bb6c-4417-9046-dd8aa6d0fa9e", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "races = [row[\"race\"] for row in df.groupBy(\"race\").count().select(\"race\").collect()]\n", + "dp_rows = feature_balance_measures.filter(F.col(\"FeatureName\") == \"race\").select(\"ClassA\", \"ClassB\", \"FeatureBalanceMeasure.dp\").collect()\n", + "race_dp_values = [(row[\"ClassA\"], row[\"ClassB\"], row[\"dp\"]) for row in dp_rows]\n", + "\n", + "race_dp_array = np.zeros((len(races), len(races)))\n", + "for class_a, class_b, dp_value in race_dp_values:\n", + " i, j = races.index(class_a), races.index(class_b)\n", + " dp_value = round(dp_value, 2)\n", + " race_dp_array[i, j] = dp_value\n", + " race_dp_array[j, i] = -1 * dp_value\n", + "\n", + "colormap = \"RdBu\"\n", + "dp_min, dp_max = -1.0, 1.0\n", + "\n", + "fig, ax = plt.subplots()\n", + "im = ax.imshow(race_dp_array, vmin=dp_min, vmax=dp_max, cmap=colormap)\n", + "\n", + "cbar = ax.figure.colorbar(im, ax=ax)\n", + "cbar.ax.set_ylabel(\"Demographic Parity\", rotation=-90, va=\"bottom\")\n", + "\n", + "ax.set_xticks(np.arange(len(races)))\n", + "ax.set_yticks(np.arange(len(races)))\n", + "ax.set_xticklabels(races)\n", + "ax.set_yticklabels(races)\n", + "\n", + "plt.setp(ax.get_xticklabels(), rotation=45, ha=\"right\", rotation_mode=\"anchor\")\n", + "\n", + "for i in range(len(races)):\n", + " for j in range(len(races)):\n", + " text = ax.text(j, i, race_dp_array[i, j], ha=\"center\", 
va=\"center\", color=\"k\")\n", + " \n", + "ax.set_title(\"Demographic Parity of Races in Adult Dataset\")\n", + "fig.tight_layout()\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![Demographic Parity of Races in Adult Dataset](https://mmlspark.blob.core.windows.net/graphics/responsible_ai/DataBalanceAnalysis_AdultCensusIncome_RacesDP.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "63884ff1-6fcf-491d-9c2a-46f0fa4bbc58", + "showTitle": false, + "title": "" + } + }, + "source": [ + "#### Interpret Feature Balance Measures\n", + "\n", + "Demographic Parity:\n", + "* When it is positive, it means that ClassA sees the positive outcome more than ClassB.\n", + "* When it is negative, it means that ClassB sees the positive outcome more than ClassA.\n", + "\n", + "---\n", + "\n", + "From the results, we can tell the following:\n", + "\n", + "For Sex:\n", + "* DP(Male, Female) = 0.1963 shows \"Male\" observations are associated with \">50k\" income label more often than \"Female\" observations.\n", + "\n", + "For Race:\n", + "* DP(Other, Asian-Pac-Islander) = -0.1734 shows \"Other\" observations are associated with \">50k\" income label less than \"Asian-Pac-Islander\" observations.\n", + "* DP(White, Other) = 0.1636 shows \"White\" observations are associated with \">50k\" income label more often than \"Other\" observations.\n", + "* DP(Asian-Pac-Islander, Amer-Indian-Eskimo) = 0.1494 shows \"Asian-Pac-Islander\" observations are associated with \">50k\" income label more often than \"Amer-Indian-Eskimo\" observations.\n", + "\n", + "Again, you can take mitigation steps to upsample/downsample your data to be less biased towards certain features and feature values.\n", + "\n", + "Built-in mitigation steps are coming soon." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "471830e4-f77d-4567-8475-8dd398f3fae4", + "showTitle": false, + "title": "" + } + }, + "source": [ + "### Calculate Distribution Balance Measures\n", + "\n", + "Distribution Balance Measures allow us to compare our data with a reference distribution (i.e. uniform distribution). They are calculated per sensitive column and don't use the label column.\n", + "\n", + "For example, let's assume we have a dataset with 9 rows and a Gender column, and we observe that:\n", + "* \"Male\" appears 4 times\n", + "* \"Female\" appears 3 times\n", + "* \"Other\" appears 2 times\n", + "\n", + "Assuming the uniform distribution:\n", + "$$ReferenceCount \\coloneqq \\frac{numRows}{numFeatureValues}$$\n", + "$$ReferenceProbability \\coloneqq \\frac{1}{numFeatureValues}$$\n", + "\n", + "Feature Value | Observed Count | Reference Count | Observed Probability | Reference Probabiliy\n", + "- | - | - | - | -\n", + "Male | 4 | 9/3 = 3 | 4/9 = 0.44 | 3/9 = 0.33\n", + "Female | 3 | 9/3 = 3 | 3/9 = 0.33 | 3/9 = 0.33\n", + "Other | 2 | 9/3 = 3 | 2/9 = 0.22 | 3/9 = 0.33\n", + "\n", + "We can use distance measures to find out how far our observed and reference distributions of these feature values are. Some of these distance measures include:\n", + "\n", + "Measure | Description | Interpretation | Reference\n", + "- | - | - | -\n", + "KL Divergence | Measure of how one probability distribution is different from a second, reference probability distribution. 
Measure of the information gained when one revises one's beliefs from the prior probability distribution Q to the posterior probability distribution P. In other words, it is the amount of information lost when Q is used to approximate P. | Non-negative. 0 means P = Q. | [Link](https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence)\n", + "JS Distance | Measuring the similarity between two probability distributions. Symmetrized and smoothed version of the Kullback–Leibler (KL) divergence. Square root of JS Divergence. | Range [0, 1]. 0 means perfectly same to balanced distribution. | [Link](https://en.wikipedia.org/wiki/Jensen%E2%80%93Shannon_divergence)\n", + "Wasserstein Distance | This distance is also known as the earth mover’s distance, since it can be seen as the minimum amount of “work” required to transform u into v, where “work” is measured as the amount of distribution weight that must be moved, multiplied by the distance it has to be moved. | Non-negative. 0 means P = Q. | [Link](https://en.wikipedia.org/wiki/Wasserstein_metric)\n", + "Infinity Norm Distance | Distance between two vectors is the greatest of their differences along any coordinate dimension. Also called Chebyshev distance or chessboard distance. | Non-negative. 0 means same distribution. | [Link](https://en.wikipedia.org/wiki/Chebyshev_distance)\n", + "Total Variation Distance | It is equal to half the L1 (Manhattan) distance between the two distributions. Take the difference between the two proportions in each category, add up the absolute values of all the differences, and then divide the sum by 2. | Non-negative. 0 means same distribution. | [Link](https://en.wikipedia.org/wiki/Total_variation_distance_of_probability_measures)\n", + "Chi-Squared Test | The chi-square test tests the null hypothesis that the categorical data has the given frequencies given expected frequencies in each category. | p-value gives evidence against null-hypothesis that difference in observed and expected frequencies is by random chance. 
| [Link](https://en.wikipedia.org/wiki/Chi-squared_test)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "61a36af1-9b38-45a9-89b5-39b2d14093c4", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "from synapse.ml.exploratory import DistributionBalanceMeasure\n", + "\n", + "distribution_balance_measures = (\n", + " DistributionBalanceMeasure()\n", + " .setSensitiveCols(cols_of_interest)\n", + " .transform(df)\n", + ")\n", + "\n", + "# Sort by JS Distance descending\n", + "display(distribution_balance_measures.sort(F.abs(\"DistributionBalanceMeasure.js_dist\").desc()))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "ad2c4353-664d-4117-a629-45f66e92a4bd", + "showTitle": false, + "title": "" + } + }, + "source": [ + "#### Visualize Distribution Balance Measures" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "32a10ac1-4d12-496c-97ea-3b52e5f61d15", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "distribution_rows = distribution_balance_measures.collect()\n", + "race_row = [row for row in distribution_rows if row[\"FeatureName\"] == \"race\"][0][\"DistributionBalanceMeasure\"]\n", + "sex_row = [row for row in distribution_rows if row[\"FeatureName\"] == \"sex\"][0][\"DistributionBalanceMeasure\"]\n", + "\n", + "measures_of_interest = [\"kl_divergence\", \"js_dist\", \"inf_norm_dist\", \"total_variation_dist\", \"wasserstein_dist\"]\n", + "race_measures = [round(race_row[measure], 4) for measure in measures_of_interest]\n", + "sex_measures = [round(sex_row[measure], 4) for measure in measures_of_interest]\n", + "\n", + "x = np.arange(len(measures_of_interest))\n", + "width = 0.35\n", + "\n", + "fig, ax = plt.subplots()\n", + "rects1 = ax.bar(x - width/2, race_measures, width, label=\"Race\")\n", + "rects2 = ax.bar(x + width/2, sex_measures, width, label=\"Sex\")\n", + "\n", + "ax.set_xlabel(\"Measure\")\n", + "ax.set_ylabel(\"Value\")\n", + "ax.set_title(\"Distribution Balance Measures of Sex and Race in Adult Dataset\")\n", + "ax.set_xticks(x)\n", + "ax.set_xticklabels(measures_of_interest)\n", + "ax.legend()\n", + "\n", + "plt.setp(ax.get_xticklabels(), rotation=20, ha=\"right\", rotation_mode=\"default\")\n", + "\n", + "def autolabel(rects):\n", + " for rect in rects:\n", + " height = rect.get_height()\n", + " ax.annotate('{}'.format(height),\n", + " xy=(rect.get_x() + rect.get_width() / 2, height),\n", + " xytext=(0, 1), # 1 point vertical offset\n", + " textcoords=\"offset points\",\n", + " ha='center', va='bottom')\n", + "\n", + "autolabel(rects1)\n", + "autolabel(rects2)\n", + "\n", + "fig.tight_layout()\n", + "\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "![Distribution Balance Measures of Sex and Race in Adult Dataset](https://mmlspark.blob.core.windows.net/graphics/responsible_ai/DataBalanceAnalysis_AdultCensusIncome_DistributionMeasures.png)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "2498f850-584c-4e7f-a55e-a79fd27bda2e", + "showTitle": false, + "title": "" + } + }, + "source": [ + "#### Interpret Distribution Balance Measures\n", + "\n", + "Race has a JS Distance of 0.5104 while Sex has a JS Distance 
of 0.1217.\n", + "\n", + "Knowing that JS Distance is between [0, 1] where 0 means perfectly balanced distribution, we can tell that:\n", + "* There is a larger disparity between various races than various sexes in our dataset.\n", + "* Race is nowhere close to a perfectly balanced distribution (i.e. some races are seen ALOT more than others in our dataset).\n", + "* Sex is fairly close to a perfectly balanced distribution." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "2d848302-5693-4329-b4a4-da428ae431ed", + "showTitle": false, + "title": "" + } + }, + "source": [ + "### Calculate Aggregate Balance Measures\n", + "\n", + "Aggregate Balance Measures allow us to obtain a higher notion of inequality. They are calculated on the global set of sensitive columns and don't use the label column.\n", + "\n", + "These measures look at distribution of records across all combinations of sensitive columns. For example, if Sex and Race are sensitive columns, it shall try to quantify imbalance across all combinations - (Male, Black), (Female, White), (Male, Asian-Pac-Islander), etc.\n", + "\n", + "Measure | Description | Interpretation | Reference\n", + "- | - | - | -\n", + "Atkinson Index | It presents the percentage of total income that a given society would have to forego in order to have more equal shares of income between its citizens. This measure depends on the degree of society aversion to inequality (a theoretical parameter decided by the researcher), where a higher value entails greater social utility or willingness by individuals to accept smaller incomes in exchange for a more equal distribution. An important feature of the Atkinson index is that it can be decomposed into within-group and between-group inequality. | Range [0, 1]. 0 if perfect equality. 1 means maximum inequality. In our case, it is the proportion of records for a sensitive columns’ combination. | [Link](https://en.wikipedia.org/wiki/Atkinson_index)\n", + "Theil T Index | GE(1) = Theil's T and is more sensitive to differences at the top of the distribution. The Theil index is a statistic used to measure economic inequality. The Theil index measures an entropic \"distance\" the population is away from the \"ideal\" egalitarian state of everyone having the same income. | If everyone has the same income, then T_T equals 0. If one person has all the income, then T_T gives the result (ln N). 0 means equal income and larger values mean higher level of disproportion. | [Link](https://en.wikipedia.org/wiki/Theil_index)\n", + "Theil L Index | GE(0) = Theil's L and is more sensitive to differences at the lower end of the distribution. Logarithm of (mean income)/(income i), over all the incomes included in the summation. It is also referred to as the mean log deviation measure. Because a transfer from a larger income to a smaller one will change the smaller income's ratio more than it changes the larger income's ratio, the transfer-principle is satisfied by this index. | Same interpretation as Theil T Index. 
| [Link](https://en.wikipedia.org/wiki/Theil_index)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "d08ea1fd-8a70-407e-a9ad-9b6cac5c65bf", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "from synapse.ml.exploratory import AggregateBalanceMeasure\n", + "\n", + "aggregate_balance_measures = (\n", + " AggregateBalanceMeasure()\n", + " .setSensitiveCols(cols_of_interest)\n", + " .transform(df)\n", + ")\n", + "\n", + "display(aggregate_balance_measures)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "0666d394-be89-470c-bc31-a9cd28f86264", + "showTitle": false, + "title": "" + } + }, + "source": [ + "#### Interpret Aggregate Balance Measures\n", + "\n", + "An Atkinson Index of 0.7779 lets us know that 77.79% of data points need to be foregone to have a more equal share among our features.\n", + "\n", + "It lets us know that our dataset is leaning towards maximum inequality, and we should take actionable steps to:\n", + "* Upsample data points where the feature value is barely observed.\n", + "* Downsample data points where the feature value is observed much more than others." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "inputWidgets": {}, + "nuid": "1a1fb46f-8c88-44fe-9177-76f09e07202e", + "showTitle": false, + "title": "" + } + }, + "source": [ + "### Summary\n", + "\n", + "Throughout the course of this sample notebook, we have:\n", + "1. Chosen \"Race\" and \"Sex\" as columns of interest in the Adult Census Income dataset.\n", + "2. Done preliminary analysis on our dataset. \n", + "3. Ran the 3 groups of measures that compose our **Data Balance Analysis**:\n", + " * **Feature Balance Measures**\n", + " * Calculated Feature Balance Measures to see that the highest Demographic Parity is in \"Sex\": Males see >50k income much more than Females.\n", + " * Visualized Demographic Parity of Races to see that Asian-Pac-Islander sees >50k income much more than Other, in addition to other race combinations.\n", + " * **Distribution Balance Measures** \n", + " * Calculated Distribution Balance Measures to see that \"Sex\" is much closer to a perfectly balanced distribution than \"Race\".\n", + " * Visualized various distribution balance measures to compare their values for \"Race\" and \"Sex\".\n", + " * **Aggregate Balance Measures**\n", + " * Calculated Aggregate Balance Measures to see that we need to forego 77.79% of data points to have a perfectly balanced dataset. We identified that our dataset is leaning towards maximum inequality, and we should take actionable steps to:\n", + " * Upsample data points where the feature value is barely observed.\n", + " * Downsample data points where the feature value is observed much more than others.\n", + " \n", + "**In conclusion:**\n", + "* These measures provide an indicator of disparity on the data, allowing for users to explore potential mitigations before proceeding to train. \n", + "* Users can use these measures to set thresholds on their level of \"tolerance\" for data representation.\n", + "* Production pipelines can use these measures as baseline for models that require frequent retraining on new data. 
\n", + "* These measures can also be saved as key metadata for the model/service built and added as part of model cards or transparency notes helping drive overall accountability for the ML service built and its performance across different demographics or sensitive attributes." + ] + } + ], + "metadata": { + "application/vnd.databricks.v1+notebook": { + "dashboards": [], + "language": "python", + "notebookMetadata": { + "pythonIndentUnit": 2 + }, + "notebookName": "AdultCensusIncome Tutorial", + "notebookOrigID": 4073163981188018, + "widgets": {} + }, + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/website/docs/examples/about.md b/website/docs/examples/about.md index 7ce693ae76..6993264ffb 100644 --- a/website/docs/examples/about.md +++ b/website/docs/examples/about.md @@ -25,6 +25,7 @@ sidebar_label: About - Train and evaluate a flight delay prediction system ([Regression - Flight Delays]) - Finding anomalous data access patterns using the Access Anomalies package of CyberML ([CyberML - Anomalous Access Detection]) - Model interpretation ([Interpretability - Tabular SHAP Explainer], [Interpretability - Image Explainers], [Interpretability - Text Explainers]) +- Do Data Balance Analysis to determine how well features and feature values are represented in your dataset ([DataBalanceAnalysis - Adult Census Income]) [Classification - Adult Census]: ../classification/Classification%20-%20Adult%20Census "Classification - Adult Census" @@ -47,9 +48,10 @@ sidebar_label: About [CyberML - Anomalous Access Detection]: ../CyberML%20-%20Anomalous%20Access%20Detection "CyberML - Anomalous Access Detection" -[Interpretability - Tabular SHAP Explainer]: ../model_interpretability/Interpretability%20-%20Tabular%20SHAP%20explainer "Interpretability - Tabular SHAP Explainer" +[Interpretability - Tabular SHAP Explainer]: ../responsible_ai/Interpretability%20-%20Tabular%20SHAP%20explainer "Interpretability - Tabular SHAP Explainer" -[Interpretability - Image Explainers]: ../model_interpretability/Interpretability%20-%20Image%20Explainers "Interpretability - Image Explainers" +[Interpretability - Image Explainers]: ../responsible_ai/Interpretability%20-%20Image%20Explainers "Interpretability - Image Explainers" -[Interpretability - Text Explainers]: ../model_interpretability/Interpretability%20-%20Text%20Explainers "Interpretability - Text Explainers" +[Interpretability - Text Explainers]: ../responsible_ai/Interpretability%20-%20Text%20Explainers "Interpretability - Text Explainers" +[DataBalanceAnalysis - Adult Census Income]: ../responsible_ai/DataBalanceAnalysis%20-%20Adult%20Census%20Income "DataBalanceAnalysis - Adult Census Income" diff --git a/website/docs/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.md b/website/docs/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.md new file mode 100644 index 0000000000..3ec25407d0 --- /dev/null +++ b/website/docs/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.md @@ -0,0 +1,339 @@ +--- +title: DataBalanceAnalysis - Adult Census Income +hide_title: true +status: stable +--- +## Data Balance Analysis using the Adult Census Income dataset + +In this example, we will conduct Data Balance Analysis (which consists on running three groups of measures) on the Adult Census Income dataset to determine how well features and feature values are represented in the dataset. 
+
+This dataset can be used to predict whether annual income exceeds $50,000/year based on demographic data from the 1994 U.S. Census. The dataset we're reading contains 32,561 rows and 14 columns/features.
+
+[More info on the dataset here](https://archive.ics.uci.edu/ml/datasets/Adult)
+
+---
+
+Data Balance Analysis is relevant for an overall understanding of datasets, but it becomes essential when thinking about building Machine Learning services out of such datasets. Having a well-balanced data representation is critical when developing models in a responsible way, especially in terms of fairness.
+It is unfortunately all too easy to build an ML model that produces biased results for subsets of an overall population, by training or testing the model on biased ground truth data. There are multiple case studies of biased models assisting in granting loans, healthcare, recruitment opportunities, and many other decision-making tasks. In most of these examples, the data on which these models were trained was the common issue. These findings emphasize how important it is for model creators and auditors to analyze data balance: to measure training data across sub-populations and ensure the data has good coverage and a balanced representation of labels across sensitive categories and category combinations, and to check that test data is representative of the target population.
+
+In summary, Data Balance Analysis, used as a step in building ML models, has the following benefits:
+* **Reduces the risk of unbalanced models (facilitates service fairness) and reduces the cost of ML building** by identifying data representation gaps early on, prompting data scientists to seek mitigation steps (collect more data, follow a specific sampling mechanism, create synthetic data, etc.) before proceeding to train their models.
+* **Enables easy e2e debugging of ML systems** in combination with [Fairlearn](https://fairlearn.org/) by providing a clear view of whether an unbalanced model's issue is tied to the data or to the model.
+
+---
+
+Note: If you are running this notebook in a Spark environment such as Azure Synapse or Databricks, you can easily visualize the imbalance measures using the built-in plotting features.
+
+Python dependencies:
+* matplotlib==3.2.2
+* numpy==1.19.2
+
+
+```python
+import matplotlib
+import matplotlib.pyplot as plt
+import numpy as np
+import pyspark.sql.functions as F
+```
+
+
+```python
+df = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/AdultCensusIncome.parquet")
+display(df)
+```
+
+
+```python
+# Convert the "income" column from {<=50K, >50K} to {0, 1} to represent our binary classification label column
+label_col = "income"
+df = df.withColumn(label_col, F.when(F.col(label_col).contains("<=50K"), F.lit(0)).otherwise(F.lit(1)))
+```
+
+### Perform preliminary analysis on columns of interest
+
+
+```python
+display(df.groupBy("race").count())
+```
+
+
+```python
+display(df.groupBy("sex").count())
+```
+
+
+```python
+# Choose columns/features to do data balance analysis on
+cols_of_interest = ["race", "sex"]
+display(df.select(cols_of_interest + [label_col]))
+```
+
+### Calculate Feature Balance Measures
+
+Feature Balance Measures allow us to see whether each combination of sensitive feature values receives the positive outcome (true prediction) at equal rates.
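+
+As a purely illustrative example (the numbers here are made up, not values from this dataset): if 30% of "Male" observations and 11% of "Female" observations received the positive label, the demographic parity (defined below) for the sex feature would be \\(0.30 - 0.11 = 0.19\\); values close to 0 indicate that the two groups receive the positive outcome at similar rates.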
+
+In this context, we define a feature balance measure, also referred to as the parity, for label y as the absolute difference between the association metrics of two different sensitive classes \\([x_A, x_B]\\), with respect to the association metric \\(A(x_i, y)\\). That is:
+
+$$parity(y \vert x_A, x_B, A(\cdot)) \coloneqq A(x_A, y) - A(x_B, y) $$
+
+Using the dataset, we can see whether the various sexes and races receive >50k income at equal or unequal rates.
+
+Note: Many of these metrics were influenced by the paper [Measuring Model Biases in the Absence of Ground Truth](https://arxiv.org/abs/2103.03417).
+
+Measure | Family | Description | Interpretation/Formula | Reference
+- | - | - | - | -
+Demographic Parity | Fairness | Each segment of a protected class (e.g. gender) should receive the positive outcome at an equal rate. | The closer to 0, the better the parity. \\(DP = P(Y \vert A = "Male") - P(Y \vert A = "Female")\\). Y = Positive label rate. | [Link](https://en.wikipedia.org/wiki/Fairness_%28machine_learning%29)
+Pointwise Mutual Information (PMI), normalized PMI | Entropy | The PMI of a pair of feature values (ex: Gender=Male and Gender=Female) quantifies the discrepancy between the probability of their coincidence given their joint distribution and their individual distributions (assuming independence). | Range (normalized) [-1, 1]. -1 for no co-occurrences. 0 for co-occurrences at random. 1 for complete co-occurrences. | [Link](https://en.wikipedia.org/wiki/Pointwise_mutual_information)
+Sorensen-Dice Coefficient (SDC) | Intersection-over-Union | Used to gauge the similarity of two samples. Related to F1 score. | Equals twice the number of elements common to both sets divided by the sum of the number of elements in each set. | [Link](https://en.wikipedia.org/wiki/S%C3%B8rensen%E2%80%93Dice_coefficient)
+Jaccard Index | Intersection-over-Union | Similar to SDC, gauges the similarity and diversity of sample sets. | Equals the size of the intersection divided by the size of the union of the sample sets. | [Link](https://en.wikipedia.org/wiki/Jaccard_index)
+Kendall Rank Correlation | Correlation and Statistical Tests | Used to measure the ordinal association between two measured quantities. | High when observations have a similar rank and low when observations have a dissimilar rank between the two variables. | [Link](https://en.wikipedia.org/wiki/Kendall_rank_correlation_coefficient)
+Log-Likelihood Ratio | Correlation and Statistical Tests | Calculates the degree to which data supports one variable versus another. Log of the likelihood ratio, which gives the probability of correctly predicting the label relative to the probability of incorrectly predicting it. | If likelihoods are similar, it should be close to 0. | [Link](https://en.wikipedia.org/wiki/Likelihood_function#Likelihood_ratio)
+t-test | Correlation and Statistical Tests | Used to compare the means of two groups (pairwise). | The value is looked up in a t-distribution to tell whether the difference is statistically significant.
| [Link](https://en.wikipedia.org/wiki/Student's_t-test) + + +```python +from synapse.ml.exploratory import FeatureBalanceMeasure + +feature_balance_measures = ( + FeatureBalanceMeasure() + .setSensitiveCols(cols_of_interest) + .setLabelCol(label_col) + .setVerbose(True) + .transform(df) +) + +# Sort by Demographic Parity descending for all features +display(feature_balance_measures.sort(F.abs("FeatureBalanceMeasure.dp").desc())) +``` + + +```python +# Drill down to feature == "sex" +display(feature_balance_measures.filter(F.col("FeatureName") == "sex").sort(F.abs("FeatureBalanceMeasure.dp").desc())) +``` + + +```python +# Drill down to feature == "race" +display(feature_balance_measures.filter(F.col("FeatureName") == "race").sort(F.abs("FeatureBalanceMeasure.dp").desc())) +``` + +#### Visualize Feature Balance Measures + + +```python +races = [row["race"] for row in df.groupBy("race").count().select("race").collect()] +dp_rows = feature_balance_measures.filter(F.col("FeatureName") == "race").select("ClassA", "ClassB", "FeatureBalanceMeasure.dp").collect() +race_dp_values = [(row["ClassA"], row["ClassB"], row["dp"]) for row in dp_rows] + +race_dp_array = np.zeros((len(races), len(races))) +for class_a, class_b, dp_value in race_dp_values: + i, j = races.index(class_a), races.index(class_b) + dp_value = round(dp_value, 2) + race_dp_array[i, j] = dp_value + race_dp_array[j, i] = -1 * dp_value + +colormap = "RdBu" +dp_min, dp_max = -1.0, 1.0 + +fig, ax = plt.subplots() +im = ax.imshow(race_dp_array, vmin=dp_min, vmax=dp_max, cmap=colormap) + +cbar = ax.figure.colorbar(im, ax=ax) +cbar.ax.set_ylabel("Demographic Parity", rotation=-90, va="bottom") + +ax.set_xticks(np.arange(len(races))) +ax.set_yticks(np.arange(len(races))) +ax.set_xticklabels(races) +ax.set_yticklabels(races) + +plt.setp(ax.get_xticklabels(), rotation=45, ha="right", rotation_mode="anchor") + +for i in range(len(races)): + for j in range(len(races)): + text = ax.text(j, i, race_dp_array[i, j], ha="center", va="center", color="k") + +ax.set_title("Demographic Parity of Races in Adult Dataset") +fig.tight_layout() +plt.show() +``` + +![Demographic Parity of Races in Adult Dataset](https://mmlspark.blob.core.windows.net/graphics/responsible_ai/DataBalanceAnalysis_AdultCensusIncome_RacesDP.png) + +#### Interpret Feature Balance Measures + +Demographic Parity: +* When it is positive, it means that ClassA sees the positive outcome more than ClassB. +* When it is negative, it means that ClassB sees the positive outcome more than ClassA. + +--- + +From the results, we can tell the following: + +For Sex: +* DP(Male, Female) = 0.1963 shows "Male" observations are associated with ">50k" income label more often than "Female" observations. + +For Race: +* DP(Other, Asian-Pac-Islander) = -0.1734 shows "Other" observations are associated with ">50k" income label less than "Asian-Pac-Islander" observations. +* DP(White, Other) = 0.1636 shows "White" observations are associated with ">50k" income label more often than "Other" observations. +* DP(Asian-Pac-Islander, Amer-Indian-Eskimo) = 0.1494 shows "Asian-Pac-Islander" observations are associated with ">50k" income label more often than "Amer-Indian-Eskimo" observations. + +Again, you can take mitigation steps to upsample/downsample your data to be less biased towards certain features and feature values. + +Built-in mitigation steps are coming soon. + +### Calculate Distribution Balance Measures + +Distribution Balance Measures allow us to compare our data with a reference distribution (i.e. 
the uniform distribution). They are calculated per sensitive column and don't use the label column.
+
+For example, let's assume we have a dataset with 9 rows and a Gender column, and we observe that:
+* "Male" appears 4 times
+* "Female" appears 3 times
+* "Other" appears 2 times
+
+Assuming the uniform distribution:
+$$ReferenceCount \coloneqq \frac{numRows}{numFeatureValues}$$
+$$ReferenceProbability \coloneqq \frac{1}{numFeatureValues}$$
+
+Feature Value | Observed Count | Reference Count | Observed Probability | Reference Probability
+- | - | - | - | -
+Male | 4 | 9/3 = 3 | 4/9 = 0.44 | 3/9 = 0.33
+Female | 3 | 9/3 = 3 | 3/9 = 0.33 | 3/9 = 0.33
+Other | 2 | 9/3 = 3 | 2/9 = 0.22 | 3/9 = 0.33
+
+We can use distance measures to find out how far apart the observed and reference distributions of these feature values are. Some of these distance measures include:
+
+Measure | Description | Interpretation | Reference
+- | - | - | -
+KL Divergence | Measure of how one probability distribution is different from a second, reference probability distribution. Measure of the information gained when one revises one's beliefs from the prior probability distribution Q to the posterior probability distribution P. In other words, it is the amount of information lost when Q is used to approximate P. | Non-negative. 0 means P = Q. | [Link](https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence)
+JS Distance | Measures the similarity between two probability distributions. Symmetrized and smoothed version of the Kullback–Leibler (KL) divergence. Square root of JS Divergence. | Range [0, 1]. 0 means the observed distribution matches the reference distribution. | [Link](https://en.wikipedia.org/wiki/Jensen%E2%80%93Shannon_divergence)
+Wasserstein Distance | This distance is also known as the earth mover’s distance, since it can be seen as the minimum amount of “work” required to transform u into v, where “work” is measured as the amount of distribution weight that must be moved, multiplied by the distance it has to be moved. | Non-negative. 0 means P = Q. | [Link](https://en.wikipedia.org/wiki/Wasserstein_metric)
+Infinity Norm Distance | The distance between two vectors is the greatest of their differences along any coordinate dimension. Also called Chebyshev distance or chessboard distance. | Non-negative. 0 means same distribution. | [Link](https://en.wikipedia.org/wiki/Chebyshev_distance)
+Total Variation Distance | It is equal to half the L1 (Manhattan) distance between the two distributions. Take the difference between the two proportions in each category, add up the absolute values of all the differences, and then divide the sum by 2. | Non-negative. 0 means same distribution. | [Link](https://en.wikipedia.org/wiki/Total_variation_distance_of_probability_measures)
+Chi-Squared Test | Tests the null hypothesis that the categorical data has the expected frequencies in each category. | A small p-value gives evidence against the null hypothesis that the difference between observed and expected frequencies is due to random chance.
| [Link](https://en.wikipedia.org/wiki/Chi-squared_test) + + +```python +from synapse.ml.exploratory import DistributionBalanceMeasure + +distribution_balance_measures = ( + DistributionBalanceMeasure() + .setSensitiveCols(cols_of_interest) + .transform(df) +) + +# Sort by JS Distance descending +display(distribution_balance_measures.sort(F.abs("DistributionBalanceMeasure.js_dist").desc())) +``` + +#### Visualize Distribution Balance Measures + + +```python +distribution_rows = distribution_balance_measures.collect() +race_row = [row for row in distribution_rows if row["FeatureName"] == "race"][0]["DistributionBalanceMeasure"] +sex_row = [row for row in distribution_rows if row["FeatureName"] == "sex"][0]["DistributionBalanceMeasure"] + +measures_of_interest = ["kl_divergence", "js_dist", "inf_norm_dist", "total_variation_dist", "wasserstein_dist"] +race_measures = [round(race_row[measure], 4) for measure in measures_of_interest] +sex_measures = [round(sex_row[measure], 4) for measure in measures_of_interest] + +x = np.arange(len(measures_of_interest)) +width = 0.35 + +fig, ax = plt.subplots() +rects1 = ax.bar(x - width/2, race_measures, width, label="Race") +rects2 = ax.bar(x + width/2, sex_measures, width, label="Sex") + +ax.set_xlabel("Measure") +ax.set_ylabel("Value") +ax.set_title("Distribution Balance Measures of Sex and Race in Adult Dataset") +ax.set_xticks(x) +ax.set_xticklabels(measures_of_interest) +ax.legend() + +plt.setp(ax.get_xticklabels(), rotation=20, ha="right", rotation_mode="default") + +def autolabel(rects): + for rect in rects: + height = rect.get_height() + ax.annotate('{}'.format(height), + xy=(rect.get_x() + rect.get_width() / 2, height), + xytext=(0, 1), # 1 point vertical offset + textcoords="offset points", + ha='center', va='bottom') + +autolabel(rects1) +autolabel(rects2) + +fig.tight_layout() + +plt.show() +``` + +![Distribution Balance Measures of Sex and Race in Adult Dataset](https://mmlspark.blob.core.windows.net/graphics/responsible_ai/DataBalanceAnalysis_AdultCensusIncome_DistributionMeasures.png) + +#### Interpret Distribution Balance Measures + +Race has a JS Distance of 0.5104 while Sex has a JS Distance of 0.1217. + +Knowing that JS Distance is between [0, 1] where 0 means perfectly balanced distribution, we can tell that: +* There is a larger disparity between various races than various sexes in our dataset. +* Race is nowhere close to a perfectly balanced distribution (i.e. some races are seen ALOT more than others in our dataset). +* Sex is fairly close to a perfectly balanced distribution. + +### Calculate Aggregate Balance Measures + +Aggregate Balance Measures allow us to obtain a higher notion of inequality. They are calculated on the global set of sensitive columns and don't use the label column. + +These measures look at distribution of records across all combinations of sensitive columns. For example, if Sex and Race are sensitive columns, it shall try to quantify imbalance across all combinations - (Male, Black), (Female, White), (Male, Asian-Pac-Islander), etc. + +Measure | Description | Interpretation | Reference +- | - | - | - +Atkinson Index | It presents the percentage of total income that a given society would have to forego in order to have more equal shares of income between its citizens. 
This measure depends on the degree of society aversion to inequality (a theoretical parameter decided by the researcher), where a higher value entails greater social utility or willingness by individuals to accept smaller incomes in exchange for a more equal distribution. An important feature of the Atkinson index is that it can be decomposed into within-group and between-group inequality. | Range [0, 1]. 0 if perfect equality. 1 means maximum inequality. In our case, it is the proportion of records for a sensitive columns’ combination. | [Link](https://en.wikipedia.org/wiki/Atkinson_index) +Theil T Index | GE(1) = Theil's T and is more sensitive to differences at the top of the distribution. The Theil index is a statistic used to measure economic inequality. The Theil index measures an entropic "distance" the population is away from the "ideal" egalitarian state of everyone having the same income. | If everyone has the same income, then T_T equals 0. If one person has all the income, then T_T gives the result (ln N). 0 means equal income and larger values mean higher level of disproportion. | [Link](https://en.wikipedia.org/wiki/Theil_index) +Theil L Index | GE(0) = Theil's L and is more sensitive to differences at the lower end of the distribution. Logarithm of (mean income)/(income i), over all the incomes included in the summation. It is also referred to as the mean log deviation measure. Because a transfer from a larger income to a smaller one will change the smaller income's ratio more than it changes the larger income's ratio, the transfer-principle is satisfied by this index. | Same interpretation as Theil T Index. | [Link](https://en.wikipedia.org/wiki/Theil_index) + + +```python +from synapse.ml.exploratory import AggregateBalanceMeasure + +aggregate_balance_measures = ( + AggregateBalanceMeasure() + .setSensitiveCols(cols_of_interest) + .transform(df) +) + +display(aggregate_balance_measures) +``` + +#### Interpret Aggregate Balance Measures + +An Atkinson Index of 0.7779 lets us know that 77.79% of data points need to be foregone to have a more equal share among our features. + +It lets us know that our dataset is leaning towards maximum inequality, and we should take actionable steps to: +* Upsample data points where the feature value is barely observed. +* Downsample data points where the feature value is observed much more than others. + +### Summary + +Throughout the course of this sample notebook, we have: +1. Chosen "Race" and "Sex" as columns of interest in the Adult Census Income dataset. +2. Done preliminary analysis on our dataset. +3. Ran the 3 groups of measures that compose our **Data Balance Analysis**: + * **Feature Balance Measures** + * Calculated Feature Balance Measures to see that the highest Demographic Parity is in "Sex": Males see >50k income much more than Females. + * Visualized Demographic Parity of Races to see that Asian-Pac-Islander sees >50k income much more than Other, in addition to other race combinations. + * **Distribution Balance Measures** + * Calculated Distribution Balance Measures to see that "Sex" is much closer to a perfectly balanced distribution than "Race". + * Visualized various distribution balance measures to compare their values for "Race" and "Sex". + * **Aggregate Balance Measures** + * Calculated Aggregate Balance Measures to see that we need to forego 77.79% of data points to have a perfectly balanced dataset. 
We identified that our dataset is leaning towards maximum inequality, and we should take actionable steps to: + * Upsample data points where the feature value is barely observed. + * Downsample data points where the feature value is observed much more than others. + +**In conclusion:** +* These measures provide an indicator of disparity on the data, allowing for users to explore potential mitigations before proceeding to train. +* Users can use these measures to set thresholds on their level of "tolerance" for data representation. +* Production pipelines can use these measures as baseline for models that require frequent retraining on new data. +* These measures can also be saved as key metadata for the model/service built and added as part of model cards or transparency notes helping drive overall accountability for the ML service built and its performance across different demographics or sensitive attributes. diff --git a/website/docs/examples/responsible_ai/Interpretability - Explanation Dashboard.md b/website/docs/examples/responsible_ai/Interpretability - Explanation Dashboard.md new file mode 100644 index 0000000000..63c05c450b --- /dev/null +++ b/website/docs/examples/responsible_ai/Interpretability - Explanation Dashboard.md @@ -0,0 +1,191 @@ +--- +title: Interpretability - Explanation Dashboard +hide_title: true +status: stable +--- +## Interpretability - Explanation Dashboard + +In this example, similar to the "Interpretability - Tabular SHAP explainer" notebook, we use Kernel SHAP to explain a tabular classification model built from the Adults Census dataset and then visualize the explanation in the ExplanationDashboard from https://github.com/microsoft/responsible-ai-widgets. + +First we import the packages and define some UDFs we will need later. + + +```python +import pyspark +from synapse.ml.explainers import * +from pyspark.ml import Pipeline +from pyspark.ml.classification import LogisticRegression +from pyspark.ml.feature import StringIndexer, OneHotEncoder, VectorAssembler +from pyspark.sql.types import * +from pyspark.sql.functions import * +import pandas as pd + +vec_access = udf(lambda v, i: float(v[i]), FloatType()) +vec2array = udf(lambda vec: vec.toArray().tolist(), ArrayType(FloatType())) +``` + +Now let's read the data and train a simple binary classification model. 
+
+
+```python
+df = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/AdultCensusIncome.parquet")
+
+labelIndexer = StringIndexer(inputCol="income", outputCol="label", stringOrderType="alphabetAsc").fit(df)
+print("Label index assignment: " + str(set(zip(labelIndexer.labels, [0, 1]))))
+
+training = labelIndexer.transform(df)
+display(training)
+categorical_features = [
+    "workclass",
+    "education",
+    "marital-status",
+    "occupation",
+    "relationship",
+    "race",
+    "sex",
+    "native-country",
+]
+categorical_features_idx = [col + "_idx" for col in categorical_features]
+categorical_features_enc = [col + "_enc" for col in categorical_features]
+numeric_features = ["age", "education-num", "capital-gain", "capital-loss", "hours-per-week"]
+
+strIndexer = StringIndexer(inputCols=categorical_features, outputCols=categorical_features_idx)
+onehotEnc = OneHotEncoder(inputCols=categorical_features_idx, outputCols=categorical_features_enc)
+vectAssem = VectorAssembler(inputCols=categorical_features_enc + numeric_features, outputCol="features")
+lr = LogisticRegression(featuresCol="features", labelCol="label", weightCol="fnlwgt")
+pipeline = Pipeline(stages=[strIndexer, onehotEnc, vectAssem, lr])
+model = pipeline.fit(training)
+```
+
+After the model is trained, we randomly select some observations to be explained.
+
+
+```python
+explain_instances = model.transform(training).orderBy(rand()).limit(5).repartition(200).cache()
+display(explain_instances)
+```
+
+We create a TabularSHAP explainer, set the input columns to all the features the model takes, and specify the model and the target output column we are trying to explain. In this case, we are trying to explain the "probability" output, which is a vector of length 2, and we are only looking at the class 1 probability. Specify targetClasses to `[0, 1]` if you want to explain the class 0 and class 1 probabilities at the same time. Finally, we sample 100 rows from the training data as background data, which is used for integrating out features in Kernel SHAP.
+
+
+```python
+shap = TabularSHAP(
+    inputCols=categorical_features + numeric_features,
+    outputCol="shapValues",
+    numSamples=5000,
+    model=model,
+    targetCol="probability",
+    targetClasses=[1],
+    backgroundData=broadcast(training.orderBy(rand()).limit(100).cache()),
+)
+
+shap_df = shap.transform(explain_instances)
+
+```
+
+Once we have the resulting dataframe, we extract the class 1 probability of the model output, the SHAP values for the target class, the original features, and the true label. Then we convert it to a pandas dataframe for visualization.
+For each observation, the first element in the SHAP values vector is the base value (the mean output of the background dataset), and each of the following elements is the SHAP value for the corresponding feature.
+ + +```python +shaps = ( + shap_df.withColumn("probability", vec_access(col("probability"), lit(1))) + .withColumn("shapValues", vec2array(col("shapValues").getItem(0))) + .select(["shapValues", "probability", "label"] + categorical_features + numeric_features) +) + +shaps_local = shaps.toPandas() +shaps_local.sort_values("probability", ascending=False, inplace=True, ignore_index=True) +pd.set_option("display.max_colwidth", None) +shaps_local +``` + +We can visualize the explanation in the [interpret-community format](https://github.com/interpretml/interpret-community) in the ExplanationDashboard from https://github.com/microsoft/responsible-ai-widgets/ + + +```python +import pandas as pd +import numpy as np + +features = categorical_features + numeric_features +features_with_base = ["Base"] + features + +rows = shaps_local.shape[0] + +local_importance_values = shaps_local[['shapValues']] +eval_data = shaps_local[features] +true_y = np.array(shaps_local[['label']]) +``` + + +```python +list_local_importance_values = local_importance_values.values.tolist() +converted_importance_values = [] +bias = [] +for classarray in list_local_importance_values: + for rowarray in classarray: + converted_list = rowarray.tolist() + bias.append(converted_list[0]) + # remove the bias from local importance values + del converted_list[0] + converted_importance_values.append(converted_list) +``` + +When running Synapse Analytics, please follow instructions here [Package management - Azure Synapse Analytics | Microsoft Docs](https://docs.microsoft.com/en-us/azure/synapse-analytics/spark/apache-spark-azure-portal-add-libraries) to install ["raiwidgets"](https://pypi.org/project/raiwidgets/) and ["interpret-community"](https://pypi.org/project/interpret-community/) packages. 
+ + +```python +!pip install --upgrade raiwidgets +``` + + +```python +!pip install --upgrade interpret-community +``` + + +```python +from interpret_community.adapter import ExplanationAdapter +adapter = ExplanationAdapter(features, classification=True) +global_explanation = adapter.create_global(converted_importance_values, eval_data, expected_values=bias) +``` + + +```python +# view the global importance values +global_explanation.global_importance_values +``` + + +```python +# view the local importance values +global_explanation.local_importance_values +``` + + +```python +class wrapper(object): + def __init__(self, model): + self.model = model + + def predict(self, data): + sparkdata = spark.createDataFrame(data) + return model.transform(sparkdata).select('prediction').toPandas().values.flatten().tolist() + + def predict_proba(self, data): + sparkdata = spark.createDataFrame(data) + prediction = model.transform(sparkdata).select('probability').toPandas().values.flatten().tolist() + proba_list = [vector.values.tolist() for vector in prediction] + return proba_list +``` + + +```python +# view the explanation in the ExplanationDashboard +from raiwidgets import ExplanationDashboard +ExplanationDashboard(global_explanation, wrapper(model), dataset=eval_data, true_y=true_y) +``` + +Your results will look like: + + diff --git a/website/docs/examples/model_interpretability/Interpretability - Tabular SHAP explainer.md b/website/docs/examples/responsible_ai/Interpretability - Tabular SHAP explainer.md similarity index 98% rename from website/docs/examples/model_interpretability/Interpretability - Tabular SHAP explainer.md rename to website/docs/examples/responsible_ai/Interpretability - Tabular SHAP explainer.md index 661ef1bd54..bc9197efce 100644 --- a/website/docs/examples/model_interpretability/Interpretability - Tabular SHAP explainer.md +++ b/website/docs/examples/responsible_ai/Interpretability - Tabular SHAP explainer.md @@ -76,7 +76,7 @@ shap = TabularSHAP( model=model, targetCol="probability", targetClasses=[1], - backgroundData=training.orderBy(rand()).limit(100).cache(), + backgroundData=broadcast(training.orderBy(rand()).limit(100).cache()), ) shap_df = shap.transform(explain_instances) diff --git a/website/docs/examples/model_interpretability/Interpretability - Text Explainers.md b/website/docs/examples/responsible_ai/Interpretability - Text Explainers.md similarity index 100% rename from website/docs/examples/model_interpretability/Interpretability - Text Explainers.md rename to website/docs/examples/responsible_ai/Interpretability - Text Explainers.md diff --git a/website/docs/features/model_interpretability/ModelInterpretability - Snow Leopard Detection.md b/website/docs/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.md similarity index 100% rename from website/docs/features/model_interpretability/ModelInterpretability - Snow Leopard Detection.md rename to website/docs/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.md diff --git a/website/docs/features/onnx/about.md b/website/docs/features/onnx/about.md index dbb8e95cfe..3153c4c9ca 100644 --- a/website/docs/features/onnx/about.md +++ b/website/docs/features/onnx/about.md @@ -45,5 +45,5 @@ MMLSpark now includes a Spark transformer to bring an trained ONNX model to Apac ## Example -- [Interpretability - Image Explainers](/docs/examples/model_interpretability/Interpretability%20-%20Image%20Explainers) +- [Interpretability - Image 
Explainers](/docs/examples/responsible_ai/Interpretability%20-%20Image%20Explainers) - [ONNX - Inference on Spark](/docs/features/onnx/ONNX%20-%20Inference%20on%20Spark) diff --git a/website/docs/features/exploratory/Data Balance Analysis.md b/website/docs/features/responsible_ai/Data Balance Analysis.md similarity index 96% rename from website/docs/features/exploratory/Data Balance Analysis.md rename to website/docs/features/responsible_ai/Data Balance Analysis.md index af1eb0e4dd..19d7a0df32 100644 --- a/website/docs/features/exploratory/Data Balance Analysis.md +++ b/website/docs/features/responsible_ai/Data Balance Analysis.md @@ -1,5 +1,7 @@ --- title: Data Balance Analysis on Spark +hide_title: true +sidebar_label: Data Balance Analysis description: Learn how to do Data Balance Analysis on Spark to determine how well features and feature values are represented in your dataset. --- @@ -17,7 +19,7 @@ In summary, Data Balance Analysis, used as a step for building ML models has the ## Examples -* [Data Balance Analysis - Adult Census Income](https://github.com/microsoft/SynapseML/blob/master/notebooks/Data%20Balance%20Analysis%20-%20Adult%20Census%20Income.ipynb) +* [Data Balance Analysis - Adult Census Income](../../../examples/responsible_ai/DataBalanceAnalysis%20-%20Adult%20Census%20Income) ## Usage @@ -173,7 +175,7 @@ This involves under-sampling from majority class and over-sampling from minority 1. Under-sampling may remove valuable information. 2. Over-sampling may cause overfitting and poor generalization on test set. -![Bar chart undersampling and oversampling](https://mmlspark.blob.core.windows.net/graphics/exploratory/DataBalanceAnalysis_SamplingBar.png) +![Bar chart undersampling and oversampling](https://mmlspark.blob.core.windows.net/graphics/responsible_ai/DataBalanceAnalysis_SamplingBar.png) There are smarter techniques to under-sample and over-sample in literature and implemented in Python’s [imbalanced-learn](https://imbalanced-learn.org/stable/) package. @@ -181,14 +183,14 @@ For example, we can cluster the records of the majority class, and do the under- One technique of under-sampling is use of Tomek Links. Tomek links are pairs of very close instances but of opposite classes. Removing the instances of the majority class of each pair increases the space between the two classes, facilitating the classification process. A similar way to under-sample majority class is using Near-Miss. It first calculates the distance between all the points in the larger class with the points in the smaller class. When two points belonging to different classes are very close to each other in the distribution, this algorithm eliminates the datapoint of the larger class thereby trying to balance the distribution. -![Tomek Links](https://mmlspark.blob.core.windows.net/graphics/exploratory/DataBalanceAnalysis_TomekLinks.png) +![Tomek Links](https://mmlspark.blob.core.windows.net/graphics/responsible_ai/DataBalanceAnalysis_TomekLinks.png) In over-sampling, instead of creating exact copies of the minority class records, we can introduce small variations into those copies, creating more diverse synthetic samples. This technique is called SMOTE (Synthetic Minority Oversampling Technique). It randomly picks a point from the minority class and computes the k-nearest neighbors for this point. The synthetic points are added between the chosen point and its neighbors. 
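+
+As a rough illustration of what SMOTE looks like in practice, here is a minimal sketch using the [imbalanced-learn](https://imbalanced-learn.org/stable/) package mentioned above; the `X` and `y` arrays are toy data invented for this example, and the resampling runs on driver-local NumPy arrays rather than on a Spark DataFrame:
+
+```python
+import numpy as np
+from imblearn.over_sampling import SMOTE
+
+rng = np.random.RandomState(0)
+X = rng.normal(size=(100, 3))          # toy feature matrix
+y = np.array([0] * 90 + [1] * 10)      # imbalanced labels: 10% minority class
+
+# SMOTE picks minority-class points, finds their k nearest minority neighbors,
+# and interpolates new synthetic points between them.
+smote = SMOTE(k_neighbors=5, random_state=0)
+X_resampled, y_resampled = smote.fit_resample(X, y)
+
+print(np.bincount(y))             # [90 10]
+print(np.bincount(y_resampled))   # [90 90] - classes are now balanced
+```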
-![Synthetic Samples](https://mmlspark.blob.core.windows.net/graphics/exploratory/DataBalanceAnalysis_SyntheticSamples.png) +![Synthetic Samples](https://mmlspark.blob.core.windows.net/graphics/responsible_ai/DataBalanceAnalysis_SyntheticSamples.png) ### Reweighting There is an expected and observed value in each table cell. The weight is essentially expected / observed value. This is easy to extend to multiple features with more than 2 groups. The weights are then incorporated in loss function of model training. -![Reweighting](https://mmlspark.blob.core.windows.net/graphics/exploratory/DataBalanceAnalysis_Reweight.png) +![Reweighting](https://mmlspark.blob.core.windows.net/graphics/responsible_ai/DataBalanceAnalysis_Reweight.png) diff --git a/website/docs/examples/model_interpretability/Interpretability - Image Explainers.md b/website/docs/features/responsible_ai/Interpretability - Image Explainers.md similarity index 100% rename from website/docs/examples/model_interpretability/Interpretability - Image Explainers.md rename to website/docs/features/responsible_ai/Interpretability - Image Explainers.md diff --git a/website/docs/features/model_interpretability/about.md b/website/docs/features/responsible_ai/Model Interpretation on Spark.md similarity index 97% rename from website/docs/features/model_interpretability/about.md rename to website/docs/features/responsible_ai/Model Interpretation on Spark.md index 01c71edc6d..e25250b2ba 100644 --- a/website/docs/features/model_interpretability/about.md +++ b/website/docs/features/responsible_ai/Model Interpretation on Spark.md @@ -1,7 +1,7 @@ --- title: Model Interpretation on Spark hide_title: true -sidebar_label: About +sidebar_label: Model Interpretation on Spark --- # Model Interpretation on Spark @@ -26,9 +26,9 @@ Both explainers extends from `org.apache.spark.ml.Transformer`. 
After setting up To see examples of model interpretability on Spark in action, take a look at these sample notebooks: -- [Tabular SHAP explainer](/docs/examples/model_interpretability/Interpretability%20-%20Tabular%20SHAP%20explainer) -- [Image explainers](/docs/examples/model_interpretability/Interpretability%20-%20Image%20Explainers) -- [Text explainers](/docs/examples/model_interpretability/Interpretability%20-%20Text%20Explainers) +- [Tabular SHAP explainer](../../../examples/responsible_ai/Interpretability%20-%20Tabular%20SHAP%20explainer) +- [Image explainers](../../../examples/responsible_ai/Interpretability%20-%20Image%20Explainers) +- [Text explainers](../../../examples/responsible_ai/Interpretability%20-%20Text%20Explainers) | | Tabular models | Vector models | Image models | Text models | |------------------------|-----------------------------|---------------------------|-------------------------|-----------------------| diff --git a/website/notebookconvert.py b/website/notebookconvert.py index 323c9e753e..6ae47e9cf5 100644 --- a/website/notebookconvert.py +++ b/website/notebookconvert.py @@ -1,21 +1,25 @@ import os import re + def add_header_to_markdown(folder, md): name = md[:-3] - with open(os.path.join(folder, md), 'r+', encoding='utf-8') as f: + with open(os.path.join(folder, md), "r+", encoding="utf-8") as f: content = f.read() f.truncate(0) - content = re.sub(r'style=\"[\S ]*?\"', '', content) - content = re.sub(r'', '', content) + content = re.sub(r"style=\"[\S ]*?\"", "", content) + content = re.sub(r"", "", content) f.seek(0, 0) f.write("---\ntitle: {}\nhide_title: true\nstatus: stable\n---\n".format(name) + content) f.close() + def convert_notebook_to_markdown(file_path, outputdir): - print("Converting {} into markdown \n".format(file_path)) - convert_cmd = 'jupyter nbconvert --output-dir=\"{}\" --to markdown \"{}\"'.format(outputdir, file_path) + print(f"Converting {file_path} into markdown") + convert_cmd = f'jupyter nbconvert --output-dir="{outputdir}" --to markdown "{file_path}"' os.system(convert_cmd) + print() + def convert_allnotebooks_in_folder(folder, outputdir): @@ -23,17 +27,21 @@ def convert_allnotebooks_in_folder(folder, outputdir): "CognitiveServices - Overview": os.path.join(outputdir, "features"), "Classification": os.path.join(outputdir, "examples", "classification"), "CognitiveServices": os.path.join(outputdir, "examples", "cognitive_services"), + "DataBalanceAnalysis": os.path.join(outputdir, "examples", "responsible_ai"), "DeepLearning": os.path.join(outputdir, "examples", "deep_learning"), - "Interpretability": os.path.join(outputdir, "examples", "model_interpretability"), + "Interpretability - Image Explainers": os.path.join(outputdir, "features", "responsible_ai"), + "Interpretability - Explanation Dashboard": os.path.join(outputdir, "examples", "responsible_ai"), + "Interpretability - Tabular SHAP explainer": os.path.join(outputdir, "examples", "responsible_ai"), + "Interpretability - Text Explainers": os.path.join(outputdir, "examples", "responsible_ai"), + "ModelInterpretability": os.path.join(outputdir, "examples", "responsible_ai"), "Regression": os.path.join(outputdir, "examples", "regression"), "TextAnalytics": os.path.join(outputdir, "examples", "text_analytics"), "HttpOnSpark": os.path.join(outputdir, "features", "http"), "LightGBM": os.path.join(outputdir, "features", "lightgbm"), - "ModelInterpretability": os.path.join(outputdir, "features", "model_interpretability"), "ONNX": os.path.join(outputdir, "features", "onnx"), 
"SparkServing": os.path.join(outputdir, "features", "spark_serving"), - "Vowpal Wabbit": os.path.join(outputdir, "features", "vw") - } + "Vowpal Wabbit": os.path.join(outputdir, "features", "vw"), + } for nb in os.listdir(folder): if nb.endswith(".ipynb"): @@ -44,7 +52,7 @@ def convert_allnotebooks_in_folder(folder, outputdir): if nb.startswith(k): finaldir = v break - + if not os.path.exists(finaldir): os.mkdir(finaldir) @@ -55,11 +63,13 @@ def convert_allnotebooks_in_folder(folder, outputdir): convert_notebook_to_markdown(os.path.join(folder, nb), finaldir) add_header_to_markdown(finaldir, md) + def main(): cur_path = os.getcwd() folder = os.path.join(cur_path, "notebooks") outputdir = os.path.join(cur_path, "website", "docs") convert_allnotebooks_in_folder(folder, outputdir) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/website/sidebars.js b/website/sidebars.js index b16b5b7723..06bd321de6 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -1,17 +1,17 @@ -const {listExamplePaths} = require('./src/plugins/examples'); +const { listExamplePaths } = require('./src/plugins/examples'); let features_http_docs = listExamplePaths("features", "http"); let features_lightgbm_docs = listExamplePaths("features", "lightgbm"); -let features_mi_docs = listExamplePaths("features", "model_interpretability"); let features_onnx_docs = listExamplePaths("features", "onnx"); +let features_rai_docs = listExamplePaths("features", "responsible_ai"); let features_ss_docs = listExamplePaths("features", "spark_serving"); let features_vw_docs = listExamplePaths("features", "vw"); let examples_cl_docs = listExamplePaths("examples", "classification"); let examples_cs_docs = listExamplePaths("examples", "cognitive_services"); let examples_dl_docs = listExamplePaths("examples", "deep_learning"); -let examples_mi_docs = listExamplePaths("examples", "model_interpretability"); let examples_rg_docs = listExamplePaths("examples", "regression"); +let examples_rai_docs = listExamplePaths("examples", "responsible_ai"); let examples_ta_docs = listExamplePaths("examples", "text_analytics"); @@ -34,7 +34,7 @@ module.exports = { type: 'category', label: 'Features', items: [ - 'features/CognitiveServices - Overview', + 'features/CognitiveServices - Overview', { type: 'category', label: 'HTTP on Spark', @@ -47,8 +47,8 @@ module.exports = { }, { type: 'category', - label: 'Model Interpretability', - items: features_mi_docs, + label: 'Responsible AI', + items: features_rai_docs, }, { type: 'category', @@ -94,8 +94,8 @@ module.exports = { }, { type: 'category', - label: 'Model Interpretability', - items: examples_mi_docs, + label: 'Responsible AI', + items: examples_rai_docs, }, { type: 'category', diff --git a/website/src/pages/index.js b/website/src/pages/index.js index 0be749c07e..c5d6041e43 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -41,7 +41,7 @@ model_prediction_df = (ONNXModel() }, { label: "Model Interpretability", - further: "/docs/features/model_interpretability/about", + further: "/docs/next/features/responsible_ai/Model%20Interpretation%20on%20Spark", config: `from synapse.ml.explainers import * interpretation_df = (TabularSHAP() diff --git a/website/src/plugins/examples/index.js b/website/src/plugins/examples/index.js index 024c2a5c2a..04478b50bd 100644 --- a/website/src/plugins/examples/index.js +++ b/website/src/plugins/examples/index.js @@ -12,11 +12,13 @@ function all_examples_for_type(folder, type) { let examples = []; let dir = 
path.join(__dirname, `../../../docs/${folder}/${type}`); fs.readdirSync(dir).forEach(function (file) { - let name = file.split(".").slice(0, -1).join("."); - let data = fs.readFileSync(path.join(dir, file)); - const { frontMatter } = parseMarkdownString(data); - frontMatter["name"] = name; - examples.push(frontMatter); + if (file.endsWith(".md")) { + let name = file.split(".").slice(0, -1).join("."); + let data = fs.readFileSync(path.join(dir, file)); + const { frontMatter } = parseMarkdownString(data); + frontMatter["name"] = name; + examples.push(frontMatter); + } }); return examples; } @@ -28,9 +30,10 @@ function all_examples() { `features/CognitiveServices - Overview.md`, `examples/ConditionalKNN - Exploring Art Across Cultures.md`, `examples/CyberML - Anomalous Access Detection.md`, + `examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.md`, + `features/responsible_ai/Interpretability - Image Explainers.md`, `features/onnx/ONNX - Inference on Spark.md`, `features/lightgbm/LightGBM - Overview.md`, - `features/model_interpretability/ModelInterpretability - Snow Leopard Detection.md`, `features/vw/Vowpal Wabbit - Overview.md`, ]; let examples = []; From 9831ee6783a7bc3d41735271798620e561071002 Mon Sep 17 00:00:00 2001 From: Stuart Leeks Date: Mon, 8 Nov 2021 17:34:22 +0000 Subject: [PATCH 10/40] feat: Add redactedText to PIIV3 (#1247) --- .../synapse/ml/cognitive/TextAnalyticsSchemas.scala | 1 + .../synapse/ml/cognitive/split1/TextAnalyticsSuite.scala | 9 +++++++++ 2 files changed, 10 insertions(+) diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/TextAnalyticsSchemas.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/TextAnalyticsSchemas.scala index 78bfd6b4ec..dd326157a7 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/TextAnalyticsSchemas.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/TextAnalyticsSchemas.scala @@ -111,6 +111,7 @@ object PIIResponseV3 extends SparkBindings[TAResponse[PIIDocV3]] case class PIIDocV3(id: String, entities: Seq[PIIEntityV3], + redactedText: String, warnings: Seq[TAWarning], statistics: Option[DocumentStatistics]) diff --git a/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/TextAnalyticsSuite.scala b/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/TextAnalyticsSuite.scala index ea9b76ca7c..78d13e4dc0 100644 --- a/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/TextAnalyticsSuite.scala +++ b/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/TextAnalyticsSuite.scala @@ -415,6 +415,15 @@ class PIISuiteV3 extends TransformerFuzzing[PII] with TextKey { test("Basic Usage") { val results = n.transform(df) + + val redactedTexts = results.withColumn("redactedText", + col("response") + .getItem(0) + .getItem("redactedText")) + .select("redactedText") + val redactedText = redactedTexts.collect().head(0).toString() + assert(redactedText === "My SSN is ***********") + val matches = results.withColumn("match", col("response") .getItem(0) From 23a997b874fe9e064c360390fcecf974d49de25c Mon Sep 17 00:00:00 2001 From: Stuart Leeks Date: Mon, 8 Nov 2021 17:35:43 +0000 Subject: [PATCH 11/40] chore: Add config to Text Api location (#1235) * test: add config to specify Text Api location * Combine TextKey and TextApiLocation into TextEndpoint --- .../cognitive/split1/TextAnalyticsSuite.scala | 41 ++++++++++--------- 1 file changed, 21 insertions(+), 20 
deletions(-) diff --git a/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/TextAnalyticsSuite.scala b/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/TextAnalyticsSuite.scala index 78d13e4dc0..1dd40bae8b 100644 --- a/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/TextAnalyticsSuite.scala +++ b/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/TextAnalyticsSuite.scala @@ -13,11 +13,12 @@ import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema import org.apache.spark.sql.functions.col import org.apache.spark.sql.{DataFrame, Row} -trait TextKey { +trait TextEndpoint { lazy val textKey = sys.env.getOrElse("TEXT_API_KEY", Secrets.CognitiveApiKey) + lazy val textApiLocation = sys.env.getOrElse("TEXT_API_LOCATION", "eastus") } -class LanguageDetectorSuite extends TransformerFuzzing[LanguageDetectorV2] with TextKey { +class LanguageDetectorSuite extends TransformerFuzzing[LanguageDetectorV2] with TextEndpoint { import spark.implicits._ @@ -30,7 +31,7 @@ class LanguageDetectorSuite extends TransformerFuzzing[LanguageDetectorV2] with lazy val detector: LanguageDetectorV2 = new LanguageDetectorV2() .setSubscriptionKey(textKey) - .setUrl("https://eastus.api.cognitive.microsoft.com/text/analytics/v2.0/languages") + .setUrl(s"https://$textApiLocation.api.cognitive.microsoft.com/text/analytics/v2.0/languages") .setTextCol("text2") .setOutputCol("replies") @@ -70,7 +71,7 @@ class LanguageDetectorSuite extends TransformerFuzzing[LanguageDetectorV2] with override def reader: MLReadable[_] = LanguageDetectorV2 } -class LanguageDetectorV3Suite extends TransformerFuzzing[LanguageDetector] with TextKey { +class LanguageDetectorV3Suite extends TransformerFuzzing[LanguageDetector] with TextEndpoint { import spark.implicits._ @@ -83,7 +84,7 @@ class LanguageDetectorV3Suite extends TransformerFuzzing[LanguageDetector] with lazy val detector: LanguageDetector = new LanguageDetector() .setSubscriptionKey(textKey) - .setUrl("https://eastus.api.cognitive.microsoft.com/text/analytics/v3.0/languages") + .setUrl(s"https://$textApiLocation.api.cognitive.microsoft.com/text/analytics/v3.0/languages") .setOutputCol("replies") test("Basic Usage") { @@ -102,7 +103,7 @@ class LanguageDetectorV3Suite extends TransformerFuzzing[LanguageDetector] with override def reader: MLReadable[_] = LanguageDetector } -class EntityDetectorSuite extends TransformerFuzzing[EntityDetectorV2] with TextKey { +class EntityDetectorSuite extends TransformerFuzzing[EntityDetectorV2] with TextEndpoint { import spark.implicits._ @@ -113,7 +114,7 @@ class EntityDetectorSuite extends TransformerFuzzing[EntityDetectorV2] with Text lazy val detector: EntityDetectorV2 = new EntityDetectorV2() .setSubscriptionKey(textKey) - .setUrl("https://eastus.api.cognitive.microsoft.com/text/analytics/v2.0/entities") + .setUrl(s"https://$textApiLocation.api.cognitive.microsoft.com/text/analytics/v2.0/entities") .setLanguage("en") .setOutputCol("replies") @@ -132,7 +133,7 @@ class EntityDetectorSuite extends TransformerFuzzing[EntityDetectorV2] with Text override def reader: MLReadable[_] = EntityDetectorV2 } -class EntityDetectorSuiteV3 extends TransformerFuzzing[EntityDetector] with TextKey { +class EntityDetectorSuiteV3 extends TransformerFuzzing[EntityDetector] with TextEndpoint { import spark.implicits._ @@ -163,7 +164,7 @@ class EntityDetectorSuiteV3 extends TransformerFuzzing[EntityDetector] with Text override def reader: MLReadable[_] = EntityDetector } 
-trait TextSentimentBaseSuite extends TestBase with TextKey { +trait TextSentimentBaseSuite extends TestBase with TextEndpoint { import spark.implicits._ lazy val df: DataFrame = Seq( @@ -219,7 +220,7 @@ class TextSentimentSuite extends TransformerFuzzing[TextSentimentV2] with TextSe lazy val t: TextSentimentV2 = new TextSentimentV2() .setSubscriptionKey(textKey) - .setUrl("https://eastus.api.cognitive.microsoft.com/text/analytics/v2.0/sentiment") + .setUrl(s"https://$textApiLocation.api.cognitive.microsoft.com/text/analytics/v2.0/sentiment") .setLanguageCol("lang") .setOutputCol("replies") @@ -251,7 +252,7 @@ class TextSentimentSuite extends TransformerFuzzing[TextSentimentV2] with TextSe override def reader: MLReadable[_] = TextSentimentV2 } -class KeyPhraseExtractorSuite extends TransformerFuzzing[KeyPhraseExtractorV2] with TextKey { +class KeyPhraseExtractorSuite extends TransformerFuzzing[KeyPhraseExtractorV2] with TextEndpoint { import spark.implicits._ @@ -264,7 +265,7 @@ class KeyPhraseExtractorSuite extends TransformerFuzzing[KeyPhraseExtractorV2] w lazy val t: KeyPhraseExtractorV2 = new KeyPhraseExtractorV2() .setSubscriptionKey(textKey) - .setUrl("https://eastus.api.cognitive.microsoft.com/text/analytics/v2.0/keyPhrases") + .setUrl(s"https://$textApiLocation.api.cognitive.microsoft.com/text/analytics/v2.0/keyPhrases") .setLanguageCol("lang") .setOutputCol("replies") @@ -285,7 +286,7 @@ class KeyPhraseExtractorSuite extends TransformerFuzzing[KeyPhraseExtractorV2] w override def reader: MLReadable[_] = KeyPhraseExtractorV2 } -class KeyPhraseExtractorV3Suite extends TransformerFuzzing[KeyPhraseExtractor] with TextKey { +class KeyPhraseExtractorV3Suite extends TransformerFuzzing[KeyPhraseExtractor] with TextEndpoint { import spark.implicits._ @@ -298,7 +299,7 @@ class KeyPhraseExtractorV3Suite extends TransformerFuzzing[KeyPhraseExtractor] w lazy val t: KeyPhraseExtractor = new KeyPhraseExtractor() .setSubscriptionKey(textKey) - .setUrl("https://eastus.api.cognitive.microsoft.com/text/analytics/v3.0/keyPhrases") + .setUrl(s"https://$textApiLocation.api.cognitive.microsoft.com/text/analytics/v3.0/keyPhrases") .setLanguageCol("lang") .setOutputCol("replies") @@ -319,7 +320,7 @@ class KeyPhraseExtractorV3Suite extends TransformerFuzzing[KeyPhraseExtractor] w override def reader: MLReadable[_] = KeyPhraseExtractor } -class NERSuite extends TransformerFuzzing[NERV2] with TextKey { +class NERSuite extends TransformerFuzzing[NERV2] with TextEndpoint { import spark.implicits._ lazy val df: DataFrame = Seq( @@ -329,7 +330,7 @@ class NERSuite extends TransformerFuzzing[NERV2] with TextKey { lazy val n: NERV2 = new NERV2() .setSubscriptionKey(textKey) - .setLocation("eastus") + .setLocation(textApiLocation) .setLanguage("en") .setOutputCol("response") @@ -358,7 +359,7 @@ class NERSuite extends TransformerFuzzing[NERV2] with TextKey { override def reader: MLReadable[_] = NERV2 } -class NERSuiteV3 extends TransformerFuzzing[NER] with TextKey { +class NERSuiteV3 extends TransformerFuzzing[NER] with TextEndpoint { import spark.implicits._ lazy val df: DataFrame = Seq( @@ -368,7 +369,7 @@ class NERSuiteV3 extends TransformerFuzzing[NER] with TextKey { lazy val n: NER = new NER() .setSubscriptionKey(textKey) - .setLocation("eastus") + .setLocation(textApiLocation) .setLanguage("en") .setOutputCol("response") @@ -397,7 +398,7 @@ class NERSuiteV3 extends TransformerFuzzing[NER] with TextKey { override def reader: MLReadable[_] = NER } -class PIISuiteV3 extends TransformerFuzzing[PII] with TextKey { 
+class PIISuiteV3 extends TransformerFuzzing[PII] with TextEndpoint { import spark.implicits._ lazy val df: DataFrame = Seq( @@ -409,7 +410,7 @@ class PIISuiteV3 extends TransformerFuzzing[PII] with TextKey { lazy val n: PII = new PII() .setSubscriptionKey(textKey) - .setLocation("eastus") + .setLocation(textApiLocation) .setLanguage("en") .setOutputCol("response") From 8d1bd200715520b895cc275e23f89d3ceae69ca0 Mon Sep 17 00:00:00 2001 From: Serena Ruan <82044803+serena-ruan@users.noreply.github.com> Date: Tue, 9 Nov 2021 22:48:57 +0800 Subject: [PATCH 12/40] perf: website enhancement (#1245) * refactor: refactor notebooks folder structure and modify notebookcovert to auto-detect file paths * rename MMLSpark to SynapseML in some contents & update website main page to include announcement and installation in Synapse * move notebooks to correct path * merge master * fix broken links and update to version-0.9.1 because the changes have nothing to do with library updates and only add new markdowns * remove " from announcement * update pipeline * fix string format in notebookconvert * fix missing argument of encoding in pipeline"s version of python * fix pipeline -- create conda environment * fix nbtest.DatabricksTests & add jupyter into environment.yaml * add source activate synapseml * remove generated markdowns * add markdowns into .gitignore * simplify fetching notebooks * remove condition for now to test if auto deployment works * test passed then add back condition & fix sidebar order Co-authored-by: Mark Hamilton --- .../ml/nbtest/DatabricksUtilities.scala | 12 +- environment.yaml | 1 + .../AzureSearchIndex - Met Artworks.ipynb | 0 ...lKNN - Exploring Art Across Cultures.ipynb | 0 ...CyberML - Anomalous Access Detection.ipynb | 0 ...meterTuning - Fighting Breast Cancer.ipynb | 0 ...nCV - Pipeline Image Transformations.ipynb | 0 ...on - Adult Census with Vowpal Wabbit.ipynb | 0 .../Classification - Adult Census.ipynb | 0 ...ication - Before and After SynapseML.ipynb | 0 ...Twitter Sentiment with Vowpal Wabbit.ipynb | 0 ...eServices - Celebrity Quote Analysis.ipynb | 0 ...iveServices - Predictive Maintenance.ipynb | 0 ...g - BiLSTM Medical Entity Extraction.ipynb | 0 ...ning - CIFAR10 Convolutional Network.ipynb | 0 ...arning - Flower Image Classification.ipynb | 0 .../DeepLearning - Transfer Learning.ipynb | 0 .../Regression - Auto Imports.ipynb | 0 ...on - Flight Delays with DataCleaning.ipynb | 0 .../Regression - Flight Delays.ipynb | 0 ...it vs. LightGBM vs. Linear Regressor.ipynb | 0 ...alanceAnalysis - Adult Census Income.ipynb | 4 +- ...rpretability - Explanation Dashboard.ipynb | 0 ...pretability - Tabular SHAP explainer.ipynb | 0 .../Interpretability - Text Explainers.ipynb | 0 ...pretability - Snow Leopard Detection.ipynb | 0 ... 
- Amazon Book Reviews with Word2Vec.ipynb | 0 .../TextAnalytics - Amazon Book Reviews.ipynb | 0 .../CognitiveServices - Overview.ipynb | 0 ...rk - Working with Arbitrary Web APIs.ipynb | 0 .../lightgbm}/LightGBM - Overview.ipynb | 0 .../onnx}/ONNX - Inference on Spark.ipynb | 0 .../Interpretability - Image Explainers.ipynb | 0 ...parkServing - Deploying a Classifier.ipynb | 0 .../vw}/Vowpal Wabbit - Overview.ipynb | 0 pipeline.yaml | 12 + website/.gitignore | 6 + .../AzureSearchIndex - Met Artworks.md | 110 ---- ...onalKNN - Exploring Art Across Cultures.md | 222 -------- .../CyberML - Anomalous Access Detection.md | 329 ------------ ...arameterTuning - Fighting Breast Cancer.md | 88 ---- ...OpenCV - Pipeline Image Transformations.md | 147 ------ website/docs/examples/about.md | 4 +- ...ation - Adult Census with Vowpal Wabbit.md | 83 --- .../Classification - Adult Census.md | 60 --- ...sification - Before and After SynapseML.md | 205 -------- ... - Twitter Sentiment with Vowpal Wabbit.md | 211 -------- ...tiveServices - Celebrity Quote Analysis.md | 131 ----- ...nitiveServices - Predictive Maintenance.md | 146 ------ ...ning - BiLSTM Medical Entity Extraction.md | 232 --------- ...earning - CIFAR10 Convolutional Network.md | 94 ---- ...pLearning - Flower Image Classification.md | 139 ----- .../DeepLearning - Transfer Learning.md | 72 --- .../regression/Regression - Auto Imports.md | 214 -------- ...ssion - Flight Delays with DataCleaning.md | 153 ------ .../regression/Regression - Flight Delays.md | 100 ---- ...abbit vs. LightGBM vs. Linear Regressor.md | 258 ---------- ...ics - Amazon Book Reviews with Word2Vec.md | 154 ------ .../TextAnalytics - Amazon Book Reviews.md | 112 ---- .../features/CognitiveServices - Overview.md | 418 --------------- ...Spark - Working with Arbitrary Web APIs.md | 48 -- website/docs/features/http/about.md | 2 +- .../features/lightgbm/LightGBM - Overview.md | 290 ----------- website/docs/features/lightgbm/about.md | 2 +- .../onnx/ONNX - Inference on Spark.md | 160 ------ website/docs/features/onnx/about.md | 6 +- .../Model Interpretation on Spark.md | 2 +- .../SparkServing - Deploying a Classifier.md | 120 ----- website/docs/features/spark_serving/about.md | 18 +- .../features/vw/Vowpal Wabbit - Overview.md | 483 ------------------ website/docs/features/vw/about.md | 4 +- website/docs/reference/developer-readme.md | 2 +- website/docs/reference/docker.md | 4 +- website/notebookconvert.py | 69 +-- website/sidebars.js | 2 +- website/src/pages/index.js | 79 ++- website/src/pages/index.module.css | 18 + website/src/pages/videos.js | 2 +- website/src/plugins/examples/index.js | 1 + website/src/theme/FeatureCards/index.js | 20 +- .../version-0.9.1/examples/about.md | 10 +- .../Interpretability - Image Explainers.md | 263 ---------- ...terpretability - Tabular SHAP explainer.md | 142 ----- .../Interpretability - Text Explainers.md | 134 ----- ...taBalanceAnalysis - Adult Census Income.md | 2 +- ...nterpretability - Explanation Dashboard.md | 0 ...terpretability - Tabular SHAP explainer.md | 0 .../Interpretability - Text Explainers.md | 0 ...terpretability - Snow Leopard Detection.md | 0 .../version-0.9.1/features/http/about.md | 2 +- .../version-0.9.1/features/lightgbm/about.md | 2 +- ...terpretability - Snow Leopard Detection.md | 282 ---------- .../version-0.9.1/features/onnx/about.md | 6 +- .../responsible_ai/Data Balance Analysis.md | 196 +++++++ .../Interpretability - Image Explainers.md | 0 .../Model Interpretation on Spark.md} | 8 +- 
.../features/spark_serving/about.md | 18 +- .../version-0.9.1/features/vw/about.md | 4 +- .../reference/developer-readme.md | 2 +- .../version-0.9.1/reference/docker.md | 4 +- .../version-0.9.1-sidebars.json | 26 +- 101 files changed, 413 insertions(+), 5737 deletions(-) rename notebooks/{ => examples}/AzureSearchIndex - Met Artworks.ipynb (100%) rename notebooks/{ => examples}/ConditionalKNN - Exploring Art Across Cultures.ipynb (100%) rename notebooks/{ => examples}/CyberML - Anomalous Access Detection.ipynb (100%) rename notebooks/{ => examples}/HyperParameterTuning - Fighting Breast Cancer.ipynb (100%) rename notebooks/{ => examples}/OpenCV - Pipeline Image Transformations.ipynb (100%) rename notebooks/{ => examples/classification}/Classification - Adult Census with Vowpal Wabbit.ipynb (100%) rename notebooks/{ => examples/classification}/Classification - Adult Census.ipynb (100%) rename notebooks/{ => examples/classification}/Classification - Before and After SynapseML.ipynb (100%) rename notebooks/{ => examples/classification}/Classification - Twitter Sentiment with Vowpal Wabbit.ipynb (100%) rename notebooks/{ => examples/cognitive_services}/CognitiveServices - Celebrity Quote Analysis.ipynb (100%) rename notebooks/{ => examples/cognitive_services}/CognitiveServices - Predictive Maintenance.ipynb (100%) rename notebooks/{ => examples/deep_learning}/DeepLearning - BiLSTM Medical Entity Extraction.ipynb (100%) rename notebooks/{ => examples/deep_learning}/DeepLearning - CIFAR10 Convolutional Network.ipynb (100%) rename notebooks/{ => examples/deep_learning}/DeepLearning - Flower Image Classification.ipynb (100%) rename notebooks/{ => examples/deep_learning}/DeepLearning - Transfer Learning.ipynb (100%) rename notebooks/{ => examples/regression}/Regression - Auto Imports.ipynb (100%) rename notebooks/{ => examples/regression}/Regression - Flight Delays with DataCleaning.ipynb (100%) rename notebooks/{ => examples/regression}/Regression - Flight Delays.ipynb (100%) rename notebooks/{ => examples/regression}/Regression - Vowpal Wabbit vs. LightGBM vs. 
Linear Regressor.ipynb (100%) rename notebooks/{ => examples/responsible_ai}/DataBalanceAnalysis - Adult Census Income.ipynb (99%) rename notebooks/{ => examples/responsible_ai}/Interpretability - Explanation Dashboard.ipynb (100%) rename notebooks/{ => examples/responsible_ai}/Interpretability - Tabular SHAP explainer.ipynb (100%) rename notebooks/{ => examples/responsible_ai}/Interpretability - Text Explainers.ipynb (100%) rename notebooks/{ => examples/responsible_ai}/ModelInterpretability - Snow Leopard Detection.ipynb (100%) rename notebooks/{ => examples/text_analytics}/TextAnalytics - Amazon Book Reviews with Word2Vec.ipynb (100%) rename notebooks/{ => examples/text_analytics}/TextAnalytics - Amazon Book Reviews.ipynb (100%) rename notebooks/{ => features}/CognitiveServices - Overview.ipynb (100%) rename notebooks/{ => features/http}/HttpOnSpark - Working with Arbitrary Web APIs.ipynb (100%) rename notebooks/{ => features/lightgbm}/LightGBM - Overview.ipynb (100%) rename notebooks/{ => features/onnx}/ONNX - Inference on Spark.ipynb (100%) rename notebooks/{ => features/responsible_ai}/Interpretability - Image Explainers.ipynb (100%) rename notebooks/{ => features/spark_serving}/SparkServing - Deploying a Classifier.ipynb (100%) rename notebooks/{ => features/vw}/Vowpal Wabbit - Overview.ipynb (100%) delete mode 100644 website/docs/examples/AzureSearchIndex - Met Artworks.md delete mode 100644 website/docs/examples/ConditionalKNN - Exploring Art Across Cultures.md delete mode 100644 website/docs/examples/CyberML - Anomalous Access Detection.md delete mode 100644 website/docs/examples/HyperParameterTuning - Fighting Breast Cancer.md delete mode 100644 website/docs/examples/OpenCV - Pipeline Image Transformations.md delete mode 100644 website/docs/examples/classification/Classification - Adult Census with Vowpal Wabbit.md delete mode 100644 website/docs/examples/classification/Classification - Adult Census.md delete mode 100644 website/docs/examples/classification/Classification - Before and After SynapseML.md delete mode 100644 website/docs/examples/classification/Classification - Twitter Sentiment with Vowpal Wabbit.md delete mode 100644 website/docs/examples/cognitive_services/CognitiveServices - Celebrity Quote Analysis.md delete mode 100644 website/docs/examples/cognitive_services/CognitiveServices - Predictive Maintenance.md delete mode 100644 website/docs/examples/deep_learning/DeepLearning - BiLSTM Medical Entity Extraction.md delete mode 100644 website/docs/examples/deep_learning/DeepLearning - CIFAR10 Convolutional Network.md delete mode 100644 website/docs/examples/deep_learning/DeepLearning - Flower Image Classification.md delete mode 100644 website/docs/examples/deep_learning/DeepLearning - Transfer Learning.md delete mode 100644 website/docs/examples/regression/Regression - Auto Imports.md delete mode 100644 website/docs/examples/regression/Regression - Flight Delays with DataCleaning.md delete mode 100644 website/docs/examples/regression/Regression - Flight Delays.md delete mode 100644 website/docs/examples/regression/Regression - Vowpal Wabbit vs. LightGBM vs. 
Linear Regressor.md delete mode 100644 website/docs/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.md delete mode 100644 website/docs/examples/text_analytics/TextAnalytics - Amazon Book Reviews.md delete mode 100644 website/docs/features/CognitiveServices - Overview.md delete mode 100644 website/docs/features/http/HttpOnSpark - Working with Arbitrary Web APIs.md delete mode 100644 website/docs/features/lightgbm/LightGBM - Overview.md delete mode 100644 website/docs/features/onnx/ONNX - Inference on Spark.md delete mode 100644 website/docs/features/spark_serving/SparkServing - Deploying a Classifier.md delete mode 100644 website/docs/features/vw/Vowpal Wabbit - Overview.md delete mode 100644 website/versioned_docs/version-0.9.1/examples/model_interpretability/Interpretability - Image Explainers.md delete mode 100644 website/versioned_docs/version-0.9.1/examples/model_interpretability/Interpretability - Tabular SHAP explainer.md delete mode 100644 website/versioned_docs/version-0.9.1/examples/model_interpretability/Interpretability - Text Explainers.md rename website/{docs => versioned_docs/version-0.9.1}/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.md (98%) rename website/{docs => versioned_docs/version-0.9.1}/examples/responsible_ai/Interpretability - Explanation Dashboard.md (100%) rename website/{docs => versioned_docs/version-0.9.1}/examples/responsible_ai/Interpretability - Tabular SHAP explainer.md (100%) rename website/{docs => versioned_docs/version-0.9.1}/examples/responsible_ai/Interpretability - Text Explainers.md (100%) rename website/{docs => versioned_docs/version-0.9.1}/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.md (100%) delete mode 100644 website/versioned_docs/version-0.9.1/features/model_interpretability/ModelInterpretability - Snow Leopard Detection.md create mode 100644 website/versioned_docs/version-0.9.1/features/responsible_ai/Data Balance Analysis.md rename website/{docs => versioned_docs/version-0.9.1}/features/responsible_ai/Interpretability - Image Explainers.md (100%) rename website/versioned_docs/version-0.9.1/features/{model_interpretability/about.md => responsible_ai/Model Interpretation on Spark.md} (97%) diff --git a/core/src/test/scala/com/microsoft/azure/synapse/ml/nbtest/DatabricksUtilities.scala b/core/src/test/scala/com/microsoft/azure/synapse/ml/nbtest/DatabricksUtilities.scala index e2bd6b811b..9fef06abbc 100644 --- a/core/src/test/scala/com/microsoft/azure/synapse/ml/nbtest/DatabricksUtilities.scala +++ b/core/src/test/scala/com/microsoft/azure/synapse/ml/nbtest/DatabricksUtilities.scala @@ -59,9 +59,13 @@ object DatabricksUtilities extends HasHttpClient { // Execution Params val TimeoutInMillis: Int = 40 * 60 * 1000 - val NotebookFiles: Array[File] = Option( - FileUtilities.join(BuildInfo.baseDirectory.getParent, "notebooks").getCanonicalFile.listFiles() - ).get + def recursiveListFiles(f: File): Array[File] = { + val files = f.listFiles() + files.filter(_.isFile) ++ files.filter(_.isDirectory).flatMap(recursiveListFiles) + } + + val NotebookFiles: Array[File] = recursiveListFiles(FileUtilities.join(BuildInfo.baseDirectory.getParent, + "notebooks").getCanonicalFile) val ParallizableNotebooks: Seq[File] = NotebookFiles @@ -95,7 +99,7 @@ object DatabricksUtilities extends HasHttpClient { } //TODO convert all this to typed code - def databricksPost(path: String, body: String, retries:List[Int]=List(100, 500, 1000)): JsValue = { + def databricksPost(path: String, body: String, 
retries: List[Int] = List(100, 500, 1000)): JsValue = { retry(retries, { () => val request = new HttpPost(BaseURL + path) request.addHeader("Authorization", AuthValue) diff --git a/environment.yaml b/environment.yaml index 4495bcaaac..42dc41cf6e 100644 --- a/environment.yaml +++ b/environment.yaml @@ -23,3 +23,4 @@ dependencies: - pytest-codeblocks - azure-storage-blob - twine + - jupyter diff --git a/notebooks/AzureSearchIndex - Met Artworks.ipynb b/notebooks/examples/AzureSearchIndex - Met Artworks.ipynb similarity index 100% rename from notebooks/AzureSearchIndex - Met Artworks.ipynb rename to notebooks/examples/AzureSearchIndex - Met Artworks.ipynb diff --git a/notebooks/ConditionalKNN - Exploring Art Across Cultures.ipynb b/notebooks/examples/ConditionalKNN - Exploring Art Across Cultures.ipynb similarity index 100% rename from notebooks/ConditionalKNN - Exploring Art Across Cultures.ipynb rename to notebooks/examples/ConditionalKNN - Exploring Art Across Cultures.ipynb diff --git a/notebooks/CyberML - Anomalous Access Detection.ipynb b/notebooks/examples/CyberML - Anomalous Access Detection.ipynb similarity index 100% rename from notebooks/CyberML - Anomalous Access Detection.ipynb rename to notebooks/examples/CyberML - Anomalous Access Detection.ipynb diff --git a/notebooks/HyperParameterTuning - Fighting Breast Cancer.ipynb b/notebooks/examples/HyperParameterTuning - Fighting Breast Cancer.ipynb similarity index 100% rename from notebooks/HyperParameterTuning - Fighting Breast Cancer.ipynb rename to notebooks/examples/HyperParameterTuning - Fighting Breast Cancer.ipynb diff --git a/notebooks/OpenCV - Pipeline Image Transformations.ipynb b/notebooks/examples/OpenCV - Pipeline Image Transformations.ipynb similarity index 100% rename from notebooks/OpenCV - Pipeline Image Transformations.ipynb rename to notebooks/examples/OpenCV - Pipeline Image Transformations.ipynb diff --git a/notebooks/Classification - Adult Census with Vowpal Wabbit.ipynb b/notebooks/examples/classification/Classification - Adult Census with Vowpal Wabbit.ipynb similarity index 100% rename from notebooks/Classification - Adult Census with Vowpal Wabbit.ipynb rename to notebooks/examples/classification/Classification - Adult Census with Vowpal Wabbit.ipynb diff --git a/notebooks/Classification - Adult Census.ipynb b/notebooks/examples/classification/Classification - Adult Census.ipynb similarity index 100% rename from notebooks/Classification - Adult Census.ipynb rename to notebooks/examples/classification/Classification - Adult Census.ipynb diff --git a/notebooks/Classification - Before and After SynapseML.ipynb b/notebooks/examples/classification/Classification - Before and After SynapseML.ipynb similarity index 100% rename from notebooks/Classification - Before and After SynapseML.ipynb rename to notebooks/examples/classification/Classification - Before and After SynapseML.ipynb diff --git a/notebooks/Classification - Twitter Sentiment with Vowpal Wabbit.ipynb b/notebooks/examples/classification/Classification - Twitter Sentiment with Vowpal Wabbit.ipynb similarity index 100% rename from notebooks/Classification - Twitter Sentiment with Vowpal Wabbit.ipynb rename to notebooks/examples/classification/Classification - Twitter Sentiment with Vowpal Wabbit.ipynb diff --git a/notebooks/CognitiveServices - Celebrity Quote Analysis.ipynb b/notebooks/examples/cognitive_services/CognitiveServices - Celebrity Quote Analysis.ipynb similarity index 100% rename from notebooks/CognitiveServices - Celebrity Quote 
Analysis.ipynb rename to notebooks/examples/cognitive_services/CognitiveServices - Celebrity Quote Analysis.ipynb diff --git a/notebooks/CognitiveServices - Predictive Maintenance.ipynb b/notebooks/examples/cognitive_services/CognitiveServices - Predictive Maintenance.ipynb similarity index 100% rename from notebooks/CognitiveServices - Predictive Maintenance.ipynb rename to notebooks/examples/cognitive_services/CognitiveServices - Predictive Maintenance.ipynb diff --git a/notebooks/DeepLearning - BiLSTM Medical Entity Extraction.ipynb b/notebooks/examples/deep_learning/DeepLearning - BiLSTM Medical Entity Extraction.ipynb similarity index 100% rename from notebooks/DeepLearning - BiLSTM Medical Entity Extraction.ipynb rename to notebooks/examples/deep_learning/DeepLearning - BiLSTM Medical Entity Extraction.ipynb diff --git a/notebooks/DeepLearning - CIFAR10 Convolutional Network.ipynb b/notebooks/examples/deep_learning/DeepLearning - CIFAR10 Convolutional Network.ipynb similarity index 100% rename from notebooks/DeepLearning - CIFAR10 Convolutional Network.ipynb rename to notebooks/examples/deep_learning/DeepLearning - CIFAR10 Convolutional Network.ipynb diff --git a/notebooks/DeepLearning - Flower Image Classification.ipynb b/notebooks/examples/deep_learning/DeepLearning - Flower Image Classification.ipynb similarity index 100% rename from notebooks/DeepLearning - Flower Image Classification.ipynb rename to notebooks/examples/deep_learning/DeepLearning - Flower Image Classification.ipynb diff --git a/notebooks/DeepLearning - Transfer Learning.ipynb b/notebooks/examples/deep_learning/DeepLearning - Transfer Learning.ipynb similarity index 100% rename from notebooks/DeepLearning - Transfer Learning.ipynb rename to notebooks/examples/deep_learning/DeepLearning - Transfer Learning.ipynb diff --git a/notebooks/Regression - Auto Imports.ipynb b/notebooks/examples/regression/Regression - Auto Imports.ipynb similarity index 100% rename from notebooks/Regression - Auto Imports.ipynb rename to notebooks/examples/regression/Regression - Auto Imports.ipynb diff --git a/notebooks/Regression - Flight Delays with DataCleaning.ipynb b/notebooks/examples/regression/Regression - Flight Delays with DataCleaning.ipynb similarity index 100% rename from notebooks/Regression - Flight Delays with DataCleaning.ipynb rename to notebooks/examples/regression/Regression - Flight Delays with DataCleaning.ipynb diff --git a/notebooks/Regression - Flight Delays.ipynb b/notebooks/examples/regression/Regression - Flight Delays.ipynb similarity index 100% rename from notebooks/Regression - Flight Delays.ipynb rename to notebooks/examples/regression/Regression - Flight Delays.ipynb diff --git a/notebooks/Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor.ipynb b/notebooks/examples/regression/Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor.ipynb similarity index 100% rename from notebooks/Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor.ipynb rename to notebooks/examples/regression/Regression - Vowpal Wabbit vs. LightGBM vs. 
Linear Regressor.ipynb diff --git a/notebooks/DataBalanceAnalysis - Adult Census Income.ipynb b/notebooks/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.ipynb similarity index 99% rename from notebooks/DataBalanceAnalysis - Adult Census Income.ipynb rename to notebooks/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.ipynb index 6138008538..a7eb218fbd 100644 --- a/notebooks/DataBalanceAnalysis - Adult Census Income.ipynb +++ b/notebooks/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.ipynb @@ -537,7 +537,7 @@ "Measure | Description | Interpretation | Reference\n", "- | - | - | -\n", "Atkinson Index | It presents the percentage of total income that a given society would have to forego in order to have more equal shares of income between its citizens. This measure depends on the degree of society aversion to inequality (a theoretical parameter decided by the researcher), where a higher value entails greater social utility or willingness by individuals to accept smaller incomes in exchange for a more equal distribution. An important feature of the Atkinson index is that it can be decomposed into within-group and between-group inequality. | Range [0, 1]. 0 if perfect equality. 1 means maximum inequality. In our case, it is the proportion of records for a sensitive columns’ combination. | [Link](https://en.wikipedia.org/wiki/Atkinson_index)\n", - "Theil T Index | GE(1) = Theil's T and is more sensitive to differences at the top of the distribution. The Theil index is a statistic used to measure economic inequality. The Theil index measures an entropic \"distance\" the population is away from the \"ideal\" egalitarian state of everyone having the same income. | If everyone has the same income, then T_T equals 0. If one person has all the income, then T_T gives the result (ln N). 0 means equal income and larger values mean higher level of disproportion. | [Link](https://en.wikipedia.org/wiki/Theil_index)\n", + "Theil T Index | GE(1) = Theil's T and is more sensitive to differences at the top of the distribution. The Theil index is a statistic used to measure economic inequality. The Theil index measures an entropic \"distance\" the population is away from the \"ideal\" egalitarian state of everyone having the same income. | If everyone has the same income, then T_T equals 0. If one person has all the income, then T_T gives the result (ln N). 0 means equal income and larger values mean higher level of disproportion. | [Link](https://en.wikipedia.org/wiki/Theil_index)\n", "Theil L Index | GE(0) = Theil's L and is more sensitive to differences at the lower end of the distribution. Logarithm of (mean income)/(income i), over all the incomes included in the summation. It is also referred to as the mean log deviation measure. Because a transfer from a larger income to a smaller one will change the smaller income's ratio more than it changes the larger income's ratio, the transfer-principle is satisfied by this index. | Same interpretation as Theil T Index. 
| [Link](https://en.wikipedia.org/wiki/Theil_index)" ] }, @@ -638,4 +638,4 @@ }, "nbformat": 4, "nbformat_minor": 0 -} +} \ No newline at end of file diff --git a/notebooks/Interpretability - Explanation Dashboard.ipynb b/notebooks/examples/responsible_ai/Interpretability - Explanation Dashboard.ipynb similarity index 100% rename from notebooks/Interpretability - Explanation Dashboard.ipynb rename to notebooks/examples/responsible_ai/Interpretability - Explanation Dashboard.ipynb diff --git a/notebooks/Interpretability - Tabular SHAP explainer.ipynb b/notebooks/examples/responsible_ai/Interpretability - Tabular SHAP explainer.ipynb similarity index 100% rename from notebooks/Interpretability - Tabular SHAP explainer.ipynb rename to notebooks/examples/responsible_ai/Interpretability - Tabular SHAP explainer.ipynb diff --git a/notebooks/Interpretability - Text Explainers.ipynb b/notebooks/examples/responsible_ai/Interpretability - Text Explainers.ipynb similarity index 100% rename from notebooks/Interpretability - Text Explainers.ipynb rename to notebooks/examples/responsible_ai/Interpretability - Text Explainers.ipynb diff --git a/notebooks/ModelInterpretability - Snow Leopard Detection.ipynb b/notebooks/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.ipynb similarity index 100% rename from notebooks/ModelInterpretability - Snow Leopard Detection.ipynb rename to notebooks/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.ipynb diff --git a/notebooks/TextAnalytics - Amazon Book Reviews with Word2Vec.ipynb b/notebooks/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.ipynb similarity index 100% rename from notebooks/TextAnalytics - Amazon Book Reviews with Word2Vec.ipynb rename to notebooks/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.ipynb diff --git a/notebooks/TextAnalytics - Amazon Book Reviews.ipynb b/notebooks/examples/text_analytics/TextAnalytics - Amazon Book Reviews.ipynb similarity index 100% rename from notebooks/TextAnalytics - Amazon Book Reviews.ipynb rename to notebooks/examples/text_analytics/TextAnalytics - Amazon Book Reviews.ipynb diff --git a/notebooks/CognitiveServices - Overview.ipynb b/notebooks/features/CognitiveServices - Overview.ipynb similarity index 100% rename from notebooks/CognitiveServices - Overview.ipynb rename to notebooks/features/CognitiveServices - Overview.ipynb diff --git a/notebooks/HttpOnSpark - Working with Arbitrary Web APIs.ipynb b/notebooks/features/http/HttpOnSpark - Working with Arbitrary Web APIs.ipynb similarity index 100% rename from notebooks/HttpOnSpark - Working with Arbitrary Web APIs.ipynb rename to notebooks/features/http/HttpOnSpark - Working with Arbitrary Web APIs.ipynb diff --git a/notebooks/LightGBM - Overview.ipynb b/notebooks/features/lightgbm/LightGBM - Overview.ipynb similarity index 100% rename from notebooks/LightGBM - Overview.ipynb rename to notebooks/features/lightgbm/LightGBM - Overview.ipynb diff --git a/notebooks/ONNX - Inference on Spark.ipynb b/notebooks/features/onnx/ONNX - Inference on Spark.ipynb similarity index 100% rename from notebooks/ONNX - Inference on Spark.ipynb rename to notebooks/features/onnx/ONNX - Inference on Spark.ipynb diff --git a/notebooks/Interpretability - Image Explainers.ipynb b/notebooks/features/responsible_ai/Interpretability - Image Explainers.ipynb similarity index 100% rename from notebooks/Interpretability - Image Explainers.ipynb rename to 
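For intuition, the inequality measures described in the table above can be computed directly from per-group record counts. The sketch below is illustrative only: the helper names and toy counts are hypothetical, and it uses plain NumPy following the linked definitions rather than the SynapseML DataBalanceAnalysis API.

```python
import numpy as np

def theil_t(counts):
    """Theil T (GE(1)): 0 for equal shares, ln(N) when one group holds everything. Assumes counts > 0."""
    x = np.asarray(counts, dtype=float)
    r = x / x.mean()
    return float(np.mean(r * np.log(r)))

def theil_l(counts):
    """Theil L (GE(0)), the mean log deviation; more sensitive to the low end of the distribution."""
    x = np.asarray(counts, dtype=float)
    return float(np.mean(np.log(x.mean() / x)))

def atkinson(counts, epsilon=0.5):
    """Atkinson index with inequality-aversion parameter epsilon; 0 means perfect equality."""
    x = np.asarray(counts, dtype=float)
    mu = x.mean()
    if epsilon == 1.0:
        ede = np.exp(np.mean(np.log(x)))  # equally distributed equivalent is the geometric mean
    else:
        ede = np.mean(x ** (1.0 - epsilon)) ** (1.0 / (1.0 - epsilon))
    return float(1.0 - ede / mu)

# Toy example: record counts for three combinations of sensitive-feature values.
counts = [500, 300, 200]
print(theil_t(counts), theil_l(counts), atkinson(counts, epsilon=0.5))
```

Equal counts drive all three measures to 0, while concentrating all records in one group pushes Theil T toward ln(N) and the Atkinson index toward 1, matching the interpretations given in the table.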
notebooks/features/responsible_ai/Interpretability - Image Explainers.ipynb diff --git a/notebooks/SparkServing - Deploying a Classifier.ipynb b/notebooks/features/spark_serving/SparkServing - Deploying a Classifier.ipynb similarity index 100% rename from notebooks/SparkServing - Deploying a Classifier.ipynb rename to notebooks/features/spark_serving/SparkServing - Deploying a Classifier.ipynb diff --git a/notebooks/Vowpal Wabbit - Overview.ipynb b/notebooks/features/vw/Vowpal Wabbit - Overview.ipynb similarity index 100% rename from notebooks/Vowpal Wabbit - Overview.ipynb rename to notebooks/features/vw/Vowpal Wabbit - Overview.ipynb diff --git a/pipeline.yaml b/pipeline.yaml index dd26efc639..191e0925c3 100644 --- a/pipeline.yaml +++ b/pipeline.yaml @@ -447,6 +447,18 @@ jobs: inputs: azureSubscription: 'MMLSpark Build' keyVaultName: mmlspark-keys + - bash: echo "##vso[task.prependpath]$CONDA/bin" + displayName: Add conda to PATH + - bash: conda env create -f environment.yaml + displayName: Create Anaconda environment + - task: AzureCLI@1 + displayName: 'Convert notebooks to markdowns' + inputs: + azureSubscription: 'MMLSpark Build' + scriptLocation: inlineScript + inlineScript: | + source activate synapseml + sbt convertNotebooks - bash: | yarn install git config --global user.name "${GH_NAME}" diff --git a/website/.gitignore b/website/.gitignore index b2d6de3062..9d46e3a639 100644 --- a/website/.gitignore +++ b/website/.gitignore @@ -8,6 +8,12 @@ .docusaurus .cache-loader +# Converted markdowns +/docs/examples/* +!/docs/examples/about.md +/docs/features/**/* +!/docs/features/**/about.md + # Misc .DS_Store .env.local diff --git a/website/docs/examples/AzureSearchIndex - Met Artworks.md b/website/docs/examples/AzureSearchIndex - Met Artworks.md deleted file mode 100644 index e045e82d48..0000000000 --- a/website/docs/examples/AzureSearchIndex - Met Artworks.md +++ /dev/null @@ -1,110 +0,0 @@ ---- -title: AzureSearchIndex - Met Artworks -hide_title: true -status: stable ---- -

-# Creating a searchable Art Database with The MET's open-access collection

- -In this example, we show how you can enrich data using Cognitive Skills and write to an Azure Search Index using SynapseML. We use a subset of The MET's open-access collection and enrich it by passing it through 'Describe Image' and a custom 'Image Similarity' skill. The results are then written to a searchable index. - - -```python -import os, sys, time, json, requests -from pyspark.ml import Transformer, Estimator, Pipeline -from pyspark.ml.feature import SQLTransformer -from pyspark.sql.functions import lit, udf, col, split -``` - - -```python -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - from pyspark.sql import SparkSession - spark = SparkSession.builder.getOrCreate() - from notebookutils.mssparkutils.credentials import getSecret - os.environ['VISION_API_KEY'] = getSecret("mmlspark-keys", "mmlspark-cs-key") - os.environ['AZURE_SEARCH_KEY'] = getSecret("mmlspark-keys", "mmlspark-azure-search-key") -``` - - -```python -VISION_API_KEY = os.environ['VISION_API_KEY'] -AZURE_SEARCH_KEY = os.environ['AZURE_SEARCH_KEY'] -search_service = "mmlspark-azure-search" -search_index = "test" -``` - - -```python -data = spark.read\ - .format("csv")\ - .option("header", True)\ - .load("wasbs://publicwasb@mmlspark.blob.core.windows.net/metartworks_sample.csv")\ - .withColumn("searchAction", lit("upload"))\ - .withColumn("Neighbors", split(col("Neighbors"), ",").cast("array"))\ - .withColumn("Tags", split(col("Tags"), ",").cast("array"))\ - .limit(25) -``` - - - - -```python -from synapse.ml.cognitive import AnalyzeImage - -from synapse.ml.stages import SelectColumns - - - -#define pipeline - -describeImage = (AnalyzeImage() - - .setSubscriptionKey(VISION_API_KEY) - - .setLocation("eastus") - - .setImageUrlCol("PrimaryImageUrl") - - .setOutputCol("RawImageDescription") - - .setErrorCol("Errors") - - .setVisualFeatures(["Categories", "Description", "Faces", "ImageType", "Color", "Adult"]) - - .setConcurrency(5)) - - - -df2 = describeImage.transform(data)\ - - .select("*", "RawImageDescription.*").drop("Errors", "RawImageDescription") -``` - - - -Before writing the results to a Search Index, you must define a schema which must specify the name, type, and attributes of each field in your index. Refer [Create a basic index in Azure Search](https://docs.microsoft.com/en-us/azure/search/search-what-is-an-index) for more information. - - -```python -from synapse.ml.cognitive import * - -df2.writeToAzureSearch( - - subscriptionKey=AZURE_SEARCH_KEY, - - actionCol="searchAction", - - serviceName=search_service, - - indexName=search_index, - - keyCol="ObjectID") -``` - -The Search Index can be queried using the [Azure Search REST API](https://docs.microsoft.com/rest/api/searchservice/) by sending GET or POST requests and specifying query parameters that give the criteria for selecting matching documents. 
For more information on querying refer [Query your Azure Search index using the REST API](https://docs.microsoft.com/en-us/rest/api/searchservice/Search-Documents) - - -```python -url = 'https://{}.search.windows.net/indexes/{}/docs/search?api-version=2019-05-06'.format(search_service, search_index) -requests.post(url, json={"search": "Glass"}, headers = {"api-key": AZURE_SEARCH_KEY}).json() -``` diff --git a/website/docs/examples/ConditionalKNN - Exploring Art Across Cultures.md b/website/docs/examples/ConditionalKNN - Exploring Art Across Cultures.md deleted file mode 100644 index 16751ba790..0000000000 --- a/website/docs/examples/ConditionalKNN - Exploring Art Across Cultures.md +++ /dev/null @@ -1,222 +0,0 @@ ---- -title: ConditionalKNN - Exploring Art Across Cultures -hide_title: true -status: stable ---- -# Exploring Art across Culture and Medium with Fast, Conditional, k-Nearest Neighbors - - - -This notebook serves as a guideline for match-finding via k-nearest-neighbors. In the code below, we will set up code that allows queries involving cultures and mediums of art amassed from the Metropolitan Museum of Art in NYC and the Rijksmuseum in Amsterdam. - -### Overview of the BallTree -The structure functioning behind the kNN model is a BallTree, which is a recursive binary tree where each node (or "ball") contains a partition of the points of data to be queried. Building a BallTree involves assigning data points to the "ball" whose center they are closest to (with respect to a certain specified feature), resulting in a structure that allows binary-tree-like traversal and lends itself to finding k-nearest neighbors at a BallTree leaf. - -#### Setup -Import necessary Python libraries and prepare dataset. - - -```python -from pyspark.sql.types import BooleanType -from pyspark.sql.types import * -from pyspark.ml.feature import Normalizer -from pyspark.sql.functions import lit, array, array_contains, udf, col, struct -from synapse.ml.nn import ConditionalKNN, ConditionalKNNModel -from PIL import Image -from io import BytesIO - -import requests -import numpy as np -import matplotlib.pyplot as plt - -import os - -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - from pyspark.sql import SparkSession - spark = SparkSession.builder.getOrCreate() - -``` - -Our dataset comes from a table containing artwork information from both the Met and Rijks museums. 
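To make the BallTree description above concrete, here is a small, self-contained sketch of the idea. The class and method names are hypothetical, the median split on the widest dimension is just one possible splitting heuristic, and this is not SynapseML's actual ConditionalKNN implementation.

```python
import numpy as np

class Ball:
    def __init__(self, points, leaf_size=16):
        self.points = points
        self.center = points.mean(axis=0)
        self.radius = np.linalg.norm(points - self.center, axis=1).max()
        self.children = None
        if len(points) > leaf_size:
            # Split on the dimension with the largest spread and recurse on each half.
            dim = np.argmax(points.max(axis=0) - points.min(axis=0))
            order = np.argsort(points[:, dim])
            mid = len(points) // 2
            self.children = (Ball(points[order[:mid]], leaf_size),
                             Ball(points[order[mid:]], leaf_size))

    def knn(self, query, k):
        if self.children is None:
            # Leaf: brute-force the k nearest points in this ball.
            d = np.linalg.norm(self.points - query, axis=1)
            idx = np.argsort(d)[:k]
            return list(zip(d[idx], self.points[idx]))
        # Visit the child whose center is closer first (binary-tree-like traversal).
        near, far = sorted(self.children, key=lambda c: np.linalg.norm(query - c.center))
        best = near.knn(query, k)
        # Only descend into the farther ball if it could still hold a closer neighbor.
        if len(best) < k or np.linalg.norm(query - far.center) - far.radius < best[-1][0]:
            best = sorted(best + far.knn(query, k), key=lambda t: t[0])[:k]
        return best

rng = np.random.default_rng(0)
tree = Ball(rng.normal(size=(1000, 8)))
print([dist for dist, _ in tree.knn(rng.normal(size=8), k=3)])
```

Because each ball stores a center and radius, whole subtrees whose lower-bound distance exceeds the current k-th best distance can be skipped, which is what makes the leaf-level neighbor search fast.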
The schema is as follows: - -- **id**: A unique identifier for a piece of art - - Sample Met id: *388395* - - Sample Rijks id: *SK-A-2344* -- **Title**: Art piece title, as written in the museum's database -- **Artist**: Art piece artist, as written in the museum's database -- **Thumbnail_Url**: Location of a JPEG thumbnail of the art piece -- **Image_Url** Location of an image of the art piece hosted on the Met/Rijks website -- **Culture**: Category of culture that the art piece falls under - - Sample culture categories: *latin american*, *egyptian*, etc -- **Classification**: Category of medium that the art piece falls under - - Sample medium categories: *woodwork*, *paintings*, etc -- **Museum_Page**: Link to the work of art on the Met/Rijks website -- **Norm_Features**: Embedding of the art piece image -- **Museum**: Specifies which museum the piece originated from - - -```python -# loads the dataset and the two trained CKNN models for querying by medium and culture -df = spark.read.parquet( - "wasbs://publicwasb@mmlspark.blob.core.windows.net/met_and_rijks.parquet") -display(df.drop("Norm_Features")) - -``` - -#### Define categories to be queried on -We will be using two kNN models: one for culture, and one for medium. The categories for each grouping are defined below. - - -```python -# mediums = ['prints', 'drawings', 'ceramics', 'textiles', 'paintings', "musical instruments","glass", 'accessories', 'photographs', "metalwork", -# "sculptures", "weapons", "stone", "precious", "paper", "woodwork", "leatherwork", "uncategorized"] - -mediums = ['paintings', 'glass', 'ceramics'] - -# cultures = ['african (general)', 'american', 'ancient american', 'ancient asian', 'ancient european', 'ancient middle-eastern', 'asian (general)', -# 'austrian', 'belgian', 'british', 'chinese', 'czech', 'dutch', 'egyptian']#, 'european (general)', 'french', 'german', 'greek', -# 'iranian', 'italian', 'japanese', 'latin american', 'middle eastern', 'roman', 'russian', 'south asian', 'southeast asian', -# 'spanish', 'swiss', 'various'] - -cultures = ['japanese', 'american', 'african (general)'] - -# Uncomment the above for more robust and large scale searches! - -classes = cultures + mediums - -medium_set = set(mediums) -culture_set = set(cultures) -selected_ids = {"AK-RBK-17525-2", "AK-MAK-1204", "AK-RAK-2015-2-9"} - -small_df = df.where(udf(lambda medium, culture, id_val: (medium in medium_set) or ( - culture in culture_set) or (id_val in selected_ids), BooleanType())("Classification", "Culture", "id")) - -small_df.count() - -``` - -### Define and fit ConditionalKNN models -Below, we create ConditionalKNN models for both the medium and culture columns; each model takes in an output column, features column (feature vector), values column (cell values under the output column), and label column (the quality that the respective KNN is conditioned on). - - -```python -medium_cknn = (ConditionalKNN() - .setOutputCol("Matches") - .setFeaturesCol("Norm_Features") - .setValuesCol("Thumbnail_Url") - .setLabelCol("Classification") - .fit(small_df)) -``` - - -```python -culture_cknn = (ConditionalKNN() - .setOutputCol("Matches") - .setFeaturesCol("Norm_Features") - .setValuesCol("Thumbnail_Url") - .setLabelCol("Culture") - .fit(small_df)) - -``` - -#### Define matching and visualizing methods - -After the intial dataset and category setup, we prepare methods that will query and visualize the conditional kNN's results. - -`addMatches()` will create a Dataframe with a handful of matches per category. 
- - -```python -def add_matches(classes, cknn, df): - results = df - for label in classes: - results = (cknn.transform(results.withColumn("conditioner", array(lit(label)))) - .withColumnRenamed("Matches", "Matches_{}".format(label))) - return results -``` - -`plot_urls()` calls `plot_img` to visualize top matches for each category into a grid. - - -```python -def plot_img(axis, url, title): - try: - response = requests.get(url) - img = Image.open(BytesIO(response.content)).convert('RGB') - axis.imshow(img, aspect="equal") - except: - pass - if title is not None: - axis.set_title(title, fontsize=4) - axis.axis("off") - - -def plot_urls(url_arr, titles, filename): - nx, ny = url_arr.shape - - plt.figure(figsize=(nx*5, ny*5), dpi=1600) - fig, axes = plt.subplots(ny, nx) - - # reshape required in the case of 1 image query - if len(axes.shape) == 1: - axes = axes.reshape(1, -1) - - for i in range(nx): - for j in range(ny): - if j == 0: - plot_img(axes[j, i], url_arr[i, j], titles[i]) - else: - plot_img(axes[j, i], url_arr[i, j], None) - - plt.savefig(filename, dpi=1600) # saves the results as a PNG - - display(plt.show()) -``` - -### Putting it all together -Below, we define `test_all()` to take in the data, CKNN models, the art id values to query on, and the file path to save the output visualization to. The medium and culture models were previously trained and loaded. - - -```python -# main method to test a particular dataset with two CKNN models and a set of art IDs, saving the result to filename.png - -def test_all(data, cknn_medium, cknn_culture, test_ids, root): - is_nice_obj = udf(lambda obj: obj in test_ids, BooleanType()) - test_df = data.where(is_nice_obj("id")) - - results_df_medium = add_matches(mediums, cknn_medium, test_df) - results_df_culture = add_matches(cultures, cknn_culture, results_df_medium) - - results = results_df_culture.collect() - - original_urls = [row["Thumbnail_Url"] for row in results] - - culture_urls = [[row["Matches_{}".format( - label)][0]["value"] for row in results] for label in cultures] - culture_url_arr = np.array([original_urls] + culture_urls)[:, :] - plot_urls(culture_url_arr, ["Original"] + - cultures, root + "matches_by_culture.png") - - medium_urls = [[row["Matches_{}".format( - label)][0]["value"] for row in results] for label in mediums] - medium_url_arr = np.array([original_urls] + medium_urls)[:, :] - plot_urls(medium_url_arr, ["Original"] + - mediums, root + "matches_by_medium.png") - - return results_df_culture - -``` - -### Demo -The following cell performs batched queries given desired image IDs and a filename to save the visualization. - - - - - -```python -# sample query -result_df = test_all(small_df, medium_cknn, culture_cknn, - selected_ids, root=".") - -``` diff --git a/website/docs/examples/CyberML - Anomalous Access Detection.md b/website/docs/examples/CyberML - Anomalous Access Detection.md deleted file mode 100644 index 196df99174..0000000000 --- a/website/docs/examples/CyberML - Anomalous Access Detection.md +++ /dev/null @@ -1,329 +0,0 @@ ---- -title: CyberML - Anomalous Access Detection -hide_title: true -status: stable ---- -# CyberML - Anomalous Access Detection - -Here we demonstrate a novel CyberML model which can learn user access patterns and then automatically detect anomalous user access based on learned behavior. 
-The model internally uses Collaborative Filtering for Implicit Feedback as published here: http://yifanhu.net/PUB/cf.pdf -and is based on Apache Spark's implementation of this: https://spark.apache.org/docs/2.2.0/ml-collaborative-filtering.html. - -This notebook demonstrates a usage example of Anomalous Resource Access model. -All the model requires is a dataset in which there are 'users' which access 'resources'. -The model is based on Collaborative Filtering and it uses Machine Learning to learn access patterns of users and resources. -When a user accesses a resource which is outside of the user's learned profile then this access recieves a high anomaly score. - -In this notebook we provide a usage example and a synthetic dataset in which there are 3 departments: -(1) Finance, (2) HR and (3) Engineering. -In the training data users access only a subset of resources from their own departments. -To evaluate the model we use two datasets. -The first contains access patterns unseen during training in which users access resources within their departments (again, resources they didn't access during training but within their department). -The latter contains users accessing resources from outside their department. -We then use the model to assign anomaly scores expecting that the first get low anomaly scores and the latter recieve high anomaly scores. -This is what this example demonstrates. - -Note: the data does NOT contain information about departments, this information is implictly learned by the model by analyzing the access patterns. - -# Create an Azure Databricks cluster and install the following libs - -1. In Cluster Libraries install from library source Maven: -Coordinates: com.microsoft.azure:synapseml:0.9.2 -Repository: https://mmlspark.azureedge.net/maven - -2. 
In Cluster Libraries install from PyPI the library called plotly - -# Setup & Initialization - - -``` -# this is used to produce the synthetic dataset for this test -from synapse.ml.cyber.dataset import DataFactory - -# the access anomalies model generator -from synapse.ml.cyber.anomaly.collaborative_filtering import AccessAnomaly - -from pyspark.sql import functions as f, types as t -``` - - -``` -spark.sparkContext.setCheckpointDir('dbfs:/checkpoint_path/') -``` - -# Loadup datasets - - -``` -factory = DataFactory( - num_hr_users = 25, - num_hr_resources = 50, - num_fin_users = 35, - num_fin_resources = 75, - num_eng_users = 15, - num_eng_resources = 25, - single_component = True -) - -training_pdf = factory.create_clustered_training_data(ratio=0.4) - -# a tenant id is used when independant datasets originate from different tenants, in this example we set all tenants-ids to the same value -training_df = spark.createDataFrame(training_pdf).withColumn('tenant_id', f.lit(0)) -ingroup_df = spark.createDataFrame(factory.create_clustered_intra_test_data(training_pdf)).withColumn('tenant_id', f.lit(0)) -outgroup_df = spark.createDataFrame(factory.create_clustered_inter_test_data()).withColumn('tenant_id', f.lit(0)) -``` - - -``` -training_df.show() -``` - - -``` -print(training_df.count()) -print(ingroup_df.count()) -print(outgroup_df.count()) -``` - -# Model setup & training - - -``` -access_anomaly = AccessAnomaly( - tenantCol='tenant_id', - userCol='user', - resCol='res', - likelihoodCol='likelihood', - maxIter=1000 -) -``` - - -``` -model = access_anomaly.fit(training_df) -``` - -# Apply model & show result stats - - -``` -ingroup_scored_df = model.transform(ingroup_df) -``` - - -``` -ingroup_scored_df.agg( - f.min('anomaly_score').alias('min_anomaly_score'), - f.max('anomaly_score').alias('max_anomaly_score'), - f.mean('anomaly_score').alias('mean_anomaly_score'), - f.stddev('anomaly_score').alias('stddev_anomaly_score'), -).show() -``` - - -``` -outgroup_scored_df = model.transform(outgroup_df) -``` - - -``` -outgroup_scored_df.agg( - f.min('anomaly_score').alias('min_anomaly_score'), - f.max('anomaly_score').alias('max_anomaly_score'), - f.mean('anomaly_score').alias('mean_anomaly_score'), - f.stddev('anomaly_score').alias('stddev_anomaly_score'), -).show() -``` - -# Examine results - - -``` -# -# Select a subset of results to send to Log Analytics -# - -full_res_df = outgroup_scored_df.orderBy(f.desc('anomaly_score')).cache() - -from pyspark.sql.window import Window - -w = Window.partitionBy( - 'tenant_id', - 'user', - 'res' - ).orderBy( - f.desc('anomaly_score') - ) - -# select values above threshold -results_above_threshold = full_res_df.filter(full_res_df.anomaly_score > 1.0) - -# get distinct resource/user and corresponding timestamp and highest score -results_to_la = results_above_threshold.withColumn( - 'index', f.row_number().over(w) - ).orderBy( - f.desc('anomaly_score') - ).select( - 'tenant_id', - f.col('user'), - f.col('res'), - 'anomaly_score' - ).where( - 'index == 1' - ).limit(100).cache() - -# add a fake timestamp to the results -results_to_la = results_to_la.withColumn('timestamp', f.current_timestamp()) - -display(results_to_la) -``` - -# Display all resource accesses by users with highest anomalous score - - -``` -from plotly import __version__ -from plotly.offline import download_plotlyjs, init_notebook_mode, plot, iplot, offline - -import numpy as np -import pandas as pd - -print (__version__) # requires version >= 1.9.0 - -# run plotly in offline mode 
-offline.init_notebook_mode() -``` - - -``` -#Find all server accesses of users with high predicted scores -# For display, limit to top 25 results -results_to_display = results_to_la.orderBy( - f.desc('anomaly_score') - ).limit(25).cache() -interesting_records = full_res_df.join(results_to_display, ['user'], 'left_semi') -non_anomalous_records = interesting_records.join(results_to_display, ['user', 'res'], 'left_anti') - -top_non_anomalous_records = non_anomalous_records.groupBy( - 'tenant_id', - 'user', - 'res' - ).agg( - f.count('*').alias('count'), - ).select( - f.col('tenant_id'), - f.col('user'), - f.col('res'), - 'count' - ) - -#pick only a subset of non-anomalous record for UI -w = Window.partitionBy( - 'tenant_id', - 'user', - ).orderBy( - f.desc('count') - ) - -# pick top non-anomalous set -top_non_anomalous_accesses = top_non_anomalous_records.withColumn( - 'index', f.row_number().over(w) - ).orderBy( - f.desc('count') - ).select( - 'tenant_id', - f.col('user'), - f.col('res'), - f.col('count') - ).where( - 'index in (1,2,3,4,5)' - ).limit(25) - -# add back anomalous record -fileShare_accesses = (top_non_anomalous_accesses - .select('user', 'res', 'count') - .union(results_to_display.select('user', 'res', f.lit(1).alias('count'))).cache()) -``` - - -``` -# get unique users and file shares -high_scores_df = fileShare_accesses.toPandas() -unique_arr = np.append(high_scores_df.user.unique(), high_scores_df.res.unique()) - -unique_df = pd.DataFrame(data = unique_arr, columns = ['name']) -unique_df['index'] = range(0, len(unique_df.index)) - -# create index for source & target and color for the normal accesses -normal_line_color = 'rgba(211, 211, 211, 0.8)' -anomolous_color = 'red' -x = pd.merge(high_scores_df, unique_df, how='left', left_on='user', right_on='name').drop(['name'], axis=1).rename(columns={'index' : 'userIndex'}) -all_access_index_df = pd.merge(x, unique_df, how='left', left_on='res', right_on='name').drop(['name'], axis=1).rename(columns={'index' : 'resIndex'}) -all_access_index_df['color'] = normal_line_color - -# results_to_display index, color and -y = results_to_display.toPandas().drop(['tenant_id', 'timestamp', 'anomaly_score'], axis=1) -y = pd.merge(y, unique_df, how='left', left_on='user', right_on='name').drop(['name'], axis=1).rename(columns={'index' : 'userIndex'}) -high_scores_index_df = pd.merge(y, unique_df, how='left', left_on='res', right_on='name').drop(['name'], axis=1).rename(columns={'index' : 'resIndex'}) -high_scores_index_df['count'] = 1 -high_scores_index_df['color'] = anomolous_color - -# substract 1 for the red entries in all_access df -hsi_df = high_scores_index_df[['user','res', 'count']].rename(columns={'count' : 'hsiCount'}) -all_access_updated_count_df = pd.merge(all_access_index_df, hsi_df, how='left', left_on=['user', 'res'], right_on=['user', 'res']) -all_access_updated_count_df['count'] = np.where(all_access_updated_count_df['hsiCount']==1, all_access_updated_count_df['count'] - 1, all_access_updated_count_df['count']) -all_access_updated_count_df = all_access_updated_count_df.loc[all_access_updated_count_df['count'] > 0] -all_access_updated_count_df = all_access_updated_count_df[['user','res', 'count', 'userIndex', 'resIndex', 'color']] - -# combine the two tables -frames = [all_access_updated_count_df, high_scores_index_df] -display_df = pd.concat(frames, sort=True) -# display_df.head() -``` - - -``` -data_trace = dict( - type='sankey', - domain = dict( - x = [0,1], - y = [0,1] - ), - orientation = "h", - valueformat = ".0f", - node 
= dict( - pad = 10, - thickness = 30, - line = dict( - color = "black", - width = 0 - ), - label = unique_df['name'].dropna(axis=0, how='any') - ), - link = dict( - source = display_df['userIndex'].dropna(axis=0, how='any'), - target = display_df['resIndex'].dropna(axis=0, how='any'), - value = display_df['count'].dropna(axis=0, how='any'), - color = display_df['color'].dropna(axis=0, how='any'), - ) -) - -layout = dict( - title = "All resources accessed by users with highest anomalous scores", - height = 772, - font = dict( - size = 10 - ), -) - -fig = dict(data=[data_trace], layout=layout) - -p = plot(fig, output_type='div') - -displayHTML(p) -``` - - -``` - -``` diff --git a/website/docs/examples/HyperParameterTuning - Fighting Breast Cancer.md b/website/docs/examples/HyperParameterTuning - Fighting Breast Cancer.md deleted file mode 100644 index 5ffbf45d15..0000000000 --- a/website/docs/examples/HyperParameterTuning - Fighting Breast Cancer.md +++ /dev/null @@ -1,88 +0,0 @@ ---- -title: HyperParameterTuning - Fighting Breast Cancer -hide_title: true -status: stable ---- -## HyperParameterTuning - Fighting Breast Cancer - -We can do distributed randomized grid search hyperparameter tuning with SynapseML. - -First, we import the packages - - -```python -import pandas as pd - -``` - -Now let's read the data and split it to tuning and test sets: - - -```python -data = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/BreastCancer.parquet") -tune, test = data.randomSplit([0.80, 0.20]) -tune.limit(10).toPandas() -``` - -Next, define the models that wil be tuned: - - -```python -from synapse.ml.automl import TuneHyperparameters -from synapse.ml.train import TrainClassifier -from pyspark.ml.classification import LogisticRegression, RandomForestClassifier, GBTClassifier -logReg = LogisticRegression() -randForest = RandomForestClassifier() -gbt = GBTClassifier() -smlmodels = [logReg, randForest, gbt] -mmlmodels = [TrainClassifier(model=model, labelCol="Label") for model in smlmodels] -``` - -We can specify the hyperparameters using the HyperparamBuilder. -We can add either DiscreteHyperParam or RangeHyperParam hyperparameters. -TuneHyperparameters will randomly choose values from a uniform distribution. - - -```python -from synapse.ml.automl import * - -paramBuilder = \ - HyperparamBuilder() \ - .addHyperparam(logReg, logReg.regParam, RangeHyperParam(0.1, 0.3)) \ - .addHyperparam(randForest, randForest.numTrees, DiscreteHyperParam([5,10])) \ - .addHyperparam(randForest, randForest.maxDepth, DiscreteHyperParam([3,5])) \ - .addHyperparam(gbt, gbt.maxBins, RangeHyperParam(8,16)) \ - .addHyperparam(gbt, gbt.maxDepth, DiscreteHyperParam([3,5])) -searchSpace = paramBuilder.build() -# The search space is a list of params to tuples of estimator and hyperparam -print(searchSpace) -randomSpace = RandomSpace(searchSpace) -``` - -Next, run TuneHyperparameters to get the best model. - - -```python -bestModel = TuneHyperparameters( - evaluationMetric="accuracy", models=mmlmodels, numFolds=2, - numRuns=len(mmlmodels) * 2, parallelism=1, - paramSpace=randomSpace.space(), seed=0).fit(tune) -``` - -We can view the best model's parameters and retrieve the underlying best model pipeline - - -```python -print(bestModel.getBestModelInfo()) -print(bestModel.getBestModel()) -``` - -We can score against the test set and view metrics. 
- - -```python -from synapse.ml.train import ComputeModelStatistics -prediction = bestModel.transform(test) -metrics = ComputeModelStatistics().transform(prediction) -metrics.limit(10).toPandas() -``` diff --git a/website/docs/examples/OpenCV - Pipeline Image Transformations.md b/website/docs/examples/OpenCV - Pipeline Image Transformations.md deleted file mode 100644 index 7a4a681785..0000000000 --- a/website/docs/examples/OpenCV - Pipeline Image Transformations.md +++ /dev/null @@ -1,147 +0,0 @@ ---- -title: OpenCV - Pipeline Image Transformations -hide_title: true -status: stable ---- -## OpenCV - Pipeline Image Transformations - -This example shows how to manipulate the collection of images. -First, the images are downloaded to the local directory. -Second, they are copied to your cluster's attached HDFS. - -The images are loaded from the directory (for fast prototyping, consider loading a fraction of -images). Inside the dataframe, each image is a single field in the image column. The image has -sub-fields (path, height, width, OpenCV type and OpenCV bytes). - - -```python -import os -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - from pyspark.sql import SparkSession - spark = SparkSession.builder.getOrCreate() - -import synapse.ml -import numpy as np -from synapse.ml.opencv import toNDArray -from synapse.ml.io import * - -imageDir = "wasbs://publicwasb@mmlspark.blob.core.windows.net/sampleImages" -images = spark.read.image().load(imageDir).cache() -images.printSchema() -print(images.count()) -``` - -We can also alternatively stream the images with a similiar api. -Check the [Structured Streaming Programming Guide](https://spark.apache.org/docs/latest/structured-streaming-programming-guide.html) -for more details on streaming. - - -```python -imageStream = spark.readStream.image().load(imageDir) -query = imageStream.select("image.height").writeStream.format("memory").queryName("heights").start() -print("Streaming query activity: {}".format(query.isActive)) -``` - -Wait a few seconds and then try querying for the images below. -Note that when streaming a directory of images that already exists it will -consume all images in a single batch. If one were to move images into the -directory, the streaming engine would pick up on them and send them as -another batch. - - -```python -heights = spark.sql("select * from heights") -print("Streamed {} heights".format(heights.count())) -``` - -After we have streamed the images we can stop the query: - - -```python -from py4j.protocol import Py4JJavaError -try: - query.stop() -except Py4JJavaError as e: - print(e) -``` - -When collected from the *DataFrame*, the image data are stored in a *Row*, which is Spark's way -to represent structures (in the current example, each dataframe row has a single Image, which -itself is a Row). It is possible to address image fields by name and use `toNDArray()` helper -function to convert the image into numpy array for further manipulations. 
- - -```python -from PIL import Image - -data = images.take(3) # take first three rows of the dataframe -im = data[2][0] # the image is in the first column of a given row - -print("image type: {}, number of fields: {}".format(type(im), len(im))) -print("image path: {}".format(im.origin)) -print("height: {}, width: {}, OpenCV type: {}".format(im.height, im.width, im.mode)) - -arr = toNDArray(im) # convert to numpy array -Image.fromarray(arr, "RGB") # display the image inside notebook -print(images.count()) -``` - -Use `ImageTransformer` for the basic image manipulation: resizing, cropping, etc. -Internally, operations are pipelined and backed by OpenCV implementation. - - -```python -from synapse.ml.opencv import ImageTransformer - -tr = (ImageTransformer() # images are resized and then cropped - .setOutputCol("transformed") - .resize(size=(200, 200)) - .crop(0, 0, height = 180, width = 180) ) - -small = tr.transform(images).select("transformed") - -im = small.take(3)[2][0] # take third image -Image.fromarray(toNDArray(im), "RGB") # display the image inside notebook -``` - -For the advanced image manipulations, use Spark UDFs. -The SynapseML package provides conversion function between *Spark Row* and -*ndarray* image representations. - - -```python -from pyspark.sql.functions import udf -from synapse.ml.opencv import ImageSchema, toNDArray, toImage - -def u(row): - array = toNDArray(row) # convert Image to numpy ndarray[height, width, 3] - array[:,:,2] = 0 - return toImage(array) # numpy array back to Spark Row structure - -noBlueUDF = udf(u,ImageSchema) - -noblue = small.withColumn("noblue", noBlueUDF(small["transformed"])).select("noblue") - -im = noblue.take(3)[2][0] # take second image -Image.fromarray(toNDArray(im), "RGB") # display the image inside notebook -``` - -Images could be unrolled into the dense 1D vectors suitable for CNTK evaluation. - - -```python -from synapse.ml.image import UnrollImage - -unroller = UnrollImage().setInputCol("noblue").setOutputCol("unrolled") - -unrolled = unroller.transform(noblue).select("unrolled") - -vector = unrolled.take(1)[0][0] -print(type(vector)) -len(vector.toArray()) -``` - - -```python - -``` diff --git a/website/docs/examples/about.md b/website/docs/examples/about.md index 6993264ffb..5247c91adc 100644 --- a/website/docs/examples/about.md +++ b/website/docs/examples/about.md @@ -10,7 +10,7 @@ sidebar_label: About - Fit a LightGBM classification or regression model on a biochemical dataset ([LightGBM Overview]), to learn more check out the [LightGBM documentation page](../../features/lightgbm/about). 
-- Deploy a deep network as a distributed web service with [MMLSpark +- Deploy a deep network as a distributed web service with [SynapseML Serving](../../features/spark_serving/about) - Use web services in Spark with [HTTP on Apache Spark](../../features/http/about) - Use Bi-directional LSTMs from Keras for medical entity extraction @@ -50,7 +50,7 @@ sidebar_label: About [Interpretability - Tabular SHAP Explainer]: ../responsible_ai/Interpretability%20-%20Tabular%20SHAP%20explainer "Interpretability - Tabular SHAP Explainer" -[Interpretability - Image Explainers]: ../responsible_ai/Interpretability%20-%20Image%20Explainers "Interpretability - Image Explainers" +[Interpretability - Image Explainers]: ../../features/responsible_ai/Interpretability%20-%20Image%20Explainers "Interpretability - Image Explainers" [Interpretability - Text Explainers]: ../responsible_ai/Interpretability%20-%20Text%20Explainers "Interpretability - Text Explainers" diff --git a/website/docs/examples/classification/Classification - Adult Census with Vowpal Wabbit.md b/website/docs/examples/classification/Classification - Adult Census with Vowpal Wabbit.md deleted file mode 100644 index 911d614a0a..0000000000 --- a/website/docs/examples/classification/Classification - Adult Census with Vowpal Wabbit.md +++ /dev/null @@ -1,83 +0,0 @@ ---- -title: Classification - Adult Census with Vowpal Wabbit -hide_title: true -status: stable ---- -# Classification - Adult Census using Vowpal Wabbit in SynapseML - -In this example, we predict incomes from the *Adult Census* dataset using Vowpal Wabbit (VW) classifier in SynapseML. -First, we read the data and split it into train and test sets as in this [example](https://github.com/Microsoft/SynapseML/blob/master/notebooks/Classification%20-%20Adult%20Census.ipynb -). - - -```python -import os -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - from pyspark.sql import SparkSession - spark = SparkSession.builder.getOrCreate() -``` - - -```python -data = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/AdultCensusIncome.parquet") -data = data.select(["education", "marital-status", "hours-per-week", "income"]) -train, test = data.randomSplit([0.75, 0.25], seed=123) -train.limit(10).toPandas() -``` - -Next, we define a pipeline that includes feature engineering and training of a VW classifier. We use a featurizer provided by VW that hashes the feature names. -Note that VW expects classification labels being -1 or 1. Thus, the income category is mapped to this space before feeding training data into the pipeline. - - -```python -from pyspark.sql.functions import when, col -from pyspark.ml import Pipeline -from synapse.ml.vw import VowpalWabbitFeaturizer, VowpalWabbitClassifier - -# Define classification label -train = train.withColumn("label", when(col("income").contains("<"), 0.0).otherwise(1.0)).repartition(1).cache() -print(train.count()) - -# Specify featurizer -vw_featurizer = VowpalWabbitFeaturizer(inputCols=["education", "marital-status", "hours-per-week"], - outputCol="features") - -# Define VW classification model -args = "--loss_function=logistic --quiet --holdout_off" -vw_model = VowpalWabbitClassifier(featuresCol="features", - labelCol="label", - args=args, - numPasses=10) - -# Create a pipeline -vw_pipeline = Pipeline(stages=[vw_featurizer, vw_model]) -``` - -Then, we are ready to train the model by fitting the pipeline with the training data. 
- - -```python -# Train the model -vw_trained = vw_pipeline.fit(train) -``` - -After the model is trained, we apply it to predict the income of each sample in the test set. - - -```python -# Making predictions -test = test.withColumn("label", when(col("income").contains("<"), 0.0).otherwise(1.0)) -prediction = vw_trained.transform(test) -prediction.limit(10).toPandas() -``` - -Finally, we evaluate the model performance using `ComputeModelStatistics` function which will compute confusion matrix, accuracy, precision, recall, and AUC by default for classificaiton models. - - -```python -from synapse.ml.train import ComputeModelStatistics -metrics = ComputeModelStatistics(evaluationMetric="classification", - labelCol="label", - scoredLabelsCol="prediction").transform(prediction) -metrics.toPandas() -``` diff --git a/website/docs/examples/classification/Classification - Adult Census.md b/website/docs/examples/classification/Classification - Adult Census.md deleted file mode 100644 index c65b2ad573..0000000000 --- a/website/docs/examples/classification/Classification - Adult Census.md +++ /dev/null @@ -1,60 +0,0 @@ ---- -title: Classification - Adult Census -hide_title: true -status: stable ---- -## Classification - Adult Census - -In this example, we try to predict incomes from the *Adult Census* dataset. - -First, we import the packages (use `help(synapse)` to view contents), - - -```python -import os -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - from pyspark.sql import SparkSession - spark = SparkSession.builder.getOrCreate() -``` - - -```python -import numpy as np -import pandas as pd -``` - -Now let's read the data and split it to train and test sets: - - -```python -data = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/AdultCensusIncome.parquet") -data = data.select(["education", "marital-status", "hours-per-week", "income"]) -train, test = data.randomSplit([0.75, 0.25], seed=123) -train.limit(10).toPandas() -``` - -`TrainClassifier` can be used to initialize and fit a model, it wraps SparkML classifiers. -You can use `help(synapse.ml.train.TrainClassifier)` to view the different parameters. - -Note that it implicitly converts the data into the format expected by the algorithm: tokenize -and hash strings, one-hot encodes categorical variables, assembles the features into a vector -and so on. The parameter `numFeatures` controls the number of hashed features. - - -```python -from synapse.ml.train import TrainClassifier - -from pyspark.ml.classification import LogisticRegression - -model = TrainClassifier(model=LogisticRegression(), labelCol="income", numFeatures=256).fit(train) -``` - -Finally, we save the model so it can be used in a scoring program. - - -```python -if os.environ.get("AZURE_SERVICE", None) != "Microsoft.ProjectArcadia": - model.write().overwrite().save("dbfs:/AdultCensus.mml") -else: - model.write().overwrite().save("abfss://synapse@mmlsparkeuap.dfs.core.windows.net/models/AdultCensus.mml") -``` diff --git a/website/docs/examples/classification/Classification - Before and After SynapseML.md b/website/docs/examples/classification/Classification - Before and After SynapseML.md deleted file mode 100644 index db9ba23bb2..0000000000 --- a/website/docs/examples/classification/Classification - Before and After SynapseML.md +++ /dev/null @@ -1,205 +0,0 @@ ---- -title: Classification - Before and After SynapseML -hide_title: true -status: stable ---- -## Classification - Before and After SynapseML - -### 1. Introduction - -


-
-In this tutorial, we perform the same classification task in two
-different ways: once using plain **`pyspark`** and once using the
-**`synapseml`** library. The two methods yield the same performance,
-but one of the two libraries is drastically simpler to use and iterate
-on (can you guess which one?).
-
-The task is simple: Predict whether a user's review of a book sold on
-Amazon is good (rating > 3) or bad based on the text of the review. We
-accomplish this by training LogisticRegression learners with different
-hyperparameters and choosing the best model.
-
-
-```python
-import os
-if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia":
-    from pyspark.sql import SparkSession
-    spark = SparkSession.builder.getOrCreate()
-```
-
-### 2. Read the data
-
-We download and read in the data. We show a sample below:
-
-
-```python
-rawData = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/BookReviewsFromAmazon10K.parquet")
-rawData.show(5)
-```
-
-### 3. Extract more features and process data
-
-Real data, however, is more complex than the above dataset. It is common
-for a dataset to have features of multiple types: text, numeric,
-categorical. To illustrate how difficult it is to work with these
-datasets, we add two numerical features to the dataset: the **word
-count** of the review and the **mean word length**.
-
-
-```python
-from pyspark.sql.functions import udf
-from pyspark.sql.types import *
-def wordCount(s):
-    return len(s.split())
-def wordLength(s):
-    import numpy as np
-    ss = [len(w) for w in s.split()]
-    return round(float(np.mean(ss)), 2)
-wordLengthUDF = udf(wordLength, DoubleType())
-wordCountUDF = udf(wordCount, IntegerType())
-```
-
-
-```python
-from synapse.ml.stages import UDFTransformer
-wordLength = "wordLength"
-wordCount = "wordCount"
-wordLengthTransformer = UDFTransformer(inputCol="text", outputCol=wordLength, udf=wordLengthUDF)
-wordCountTransformer = UDFTransformer(inputCol="text", outputCol=wordCount, udf=wordCountUDF)
-
-```
-
-
-```python
-from pyspark.ml import Pipeline
-data = Pipeline(stages=[wordLengthTransformer, wordCountTransformer]) \
-       .fit(rawData).transform(rawData) \
-       .withColumn("label", rawData["rating"] > 3).drop("rating")
-```
-
-
-```python
-data.show(5)
-```
-
-### 4a. Classify using pyspark
-
-To choose the best LogisticRegression classifier using the `pyspark`
-library, we need to *explicitly* perform the following steps:
-
-1. Process the features:
-   * Tokenize the text column
-   * Hash the tokenized column into a vector using the hashing trick
-   * Merge the numeric features with the vector in the step above
-2. Process the label column: cast it into the proper type.
-3. Train multiple LogisticRegression algorithms on the `train` dataset
-   with different hyperparameters
-4. Compute the area under the ROC curve for each of the trained models
-   and select the model with the highest metric as computed on the
-   `test` dataset
-5. Evaluate the best model on the `validation` set
-
-As you can see below, there is a lot of work involved and a lot of
-steps where something can go wrong!
- - -```python -from pyspark.ml.feature import Tokenizer, HashingTF -from pyspark.ml.feature import VectorAssembler - -# Featurize text column -tokenizer = Tokenizer(inputCol="text", outputCol="tokenizedText") -numFeatures = 10000 -hashingScheme = HashingTF(inputCol="tokenizedText", - outputCol="TextFeatures", - numFeatures=numFeatures) -tokenizedData = tokenizer.transform(data) -featurizedData = hashingScheme.transform(tokenizedData) - -# Merge text and numeric features in one feature column -featureColumnsArray = ["TextFeatures", "wordCount", "wordLength"] -assembler = VectorAssembler( - inputCols = featureColumnsArray, - outputCol="features") -assembledData = assembler.transform(featurizedData) - -# Select only columns of interest -# Convert rating column from boolean to int -processedData = assembledData \ - .select("label", "features") \ - .withColumn("label", assembledData.label.cast(IntegerType())) -``` - - -```python -from pyspark.ml.evaluation import BinaryClassificationEvaluator -from pyspark.ml.classification import LogisticRegression - -# Prepare data for learning -train, test, validation = processedData.randomSplit([0.60, 0.20, 0.20], seed=123) - -# Train the models on the 'train' data -lrHyperParams = [0.05, 0.1, 0.2, 0.4] -logisticRegressions = [LogisticRegression(regParam = hyperParam) - for hyperParam in lrHyperParams] -evaluator = BinaryClassificationEvaluator(rawPredictionCol="rawPrediction", - metricName="areaUnderROC") -metrics = [] -models = [] - -# Select the best model -for learner in logisticRegressions: - model = learner.fit(train) - models.append(model) - scoredData = model.transform(test) - metrics.append(evaluator.evaluate(scoredData)) -bestMetric = max(metrics) -bestModel = models[metrics.index(bestMetric)] - -# Get AUC on the validation dataset -scoredVal = bestModel.transform(validation) -print(evaluator.evaluate(scoredVal)) -``` - -### 4b. Classify using synapseml - -Life is a lot simpler when using `synapseml`! - -1. The **`TrainClassifier`** Estimator featurizes the data internally, - as long as the columns selected in the `train`, `test`, `validation` - dataset represent the features - -2. The **`FindBestModel`** Estimator find the best model from a pool of - trained models by find the model which performs best on the `test` - dataset given the specified metric - -3. 
The **`ComputeModelStatistics`** Transformer computes the different
-   metrics on a scored dataset (in our case, the `validation` dataset)
-   at the same time
-
-
-```python
-from synapse.ml.train import TrainClassifier, ComputeModelStatistics
-from synapse.ml.automl import FindBestModel
-
-# Prepare data for learning
-train, test, validation = data.randomSplit([0.60, 0.20, 0.20], seed=123)
-
-# Train the models on the 'train' data
-lrHyperParams = [0.05, 0.1, 0.2, 0.4]
-logisticRegressions = [LogisticRegression(regParam = hyperParam)
-                       for hyperParam in lrHyperParams]
-lrmodels = [TrainClassifier(model=lrm, labelCol="label", numFeatures=10000).fit(train)
-            for lrm in logisticRegressions]
-
-# Select the best model
-bestModel = FindBestModel(evaluationMetric="AUC", models=lrmodels).fit(test)
-
-
-# Get AUC on the validation dataset
-predictions = bestModel.transform(validation)
-metrics = ComputeModelStatistics().transform(predictions)
-print("Best model's AUC on validation set = "
-      + "{0:.2f}%".format(metrics.first()["AUC"] * 100))
-```
diff --git a/website/docs/examples/classification/Classification - Twitter Sentiment with Vowpal Wabbit.md b/website/docs/examples/classification/Classification - Twitter Sentiment with Vowpal Wabbit.md
deleted file mode 100644
index 109730a59c..0000000000
--- a/website/docs/examples/classification/Classification - Twitter Sentiment with Vowpal Wabbit.md
+++ /dev/null
@@ -1,211 +0,0 @@
----
-title: Classification - Twitter Sentiment with Vowpal Wabbit
-hide_title: true
-status: stable
----
-# Twitter Sentiment Classification using Vowpal Wabbit in SynapseML
-
-In this example, we show how to build a sentiment classification model using Vowpal Wabbit (VW) in SynapseML. The data set we use to train and evaluate the model is [Sentiment140](http://help.sentiment140.com/for-students/?source=post_page---------------------------) twitter data. First, we import a few packages that we need.
-
-
-```python
-import os
-import re
-import urllib.request
-import numpy as np
-import pandas as pd
-from zipfile import ZipFile
-from bs4 import BeautifulSoup
-from pyspark.sql.functions import udf, rand, when, col
-from pyspark.sql.types import StructType, StructField, DoubleType, StringType
-from pyspark.ml import Pipeline
-from pyspark.ml.feature import CountVectorizer, RegexTokenizer
-from synapse.ml.vw import VowpalWabbitClassifier
-from synapse.ml.train import ComputeModelStatistics
-from pyspark.mllib.evaluation import BinaryClassificationMetrics
-import matplotlib.pyplot as plt
-```
-
-
-```python
-if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia":
-    from pyspark.sql import SparkSession
-    spark = SparkSession.builder.getOrCreate()
-```
-
-
-```python
-# URL to download the sentiment140 dataset and data file names
-DATA_URL = "http://cs.stanford.edu/people/alecmgo/trainingandtestdata.zip"
-TRAIN_FILENAME = "training.1600000.processed.noemoticon.csv"
-TEST_FILENAME = "testdata.manual.2009.06.14.csv"
-# Folder for storing the downloaded data
-DATA_FOLDER = "data"
-# Data column names
-COL_NAMES = ["label", "id", "date", "query_string", "user", "text"]
-# Text encoding type of the data
-ENCODING = "iso-8859-1"
-```
-
-## Data Preparation
-
-We use [Sentiment140](http://help.sentiment140.com/for-students/?source=post_page---------------------------) twitter data, which originated from a Stanford research project, to train and evaluate a VW classification model on Spark.
The same dataset has been used in a previous [Azure Machine Learning sample](https://github.com/Azure-Samples/MachineLearningSamples-TwitterSentimentPrediction) on twitter sentiment prediction. Before using the data to build the classification model, we first download and clean up the data. - - -```python -def download_data(url, data_folder=DATA_FOLDER, filename="downloaded_data.zip"): - """Download and extract data from url""" - - data_dir = "./" + DATA_FOLDER - if not os.path.exists(data_dir): os.makedirs(data_dir) - downloaded_filepath = os.path.join(data_dir, filename) - print("Downloading data...") - urllib.request.urlretrieve(url, downloaded_filepath) - print("Extracting data...") - zipfile = ZipFile(downloaded_filepath) - zipfile.extractall(data_dir) - zipfile.close() - print("Finished data downloading and extraction.") - -download_data(DATA_URL) -``` - -Let's read the training data into a Spark DataFrame. - - -```python -df_train = pd.read_csv(os.path.join(".", DATA_FOLDER, TRAIN_FILENAME), - header=None, names=COL_NAMES, encoding=ENCODING) -df_train = spark.createDataFrame(df_train, verifySchema=False) -``` - -We can take a look at the training data and check how many samples it has. We should see that there are 1.6 million samples in the training data. There are 6 fields in the training data: -* label: the sentiment of the tweet (0.0 = negative, 2.0 = neutral, 4.0 = positive) -* id: the id of the tweet -* date: the date of the tweet -* query_string: The query used to extract the data. If there is no query, then this value is NO_QUERY. -* user: the user that tweeted -* text: the text of the tweet - - -```python -df_train.limit(10).toPandas() -``` - - -```python -print("Number of training samples: ", df_train.count()) -``` - -Before training the model, we randomly permute the data to mix negative and positive samples. This is helpful for properly training online learning algorithms like VW. To speed up model training, we use a subset of the data to train the model. If training with the full training set, typically you will see better performance of the model on the test set. - - -```python -df_train = df_train.orderBy(rand()) \ - .limit(100000) \ - .withColumn("label", when(col("label") > 0, 1.0).otherwise(0.0)) \ - .select(["label", "text"]) -``` - -## VW SynapseML Training - -Now we are ready to define a pipeline which consists of feture engineering steps and the VW model. - - -```python -# Specify featurizers -tokenizer = RegexTokenizer(inputCol="text", - outputCol="words") - -count_vectorizer = CountVectorizer(inputCol="words", - outputCol="features") - -# Define VW classification model -args = "--loss_function=logistic --quiet --holdout_off" -vw_model = VowpalWabbitClassifier(featuresCol="features", - labelCol="label", - args=args, - numPasses=10) - -# Create a pipeline -vw_pipeline = Pipeline(stages=[tokenizer, count_vectorizer, vw_model]) -``` - -With the prepared training data, we can fit the model pipeline as follows. - - -```python -vw_trained = vw_pipeline.fit(df_train) -``` - -## Model Performance Evaluation - -After training the model, we evaluate the performance of the model using the test set which is manually labeled. 
- - -```python -df_test = pd.read_csv(os.path.join(".", DATA_FOLDER, TEST_FILENAME), - header=None, names=COL_NAMES, encoding=ENCODING) -df_test = spark.createDataFrame(df_test, verifySchema=False) -``` - -We only use positive and negative tweets in the test set to evaluate the model, since our model is a binary classification model trained with only positive and negative tweets. - - -```python -print("Number of test samples before filtering: ", df_test.count()) -df_test = df_test.filter(col("label") != 2.0) \ - .withColumn("label", when(col("label") > 0, 1.0).otherwise(0.0)) \ - .select(["label", "text"]) -print("Number of test samples after filtering: ", df_test.count()) -``` - - -```python -# Make predictions -predictions = vw_trained.transform(df_test) -predictions.limit(10).toPandas() -``` - - -```python -# Compute model performance metrics -metrics = ComputeModelStatistics(evaluationMetric="classification", - labelCol="label", - scoredLabelsCol="prediction").transform(predictions) -metrics.toPandas() -``` - - -```python -# Utility class for plotting ROC curve (https://stackoverflow.com/questions/52847408/pyspark-extract-roc-curve) -class CurveMetrics(BinaryClassificationMetrics): - def __init__(self, *args): - super(CurveMetrics, self).__init__(*args) - - def get_curve(self, method): - rdd = getattr(self._java_model, method)().toJavaRDD() - points = [] - for row in rdd.collect(): - points += [(float(row._1()), float(row._2()))] - return points - -preds = predictions.select("label", "probability") \ - .rdd.map(lambda row: (float(row["probability"][1]), float(row["label"]))) -roc_points = CurveMetrics(preds).get_curve("roc") - -# Plot ROC curve -fig = plt.figure() -x_val = [x[0] for x in roc_points] -y_val = [x[1] for x in roc_points] -plt.title("ROC curve on test set") -plt.xlabel("False positive rate") -plt.ylabel("True positive rate") -plt.plot(x_val, y_val) -# Use display() if you're on Azure Databricks or you can do plt.show() -plt.show() -``` - -You should see an ROC curve like the following after the above cell is executed. 
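For a scalar summary to go alongside the plotted curve, the same `BinaryClassificationMetrics` machinery used above exposes the areas under the ROC and precision-recall curves directly. A minimal sketch, assuming the `preds` RDD of (probability, label) pairs defined in the plotting cell:

```python
from pyspark.mllib.evaluation import BinaryClassificationMetrics

# Scalar metrics computed from the same (score, label) pairs used for the ROC plot.
bcm = BinaryClassificationMetrics(preds)
print("Area under ROC curve: {:.4f}".format(bcm.areaUnderROC))
print("Area under PR curve:  {:.4f}".format(bcm.areaUnderPR))
```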
- - diff --git a/website/docs/examples/cognitive_services/CognitiveServices - Celebrity Quote Analysis.md b/website/docs/examples/cognitive_services/CognitiveServices - Celebrity Quote Analysis.md deleted file mode 100644 index 89536f87d8..0000000000 --- a/website/docs/examples/cognitive_services/CognitiveServices - Celebrity Quote Analysis.md +++ /dev/null @@ -1,131 +0,0 @@ ---- -title: CognitiveServices - Celebrity Quote Analysis -hide_title: true -status: stable ---- -# Celebrity Quote Analysis with The Cognitive Services on Spark - - - - -```python -from synapse.ml.cognitive import * -from pyspark.ml import PipelineModel -from pyspark.sql.functions import col, udf -from pyspark.ml.feature import SQLTransformer -import os - -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - from pyspark.sql import SparkSession - spark = SparkSession.builder.getOrCreate() - from notebookutils.mssparkutils.credentials import getSecret - os.environ['VISION_API_KEY'] = getSecret("mmlspark-keys", "mmlspark-cs-key") - os.environ['TEXT_API_KEY'] = getSecret("mmlspark-keys", "mmlspark-cs-key") - os.environ['BING_IMAGE_SEARCH_KEY'] = getSecret("mmlspark-keys", "mmlspark-bing-search-key") - -#put your service keys here -TEXT_API_KEY = os.environ["TEXT_API_KEY"] -VISION_API_KEY = os.environ["VISION_API_KEY"] -BING_IMAGE_SEARCH_KEY = os.environ["BING_IMAGE_SEARCH_KEY"] -``` - -### Extracting celebrity quote images using Bing Image Search on Spark - -Here we define two Transformers to extract celebrity quote images. - - - - -```python -imgsPerBatch = 10 #the number of images Bing will return for each query -offsets = [(i*imgsPerBatch,) for i in range(100)] # A list of offsets, used to page into the search results -bingParameters = spark.createDataFrame(offsets, ["offset"]) - -bingSearch = BingImageSearch()\ - .setSubscriptionKey(BING_IMAGE_SEARCH_KEY)\ - .setOffsetCol("offset")\ - .setQuery("celebrity quotes")\ - .setCount(imgsPerBatch)\ - .setOutputCol("images") - -#Transformer to that extracts and flattens the richly structured output of Bing Image Search into a simple URL column -getUrls = BingImageSearch.getUrlTransformer("images", "url") -``` - -### Recognizing Images of Celebrities -This block identifies the name of the celebrities for each of the images returned by the Bing Image Search. - - - - -```python -celebs = RecognizeDomainSpecificContent()\ - .setSubscriptionKey(VISION_API_KEY)\ - .setModel("celebrities")\ - .setUrl("https://eastus.api.cognitive.microsoft.com/vision/v2.0/")\ - .setImageUrlCol("url")\ - .setOutputCol("celebs") - -#Extract the first celebrity we see from the structured response -firstCeleb = SQLTransformer(statement="SELECT *, celebs.result.celebrities[0].name as firstCeleb FROM __THIS__") -``` - -### Reading the quote from the image. -This stage performs OCR on the images to recognize the quotes. 
- - - - -```python -from synapse.ml.stages import UDFTransformer - -recognizeText = RecognizeText()\ - .setSubscriptionKey(VISION_API_KEY)\ - .setUrl("https://eastus.api.cognitive.microsoft.com/vision/v2.0/recognizeText")\ - .setImageUrlCol("url")\ - .setMode("Printed")\ - .setOutputCol("ocr")\ - .setConcurrency(5) - -def getTextFunction(ocrRow): - if ocrRow is None: return None - return "\n".join([line.text for line in ocrRow.recognitionResult.lines]) - -# this transformer wil extract a simpler string from the structured output of recognize text -getText = UDFTransformer().setUDF(udf(getTextFunction)).setInputCol("ocr").setOutputCol("text") - -``` - -### Understanding the Sentiment of the Quote - - - - -```python -sentimentTransformer = TextSentiment()\ - .setTextCol("text")\ - .setUrl("https://eastus.api.cognitive.microsoft.com/text/analytics/v3.0/sentiment")\ - .setSubscriptionKey(TEXT_API_KEY)\ - .setOutputCol("sentiment") - -#Extract the sentiment score from the API response body -getSentiment = SQLTransformer(statement="SELECT *, sentiment[0].sentiment as sentimentLabel FROM __THIS__") -``` - -### Tying it all together - -Now that we have built the stages of our pipeline its time to chain them together into a single model that can be used to process batches of incoming data - - - - -```python -from synapse.ml.stages import SelectColumns -# Select the final coulmns -cleanupColumns = SelectColumns().setCols(["url", "firstCeleb", "text", "sentimentLabel"]) - -celebrityQuoteAnalysis = PipelineModel(stages=[ - bingSearch, getUrls, celebs, firstCeleb, recognizeText, getText, sentimentTransformer, getSentiment, cleanupColumns]) - -celebrityQuoteAnalysis.transform(bingParameters).show(5) -``` diff --git a/website/docs/examples/cognitive_services/CognitiveServices - Predictive Maintenance.md b/website/docs/examples/cognitive_services/CognitiveServices - Predictive Maintenance.md deleted file mode 100644 index 6cb72b18b2..0000000000 --- a/website/docs/examples/cognitive_services/CognitiveServices - Predictive Maintenance.md +++ /dev/null @@ -1,146 +0,0 @@ ---- -title: CognitiveServices - Predictive Maintenance -hide_title: true -status: stable ---- -# Recipe: Predictive maintenance with the Cognitive Services for Big Data - -This recipe shows how you can use Azure Synapse Analytics and Cognitive Services on Apache Spark for predictive maintenance of IoT devices. We'll follow along with the [CosmosDB and Synapse Link](https://github.com/Azure-Samples/cosmosdb-synapse-link-samples) sample. To keep things simple, in this recipe we'll read the data straight from a CSV file rather than getting streamed data through CosmosDB and Synapse Link. We strongly encourage you to look over the Synapse Link sample. - -## Hypothetical scenario - -The hypothetical scenario is a Power Plant, where IoT devices are monitoring [steam turbines](https://en.wikipedia.org/wiki/Steam_turbine). The IoTSignals collection has Revolutions per minute (RPM) and Megawatts (MW) data for each turbine. Signals from steam turbines are being analyzed and anomalous signals are detected. - -There could be outliers in the data in random frequency. In those situations, RPM values will go up and MW output will go down, for circuit protection. The idea is to see the data varying at the same time, but with different signals. 
- -## Prerequisites - -* An Azure subscription - [Create one for free](https://azure.microsoft.com/en-us/free/) -* [Azure Synapse workspace](https://docs.microsoft.com/en-us/azure/synapse-analytics/get-started-create-workspace) configured with a [serverless Apache Spark pool](https://docs.microsoft.com/en-us/azure/synapse-analytics/get-started-analyze-spark) - -## Setup - -### Create an Anomaly Detector resource - -Azure Cognitive Services are represented by Azure resources that you subscribe to. Create a resource for Translator using the [Azure portal](https://docs.microsoft.com/en-us/azure/cognitive-services/cognitive-services-apis-create-account?tabs=multiservice%2Clinux) or [Azure CLI](https://docs.microsoft.com/en-us/azure/cognitive-services/cognitive-services-apis-create-account-cli?tabs=linux). You can also: - -- View an existing resource in the [Azure portal](https://portal.azure.com/). - -Make note of the endpoint and the key for this resource, you'll need it in this guide. - -## Enter your service keys - -Let's start by adding your key and location. - - -``` -import os - -service_key = os.environ["ANOMALY_API_KEY"] # Paste your anomaly detector key here -location = "westus2" # Paste your anomaly detector location here - -assert (service_key is not None) -``` - -## Read data into a DataFrame - -Next, let's read the IoTSignals file into a DataFrame. Open a new notebook in your Synapse workspace and create a DataFrame from the file. - - -``` -df_signals = spark.read.csv("wasbs://publicwasb@mmlspark.blob.core.windows.net/iot/IoTSignals.csv", header=True, inferSchema=True) -``` - -### Run anomaly detection using Cognitive Services on Spark - -The goal is to find instances where the signals from the IoT devices were outputting anomalous values so that we can see when something is going wrong and do predictive maintenance. To do that, let's use Anomaly Detector on Spark: - - -``` -from pyspark.sql.functions import col, struct -from synapse.ml.cognitive import SimpleDetectAnomalies -from synapse.ml.core.spark import FluentAPI - -detector = (SimpleDetectAnomalies() - .setSubscriptionKey(service_key) - .setLocation(location) - .setOutputCol("anomalies") - .setGroupbyCol("grouping") - .setSensitivity(95) - .setGranularity("secondly")) - -df_anomaly = (df_signals - .where(col("unitSymbol") == 'RPM') - .withColumn("timestamp", col("dateTime").cast("string")) - .withColumn("value", col("measureValue").cast("double")) - .withColumn("grouping", struct("deviceId")) - .mlTransform(detector)).cache() - -df_anomaly.createOrReplaceTempView('df_anomaly') -``` - -Let's take a look at the data: - - -``` -df_anomaly.select("timestamp","value","deviceId","anomalies.isAnomaly").show(3) - -``` - -This cell should yield a result that looks like: - -| timestamp | value | deviceId | isAnomaly | -|:--------------------|--------:|:-----------|:------------| -| 2020-05-01 18:33:51 | 3174 | dev-7 | False | -| 2020-05-01 18:33:52 | 2976 | dev-7 | False | -| 2020-05-01 18:33:53 | 2714 | dev-7 | False | - -## Visualize anomalies for one of the devices - -IoTSignals.csv has signals from multiple IoT devices. We'll focus on a specific device and visualize anomalous outputs from the device. 
- - -``` -df_anomaly_single_device = spark.sql(""" -select - timestamp, - measureValue, - anomalies.expectedValue, - anomalies.expectedValue + anomalies.upperMargin as expectedUpperValue, - anomalies.expectedValue - anomalies.lowerMargin as expectedLowerValue, - case when anomalies.isAnomaly=true then 1 else 0 end as isAnomaly -from - df_anomaly -where deviceid = 'dev-1' and timestamp < '2020-04-29' -order by timestamp -limit 200""") -``` - -Now that we have created a dataframe that represents the anomalies for a particular device, we can visualize these anomalies: - - -``` -import matplotlib.pyplot as plt -from pyspark.sql.functions import col - -adf = df_anomaly_single_device.toPandas() -adf_subset = df_anomaly_single_device.where(col("isAnomaly") == 1).toPandas() - -plt.figure(figsize=(23,8)) -plt.plot(adf['timestamp'],adf['expectedUpperValue'], color='darkred', linestyle='solid', linewidth=0.25, label='UpperMargin') -plt.plot(adf['timestamp'],adf['expectedValue'], color='darkgreen', linestyle='solid', linewidth=2, label='Expected Value') -plt.plot(adf['timestamp'],adf['measureValue'], 'b', color='royalblue', linestyle='dotted', linewidth=2, label='Actual') -plt.plot(adf['timestamp'],adf['expectedLowerValue'], color='black', linestyle='solid', linewidth=0.25, label='Lower Margin') -plt.plot(adf_subset['timestamp'],adf_subset['measureValue'], 'ro', label = 'Anomaly') -plt.legend() -plt.title('RPM Anomalies with Confidence Intervals') -plt.show() -``` - -If successful, your output will look like this: - -![Anomaly Detector Plot](https://github.com/MicrosoftDocs/azure-docs/raw/master/articles/cognitive-services/big-data/media/anomaly-output.png) - -## Next steps - -Learn how to do predictive maintenance at scale with Azure Cognitive Services, Azure Synapse Analytics, and Azure CosmosDB. For more information, see the full sample on [GitHub](https://github.com/Azure-Samples/cosmosdb-synapse-link-samples). diff --git a/website/docs/examples/deep_learning/DeepLearning - BiLSTM Medical Entity Extraction.md b/website/docs/examples/deep_learning/DeepLearning - BiLSTM Medical Entity Extraction.md deleted file mode 100644 index d367ec0b58..0000000000 --- a/website/docs/examples/deep_learning/DeepLearning - BiLSTM Medical Entity Extraction.md +++ /dev/null @@ -1,232 +0,0 @@ ---- -title: DeepLearning - BiLSTM Medical Entity Extraction -hide_title: true -status: stable ---- -## DeepLearning - BiLSTM Medical Entity Extraction - -In this tutorial we use a Bidirectional LSTM entity extractor from the synapseml -model downloader to extract entities from PubMed medical abstracts - -Our goal is to identify useful entities in a block of free-form text. This is a -nontrivial task because entities might be referenced in the text using variety of -synonymns, abbreviations, or formats. Our target output for this model is a set -of tags that specify what kind of entity is referenced. The model we use was -trained on a large dataset of publically tagged pubmed abstracts. 
An example -annotated sequence is given below, "O" represents no tag: - -|I-Chemical | O |I-Chemical | O | O |I-Chemical | O |I-Chemical | O | O | O | O |I-Disease |I-Disease| O | O | -|:---: |:---:|:---: |:---:|:---:|:---: |:---:|:---: |:---:|:---: |:---:|:---:|:---: |:---: |:---:|:---: | -|Baricitinib| , |Methotrexate| , | or |Baricitinib|Plus |Methotrexate| in |Patients|with |Early|Rheumatoid|Arthritis| Who |Had...| - - - - -```python -from synapse.ml.cntk import CNTKModel -from synapse.ml.downloader import ModelDownloader -from pyspark.sql.functions import udf, col -from pyspark.sql.types import IntegerType, ArrayType, FloatType, StringType -from pyspark.sql import Row - -from os.path import abspath, join -import numpy as np -from nltk.tokenize import sent_tokenize, word_tokenize -import os, tarfile, pickle -import urllib.request -import nltk -``` - -Get the model and extract the data. - - -```python -modelName = "BiLSTM" -modelDir = "models" -if not os.path.exists(modelDir): os.makedirs(modelDir) -d = ModelDownloader(spark, "dbfs:///" + modelDir) -modelSchema = d.downloadByName(modelName) -nltk.download("punkt", "/dbfs/nltkdata") -nltk.data.path.append("/dbfs/nltkdata") -``` - - -```python -modelName = "BiLSTM" -modelDir = abspath("models") -if not os.path.exists(modelDir): os.makedirs(modelDir) -d = ModelDownloader(spark, "file://" + modelDir) -modelSchema = d.downloadByName(modelName) -nltk.download("punkt") -``` - -Download the embeddings - -We use the nltk punkt sentence and word tokenizers and a set of embeddings trained on PubMed Articles - - -```python -wordEmbFileName = "WordEmbeddings_PubMed.pkl" -pickleFile = join(abspath("models"), wordEmbFileName) -if not os.path.isfile(pickleFile): - urllib.request.urlretrieve("https://mmlspark.blob.core.windows.net/datasets/" + wordEmbFileName, pickleFile) -``` - -Load the embeddings and create functions for encoding sentences - - -```python -pickleContent = pickle.load(open(pickleFile, "rb"), encoding="latin-1") -wordToIndex = pickleContent["word_to_index"] -wordvectors = pickleContent["wordvectors"] -classToEntity = pickleContent["class_to_entity"] - -nClasses = len(classToEntity) -nFeatures = wordvectors.shape[1] -maxSentenceLen = 613 -``` - - -```python -content = "Baricitinib, Methotrexate, or Baricitinib Plus Methotrexate in Patients with Early Rheumatoid\ - Arthritis Who Had Received Limited or No Treatment with Disease-Modifying-Anti-Rheumatic-Drugs (DMARDs):\ - Phase 3 Trial Results. Keywords: Janus kinase (JAK), methotrexate (MTX) and rheumatoid arthritis (RA) and\ - Clinical research. In 2 completed phase 3 studies, baricitinib (bari) improved disease activity with a\ - satisfactory safety profile in patients (pts) with moderately-to-severely active RA who were inadequate\ - responders to either conventional synthetic1 or biologic2DMARDs. This abstract reports results from a\ - phase 3 study of bari administered as monotherapy or in combination with methotrexate (MTX) to pts with\ - early active RA who had limited or no prior treatment with DMARDs. MTX monotherapy was the active comparator." 
-``` - - -```python -sentences = sent_tokenize(content) -df = spark.createDataFrame(enumerate(sentences), ["index","sentence"]) -``` - - -```python -# Add the tokenizers to all worker nodes -def prepNLTK(partition): - nltk.data.path.append("/dbfs/nltkdata") - return partition - -df = df.rdd.mapPartitions(prepNLTK).toDF() -``` - - -```python -def safe_tokenize(sent): - try: - return word_tokenize(sent) - except LookupError: - prepNLTK(None) - return word_tokenize(sent) - -tokenizeUDF = udf(safe_tokenize, ArrayType(StringType())) -df = df.withColumn("tokens",tokenizeUDF("sentence")) - -countUDF = udf(len, IntegerType()) -df = df.withColumn("count",countUDF("tokens")) - -def wordToEmb(word): - return wordvectors[wordToIndex.get(word.lower(), wordToIndex["UNK"])] - -def featurize(tokens): - X = np.zeros((maxSentenceLen, nFeatures)) - X[-len(tokens):,:] = np.array([wordToEmb(word) for word in tokens]) - return [float(x) for x in X.reshape(maxSentenceLen, nFeatures).flatten()] - -def safe_show(df, retries): - try: - df.show() - except Exception as e: - if retries >= 1: - safe_show(df, retries-1) - else: - raise e - -featurizeUDF = udf(featurize, ArrayType(FloatType())) - -df = df.withColumn("features", featurizeUDF("tokens")).cache() -safe_show(df, 5) # Can be flaky on build server - - -``` - -Run the CNTKModel - - -```python -model = CNTKModel() \ - .setModelLocation(modelSchema.uri) \ - .setInputCol("features") \ - .setOutputCol("probs") \ - .setOutputNodeIndex(0) \ - .setMiniBatchSize(1) - -df = model.transform(df).cache() -df.show() -``` - - -```python -def probsToEntities(probs, wordCount): - reshaped_probs = np.array(probs).reshape(maxSentenceLen, nClasses) - reshaped_probs = reshaped_probs[-wordCount:,:] - return [classToEntity[np.argmax(probs)] for probs in reshaped_probs] - -toEntityUDF = udf(probsToEntities,ArrayType(StringType())) -df = df.withColumn("entities", toEntityUDF("probs", "count")) -df.show() -``` - -Show the annotated text - - -```python -# Color Code the Text based on the entity type -colors = { - "B-Disease": "blue", - "I-Disease":"blue", - "B-Drug":"lime", - "I-Drug":"lime", - "B-Chemical":"lime", - "I-Chemical":"lime", - "O":"black", - "NONE":"black" -} - -def prettyPrint(words, annotations): - formattedWords = [] - for word,annotation in zip(words,annotations): - formattedWord = "{}".format(colors[annotation], word) - if annotation in {"O","NONE"}: - formattedWords.append(formattedWord) - else: - formattedWords.append("{}".format(formattedWord)) - return " ".join(formattedWords) - -prettyPrintUDF = udf(prettyPrint, StringType()) -df = df.withColumn("formattedSentence", prettyPrintUDF("tokens", "entities")) \ - .select("formattedSentence") - -sentences = [row["formattedSentence"] for row in df.collect()] -``` - - -```python -from IPython.core.display import display, HTML -for sentence in sentences: - display(HTML(sentence)) -``` - -Example text used in this demo has been taken from: - -Fleischmann R, Takeuchi T, Schlichting DE, Macias WL, Rooney T, Gurbuz S, Stoykov I, -Beattie SD, Kuo WL, Schiff M. Baricitinib, Methotrexate, or Baricitinib Plus Methotrexate -in Patients with Early Rheumatoid Arthritis Who Had Received Limited or No Treatment with -Disease-Modifying Anti-Rheumatic Drugs (DMARDs): Phase 3 Trial Results [abstract]. -Arthritis Rheumatol. 2015; 67 (suppl 10). 
-http://acrabstracts.org/abstract/baricitinib-methotrexate-or-baricitinib-plus-methotrexate-in-patients-with-early-rheumatoid-arthritis-who-had-received-limited-or-no-treatment-with-disease-modifying-anti-rheumatic-drugs-dmards-p/. -Accessed August 18, 2017. diff --git a/website/docs/examples/deep_learning/DeepLearning - CIFAR10 Convolutional Network.md b/website/docs/examples/deep_learning/DeepLearning - CIFAR10 Convolutional Network.md deleted file mode 100644 index 944bcd482e..0000000000 --- a/website/docs/examples/deep_learning/DeepLearning - CIFAR10 Convolutional Network.md +++ /dev/null @@ -1,94 +0,0 @@ ---- -title: DeepLearning - CIFAR10 Convolutional Network -hide_title: true -status: stable ---- -## DeepLearning - CIFAR10 Convolutional Network - - -```python -from synapse.ml.cntk import CNTKModel -from synapse.ml.downloader import ModelDownloader -from pyspark.sql.functions import udf -from pyspark.sql.types import IntegerType -from os.path import abspath -``` - -Set some paths. - - -```python -cdnURL = "https://mmlspark.azureedge.net/datasets" - -# Please note that this is a copy of the CIFAR10 dataset originally found here: -# http://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz -imagesWithLabels = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/CIFAR10_test.parquet") -``` - - -```python -modelName = "ConvNet" -modelDir = "dbfs:///models/" -``` - -Get the model - - -```python -d = ModelDownloader(spark, modelDir) -model = d.downloadByName(modelName) - -``` - -Evaluate CNTK model. - - -```python -import time -start = time.time() - -# Use CNTK model to get log probabilities -cntkModel = CNTKModel().setInputCol("images").setOutputCol("output") \ - .setModelLocation(model.uri).setOutputNode("z") -scoredImages = cntkModel.transform(imagesWithLabels) - -# Transform the log probabilities to predictions -def argmax(x): return max(enumerate(x),key=lambda p: p[1])[0] - -argmaxUDF = udf(argmax, IntegerType()) -imagePredictions = scoredImages.withColumn("predictions", argmaxUDF("output")) \ - .select("predictions", "labels") - -numRows = imagePredictions.count() - -end = time.time() -print("classifying {} images took {} seconds".format(numRows,end-start)) -``` - -Plot confusion matrix. 
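Before collecting the predictions into pandas for plotting, a quick sanity check of the overall accuracy can be done directly on the Spark DataFrame. A minimal sketch, assuming the `imagePredictions` DataFrame produced by the scoring cell above:

```python
from pyspark.sql.functions import avg, col

# Fraction of rows where the predicted class matches the true label.
accuracy = (imagePredictions
            .select(avg((col("predictions") == col("labels")).cast("double")).alias("acc"))
            .first()["acc"])
print("accuracy: {:.3f}".format(accuracy))
```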
- - -```python -imagePredictions = imagePredictions.toPandas() -y, y_hat = imagePredictions["labels"], imagePredictions["predictions"] -``` - - -```python -import matplotlib.pyplot as plt -import numpy as np -from sklearn.metrics import confusion_matrix - -cm = confusion_matrix(y, y_hat) - -labels = ["airplane", "automobile", "bird", "cat", "deer", "dog", "frog", - "horse", "ship", "truck"] -plt.imshow(cm, interpolation="nearest", cmap=plt.cm.Blues) -plt.colorbar() -tick_marks = np.arange(len(labels)) -plt.xticks(tick_marks, labels, rotation=90) -plt.yticks(tick_marks, labels) -plt.xlabel("Predicted label") -plt.ylabel("True Label") -display(plt.show()) -``` diff --git a/website/docs/examples/deep_learning/DeepLearning - Flower Image Classification.md b/website/docs/examples/deep_learning/DeepLearning - Flower Image Classification.md deleted file mode 100644 index dd6da74262..0000000000 --- a/website/docs/examples/deep_learning/DeepLearning - Flower Image Classification.md +++ /dev/null @@ -1,139 +0,0 @@ ---- -title: DeepLearning - Flower Image Classification -hide_title: true -status: stable ---- -## Deep Learning - Flower Image Classification - - -```python -from pyspark.ml import Transformer, Estimator, Pipeline -from pyspark.ml.classification import LogisticRegression -from synapse.ml.downloader import ModelDownloader -import os, sys, time -``` - - -```python -model = ModelDownloader(spark, "dbfs:/models/").downloadByName("ResNet50") -``` - - -```python -# Load the images -# use flowers_and_labels.parquet on larger cluster in order to get better results -imagesWithLabels = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/flowers_and_labels2.parquet") \ - .withColumnRenamed("bytes","image").sample(.1) - -imagesWithLabels.printSchema() -``` - -![Smiley face](https://i.imgur.com/p2KgdYL.jpg) - - -```python -from synapse.ml.opencv import ImageTransformer -from synapse.ml.image import UnrollImage -from synapse.ml.cntk import ImageFeaturizer -from synapse.ml.stages import * - -# Make some featurizers -it = ImageTransformer()\ - .setOutputCol("scaled")\ - .resize(size=(60, 60)) - -ur = UnrollImage()\ - .setInputCol("scaled")\ - .setOutputCol("features") - -dc1 = DropColumns().setCols(["scaled", "image"]) - -lr1 = LogisticRegression().setMaxIter(8).setFeaturesCol("features").setLabelCol("labels") - -dc2 = DropColumns().setCols(["features"]) - -basicModel = Pipeline(stages=[it, ur, dc1, lr1, dc2]) -``` - - -```python -resnet = ImageFeaturizer()\ - .setInputCol("image")\ - .setOutputCol("features")\ - .setModelLocation(model.uri)\ - .setLayerNames(model.layerNames)\ - .setCutOutputLayers(1) - -dc3 = DropColumns().setCols(["image"]) - -lr2 = LogisticRegression().setMaxIter(8).setFeaturesCol("features").setLabelCol("labels") - -dc4 = DropColumns().setCols(["features"]) - -deepModel = Pipeline(stages=[resnet, dc3, lr2, dc4]) -``` - -![Resnet 18](https://i.imgur.com/Mb4Dyou.png) - -### How does it work? 
- -![Convolutional network weights](http://i.stack.imgur.com/Hl2H6.png) - -### Run the experiment - - -```python -def timedExperiment(model, train, test): - start = time.time() - result = model.fit(train).transform(test).toPandas() - print("Experiment took {}s".format(time.time() - start)) - return result -``` - - -```python -train, test = imagesWithLabels.randomSplit([.8,.2]) -train.count(), test.count() -``` - - -```python -basicResults = timedExperiment(basicModel, train, test) -``` - - -```python -deepResults = timedExperiment(deepModel, train, test) -``` - -### Plot confusion matrix. - - -```python -import matplotlib.pyplot as plt -from sklearn.metrics import confusion_matrix -import numpy as np - -def evaluate(results, name): - y, y_hat = results["labels"],results["prediction"] - y = [int(l) for l in y] - - accuracy = np.mean([1. if pred==true else 0. for (pred,true) in zip(y_hat,y)]) - cm = confusion_matrix(y, y_hat) - cm = cm.astype("float") / cm.sum(axis=1)[:, np.newaxis] - - plt.text(40, 10,"$Accuracy$ $=$ ${}\%$".format(round(accuracy*100,1)),fontsize=14) - plt.imshow(cm, interpolation="nearest", cmap=plt.cm.Blues) - plt.colorbar() - plt.xlabel("$Predicted$ $label$", fontsize=18) - plt.ylabel("$True$ $Label$", fontsize=18) - plt.title("$Normalized$ $CM$ $for$ ${}$".format(name)) - -plt.figure(figsize=(12,5)) -plt.subplot(1,2,1) -evaluate(deepResults,"CNTKModel + LR") -plt.subplot(1,2,2) -evaluate(basicResults,"LR") -# Note that on the larger dataset the accuracy will bump up from 44% to >90% -display(plt.show()) -``` diff --git a/website/docs/examples/deep_learning/DeepLearning - Transfer Learning.md b/website/docs/examples/deep_learning/DeepLearning - Transfer Learning.md deleted file mode 100644 index 520237e048..0000000000 --- a/website/docs/examples/deep_learning/DeepLearning - Transfer Learning.md +++ /dev/null @@ -1,72 +0,0 @@ ---- -title: DeepLearning - Transfer Learning -hide_title: true -status: stable ---- -## DeepLearning - Transfer Learning - -Classify automobile vs airplane using DNN featurization and transfer learning -against a subset of images from CIFAR-10 dataset. - -Load DNN Model and pick one of the inner layers as feature output - - -```python -from synapse.ml.cntk import CNTKModel -from synapse.ml.downloader import ModelDownloader -import numpy as np, os, urllib, tarfile, pickle, array -from os.path import abspath -from pyspark.sql.functions import col, udf -from pyspark.sql.types import * -modelName = "ConvNet" -modelDir = "file:" + abspath("models") -d = ModelDownloader(spark, modelDir) -model = d.downloadByName(modelName) -print(model.layerNames) -cntkModel = CNTKModel().setInputCol("images").setOutputCol("features") \ - .setModelLocation(model.uri).setOutputNode("l8") -``` - -Format raw CIFAR data into correct shape. 
- - -```python -imagesWithLabels = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/CIFAR10_test.parquet") -``` - -Select airplanes (label=0) and automobiles (label=1) - - -```python -imagesWithLabels = imagesWithLabels.filter("labels<2") -imagesWithLabels.cache() -``` - -Featurize images - - -```python -featurizedImages = cntkModel.transform(imagesWithLabels).select(["features","labels"]) -``` - -Use featurized images to train a classifier - - -```python -from synapse.ml.train import TrainClassifier -from pyspark.ml.classification import RandomForestClassifier - -train,test = featurizedImages.randomSplit([0.75,0.25]) - -model = TrainClassifier(model=RandomForestClassifier(),labelCol="labels").fit(train) -``` - -Evaluate the accuracy of the model - - -```python -from synapse.ml.train import ComputeModelStatistics -predictions = model.transform(test) -metrics = ComputeModelStatistics(evaluationMetric="accuracy").transform(predictions) -metrics.show() -``` diff --git a/website/docs/examples/regression/Regression - Auto Imports.md b/website/docs/examples/regression/Regression - Auto Imports.md deleted file mode 100644 index 407bae96ef..0000000000 --- a/website/docs/examples/regression/Regression - Auto Imports.md +++ /dev/null @@ -1,214 +0,0 @@ ---- -title: Regression - Auto Imports -hide_title: true -status: stable ---- -## Regression - Auto Imports - -This sample notebook is based on the Gallery [Sample 6: Train, Test, Evaluate -for Regression: Auto Imports -Dataset](https://gallery.cortanaintelligence.com/Experiment/670fbfc40c4f44438bfe72e47432ae7a) -for AzureML Studio. This experiment demonstrates how to build a regression -model to predict the automobile's price. The process includes training, testing, -and evaluating the model on the Automobile Imports data set. - -This sample demonstrates the use of several members of the synapseml library: -- [`TrainRegressor` - ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.train.html?#module-synapse.ml.train.TrainRegressor) -- [`SummarizeData` - ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.stages.html?#module-synapse.ml.stages.SummarizeData) -- [`CleanMissingData` - ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.featurize.html?#module-synapse.ml.featurize.CleanMissingData) -- [`ComputeModelStatistics` - ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.train.html?#module-synapse.ml.train.ComputeModelStatistics) -- [`FindBestModel` - ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.automl.html?#module-synapse.ml.automl.FindBestModel) - -First, import the pandas package so that we can read and parse the datafile -using `pandas.read_csv()` - - -```python -import os -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - from pyspark.sql import SparkSession - spark = SparkSession.builder.getOrCreate() -``` - - -```python -data = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/AutomobilePriceRaw.parquet") - -``` - -To learn more about the data that was just read into the DataFrame, -summarize the data using `SummarizeData` and print the summary. 
For each -column of the DataFrame, `SummarizeData` will report the summary statistics -in the following subcategories for each column: -* Feature name -* Counts - - Count - - Unique Value Count - - Missing Value Count -* Quantiles - - Min - - 1st Quartile - - Median - - 3rd Quartile - - Max -* Sample Statistics - - Sample Variance - - Sample Standard Deviation - - Sample Skewness - - Sample Kurtosis -* Percentiles - - P0.5 - - P1 - - P5 - - P95 - - P99 - - P99.5 - -Note that several columns have missing values (`normalized-losses`, `bore`, -`stroke`, `horsepower`, `peak-rpm`, `price`). This summary can be very -useful during the initial phases of data discovery and characterization. - - -```python -from synapse.ml.stages import SummarizeData -summary = SummarizeData().transform(data) -summary.toPandas() -``` - -Split the dataset into train and test datasets. - - -```python -# split the data into training and testing datasets -train, test = data.randomSplit([0.6, 0.4], seed=123) -train.limit(10).toPandas() -``` - -Now use the `CleanMissingData` API to replace the missing values in the -dataset with something more useful or meaningful. Specify a list of columns -to be cleaned, and specify the corresponding output column names, which are -not required to be the same as the input column names. `CleanMissiongData` -offers the options of "Mean", "Median", or "Custom" for the replacement -value. In the case of "Custom" value, the user also specifies the value to -use via the "customValue" parameter. In this example, we will replace -missing values in numeric columns with the median value for the column. We -will define the model here, then use it as a Pipeline stage when we train our -regression models and make our predictions in the following steps. - - -```python -from synapse.ml.featurize import CleanMissingData -cols = ["normalized-losses", "stroke", "bore", "horsepower", - "peak-rpm", "price"] -cleanModel = CleanMissingData().setCleaningMode("Median") \ - .setInputCols(cols).setOutputCols(cols) -``` - -Now we will create two Regressor models for comparison: Poisson Regression -and Random Forest. PySpark has several regressors implemented: -* `LinearRegression` -* `IsotonicRegression` -* `DecisionTreeRegressor` -* `RandomForestRegressor` -* `GBTRegressor` (Gradient-Boosted Trees) -* `AFTSurvivalRegression` (Accelerated Failure Time Model Survival) -* `GeneralizedLinearRegression` -- fit a generalized model by giving symbolic - description of the linear preditor (link function) and a description of the - error distribution (family). The following families are supported: - - `Gaussian` - - `Binomial` - - `Poisson` - - `Gamma` - - `Tweedie` -- power link function specified through `linkPower` -Refer to the -[Pyspark API Documentation](http://spark.apache.org/docs/latest/api/python/) -for more details. - -`TrainRegressor` creates a model based on the regressor and other parameters -that are supplied to it, then trains data on the model. - -In this next step, Create a Poisson Regression model using the -`GeneralizedLinearRegressor` API from Spark and create a Pipeline using the -`CleanMissingData` and `TrainRegressor` as pipeline stages to create and -train the model. Note that because `TrainRegressor` expects a `labelCol` to -be set, there is no need to set `linkPredictionCol` when setting up the -`GeneralizedLinearRegressor`. Fitting the pipe on the training dataset will -train the model. Applying the `transform()` of the pipe to the test dataset -creates the predictions. 
- - -```python -# train Poisson Regression Model -from pyspark.ml.regression import GeneralizedLinearRegression -from pyspark.ml import Pipeline -from synapse.ml.train import TrainRegressor - -glr = GeneralizedLinearRegression(family="poisson", link="log") -poissonModel = TrainRegressor().setModel(glr).setLabelCol("price").setNumFeatures(256) -poissonPipe = Pipeline(stages = [cleanModel, poissonModel]).fit(train) -poissonPrediction = poissonPipe.transform(test) -``` - -Next, repeat these steps to create a Random Forest Regression model using the -`RandomRorestRegressor` API from Spark. - - -```python -# train Random Forest regression on the same training data: -from pyspark.ml.regression import RandomForestRegressor - -rfr = RandomForestRegressor(maxDepth=30, maxBins=128, numTrees=8, minInstancesPerNode=1) -randomForestModel = TrainRegressor(model=rfr, labelCol="price", numFeatures=256).fit(train) -randomForestPipe = Pipeline(stages = [cleanModel, randomForestModel]).fit(train) -randomForestPrediction = randomForestPipe.transform(test) -``` - -After the models have been trained and scored, compute some basic statistics -to evaluate the predictions. The following statistics are calculated for -regression models to evaluate: -* Mean squared error -* Root mean squared error -* R^2 -* Mean absolute error - -Use the `ComputeModelStatistics` API to compute basic statistics for -the Poisson and the Random Forest models. - - -```python -from synapse.ml.train import ComputeModelStatistics -poissonMetrics = ComputeModelStatistics().transform(poissonPrediction) -print("Poisson Metrics") -poissonMetrics.toPandas() -``` - - -```python -randomForestMetrics = ComputeModelStatistics().transform(randomForestPrediction) -print("Random Forest Metrics") -randomForestMetrics.toPandas() -``` - -We can also compute per instance statistics for `poissonPrediction`: - - -```python -from synapse.ml.train import ComputePerInstanceStatistics -def demonstrateEvalPerInstance(pred): - return ComputePerInstanceStatistics().transform(pred) \ - .select("price", "Scores", "L1_loss", "L2_loss") \ - .limit(10).toPandas() -demonstrateEvalPerInstance(poissonPrediction) -``` - -and with `randomForestPrediction`: - - -```python -demonstrateEvalPerInstance(randomForestPrediction) -``` diff --git a/website/docs/examples/regression/Regression - Flight Delays with DataCleaning.md b/website/docs/examples/regression/Regression - Flight Delays with DataCleaning.md deleted file mode 100644 index d86ed446f9..0000000000 --- a/website/docs/examples/regression/Regression - Flight Delays with DataCleaning.md +++ /dev/null @@ -1,153 +0,0 @@ ---- -title: Regression - Flight Delays with DataCleaning -hide_title: true -status: stable ---- -## Regression - Flight Delays with DataCleaning - -This example notebook is similar to -[Regression - Flight Delays](https://github.com/microsoft/SynapseML/blob/master/notebooks/Regression%20-%20Flight%20Delays.ipynb). -In this example, we will demonstrate the use of `DataConversion()` in two -ways. First, to convert the data type of several columns after the dataset -has been read in to the Spark DataFrame instead of specifying the data types -as the file is read in. Second, to convert columns to categorical columns -instead of iterating over the columns and applying the `StringIndexer`. 
- -This sample demonstrates how to use the following APIs: -- [`TrainRegressor` - ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.train.html?#module-synapse.ml.train.TrainRegressor) -- [`ComputePerInstanceStatistics` - ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.train.html?#module-synapse.ml.train.ComputePerInstanceStatistics) -- [`DataConversion` - ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.featurize.html?#module-synapse.ml.featurize.DataConversion) - -First, import the pandas package - - -```python -import os -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - from pyspark.sql import SparkSession - spark = SparkSession.builder.getOrCreate() -``` - - -```python -import pandas as pd -``` - -Next, import the CSV dataset: retrieve the file if needed, save it locally, -read the data into a pandas dataframe via `read_csv()`, then convert it to -a Spark dataframe. - -Print the schema of the dataframe, and note the columns that are `long`. - - -```python -flightDelay = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/On_Time_Performance_2012_9.parquet") -# print some basic info -print("records read: " + str(flightDelay.count())) -print("Schema: ") -flightDelay.printSchema() -flightDelay.limit(10).toPandas() -``` - -Use the `DataConversion` transform API to convert the columns listed to -double. - -The `DataConversion` API accepts the following types for the `convertTo` -parameter: -* `boolean` -* `byte` -* `short` -* `integer` -* `long` -* `float` -* `double` -* `string` -* `toCategorical` -* `clearCategorical` -* `date` -- converts a string or long to a date of the format - "yyyy-MM-dd HH:mm:ss" unless another format is specified by -the `dateTimeFormat` parameter. - -Again, print the schema and note that the columns are now `double` -instead of long. - - -```python -from synapse.ml.featurize import DataConversion -flightDelay = DataConversion(cols=["Quarter","Month","DayofMonth","DayOfWeek", - "OriginAirportID","DestAirportID", - "CRSDepTime","CRSArrTime"], - convertTo="double") \ - .transform(flightDelay) -flightDelay.printSchema() -flightDelay.limit(10).toPandas() -``` - -Split the datasest into train and test sets. - - -```python -train, test = flightDelay.randomSplit([0.75, 0.25]) -``` - -Create a regressor model and train it on the dataset. - -First, use `DataConversion` to convert the columns `Carrier`, `DepTimeBlk`, -and `ArrTimeBlk` to categorical data. Recall that in Notebook 102, this -was accomplished by iterating over the columns and converting the strings -to index values using the `StringIndexer` API. The `DataConversion` API -simplifies the task by allowing you to specify all columns that will have -the same end type in a single command. - -Create a LinearRegression model using the Limited-memory BFGS solver -(`l-bfgs`), an `ElasticNet` mixing parameter of `0.3`, and a `Regularization` -of `0.1`. - -Train the model with the `TrainRegressor` API fit on the training dataset. 
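(As a side note on the `date` mode listed earlier: it is configured the same way as the other conversions. A small hypothetical sketch, since this dataset has no string date column to convert; the `FlightDateStr` column name and the sample value are made up purely for illustration of the `dateTimeFormat` parameter:)

```python
from synapse.ml.featurize import DataConversion

# Hypothetical: parse a string column holding values such as "2012-09-15 08:30:00"
# into a date column, overriding the default pattern via dateTimeFormat.
dateConversion = DataConversion(cols=["FlightDateStr"],
                                convertTo="date",
                                dateTimeFormat="yyyy-MM-dd HH:mm:ss")
# dateConversion.transform(someDataFrame) would then yield a proper date column.
```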
- - -```python -from synapse.ml.train import TrainRegressor, TrainedRegressorModel -from pyspark.ml.regression import LinearRegression - -trainCat = DataConversion(cols=["Carrier","DepTimeBlk","ArrTimeBlk"], - convertTo="toCategorical") \ - .transform(train) -testCat = DataConversion(cols=["Carrier","DepTimeBlk","ArrTimeBlk"], - convertTo="toCategorical") \ - .transform(test) -lr = LinearRegression().setRegParam(0.1) \ - .setElasticNetParam(0.3) -model = TrainRegressor(model=lr, labelCol="ArrDelay").fit(trainCat) -``` - -Score the regressor on the test data. - - -```python -scoredData = model.transform(testCat) -scoredData.limit(10).toPandas() -``` - -Compute model metrics against the entire scored dataset - - -```python -from synapse.ml.train import ComputeModelStatistics -metrics = ComputeModelStatistics().transform(scoredData) -metrics.toPandas() -``` - -Finally, compute and show statistics on individual predictions in the test -dataset, demonstrating the usage of `ComputePerInstanceStatistics` - - -```python -from synapse.ml.train import ComputePerInstanceStatistics -evalPerInstance = ComputePerInstanceStatistics().transform(scoredData) -evalPerInstance.select("ArrDelay", "Scores", "L1_loss", "L2_loss") \ - .limit(10).toPandas() -``` diff --git a/website/docs/examples/regression/Regression - Flight Delays.md b/website/docs/examples/regression/Regression - Flight Delays.md deleted file mode 100644 index 6ec0c5ea37..0000000000 --- a/website/docs/examples/regression/Regression - Flight Delays.md +++ /dev/null @@ -1,100 +0,0 @@ ---- -title: Regression - Flight Delays -hide_title: true -status: stable ---- -## Regression - Flight Delays - -In this example, we run a linear regression on the *Flight Delay* dataset to predict the delay times. - -We demonstrate how to use the `TrainRegressor` and the `ComputePerInstanceStatistics` APIs. - -First, import the packages. - - -```python -import os -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - from pyspark.sql import SparkSession - spark = SparkSession.builder.getOrCreate() -``` - - -```python -import numpy as np -import pandas as pd -import synapse.ml -``` - -Next, import the CSV dataset. - - -```python -flightDelay = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/On_Time_Performance_2012_9.parquet") -# print some basic info -print("records read: " + str(flightDelay.count())) -print("Schema: ") -flightDelay.printSchema() -flightDelay.limit(10).toPandas() -``` - -Split the dataset into train and test sets. - - -```python -train,test = flightDelay.randomSplit([0.75, 0.25]) -``` - -Train a regressor on dataset with `l-bfgs`. - - -```python -from synapse.ml.train import TrainRegressor, TrainedRegressorModel -from pyspark.ml.regression import LinearRegression -from pyspark.ml.feature import StringIndexer -# Convert columns to categorical -catCols = ["Carrier", "DepTimeBlk", "ArrTimeBlk"] -trainCat = train -testCat = test -for catCol in catCols: - simodel = StringIndexer(inputCol=catCol, outputCol=catCol + "Tmp").fit(train) - trainCat = simodel.transform(trainCat).drop(catCol).withColumnRenamed(catCol + "Tmp", catCol) - testCat = simodel.transform(testCat).drop(catCol).withColumnRenamed(catCol + "Tmp", catCol) -lr = LinearRegression().setRegParam(0.1).setElasticNetParam(0.3) -model = TrainRegressor(model=lr, labelCol="ArrDelay").fit(trainCat) -``` - -Save, load, or Score the regressor on the test data. 
- - -```python -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - model_name = "/models/flightDelayModel.mml" -else: - model_name = "dbfs:/flightDelayModel.mml" - -model.write().overwrite().save(model_name) -flightDelayModel = TrainedRegressorModel.load(model_name) - -scoredData = flightDelayModel.transform(testCat) -scoredData.limit(10).toPandas() -``` - -Compute model metrics against the entire scored dataset - - -```python -from synapse.ml.train import ComputeModelStatistics -metrics = ComputeModelStatistics().transform(scoredData) -metrics.toPandas() -``` - -Finally, compute and show per-instance statistics, demonstrating the usage -of `ComputePerInstanceStatistics`. - - -```python -from synapse.ml.train import ComputePerInstanceStatistics -evalPerInstance = ComputePerInstanceStatistics().transform(scoredData) -evalPerInstance.select("ArrDelay", "Scores", "L1_loss", "L2_loss").limit(10).toPandas() -``` diff --git a/website/docs/examples/regression/Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor.md b/website/docs/examples/regression/Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor.md deleted file mode 100644 index 243dac1b04..0000000000 --- a/website/docs/examples/regression/Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor.md +++ /dev/null @@ -1,258 +0,0 @@ ---- -title: Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor -hide_title: true -status: stable ---- -# Vowpal Wabbit and LightGBM for a Regression Problem - -This notebook shows how to build simple regression models by using -[Vowpal Wabbit (VW)](https://github.com/VowpalWabbit/vowpal_wabbit) and -[LightGBM](https://github.com/microsoft/LightGBM) with SynapseML. - We also compare the results with - [Spark MLlib Linear Regression](https://spark.apache.org/docs/latest/ml-classification-regression.html#linear-regression). - - -```python -import os -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - from pyspark.sql import SparkSession - spark = SparkSession.builder.getOrCreate() -``` - - -```python -import math -from synapse.ml.train import ComputeModelStatistics -from synapse.ml.vw import VowpalWabbitRegressor, VowpalWabbitFeaturizer -from synapse.ml.lightgbm import LightGBMRegressor -import numpy as np -import pandas as pd -from pyspark.ml.feature import VectorAssembler -from pyspark.ml.regression import LinearRegression -from sklearn.datasets import load_boston -``` - -## Prepare Dataset -We use [*Boston house price* dataset](https://scikit-learn.org/stable/modules/generated/sklearn.datasets.load_boston.html) -. -The data was collected in 1978 from Boston area and consists of 506 entries with 14 features including the value of homes. -We use `sklearn.datasets` module to download it easily, then split the set into training and testing by 75/25. - - -```python -boston = load_boston() - -feature_cols = ['f' + str(i) for i in range(boston.data.shape[1])] -header = ['target'] + feature_cols -df = spark.createDataFrame(pd.DataFrame(data=np.column_stack((boston.target, boston.data)), columns=header)).repartition(1) -print("Dataframe has {} rows".format(df.count())) -display(df.limit(10).toPandas()) -``` - - -```python -train_data, test_data = df.randomSplit([0.75, 0.25], seed=42) -``` - -Following is the summary of the training set. - - -```python -display(train_data.summary().toPandas()) -``` - -Plot feature distributions over different target values (house prices in our case). 
-
-
-```python
-features = train_data.columns[1:]
-values = train_data.drop('target').toPandas()
-ncols = 5
-nrows = math.ceil(len(features) / ncols)
-```
-
-## Baseline - Spark MLlib Linear Regressor
-
-First, we set a baseline performance by using the Linear Regression model in Spark MLlib.
-
-
-```python
-featurizer = VectorAssembler(inputCols=feature_cols, outputCol='features')
-lr_train_data = featurizer.transform(train_data)['target', 'features']
-lr_test_data = featurizer.transform(test_data)['target', 'features']
-display(lr_train_data.limit(10).toPandas())
-```
-
-
-```python
-# By default, `maxIter` is 100. Other params you may want to change include: `regParam`, `elasticNetParam`, etc.
-lr = LinearRegression(labelCol='target')
-
-lr_model = lr.fit(lr_train_data)
-lr_predictions = lr_model.transform(lr_test_data)
-
-display(lr_predictions.limit(10).toPandas())
-```
-
-We evaluate the prediction result by using `synapse.ml.train.ComputeModelStatistics`, which returns four metrics:
-* [MSE (Mean Squared Error)](https://en.wikipedia.org/wiki/Mean_squared_error)
-* [RMSE (Root Mean Squared Error)](https://en.wikipedia.org/wiki/Root-mean-square_deviation) = sqrt(MSE)
-* [R squared](https://en.wikipedia.org/wiki/Coefficient_of_determination)
-* [MAE (Mean Absolute Error)](https://en.wikipedia.org/wiki/Mean_absolute_error)
-
-
-```python
-metrics = ComputeModelStatistics(
-    evaluationMetric='regression',
-    labelCol='target',
-    scoresCol='prediction').transform(lr_predictions)
-
-results = metrics.toPandas()
-results.insert(0, 'model', ['Spark MLlib - Linear Regression'])
-display(results)
-```
-
-## Vowpal Wabbit
-
-Perform VW-style feature hashing. Many types (numbers, string, bool, map of string to (number, string)) are supported.
-
-
-```python
-vw_featurizer = VowpalWabbitFeaturizer(
-    inputCols=feature_cols,
-    outputCol='features')
-
-vw_train_data = vw_featurizer.transform(train_data)['target', 'features']
-vw_test_data = vw_featurizer.transform(test_data)['target', 'features']
-display(vw_train_data.limit(10).toPandas())
-```
-
-See [VW wiki](https://github.com/vowpalWabbit/vowpal_wabbit/wiki/Command-Line-Arguments) for command line arguments.
- - -```python -# Use the same number of iterations as Spark MLlib's Linear Regression (=100) -args = "--holdout_off --loss_function quantile -l 7 -q :: --power_t 0.3" -vwr = VowpalWabbitRegressor( - labelCol='target', - args=args, - numPasses=100) - -# To reduce number of partitions (which will effect performance), use `vw_train_data.repartition(1)` -vw_train_data_2 = vw_train_data.repartition(1).cache() -print(vw_train_data_2.count()) -vw_model = vwr.fit(vw_train_data_2.repartition(1)) -vw_predictions = vw_model.transform(vw_test_data) - -display(vw_predictions.limit(10).toPandas()) -``` - - -```python -metrics = ComputeModelStatistics( - evaluationMetric='regression', - labelCol='target', - scoresCol='prediction').transform(vw_predictions) - -vw_result = metrics.toPandas() -vw_result.insert(0, 'model', ['Vowpal Wabbit']) -results = results.append( - vw_result, - ignore_index=True) - -display(results) -``` - -## LightGBM - - -```python -lgr = LightGBMRegressor( - objective='quantile', - alpha=0.2, - learningRate=0.3, - numLeaves=31, - labelCol='target', - numIterations=100) - -# Using one partition since the training dataset is very small -repartitioned_data = lr_train_data.repartition(1).cache() -print(repartitioned_data.count()) -lg_model = lgr.fit(repartitioned_data) -lg_predictions = lg_model.transform(lr_test_data) - -display(lg_predictions.limit(10).toPandas()) -``` - - -```python -metrics = ComputeModelStatistics( - evaluationMetric='regression', - labelCol='target', - scoresCol='prediction').transform(lg_predictions) - -lg_result = metrics.toPandas() -lg_result.insert(0, 'model', ['LightGBM']) - -results = results.append( - lg_result, - ignore_index=True) - -display(results) -``` - -Following figure shows the actual-vs.-prediction graphs of the results: - -lr-vw-lg - - -```python -if os.environ.get("AZURE_SERVICE", None) != "Microsoft.ProjectArcadia": - from matplotlib.colors import ListedColormap, Normalize - from matplotlib.cm import get_cmap - import matplotlib.pyplot as plt - - f, axes = plt.subplots(nrows, ncols, sharey=True, figsize=(30,10)) - f.tight_layout() - yy = [r['target'] for r in train_data.select('target').collect()] - for irow in range(nrows): - axes[irow][0].set_ylabel('target') - for icol in range(ncols): - try: - feat = features[irow*ncols + icol] - xx = values[feat] - axes[irow][icol].scatter(xx, yy, s=10, alpha=0.25) - axes[irow][icol].set_xlabel(feat) - axes[irow][icol].get_yaxis().set_ticks([]) - except IndexError: - f.delaxes(axes[irow][icol]) - - cmap = get_cmap('YlOrRd') - - target = np.array(test_data.select('target').collect()).flatten() - model_preds = [ - ("Spark MLlib Linear Regression", lr_predictions), - ("Vowpal Wabbit", vw_predictions), - ("LightGBM", lg_predictions)] - - f, axes = plt.subplots(1, len(model_preds), sharey=True, figsize=(18, 6)) - f.tight_layout() - - for i, (model_name, preds) in enumerate(model_preds): - preds = np.array(preds.select('prediction').collect()).flatten() - err = np.absolute(preds - target) - - norm = Normalize() - clrs = cmap(np.asarray(norm(err)))[:, :-1] - axes[i].scatter(preds, target, s=60, c=clrs, edgecolors='#888888', alpha=0.75) - axes[i].plot((0, 60), (0, 60), linestyle='--', color='#888888') - axes[i].set_xlabel('Predicted values') - if i ==0: - axes[i].set_ylabel('Actual values') - axes[i].set_title(model_name) -``` - - -```python - -``` diff --git a/website/docs/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.md b/website/docs/examples/text_analytics/TextAnalytics - Amazon 
Book Reviews with Word2Vec.md deleted file mode 100644 index d88de1c922..0000000000 --- a/website/docs/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.md +++ /dev/null @@ -1,154 +0,0 @@ ---- -title: TextAnalytics - Amazon Book Reviews with Word2Vec -hide_title: true -status: stable ---- -## TextAnalytics - Amazon Book Reviews with Word2Vec - -Yet again, now using the `Word2Vec` Estimator from Spark. We can use the tree-based -learners from spark in this scenario due to the lower dimensionality representation of -features. - - -```python -import os -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - from pyspark.sql import SparkSession - spark = SparkSession.builder.getOrCreate() -``` - - -```python -import pandas as pd - -``` - - -```python -data = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/BookReviewsFromAmazon10K.parquet") -data.limit(10).toPandas() -``` - -Modify the label column to predict a rating greater than 3. - - -```python -processedData = data.withColumn("label", data["rating"] > 3) \ - .select(["text", "label"]) -processedData.limit(5).toPandas() -``` - -Split the dataset into train, test and validation sets. - - -```python -train, test, validation = processedData.randomSplit([0.60, 0.20, 0.20]) -``` - -Use `Tokenizer` and `Word2Vec` to generate the features. - - -```python -from pyspark.ml import Pipeline -from pyspark.ml.feature import Tokenizer, Word2Vec -tokenizer = Tokenizer(inputCol="text", outputCol="words") -partitions = train.rdd.getNumPartitions() -word2vec = Word2Vec(maxIter=4, seed=42, inputCol="words", outputCol="features", - numPartitions=partitions) -textFeaturizer = Pipeline(stages = [tokenizer, word2vec]).fit(train) -``` - -Transform each of the train, test and validation datasets. - - -```python -ptrain = textFeaturizer.transform(train).select(["label", "features"]) -ptest = textFeaturizer.transform(test).select(["label", "features"]) -pvalidation = textFeaturizer.transform(validation).select(["label", "features"]) -ptrain.limit(5).toPandas() -``` - -Generate several models with different parameters from the training data. - - -```python -from pyspark.ml.classification import LogisticRegression, RandomForestClassifier, GBTClassifier - -from synapse.ml.train import TrainClassifier - -import itertools - - - -lrHyperParams = [0.05, 0.2] - -logisticRegressions = [LogisticRegression(regParam = hyperParam) - - for hyperParam in lrHyperParams] - -lrmodels = [TrainClassifier(model=lrm, labelCol="label").fit(ptrain) - - for lrm in logisticRegressions] - - - -rfHyperParams = itertools.product([5, 10], [2, 3]) - -randomForests = [RandomForestClassifier(numTrees=hyperParam[0], maxDepth=hyperParam[1]) - - for hyperParam in rfHyperParams] - -rfmodels = [TrainClassifier(model=rfm, labelCol="label").fit(ptrain) - - for rfm in randomForests] - - - -gbtHyperParams = itertools.product([8, 16], [2, 3]) - -gbtclassifiers = [GBTClassifier(maxBins=hyperParam[0], maxDepth=hyperParam[1]) - - for hyperParam in gbtHyperParams] - -gbtmodels = [TrainClassifier(model=gbt, labelCol="label").fit(ptrain) - - for gbt in gbtclassifiers] - - - -trainedModels = lrmodels + rfmodels + gbtmodels -``` - -Find the best model for the given test dataset. 
- - -```python -from synapse.ml.automl import FindBestModel - -bestModel = FindBestModel(evaluationMetric="AUC", models=trainedModels).fit(ptest) - -bestModel.getRocCurve().show() - -bestModel.getBestModelMetrics().show() - -bestModel.getAllModelMetrics().show() -``` - -Get the accuracy from the validation dataset. - - -```python -from synapse.ml.train import ComputeModelStatistics - -predictions = bestModel.transform(pvalidation) - -metrics = ComputeModelStatistics().transform(predictions) - -print("Best model's accuracy on validation set = " - - + "{0:.2f}%".format(metrics.first()["accuracy"] * 100)) - -print("Best model's AUC on validation set = " - - + "{0:.2f}%".format(metrics.first()["AUC"] * 100)) -``` diff --git a/website/docs/examples/text_analytics/TextAnalytics - Amazon Book Reviews.md b/website/docs/examples/text_analytics/TextAnalytics - Amazon Book Reviews.md deleted file mode 100644 index ca9e1b635f..0000000000 --- a/website/docs/examples/text_analytics/TextAnalytics - Amazon Book Reviews.md +++ /dev/null @@ -1,112 +0,0 @@ ---- -title: TextAnalytics - Amazon Book Reviews -hide_title: true -status: stable ---- -## TextAnalytics - Amazon Book Reviews - -Again, try to predict Amazon book ratings greater than 3 out of 5, this time using -the `TextFeaturizer` module which is a composition of several text analytics APIs that -are native to Spark. - - -```python -import os -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - from pyspark.sql import SparkSession - spark = SparkSession.builder.getOrCreate() -``` - - -```python -import pandas as pd -``` - - -```python -data = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/BookReviewsFromAmazon10K.parquet") -data.limit(10).toPandas() -``` - -Use `TextFeaturizer` to generate our features column. We remove stop words, and use TF-IDF -to generate 2²⁰ sparse features. - - -```python -from synapse.ml.featurize.text import TextFeaturizer - -textFeaturizer = TextFeaturizer() \ - - .setInputCol("text").setOutputCol("features") \ - - .setUseStopWordsRemover(True).setUseIDF(True).setMinDocFreq(5).setNumFeatures(1 << 16).fit(data) -``` - - -```python -processedData = textFeaturizer.transform(data) -processedData.limit(5).toPandas() -``` - -Change the label so that we can predict whether the rating is greater than 3 using a binary -classifier. - - -```python -processedData = processedData.withColumn("label", processedData["rating"] > 3) \ - .select(["features", "label"]) -processedData.limit(5).toPandas() -``` - -Train several Logistic Regression models with different regularizations. - - -```python -train, test, validation = processedData.randomSplit([0.60, 0.20, 0.20]) - -from pyspark.ml.classification import LogisticRegression - - - -lrHyperParams = [0.05, 0.1, 0.2, 0.4] - -logisticRegressions = [LogisticRegression(regParam = hyperParam) for hyperParam in lrHyperParams] - - - -from synapse.ml.train import TrainClassifier - -lrmodels = [TrainClassifier(model=lrm, labelCol="label").fit(train) for lrm in logisticRegressions] -``` - -Find the model with the best AUC on the test set. - - -```python -from synapse.ml.automl import FindBestModel, BestModel - -bestModel = FindBestModel(evaluationMetric="AUC", models=lrmodels).fit(test) - -bestModel.getRocCurve().show() - -bestModel.getBestModelMetrics().show() - -bestModel.getAllModelMetrics().show() - - -``` - -Use the optimized `ComputeModelStatistics` API to find the model accuracy. 
- - -```python -from synapse.ml.train import ComputeModelStatistics - -predictions = bestModel.transform(validation) - -metrics = ComputeModelStatistics().transform(predictions) - -print("Best model's accuracy on validation set = " - - + "{0:.2f}%".format(metrics.first()["accuracy"] * 100)) -``` diff --git a/website/docs/features/CognitiveServices - Overview.md b/website/docs/features/CognitiveServices - Overview.md deleted file mode 100644 index cb607de9c2..0000000000 --- a/website/docs/features/CognitiveServices - Overview.md +++ /dev/null @@ -1,418 +0,0 @@ ---- -title: CognitiveServices - Overview -hide_title: true -status: stable ---- - - -# Cognitive Services - -[Azure Cognitive Services](https://azure.microsoft.com/en-us/services/cognitive-services/) are a suite of APIs, SDKs, and services available to help developers build intelligent applications without having direct AI or data science skills or knowledge by enabling developers to easily add cognitive features into their applications. The goal of Azure Cognitive Services is to help developers create applications that can see, hear, speak, understand, and even begin to reason. The catalog of services within Azure Cognitive Services can be categorized into five main pillars - Vision, Speech, Language, Web Search, and Decision. - -## Usage - -### Vision -[**Computer Vision**](https://azure.microsoft.com/en-us/services/cognitive-services/computer-vision/) -- Describe: provides description of an image in human readable language ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DescribeImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DescribeImage)) -- Analyze (color, image type, face, adult/racy content): analyzes visual features of an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeImage)) -- OCR: reads text from an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/OCR.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.OCR)) -- Recognize Text: reads text from an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/RecognizeText.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.RecognizeText)) -- Thumbnail: generates a thumbnail of user-specified size from the image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/GenerateThumbnails.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.GenerateThumbnails)) -- Recognize domain-specific content: recognizes domain-specific content (celebrity, landmark) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/RecognizeDomainSpecificContent.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.RecognizeDomainSpecificContent)) -- Tag: identifies list of words that are relevant to the in0put image 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/TagImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.TagImage)) - -[**Face**](https://azure.microsoft.com/en-us/services/cognitive-services/face/) -- Detect: detects human faces in an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DetectFace.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectFace)) -- Verify: verifies whether two faces belong to a same person, or a face belongs to a person ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/VerifyFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.VerifyFaces)) -- Identify: finds the closest matches of the specific query person face from a person group ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/IdentifyFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.IdentifyFaces)) -- Find similar: finds similar faces to the query face in a face list ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/FindSimilarFace.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.FindSimilarFace)) -- Group: divides a group of faces into disjoint groups based on similarity ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/GroupFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.GroupFaces)) - -### Speech -[**Speech Services**](https://azure.microsoft.com/en-us/services/cognitive-services/speech-services/) -- Speech-to-text: transcribes audio streams ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/SpeechToText.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.SpeechToText)) - -### Language -[**Text Analytics**](https://azure.microsoft.com/en-us/services/cognitive-services/text-analytics/) -- Language detection: detects language of the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/LanguageDetector.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.LanguageDetector)) -- Key phrase extraction: identifies the key talking points in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/KeyPhraseExtractor.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.KeyPhraseExtractor)) -- Named entity recognition: identifies known entities and general named entities in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/NER.html), 
[Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.NER)) -- Sentiment analysis: returns a score betwee 0 and 1 indicating the sentiment in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/TextSentiment.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.TextSentiment)) - -[**Translator**](https://azure.microsoft.com/en-us/services/cognitive-services/translator/) -- Translate: Translates text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/Translate.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Translate)) -- Transliterate: Converts text in one language from one script to another script. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/Transliterate.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Transliterate)) -- Detect: Identifies the language of a piece of text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/Detect.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Detect)) -- BreakSentence: Identifies the positioning of sentence boundaries in a piece of text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/BreakSentence.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.BreakSentence)) -- Dictionary Lookup: Provides alternative translations for a word and a small number of idiomatic phrases. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DictionaryLookup.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DictionaryLookup)) -- Dictionary Examples: Provides examples that show how terms in the dictionary are used in context. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DictionaryExamples.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DictionaryExamples)) -- Document Translation: Translates documents across all supported languages and dialects while preserving document structure and data format. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DocumentTranslator.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DocumentTranslator)) - -### Azure Form Recognizer -[**Form Recognizer**](https://azure.microsoft.com/en-us/services/form-recognizer/) -- Analyze Layout: Extract text and layout information from a given document. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeLayout.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeLayout)) -- Analyze Receipts: Detects and extracts data from receipts using optical character recognition (OCR) and our receipt model, enabling you to easily extract structured data from receipts such as merchant name, merchant phone number, transaction date, transaction total, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeReceipts.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeReceipts)) -- Analyze Business Cards: Detects and extracts data from business cards using optical character recognition (OCR) and our business card model, enabling you to easily extract structured data from business cards such as contact names, company names, phone numbers, emails, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeBusinessCards.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeBusinessCards)) -- Analyze Invoices: Detects and extracts data from invoices using optical character recognition (OCR) and our invoice understanding deep learning models, enabling you to easily extract structured data from invoices such as customer, vendor, invoice ID, invoice due date, total, invoice amount due, tax amount, ship to, bill to, line items and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeInvoices.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeInvoices)) -- Analyze ID Documents: Detects and extracts data from identification documents using optical character recognition (OCR) and our ID document model, enabling you to easily extract structured data from ID documents such as first name, last name, date of birth, document number, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeIDDocuments.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeIDDocuments)) -- Analyze Custom Form: Extracts information from forms (PDFs and images) into structured data based on a model created from a set of representative training forms. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeCustomModel.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeCustomModel)) -- Get Custom Model: Get detailed information about a custom model. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/GetCustomModel.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/ListCustomModels.html)) -- List Custom Models: Get information about all custom models. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/ListCustomModels.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.ListCustomModels)) - -### Decision -[**Anomaly Detector**](https://azure.microsoft.com/en-us/services/cognitive-services/anomaly-detector/) -- Anomaly status of latest point: generates a model using preceding points and determines whether the latest point is anomalous ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DetectLastAnomaly.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectLastAnomaly)) -- Find anomalies: generates a model using an entire series and finds anomalies in the series ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DetectAnomalies.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectAnomalies)) - -### Search -- [Bing Image search](https://azure.microsoft.com/en-us/services/cognitive-services/bing-image-search-api/) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/BingImageSearch.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.BingImageSearch)) -- [Azure Cognitive search](https://docs.microsoft.com/en-us/azure/search/search-what-is-azure-search) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/index.html#com.microsoft.azure.synapse.ml.cognitive.AzureSearchWriter$), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AzureSearchWriter)) - - -## Prerequisites - -1. Follow the steps in [Getting started](https://docs.microsoft.com/en-us/azure/cognitive-services/big-data/getting-started) to set up your Azure Databricks and Cognitive Services environment. This tutorial shows you how to install SynapseML and how to create your Spark cluster in Databricks. -1. After you create a new notebook in Azure Databricks, copy the **Shared code** below and paste into a new cell in your notebook. -1. Choose a service sample, below, and copy paste it into a second new cell in your notebook. -1. Replace any of the service subscription key placeholders with your own key. -1. Choose the run button (triangle icon) in the upper right corner of the cell, then select **Run Cell**. -1. View results in a table below the cell. 
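The samples below cover Text Analytics, Translator, Form Recognizer, Computer Vision, Bing Image Search, Speech-to-text, Anomaly Detector, arbitrary web APIs, and Azure Cognitive Search. The other services listed above follow the same transformer pattern; as a minimal sketch (not one of the official samples below), the OCR API from the Vision pillar could be called as follows, assuming the `service_key` defined in the **Shared code** section below and a placeholder image URL.

```python
from synapse.ml.cognitive import OCR

# Placeholder image URL; substitute an image that contains printed text
ocr_df = spark.createDataFrame([
    ("https://example.com/printed-text.jpg",)
], ["image"])

# Assumes `service_key` from the Shared code cell below
ocr = (OCR()
    .setSubscriptionKey(service_key)
    .setLocation("eastus")
    .setImageUrlCol("image")
    .setOutputCol("ocr"))

display(ocr.transform(ocr_df).select("image", "ocr"))
```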
- -## Shared code - -To get started, we'll need to add this code to the project: - - -```python -from pyspark.sql.functions import udf, col - -from synapse.ml.io.http import HTTPTransformer, http_udf - -from requests import Request - -from pyspark.sql.functions import lit - -from pyspark.ml import PipelineModel - -from pyspark.sql.functions import col - -import os - - -``` - - -```python -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - from pyspark.sql import SparkSession - spark = SparkSession.builder.getOrCreate() - from notebookutils.mssparkutils.credentials import getSecret - os.environ['ANOMALY_API_KEY'] = getSecret( - "mmlspark-keys", "anomaly-api-key") - os.environ['TEXT_API_KEY'] = getSecret("mmlspark-keys", "mmlspark-cs-key") - os.environ['BING_IMAGE_SEARCH_KEY'] = getSecret( - "mmlspark-keys", "mmlspark-bing-search-key") - os.environ['VISION_API_KEY'] = getSecret( - "mmlspark-keys", "mmlspark-cs-key") - os.environ['AZURE_SEARCH_KEY'] = getSecret( - "mmlspark-keys", "azure-search-key") -``` - - -```python -from synapse.ml.cognitive import * - - - -# A general Cognitive Services key for Text Analytics, Computer Vision and Form Recognizer (or use separate keys that belong to each service) - -service_key = os.environ["COGNITIVE_SERVICE_KEY"] - -# A Bing Search v7 subscription key - -bing_search_key = os.environ["BING_IMAGE_SEARCH_KEY"] - -# An Anomaly Dectector subscription key - -anomaly_key = os.environ["ANOMALY_API_KEY"] - -# A Translator subscription key - -translator_key = os.environ["TRANSLATOR_KEY"] -``` - -## Text Analytics sample - -The [Text Analytics](https://azure.microsoft.com/en-us/services/cognitive-services/text-analytics/) service provides several algorithms for extracting intelligent insights from text. For example, we can find the sentiment of given input text. The service will return a score between 0.0 and 1.0 where low scores indicate negative sentiment and high score indicates positive sentiment. This sample uses three simple sentences and returns the sentiment for each. - - -```python -# Create a dataframe that's tied to it's column names -df = spark.createDataFrame([ - ("I am so happy today, its sunny!", "en-US"), - ("I am frustrated by this rush hour traffic", "en-US"), - ("The cognitive services on spark aint bad", "en-US"), -], ["text", "language"]) - -# Run the Text Analytics service with options -sentiment = (TextSentiment() - .setTextCol("text") - .setLocation("eastus") - .setSubscriptionKey(service_key) - .setOutputCol("sentiment") - .setErrorCol("error") - .setLanguageCol("language")) - -# Show the results of your text query in a table format -display(sentiment.transform(df).select("text", col( - "sentiment")[0].getItem("sentiment").alias("sentiment"))) -``` - -## Translator sample -[Translator](https://azure.microsoft.com/en-us/services/cognitive-services/translator/) is a cloud-based machine translation service and is part of the Azure Cognitive Services family of cognitive APIs used to build intelligent apps. Translator is easy to integrate in your applications, websites, tools, and solutions. It allows you to add multi-language user experiences in 90 languages and dialects and can be used for text translation with any operating system. In this sample, we do a simple text translation by providing the sentences you want to translate and target languages you want to translate to. 
- - -```python -from pyspark.sql.functions import col, flatten - -# Create a dataframe including sentences you want to translate -df = spark.createDataFrame([ - (["Hello, what is your name?", "Bye"],) -], ["text",]) - -# Run the Translator service with options -translate = (Translate() - .setSubscriptionKey(translator_key) - .setLocation("eastus") - .setTextCol("text") - .setToLanguage(["zh-Hans"]) - .setOutputCol("translation")) - -# Show the results of the translation. -display(translate - .transform(df) - .withColumn("translation", flatten(col("translation.translations"))) - .withColumn("translation", col("translation.text")) - .select("translation")) -``` - -## Form Recognizer sample -[Form Recognizer](https://azure.microsoft.com/en-us/services/form-recognizer/) is a part of Azure Applied AI Services that lets you build automated data processing software using machine learning technology. Identify and extract text, key/value pairs, selection marks, tables, and structure from your documents—the service outputs structured data that includes the relationships in the original file, bounding boxes, confidence and more. In this sample, we analyze a business card image and extract its information into structured data. - - -```python -from pyspark.sql.functions import col, explode - -# Create a dataframe containing the source files -imageDf = spark.createDataFrame([ - ("https://mmlspark.blob.core.windows.net/datasets/FormRecognizer/business_card.jpg",) -], ["source",]) - -# Run the Form Recognizer service -analyzeBusinessCards = (AnalyzeBusinessCards() - .setSubscriptionKey(service_key) - .setLocation("eastus") - .setImageUrlCol("source") - .setOutputCol("businessCards")) - -# Show the results of recognition. -display(analyzeBusinessCards - .transform(imageDf) - .withColumn("documents", explode(col("businessCards.analyzeResult.documentResults.fields"))) - .select("source", "documents")) -``` - -## Computer Vision sample - -[Computer Vision](https://azure.microsoft.com/en-us/services/cognitive-services/computer-vision/) analyzes images to identify structure such as faces, objects, and natural-language descriptions. In this sample, we tag a list of images. Tags are one-word descriptions of things in the image like recognizable objects, people, scenery, and actions. - - -```python -# Create a dataframe with the image URLs -df = spark.createDataFrame([ - ("https://raw.githubusercontent.com/Azure-Samples/cognitive-services-sample-data-files/master/ComputerVision/Images/objects.jpg", ), - ("https://raw.githubusercontent.com/Azure-Samples/cognitive-services-sample-data-files/master/ComputerVision/Images/dog.jpg", ), - ("https://raw.githubusercontent.com/Azure-Samples/cognitive-services-sample-data-files/master/ComputerVision/Images/house.jpg", ) -], ["image", ]) - -# Run the Computer Vision service. Analyze Image extracts infortmation from/about the images. -analysis = (AnalyzeImage() - .setLocation("eastus") - .setSubscriptionKey(service_key) - .setVisualFeatures(["Categories", "Color", "Description", "Faces", "Objects", "Tags"]) - .setOutputCol("analysis_results") - .setImageUrlCol("image") - .setErrorCol("error")) - -# Show the results of what you wanted to pull out of the images. -display(analysis.transform(df).select( - "image", "analysis_results.description.tags")) - -``` - -## Bing Image Search sample - -[Bing Image Search](https://azure.microsoft.com/en-us/services/cognitive-services/bing-image-search-api/) searches the web to retrieve images related to a user's natural language query. 
In this sample, we use a text query that looks for images with quotes. It returns a list of image URLs that contain photos related to our query. - - -```python -# Number of images Bing will return per query -imgsPerBatch = 10 -# A list of offsets, used to page into the search results -offsets = [(i*imgsPerBatch,) for i in range(100)] -# Since web content is our data, we create a dataframe with options on that data: offsets -bingParameters = spark.createDataFrame(offsets, ["offset"]) - -# Run the Bing Image Search service with our text query -bingSearch = (BingImageSearch() - .setSubscriptionKey(bing_search_key) - .setOffsetCol("offset") - .setQuery("Martin Luther King Jr. quotes") - .setCount(imgsPerBatch) - .setOutputCol("images")) - -# Transformer that extracts and flattens the richly structured output of Bing Image Search into a simple URL column -getUrls = BingImageSearch.getUrlTransformer("images", "url") - -# This displays the full results returned, uncomment to use -# display(bingSearch.transform(bingParameters)) - -# Since we have two services, they are put into a pipeline -pipeline = PipelineModel(stages=[bingSearch, getUrls]) - -# Show the results of your search: image URLs -display(pipeline.transform(bingParameters)) - -``` - -## Speech-to-Text sample -The [Speech-to-text](https://azure.microsoft.com/en-us/services/cognitive-services/speech-services/) service converts streams or files of spoken audio to text. In this sample, we transcribe one audio file. - - -```python -# Create a dataframe with our audio URLs, tied to the column called "url" -df = spark.createDataFrame([("https://mmlspark.blob.core.windows.net/datasets/Speech/audio2.wav",) - ], ["url"]) - -# Run the Speech-to-text service to translate the audio into text -speech_to_text = (SpeechToTextSDK() - .setSubscriptionKey(service_key) - .setLocation("eastus") - .setOutputCol("text") - .setAudioDataCol("url") - .setLanguage("en-US") - .setProfanity("Masked")) - -# Show the results of the translation -display(speech_to_text.transform(df).select("url", "text.DisplayText")) - -``` - -## Anomaly Detector sample - -[Anomaly Detector](https://azure.microsoft.com/en-us/services/cognitive-services/anomaly-detector/) is great for detecting irregularities in your time series data. In this sample, we use the service to find anomalies in the entire time series. 
- - -```python -# Create a dataframe with the point data that Anomaly Detector requires -df = spark.createDataFrame([ - ("1972-01-01T00:00:00Z", 826.0), - ("1972-02-01T00:00:00Z", 799.0), - ("1972-03-01T00:00:00Z", 890.0), - ("1972-04-01T00:00:00Z", 900.0), - ("1972-05-01T00:00:00Z", 766.0), - ("1972-06-01T00:00:00Z", 805.0), - ("1972-07-01T00:00:00Z", 821.0), - ("1972-08-01T00:00:00Z", 20000.0), - ("1972-09-01T00:00:00Z", 883.0), - ("1972-10-01T00:00:00Z", 898.0), - ("1972-11-01T00:00:00Z", 957.0), - ("1972-12-01T00:00:00Z", 924.0), - ("1973-01-01T00:00:00Z", 881.0), - ("1973-02-01T00:00:00Z", 837.0), - ("1973-03-01T00:00:00Z", 9000.0) -], ["timestamp", "value"]).withColumn("group", lit("series1")) - -# Run the Anomaly Detector service to look for irregular data -anamoly_detector = (SimpleDetectAnomalies() - .setSubscriptionKey(anomaly_key) - .setLocation("eastus") - .setTimestampCol("timestamp") - .setValueCol("value") - .setOutputCol("anomalies") - .setGroupbyCol("group") - .setGranularity("monthly")) - -# Show the full results of the analysis with the anomalies marked as "True" -display(anamoly_detector.transform(df).select( - "timestamp", "value", "anomalies.isAnomaly")) -``` - -## Arbitrary web APIs - -With HTTP on Spark, any web service can be used in your big data pipeline. In this example, we use the [World Bank API](http://api.worldbank.org/v2/country/) to get information about various countries around the world. - - -```python -# Use any requests from the python requests library - -def world_bank_request(country): - return Request("GET", "http://api.worldbank.org/v2/country/{}?format=json".format(country)) - - -# Create a dataframe with spcificies which countries we want data on -df = (spark.createDataFrame([("br",), ("usa",)], ["country"]) - .withColumn("request", http_udf(world_bank_request)(col("country")))) - -# Much faster for big data because of the concurrency :) -client = (HTTPTransformer() - .setConcurrency(3) - .setInputCol("request") - .setOutputCol("response")) - -# Get the body of the response - - -def get_response_body(resp): - return resp.entity.content.decode() - - -# Show the details of the country data returned -display(client.transform(df) - .select("country", udf(get_response_body)(col("response")) - .alias("response"))) - -``` - -## Azure Cognitive search sample - -In this example, we show how you can enrich data using Cognitive Skills and write to an Azure Search Index using SynapseML. 
- - -```python -VISION_API_KEY = os.environ['VISION_API_KEY'] -AZURE_SEARCH_KEY = os.environ['AZURE_SEARCH_KEY'] -search_service = "mmlspark-azure-search" -search_index = "test-33467690" - -df = spark.createDataFrame([("upload", "0", "https://mmlspark.blob.core.windows.net/datasets/DSIR/test1.jpg"), - ("upload", "1", "https://mmlspark.blob.core.windows.net/datasets/DSIR/test2.jpg")], - ["searchAction", "id", "url"]) - -tdf = AnalyzeImage()\ - .setSubscriptionKey(VISION_API_KEY)\ - .setLocation("eastus")\ - .setImageUrlCol("url")\ - .setOutputCol("analyzed")\ - .setErrorCol("errors")\ - .setVisualFeatures(["Categories", "Tags", "Description", "Faces", "ImageType", "Color", "Adult"])\ - .transform(df).select("*", "analyzed.*")\ - .drop("errors", "analyzed") - -tdf.writeToAzureSearch(subscriptionKey=AZURE_SEARCH_KEY, - actionCol="searchAction", - serviceName=search_service, - indexName=search_index, - keyCol="id") - -``` diff --git a/website/docs/features/http/HttpOnSpark - Working with Arbitrary Web APIs.md b/website/docs/features/http/HttpOnSpark - Working with Arbitrary Web APIs.md deleted file mode 100644 index 9560287c84..0000000000 --- a/website/docs/features/http/HttpOnSpark - Working with Arbitrary Web APIs.md +++ /dev/null @@ -1,48 +0,0 @@ ---- -title: HttpOnSpark - Working with Arbitrary Web APIs -hide_title: true -status: stable ---- -### Use "dogs as a service" in a distributed fashion with HTTP on Spark - -In this example we will use the simple HTTP Transformer to call a public webAPI that returns random images of dogs. The service does not use the json payload, but this is for example purposes. - -A call to the dog service returns json objects structured like: - -`{"status":"success","message":"https:\/\/images.dog.ceo\/breeds\/lhasa\/n02098413_2536.jpg"}` - -If you visit the link you can download the image: - - - - - - - -```python -import os - -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - from pyspark.sql import SparkSession - spark = SparkSession.builder.getOrCreate() - -from pyspark.sql.functions import struct -from pyspark.sql.types import * -from synapse.ml.io.http import * - -df = spark.createDataFrame([("foo",) for x in range(20)], ["data"]) \ - .withColumn("inputs", struct("data")) - -response_schema = StructType().add("status", StringType()).add("message", StringType()) - -client = SimpleHTTPTransformer() \ - .setInputCol("inputs") \ - .setInputParser(JSONInputParser()) \ - .setOutputParser(JSONOutputParser().setDataType(response_schema)) \ - .setOutputCol("results") \ - .setUrl("https://dog.ceo/api/breeds/image/random") - -responses = client.transform(df) -responses.select("results").show(truncate = False) -``` diff --git a/website/docs/features/http/about.md b/website/docs/features/http/about.md index 844043b705..e209dd198f 100644 --- a/website/docs/features/http/about.md +++ b/website/docs/features/http/about.md @@ -95,7 +95,7 @@ In HTTP on Spark, each partition manages a running web client that sends requests. A schematic representation can be seen below:

-[schematic: each Spark partition runs a web client that sends HTTP requests]
+[schematic: each Spark partition runs a web client that sends HTTP requests]

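For reference, a small usage sketch of this client-per-partition model (mirroring the arbitrary-web-API example elsewhere in these docs, with a placeholder endpoint): the `setConcurrency` option controls how many requests are issued concurrently.

```python
from pyspark.sql.functions import col
from requests import Request
from synapse.ml.io.http import HTTPTransformer, http_udf

# Build one HTTP request per row; the endpoint is only a placeholder
df = (spark.createDataFrame([("br",), ("usa",)], ["country"])
      .withColumn("request",
                  http_udf(lambda c: Request("GET", "https://example.com/api/{}".format(c)))(col("country"))))

# Each partition's web client sends its rows' requests, several at a time
client = (HTTPTransformer()
          .setConcurrency(3)
          .setInputCol("request")
          .setOutputCol("response"))

display(client.transform(df))
```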
## Schema diff --git a/website/docs/features/lightgbm/LightGBM - Overview.md b/website/docs/features/lightgbm/LightGBM - Overview.md deleted file mode 100644 index 6802c3b748..0000000000 --- a/website/docs/features/lightgbm/LightGBM - Overview.md +++ /dev/null @@ -1,290 +0,0 @@ ---- -title: LightGBM - Overview -hide_title: true -status: stable ---- -# LightGBM - -[LightGBM](https://github.com/Microsoft/LightGBM) is an open-source, -distributed, high-performance gradient boosting (GBDT, GBRT, GBM, or -MART) framework. This framework specializes in creating high-quality and -GPU enabled decision tree algorithms for ranking, classification, and -many other machine learning tasks. LightGBM is part of Microsoft's -[DMTK](http://github.com/microsoft/dmtk) project. - -### Advantages of LightGBM - -- **Composability**: LightGBM models can be incorporated into existing - SparkML Pipelines, and used for batch, streaming, and serving - workloads. -- **Performance**: LightGBM on Spark is 10-30% faster than SparkML on - the Higgs dataset, and achieves a 15% increase in AUC. [Parallel - experiments](https://github.com/Microsoft/LightGBM/blob/master/docs/Experiments.rst#parallel-experiment) - have verified that LightGBM can achieve a linear speed-up by using - multiple machines for training in specific settings. -- **Functionality**: LightGBM offers a wide array of [tunable - parameters](https://github.com/Microsoft/LightGBM/blob/master/docs/Parameters.rst), - that one can use to customize their decision tree system. LightGBM on - Spark also supports new types of problems such as quantile regression. -- **Cross platform** LightGBM on Spark is available on Spark, PySpark, and SparklyR - -### LightGBM Usage: - -- LightGBMClassifier: used for building classification models. For example, to predict whether a company will bankrupt or not, we could build a binary classification model with LightGBMClassifier. -- LightGBMRegressor: used for building regression models. For example, to predict the house price, we could build a regression model with LightGBMRegressor. -- LightGBMRanker: used for building ranking models. For example, to predict website searching result relevance, we could build a ranking model with LightGBMRanker. - -## Bankruptcy Prediction with LightGBM Classifier - - - -In this example, we use LightGBM to build a classification model in order to predict bankruptcy. 
- -#### Read dataset - - -```python -import os - -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - from pyspark.sql import SparkSession - spark = SparkSession.builder.getOrCreate() -``` - - -```python -df = spark.read.format("csv")\ - .option("header", True)\ - .option("inferSchema", True)\ - .load("wasbs://publicwasb@mmlspark.blob.core.windows.net/company_bankruptcy_prediction_data.csv") -# print dataset size -print("records read: " + str(df.count())) -print("Schema: ") -df.printSchema() -``` - - -```python -display(df) -``` - -#### Split the dataset into train and test - - -```python -train, test = df.randomSplit([0.85, 0.15], seed=1) -``` - -#### Add featurizer to convert features to vector - - -```python -from pyspark.ml.feature import VectorAssembler -feature_cols = df.columns[1:] -featurizer = VectorAssembler( - inputCols=feature_cols, - outputCol='features' -) -train_data = featurizer.transform(train)['Bankrupt?', 'features'] -test_data = featurizer.transform(test)['Bankrupt?', 'features'] -``` - -#### Check if the data is unbalanced - - -```python -display(train_data.groupBy("Bankrupt?").count()) -``` - -#### Model Training - - -```python -from synapse.ml.lightgbm import LightGBMClassifier - -model = LightGBMClassifier(objective="binary", featuresCol="features", labelCol="Bankrupt?", isUnbalance=True) -``` - - -```python -model = model.fit(train_data) -``` - -By calling "saveNativeModel", it allows you to extract the underlying lightGBM model for fast deployment after you train on Spark. - - -```python -from synapse.ml.lightgbm import LightGBMClassificationModel - - - -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - - model.saveNativeModel("/models/lgbmclassifier.model") - - model = LightGBMClassificationModel.loadNativeModelFromFile("/models/lgbmclassifier.model") - -else: - - model.saveNativeModel("/lgbmclassifier.model") - - model = LightGBMClassificationModel.loadNativeModelFromFile("/lgbmclassifier.model") - - -``` - -#### Feature Importances Visualization - - -```python -import pandas as pd -import matplotlib.pyplot as plt - -feature_importances = model.getFeatureImportances() -fi = pd.Series(feature_importances,index = feature_cols) -fi = fi.sort_values(ascending = True) -f_index = fi.index -f_values = fi.values - -# print feature importances -print ('f_index:',f_index) -print ('f_values:',f_values) - -# plot -x_index = list(range(len(fi))) -x_index = [x/len(fi) for x in x_index] -plt.rcParams['figure.figsize'] = (20,20) -plt.barh(x_index,f_values,height = 0.028 ,align="center",color = 'tan',tick_label=f_index) -plt.xlabel('importances') -plt.ylabel('features') -plt.show() -``` - -#### Model Prediction - - -```python -predictions = model.transform(test_data) -predictions.limit(10).toPandas() -``` - - -```python -from synapse.ml.train import ComputeModelStatistics -metrics = ComputeModelStatistics(evaluationMetric="classification", labelCol='Bankrupt?', scoredLabelsCol='prediction').transform(predictions) -display(metrics) -``` - -## Quantile Regression for Drug Discovery with LightGBMRegressor - - - -In this example, we show how to use LightGBM to build a simple regression model. 
- -#### Read dataset - - -```python -triazines = spark.read.format("libsvm")\ - .load("wasbs://publicwasb@mmlspark.blob.core.windows.net/triazines.scale.svmlight") -``` - - -```python -# print some basic info -print("records read: " + str(triazines.count())) -print("Schema: ") -triazines.printSchema() -display(triazines.limit(10)) -``` - -#### Split dataset into train and test - - -```python -train, test = triazines.randomSplit([0.85, 0.15], seed=1) -``` - -#### Model Training - - -```python -from synapse.ml.lightgbm import LightGBMRegressor -model = LightGBMRegressor(objective='quantile', - alpha=0.2, - learningRate=0.3, - numLeaves=31).fit(train) -``` - - -```python -print(model.getFeatureImportances()) -``` - -#### Model Prediction - - -```python -scoredData = model.transform(test) -display(scoredData) -``` - - -```python -from synapse.ml.train import ComputeModelStatistics -metrics = ComputeModelStatistics(evaluationMetric='regression', - labelCol='label', - scoresCol='prediction') \ - .transform(scoredData) -display(metrics) -``` - -## LightGBM Ranker - -#### Read dataset - - -```python -df = spark.read.format("parquet").load("wasbs://publicwasb@mmlspark.blob.core.windows.net/lightGBMRanker_train.parquet") -# print some basic info -print("records read: " + str(df.count())) -print("Schema: ") -df.printSchema() -display(df.limit(10)) -``` - -#### Model Training - - -```python -from synapse.ml.lightgbm import LightGBMRanker - -features_col = 'features' -query_col = 'query' -label_col = 'labels' -lgbm_ranker = LightGBMRanker(labelCol=label_col, - featuresCol=features_col, - groupCol=query_col, - predictionCol='preds', - leafPredictionCol='leafPreds', - featuresShapCol='importances', - repartitionByGroupingColumn=True, - numLeaves=32, - numIterations=200, - evalAt=[1,3,5], - metric='ndcg') -``` - - -```python -lgbm_ranker_model = lgbm_ranker.fit(df) -``` - -#### Model Prediction - - -```python -dt = spark.read.format("parquet").load("wasbs://publicwasb@mmlspark.blob.core.windows.net/lightGBMRanker_test.parquet") -predictions = lgbm_ranker_model.transform(dt) -predictions.limit(10).toPandas() -``` diff --git a/website/docs/features/lightgbm/about.md b/website/docs/features/lightgbm/about.md index 07d24ef6b8..1cc9150ad0 100644 --- a/website/docs/features/lightgbm/about.md +++ b/website/docs/features/lightgbm/about.md @@ -55,7 +55,7 @@ model = LightGBMRegressor(application='quantile', ``` For an end to end application, check out the LightGBM [notebook -example](/docs/features/lightgbm/LightGBM%20-%20Overview). +example](../LightGBM%20-%20Overview). ### Architecture diff --git a/website/docs/features/onnx/ONNX - Inference on Spark.md b/website/docs/features/onnx/ONNX - Inference on Spark.md deleted file mode 100644 index 1394a88934..0000000000 --- a/website/docs/features/onnx/ONNX - Inference on Spark.md +++ /dev/null @@ -1,160 +0,0 @@ ---- -title: ONNX - Inference on Spark -hide_title: true -status: stable ---- -## ONNX Inference on Spark - -In this example, we will train a LightGBM model, convert the model to ONNX format and use the converted model to infer some testing data on Spark. 
- -Python dependencies: - -- onnxmltools==1.7.0 -- lightgbm==3.2.1 - - -Load training data - - -```python -df = spark.read.format("csv")\ - .option("header", True)\ - .option("inferSchema", True)\ - .load("wasbs://publicwasb@mmlspark.blob.core.windows.net/company_bankruptcy_prediction_data.csv") - -display(df) -``` - -Use LightGBM to train a model - - -```python -from pyspark.ml.feature import VectorAssembler - -from synapse.ml.lightgbm import LightGBMClassifier - - - -feature_cols = df.columns[1:] - -featurizer = VectorAssembler( - - inputCols=feature_cols, - - outputCol='features' - -) - - - -train_data = featurizer.transform(df)['Bankrupt?', 'features'] - - - -model = ( - - LightGBMClassifier(featuresCol="features", labelCol="Bankrupt?") - - .setEarlyStoppingRound(300) - - .setLambdaL1(0.5) - - .setNumIterations(1000) - - .setNumThreads(-1) - - .setMaxDeltaStep(0.5) - - .setNumLeaves(31) - - .setMaxDepth(-1) - - .setBaggingFraction(0.7) - - .setFeatureFraction(0.7) - - .setBaggingFreq(2) - - .setObjective("binary") - - .setIsUnbalance(True) - - .setMinSumHessianInLeaf(20) - - .setMinGainToSplit(0.01) - -) - - - -model = model.fit(train_data) -``` - -Export the trained model to a LightGBM booster, convert it to ONNX format. - - -```python -import lightgbm as lgb -from lightgbm import Booster, LGBMClassifier - -def convertModel(lgbm_model: LGBMClassifier or Booster, input_size: int) -> bytes: - from onnxmltools.convert import convert_lightgbm - from onnxconverter_common.data_types import FloatTensorType - initial_types = [("input", FloatTensorType([-1, input_size]))] - onnx_model = convert_lightgbm(lgbm_model, initial_types=initial_types, target_opset=9) - return onnx_model.SerializeToString() - -booster_model_str = model.getLightGBMBooster().modelStr().get() -booster = lgb.Booster(model_str=booster_model_str) -model_payload_ml = convertModel(booster, len(df.columns) - 1) -``` - -Load the ONNX payload into an `ONNXModel`, and inspect the model inputs and outputs. - - -```python -from synapse.ml.onnx import ONNXModel - - - -onnx_ml = ONNXModel().setModelPayload(model_payload_ml) - - - -print("Model inputs:" + str(onnx_ml.getModelInputs())) - -print("Model outputs:" + str(onnx_ml.getModelOutputs())) -``` - -Map the model input to the input dataframe's column name (FeedDict), and map the output dataframe's column names to the model outputs (FetchDict). - - -```python -onnx_ml = ( - onnx_ml - .setDeviceType("CPU") - .setFeedDict({"input": "features"}) - .setFetchDict({"probability": "probabilities", "prediction": "label"}) - .setMiniBatchSize(5000) -) -``` - -Create some testing data and transform the data through the ONNX model. 
- - -```python -from pyspark.ml.feature import VectorAssembler -import pandas as pd -import numpy as np - -n = 1000 * 1000 -m = 95 -test = np.random.rand(n, m) -testPdf = pd.DataFrame(test) -cols = list(map(str, testPdf.columns)) -testDf = spark.createDataFrame(testPdf) -testDf = testDf.union(testDf).repartition(200) -testDf = VectorAssembler().setInputCols(cols).setOutputCol("features").transform(testDf).drop(*cols).cache() - -display(onnx_ml.transform(testDf)) -``` diff --git a/website/docs/features/onnx/about.md b/website/docs/features/onnx/about.md index 3153c4c9ca..e9946a0203 100644 --- a/website/docs/features/onnx/about.md +++ b/website/docs/features/onnx/about.md @@ -11,7 +11,7 @@ description: Learn how to use the ONNX model transformer to run inference for an [ONNX](https://onnx.ai/) is an open format to represent both deep learning and traditional machine learning models. With ONNX, AI developers can more easily move models between state-of-the-art tools and choose the combination that is best for them. -MMLSpark now includes a Spark transformer to bring an trained ONNX model to Apache Spark, so you can run inference on your data with Spark's large-scale data processing power. +SynapseML now includes a Spark transformer to bring an trained ONNX model to Apache Spark, so you can run inference on your data with Spark's large-scale data processing power. ## Usage @@ -45,5 +45,5 @@ MMLSpark now includes a Spark transformer to bring an trained ONNX model to Apac ## Example -- [Interpretability - Image Explainers](/docs/examples/responsible_ai/Interpretability%20-%20Image%20Explainers) -- [ONNX - Inference on Spark](/docs/features/onnx/ONNX%20-%20Inference%20on%20Spark) +- [Interpretability - Image Explainers](../../responsible_ai/Interpretability%20-%20Image%20Explainers) +- [ONNX - Inference on Spark](../ONNX%20-%20Inference%20on%20Spark) diff --git a/website/docs/features/responsible_ai/Model Interpretation on Spark.md b/website/docs/features/responsible_ai/Model Interpretation on Spark.md index e25250b2ba..e81aa9525d 100644 --- a/website/docs/features/responsible_ai/Model Interpretation on Spark.md +++ b/website/docs/features/responsible_ai/Model Interpretation on Spark.md @@ -27,7 +27,7 @@ Both explainers extends from `org.apache.spark.ml.Transformer`. After setting up To see examples of model interpretability on Spark in action, take a look at these sample notebooks: - [Tabular SHAP explainer](../../../examples/responsible_ai/Interpretability%20-%20Tabular%20SHAP%20explainer) -- [Image explainers](../../../examples/responsible_ai/Interpretability%20-%20Image%20Explainers) +- [Image explainers](../../../features/responsible_ai/Interpretability%20-%20Image%20Explainers) - [Text explainers](../../../examples/responsible_ai/Interpretability%20-%20Text%20Explainers) | | Tabular models | Vector models | Image models | Text models | diff --git a/website/docs/features/spark_serving/SparkServing - Deploying a Classifier.md b/website/docs/features/spark_serving/SparkServing - Deploying a Classifier.md deleted file mode 100644 index 3cccd6703e..0000000000 --- a/website/docs/features/spark_serving/SparkServing - Deploying a Classifier.md +++ /dev/null @@ -1,120 +0,0 @@ ---- -title: SparkServing - Deploying a Classifier -hide_title: true -status: stable ---- -## Model Deployment with Spark Serving -In this example, we try to predict incomes from the *Adult Census* dataset. Then we will use Spark serving to deploy it as a realtime web service. 
-First, we import needed packages: - - -```python -import os -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - from pyspark.sql import SparkSession - spark = SparkSession.builder.getOrCreate() -``` - - -```python -import sys -import numpy as np -import pandas as pd - -``` - -Now let's read the data and split it to train and test sets: - - -```python -data = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/AdultCensusIncome.parquet") -data = data.select(["education", "marital-status", "hours-per-week", "income"]) -train, test = data.randomSplit([0.75, 0.25], seed=123) -train.limit(10).toPandas() -``` - -`TrainClassifier` can be used to initialize and fit a model, it wraps SparkML classifiers. -You can use `help(synapse.ml.TrainClassifier)` to view the different parameters. - -Note that it implicitly converts the data into the format expected by the algorithm. More specifically it: - tokenizes, hashes strings, one-hot encodes categorical variables, assembles the features into a vector -etc. The parameter `numFeatures` controls the number of hashed features. - - -```python -from synapse.ml.train import TrainClassifier -from pyspark.ml.classification import LogisticRegression -model = TrainClassifier(model=LogisticRegression(), labelCol="income", numFeatures=256).fit(train) -``` - -After the model is trained, we score it against the test dataset and view metrics. - - -```python -from synapse.ml.train import ComputeModelStatistics, TrainedClassifierModel -prediction = model.transform(test) -prediction.printSchema() -``` - - -```python -metrics = ComputeModelStatistics().transform(prediction) -metrics.limit(10).toPandas() -``` - -First, we will define the webservice input/output. -For more information, you can visit the [documentation for Spark Serving](https://github.com/Microsoft/SynapseML/blob/master/docs/mmlspark-serving.md) - - -```python -from pyspark.sql.types import * -from synapse.ml.io import * -import uuid - -serving_inputs = spark.readStream.server() \ - .address("localhost", 8898, "my_api") \ - .option("name", "my_api") \ - .load() \ - .parseRequest("my_api", test.schema) - -serving_outputs = model.transform(serving_inputs) \ - .makeReply("scored_labels") - -server = serving_outputs.writeStream \ - .server() \ - .replyTo("my_api") \ - .queryName("my_query") \ - .option("checkpointLocation", "file:///tmp/checkpoints-{}".format(uuid.uuid1())) \ - .start() - -``` - -Test the webservice - - -```python -import requests -data = u'{"education":" 10th","marital-status":"Divorced","hours-per-week":40.0}' -r = requests.post(data=data, url="http://localhost:8898/my_api") -print("Response {}".format(r.text)) -``` - - -```python -import requests -data = u'{"education":" Masters","marital-status":"Married-civ-spouse","hours-per-week":40.0}' -r = requests.post(data=data, url="http://localhost:8898/my_api") -print("Response {}".format(r.text)) -``` - - -```python -import time -time.sleep(20) # wait for server to finish setting up (just to be safe) -server.stop() -``` - - -```python - -``` diff --git a/website/docs/features/spark_serving/about.md b/website/docs/features/spark_serving/about.md index efdba757fe..1881e1b9c4 100644 --- a/website/docs/features/spark_serving/about.md +++ b/website/docs/features/spark_serving/about.md @@ -4,7 +4,9 @@ hide_title: true sidebar_label: About --- -# Spark Serving + + +# Spark Serving ### An Engine for Deploying Spark Jobs as Distributed Web Services @@ -31,7 +33,7 @@ sidebar_label: About ### Jupyter Notebook 
Examples

-- [Deploy a classifier trained on the Adult Census Dataset](/docs/features/spark_serving/SparkServing%20-%20Deploying%20a%20Classifier)
+- [Deploy a classifier trained on the Adult Census Dataset](../SparkServing%20-%20Deploying%20a%20Classifier)
- More coming soon!

### Spark Serving Hello World

@@ -108,14 +110,14 @@ You can deploy head node load balancing with the `HTTPSource` and
distributes work across partitions, then collects response data back to
the head node. All HTTP requests are kept and replied to on the head
node. In both python and Scala these classes can be accessed by using
-`spark.readStream.server()` after importing MMLSpark.
+`spark.readStream.server()` after importing SynapseML.
This mode allows for more complex windowing, repartitioning, and
SQL operations. This option is also ideal for rapid setup and testing, as it
doesn't require any additional load balancing or network switches. A diagram of
this configuration can be seen below:

- +

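For a concrete sense of the head node mode, the pattern from the Spark Serving notebook above can be condensed as follows (a minimal sketch: `model`, `test`, and the `scored_labels` column are assumed to come from a pipeline like the one trained in that notebook):

```python
import uuid
from synapse.ml.io import *

# Parse incoming HTTP requests on the head node using a known schema
serving_inputs = (spark.readStream.server()
    .address("localhost", 8898, "my_api")
    .option("name", "my_api")
    .load()
    .parseRequest("my_api", test.schema))

# Score the parsed requests and reply with the scored labels
serving_outputs = model.transform(serving_inputs).makeReply("scored_labels")

server = (serving_outputs.writeStream.server()
    .replyTo("my_api")
    .queryName("my_query")
    .option("checkpointLocation", "file:///tmp/checkpoints-{}".format(uuid.uuid1()))
    .start())
```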
### Fully Distributed (Custom Load Balancer)

@@ -124,7 +126,7 @@ You can configure Spark Serving for a custom load balancer using the
`DistributedHTTPSource` and `DistributedHTTPSink` classes. This mode
spins up servers on each executor JVM.
In both python and Scala these classes can be accessed by using
-`spark.readStream.distributedServer()` after importing MMLSpark.
+`spark.readStream.distributedServer()` after importing SynapseML.
Each server will feed its executor's partitions in parallel. This mode is key for
high throughput and low latency as data does not need to be transferred to and from the
@@ -132,11 +134,11 @@ head node. This deployment results in several web services that all route into the
same Spark computation. You can deploy an external load balancer to unify the
executor's services under a single IP address. Support for automatic load balancer management and deployment is
-targeted for the next release of MMLSpark. A diagram of this
+targeted for the next release of SynapseML. A diagram of this
configuration can be seen below:

- +

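A fully distributed reader is obtained from `spark.readStream.distributedServer()`. The sketch below assumes it accepts the same builder options as the head node reader shown above; treat the exact options as an assumption rather than a verified API reference.

```python
# Assumed to mirror the head node builder above; each executor JVM then serves
# and scores its own partitions without routing data through the head node.
distributed_inputs = (spark.readStream.distributedServer()
    .address("localhost", 8898, "my_api")
    .option("name", "my_api")
    .load()
    .parseRequest("my_api", test.schema))
```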
Queries that involve data movement across workers, such as a nontrivial @@ -148,7 +150,7 @@ routing will be automatically handled by the sink. ### Sub-Millisecond Latency with Continuous Processing

- +

Continuous processing can be enabled by hooking into the `HTTPSourceV2` class using: diff --git a/website/docs/features/vw/Vowpal Wabbit - Overview.md b/website/docs/features/vw/Vowpal Wabbit - Overview.md deleted file mode 100644 index 795dd1a86b..0000000000 --- a/website/docs/features/vw/Vowpal Wabbit - Overview.md +++ /dev/null @@ -1,483 +0,0 @@ ---- -title: Vowpal Wabbit - Overview -hide_title: true -status: stable ---- - - -# VowalWabbit - -[VowpalWabbit](https://github.com/VowpalWabbit/vowpal_wabbit) (VW) is a machine learning system which -pushes the frontier of machine learning with techniques such as online, hashing, allreduce, -reductions, learning2search, active, and interactive learning. -VowpalWabbit is a popular choice in ad-tech due to it's speed and cost efficacy. -Furthermore it includes many advances in the area of reinforcement learning (e.g. contextual bandits). - -### Advantages of VowpalWabbit - -- **Composability**: VowpalWabbit models can be incorporated into existing - SparkML Pipelines, and used for batch, streaming, and serving workloads. -- **Small footprint**: VowpalWabbit memory consumption is rather small and can be controlled through '-b 18' or setNumBits method. - This determines the size of the model (e.g. 2^18 * some_constant). -- **Feature Interactions**: Feature interactions (e.g. quadratic, cubic,... terms) are created on-the-fly within the most inner - learning loop in VW. - Interactions can be specified by using the -q parameter and passing the first character of the namespaces that should be _interacted_. - The VW namespace concept is mapped to Spark using columns. The column name is used as namespace name, thus one sparse or dense Spark ML vector corresponds to the features of a single namespace. - To allow passing of multiple namespaces the VW estimator (classifier or regression) expose an additional property called _additionalFeatures_. Users can pass an array of column names. -- **Simple deployment**: all native dependencies are packaged into a single jars (including boost and zlib). -- **VowpalWabbit command line arguments**: users can pass VW command line arguments to control the learning process. -- **VowpalWabbit binary models** Users can supply an inital VowpalWabbit model to start the training which can be produced outside of - VW on Spark by invoking _setInitialModel_ and pass the model as a byte array. Similarly users can access the binary model by invoking - _getModel_ on the trained model object. -- **Java-based hashing** VWs version of murmur-hash was re-implemented in Java (praise to [JackDoe](https://github.com/jackdoe)) - providing a major performance improvement compared to passing input strings through JNI and hashing in C++. -- **Cross language** VowpalWabbit on Spark is available on Spark, PySpark, and SparklyR. - -### Limitations of VowpalWabbit on Spark - -- **Linux and CentOS only** The native binaries included with the published jar are built Linux and CentOS only. - We're working on creating a more portable version by statically linking Boost and lib C++. -- **Limited Parsing** Features implemented in the native VW parser (e.g. ngrams, skips, ...) are not yet implemented in - VowpalWabbitFeaturizer. - -### VowpalWabbit Usage: - -- VowpalWabbitClassifier: used to build classification models. -- VowpalWabbitRegressor: used to build regression models. -- VowpalWabbitFeaturizer: used for feature hashing and extraction. 
For details please visit [here](https://github.com/VowpalWabbit/vowpal_wabbit/wiki/Feature-Hashing-and-Extraction). -- VowpalWabbitContextualBandit: used to solve contextual bandits problems. For algorithm details please visit [here](https://github.com/VowpalWabbit/vowpal_wabbit/wiki/Contextual-Bandit-algorithms). - -## Heart Disease Detection with VowalWabbit Classifier - - - -#### Read dataset - - -```python -import os - -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - from pyspark.sql import SparkSession - spark = SparkSession.builder.getOrCreate() -``` - - -```python -df = spark.read.format("csv")\ - .option("header", True)\ - .option("inferSchema", True)\ - .load("wasbs://publicwasb@mmlspark.blob.core.windows.net/heart_disease_prediction_data.csv") -# print dataset basic info -print("records read: " + str(df.count())) -print("Schema: ") -df.printSchema() -``` - - -```python -display(df) -``` - -#### Split the dataset into train and test - - -```python -train, test = df.randomSplit([0.85, 0.15], seed=1) -``` - -#### Use VowalWabbitFeaturizer to convert data features into vector - - -```python -from synapse.ml.vw import VowpalWabbitFeaturizer -featurizer = VowpalWabbitFeaturizer(inputCols=df.columns[:-1], outputCol="features") -train_data = featurizer.transform(train)["target", "features"] -test_data = featurizer.transform(test)["target", "features"] -``` - - -```python -display(train_data.groupBy("target").count()) -``` - -#### Model Training - - -```python -from synapse.ml.vw import VowpalWabbitClassifier -model = VowpalWabbitClassifier(numPasses=20, labelCol="target", featuresCol="features").fit(train_data) -``` - -#### Model Prediction - - -```python -predictions = model.transform(test_data) -display(predictions) -``` - - -```python -from synapse.ml.train import ComputeModelStatistics -metrics = ComputeModelStatistics(evaluationMetric='classification', labelCol='target', scoredLabelsCol='prediction').transform(predictions) -display(metrics) -``` - -## Adult Census with VowpalWabbitClassifier - -In this example, we predict incomes from the Adult Census dataset using Vowpal Wabbit (VW) Classifier in SynapseML. - -#### Read dataset and split them into train & test - - -```python -data = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/AdultCensusIncome.parquet") -data = data.select(["education", "marital-status", "hours-per-week", "income"]) -train, test = data.randomSplit([0.75, 0.25], seed=123) -display(train) -``` - -#### Model Training - -We define a pipeline that includes feature engineering and training of a VW classifier. We use a featurizer provided by VW that hashes the feature names. Note that VW expects classification labels being -1 or 1. Thus, the income category is mapped to this space before feeding training data into the pipeline. - -Note: VW supports distributed learning, and it's controlled by number of partitions of dataset. - - -```python -from pyspark.sql.functions import when, col -from pyspark.ml import Pipeline -from synapse.ml.vw import VowpalWabbitFeaturizer, VowpalWabbitClassifier - -# Define classification label -train = train.withColumn("label", when(col("income").contains("<"), 0.0).otherwise(1.0)).repartition(1) -print(train.count()) - -# Specify featurizer -vw_featurizer = VowpalWabbitFeaturizer(inputCols=["education", "marital-status", "hours-per-week"], - outputCol="features") -``` - -Note: "args" parameter lets you pass in any params not exposed through our API. 
Full command line argument docs can be found [here](https://github.com/VowpalWabbit/vowpal_wabbit/wiki/Command-Line-Arguments). - - -```python -# Define VW classification model -args = "--loss_function=logistic --quiet --holdout_off" -vw_model = VowpalWabbitClassifier(featuresCol="features", - labelCol="label", - args=args, - numPasses=10) - -# Create a pipeline -vw_pipeline = Pipeline(stages=[vw_featurizer, vw_model]) -``` - - -```python -vw_trained = vw_pipeline.fit(train) -``` - -#### Model Prediction - -After the model is trained, we apply it to predict the income of each sample in the test set. - - -```python -# Making predictions -test = test.withColumn("label", when(col("income").contains("<"), 0.0).otherwise(1.0)) -prediction = vw_trained.transform(test) -display(prediction) -``` - -Finally, we evaluate the model performance using ComputeModelStatistics function which will compute confusion matrix, accuracy, precision, recall, and AUC by default for classificaiton models. - - -```python -from synapse.ml.train import ComputeModelStatistics -metrics = ComputeModelStatistics(evaluationMetric="classification", - labelCol="label", - scoredLabelsCol="prediction").transform(prediction) -display(metrics) -``` - -## Boston house price prediction with VowpalWabbitRegressor - Quantile Regression - -In this example, we show how to build regression model with VW using Boston's house price. - -#### Read dataset - -We use [*Boston house price* dataset](https://scikit-learn.org/stable/modules/generated/sklearn.datasets.load_boston.html) -. -The data was collected in 1978 from Boston area and consists of 506 entries with 14 features including the value of homes. -We use `sklearn.datasets` module to download it easily, then split the set into training and testing by 75/25. - - -```python -import math -from matplotlib.colors import ListedColormap, Normalize -from matplotlib.cm import get_cmap -import matplotlib.pyplot as plt -from synapse.ml.train import ComputeModelStatistics -from synapse.ml.vw import VowpalWabbitRegressor, VowpalWabbitFeaturizer -import numpy as np -import pandas as pd -from sklearn.datasets import load_boston -``` - - -```python -boston = load_boston() - -feature_cols = ['f' + str(i) for i in range(boston.data.shape[1])] -header = ['target'] + feature_cols -df = spark.createDataFrame( - pd.DataFrame(data=np.column_stack((boston.target, boston.data)), columns=header) -).repartition(1) -print("Dataframe has {} rows".format(df.count())) -display(df.limit(10)) -``` - - -```python -train_data, test_data = df.randomSplit([0.75, 0.25], seed=42) -``` - - -```python -display(train_data.summary().toPandas()) -``` - - -```python -train_data.show(10) -``` - -Exploratory analysis: plot feature distributions over different target values. 
- - -```python -features = train_data.columns[1:] -values = train_data.drop('target').toPandas() -ncols = 5 -nrows = math.ceil(len(features) / ncols) - -yy = [r['target'] for r in train_data.select('target').collect()] - -f, axes = plt.subplots(nrows, ncols, sharey=True, figsize=(30,10)) -f.tight_layout() - -for irow in range(nrows): - axes[irow][0].set_ylabel('target') - for icol in range(ncols): - try: - feat = features[irow*ncols + icol] - xx = values[feat] - - axes[irow][icol].scatter(xx, yy, s=10, alpha=0.25) - axes[irow][icol].set_xlabel(feat) - axes[irow][icol].get_yaxis().set_ticks([]) - except IndexError: - f.delaxes(axes[irow][icol]) -``` - -#### VW-style feature hashing - - -```python -vw_featurizer = VowpalWabbitFeaturizer( - inputCols=feature_cols, - outputCol='features', -) -vw_train_data = vw_featurizer.transform(train_data)['target', 'features'] -vw_test_data = vw_featurizer.transform(test_data)['target', 'features'] -display(vw_train_data) -``` - -#### Model training & Prediction - -See [VW wiki](https://github.com/vowpalWabbit/vowpal_wabbit/wiki/Command-Line-Arguments) for command line arguments. - - -```python -args = "--holdout_off --loss_function quantile -l 7 -q :: --power_t 0.7" -vwr = VowpalWabbitRegressor( - labelCol='target', - featuresCol='features', - args=args, - numPasses=200, -) - -# To reduce number of partitions (which will effect performance), use `vw_train_data.repartition(1)` -vw_model = vwr.fit(vw_train_data.repartition(1)) -vw_predictions = vw_model.transform(vw_test_data) - -display(vw_predictions.limit(20).toPandas()) -``` - -#### Compute Statistics & Visualization - - -```python -metrics = ComputeModelStatistics( - evaluationMetric='regression', - labelCol='target', - scoresCol='prediction' -).transform(vw_predictions) - -vw_result = metrics.toPandas() -vw_result.insert(0, 'model', ['Vowpal Wabbit']) -display(vw_result) -``` - - -```python -cmap = get_cmap('YlOrRd') -target = np.array(test_data.select('target').collect()).flatten() -model_preds = [("Vowpal Wabbit", vw_predictions)] - -f, axe = plt.subplots(figsize=(6, 6)) -f.tight_layout() - -preds = np.array(vw_predictions.select('prediction').collect()).flatten() -err = np.absolute(preds - target) -norm = Normalize() -clrs = cmap(np.asarray(norm(err)))[:, :-1] -plt.scatter(preds, target, s=60, c=clrs, edgecolors='#888888', alpha=0.75) -plt.plot((0, 60), (0, 60), linestyle='--', color='#888888') -axe.set_xlabel('Predicted values') -axe.set_ylabel('Actual values') -axe.set_title("Vowpal Wabbit") -``` - -## Quantile Regression for Drug Discovery with VowpalWabbitRegressor - - - - - -#### Read dataset - - -```python -triazines = spark.read.format("libsvm")\ - .load("wasbs://publicwasb@mmlspark.blob.core.windows.net/triazines.scale.svmlight") -``` - - -```python -# print some basic info -print("records read: " + str(triazines.count())) -print("Schema: ") -triazines.printSchema() -display(triazines.limit(10)) -``` - -#### Split dataset into train and test - - -```python -train, test = triazines.randomSplit([0.85, 0.15], seed=1) -``` - -#### Model Training - - -```python -from synapse.ml.vw import VowpalWabbitRegressor -model = (VowpalWabbitRegressor(numPasses=20, args="--holdout_off --loss_function quantile -q :: -l 0.1") - .fit(train)) -``` - -#### Model Prediction - - -```python -scoredData = model.transform(test) -display(scoredData.limit(10)) -``` - - -```python -from synapse.ml.train import ComputeModelStatistics -metrics = ComputeModelStatistics(evaluationMetric='regression', - labelCol='label', - 
scoresCol='prediction') \ - .transform(scoredData) -display(metrics) -``` - -## VW Contextual Bandit - -#### Read dataset - - -```python -data = spark.read.format("json").load("wasbs://publicwasb@mmlspark.blob.core.windows.net/vwcb_input.dsjson") -``` - -Note: Actions are all five TAction_x_topic columns. - - -```python -from pyspark.sql.functions import col -from pyspark.sql.types import IntegerType, DoubleType -data = data.withColumn('GUser_id', col('c.GUser.id'))\ - .withColumn('GUser_major', col('c.GUser.major'))\ - .withColumn('GUser_hobby', col('c.GUser.hobby'))\ - .withColumn('GUser_favorite_character', col('c.GUser.favorite_character'))\ - .withColumn('TAction_0_topic', col('c._multi.TAction.topic')[0])\ - .withColumn('TAction_1_topic', col('c._multi.TAction.topic')[1])\ - .withColumn('TAction_2_topic', col('c._multi.TAction.topic')[2])\ - .withColumn('TAction_3_topic', col('c._multi.TAction.topic')[3])\ - .withColumn('TAction_4_topic', col('c._multi.TAction.topic')[4])\ - .withColumn('chosenAction', col('_label_Action').cast(IntegerType()))\ - .withColumn('label', col('_labelIndex').cast(DoubleType()))\ - .withColumn('probability', col('_label_probability'))\ - .select('GUser_id', 'GUser_major', 'GUser_hobby', 'GUser_favorite_character', 'TAction_0_topic', 'TAction_1_topic', 'TAction_2_topic', 'TAction_3_topic', 'TAction_4_topic', 'chosenAction', 'label', 'probability') - -print("Schema: ") -data.printSchema() -``` - -Add pipeline to add featurizer, convert all feature columns into vector. - - -```python -from synapse.ml.vw import VowpalWabbitFeaturizer, VowpalWabbitContextualBandit, VectorZipper -from pyspark.ml import Pipeline -pipeline = Pipeline(stages=[ - VowpalWabbitFeaturizer(inputCols=['GUser_id'], outputCol='GUser_id_feature'), - VowpalWabbitFeaturizer(inputCols=['GUser_major'], outputCol='GUser_major_feature'), - VowpalWabbitFeaturizer(inputCols=['GUser_hobby'], outputCol='GUser_hobby_feature'), - VowpalWabbitFeaturizer(inputCols=['GUser_favorite_character'], outputCol='GUser_favorite_character_feature'), - VowpalWabbitFeaturizer(inputCols=['TAction_0_topic'], outputCol='TAction_0_topic_feature'), - VowpalWabbitFeaturizer(inputCols=['TAction_1_topic'], outputCol='TAction_1_topic_feature'), - VowpalWabbitFeaturizer(inputCols=['TAction_2_topic'], outputCol='TAction_2_topic_feature'), - VowpalWabbitFeaturizer(inputCols=['TAction_3_topic'], outputCol='TAction_3_topic_feature'), - VowpalWabbitFeaturizer(inputCols=['TAction_4_topic'], outputCol='TAction_4_topic_feature'), - VectorZipper(inputCols=['TAction_0_topic_feature', 'TAction_1_topic_feature', 'TAction_2_topic_feature', 'TAction_3_topic_feature','TAction_4_topic_feature'], outputCol='features') -]) -tranformation_pipeline = pipeline.fit(data) -transformed_data = tranformation_pipeline.transform(data) - -display(transformed_data) -``` - -Build VowpalWabbit Contextual Bandit model and compute performance statistics. 
- - -```python -estimator = VowpalWabbitContextualBandit() \ - .setArgs("--cb_explore_adf --epsilon 0.2 --quiet") \ - .setSharedCol('GUser_id_feature') \ - .setAdditionalSharedFeatures(["GUser_major_feature", "GUser_hobby_feature", "GUser_favorite_character_feature"]) \ - .setFeaturesCol('features') \ - .setUseBarrierExecutionMode(False)\ - .setChosenActionCol('chosenAction')\ - .setLabelCol('label')\ - .setProbabilityCol('probability') -model = estimator.fit(transformed_data) -display(model.getPerformanceStatistics()) -``` diff --git a/website/docs/features/vw/about.md b/website/docs/features/vw/about.md index ea63e476e9..292145e498 100644 --- a/website/docs/features/vw/about.md +++ b/website/docs/features/vw/about.md @@ -4,7 +4,7 @@ hide_title: true sidebar_label: About --- - + # VowpalWabbit on Apache Spark @@ -64,7 +64,7 @@ model = (VowpalWabbitRegressor(args="--holdout_off --loss_function quantile -q : Through the args parameter you can pass command line parameters to VW as documented in the [VW Wiki](https://github.com/vowpalWabbit/vowpal_wabbit/wiki/Command-Line-Arguments). For an end to end application, check out the VowpalWabbit [notebook -example](/docs/features/vw/Vowpal%20Wabbit%20-%20Overview). +example](../Vowpal%20Wabbit%20-%20Overview). ### Hyper-parameter tuning diff --git a/website/docs/reference/developer-readme.md b/website/docs/reference/developer-readme.md index 735f5af41a..d711c04b5f 100644 --- a/website/docs/reference/developer-readme.md +++ b/website/docs/reference/developer-readme.md @@ -2,7 +2,7 @@ title: Build System Commands hide_title: true sidebar_label: Build System Commands -description: MMLSpark Development Setup +description: SynapseML Development Setup --- # SynapseML Development Setup diff --git a/website/docs/reference/docker.md b/website/docs/reference/docker.md index 7880e61569..ae47e0a370 100644 --- a/website/docs/reference/docker.md +++ b/website/docs/reference/docker.md @@ -1,7 +1,7 @@ --- -title: Using the MMLSpark Docker Image +title: Using the SynapseML Docker Image sidebar_label: Docker Image -description: Using the MMLSpark Docker Image +description: Using the SynapseML Docker Image --- ## Quickstart: install and run the Docker image diff --git a/website/notebookconvert.py b/website/notebookconvert.py index 6ae47e9cf5..a812f16d87 100644 --- a/website/notebookconvert.py +++ b/website/notebookconvert.py @@ -1,10 +1,11 @@ +import io import os import re def add_header_to_markdown(folder, md): name = md[:-3] - with open(os.path.join(folder, md), "r+", encoding="utf-8") as f: + with io.open(os.path.join(folder, md), "r+", encoding="utf-8") as f: content = f.read() f.truncate(0) content = re.sub(r"style=\"[\S ]*?\"", "", content) @@ -15,54 +16,32 @@ def add_header_to_markdown(folder, md): def convert_notebook_to_markdown(file_path, outputdir): - print(f"Converting {file_path} into markdown") - convert_cmd = f'jupyter nbconvert --output-dir="{outputdir}" --to markdown "{file_path}"' + print("Converting {} into markdown".format(file_path)) + convert_cmd = 'jupyter nbconvert --output-dir="{}" --to markdown "{}"'.format(outputdir, file_path) os.system(convert_cmd) print() - def convert_allnotebooks_in_folder(folder, outputdir): - - dic = { - "CognitiveServices - Overview": os.path.join(outputdir, "features"), - "Classification": os.path.join(outputdir, "examples", "classification"), - "CognitiveServices": os.path.join(outputdir, "examples", "cognitive_services"), - "DataBalanceAnalysis": os.path.join(outputdir, "examples", "responsible_ai"), - 
"DeepLearning": os.path.join(outputdir, "examples", "deep_learning"), - "Interpretability - Image Explainers": os.path.join(outputdir, "features", "responsible_ai"), - "Interpretability - Explanation Dashboard": os.path.join(outputdir, "examples", "responsible_ai"), - "Interpretability - Tabular SHAP explainer": os.path.join(outputdir, "examples", "responsible_ai"), - "Interpretability - Text Explainers": os.path.join(outputdir, "examples", "responsible_ai"), - "ModelInterpretability": os.path.join(outputdir, "examples", "responsible_ai"), - "Regression": os.path.join(outputdir, "examples", "regression"), - "TextAnalytics": os.path.join(outputdir, "examples", "text_analytics"), - "HttpOnSpark": os.path.join(outputdir, "features", "http"), - "LightGBM": os.path.join(outputdir, "features", "lightgbm"), - "ONNX": os.path.join(outputdir, "features", "onnx"), - "SparkServing": os.path.join(outputdir, "features", "spark_serving"), - "Vowpal Wabbit": os.path.join(outputdir, "features", "vw"), - } - - for nb in os.listdir(folder): - if nb.endswith(".ipynb"): - - finaldir = os.path.join(outputdir, "examples") - - for k, v in dic.items(): - if nb.startswith(k): - finaldir = v - break - - if not os.path.exists(finaldir): - os.mkdir(finaldir) - - md = nb.replace(".ipynb", ".md") - if os.path.exists(os.path.join(finaldir, md)): - os.remove(os.path.join(finaldir, md)) - - convert_notebook_to_markdown(os.path.join(folder, nb), finaldir) - add_header_to_markdown(finaldir, md) - + + cur_folders = [folder] + output_dirs = [outputdir] + while cur_folders: + cur_dir = cur_folders.pop(0) + cur_output_dir = output_dirs.pop(0) + for file in os.listdir(cur_dir): + if os.path.isdir(os.path.join(cur_dir, file)): + cur_folders.append(os.path.join(cur_dir, file)) + output_dirs.append(os.path.join(cur_output_dir, file)) + else: + if not os.path.exists(cur_output_dir): + os.mkdir(cur_output_dir) + + md = file.replace(".ipynb", ".md") + if os.path.exists(os.path.join(cur_output_dir, md)): + os.remove(os.path.join(cur_output_dir, md)) + + convert_notebook_to_markdown(os.path.join(cur_dir, file), cur_output_dir) + add_header_to_markdown(cur_output_dir, md) def main(): cur_path = os.getcwd() diff --git a/website/sidebars.js b/website/sidebars.js index 06bd321de6..4f3884d4f1 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -10,8 +10,8 @@ let features_vw_docs = listExamplePaths("features", "vw"); let examples_cl_docs = listExamplePaths("examples", "classification"); let examples_cs_docs = listExamplePaths("examples", "cognitive_services"); let examples_dl_docs = listExamplePaths("examples", "deep_learning"); -let examples_rg_docs = listExamplePaths("examples", "regression"); let examples_rai_docs = listExamplePaths("examples", "responsible_ai"); +let examples_rg_docs = listExamplePaths("examples", "regression"); let examples_ta_docs = listExamplePaths("examples", "text_analytics"); diff --git a/website/src/pages/index.js b/website/src/pages/index.js index c5d6041e43..21ce4d92d9 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -9,12 +9,13 @@ import CodeSnippet from "@site/src/theme/CodeSnippet"; import SampleSnippet from "@site/src/theme/SampleSnippet"; import Tabs from "@theme/Tabs"; import TabItem from "@theme/TabItem"; +import clsx from "clsx"; const snippets = [ { label: "Text Analytics", further: - "/docs/features/CognitiveServices%20-%20Overview#text-analytics-sample", + "docs/features/CognitiveServices%20-%20Overview#text-analytics-sample", config: `from synapse.ml.cognitive 
import * sentiment_df = (TextSentiment() @@ -28,7 +29,7 @@ sentiment_df = (TextSentiment() }, { label: "Deep Learning", - further: "/docs/features/onnx/ONNX%20-%20Inference%20on%20Spark", + further: "docs/features/onnx/ONNX%20-%20Inference%20on%20Spark", config: `from synapse.ml.onnx import * model_prediction_df = (ONNXModel() @@ -40,8 +41,9 @@ model_prediction_df = (ONNXModel() .transform(input_df))`, }, { - label: "Model Interpretability", - further: "/docs/next/features/responsible_ai/Model%20Interpretation%20on%20Spark", + label: "Responsible AI", + further: + "docs/features/responsible_ai/Model%20Interpretation%20on%20Spark", config: `from synapse.ml.explainers import * interpretation_df = (TabularSHAP() @@ -55,7 +57,7 @@ interpretation_df = (TabularSHAP() }, { label: "LightGBM", - further: "/docs/features/lightgbm/about", + further: "docs/features/lightgbm/about", config: `from synapse.ml.lightgbm import * quantile_df = (LightGBMRegressor() @@ -168,7 +170,21 @@ function Home() {
- + +
+ +
+
+
+ Coming from + + MMLSpark + + ? We have been renamed to SynapseML! +
@@ -220,11 +236,15 @@ function Home() { and cloud native.

- Note: SynpaseML is built-in for Azure Synapse. + Note: SynpaseML will be built-in for{" "} + + Azure Synapse soon. +

+ + SynapseML can be conveniently installed on Synapse: + + Please also include `synapseml==0.9.2` in your + requirements.txt file for usage of PySpark. [ + + Install Python libraries in Synapse + + ] + - MMLSpark can be conveniently installed on existing Spark + SynapseML can be conveniently installed on existing Spark clusters via the --packages option, examples: This can be used in other Spark contexts too. For example, you - can use MMLSpark in{" "} + can use SynapseML in{" "} AZTK by adding it to the{" "} @@ -252,7 +294,7 @@ spark-submit --packages com.microsoft.azure:synapseml:0.9.2 MyApp.jar`}

- To install MMLSpark on the{" "} + To install SynapseML on the{" "} Databricks cloud @@ -260,8 +302,7 @@ spark-submit --packages com.microsoft.azure:synapseml:0.9.2 MyApp.jar`} library from Maven coordinates {" "} - in your workspace. - in your workspace. + in your workspace. in your workspace.

For the coordinates use: @@ -280,16 +321,16 @@ spark-submit --packages com.microsoft.azure:synapseml:0.9.2 MyApp.jar`} Finally, ensure that your Spark cluster has at least Spark 2.4 and Scala 2.11.

- You can use MMLSpark in both your Scala and PySpark notebooks. - To get started with our example notebooks import the following - databricks archive: + You can use SynapseML in both your Scala and PySpark + notebooks. To get started with our example notebooks import + the following databricks archive: - The easiest way to evaluate MMLSpark is via our pre-built + The easiest way to evaluate SynapseML is via our pre-built Docker container. To do so, run the following command: - To try out MMLSpark on a Python (or Conda) installation you + To try out SynapseML on a Python (or Conda) installation you can get Spark installed via pip with Spark + AI Summit 2018

We use CNTK on Spark to distribute a Faster RCNN object - detection network and deploy it as a web service with MMLSpark + detection network and deploy it as a web service with SynapseML Serving for use on Unmanned Aerial Vehicals (UAVs)

c.status != "deprecated") + .sort() .forEach(function (info) { paths.push(`${folder}/${type}/${info.name}`); }); diff --git a/website/src/theme/FeatureCards/index.js b/website/src/theme/FeatureCards/index.js index 8837ed2188..ebe0f03f53 100644 --- a/website/src/theme/FeatureCards/index.js +++ b/website/src/theme/FeatureCards/index.js @@ -17,42 +17,42 @@ const features = [ body: "Spark is well known for it's ability to switch between batch and streaming workloads by modifying a single line. \ We push this concept even further and enable distributed web services with the same API as batch and streaming workloads.", footer: "Learn More", - burl: "notebooks/Spark%20Serving/about", + burl: "../features/spark_serving/about", }, { src: "/img/notebooks/decision_tree_recolor.png", title: "Lightning Fast Gradient Boosting", - body: "MMLSpark adds GPU enabled gradient boosted machines from the popular framework LightGBM. \ + body: "SynapseML adds GPU enabled gradient boosted machines from the popular framework LightGBM. \ Users can mix and match frameworks in a single distributed environment and API.", footer: "Try an Example", - burl: "notebooks/LightGBM/LightGBM%20-%20Overview", + burl: "../features/lightgbm/LightGBM%20-%20Overview", }, { src: "/img/notebooks/vw-blue-dark-orange.svg", title: "Fast and Sparse Text Analytics", body: "Vowpal Wabbit on Spark enables new classes of workloads in scalable and performant text analytics", footer: "Try an Example", - burl: "notebooks/Vowpal%20Wabbit/Vowpal%20Wabbit%20-%20Overview", + burl: "../features/vw/Vowpal%20Wabbit%20-%20Overview", }, { src: "/img/notebooks/microservice_recolor.png", title: "Distributed Microservices", - body: "MMLSpark provides powerful and idiomatic tools to communicate with any HTTP endpoint service using Spark. \ + body: "SynapseML provides powerful and idiomatic tools to communicate with any HTTP endpoint service using Spark. \ Users can now use Spark as a elastic micro-service orchestrator.", footer: "Learn More", - burl: "notebooks/HTTP/about", + burl: "../features/http/about", }, { src: "/img/notebooks/LIME-1.svg", title: "Large Scale Model Interpretability", body: "Understand any image classifier with a distributed implementation of Local Interpretable Model Agnostic Explanations (LIME).", footer: "Try an Example", - burl: "notebooks/Model%20Interpretation/ModelInterpretation%20-%20Snow%20Leopard%20Detection", + burl: "../features/responsible_ai/Interpretability%20-%20Image%20Explainers/", }, { src: "/img/notebooks/cntk-1.svg", title: "Scalable Deep Learning", - body: "MMLSpark integrates the distributed computing framework Apache Spark with the flexible deep learning framework CNTK. \ + body: "SynapseML integrates the distributed computing framework Apache Spark with the flexible deep learning framework CNTK. 
\ Enabling deep learning at unprecedented scales.", footer: "Read the Paper", burl: "https://arxiv.org/abs/1804.04031", @@ -60,9 +60,9 @@ const features = [ { src: "/img/multilingual.svg", title: "Broad Language Support", - body: "MMLSpark's API spans Scala, Python, Java, R, .NET and C# so you can integrate with any ecosystem.", + body: "SynapseML's API spans Scala, Python, Java, R, .NET and C# so you can integrate with any ecosystem.", footer: "Try our PySpark Examples", - burl: "notebooks/about", + burl: "../features/CognitiveServices%20-%20Overview", }, ]; diff --git a/website/versioned_docs/version-0.9.1/examples/about.md b/website/versioned_docs/version-0.9.1/examples/about.md index 7ce693ae76..5247c91adc 100644 --- a/website/versioned_docs/version-0.9.1/examples/about.md +++ b/website/versioned_docs/version-0.9.1/examples/about.md @@ -10,7 +10,7 @@ sidebar_label: About - Fit a LightGBM classification or regression model on a biochemical dataset ([LightGBM Overview]), to learn more check out the [LightGBM documentation page](../../features/lightgbm/about). -- Deploy a deep network as a distributed web service with [MMLSpark +- Deploy a deep network as a distributed web service with [SynapseML Serving](../../features/spark_serving/about) - Use web services in Spark with [HTTP on Apache Spark](../../features/http/about) - Use Bi-directional LSTMs from Keras for medical entity extraction @@ -25,6 +25,7 @@ sidebar_label: About - Train and evaluate a flight delay prediction system ([Regression - Flight Delays]) - Finding anomalous data access patterns using the Access Anomalies package of CyberML ([CyberML - Anomalous Access Detection]) - Model interpretation ([Interpretability - Tabular SHAP Explainer], [Interpretability - Image Explainers], [Interpretability - Text Explainers]) +- Do Data Balance Analysis to determine how well features and feature values are represented in your dataset ([DataBalanceAnalysis - Adult Census Income]) [Classification - Adult Census]: ../classification/Classification%20-%20Adult%20Census "Classification - Adult Census" @@ -47,9 +48,10 @@ sidebar_label: About [CyberML - Anomalous Access Detection]: ../CyberML%20-%20Anomalous%20Access%20Detection "CyberML - Anomalous Access Detection" -[Interpretability - Tabular SHAP Explainer]: ../model_interpretability/Interpretability%20-%20Tabular%20SHAP%20explainer "Interpretability - Tabular SHAP Explainer" +[Interpretability - Tabular SHAP Explainer]: ../responsible_ai/Interpretability%20-%20Tabular%20SHAP%20explainer "Interpretability - Tabular SHAP Explainer" -[Interpretability - Image Explainers]: ../model_interpretability/Interpretability%20-%20Image%20Explainers "Interpretability - Image Explainers" +[Interpretability - Image Explainers]: ../../features/responsible_ai/Interpretability%20-%20Image%20Explainers "Interpretability - Image Explainers" -[Interpretability - Text Explainers]: ../model_interpretability/Interpretability%20-%20Text%20Explainers "Interpretability - Text Explainers" +[Interpretability - Text Explainers]: ../responsible_ai/Interpretability%20-%20Text%20Explainers "Interpretability - Text Explainers" +[DataBalanceAnalysis - Adult Census Income]: ../responsible_ai/DataBalanceAnalysis%20-%20Adult%20Census%20Income "DataBalanceAnalysis - Adult Census Income" diff --git a/website/versioned_docs/version-0.9.1/examples/model_interpretability/Interpretability - Image Explainers.md b/website/versioned_docs/version-0.9.1/examples/model_interpretability/Interpretability - Image Explainers.md deleted file 
mode 100644 index 1051f28447..0000000000 --- a/website/versioned_docs/version-0.9.1/examples/model_interpretability/Interpretability - Image Explainers.md +++ /dev/null @@ -1,263 +0,0 @@ ---- -title: Interpretability - Image Explainers -hide_title: true -status: stable ---- -## Interpretability - Image Explainers - -In this example, we use LIME and Kernel SHAP explainers to explain the ResNet50 model's multi-class output of an image. - -First we import the packages and define some UDFs and a plotting function we will need later. - - -```python -from synapse.ml.explainers import * - -from synapse.ml.onnx import ONNXModel - -from synapse.ml.opencv import ImageTransformer - -from synapse.ml.io import * - -from pyspark.ml import Pipeline - -from pyspark.ml.classification import LogisticRegression - -from pyspark.ml.feature import StringIndexer - -from pyspark.sql.functions import * - -from pyspark.sql.types import * - -import numpy as np - -import pyspark - -import urllib.request - -import matplotlib.pyplot as plt - -import PIL, io - -from PIL import Image - - - -vec_slice = udf(lambda vec, indices: (vec.toArray())[indices].tolist(), ArrayType(FloatType())) - -arg_top_k = udf(lambda vec, k: (-vec.toArray()).argsort()[:k].tolist(), ArrayType(IntegerType())) - - - -def downloadBytes(url: str): - - with urllib.request.urlopen(url) as url: - - barr = url.read() - - return barr - - - -def rotate_color_channel(bgr_image_array, height, width, nChannels): - - B, G, R, *_ = np.asarray(bgr_image_array).reshape(height, width, nChannels).T - - rgb_image_array = np.array((R, G, B)).T - - return rgb_image_array - - - -def plot_superpixels(image_rgb_array, sp_clusters, weights, green_threshold=99): - - superpixels = sp_clusters - - green_value = np.percentile(weights, green_threshold) - - img = Image.fromarray(image_rgb_array, mode='RGB').convert("RGBA") - - image_array = np.asarray(img).copy() - - for (sp, v) in zip(superpixels, weights): - - if v > green_value: - - for (x, y) in sp: - - image_array[y, x, 1] = 255 - - image_array[y, x, 3] = 200 - - plt.clf() - - plt.imshow(image_array) - - display() -``` - -Create a dataframe for a testing image, and use the ResNet50 ONNX model to infer the image. - -The result shows 39.6% probability of "violin" (889), and 38.4% probability of "upright piano" (881). - - -```python -from synapse.ml.io import * - - - -image_df = spark.read.image().load("wasbs://publicwasb@mmlspark.blob.core.windows.net/explainers/images/david-lusvardi-dWcUncxocQY-unsplash.jpg") - -display(image_df) - - - -# Rotate the image array from BGR into RGB channels for visualization later. 
- -row = image_df.select("image.height", "image.width", "image.nChannels", "image.data").head() - -locals().update(row.asDict()) - -rgb_image_array = rotate_color_channel(data, height, width, nChannels) - - - -# Download the ONNX model - -modelPayload = downloadBytes("https://mmlspark.blob.core.windows.net/publicwasb/ONNXModels/resnet50-v2-7.onnx") - - - -featurizer = ( - - ImageTransformer(inputCol="image", outputCol="features") - - .resize(224, True) - - .centerCrop(224, 224) - - .normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225], color_scale_factor = 1/255) - - .setTensorElementType(FloatType()) - -) - - - -onnx = ( - - ONNXModel() - - .setModelPayload(modelPayload) - - .setFeedDict({"data": "features"}) - - .setFetchDict({"rawPrediction": "resnetv24_dense0_fwd"}) - - .setSoftMaxDict({"rawPrediction": "probability"}) - - .setMiniBatchSize(1) - -) - - - -model = Pipeline(stages=[featurizer, onnx]).fit(image_df) -``` - - -```python -predicted = ( - model.transform(image_df) - .withColumn("top2pred", arg_top_k(col("probability"), lit(2))) - .withColumn("top2prob", vec_slice(col("probability"), col("top2pred"))) -) - -display(predicted.select("top2pred", "top2prob")) -``` - -First we use the LIME image explainer to explain the model's top 2 classes' probabilities. - - -```python -lime = ( - ImageLIME() - .setModel(model) - .setOutputCol("weights") - .setInputCol("image") - .setCellSize(150.0) - .setModifier(50.0) - .setNumSamples(500) - .setTargetCol("probability") - .setTargetClassesCol("top2pred") - .setSamplingFraction(0.7) -) - -lime_result = ( - lime.transform(predicted) - .withColumn("weights_violin", col("weights").getItem(0)) - .withColumn("weights_piano", col("weights").getItem(1)) - .cache() -) - -display(lime_result.select(col("weights_violin"), col("weights_piano"))) -lime_row = lime_result.head() -``` - -We plot the LIME weights for "violin" output and "upright piano" output. - -Green area are superpixels with LIME weights above 95 percentile. - - -```python -plot_superpixels(rgb_image_array, lime_row["superpixels"]["clusters"], list(lime_row["weights_violin"]), 95) -plot_superpixels(rgb_image_array, lime_row["superpixels"]["clusters"], list(lime_row["weights_piano"]), 95) -``` - -Your results will look like: - - - -Then we use the Kernel SHAP image explainer to explain the model's top 2 classes' probabilities. - - -```python -shap = ( - ImageSHAP() - .setModel(model) - .setOutputCol("shaps") - .setSuperpixelCol("superpixels") - .setInputCol("image") - .setCellSize(150.0) - .setModifier(50.0) - .setNumSamples(500) - .setTargetCol("probability") - .setTargetClassesCol("top2pred") -) - -shap_result = ( - shap.transform(predicted) - .withColumn("shaps_violin", col("shaps").getItem(0)) - .withColumn("shaps_piano", col("shaps").getItem(1)) - .cache() -) - -display(shap_result.select(col("shaps_violin"), col("shaps_piano"))) -shap_row = shap_result.head() -``` - -We plot the SHAP values for "piano" output and "cell" output. - -Green area are superpixels with SHAP values above 95 percentile. - -> Notice that we drop the base value from the SHAP output before rendering the superpixels. The base value is the model output for the background (all black) image. 
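Because Kernel SHAP explanations are locally additive, the base value plus the per-superpixel values should approximately reconstruct the model's probability for the explained class. An optional sanity check, reusing `shap_row` from the cell above:

```python
violin_shaps = list(shap_row["shaps_violin"])
base_value, contributions = violin_shaps[0], violin_shaps[1:]
# Should be close to the "violin" probability reported earlier, up to sampling error.
print(base_value + sum(contributions))
```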
- - -```python -plot_superpixels(rgb_image_array, shap_row["superpixels"]["clusters"], list(shap_row["shaps_violin"][1:]), 95) -plot_superpixels(rgb_image_array, shap_row["superpixels"]["clusters"], list(shap_row["shaps_piano"][1:]), 95) -``` - -Your results will look like: - - diff --git a/website/versioned_docs/version-0.9.1/examples/model_interpretability/Interpretability - Tabular SHAP explainer.md b/website/versioned_docs/version-0.9.1/examples/model_interpretability/Interpretability - Tabular SHAP explainer.md deleted file mode 100644 index 661ef1bd54..0000000000 --- a/website/versioned_docs/version-0.9.1/examples/model_interpretability/Interpretability - Tabular SHAP explainer.md +++ /dev/null @@ -1,142 +0,0 @@ ---- -title: Interpretability - Tabular SHAP explainer -hide_title: true -status: stable ---- -## Interpretability - Tabular SHAP explainer - -In this example, we use Kernel SHAP to explain a tabular classification model built from the Adults Census dataset. - -First we import the packages and define some UDFs we will need later. - - -```python -import pyspark -from synapse.ml.explainers import * -from pyspark.ml import Pipeline -from pyspark.ml.classification import LogisticRegression -from pyspark.ml.feature import StringIndexer, OneHotEncoder, VectorAssembler -from pyspark.sql.types import * -from pyspark.sql.functions import * -import pandas as pd - -vec_access = udf(lambda v, i: float(v[i]), FloatType()) -vec2array = udf(lambda vec: vec.toArray().tolist(), ArrayType(FloatType())) -``` - -Now let's read the data and train a simple binary classification model. - - -```python -df = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/AdultCensusIncome.parquet") - -labelIndexer = StringIndexer(inputCol="income", outputCol="label", stringOrderType="alphabetAsc").fit(df) -print("Label index assigment: " + str(set(zip(labelIndexer.labels, [0, 1])))) - -training = labelIndexer.transform(df) -display(training) -categorical_features = [ - "workclass", - "education", - "marital-status", - "occupation", - "relationship", - "race", - "sex", - "native-country", -] -categorical_features_idx = [col + "_idx" for col in categorical_features] -categorical_features_enc = [col + "_enc" for col in categorical_features] -numeric_features = ["age", "education-num", "capital-gain", "capital-loss", "hours-per-week"] - -strIndexer = StringIndexer(inputCols=categorical_features, outputCols=categorical_features_idx) -onehotEnc = OneHotEncoder(inputCols=categorical_features_idx, outputCols=categorical_features_enc) -vectAssem = VectorAssembler(inputCols=categorical_features_enc + numeric_features, outputCol="features") -lr = LogisticRegression(featuresCol="features", labelCol="label", weightCol="fnlwgt") -pipeline = Pipeline(stages=[strIndexer, onehotEnc, vectAssem, lr]) -model = pipeline.fit(training) -``` - -After the model is trained, we randomly select some observations to be explained. - - -```python -explain_instances = model.transform(training).orderBy(rand()).limit(5).repartition(200).cache() -display(explain_instances) -``` - -We create a TabularSHAP explainer, set the input columns to all the features the model takes, specify the model and the target output column we are trying to explain. In this case, we are trying to explain the "probability" output which is a vector of length 2, and we are only looking at class 1 probability. Specify targetClasses to `[0, 1]` if you want to explain class 0 and 1 probability at the same time. 
Finally we sample 100 rows from the training data for background data, which is used for integrating out features in Kernel SHAP. - - -```python -shap = TabularSHAP( - inputCols=categorical_features + numeric_features, - outputCol="shapValues", - numSamples=5000, - model=model, - targetCol="probability", - targetClasses=[1], - backgroundData=training.orderBy(rand()).limit(100).cache(), -) - -shap_df = shap.transform(explain_instances) - -``` - -Once we have the resulting dataframe, we extract the class 1 probability of the model output, the SHAP values for the target class, the original features and the true label. Then we convert it to a pandas dataframe for visisualization. -For each observation, the first element in the SHAP values vector is the base value (the mean output of the background dataset), and each of the following element is the SHAP values for each feature. - - -```python -shaps = ( - shap_df.withColumn("probability", vec_access(col("probability"), lit(1))) - .withColumn("shapValues", vec2array(col("shapValues").getItem(0))) - .select(["shapValues", "probability", "label"] + categorical_features + numeric_features) -) - -shaps_local = shaps.toPandas() -shaps_local.sort_values("probability", ascending=False, inplace=True, ignore_index=True) -pd.set_option("display.max_colwidth", None) -shaps_local -``` - -We use plotly subplot to visualize the SHAP values. - - -```python -from plotly.subplots import make_subplots -import plotly.graph_objects as go -import pandas as pd - -features = categorical_features + numeric_features -features_with_base = ["Base"] + features - -rows = shaps_local.shape[0] - -fig = make_subplots( - rows=rows, - cols=1, - subplot_titles="Probability: " + shaps_local["probability"].apply("{:.2%}".format) + "; Label: " + shaps_local["label"].astype(str), -) - -for index, row in shaps_local.iterrows(): - feature_values = [0] + [row[feature] for feature in features] - shap_values = row["shapValues"] - list_of_tuples = list(zip(features_with_base, feature_values, shap_values)) - shap_pdf = pd.DataFrame(list_of_tuples, columns=["name", "value", "shap"]) - fig.add_trace( - go.Bar(x=shap_pdf["name"], y=shap_pdf["shap"], hovertext="value: " + shap_pdf["value"].astype(str)), - row=index + 1, - col=1, - ) - -fig.update_yaxes(range=[-1, 1], fixedrange=True, zerolinecolor="black") -fig.update_xaxes(type="category", tickangle=45, fixedrange=True) -fig.update_layout(height=400 * rows, title_text="SHAP explanations") -fig.show() - -``` - -Your results will look like: - - diff --git a/website/versioned_docs/version-0.9.1/examples/model_interpretability/Interpretability - Text Explainers.md b/website/versioned_docs/version-0.9.1/examples/model_interpretability/Interpretability - Text Explainers.md deleted file mode 100644 index fe4b3812c1..0000000000 --- a/website/versioned_docs/version-0.9.1/examples/model_interpretability/Interpretability - Text Explainers.md +++ /dev/null @@ -1,134 +0,0 @@ ---- -title: Interpretability - Text Explainers -hide_title: true -status: stable ---- -## Interpretability - Text Explainers - -In this example, we use LIME and Kernel SHAP explainers to explain a text classification model. - -First we import the packages and define some UDFs and a plotting function we will need later. 
- - -``` -from pyspark.sql.functions import * -from pyspark.sql.types import * -from pyspark.ml.feature import StopWordsRemover, HashingTF, IDF, Tokenizer -from pyspark.ml import Pipeline -from pyspark.ml.classification import LogisticRegression -from synapse.ml.explainers import * -from synapse.ml.featurize.text import TextFeaturizer - -vec2array = udf(lambda vec: vec.toArray().tolist(), ArrayType(FloatType())) -vec_access = udf(lambda v, i: float(v[i]), FloatType()) -``` - -Load training data, and convert rating to binary label. - - -``` -data = ( - spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/BookReviewsFromAmazon10K.parquet") - .withColumn("label", (col("rating") > 3).cast(LongType())) - .select("label", "text") -) - -data.limit(10).toPandas() -``` - -We train a text classification model, and randomly sample 10 rows to explain. - - -``` -train, test = data.randomSplit([0.60, 0.40]) - -pipeline = Pipeline( - stages=[ - TextFeaturizer( - inputCol="text", - outputCol="features", - useStopWordsRemover=True, - useIDF=True, - minDocFreq=20, - numFeatures=1 << 16, - ), - LogisticRegression(maxIter=100, regParam=0.005, labelCol="label", featuresCol="features"), - ] -) - -model = pipeline.fit(train) - -prediction = model.transform(test) - -explain_instances = prediction.orderBy(rand()).limit(10) -``` - - -``` -def plotConfusionMatrix(df, label, prediction, classLabels): - from synapse.ml.plot import confusionMatrix - import matplotlib.pyplot as plt - - fig = plt.figure(figsize=(4.5, 4.5)) - confusionMatrix(df, label, prediction, classLabels) - display(fig) - - -plotConfusionMatrix(model.transform(test), "label", "prediction", [0, 1]) -``` - -First we use the LIME text explainer to explain the model's predicted probability for a given observation. - - -``` -lime = TextLIME( - model=model, - outputCol="weights", - inputCol="text", - targetCol="probability", - targetClasses=[1], - tokensCol="tokens", - samplingFraction=0.7, - numSamples=2000, -) - -lime_results = ( - lime.transform(explain_instances) - .select("tokens", "weights", "r2", "probability", "text") - .withColumn("probability", vec_access("probability", lit(1))) - .withColumn("weights", vec2array(col("weights").getItem(0))) - .withColumn("r2", vec_access("r2", lit(0))) - .withColumn("tokens_weights", arrays_zip("tokens", "weights")) -) - -display(lime_results.select("probability", "r2", "tokens_weights", "text").orderBy(col("probability").desc())) -``` - -Then we use the Kernel SHAP text explainer to explain the model's predicted probability for a given observation. - -> Notice that we drop the base value from the SHAP output before displaying the SHAP values. The base value is the model output for an empty string. 
- - -``` -shap = TextSHAP( - model=model, - outputCol="shaps", - inputCol="text", - targetCol="probability", - targetClasses=[1], - tokensCol="tokens", - numSamples=5000, -) - -shap_results = ( - shap.transform(explain_instances) - .select("tokens", "shaps", "r2", "probability", "text") - .withColumn("probability", vec_access("probability", lit(1))) - .withColumn("shaps", vec2array(col("shaps").getItem(0))) - .withColumn("shaps", slice(col("shaps"), lit(2), size(col("shaps")))) - .withColumn("r2", vec_access("r2", lit(0))) - .withColumn("tokens_shaps", arrays_zip("tokens", "shaps")) -) - -display(shap_results.select("probability", "r2", "tokens_shaps", "text").orderBy(col("probability").desc())) -``` diff --git a/website/docs/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.md b/website/versioned_docs/version-0.9.1/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.md similarity index 98% rename from website/docs/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.md rename to website/versioned_docs/version-0.9.1/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.md index 3ec25407d0..32fa833bfc 100644 --- a/website/docs/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.md +++ b/website/versioned_docs/version-0.9.1/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.md @@ -291,7 +291,7 @@ These measures look at distribution of records across all combinations of sensit Measure | Description | Interpretation | Reference - | - | - | - Atkinson Index | It presents the percentage of total income that a given society would have to forego in order to have more equal shares of income between its citizens. This measure depends on the degree of society aversion to inequality (a theoretical parameter decided by the researcher), where a higher value entails greater social utility or willingness by individuals to accept smaller incomes in exchange for a more equal distribution. An important feature of the Atkinson index is that it can be decomposed into within-group and between-group inequality. | Range [0, 1]. 0 if perfect equality. 1 means maximum inequality. In our case, it is the proportion of records for a sensitive columns’ combination. | [Link](https://en.wikipedia.org/wiki/Atkinson_index) -Theil T Index | GE(1) = Theil's T and is more sensitive to differences at the top of the distribution. The Theil index is a statistic used to measure economic inequality. The Theil index measures an entropic "distance" the population is away from the "ideal" egalitarian state of everyone having the same income. | If everyone has the same income, then T_T equals 0. If one person has all the income, then T_T gives the result (ln N). 0 means equal income and larger values mean higher level of disproportion. | [Link](https://en.wikipedia.org/wiki/Theil_index) +Theil T Index | GE(1) = Theil's T and is more sensitive to differences at the top of the distribution. The Theil index is a statistic used to measure economic inequality. The Theil index measures an entropic "distance" the population is away from the "ideal" egalitarian state of everyone having the same income. | If everyone has the same income, then T_T equals 0. If one person has all the income, then T_T gives the result (ln N). 0 means equal income and larger values mean higher level of disproportion. | [Link](https://en.wikipedia.org/wiki/Theil_index) Theil L Index | GE(0) = Theil's L and is more sensitive to differences at the lower end of the distribution. 
Logarithm of (mean income)/(income i), over all the incomes included in the summation. It is also referred to as the mean log deviation measure. Because a transfer from a larger income to a smaller one will change the smaller income's ratio more than it changes the larger income's ratio, the transfer-principle is satisfied by this index. | Same interpretation as Theil T Index. | [Link](https://en.wikipedia.org/wiki/Theil_index) diff --git a/website/docs/examples/responsible_ai/Interpretability - Explanation Dashboard.md b/website/versioned_docs/version-0.9.1/examples/responsible_ai/Interpretability - Explanation Dashboard.md similarity index 100% rename from website/docs/examples/responsible_ai/Interpretability - Explanation Dashboard.md rename to website/versioned_docs/version-0.9.1/examples/responsible_ai/Interpretability - Explanation Dashboard.md diff --git a/website/docs/examples/responsible_ai/Interpretability - Tabular SHAP explainer.md b/website/versioned_docs/version-0.9.1/examples/responsible_ai/Interpretability - Tabular SHAP explainer.md similarity index 100% rename from website/docs/examples/responsible_ai/Interpretability - Tabular SHAP explainer.md rename to website/versioned_docs/version-0.9.1/examples/responsible_ai/Interpretability - Tabular SHAP explainer.md diff --git a/website/docs/examples/responsible_ai/Interpretability - Text Explainers.md b/website/versioned_docs/version-0.9.1/examples/responsible_ai/Interpretability - Text Explainers.md similarity index 100% rename from website/docs/examples/responsible_ai/Interpretability - Text Explainers.md rename to website/versioned_docs/version-0.9.1/examples/responsible_ai/Interpretability - Text Explainers.md diff --git a/website/docs/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.md b/website/versioned_docs/version-0.9.1/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.md similarity index 100% rename from website/docs/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.md rename to website/versioned_docs/version-0.9.1/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.md diff --git a/website/versioned_docs/version-0.9.1/features/http/about.md b/website/versioned_docs/version-0.9.1/features/http/about.md index 844043b705..e209dd198f 100644 --- a/website/versioned_docs/version-0.9.1/features/http/about.md +++ b/website/versioned_docs/version-0.9.1/features/http/about.md @@ -95,7 +95,7 @@ In HTTP on Spark, each partition manages a running web client that sends requests. A schematic representation can be seen below:

- +

## Schema diff --git a/website/versioned_docs/version-0.9.1/features/lightgbm/about.md b/website/versioned_docs/version-0.9.1/features/lightgbm/about.md index 07d24ef6b8..1cc9150ad0 100644 --- a/website/versioned_docs/version-0.9.1/features/lightgbm/about.md +++ b/website/versioned_docs/version-0.9.1/features/lightgbm/about.md @@ -55,7 +55,7 @@ model = LightGBMRegressor(application='quantile', ``` For an end to end application, check out the LightGBM [notebook -example](/docs/features/lightgbm/LightGBM%20-%20Overview). +example](../LightGBM%20-%20Overview). ### Architecture diff --git a/website/versioned_docs/version-0.9.1/features/model_interpretability/ModelInterpretability - Snow Leopard Detection.md b/website/versioned_docs/version-0.9.1/features/model_interpretability/ModelInterpretability - Snow Leopard Detection.md deleted file mode 100644 index b191225293..0000000000 --- a/website/versioned_docs/version-0.9.1/features/model_interpretability/ModelInterpretability - Snow Leopard Detection.md +++ /dev/null @@ -1,282 +0,0 @@ ---- -title: ModelInterpretability - Snow Leopard Detection -hide_title: true -status: stable ---- -## Automated Snow Leopard Detection with Synapse Machine Learning - - - - -```python -import os -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - from pyspark.sql import SparkSession - spark = SparkSession.builder.getOrCreate() - from notebookutils.mssparkutils.credentials import getSecret - os.environ["BING_IMAGE_SEARCH_KEY"] = getSecret("mmlspark-keys", "bing-image-search-key") - -# WARNING this notebook requires alot of memory. -# If you get a heap space error, try dropping the number of images bing returns -# or by writing out the images to parquet first - -# Replace the following with a line like: BING_IMAGE_SEARCH_KEY = "hdwo2oyd3o928s....." -BING_IMAGE_SEARCH_KEY = os.environ["BING_IMAGE_SEARCH_KEY"] -``` - - -```python -from synapse.ml.cognitive import * -from synapse.ml.core.spark import FluentAPI -from pyspark.sql.functions import lit - -def bingPhotoSearch(name, queries, pages): - offsets = [offset*10 for offset in range(0, pages)] - parameters = [(query, offset) for offset in offsets for query in queries] - - return spark.createDataFrame(parameters, ("queries","offsets")) \ - .mlTransform( - BingImageSearch() # Apply Bing Image Search - .setSubscriptionKey(BING_IMAGE_SEARCH_KEY) # Set the API Key - .setOffsetCol("offsets") # Specify a column containing the offsets - .setQueryCol("queries") # Specify a column containing the query words - .setCount(10) # Specify the number of images to return per offset - .setImageType("photo") # Specify a filter to ensure we get photos - .setOutputCol("images")) \ - .mlTransform(BingImageSearch.getUrlTransformer("images", "urls")) \ - .withColumn("labels", lit(name)) \ - .limit(400) - -``` - - - - -```python -def displayDF(df, n=5, image_cols = set(["urls"])): - rows = df.take(n) - cols = df.columns - header = "".join(["" + c + "" for c in cols]) - - style = """ - - - - -""" - - table = [] - for row in rows: - table.append("") - for col in cols: - if col in image_cols: - rep = ''.format(row[col]) - else: - rep = row[col] - table.append("{}".format(rep)) - table.append("") - tableHTML = "".join(table) - - body = """ - - - - {} - - {} -
- - - """.format(header, tableHTML) - try: - displayHTML(style + body) - except: - pass -``` - - -```python -snowLeopardQueries = ["snow leopard"] -snowLeopardUrls = bingPhotoSearch("snow leopard", snowLeopardQueries, pages=100) -displayDF(snowLeopardUrls) -``` - - -```python -randomWords = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/random_words.parquet").cache() -randomWords.show() -``` - - -```python -randomLinks = randomWords \ - .mlTransform(BingImageSearch() - .setSubscriptionKey(BING_IMAGE_SEARCH_KEY) - .setCount(10) - .setQueryCol("words") - .setOutputCol("images")) \ - .mlTransform(BingImageSearch.getUrlTransformer("images", "urls")) \ - .withColumn("label", lit("other")) \ - .limit(400) - -displayDF(randomLinks) -``` - - -```python -images = snowLeopardUrls.union(randomLinks).distinct().repartition(100)\ - .mlTransform(BingImageSearch.downloadFromUrls("urls", "image", concurrency=5, timeout=5000))\ - .dropna() - -train, test = images.randomSplit([.7,.3], seed=1) -``` - - -```python -from pyspark.ml import Pipeline - -from pyspark.ml.feature import StringIndexer - -from pyspark.ml.classification import LogisticRegression - -from pyspark.sql.functions import udf - -from synapse.ml.downloader import ModelDownloader - -from synapse.ml.cntk import ImageFeaturizer - -from synapse.ml.stages import UDFTransformer - -from pyspark.sql.types import * - - - -def getIndex(row): - - return float(row[1]) - - - -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - - network = ModelDownloader(spark, "abfss://synapse@mmlsparkeuap.dfs.core.windows.net/models/").downloadByName("ResNet50") - -else: - - network = ModelDownloader(spark, "dbfs:/Models/").downloadByName("ResNet50") - - - -model = Pipeline(stages=[ - - StringIndexer(inputCol = "labels", outputCol="index"), - - ImageFeaturizer(inputCol="image", outputCol="features", cutOutputLayers=1).setModel(network), - - LogisticRegression(maxIter=5, labelCol="index", regParam=10.0), - - UDFTransformer()\ - - .setUDF(udf(getIndex, DoubleType()))\ - - .setInputCol("probability")\ - - .setOutputCol("leopard_prob") - -]) - - - -fitModel = model.fit(train) -``` - - - - -```python -def plotConfusionMatrix(df, label, prediction, classLabels): - - from synapse.ml.plot import confusionMatrix - - import matplotlib.pyplot as plt - - fig = plt.figure(figsize=(4.5, 4.5)) - - confusionMatrix(df, label, prediction, classLabels) - - display(fig) - - - -if os.environ.get("AZURE_SERVICE", None) != "Microsoft.ProjectArcadia": - - plotConfusionMatrix(fitModel.transform(test), "index", "prediction", fitModel.stages[0].labels) -``` - - -```python -import urllib.request - -from synapse.ml.lime import ImageLIME - - - -test_image_url = "https://mmlspark.blob.core.windows.net/graphics/SnowLeopardAD/snow_leopard1.jpg" - -with urllib.request.urlopen(test_image_url) as url: - - barr = url.read() - -test_subsample = spark.createDataFrame([(bytearray(barr),)], ["image"]) - - - -lime = ImageLIME()\ - - .setModel(fitModel)\ - - .setPredictionCol("leopard_prob")\ - - .setOutputCol("weights")\ - - .setInputCol("image")\ - - .setCellSize(100.0)\ - - .setModifier(50.0)\ - - .setNSamples(300) - - - -result = lime.transform(test_subsample) -``` - - -```python -import matplotlib.pyplot as plt -import PIL, io, numpy as np - -def plot_superpixels(row): - image_bytes = row['image'] - superpixels = row['superpixels']['clusters'] - weights = list(row['weights']) - mean_weight = np.percentile(weights,90) - img = 
(PIL.Image.open(io.BytesIO(image_bytes))).convert('RGBA') - image_array = np.asarray(img).copy() - for (sp, w) in zip(superpixels, weights): - if w > mean_weight: - for (x, y) in sp: - image_array[y, x, 1] = 255 - image_array[y, x, 3] = 200 - plt.clf() - plt.imshow(image_array) - display() - -# Gets first row from the LIME-transformed data frame -if os.environ.get("AZURE_SERVICE", None) != "Microsoft.ProjectArcadia": - plot_superpixels(result.take(1)[0]) -``` - -### Your results will look like: - diff --git a/website/versioned_docs/version-0.9.1/features/onnx/about.md b/website/versioned_docs/version-0.9.1/features/onnx/about.md index dbb8e95cfe..e9946a0203 100644 --- a/website/versioned_docs/version-0.9.1/features/onnx/about.md +++ b/website/versioned_docs/version-0.9.1/features/onnx/about.md @@ -11,7 +11,7 @@ description: Learn how to use the ONNX model transformer to run inference for an [ONNX](https://onnx.ai/) is an open format to represent both deep learning and traditional machine learning models. With ONNX, AI developers can more easily move models between state-of-the-art tools and choose the combination that is best for them. -MMLSpark now includes a Spark transformer to bring an trained ONNX model to Apache Spark, so you can run inference on your data with Spark's large-scale data processing power. +SynapseML now includes a Spark transformer to bring a trained ONNX model to Apache Spark, so you can run inference on your data with Spark's large-scale data processing power. ## Usage @@ -45,5 +45,5 @@ MMLSpark now includes a Spark transformer to bring an trained ONNX model to Apac ## Example -- [Interpretability - Image Explainers](/docs/examples/model_interpretability/Interpretability%20-%20Image%20Explainers) -- [ONNX - Inference on Spark](/docs/features/onnx/ONNX%20-%20Inference%20on%20Spark) +- [Interpretability - Image Explainers](../../responsible_ai/Interpretability%20-%20Image%20Explainers) +- [ONNX - Inference on Spark](../ONNX%20-%20Inference%20on%20Spark) diff --git a/website/versioned_docs/version-0.9.1/features/responsible_ai/Data Balance Analysis.md b/website/versioned_docs/version-0.9.1/features/responsible_ai/Data Balance Analysis.md new file mode 100644 index 0000000000..19d7a0df32 --- /dev/null +++ b/website/versioned_docs/version-0.9.1/features/responsible_ai/Data Balance Analysis.md @@ -0,0 +1,196 @@ +--- +title: Data Balance Analysis on Spark +hide_title: true +sidebar_label: Data Balance Analysis +description: Learn how to do Data Balance Analysis on Spark to determine how well features and feature values are represented in your dataset. +--- + +# Data Balance Analysis on Spark + +## Context + +Data Balance Analysis is relevant for overall understanding of datasets, but it becomes essential when thinking about building Machine Learning services out of such datasets. Having a well-balanced data representation is critical when developing models in a responsible way, especially in terms of fairness. +It is unfortunately all too easy to build an ML model that produces biased results for subsets of an overall population, by training or testing the model on biased ground truth data. There are multiple case studies of biased models assisting in granting loans, healthcare, recruitment opportunities and many other decision making tasks. In most of these examples, the data from which these models are trained was the common issue.
These findings emphasize how important it is for model creators and auditors to analyze data balance: to measure training data across sub-populations and ensure the data has good coverage and a balanced representation of labels across sensitive categories and category combinations, and to check that test data is representative of the target population. + +In summary, Data Balance Analysis, used as a step for building ML models, has the following benefits: + +* **Reduces risks for unbalanced models (facilitates service fairness) and reduces costs of ML building** by identifying early on data representation gaps that prompt data scientists to seek mitigation steps (collect more data, follow a specific sampling mechanism, create synthetic data, etc.) before proceeding to train their models. +* **Enables easy e2e debugging of ML systems** in combination with [Fairlearn](https://fairlearn.org/) by providing a clear view of whether, for an unbalanced model, the issue is tied to the data or the model. + +## Examples + +* [Data Balance Analysis - Adult Census Income](../../../examples/responsible_ai/DataBalanceAnalysis%20-%20Adult%20Census%20Income) + +## Usage + +Data Balance Analysis currently supports three transformers in the `synapse.ml.exploratory` namespace: + +* FeatureBalanceMeasure - supervised (requires label column) +* DistributionBalanceMeasure - unsupervised (doesn't require label column) +* AggregateBalanceMeasure - unsupervised (doesn't require label column) + +1. Import all three transformers. + + For example: + + ```python + from synapse.ml.exploratory import AggregateBalanceMeasure, DistributionBalanceMeasure, FeatureBalanceMeasure + ``` + +2. Load your dataset, define features of interest, and ensure that the label column is binary. The `FeatureBalanceMeasure` transformer currently only supports binary labels, but support for numerical labels will be added soon. + + For example: + + ```python + import pyspark.sql.functions as F + + features = ["race", "sex"] + label = "income" + + df = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/AdultCensusIncome.parquet") + + # Convert the "income" column from {<=50K, >50K} to {0, 1} to represent our binary classification label column + df = df.withColumn(label, F.when(F.col(label).contains("<=50K"), F.lit(0)).otherwise(F.lit(1))) + ``` + +3. Create a `FeatureBalanceMeasure` transformer and call `setSensitiveCols` to set the list of sensitive features and call `setLabelCol` to set the binary label column. Then, call the `transform` method with your dataset and visualize the resulting dataframe. + + For example: + + ```python + feature_balance_measures = ( + FeatureBalanceMeasure() + .setSensitiveCols(features) + .setLabelCol(label) + .transform(df) + ) + feature_balance_measures.show(truncate=False) + ``` + +4. Create a `DistributionBalanceMeasure` transformer and call `setSensitiveCols` to set the list of sensitive features. Then, call the `transform` method with your dataset and visualize the resulting dataframe. + + For example: + + ```python + distribution_balance_measures = ( + DistributionBalanceMeasure() + .setSensitiveCols(features) + .transform(df) + ) + distribution_balance_measures.show(truncate=False) + ``` + +5. Create an `AggregateBalanceMeasure` transformer and call `setSensitiveCols` to set the list of sensitive features. Then, call the `transform` method with your dataset and visualize the resulting dataframe.
+ + For example: + + ```python + aggregate_balance_measures = ( + AggregateBalanceMeasure() + .setSensitiveCols(features) + .transform(df) + ) + aggregate_balance_measures.show(truncate=False) + ``` + +Note: If you are running this notebook in a Spark environment such as Azure Synapse or Databricks, then you can easily visualize the imbalance measures by calling the built-in plotting function `display()`. + +## Measure Explanations + +### Feature Balance Measures + +Feature Balance Measures allow us to see whether each combination of sensitive features is receiving the positive outcome (true prediction) at balanced probability. + +In this context, we define a feature balance measure, also referred to as the parity, for label y as the difference between the association metrics of two different sensitive classes \\([x_A, x_B]\\), with respect to the association metric \\(A(x_i, y)\\). That is: + +$$parity(y \vert x_A, x_B, A(\cdot)) \coloneqq A(x_A, y) - A(x_B, y) $$ + +Using the dataset, we can see if the various sexes and races are receiving >50k income at equal or unequal rates. + +Note: Many of these metrics were influenced by this paper [Measuring Model Biases in the Absence of Ground Truth](https://arxiv.org/abs/2103.03417). + +Association Metric | Family | Description | Interpretation/Formula | Reference +| - | - | - | - | - +Demographic Parity | Fairness | Proportion of each segment of a protected class (e.g. gender) should receive the positive outcome at equal rates. | Closer to 0 means better parity. \\(DP = P(Y \vert A = "Male") - P(Y \vert A = "Female")\\). Y = Positive label rate. | [Link](https://en.wikipedia.org/wiki/Fairness_%28machine_learning%29) +Pointwise Mutual Information (PMI), normalized PMI | Entropy | The PMI of a pair of feature values (ex: Gender=Male and Gender=Female) quantifies the discrepancy between the probability of their coincidence given their joint distribution and their individual distributions (assuming independence). | Range (normalized) [-1, 1]. -1 for no co-occurrences. 0 for co-occurrences at random. 1 for complete co-occurrences. | [Link](https://en.wikipedia.org/wiki/Pointwise_mutual_information) +Sorensen-Dice Coefficient (SDC) | Intersection-over-Union | Used to gauge the similarity of two samples. Related to F1 score. | Equals twice the number of elements common to both sets divided by the sum of the number of elements in each set. | [Link](https://en.wikipedia.org/wiki/S%C3%B8rensen%E2%80%93Dice_coefficient) +Jaccard Index | Intersection-over-Union | Similar to SDC, gauges the similarity and diversity of sample sets. | Equals the size of the intersection divided by the size of the union of the sample sets. | [Link](https://en.wikipedia.org/wiki/Jaccard_index) +Kendall Rank Correlation | Correlation and Statistical Tests | Used to measure the ordinal association between two measured quantities. | High when observations have a similar rank and low when observations have a dissimilar rank between the two variables. | [Link](https://en.wikipedia.org/wiki/Kendall_rank_correlation_coefficient) +Log-Likelihood Ratio | Correlation and Statistical Tests | Calculates the degree to which data supports one variable versus another. Log of the likelihood ratio, which gives the probability of correctly predicting the label in ratio to the probability of incorrectly predicting the label. | If likelihoods are similar, it should be close to 0.
| [Link](https://en.wikipedia.org/wiki/Likelihood_function#Likelihood_ratio) +t-test | Correlation and Statistical Tests | Used to compare the means of two groups (pairwise). | The value looked up in the t-distribution tells whether the difference is statistically significant or not. | [Link](https://en.wikipedia.org/wiki/Student's_t-test) + +### Distribution Balance Measures + +Distribution Balance Measures allow us to compare our data with a reference distribution (currently only uniform distribution is supported as a reference distribution). They are calculated per sensitive column and do not depend on the label column. + +For example, let's assume we have a dataset with 9 rows and a Gender column, and we observe that: + +* "Male" appears 4 times +* "Female" appears 3 times +* "Other" appears 2 times + +Assuming the uniform distribution: +$$ReferenceCount \coloneqq \frac{numRows}{numFeatureValues}$$ +$$ReferenceProbability \coloneqq \frac{1}{numFeatureValues}$$ + +Feature Value | Observed Count | Reference Count | Observed Probability | Reference Probability +| - | - | - | - | - +Male | 4 | 9/3 = 3 | 4/9 = 0.44 | 3/9 = 0.33 +Female | 3 | 9/3 = 3 | 3/9 = 0.33 | 3/9 = 0.33 +Other | 2 | 9/3 = 3 | 2/9 = 0.22 | 3/9 = 0.33 + +We can use distance measures to find out how far apart our observed and reference distributions of these feature values are. Some of these distance measures include: + +Measure | Description | Interpretation | Reference +| - | - | - | - +KL Divergence | Measure of how one probability distribution is different from a second, reference probability distribution. Measure of the information gained when one revises one's beliefs from the prior probability distribution Q to the posterior probability distribution P. In other words, it is the amount of information lost when Q is used to approximate P. | Non-negative. 0 means P = Q. | [Link](https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence) +JS Distance | Measures the similarity between two probability distributions. Symmetrized and smoothed version of the Kullback–Leibler (KL) divergence. Square root of JS Divergence. | Range [0, 1]. 0 means the two distributions are identical. | [Link](https://en.wikipedia.org/wiki/Jensen%E2%80%93Shannon_divergence) +Wasserstein Distance | This distance is also known as the earth mover’s distance, since it can be seen as the minimum amount of “work” required to transform u into v, where “work” is measured as the amount of distribution weight that must be moved, multiplied by the distance it has to be moved. | Non-negative. 0 means P = Q. | [Link](https://en.wikipedia.org/wiki/Wasserstein_metric) +Infinity Norm Distance | Distance between two vectors is the greatest of their differences along any coordinate dimension. Also called Chebyshev distance or chessboard distance. | Non-negative. 0 means same distribution. | [Link](https://en.wikipedia.org/wiki/Chebyshev_distance) +Total Variation Distance | It is equal to half the L1 (Manhattan) distance between the two distributions. Take the difference between the two proportions in each category, add up the absolute values of all the differences, and then divide the sum by 2. | Non-negative. 0 means same distribution. | [Link](https://en.wikipedia.org/wiki/Total_variation_distance_of_probability_measures) +Chi-Squared Test | The chi-square test tests the null hypothesis that the categorical data has the given frequencies, given the expected frequencies in each category.
| The p-value gives evidence against the null hypothesis that the difference between observed and expected frequencies is due to random chance. | [Link](https://en.wikipedia.org/wiki/Chi-squared_test) + +### Aggregate Balance Measures + +Aggregate Balance Measures allow us to obtain a higher-level notion of inequality. They are calculated on the set of all sensitive columns and do not depend on the label column. + +These measures look at the distribution of records across all combinations of sensitive columns. For example, if Sex and Race are specified as sensitive features, it then tries to quantify imbalance across all combinations of the two specified features - (Male, Black), (Female, White), (Male, Asian-Pac-Islander), etc. + +Measure | Description | Interpretation | Reference +| - | - | - | - +Atkinson Index | It presents the percentage of total income that a given society would have to forego in order to have more equal shares of income between its citizens. This measure depends on the degree of society aversion to inequality (a theoretical parameter decided by the researcher), where a higher value entails greater social utility or willingness by individuals to accept smaller incomes in exchange for a more equal distribution. An important feature of the Atkinson index is that it can be decomposed into within-group and between-group inequality. | Range [0, 1]. 0 if perfect equality. 1 means maximum inequality. In our case, it is the proportion of records for a sensitive columns’ combination. | [Link](https://en.wikipedia.org/wiki/Atkinson_index) +Theil T Index | GE(1) = Theil's T and is more sensitive to differences at the top of the distribution. The Theil index is a statistic used to measure economic inequality. The Theil index measures an entropic "distance" the population is away from the "ideal" egalitarian state of everyone having the same income. | If everyone has the same income, then T_T equals 0. If one person has all the income, then T_T gives the result (ln N). 0 means equal income and larger values mean higher level of disproportion. | [Link](https://en.wikipedia.org/wiki/Theil_index) +Theil L Index | GE(0) = Theil's L and is more sensitive to differences at the lower end of the distribution. Logarithm of (mean income)/(income i), over all the incomes included in the summation. It is also referred to as the mean log deviation measure. Because a transfer from a larger income to a smaller one will change the smaller income's ratio more than it changes the larger income's ratio, the transfer-principle is satisfied by this index. | Same interpretation as Theil T Index. | [Link](https://en.wikipedia.org/wiki/Theil_index) + +## Mitigation + +It is not a stretch to say that every real-world dataset has caveats, biases, and imbalances. Data collection is costly. Mitigating data imbalance (de-biasing data) is an area of research. There are many techniques available at various stages of the ML lifecycle, i.e., during pre-processing, in-processing, and post-processing. Here we outline a couple of pre-processing techniques: + +### Resampling + +This involves under-sampling the majority class and over-sampling the minority class. The most naïve way to over-sample is to duplicate records, and the most naïve way to under-sample is to remove records at random (a minimal sketch follows the caveats below). + +* Caveats: + + 1. Under-sampling may remove valuable information. + 2. Over-sampling may cause overfitting and poor generalization on the test set.
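+
+As a minimal sketch of this naïve approach (only an illustration, not part of the SynapseML API), reusing the `df` and `label` defined in the Usage section above, where label `1` (income >50K) is assumed to be the minority class:
+
+```python
+import pyspark.sql.functions as F
+
+# Class counts for the binary label (assumes integer labels 0 and 1, with 1 the minority class).
+counts = {row[label]: row["count"] for row in df.groupBy(label).count().collect()}
+ratio = counts[1] / counts[0]  # minority size / majority size
+
+# Naive under-sampling: randomly keep only a fraction of the majority class.
+undersampled = df.sampleBy(label, fractions={0: ratio, 1: 1.0}, seed=42)
+
+# Naive over-sampling: duplicate minority records by sampling with replacement.
+minority_upsampled = df.filter(F.col(label) == 1).sample(withReplacement=True, fraction=1.0 / ratio, seed=42)
+oversampled = df.filter(F.col(label) == 0).union(minority_upsampled)
+```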
+ +![Bar chart undersampling and oversampling](https://mmlspark.blob.core.windows.net/graphics/responsible_ai/DataBalanceAnalysis_SamplingBar.png) + +There are smarter techniques to under-sample and over-sample in the literature, many of which are implemented in Python’s [imbalanced-learn](https://imbalanced-learn.org/stable/) package. + +For example, we can cluster the records of the majority class, and do the under-sampling by removing records from each cluster, thus seeking to preserve information. + +One under-sampling technique uses Tomek links. Tomek links are pairs of very close instances from opposite classes. Removing the instances of the majority class of each pair increases the space between the two classes, facilitating the classification process. A similar way to under-sample the majority class is Near-Miss. It first calculates the distance between all the points in the larger class and the points in the smaller class. When two points belonging to different classes are very close to each other in the distribution, this algorithm eliminates the data point of the larger class, thereby trying to balance the distribution. + +![Tomek Links](https://mmlspark.blob.core.windows.net/graphics/responsible_ai/DataBalanceAnalysis_TomekLinks.png) + +In over-sampling, instead of creating exact copies of the minority class records, we can introduce small variations into those copies, creating more diverse synthetic samples. This technique is called SMOTE (Synthetic Minority Oversampling Technique). It randomly picks a point from the minority class and computes the k-nearest neighbors for this point. The synthetic points are added between the chosen point and its neighbors. + +![Synthetic Samples](https://mmlspark.blob.core.windows.net/graphics/responsible_ai/DataBalanceAnalysis_SyntheticSamples.png) + +### Reweighting + +Each table cell has an expected and an observed value; the weight is essentially the expected value divided by the observed value. This is easy to extend to multiple features with more than two groups. The weights are then incorporated into the loss function during model training. + +![Reweighting](https://mmlspark.blob.core.windows.net/graphics/responsible_ai/DataBalanceAnalysis_Reweight.png) diff --git a/website/docs/features/responsible_ai/Interpretability - Image Explainers.md b/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md similarity index 100% rename from website/docs/features/responsible_ai/Interpretability - Image Explainers.md rename to website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md diff --git a/website/versioned_docs/version-0.9.1/features/model_interpretability/about.md b/website/versioned_docs/version-0.9.1/features/responsible_ai/Model Interpretation on Spark.md similarity index 97% rename from website/versioned_docs/version-0.9.1/features/model_interpretability/about.md rename to website/versioned_docs/version-0.9.1/features/responsible_ai/Model Interpretation on Spark.md index 01c71edc6d..e81aa9525d 100644 --- a/website/versioned_docs/version-0.9.1/features/model_interpretability/about.md +++ b/website/versioned_docs/version-0.9.1/features/responsible_ai/Model Interpretation on Spark.md @@ -1,7 +1,7 @@ --- title: Model Interpretation on Spark hide_title: true -sidebar_label: About +sidebar_label: Model Interpretation on Spark --- # Model Interpretation on Spark @@ -26,9 +26,9 @@ Both explainers extends from `org.apache.spark.ml.Transformer`.
After setting up To see examples of model interpretability on Spark in action, take a look at these sample notebooks: -- [Tabular SHAP explainer](/docs/examples/model_interpretability/Interpretability%20-%20Tabular%20SHAP%20explainer) -- [Image explainers](/docs/examples/model_interpretability/Interpretability%20-%20Image%20Explainers) -- [Text explainers](/docs/examples/model_interpretability/Interpretability%20-%20Text%20Explainers) +- [Tabular SHAP explainer](../../../examples/responsible_ai/Interpretability%20-%20Tabular%20SHAP%20explainer) +- [Image explainers](../../../features/responsible_ai/Interpretability%20-%20Image%20Explainers) +- [Text explainers](../../../examples/responsible_ai/Interpretability%20-%20Text%20Explainers) | | Tabular models | Vector models | Image models | Text models | |------------------------|-----------------------------|---------------------------|-------------------------|-----------------------| diff --git a/website/versioned_docs/version-0.9.1/features/spark_serving/about.md b/website/versioned_docs/version-0.9.1/features/spark_serving/about.md index efdba757fe..1881e1b9c4 100644 --- a/website/versioned_docs/version-0.9.1/features/spark_serving/about.md +++ b/website/versioned_docs/version-0.9.1/features/spark_serving/about.md @@ -4,7 +4,9 @@ hide_title: true sidebar_label: About --- -# Spark Serving + + +# Spark Serving ### An Engine for Deploying Spark Jobs as Distributed Web Services @@ -31,7 +33,7 @@ sidebar_label: About ### Jupyter Notebook Examples -- [Deploy a classifier trained on the Adult Census Dataset](/docs/features/spark_serving/SparkServing%20-%20Deploying%20a%20Classifier) +- [Deploy a classifier trained on the Adult Census Dataset](../SparkServing%20-%20Deploying%20a%20Classifier) - More coming soon! ### Spark Serving Hello World @@ -108,14 +110,14 @@ You can deploy head node load balancing with the `HTTPSource` and distributes work across partitions, then collects response data back to the head node. All HTTP requests are kept and replied to on the head node. In both python and Scala these classes can be access by using -`spark.readStream.server()` after importing MMLSpark. +`spark.readStream.server()` after importing SynapseML. This mode allows for more complex windowing, repartitioning, and SQL operations. This option is also idea for rapid setup and testing, as it doesn't require any additional load balancing or network switches.A diagram of this configuration can be seen below:

- +

### Fully Distributed (Custom Load Balancer) @@ -124,7 +126,7 @@ You can configure Spark Serving for a custom load balancer using the `DistributedHTTPSource` and `DistributedHTTPSink` classes. This mode spins up servers on each executor JVM. In both python and Scala these classes can be access by using -`spark.readStream.distributedServer()` after importing MMLSpark. +`spark.readStream.distributedServer()` after importing SynapseML. Each server will feed its executor's partitions in parallel. This mode is key for high throughput and low latency as data does not need to be transferred to and from the @@ -132,11 +134,11 @@ head node. This deployment results in several web services that all route into the same spark computation. You can deploy an external load balancer to unify the executor's services under a single IP address. Support for automatic load balancer management and deployment is -targeted for the next release of MMLSpark. A diagram of this +targeted for the next release of SynapseML. A diagram of this configuration can be seen below:

- +

Queries that involve data movement across workers, such as a nontrivial @@ -148,7 +150,7 @@ routing will be automatically handled by the sink. ### Sub-Millisecond Latency with Continuous Processing

- +

Continuous processing can be enabled by hooking into the `HTTPSourceV2` class using: diff --git a/website/versioned_docs/version-0.9.1/features/vw/about.md b/website/versioned_docs/version-0.9.1/features/vw/about.md index ea63e476e9..292145e498 100644 --- a/website/versioned_docs/version-0.9.1/features/vw/about.md +++ b/website/versioned_docs/version-0.9.1/features/vw/about.md @@ -4,7 +4,7 @@ hide_title: true sidebar_label: About --- - + # VowpalWabbit on Apache Spark @@ -64,7 +64,7 @@ model = (VowpalWabbitRegressor(args="--holdout_off --loss_function quantile -q : Through the args parameter you can pass command line parameters to VW as documented in the [VW Wiki](https://github.com/vowpalWabbit/vowpal_wabbit/wiki/Command-Line-Arguments). For an end to end application, check out the VowpalWabbit [notebook -example](/docs/features/vw/Vowpal%20Wabbit%20-%20Overview). +example](../Vowpal%20Wabbit%20-%20Overview). ### Hyper-parameter tuning diff --git a/website/versioned_docs/version-0.9.1/reference/developer-readme.md b/website/versioned_docs/version-0.9.1/reference/developer-readme.md index 735f5af41a..d711c04b5f 100644 --- a/website/versioned_docs/version-0.9.1/reference/developer-readme.md +++ b/website/versioned_docs/version-0.9.1/reference/developer-readme.md @@ -2,7 +2,7 @@ title: Build System Commands hide_title: true sidebar_label: Build System Commands -description: MMLSpark Development Setup +description: SynapseML Development Setup --- # SynapseML Development Setup diff --git a/website/versioned_docs/version-0.9.1/reference/docker.md b/website/versioned_docs/version-0.9.1/reference/docker.md index 43e87a50d2..46651b3cd6 100644 --- a/website/versioned_docs/version-0.9.1/reference/docker.md +++ b/website/versioned_docs/version-0.9.1/reference/docker.md @@ -1,7 +1,7 @@ --- -title: Using the MMLSpark Docker Image +title: Using the SynapseML Docker Image sidebar_label: Docker Image -description: Using the MMLSpark Docker Image +description: Using the SynapseML Docker Image --- ## Quickstart: install and run the Docker image diff --git a/website/versioned_sidebars/version-0.9.1-sidebars.json b/website/versioned_sidebars/version-0.9.1-sidebars.json index 4924724725..1ed5b57788 100644 --- a/website/versioned_sidebars/version-0.9.1-sidebars.json +++ b/website/versioned_sidebars/version-0.9.1-sidebars.json @@ -66,15 +66,19 @@ }, { "type": "category", - "label": "Model Interpretability", + "label": "Responsible AI", "items": [ { "type": "doc", - "id": "version-0.9.1/features/model_interpretability/about" + "id": "version-0.9.1/features/responsible_ai/Data Balance Analysis" }, { "type": "doc", - "id": "version-0.9.1/features/model_interpretability/ModelInterpretability - Snow Leopard Detection" + "id": "version-0.9.1/features/responsible_ai/Interpretability - Image Explainers" + }, + { + "type": "doc", + "id": "version-0.9.1/features/responsible_ai/Model Interpretation on Spark" } ], "collapsible": true, @@ -226,19 +230,27 @@ }, { "type": "category", - "label": "Model Interpretability", + "label": "Responsible AI", "items": [ { "type": "doc", - "id": "version-0.9.1/examples/model_interpretability/Interpretability - Image Explainers" + "id": "version-0.9.1/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income" + }, + { + "type": "doc", + "id": "version-0.9.1/examples/responsible_ai/Interpretability - Explanation Dashboard" + }, + { + "type": "doc", + "id": "version-0.9.1/examples/responsible_ai/Interpretability - Tabular SHAP explainer" }, { "type": "doc", - "id": 
"version-0.9.1/examples/model_interpretability/Interpretability - Tabular SHAP explainer" + "id": "version-0.9.1/examples/responsible_ai/Interpretability - Text Explainers" }, { "type": "doc", - "id": "version-0.9.1/examples/model_interpretability/Interpretability - Text Explainers" + "id": "version-0.9.1/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection" } ], "collapsible": true, From d72dd3e5194bfd3a2abe002177a73ea20bd9e408 Mon Sep 17 00:00:00 2001 From: Mark Date: Tue, 9 Nov 2021 18:22:53 -0500 Subject: [PATCH 13/40] docs: remove website whitepsace --- .../AzureSearchIndex - Met Artworks.ipynb | 40 +++---- .../Classification - Adult Census.ipynb | 4 +- ...pretability - Snow Leopard Detection.ipynb | 104 +++++++++--------- ... - Amazon Book Reviews with Word2Vec.ipynb | 64 +++++------ .../TextAnalytics - Amazon Book Reviews.ipynb | 38 +++---- .../CognitiveServices - Overview.ipynb | 32 +++--- .../lightgbm/LightGBM - Overview.ipynb | 18 +-- .../onnx/ONNX - Inference on Spark.ipynb | 68 ++++++------ .../Interpretability - Image Explainers.ipynb | 28 ++--- .../vw/Vowpal Wabbit - Overview.ipynb | 4 +- .../AzureSearchIndex - Met Artworks.md | 18 --- ...terpretability - Snow Leopard Detection.md | 50 --------- ...ics - Amazon Book Reviews with Word2Vec.md | 30 ----- .../TextAnalytics - Amazon Book Reviews.md | 20 ---- .../features/CognitiveServices - Overview.md | 17 --- .../features/lightgbm/LightGBM - Overview.md | 8 -- .../onnx/ONNX - Inference on Spark.md | 35 ------ .../Interpretability - Image Explainers.md | 70 +----------- 18 files changed, 201 insertions(+), 447 deletions(-) diff --git a/notebooks/examples/AzureSearchIndex - Met Artworks.ipynb b/notebooks/examples/AzureSearchIndex - Met Artworks.ipynb index c83ea78d5f..7490778573 100644 --- a/notebooks/examples/AzureSearchIndex - Met Artworks.ipynb +++ b/notebooks/examples/AzureSearchIndex - Met Artworks.ipynb @@ -85,20 +85,20 @@ "cell_type": "code", "execution_count": 7, "source": [ - "from synapse.ml.cognitive import AnalyzeImage\r\n", - "from synapse.ml.stages import SelectColumns\r\n", - "\r\n", - "#define pipeline\r\n", - "describeImage = (AnalyzeImage()\r\n", - " .setSubscriptionKey(VISION_API_KEY)\r\n", - " .setLocation(\"eastus\")\r\n", - " .setImageUrlCol(\"PrimaryImageUrl\")\r\n", - " .setOutputCol(\"RawImageDescription\")\r\n", - " .setErrorCol(\"Errors\")\r\n", - " .setVisualFeatures([\"Categories\", \"Description\", \"Faces\", \"ImageType\", \"Color\", \"Adult\"])\r\n", - " .setConcurrency(5))\r\n", - "\r\n", - "df2 = describeImage.transform(data)\\\r\n", + "from synapse.ml.cognitive import AnalyzeImage\n", + "from synapse.ml.stages import SelectColumns\n", + "\n", + "#define pipeline\n", + "describeImage = (AnalyzeImage()\n", + " .setSubscriptionKey(VISION_API_KEY)\n", + " .setLocation(\"eastus\")\n", + " .setImageUrlCol(\"PrimaryImageUrl\")\n", + " .setOutputCol(\"RawImageDescription\")\n", + " .setErrorCol(\"Errors\")\n", + " .setVisualFeatures([\"Categories\", \"Description\", \"Faces\", \"ImageType\", \"Color\", \"Adult\"])\n", + " .setConcurrency(5))\n", + "\n", + "df2 = describeImage.transform(data)\\\n", " .select(\"*\", \"RawImageDescription.*\").drop(\"Errors\", \"RawImageDescription\")" ], "outputs": [], @@ -124,12 +124,12 @@ "cell_type": "code", "execution_count": 10, "source": [ - "from synapse.ml.cognitive import *\r\n", - "df2.writeToAzureSearch(\r\n", - " subscriptionKey=AZURE_SEARCH_KEY,\r\n", - " actionCol=\"searchAction\",\r\n", - " serviceName=search_service,\r\n", - " 
indexName=search_index,\r\n", + "from synapse.ml.cognitive import *\n", + "df2.writeToAzureSearch(\n", + " subscriptionKey=AZURE_SEARCH_KEY,\n", + " actionCol=\"searchAction\",\n", + " serviceName=search_service,\n", + " indexName=search_index,\n", " keyCol=\"ObjectID\")" ], "outputs": [], diff --git a/notebooks/examples/classification/Classification - Adult Census.ipynb b/notebooks/examples/classification/Classification - Adult Census.ipynb index af9d521971..e1216289f3 100644 --- a/notebooks/examples/classification/Classification - Adult Census.ipynb +++ b/notebooks/examples/classification/Classification - Adult Census.ipynb @@ -68,8 +68,8 @@ "cell_type": "code", "execution_count": null, "source": [ - "from synapse.ml.train import TrainClassifier\r\n", - "from pyspark.ml.classification import LogisticRegression\r\n", + "from synapse.ml.train import TrainClassifier\n", + "from pyspark.ml.classification import LogisticRegression\n", "model = TrainClassifier(model=LogisticRegression(), labelCol=\"income\", numFeatures=256).fit(train)" ], "outputs": [], diff --git a/notebooks/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.ipynb b/notebooks/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.ipynb index 2d8a32c9c0..e6009b1a48 100644 --- a/notebooks/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.ipynb +++ b/notebooks/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.ipynb @@ -195,33 +195,33 @@ "cell_type": "code", "execution_count": null, "source": [ - "from pyspark.ml import Pipeline\r\n", - "from pyspark.ml.feature import StringIndexer\r\n", - "from pyspark.ml.classification import LogisticRegression\r\n", - "from pyspark.sql.functions import udf\r\n", - "from synapse.ml.downloader import ModelDownloader\r\n", - "from synapse.ml.cntk import ImageFeaturizer\r\n", - "from synapse.ml.stages import UDFTransformer\r\n", - "from pyspark.sql.types import *\r\n", - "\r\n", - "def getIndex(row):\r\n", - " return float(row[1])\r\n", - "\r\n", - "if os.environ.get(\"AZURE_SERVICE\", None) == \"Microsoft.ProjectArcadia\":\r\n", - " network = ModelDownloader(spark, \"abfss://synapse@mmlsparkeuap.dfs.core.windows.net/models/\").downloadByName(\"ResNet50\")\r\n", - "else:\r\n", - " network = ModelDownloader(spark, \"dbfs:/Models/\").downloadByName(\"ResNet50\")\r\n", - "\r\n", - "model = Pipeline(stages=[\r\n", - " StringIndexer(inputCol = \"labels\", outputCol=\"index\"),\r\n", - " ImageFeaturizer(inputCol=\"image\", outputCol=\"features\", cutOutputLayers=1).setModel(network),\r\n", - " LogisticRegression(maxIter=5, labelCol=\"index\", regParam=10.0),\r\n", - " UDFTransformer()\\\r\n", - " .setUDF(udf(getIndex, DoubleType()))\\\r\n", - " .setInputCol(\"probability\")\\\r\n", - " .setOutputCol(\"leopard_prob\")\r\n", - "])\r\n", - "\r\n", + "from pyspark.ml import Pipeline\n", + "from pyspark.ml.feature import StringIndexer\n", + "from pyspark.ml.classification import LogisticRegression\n", + "from pyspark.sql.functions import udf\n", + "from synapse.ml.downloader import ModelDownloader\n", + "from synapse.ml.cntk import ImageFeaturizer\n", + "from synapse.ml.stages import UDFTransformer\n", + "from pyspark.sql.types import *\n", + "\n", + "def getIndex(row):\n", + " return float(row[1])\n", + "\n", + "if os.environ.get(\"AZURE_SERVICE\", None) == \"Microsoft.ProjectArcadia\":\n", + " network = ModelDownloader(spark, \"abfss://synapse@mmlsparkeuap.dfs.core.windows.net/models/\").downloadByName(\"ResNet50\")\n", + 
"else:\n", + " network = ModelDownloader(spark, \"dbfs:/Models/\").downloadByName(\"ResNet50\")\n", + "\n", + "model = Pipeline(stages=[\n", + " StringIndexer(inputCol = \"labels\", outputCol=\"index\"),\n", + " ImageFeaturizer(inputCol=\"image\", outputCol=\"features\", cutOutputLayers=1).setModel(network),\n", + " LogisticRegression(maxIter=5, labelCol=\"index\", regParam=10.0),\n", + " UDFTransformer()\\\n", + " .setUDF(udf(getIndex, DoubleType()))\\\n", + " .setInputCol(\"probability\")\\\n", + " .setOutputCol(\"leopard_prob\")\n", + "])\n", + "\n", "fitModel = model.fit(train)" ], "outputs": [], @@ -238,14 +238,14 @@ "cell_type": "code", "execution_count": null, "source": [ - "def plotConfusionMatrix(df, label, prediction, classLabels):\r\n", - " from synapse.ml.plot import confusionMatrix\r\n", - " import matplotlib.pyplot as plt\r\n", - " fig = plt.figure(figsize=(4.5, 4.5))\r\n", - " confusionMatrix(df, label, prediction, classLabels)\r\n", - " display(fig)\r\n", - "\r\n", - "if os.environ.get(\"AZURE_SERVICE\", None) != \"Microsoft.ProjectArcadia\":\r\n", + "def plotConfusionMatrix(df, label, prediction, classLabels):\n", + " from synapse.ml.plot import confusionMatrix\n", + " import matplotlib.pyplot as plt\n", + " fig = plt.figure(figsize=(4.5, 4.5))\n", + " confusionMatrix(df, label, prediction, classLabels)\n", + " display(fig)\n", + "\n", + "if os.environ.get(\"AZURE_SERVICE\", None) != \"Microsoft.ProjectArcadia\":\n", " plotConfusionMatrix(fitModel.transform(test), \"index\", \"prediction\", fitModel.stages[0].labels)" ], "outputs": [], @@ -257,23 +257,23 @@ "cell_type": "code", "execution_count": null, "source": [ - "import urllib.request\r\n", - "from synapse.ml.lime import ImageLIME\r\n", - "\r\n", - "test_image_url = \"https://mmlspark.blob.core.windows.net/graphics/SnowLeopardAD/snow_leopard1.jpg\"\r\n", - "with urllib.request.urlopen(test_image_url) as url:\r\n", - " barr = url.read()\r\n", - "test_subsample = spark.createDataFrame([(bytearray(barr),)], [\"image\"])\r\n", - "\r\n", - "lime = ImageLIME()\\\r\n", - " .setModel(fitModel)\\\r\n", - " .setPredictionCol(\"leopard_prob\")\\\r\n", - " .setOutputCol(\"weights\")\\\r\n", - " .setInputCol(\"image\")\\\r\n", - " .setCellSize(100.0)\\\r\n", - " .setModifier(50.0)\\\r\n", - " .setNSamples(300)\r\n", - "\r\n", + "import urllib.request\n", + "from synapse.ml.lime import ImageLIME\n", + "\n", + "test_image_url = \"https://mmlspark.blob.core.windows.net/graphics/SnowLeopardAD/snow_leopard1.jpg\"\n", + "with urllib.request.urlopen(test_image_url) as url:\n", + " barr = url.read()\n", + "test_subsample = spark.createDataFrame([(bytearray(barr),)], [\"image\"])\n", + "\n", + "lime = ImageLIME()\\\n", + " .setModel(fitModel)\\\n", + " .setPredictionCol(\"leopard_prob\")\\\n", + " .setOutputCol(\"weights\")\\\n", + " .setInputCol(\"image\")\\\n", + " .setCellSize(100.0)\\\n", + " .setModifier(50.0)\\\n", + " .setNSamples(300)\n", + "\n", "result = lime.transform(test_subsample)" ], "outputs": [], diff --git a/notebooks/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.ipynb b/notebooks/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.ipynb index 6b11d3e4d1..d8d7469204 100644 --- a/notebooks/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.ipynb +++ b/notebooks/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.ipynb @@ -128,28 +128,28 @@ "cell_type": "code", "execution_count": null, "source": [ - "from pyspark.ml.classification 
import LogisticRegression, RandomForestClassifier, GBTClassifier\r\n", - "from synapse.ml.train import TrainClassifier\r\n", - "import itertools\r\n", - "\r\n", - "lrHyperParams = [0.05, 0.2]\r\n", - "logisticRegressions = [LogisticRegression(regParam = hyperParam)\r\n", - " for hyperParam in lrHyperParams]\r\n", - "lrmodels = [TrainClassifier(model=lrm, labelCol=\"label\").fit(ptrain)\r\n", - " for lrm in logisticRegressions]\r\n", - "\r\n", - "rfHyperParams = itertools.product([5, 10], [2, 3])\r\n", - "randomForests = [RandomForestClassifier(numTrees=hyperParam[0], maxDepth=hyperParam[1])\r\n", - " for hyperParam in rfHyperParams]\r\n", - "rfmodels = [TrainClassifier(model=rfm, labelCol=\"label\").fit(ptrain)\r\n", - " for rfm in randomForests]\r\n", - "\r\n", - "gbtHyperParams = itertools.product([8, 16], [2, 3])\r\n", - "gbtclassifiers = [GBTClassifier(maxBins=hyperParam[0], maxDepth=hyperParam[1])\r\n", - " for hyperParam in gbtHyperParams]\r\n", - "gbtmodels = [TrainClassifier(model=gbt, labelCol=\"label\").fit(ptrain)\r\n", - " for gbt in gbtclassifiers]\r\n", - "\r\n", + "from pyspark.ml.classification import LogisticRegression, RandomForestClassifier, GBTClassifier\n", + "from synapse.ml.train import TrainClassifier\n", + "import itertools\n", + "\n", + "lrHyperParams = [0.05, 0.2]\n", + "logisticRegressions = [LogisticRegression(regParam = hyperParam)\n", + " for hyperParam in lrHyperParams]\n", + "lrmodels = [TrainClassifier(model=lrm, labelCol=\"label\").fit(ptrain)\n", + " for lrm in logisticRegressions]\n", + "\n", + "rfHyperParams = itertools.product([5, 10], [2, 3])\n", + "randomForests = [RandomForestClassifier(numTrees=hyperParam[0], maxDepth=hyperParam[1])\n", + " for hyperParam in rfHyperParams]\n", + "rfmodels = [TrainClassifier(model=rfm, labelCol=\"label\").fit(ptrain)\n", + " for rfm in randomForests]\n", + "\n", + "gbtHyperParams = itertools.product([8, 16], [2, 3])\n", + "gbtclassifiers = [GBTClassifier(maxBins=hyperParam[0], maxDepth=hyperParam[1])\n", + " for hyperParam in gbtHyperParams]\n", + "gbtmodels = [TrainClassifier(model=gbt, labelCol=\"label\").fit(ptrain)\n", + " for gbt in gbtclassifiers]\n", + "\n", "trainedModels = lrmodels + rfmodels + gbtmodels" ], "outputs": [], @@ -166,10 +166,10 @@ "cell_type": "code", "execution_count": null, "source": [ - "from synapse.ml.automl import FindBestModel\r\n", - "bestModel = FindBestModel(evaluationMetric=\"AUC\", models=trainedModels).fit(ptest)\r\n", - "bestModel.getRocCurve().show()\r\n", - "bestModel.getBestModelMetrics().show()\r\n", + "from synapse.ml.automl import FindBestModel\n", + "bestModel = FindBestModel(evaluationMetric=\"AUC\", models=trainedModels).fit(ptest)\n", + "bestModel.getRocCurve().show()\n", + "bestModel.getBestModelMetrics().show()\n", "bestModel.getAllModelMetrics().show()" ], "outputs": [], @@ -186,12 +186,12 @@ "cell_type": "code", "execution_count": null, "source": [ - "from synapse.ml.train import ComputeModelStatistics\r\n", - "predictions = bestModel.transform(pvalidation)\r\n", - "metrics = ComputeModelStatistics().transform(predictions)\r\n", - "print(\"Best model's accuracy on validation set = \"\r\n", - " + \"{0:.2f}%\".format(metrics.first()[\"accuracy\"] * 100))\r\n", - "print(\"Best model's AUC on validation set = \"\r\n", + "from synapse.ml.train import ComputeModelStatistics\n", + "predictions = bestModel.transform(pvalidation)\n", + "metrics = ComputeModelStatistics().transform(predictions)\n", + "print(\"Best model's accuracy on validation set = \"\n", + " + 
\"{0:.2f}%\".format(metrics.first()[\"accuracy\"] * 100))\n", + "print(\"Best model's AUC on validation set = \"\n", " + \"{0:.2f}%\".format(metrics.first()[\"AUC\"] * 100))" ], "outputs": [], diff --git a/notebooks/examples/text_analytics/TextAnalytics - Amazon Book Reviews.ipynb b/notebooks/examples/text_analytics/TextAnalytics - Amazon Book Reviews.ipynb index 520552d039..dde7120492 100644 --- a/notebooks/examples/text_analytics/TextAnalytics - Amazon Book Reviews.ipynb +++ b/notebooks/examples/text_analytics/TextAnalytics - Amazon Book Reviews.ipynb @@ -54,9 +54,9 @@ "cell_type": "code", "execution_count": null, "source": [ - "from synapse.ml.featurize.text import TextFeaturizer\r\n", - "textFeaturizer = TextFeaturizer() \\\r\n", - " .setInputCol(\"text\").setOutputCol(\"features\") \\\r\n", + "from synapse.ml.featurize.text import TextFeaturizer\n", + "textFeaturizer = TextFeaturizer() \\\n", + " .setInputCol(\"text\").setOutputCol(\"features\") \\\n", " .setUseStopWordsRemover(True).setUseIDF(True).setMinDocFreq(5).setNumFeatures(1 << 16).fit(data)" ], "outputs": [], @@ -102,13 +102,13 @@ "cell_type": "code", "execution_count": null, "source": [ - "train, test, validation = processedData.randomSplit([0.60, 0.20, 0.20])\r\n", - "from pyspark.ml.classification import LogisticRegression\r\n", - "\r\n", - "lrHyperParams = [0.05, 0.1, 0.2, 0.4]\r\n", - "logisticRegressions = [LogisticRegression(regParam = hyperParam) for hyperParam in lrHyperParams]\r\n", - "\r\n", - "from synapse.ml.train import TrainClassifier\r\n", + "train, test, validation = processedData.randomSplit([0.60, 0.20, 0.20])\n", + "from pyspark.ml.classification import LogisticRegression\n", + "\n", + "lrHyperParams = [0.05, 0.1, 0.2, 0.4]\n", + "logisticRegressions = [LogisticRegression(regParam = hyperParam) for hyperParam in lrHyperParams]\n", + "\n", + "from synapse.ml.train import TrainClassifier\n", "lrmodels = [TrainClassifier(model=lrm, labelCol=\"label\").fit(train) for lrm in logisticRegressions]" ], "outputs": [], @@ -125,11 +125,11 @@ "cell_type": "code", "execution_count": null, "source": [ - "from synapse.ml.automl import FindBestModel, BestModel\r\n", - "bestModel = FindBestModel(evaluationMetric=\"AUC\", models=lrmodels).fit(test)\r\n", - "bestModel.getRocCurve().show()\r\n", - "bestModel.getBestModelMetrics().show()\r\n", - "bestModel.getAllModelMetrics().show()\r\n" + "from synapse.ml.automl import FindBestModel, BestModel\n", + "bestModel = FindBestModel(evaluationMetric=\"AUC\", models=lrmodels).fit(test)\n", + "bestModel.getRocCurve().show()\n", + "bestModel.getBestModelMetrics().show()\n", + "bestModel.getAllModelMetrics().show()\n" ], "outputs": [], "metadata": {} @@ -145,10 +145,10 @@ "cell_type": "code", "execution_count": null, "source": [ - "from synapse.ml.train import ComputeModelStatistics\r\n", - "predictions = bestModel.transform(validation)\r\n", - "metrics = ComputeModelStatistics().transform(predictions)\r\n", - "print(\"Best model's accuracy on validation set = \"\r\n", + "from synapse.ml.train import ComputeModelStatistics\n", + "predictions = bestModel.transform(validation)\n", + "metrics = ComputeModelStatistics().transform(predictions)\n", + "print(\"Best model's accuracy on validation set = \"\n", " + \"{0:.2f}%\".format(metrics.first()[\"accuracy\"] * 100))" ], "outputs": [], diff --git a/notebooks/features/CognitiveServices - Overview.ipynb b/notebooks/features/CognitiveServices - Overview.ipynb index dcd0975e0e..7e4b154126 100644 --- a/notebooks/features/CognitiveServices - 
Overview.ipynb +++ b/notebooks/features/CognitiveServices - Overview.ipynb @@ -114,13 +114,13 @@ "cell_type": "code", "execution_count": null, "source": [ - "from pyspark.sql.functions import udf, col\r\n", - "from synapse.ml.io.http import HTTPTransformer, http_udf\r\n", - "from requests import Request\r\n", - "from pyspark.sql.functions import lit\r\n", - "from pyspark.ml import PipelineModel\r\n", - "from pyspark.sql.functions import col\r\n", - "import os\r\n" + "from pyspark.sql.functions import udf, col\n", + "from synapse.ml.io.http import HTTPTransformer, http_udf\n", + "from requests import Request\n", + "from pyspark.sql.functions import lit\n", + "from pyspark.ml import PipelineModel\n", + "from pyspark.sql.functions import col\n", + "import os\n" ], "outputs": [], "metadata": {} @@ -150,15 +150,15 @@ "cell_type": "code", "execution_count": null, "source": [ - "from synapse.ml.cognitive import *\r\n", - "\r\n", - "# A general Cognitive Services key for Text Analytics, Computer Vision and Form Recognizer (or use separate keys that belong to each service)\r\n", - "service_key = os.environ[\"COGNITIVE_SERVICE_KEY\"]\r\n", - "# A Bing Search v7 subscription key\r\n", - "bing_search_key = os.environ[\"BING_IMAGE_SEARCH_KEY\"]\r\n", - "# An Anomaly Dectector subscription key\r\n", - "anomaly_key = os.environ[\"ANOMALY_API_KEY\"]\r\n", - "# A Translator subscription key\r\n", + "from synapse.ml.cognitive import *\n", + "\n", + "# A general Cognitive Services key for Text Analytics, Computer Vision and Form Recognizer (or use separate keys that belong to each service)\n", + "service_key = os.environ[\"COGNITIVE_SERVICE_KEY\"]\n", + "# A Bing Search v7 subscription key\n", + "bing_search_key = os.environ[\"BING_IMAGE_SEARCH_KEY\"]\n", + "# An Anomaly Dectector subscription key\n", + "anomaly_key = os.environ[\"ANOMALY_API_KEY\"]\n", + "# A Translator subscription key\n", "translator_key = os.environ[\"TRANSLATOR_KEY\"]" ], "outputs": [], diff --git a/notebooks/features/lightgbm/LightGBM - Overview.ipynb b/notebooks/features/lightgbm/LightGBM - Overview.ipynb index 14fa8c47ea..6ad877e062 100644 --- a/notebooks/features/lightgbm/LightGBM - Overview.ipynb +++ b/notebooks/features/lightgbm/LightGBM - Overview.ipynb @@ -184,7 +184,7 @@ "cell_type": "code", "execution_count": null, "source": [ - "from synapse.ml.lightgbm import LightGBMClassifier\r\n", + "from synapse.ml.lightgbm import LightGBMClassifier\n", "model = LightGBMClassifier(objective=\"binary\", featuresCol=\"features\", labelCol=\"Bankrupt?\", isUnbalance=True)" ], "outputs": [], @@ -210,14 +210,14 @@ "cell_type": "code", "execution_count": null, "source": [ - "from synapse.ml.lightgbm import LightGBMClassificationModel\r\n", - "\r\n", - "if os.environ.get(\"AZURE_SERVICE\", None) == \"Microsoft.ProjectArcadia\":\r\n", - " model.saveNativeModel(\"/models/lgbmclassifier.model\")\r\n", - " model = LightGBMClassificationModel.loadNativeModelFromFile(\"/models/lgbmclassifier.model\")\r\n", - "else:\r\n", - " model.saveNativeModel(\"/lgbmclassifier.model\")\r\n", - " model = LightGBMClassificationModel.loadNativeModelFromFile(\"/lgbmclassifier.model\")\r\n" + "from synapse.ml.lightgbm import LightGBMClassificationModel\n", + "\n", + "if os.environ.get(\"AZURE_SERVICE\", None) == \"Microsoft.ProjectArcadia\":\n", + " model.saveNativeModel(\"/models/lgbmclassifier.model\")\n", + " model = LightGBMClassificationModel.loadNativeModelFromFile(\"/models/lgbmclassifier.model\")\n", + "else:\n", + " 
model.saveNativeModel(\"/lgbmclassifier.model\")\n", + " model = LightGBMClassificationModel.loadNativeModelFromFile(\"/lgbmclassifier.model\")\n" ], "outputs": [], "metadata": {} diff --git a/notebooks/features/onnx/ONNX - Inference on Spark.ipynb b/notebooks/features/onnx/ONNX - Inference on Spark.ipynb index 4b60a999a4..44e0b3dd54 100644 --- a/notebooks/features/onnx/ONNX - Inference on Spark.ipynb +++ b/notebooks/features/onnx/ONNX - Inference on Spark.ipynb @@ -46,35 +46,35 @@ "cell_type": "code", "execution_count": null, "source": [ - "from pyspark.ml.feature import VectorAssembler\r\n", - "from synapse.ml.lightgbm import LightGBMClassifier\r\n", - "\r\n", - "feature_cols = df.columns[1:]\r\n", - "featurizer = VectorAssembler(\r\n", - " inputCols=feature_cols,\r\n", - " outputCol='features'\r\n", - ")\r\n", - "\r\n", - "train_data = featurizer.transform(df)['Bankrupt?', 'features']\r\n", - "\r\n", - "model = (\r\n", - " LightGBMClassifier(featuresCol=\"features\", labelCol=\"Bankrupt?\")\r\n", - " .setEarlyStoppingRound(300)\r\n", - " .setLambdaL1(0.5)\r\n", - " .setNumIterations(1000)\r\n", - " .setNumThreads(-1)\r\n", - " .setMaxDeltaStep(0.5)\r\n", - " .setNumLeaves(31)\r\n", - " .setMaxDepth(-1)\r\n", - " .setBaggingFraction(0.7)\r\n", - " .setFeatureFraction(0.7)\r\n", - " .setBaggingFreq(2)\r\n", - " .setObjective(\"binary\")\r\n", - " .setIsUnbalance(True)\r\n", - " .setMinSumHessianInLeaf(20)\r\n", - " .setMinGainToSplit(0.01)\r\n", - ")\r\n", - "\r\n", + "from pyspark.ml.feature import VectorAssembler\n", + "from synapse.ml.lightgbm import LightGBMClassifier\n", + "\n", + "feature_cols = df.columns[1:]\n", + "featurizer = VectorAssembler(\n", + " inputCols=feature_cols,\n", + " outputCol='features'\n", + ")\n", + "\n", + "train_data = featurizer.transform(df)['Bankrupt?', 'features']\n", + "\n", + "model = (\n", + " LightGBMClassifier(featuresCol=\"features\", labelCol=\"Bankrupt?\")\n", + " .setEarlyStoppingRound(300)\n", + " .setLambdaL1(0.5)\n", + " .setNumIterations(1000)\n", + " .setNumThreads(-1)\n", + " .setMaxDeltaStep(0.5)\n", + " .setNumLeaves(31)\n", + " .setMaxDepth(-1)\n", + " .setBaggingFraction(0.7)\n", + " .setFeatureFraction(0.7)\n", + " .setBaggingFreq(2)\n", + " .setObjective(\"binary\")\n", + " .setIsUnbalance(True)\n", + " .setMinSumHessianInLeaf(20)\n", + " .setMinGainToSplit(0.01)\n", + ")\n", + "\n", "model = model.fit(train_data)" ], "outputs": [], @@ -119,11 +119,11 @@ "cell_type": "code", "execution_count": null, "source": [ - "from synapse.ml.onnx import ONNXModel\r\n", - "\r\n", - "onnx_ml = ONNXModel().setModelPayload(model_payload_ml)\r\n", - "\r\n", - "print(\"Model inputs:\" + str(onnx_ml.getModelInputs()))\r\n", + "from synapse.ml.onnx import ONNXModel\n", + "\n", + "onnx_ml = ONNXModel().setModelPayload(model_payload_ml)\n", + "\n", + "print(\"Model inputs:\" + str(onnx_ml.getModelInputs()))\n", "print(\"Model outputs:\" + str(onnx_ml.getModelOutputs()))" ], "outputs": [], diff --git a/notebooks/features/responsible_ai/Interpretability - Image Explainers.ipynb b/notebooks/features/responsible_ai/Interpretability - Image Explainers.ipynb index da23a4edb7..aaabe25faf 100644 --- a/notebooks/features/responsible_ai/Interpretability - Image Explainers.ipynb +++ b/notebooks/features/responsible_ai/Interpretability - Image Explainers.ipynb @@ -15,20 +15,20 @@ "cell_type": "code", "execution_count": null, "source": [ - "from synapse.ml.explainers import *\r\n", - "from synapse.ml.onnx import ONNXModel\r\n", - "from synapse.ml.opencv import 
ImageTransformer\r\n", - "from synapse.ml.io import *\r\n", - "from pyspark.ml import Pipeline\r\n", - "from pyspark.ml.classification import LogisticRegression\r\n", - "from pyspark.ml.feature import StringIndexer\r\n", - "from pyspark.sql.functions import *\r\n", - "from pyspark.sql.types import *\r\n", - "import numpy as np\r\n", - "import pyspark\r\n", - "import urllib.request\r\n", - "import matplotlib.pyplot as plt\r\n", - "import PIL, io\r\n", + "from synapse.ml.explainers import *\n", + "from synapse.ml.onnx import ONNXModel\n", + "from synapse.ml.opencv import ImageTransformer\n", + "from synapse.ml.io import *\n", + "from pyspark.ml import Pipeline\n", + "from pyspark.ml.classification import LogisticRegression\n", + "from pyspark.ml.feature import StringIndexer\n", + "from pyspark.sql.functions import *\n", + "from pyspark.sql.types import *\n", + "import numpy as np\n", + "import pyspark\n", + "import urllib.request\n", + "import matplotlib.pyplot as plt\n", + "import PIL, io\n", "from PIL import Image\r\n", "\r\n", "vec_slice = udf(lambda vec, indices: (vec.toArray())[indices].tolist(), ArrayType(FloatType()))\r\n", diff --git a/notebooks/features/vw/Vowpal Wabbit - Overview.ipynb b/notebooks/features/vw/Vowpal Wabbit - Overview.ipynb index bf560d9c69..46ac2eb3c7 100644 --- a/notebooks/features/vw/Vowpal Wabbit - Overview.ipynb +++ b/notebooks/features/vw/Vowpal Wabbit - Overview.ipynb @@ -562,8 +562,8 @@ }, { "source": [ - "## Quantile Regression for Drug Discovery with VowpalWabbitRegressor\r\n", - "\r\n", + "## Quantile Regression for Drug Discovery with VowpalWabbitRegressor\n", + "\n", "" ], "cell_type": "markdown", diff --git a/website/versioned_docs/version-0.9.1/examples/AzureSearchIndex - Met Artworks.md b/website/versioned_docs/version-0.9.1/examples/AzureSearchIndex - Met Artworks.md index e045e82d48..386447e8e2 100644 --- a/website/versioned_docs/version-0.9.1/examples/AzureSearchIndex - Met Artworks.md +++ b/website/versioned_docs/version-0.9.1/examples/AzureSearchIndex - Met Artworks.md @@ -50,33 +50,20 @@ data = spark.read\ ```python from synapse.ml.cognitive import AnalyzeImage - from synapse.ml.stages import SelectColumns - #define pipeline - describeImage = (AnalyzeImage() - .setSubscriptionKey(VISION_API_KEY) - .setLocation("eastus") - .setImageUrlCol("PrimaryImageUrl") - .setOutputCol("RawImageDescription") - .setErrorCol("Errors") - .setVisualFeatures(["Categories", "Description", "Faces", "ImageType", "Color", "Adult"]) - .setConcurrency(5)) - - df2 = describeImage.transform(data)\ - .select("*", "RawImageDescription.*").drop("Errors", "RawImageDescription") ``` @@ -89,15 +76,10 @@ Before writing the results to a Search Index, you must define a schema which mus from synapse.ml.cognitive import * df2.writeToAzureSearch( - subscriptionKey=AZURE_SEARCH_KEY, - actionCol="searchAction", - serviceName=search_service, - indexName=search_index, - keyCol="ObjectID") ``` diff --git a/website/versioned_docs/version-0.9.1/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.md b/website/versioned_docs/version-0.9.1/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.md index b191225293..59b5b59fa9 100644 --- a/website/versioned_docs/version-0.9.1/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.md +++ b/website/versioned_docs/version-0.9.1/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.md @@ -134,59 +134,34 @@ train, test = images.randomSplit([.7,.3], seed=1) ```python from pyspark.ml 
import Pipeline - from pyspark.ml.feature import StringIndexer - from pyspark.ml.classification import LogisticRegression - from pyspark.sql.functions import udf - from synapse.ml.downloader import ModelDownloader - from synapse.ml.cntk import ImageFeaturizer - from synapse.ml.stages import UDFTransformer - from pyspark.sql.types import * - def getIndex(row): - return float(row[1]) - if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - network = ModelDownloader(spark, "abfss://synapse@mmlsparkeuap.dfs.core.windows.net/models/").downloadByName("ResNet50") - else: - network = ModelDownloader(spark, "dbfs:/Models/").downloadByName("ResNet50") - - model = Pipeline(stages=[ - StringIndexer(inputCol = "labels", outputCol="index"), - ImageFeaturizer(inputCol="image", outputCol="features", cutOutputLayers=1).setModel(network), - LogisticRegression(maxIter=5, labelCol="index", regParam=10.0), - UDFTransformer()\ - .setUDF(udf(getIndex, DoubleType()))\ - .setInputCol("probability")\ - .setOutputCol("leopard_prob") - ]) - - fitModel = model.fit(train) ``` @@ -195,60 +170,35 @@ fitModel = model.fit(train) ```python def plotConfusionMatrix(df, label, prediction, classLabels): - from synapse.ml.plot import confusionMatrix - import matplotlib.pyplot as plt - fig = plt.figure(figsize=(4.5, 4.5)) - confusionMatrix(df, label, prediction, classLabels) - display(fig) - - if os.environ.get("AZURE_SERVICE", None) != "Microsoft.ProjectArcadia": - plotConfusionMatrix(fitModel.transform(test), "index", "prediction", fitModel.stages[0].labels) ``` ```python import urllib.request - from synapse.ml.lime import ImageLIME - - test_image_url = "https://mmlspark.blob.core.windows.net/graphics/SnowLeopardAD/snow_leopard1.jpg" - with urllib.request.urlopen(test_image_url) as url: - barr = url.read() - test_subsample = spark.createDataFrame([(bytearray(barr),)], ["image"]) - - lime = ImageLIME()\ - .setModel(fitModel)\ - .setPredictionCol("leopard_prob")\ - .setOutputCol("weights")\ - .setInputCol("image")\ - .setCellSize(100.0)\ - .setModifier(50.0)\ - .setNSamples(300) - - result = lime.transform(test_subsample) ``` diff --git a/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.md b/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.md index d88de1c922..2f9bb154ef 100644 --- a/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.md +++ b/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.md @@ -73,45 +73,25 @@ Generate several models with different parameters from the training data. 
```python from pyspark.ml.classification import LogisticRegression, RandomForestClassifier, GBTClassifier - from synapse.ml.train import TrainClassifier - import itertools - - lrHyperParams = [0.05, 0.2] - logisticRegressions = [LogisticRegression(regParam = hyperParam) - for hyperParam in lrHyperParams] - lrmodels = [TrainClassifier(model=lrm, labelCol="label").fit(ptrain) - for lrm in logisticRegressions] - - rfHyperParams = itertools.product([5, 10], [2, 3]) - randomForests = [RandomForestClassifier(numTrees=hyperParam[0], maxDepth=hyperParam[1]) - for hyperParam in rfHyperParams] - rfmodels = [TrainClassifier(model=rfm, labelCol="label").fit(ptrain) - for rfm in randomForests] - - gbtHyperParams = itertools.product([8, 16], [2, 3]) - gbtclassifiers = [GBTClassifier(maxBins=hyperParam[0], maxDepth=hyperParam[1]) - for hyperParam in gbtHyperParams] - gbtmodels = [TrainClassifier(model=gbt, labelCol="label").fit(ptrain) - for gbt in gbtclassifiers] @@ -124,13 +104,9 @@ Find the best model for the given test dataset. ```python from synapse.ml.automl import FindBestModel - bestModel = FindBestModel(evaluationMetric="AUC", models=trainedModels).fit(ptest) - bestModel.getRocCurve().show() - bestModel.getBestModelMetrics().show() - bestModel.getAllModelMetrics().show() ``` @@ -139,16 +115,10 @@ Get the accuracy from the validation dataset. ```python from synapse.ml.train import ComputeModelStatistics - predictions = bestModel.transform(pvalidation) - metrics = ComputeModelStatistics().transform(predictions) - print("Best model's accuracy on validation set = " - + "{0:.2f}%".format(metrics.first()["accuracy"] * 100)) - print("Best model's AUC on validation set = " - + "{0:.2f}%".format(metrics.first()["AUC"] * 100)) ``` diff --git a/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews.md b/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews.md index ca9e1b635f..5aaac3127a 100644 --- a/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews.md +++ b/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews.md @@ -34,11 +34,8 @@ to generate 2²⁰ sparse features. ```python from synapse.ml.featurize.text import TextFeaturizer - textFeaturizer = TextFeaturizer() \ - .setInputCol("text").setOutputCol("features") \ - .setUseStopWordsRemover(True).setUseIDF(True).setMinDocFreq(5).setNumFeatures(1 << 16).fit(data) ``` @@ -63,19 +60,12 @@ Train several Logistic Regression models with different regularizations. ```python train, test, validation = processedData.randomSplit([0.60, 0.20, 0.20]) - from pyspark.ml.classification import LogisticRegression - - lrHyperParams = [0.05, 0.1, 0.2, 0.4] - logisticRegressions = [LogisticRegression(regParam = hyperParam) for hyperParam in lrHyperParams] - - from synapse.ml.train import TrainClassifier - lrmodels = [TrainClassifier(model=lrm, labelCol="label").fit(train) for lrm in logisticRegressions] ``` @@ -84,16 +74,10 @@ Find the model with the best AUC on the test set. ```python from synapse.ml.automl import FindBestModel, BestModel - bestModel = FindBestModel(evaluationMetric="AUC", models=lrmodels).fit(test) - bestModel.getRocCurve().show() - bestModel.getBestModelMetrics().show() - bestModel.getAllModelMetrics().show() - - ``` Use the optimized `ComputeModelStatistics` API to find the model accuracy. @@ -101,12 +85,8 @@ Use the optimized `ComputeModelStatistics` API to find the model accuracy. 
```python from synapse.ml.train import ComputeModelStatistics - predictions = bestModel.transform(validation) - metrics = ComputeModelStatistics().transform(predictions) - print("Best model's accuracy on validation set = " - + "{0:.2f}%".format(metrics.first()["accuracy"] * 100)) ``` diff --git a/website/versioned_docs/version-0.9.1/features/CognitiveServices - Overview.md b/website/versioned_docs/version-0.9.1/features/CognitiveServices - Overview.md index 59694c4598..ce1c16acc0 100644 --- a/website/versioned_docs/version-0.9.1/features/CognitiveServices - Overview.md +++ b/website/versioned_docs/version-0.9.1/features/CognitiveServices - Overview.md @@ -85,20 +85,12 @@ To get started, we'll need to add this code to the project: ```python from pyspark.sql.functions import udf, col - from synapse.ml.io.http import HTTPTransformer, http_udf - from requests import Request - from pyspark.sql.functions import lit - from pyspark.ml import PipelineModel - from pyspark.sql.functions import col - import os - - ``` @@ -122,22 +114,13 @@ if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": ```python from synapse.ml.cognitive import * - - # A general Cognitive Services key for Text Analytics, Computer Vision and Form Recognizer (or use separate keys that belong to each service) - service_key = os.environ["COGNITIVE_SERVICE_KEY"] - # A Bing Search v7 subscription key - bing_search_key = os.environ["BING_IMAGE_SEARCH_KEY"] - # An Anomaly Dectector subscription key - anomaly_key = os.environ["ANOMALY_API_KEY"] - # A Translator subscription key - translator_key = os.environ["TRANSLATOR_KEY"] ``` diff --git a/website/versioned_docs/version-0.9.1/features/lightgbm/LightGBM - Overview.md b/website/versioned_docs/version-0.9.1/features/lightgbm/LightGBM - Overview.md index 6802c3b748..e86275b4d7 100644 --- a/website/versioned_docs/version-0.9.1/features/lightgbm/LightGBM - Overview.md +++ b/website/versioned_docs/version-0.9.1/features/lightgbm/LightGBM - Overview.md @@ -116,21 +116,13 @@ By calling "saveNativeModel", it allows you to extract the underlying lightGBM m ```python from synapse.ml.lightgbm import LightGBMClassificationModel - - if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - model.saveNativeModel("/models/lgbmclassifier.model") - model = LightGBMClassificationModel.loadNativeModelFromFile("/models/lgbmclassifier.model") - else: - model.saveNativeModel("/lgbmclassifier.model") - model = LightGBMClassificationModel.loadNativeModelFromFile("/lgbmclassifier.model") - ``` #### Feature Importances Visualization diff --git a/website/versioned_docs/version-0.9.1/features/onnx/ONNX - Inference on Spark.md b/website/versioned_docs/version-0.9.1/features/onnx/ONNX - Inference on Spark.md index 1394a88934..86a9d0df7a 100644 --- a/website/versioned_docs/version-0.9.1/features/onnx/ONNX - Inference on Spark.md +++ b/website/versioned_docs/version-0.9.1/features/onnx/ONNX - Inference on Spark.md @@ -30,63 +30,34 @@ Use LightGBM to train a model ```python from pyspark.ml.feature import VectorAssembler - from synapse.ml.lightgbm import LightGBMClassifier - - feature_cols = df.columns[1:] - featurizer = VectorAssembler( - inputCols=feature_cols, - outputCol='features' - ) - - train_data = featurizer.transform(df)['Bankrupt?', 'features'] - - model = ( - LightGBMClassifier(featuresCol="features", labelCol="Bankrupt?") - .setEarlyStoppingRound(300) - .setLambdaL1(0.5) - .setNumIterations(1000) - .setNumThreads(-1) - .setMaxDeltaStep(0.5) - .setNumLeaves(31) - 
.setMaxDepth(-1) - .setBaggingFraction(0.7) - .setFeatureFraction(0.7) - .setBaggingFreq(2) - .setObjective("binary") - .setIsUnbalance(True) - .setMinSumHessianInLeaf(20) - .setMinGainToSplit(0.01) - ) - - model = model.fit(train_data) ``` @@ -115,14 +86,8 @@ Load the ONNX payload into an `ONNXModel`, and inspect the model inputs and outp ```python from synapse.ml.onnx import ONNXModel - - onnx_ml = ONNXModel().setModelPayload(model_payload_ml) - - - print("Model inputs:" + str(onnx_ml.getModelInputs())) - print("Model outputs:" + str(onnx_ml.getModelOutputs())) ``` diff --git a/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md b/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md index 1051f28447..cda7d70d5d 100644 --- a/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md +++ b/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md @@ -12,87 +12,49 @@ First we import the packages and define some UDFs and a plotting function we wil ```python from synapse.ml.explainers import * - from synapse.ml.onnx import ONNXModel - from synapse.ml.opencv import ImageTransformer - from synapse.ml.io import * - from pyspark.ml import Pipeline - from pyspark.ml.classification import LogisticRegression - from pyspark.ml.feature import StringIndexer - from pyspark.sql.functions import * - from pyspark.sql.types import * - import numpy as np - import pyspark - import urllib.request - import matplotlib.pyplot as plt - import PIL, io - from PIL import Image - - vec_slice = udf(lambda vec, indices: (vec.toArray())[indices].tolist(), ArrayType(FloatType())) - arg_top_k = udf(lambda vec, k: (-vec.toArray()).argsort()[:k].tolist(), ArrayType(IntegerType())) - def downloadBytes(url: str): - with urllib.request.urlopen(url) as url: - barr = url.read() - return barr - - + def rotate_color_channel(bgr_image_array, height, width, nChannels): - B, G, R, *_ = np.asarray(bgr_image_array).reshape(height, width, nChannels).T - rgb_image_array = np.array((R, G, B)).T - return rgb_image_array - def plot_superpixels(image_rgb_array, sp_clusters, weights, green_threshold=99): - superpixels = sp_clusters - green_value = np.percentile(weights, green_threshold) - img = Image.fromarray(image_rgb_array, mode='RGB').convert("RGBA") - image_array = np.asarray(img).copy() - for (sp, v) in zip(superpixels, weights): - if v > green_value: - for (x, y) in sp: - image_array[y, x, 1] = 255 - image_array[y, x, 3] = 200 - plt.clf() - plt.imshow(image_array) - display() ``` @@ -104,64 +66,34 @@ The result shows 39.6% probability of "violin" (889), and 38.4% probability of " ```python from synapse.ml.io import * - - image_df = spark.read.image().load("wasbs://publicwasb@mmlspark.blob.core.windows.net/explainers/images/david-lusvardi-dWcUncxocQY-unsplash.jpg") - display(image_df) - - # Rotate the image array from BGR into RGB channels for visualization later. 
- row = image_df.select("image.height", "image.width", "image.nChannels", "image.data").head() - locals().update(row.asDict()) - rgb_image_array = rotate_color_channel(data, height, width, nChannels) - - # Download the ONNX model - modelPayload = downloadBytes("https://mmlspark.blob.core.windows.net/publicwasb/ONNXModels/resnet50-v2-7.onnx") - - featurizer = ( - ImageTransformer(inputCol="image", outputCol="features") - .resize(224, True) - .centerCrop(224, 224) - .normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225], color_scale_factor = 1/255) - .setTensorElementType(FloatType()) - ) - - onnx = ( - ONNXModel() - .setModelPayload(modelPayload) - .setFeedDict({"data": "features"}) - .setFetchDict({"rawPrediction": "resnetv24_dense0_fwd"}) - .setSoftMaxDict({"rawPrediction": "probability"}) - .setMiniBatchSize(1) - ) - - model = Pipeline(stages=[featurizer, onnx]).fit(image_df) ``` From 8406dd46f16c41f8aee6d185cf1541b92d8d46c8 Mon Sep 17 00:00:00 2001 From: Mark Date: Tue, 9 Nov 2021 18:48:56 -0500 Subject: [PATCH 14/40] doc: add form demo and update website --- ...on - Adult Census with Vowpal Wabbit.ipynb | 0 .../Classification - Adult Census.ipynb | 0 ...ication - Before and After SynapseML.ipynb | 0 ...Twitter Sentiment with Vowpal Wabbit.ipynb | 0 ...eServices - Celebrity Quote Analysis.ipynb | 0 ...ultilingual Search Engine from Forms.ipynb | 1 + .../CognitiveServices - Overview.ipynb | 0 ...iveServices - Predictive Maintenance.ipynb | 0 ...rk - Working with Arbitrary Web APIs.ipynb | 81 --------- ...nCV - Pipeline Image Transformations.ipynb | 0 .../AzureSearchIndex - Met Artworks.ipynb | 0 ...lKNN - Exploring Art Across Cultures.ipynb | 0 ...CyberML - Anomalous Access Detection.ipynb | 0 ...g - BiLSTM Medical Entity Extraction.ipynb | 0 ...ning - CIFAR10 Convolutional Network.ipynb | 0 ...arning - Flower Image Classification.ipynb | 0 .../DeepLearning - Transfer Learning.ipynb | 0 ...meterTuning - Fighting Breast Cancer.ipynb | 0 ... - Amazon Book Reviews with Word2Vec.ipynb | 0 .../TextAnalytics - Amazon Book Reviews.ipynb | 0 .../Regression - Auto Imports.ipynb | 0 ...on - Flight Delays with DataCleaning.ipynb | 0 .../Regression - Flight Delays.ipynb | 0 ...it vs. LightGBM vs. 
Linear Regressor.ipynb | 0 ...alanceAnalysis - Adult Census Income.ipynb | 0 ...rpretability - Explanation Dashboard.ipynb | 0 ...retability - Snow Leopard Detection.ipynb} | 0 ...pretability - Tabular SHAP explainer.ipynb | 0 .../Interpretability - Text Explainers.ipynb | 0 website/docs/examples/about.md | 57 ------- website/docs/features/http/about.md | 161 ------------------ website/sidebars.js | 80 +++------ website/src/pages/index.js | 2 +- website/src/plugins/examples/index.js | 12 +- .../AzureSearchIndex - Met Artworks.md | 18 ++ ...terpretability - Snow Leopard Detection.md | 50 ++++++ ...ics - Amazon Book Reviews with Word2Vec.md | 30 ++++ .../TextAnalytics - Amazon Book Reviews.md | 20 +++ .../features/CognitiveServices - Overview.md | 17 ++ .../features/lightgbm/LightGBM - Overview.md | 8 + .../onnx/ONNX - Inference on Spark.md | 35 ++++ .../Interpretability - Image Explainers.md | 70 +++++++- 42 files changed, 282 insertions(+), 360 deletions(-) rename notebooks/{examples => features}/classification/Classification - Adult Census with Vowpal Wabbit.ipynb (100%) rename notebooks/{examples => features}/classification/Classification - Adult Census.ipynb (100%) rename notebooks/{examples => features}/classification/Classification - Before and After SynapseML.ipynb (100%) rename notebooks/{examples => features}/classification/Classification - Twitter Sentiment with Vowpal Wabbit.ipynb (100%) rename notebooks/{examples => features}/cognitive_services/CognitiveServices - Celebrity Quote Analysis.ipynb (100%) create mode 100644 notebooks/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.ipynb rename notebooks/features/{ => cognitive_services}/CognitiveServices - Overview.ipynb (100%) rename notebooks/{examples => features}/cognitive_services/CognitiveServices - Predictive Maintenance.ipynb (100%) delete mode 100644 notebooks/features/http/HttpOnSpark - Working with Arbitrary Web APIs.ipynb rename notebooks/{examples => features/opencv}/OpenCV - Pipeline Image Transformations.ipynb (100%) rename notebooks/{examples => features/other}/AzureSearchIndex - Met Artworks.ipynb (100%) rename notebooks/{examples => features/other}/ConditionalKNN - Exploring Art Across Cultures.ipynb (100%) rename notebooks/{examples => features/other}/CyberML - Anomalous Access Detection.ipynb (100%) rename notebooks/{examples/deep_learning => features/other}/DeepLearning - BiLSTM Medical Entity Extraction.ipynb (100%) rename notebooks/{examples/deep_learning => features/other}/DeepLearning - CIFAR10 Convolutional Network.ipynb (100%) rename notebooks/{examples/deep_learning => features/other}/DeepLearning - Flower Image Classification.ipynb (100%) rename notebooks/{examples/deep_learning => features/other}/DeepLearning - Transfer Learning.ipynb (100%) rename notebooks/{examples => features/other}/HyperParameterTuning - Fighting Breast Cancer.ipynb (100%) rename notebooks/{examples/text_analytics => features/other}/TextAnalytics - Amazon Book Reviews with Word2Vec.ipynb (100%) rename notebooks/{examples/text_analytics => features/other}/TextAnalytics - Amazon Book Reviews.ipynb (100%) rename notebooks/{examples => features}/regression/Regression - Auto Imports.ipynb (100%) rename notebooks/{examples => features}/regression/Regression - Flight Delays with DataCleaning.ipynb (100%) rename notebooks/{examples => features}/regression/Regression - Flight Delays.ipynb (100%) rename notebooks/{examples => features}/regression/Regression - Vowpal Wabbit vs. LightGBM vs. 
Linear Regressor.ipynb (100%) rename notebooks/{examples => features}/responsible_ai/DataBalanceAnalysis - Adult Census Income.ipynb (100%) rename notebooks/{examples => features}/responsible_ai/Interpretability - Explanation Dashboard.ipynb (100%) rename notebooks/{examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.ipynb => features/responsible_ai/Interpretability - Snow Leopard Detection.ipynb} (100%) rename notebooks/{examples => features}/responsible_ai/Interpretability - Tabular SHAP explainer.ipynb (100%) rename notebooks/{examples => features}/responsible_ai/Interpretability - Text Explainers.ipynb (100%) delete mode 100644 website/docs/examples/about.md delete mode 100644 website/docs/features/http/about.md diff --git a/notebooks/examples/classification/Classification - Adult Census with Vowpal Wabbit.ipynb b/notebooks/features/classification/Classification - Adult Census with Vowpal Wabbit.ipynb similarity index 100% rename from notebooks/examples/classification/Classification - Adult Census with Vowpal Wabbit.ipynb rename to notebooks/features/classification/Classification - Adult Census with Vowpal Wabbit.ipynb diff --git a/notebooks/examples/classification/Classification - Adult Census.ipynb b/notebooks/features/classification/Classification - Adult Census.ipynb similarity index 100% rename from notebooks/examples/classification/Classification - Adult Census.ipynb rename to notebooks/features/classification/Classification - Adult Census.ipynb diff --git a/notebooks/examples/classification/Classification - Before and After SynapseML.ipynb b/notebooks/features/classification/Classification - Before and After SynapseML.ipynb similarity index 100% rename from notebooks/examples/classification/Classification - Before and After SynapseML.ipynb rename to notebooks/features/classification/Classification - Before and After SynapseML.ipynb diff --git a/notebooks/examples/classification/Classification - Twitter Sentiment with Vowpal Wabbit.ipynb b/notebooks/features/classification/Classification - Twitter Sentiment with Vowpal Wabbit.ipynb similarity index 100% rename from notebooks/examples/classification/Classification - Twitter Sentiment with Vowpal Wabbit.ipynb rename to notebooks/features/classification/Classification - Twitter Sentiment with Vowpal Wabbit.ipynb diff --git a/notebooks/examples/cognitive_services/CognitiveServices - Celebrity Quote Analysis.ipynb b/notebooks/features/cognitive_services/CognitiveServices - Celebrity Quote Analysis.ipynb similarity index 100% rename from notebooks/examples/cognitive_services/CognitiveServices - Celebrity Quote Analysis.ipynb rename to notebooks/features/cognitive_services/CognitiveServices - Celebrity Quote Analysis.ipynb diff --git a/notebooks/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.ipynb b/notebooks/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.ipynb new file mode 100644 index 0000000000..1e10a0d8b5 --- /dev/null +++ b/notebooks/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.ipynb @@ -0,0 +1 @@ +{"cells":[{"cell_type":"code","source":["import os\nkey = os.environ['VISION_API_KEY']\nsearch_key = os.environ['AZURE_SEARCH_KEY']\ntranslator_key = os.environ['TRANSLATOR_KEY']\n\nsearch_service = \"mmlspark-azure-search\"\nsearch_index = 
\"form-demo-index\""],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"05ebf79d-aa8e-4f8e-9105-6deeeb87e9a8"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["from pyspark.sql.functions import udf\nfrom pyspark.sql.types import StringType\n\ndef blob_to_url(blob):\n [prefix, postfix] = blob.split(\"@\")\n container = prefix.split(\"/\")[-1]\n split_postfix = postfix.split(\"/\")\n account = split_postfix[0]\n filepath = \"/\".join(split_postfix[1:])\n return \"https://{}/{}/{}\".format(account, container, filepath)\n\n\ndf2 = (spark.read.format(\"binaryFile\")\n .load(\"wasbs://ignite2021@mmlsparkdemo.blob.core.windows.net/forms/*\")\n .select(\"path\")\n .coalesce(24)\n .limit(10)\n .select(udf(blob_to_url, StringType())(\"path\").alias(\"url\"))\n .cache()\n )\n"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"61bbc0d2-e2fe-40b2-ba6c-6c24ef315c36"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["display(df2)"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"collapsed":false,"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"dc2a155d-6a61-49d3-bd91-0cb96d0f3d0f"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["displayHTML(\"\"\"\n\n\"\"\")"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"a6489a8f-6e8d-4358-9f75-6631340c19a4"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["from synapse.ml.cognitive import AnalyzeInvoices\n\nanalyzed_df = (AnalyzeInvoices()\n .setSubscriptionKey(key)\n .setLocation(\"eastus\")\n .setImageUrlCol(\"url\")\n .setOutputCol(\"invoices\")\n .setErrorCol(\"errors\")\n .setConcurrency(5)\n .transform(df2)\n 
.cache())\n"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"92a91f10-8698-4cca-9f8b-ecca146f0cf0"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["display(analyzed_df)"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"collapsed":false,"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"f3982d15-fe5f-4b98-82c2-d77e29877456"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["from synapse.ml.cognitive import FormOntologyLearner\n\norganized_df = (FormOntologyLearner()\n .setInputCol(\"invoices\")\n .setOutputCol(\"extracted\")\n .fit(analyzed_df.limit(10))\n .transform(analyzed_df)\n .select(\"url\", \"extracted.*\")\n .cache())"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"84954800-58ba-474c-8325-44f3ae08604a"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["display(organized_df)"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"collapsed":false,"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"f8cfd999-0a99-494c-a915-f3d56a9a9b7c"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["from pyspark.sql.functions import explode, col\nitemized_df = (organized_df\n .select(\"*\", explode(col(\"Items\")).alias(\"Item\"))\n .drop(\"Items\")\n .select(\"Item.*\", \"*\")\n 
.drop(\"Item\"))\n"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"5639512f-6649-46af-a6d3-e6e2b6e398fc"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["display(itemized_df)"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"collapsed":false,"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"64319bf8-a9c5-44cb-b8cd-d9743d00951d"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["display(itemized_df.where(col(\"ProductCode\") == 6))"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"collapsed":false,"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"cfc51726-0dfe-40f0-a889-59309fddaf64"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["from synapse.ml.cognitive import Translate\n\ntranslated_df = (Translate()\n .setSubscriptionKey(translator_key)\n .setLocation(\"eastus\")\n .setTextCol(\"Description\")\n .setErrorCol(\"TranslationError\")\n .setOutputCol(\"output\")\n .setToLanguage([\"zh-Hans\", \"fr\", \"ru\", \"cy\"])\n .setConcurrency(5)\n .transform(itemized_df)\n .withColumn(\"Translations\", col(\"output.translations\")[0])\n .drop(\"output\", \"TranslationError\")\n .cache())\n"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"95cb6caa-5c6d-42a0-98bf-ad672216ffca"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["display(translated_df)"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"collapsed":false,"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"8b3d78e0-87bd-43b4-b3ec-329fc9fbda7f"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["from synapse.ml.cognitive import *\nfrom pyspark.sql.functions import 
monotonically_increasing_id, lit\n\n(translated_df\n .withColumn(\"DocID\", monotonically_increasing_id().cast(\"string\"))\n .withColumn(\"SearchAction\", lit(\"upload\"))\n .writeToAzureSearch(\n subscriptionKey=search_key,\n actionCol=\"SearchAction\",\n serviceName=search_service,\n indexName=search_index,\n keyCol=\"DocID\")\n)\n"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"19cb88b7-9e68-4e95-b2bc-b89af76d2688"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["import requests\nurl = 'https://{}.search.windows.net/indexes/{}/docs/search?api-version=2019-05-06'.format(search_service, search_index)\nrequests.post(url, json={\"search\": \"door\"}, headers = {\"api-key\": search_key}).json()"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"5d2231a1-3231-450a-bea5-9924073a25e0"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":[""],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"e4674b70-04c1-4bc4-92ab-357968b80c9e"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0}],"metadata":{"language_info":{"name":"python"},"description":null,"save_output":true,"kernelspec":{"name":"synapse_pyspark","display_name":"Synapse PySpark"},"synapse_widget":{"version":"0.1","state":{"16075ac4-be11-498a-a42c-6186fa6b01d9":{"type":"Synapse.DataFrame","sync_state":{"table":{"rows":[{"12":"1120.0","8":"Mackenzie Gray","4":"Coffee Maker Red","11":"1198.4","9":"11","13":"78.4","16":"T.T TAILWIND TRADERS","5":"385.2","10":"22671","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice22671.pdf","1":"25.2","17":"Mackenzie Gray","14":"22, rue du Puits Dixme","0":"39","2":"2.0","7":"34, rue des Grands Champs Versailles","3":"200.0"},{"12":"1120.0","8":"Mackenzie Gray","4":"Celebrations C9","11":"1198.4","9":"11","13":"78.4","16":"T.T TAILWIND TRADERS","5":"10.7","10":"22671","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice22671.pdf","1":"0.7","17":"Mackenzie Gray","14":"22, rue du Puits Dixme","0":"31","2":"1.0","7":"34, rue des Grands Champs Versailles","3":"10.0"},{"12":"1120.0","8":"Mackenzie Gray","4":"Black Bathing System Classic 18 in. H x 60 in. 
W x 32.5","11":"1198.4","9":"11","13":"78.4","16":"T.T TAILWIND TRADERS","5":"802.5","10":"22671","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice22671.pdf","1":"52.5","17":"Mackenzie Gray","14":"22, rue du Puits Dixme","0":"12","2":"3.0","7":"34, rue des Grands Champs Versailles","3":"250.0"},{"12":"1208.5","4":"Blend Solid White Sheer Curtains","15":"Sarosgawk","11":"1293.1","9":"2021-10-05","13":"84.6","16":"T.T TAILWIND TRADERS","5":"235.4","10":"28073","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice28073.pdf","1":"15.4","14":"Maubeuge","0":"17","2":"2.0","18":"San Gabriel","7":"United States","3":"110.0"},{"12":"1208.5","4":"Rechargeable screwdriver with extra battery","15":"Sarosgawk","11":"1293.1","9":"2021-10-05","13":"84.6","16":"T.T TAILWIND TRADERS","5":"667.68","10":"28073","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice28073.pdf","1":"43.68","14":"Maubeuge","0":"56","2":"2.0","18":"San Gabriel","7":"United States","3":"312.0"},{"12":"1208.5","4":"Extractor Steal","15":"Sarosgawk","11":"1293.1","9":"2021-10-05","13":"84.6","16":"T.T TAILWIND TRADERS","5":"390.02","10":"28073","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice28073.pdf","1":"25.52","14":"Maubeuge","0":"40","2":"3.0","18":"San Gabriel","7":"United States","3":"135.0"},{"12":"750.0","8":"Villeneuve-d'Ascq","19":"Villeneuve-d'Ascq","4":"Big Metal Shelving","15":"Braeo","11":"802.5","9":"2021-03-12","13":"52.5","16":"T.T TAILWIND TRADERS","5":"317.79","10":"64808","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","1":"20.79","14":"Circle","0":"43","2":"3.0","18":"828, rue de Berri","7":"3923 Dew Drop","3":"99.0"},{"12":"750.0","8":"Villeneuve-d'Ascq","19":"Villeneuve-d'Ascq","4":"Metal Shelving","15":"Braeo","11":"802.5","9":"2021-03-12","13":"52.5","16":"T.T TAILWIND TRADERS","5":"288.9","10":"64808","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","1":"18.9","14":"Circle","0":"42","2":"3.0","18":"828, rue de Berri","7":"3923 Dew Drop","3":"90.0"},{"12":"750.0","8":"Villeneuve-d'Ascq","19":"Villeneuve-d'Ascq","4":"Measuring Tape","15":"Braeo","11":"802.5","9":"2021-03-12","13":"52.5","16":"T.T TAILWIND TRADERS","5":"131.61","10":"64808","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","1":"8.61","14":"Circle","0":"46","2":"1.0","18":"828, rue de Berri","7":"3923 Dew Drop","3":"123.0"},{"12":"750.0","8":"Villeneuve-d'Ascq","19":"Villeneuve-d'Ascq","4":"Curtain Rod 48 in","15":"Braeo","11":"802.5","9":"2021-03-12","13":"52.5","16":"T.T TAILWIND TRADERS","5":"64.2","10":"64808","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","1":"4.2","14":"Circle","0":"21","2":"3.0","18":"828, rue de Berri","7":"3923 Dew Drop","3":"25.0"},{"12":"1758.0","8":"Clinton Gutierrez","4":"Wood Table","11":"1881.06","13":"123.06","16":"T.T TAILWIND TRADERS","5":"1267.95","10":"67164","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","1":"82.95","17":"Dluhbio","0":"36","2":"3.0","18":"45000","7":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","3":"395.0"},{"12":"1758.0","8":"Clinton Gutierrez","4":"White Window","11":"1881.06","13":"123.06","16":"T.T TAILWIND 
TRADERS","5":"256.8","10":"67164","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","1":"16.8","17":"Dluhbio","0":"20","2":"2.0","18":"45000","7":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","3":"120.0"},{"12":"1758.0","8":"Clinton Gutierrez","4":"Indoor Kit Gardering","11":"1881.06","13":"123.06","16":"T.T TAILWIND TRADERS","5":"224.7","10":"67164","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","1":"14.7","17":"Dluhbio","0":"25","2":"3.0","18":"45000","7":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","3":"70.0"},{"12":"1758.0","8":"Clinton Gutierrez","4":"Measuring Tape","11":"1881.06","13":"123.06","16":"T.T TAILWIND TRADERS","5":"131.61","10":"67164","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","1":"8.61","17":"Dluhbio","0":"46","2":"1.0","18":"45000","7":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","3":"123.0"},{"12":"1217.2","8":"Ashlee Raje","4":"Bathing System Classic 18 in. H x 60 in. W x 32.5","15":"Alvotue","11":"1302.4","9":"October","13":"85.2","16":"T.T TAILWIND TRADERS","5":"577.8","10":"80110","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","1":"37.8","17":"Ashlee Raje","14":"4559 Loop Beaverton Oregon 97005 States","0":"9","2":"3.0","18":"Street","7":"Julpum","3":"200.0"},{"12":"1217.2","8":"Ashlee Raje","4":"Two red garden gnomes","15":"Alvotue","11":"1302.4","9":"October","13":"85.2","16":"T.T TAILWIND TRADERS","5":"295.32","10":"80110","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","1":"19.32","17":"Ashlee Raje","14":"4559 Loop Beaverton Oregon 97005 States","0":"59","2":"3.0","18":"Street","7":"Julpum","3":"92.0"},{"12":"1217.2","8":"Ashlee Raje","4":"Single red garden gnome","15":"Alvotue","11":"1302.4","9":"October","13":"85.2","16":"T.T TAILWIND TRADERS","5":"161.78","10":"80110","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","1":"10.58","17":"Ashlee Raje","14":"4559 Loop Beaverton Oregon 97005 States","0":"58","2":"3.0","18":"Street","7":"Julpum","3":"56.0"},{"12":"1217.2","8":"Ashlee Raje","4":"Artificial Tree","15":"Alvotue","11":"1302.4","9":"October","13":"85.2","16":"T.T TAILWIND TRADERS","5":"267.5","10":"80110","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","1":"17.5","17":"Ashlee Raje","14":"4559 Loop Beaverton Oregon 97005 States","0":"32","2":"1.0","18":"Street","7":"Julpum","3":"250.0"},{"12":"1469.8","8":"Pal","4":"Wood Table","15":"Ifiaeh","11":"1572.69","13":"102.89","16":"T.T TAILWIND TRADERS","5":"845.3","10":"71864","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","1":"55.3","17":"Willie","0":"36","2":"2.0","7":"27, place de Brazaville Roubaix Nord 59100 France","3":"395.0"},{"12":"1469.8","8":"Pal","4":"Rechargeable screwdriver with extra battery","15":"Ifiaeh","11":"1572.69","13":"102.89","16":"T.T TAILWIND TRADERS","5":"300.46","10":"71864","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","1":"19.66","17":"Willie","0":"56","2":"1.0","7":"27, place de Brazaville Roubaix Nord 59100 France","3":"312.0"},{"12":"1469.8","8":"Pal","4":"Bathroom Sink Faucet","15":"Ifiaeh","11":"1572.69","13":"102.89","16":"T.T TAILWIND 
TRADERS","5":"105.93","10":"71864","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","1":"6.93","17":"Willie","0":"14","2":"1.0","7":"27, place de Brazaville Roubaix Nord 59100 France","3":"99.0"},{"12":"1469.8","8":"Pal","4":"Hammer","15":"Ifiaeh","11":"1572.69","13":"102.89","16":"T.T TAILWIND TRADERS","5":"321.0","10":"71864","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","1":"21.0","17":"Willie","0":"48","2":"3.0","7":"27, place de Brazaville Roubaix Nord 59100 France","3":"100.0"},{"12":"758.0","8":"Misty Xie","4":"Screwdriver","15":"Rloefos","11":"811.06","9":"4","13":"53.06","16":"T.T TAILWIND TRADERS","5":"235.4","10":"80066","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","1":"15.4","17":"Misty Xie","14":"376 Amador Valley Blvd.","0":"49","2":"2.0","7":"6058 Hill Street","3":"110.0"},{"12":"758.0","8":"Misty Xie","4":"Yellow Rechargeable screwdriver","15":"Rloefos","11":"811.06","9":"4","13":"53.06","16":"T.T TAILWIND TRADERS","5":"267.5","10":"80066","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","1":"17.5","17":"Misty Xie","14":"376 Amador Valley Blvd.","0":"54","2":"1.0","7":"6058 Hill Street","3":"250.0"},{"12":"758.0","8":"Misty Xie","4":"Steel Passage Door Knob","15":"Rloefos","11":"811.06","9":"4","13":"53.06","16":"T.T TAILWIND TRADERS","5":"19.26","10":"80066","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","1":"1.26","17":"Misty Xie","14":"376 Amador Valley Blvd.","0":"22","2":"2.0","7":"6058 Hill Street","3":"10.0"},{"12":"758.0","8":"Misty Xie","4":"Extractor Steal","15":"Rloefos","11":"811.06","9":"4","13":"53.06","16":"T.T TAILWIND TRADERS","5":"288.9","10":"80066","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","1":"18.9","17":"Misty Xie","14":"376 Amador Valley Blvd.","0":"40","2":"2.0","7":"6058 Hill Street","3":"135.0"},{"12":"999.0","8":"Connie Liang","4":"Multi Function Drill","11":"1068.93","9":"2015-11-04","13":"69.93","16":"T.T TAILWIND TRADERS","5":"170.13","10":"83878","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2015/Invoice83878.pdf","1":"11.13","17":"Mrurc Potsdamer","0":"47","2":"1.0","18":"4SJ","7":"Straße 242 Stuttgart Saarland 70511 Germany","3":"159.0"},{"12":"999.0","8":"Connie Liang","4":"Stainless multi-tool plier","11":"1068.93","9":"2015-11-04","13":"69.93","16":"T.T TAILWIND TRADERS","5":"96.3","10":"83878","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2015/Invoice83878.pdf","1":"6.3","17":"Mrurc Potsdamer","0":"53","2":"1.0","18":"4SJ","7":"Straße 242 Stuttgart Saarland 70511 Germany","3":"90.0"},{"12":"999.0","8":"Connie Liang","4":"Artificial Tree","11":"1068.93","9":"2015-11-04","13":"69.93","16":"T.T TAILWIND TRADERS","5":"802.5","10":"83878","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2015/Invoice83878.pdf","1":"52.5","17":"Mrurc Potsdamer","0":"32","2":"3.0","18":"4SJ","7":"Straße 242 Stuttgart Saarland 70511 Germany","3":"250.0"},{"12":"488.0","8":"Colin Cai","4":"Big Metal Shelving","11":"522.16","9":"2016","13":"34.16","16":"T.T TAILWIND TRADERS","5":"211.86","10":"102247","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice102247.pdf","1":"13.86","17":"Colin Cai","14":"Phata 8858 V. 
Street London England W1Y 3RA United Kingdom","0":"43","2":"2.0","18":"80074","7":"Kampstr 9859","3":"99.0"},{"12":"488.0","8":"Colin Cai","4":"Gardering","11":"522.16","9":"2016","13":"34.16","16":"T.T TAILWIND TRADERS","5":"21.4","10":"102247","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice102247.pdf","1":"1.4","17":"Colin Cai","14":"Phata 8858 V. Street London England W1Y 3RA United Kingdom","0":"30","2":"2.0","18":"80074","7":"Kampstr 9859","3":"10.0"},{"12":"488.0","8":"Colin Cai","4":"Craftsman 100 ft. L x 5/8 in.","11":"522.16","9":"2016","13":"34.16","16":"T.T TAILWIND TRADERS","5":"288.9","10":"102247","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice102247.pdf","1":"18.9","17":"Colin Cai","14":"Phata 8858 V. Street London England W1Y 3RA United Kingdom","0":"26","2":"3.0","18":"80074","7":"Kampstr 9859","3":"100.0"},{"12":"614.2","8":"Roy","4":"One sat on shoe gnome","15":"Rgerlakmog","11":"657.19","9":"8","13":"42.99","16":"T.T TAILWIND TRADERS","5":"104.0","10":"92549","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","1":"6.8","14":"4186 Silver Oaks Pl. Newport Beach California 92625 United States","0":"61","2":"2.0","7":"2957 Tri-state Avenue Cambridge","3":"54.0"},{"12":"614.2","8":"Roy","4":"Refrigerator 1.7 cu. ft. 110 watts","15":"Rgerlakmog","11":"657.19","9":"8","13":"42.99","16":"T.T TAILWIND TRADERS","5":"428.0","10":"92549","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","1":"28.0","14":"4186 Silver Oaks Pl. Newport Beach California 92625 United States","0":"2","2":"2.0","7":"2957 Tri-state Avenue Cambridge","3":"200.0"},{"12":"614.2","8":"Roy","4":"Celebrations C9","15":"Rgerlakmog","11":"657.19","9":"8","13":"42.99","16":"T.T TAILWIND TRADERS","5":"28.89","10":"92549","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","1":"1.89","14":"4186 Silver Oaks Pl. Newport Beach California 92625 United States","0":"31","2":"3.0","7":"2957 Tri-state Avenue Cambridge","3":"10.0"},{"12":"614.2","8":"Roy","4":"Craftsman 100 ft. L x 5/8 in.","15":"Rgerlakmog","11":"657.19","9":"8","13":"42.99","16":"T.T TAILWIND TRADERS","5":"96.3","10":"92549","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","1":"6.3","14":"4186 Silver Oaks Pl. 
Newport Beach California 92625 United States","0":"26","2":"1.0","7":"2957 Tri-state Avenue Cambridge","3":"100.0"}],"schema":[{"key":"0","name":"ProductCode","type":"string"},{"key":"1","name":"Tax","type":"double"},{"key":"2","name":"Quantity","type":"double"},{"key":"3","name":"UnitPrice","type":"double"},{"key":"4","name":"Description","type":"string"},{"key":"5","name":"Amount","type":"double"},{"key":"6","name":"url","type":"string"},{"key":"7","name":"CustomerAddress","type":"string"},{"key":"8","name":"CustomerName","type":"string"},{"key":"9","name":"InvoiceDate","type":"string"},{"key":"10","name":"InvoiceId","type":"string"},{"key":"11","name":"InvoiceTotal","type":"double"},{"key":"12","name":"SubTotal","type":"double"},{"key":"13","name":"TotalTax","type":"double"},{"key":"14","name":"VendorAddress","type":"string"},{"key":"15","name":"VendorAddressRecipient","type":"string"},{"key":"16","name":"VendorName","type":"string"},{"key":"17","name":"CustomerAddressRecipient","type":"string"},{"key":"18","name":"ShippingAddress","type":"string"},{"key":"19","name":"ShippingAddressRecipient","type":"string"}]},"isSummary":false,"language":"scala"},"persist_state":{"view":{"type":"details","tableOptions":{},"chartOptions":{"seriesFieldKeys":["5"],"categoryFieldKeys":["4"],"isStacked":false,"aggregationType":"sum","chartType":"pie"}}}},"f0c0ce60-d8e5-439d-b9aa-e684071cfb57":{"type":"Synapse.DataFrame","sync_state":{"table":{"rows":[{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice22671.pdf"},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice28073.pdf"},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf"},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf"},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf"},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf"},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf"},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2015/Invoice83878.pdf"},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice102247.pdf"},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf"}],"schema":[{"key":"0","name":"url","type":"string"}]},"isSummary":false,"language":"scala"},"persist_state":{"view":{"type":"details","tableOptions":{},"chartOptions":{"seriesFieldKeys":["0"],"categoryFieldKeys":["0"],"isStacked":false,"aggregationType":"count","chartType":"bar"}}}},"f3cf4021-da8f-4614-89ea-b471037f0f6d":{"type":"Synapse.DataFrame","sync_state":{"table":{"rows":[{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice22671.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:18Z","lastUpdatedDateTime":"2021-10-26T22:39:23Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":4,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.1002,4.8178,1.4382,4.8178,1.4382,5.0833,1.1002,5.0833],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4382,4.8178,1.9128,4.8178,1.9128,5.0833,1.4382,5.0833],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9128,4.8178,4.3051,4.8178,4.3051,5.0833,1.9128,5.0833],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"b
oundingBox":[4.3051,4.8178,4.9356,4.8178,4.9356,5.0833,4.3051,5.0833],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9356,4.8178,5.6507,4.8178,5.6507,5.0833,4.9356,5.0833],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6507,4.8178,6.1383,4.8178,6.1383,5.0833,5.6507,5.0833],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.1383,4.8178,6.7428,4.8178,6.7363,5.0833,6.1383,5.0833],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7428,4.8178,7.4254,4.8241,7.4254,5.0833,6.7363,5.0833],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"39","boundingBox":[1.1002,5.0833,1.4382,5.0833,1.4382,5.3805,1.1002,5.3805]},{"rowIndex":1,"columnIndex":1,"text":"2","boundingBox":[1.4382,5.0833,1.9128,5.0833,1.9128,5.3805,1.4382,5.3805]},{"rowIndex":1,"columnIndex":2,"text":"Coffee Maker Red","boundingBox":[1.9128,5.0833,4.3051,5.0833,4.3051,5.3805,1.9128,5.3805]},{"rowIndex":1,"columnIndex":3,"text":"$200.00","boundingBox":[4.3051,5.0833,4.9356,5.0833,4.9356,5.3805,4.3051,5.3805]},{"rowIndex":1,"columnIndex":4,"text":"$40.00","boundingBox":[4.9356,5.0833,5.6507,5.0833,5.6507,5.3805,4.9356,5.3805]},{"rowIndex":1,"columnIndex":5,"text":"10%","boundingBox":[5.6507,5.0833,6.1383,5.0833,6.1383,5.3805,5.6507,5.3805]},{"rowIndex":1,"columnIndex":6,"text":"$25.20","boundingBox":[6.1383,5.0833,6.7363,5.0833,6.7363,5.3805,6.1383,5.3805]},{"rowIndex":1,"columnIndex":7,"text":"$385.20","boundingBox":[6.7363,5.0833,7.4254,5.0833,7.4254,5.3868,6.7363,5.3805]},{"rowIndex":2,"columnIndex":0,"text":"31","boundingBox":[1.1002,5.3805,1.4382,5.3805,1.4382,5.6839,1.1002,5.6839]},{"rowIndex":2,"columnIndex":1,"text":"1","boundingBox":[1.4382,5.3805,1.9128,5.3805,1.9128,5.6839,1.4382,5.6839]},{"rowIndex":2,"columnIndex":2,"text":"Celebrations C9","boundingBox":[1.9128,5.3805,4.3051,5.3805,4.3051,5.6839,1.9128,5.6839]},{"rowIndex":2,"columnIndex":3,"text":"$10.00","boundingBox":[4.3051,5.3805,4.9356,5.3805,4.9356,5.6839,4.3051,5.6839]},{"rowIndex":2,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.3805,5.6507,5.3805,5.6507,5.6839,4.9356,5.6839]},{"rowIndex":2,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.3805,6.1383,5.3805,6.1383,5.6839,5.6507,5.6839]},{"rowIndex":2,"columnIndex":6,"text":"$0.70","boundingBox":[6.1383,5.3805,6.7363,5.3805,6.7363,5.6839,6.1383,5.6839]},{"rowIndex":2,"columnIndex":7,"text":"$10.70","boundingBox":[6.7363,5.3805,7.4254,5.3868,7.4254,5.6839,6.7363,5.6839]},{"rowIndex":3,"columnIndex":0,"text":"12","boundingBox":[1.1002,5.6839,1.4382,5.6839,1.4382,6.1265,1.1002,6.1265]},{"rowIndex":3,"columnIndex":1,"text":"3","boundingBox":[1.4382,5.6839,1.9128,5.6839,1.9128,6.1265,1.4382,6.1265]},{"rowIndex":3,"columnIndex":2,"text":"Black Bathing System Classic 18 in. H x 60 in. 
W x 32.5","boundingBox":[1.9128,5.6839,4.3051,5.6839,4.3051,6.1265,1.9128,6.1265]},{"rowIndex":3,"columnIndex":3,"text":"$250.00","boundingBox":[4.3051,5.6839,4.9356,5.6839,4.9356,6.1265,4.3051,6.1265]},{"rowIndex":3,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.6839,5.6507,5.6839,5.6507,6.1265,4.9356,6.1265]},{"rowIndex":3,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.6839,6.1383,5.6839,6.1383,6.1265,5.6507,6.1265]},{"rowIndex":3,"columnIndex":6,"text":"$52.50","boundingBox":[6.1383,5.6839,6.7363,5.6839,6.7363,6.1265,6.1383,6.1265]},{"rowIndex":3,"columnIndex":7,"text":"$802.50","boundingBox":[6.7363,5.6839,7.4254,5.6839,7.4254,6.1265,6.7363,6.1265]}],"boundingBox":[1.0946,4.8125,7.4324,4.8133,7.4322,6.128,1.0936,6.1274]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"34, rue des Grands Champs Versailles","boundingBox":[1.1282,2.1342,3.5087,2.1342,3.5087,2.2669,1.1282,2.2669],"text":"34, rue des Grands Champs Versailles","confidence":0.71,"type":"string"},"InvoiceTotal":{"valueNumber":1198.4,"page":1,"boundingBox":[6.5814,7.9097,7.413,7.9097,7.413,8.0886,6.5814,8.0886],"text":"$1198.40","confidence":0.962,"type":"number"},"CustomerName":{"page":1,"valueString":"Mackenzie Gray","boundingBox":[1.1354,1.6519,2.2799,1.6519,2.2799,1.8161,1.1354,1.8161],"text":"Mackenzie Gray","confidence":0.48,"type":"string"},"VendorName":{"page":1,"valueString":"T.T TAILWIND TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND TRADERS","confidence":0.953,"type":"string"},"InvoiceId":{"page":1,"valueString":"22671","boundingBox":[1.1272,4.1481,1.4599,4.1481,1.4599,4.248,1.1272,4.248],"text":"22671","confidence":0.946,"type":"string"},"InvoiceDate":{"page":1,"boundingBox":[2.7397,4.1481,2.8456,4.1481,2.8456,4.2465,2.7397,4.2465],"text":"11","confidence":0.375,"type":"date"},"SubTotal":{"valueNumber":1120,"page":1,"boundingBox":[6.8462,6.5516,7.4152,6.5516,7.4152,6.674,6.8462,6.674],"text":"$1120.00","confidence":0.971,"type":"number"},"TotalTax":{"valueNumber":78.4,"page":1,"boundingBox":[7.002,7.5208,7.4153,7.5208,7.4153,7.6432,7.002,7.6432],"text":"$78.40","confidence":0.967,"type":"number"},"VendorAddress":{"page":1,"valueString":"22, rue du Puits Dixme","boundingBox":[5.9835,2.3511,7.1057,2.3511,7.1057,2.4518,5.9835,2.4518],"text":"22, rue du Puits Dixme","confidence":0.606,"type":"string"},"CustomerAddressRecipient":{"page":1,"valueString":"Mackenzie Gray","boundingBox":[1.1354,1.6519,2.2799,1.6519,2.2799,1.8161,1.1354,1.8161],"text":"Mackenzie Gray","confidence":0.48,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":385.2,\"text\":\"$385.20\",\"boundingBox\":[6.8637,5.1789,7.3548,5.1789,7.3548,5.3013,6.8637,5.3013],\"page\":1,\"confidence\":0.888},\"Description\":{\"type\":\"string\",\"valueString\":\"Coffee Maker Red\",\"text\":\"Coffee Maker 
Red\",\"boundingBox\":[2.0294,5.1844,3.0817,5.1844,3.0817,5.2891,2.0294,5.2891],\"page\":1,\"confidence\":0.879},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"39\",\"text\":\"39\",\"boundingBox\":[1.2114,5.1897,1.3427,5.1897,1.3427,5.2891,1.2114,5.2891],\"page\":1,\"confidence\":0.499},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.1897,1.7518,5.1897,1.7518,5.2876,1.6933,5.2876],\"page\":1,\"confidence\":0.77},\"Tax\":{\"type\":\"number\",\"valueNumber\":25.2,\"text\":\"$25.20\",\"boundingBox\":[6.2822,5.1792,6.6601,5.1792,6.6601,5.3013,6.2822,5.3013],\"page\":1,\"confidence\":0.755},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":200,\"text\":\"$200.00\",\"boundingBox\":[4.4092,5.1792,4.8601,5.1792,4.8601,5.3013,4.4092,5.3013],\"page\":1,\"confidence\":0.899}},\"text\":\"39 2 Coffee Maker Red $200.00 $40.00 10% $25.20 $385.20\",\"boundingBox\":[1.2114,5.1789,7.3548,5.1789,7.3548,5.3013,1.2114,5.3013],\"page\":1,\"confidence\":0.867}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":10.7,\"text\":\"$10.70\",\"boundingBox\":[6.9416,5.4789,7.3548,5.4789,7.3548,5.6013,6.9416,5.6013],\"page\":1,\"confidence\":0.895},\"Description\":{\"type\":\"string\",\"valueString\":\"Celebrations C9\",\"text\":\"Celebrations C9\",\"boundingBox\":[2.0294,5.4859,2.9552,5.4859,2.9552,5.5891,2.0294,5.5891],\"page\":1,\"confidence\":0.872},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"31\",\"text\":\"31\",\"boundingBox\":[1.2114,5.4892,1.3242,5.4892,1.3242,5.5891,1.2114,5.5891],\"page\":1,\"confidence\":0.555},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,5.4892,1.7341,5.4892,1.7341,5.5876,1.7012,5.5876],\"page\":1,\"confidence\":0.769},\"Tax\":{\"type\":\"number\",\"valueNumber\":0.7,\"text\":\"$0.70\",\"boundingBox\":[6.3552,5.4792,6.6601,5.4792,6.6601,5.6013,6.3552,5.6013],\"page\":1,\"confidence\":0.773},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":10,\"text\":\"$10.00\",\"boundingBox\":[4.4822,5.4792,4.8601,5.4792,4.8601,5.6013,4.4822,5.6013],\"page\":1,\"confidence\":0.891}},\"text\":\"31 1 Celebrations C9 $10.00 $0.00 0% $0.70 $10.70\",\"boundingBox\":[1.2114,5.4789,7.3548,5.4789,7.3548,5.6013,1.2114,5.6013],\"page\":1,\"confidence\":0.7}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":802.5,\"text\":\"$802.50\",\"boundingBox\":[6.8637,5.7789,7.3548,5.7789,7.3548,5.9013,6.8637,5.9013],\"page\":1,\"confidence\":0.898},\"Description\":{\"type\":\"string\",\"valueString\":\"Black Bathing System Classic 18 in. H x 60 in. W x 32.5\",\"text\":\"Black Bathing System Classic 18 in. H x 60 in. 
W x 32.5\",\"boundingBox\":[2.0249,5.7859,4.1986,5.7859,4.1986,6.0692,2.0249,6.0692],\"page\":1,\"confidence\":0.755},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"12\",\"text\":\"12\",\"boundingBox\":[1.2183,5.7892,1.342,5.7892,1.342,5.8876,1.2183,5.8876],\"page\":1,\"confidence\":0.498},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.7897,1.7491,5.7897,1.7491,5.8891,1.6943,5.8891],\"page\":1,\"confidence\":0.75},\"Tax\":{\"type\":\"number\",\"valueNumber\":52.5,\"text\":\"$52.50\",\"boundingBox\":[6.2822,5.7792,6.6601,5.7792,6.6601,5.9013,6.2822,5.9013],\"page\":1,\"confidence\":0.736},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":250,\"text\":\"$250.00\",\"boundingBox\":[4.4092,5.7792,4.8601,5.7792,4.8601,5.9013,4.4092,5.9013],\"page\":1,\"confidence\":0.899}},\"text\":\"12 3 Black Bathing System Classic 18 in. H $250.00 $0.00 0% $52.50 $802.50 x 60 in. W x 32.5\",\"boundingBox\":[1.2183,5.7789,7.3548,5.7789,7.3548,6.0692,1.2183,6.0692],\"page\":1,\"confidence\":0.81}"]}}}]}}},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice28073.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:19Z","lastUpdatedDateTime":"2021-10-26T22:39:23Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":4,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.102,4.8158,1.4468,4.8158,1.4403,5.0805,1.0955,5.0805],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4468,4.8158,1.9087,4.8158,1.9087,5.0805,1.4403,5.0805],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9087,4.8158,4.3027,4.8221,4.3027,5.0805,1.9087,5.0805],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"boundingBox":[4.3027,4.8221,4.9337,4.8221,4.9337,5.0805,4.3027,5.0805],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9337,4.8221,5.6558,4.8221,5.6558,5.0805,4.9337,5.0805],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6558,4.8221,6.1372,4.8221,6.1372,5.0805,5.6558,5.0805],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.1372,4.8221,6.7357,4.8221,6.7357,5.0805,6.1372,5.0805],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7357,4.8221,7.4253,4.8221,7.4318,5.0868,6.7357,5.0805],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"17","boundingBox":[1.0955,5.0805,1.4403,5.0805,1.4403,5.3831,1.0955,5.3831]},{"rowIndex":1,"columnIndex":1,"text":"2","boundingBox":[1.4403,5.0805,1.9087,5.0805,1.9087,5.3831,1.4403,5.3831]},{"rowIndex":1,"columnIndex":2,"text":"Blend Solid White Sheer 
Curtains","boundingBox":[1.9087,5.0805,4.3027,5.0805,4.3027,5.3831,1.9087,5.3831]},{"rowIndex":1,"columnIndex":3,"text":"$110.00","boundingBox":[4.3027,5.0805,4.9337,5.0805,4.9337,5.3831,4.3027,5.3831]},{"rowIndex":1,"columnIndex":4,"text":"$0.00","boundingBox":[4.9337,5.0805,5.6558,5.0805,5.6558,5.3831,4.9337,5.3831]},{"rowIndex":1,"columnIndex":5,"text":"0%","boundingBox":[5.6558,5.0805,6.1372,5.0805,6.1372,5.3831,5.6558,5.3831]},{"rowIndex":1,"columnIndex":6,"text":"$15.40","boundingBox":[6.1372,5.0805,6.7357,5.0805,6.7357,5.3831,6.1372,5.3831]},{"rowIndex":1,"columnIndex":7,"text":"$235.40","boundingBox":[6.7357,5.0805,7.4318,5.0868,7.4318,5.3831,6.7357,5.3831]},{"rowIndex":2,"columnIndex":0,"text":"56","boundingBox":[1.0955,5.3831,1.4403,5.3831,1.4403,5.8181,1.0955,5.8181]},{"rowIndex":2,"columnIndex":1,"text":"2","boundingBox":[1.4403,5.3831,1.9087,5.3831,1.9087,5.8181,1.4403,5.8181]},{"rowIndex":2,"columnIndex":2,"text":"Rechargeable screwdriver with extra battery","boundingBox":[1.9087,5.3831,4.3027,5.3831,4.3027,5.8244,1.9087,5.8181]},{"rowIndex":2,"columnIndex":3,"text":"$312.00","boundingBox":[4.3027,5.3831,4.9337,5.3831,4.9337,5.8244,4.3027,5.8244]},{"rowIndex":2,"columnIndex":4,"text":"$0.00","boundingBox":[4.9337,5.3831,5.6558,5.3831,5.6558,5.8244,4.9337,5.8244]},{"rowIndex":2,"columnIndex":5,"text":"0%","boundingBox":[5.6558,5.3831,6.1372,5.3831,6.1372,5.8244,5.6558,5.8244]},{"rowIndex":2,"columnIndex":6,"text":"$43.68","boundingBox":[6.1372,5.3831,6.7357,5.3831,6.7357,5.8244,6.1372,5.8244]},{"rowIndex":2,"columnIndex":7,"text":"$667.68","boundingBox":[6.7357,5.3831,7.4318,5.3831,7.4318,5.8244,6.7357,5.8244]},{"rowIndex":3,"columnIndex":0,"text":"40","boundingBox":[1.0955,5.8181,1.4403,5.8181,1.4403,6.1206,1.0955,6.1206]},{"rowIndex":3,"columnIndex":1,"text":"3","boundingBox":[1.4403,5.8181,1.9087,5.8181,1.9087,6.1206,1.4403,6.1206]},{"rowIndex":3,"columnIndex":2,"text":"Extractor Steal","boundingBox":[1.9087,5.8181,4.3027,5.8244,4.3027,6.1206,1.9087,6.1206]},{"rowIndex":3,"columnIndex":3,"text":"$135.00","boundingBox":[4.3027,5.8244,4.9337,5.8244,4.9337,6.1206,4.3027,6.1206]},{"rowIndex":3,"columnIndex":4,"text":"$40.50","boundingBox":[4.9337,5.8244,5.6558,5.8244,5.6558,6.1206,4.9337,6.1206]},{"rowIndex":3,"columnIndex":5,"text":"10%","boundingBox":[5.6558,5.8244,6.1372,5.8244,6.1372,6.1206,5.6558,6.1206]},{"rowIndex":3,"columnIndex":6,"text":"$25.52","boundingBox":[6.1372,5.8244,6.7357,5.8244,6.7422,6.1206,6.1372,6.1206]},{"rowIndex":3,"columnIndex":7,"text":"$390.02","boundingBox":[6.7357,5.8244,7.4318,5.8244,7.4318,6.1269,6.7422,6.1206]}],"boundingBox":[1.0899,4.8213,7.4305,4.8214,7.4306,6.1277,1.0892,6.1275]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"United States","boundingBox":[1.1313,3.1342,1.9101,3.1342,1.9101,3.2374,1.1313,3.2374],"text":"United States","confidence":0.306,"type":"string"},"ShippingAddress":{"page":1,"valueString":"San Gabriel","boundingBox":[2.968,2.1342,3.6276,2.1342,3.6276,2.2374,2.968,2.2374],"text":"San Gabriel","confidence":0.265,"type":"string"},"InvoiceTotal":{"valueNumber":1293.1,"page":1,"boundingBox":[6.5814,7.9097,7.413,7.9097,7.413,8.0886,6.5814,8.0886],"text":"$1293.10","confidence":0.962,"type":"number"},"VendorName":{"page":1,"valueString":"T.T TAILWIND TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND 
TRADERS","confidence":0.952,"type":"string"},"VendorAddressRecipient":{"page":1,"valueString":"Sarosgawk","boundingBox":[5.9841,2.1511,6.517,2.1511,6.517,2.264,5.9841,2.264],"text":"Sarosgawk","confidence":0.234,"type":"string"},"InvoiceId":{"page":1,"valueString":"28073","boundingBox":[1.1272,4.1486,1.475,4.1486,1.475,4.248,1.1272,4.248],"text":"28073","confidence":0.954,"type":"string"},"InvoiceDate":{"page":1,"boundingBox":[2.7355,4.1448,3.3205,4.1448,3.3205,4.248,2.7355,4.248],"text":"5 October","confidence":0.375,"valueDate":"2021-10-05","type":"date"},"SubTotal":{"valueNumber":1208.5,"page":1,"boundingBox":[6.8462,6.5516,7.4152,6.5516,7.4152,6.674,6.8462,6.674],"text":"$1208.50","confidence":0.971,"type":"number"},"TotalTax":{"valueNumber":84.6,"page":1,"boundingBox":[7.002,7.5208,7.4153,7.5208,7.4153,7.6432,7.002,7.6432],"text":"$84.60","confidence":0.967,"type":"number"},"VendorAddress":{"page":1,"valueString":"Maubeuge","boundingBox":[6.5215,2.3511,7.0513,2.3511,7.0513,2.464,6.5215,2.464],"text":"Maubeuge","confidence":0.251,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":235.4,\"text\":\"$235.40\",\"boundingBox\":[6.8637,5.1789,7.3548,5.1789,7.3548,5.3013,6.8637,5.3013],\"page\":1,\"confidence\":0.898},\"Description\":{\"type\":\"string\",\"valueString\":\"Blend Solid White Sheer Curtains\",\"text\":\"Blend Solid White Sheer Curtains\",\"boundingBox\":[2.0356,5.1859,3.9764,5.1859,3.9764,5.2891,2.0356,5.2891],\"page\":1,\"confidence\":0.827},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"17\",\"text\":\"17\",\"boundingBox\":[1.2183,5.1892,1.3431,5.1892,1.3431,5.2876,1.2183,5.2876],\"page\":1,\"confidence\":0.6},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.1897,1.7518,5.1897,1.7518,5.2876,1.6933,5.2876],\"page\":1,\"confidence\":0.729},\"Tax\":{\"type\":\"number\",\"valueNumber\":15.4,\"text\":\"$15.40\",\"boundingBox\":[6.2822,5.1792,6.6601,5.1792,6.6601,5.3013,6.2822,5.3013],\"page\":1,\"confidence\":0.778},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":110,\"text\":\"$110.00\",\"boundingBox\":[4.4092,5.1792,4.8601,5.1792,4.8601,5.3013,4.4092,5.3013],\"page\":1,\"confidence\":0.899}},\"text\":\"17 2 Blend Solid White Sheer Curtains $110.00 $0.00 0% $15.40 $235.40\",\"boundingBox\":[1.2183,5.1789,7.3548,5.1789,7.3548,5.3013,1.2183,5.3013],\"page\":1,\"confidence\":0.86}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":667.68,\"text\":\"$667.68\",\"boundingBox\":[6.8637,5.4789,7.354,5.4789,7.354,5.6013,6.8637,5.6013],\"page\":1,\"confidence\":0.898},\"Description\":{\"type\":\"string\",\"valueString\":\"Rechargeable screwdriver with extra battery\",\"text\":\"Rechargeable screwdriver with extra 
battery\",\"boundingBox\":[2.0341,5.4859,4.1487,5.4859,4.1487,5.7994,2.0341,5.7994],\"page\":1,\"confidence\":0.539},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"56\",\"text\":\"56\",\"boundingBox\":[1.2141,5.4897,1.3439,5.4897,1.3439,5.5891,1.2141,5.5891],\"page\":1,\"confidence\":0.505},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.4897,1.7518,5.4897,1.7518,5.5876,1.6933,5.5876],\"page\":1,\"confidence\":0.749},\"Tax\":{\"type\":\"number\",\"valueNumber\":43.68,\"text\":\"$43.68\",\"boundingBox\":[6.2822,5.4792,6.6601,5.4792,6.6601,5.6013,6.2822,5.6013],\"page\":1,\"confidence\":0.767},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":312,\"text\":\"$312.00\",\"boundingBox\":[4.4092,5.4792,4.8601,5.4792,4.8601,5.6013,4.4092,5.6013],\"page\":1,\"confidence\":0.899}},\"text\":\"56 2 Rechargeable screwdriver with extra $312.00 $0.00 0% $43.68 $667.68 battery\",\"boundingBox\":[1.2141,5.4789,7.354,5.4789,7.354,5.7994,1.2141,5.7994],\"page\":1,\"confidence\":0.843}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":390.02,\"text\":\"$390.02\",\"boundingBox\":[6.8637,5.9204,7.3501,5.9204,7.3501,6.0428,6.8637,6.0428],\"page\":1,\"confidence\":0.903},\"Description\":{\"type\":\"string\",\"valueString\":\"Extractor Steal\",\"text\":\"Extractor Steal\",\"boundingBox\":[2.0356,5.9274,2.8654,5.9274,2.8654,6.0306,2.0356,6.0306],\"page\":1,\"confidence\":0.898},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"40\",\"text\":\"40\",\"boundingBox\":[1.2043,5.9312,1.3437,5.9312,1.3437,6.0306,1.2043,6.0306],\"page\":1,\"confidence\":0.57},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.9312,1.7491,5.9312,1.7491,6.0306,1.6943,6.0306],\"page\":1,\"confidence\":0.801},\"Tax\":{\"type\":\"number\",\"valueNumber\":25.52,\"text\":\"$25.52\",\"boundingBox\":[6.2822,5.9206,6.6583,5.9206,6.6583,6.0428,6.2822,6.0428],\"page\":1,\"confidence\":0.769},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":135,\"text\":\"$135.00\",\"boundingBox\":[4.4092,5.9206,4.8601,5.9206,4.8601,6.0428,4.4092,6.0428],\"page\":1,\"confidence\":0.899}},\"text\":\"40 3 Extractor Steal $135.00 $40.50 10% $25.52 
$390.02\",\"boundingBox\":[1.2043,5.9204,7.3501,5.9204,7.3501,6.0428,1.2043,6.0428],\"page\":1,\"confidence\":0.826}"]}}}]}}},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:18Z","lastUpdatedDateTime":"2021-10-26T22:39:23Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":5,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.1002,4.8223,1.4447,4.8223,1.4447,5.0827,1.1002,5.0827],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4447,4.8223,1.9128,4.8223,1.9128,5.0827,1.4447,5.0827],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9128,4.8223,4.3051,4.8223,4.3051,5.0827,1.9128,5.0827],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"boundingBox":[4.3051,4.8223,4.9356,4.8223,4.9356,5.0827,4.3051,5.0827],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9356,4.8223,5.6507,4.8223,5.6507,5.0827,4.9356,5.0827],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6507,4.8223,6.1383,4.8223,6.1383,5.0827,5.6507,5.0827],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.1383,4.8223,6.7363,4.8223,6.7363,5.0827,6.1383,5.0827],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7363,4.8223,7.4319,4.8223,7.4319,5.0827,6.7363,5.0827],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"43","boundingBox":[1.1002,5.0827,1.4447,5.0827,1.4447,5.3803,1.1002,5.3803]},{"rowIndex":1,"columnIndex":1,"text":"3","boundingBox":[1.4447,5.0827,1.9128,5.0827,1.9128,5.3803,1.4447,5.3803]},{"rowIndex":1,"columnIndex":2,"text":"Big Metal Shelving","boundingBox":[1.9128,5.0827,4.3051,5.0827,4.3051,5.3803,1.9128,5.3803]},{"rowIndex":1,"columnIndex":3,"text":"$99.00","boundingBox":[4.3051,5.0827,4.9356,5.0827,4.9356,5.3803,4.3051,5.3803]},{"rowIndex":1,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.0827,5.6507,5.0827,5.6507,5.3803,4.9356,5.3803]},{"rowIndex":1,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.0827,6.1383,5.0827,6.1383,5.3803,5.6507,5.3803]},{"rowIndex":1,"columnIndex":6,"text":"$20.79","boundingBox":[6.1383,5.0827,6.7363,5.0827,6.7363,5.3803,6.1383,5.3803]},{"rowIndex":1,"columnIndex":7,"text":"$317.79","boundingBox":[6.7363,5.0827,7.4319,5.0827,7.4319,5.3865,6.7363,5.3803]},{"rowIndex":2,"columnIndex":0,"text":"42","boundingBox":[1.1002,5.3803,1.4447,5.3803,1.4447,5.6841,1.1002,5.6841]},{"rowIndex":2,"columnIndex":1,"text":"3","boundingBox":[1.4447,5.3803,1.9128,5.3803,1.9128,5.6841,1.4447,5.6841]},{"rowIndex":2,"columnIndex":2,"text":"Metal 
Shelving","boundingBox":[1.9128,5.3803,4.3051,5.3803,4.3051,5.6841,1.9128,5.6841]},{"rowIndex":2,"columnIndex":3,"text":"$90.00","boundingBox":[4.3051,5.3803,4.9356,5.3803,4.9356,5.6841,4.3051,5.6841]},{"rowIndex":2,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.3803,5.6507,5.3803,5.6507,5.6841,4.9356,5.6841]},{"rowIndex":2,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.3803,6.1383,5.3803,6.1383,5.6841,5.6507,5.6841]},{"rowIndex":2,"columnIndex":6,"text":"$18.90","boundingBox":[6.1383,5.3803,6.7363,5.3803,6.7363,5.6841,6.1383,5.6841]},{"rowIndex":2,"columnIndex":7,"text":"$288.90","boundingBox":[6.7363,5.3803,7.4319,5.3865,7.4319,5.6841,6.7363,5.6841]},{"rowIndex":3,"columnIndex":0,"text":"46","boundingBox":[1.1002,5.6841,1.4447,5.6841,1.4447,5.9817,1.1002,5.9817]},{"rowIndex":3,"columnIndex":1,"text":"1","boundingBox":[1.4447,5.6841,1.9128,5.6841,1.9128,5.9817,1.4447,5.9817]},{"rowIndex":3,"columnIndex":2,"text":"Measuring Tape","boundingBox":[1.9128,5.6841,4.3051,5.6841,4.3051,5.9817,1.9128,5.9817]},{"rowIndex":3,"columnIndex":3,"text":"$123.00","boundingBox":[4.3051,5.6841,4.9356,5.6841,4.9356,5.9817,4.3051,5.9817]},{"rowIndex":3,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.6841,5.6507,5.6841,5.6507,5.9817,4.9356,5.9817]},{"rowIndex":3,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.6841,6.1383,5.6841,6.1383,5.9817,5.6507,5.9817]},{"rowIndex":3,"columnIndex":6,"text":"$8.61","boundingBox":[6.1383,5.6841,6.7363,5.6841,6.7363,5.9817,6.1383,5.9817]},{"rowIndex":3,"columnIndex":7,"text":"$131.61","boundingBox":[6.7363,5.6841,7.4319,5.6841,7.4319,5.9817,6.7363,5.9817]},{"rowIndex":4,"columnIndex":0,"text":"21","boundingBox":[1.1002,5.9817,1.4447,5.9817,1.4447,6.2793,1.1067,6.2855]},{"rowIndex":4,"columnIndex":1,"text":"3","boundingBox":[1.4447,5.9817,1.9128,5.9817,1.9128,6.2793,1.4447,6.2793]},{"rowIndex":4,"columnIndex":2,"text":"Curtain Rod 48 in","boundingBox":[1.9128,5.9817,4.3051,5.9817,4.3051,6.2793,1.9128,6.2793]},{"rowIndex":4,"columnIndex":3,"text":"$25.00","boundingBox":[4.3051,5.9817,4.9356,5.9817,4.9356,6.2793,4.3051,6.2793]},{"rowIndex":4,"columnIndex":4,"text":"$15.00","boundingBox":[4.9356,5.9817,5.6507,5.9817,5.6507,6.2793,4.9356,6.2793]},{"rowIndex":4,"columnIndex":5,"text":"20%","boundingBox":[5.6507,5.9817,6.1383,5.9817,6.1383,6.2793,5.6507,6.2793]},{"rowIndex":4,"columnIndex":6,"text":"$4.20","boundingBox":[6.1383,5.9817,6.7363,5.9817,6.7428,6.2793,6.1383,6.2793]},{"rowIndex":4,"columnIndex":7,"text":"$64.20","boundingBox":[6.7363,5.9817,7.4319,5.9817,7.4319,6.2793,6.7428,6.2793]}],"boundingBox":[1.0921,4.8111,7.43,4.8116,7.4295,6.2883,1.0909,6.2879]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"3923 Dew Drop","boundingBox":[5.9843,2.3543,6.759,2.3543,6.759,2.4634,5.9843,2.4634],"text":"3923 Dew Drop","confidence":0.371,"type":"string"},"ShippingAddress":{"page":1,"valueString":"828, rue de Berri","boundingBox":[1.126,2.1342,2.0896,2.1342,2.0896,2.2532,1.126,2.2532],"text":"828, rue de Berri","confidence":0.651,"type":"string"},"InvoiceTotal":{"valueNumber":802.5,"page":1,"boundingBox":[6.6952,8.0682,7.413,8.0682,7.413,8.2471,6.6952,8.2471],"text":"$802.50","confidence":0.96,"type":"number"},"CustomerName":{"page":1,"valueString":"Villeneuve-d'Ascq","boundingBox":[2.9612,2.1342,4.013,2.1342,4.013,2.2669,2.9612,2.2669],"text":"Villeneuve-d'Ascq","confidence":0.36,"type":"string"},"VendorName":{"page":1,"valueString":"T.T TAILWIND 
TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND TRADERS","confidence":0.954,"type":"string"},"VendorAddressRecipient":{"page":1,"valueString":"Braeo","boundingBox":[5.9879,2.1558,6.2626,2.1558,6.2626,2.2384,5.9879,2.2384],"text":"Braeo","confidence":0.215,"type":"string"},"InvoiceId":{"page":1,"valueString":"64808","boundingBox":[1.1274,4.1486,1.4794,4.1486,1.4794,4.248,1.1274,4.248],"text":"64808","confidence":0.967,"type":"string"},"InvoiceDate":{"page":1,"boundingBox":[2.7397,4.1448,3.2744,4.1448,3.2744,4.248,2.7397,4.248],"text":"12 March","confidence":0.304,"valueDate":"2021-03-12","type":"date"},"SubTotal":{"valueNumber":750,"page":1,"boundingBox":[6.9241,6.7101,7.4152,6.7101,7.4152,6.8325,6.9241,6.8325],"text":"$750.00","confidence":0.971,"type":"number"},"TotalTax":{"valueNumber":52.5,"page":1,"boundingBox":[7.002,7.6793,7.4153,7.6793,7.4153,7.8017,7.002,7.8017],"text":"$52.50","confidence":0.97,"type":"number"},"ShippingAddressRecipient":{"page":1,"valueString":"Villeneuve-d'Ascq","boundingBox":[2.9612,2.1342,4.013,2.1342,4.013,2.2669,2.9612,2.2669],"text":"Villeneuve-d'Ascq","confidence":0.36,"type":"string"},"VendorAddress":{"page":1,"valueString":"Circle","boundingBox":[6.8008,2.3511,7.0698,2.3511,7.0698,2.4384,6.8008,2.4384],"text":"Circle","confidence":0.271,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":317.79,\"text\":\"$317.79\",\"boundingBox\":[6.8637,5.1789,7.3533,5.1789,7.3533,5.3013,6.8637,5.3013],\"page\":1,\"confidence\":0.872},\"Description\":{\"type\":\"string\",\"valueString\":\"Big Metal Shelving\",\"text\":\"Big Metal Shelving\",\"boundingBox\":[2.0356,5.1859,3.1214,5.1859,3.1214,5.3193,2.0356,5.3193],\"page\":1,\"confidence\":0.876},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"43\",\"text\":\"43\",\"boundingBox\":[1.2043,5.1897,1.3393,5.1897,1.3393,5.2891,1.2043,5.2891],\"page\":1,\"confidence\":0.632},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.1897,1.7491,5.1897,1.7491,5.2891,1.6943,5.2891],\"page\":1,\"confidence\":0.794},\"Tax\":{\"type\":\"number\",\"valueNumber\":20.79,\"text\":\"$20.79\",\"boundingBox\":[6.2822,5.1792,6.659,5.1792,6.659,5.3013,6.2822,5.3013],\"page\":1,\"confidence\":0.727},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":99,\"text\":\"$99.00\",\"boundingBox\":[4.4822,5.1792,4.8601,5.1792,4.8601,5.3013,4.4822,5.3013],\"page\":1,\"confidence\":0.887}},\"text\":\"43 3 Big Metal Shelving $99.00 $0.00 0% $20.79 $317.79\",\"boundingBox\":[1.2043,5.1789,7.3533,5.1789,7.3533,5.3193,1.2043,5.3193],\"page\":1,\"confidence\":0.864}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":288.9,\"text\":\"$288.90\",\"boundingBox\":[6.8637,5.4789,7.3548,5.4789,7.3548,5.6013,6.8637,5.6013],\"page\":1,\"confidence\":0.895},\"Description\":{\"type\":\"string\",\"valueString\":\"Metal Shelving\",\"text\":\"Metal 
Shelving\",\"boundingBox\":[2.0356,5.4859,2.8944,5.4859,2.8944,5.6193,2.0356,5.6193],\"page\":1,\"confidence\":0.8},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"42\",\"text\":\"42\",\"boundingBox\":[1.2043,5.4897,1.342,5.4897,1.342,5.5876,1.2043,5.5876],\"page\":1,\"confidence\":0.6},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.4897,1.7491,5.4897,1.7491,5.5891,1.6943,5.5891],\"page\":1,\"confidence\":0.77},\"Tax\":{\"type\":\"number\",\"valueNumber\":18.9,\"text\":\"$18.90\",\"boundingBox\":[6.2822,5.4792,6.6601,5.4792,6.6601,5.6013,6.2822,5.6013],\"page\":1,\"confidence\":0.729},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":90,\"text\":\"$90.00\",\"boundingBox\":[4.4822,5.4792,4.8601,5.4792,4.8601,5.6013,4.4822,5.6013],\"page\":1,\"confidence\":0.884}},\"text\":\"42 3 Metal Shelving $90.00 $0.00 0% $18.90 $288.90\",\"boundingBox\":[1.2043,5.4789,7.3548,5.4789,7.3548,5.6193,1.2043,5.6193],\"page\":1,\"confidence\":0.805}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":131.61,\"text\":\"$131.61\",\"boundingBox\":[6.8637,5.7789,7.3364,5.7789,7.3364,5.9013,6.8637,5.9013],\"page\":1,\"confidence\":0.899},\"Description\":{\"type\":\"string\",\"valueString\":\"Measuring Tape\",\"text\":\"Measuring Tape\",\"boundingBox\":[2.0356,5.7871,2.9754,5.7871,2.9754,5.9193,2.0356,5.9193],\"page\":1,\"confidence\":0.886},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"46\",\"text\":\"46\",\"boundingBox\":[1.2043,5.7897,1.3439,5.7897,1.3439,5.8891,1.2043,5.8891],\"page\":1,\"confidence\":0.676},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,5.7892,1.7341,5.7892,1.7341,5.8876,1.7012,5.8876],\"page\":1,\"confidence\":0.752},\"Tax\":{\"type\":\"number\",\"valueNumber\":8.61,\"text\":\"$8.61\",\"boundingBox\":[6.3552,5.7792,6.6406,5.7792,6.6406,5.9013,6.3552,5.9013],\"page\":1,\"confidence\":0.724},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":123,\"text\":\"$123.00\",\"boundingBox\":[4.4092,5.7792,4.8601,5.7792,4.8601,5.9013,4.4092,5.9013],\"page\":1,\"confidence\":0.899}},\"text\":\"46 1 Measuring Tape $123.00 $0.00 0% $8.61 $131.61\",\"boundingBox\":[1.2043,5.7789,7.3364,5.7789,7.3364,5.9193,1.2043,5.9193],\"page\":1,\"confidence\":0.7}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":64.2,\"text\":\"$64.20\",\"boundingBox\":[6.9416,6.0789,7.3548,6.0789,7.3548,6.2013,6.9416,6.2013],\"page\":1,\"confidence\":0.901},\"Description\":{\"type\":\"string\",\"valueString\":\"Curtain Rod 48 in\",\"text\":\"Curtain Rod 48 in\",\"boundingBox\":[2.0294,6.0859,3.051,6.0859,3.051,6.1891,2.0294,6.1891],\"page\":1,\"confidence\":0.801},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"21\",\"text\":\"21\",\"boundingBox\":[1.2105,6.0892,1.3242,6.0892,1.3242,6.1876,1.2105,6.1876],\"page\":1,\"confidence\":0.69},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,6.0897,1.7491,6.0897,1.7491,6.1891,1.6943,6.1891],\"page\":1,\"confidence\":0.79},\"Tax\":{\"type\":\"number\",\"valueNumber\":4.2,\"text\":\"$4.20\",\"boundingBox\":[6.3552,6.0792,6.6601,6.0792,6.6601,6.2013,6.3552,6.2013],\"page\":1,\"confidence\":0.719},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":25,\"text\":\"$25.00\",\"boundingBox\":[4.4822,6.0792,4.8601,6.0792,4.8601,6.2013,4.4822,6.2013],\"page\":1,\"confidence\":0.884}},\"text\":\"21 3 Curtain Rod 48 in $25.00 $15.00 20% $4.20 
$64.20\",\"boundingBox\":[1.2105,6.0789,7.3548,6.0789,7.3548,6.2013,1.2105,6.2013],\"page\":1,\"confidence\":0.87}"]}}}]}}},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:18Z","lastUpdatedDateTime":"2021-10-26T22:39:22Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":5,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.1002,4.8187,1.4447,4.8187,1.4447,5.0838,1.1002,5.0838],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4447,4.8187,1.9128,4.8187,1.9128,5.0838,1.4447,5.0838],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9128,4.8187,4.3051,4.8187,4.3051,5.0838,1.9128,5.0838],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"boundingBox":[4.3051,4.8187,4.9356,4.8187,4.9356,5.0838,4.3051,5.0838],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9356,4.8187,5.6572,4.8187,5.6507,5.0838,4.9356,5.0838],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6572,4.8187,6.1383,4.8187,6.1383,5.0838,5.6507,5.0838],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.1383,4.8187,6.7363,4.8187,6.7363,5.0838,6.1383,5.0838],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7363,4.8187,7.4319,4.8248,7.4319,5.0838,6.7363,5.0838],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"36","boundingBox":[1.1002,5.0838,1.4447,5.0838,1.4447,5.3798,1.1002,5.3798]},{"rowIndex":1,"columnIndex":1,"text":"3","boundingBox":[1.4447,5.0838,1.9128,5.0838,1.9128,5.3798,1.4447,5.3798]},{"rowIndex":1,"columnIndex":2,"text":"Wood Table","boundingBox":[1.9128,5.0838,4.3051,5.0838,4.3051,5.3798,1.9128,5.3798]},{"rowIndex":1,"columnIndex":3,"text":"$395.00","boundingBox":[4.3051,5.0838,4.9356,5.0838,4.9356,5.3798,4.3051,5.3798]},{"rowIndex":1,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.0838,5.6507,5.0838,5.6507,5.3798,4.9356,5.3798]},{"rowIndex":1,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.0838,6.1383,5.0838,6.1383,5.3798,5.6507,5.3798]},{"rowIndex":1,"columnIndex":6,"text":"$82.95","boundingBox":[6.1383,5.0838,6.7363,5.0838,6.7363,5.3798,6.1383,5.3798]},{"rowIndex":1,"columnIndex":7,"text":"$1267.95","boundingBox":[6.7363,5.0838,7.4319,5.0838,7.4319,5.3798,6.7363,5.3798]},{"rowIndex":2,"columnIndex":0,"text":"20","boundingBox":[1.1002,5.3798,1.4447,5.3798,1.4447,5.682,1.1002,5.682]},{"rowIndex":2,"columnIndex":1,"text":"2","boundingBox":[1.4447,5.3798,1.9128,5.3798,1.9128,5.682,1.4447,5.682]},{"rowIndex":2,"columnIndex":2,"text":"White 
Window","boundingBox":[1.9128,5.3798,4.3051,5.3798,4.3051,5.682,1.9128,5.682]},{"rowIndex":2,"columnIndex":3,"text":"$120.00","boundingBox":[4.3051,5.3798,4.9356,5.3798,4.9356,5.682,4.3051,5.682]},{"rowIndex":2,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.3798,5.6507,5.3798,5.6507,5.682,4.9356,5.682]},{"rowIndex":2,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.3798,6.1383,5.3798,6.1383,5.682,5.6507,5.682]},{"rowIndex":2,"columnIndex":6,"text":"$16.80","boundingBox":[6.1383,5.3798,6.7363,5.3798,6.7363,5.682,6.1383,5.682]},{"rowIndex":2,"columnIndex":7,"text":"$256.80","boundingBox":[6.7363,5.3798,7.4319,5.3798,7.4319,5.682,6.7363,5.682]},{"rowIndex":3,"columnIndex":0,"text":"25","boundingBox":[1.1002,5.682,1.4447,5.682,1.4447,5.978,1.1002,5.978]},{"rowIndex":3,"columnIndex":1,"text":"3","boundingBox":[1.4447,5.682,1.9128,5.682,1.9128,5.978,1.4447,5.978]},{"rowIndex":3,"columnIndex":2,"text":"Indoor Kit Gardering","boundingBox":[1.9128,5.682,4.3051,5.682,4.3051,5.9842,1.9128,5.978]},{"rowIndex":3,"columnIndex":3,"text":"$70.00","boundingBox":[4.3051,5.682,4.9356,5.682,4.9356,5.9842,4.3051,5.9842]},{"rowIndex":3,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.682,5.6507,5.682,5.6507,5.9842,4.9356,5.9842]},{"rowIndex":3,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.682,6.1383,5.682,6.1383,5.9842,5.6507,5.9842]},{"rowIndex":3,"columnIndex":6,"text":"$14.70","boundingBox":[6.1383,5.682,6.7363,5.682,6.7363,5.9842,6.1383,5.9842]},{"rowIndex":3,"columnIndex":7,"text":"$224.70","boundingBox":[6.7363,5.682,7.4319,5.682,7.4319,5.978,6.7363,5.9842]},{"rowIndex":4,"columnIndex":0,"text":"46","boundingBox":[1.1002,5.978,1.4447,5.978,1.4447,6.2802,1.1067,6.2864]},{"rowIndex":4,"columnIndex":1,"text":"1","boundingBox":[1.4447,5.978,1.9128,5.978,1.9128,6.2802,1.4447,6.2802]},{"rowIndex":4,"columnIndex":2,"text":"Measuring Tape","boundingBox":[1.9128,5.978,4.3051,5.9842,4.3051,6.2802,1.9128,6.2802]},{"rowIndex":4,"columnIndex":3,"text":"$123.00","boundingBox":[4.3051,5.9842,4.9356,5.9842,4.9291,6.2802,4.3051,6.2802]},{"rowIndex":4,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.9842,5.6507,5.9842,5.6507,6.2802,4.9291,6.2802]},{"rowIndex":4,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.9842,6.1383,5.9842,6.1383,6.2802,5.6507,6.2802]},{"rowIndex":4,"columnIndex":6,"text":"$8.61","boundingBox":[6.1383,5.9842,6.7363,5.9842,6.7428,6.2802,6.1383,6.2802]},{"rowIndex":4,"columnIndex":7,"text":"$131.61","boundingBox":[6.7363,5.9842,7.4319,5.978,7.4319,6.2802,6.7428,6.2802]}],"boundingBox":[1.0919,4.8211,7.4312,4.8214,7.4306,6.2883,1.0904,6.288]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","boundingBox":[5.9782,2.3511,7.1261,2.3511,7.1261,3.264,5.9782,3.264],"text":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","confidence":0.371,"type":"string"},"ShippingAddress":{"page":1,"valueString":"45000","boundingBox":[2.9609,2.638,3.3194,2.638,3.3194,2.7374,2.9609,2.7374],"text":"45000","confidence":0.533,"type":"string"},"InvoiceTotal":{"valueNumber":1881.06,"page":1,"boundingBox":[6.5814,8.0682,7.413,8.0682,7.413,8.2471,6.5814,8.2471],"text":"$1881.06","confidence":0.959,"type":"number"},"CustomerName":{"page":1,"valueString":"Clinton Gutierrez","boundingBox":[1.1277,1.6519,2.3581,1.6519,2.3581,1.7789,1.1277,1.7789],"text":"Clinton Gutierrez","confidence":0.342,"type":"string"},"VendorName":{"page":1,"valueString":"T.T 
TAILWIND TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND TRADERS","confidence":0.954,"type":"string"},"InvoiceId":{"page":1,"valueString":"67164","boundingBox":[1.1274,4.1481,1.4804,4.1481,1.4804,4.248,1.1274,4.248],"text":"67164","confidence":0.967,"type":"string"},"SubTotal":{"valueNumber":1758,"page":1,"boundingBox":[6.8462,6.7101,7.4152,6.7101,7.4152,6.8325,6.8462,6.8325],"text":"$1758.00","confidence":0.971,"type":"number"},"TotalTax":{"valueNumber":123.06,"page":1,"boundingBox":[6.9241,7.6793,7.4152,7.6793,7.4152,7.8017,6.9241,7.8017],"text":"$123.06","confidence":0.962,"type":"number"},"CustomerAddressRecipient":{"page":1,"valueString":"Dluhbio","boundingBox":[5.9879,2.1511,6.3717,2.1511,6.3717,2.2384,5.9879,2.2384],"text":"Dluhbio","confidence":0.276,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":1267.95,\"text\":\"$1267.95\",\"boundingBox\":[6.7858,5.1789,7.3503,5.1789,7.3503,5.3013,6.7858,5.3013],\"page\":1,\"confidence\":0.863},\"Description\":{\"type\":\"string\",\"valueString\":\"Wood Table\",\"text\":\"Wood Table\",\"boundingBox\":[2.0249,5.1859,2.7419,5.1859,2.7419,5.2891,2.0249,5.2891],\"page\":1,\"confidence\":0.888},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"36\",\"text\":\"36\",\"boundingBox\":[1.2114,5.1897,1.3439,5.1897,1.3439,5.2891,1.2114,5.2891],\"page\":1,\"confidence\":0.695},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.1897,1.7491,5.1897,1.7491,5.2891,1.6943,5.2891],\"page\":1,\"confidence\":0.791},\"Tax\":{\"type\":\"number\",\"valueNumber\":82.95,\"text\":\"$82.95\",\"boundingBox\":[6.2822,5.1792,6.6567,5.1792,6.6567,5.3013,6.2822,5.3013],\"page\":1,\"confidence\":0.778},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":395,\"text\":\"$395.00\",\"boundingBox\":[4.4092,5.1792,4.8601,5.1792,4.8601,5.3013,4.4092,5.3013],\"page\":1,\"confidence\":0.884}},\"text\":\"36 3 Wood Table $395.00 $0.00 0% $82.95 $1267.95\",\"boundingBox\":[1.2114,5.1789,7.3503,5.1789,7.3503,5.3013,1.2114,5.3013],\"page\":1,\"confidence\":0.804}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":256.8,\"text\":\"$256.80\",\"boundingBox\":[6.8637,5.4789,7.3548,5.4789,7.3548,5.6013,6.8637,5.6013],\"page\":1,\"confidence\":0.897},\"Description\":{\"type\":\"string\",\"valueString\":\"White Window\",\"text\":\"White Window\",\"boundingBox\":[2.0249,5.4859,2.9032,5.4859,2.9032,5.5891,2.0249,5.5891],\"page\":1,\"confidence\":0.893},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"20\",\"text\":\"20\",\"boundingBox\":[1.2105,5.4897,1.3437,5.4897,1.3437,5.5891,1.2105,5.5891],\"page\":1,\"confidence\":0.6},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.4897,1.7518,5.4897,1.7518,5.5876,1.6933,5.5876],\"page\":1,\"confidence\":0.793},\"Tax\":{\"type\":\"number\",\"valueNumber\":16.8,\"text\":\"$16.80\",\"boundingBox\":[6.2822,5.4792,6.6601,5.4792,6.6601,5.6013,6.2822,5.6013],\"page\":1,\"confidence\":0.802},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":120,\"text\":\"$120.00\",\"boundingBox\":[4.4092,5.4792,4.8601,5.4792,4.8601,5.6013,4.4092,5.6013],\"page\":1,\"confidence\":0.892}},\"text\":\"20 2 White Window $120.00 $0.00 0% $16.80 
$256.80\",\"boundingBox\":[1.2105,5.4789,7.3548,5.4789,7.3548,5.6013,1.2105,5.6013],\"page\":1,\"confidence\":0.762}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":224.7,\"text\":\"$224.70\",\"boundingBox\":[6.8637,5.7789,7.3548,5.7789,7.3548,5.9013,6.8637,5.9013],\"page\":1,\"confidence\":0.895},\"Description\":{\"type\":\"string\",\"valueString\":\"Indoor Kit Gardering\",\"text\":\"Indoor Kit Gardering\",\"boundingBox\":[2.0356,5.7859,3.2358,5.7859,3.2358,5.9193,2.0356,5.9193],\"page\":1,\"confidence\":0.883},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"25\",\"text\":\"25\",\"boundingBox\":[1.2105,5.7897,1.3404,5.7897,1.3404,5.8891,1.2105,5.8891],\"page\":1,\"confidence\":0.595},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.7897,1.7491,5.7897,1.7491,5.8891,1.6943,5.8891],\"page\":1,\"confidence\":0.815},\"Tax\":{\"type\":\"number\",\"valueNumber\":14.7,\"text\":\"$14.70\",\"boundingBox\":[6.2822,5.7792,6.6601,5.7792,6.6601,5.9013,6.2822,5.9013],\"page\":1,\"confidence\":0.801},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":70,\"text\":\"$70.00\",\"boundingBox\":[4.4822,5.7792,4.8601,5.7792,4.8601,5.9013,4.4822,5.9013],\"page\":1,\"confidence\":0.89}},\"text\":\"25 3 Indoor Kit Gardering $70.00 $0.00 0% $14.70 $224.70\",\"boundingBox\":[1.2105,5.7789,7.3548,5.7789,7.3548,5.9193,1.2105,5.9193],\"page\":1,\"confidence\":0.766}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":131.61,\"text\":\"$131.61\",\"boundingBox\":[6.8637,6.0789,7.3364,6.0789,7.3364,6.2013,6.8637,6.2013],\"page\":1,\"confidence\":0.9},\"Description\":{\"type\":\"string\",\"valueString\":\"Measuring Tape\",\"text\":\"Measuring Tape\",\"boundingBox\":[2.0356,6.0871,2.9754,6.0871,2.9754,6.2193,2.0356,6.2193],\"page\":1,\"confidence\":0.898},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"46\",\"text\":\"46\",\"boundingBox\":[1.2043,6.0897,1.3439,6.0897,1.3439,6.1891,1.2043,6.1891],\"page\":1,\"confidence\":0.636},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,6.0892,1.7341,6.0892,1.7341,6.1876,1.7012,6.1876],\"page\":1,\"confidence\":0.771},\"Tax\":{\"type\":\"number\",\"valueNumber\":8.61,\"text\":\"$8.61\",\"boundingBox\":[6.3552,6.0792,6.6406,6.0792,6.6406,6.2013,6.3552,6.2013],\"page\":1,\"confidence\":0.783},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":123,\"text\":\"$123.00\",\"boundingBox\":[4.4092,6.0792,4.8601,6.0792,4.8601,6.2013,4.4092,6.2013],\"page\":1,\"confidence\":0.898}},\"text\":\"46 1 Measuring Tape $123.00 $0.00 0% $8.61 
$131.61\",\"boundingBox\":[1.2043,6.0789,7.3364,6.0789,7.3364,6.2193,1.2043,6.2193],\"page\":1,\"confidence\":0.808}"]}}}]}}},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:18Z","lastUpdatedDateTime":"2021-10-26T22:39:24Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":5,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.0954,4.82,1.4465,4.82,1.4465,5.0833,1.0954,5.0833],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4465,4.82,1.9145,4.82,1.9145,5.0833,1.4465,5.0833],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9145,4.82,4.2938,4.82,4.2938,5.0833,1.9145,5.0833],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"boundingBox":[4.2938,4.82,4.9373,4.82,4.9373,5.0833,4.2938,5.0833],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9373,4.82,5.6524,4.82,5.6524,5.0833,4.9373,5.0833],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6524,4.82,6.14,4.82,6.14,5.0833,5.6524,5.0833],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.14,4.82,6.7381,4.82,6.7381,5.0833,6.14,5.0833],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7381,4.82,7.4271,4.82,7.4271,5.0833,6.7381,5.0833],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"9","boundingBox":[1.0954,5.0833,1.4465,5.0833,1.4465,5.52,1.0954,5.52]},{"rowIndex":1,"columnIndex":1,"text":"3","boundingBox":[1.4465,5.0833,1.9145,5.0833,1.9145,5.52,1.4465,5.52]},{"rowIndex":1,"columnIndex":2,"text":"Bathing System Classic 18 in. H x 60 in. 
W x 32.5","boundingBox":[1.9145,5.0833,4.2938,5.0833,4.3003,5.52,1.9145,5.52]},{"rowIndex":1,"columnIndex":3,"text":"$200.00","boundingBox":[4.2938,5.0833,4.9373,5.0833,4.9373,5.52,4.3003,5.52]},{"rowIndex":1,"columnIndex":4,"text":"$60.00","boundingBox":[4.9373,5.0833,5.6524,5.0833,5.6524,5.52,4.9373,5.52]},{"rowIndex":1,"columnIndex":5,"text":"10%","boundingBox":[5.6524,5.0833,6.14,5.0833,6.14,5.52,5.6524,5.52]},{"rowIndex":1,"columnIndex":6,"text":"$37.80","boundingBox":[6.14,5.0833,6.7381,5.0833,6.7381,5.52,6.14,5.52]},{"rowIndex":1,"columnIndex":7,"text":"$577.80","boundingBox":[6.7381,5.0833,7.4271,5.0833,7.4336,5.526,6.7381,5.52]},{"rowIndex":2,"columnIndex":0,"text":"59","boundingBox":[1.0954,5.52,1.4465,5.52,1.4465,5.8252,1.0954,5.8252]},{"rowIndex":2,"columnIndex":1,"text":"3","boundingBox":[1.4465,5.52,1.9145,5.52,1.9145,5.8252,1.4465,5.8252]},{"rowIndex":2,"columnIndex":2,"text":"Two red garden gnomes","boundingBox":[1.9145,5.52,4.3003,5.52,4.3003,5.8252,1.9145,5.8252]},{"rowIndex":2,"columnIndex":3,"text":"$92.00","boundingBox":[4.3003,5.52,4.9373,5.52,4.9373,5.8252,4.3003,5.8252]},{"rowIndex":2,"columnIndex":4,"text":"$0.00","boundingBox":[4.9373,5.52,5.6524,5.52,5.6524,5.8252,4.9373,5.8252]},{"rowIndex":2,"columnIndex":5,"text":"0%","boundingBox":[5.6524,5.52,6.14,5.52,6.14,5.8252,5.6524,5.8252]},{"rowIndex":2,"columnIndex":6,"text":"$19.32","boundingBox":[6.14,5.52,6.7381,5.52,6.7381,5.8252,6.14,5.8252]},{"rowIndex":2,"columnIndex":7,"text":"$295.32","boundingBox":[6.7381,5.52,7.4336,5.526,7.4336,5.8252,6.7381,5.8252]},{"rowIndex":3,"columnIndex":0,"text":"58","boundingBox":[1.0954,5.8252,1.4465,5.8252,1.4465,6.1243,1.0954,6.1243]},{"rowIndex":3,"columnIndex":1,"text":"3","boundingBox":[1.4465,5.8252,1.9145,5.8252,1.9145,6.1243,1.4465,6.1243]},{"rowIndex":3,"columnIndex":2,"text":"Single red garden gnome","boundingBox":[1.9145,5.8252,4.3003,5.8252,4.3003,6.1243,1.9145,6.1243]},{"rowIndex":3,"columnIndex":3,"text":"$56.00","boundingBox":[4.3003,5.8252,4.9373,5.8252,4.9373,6.1243,4.3003,6.1243]},{"rowIndex":3,"columnIndex":4,"text":"$16.80","boundingBox":[4.9373,5.8252,5.6524,5.8252,5.6524,6.1243,4.9373,6.1243]},{"rowIndex":3,"columnIndex":5,"text":"10%","boundingBox":[5.6524,5.8252,6.14,5.8252,6.14,6.1243,5.6524,6.1243]},{"rowIndex":3,"columnIndex":6,"text":"$10.58","boundingBox":[6.14,5.8252,6.7381,5.8252,6.7381,6.1243,6.14,6.1243]},{"rowIndex":3,"columnIndex":7,"text":"$161.78","boundingBox":[6.7381,5.8252,7.4336,5.8252,7.4336,6.1243,6.7381,6.1243]},{"rowIndex":4,"columnIndex":0,"text":"32","boundingBox":[1.0954,6.1243,1.4465,6.1243,1.4465,6.4294,1.1019,6.4294]},{"rowIndex":4,"columnIndex":1,"text":"1","boundingBox":[1.4465,6.1243,1.9145,6.1243,1.9145,6.4294,1.4465,6.4294]},{"rowIndex":4,"columnIndex":2,"text":"Artificial 
Tree","boundingBox":[1.9145,6.1243,4.3003,6.1243,4.3003,6.4234,1.9145,6.4294]},{"rowIndex":4,"columnIndex":3,"text":"$250.00","boundingBox":[4.3003,6.1243,4.9373,6.1243,4.9373,6.4234,4.3003,6.4234]},{"rowIndex":4,"columnIndex":4,"text":"$0.00","boundingBox":[4.9373,6.1243,5.6524,6.1243,5.6524,6.4234,4.9373,6.4234]},{"rowIndex":4,"columnIndex":5,"text":"0%","boundingBox":[5.6524,6.1243,6.14,6.1243,6.14,6.4234,5.6524,6.4234]},{"rowIndex":4,"columnIndex":6,"text":"$17.50","boundingBox":[6.14,6.1243,6.7381,6.1243,6.7381,6.4234,6.14,6.4234]},{"rowIndex":4,"columnIndex":7,"text":"$267.50","boundingBox":[6.7381,6.1243,7.4336,6.1243,7.4401,6.4175,6.7381,6.4234]}],"boundingBox":[1.0889,4.8212,7.4279,4.8211,7.4278,6.4273,1.0879,6.4275]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"Julpum","boundingBox":[6.2569,2.3511,6.6116,2.3511,6.6116,2.4634,6.2569,2.4634],"text":"Julpum","confidence":0.299,"type":"string"},"ShippingAddress":{"page":1,"valueString":"Street","boundingBox":[1.9649,2.138,2.3054,2.138,2.3054,2.2374,1.9649,2.2374],"text":"Street","confidence":0.23,"type":"string"},"InvoiceTotal":{"valueNumber":1302.4,"page":1,"boundingBox":[6.5814,8.2097,7.413,8.2097,7.413,8.3886,6.5814,8.3886],"text":"$1302.40","confidence":0.958,"type":"number"},"CustomerName":{"page":1,"valueString":"Ashlee Raje","boundingBox":[1.1219,1.6519,1.9574,1.6519,1.9574,1.8161,1.1219,1.8161],"text":"Ashlee Raje","confidence":0.264,"type":"string"},"VendorName":{"page":1,"valueString":"T.T TAILWIND TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND TRADERS","confidence":0.953,"type":"string"},"VendorAddressRecipient":{"page":1,"valueString":"Alvotue","boundingBox":[5.9786,2.1511,6.3597,2.1511,6.3597,2.2384,5.9786,2.2384],"text":"Alvotue","confidence":0.217,"type":"string"},"InvoiceId":{"page":1,"valueString":"80110","boundingBox":[1.126,4.1481,1.4794,4.1481,1.4794,4.248,1.126,4.248],"text":"80110","confidence":0.954,"type":"string"},"InvoiceDate":{"page":1,"boundingBox":[2.9138,4.1448,3.3935,4.1448,3.3935,4.248,2.9138,4.248],"text":"October","confidence":0.394,"type":"date"},"SubTotal":{"valueNumber":1217.2,"page":1,"boundingBox":[6.8462,6.8516,7.4152,6.8516,7.4152,6.974,6.8462,6.974],"text":"$1217.20","confidence":0.967,"type":"number"},"TotalTax":{"valueNumber":85.2,"page":1,"boundingBox":[7.002,7.8208,7.4153,7.8208,7.4153,7.9432,7.002,7.9432],"text":"$85.20","confidence":0.962,"type":"number"},"VendorAddress":{"page":1,"valueString":"4559 Loop Beaverton Oregon 97005 States","boundingBox":[5.9782,2.3543,6.9008,2.3543,6.9008,3.2384,5.9782,3.2384],"text":"4559 Loop Beaverton Oregon 97005 States","confidence":0.353,"type":"string"},"CustomerAddressRecipient":{"page":1,"valueString":"Ashlee Raje","boundingBox":[1.1219,1.6519,1.9574,1.6519,1.9574,1.8161,1.1219,1.8161],"text":"Ashlee Raje","confidence":0.264,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":577.8,\"text\":\"$577.80\",\"boundingBox\":[6.8637,5.1789,7.3548,5.1789,7.3548,5.3013,6.8637,5.3013],\"page\":1,\"confidence\":0.899},\"Description\":{\"type\":\"string\",\"valueString\":\"Bathing System Classic 18 in. H x 60 in. W x 32.5\",\"text\":\"Bathing System Classic 18 in. H x 60 in. 
W x 32.5\",\"boundingBox\":[2.0326,5.1859,4.1415,5.1859,4.1415,5.4692,2.0326,5.4692],\"page\":1,\"confidence\":0.827},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"9\",\"text\":\"9\",\"boundingBox\":[1.246,5.1897,1.3062,5.1897,1.3062,5.2891,1.246,5.2891],\"page\":1,\"confidence\":0.455},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.1897,1.7491,5.1897,1.7491,5.2891,1.6943,5.2891],\"page\":1,\"confidence\":0.771},\"Tax\":{\"type\":\"number\",\"valueNumber\":37.8,\"text\":\"$37.80\",\"boundingBox\":[6.2822,5.1792,6.6601,5.1792,6.6601,5.3013,6.2822,5.3013],\"page\":1,\"confidence\":0.781},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":200,\"text\":\"$200.00\",\"boundingBox\":[4.4092,5.1792,4.8601,5.1792,4.8601,5.3013,4.4092,5.3013],\"page\":1,\"confidence\":0.899}},\"text\":\"9 3 Bathing System Classic 18 in. H x 60 $200.00 $60.00 10% $37.80 $577.80 in. W x 32.5\",\"boundingBox\":[1.246,5.1789,7.3548,5.1789,7.3548,5.4692,1.246,5.4692],\"page\":1,\"confidence\":0.903}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":295.32,\"text\":\"$295.32\",\"boundingBox\":[6.8637,5.6204,7.3501,5.6204,7.3501,5.7428,6.8637,5.7428],\"page\":1,\"confidence\":0.898},\"Description\":{\"type\":\"string\",\"valueString\":\"Two red garden gnomes\",\"text\":\"Two red garden gnomes\",\"boundingBox\":[2.0259,5.6274,3.4758,5.6274,3.4758,5.7608,2.0259,5.7608],\"page\":1,\"confidence\":0.855},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"59\",\"text\":\"59\",\"boundingBox\":[1.2141,5.6312,1.3427,5.6312,1.3427,5.7306,1.2141,5.7306],\"page\":1,\"confidence\":0.5},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.6312,1.7491,5.6312,1.7491,5.7306,1.6943,5.7306],\"page\":1,\"confidence\":0.748},\"Tax\":{\"type\":\"number\",\"valueNumber\":19.32,\"text\":\"$19.32\",\"boundingBox\":[6.2822,5.6206,6.6583,5.6206,6.6583,5.7428,6.2822,5.7428],\"page\":1,\"confidence\":0.863},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":92,\"text\":\"$92.00\",\"boundingBox\":[4.4822,5.6206,4.8601,5.6206,4.8601,5.7428,4.4822,5.7428],\"page\":1,\"confidence\":0.898}},\"text\":\"59 3 Two red garden gnomes $92.00 $0.00 0% $19.32 $295.32\",\"boundingBox\":[1.2141,5.6204,7.3501,5.6204,7.3501,5.7608,1.2141,5.7608],\"page\":1,\"confidence\":0.809}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":161.78,\"text\":\"$161.78\",\"boundingBox\":[6.8637,5.9204,7.354,5.9204,7.354,6.0428,6.8637,6.0428],\"page\":1,\"confidence\":0.899},\"Description\":{\"type\":\"string\",\"valueString\":\"Single red garden gnome\",\"text\":\"Single red garden gnome\",\"boundingBox\":[2.0311,5.9274,3.5352,5.9274,3.5352,6.0608,2.0311,6.0608],\"page\":1,\"confidence\":0.874},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"58\",\"text\":\"58\",\"boundingBox\":[1.2141,5.9312,1.3437,5.9312,1.3437,6.0306,1.2141,6.0306],\"page\":1,\"confidence\":0.561},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.9312,1.7491,5.9312,1.7491,6.0306,1.6943,6.0306],\"page\":1,\"confidence\":0.771},\"Tax\":{\"type\":\"number\",\"valueNumber\":10.58,\"text\":\"$10.58\",\"boundingBox\":[6.2822,5.9206,6.6601,5.9206,6.6601,6.0428,6.2822,6.0428],\"page\":1,\"confidence\":0.83},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":56,\"text\":\"$56.00\",\"boundingBox\":[4.4822,5.9206,4.8601,5.9206,4.8601,6.0428,4.4822,6.0428],\"page\":1,\"confidence\":0.898}},\"text\":\"58 3 
Single red garden gnome $56.00 $16.80 10% $10.58 $161.78\",\"boundingBox\":[1.2141,5.9204,7.354,5.9204,7.354,6.0608,1.2141,6.0608],\"page\":1,\"confidence\":0.812}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":267.5,\"text\":\"$267.50\",\"boundingBox\":[6.8637,6.2204,7.3548,6.2204,7.3548,6.3428,6.8637,6.3428],\"page\":1,\"confidence\":0.901},\"Description\":{\"type\":\"string\",\"valueString\":\"Artificial Tree\",\"text\":\"Artificial Tree\",\"boundingBox\":[2.0246,6.2259,2.7987,6.2259,2.7987,6.3306,2.0246,6.3306],\"page\":1,\"confidence\":0.888},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"32\",\"text\":\"32\",\"boundingBox\":[1.2114,6.2312,1.342,6.2312,1.342,6.3306,1.2114,6.3306],\"page\":1,\"confidence\":0.5},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,6.2307,1.7341,6.2307,1.7341,6.3291,1.7012,6.3291],\"page\":1,\"confidence\":0.697},\"Tax\":{\"type\":\"number\",\"valueNumber\":17.5,\"text\":\"$17.50\",\"boundingBox\":[6.2822,6.2206,6.6601,6.2206,6.6601,6.3428,6.2822,6.3428],\"page\":1,\"confidence\":0.803},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":250,\"text\":\"$250.00\",\"boundingBox\":[4.4092,6.2206,4.8601,6.2206,4.8601,6.3428,4.4092,6.3428],\"page\":1,\"confidence\":0.899}},\"text\":\"32 1 Artificial Tree $250.00 $0.00 0% $17.50 $267.50\",\"boundingBox\":[1.2114,6.2204,7.3548,6.2204,7.3548,6.3428,1.2114,6.3428],\"page\":1,\"confidence\":0.805}"]}}}]}}},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:24Z","lastUpdatedDateTime":"2021-10-26T22:39:28Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":5,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.0954,4.82,1.4465,4.82,1.4465,5.0833,1.0954,5.0833],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4465,4.82,1.9145,4.82,1.9145,5.0833,1.4465,5.0833],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9145,4.82,4.3003,4.82,4.3003,5.0833,1.9145,5.0833],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"boundingBox":[4.3003,4.82,4.9373,4.82,4.9373,5.0833,4.3003,5.0833],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9373,4.82,5.6524,4.82,5.6524,5.0833,4.9373,5.0833],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6524,4.82,6.14,4.82,6.14,5.0833,5.6524,5.0833],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.14,4.82,6.7381,4.82,6.7381,5.0833,6.14,5.0833],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7381,4.82,7.4271,4.82,7.4271,5.0833,6.7381,5.0833],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"36","boundingBox":[1.0954,5.0833,1.4465,5.0833,1.4465,5.3824,1.0954,5.3824]},{"rowIndex":1,"columnIndex":1,"text":"2","boundingBox":[1.4465,5.0833,1.9145,5.0833,1.9145,5.3824,1.4465,5.3824]},{"rowIndex":1,"columnIndex":2,"text":"Wood 
Table","boundingBox":[1.9145,5.0833,4.3003,5.0833,4.3003,5.3824,1.9145,5.3824]},{"rowIndex":1,"columnIndex":3,"text":"$395.00","boundingBox":[4.3003,5.0833,4.9373,5.0833,4.9373,5.3824,4.3003,5.3824]},{"rowIndex":1,"columnIndex":4,"text":"$0.00","boundingBox":[4.9373,5.0833,5.6524,5.0833,5.6524,5.3824,4.9373,5.3824]},{"rowIndex":1,"columnIndex":5,"text":"0%","boundingBox":[5.6524,5.0833,6.14,5.0833,6.14,5.3824,5.6524,5.3824]},{"rowIndex":1,"columnIndex":6,"text":"$55.30","boundingBox":[6.14,5.0833,6.7381,5.0833,6.7381,5.3824,6.14,5.3824]},{"rowIndex":1,"columnIndex":7,"text":"$845.30","boundingBox":[6.7381,5.0833,7.4271,5.0833,7.4271,5.3824,6.7381,5.3824]},{"rowIndex":2,"columnIndex":0,"text":"56","boundingBox":[1.0954,5.3824,1.4465,5.3824,1.4465,5.8252,1.0954,5.8252]},{"rowIndex":2,"columnIndex":1,"text":"1","boundingBox":[1.4465,5.3824,1.9145,5.3824,1.9145,5.8252,1.4465,5.8252]},{"rowIndex":2,"columnIndex":2,"text":"Rechargeable screwdriver with extra battery","boundingBox":[1.9145,5.3824,4.3003,5.3824,4.3003,5.8252,1.9145,5.8252]},{"rowIndex":2,"columnIndex":3,"text":"$312.00","boundingBox":[4.3003,5.3824,4.9373,5.3824,4.9373,5.8252,4.3003,5.8252]},{"rowIndex":2,"columnIndex":4,"text":"$31.20","boundingBox":[4.9373,5.3824,5.6524,5.3824,5.6524,5.8252,4.9373,5.8252]},{"rowIndex":2,"columnIndex":5,"text":"10%","boundingBox":[5.6524,5.3824,6.14,5.3824,6.14,5.8252,5.6524,5.8252]},{"rowIndex":2,"columnIndex":6,"text":"$19.66","boundingBox":[6.14,5.3824,6.7381,5.3824,6.7381,5.8252,6.14,5.8252]},{"rowIndex":2,"columnIndex":7,"text":"$300.46","boundingBox":[6.7381,5.3824,7.4271,5.3824,7.4336,5.8252,6.7381,5.8252]},{"rowIndex":3,"columnIndex":0,"text":"14","boundingBox":[1.0954,5.8252,1.4465,5.8252,1.4465,6.1243,1.0954,6.1243]},{"rowIndex":3,"columnIndex":1,"text":"1","boundingBox":[1.4465,5.8252,1.9145,5.8252,1.9145,6.1243,1.4465,6.1243]},{"rowIndex":3,"columnIndex":2,"text":"Bathroom Sink 
Faucet","boundingBox":[1.9145,5.8252,4.3003,5.8252,4.3003,6.1243,1.9145,6.1243]},{"rowIndex":3,"columnIndex":3,"text":"$99.00","boundingBox":[4.3003,5.8252,4.9373,5.8252,4.9373,6.1243,4.3003,6.1243]},{"rowIndex":3,"columnIndex":4,"text":"$0.00","boundingBox":[4.9373,5.8252,5.6524,5.8252,5.6524,6.1243,4.9373,6.1243]},{"rowIndex":3,"columnIndex":5,"text":"0%","boundingBox":[5.6524,5.8252,6.14,5.8252,6.14,6.1243,5.6524,6.1243]},{"rowIndex":3,"columnIndex":6,"text":"$6.93","boundingBox":[6.14,5.8252,6.7381,5.8252,6.7381,6.1243,6.14,6.1243]},{"rowIndex":3,"columnIndex":7,"text":"$105.93","boundingBox":[6.7381,5.8252,7.4336,5.8252,7.4336,6.1243,6.7381,6.1243]},{"rowIndex":4,"columnIndex":0,"text":"48","boundingBox":[1.0954,6.1243,1.4465,6.1243,1.4465,6.4234,1.1019,6.4294]},{"rowIndex":4,"columnIndex":1,"text":"3","boundingBox":[1.4465,6.1243,1.9145,6.1243,1.9145,6.4234,1.4465,6.4234]},{"rowIndex":4,"columnIndex":2,"text":"Hammer","boundingBox":[1.9145,6.1243,4.3003,6.1243,4.3003,6.4234,1.9145,6.4234]},{"rowIndex":4,"columnIndex":3,"text":"$100.00","boundingBox":[4.3003,6.1243,4.9373,6.1243,4.9373,6.4234,4.3003,6.4234]},{"rowIndex":4,"columnIndex":4,"text":"$0.00","boundingBox":[4.9373,6.1243,5.6524,6.1243,5.6524,6.4234,4.9373,6.4234]},{"rowIndex":4,"columnIndex":5,"text":"0%","boundingBox":[5.6524,6.1243,6.14,6.1243,6.14,6.4234,5.6524,6.4234]},{"rowIndex":4,"columnIndex":6,"text":"$21.00","boundingBox":[6.14,6.1243,6.7381,6.1243,6.7381,6.4234,6.14,6.4234]},{"rowIndex":4,"columnIndex":7,"text":"$321.00","boundingBox":[6.7381,6.1243,7.4336,6.1243,7.4401,6.4175,6.7381,6.4234]}],"boundingBox":[1.0898,4.8211,7.429,4.8211,7.4288,6.4277,1.0887,6.4278]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"27, place de Brazaville Roubaix Nord 59100 France","boundingBox":[5.9835,2.3511,7.0796,2.3511,7.0796,3.2384,5.9835,3.2384],"text":"27, place de Brazaville Roubaix Nord 59100 France","confidence":0.435,"type":"string"},"InvoiceTotal":{"valueNumber":1572.69,"page":1,"boundingBox":[6.5814,8.2097,7.4108,8.2097,7.4108,8.3886,6.5814,8.3886],"text":"$1572.69","confidence":0.962,"type":"number"},"CustomerName":{"page":1,"valueString":"Pal","boundingBox":[1.5849,1.6519,1.7744,1.6519,1.7744,1.7789,1.5849,1.7789],"text":"Pal","confidence":0.256,"type":"string"},"VendorName":{"page":1,"valueString":"T.T TAILWIND TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND 
TRADERS","confidence":0.954,"type":"string"},"VendorAddressRecipient":{"page":1,"valueString":"Ifiaeh","boundingBox":[5.9879,2.1499,6.2462,2.1499,6.2462,2.2384,5.9879,2.2384],"text":"Ifiaeh","confidence":0.263,"type":"string"},"InvoiceId":{"page":1,"valueString":"71864","boundingBox":[1.1266,4.1481,1.4804,4.1481,1.4804,4.248,1.1266,4.248],"text":"71864","confidence":0.966,"type":"string"},"SubTotal":{"valueNumber":1469.8,"page":1,"boundingBox":[6.8462,6.8516,7.4152,6.8516,7.4152,6.974,6.8462,6.974],"text":"$1469.80","confidence":0.971,"type":"number"},"TotalTax":{"valueNumber":102.89,"page":1,"boundingBox":[6.9241,7.8208,7.4137,7.8208,7.4137,7.9432,6.9241,7.9432],"text":"$102.89","confidence":0.963,"type":"number"},"CustomerAddressRecipient":{"page":1,"valueString":"Willie","boundingBox":[1.1222,1.6519,1.5176,1.6519,1.5176,1.7789,1.1222,1.7789],"text":"Willie","confidence":0.22,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":845.3,\"text\":\"$845.30\",\"boundingBox\":[6.8637,5.1789,7.3548,5.1789,7.3548,5.3013,6.8637,5.3013],\"page\":1,\"confidence\":0.886},\"Description\":{\"type\":\"string\",\"valueString\":\"Wood Table\",\"text\":\"Wood Table\",\"boundingBox\":[2.0249,5.1859,2.7419,5.1859,2.7419,5.2891,2.0249,5.2891],\"page\":1,\"confidence\":0.858},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"36\",\"text\":\"36\",\"boundingBox\":[1.2114,5.1897,1.3439,5.1897,1.3439,5.2891,1.2114,5.2891],\"page\":1,\"confidence\":0.721},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.1897,1.7518,5.1897,1.7518,5.2876,1.6933,5.2876],\"page\":1,\"confidence\":0.767},\"Tax\":{\"type\":\"number\",\"valueNumber\":55.3,\"text\":\"$55.30\",\"boundingBox\":[6.2822,5.1792,6.6601,5.1792,6.6601,5.3013,6.2822,5.3013],\"page\":1,\"confidence\":0.781},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":395,\"text\":\"$395.00\",\"boundingBox\":[4.4092,5.1792,4.8601,5.1792,4.8601,5.3013,4.4092,5.3013],\"page\":1,\"confidence\":0.898}},\"text\":\"36 2 Wood Table $395.00 $0.00 0% $55.30 $845.30\",\"boundingBox\":[1.2114,5.1789,7.3548,5.1789,7.3548,5.3013,1.2114,5.3013],\"page\":1,\"confidence\":0.708}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":300.46,\"text\":\"$300.46\",\"boundingBox\":[6.8637,5.4789,7.3548,5.4789,7.3548,5.6013,6.8637,5.6013],\"page\":1,\"confidence\":0.898},\"Description\":{\"type\":\"string\",\"valueString\":\"Rechargeable screwdriver with extra battery\",\"text\":\"Rechargeable screwdriver with extra battery\",\"boundingBox\":[2.0341,5.4859,4.1487,5.4859,4.1487,5.7994,2.0341,5.7994],\"page\":1,\"confidence\":0.768},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"56\",\"text\":\"56\",\"boundingBox\":[1.2141,5.4897,1.3439,5.4897,1.3439,5.5891,1.2141,5.5891],\"page\":1,\"confidence\":0.727},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,5.4892,1.7341,5.4892,1.7341,5.5876,1.7012,5.5876],\"page\":1,\"confidence\":0.712},\"Tax\":{\"type\":\"number\",\"valueNumber\":19.66,\"text\":\"$19.66\",\"boundingBox\":[6.2822,5.4792,6.6602,5.4792,6.6602,5.6013,6.2822,5.6013],\"page\":1,\"confidence\":0.786},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":312,\"text\":\"$312.00\",\"boundingBox\":[4.4092,5.4792,4.8601,5.4792,4.8601,5.6013,4.4092,5.6013],\"page\":1,\"confidence\":0.899}},\"text\":\"56 1 Rechargeable screwdriver with extra $312.00 $31.20 10% $19.66 $300.46 
battery\",\"boundingBox\":[1.2141,5.4789,7.3548,5.4789,7.3548,5.7994,1.2141,5.7994],\"page\":1,\"confidence\":0.88}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":105.93,\"text\":\"$105.93\",\"boundingBox\":[6.8637,5.9204,7.3495,5.9204,7.3495,6.0428,6.8637,6.0428],\"page\":1,\"confidence\":0.89},\"Description\":{\"type\":\"string\",\"valueString\":\"Bathroom Sink Faucet\",\"text\":\"Bathroom Sink Faucet\",\"boundingBox\":[2.0356,5.9274,3.3231,5.9274,3.3231,6.0306,2.0356,6.0306],\"page\":1,\"confidence\":0.887},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"14\",\"text\":\"14\",\"boundingBox\":[1.2183,5.9307,1.3447,5.9307,1.3447,6.0291,1.2183,6.0291],\"page\":1,\"confidence\":0.726},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,5.9307,1.7341,5.9307,1.7341,6.0291,1.7012,6.0291],\"page\":1,\"confidence\":0.731},\"Tax\":{\"type\":\"number\",\"valueNumber\":6.93,\"text\":\"$6.93\",\"boundingBox\":[6.3552,5.9206,6.6556,5.9206,6.6556,6.0428,6.3552,6.0428],\"page\":1,\"confidence\":0.802},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":99,\"text\":\"$99.00\",\"boundingBox\":[4.4822,5.9206,4.8601,5.9206,4.8601,6.0428,4.4822,6.0428],\"page\":1,\"confidence\":0.899}},\"text\":\"14 1 Bathroom Sink Faucet $99.00 $0.00 0% $6.93 $105.93\",\"boundingBox\":[1.2183,5.9204,7.3495,5.9204,7.3495,6.0428,1.2183,6.0428],\"page\":1,\"confidence\":0.864}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":321,\"text\":\"$321.00\",\"boundingBox\":[6.8637,6.2204,7.3548,6.2204,7.3548,6.3428,6.8637,6.3428],\"page\":1,\"confidence\":0.9},\"Description\":{\"type\":\"string\",\"valueString\":\"Hammer\",\"text\":\"Hammer\",\"boundingBox\":[2.0356,6.233,2.5381,6.233,2.5381,6.3306,2.0356,6.3306],\"page\":1,\"confidence\":0.9},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"48\",\"text\":\"48\",\"boundingBox\":[1.2043,6.2312,1.3437,6.2312,1.3437,6.3305,1.2043,6.3305],\"page\":1,\"confidence\":0.77},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,6.2312,1.7491,6.2312,1.7491,6.3306,1.6943,6.3306],\"page\":1,\"confidence\":0.767},\"Tax\":{\"type\":\"number\",\"valueNumber\":21,\"text\":\"$21.00\",\"boundingBox\":[6.2822,6.2206,6.6601,6.2206,6.6601,6.3428,6.2822,6.3428],\"page\":1,\"confidence\":0.801},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":100,\"text\":\"$100.00\",\"boundingBox\":[4.4092,6.2206,4.8601,6.2206,4.8601,6.3428,4.4092,6.3428],\"page\":1,\"confidence\":0.899}},\"text\":\"48 3 Hammer $100.00 $0.00 0% $21.00 
$321.00\",\"boundingBox\":[1.2043,6.2204,7.3548,6.2204,7.3548,6.3428,1.2043,6.3428],\"page\":1,\"confidence\":0.805}"]}}}]}}},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:23Z","lastUpdatedDateTime":"2021-10-26T22:39:28Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":5,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.1002,4.8223,1.4447,4.8223,1.4447,5.0827,1.1002,5.0827],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4447,4.8223,1.9128,4.8223,1.9128,5.0827,1.4447,5.0827],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9128,4.8223,4.3051,4.8223,4.3051,5.0827,1.9128,5.0827],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"boundingBox":[4.3051,4.8223,4.9356,4.8223,4.9356,5.0827,4.3051,5.0827],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9356,4.8223,5.6507,4.8223,5.6507,5.0827,4.9356,5.0827],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6507,4.8223,6.1383,4.8223,6.1383,5.0827,5.6507,5.0827],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.1383,4.8223,6.7298,4.8223,6.7298,5.0827,6.1383,5.0827],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7298,4.8223,7.4254,4.8223,7.4254,5.0827,6.7298,5.0827],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"49","boundingBox":[1.1002,5.0827,1.4447,5.0827,1.4447,5.3803,1.1002,5.3803]},{"rowIndex":1,"columnIndex":1,"text":"2","boundingBox":[1.4447,5.0827,1.9128,5.0827,1.9128,5.3803,1.4447,5.3803]},{"rowIndex":1,"columnIndex":2,"text":"Screwdriver","boundingBox":[1.9128,5.0827,4.3051,5.0827,4.3051,5.3803,1.9128,5.3803]},{"rowIndex":1,"columnIndex":3,"text":"$110.00","boundingBox":[4.3051,5.0827,4.9356,5.0827,4.9356,5.3803,4.3051,5.3803]},{"rowIndex":1,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.0827,5.6507,5.0827,5.6507,5.3803,4.9356,5.3803]},{"rowIndex":1,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.0827,6.1383,5.0827,6.1383,5.3803,5.6507,5.3803]},{"rowIndex":1,"columnIndex":6,"text":"$15.40","boundingBox":[6.1383,5.0827,6.7298,5.0827,6.7363,5.3803,6.1383,5.3803]},{"rowIndex":1,"columnIndex":7,"text":"$235.40","boundingBox":[6.7298,5.0827,7.4254,5.0827,7.4254,5.3865,6.7363,5.3803]},{"rowIndex":2,"columnIndex":0,"text":"54","boundingBox":[1.1002,5.3803,1.4447,5.3803,1.4447,5.6841,1.1002,5.6841]},{"rowIndex":2,"columnIndex":1,"text":"1","boundingBox":[1.4447,5.3803,1.9128,5.3803,1.9128,5.6841,1.4447,5.6841]},{"rowIndex":2,"columnIndex":2,"text":"Yellow Rechargeable 
screwdriver","boundingBox":[1.9128,5.3803,4.3051,5.3803,4.3051,5.6841,1.9128,5.6841]},{"rowIndex":2,"columnIndex":3,"text":"$250.00","boundingBox":[4.3051,5.3803,4.9356,5.3803,4.9356,5.6841,4.3051,5.6841]},{"rowIndex":2,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.3803,5.6507,5.3803,5.6507,5.6841,4.9356,5.6841]},{"rowIndex":2,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.3803,6.1383,5.3803,6.1383,5.6841,5.6507,5.6841]},{"rowIndex":2,"columnIndex":6,"text":"$17.50","boundingBox":[6.1383,5.3803,6.7363,5.3803,6.7363,5.6841,6.1383,5.6841]},{"rowIndex":2,"columnIndex":7,"text":"$267.50","boundingBox":[6.7363,5.3803,7.4254,5.3865,7.4254,5.6841,6.7363,5.6841]},{"rowIndex":3,"columnIndex":0,"text":"22","boundingBox":[1.1002,5.6841,1.4447,5.6841,1.4447,5.9817,1.1002,5.9817]},{"rowIndex":3,"columnIndex":1,"text":"2","boundingBox":[1.4447,5.6841,1.9128,5.6841,1.9128,5.9817,1.4447,5.9817]},{"rowIndex":3,"columnIndex":2,"text":"Steel Passage Door Knob","boundingBox":[1.9128,5.6841,4.3051,5.6841,4.3051,5.9817,1.9128,5.9817]},{"rowIndex":3,"columnIndex":3,"text":"$10.00","boundingBox":[4.3051,5.6841,4.9356,5.6841,4.9356,5.9817,4.3051,5.9817]},{"rowIndex":3,"columnIndex":4,"text":"$2.00","boundingBox":[4.9356,5.6841,5.6507,5.6841,5.6507,5.9817,4.9356,5.9817]},{"rowIndex":3,"columnIndex":5,"text":"10%","boundingBox":[5.6507,5.6841,6.1383,5.6841,6.1383,5.9817,5.6507,5.9817]},{"rowIndex":3,"columnIndex":6,"text":"$1.26","boundingBox":[6.1383,5.6841,6.7363,5.6841,6.7363,5.9817,6.1383,5.9817]},{"rowIndex":3,"columnIndex":7,"text":"$19.26","boundingBox":[6.7363,5.6841,7.4254,5.6841,7.4254,5.9817,6.7363,5.9817]},{"rowIndex":4,"columnIndex":0,"text":"40","boundingBox":[1.1002,5.9817,1.4447,5.9817,1.4447,6.2793,1.1067,6.2855]},{"rowIndex":4,"columnIndex":1,"text":"2","boundingBox":[1.4447,5.9817,1.9128,5.9817,1.9128,6.2793,1.4447,6.2793]},{"rowIndex":4,"columnIndex":2,"text":"Extractor Steal","boundingBox":[1.9128,5.9817,4.3051,5.9817,4.3051,6.2793,1.9128,6.2793]},{"rowIndex":4,"columnIndex":3,"text":"$135.00","boundingBox":[4.3051,5.9817,4.9356,5.9817,4.9356,6.2793,4.3051,6.2793]},{"rowIndex":4,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.9817,5.6507,5.9817,5.6507,6.2793,4.9356,6.2793]},{"rowIndex":4,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.9817,6.1383,5.9817,6.1383,6.2793,5.6507,6.2793]},{"rowIndex":4,"columnIndex":6,"text":"$18.90","boundingBox":[6.1383,5.9817,6.7363,5.9817,6.7363,6.2793,6.1383,6.2793]},{"rowIndex":4,"columnIndex":7,"text":"$288.90","boundingBox":[6.7363,5.9817,7.4254,5.9817,7.4254,6.2793,6.7363,6.2793]}],"boundingBox":[1.092,4.811,7.431,4.8115,7.4303,6.2884,1.0906,6.2882]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"6058 Hill Street","boundingBox":[1.1274,2.1342,2.0287,2.1342,2.0287,2.2374,1.1274,2.2374],"text":"6058 Hill Street","confidence":0.4,"type":"string"},"InvoiceTotal":{"valueNumber":811.06,"page":1,"boundingBox":[6.6952,8.0682,7.413,8.0682,7.413,8.2471,6.6952,8.2471],"text":"$811.06","confidence":0.962,"type":"number"},"CustomerName":{"page":1,"valueString":"Misty Xie","boundingBox":[1.1354,1.6534,1.7819,1.6534,1.7819,1.8161,1.1354,1.8161],"text":"Misty Xie","confidence":0.38,"type":"string"},"VendorName":{"page":1,"valueString":"T.T TAILWIND TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND 
TRADERS","confidence":0.954,"type":"string"},"VendorAddressRecipient":{"page":1,"valueString":"Rloefos","boundingBox":[5.9879,2.1499,6.347,2.1499,6.347,2.2384,5.9879,2.2384],"text":"Rloefos","confidence":0.305,"type":"string"},"InvoiceId":{"page":1,"valueString":"80066","boundingBox":[1.126,4.1486,1.4796,4.1486,1.4796,4.248,1.126,4.248],"text":"80066","confidence":0.963,"type":"string"},"InvoiceDate":{"page":1,"boundingBox":[2.7256,4.1504,2.793,4.1504,2.793,4.2465,2.7256,4.2465],"text":"4","confidence":0.304,"type":"date"},"SubTotal":{"valueNumber":758,"page":1,"boundingBox":[6.9241,6.7101,7.4152,6.7101,7.4152,6.8325,6.9241,6.8325],"text":"$758.00","confidence":0.971,"type":"number"},"TotalTax":{"valueNumber":53.06,"page":1,"boundingBox":[7.002,7.6793,7.4153,7.6793,7.4153,7.8017,7.002,7.8017],"text":"$53.06","confidence":0.967,"type":"number"},"VendorAddress":{"page":1,"valueString":"376 Amador Valley Blvd.","boundingBox":[5.9843,2.3511,7.1959,2.3511,7.1959,2.464,5.9843,2.464],"text":"376 Amador Valley Blvd.","confidence":0.501,"type":"string"},"CustomerAddressRecipient":{"page":1,"valueString":"Misty Xie","boundingBox":[1.1354,1.6534,1.7819,1.6534,1.7819,1.8161,1.1354,1.8161],"text":"Misty Xie","confidence":0.38,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":235.4,\"text\":\"$235.40\",\"boundingBox\":[6.8637,5.1789,7.3548,5.1789,7.3548,5.3013,6.8637,5.3013],\"page\":1,\"confidence\":0.899},\"Description\":{\"type\":\"string\",\"valueString\":\"Screwdriver\",\"text\":\"Screwdriver\",\"boundingBox\":[2.0311,5.1859,2.7142,5.1859,2.7142,5.2891,2.0311,5.2891],\"page\":1,\"confidence\":0.902},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"49\",\"text\":\"49\",\"boundingBox\":[1.2043,5.1897,1.3427,5.1897,1.3427,5.2891,1.2043,5.2891],\"page\":1,\"confidence\":0.769},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.1897,1.7518,5.1897,1.7518,5.2876,1.6933,5.2876],\"page\":1,\"confidence\":0.8},\"Tax\":{\"type\":\"number\",\"valueNumber\":15.4,\"text\":\"$15.40\",\"boundingBox\":[6.2822,5.1792,6.6601,5.1792,6.6601,5.3013,6.2822,5.3013],\"page\":1,\"confidence\":0.693},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":110,\"text\":\"$110.00\",\"boundingBox\":[4.4092,5.1792,4.8601,5.1792,4.8601,5.3013,4.4092,5.3013],\"page\":1,\"confidence\":0.888}},\"text\":\"49 2 Screwdriver $110.00 $0.00 0% $15.40 $235.40\",\"boundingBox\":[1.2043,5.1789,7.3548,5.1789,7.3548,5.3013,1.2043,5.3013],\"page\":1,\"confidence\":0.7}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":267.5,\"text\":\"$267.50\",\"boundingBox\":[6.8637,5.4789,7.3548,5.4789,7.3548,5.6013,6.8637,5.6013],\"page\":1,\"confidence\":0.902},\"Description\":{\"type\":\"string\",\"valueString\":\"Yellow Rechargeable screwdriver\",\"text\":\"Yellow Rechargeable 
screwdriver\",\"boundingBox\":[2.0244,5.4859,3.97,5.4859,3.97,5.6193,2.0244,5.6193],\"page\":1,\"confidence\":0.828},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"54\",\"text\":\"54\",\"boundingBox\":[1.2141,5.4915,1.3447,5.4915,1.3447,5.5891,1.2141,5.5891],\"page\":1,\"confidence\":0.73},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,5.4892,1.7341,5.4892,1.7341,5.5876,1.7012,5.5876],\"page\":1,\"confidence\":0.746},\"Tax\":{\"type\":\"number\",\"valueNumber\":17.5,\"text\":\"$17.50\",\"boundingBox\":[6.2822,5.4792,6.6601,5.4792,6.6601,5.6013,6.2822,5.6013],\"page\":1,\"confidence\":0.697},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":250,\"text\":\"$250.00\",\"boundingBox\":[4.4092,5.4792,4.8601,5.4792,4.8601,5.6013,4.4092,5.6013],\"page\":1,\"confidence\":0.898}},\"text\":\"54 1 Yellow Rechargeable screwdriver $250.00 $0.00 0% $17.50 $267.50\",\"boundingBox\":[1.2141,5.4789,7.3548,5.4789,7.3548,5.6193,1.2141,5.6193],\"page\":1,\"confidence\":0.847}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":19.26,\"text\":\"$19.26\",\"boundingBox\":[6.9416,5.7789,7.3548,5.7789,7.3548,5.9013,6.9416,5.9013],\"page\":1,\"confidence\":0.902},\"Description\":{\"type\":\"string\",\"valueString\":\"Steel Passage Door Knob\",\"text\":\"Steel Passage Door Knob\",\"boundingBox\":[2.0311,5.7859,3.5123,5.7859,3.5123,5.9193,2.0311,5.9193],\"page\":1,\"confidence\":0.884},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"22\",\"text\":\"22\",\"boundingBox\":[1.2105,5.7897,1.342,5.7897,1.342,5.8876,1.2105,5.8876],\"page\":1,\"confidence\":0.731},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.7897,1.7518,5.7897,1.7518,5.8876,1.6933,5.8876],\"page\":1,\"confidence\":0.803},\"Tax\":{\"type\":\"number\",\"valueNumber\":1.26,\"text\":\"$1.26\",\"boundingBox\":[6.3552,5.7792,6.6602,5.7792,6.6602,5.9013,6.3552,5.9013],\"page\":1,\"confidence\":0.715},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":10,\"text\":\"$10.00\",\"boundingBox\":[4.4822,5.7792,4.8601,5.7792,4.8601,5.9013,4.4822,5.9013],\"page\":1,\"confidence\":0.885}},\"text\":\"22 2 Steel Passage Door Knob $10.00 $2.00 10% $1.26 $19.26\",\"boundingBox\":[1.2105,5.7789,7.3548,5.7789,7.3548,5.9193,1.2105,5.9193],\"page\":1,\"confidence\":0.87}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":288.9,\"text\":\"$288.90\",\"boundingBox\":[6.8637,6.0789,7.3548,6.0789,7.3548,6.2013,6.8637,6.2013],\"page\":1,\"confidence\":0.903},\"Description\":{\"type\":\"string\",\"valueString\":\"Extractor Steal\",\"text\":\"Extractor Steal\",\"boundingBox\":[2.0356,6.0859,2.8654,6.0859,2.8654,6.1891,2.0356,6.1891],\"page\":1,\"confidence\":0.9},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"40\",\"text\":\"40\",\"boundingBox\":[1.2043,6.0897,1.3437,6.0897,1.3437,6.1891,1.2043,6.1891],\"page\":1,\"confidence\":0.762},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,6.0897,1.7518,6.0897,1.7518,6.1876,1.6933,6.1876],\"page\":1,\"confidence\":0.825},\"Tax\":{\"type\":\"number\",\"valueNumber\":18.9,\"text\":\"$18.90\",\"boundingBox\":[6.2822,6.0792,6.6601,6.0792,6.6601,6.2013,6.2822,6.2013],\"page\":1,\"confidence\":0.694},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":135,\"text\":\"$135.00\",\"boundingBox\":[4.4092,6.0792,4.8601,6.0792,4.8601,6.2013,4.4092,6.2013],\"page\":1,\"confidence\":0.898}},\"text\":\"40 2 Extractor Steal $135.00 $0.00 
0% $18.90 $288.90\",\"boundingBox\":[1.2043,6.0789,7.3548,6.0789,7.3548,6.2013,1.2043,6.2013],\"page\":1,\"confidence\":0.714}"]}}}]}}},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2015/Invoice83878.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:24Z","lastUpdatedDateTime":"2021-10-26T22:39:27Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":4,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.1026,4.8206,1.4353,4.8206,1.4418,5.0808,1.1026,5.0808],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4353,4.8206,1.9115,4.8206,1.9115,5.0808,1.4418,5.0808],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9115,4.8206,4.2928,4.8206,4.2993,5.0865,1.9115,5.0808],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"boundingBox":[4.2928,4.8206,4.9322,4.8206,4.9322,5.0865,4.2993,5.0865],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9322,4.8206,5.6433,4.8206,5.6498,5.0808,4.9322,5.0865],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6433,4.8206,6.1261,4.8206,6.1326,5.0808,5.6498,5.0808],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.1261,4.8206,6.7263,4.8206,6.7328,5.0808,6.1326,5.0808],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7263,4.8206,7.4309,4.8206,7.4309,5.0808,6.7328,5.0808],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"47","boundingBox":[1.1026,5.0808,1.4418,5.0808,1.4418,5.3814,1.1026,5.3814]},{"rowIndex":1,"columnIndex":1,"text":"1","boundingBox":[1.4418,5.0808,1.9115,5.0808,1.9115,5.3814,1.4418,5.3814]},{"rowIndex":1,"columnIndex":2,"text":"Multi Function Drill","boundingBox":[1.9115,5.0808,4.2993,5.0865,4.2993,5.3814,1.9115,5.3814]},{"rowIndex":1,"columnIndex":3,"text":"$159.00","boundingBox":[4.2993,5.0865,4.9322,5.0865,4.9322,5.3814,4.2993,5.3814]},{"rowIndex":1,"columnIndex":4,"text":"$0.00","boundingBox":[4.9322,5.0865,5.6498,5.0808,5.6498,5.3814,4.9322,5.3814]},{"rowIndex":1,"columnIndex":5,"text":"0%","boundingBox":[5.6498,5.0808,6.1326,5.0808,6.1326,5.3814,5.6498,5.3814]},{"rowIndex":1,"columnIndex":6,"text":"$11.13","boundingBox":[6.1326,5.0808,6.7328,5.0808,6.7328,5.3814,6.1326,5.3814]},{"rowIndex":1,"columnIndex":7,"text":"$170.13","boundingBox":[6.7328,5.0808,7.4309,5.0808,7.4309,5.3814,6.7328,5.3814]},{"rowIndex":2,"columnIndex":0,"text":"53","boundingBox":[1.1026,5.3814,1.4418,5.3814,1.4418,5.682,1.1026,5.682]},{"rowIndex":2,"columnIndex":1,"text":"1","boundingBox":[1.4418,5.3814,1.9115,5.3814,1.9115,5.682,1.4418,5.682]},{"rowIndex":2,"columnIndex":2,"text":"Stainless multi-tool 
plier","boundingBox":[1.9115,5.3814,4.2993,5.3814,4.2993,5.682,1.9115,5.682]},{"rowIndex":2,"columnIndex":3,"text":"$90.00","boundingBox":[4.2993,5.3814,4.9322,5.3814,4.9322,5.682,4.2993,5.682]},{"rowIndex":2,"columnIndex":4,"text":"$0.00","boundingBox":[4.9322,5.3814,5.6498,5.3814,5.6498,5.682,4.9322,5.682]},{"rowIndex":2,"columnIndex":5,"text":"0%","boundingBox":[5.6498,5.3814,6.1326,5.3814,6.1326,5.682,5.6498,5.682]},{"rowIndex":2,"columnIndex":6,"text":"$6.30","boundingBox":[6.1326,5.3814,6.7328,5.3814,6.7328,5.682,6.1326,5.682]},{"rowIndex":2,"columnIndex":7,"text":"$96.30","boundingBox":[6.7328,5.3814,7.4309,5.3814,7.4309,5.682,6.7328,5.682]},{"rowIndex":3,"columnIndex":0,"text":"32","boundingBox":[1.1026,5.682,1.4418,5.682,1.4483,5.9827,1.1026,5.9827]},{"rowIndex":3,"columnIndex":1,"text":"3","boundingBox":[1.4418,5.682,1.9115,5.682,1.9115,5.9827,1.4483,5.9827]},{"rowIndex":3,"columnIndex":2,"text":"Artificial Tree","boundingBox":[1.9115,5.682,4.2993,5.682,4.3059,5.9827,1.9115,5.9827]},{"rowIndex":3,"columnIndex":3,"text":"$250.00","boundingBox":[4.2993,5.682,4.9322,5.682,4.9322,5.9827,4.3059,5.9827]},{"rowIndex":3,"columnIndex":4,"text":"$0.00","boundingBox":[4.9322,5.682,5.6498,5.682,5.6563,5.9827,4.9322,5.9827]},{"rowIndex":3,"columnIndex":5,"text":"0%","boundingBox":[5.6498,5.682,6.1326,5.682,6.1391,5.9827,5.6563,5.9827]},{"rowIndex":3,"columnIndex":6,"text":"$52.50","boundingBox":[6.1326,5.682,6.7328,5.682,6.7393,5.9827,6.1391,5.9827]},{"rowIndex":3,"columnIndex":7,"text":"$802.50","boundingBox":[6.7328,5.682,7.4309,5.682,7.4309,5.9827,6.7393,5.9827]}],"boundingBox":[1.0898,4.8177,7.4518,4.8177,7.4519,5.9923,1.089,5.9918]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"Straße 242 Stuttgart Saarland 70511 Germany","boundingBox":[5.9827,2.3499,7.0809,2.3499,7.0809,3.264,5.9827,3.264],"text":"Straße 242 Stuttgart Saarland 70511 Germany","confidence":0.371,"type":"string"},"ShippingAddress":{"page":1,"valueString":"4SJ","boundingBox":[3.2434,2.638,3.4241,2.638,3.4241,2.7374,3.2434,2.7374],"text":"4SJ","confidence":0.364,"type":"string"},"InvoiceTotal":{"valueNumber":1068.93,"page":1,"boundingBox":[6.5814,7.7682,7.4053,7.7682,7.4053,7.9471,6.5814,7.9471],"text":"$1068.93","confidence":0.962,"type":"number"},"CustomerName":{"page":1,"valueString":"Connie Liang","boundingBox":[1.1277,1.6534,2.0641,1.6534,2.0641,1.8161,1.1277,1.8161],"text":"Connie Liang","confidence":0.352,"type":"string"},"VendorName":{"page":1,"valueString":"T.T TAILWIND TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND TRADERS","confidence":0.954,"type":"string"},"InvoiceId":{"page":1,"valueString":"83878","boundingBox":[1.126,4.1486,1.4794,4.1486,1.4794,4.248,1.126,4.248],"text":"83878","confidence":0.968,"type":"string"},"InvoiceDate":{"page":1,"boundingBox":[2.7256,4.1448,3.7844,4.1448,3.7844,4.248,2.7256,4.248],"text":"4 November 2015","confidence":0.363,"valueDate":"2015-11-04","type":"date"},"SubTotal":{"valueNumber":999,"page":1,"boundingBox":[6.9241,6.4101,7.4152,6.4101,7.4152,6.5325,6.9241,6.5325],"text":"$999.00","confidence":0.973,"type":"number"},"TotalTax":{"valueNumber":69.93,"page":1,"boundingBox":[7.002,7.3793,7.41,7.3793,7.41,7.5017,7.002,7.5017],"text":"$69.93","confidence":0.968,"type":"number"},"CustomerAddressRecipient":{"page":1,"valueString":"Mrurc Potsdamer","boundingBox":[5.9879,2.1558,6.5191,2.1558,6.5191,2.4384,5.9879,2.4384],"text":"Mrurc 
Potsdamer","confidence":0.426,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":170.13,\"text\":\"$170.13\",\"boundingBox\":[6.8637,5.1789,7.3495,5.1789,7.3495,5.3013,6.8637,5.3013],\"page\":1,\"confidence\":0.902},\"Description\":{\"type\":\"string\",\"valueString\":\"Multi Function Drill\",\"text\":\"Multi Function Drill\",\"boundingBox\":[2.0356,5.1859,3.1514,5.1859,3.1514,5.2891,2.0356,5.2891],\"page\":1,\"confidence\":0.892},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"47\",\"text\":\"47\",\"boundingBox\":[1.2043,5.1915,1.3431,5.1915,1.3431,5.2876,1.2043,5.2876],\"page\":1,\"confidence\":0.702},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,5.1892,1.7341,5.1892,1.7341,5.2876,1.7012,5.2876],\"page\":1,\"confidence\":0.779},\"Tax\":{\"type\":\"number\",\"valueNumber\":11.13,\"text\":\"$11.13\",\"boundingBox\":[6.2822,5.1792,6.6556,5.1792,6.6556,5.3013,6.2822,5.3013],\"page\":1,\"confidence\":0.898},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":159,\"text\":\"$159.00\",\"boundingBox\":[4.4092,5.1792,4.8601,5.1792,4.8601,5.3013,4.4092,5.3013],\"page\":1,\"confidence\":0.899}},\"text\":\"47 1 Multi Function Drill $159.00 $0.00 0% $11.13 $170.13\",\"boundingBox\":[1.2043,5.1789,7.3495,5.1789,7.3495,5.3013,1.2043,5.3013],\"page\":1,\"confidence\":0.808}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":96.3,\"text\":\"$96.30\",\"boundingBox\":[6.9416,5.4789,7.3548,5.4789,7.3548,5.6013,6.9416,5.6013],\"page\":1,\"confidence\":0.934},\"Description\":{\"type\":\"string\",\"valueString\":\"Stainless multi-tool plier\",\"text\":\"Stainless multi-tool plier\",\"boundingBox\":[2.0311,5.4859,3.4686,5.4859,3.4686,5.6186,2.0311,5.6186],\"page\":1,\"confidence\":0.883},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"53\",\"text\":\"53\",\"boundingBox\":[1.2141,5.4897,1.3393,5.4897,1.3393,5.5891,1.2141,5.5891],\"page\":1,\"confidence\":0.695},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,5.4892,1.7341,5.4892,1.7341,5.5876,1.7012,5.5876],\"page\":1,\"confidence\":0.798},\"Tax\":{\"type\":\"number\",\"valueNumber\":6.3,\"text\":\"$6.30\",\"boundingBox\":[6.3552,5.4792,6.6601,5.4792,6.6601,5.6013,6.3552,5.6013],\"page\":1,\"confidence\":0.899},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":90,\"text\":\"$90.00\",\"boundingBox\":[4.4822,5.4792,4.8601,5.4792,4.8601,5.6013,4.4822,5.6013],\"page\":1,\"confidence\":0.9}},\"text\":\"53 1 Stainless multi-tool plier $90.00 $0.00 0% $6.30 $96.30\",\"boundingBox\":[1.2141,5.4789,7.3548,5.4789,7.3548,5.6186,1.2141,5.6186],\"page\":1,\"confidence\":0.804}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":802.5,\"text\":\"$802.50\",\"boundingBox\":[6.8637,5.7789,7.3548,5.7789,7.3548,5.9013,6.8637,5.9013],\"page\":1,\"confidence\":0.958},\"Description\":{\"type\":\"string\",\"valueString\":\"Artificial Tree\",\"text\":\"Artificial 
Tree\",\"boundingBox\":[2.0246,5.7844,2.7987,5.7844,2.7987,5.8891,2.0246,5.8891],\"page\":1,\"confidence\":0.898},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"32\",\"text\":\"32\",\"boundingBox\":[1.2114,5.7897,1.342,5.7897,1.342,5.8891,1.2114,5.8891],\"page\":1,\"confidence\":0.695},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.7897,1.7491,5.7897,1.7491,5.8891,1.6943,5.8891],\"page\":1,\"confidence\":0.8},\"Tax\":{\"type\":\"number\",\"valueNumber\":52.5,\"text\":\"$52.50\",\"boundingBox\":[6.2822,5.7792,6.6601,5.7792,6.6601,5.9013,6.2822,5.9013],\"page\":1,\"confidence\":0.898},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":250,\"text\":\"$250.00\",\"boundingBox\":[4.4092,5.7792,4.8601,5.7792,4.8601,5.9013,4.4092,5.9013],\"page\":1,\"confidence\":0.902}},\"text\":\"32 3 Artificial Tree $250.00 $0.00 0% $52.50 $802.50\",\"boundingBox\":[1.2114,5.7789,7.3548,5.7789,7.3548,5.9013,1.2114,5.9013],\"page\":1,\"confidence\":0.804}"]}}}]}}},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice102247.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:23Z","lastUpdatedDateTime":"2021-10-26T22:39:28Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":4,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.1003,4.8201,1.4451,4.8201,1.4451,5.0852,1.1003,5.0852],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4451,4.8201,1.9135,4.8201,1.9135,5.0852,1.4451,5.0852],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9135,4.8201,4.301,4.8201,4.301,5.0852,1.9135,5.0852],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"boundingBox":[4.301,4.8201,4.9385,4.8201,4.9385,5.0852,4.301,5.0852],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9385,4.8201,5.6541,4.8201,5.6541,5.0852,4.9385,5.0852],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6541,4.8201,6.129,4.8201,6.1355,5.0852,5.6541,5.0852],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.129,4.8201,6.7275,4.8201,6.734,5.0852,6.1355,5.0852],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7275,4.8201,7.4236,4.8201,7.4236,5.0852,6.734,5.0852],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"43","boundingBox":[1.1003,5.0852,1.4451,5.0852,1.4451,5.3848,1.1003,5.3848]},{"rowIndex":1,"columnIndex":1,"text":"2","boundingBox":[1.4451,5.0852,1.9135,5.0852,1.9135,5.3848,1.4451,5.3848]},{"rowIndex":1,"columnIndex":2,"text":"Big Metal 
Shelving","boundingBox":[1.9135,5.0852,4.301,5.0852,4.301,5.3848,1.9135,5.3848]},{"rowIndex":1,"columnIndex":3,"text":"$99.00","boundingBox":[4.301,5.0852,4.9385,5.0852,4.9385,5.3848,4.301,5.3848]},{"rowIndex":1,"columnIndex":4,"text":"$0.00","boundingBox":[4.9385,5.0852,5.6541,5.0852,5.6541,5.3848,4.9385,5.3848]},{"rowIndex":1,"columnIndex":5,"text":"0%","boundingBox":[5.6541,5.0852,6.1355,5.0852,6.1355,5.3848,5.6541,5.3848]},{"rowIndex":1,"columnIndex":6,"text":"$13.86","boundingBox":[6.1355,5.0852,6.734,5.0852,6.734,5.3848,6.1355,5.3848]},{"rowIndex":1,"columnIndex":7,"text":"$211.86","boundingBox":[6.734,5.0852,7.4236,5.0852,7.4236,5.3791,6.734,5.3848]},{"rowIndex":2,"columnIndex":0,"text":"30","boundingBox":[1.1003,5.3848,1.4451,5.3848,1.4451,5.6845,1.1003,5.6845]},{"rowIndex":2,"columnIndex":1,"text":"2","boundingBox":[1.4451,5.3848,1.9135,5.3848,1.9135,5.6845,1.4451,5.6845]},{"rowIndex":2,"columnIndex":2,"text":"Gardering","boundingBox":[1.9135,5.3848,4.301,5.3848,4.301,5.6845,1.9135,5.6845]},{"rowIndex":2,"columnIndex":3,"text":"$10.00","boundingBox":[4.301,5.3848,4.9385,5.3848,4.9385,5.6845,4.301,5.6845]},{"rowIndex":2,"columnIndex":4,"text":"$0.00","boundingBox":[4.9385,5.3848,5.6541,5.3848,5.6541,5.6845,4.9385,5.6845]},{"rowIndex":2,"columnIndex":5,"text":"0%","boundingBox":[5.6541,5.3848,6.1355,5.3848,6.1355,5.6845,5.6541,5.6845]},{"rowIndex":2,"columnIndex":6,"text":"$1.40","boundingBox":[6.1355,5.3848,6.734,5.3848,6.734,5.6845,6.1355,5.6845]},{"rowIndex":2,"columnIndex":7,"text":"$21.40","boundingBox":[6.734,5.3848,7.4236,5.3791,7.4236,5.6845,6.734,5.6845]},{"rowIndex":3,"columnIndex":0,"text":"26","boundingBox":[1.1003,5.6845,1.4451,5.6845,1.4451,5.9784,1.1003,5.9784]},{"rowIndex":3,"columnIndex":1,"text":"3","boundingBox":[1.4451,5.6845,1.9135,5.6845,1.9135,5.9784,1.4451,5.9784]},{"rowIndex":3,"columnIndex":2,"text":"Craftsman 100 ft. 
L x 5/8 in.","boundingBox":[1.9135,5.6845,4.301,5.6845,4.301,5.9784,1.9135,5.9784]},{"rowIndex":3,"columnIndex":3,"text":"$100.00","boundingBox":[4.301,5.6845,4.9385,5.6845,4.9385,5.9784,4.301,5.9784]},{"rowIndex":3,"columnIndex":4,"text":"$30.00","boundingBox":[4.9385,5.6845,5.6541,5.6845,5.6541,5.9842,4.9385,5.9784]},{"rowIndex":3,"columnIndex":5,"text":"10%","boundingBox":[5.6541,5.6845,6.1355,5.6845,6.1355,5.9842,5.6541,5.9842]},{"rowIndex":3,"columnIndex":6,"text":"$18.90","boundingBox":[6.1355,5.6845,6.734,5.6845,6.7405,5.9842,6.1355,5.9842]},{"rowIndex":3,"columnIndex":7,"text":"$288.90","boundingBox":[6.734,5.6845,7.4236,5.6845,7.4236,5.9842,6.7405,5.9842]}],"boundingBox":[1.0917,4.8203,7.4343,4.8206,7.4337,5.9877,1.0899,5.9869]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"Kampstr 9859","boundingBox":[1.1325,2.138,1.9359,2.138,1.9359,2.2669,1.1325,2.2669],"text":"Kampstr 9859","confidence":0.265,"type":"string"},"ShippingAddress":{"page":1,"valueString":"80074","boundingBox":[2.966,2.638,3.3203,2.638,3.3203,2.7374,2.966,2.7374],"text":"80074","confidence":0.274,"type":"string"},"InvoiceTotal":{"valueNumber":522.16,"page":1,"boundingBox":[6.6952,7.7682,7.413,7.7682,7.413,7.9471,6.6952,7.9471],"text":"$522.16","confidence":0.961,"type":"number"},"CustomerName":{"page":1,"valueString":"Colin Cai","boundingBox":[1.1277,1.6519,1.7581,1.6519,1.7581,1.7789,1.1277,1.7789],"text":"Colin Cai","confidence":0.443,"type":"string"},"VendorName":{"page":1,"valueString":"T.T TAILWIND TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND TRADERS","confidence":0.954,"type":"string"},"InvoiceId":{"page":1,"valueString":"102247","boundingBox":[1.1351,4.1481,1.5518,4.1481,1.5518,4.248,1.1351,4.248],"text":"102247","confidence":0.966,"type":"string"},"InvoiceDate":{"page":1,"boundingBox":[3.2074,4.1481,3.4868,4.1481,3.4868,4.248,3.2074,4.248],"text":"2016","confidence":0.466,"type":"date"},"SubTotal":{"valueNumber":488,"page":1,"boundingBox":[6.9241,6.4101,7.4152,6.4101,7.4152,6.5325,6.9241,6.5325],"text":"$488.00","confidence":0.972,"type":"number"},"TotalTax":{"valueNumber":34.16,"page":1,"boundingBox":[7.002,7.3793,7.4153,7.3793,7.4153,7.5017,7.002,7.5017],"text":"$34.16","confidence":0.965,"type":"number"},"VendorAddress":{"page":1,"valueString":"Phata 8858 V. Street London England W1Y 3RA United Kingdom","boundingBox":[5.9789,2.1511,6.7971,2.1511,6.7971,3.264,5.9789,3.264],"text":"Phata 8858 V. 
Street London England W1Y 3RA United Kingdom","confidence":0.371,"type":"string"},"CustomerAddressRecipient":{"page":1,"valueString":"Colin Cai","boundingBox":[1.1277,1.6519,1.7581,1.6519,1.7581,1.7789,1.1277,1.7789],"text":"Colin Cai","confidence":0.443,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":211.86,\"text\":\"$211.86\",\"boundingBox\":[6.8637,5.1789,7.3548,5.1789,7.3548,5.3013,6.8637,5.3013],\"page\":1,\"confidence\":0.893},\"Description\":{\"type\":\"string\",\"valueString\":\"Big Metal Shelving\",\"text\":\"Big Metal Shelving\",\"boundingBox\":[2.0356,5.1859,3.1214,5.1859,3.1214,5.3193,2.0356,5.3193],\"page\":1,\"confidence\":0.894},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"43\",\"text\":\"43\",\"boundingBox\":[1.2043,5.1897,1.3393,5.1897,1.3393,5.2891,1.2043,5.2891],\"page\":1,\"confidence\":0.73},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.1897,1.7518,5.1897,1.7518,5.2876,1.6933,5.2876],\"page\":1,\"confidence\":0.763},\"Tax\":{\"type\":\"number\",\"valueNumber\":13.86,\"text\":\"$13.86\",\"boundingBox\":[6.2822,5.1792,6.6602,5.1792,6.6602,5.3013,6.2822,5.3013],\"page\":1,\"confidence\":0.744},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":99,\"text\":\"$99.00\",\"boundingBox\":[4.4822,5.1792,4.8601,5.1792,4.8601,5.3013,4.4822,5.3013],\"page\":1,\"confidence\":0.893}},\"text\":\"43 2 Big Metal Shelving $99.00 $0.00 0% $13.86 $211.86\",\"boundingBox\":[1.2043,5.1789,7.3548,5.1789,7.3548,5.3193,1.2043,5.3193],\"page\":1,\"confidence\":0.807}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":21.4,\"text\":\"$21.40\",\"boundingBox\":[6.9416,5.4789,7.3548,5.4789,7.3548,5.6013,6.9416,5.6013],\"page\":1,\"confidence\":0.899},\"Description\":{\"type\":\"string\",\"valueString\":\"Gardering\",\"text\":\"Gardering\",\"boundingBox\":[2.0294,5.4859,2.6072,5.4859,2.6072,5.6193,2.0294,5.6193],\"page\":1,\"confidence\":0.955},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"30\",\"text\":\"30\",\"boundingBox\":[1.2114,5.4897,1.3437,5.4897,1.3437,5.5891,1.2114,5.5891],\"page\":1,\"confidence\":0.712},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.4897,1.7518,5.4897,1.7518,5.5876,1.6933,5.5876],\"page\":1,\"confidence\":0.763},\"Tax\":{\"type\":\"number\",\"valueNumber\":1.4,\"text\":\"$1.40\",\"boundingBox\":[6.3552,5.4792,6.6601,5.4792,6.6601,5.6013,6.3552,5.6013],\"page\":1,\"confidence\":0.78},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":10,\"text\":\"$10.00\",\"boundingBox\":[4.4822,5.4792,4.8601,5.4792,4.8601,5.6013,4.4822,5.6013],\"page\":1,\"confidence\":0.887}},\"text\":\"30 2 Gardering $10.00 $0.00 0% $1.40 $21.40\",\"boundingBox\":[1.2114,5.4789,7.3548,5.4789,7.3548,5.6193,1.2114,5.6193],\"page\":1,\"confidence\":0.701}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":288.9,\"text\":\"$288.90\",\"boundingBox\":[6.8637,5.7789,7.3548,5.7789,7.3548,5.9013,6.8637,5.9013],\"page\":1,\"confidence\":0.899},\"Description\":{\"type\":\"string\",\"valueString\":\"Craftsman 100 ft. L x 5/8 in.\",\"text\":\"Craftsman 100 ft. 
L x 5/8 in.\",\"boundingBox\":[2.0294,5.7844,3.6439,5.7844,3.6439,5.9032,2.0294,5.9032],\"page\":1,\"confidence\":0.887},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"26\",\"text\":\"26\",\"boundingBox\":[1.2105,5.7897,1.3439,5.7897,1.3439,5.8891,1.2105,5.8891],\"page\":1,\"confidence\":0.724},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.7897,1.7491,5.7897,1.7491,5.8891,1.6943,5.8891],\"page\":1,\"confidence\":0.769},\"Tax\":{\"type\":\"number\",\"valueNumber\":18.9,\"text\":\"$18.90\",\"boundingBox\":[6.2822,5.7792,6.6601,5.7792,6.6601,5.9013,6.2822,5.9013],\"page\":1,\"confidence\":0.718},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":100,\"text\":\"$100.00\",\"boundingBox\":[4.4092,5.7792,4.8601,5.7792,4.8601,5.9013,4.4092,5.9013],\"page\":1,\"confidence\":0.894}},\"text\":\"26 3 Craftsman 100 ft. L x 5/8 in. $100.00 $30.00 10% $18.90 $288.90\",\"boundingBox\":[1.2105,5.7789,7.3548,5.7789,7.3548,5.9032,1.2105,5.9032],\"page\":1,\"confidence\":0.835}"]}}}]}}},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:25Z","lastUpdatedDateTime":"2021-10-26T22:39:31Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":5,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.102,4.8187,1.4403,4.8187,1.4403,5.0838,1.102,5.0838],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4403,4.8187,1.9087,4.8187,1.9087,5.0838,1.4403,5.0838],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9087,4.8187,4.3027,4.8187,4.3027,5.0838,1.9087,5.0838],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"boundingBox":[4.3027,4.8187,4.9402,4.8187,4.9402,5.0838,4.3027,5.0838],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9402,4.8187,5.6493,4.8187,5.6493,5.0838,4.9402,5.0838],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6493,4.8187,6.1307,4.8187,6.1307,5.0838,5.6493,5.0838],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.1307,4.8187,6.7357,4.8187,6.7357,5.0838,6.1307,5.0838],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7357,4.8187,7.4318,4.8248,7.4318,5.0838,6.7357,5.0838],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"61","boundingBox":[1.102,5.0838,1.4403,5.0838,1.4403,5.3798,1.102,5.3798]},{"rowIndex":1,"columnIndex":1,"text":"2","boundingBox":[1.4403,5.0838,1.9087,5.0838,1.9087,5.3798,1.4403,5.3798]},{"rowIndex":1,"columnIndex":2,"text":"One sat on shoe 
gnome","boundingBox":[1.9087,5.0838,4.3027,5.0838,4.3027,5.3798,1.9087,5.3798]},{"rowIndex":1,"columnIndex":3,"text":"$54.00","boundingBox":[4.3027,5.0838,4.9402,5.0838,4.9402,5.3798,4.3027,5.3798]},{"rowIndex":1,"columnIndex":4,"text":"$10.80","boundingBox":[4.9402,5.0838,5.6493,5.0838,5.6493,5.3798,4.9402,5.3798]},{"rowIndex":1,"columnIndex":5,"text":"10%","boundingBox":[5.6493,5.0838,6.1307,5.0838,6.1307,5.3798,5.6493,5.3798]},{"rowIndex":1,"columnIndex":6,"text":"$6.80","boundingBox":[6.1307,5.0838,6.7357,5.0838,6.7357,5.3798,6.1307,5.3798]},{"rowIndex":1,"columnIndex":7,"text":"$104.00","boundingBox":[6.7357,5.0838,7.4318,5.0838,7.4318,5.3798,6.7357,5.3798]},{"rowIndex":2,"columnIndex":0,"text":"2","boundingBox":[1.102,5.3798,1.4403,5.3798,1.4403,5.682,1.102,5.682]},{"rowIndex":2,"columnIndex":1,"text":"2","boundingBox":[1.4403,5.3798,1.9087,5.3798,1.9087,5.682,1.4403,5.682]},{"rowIndex":2,"columnIndex":2,"text":"Refrigerator 1.7 cu. ft. 110 watts","boundingBox":[1.9087,5.3798,4.3027,5.3798,4.3027,5.682,1.9087,5.682]},{"rowIndex":2,"columnIndex":3,"text":"$200.00","boundingBox":[4.3027,5.3798,4.9402,5.3798,4.9402,5.682,4.3027,5.682]},{"rowIndex":2,"columnIndex":4,"text":"$0.00","boundingBox":[4.9402,5.3798,5.6493,5.3798,5.6493,5.682,4.9402,5.682]},{"rowIndex":2,"columnIndex":5,"text":"0%","boundingBox":[5.6493,5.3798,6.1307,5.3798,6.1307,5.682,5.6493,5.682]},{"rowIndex":2,"columnIndex":6,"text":"$28.00","boundingBox":[6.1307,5.3798,6.7357,5.3798,6.7357,5.682,6.1307,5.682]},{"rowIndex":2,"columnIndex":7,"text":"$428.00","boundingBox":[6.7357,5.3798,7.4318,5.3798,7.4318,5.682,6.7357,5.682]},{"rowIndex":3,"columnIndex":0,"text":"31","boundingBox":[1.102,5.682,1.4403,5.682,1.4403,5.978,1.102,5.978]},{"rowIndex":3,"columnIndex":1,"text":"3","boundingBox":[1.4403,5.682,1.9087,5.682,1.9087,5.978,1.4403,5.978]},{"rowIndex":3,"columnIndex":2,"text":"Celebrations C9","boundingBox":[1.9087,5.682,4.3027,5.682,4.3027,5.9842,1.9087,5.978]},{"rowIndex":3,"columnIndex":3,"text":"$10.00","boundingBox":[4.3027,5.682,4.9402,5.682,4.9402,5.9842,4.3027,5.9842]},{"rowIndex":3,"columnIndex":4,"text":"$3.00","boundingBox":[4.9402,5.682,5.6493,5.682,5.6493,5.9842,4.9402,5.9842]},{"rowIndex":3,"columnIndex":5,"text":"10%","boundingBox":[5.6493,5.682,6.1307,5.682,6.1307,5.9842,5.6493,5.9842]},{"rowIndex":3,"columnIndex":6,"text":"$1.89","boundingBox":[6.1307,5.682,6.7357,5.682,6.7357,5.9842,6.1307,5.9842]},{"rowIndex":3,"columnIndex":7,"text":"$28.89","boundingBox":[6.7357,5.682,7.4318,5.682,7.4318,5.9842,6.7357,5.9842]},{"rowIndex":4,"columnIndex":0,"text":"26","boundingBox":[1.102,5.978,1.4403,5.978,1.4468,6.2802,1.102,6.2864]},{"rowIndex":4,"columnIndex":1,"text":"1","boundingBox":[1.4403,5.978,1.9087,5.978,1.9152,6.2802,1.4468,6.2802]},{"rowIndex":4,"columnIndex":2,"text":"Craftsman 100 ft. 
L x 5/8 in.","boundingBox":[1.9087,5.978,4.3027,5.9842,4.3027,6.2802,1.9152,6.2802]},{"rowIndex":4,"columnIndex":3,"text":"$100.00","boundingBox":[4.3027,5.9842,4.9402,5.9842,4.9402,6.2802,4.3027,6.2802]},{"rowIndex":4,"columnIndex":4,"text":"$10.00","boundingBox":[4.9402,5.9842,5.6493,5.9842,5.6493,6.2802,4.9402,6.2802]},{"rowIndex":4,"columnIndex":5,"text":"10%","boundingBox":[5.6493,5.9842,6.1307,5.9842,6.1372,6.2802,5.6493,6.2802]},{"rowIndex":4,"columnIndex":6,"text":"$6.30","boundingBox":[6.1307,5.9842,6.7357,5.9842,6.7422,6.2802,6.1372,6.2802]},{"rowIndex":4,"columnIndex":7,"text":"$96.30","boundingBox":[6.7357,5.9842,7.4318,5.9842,7.4318,6.2802,6.7422,6.2802]}],"boundingBox":[1.09,4.8198,7.4327,4.8203,7.4323,6.2887,1.0887,6.2881]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"2957 Tri-state Avenue Cambridge","boundingBox":[1.1272,2.1342,3.6131,2.1342,3.6131,2.2676,1.1272,2.2676],"text":"2957 Tri-state Avenue Cambridge","confidence":0.381,"type":"string"},"InvoiceTotal":{"valueNumber":657.19,"page":1,"boundingBox":[6.6952,8.0682,7.4109,8.0682,7.4109,8.2471,6.6952,8.2471],"text":"$657.19","confidence":0.962,"type":"number"},"CustomerName":{"page":1,"valueString":"Roy","boundingBox":[1.1354,1.6587,1.3971,1.6587,1.3971,1.8161,1.1354,1.8161],"text":"Roy","confidence":0.223,"type":"string"},"VendorName":{"page":1,"valueString":"T.T TAILWIND TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND TRADERS","confidence":0.952,"type":"string"},"VendorAddressRecipient":{"page":1,"valueString":"Rgerlakmog","boundingBox":[5.9879,2.1511,6.5796,2.1511,6.5796,2.264,5.9879,2.264],"text":"Rgerlakmog","confidence":0.254,"type":"string"},"InvoiceId":{"page":1,"valueString":"92549","boundingBox":[1.1263,4.1486,1.4784,4.1486,1.4784,4.248,1.1263,4.248],"text":"92549","confidence":0.965,"type":"string"},"InvoiceDate":{"page":1,"boundingBox":[2.7306,4.1486,2.7921,4.1486,2.7921,4.2479,2.7306,4.2479],"text":"8","confidence":0.276,"type":"date"},"SubTotal":{"valueNumber":614.2,"page":1,"boundingBox":[6.9241,6.7101,7.4152,6.7101,7.4152,6.8325,6.9241,6.8325],"text":"$614.20","confidence":0.97,"type":"number"},"TotalTax":{"valueNumber":42.99,"page":1,"boundingBox":[7.002,7.6793,7.4138,7.6793,7.4138,7.8017,7.002,7.8017],"text":"$42.99","confidence":0.969,"type":"number"},"VendorAddress":{"page":1,"valueString":"4186 Silver Oaks Pl. Newport Beach California 92625 United States","boundingBox":[5.9782,2.3511,6.9497,2.3511,6.9497,3.2384,5.9782,3.2384],"text":"4186 Silver Oaks Pl. 
Newport Beach California 92625 United States","confidence":0.401,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":104,\"text\":\"$104.00\",\"boundingBox\":[6.8637,5.1789,7.3548,5.1789,7.3548,5.3013,6.8637,5.3013],\"page\":1,\"confidence\":0.888},\"Description\":{\"type\":\"string\",\"valueString\":\"One sat on shoe gnome\",\"text\":\"One sat on shoe gnome\",\"boundingBox\":[2.0294,5.1859,3.4491,5.1859,3.4491,5.3193,2.0294,5.3193],\"page\":1,\"confidence\":0.85},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"61\",\"text\":\"61\",\"boundingBox\":[1.2106,5.1892,1.3242,5.1892,1.3242,5.2891,1.2106,5.2891],\"page\":1,\"confidence\":0.705},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.1897,1.7518,5.1897,1.7518,5.2876,1.6933,5.2876],\"page\":1,\"confidence\":0.778},\"Tax\":{\"type\":\"number\",\"valueNumber\":6.8,\"text\":\"$6.80\",\"boundingBox\":[6.3552,5.1792,6.6601,5.1792,6.6601,5.3013,6.3552,5.3013],\"page\":1,\"confidence\":0.772},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":54,\"text\":\"$54.00\",\"boundingBox\":[4.4822,5.1792,4.8601,5.1792,4.8601,5.3013,4.4822,5.3013],\"page\":1,\"confidence\":0.839}},\"text\":\"61 2 One sat on shoe gnome $54.00 $10.80 10% $6.80 $104.00\",\"boundingBox\":[1.2106,5.1789,7.3548,5.1789,7.3548,5.3193,1.2106,5.3193],\"page\":1,\"confidence\":0.88}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":428,\"text\":\"$428.00\",\"boundingBox\":[6.8637,5.4789,7.3548,5.4789,7.3548,5.6013,6.8637,5.6013],\"page\":1,\"confidence\":0.888},\"Description\":{\"type\":\"string\",\"valueString\":\"Refrigerator 1.7 cu. ft. 110 watts\",\"text\":\"Refrigerator 1.7 cu. ft. 110 watts\",\"boundingBox\":[2.0356,5.4844,3.9092,5.4844,3.9092,5.6193,2.0356,5.6193],\"page\":1,\"confidence\":0.833},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"2\",\"text\":\"2\",\"boundingBox\":[1.247,5.4897,1.3055,5.4897,1.3055,5.5876,1.247,5.5876],\"page\":1,\"confidence\":0.566},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.4897,1.7518,5.4897,1.7518,5.5876,1.6933,5.5876],\"page\":1,\"confidence\":0.781},\"Tax\":{\"type\":\"number\",\"valueNumber\":28,\"text\":\"$28.00\",\"boundingBox\":[6.2822,5.4792,6.6601,5.4792,6.6601,5.6013,6.2822,5.6013],\"page\":1,\"confidence\":0.799},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":200,\"text\":\"$200.00\",\"boundingBox\":[4.4092,5.4792,4.8601,5.4792,4.8601,5.6013,4.4092,5.6013],\"page\":1,\"confidence\":0.862}},\"text\":\"2 2 Refrigerator 1.7 cu. ft. 
110 watts $200.00 $0.00 0% $28.00 $428.00\",\"boundingBox\":[1.247,5.4789,7.3548,5.4789,7.3548,5.6193,1.247,5.6193],\"page\":1,\"confidence\":0.882}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":28.89,\"text\":\"$28.89\",\"boundingBox\":[6.9416,5.7789,7.3534,5.7789,7.3534,5.9013,6.9416,5.9013],\"page\":1,\"confidence\":0.896},\"Description\":{\"type\":\"string\",\"valueString\":\"Celebrations C9\",\"text\":\"Celebrations C9\",\"boundingBox\":[2.0294,5.7859,2.9552,5.7859,2.9552,5.8891,2.0294,5.8891],\"page\":1,\"confidence\":0.885},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"31\",\"text\":\"31\",\"boundingBox\":[1.2114,5.7892,1.3242,5.7892,1.3242,5.8891,1.2114,5.8891],\"page\":1,\"confidence\":0.696},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.7897,1.7491,5.7897,1.7491,5.8891,1.6943,5.8891],\"page\":1,\"confidence\":0.831},\"Tax\":{\"type\":\"number\",\"valueNumber\":1.89,\"text\":\"$1.89\",\"boundingBox\":[6.3552,5.7792,6.659,5.7792,6.659,5.9013,6.3552,5.9013],\"page\":1,\"confidence\":0.801},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":10,\"text\":\"$10.00\",\"boundingBox\":[4.4822,5.7792,4.8601,5.7792,4.8601,5.9013,4.4822,5.9013],\"page\":1,\"confidence\":0.877}},\"text\":\"31 3 Celebrations C9 $10.00 $3.00 10% $1.89 $28.89\",\"boundingBox\":[1.2114,5.7789,7.3534,5.7789,7.3534,5.9013,1.2114,5.9013],\"page\":1,\"confidence\":0.804}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":96.3,\"text\":\"$96.30\",\"boundingBox\":[6.9416,6.0789,7.3548,6.0789,7.3548,6.2013,6.9416,6.2013],\"page\":1,\"confidence\":0.9},\"Description\":{\"type\":\"string\",\"valueString\":\"Craftsman 100 ft. L x 5/8 in.\",\"text\":\"Craftsman 100 ft. L x 5/8 in.\",\"boundingBox\":[2.0294,6.0844,3.6439,6.0844,3.6439,6.2032,2.0294,6.2032],\"page\":1,\"confidence\":0.853},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"26\",\"text\":\"26\",\"boundingBox\":[1.2105,6.0897,1.3439,6.0897,1.3439,6.1891,1.2105,6.1891],\"page\":1,\"confidence\":0.741},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,6.0892,1.7341,6.0892,1.7341,6.1876,1.7012,6.1876],\"page\":1,\"confidence\":0.799},\"Tax\":{\"type\":\"number\",\"valueNumber\":6.3,\"text\":\"$6.30\",\"boundingBox\":[6.3552,6.0792,6.6601,6.0792,6.6601,6.2013,6.3552,6.2013],\"page\":1,\"confidence\":0.778},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":100,\"text\":\"$100.00\",\"boundingBox\":[4.4092,6.0792,4.8601,6.0792,4.8601,6.2013,4.4092,6.2013],\"page\":1,\"confidence\":0.887}},\"text\":\"26 1 Craftsman 100 ft. L x 5/8 in. 
$100.00 $10.00 10% $6.30 $96.30\",\"boundingBox\":[1.2105,6.0789,7.3548,6.0789,7.3548,6.2032,1.2105,6.2032],\"page\":1,\"confidence\":0.882}"]}}}]}}}],"schema":[{"key":"0","name":"url","type":"string"},{"key":"1","name":"errors","type":"StructType(StructField(response,StringType,true), StructField(status,StructType(StructField(protocolVersion,StructType(StructField(protocol,StringType,true), StructField(major,IntegerType,false), StructField(minor,IntegerType,false)),true), StructField(statusCode,IntegerType,false), StructField(reasonPhrase,StringType,true)),true))"},{"key":"2","name":"invoices","type":"StructType(StructField(status,StringType,true), StructField(createdDateTime,StringType,true), StructField(lastUpdatedDateTime,StringType,true), StructField(analyzeResult,StructType(StructField(version,StringType,true), StructField(readResults,ArrayType(StructType(StructField(page,IntegerType,true), StructField(language,StringType,true), StructField(angle,DoubleType,true), StructField(width,DoubleType,true), StructField(height,DoubleType,true), StructField(unit,StringType,true), StructField(lines,ArrayType(StructType(StructField(boundingBox,ArrayType(DoubleType,true),true), StructField(text,StringType,true), StructField(words,ArrayType(StructType(StructField(boundingBox,ArrayType(DoubleType,true),true), StructField(text,StringType,true), StructField(confidence,DoubleType,true)),true),true)),true),true)),true),true), StructField(pageResults,ArrayType(StructType(StructField(page,IntegerType,true), StructField(keyValuePairs,ArrayType(StructType(StructField(key,StructType(StructField(text,StringType,true), StructField(boundingBox,ArrayType(DoubleType,true),true)),true), StructField(value,StructType(StructField(text,StringType,true), StructField(boundingBox,ArrayType(DoubleType,true),true)),true)),true),true), StructField(tables,ArrayType(StructType(StructField(rows,IntegerType,true), StructField(columns,IntegerType,true), StructField(cells,ArrayType(StructType(StructField(rowIndex,IntegerType,true), StructField(columnIndex,IntegerType,true), StructField(text,StringType,true), StructField(boundingBox,ArrayType(DoubleType,true),true), StructField(isHeader,BooleanType,true), StructField(elements,ArrayType(StringType,true),true)),true),true), StructField(boundingBox,ArrayType(DoubleType,true),true)),true),true)),true),true), StructField(documentResults,ArrayType(StructType(StructField(docType,StringType,true), StructField(pageRange,ArrayType(IntegerType,true),true), StructField(fields,MapType(StringType,StructType(StructField(type,StringType,true), StructField(page,IntegerType,true), StructField(confidence,DoubleType,true), StructField(boundingBox,ArrayType(DoubleType,true),true), StructField(text,StringType,true), StructField(valueString,StringType,true), StructField(valuePhoneNumber,StringType,true), StructField(valueNumber,DoubleType,true), StructField(valueDate,StringType,true), StructField(valueTime,StringType,true), StructField(valueObject,StringType,true), 
StructField(valueArray,ArrayType(StringType,true),true)),true),true)),true),true)),true))"}]},"isSummary":false,"language":"scala"},"persist_state":{"view":{"type":"details","tableOptions":{},"chartOptions":{"seriesFieldKeys":["0"],"categoryFieldKeys":["0"],"isStacked":false,"aggregationType":"count","chartType":"bar"}}}},"bdf21081-09a3-4c55-a766-f02bf5f7ca04":{"type":"Synapse.DataFrame","sync_state":{"table":{"rows":[],"schema":[{"key":"0","name":"ProductCode","type":"string"},{"key":"1","name":"Tax","type":"double"},{"key":"2","name":"Quantity","type":"double"},{"key":"3","name":"UnitPrice","type":"double"},{"key":"4","name":"Description","type":"string"},{"key":"5","name":"Amount","type":"double"},{"key":"6","name":"url","type":"string"},{"key":"7","name":"CustomerAddress","type":"string"},{"key":"8","name":"CustomerName","type":"string"},{"key":"9","name":"InvoiceDate","type":"string"},{"key":"10","name":"InvoiceId","type":"string"},{"key":"11","name":"InvoiceTotal","type":"double"},{"key":"12","name":"SubTotal","type":"double"},{"key":"13","name":"TotalTax","type":"double"},{"key":"14","name":"VendorAddress","type":"string"},{"key":"15","name":"VendorAddressRecipient","type":"string"},{"key":"16","name":"VendorName","type":"string"},{"key":"17","name":"CustomerAddressRecipient","type":"string"},{"key":"18","name":"ShippingAddress","type":"string"},{"key":"19","name":"ShippingAddressRecipient","type":"string"}]},"isSummary":false,"language":"scala"},"persist_state":{"view":{"type":"details","tableOptions":{},"chartOptions":{"seriesFieldKeys":["1"],"categoryFieldKeys":["0"],"isStacked":false,"aggregationType":"sum","chartType":"bar"}}}},"c63cd1a0-8216-496d-9d04-c754d3e89d94":{"type":"Synapse.DataFrame","sync_state":{"table":{"rows":[{"12":"Mackenzie Gray","8":"78.4","4":"22671","11":"T.T TAILWIND TRADERS","9":"22, rue du Puits Dixme","5":"1198.4","6":[{"ProductCode":"39","Tax":25.2,"Quantity":2,"UnitPrice":200,"Description":"Coffee Maker Red","Amount":385.2},{"ProductCode":"31","Tax":0.7,"Quantity":1,"UnitPrice":10,"Description":"Celebrations C9","Amount":10.7},{"ProductCode":"12","Tax":52.5,"Quantity":3,"UnitPrice":250,"Description":"Black Bathing System Classic 18 in. H x 60 in. 
W x 32.5","Amount":802.5}],"1":"34, rue des Grands Champs Versailles","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice22671.pdf","2":"Mackenzie Gray","7":"1120.0","3":"11"},{"8":"84.6","4":"28073","11":"T.T TAILWIND TRADERS","9":"Maubeuge","13":"San Gabriel","5":"1293.1","10":"Sarosgawk","6":[{"ProductCode":"17","Tax":15.4,"Quantity":2,"UnitPrice":110,"Description":"Blend Solid White Sheer Curtains","Amount":235.4},{"ProductCode":"56","Tax":43.68,"Quantity":2,"UnitPrice":312,"Description":"Rechargeable screwdriver with extra battery","Amount":667.68},{"ProductCode":"40","Tax":25.52,"Quantity":3,"UnitPrice":135,"Description":"Extractor Steal","Amount":390.02}],"1":"United States","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice28073.pdf","7":"1208.5","3":"2021-10-05"},{"8":"52.5","4":"64808","11":"T.T TAILWIND TRADERS","9":"Circle","13":"828, rue de Berri","5":"802.5","10":"Braeo","6":[{"ProductCode":"43","Tax":20.79,"Quantity":3,"UnitPrice":99,"Description":"Big Metal Shelving","Amount":317.79},{"ProductCode":"42","Tax":18.9,"Quantity":3,"UnitPrice":90,"Description":"Metal Shelving","Amount":288.9},{"ProductCode":"46","Tax":8.61,"Quantity":1,"UnitPrice":123,"Description":"Measuring Tape","Amount":131.61},{"ProductCode":"21","Tax":4.2,"Quantity":3,"UnitPrice":25,"Description":"Curtain Rod 48 in","Amount":64.2}],"1":"3923 Dew Drop","14":"Villeneuve-d'Ascq","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","2":"Villeneuve-d'Ascq","7":"750.0","3":"2021-03-12"},{"12":"Dluhbio","8":"123.06","4":"67164","11":"T.T TAILWIND TRADERS","13":"45000","5":"1881.06","6":[{"ProductCode":"36","Tax":82.95,"Quantity":3,"UnitPrice":395,"Description":"Wood Table","Amount":1267.95},{"ProductCode":"20","Tax":16.8,"Quantity":2,"UnitPrice":120,"Description":"White Window","Amount":256.8},{"ProductCode":"25","Tax":14.7,"Quantity":3,"UnitPrice":70,"Description":"Indoor Kit Gardering","Amount":224.7},{"ProductCode":"46","Tax":8.61,"Quantity":1,"UnitPrice":123,"Description":"Measuring Tape","Amount":131.61}],"1":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","2":"Clinton Gutierrez","7":"1758.0"},{"12":"Ashlee Raje","8":"85.2","4":"80110","11":"T.T TAILWIND TRADERS","9":"4559 Loop Beaverton Oregon 97005 States","13":"Street","5":"1302.4","10":"Alvotue","6":[{"ProductCode":"9","Tax":37.8,"Quantity":3,"UnitPrice":200,"Description":"Bathing System Classic 18 in. H x 60 in. 
W x 32.5","Amount":577.8},{"ProductCode":"59","Tax":19.32,"Quantity":3,"UnitPrice":92,"Description":"Two red garden gnomes","Amount":295.32},{"ProductCode":"58","Tax":10.58,"Quantity":3,"UnitPrice":56,"Description":"Single red garden gnome","Amount":161.78},{"ProductCode":"32","Tax":17.5,"Quantity":1,"UnitPrice":250,"Description":"Artificial Tree","Amount":267.5}],"1":"Julpum","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","2":"Ashlee Raje","7":"1217.2","3":"October"},{"12":"Willie","8":"102.89","4":"71864","11":"T.T TAILWIND TRADERS","5":"1572.69","10":"Ifiaeh","6":[{"ProductCode":"36","Tax":55.3,"Quantity":2,"UnitPrice":395,"Description":"Wood Table","Amount":845.3},{"ProductCode":"56","Tax":19.66,"Quantity":1,"UnitPrice":312,"Description":"Rechargeable screwdriver with extra battery","Amount":300.46},{"ProductCode":"14","Tax":6.93,"Quantity":1,"UnitPrice":99,"Description":"Bathroom Sink Faucet","Amount":105.93},{"ProductCode":"48","Tax":21,"Quantity":3,"UnitPrice":100,"Description":"Hammer","Amount":321}],"1":"27, place de Brazaville Roubaix Nord 59100 France","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","2":"Pal","7":"1469.8"},{"12":"Misty Xie","8":"53.06","4":"80066","11":"T.T TAILWIND TRADERS","9":"376 Amador Valley Blvd.","5":"811.06","10":"Rloefos","6":[{"ProductCode":"49","Tax":15.4,"Quantity":2,"UnitPrice":110,"Description":"Screwdriver","Amount":235.4},{"ProductCode":"54","Tax":17.5,"Quantity":1,"UnitPrice":250,"Description":"Yellow Rechargeable screwdriver","Amount":267.5},{"ProductCode":"22","Tax":1.26,"Quantity":2,"UnitPrice":10,"Description":"Steel Passage Door Knob","Amount":19.26},{"ProductCode":"40","Tax":18.9,"Quantity":2,"UnitPrice":135,"Description":"Extractor Steal","Amount":288.9}],"1":"6058 Hill Street","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","2":"Misty Xie","7":"758.0","3":"4"},{"12":"Mrurc Potsdamer","8":"69.93","4":"83878","11":"T.T TAILWIND TRADERS","13":"4SJ","5":"1068.93","6":[{"ProductCode":"47","Tax":11.13,"Quantity":1,"UnitPrice":159,"Description":"Multi Function Drill","Amount":170.13},{"ProductCode":"53","Tax":6.3,"Quantity":1,"UnitPrice":90,"Description":"Stainless multi-tool plier","Amount":96.3},{"ProductCode":"32","Tax":52.5,"Quantity":3,"UnitPrice":250,"Description":"Artificial Tree","Amount":802.5}],"1":"Straße 242 Stuttgart Saarland 70511 Germany","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2015/Invoice83878.pdf","2":"Connie Liang","7":"999.0","3":"2015-11-04"},{"12":"Colin Cai","8":"34.16","4":"102247","11":"T.T TAILWIND TRADERS","9":"Phata 8858 V. Street London England W1Y 3RA United Kingdom","13":"80074","5":"522.16","6":[{"ProductCode":"43","Tax":13.86,"Quantity":2,"UnitPrice":99,"Description":"Big Metal Shelving","Amount":211.86},{"ProductCode":"30","Tax":1.4,"Quantity":2,"UnitPrice":10,"Description":"Gardering","Amount":21.4},{"ProductCode":"26","Tax":18.9,"Quantity":3,"UnitPrice":100,"Description":"Craftsman 100 ft. L x 5/8 in.","Amount":288.9}],"1":"Kampstr 9859","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice102247.pdf","2":"Colin Cai","7":"488.0","3":"2016"},{"8":"42.99","4":"92549","11":"T.T TAILWIND TRADERS","9":"4186 Silver Oaks Pl. 
Newport Beach California 92625 United States","5":"657.19","10":"Rgerlakmog","6":[{"ProductCode":"61","Tax":6.8,"Quantity":2,"UnitPrice":54,"Description":"One sat on shoe gnome","Amount":104},{"ProductCode":"2","Tax":28,"Quantity":2,"UnitPrice":200,"Description":"Refrigerator 1.7 cu. ft. 110 watts","Amount":428},{"ProductCode":"31","Tax":1.89,"Quantity":3,"UnitPrice":10,"Description":"Celebrations C9","Amount":28.89},{"ProductCode":"26","Tax":6.3,"Quantity":1,"UnitPrice":100,"Description":"Craftsman 100 ft. L x 5/8 in.","Amount":96.3}],"1":"2957 Tri-state Avenue Cambridge","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","2":"Roy","7":"614.2","3":"8"}],"schema":[{"key":"0","name":"url","type":"string"},{"key":"1","name":"CustomerAddress","type":"string"},{"key":"2","name":"CustomerName","type":"string"},{"key":"3","name":"InvoiceDate","type":"string"},{"key":"4","name":"InvoiceId","type":"string"},{"key":"5","name":"InvoiceTotal","type":"double"},{"key":"6","name":"Items","type":"ArrayType(StructType(StructField(ProductCode,StringType,true), StructField(Tax,DoubleType,true), StructField(Quantity,DoubleType,true), StructField(UnitPrice,DoubleType,true), StructField(Description,StringType,true), StructField(Amount,DoubleType,true)),true)"},{"key":"7","name":"SubTotal","type":"double"},{"key":"8","name":"TotalTax","type":"double"},{"key":"9","name":"VendorAddress","type":"string"},{"key":"10","name":"VendorAddressRecipient","type":"string"},{"key":"11","name":"VendorName","type":"string"},{"key":"12","name":"CustomerAddressRecipient","type":"string"},{"key":"13","name":"ShippingAddress","type":"string"},{"key":"14","name":"ShippingAddressRecipient","type":"string"}]},"isSummary":false,"language":"scala"},"persist_state":{"view":{"type":"details","tableOptions":{},"chartOptions":{"seriesFieldKeys":["5"],"categoryFieldKeys":["0"],"isStacked":false,"aggregationType":"sum","chartType":"bar"}}}},"88f16e5a-cbc4-4aa3-8a39-456877298c4a":{"type":"Synapse.DataFrame","sync_state":{"table":{"rows":[{"12":"1120.0","8":"Mackenzie Gray","4":"Coffee Maker Red","11":"1198.4","9":"11","13":"78.4","16":"T.T TAILWIND TRADERS","5":"385.2","10":"22671","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice22671.pdf","1":"25.2","17":"Mackenzie Gray","14":"22, rue du Puits Dixme","0":"39","20":[{"to":"zh-Hans","text":"咖啡机红色"},{"to":"fr","text":"Cafetière Rouge"},{"to":"ru","text":"Кофеварка Красная"},{"to":"cy","text":"Gwneuthurwr Coffi Coch"}],"2":"2.0","7":"34, rue des Grands Champs Versailles","3":"200.0"},{"12":"1120.0","8":"Mackenzie Gray","4":"Celebrations C9","11":"1198.4","9":"11","13":"78.4","16":"T.T TAILWIND TRADERS","5":"10.7","10":"22671","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice22671.pdf","1":"0.7","17":"Mackenzie Gray","14":"22, rue du Puits Dixme","0":"31","20":[{"to":"zh-Hans","text":"庆祝活动 C9"},{"to":"fr","text":"Célébrations C9"},{"to":"ru","text":"Торжества C9"},{"to":"cy","text":"Dathliadau C9"}],"2":"1.0","7":"34, rue des Grands Champs Versailles","3":"10.0"},{"12":"1120.0","8":"Mackenzie Gray","4":"Black Bathing System Classic 18 in. H x 60 in. 
W x 32.5","11":"1198.4","9":"11","13":"78.4","16":"T.T TAILWIND TRADERS","5":"802.5","10":"22671","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice22671.pdf","1":"52.5","17":"Mackenzie Gray","14":"22, rue du Puits Dixme","0":"12","20":[{"to":"zh-Hans","text":"黑洗浴系统经典 18 在.H x 60 在。W x 32.5"},{"to":"fr","text":"Système de bain noir Classic 18 po. H x 60 po. L x 32,5"},{"to":"ru","text":"Черная система купания Classic 18 in. В x 60 в. Ш x 32,5"},{"to":"cy","text":"Clasur y System Ymdrochi Ddu 18 i mewn. H x 60 i mewn. W x 32.5"}],"2":"3.0","7":"34, rue des Grands Champs Versailles","3":"250.0"},{"12":"1208.5","4":"Blend Solid White Sheer Curtains","15":"Sarosgawk","11":"1293.1","9":"2021-10-05","13":"84.6","16":"T.T TAILWIND TRADERS","5":"235.4","10":"28073","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice28073.pdf","1":"15.4","14":"Maubeuge","0":"17","20":[{"to":"zh-Hans","text":"混合实心白色雪尔窗帘"},{"to":"fr","text":"Mélangez des rideaux transparents blancs solides"},{"to":"ru","text":"Смесь Сплошные белые шторы"},{"to":"cy","text":"Llenni Sheer Gwyn Solid Blend"}],"2":"2.0","18":"San Gabriel","7":"United States","3":"110.0"},{"12":"1208.5","4":"Rechargeable screwdriver with extra battery","15":"Sarosgawk","11":"1293.1","9":"2021-10-05","13":"84.6","16":"T.T TAILWIND TRADERS","5":"667.68","10":"28073","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice28073.pdf","1":"43.68","14":"Maubeuge","0":"56","20":[{"to":"zh-Hans","text":"可充电螺丝刀,带额外电池"},{"to":"fr","text":"Tournevis rechargeable avec batterie supplémentaire"},{"to":"ru","text":"Аккумуляторная отвертка с дополнительной батареей"},{"to":"cy","text":"Sgriwdreifer gellir ailgodi tâl tâl gyda batri ychwanegol"}],"2":"2.0","18":"San Gabriel","7":"United States","3":"312.0"},{"12":"1208.5","4":"Extractor Steal","15":"Sarosgawk","11":"1293.1","9":"2021-10-05","13":"84.6","16":"T.T TAILWIND TRADERS","5":"390.02","10":"28073","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice28073.pdf","1":"25.52","14":"Maubeuge","0":"40","20":[{"to":"zh-Hans","text":"提取器偷窃"},{"to":"fr","text":"Vol d’extracteur"},{"to":"ru","text":"Кража экстрактора"},{"to":"cy","text":"Dwyn Echdynnwr"}],"2":"3.0","18":"San Gabriel","7":"United States","3":"135.0"},{"12":"750.0","8":"Villeneuve-d'Ascq","19":"Villeneuve-d'Ascq","4":"Big Metal Shelving","15":"Braeo","11":"802.5","9":"2021-03-12","13":"52.5","16":"T.T TAILWIND TRADERS","5":"317.79","10":"64808","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","1":"20.79","14":"Circle","0":"43","20":[{"to":"zh-Hans","text":"大金属搁板"},{"to":"fr","text":"Grandes étagères métalliques"},{"to":"ru","text":"Большие металлические стеллажи"},{"to":"cy","text":"Silffoedd Metel Mawr"}],"2":"3.0","18":"828, rue de Berri","7":"3923 Dew Drop","3":"99.0"},{"12":"750.0","8":"Villeneuve-d'Ascq","19":"Villeneuve-d'Ascq","4":"Metal Shelving","15":"Braeo","11":"802.5","9":"2021-03-12","13":"52.5","16":"T.T TAILWIND TRADERS","5":"288.9","10":"64808","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","1":"18.9","14":"Circle","0":"42","20":[{"to":"zh-Hans","text":"金属搁板"},{"to":"fr","text":"Étagères métalliques"},{"to":"ru","text":"Металлические стеллажи"},{"to":"cy","text":"Silffoedd Metel"}],"2":"3.0","18":"828, rue de Berri","7":"3923 Dew Drop","3":"90.0"},{"12":"750.0","8":"Villeneuve-d'Ascq","19":"Villeneuve-d'Ascq","4":"Measuring 
Tape","15":"Braeo","11":"802.5","9":"2021-03-12","13":"52.5","16":"T.T TAILWIND TRADERS","5":"131.61","10":"64808","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","1":"8.61","14":"Circle","0":"46","20":[{"to":"zh-Hans","text":"卷尺"},{"to":"fr","text":"Mètre ruban"},{"to":"ru","text":"Мерная лента"},{"to":"cy","text":"Tâp Mesur"}],"2":"1.0","18":"828, rue de Berri","7":"3923 Dew Drop","3":"123.0"},{"12":"750.0","8":"Villeneuve-d'Ascq","19":"Villeneuve-d'Ascq","4":"Curtain Rod 48 in","15":"Braeo","11":"802.5","9":"2021-03-12","13":"52.5","16":"T.T TAILWIND TRADERS","5":"64.2","10":"64808","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","1":"4.2","14":"Circle","0":"21","20":[{"to":"zh-Hans","text":"窗帘杆 48 在"},{"to":"fr","text":"Tringle à rideaux 48 po"},{"to":"ru","text":"Карниз 48 in"},{"to":"cy","text":"Rod Llennyrch 48 yn"}],"2":"3.0","18":"828, rue de Berri","7":"3923 Dew Drop","3":"25.0"},{"12":"1758.0","8":"Clinton Gutierrez","4":"Wood Table","11":"1881.06","13":"123.06","16":"T.T TAILWIND TRADERS","5":"1267.95","10":"67164","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","1":"82.95","17":"Dluhbio","0":"36","20":[{"to":"zh-Hans","text":"木桌"},{"to":"fr","text":"Table en bois"},{"to":"ru","text":"Деревянный стол"},{"to":"cy","text":"Tabl Pren"}],"2":"3.0","18":"45000","7":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","3":"395.0"},{"12":"1758.0","8":"Clinton Gutierrez","4":"White Window","11":"1881.06","13":"123.06","16":"T.T TAILWIND TRADERS","5":"256.8","10":"67164","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","1":"16.8","17":"Dluhbio","0":"20","20":[{"to":"zh-Hans","text":"白色窗口"},{"to":"fr","text":"Fenêtre blanche"},{"to":"ru","text":"Белое окно"},{"to":"cy","text":"Ffenestr Gwyn"}],"2":"2.0","18":"45000","7":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","3":"120.0"},{"12":"1758.0","8":"Clinton Gutierrez","4":"Indoor Kit Gardering","11":"1881.06","13":"123.06","16":"T.T TAILWIND TRADERS","5":"224.7","10":"67164","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","1":"14.7","17":"Dluhbio","0":"25","20":[{"to":"zh-Hans","text":"室内套件加德林"},{"to":"fr","text":"Kit Gardering intérieur"},{"to":"ru","text":"Гардеринг комплектов для помещений"},{"to":"cy","text":"Gardering Cit Dan Do"}],"2":"3.0","18":"45000","7":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","3":"70.0"},{"12":"1758.0","8":"Clinton Gutierrez","4":"Measuring Tape","11":"1881.06","13":"123.06","16":"T.T TAILWIND TRADERS","5":"131.61","10":"67164","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","1":"8.61","17":"Dluhbio","0":"46","20":[{"to":"zh-Hans","text":"卷尺"},{"to":"fr","text":"Mètre ruban"},{"to":"ru","text":"Мерная лента"},{"to":"cy","text":"Tâp Mesur"}],"2":"1.0","18":"45000","7":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","3":"123.0"},{"12":"1217.2","8":"Ashlee Raje","4":"Bathing System Classic 18 in. H x 60 in. 
W x 32.5","15":"Alvotue","11":"1302.4","9":"October","13":"85.2","16":"T.T TAILWIND TRADERS","5":"577.8","10":"80110","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","1":"37.8","17":"Ashlee Raje","14":"4559 Loop Beaverton Oregon 97005 States","0":"9","20":[{"to":"zh-Hans","text":"沐浴系统经典 18 在.H x 60 在。W x 32.5"},{"to":"fr","text":"Système de bain Classic 18 po. H x 60 po. L x 32,5"},{"to":"ru","text":"Система купания Классическая 18 в. В x 60 в. Ш x 32,5"},{"to":"cy","text":"Clasurol y System Ymdrochi 18 i mewn. H x 60 i mewn. W x 32.5"}],"2":"3.0","18":"Street","7":"Julpum","3":"200.0"},{"12":"1217.2","8":"Ashlee Raje","4":"Two red garden gnomes","15":"Alvotue","11":"1302.4","9":"October","13":"85.2","16":"T.T TAILWIND TRADERS","5":"295.32","10":"80110","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","1":"19.32","17":"Ashlee Raje","14":"4559 Loop Beaverton Oregon 97005 States","0":"59","20":[{"to":"zh-Hans","text":"两个红色的花园侏儒"},{"to":"fr","text":"Deux nains de jardin rouges"},{"to":"ru","text":"Два красных садовых гнома"},{"to":"cy","text":"Dwy gnomes gardd goch"}],"2":"3.0","18":"Street","7":"Julpum","3":"92.0"},{"12":"1217.2","8":"Ashlee Raje","4":"Single red garden gnome","15":"Alvotue","11":"1302.4","9":"October","13":"85.2","16":"T.T TAILWIND TRADERS","5":"161.78","10":"80110","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","1":"10.58","17":"Ashlee Raje","14":"4559 Loop Beaverton Oregon 97005 States","0":"58","20":[{"to":"zh-Hans","text":"单红花园侏儒"},{"to":"fr","text":"Gnome de jardin rouge unique"},{"to":"ru","text":"Одиночный красный садовый гном"},{"to":"cy","text":"Gnome gardd goch sengl"}],"2":"3.0","18":"Street","7":"Julpum","3":"56.0"},{"12":"1217.2","8":"Ashlee Raje","4":"Artificial Tree","15":"Alvotue","11":"1302.4","9":"October","13":"85.2","16":"T.T TAILWIND TRADERS","5":"267.5","10":"80110","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","1":"17.5","17":"Ashlee Raje","14":"4559 Loop Beaverton Oregon 97005 States","0":"32","20":[{"to":"zh-Hans","text":"人造树"},{"to":"fr","text":"Arbre artificiel"},{"to":"ru","text":"Искусственное дерево"},{"to":"cy","text":"Coeden Artiffisial"}],"2":"1.0","18":"Street","7":"Julpum","3":"250.0"},{"12":"1469.8","8":"Pal","4":"Wood Table","15":"Ifiaeh","11":"1572.69","13":"102.89","16":"T.T TAILWIND TRADERS","5":"845.3","10":"71864","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","1":"55.3","17":"Willie","0":"36","20":[{"to":"zh-Hans","text":"木桌"},{"to":"fr","text":"Table en bois"},{"to":"ru","text":"Деревянный стол"},{"to":"cy","text":"Tabl Pren"}],"2":"2.0","7":"27, place de Brazaville Roubaix Nord 59100 France","3":"395.0"},{"12":"1469.8","8":"Pal","4":"Rechargeable screwdriver with extra battery","15":"Ifiaeh","11":"1572.69","13":"102.89","16":"T.T TAILWIND TRADERS","5":"300.46","10":"71864","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","1":"19.66","17":"Willie","0":"56","20":[{"to":"zh-Hans","text":"可充电螺丝刀,带额外电池"},{"to":"fr","text":"Tournevis rechargeable avec batterie supplémentaire"},{"to":"ru","text":"Аккумуляторная отвертка с дополнительной батареей"},{"to":"cy","text":"Sgriwdreifer gellir ailgodi tâl tâl gyda batri ychwanegol"}],"2":"1.0","7":"27, place de Brazaville Roubaix Nord 59100 France","3":"312.0"},{"12":"1469.8","8":"Pal","4":"Bathroom Sink 
Faucet","15":"Ifiaeh","11":"1572.69","13":"102.89","16":"T.T TAILWIND TRADERS","5":"105.93","10":"71864","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","1":"6.93","17":"Willie","0":"14","20":[{"to":"zh-Hans","text":"浴室水槽水龙头"},{"to":"fr","text":"Robinet lavabo de salle de bain"},{"to":"ru","text":"Смеситель для раковины в ванной комнате"},{"to":"cy","text":"Faucet Sinc Ystafell Ymolchi"}],"2":"1.0","7":"27, place de Brazaville Roubaix Nord 59100 France","3":"99.0"},{"12":"1469.8","8":"Pal","4":"Hammer","15":"Ifiaeh","11":"1572.69","13":"102.89","16":"T.T TAILWIND TRADERS","5":"321.0","10":"71864","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","1":"21.0","17":"Willie","0":"48","20":[{"to":"zh-Hans","text":"锤"},{"to":"fr","text":"Marteau"},{"to":"ru","text":"Молоток"},{"to":"cy","text":"Morthwyl"}],"2":"3.0","7":"27, place de Brazaville Roubaix Nord 59100 France","3":"100.0"},{"12":"758.0","8":"Misty Xie","4":"Screwdriver","15":"Rloefos","11":"811.06","9":"4","13":"53.06","16":"T.T TAILWIND TRADERS","5":"235.4","10":"80066","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","1":"15.4","17":"Misty Xie","14":"376 Amador Valley Blvd.","0":"49","20":[{"to":"zh-Hans","text":"螺丝刀"},{"to":"fr","text":"Tournevis"},{"to":"ru","text":"Отвёртка"},{"to":"cy","text":"Sgriwdreifer"}],"2":"2.0","7":"6058 Hill Street","3":"110.0"},{"12":"758.0","8":"Misty Xie","4":"Yellow Rechargeable screwdriver","15":"Rloefos","11":"811.06","9":"4","13":"53.06","16":"T.T TAILWIND TRADERS","5":"267.5","10":"80066","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","1":"17.5","17":"Misty Xie","14":"376 Amador Valley Blvd.","0":"54","20":[{"to":"zh-Hans","text":"黄色可充电螺丝刀"},{"to":"fr","text":"Tournevis rechargeable jaune"},{"to":"ru","text":"Желтая аккумуляторная отвертка"},{"to":"cy","text":"Sgriwdreifer Ailwefradwy Melyn"}],"2":"1.0","7":"6058 Hill Street","3":"250.0"},{"12":"758.0","8":"Misty Xie","4":"Steel Passage Door Knob","15":"Rloefos","11":"811.06","9":"4","13":"53.06","16":"T.T TAILWIND TRADERS","5":"19.26","10":"80066","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","1":"1.26","17":"Misty Xie","14":"376 Amador Valley Blvd.","0":"22","20":[{"to":"zh-Hans","text":"钢通道门旋钮"},{"to":"fr","text":"Poignée de porte de passage en acier"},{"to":"ru","text":"Стальная дверная ручка прохода"},{"to":"cy","text":"Knob Drws Pasio Dur"}],"2":"2.0","7":"6058 Hill Street","3":"10.0"},{"12":"758.0","8":"Misty Xie","4":"Extractor Steal","15":"Rloefos","11":"811.06","9":"4","13":"53.06","16":"T.T TAILWIND TRADERS","5":"288.9","10":"80066","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","1":"18.9","17":"Misty Xie","14":"376 Amador Valley Blvd.","0":"40","20":[{"to":"zh-Hans","text":"提取器偷窃"},{"to":"fr","text":"Vol d’extracteur"},{"to":"ru","text":"Кража экстрактора"},{"to":"cy","text":"Dwyn Echdynnwr"}],"2":"2.0","7":"6058 Hill Street","3":"135.0"},{"12":"999.0","8":"Connie Liang","4":"Multi Function Drill","11":"1068.93","9":"2015-11-04","13":"69.93","16":"T.T TAILWIND TRADERS","5":"170.13","10":"83878","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2015/Invoice83878.pdf","1":"11.13","17":"Mrurc Potsdamer","0":"47","20":[{"to":"zh-Hans","text":"多功能钻机"},{"to":"fr","text":"Perceuse multifonction"},{"to":"ru","text":"Многофункциональная дрель"},{"to":"cy","text":"Dril 
Aml-Swyddogaeth"}],"2":"1.0","18":"4SJ","7":"Straße 242 Stuttgart Saarland 70511 Germany","3":"159.0"},{"12":"999.0","8":"Connie Liang","4":"Stainless multi-tool plier","11":"1068.93","9":"2015-11-04","13":"69.93","16":"T.T TAILWIND TRADERS","5":"96.3","10":"83878","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2015/Invoice83878.pdf","1":"6.3","17":"Mrurc Potsdamer","0":"53","20":[{"to":"zh-Hans","text":"不锈钢多工具钳子"},{"to":"fr","text":"Pince multi-outils en acier inoxydable"},{"to":"ru","text":"Нержавеющая многофункциональная плоскогубцы"},{"to":"cy","text":"Plisgyn aml-offeryn di-staen"}],"2":"1.0","18":"4SJ","7":"Straße 242 Stuttgart Saarland 70511 Germany","3":"90.0"},{"12":"999.0","8":"Connie Liang","4":"Artificial Tree","11":"1068.93","9":"2015-11-04","13":"69.93","16":"T.T TAILWIND TRADERS","5":"802.5","10":"83878","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2015/Invoice83878.pdf","1":"52.5","17":"Mrurc Potsdamer","0":"32","20":[{"to":"zh-Hans","text":"人造树"},{"to":"fr","text":"Arbre artificiel"},{"to":"ru","text":"Искусственное дерево"},{"to":"cy","text":"Coeden Artiffisial"}],"2":"3.0","18":"4SJ","7":"Straße 242 Stuttgart Saarland 70511 Germany","3":"250.0"},{"12":"488.0","8":"Colin Cai","4":"Big Metal Shelving","11":"522.16","9":"2016","13":"34.16","16":"T.T TAILWIND TRADERS","5":"211.86","10":"102247","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice102247.pdf","1":"13.86","17":"Colin Cai","14":"Phata 8858 V. Street London England W1Y 3RA United Kingdom","0":"43","20":[{"to":"zh-Hans","text":"大金属搁板"},{"to":"fr","text":"Grandes étagères métalliques"},{"to":"ru","text":"Большие металлические стеллажи"},{"to":"cy","text":"Silffoedd Metel Mawr"}],"2":"2.0","18":"80074","7":"Kampstr 9859","3":"99.0"},{"12":"488.0","8":"Colin Cai","4":"Gardering","11":"522.16","9":"2016","13":"34.16","16":"T.T TAILWIND TRADERS","5":"21.4","10":"102247","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice102247.pdf","1":"1.4","17":"Colin Cai","14":"Phata 8858 V. Street London England W1Y 3RA United Kingdom","0":"30","20":[{"to":"zh-Hans","text":"守卫"},{"to":"fr","text":"Garde"},{"to":"ru","text":"Охрана"},{"to":"cy","text":"Gwarchod"}],"2":"2.0","18":"80074","7":"Kampstr 9859","3":"10.0"},{"12":"488.0","8":"Colin Cai","4":"Craftsman 100 ft. L x 5/8 in.","11":"522.16","9":"2016","13":"34.16","16":"T.T TAILWIND TRADERS","5":"288.9","10":"102247","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice102247.pdf","1":"18.9","17":"Colin Cai","14":"Phata 8858 V. Street London England W1Y 3RA United Kingdom","0":"26","20":[{"to":"zh-Hans","text":"工匠 100 英尺 L x 5/8 英寸。"},{"to":"fr","text":"Artisan 100 pi L x 5/8 po."},{"to":"ru","text":"Ремесленник 100 футов L x 5/8 дюйма"},{"to":"cy","text":"Crefftwr 100 troedfedd. L x 5/8 i mewn."}],"2":"3.0","18":"80074","7":"Kampstr 9859","3":"100.0"},{"12":"614.2","8":"Roy","4":"One sat on shoe gnome","15":"Rgerlakmog","11":"657.19","9":"8","13":"42.99","16":"T.T TAILWIND TRADERS","5":"104.0","10":"92549","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","1":"6.8","14":"4186 Silver Oaks Pl. 
Newport Beach California 92625 United States","0":"61","20":[{"to":"zh-Hans","text":"一个坐在鞋侏儒上"},{"to":"fr","text":"L’un d’eux était assis sur un gnome de chaussures"},{"to":"ru","text":"Один сидел на ботинке гнома"},{"to":"cy","text":"Roedd un yn eistedd ar gnome esgidiau"}],"2":"2.0","7":"2957 Tri-state Avenue Cambridge","3":"54.0"},{"12":"614.2","8":"Roy","4":"Refrigerator 1.7 cu. ft. 110 watts","15":"Rgerlakmog","11":"657.19","9":"8","13":"42.99","16":"T.T TAILWIND TRADERS","5":"428.0","10":"92549","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","1":"28.0","14":"4186 Silver Oaks Pl. Newport Beach California 92625 United States","0":"2","20":[{"to":"zh-Hans","text":"冰箱 1.7 cu. 英尺 110 瓦"},{"to":"fr","text":"Réfrigérateur 1,7 pi³ 110 watts"},{"to":"ru","text":"Холодильник 1,7 куб. фута 110 Вт"},{"to":"cy","text":"Oergell 1.7 cu. ft. 110 watt"}],"2":"2.0","7":"2957 Tri-state Avenue Cambridge","3":"200.0"},{"12":"614.2","8":"Roy","4":"Celebrations C9","15":"Rgerlakmog","11":"657.19","9":"8","13":"42.99","16":"T.T TAILWIND TRADERS","5":"28.89","10":"92549","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","1":"1.89","14":"4186 Silver Oaks Pl. Newport Beach California 92625 United States","0":"31","20":[{"to":"zh-Hans","text":"庆祝活动 C9"},{"to":"fr","text":"Célébrations C9"},{"to":"ru","text":"Торжества C9"},{"to":"cy","text":"Dathliadau C9"}],"2":"3.0","7":"2957 Tri-state Avenue Cambridge","3":"10.0"},{"12":"614.2","8":"Roy","4":"Craftsman 100 ft. L x 5/8 in.","15":"Rgerlakmog","11":"657.19","9":"8","13":"42.99","16":"T.T TAILWIND TRADERS","5":"96.3","10":"92549","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","1":"6.3","14":"4186 Silver Oaks Pl. Newport Beach California 92625 United States","0":"26","20":[{"to":"zh-Hans","text":"工匠 100 英尺 L x 5/8 英寸。"},{"to":"fr","text":"Artisan 100 pi L x 5/8 po."},{"to":"ru","text":"Ремесленник 100 футов L x 5/8 дюйма"},{"to":"cy","text":"Crefftwr 100 troedfedd. 
L x 5/8 i mewn."}],"2":"1.0","7":"2957 Tri-state Avenue Cambridge","3":"100.0"}],"schema":[{"key":"0","name":"ProductCode","type":"string"},{"key":"1","name":"Tax","type":"double"},{"key":"2","name":"Quantity","type":"double"},{"key":"3","name":"UnitPrice","type":"double"},{"key":"4","name":"Description","type":"string"},{"key":"5","name":"Amount","type":"double"},{"key":"6","name":"url","type":"string"},{"key":"7","name":"CustomerAddress","type":"string"},{"key":"8","name":"CustomerName","type":"string"},{"key":"9","name":"InvoiceDate","type":"string"},{"key":"10","name":"InvoiceId","type":"string"},{"key":"11","name":"InvoiceTotal","type":"double"},{"key":"12","name":"SubTotal","type":"double"},{"key":"13","name":"TotalTax","type":"double"},{"key":"14","name":"VendorAddress","type":"string"},{"key":"15","name":"VendorAddressRecipient","type":"string"},{"key":"16","name":"VendorName","type":"string"},{"key":"17","name":"CustomerAddressRecipient","type":"string"},{"key":"18","name":"ShippingAddress","type":"string"},{"key":"19","name":"ShippingAddressRecipient","type":"string"},{"key":"20","name":"Translations","type":"ArrayType(StructType(StructField(to,StringType,true), StructField(text,StringType,true), StructField(transliteration,StructType(StructField(script,StringType,true), StructField(text,StringType,true)),true), StructField(alignment,StructType(StructField(proj,StringType,true)),true), StructField(sentLen,StructType(StructField(srcSentLen,ArrayType(IntegerType,true),true), StructField(transSentLen,ArrayType(IntegerType,true),true)),true)),true)"}]},"isSummary":false,"language":"scala"},"persist_state":{"view":{"type":"details","tableOptions":{},"chartOptions":{"seriesFieldKeys":["1"],"categoryFieldKeys":["0"],"isStacked":false,"aggregationType":"sum","chartType":"bar"}}}}}},"application/vnd.databricks.v1+notebook":{"notebookName":"CognitiveServices - Create a Multilingual Search Engine from Forms","dashboards":[],"notebookMetadata":{"pythonIndentUnit":2},"language":"python","widgets":{},"notebookOrigID":945159649235142}},"nbformat":4,"nbformat_minor":0} diff --git a/notebooks/features/CognitiveServices - Overview.ipynb b/notebooks/features/cognitive_services/CognitiveServices - Overview.ipynb similarity index 100% rename from notebooks/features/CognitiveServices - Overview.ipynb rename to notebooks/features/cognitive_services/CognitiveServices - Overview.ipynb diff --git a/notebooks/examples/cognitive_services/CognitiveServices - Predictive Maintenance.ipynb b/notebooks/features/cognitive_services/CognitiveServices - Predictive Maintenance.ipynb similarity index 100% rename from notebooks/examples/cognitive_services/CognitiveServices - Predictive Maintenance.ipynb rename to notebooks/features/cognitive_services/CognitiveServices - Predictive Maintenance.ipynb diff --git a/notebooks/features/http/HttpOnSpark - Working with Arbitrary Web APIs.ipynb b/notebooks/features/http/HttpOnSpark - Working with Arbitrary Web APIs.ipynb deleted file mode 100644 index c3825ee9e2..0000000000 --- a/notebooks/features/http/HttpOnSpark - Working with Arbitrary Web APIs.ipynb +++ /dev/null @@ -1,81 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "collapsed": true - }, - "source": [ - "### Use \"dogs as a service\" in a distributed fashion with HTTP on Spark\n", - "\n", - "In this example we will use the simple HTTP Transformer to call a public webAPI that returns random images of dogs. The service does not use the json payload, but this is for example purposes. 
\n", - "\n", - "A call to the dog service returns json objects structured like:\n", - "\n", - "`{\"status\":\"success\",\"message\":\"https:\\/\\/images.dog.ceo\\/breeds\\/lhasa\\/n02098413_2536.jpg\"}`\n", - "\n", - "If you visit the link you can download the image:\n", - "\n", - "\n", - "\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "\n", - "if os.environ.get(\"AZURE_SERVICE\", None) == \"Microsoft.ProjectArcadia\":\n", - " from pyspark.sql import SparkSession\n", - " spark = SparkSession.builder.getOrCreate()\n", - "\n", - "from pyspark.sql.functions import struct\n", - "from pyspark.sql.types import *\n", - "from synapse.ml.io.http import *\n", - "\n", - "df = spark.createDataFrame([(\"foo\",) for x in range(20)], [\"data\"]) \\\n", - " .withColumn(\"inputs\", struct(\"data\"))\n", - "\n", - "response_schema = StructType().add(\"status\", StringType()).add(\"message\", StringType())\n", - "\n", - "client = SimpleHTTPTransformer() \\\n", - " .setInputCol(\"inputs\") \\\n", - " .setInputParser(JSONInputParser()) \\\n", - " .setOutputParser(JSONOutputParser().setDataType(response_schema)) \\\n", - " .setOutputCol(\"results\") \\\n", - " .setUrl(\"https://dog.ceo/api/breeds/image/random\")\n", - "\n", - "responses = client.transform(df)\n", - "responses.select(\"results\").show(truncate = False)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.3" - }, - "name": "HttpOnSpark - Parallelizing a Custom Web Service", - "notebookId": 3187910992870443 - }, - "nbformat": 4, - "nbformat_minor": 1 -} \ No newline at end of file diff --git a/notebooks/examples/OpenCV - Pipeline Image Transformations.ipynb b/notebooks/features/opencv/OpenCV - Pipeline Image Transformations.ipynb similarity index 100% rename from notebooks/examples/OpenCV - Pipeline Image Transformations.ipynb rename to notebooks/features/opencv/OpenCV - Pipeline Image Transformations.ipynb diff --git a/notebooks/examples/AzureSearchIndex - Met Artworks.ipynb b/notebooks/features/other/AzureSearchIndex - Met Artworks.ipynb similarity index 100% rename from notebooks/examples/AzureSearchIndex - Met Artworks.ipynb rename to notebooks/features/other/AzureSearchIndex - Met Artworks.ipynb diff --git a/notebooks/examples/ConditionalKNN - Exploring Art Across Cultures.ipynb b/notebooks/features/other/ConditionalKNN - Exploring Art Across Cultures.ipynb similarity index 100% rename from notebooks/examples/ConditionalKNN - Exploring Art Across Cultures.ipynb rename to notebooks/features/other/ConditionalKNN - Exploring Art Across Cultures.ipynb diff --git a/notebooks/examples/CyberML - Anomalous Access Detection.ipynb b/notebooks/features/other/CyberML - Anomalous Access Detection.ipynb similarity index 100% rename from notebooks/examples/CyberML - Anomalous Access Detection.ipynb rename to notebooks/features/other/CyberML - Anomalous Access Detection.ipynb diff --git a/notebooks/examples/deep_learning/DeepLearning - BiLSTM Medical Entity Extraction.ipynb b/notebooks/features/other/DeepLearning - BiLSTM Medical Entity Extraction.ipynb similarity index 100% rename from notebooks/examples/deep_learning/DeepLearning - BiLSTM Medical 
Entity Extraction.ipynb rename to notebooks/features/other/DeepLearning - BiLSTM Medical Entity Extraction.ipynb diff --git a/notebooks/examples/deep_learning/DeepLearning - CIFAR10 Convolutional Network.ipynb b/notebooks/features/other/DeepLearning - CIFAR10 Convolutional Network.ipynb similarity index 100% rename from notebooks/examples/deep_learning/DeepLearning - CIFAR10 Convolutional Network.ipynb rename to notebooks/features/other/DeepLearning - CIFAR10 Convolutional Network.ipynb diff --git a/notebooks/examples/deep_learning/DeepLearning - Flower Image Classification.ipynb b/notebooks/features/other/DeepLearning - Flower Image Classification.ipynb similarity index 100% rename from notebooks/examples/deep_learning/DeepLearning - Flower Image Classification.ipynb rename to notebooks/features/other/DeepLearning - Flower Image Classification.ipynb diff --git a/notebooks/examples/deep_learning/DeepLearning - Transfer Learning.ipynb b/notebooks/features/other/DeepLearning - Transfer Learning.ipynb similarity index 100% rename from notebooks/examples/deep_learning/DeepLearning - Transfer Learning.ipynb rename to notebooks/features/other/DeepLearning - Transfer Learning.ipynb diff --git a/notebooks/examples/HyperParameterTuning - Fighting Breast Cancer.ipynb b/notebooks/features/other/HyperParameterTuning - Fighting Breast Cancer.ipynb similarity index 100% rename from notebooks/examples/HyperParameterTuning - Fighting Breast Cancer.ipynb rename to notebooks/features/other/HyperParameterTuning - Fighting Breast Cancer.ipynb diff --git a/notebooks/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.ipynb b/notebooks/features/other/TextAnalytics - Amazon Book Reviews with Word2Vec.ipynb similarity index 100% rename from notebooks/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.ipynb rename to notebooks/features/other/TextAnalytics - Amazon Book Reviews with Word2Vec.ipynb diff --git a/notebooks/examples/text_analytics/TextAnalytics - Amazon Book Reviews.ipynb b/notebooks/features/other/TextAnalytics - Amazon Book Reviews.ipynb similarity index 100% rename from notebooks/examples/text_analytics/TextAnalytics - Amazon Book Reviews.ipynb rename to notebooks/features/other/TextAnalytics - Amazon Book Reviews.ipynb diff --git a/notebooks/examples/regression/Regression - Auto Imports.ipynb b/notebooks/features/regression/Regression - Auto Imports.ipynb similarity index 100% rename from notebooks/examples/regression/Regression - Auto Imports.ipynb rename to notebooks/features/regression/Regression - Auto Imports.ipynb diff --git a/notebooks/examples/regression/Regression - Flight Delays with DataCleaning.ipynb b/notebooks/features/regression/Regression - Flight Delays with DataCleaning.ipynb similarity index 100% rename from notebooks/examples/regression/Regression - Flight Delays with DataCleaning.ipynb rename to notebooks/features/regression/Regression - Flight Delays with DataCleaning.ipynb diff --git a/notebooks/examples/regression/Regression - Flight Delays.ipynb b/notebooks/features/regression/Regression - Flight Delays.ipynb similarity index 100% rename from notebooks/examples/regression/Regression - Flight Delays.ipynb rename to notebooks/features/regression/Regression - Flight Delays.ipynb diff --git a/notebooks/examples/regression/Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor.ipynb b/notebooks/features/regression/Regression - Vowpal Wabbit vs. LightGBM vs. 
Linear Regressor.ipynb similarity index 100% rename from notebooks/examples/regression/Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor.ipynb rename to notebooks/features/regression/Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor.ipynb diff --git a/notebooks/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.ipynb b/notebooks/features/responsible_ai/DataBalanceAnalysis - Adult Census Income.ipynb similarity index 100% rename from notebooks/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.ipynb rename to notebooks/features/responsible_ai/DataBalanceAnalysis - Adult Census Income.ipynb diff --git a/notebooks/examples/responsible_ai/Interpretability - Explanation Dashboard.ipynb b/notebooks/features/responsible_ai/Interpretability - Explanation Dashboard.ipynb similarity index 100% rename from notebooks/examples/responsible_ai/Interpretability - Explanation Dashboard.ipynb rename to notebooks/features/responsible_ai/Interpretability - Explanation Dashboard.ipynb diff --git a/notebooks/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.ipynb b/notebooks/features/responsible_ai/Interpretability - Snow Leopard Detection.ipynb similarity index 100% rename from notebooks/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.ipynb rename to notebooks/features/responsible_ai/Interpretability - Snow Leopard Detection.ipynb diff --git a/notebooks/examples/responsible_ai/Interpretability - Tabular SHAP explainer.ipynb b/notebooks/features/responsible_ai/Interpretability - Tabular SHAP explainer.ipynb similarity index 100% rename from notebooks/examples/responsible_ai/Interpretability - Tabular SHAP explainer.ipynb rename to notebooks/features/responsible_ai/Interpretability - Tabular SHAP explainer.ipynb diff --git a/notebooks/examples/responsible_ai/Interpretability - Text Explainers.ipynb b/notebooks/features/responsible_ai/Interpretability - Text Explainers.ipynb similarity index 100% rename from notebooks/examples/responsible_ai/Interpretability - Text Explainers.ipynb rename to notebooks/features/responsible_ai/Interpretability - Text Explainers.ipynb diff --git a/website/docs/examples/about.md b/website/docs/examples/about.md deleted file mode 100644 index 5247c91adc..0000000000 --- a/website/docs/examples/about.md +++ /dev/null @@ -1,57 +0,0 @@ ---- -title: Examples -hide_title: true -sidebar_label: About ---- - -## Examples - -- Create a deep image classifier with transfer learning ([DeepLearning - Flower Image Classification]) -- Fit a LightGBM classification or regression model on a biochemical dataset - ([LightGBM Overview]), to learn more check out the [LightGBM documentation - page](../../features/lightgbm/about). 
-- Deploy a deep network as a distributed web service with [SynapseML - Serving](../../features/spark_serving/about) -- Use web services in Spark with [HTTP on Apache Spark](../../features/http/about) -- Use Bi-directional LSTMs from Keras for medical entity extraction - ([DeepLearning - BiLSTM Medical Entity Extraction]) -- Create a text analytics system on Amazon book reviews ([TextAnalytics - Amazon Book Reviews]) -- Perform distributed hyperparameter tuning to identify Breast Cancer - ([HyperParameterTuning - Fighting Breast Cancer]) -- Easily ingest images from HDFS into Spark `DataFrame` ([DeepLearning - CIFAR10 Convolutional Network]) -- Use OpenCV on Spark to manipulate images ([OpenCV - Pipeline Image Transformations]) -- Train classification and regression models easily via implicit featurization - of data ([Classification - Adult Census]) -- Train and evaluate a flight delay prediction system ([Regression - Flight Delays]) -- Finding anomalous data access patterns using the Access Anomalies package of CyberML ([CyberML - Anomalous Access Detection]) -- Model interpretation ([Interpretability - Tabular SHAP Explainer], [Interpretability - Image Explainers], [Interpretability - Text Explainers]) -- Do Data Balance Analysis to determine how well features and feature values are represented in your dataset ([DataBalanceAnalysis - Adult Census Income]) - - -[Classification - Adult Census]: ../classification/Classification%20-%20Adult%20Census "Classification - Adult Census" - -[Regression - Flight Delays]: ../regression/Regression%20-%20Flight%20Delays "Regression - Flight Delays" - -[LightGBM Overview]: ../../features/lightgbm/LightGBM%20-%20Overview "LightGBM Overview" - -[TextAnalytics - Amazon Book Reviews]: ../text_analytics/TextAnalytics%20-%20Amazon%20Book%20Reviews "TextAnalytics - Amazon Book Reviews" - -[HyperParameterTuning - Fighting Breast Cancer]: ../HyperParameterTuning%20-%20Fighting%20Breast%20Cancer "HyperParameterTuning - Fighting Breast Cancer" - -[DeepLearning - CIFAR10 Convolutional Network]: ../deep_learning/DeepLearning%20-%20CIFAR10%20Convolutional%20Network "DeepLearning - CIFAR10 Convolutional Network" - -[OpenCV - Pipeline Image Transformations]: ../OpenCV%20-%20Pipeline%20Image%20Transformations "OpenCV - Pipeline Image Transformations" - -[DeepLearning - BiLSTM Medical Entity Extraction]: ../deep_learning/DeepLearning%20-%20BiLSTM%20Medical%20Entity%20Extraction "DeepLearning - BiLSTM Medical Entity Extraction" - -[DeepLearning - Flower Image Classification]: ../deep_learning/DeepLearning%20-%20Flower%20Image%20Classification "DeepLearning - Flower Image Classification" - -[CyberML - Anomalous Access Detection]: ../CyberML%20-%20Anomalous%20Access%20Detection "CyberML - Anomalous Access Detection" - -[Interpretability - Tabular SHAP Explainer]: ../responsible_ai/Interpretability%20-%20Tabular%20SHAP%20explainer "Interpretability - Tabular SHAP Explainer" - -[Interpretability - Image Explainers]: ../../features/responsible_ai/Interpretability%20-%20Image%20Explainers "Interpretability - Image Explainers" - -[Interpretability - Text Explainers]: ../responsible_ai/Interpretability%20-%20Text%20Explainers "Interpretability - Text Explainers" - -[DataBalanceAnalysis - Adult Census Income]: ../responsible_ai/DataBalanceAnalysis%20-%20Adult%20Census%20Income "DataBalanceAnalysis - Adult Census Income" diff --git a/website/docs/features/http/about.md b/website/docs/features/http/about.md deleted file mode 100644 index e209dd198f..0000000000 --- 
a/website/docs/features/http/about.md +++ /dev/null @@ -1,161 +0,0 @@ ---- -title: HTTP on Apache Spark -hide_title: true -sidebar_label: About ---- - -# HTTP on Apache Spark - -### A library for interacting with HTTP services from Apache Spark - -- **Flexible**: Encodes the entire HTTP protocol in Apache Spark for - full control of web requests -- **Performant**: Fully distributed across workers, with built-in support for - multi-threaded buffering, batching, and asynchronous request - concurrency. -- **Easy to Use**: High-level APIs for automatic parsing of requests, - abstracting all HTTP knowledge away under the hood. -- **Accessible from Multiple Languages**: Usable in Python and Scala. - Native integration with Scala's Apache HTTP Core. Native - integration with the [Python Requests] library coming soon! -- **Composable**: Pipeline Stage APIs allow users to embed and compose - web services with SparkML machine learning models. - -[Apache HTTP Core]: https://hc.apache.org/httpcomponents-core-ga/httpcore/apidocs/org/apache/http/package-summary.html - -[Python Requests]: http://docs.python-requests.org/en/master/ - -## Usage - -### Send a JSON POST request - -```python -import synapse.ml -from synapse.ml.io.http import SimpleHTTPTransformer, JSONOutputParser -from pyspark.sql.types import StructType, StringType - -df = sc.parallelize([(x, ) for x in range(100)]).toDF("data") - -client = SimpleHTTPTransformer() \ - .setInputCol("data") \ - .setOutputParser(JSONOutputParser() \ - .setDataType(StructType().add("replies", StringType()))) \ - .setUrl("www.my_service_url.com/any_api_here") \ - .setOutputCol("results") - -responses = client.transform(df) -``` - -## High-Performance Functionality - -The Simple HTTP transformer provides options for batching request bodies -and asynchronous request sending. For simplicity and easier debugging, -these options are not enabled by default. - -- `maxBatchSize`: Parameter that enables buffered minibatching. If this - parameter is set, a background thread will fetch at most - `maxBatchSize` requests. These requests are combined by creating an - array of their entity data. The method sends _up to_ `maxBatchSize` - requests; rapid iterator materialization will result in smaller - batches as the background thread does not have enough time to - materialize a full batch. In other words, each new request sends all - of the new data that has accumulated at this stage of the pipeline. - -- `concurrency`: This parameter allows one to send up to `concurrency` - requests simultaneously using Scala futures under the hood. If this - parameter is set to 1 (the default), then no Scala futures are used. - -- `concurrentTimeout`: If `concurrency`>1, requests will fail if they do - not receive a response within `concurrentTimeout` seconds. - -- `handlingStrategy`: (`"basic"` or `"advanced"`) advanced handling - uses exponential backoff on the retries and can handle responses that - instruct clients to throttle or retry. - -```python -SimpleHTTPTransformer() \ - .setMaxBatchSize(100) \ - .setConcurrency(5) \ - .setConcurrentTimeout(30.0) \ - .setHandlingStrategy("advanced") -``` - -## Architecture - -HTTP on Spark encapsulates the entire HTTP protocol within Spark's -datatypes. Users can create flexible web clients that communicate with a -wide variety of endpoints. SynapseML provides methods to convert between -Scala case classes, Spark types, and Apache HTTP Core types.
A common -representation makes it easy to work with HTTP on Spark from Scala, -Python, or any other Spark-compatible language. This common -representation is serializable, allowing for complex operations like SQL -joins and repartitions. - -In HTTP on Spark, each partition manages a running web client that sends -requests. A schematic representation can be seen below: - -
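To make the per-partition flow just described concrete, here is a minimal, hypothetical sketch that combines the usage example and the high-performance options shown above (the endpoint URL, column name, and partition count are placeholders, not from the original document). Repartitioning the input controls how many worker-side web clients run at once, and `concurrency` adds request-level parallelism within each partition.

```python
from synapse.ml.io.http import SimpleHTTPTransformer, JSONOutputParser
from pyspark.sql.types import StructType, StringType

# Hypothetical input: one request body per row; 8 partitions -> 8 worker-side web clients.
df = sc.parallelize([(x,) for x in range(100)]).toDF("data").repartition(8)

client = (SimpleHTTPTransformer()
          .setInputCol("data")
          .setOutputParser(JSONOutputParser()
                           .setDataType(StructType().add("replies", StringType())))
          .setUrl("www.my_service_url.com/any_api_here")  # placeholder endpoint
          .setOutputCol("results")
          .setConcurrency(5)            # each partition issues up to 5 requests at a time
          .setConcurrentTimeout(30.0))

responses = client.transform(df)  # requests are sent from the workers, partition by partition
```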

- -

- -## Schema - -This library adds Spark types that faithfully represent the HTTP -protocol for requests and responses. SynapseML provides several ways to -create these objects from the apache HTTP core library, and from a set -of case classes. - -The schema for a complete HTTP request looks like: - - request: struct (nullable = true) - +-- requestLine: struct (nullable = true) - | +-- method: string (nullable = true) - | +-- uri: string (nullable = true) - | +-- protoclVersion: struct (nullable = true) - | +-- protocol: string (nullable = true) - | +-- major: integer (nullable = false) - | +-- minor: integer (nullable = false) - +-- headers: array (nullable = true) - | +-- element: struct (containsNull = true) - | +-- name: string (nullable = true) - | +-- value: string (nullable = true) - +-- entity: struct (nullable = true) - +-- content: binary (nullable = true) - +-- contentEncoding: struct (nullable = true) - | +-- name: string (nullable = true) - | +-- value: string (nullable = true) - +-- contentLenth: long (nullable = false) - +-- contentType: struct (nullable = true) - | +-- name: string (nullable = true) - | +-- value: string (nullable = true) - +-- isChunked: boolean (nullable = false) - +-- isRepeatable: boolean (nullable = false) - +-- isStreaming: boolean (nullable = false) - -And the schema for a complete response looks like: - - response: struct (nullable = true) - +-- headers: array (nullable = true) - | +-- element: struct (containsNull = true) - | +-- name: string (nullable = true) - | +-- value: string (nullable = true) - +-- entity: struct (nullable = true) - | +-- content: binary (nullable = true) - | +-- contentEncoding: struct (nullable = true) - | | +-- name: string (nullable = true) - | | +-- value: string (nullable = true) - | +-- contentLenth: long (nullable = false) - | +-- contentType: struct (nullable = true) - | | +-- name: string (nullable = true) - | | +-- value: string (nullable = true) - | +-- isChunked: boolean (nullable = false) - | +-- isRepeatable: boolean (nullable = false) - | +-- isStreaming: boolean (nullable = false) - +-- statusLine: struct (nullable = true) - | +-- protocolVersion: struct (nullable = true) - | | +-- protocol: string (nullable = true) - | | +-- major: integer (nullable = false) - | | +-- minor: integer (nullable = false) - | +-- statusCode: integer (nullable = false) - | +-- reasonPhrase: string (nullable = true) - +-- locale: string (nullable = true) diff --git a/website/sidebars.js b/website/sidebars.js index 4f3884d4f1..8e6a7a2ebb 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -1,19 +1,15 @@ const { listExamplePaths } = require('./src/plugins/examples'); -let features_http_docs = listExamplePaths("features", "http"); -let features_lightgbm_docs = listExamplePaths("features", "lightgbm"); -let features_onnx_docs = listExamplePaths("features", "onnx"); -let features_rai_docs = listExamplePaths("features", "responsible_ai"); -let features_ss_docs = listExamplePaths("features", "spark_serving"); -let features_vw_docs = listExamplePaths("features", "vw"); - -let examples_cl_docs = listExamplePaths("examples", "classification"); -let examples_cs_docs = listExamplePaths("examples", "cognitive_services"); -let examples_dl_docs = listExamplePaths("examples", "deep_learning"); -let examples_rai_docs = listExamplePaths("examples", "responsible_ai"); -let examples_rg_docs = listExamplePaths("examples", "regression"); -let examples_ta_docs = listExamplePaths("examples", "text_analytics"); - +let cs_pages = 
listExamplePaths("features", "cognitive_services"); +let rai_pages = listExamplePaths("features", "responsible_ai"); +let onnx_pages = listExamplePaths("features", "onnx"); +let lgbm_pages = listExamplePaths("features", "lightgbm"); +let vw_pages = listExamplePaths("features", "vw"); +let ss_pages = listExamplePaths("features", "spark_serving"); +let ocv_pages = listExamplePaths("features", "opencv"); +let cls_pages = listExamplePaths("features", "classification"); +let reg_pages = listExamplePaths("features", "regression"); +let other_pages = listExamplePaths("features", "other"); module.exports = { docs: [ @@ -34,79 +30,57 @@ module.exports = { type: 'category', label: 'Features', items: [ - 'features/CognitiveServices - Overview', - { - type: 'category', - label: 'HTTP on Spark', - items: features_http_docs, - }, { type: 'category', - label: 'LightGBM', - items: features_lightgbm_docs, + label: 'Cognitive Services', + items: cs_pages, }, { type: 'category', label: 'Responsible AI', - items: features_rai_docs, + items: rai_pages, }, { type: 'category', label: 'ONNX', - items: features_onnx_docs, + items: onnx_pages, }, { type: 'category', - label: 'Spark Serving', - items: features_ss_docs, + label: 'LightGBM', + items: lgbm_pages, }, { type: 'category', label: 'Vowpal Wabbit', - items: features_vw_docs, - }, - ], - }, - { - type: 'category', - label: 'Examples', - items: [ - 'examples/about', - 'examples/AzureSearchIndex - Met Artworks', - 'examples/ConditionalKNN - Exploring Art Across Cultures', - 'examples/CyberML - Anomalous Access Detection', - 'examples/HyperParameterTuning - Fighting Breast Cancer', - 'examples/OpenCV - Pipeline Image Transformations', - { - type: 'category', - label: 'Classification', - items: examples_cl_docs, + items: vw_pages, }, { type: 'category', - label: 'Cognitive Services', - items: examples_cs_docs, + label: 'Spark Serving', + items: ss_pages, }, { type: 'category', - label: 'Deep Learning', - items: examples_dl_docs, + label: 'OpenCV', + items: ocv_pages, }, { type: 'category', - label: 'Responsible AI', - items: examples_rai_docs, + label: 'Classification', + items: cls_pages, }, { type: 'category', label: 'Regression', - items: examples_rg_docs, + items: reg_pages, }, { type: 'category', - label: 'Text Analytics', - items: examples_ta_docs, + label: 'Other', + items: other_pages, }, + ], }, { diff --git a/website/src/pages/index.js b/website/src/pages/index.js index 21ce4d92d9..463cf9f04a 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -13,7 +13,7 @@ import clsx from "clsx"; const snippets = [ { - label: "Text Analytics", + label: "Cognitive Services", further: "docs/features/CognitiveServices%20-%20Overview#text-analytics-sample", config: `from synapse.ml.cognitive import * diff --git a/website/src/plugins/examples/index.js b/website/src/plugins/examples/index.js index 54b42bcd0d..8d3c0f512a 100644 --- a/website/src/plugins/examples/index.js +++ b/website/src/plugins/examples/index.js @@ -25,12 +25,12 @@ function all_examples_for_type(folder, type) { function all_examples() { let ex_links = [ - `examples/AzureSearchIndex - Met Artworks.md`, - `examples/classification/Classification - Adult Census.md`, - `features/CognitiveServices - Overview.md`, - `examples/ConditionalKNN - Exploring Art Across Cultures.md`, - `examples/CyberML - Anomalous Access Detection.md`, - `examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.md`, + `features/other/AzureSearchIndex - Met Artworks.md`, + 
`features/classification/Classification - Adult Census.md`, + `features/cognitive_services/CognitiveServices - Overview.md`, + `features/other/ConditionalKNN - Exploring Art Across Cultures.md`, + `features/other/CyberML - Anomalous Access Detection.md`, + `features/responsible_ai/DataBalanceAnalysis - Adult Census Income.md`, `features/responsible_ai/Interpretability - Image Explainers.md`, `features/onnx/ONNX - Inference on Spark.md`, `features/lightgbm/LightGBM - Overview.md`, diff --git a/website/versioned_docs/version-0.9.1/examples/AzureSearchIndex - Met Artworks.md b/website/versioned_docs/version-0.9.1/examples/AzureSearchIndex - Met Artworks.md index 386447e8e2..e045e82d48 100644 --- a/website/versioned_docs/version-0.9.1/examples/AzureSearchIndex - Met Artworks.md +++ b/website/versioned_docs/version-0.9.1/examples/AzureSearchIndex - Met Artworks.md @@ -50,20 +50,33 @@ data = spark.read\ ```python from synapse.ml.cognitive import AnalyzeImage + from synapse.ml.stages import SelectColumns + #define pipeline + describeImage = (AnalyzeImage() + .setSubscriptionKey(VISION_API_KEY) + .setLocation("eastus") + .setImageUrlCol("PrimaryImageUrl") + .setOutputCol("RawImageDescription") + .setErrorCol("Errors") + .setVisualFeatures(["Categories", "Description", "Faces", "ImageType", "Color", "Adult"]) + .setConcurrency(5)) + + df2 = describeImage.transform(data)\ + .select("*", "RawImageDescription.*").drop("Errors", "RawImageDescription") ``` @@ -76,10 +89,15 @@ Before writing the results to a Search Index, you must define a schema which mus from synapse.ml.cognitive import * df2.writeToAzureSearch( + subscriptionKey=AZURE_SEARCH_KEY, + actionCol="searchAction", + serviceName=search_service, + indexName=search_index, + keyCol="ObjectID") ``` diff --git a/website/versioned_docs/version-0.9.1/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.md b/website/versioned_docs/version-0.9.1/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.md index 59b5b59fa9..b191225293 100644 --- a/website/versioned_docs/version-0.9.1/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.md +++ b/website/versioned_docs/version-0.9.1/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.md @@ -134,34 +134,59 @@ train, test = images.randomSplit([.7,.3], seed=1) ```python from pyspark.ml import Pipeline + from pyspark.ml.feature import StringIndexer + from pyspark.ml.classification import LogisticRegression + from pyspark.sql.functions import udf + from synapse.ml.downloader import ModelDownloader + from synapse.ml.cntk import ImageFeaturizer + from synapse.ml.stages import UDFTransformer + from pyspark.sql.types import * + def getIndex(row): + return float(row[1]) + if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": + network = ModelDownloader(spark, "abfss://synapse@mmlsparkeuap.dfs.core.windows.net/models/").downloadByName("ResNet50") + else: + network = ModelDownloader(spark, "dbfs:/Models/").downloadByName("ResNet50") + + model = Pipeline(stages=[ + StringIndexer(inputCol = "labels", outputCol="index"), + ImageFeaturizer(inputCol="image", outputCol="features", cutOutputLayers=1).setModel(network), + LogisticRegression(maxIter=5, labelCol="index", regParam=10.0), + UDFTransformer()\ + .setUDF(udf(getIndex, DoubleType()))\ + .setInputCol("probability")\ + .setOutputCol("leopard_prob") + ]) + + fitModel = model.fit(train) ``` @@ -170,35 +195,60 @@ fitModel = model.fit(train) ```python def plotConfusionMatrix(df, label, 
prediction, classLabels): + from synapse.ml.plot import confusionMatrix + import matplotlib.pyplot as plt + fig = plt.figure(figsize=(4.5, 4.5)) + confusionMatrix(df, label, prediction, classLabels) + display(fig) + + if os.environ.get("AZURE_SERVICE", None) != "Microsoft.ProjectArcadia": + plotConfusionMatrix(fitModel.transform(test), "index", "prediction", fitModel.stages[0].labels) ``` ```python import urllib.request + from synapse.ml.lime import ImageLIME + + test_image_url = "https://mmlspark.blob.core.windows.net/graphics/SnowLeopardAD/snow_leopard1.jpg" + with urllib.request.urlopen(test_image_url) as url: + barr = url.read() + test_subsample = spark.createDataFrame([(bytearray(barr),)], ["image"]) + + lime = ImageLIME()\ + .setModel(fitModel)\ + .setPredictionCol("leopard_prob")\ + .setOutputCol("weights")\ + .setInputCol("image")\ + .setCellSize(100.0)\ + .setModifier(50.0)\ + .setNSamples(300) + + result = lime.transform(test_subsample) ``` diff --git a/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.md b/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.md index 2f9bb154ef..d88de1c922 100644 --- a/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.md +++ b/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.md @@ -73,25 +73,45 @@ Generate several models with different parameters from the training data. ```python from pyspark.ml.classification import LogisticRegression, RandomForestClassifier, GBTClassifier + from synapse.ml.train import TrainClassifier + import itertools + + lrHyperParams = [0.05, 0.2] + logisticRegressions = [LogisticRegression(regParam = hyperParam) + for hyperParam in lrHyperParams] + lrmodels = [TrainClassifier(model=lrm, labelCol="label").fit(ptrain) + for lrm in logisticRegressions] + + rfHyperParams = itertools.product([5, 10], [2, 3]) + randomForests = [RandomForestClassifier(numTrees=hyperParam[0], maxDepth=hyperParam[1]) + for hyperParam in rfHyperParams] + rfmodels = [TrainClassifier(model=rfm, labelCol="label").fit(ptrain) + for rfm in randomForests] + + gbtHyperParams = itertools.product([8, 16], [2, 3]) + gbtclassifiers = [GBTClassifier(maxBins=hyperParam[0], maxDepth=hyperParam[1]) + for hyperParam in gbtHyperParams] + gbtmodels = [TrainClassifier(model=gbt, labelCol="label").fit(ptrain) + for gbt in gbtclassifiers] @@ -104,9 +124,13 @@ Find the best model for the given test dataset. ```python from synapse.ml.automl import FindBestModel + bestModel = FindBestModel(evaluationMetric="AUC", models=trainedModels).fit(ptest) + bestModel.getRocCurve().show() + bestModel.getBestModelMetrics().show() + bestModel.getAllModelMetrics().show() ``` @@ -115,10 +139,16 @@ Get the accuracy from the validation dataset. 
```python from synapse.ml.train import ComputeModelStatistics + predictions = bestModel.transform(pvalidation) + metrics = ComputeModelStatistics().transform(predictions) + print("Best model's accuracy on validation set = " + + "{0:.2f}%".format(metrics.first()["accuracy"] * 100)) + print("Best model's AUC on validation set = " + + "{0:.2f}%".format(metrics.first()["AUC"] * 100)) ``` diff --git a/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews.md b/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews.md index 5aaac3127a..ca9e1b635f 100644 --- a/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews.md +++ b/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews.md @@ -34,8 +34,11 @@ to generate 2²⁰ sparse features. ```python from synapse.ml.featurize.text import TextFeaturizer + textFeaturizer = TextFeaturizer() \ + .setInputCol("text").setOutputCol("features") \ + .setUseStopWordsRemover(True).setUseIDF(True).setMinDocFreq(5).setNumFeatures(1 << 16).fit(data) ``` @@ -60,12 +63,19 @@ Train several Logistic Regression models with different regularizations. ```python train, test, validation = processedData.randomSplit([0.60, 0.20, 0.20]) + from pyspark.ml.classification import LogisticRegression + + lrHyperParams = [0.05, 0.1, 0.2, 0.4] + logisticRegressions = [LogisticRegression(regParam = hyperParam) for hyperParam in lrHyperParams] + + from synapse.ml.train import TrainClassifier + lrmodels = [TrainClassifier(model=lrm, labelCol="label").fit(train) for lrm in logisticRegressions] ``` @@ -74,10 +84,16 @@ Find the model with the best AUC on the test set. ```python from synapse.ml.automl import FindBestModel, BestModel + bestModel = FindBestModel(evaluationMetric="AUC", models=lrmodels).fit(test) + bestModel.getRocCurve().show() + bestModel.getBestModelMetrics().show() + bestModel.getAllModelMetrics().show() + + ``` Use the optimized `ComputeModelStatistics` API to find the model accuracy. @@ -85,8 +101,12 @@ Use the optimized `ComputeModelStatistics` API to find the model accuracy. 
```python from synapse.ml.train import ComputeModelStatistics + predictions = bestModel.transform(validation) + metrics = ComputeModelStatistics().transform(predictions) + print("Best model's accuracy on validation set = " + + "{0:.2f}%".format(metrics.first()["accuracy"] * 100)) ``` diff --git a/website/versioned_docs/version-0.9.1/features/CognitiveServices - Overview.md b/website/versioned_docs/version-0.9.1/features/CognitiveServices - Overview.md index ce1c16acc0..59694c4598 100644 --- a/website/versioned_docs/version-0.9.1/features/CognitiveServices - Overview.md +++ b/website/versioned_docs/version-0.9.1/features/CognitiveServices - Overview.md @@ -85,12 +85,20 @@ To get started, we'll need to add this code to the project: ```python from pyspark.sql.functions import udf, col + from synapse.ml.io.http import HTTPTransformer, http_udf + from requests import Request + from pyspark.sql.functions import lit + from pyspark.ml import PipelineModel + from pyspark.sql.functions import col + import os + + ``` @@ -114,13 +122,22 @@ if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": ```python from synapse.ml.cognitive import * + + # A general Cognitive Services key for Text Analytics, Computer Vision and Form Recognizer (or use separate keys that belong to each service) + service_key = os.environ["COGNITIVE_SERVICE_KEY"] + # A Bing Search v7 subscription key + bing_search_key = os.environ["BING_IMAGE_SEARCH_KEY"] + # An Anomaly Dectector subscription key + anomaly_key = os.environ["ANOMALY_API_KEY"] + # A Translator subscription key + translator_key = os.environ["TRANSLATOR_KEY"] ``` diff --git a/website/versioned_docs/version-0.9.1/features/lightgbm/LightGBM - Overview.md b/website/versioned_docs/version-0.9.1/features/lightgbm/LightGBM - Overview.md index e86275b4d7..6802c3b748 100644 --- a/website/versioned_docs/version-0.9.1/features/lightgbm/LightGBM - Overview.md +++ b/website/versioned_docs/version-0.9.1/features/lightgbm/LightGBM - Overview.md @@ -116,13 +116,21 @@ By calling "saveNativeModel", it allows you to extract the underlying lightGBM m ```python from synapse.ml.lightgbm import LightGBMClassificationModel + + if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": + model.saveNativeModel("/models/lgbmclassifier.model") + model = LightGBMClassificationModel.loadNativeModelFromFile("/models/lgbmclassifier.model") + else: + model.saveNativeModel("/lgbmclassifier.model") + model = LightGBMClassificationModel.loadNativeModelFromFile("/lgbmclassifier.model") + ``` #### Feature Importances Visualization diff --git a/website/versioned_docs/version-0.9.1/features/onnx/ONNX - Inference on Spark.md b/website/versioned_docs/version-0.9.1/features/onnx/ONNX - Inference on Spark.md index 86a9d0df7a..1394a88934 100644 --- a/website/versioned_docs/version-0.9.1/features/onnx/ONNX - Inference on Spark.md +++ b/website/versioned_docs/version-0.9.1/features/onnx/ONNX - Inference on Spark.md @@ -30,34 +30,63 @@ Use LightGBM to train a model ```python from pyspark.ml.feature import VectorAssembler + from synapse.ml.lightgbm import LightGBMClassifier + + feature_cols = df.columns[1:] + featurizer = VectorAssembler( + inputCols=feature_cols, + outputCol='features' + ) + + train_data = featurizer.transform(df)['Bankrupt?', 'features'] + + model = ( + LightGBMClassifier(featuresCol="features", labelCol="Bankrupt?") + .setEarlyStoppingRound(300) + .setLambdaL1(0.5) + .setNumIterations(1000) + .setNumThreads(-1) + .setMaxDeltaStep(0.5) + .setNumLeaves(31) + 
.setMaxDepth(-1) + .setBaggingFraction(0.7) + .setFeatureFraction(0.7) + .setBaggingFreq(2) + .setObjective("binary") + .setIsUnbalance(True) + .setMinSumHessianInLeaf(20) + .setMinGainToSplit(0.01) + ) + + model = model.fit(train_data) ``` @@ -86,8 +115,14 @@ Load the ONNX payload into an `ONNXModel`, and inspect the model inputs and outp ```python from synapse.ml.onnx import ONNXModel + + onnx_ml = ONNXModel().setModelPayload(model_payload_ml) + + + print("Model inputs:" + str(onnx_ml.getModelInputs())) + print("Model outputs:" + str(onnx_ml.getModelOutputs())) ``` diff --git a/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md b/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md index cda7d70d5d..1051f28447 100644 --- a/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md +++ b/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md @@ -12,49 +12,87 @@ First we import the packages and define some UDFs and a plotting function we wil ```python from synapse.ml.explainers import * + from synapse.ml.onnx import ONNXModel + from synapse.ml.opencv import ImageTransformer + from synapse.ml.io import * + from pyspark.ml import Pipeline + from pyspark.ml.classification import LogisticRegression + from pyspark.ml.feature import StringIndexer + from pyspark.sql.functions import * + from pyspark.sql.types import * + import numpy as np + import pyspark + import urllib.request + import matplotlib.pyplot as plt + import PIL, io + from PIL import Image + + vec_slice = udf(lambda vec, indices: (vec.toArray())[indices].tolist(), ArrayType(FloatType())) + arg_top_k = udf(lambda vec, k: (-vec.toArray()).argsort()[:k].tolist(), ArrayType(IntegerType())) + def downloadBytes(url: str): + with urllib.request.urlopen(url) as url: + barr = url.read() + return barr - + + def rotate_color_channel(bgr_image_array, height, width, nChannels): + B, G, R, *_ = np.asarray(bgr_image_array).reshape(height, width, nChannels).T + rgb_image_array = np.array((R, G, B)).T + return rgb_image_array + def plot_superpixels(image_rgb_array, sp_clusters, weights, green_threshold=99): + superpixels = sp_clusters + green_value = np.percentile(weights, green_threshold) + img = Image.fromarray(image_rgb_array, mode='RGB').convert("RGBA") + image_array = np.asarray(img).copy() + for (sp, v) in zip(superpixels, weights): + if v > green_value: + for (x, y) in sp: + image_array[y, x, 1] = 255 + image_array[y, x, 3] = 200 + plt.clf() + plt.imshow(image_array) + display() ``` @@ -66,34 +104,64 @@ The result shows 39.6% probability of "violin" (889), and 38.4% probability of " ```python from synapse.ml.io import * + + image_df = spark.read.image().load("wasbs://publicwasb@mmlspark.blob.core.windows.net/explainers/images/david-lusvardi-dWcUncxocQY-unsplash.jpg") + display(image_df) + + # Rotate the image array from BGR into RGB channels for visualization later. 
+ row = image_df.select("image.height", "image.width", "image.nChannels", "image.data").head() + locals().update(row.asDict()) + rgb_image_array = rotate_color_channel(data, height, width, nChannels) + + # Download the ONNX model + modelPayload = downloadBytes("https://mmlspark.blob.core.windows.net/publicwasb/ONNXModels/resnet50-v2-7.onnx") + + featurizer = ( + ImageTransformer(inputCol="image", outputCol="features") + .resize(224, True) + .centerCrop(224, 224) + .normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225], color_scale_factor = 1/255) + .setTensorElementType(FloatType()) + ) + + onnx = ( + ONNXModel() + .setModelPayload(modelPayload) + .setFeedDict({"data": "features"}) + .setFetchDict({"rawPrediction": "resnetv24_dense0_fwd"}) + .setSoftMaxDict({"rawPrediction": "probability"}) + .setMiniBatchSize(1) + ) + + model = Pipeline(stages=[featurizer, onnx]).fit(image_df) ``` From 43870b6e0043d09a89052f2131a1fbd8a624a6dd Mon Sep 17 00:00:00 2001 From: Mark Date: Tue, 9 Nov 2021 19:40:32 -0500 Subject: [PATCH 15/40] docs: tweak announcement styling --- website/src/pages/index.js | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/website/src/pages/index.js b/website/src/pages/index.js index 463cf9f04a..66ab0a4942 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -176,20 +176,17 @@ function Home() { /> -
-
-
- Coming from - - MMLSpark - - ? We have been renamed to SynapseML! -
-
-
+
+
+
+ Coming from MMLSpark? + We have been renamed to SynapseML! +
+
+
From 6459f3cdf3e88f78c132ccf5965e510fe8a16e7d Mon Sep 17 00:00:00 2001 From: Mark Date: Tue, 9 Nov 2021 20:48:39 -0500 Subject: [PATCH 16/40] docs: fixup blog posts --- website/blog/2019-08-24-Welcome to Azure Cognitive Services.md | 2 +- ...-10-02-MMLSpark empowering AI for Good with Mark Hamilton.md | 2 +- ...s.md => 2020-12-01-Large-Scale Intelligent Microservices.md} | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) rename website/blog/{2020-12-Large-Scale Intelligent Microservices.md => 2020-12-01-Large-Scale Intelligent Microservices.md} (96%) diff --git a/website/blog/2019-08-24-Welcome to Azure Cognitive Services.md b/website/blog/2019-08-24-Welcome to Azure Cognitive Services.md index ea11736f6c..d7ee780445 100644 --- a/website/blog/2019-08-24-Welcome to Azure Cognitive Services.md +++ b/website/blog/2019-08-24-Welcome to Azure Cognitive Services.md @@ -2,7 +2,7 @@ title: "Dear Spark developers: Welcome to Azure Cognitive Services" description: "Dear Spark developers: Welcome to Azure Cognitive Services" keywords: [ - "Cognitive Services", + "Cognitive Services", "blog post", ] --- diff --git a/website/blog/2019-10-02-MMLSpark empowering AI for Good with Mark Hamilton.md b/website/blog/2019-10-02-MMLSpark empowering AI for Good with Mark Hamilton.md index 827453433f..fc6f280680 100644 --- a/website/blog/2019-10-02-MMLSpark empowering AI for Good with Mark Hamilton.md +++ b/website/blog/2019-10-02-MMLSpark empowering AI for Good with Mark Hamilton.md @@ -2,7 +2,7 @@ title: "MMLSpark: empowering AI for Good with Mark Hamilton" description: "MMLSpark: empowering AI for Good with Mark Hamilton" keywords: [ - "Mark Hamilton", + "AI for good", "Microsoft Research", "Podcast", ] --- diff --git a/website/blog/2020-12-Large-Scale Intelligent Microservices.md b/website/blog/2020-12-01-Large-Scale Intelligent Microservices.md similarity index 96% rename from website/blog/2020-12-Large-Scale Intelligent Microservices.md rename to website/blog/2020-12-01-Large-Scale Intelligent Microservices.md index 270bdf6673..e683ce7c07 100644 --- a/website/blog/2020-12-Large-Scale Intelligent Microservices.md +++ b/website/blog/2020-12-01-Large-Scale Intelligent Microservices.md @@ -2,7 +2,7 @@ title: "Publication - Large-Scale Intelligent Microservices" description: "Large-Scale Intelligent Microservices" keywords: [ - "microservices", + "microservices", "IEEE Big Data", "paper", ] --- From c0b516b26b26776600dc72f964890d9b02a2cbce Mon Sep 17 00:00:00 2001 From: Mark Hamilton Date: Wed, 10 Nov 2021 00:01:31 -0500 Subject: [PATCH 17/40] doc: fix broken website (#1251) --- ...ultilingual Search Engine from Forms.ipynb | 302 +++++++++++++++++- pipeline.yaml | 9 +- .../responsible_ai/Data Balance Analysis.md | 2 +- .../Model Interpretation on Spark.md | 4 +- website/docs/reference/datasets.md | 2 +- website/src/pages/index.js | 2 +- .../version-0.9.1/examples/about.md | 57 ---- ...ation - Adult Census with Vowpal Wabbit.md | 0 .../Classification - Adult Census.md | 2 - ...sification - Before and After SynapseML.md | 0 ... 
- Twitter Sentiment with Vowpal Wabbit.md | 0 ...tiveServices - Celebrity Quote Analysis.md | 0 ...a Multilingual Search Engine from Forms.md | 165 ++++++++++ .../CognitiveServices - Overview.md | 16 - ...nitiveServices - Predictive Maintenance.md | 0 ...Spark - Working with Arbitrary Web APIs.md | 48 --- .../version-0.9.1/features/http/about.md | 161 ---------- .../features/lightgbm/LightGBM - Overview.md | 9 - .../onnx/ONNX - Inference on Spark.md | 34 -- ...OpenCV - Pipeline Image Transformations.md | 0 .../other}/AzureSearchIndex - Met Artworks.md | 20 -- ...onalKNN - Exploring Art Across Cultures.md | 0 .../CyberML - Anomalous Access Detection.md | 0 ...ning - BiLSTM Medical Entity Extraction.md | 0 ...earning - CIFAR10 Convolutional Network.md | 0 ...pLearning - Flower Image Classification.md | 0 .../DeepLearning - Transfer Learning.md | 0 ...arameterTuning - Fighting Breast Cancer.md | 0 ...ics - Amazon Book Reviews with Word2Vec.md | 32 -- .../TextAnalytics - Amazon Book Reviews.md | 19 -- .../regression/Regression - Auto Imports.md | 0 ...ssion - Flight Delays with DataCleaning.md | 0 .../regression/Regression - Flight Delays.md | 0 ...abbit vs. LightGBM vs. Linear Regressor.md | 0 .../responsible_ai/Data Balance Analysis.md | 2 +- ...taBalanceAnalysis - Adult Census Income.md | 0 ...nterpretability - Explanation Dashboard.md | 0 .../Interpretability - Image Explainers.md | 14 - ...erpretability - Snow Leopard Detection.md} | 54 +--- ...terpretability - Tabular SHAP explainer.md | 0 .../Interpretability - Text Explainers.md | 0 .../Model Interpretation on Spark.md | 4 +- .../features/vw/Vowpal Wabbit - Overview.md | 2 - .../version-0.9.1/reference/datasets.md | 2 +- .../version-0.9.1-sidebars.json | 168 ++++------ yarn.lock | 4 + 46 files changed, 555 insertions(+), 579 deletions(-) delete mode 100644 website/versioned_docs/version-0.9.1/examples/about.md rename website/versioned_docs/version-0.9.1/{examples => features}/classification/Classification - Adult Census with Vowpal Wabbit.md (100%) rename website/versioned_docs/version-0.9.1/{examples => features}/classification/Classification - Adult Census.md (99%) rename website/versioned_docs/version-0.9.1/{examples => features}/classification/Classification - Before and After SynapseML.md (100%) rename website/versioned_docs/version-0.9.1/{examples => features}/classification/Classification - Twitter Sentiment with Vowpal Wabbit.md (100%) rename website/versioned_docs/version-0.9.1/{examples => features}/cognitive_services/CognitiveServices - Celebrity Quote Analysis.md (100%) create mode 100644 website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.md rename website/versioned_docs/version-0.9.1/features/{ => cognitive_services}/CognitiveServices - Overview.md (99%) rename website/versioned_docs/version-0.9.1/{examples => features}/cognitive_services/CognitiveServices - Predictive Maintenance.md (100%) delete mode 100644 website/versioned_docs/version-0.9.1/features/http/HttpOnSpark - Working with Arbitrary Web APIs.md delete mode 100644 website/versioned_docs/version-0.9.1/features/http/about.md rename website/versioned_docs/version-0.9.1/{examples => features/opencv}/OpenCV - Pipeline Image Transformations.md (100%) rename website/versioned_docs/version-0.9.1/{examples => features/other}/AzureSearchIndex - Met Artworks.md (99%) rename website/versioned_docs/version-0.9.1/{examples => features/other}/ConditionalKNN - Exploring Art Across Cultures.md 
(100%) rename website/versioned_docs/version-0.9.1/{examples => features/other}/CyberML - Anomalous Access Detection.md (100%) rename website/versioned_docs/version-0.9.1/{examples/deep_learning => features/other}/DeepLearning - BiLSTM Medical Entity Extraction.md (100%) rename website/versioned_docs/version-0.9.1/{examples/deep_learning => features/other}/DeepLearning - CIFAR10 Convolutional Network.md (100%) rename website/versioned_docs/version-0.9.1/{examples/deep_learning => features/other}/DeepLearning - Flower Image Classification.md (100%) rename website/versioned_docs/version-0.9.1/{examples/deep_learning => features/other}/DeepLearning - Transfer Learning.md (100%) rename website/versioned_docs/version-0.9.1/{examples => features/other}/HyperParameterTuning - Fighting Breast Cancer.md (100%) rename website/versioned_docs/version-0.9.1/{examples/text_analytics => features/other}/TextAnalytics - Amazon Book Reviews with Word2Vec.md (99%) rename website/versioned_docs/version-0.9.1/{examples/text_analytics => features/other}/TextAnalytics - Amazon Book Reviews.md (99%) rename website/versioned_docs/version-0.9.1/{examples => features}/regression/Regression - Auto Imports.md (100%) rename website/versioned_docs/version-0.9.1/{examples => features}/regression/Regression - Flight Delays with DataCleaning.md (100%) rename website/versioned_docs/version-0.9.1/{examples => features}/regression/Regression - Flight Delays.md (100%) rename website/versioned_docs/version-0.9.1/{examples => features}/regression/Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor.md (100%) rename website/versioned_docs/version-0.9.1/{examples => features}/responsible_ai/DataBalanceAnalysis - Adult Census Income.md (100%) rename website/versioned_docs/version-0.9.1/{examples => features}/responsible_ai/Interpretability - Explanation Dashboard.md (100%) rename website/versioned_docs/version-0.9.1/{examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.md => features/responsible_ai/Interpretability - Snow Leopard Detection.md} (98%) rename website/versioned_docs/version-0.9.1/{examples => features}/responsible_ai/Interpretability - Tabular SHAP explainer.md (100%) rename website/versioned_docs/version-0.9.1/{examples => features}/responsible_ai/Interpretability - Text Explainers.md (100%) create mode 100644 yarn.lock diff --git a/notebooks/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.ipynb b/notebooks/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.ipynb index 1e10a0d8b5..f6b897b1f0 100644 --- a/notebooks/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.ipynb +++ b/notebooks/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.ipynb @@ -1 +1,301 @@ -{"cells":[{"cell_type":"code","source":["import os\nkey = os.environ['VISION_API_KEY']\nsearch_key = os.environ['AZURE_SEARCH_KEY']\ntranslator_key = os.environ['TRANSLATOR_KEY']\n\nsearch_service = \"mmlspark-azure-search\"\nsearch_index = 
\"form-demo-index\""],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"05ebf79d-aa8e-4f8e-9105-6deeeb87e9a8"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["from pyspark.sql.functions import udf\nfrom pyspark.sql.types import StringType\n\ndef blob_to_url(blob):\n [prefix, postfix] = blob.split(\"@\")\n container = prefix.split(\"/\")[-1]\n split_postfix = postfix.split(\"/\")\n account = split_postfix[0]\n filepath = \"/\".join(split_postfix[1:])\n return \"https://{}/{}/{}\".format(account, container, filepath)\n\n\ndf2 = (spark.read.format(\"binaryFile\")\n .load(\"wasbs://ignite2021@mmlsparkdemo.blob.core.windows.net/forms/*\")\n .select(\"path\")\n .coalesce(24)\n .limit(10)\n .select(udf(blob_to_url, StringType())(\"path\").alias(\"url\"))\n .cache()\n )\n"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"61bbc0d2-e2fe-40b2-ba6c-6c24ef315c36"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["display(df2)"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"collapsed":false,"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"dc2a155d-6a61-49d3-bd91-0cb96d0f3d0f"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["displayHTML(\"\"\"\n\n\"\"\")"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"a6489a8f-6e8d-4358-9f75-6631340c19a4"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["from synapse.ml.cognitive import AnalyzeInvoices\n\nanalyzed_df = (AnalyzeInvoices()\n .setSubscriptionKey(key)\n .setLocation(\"eastus\")\n .setImageUrlCol(\"url\")\n .setOutputCol(\"invoices\")\n .setErrorCol(\"errors\")\n .setConcurrency(5)\n .transform(df2)\n 
.cache())\n"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"92a91f10-8698-4cca-9f8b-ecca146f0cf0"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["display(analyzed_df)"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"collapsed":false,"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"f3982d15-fe5f-4b98-82c2-d77e29877456"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["from synapse.ml.cognitive import FormOntologyLearner\n\norganized_df = (FormOntologyLearner()\n .setInputCol(\"invoices\")\n .setOutputCol(\"extracted\")\n .fit(analyzed_df.limit(10))\n .transform(analyzed_df)\n .select(\"url\", \"extracted.*\")\n .cache())"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"84954800-58ba-474c-8325-44f3ae08604a"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["display(organized_df)"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"collapsed":false,"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"f8cfd999-0a99-494c-a915-f3d56a9a9b7c"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["from pyspark.sql.functions import explode, col\nitemized_df = (organized_df\n .select(\"*\", explode(col(\"Items\")).alias(\"Item\"))\n .drop(\"Items\")\n .select(\"Item.*\", \"*\")\n 
.drop(\"Item\"))\n"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"5639512f-6649-46af-a6d3-e6e2b6e398fc"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["display(itemized_df)"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"collapsed":false,"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"64319bf8-a9c5-44cb-b8cd-d9743d00951d"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["display(itemized_df.where(col(\"ProductCode\") == 6))"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"collapsed":false,"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"cfc51726-0dfe-40f0-a889-59309fddaf64"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["from synapse.ml.cognitive import Translate\n\ntranslated_df = (Translate()\n .setSubscriptionKey(translator_key)\n .setLocation(\"eastus\")\n .setTextCol(\"Description\")\n .setErrorCol(\"TranslationError\")\n .setOutputCol(\"output\")\n .setToLanguage([\"zh-Hans\", \"fr\", \"ru\", \"cy\"])\n .setConcurrency(5)\n .transform(itemized_df)\n .withColumn(\"Translations\", col(\"output.translations\")[0])\n .drop(\"output\", \"TranslationError\")\n .cache())\n"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"95cb6caa-5c6d-42a0-98bf-ad672216ffca"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["display(translated_df)"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"collapsed":false,"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"8b3d78e0-87bd-43b4-b3ec-329fc9fbda7f"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["from synapse.ml.cognitive import *\nfrom pyspark.sql.functions import 
monotonically_increasing_id, lit\n\n(translated_df\n .withColumn(\"DocID\", monotonically_increasing_id().cast(\"string\"))\n .withColumn(\"SearchAction\", lit(\"upload\"))\n .writeToAzureSearch(\n subscriptionKey=search_key,\n actionCol=\"SearchAction\",\n serviceName=search_service,\n indexName=search_index,\n keyCol=\"DocID\")\n)\n"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"19cb88b7-9e68-4e95-b2bc-b89af76d2688"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":["import requests\nurl = 'https://{}.search.windows.net/indexes/{}/docs/search?api-version=2019-05-06'.format(search_service, search_index)\nrequests.post(url, json={\"search\": \"door\"}, headers = {\"api-key\": search_key}).json()"],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"5d2231a1-3231-450a-bea5-9924073a25e0"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0},{"cell_type":"code","source":[""],"metadata":{"jupyter":{"source_hidden":false,"outputs_hidden":false},"nteract":{"transient":{"deleting":false}},"application/vnd.databricks.v1+cell":{"title":"","showTitle":false,"inputWidgets":{},"nuid":"e4674b70-04c1-4bc4-92ab-357968b80c9e"}},"outputs":[{"output_type":"display_data","metadata":{"application/vnd.databricks.v1+output":{"data":"","errorSummary":"","metadata":{},"errorTraceType":null,"type":"ipynbError","arguments":{}}},"output_type":"display_data","data":{"text/html":[""]},"transient":null}],"execution_count":0}],"metadata":{"language_info":{"name":"python"},"description":null,"save_output":true,"kernelspec":{"name":"synapse_pyspark","display_name":"Synapse PySpark"},"synapse_widget":{"version":"0.1","state":{"16075ac4-be11-498a-a42c-6186fa6b01d9":{"type":"Synapse.DataFrame","sync_state":{"table":{"rows":[{"12":"1120.0","8":"Mackenzie Gray","4":"Coffee Maker Red","11":"1198.4","9":"11","13":"78.4","16":"T.T TAILWIND TRADERS","5":"385.2","10":"22671","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice22671.pdf","1":"25.2","17":"Mackenzie Gray","14":"22, rue du Puits Dixme","0":"39","2":"2.0","7":"34, rue des Grands Champs Versailles","3":"200.0"},{"12":"1120.0","8":"Mackenzie Gray","4":"Celebrations C9","11":"1198.4","9":"11","13":"78.4","16":"T.T TAILWIND TRADERS","5":"10.7","10":"22671","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice22671.pdf","1":"0.7","17":"Mackenzie Gray","14":"22, rue du Puits Dixme","0":"31","2":"1.0","7":"34, rue des Grands Champs Versailles","3":"10.0"},{"12":"1120.0","8":"Mackenzie Gray","4":"Black Bathing System Classic 18 in. H x 60 in. 
W x 32.5","11":"1198.4","9":"11","13":"78.4","16":"T.T TAILWIND TRADERS","5":"802.5","10":"22671","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice22671.pdf","1":"52.5","17":"Mackenzie Gray","14":"22, rue du Puits Dixme","0":"12","2":"3.0","7":"34, rue des Grands Champs Versailles","3":"250.0"},{"12":"1208.5","4":"Blend Solid White Sheer Curtains","15":"Sarosgawk","11":"1293.1","9":"2021-10-05","13":"84.6","16":"T.T TAILWIND TRADERS","5":"235.4","10":"28073","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice28073.pdf","1":"15.4","14":"Maubeuge","0":"17","2":"2.0","18":"San Gabriel","7":"United States","3":"110.0"},{"12":"1208.5","4":"Rechargeable screwdriver with extra battery","15":"Sarosgawk","11":"1293.1","9":"2021-10-05","13":"84.6","16":"T.T TAILWIND TRADERS","5":"667.68","10":"28073","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice28073.pdf","1":"43.68","14":"Maubeuge","0":"56","2":"2.0","18":"San Gabriel","7":"United States","3":"312.0"},{"12":"1208.5","4":"Extractor Steal","15":"Sarosgawk","11":"1293.1","9":"2021-10-05","13":"84.6","16":"T.T TAILWIND TRADERS","5":"390.02","10":"28073","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice28073.pdf","1":"25.52","14":"Maubeuge","0":"40","2":"3.0","18":"San Gabriel","7":"United States","3":"135.0"},{"12":"750.0","8":"Villeneuve-d'Ascq","19":"Villeneuve-d'Ascq","4":"Big Metal Shelving","15":"Braeo","11":"802.5","9":"2021-03-12","13":"52.5","16":"T.T TAILWIND TRADERS","5":"317.79","10":"64808","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","1":"20.79","14":"Circle","0":"43","2":"3.0","18":"828, rue de Berri","7":"3923 Dew Drop","3":"99.0"},{"12":"750.0","8":"Villeneuve-d'Ascq","19":"Villeneuve-d'Ascq","4":"Metal Shelving","15":"Braeo","11":"802.5","9":"2021-03-12","13":"52.5","16":"T.T TAILWIND TRADERS","5":"288.9","10":"64808","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","1":"18.9","14":"Circle","0":"42","2":"3.0","18":"828, rue de Berri","7":"3923 Dew Drop","3":"90.0"},{"12":"750.0","8":"Villeneuve-d'Ascq","19":"Villeneuve-d'Ascq","4":"Measuring Tape","15":"Braeo","11":"802.5","9":"2021-03-12","13":"52.5","16":"T.T TAILWIND TRADERS","5":"131.61","10":"64808","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","1":"8.61","14":"Circle","0":"46","2":"1.0","18":"828, rue de Berri","7":"3923 Dew Drop","3":"123.0"},{"12":"750.0","8":"Villeneuve-d'Ascq","19":"Villeneuve-d'Ascq","4":"Curtain Rod 48 in","15":"Braeo","11":"802.5","9":"2021-03-12","13":"52.5","16":"T.T TAILWIND TRADERS","5":"64.2","10":"64808","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","1":"4.2","14":"Circle","0":"21","2":"3.0","18":"828, rue de Berri","7":"3923 Dew Drop","3":"25.0"},{"12":"1758.0","8":"Clinton Gutierrez","4":"Wood Table","11":"1881.06","13":"123.06","16":"T.T TAILWIND TRADERS","5":"1267.95","10":"67164","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","1":"82.95","17":"Dluhbio","0":"36","2":"3.0","18":"45000","7":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","3":"395.0"},{"12":"1758.0","8":"Clinton Gutierrez","4":"White Window","11":"1881.06","13":"123.06","16":"T.T TAILWIND 
TRADERS","5":"256.8","10":"67164","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","1":"16.8","17":"Dluhbio","0":"20","2":"2.0","18":"45000","7":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","3":"120.0"},{"12":"1758.0","8":"Clinton Gutierrez","4":"Indoor Kit Gardering","11":"1881.06","13":"123.06","16":"T.T TAILWIND TRADERS","5":"224.7","10":"67164","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","1":"14.7","17":"Dluhbio","0":"25","2":"3.0","18":"45000","7":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","3":"70.0"},{"12":"1758.0","8":"Clinton Gutierrez","4":"Measuring Tape","11":"1881.06","13":"123.06","16":"T.T TAILWIND TRADERS","5":"131.61","10":"67164","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","1":"8.61","17":"Dluhbio","0":"46","2":"1.0","18":"45000","7":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","3":"123.0"},{"12":"1217.2","8":"Ashlee Raje","4":"Bathing System Classic 18 in. H x 60 in. W x 32.5","15":"Alvotue","11":"1302.4","9":"October","13":"85.2","16":"T.T TAILWIND TRADERS","5":"577.8","10":"80110","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","1":"37.8","17":"Ashlee Raje","14":"4559 Loop Beaverton Oregon 97005 States","0":"9","2":"3.0","18":"Street","7":"Julpum","3":"200.0"},{"12":"1217.2","8":"Ashlee Raje","4":"Two red garden gnomes","15":"Alvotue","11":"1302.4","9":"October","13":"85.2","16":"T.T TAILWIND TRADERS","5":"295.32","10":"80110","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","1":"19.32","17":"Ashlee Raje","14":"4559 Loop Beaverton Oregon 97005 States","0":"59","2":"3.0","18":"Street","7":"Julpum","3":"92.0"},{"12":"1217.2","8":"Ashlee Raje","4":"Single red garden gnome","15":"Alvotue","11":"1302.4","9":"October","13":"85.2","16":"T.T TAILWIND TRADERS","5":"161.78","10":"80110","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","1":"10.58","17":"Ashlee Raje","14":"4559 Loop Beaverton Oregon 97005 States","0":"58","2":"3.0","18":"Street","7":"Julpum","3":"56.0"},{"12":"1217.2","8":"Ashlee Raje","4":"Artificial Tree","15":"Alvotue","11":"1302.4","9":"October","13":"85.2","16":"T.T TAILWIND TRADERS","5":"267.5","10":"80110","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","1":"17.5","17":"Ashlee Raje","14":"4559 Loop Beaverton Oregon 97005 States","0":"32","2":"1.0","18":"Street","7":"Julpum","3":"250.0"},{"12":"1469.8","8":"Pal","4":"Wood Table","15":"Ifiaeh","11":"1572.69","13":"102.89","16":"T.T TAILWIND TRADERS","5":"845.3","10":"71864","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","1":"55.3","17":"Willie","0":"36","2":"2.0","7":"27, place de Brazaville Roubaix Nord 59100 France","3":"395.0"},{"12":"1469.8","8":"Pal","4":"Rechargeable screwdriver with extra battery","15":"Ifiaeh","11":"1572.69","13":"102.89","16":"T.T TAILWIND TRADERS","5":"300.46","10":"71864","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","1":"19.66","17":"Willie","0":"56","2":"1.0","7":"27, place de Brazaville Roubaix Nord 59100 France","3":"312.0"},{"12":"1469.8","8":"Pal","4":"Bathroom Sink Faucet","15":"Ifiaeh","11":"1572.69","13":"102.89","16":"T.T TAILWIND 
TRADERS","5":"105.93","10":"71864","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","1":"6.93","17":"Willie","0":"14","2":"1.0","7":"27, place de Brazaville Roubaix Nord 59100 France","3":"99.0"},{"12":"1469.8","8":"Pal","4":"Hammer","15":"Ifiaeh","11":"1572.69","13":"102.89","16":"T.T TAILWIND TRADERS","5":"321.0","10":"71864","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","1":"21.0","17":"Willie","0":"48","2":"3.0","7":"27, place de Brazaville Roubaix Nord 59100 France","3":"100.0"},{"12":"758.0","8":"Misty Xie","4":"Screwdriver","15":"Rloefos","11":"811.06","9":"4","13":"53.06","16":"T.T TAILWIND TRADERS","5":"235.4","10":"80066","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","1":"15.4","17":"Misty Xie","14":"376 Amador Valley Blvd.","0":"49","2":"2.0","7":"6058 Hill Street","3":"110.0"},{"12":"758.0","8":"Misty Xie","4":"Yellow Rechargeable screwdriver","15":"Rloefos","11":"811.06","9":"4","13":"53.06","16":"T.T TAILWIND TRADERS","5":"267.5","10":"80066","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","1":"17.5","17":"Misty Xie","14":"376 Amador Valley Blvd.","0":"54","2":"1.0","7":"6058 Hill Street","3":"250.0"},{"12":"758.0","8":"Misty Xie","4":"Steel Passage Door Knob","15":"Rloefos","11":"811.06","9":"4","13":"53.06","16":"T.T TAILWIND TRADERS","5":"19.26","10":"80066","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","1":"1.26","17":"Misty Xie","14":"376 Amador Valley Blvd.","0":"22","2":"2.0","7":"6058 Hill Street","3":"10.0"},{"12":"758.0","8":"Misty Xie","4":"Extractor Steal","15":"Rloefos","11":"811.06","9":"4","13":"53.06","16":"T.T TAILWIND TRADERS","5":"288.9","10":"80066","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","1":"18.9","17":"Misty Xie","14":"376 Amador Valley Blvd.","0":"40","2":"2.0","7":"6058 Hill Street","3":"135.0"},{"12":"999.0","8":"Connie Liang","4":"Multi Function Drill","11":"1068.93","9":"2015-11-04","13":"69.93","16":"T.T TAILWIND TRADERS","5":"170.13","10":"83878","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2015/Invoice83878.pdf","1":"11.13","17":"Mrurc Potsdamer","0":"47","2":"1.0","18":"4SJ","7":"Straße 242 Stuttgart Saarland 70511 Germany","3":"159.0"},{"12":"999.0","8":"Connie Liang","4":"Stainless multi-tool plier","11":"1068.93","9":"2015-11-04","13":"69.93","16":"T.T TAILWIND TRADERS","5":"96.3","10":"83878","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2015/Invoice83878.pdf","1":"6.3","17":"Mrurc Potsdamer","0":"53","2":"1.0","18":"4SJ","7":"Straße 242 Stuttgart Saarland 70511 Germany","3":"90.0"},{"12":"999.0","8":"Connie Liang","4":"Artificial Tree","11":"1068.93","9":"2015-11-04","13":"69.93","16":"T.T TAILWIND TRADERS","5":"802.5","10":"83878","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2015/Invoice83878.pdf","1":"52.5","17":"Mrurc Potsdamer","0":"32","2":"3.0","18":"4SJ","7":"Straße 242 Stuttgart Saarland 70511 Germany","3":"250.0"},{"12":"488.0","8":"Colin Cai","4":"Big Metal Shelving","11":"522.16","9":"2016","13":"34.16","16":"T.T TAILWIND TRADERS","5":"211.86","10":"102247","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice102247.pdf","1":"13.86","17":"Colin Cai","14":"Phata 8858 V. 
Street London England W1Y 3RA United Kingdom","0":"43","2":"2.0","18":"80074","7":"Kampstr 9859","3":"99.0"},{"12":"488.0","8":"Colin Cai","4":"Gardering","11":"522.16","9":"2016","13":"34.16","16":"T.T TAILWIND TRADERS","5":"21.4","10":"102247","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice102247.pdf","1":"1.4","17":"Colin Cai","14":"Phata 8858 V. Street London England W1Y 3RA United Kingdom","0":"30","2":"2.0","18":"80074","7":"Kampstr 9859","3":"10.0"},{"12":"488.0","8":"Colin Cai","4":"Craftsman 100 ft. L x 5/8 in.","11":"522.16","9":"2016","13":"34.16","16":"T.T TAILWIND TRADERS","5":"288.9","10":"102247","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice102247.pdf","1":"18.9","17":"Colin Cai","14":"Phata 8858 V. Street London England W1Y 3RA United Kingdom","0":"26","2":"3.0","18":"80074","7":"Kampstr 9859","3":"100.0"},{"12":"614.2","8":"Roy","4":"One sat on shoe gnome","15":"Rgerlakmog","11":"657.19","9":"8","13":"42.99","16":"T.T TAILWIND TRADERS","5":"104.0","10":"92549","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","1":"6.8","14":"4186 Silver Oaks Pl. Newport Beach California 92625 United States","0":"61","2":"2.0","7":"2957 Tri-state Avenue Cambridge","3":"54.0"},{"12":"614.2","8":"Roy","4":"Refrigerator 1.7 cu. ft. 110 watts","15":"Rgerlakmog","11":"657.19","9":"8","13":"42.99","16":"T.T TAILWIND TRADERS","5":"428.0","10":"92549","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","1":"28.0","14":"4186 Silver Oaks Pl. Newport Beach California 92625 United States","0":"2","2":"2.0","7":"2957 Tri-state Avenue Cambridge","3":"200.0"},{"12":"614.2","8":"Roy","4":"Celebrations C9","15":"Rgerlakmog","11":"657.19","9":"8","13":"42.99","16":"T.T TAILWIND TRADERS","5":"28.89","10":"92549","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","1":"1.89","14":"4186 Silver Oaks Pl. Newport Beach California 92625 United States","0":"31","2":"3.0","7":"2957 Tri-state Avenue Cambridge","3":"10.0"},{"12":"614.2","8":"Roy","4":"Craftsman 100 ft. L x 5/8 in.","15":"Rgerlakmog","11":"657.19","9":"8","13":"42.99","16":"T.T TAILWIND TRADERS","5":"96.3","10":"92549","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","1":"6.3","14":"4186 Silver Oaks Pl. 
Newport Beach California 92625 United States","0":"26","2":"1.0","7":"2957 Tri-state Avenue Cambridge","3":"100.0"}],"schema":[{"key":"0","name":"ProductCode","type":"string"},{"key":"1","name":"Tax","type":"double"},{"key":"2","name":"Quantity","type":"double"},{"key":"3","name":"UnitPrice","type":"double"},{"key":"4","name":"Description","type":"string"},{"key":"5","name":"Amount","type":"double"},{"key":"6","name":"url","type":"string"},{"key":"7","name":"CustomerAddress","type":"string"},{"key":"8","name":"CustomerName","type":"string"},{"key":"9","name":"InvoiceDate","type":"string"},{"key":"10","name":"InvoiceId","type":"string"},{"key":"11","name":"InvoiceTotal","type":"double"},{"key":"12","name":"SubTotal","type":"double"},{"key":"13","name":"TotalTax","type":"double"},{"key":"14","name":"VendorAddress","type":"string"},{"key":"15","name":"VendorAddressRecipient","type":"string"},{"key":"16","name":"VendorName","type":"string"},{"key":"17","name":"CustomerAddressRecipient","type":"string"},{"key":"18","name":"ShippingAddress","type":"string"},{"key":"19","name":"ShippingAddressRecipient","type":"string"}]},"isSummary":false,"language":"scala"},"persist_state":{"view":{"type":"details","tableOptions":{},"chartOptions":{"seriesFieldKeys":["5"],"categoryFieldKeys":["4"],"isStacked":false,"aggregationType":"sum","chartType":"pie"}}}},"f0c0ce60-d8e5-439d-b9aa-e684071cfb57":{"type":"Synapse.DataFrame","sync_state":{"table":{"rows":[{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice22671.pdf"},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice28073.pdf"},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf"},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf"},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf"},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf"},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf"},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2015/Invoice83878.pdf"},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice102247.pdf"},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf"}],"schema":[{"key":"0","name":"url","type":"string"}]},"isSummary":false,"language":"scala"},"persist_state":{"view":{"type":"details","tableOptions":{},"chartOptions":{"seriesFieldKeys":["0"],"categoryFieldKeys":["0"],"isStacked":false,"aggregationType":"count","chartType":"bar"}}}},"f3cf4021-da8f-4614-89ea-b471037f0f6d":{"type":"Synapse.DataFrame","sync_state":{"table":{"rows":[{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice22671.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:18Z","lastUpdatedDateTime":"2021-10-26T22:39:23Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":4,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.1002,4.8178,1.4382,4.8178,1.4382,5.0833,1.1002,5.0833],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4382,4.8178,1.9128,4.8178,1.9128,5.0833,1.4382,5.0833],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9128,4.8178,4.3051,4.8178,4.3051,5.0833,1.9128,5.0833],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"b
oundingBox":[4.3051,4.8178,4.9356,4.8178,4.9356,5.0833,4.3051,5.0833],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9356,4.8178,5.6507,4.8178,5.6507,5.0833,4.9356,5.0833],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6507,4.8178,6.1383,4.8178,6.1383,5.0833,5.6507,5.0833],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.1383,4.8178,6.7428,4.8178,6.7363,5.0833,6.1383,5.0833],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7428,4.8178,7.4254,4.8241,7.4254,5.0833,6.7363,5.0833],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"39","boundingBox":[1.1002,5.0833,1.4382,5.0833,1.4382,5.3805,1.1002,5.3805]},{"rowIndex":1,"columnIndex":1,"text":"2","boundingBox":[1.4382,5.0833,1.9128,5.0833,1.9128,5.3805,1.4382,5.3805]},{"rowIndex":1,"columnIndex":2,"text":"Coffee Maker Red","boundingBox":[1.9128,5.0833,4.3051,5.0833,4.3051,5.3805,1.9128,5.3805]},{"rowIndex":1,"columnIndex":3,"text":"$200.00","boundingBox":[4.3051,5.0833,4.9356,5.0833,4.9356,5.3805,4.3051,5.3805]},{"rowIndex":1,"columnIndex":4,"text":"$40.00","boundingBox":[4.9356,5.0833,5.6507,5.0833,5.6507,5.3805,4.9356,5.3805]},{"rowIndex":1,"columnIndex":5,"text":"10%","boundingBox":[5.6507,5.0833,6.1383,5.0833,6.1383,5.3805,5.6507,5.3805]},{"rowIndex":1,"columnIndex":6,"text":"$25.20","boundingBox":[6.1383,5.0833,6.7363,5.0833,6.7363,5.3805,6.1383,5.3805]},{"rowIndex":1,"columnIndex":7,"text":"$385.20","boundingBox":[6.7363,5.0833,7.4254,5.0833,7.4254,5.3868,6.7363,5.3805]},{"rowIndex":2,"columnIndex":0,"text":"31","boundingBox":[1.1002,5.3805,1.4382,5.3805,1.4382,5.6839,1.1002,5.6839]},{"rowIndex":2,"columnIndex":1,"text":"1","boundingBox":[1.4382,5.3805,1.9128,5.3805,1.9128,5.6839,1.4382,5.6839]},{"rowIndex":2,"columnIndex":2,"text":"Celebrations C9","boundingBox":[1.9128,5.3805,4.3051,5.3805,4.3051,5.6839,1.9128,5.6839]},{"rowIndex":2,"columnIndex":3,"text":"$10.00","boundingBox":[4.3051,5.3805,4.9356,5.3805,4.9356,5.6839,4.3051,5.6839]},{"rowIndex":2,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.3805,5.6507,5.3805,5.6507,5.6839,4.9356,5.6839]},{"rowIndex":2,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.3805,6.1383,5.3805,6.1383,5.6839,5.6507,5.6839]},{"rowIndex":2,"columnIndex":6,"text":"$0.70","boundingBox":[6.1383,5.3805,6.7363,5.3805,6.7363,5.6839,6.1383,5.6839]},{"rowIndex":2,"columnIndex":7,"text":"$10.70","boundingBox":[6.7363,5.3805,7.4254,5.3868,7.4254,5.6839,6.7363,5.6839]},{"rowIndex":3,"columnIndex":0,"text":"12","boundingBox":[1.1002,5.6839,1.4382,5.6839,1.4382,6.1265,1.1002,6.1265]},{"rowIndex":3,"columnIndex":1,"text":"3","boundingBox":[1.4382,5.6839,1.9128,5.6839,1.9128,6.1265,1.4382,6.1265]},{"rowIndex":3,"columnIndex":2,"text":"Black Bathing System Classic 18 in. H x 60 in. 
W x 32.5","boundingBox":[1.9128,5.6839,4.3051,5.6839,4.3051,6.1265,1.9128,6.1265]},{"rowIndex":3,"columnIndex":3,"text":"$250.00","boundingBox":[4.3051,5.6839,4.9356,5.6839,4.9356,6.1265,4.3051,6.1265]},{"rowIndex":3,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.6839,5.6507,5.6839,5.6507,6.1265,4.9356,6.1265]},{"rowIndex":3,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.6839,6.1383,5.6839,6.1383,6.1265,5.6507,6.1265]},{"rowIndex":3,"columnIndex":6,"text":"$52.50","boundingBox":[6.1383,5.6839,6.7363,5.6839,6.7363,6.1265,6.1383,6.1265]},{"rowIndex":3,"columnIndex":7,"text":"$802.50","boundingBox":[6.7363,5.6839,7.4254,5.6839,7.4254,6.1265,6.7363,6.1265]}],"boundingBox":[1.0946,4.8125,7.4324,4.8133,7.4322,6.128,1.0936,6.1274]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"34, rue des Grands Champs Versailles","boundingBox":[1.1282,2.1342,3.5087,2.1342,3.5087,2.2669,1.1282,2.2669],"text":"34, rue des Grands Champs Versailles","confidence":0.71,"type":"string"},"InvoiceTotal":{"valueNumber":1198.4,"page":1,"boundingBox":[6.5814,7.9097,7.413,7.9097,7.413,8.0886,6.5814,8.0886],"text":"$1198.40","confidence":0.962,"type":"number"},"CustomerName":{"page":1,"valueString":"Mackenzie Gray","boundingBox":[1.1354,1.6519,2.2799,1.6519,2.2799,1.8161,1.1354,1.8161],"text":"Mackenzie Gray","confidence":0.48,"type":"string"},"VendorName":{"page":1,"valueString":"T.T TAILWIND TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND TRADERS","confidence":0.953,"type":"string"},"InvoiceId":{"page":1,"valueString":"22671","boundingBox":[1.1272,4.1481,1.4599,4.1481,1.4599,4.248,1.1272,4.248],"text":"22671","confidence":0.946,"type":"string"},"InvoiceDate":{"page":1,"boundingBox":[2.7397,4.1481,2.8456,4.1481,2.8456,4.2465,2.7397,4.2465],"text":"11","confidence":0.375,"type":"date"},"SubTotal":{"valueNumber":1120,"page":1,"boundingBox":[6.8462,6.5516,7.4152,6.5516,7.4152,6.674,6.8462,6.674],"text":"$1120.00","confidence":0.971,"type":"number"},"TotalTax":{"valueNumber":78.4,"page":1,"boundingBox":[7.002,7.5208,7.4153,7.5208,7.4153,7.6432,7.002,7.6432],"text":"$78.40","confidence":0.967,"type":"number"},"VendorAddress":{"page":1,"valueString":"22, rue du Puits Dixme","boundingBox":[5.9835,2.3511,7.1057,2.3511,7.1057,2.4518,5.9835,2.4518],"text":"22, rue du Puits Dixme","confidence":0.606,"type":"string"},"CustomerAddressRecipient":{"page":1,"valueString":"Mackenzie Gray","boundingBox":[1.1354,1.6519,2.2799,1.6519,2.2799,1.8161,1.1354,1.8161],"text":"Mackenzie Gray","confidence":0.48,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":385.2,\"text\":\"$385.20\",\"boundingBox\":[6.8637,5.1789,7.3548,5.1789,7.3548,5.3013,6.8637,5.3013],\"page\":1,\"confidence\":0.888},\"Description\":{\"type\":\"string\",\"valueString\":\"Coffee Maker Red\",\"text\":\"Coffee Maker 
Red\",\"boundingBox\":[2.0294,5.1844,3.0817,5.1844,3.0817,5.2891,2.0294,5.2891],\"page\":1,\"confidence\":0.879},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"39\",\"text\":\"39\",\"boundingBox\":[1.2114,5.1897,1.3427,5.1897,1.3427,5.2891,1.2114,5.2891],\"page\":1,\"confidence\":0.499},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.1897,1.7518,5.1897,1.7518,5.2876,1.6933,5.2876],\"page\":1,\"confidence\":0.77},\"Tax\":{\"type\":\"number\",\"valueNumber\":25.2,\"text\":\"$25.20\",\"boundingBox\":[6.2822,5.1792,6.6601,5.1792,6.6601,5.3013,6.2822,5.3013],\"page\":1,\"confidence\":0.755},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":200,\"text\":\"$200.00\",\"boundingBox\":[4.4092,5.1792,4.8601,5.1792,4.8601,5.3013,4.4092,5.3013],\"page\":1,\"confidence\":0.899}},\"text\":\"39 2 Coffee Maker Red $200.00 $40.00 10% $25.20 $385.20\",\"boundingBox\":[1.2114,5.1789,7.3548,5.1789,7.3548,5.3013,1.2114,5.3013],\"page\":1,\"confidence\":0.867}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":10.7,\"text\":\"$10.70\",\"boundingBox\":[6.9416,5.4789,7.3548,5.4789,7.3548,5.6013,6.9416,5.6013],\"page\":1,\"confidence\":0.895},\"Description\":{\"type\":\"string\",\"valueString\":\"Celebrations C9\",\"text\":\"Celebrations C9\",\"boundingBox\":[2.0294,5.4859,2.9552,5.4859,2.9552,5.5891,2.0294,5.5891],\"page\":1,\"confidence\":0.872},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"31\",\"text\":\"31\",\"boundingBox\":[1.2114,5.4892,1.3242,5.4892,1.3242,5.5891,1.2114,5.5891],\"page\":1,\"confidence\":0.555},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,5.4892,1.7341,5.4892,1.7341,5.5876,1.7012,5.5876],\"page\":1,\"confidence\":0.769},\"Tax\":{\"type\":\"number\",\"valueNumber\":0.7,\"text\":\"$0.70\",\"boundingBox\":[6.3552,5.4792,6.6601,5.4792,6.6601,5.6013,6.3552,5.6013],\"page\":1,\"confidence\":0.773},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":10,\"text\":\"$10.00\",\"boundingBox\":[4.4822,5.4792,4.8601,5.4792,4.8601,5.6013,4.4822,5.6013],\"page\":1,\"confidence\":0.891}},\"text\":\"31 1 Celebrations C9 $10.00 $0.00 0% $0.70 $10.70\",\"boundingBox\":[1.2114,5.4789,7.3548,5.4789,7.3548,5.6013,1.2114,5.6013],\"page\":1,\"confidence\":0.7}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":802.5,\"text\":\"$802.50\",\"boundingBox\":[6.8637,5.7789,7.3548,5.7789,7.3548,5.9013,6.8637,5.9013],\"page\":1,\"confidence\":0.898},\"Description\":{\"type\":\"string\",\"valueString\":\"Black Bathing System Classic 18 in. H x 60 in. W x 32.5\",\"text\":\"Black Bathing System Classic 18 in. H x 60 in. 
W x 32.5\",\"boundingBox\":[2.0249,5.7859,4.1986,5.7859,4.1986,6.0692,2.0249,6.0692],\"page\":1,\"confidence\":0.755},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"12\",\"text\":\"12\",\"boundingBox\":[1.2183,5.7892,1.342,5.7892,1.342,5.8876,1.2183,5.8876],\"page\":1,\"confidence\":0.498},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.7897,1.7491,5.7897,1.7491,5.8891,1.6943,5.8891],\"page\":1,\"confidence\":0.75},\"Tax\":{\"type\":\"number\",\"valueNumber\":52.5,\"text\":\"$52.50\",\"boundingBox\":[6.2822,5.7792,6.6601,5.7792,6.6601,5.9013,6.2822,5.9013],\"page\":1,\"confidence\":0.736},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":250,\"text\":\"$250.00\",\"boundingBox\":[4.4092,5.7792,4.8601,5.7792,4.8601,5.9013,4.4092,5.9013],\"page\":1,\"confidence\":0.899}},\"text\":\"12 3 Black Bathing System Classic 18 in. H $250.00 $0.00 0% $52.50 $802.50 x 60 in. W x 32.5\",\"boundingBox\":[1.2183,5.7789,7.3548,5.7789,7.3548,6.0692,1.2183,6.0692],\"page\":1,\"confidence\":0.81}"]}}}]}}},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice28073.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:19Z","lastUpdatedDateTime":"2021-10-26T22:39:23Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":4,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.102,4.8158,1.4468,4.8158,1.4403,5.0805,1.0955,5.0805],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4468,4.8158,1.9087,4.8158,1.9087,5.0805,1.4403,5.0805],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9087,4.8158,4.3027,4.8221,4.3027,5.0805,1.9087,5.0805],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"boundingBox":[4.3027,4.8221,4.9337,4.8221,4.9337,5.0805,4.3027,5.0805],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9337,4.8221,5.6558,4.8221,5.6558,5.0805,4.9337,5.0805],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6558,4.8221,6.1372,4.8221,6.1372,5.0805,5.6558,5.0805],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.1372,4.8221,6.7357,4.8221,6.7357,5.0805,6.1372,5.0805],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7357,4.8221,7.4253,4.8221,7.4318,5.0868,6.7357,5.0805],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"17","boundingBox":[1.0955,5.0805,1.4403,5.0805,1.4403,5.3831,1.0955,5.3831]},{"rowIndex":1,"columnIndex":1,"text":"2","boundingBox":[1.4403,5.0805,1.9087,5.0805,1.9087,5.3831,1.4403,5.3831]},{"rowIndex":1,"columnIndex":2,"text":"Blend Solid White Sheer 
Curtains","boundingBox":[1.9087,5.0805,4.3027,5.0805,4.3027,5.3831,1.9087,5.3831]},{"rowIndex":1,"columnIndex":3,"text":"$110.00","boundingBox":[4.3027,5.0805,4.9337,5.0805,4.9337,5.3831,4.3027,5.3831]},{"rowIndex":1,"columnIndex":4,"text":"$0.00","boundingBox":[4.9337,5.0805,5.6558,5.0805,5.6558,5.3831,4.9337,5.3831]},{"rowIndex":1,"columnIndex":5,"text":"0%","boundingBox":[5.6558,5.0805,6.1372,5.0805,6.1372,5.3831,5.6558,5.3831]},{"rowIndex":1,"columnIndex":6,"text":"$15.40","boundingBox":[6.1372,5.0805,6.7357,5.0805,6.7357,5.3831,6.1372,5.3831]},{"rowIndex":1,"columnIndex":7,"text":"$235.40","boundingBox":[6.7357,5.0805,7.4318,5.0868,7.4318,5.3831,6.7357,5.3831]},{"rowIndex":2,"columnIndex":0,"text":"56","boundingBox":[1.0955,5.3831,1.4403,5.3831,1.4403,5.8181,1.0955,5.8181]},{"rowIndex":2,"columnIndex":1,"text":"2","boundingBox":[1.4403,5.3831,1.9087,5.3831,1.9087,5.8181,1.4403,5.8181]},{"rowIndex":2,"columnIndex":2,"text":"Rechargeable screwdriver with extra battery","boundingBox":[1.9087,5.3831,4.3027,5.3831,4.3027,5.8244,1.9087,5.8181]},{"rowIndex":2,"columnIndex":3,"text":"$312.00","boundingBox":[4.3027,5.3831,4.9337,5.3831,4.9337,5.8244,4.3027,5.8244]},{"rowIndex":2,"columnIndex":4,"text":"$0.00","boundingBox":[4.9337,5.3831,5.6558,5.3831,5.6558,5.8244,4.9337,5.8244]},{"rowIndex":2,"columnIndex":5,"text":"0%","boundingBox":[5.6558,5.3831,6.1372,5.3831,6.1372,5.8244,5.6558,5.8244]},{"rowIndex":2,"columnIndex":6,"text":"$43.68","boundingBox":[6.1372,5.3831,6.7357,5.3831,6.7357,5.8244,6.1372,5.8244]},{"rowIndex":2,"columnIndex":7,"text":"$667.68","boundingBox":[6.7357,5.3831,7.4318,5.3831,7.4318,5.8244,6.7357,5.8244]},{"rowIndex":3,"columnIndex":0,"text":"40","boundingBox":[1.0955,5.8181,1.4403,5.8181,1.4403,6.1206,1.0955,6.1206]},{"rowIndex":3,"columnIndex":1,"text":"3","boundingBox":[1.4403,5.8181,1.9087,5.8181,1.9087,6.1206,1.4403,6.1206]},{"rowIndex":3,"columnIndex":2,"text":"Extractor Steal","boundingBox":[1.9087,5.8181,4.3027,5.8244,4.3027,6.1206,1.9087,6.1206]},{"rowIndex":3,"columnIndex":3,"text":"$135.00","boundingBox":[4.3027,5.8244,4.9337,5.8244,4.9337,6.1206,4.3027,6.1206]},{"rowIndex":3,"columnIndex":4,"text":"$40.50","boundingBox":[4.9337,5.8244,5.6558,5.8244,5.6558,6.1206,4.9337,6.1206]},{"rowIndex":3,"columnIndex":5,"text":"10%","boundingBox":[5.6558,5.8244,6.1372,5.8244,6.1372,6.1206,5.6558,6.1206]},{"rowIndex":3,"columnIndex":6,"text":"$25.52","boundingBox":[6.1372,5.8244,6.7357,5.8244,6.7422,6.1206,6.1372,6.1206]},{"rowIndex":3,"columnIndex":7,"text":"$390.02","boundingBox":[6.7357,5.8244,7.4318,5.8244,7.4318,6.1269,6.7422,6.1206]}],"boundingBox":[1.0899,4.8213,7.4305,4.8214,7.4306,6.1277,1.0892,6.1275]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"United States","boundingBox":[1.1313,3.1342,1.9101,3.1342,1.9101,3.2374,1.1313,3.2374],"text":"United States","confidence":0.306,"type":"string"},"ShippingAddress":{"page":1,"valueString":"San Gabriel","boundingBox":[2.968,2.1342,3.6276,2.1342,3.6276,2.2374,2.968,2.2374],"text":"San Gabriel","confidence":0.265,"type":"string"},"InvoiceTotal":{"valueNumber":1293.1,"page":1,"boundingBox":[6.5814,7.9097,7.413,7.9097,7.413,8.0886,6.5814,8.0886],"text":"$1293.10","confidence":0.962,"type":"number"},"VendorName":{"page":1,"valueString":"T.T TAILWIND TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND 
TRADERS","confidence":0.952,"type":"string"},"VendorAddressRecipient":{"page":1,"valueString":"Sarosgawk","boundingBox":[5.9841,2.1511,6.517,2.1511,6.517,2.264,5.9841,2.264],"text":"Sarosgawk","confidence":0.234,"type":"string"},"InvoiceId":{"page":1,"valueString":"28073","boundingBox":[1.1272,4.1486,1.475,4.1486,1.475,4.248,1.1272,4.248],"text":"28073","confidence":0.954,"type":"string"},"InvoiceDate":{"page":1,"boundingBox":[2.7355,4.1448,3.3205,4.1448,3.3205,4.248,2.7355,4.248],"text":"5 October","confidence":0.375,"valueDate":"2021-10-05","type":"date"},"SubTotal":{"valueNumber":1208.5,"page":1,"boundingBox":[6.8462,6.5516,7.4152,6.5516,7.4152,6.674,6.8462,6.674],"text":"$1208.50","confidence":0.971,"type":"number"},"TotalTax":{"valueNumber":84.6,"page":1,"boundingBox":[7.002,7.5208,7.4153,7.5208,7.4153,7.6432,7.002,7.6432],"text":"$84.60","confidence":0.967,"type":"number"},"VendorAddress":{"page":1,"valueString":"Maubeuge","boundingBox":[6.5215,2.3511,7.0513,2.3511,7.0513,2.464,6.5215,2.464],"text":"Maubeuge","confidence":0.251,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":235.4,\"text\":\"$235.40\",\"boundingBox\":[6.8637,5.1789,7.3548,5.1789,7.3548,5.3013,6.8637,5.3013],\"page\":1,\"confidence\":0.898},\"Description\":{\"type\":\"string\",\"valueString\":\"Blend Solid White Sheer Curtains\",\"text\":\"Blend Solid White Sheer Curtains\",\"boundingBox\":[2.0356,5.1859,3.9764,5.1859,3.9764,5.2891,2.0356,5.2891],\"page\":1,\"confidence\":0.827},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"17\",\"text\":\"17\",\"boundingBox\":[1.2183,5.1892,1.3431,5.1892,1.3431,5.2876,1.2183,5.2876],\"page\":1,\"confidence\":0.6},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.1897,1.7518,5.1897,1.7518,5.2876,1.6933,5.2876],\"page\":1,\"confidence\":0.729},\"Tax\":{\"type\":\"number\",\"valueNumber\":15.4,\"text\":\"$15.40\",\"boundingBox\":[6.2822,5.1792,6.6601,5.1792,6.6601,5.3013,6.2822,5.3013],\"page\":1,\"confidence\":0.778},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":110,\"text\":\"$110.00\",\"boundingBox\":[4.4092,5.1792,4.8601,5.1792,4.8601,5.3013,4.4092,5.3013],\"page\":1,\"confidence\":0.899}},\"text\":\"17 2 Blend Solid White Sheer Curtains $110.00 $0.00 0% $15.40 $235.40\",\"boundingBox\":[1.2183,5.1789,7.3548,5.1789,7.3548,5.3013,1.2183,5.3013],\"page\":1,\"confidence\":0.86}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":667.68,\"text\":\"$667.68\",\"boundingBox\":[6.8637,5.4789,7.354,5.4789,7.354,5.6013,6.8637,5.6013],\"page\":1,\"confidence\":0.898},\"Description\":{\"type\":\"string\",\"valueString\":\"Rechargeable screwdriver with extra battery\",\"text\":\"Rechargeable screwdriver with extra 
battery\",\"boundingBox\":[2.0341,5.4859,4.1487,5.4859,4.1487,5.7994,2.0341,5.7994],\"page\":1,\"confidence\":0.539},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"56\",\"text\":\"56\",\"boundingBox\":[1.2141,5.4897,1.3439,5.4897,1.3439,5.5891,1.2141,5.5891],\"page\":1,\"confidence\":0.505},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.4897,1.7518,5.4897,1.7518,5.5876,1.6933,5.5876],\"page\":1,\"confidence\":0.749},\"Tax\":{\"type\":\"number\",\"valueNumber\":43.68,\"text\":\"$43.68\",\"boundingBox\":[6.2822,5.4792,6.6601,5.4792,6.6601,5.6013,6.2822,5.6013],\"page\":1,\"confidence\":0.767},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":312,\"text\":\"$312.00\",\"boundingBox\":[4.4092,5.4792,4.8601,5.4792,4.8601,5.6013,4.4092,5.6013],\"page\":1,\"confidence\":0.899}},\"text\":\"56 2 Rechargeable screwdriver with extra $312.00 $0.00 0% $43.68 $667.68 battery\",\"boundingBox\":[1.2141,5.4789,7.354,5.4789,7.354,5.7994,1.2141,5.7994],\"page\":1,\"confidence\":0.843}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":390.02,\"text\":\"$390.02\",\"boundingBox\":[6.8637,5.9204,7.3501,5.9204,7.3501,6.0428,6.8637,6.0428],\"page\":1,\"confidence\":0.903},\"Description\":{\"type\":\"string\",\"valueString\":\"Extractor Steal\",\"text\":\"Extractor Steal\",\"boundingBox\":[2.0356,5.9274,2.8654,5.9274,2.8654,6.0306,2.0356,6.0306],\"page\":1,\"confidence\":0.898},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"40\",\"text\":\"40\",\"boundingBox\":[1.2043,5.9312,1.3437,5.9312,1.3437,6.0306,1.2043,6.0306],\"page\":1,\"confidence\":0.57},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.9312,1.7491,5.9312,1.7491,6.0306,1.6943,6.0306],\"page\":1,\"confidence\":0.801},\"Tax\":{\"type\":\"number\",\"valueNumber\":25.52,\"text\":\"$25.52\",\"boundingBox\":[6.2822,5.9206,6.6583,5.9206,6.6583,6.0428,6.2822,6.0428],\"page\":1,\"confidence\":0.769},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":135,\"text\":\"$135.00\",\"boundingBox\":[4.4092,5.9206,4.8601,5.9206,4.8601,6.0428,4.4092,6.0428],\"page\":1,\"confidence\":0.899}},\"text\":\"40 3 Extractor Steal $135.00 $40.50 10% $25.52 
$390.02\",\"boundingBox\":[1.2043,5.9204,7.3501,5.9204,7.3501,6.0428,1.2043,6.0428],\"page\":1,\"confidence\":0.826}"]}}}]}}},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:18Z","lastUpdatedDateTime":"2021-10-26T22:39:23Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":5,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.1002,4.8223,1.4447,4.8223,1.4447,5.0827,1.1002,5.0827],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4447,4.8223,1.9128,4.8223,1.9128,5.0827,1.4447,5.0827],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9128,4.8223,4.3051,4.8223,4.3051,5.0827,1.9128,5.0827],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"boundingBox":[4.3051,4.8223,4.9356,4.8223,4.9356,5.0827,4.3051,5.0827],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9356,4.8223,5.6507,4.8223,5.6507,5.0827,4.9356,5.0827],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6507,4.8223,6.1383,4.8223,6.1383,5.0827,5.6507,5.0827],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.1383,4.8223,6.7363,4.8223,6.7363,5.0827,6.1383,5.0827],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7363,4.8223,7.4319,4.8223,7.4319,5.0827,6.7363,5.0827],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"43","boundingBox":[1.1002,5.0827,1.4447,5.0827,1.4447,5.3803,1.1002,5.3803]},{"rowIndex":1,"columnIndex":1,"text":"3","boundingBox":[1.4447,5.0827,1.9128,5.0827,1.9128,5.3803,1.4447,5.3803]},{"rowIndex":1,"columnIndex":2,"text":"Big Metal Shelving","boundingBox":[1.9128,5.0827,4.3051,5.0827,4.3051,5.3803,1.9128,5.3803]},{"rowIndex":1,"columnIndex":3,"text":"$99.00","boundingBox":[4.3051,5.0827,4.9356,5.0827,4.9356,5.3803,4.3051,5.3803]},{"rowIndex":1,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.0827,5.6507,5.0827,5.6507,5.3803,4.9356,5.3803]},{"rowIndex":1,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.0827,6.1383,5.0827,6.1383,5.3803,5.6507,5.3803]},{"rowIndex":1,"columnIndex":6,"text":"$20.79","boundingBox":[6.1383,5.0827,6.7363,5.0827,6.7363,5.3803,6.1383,5.3803]},{"rowIndex":1,"columnIndex":7,"text":"$317.79","boundingBox":[6.7363,5.0827,7.4319,5.0827,7.4319,5.3865,6.7363,5.3803]},{"rowIndex":2,"columnIndex":0,"text":"42","boundingBox":[1.1002,5.3803,1.4447,5.3803,1.4447,5.6841,1.1002,5.6841]},{"rowIndex":2,"columnIndex":1,"text":"3","boundingBox":[1.4447,5.3803,1.9128,5.3803,1.9128,5.6841,1.4447,5.6841]},{"rowIndex":2,"columnIndex":2,"text":"Metal 
Shelving","boundingBox":[1.9128,5.3803,4.3051,5.3803,4.3051,5.6841,1.9128,5.6841]},{"rowIndex":2,"columnIndex":3,"text":"$90.00","boundingBox":[4.3051,5.3803,4.9356,5.3803,4.9356,5.6841,4.3051,5.6841]},{"rowIndex":2,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.3803,5.6507,5.3803,5.6507,5.6841,4.9356,5.6841]},{"rowIndex":2,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.3803,6.1383,5.3803,6.1383,5.6841,5.6507,5.6841]},{"rowIndex":2,"columnIndex":6,"text":"$18.90","boundingBox":[6.1383,5.3803,6.7363,5.3803,6.7363,5.6841,6.1383,5.6841]},{"rowIndex":2,"columnIndex":7,"text":"$288.90","boundingBox":[6.7363,5.3803,7.4319,5.3865,7.4319,5.6841,6.7363,5.6841]},{"rowIndex":3,"columnIndex":0,"text":"46","boundingBox":[1.1002,5.6841,1.4447,5.6841,1.4447,5.9817,1.1002,5.9817]},{"rowIndex":3,"columnIndex":1,"text":"1","boundingBox":[1.4447,5.6841,1.9128,5.6841,1.9128,5.9817,1.4447,5.9817]},{"rowIndex":3,"columnIndex":2,"text":"Measuring Tape","boundingBox":[1.9128,5.6841,4.3051,5.6841,4.3051,5.9817,1.9128,5.9817]},{"rowIndex":3,"columnIndex":3,"text":"$123.00","boundingBox":[4.3051,5.6841,4.9356,5.6841,4.9356,5.9817,4.3051,5.9817]},{"rowIndex":3,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.6841,5.6507,5.6841,5.6507,5.9817,4.9356,5.9817]},{"rowIndex":3,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.6841,6.1383,5.6841,6.1383,5.9817,5.6507,5.9817]},{"rowIndex":3,"columnIndex":6,"text":"$8.61","boundingBox":[6.1383,5.6841,6.7363,5.6841,6.7363,5.9817,6.1383,5.9817]},{"rowIndex":3,"columnIndex":7,"text":"$131.61","boundingBox":[6.7363,5.6841,7.4319,5.6841,7.4319,5.9817,6.7363,5.9817]},{"rowIndex":4,"columnIndex":0,"text":"21","boundingBox":[1.1002,5.9817,1.4447,5.9817,1.4447,6.2793,1.1067,6.2855]},{"rowIndex":4,"columnIndex":1,"text":"3","boundingBox":[1.4447,5.9817,1.9128,5.9817,1.9128,6.2793,1.4447,6.2793]},{"rowIndex":4,"columnIndex":2,"text":"Curtain Rod 48 in","boundingBox":[1.9128,5.9817,4.3051,5.9817,4.3051,6.2793,1.9128,6.2793]},{"rowIndex":4,"columnIndex":3,"text":"$25.00","boundingBox":[4.3051,5.9817,4.9356,5.9817,4.9356,6.2793,4.3051,6.2793]},{"rowIndex":4,"columnIndex":4,"text":"$15.00","boundingBox":[4.9356,5.9817,5.6507,5.9817,5.6507,6.2793,4.9356,6.2793]},{"rowIndex":4,"columnIndex":5,"text":"20%","boundingBox":[5.6507,5.9817,6.1383,5.9817,6.1383,6.2793,5.6507,6.2793]},{"rowIndex":4,"columnIndex":6,"text":"$4.20","boundingBox":[6.1383,5.9817,6.7363,5.9817,6.7428,6.2793,6.1383,6.2793]},{"rowIndex":4,"columnIndex":7,"text":"$64.20","boundingBox":[6.7363,5.9817,7.4319,5.9817,7.4319,6.2793,6.7428,6.2793]}],"boundingBox":[1.0921,4.8111,7.43,4.8116,7.4295,6.2883,1.0909,6.2879]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"3923 Dew Drop","boundingBox":[5.9843,2.3543,6.759,2.3543,6.759,2.4634,5.9843,2.4634],"text":"3923 Dew Drop","confidence":0.371,"type":"string"},"ShippingAddress":{"page":1,"valueString":"828, rue de Berri","boundingBox":[1.126,2.1342,2.0896,2.1342,2.0896,2.2532,1.126,2.2532],"text":"828, rue de Berri","confidence":0.651,"type":"string"},"InvoiceTotal":{"valueNumber":802.5,"page":1,"boundingBox":[6.6952,8.0682,7.413,8.0682,7.413,8.2471,6.6952,8.2471],"text":"$802.50","confidence":0.96,"type":"number"},"CustomerName":{"page":1,"valueString":"Villeneuve-d'Ascq","boundingBox":[2.9612,2.1342,4.013,2.1342,4.013,2.2669,2.9612,2.2669],"text":"Villeneuve-d'Ascq","confidence":0.36,"type":"string"},"VendorName":{"page":1,"valueString":"T.T TAILWIND 
TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND TRADERS","confidence":0.954,"type":"string"},"VendorAddressRecipient":{"page":1,"valueString":"Braeo","boundingBox":[5.9879,2.1558,6.2626,2.1558,6.2626,2.2384,5.9879,2.2384],"text":"Braeo","confidence":0.215,"type":"string"},"InvoiceId":{"page":1,"valueString":"64808","boundingBox":[1.1274,4.1486,1.4794,4.1486,1.4794,4.248,1.1274,4.248],"text":"64808","confidence":0.967,"type":"string"},"InvoiceDate":{"page":1,"boundingBox":[2.7397,4.1448,3.2744,4.1448,3.2744,4.248,2.7397,4.248],"text":"12 March","confidence":0.304,"valueDate":"2021-03-12","type":"date"},"SubTotal":{"valueNumber":750,"page":1,"boundingBox":[6.9241,6.7101,7.4152,6.7101,7.4152,6.8325,6.9241,6.8325],"text":"$750.00","confidence":0.971,"type":"number"},"TotalTax":{"valueNumber":52.5,"page":1,"boundingBox":[7.002,7.6793,7.4153,7.6793,7.4153,7.8017,7.002,7.8017],"text":"$52.50","confidence":0.97,"type":"number"},"ShippingAddressRecipient":{"page":1,"valueString":"Villeneuve-d'Ascq","boundingBox":[2.9612,2.1342,4.013,2.1342,4.013,2.2669,2.9612,2.2669],"text":"Villeneuve-d'Ascq","confidence":0.36,"type":"string"},"VendorAddress":{"page":1,"valueString":"Circle","boundingBox":[6.8008,2.3511,7.0698,2.3511,7.0698,2.4384,6.8008,2.4384],"text":"Circle","confidence":0.271,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":317.79,\"text\":\"$317.79\",\"boundingBox\":[6.8637,5.1789,7.3533,5.1789,7.3533,5.3013,6.8637,5.3013],\"page\":1,\"confidence\":0.872},\"Description\":{\"type\":\"string\",\"valueString\":\"Big Metal Shelving\",\"text\":\"Big Metal Shelving\",\"boundingBox\":[2.0356,5.1859,3.1214,5.1859,3.1214,5.3193,2.0356,5.3193],\"page\":1,\"confidence\":0.876},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"43\",\"text\":\"43\",\"boundingBox\":[1.2043,5.1897,1.3393,5.1897,1.3393,5.2891,1.2043,5.2891],\"page\":1,\"confidence\":0.632},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.1897,1.7491,5.1897,1.7491,5.2891,1.6943,5.2891],\"page\":1,\"confidence\":0.794},\"Tax\":{\"type\":\"number\",\"valueNumber\":20.79,\"text\":\"$20.79\",\"boundingBox\":[6.2822,5.1792,6.659,5.1792,6.659,5.3013,6.2822,5.3013],\"page\":1,\"confidence\":0.727},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":99,\"text\":\"$99.00\",\"boundingBox\":[4.4822,5.1792,4.8601,5.1792,4.8601,5.3013,4.4822,5.3013],\"page\":1,\"confidence\":0.887}},\"text\":\"43 3 Big Metal Shelving $99.00 $0.00 0% $20.79 $317.79\",\"boundingBox\":[1.2043,5.1789,7.3533,5.1789,7.3533,5.3193,1.2043,5.3193],\"page\":1,\"confidence\":0.864}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":288.9,\"text\":\"$288.90\",\"boundingBox\":[6.8637,5.4789,7.3548,5.4789,7.3548,5.6013,6.8637,5.6013],\"page\":1,\"confidence\":0.895},\"Description\":{\"type\":\"string\",\"valueString\":\"Metal Shelving\",\"text\":\"Metal 
Shelving\",\"boundingBox\":[2.0356,5.4859,2.8944,5.4859,2.8944,5.6193,2.0356,5.6193],\"page\":1,\"confidence\":0.8},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"42\",\"text\":\"42\",\"boundingBox\":[1.2043,5.4897,1.342,5.4897,1.342,5.5876,1.2043,5.5876],\"page\":1,\"confidence\":0.6},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.4897,1.7491,5.4897,1.7491,5.5891,1.6943,5.5891],\"page\":1,\"confidence\":0.77},\"Tax\":{\"type\":\"number\",\"valueNumber\":18.9,\"text\":\"$18.90\",\"boundingBox\":[6.2822,5.4792,6.6601,5.4792,6.6601,5.6013,6.2822,5.6013],\"page\":1,\"confidence\":0.729},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":90,\"text\":\"$90.00\",\"boundingBox\":[4.4822,5.4792,4.8601,5.4792,4.8601,5.6013,4.4822,5.6013],\"page\":1,\"confidence\":0.884}},\"text\":\"42 3 Metal Shelving $90.00 $0.00 0% $18.90 $288.90\",\"boundingBox\":[1.2043,5.4789,7.3548,5.4789,7.3548,5.6193,1.2043,5.6193],\"page\":1,\"confidence\":0.805}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":131.61,\"text\":\"$131.61\",\"boundingBox\":[6.8637,5.7789,7.3364,5.7789,7.3364,5.9013,6.8637,5.9013],\"page\":1,\"confidence\":0.899},\"Description\":{\"type\":\"string\",\"valueString\":\"Measuring Tape\",\"text\":\"Measuring Tape\",\"boundingBox\":[2.0356,5.7871,2.9754,5.7871,2.9754,5.9193,2.0356,5.9193],\"page\":1,\"confidence\":0.886},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"46\",\"text\":\"46\",\"boundingBox\":[1.2043,5.7897,1.3439,5.7897,1.3439,5.8891,1.2043,5.8891],\"page\":1,\"confidence\":0.676},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,5.7892,1.7341,5.7892,1.7341,5.8876,1.7012,5.8876],\"page\":1,\"confidence\":0.752},\"Tax\":{\"type\":\"number\",\"valueNumber\":8.61,\"text\":\"$8.61\",\"boundingBox\":[6.3552,5.7792,6.6406,5.7792,6.6406,5.9013,6.3552,5.9013],\"page\":1,\"confidence\":0.724},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":123,\"text\":\"$123.00\",\"boundingBox\":[4.4092,5.7792,4.8601,5.7792,4.8601,5.9013,4.4092,5.9013],\"page\":1,\"confidence\":0.899}},\"text\":\"46 1 Measuring Tape $123.00 $0.00 0% $8.61 $131.61\",\"boundingBox\":[1.2043,5.7789,7.3364,5.7789,7.3364,5.9193,1.2043,5.9193],\"page\":1,\"confidence\":0.7}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":64.2,\"text\":\"$64.20\",\"boundingBox\":[6.9416,6.0789,7.3548,6.0789,7.3548,6.2013,6.9416,6.2013],\"page\":1,\"confidence\":0.901},\"Description\":{\"type\":\"string\",\"valueString\":\"Curtain Rod 48 in\",\"text\":\"Curtain Rod 48 in\",\"boundingBox\":[2.0294,6.0859,3.051,6.0859,3.051,6.1891,2.0294,6.1891],\"page\":1,\"confidence\":0.801},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"21\",\"text\":\"21\",\"boundingBox\":[1.2105,6.0892,1.3242,6.0892,1.3242,6.1876,1.2105,6.1876],\"page\":1,\"confidence\":0.69},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,6.0897,1.7491,6.0897,1.7491,6.1891,1.6943,6.1891],\"page\":1,\"confidence\":0.79},\"Tax\":{\"type\":\"number\",\"valueNumber\":4.2,\"text\":\"$4.20\",\"boundingBox\":[6.3552,6.0792,6.6601,6.0792,6.6601,6.2013,6.3552,6.2013],\"page\":1,\"confidence\":0.719},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":25,\"text\":\"$25.00\",\"boundingBox\":[4.4822,6.0792,4.8601,6.0792,4.8601,6.2013,4.4822,6.2013],\"page\":1,\"confidence\":0.884}},\"text\":\"21 3 Curtain Rod 48 in $25.00 $15.00 20% $4.20 
$64.20\",\"boundingBox\":[1.2105,6.0789,7.3548,6.0789,7.3548,6.2013,1.2105,6.2013],\"page\":1,\"confidence\":0.87}"]}}}]}}},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:18Z","lastUpdatedDateTime":"2021-10-26T22:39:22Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":5,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.1002,4.8187,1.4447,4.8187,1.4447,5.0838,1.1002,5.0838],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4447,4.8187,1.9128,4.8187,1.9128,5.0838,1.4447,5.0838],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9128,4.8187,4.3051,4.8187,4.3051,5.0838,1.9128,5.0838],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"boundingBox":[4.3051,4.8187,4.9356,4.8187,4.9356,5.0838,4.3051,5.0838],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9356,4.8187,5.6572,4.8187,5.6507,5.0838,4.9356,5.0838],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6572,4.8187,6.1383,4.8187,6.1383,5.0838,5.6507,5.0838],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.1383,4.8187,6.7363,4.8187,6.7363,5.0838,6.1383,5.0838],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7363,4.8187,7.4319,4.8248,7.4319,5.0838,6.7363,5.0838],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"36","boundingBox":[1.1002,5.0838,1.4447,5.0838,1.4447,5.3798,1.1002,5.3798]},{"rowIndex":1,"columnIndex":1,"text":"3","boundingBox":[1.4447,5.0838,1.9128,5.0838,1.9128,5.3798,1.4447,5.3798]},{"rowIndex":1,"columnIndex":2,"text":"Wood Table","boundingBox":[1.9128,5.0838,4.3051,5.0838,4.3051,5.3798,1.9128,5.3798]},{"rowIndex":1,"columnIndex":3,"text":"$395.00","boundingBox":[4.3051,5.0838,4.9356,5.0838,4.9356,5.3798,4.3051,5.3798]},{"rowIndex":1,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.0838,5.6507,5.0838,5.6507,5.3798,4.9356,5.3798]},{"rowIndex":1,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.0838,6.1383,5.0838,6.1383,5.3798,5.6507,5.3798]},{"rowIndex":1,"columnIndex":6,"text":"$82.95","boundingBox":[6.1383,5.0838,6.7363,5.0838,6.7363,5.3798,6.1383,5.3798]},{"rowIndex":1,"columnIndex":7,"text":"$1267.95","boundingBox":[6.7363,5.0838,7.4319,5.0838,7.4319,5.3798,6.7363,5.3798]},{"rowIndex":2,"columnIndex":0,"text":"20","boundingBox":[1.1002,5.3798,1.4447,5.3798,1.4447,5.682,1.1002,5.682]},{"rowIndex":2,"columnIndex":1,"text":"2","boundingBox":[1.4447,5.3798,1.9128,5.3798,1.9128,5.682,1.4447,5.682]},{"rowIndex":2,"columnIndex":2,"text":"White 
Window","boundingBox":[1.9128,5.3798,4.3051,5.3798,4.3051,5.682,1.9128,5.682]},{"rowIndex":2,"columnIndex":3,"text":"$120.00","boundingBox":[4.3051,5.3798,4.9356,5.3798,4.9356,5.682,4.3051,5.682]},{"rowIndex":2,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.3798,5.6507,5.3798,5.6507,5.682,4.9356,5.682]},{"rowIndex":2,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.3798,6.1383,5.3798,6.1383,5.682,5.6507,5.682]},{"rowIndex":2,"columnIndex":6,"text":"$16.80","boundingBox":[6.1383,5.3798,6.7363,5.3798,6.7363,5.682,6.1383,5.682]},{"rowIndex":2,"columnIndex":7,"text":"$256.80","boundingBox":[6.7363,5.3798,7.4319,5.3798,7.4319,5.682,6.7363,5.682]},{"rowIndex":3,"columnIndex":0,"text":"25","boundingBox":[1.1002,5.682,1.4447,5.682,1.4447,5.978,1.1002,5.978]},{"rowIndex":3,"columnIndex":1,"text":"3","boundingBox":[1.4447,5.682,1.9128,5.682,1.9128,5.978,1.4447,5.978]},{"rowIndex":3,"columnIndex":2,"text":"Indoor Kit Gardering","boundingBox":[1.9128,5.682,4.3051,5.682,4.3051,5.9842,1.9128,5.978]},{"rowIndex":3,"columnIndex":3,"text":"$70.00","boundingBox":[4.3051,5.682,4.9356,5.682,4.9356,5.9842,4.3051,5.9842]},{"rowIndex":3,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.682,5.6507,5.682,5.6507,5.9842,4.9356,5.9842]},{"rowIndex":3,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.682,6.1383,5.682,6.1383,5.9842,5.6507,5.9842]},{"rowIndex":3,"columnIndex":6,"text":"$14.70","boundingBox":[6.1383,5.682,6.7363,5.682,6.7363,5.9842,6.1383,5.9842]},{"rowIndex":3,"columnIndex":7,"text":"$224.70","boundingBox":[6.7363,5.682,7.4319,5.682,7.4319,5.978,6.7363,5.9842]},{"rowIndex":4,"columnIndex":0,"text":"46","boundingBox":[1.1002,5.978,1.4447,5.978,1.4447,6.2802,1.1067,6.2864]},{"rowIndex":4,"columnIndex":1,"text":"1","boundingBox":[1.4447,5.978,1.9128,5.978,1.9128,6.2802,1.4447,6.2802]},{"rowIndex":4,"columnIndex":2,"text":"Measuring Tape","boundingBox":[1.9128,5.978,4.3051,5.9842,4.3051,6.2802,1.9128,6.2802]},{"rowIndex":4,"columnIndex":3,"text":"$123.00","boundingBox":[4.3051,5.9842,4.9356,5.9842,4.9291,6.2802,4.3051,6.2802]},{"rowIndex":4,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.9842,5.6507,5.9842,5.6507,6.2802,4.9291,6.2802]},{"rowIndex":4,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.9842,6.1383,5.9842,6.1383,6.2802,5.6507,6.2802]},{"rowIndex":4,"columnIndex":6,"text":"$8.61","boundingBox":[6.1383,5.9842,6.7363,5.9842,6.7428,6.2802,6.1383,6.2802]},{"rowIndex":4,"columnIndex":7,"text":"$131.61","boundingBox":[6.7363,5.9842,7.4319,5.978,7.4319,6.2802,6.7428,6.2802]}],"boundingBox":[1.0919,4.8211,7.4312,4.8214,7.4306,6.2883,1.0904,6.288]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","boundingBox":[5.9782,2.3511,7.1261,2.3511,7.1261,3.264,5.9782,3.264],"text":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","confidence":0.371,"type":"string"},"ShippingAddress":{"page":1,"valueString":"45000","boundingBox":[2.9609,2.638,3.3194,2.638,3.3194,2.7374,2.9609,2.7374],"text":"45000","confidence":0.533,"type":"string"},"InvoiceTotal":{"valueNumber":1881.06,"page":1,"boundingBox":[6.5814,8.0682,7.413,8.0682,7.413,8.2471,6.5814,8.2471],"text":"$1881.06","confidence":0.959,"type":"number"},"CustomerName":{"page":1,"valueString":"Clinton Gutierrez","boundingBox":[1.1277,1.6519,2.3581,1.6519,2.3581,1.7789,1.1277,1.7789],"text":"Clinton Gutierrez","confidence":0.342,"type":"string"},"VendorName":{"page":1,"valueString":"T.T 
TAILWIND TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND TRADERS","confidence":0.954,"type":"string"},"InvoiceId":{"page":1,"valueString":"67164","boundingBox":[1.1274,4.1481,1.4804,4.1481,1.4804,4.248,1.1274,4.248],"text":"67164","confidence":0.967,"type":"string"},"SubTotal":{"valueNumber":1758,"page":1,"boundingBox":[6.8462,6.7101,7.4152,6.7101,7.4152,6.8325,6.8462,6.8325],"text":"$1758.00","confidence":0.971,"type":"number"},"TotalTax":{"valueNumber":123.06,"page":1,"boundingBox":[6.9241,7.6793,7.4152,7.6793,7.4152,7.8017,6.9241,7.8017],"text":"$123.06","confidence":0.962,"type":"number"},"CustomerAddressRecipient":{"page":1,"valueString":"Dluhbio","boundingBox":[5.9879,2.1511,6.3717,2.1511,6.3717,2.2384,5.9879,2.2384],"text":"Dluhbio","confidence":0.276,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":1267.95,\"text\":\"$1267.95\",\"boundingBox\":[6.7858,5.1789,7.3503,5.1789,7.3503,5.3013,6.7858,5.3013],\"page\":1,\"confidence\":0.863},\"Description\":{\"type\":\"string\",\"valueString\":\"Wood Table\",\"text\":\"Wood Table\",\"boundingBox\":[2.0249,5.1859,2.7419,5.1859,2.7419,5.2891,2.0249,5.2891],\"page\":1,\"confidence\":0.888},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"36\",\"text\":\"36\",\"boundingBox\":[1.2114,5.1897,1.3439,5.1897,1.3439,5.2891,1.2114,5.2891],\"page\":1,\"confidence\":0.695},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.1897,1.7491,5.1897,1.7491,5.2891,1.6943,5.2891],\"page\":1,\"confidence\":0.791},\"Tax\":{\"type\":\"number\",\"valueNumber\":82.95,\"text\":\"$82.95\",\"boundingBox\":[6.2822,5.1792,6.6567,5.1792,6.6567,5.3013,6.2822,5.3013],\"page\":1,\"confidence\":0.778},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":395,\"text\":\"$395.00\",\"boundingBox\":[4.4092,5.1792,4.8601,5.1792,4.8601,5.3013,4.4092,5.3013],\"page\":1,\"confidence\":0.884}},\"text\":\"36 3 Wood Table $395.00 $0.00 0% $82.95 $1267.95\",\"boundingBox\":[1.2114,5.1789,7.3503,5.1789,7.3503,5.3013,1.2114,5.3013],\"page\":1,\"confidence\":0.804}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":256.8,\"text\":\"$256.80\",\"boundingBox\":[6.8637,5.4789,7.3548,5.4789,7.3548,5.6013,6.8637,5.6013],\"page\":1,\"confidence\":0.897},\"Description\":{\"type\":\"string\",\"valueString\":\"White Window\",\"text\":\"White Window\",\"boundingBox\":[2.0249,5.4859,2.9032,5.4859,2.9032,5.5891,2.0249,5.5891],\"page\":1,\"confidence\":0.893},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"20\",\"text\":\"20\",\"boundingBox\":[1.2105,5.4897,1.3437,5.4897,1.3437,5.5891,1.2105,5.5891],\"page\":1,\"confidence\":0.6},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.4897,1.7518,5.4897,1.7518,5.5876,1.6933,5.5876],\"page\":1,\"confidence\":0.793},\"Tax\":{\"type\":\"number\",\"valueNumber\":16.8,\"text\":\"$16.80\",\"boundingBox\":[6.2822,5.4792,6.6601,5.4792,6.6601,5.6013,6.2822,5.6013],\"page\":1,\"confidence\":0.802},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":120,\"text\":\"$120.00\",\"boundingBox\":[4.4092,5.4792,4.8601,5.4792,4.8601,5.6013,4.4092,5.6013],\"page\":1,\"confidence\":0.892}},\"text\":\"20 2 White Window $120.00 $0.00 0% $16.80 
$256.80\",\"boundingBox\":[1.2105,5.4789,7.3548,5.4789,7.3548,5.6013,1.2105,5.6013],\"page\":1,\"confidence\":0.762}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":224.7,\"text\":\"$224.70\",\"boundingBox\":[6.8637,5.7789,7.3548,5.7789,7.3548,5.9013,6.8637,5.9013],\"page\":1,\"confidence\":0.895},\"Description\":{\"type\":\"string\",\"valueString\":\"Indoor Kit Gardering\",\"text\":\"Indoor Kit Gardering\",\"boundingBox\":[2.0356,5.7859,3.2358,5.7859,3.2358,5.9193,2.0356,5.9193],\"page\":1,\"confidence\":0.883},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"25\",\"text\":\"25\",\"boundingBox\":[1.2105,5.7897,1.3404,5.7897,1.3404,5.8891,1.2105,5.8891],\"page\":1,\"confidence\":0.595},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.7897,1.7491,5.7897,1.7491,5.8891,1.6943,5.8891],\"page\":1,\"confidence\":0.815},\"Tax\":{\"type\":\"number\",\"valueNumber\":14.7,\"text\":\"$14.70\",\"boundingBox\":[6.2822,5.7792,6.6601,5.7792,6.6601,5.9013,6.2822,5.9013],\"page\":1,\"confidence\":0.801},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":70,\"text\":\"$70.00\",\"boundingBox\":[4.4822,5.7792,4.8601,5.7792,4.8601,5.9013,4.4822,5.9013],\"page\":1,\"confidence\":0.89}},\"text\":\"25 3 Indoor Kit Gardering $70.00 $0.00 0% $14.70 $224.70\",\"boundingBox\":[1.2105,5.7789,7.3548,5.7789,7.3548,5.9193,1.2105,5.9193],\"page\":1,\"confidence\":0.766}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":131.61,\"text\":\"$131.61\",\"boundingBox\":[6.8637,6.0789,7.3364,6.0789,7.3364,6.2013,6.8637,6.2013],\"page\":1,\"confidence\":0.9},\"Description\":{\"type\":\"string\",\"valueString\":\"Measuring Tape\",\"text\":\"Measuring Tape\",\"boundingBox\":[2.0356,6.0871,2.9754,6.0871,2.9754,6.2193,2.0356,6.2193],\"page\":1,\"confidence\":0.898},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"46\",\"text\":\"46\",\"boundingBox\":[1.2043,6.0897,1.3439,6.0897,1.3439,6.1891,1.2043,6.1891],\"page\":1,\"confidence\":0.636},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,6.0892,1.7341,6.0892,1.7341,6.1876,1.7012,6.1876],\"page\":1,\"confidence\":0.771},\"Tax\":{\"type\":\"number\",\"valueNumber\":8.61,\"text\":\"$8.61\",\"boundingBox\":[6.3552,6.0792,6.6406,6.0792,6.6406,6.2013,6.3552,6.2013],\"page\":1,\"confidence\":0.783},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":123,\"text\":\"$123.00\",\"boundingBox\":[4.4092,6.0792,4.8601,6.0792,4.8601,6.2013,4.4092,6.2013],\"page\":1,\"confidence\":0.898}},\"text\":\"46 1 Measuring Tape $123.00 $0.00 0% $8.61 
$131.61\",\"boundingBox\":[1.2043,6.0789,7.3364,6.0789,7.3364,6.2193,1.2043,6.2193],\"page\":1,\"confidence\":0.808}"]}}}]}}},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:18Z","lastUpdatedDateTime":"2021-10-26T22:39:24Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":5,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.0954,4.82,1.4465,4.82,1.4465,5.0833,1.0954,5.0833],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4465,4.82,1.9145,4.82,1.9145,5.0833,1.4465,5.0833],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9145,4.82,4.2938,4.82,4.2938,5.0833,1.9145,5.0833],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"boundingBox":[4.2938,4.82,4.9373,4.82,4.9373,5.0833,4.2938,5.0833],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9373,4.82,5.6524,4.82,5.6524,5.0833,4.9373,5.0833],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6524,4.82,6.14,4.82,6.14,5.0833,5.6524,5.0833],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.14,4.82,6.7381,4.82,6.7381,5.0833,6.14,5.0833],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7381,4.82,7.4271,4.82,7.4271,5.0833,6.7381,5.0833],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"9","boundingBox":[1.0954,5.0833,1.4465,5.0833,1.4465,5.52,1.0954,5.52]},{"rowIndex":1,"columnIndex":1,"text":"3","boundingBox":[1.4465,5.0833,1.9145,5.0833,1.9145,5.52,1.4465,5.52]},{"rowIndex":1,"columnIndex":2,"text":"Bathing System Classic 18 in. H x 60 in. 
W x 32.5","boundingBox":[1.9145,5.0833,4.2938,5.0833,4.3003,5.52,1.9145,5.52]},{"rowIndex":1,"columnIndex":3,"text":"$200.00","boundingBox":[4.2938,5.0833,4.9373,5.0833,4.9373,5.52,4.3003,5.52]},{"rowIndex":1,"columnIndex":4,"text":"$60.00","boundingBox":[4.9373,5.0833,5.6524,5.0833,5.6524,5.52,4.9373,5.52]},{"rowIndex":1,"columnIndex":5,"text":"10%","boundingBox":[5.6524,5.0833,6.14,5.0833,6.14,5.52,5.6524,5.52]},{"rowIndex":1,"columnIndex":6,"text":"$37.80","boundingBox":[6.14,5.0833,6.7381,5.0833,6.7381,5.52,6.14,5.52]},{"rowIndex":1,"columnIndex":7,"text":"$577.80","boundingBox":[6.7381,5.0833,7.4271,5.0833,7.4336,5.526,6.7381,5.52]},{"rowIndex":2,"columnIndex":0,"text":"59","boundingBox":[1.0954,5.52,1.4465,5.52,1.4465,5.8252,1.0954,5.8252]},{"rowIndex":2,"columnIndex":1,"text":"3","boundingBox":[1.4465,5.52,1.9145,5.52,1.9145,5.8252,1.4465,5.8252]},{"rowIndex":2,"columnIndex":2,"text":"Two red garden gnomes","boundingBox":[1.9145,5.52,4.3003,5.52,4.3003,5.8252,1.9145,5.8252]},{"rowIndex":2,"columnIndex":3,"text":"$92.00","boundingBox":[4.3003,5.52,4.9373,5.52,4.9373,5.8252,4.3003,5.8252]},{"rowIndex":2,"columnIndex":4,"text":"$0.00","boundingBox":[4.9373,5.52,5.6524,5.52,5.6524,5.8252,4.9373,5.8252]},{"rowIndex":2,"columnIndex":5,"text":"0%","boundingBox":[5.6524,5.52,6.14,5.52,6.14,5.8252,5.6524,5.8252]},{"rowIndex":2,"columnIndex":6,"text":"$19.32","boundingBox":[6.14,5.52,6.7381,5.52,6.7381,5.8252,6.14,5.8252]},{"rowIndex":2,"columnIndex":7,"text":"$295.32","boundingBox":[6.7381,5.52,7.4336,5.526,7.4336,5.8252,6.7381,5.8252]},{"rowIndex":3,"columnIndex":0,"text":"58","boundingBox":[1.0954,5.8252,1.4465,5.8252,1.4465,6.1243,1.0954,6.1243]},{"rowIndex":3,"columnIndex":1,"text":"3","boundingBox":[1.4465,5.8252,1.9145,5.8252,1.9145,6.1243,1.4465,6.1243]},{"rowIndex":3,"columnIndex":2,"text":"Single red garden gnome","boundingBox":[1.9145,5.8252,4.3003,5.8252,4.3003,6.1243,1.9145,6.1243]},{"rowIndex":3,"columnIndex":3,"text":"$56.00","boundingBox":[4.3003,5.8252,4.9373,5.8252,4.9373,6.1243,4.3003,6.1243]},{"rowIndex":3,"columnIndex":4,"text":"$16.80","boundingBox":[4.9373,5.8252,5.6524,5.8252,5.6524,6.1243,4.9373,6.1243]},{"rowIndex":3,"columnIndex":5,"text":"10%","boundingBox":[5.6524,5.8252,6.14,5.8252,6.14,6.1243,5.6524,6.1243]},{"rowIndex":3,"columnIndex":6,"text":"$10.58","boundingBox":[6.14,5.8252,6.7381,5.8252,6.7381,6.1243,6.14,6.1243]},{"rowIndex":3,"columnIndex":7,"text":"$161.78","boundingBox":[6.7381,5.8252,7.4336,5.8252,7.4336,6.1243,6.7381,6.1243]},{"rowIndex":4,"columnIndex":0,"text":"32","boundingBox":[1.0954,6.1243,1.4465,6.1243,1.4465,6.4294,1.1019,6.4294]},{"rowIndex":4,"columnIndex":1,"text":"1","boundingBox":[1.4465,6.1243,1.9145,6.1243,1.9145,6.4294,1.4465,6.4294]},{"rowIndex":4,"columnIndex":2,"text":"Artificial 
Tree","boundingBox":[1.9145,6.1243,4.3003,6.1243,4.3003,6.4234,1.9145,6.4294]},{"rowIndex":4,"columnIndex":3,"text":"$250.00","boundingBox":[4.3003,6.1243,4.9373,6.1243,4.9373,6.4234,4.3003,6.4234]},{"rowIndex":4,"columnIndex":4,"text":"$0.00","boundingBox":[4.9373,6.1243,5.6524,6.1243,5.6524,6.4234,4.9373,6.4234]},{"rowIndex":4,"columnIndex":5,"text":"0%","boundingBox":[5.6524,6.1243,6.14,6.1243,6.14,6.4234,5.6524,6.4234]},{"rowIndex":4,"columnIndex":6,"text":"$17.50","boundingBox":[6.14,6.1243,6.7381,6.1243,6.7381,6.4234,6.14,6.4234]},{"rowIndex":4,"columnIndex":7,"text":"$267.50","boundingBox":[6.7381,6.1243,7.4336,6.1243,7.4401,6.4175,6.7381,6.4234]}],"boundingBox":[1.0889,4.8212,7.4279,4.8211,7.4278,6.4273,1.0879,6.4275]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"Julpum","boundingBox":[6.2569,2.3511,6.6116,2.3511,6.6116,2.4634,6.2569,2.4634],"text":"Julpum","confidence":0.299,"type":"string"},"ShippingAddress":{"page":1,"valueString":"Street","boundingBox":[1.9649,2.138,2.3054,2.138,2.3054,2.2374,1.9649,2.2374],"text":"Street","confidence":0.23,"type":"string"},"InvoiceTotal":{"valueNumber":1302.4,"page":1,"boundingBox":[6.5814,8.2097,7.413,8.2097,7.413,8.3886,6.5814,8.3886],"text":"$1302.40","confidence":0.958,"type":"number"},"CustomerName":{"page":1,"valueString":"Ashlee Raje","boundingBox":[1.1219,1.6519,1.9574,1.6519,1.9574,1.8161,1.1219,1.8161],"text":"Ashlee Raje","confidence":0.264,"type":"string"},"VendorName":{"page":1,"valueString":"T.T TAILWIND TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND TRADERS","confidence":0.953,"type":"string"},"VendorAddressRecipient":{"page":1,"valueString":"Alvotue","boundingBox":[5.9786,2.1511,6.3597,2.1511,6.3597,2.2384,5.9786,2.2384],"text":"Alvotue","confidence":0.217,"type":"string"},"InvoiceId":{"page":1,"valueString":"80110","boundingBox":[1.126,4.1481,1.4794,4.1481,1.4794,4.248,1.126,4.248],"text":"80110","confidence":0.954,"type":"string"},"InvoiceDate":{"page":1,"boundingBox":[2.9138,4.1448,3.3935,4.1448,3.3935,4.248,2.9138,4.248],"text":"October","confidence":0.394,"type":"date"},"SubTotal":{"valueNumber":1217.2,"page":1,"boundingBox":[6.8462,6.8516,7.4152,6.8516,7.4152,6.974,6.8462,6.974],"text":"$1217.20","confidence":0.967,"type":"number"},"TotalTax":{"valueNumber":85.2,"page":1,"boundingBox":[7.002,7.8208,7.4153,7.8208,7.4153,7.9432,7.002,7.9432],"text":"$85.20","confidence":0.962,"type":"number"},"VendorAddress":{"page":1,"valueString":"4559 Loop Beaverton Oregon 97005 States","boundingBox":[5.9782,2.3543,6.9008,2.3543,6.9008,3.2384,5.9782,3.2384],"text":"4559 Loop Beaverton Oregon 97005 States","confidence":0.353,"type":"string"},"CustomerAddressRecipient":{"page":1,"valueString":"Ashlee Raje","boundingBox":[1.1219,1.6519,1.9574,1.6519,1.9574,1.8161,1.1219,1.8161],"text":"Ashlee Raje","confidence":0.264,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":577.8,\"text\":\"$577.80\",\"boundingBox\":[6.8637,5.1789,7.3548,5.1789,7.3548,5.3013,6.8637,5.3013],\"page\":1,\"confidence\":0.899},\"Description\":{\"type\":\"string\",\"valueString\":\"Bathing System Classic 18 in. H x 60 in. W x 32.5\",\"text\":\"Bathing System Classic 18 in. H x 60 in. 
W x 32.5\",\"boundingBox\":[2.0326,5.1859,4.1415,5.1859,4.1415,5.4692,2.0326,5.4692],\"page\":1,\"confidence\":0.827},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"9\",\"text\":\"9\",\"boundingBox\":[1.246,5.1897,1.3062,5.1897,1.3062,5.2891,1.246,5.2891],\"page\":1,\"confidence\":0.455},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.1897,1.7491,5.1897,1.7491,5.2891,1.6943,5.2891],\"page\":1,\"confidence\":0.771},\"Tax\":{\"type\":\"number\",\"valueNumber\":37.8,\"text\":\"$37.80\",\"boundingBox\":[6.2822,5.1792,6.6601,5.1792,6.6601,5.3013,6.2822,5.3013],\"page\":1,\"confidence\":0.781},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":200,\"text\":\"$200.00\",\"boundingBox\":[4.4092,5.1792,4.8601,5.1792,4.8601,5.3013,4.4092,5.3013],\"page\":1,\"confidence\":0.899}},\"text\":\"9 3 Bathing System Classic 18 in. H x 60 $200.00 $60.00 10% $37.80 $577.80 in. W x 32.5\",\"boundingBox\":[1.246,5.1789,7.3548,5.1789,7.3548,5.4692,1.246,5.4692],\"page\":1,\"confidence\":0.903}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":295.32,\"text\":\"$295.32\",\"boundingBox\":[6.8637,5.6204,7.3501,5.6204,7.3501,5.7428,6.8637,5.7428],\"page\":1,\"confidence\":0.898},\"Description\":{\"type\":\"string\",\"valueString\":\"Two red garden gnomes\",\"text\":\"Two red garden gnomes\",\"boundingBox\":[2.0259,5.6274,3.4758,5.6274,3.4758,5.7608,2.0259,5.7608],\"page\":1,\"confidence\":0.855},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"59\",\"text\":\"59\",\"boundingBox\":[1.2141,5.6312,1.3427,5.6312,1.3427,5.7306,1.2141,5.7306],\"page\":1,\"confidence\":0.5},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.6312,1.7491,5.6312,1.7491,5.7306,1.6943,5.7306],\"page\":1,\"confidence\":0.748},\"Tax\":{\"type\":\"number\",\"valueNumber\":19.32,\"text\":\"$19.32\",\"boundingBox\":[6.2822,5.6206,6.6583,5.6206,6.6583,5.7428,6.2822,5.7428],\"page\":1,\"confidence\":0.863},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":92,\"text\":\"$92.00\",\"boundingBox\":[4.4822,5.6206,4.8601,5.6206,4.8601,5.7428,4.4822,5.7428],\"page\":1,\"confidence\":0.898}},\"text\":\"59 3 Two red garden gnomes $92.00 $0.00 0% $19.32 $295.32\",\"boundingBox\":[1.2141,5.6204,7.3501,5.6204,7.3501,5.7608,1.2141,5.7608],\"page\":1,\"confidence\":0.809}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":161.78,\"text\":\"$161.78\",\"boundingBox\":[6.8637,5.9204,7.354,5.9204,7.354,6.0428,6.8637,6.0428],\"page\":1,\"confidence\":0.899},\"Description\":{\"type\":\"string\",\"valueString\":\"Single red garden gnome\",\"text\":\"Single red garden gnome\",\"boundingBox\":[2.0311,5.9274,3.5352,5.9274,3.5352,6.0608,2.0311,6.0608],\"page\":1,\"confidence\":0.874},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"58\",\"text\":\"58\",\"boundingBox\":[1.2141,5.9312,1.3437,5.9312,1.3437,6.0306,1.2141,6.0306],\"page\":1,\"confidence\":0.561},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.9312,1.7491,5.9312,1.7491,6.0306,1.6943,6.0306],\"page\":1,\"confidence\":0.771},\"Tax\":{\"type\":\"number\",\"valueNumber\":10.58,\"text\":\"$10.58\",\"boundingBox\":[6.2822,5.9206,6.6601,5.9206,6.6601,6.0428,6.2822,6.0428],\"page\":1,\"confidence\":0.83},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":56,\"text\":\"$56.00\",\"boundingBox\":[4.4822,5.9206,4.8601,5.9206,4.8601,6.0428,4.4822,6.0428],\"page\":1,\"confidence\":0.898}},\"text\":\"58 3 
Single red garden gnome $56.00 $16.80 10% $10.58 $161.78\",\"boundingBox\":[1.2141,5.9204,7.354,5.9204,7.354,6.0608,1.2141,6.0608],\"page\":1,\"confidence\":0.812}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":267.5,\"text\":\"$267.50\",\"boundingBox\":[6.8637,6.2204,7.3548,6.2204,7.3548,6.3428,6.8637,6.3428],\"page\":1,\"confidence\":0.901},\"Description\":{\"type\":\"string\",\"valueString\":\"Artificial Tree\",\"text\":\"Artificial Tree\",\"boundingBox\":[2.0246,6.2259,2.7987,6.2259,2.7987,6.3306,2.0246,6.3306],\"page\":1,\"confidence\":0.888},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"32\",\"text\":\"32\",\"boundingBox\":[1.2114,6.2312,1.342,6.2312,1.342,6.3306,1.2114,6.3306],\"page\":1,\"confidence\":0.5},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,6.2307,1.7341,6.2307,1.7341,6.3291,1.7012,6.3291],\"page\":1,\"confidence\":0.697},\"Tax\":{\"type\":\"number\",\"valueNumber\":17.5,\"text\":\"$17.50\",\"boundingBox\":[6.2822,6.2206,6.6601,6.2206,6.6601,6.3428,6.2822,6.3428],\"page\":1,\"confidence\":0.803},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":250,\"text\":\"$250.00\",\"boundingBox\":[4.4092,6.2206,4.8601,6.2206,4.8601,6.3428,4.4092,6.3428],\"page\":1,\"confidence\":0.899}},\"text\":\"32 1 Artificial Tree $250.00 $0.00 0% $17.50 $267.50\",\"boundingBox\":[1.2114,6.2204,7.3548,6.2204,7.3548,6.3428,1.2114,6.3428],\"page\":1,\"confidence\":0.805}"]}}}]}}},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:24Z","lastUpdatedDateTime":"2021-10-26T22:39:28Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":5,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.0954,4.82,1.4465,4.82,1.4465,5.0833,1.0954,5.0833],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4465,4.82,1.9145,4.82,1.9145,5.0833,1.4465,5.0833],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9145,4.82,4.3003,4.82,4.3003,5.0833,1.9145,5.0833],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"boundingBox":[4.3003,4.82,4.9373,4.82,4.9373,5.0833,4.3003,5.0833],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9373,4.82,5.6524,4.82,5.6524,5.0833,4.9373,5.0833],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6524,4.82,6.14,4.82,6.14,5.0833,5.6524,5.0833],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.14,4.82,6.7381,4.82,6.7381,5.0833,6.14,5.0833],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7381,4.82,7.4271,4.82,7.4271,5.0833,6.7381,5.0833],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"36","boundingBox":[1.0954,5.0833,1.4465,5.0833,1.4465,5.3824,1.0954,5.3824]},{"rowIndex":1,"columnIndex":1,"text":"2","boundingBox":[1.4465,5.0833,1.9145,5.0833,1.9145,5.3824,1.4465,5.3824]},{"rowIndex":1,"columnIndex":2,"text":"Wood 
Table","boundingBox":[1.9145,5.0833,4.3003,5.0833,4.3003,5.3824,1.9145,5.3824]},{"rowIndex":1,"columnIndex":3,"text":"$395.00","boundingBox":[4.3003,5.0833,4.9373,5.0833,4.9373,5.3824,4.3003,5.3824]},{"rowIndex":1,"columnIndex":4,"text":"$0.00","boundingBox":[4.9373,5.0833,5.6524,5.0833,5.6524,5.3824,4.9373,5.3824]},{"rowIndex":1,"columnIndex":5,"text":"0%","boundingBox":[5.6524,5.0833,6.14,5.0833,6.14,5.3824,5.6524,5.3824]},{"rowIndex":1,"columnIndex":6,"text":"$55.30","boundingBox":[6.14,5.0833,6.7381,5.0833,6.7381,5.3824,6.14,5.3824]},{"rowIndex":1,"columnIndex":7,"text":"$845.30","boundingBox":[6.7381,5.0833,7.4271,5.0833,7.4271,5.3824,6.7381,5.3824]},{"rowIndex":2,"columnIndex":0,"text":"56","boundingBox":[1.0954,5.3824,1.4465,5.3824,1.4465,5.8252,1.0954,5.8252]},{"rowIndex":2,"columnIndex":1,"text":"1","boundingBox":[1.4465,5.3824,1.9145,5.3824,1.9145,5.8252,1.4465,5.8252]},{"rowIndex":2,"columnIndex":2,"text":"Rechargeable screwdriver with extra battery","boundingBox":[1.9145,5.3824,4.3003,5.3824,4.3003,5.8252,1.9145,5.8252]},{"rowIndex":2,"columnIndex":3,"text":"$312.00","boundingBox":[4.3003,5.3824,4.9373,5.3824,4.9373,5.8252,4.3003,5.8252]},{"rowIndex":2,"columnIndex":4,"text":"$31.20","boundingBox":[4.9373,5.3824,5.6524,5.3824,5.6524,5.8252,4.9373,5.8252]},{"rowIndex":2,"columnIndex":5,"text":"10%","boundingBox":[5.6524,5.3824,6.14,5.3824,6.14,5.8252,5.6524,5.8252]},{"rowIndex":2,"columnIndex":6,"text":"$19.66","boundingBox":[6.14,5.3824,6.7381,5.3824,6.7381,5.8252,6.14,5.8252]},{"rowIndex":2,"columnIndex":7,"text":"$300.46","boundingBox":[6.7381,5.3824,7.4271,5.3824,7.4336,5.8252,6.7381,5.8252]},{"rowIndex":3,"columnIndex":0,"text":"14","boundingBox":[1.0954,5.8252,1.4465,5.8252,1.4465,6.1243,1.0954,6.1243]},{"rowIndex":3,"columnIndex":1,"text":"1","boundingBox":[1.4465,5.8252,1.9145,5.8252,1.9145,6.1243,1.4465,6.1243]},{"rowIndex":3,"columnIndex":2,"text":"Bathroom Sink 
Faucet","boundingBox":[1.9145,5.8252,4.3003,5.8252,4.3003,6.1243,1.9145,6.1243]},{"rowIndex":3,"columnIndex":3,"text":"$99.00","boundingBox":[4.3003,5.8252,4.9373,5.8252,4.9373,6.1243,4.3003,6.1243]},{"rowIndex":3,"columnIndex":4,"text":"$0.00","boundingBox":[4.9373,5.8252,5.6524,5.8252,5.6524,6.1243,4.9373,6.1243]},{"rowIndex":3,"columnIndex":5,"text":"0%","boundingBox":[5.6524,5.8252,6.14,5.8252,6.14,6.1243,5.6524,6.1243]},{"rowIndex":3,"columnIndex":6,"text":"$6.93","boundingBox":[6.14,5.8252,6.7381,5.8252,6.7381,6.1243,6.14,6.1243]},{"rowIndex":3,"columnIndex":7,"text":"$105.93","boundingBox":[6.7381,5.8252,7.4336,5.8252,7.4336,6.1243,6.7381,6.1243]},{"rowIndex":4,"columnIndex":0,"text":"48","boundingBox":[1.0954,6.1243,1.4465,6.1243,1.4465,6.4234,1.1019,6.4294]},{"rowIndex":4,"columnIndex":1,"text":"3","boundingBox":[1.4465,6.1243,1.9145,6.1243,1.9145,6.4234,1.4465,6.4234]},{"rowIndex":4,"columnIndex":2,"text":"Hammer","boundingBox":[1.9145,6.1243,4.3003,6.1243,4.3003,6.4234,1.9145,6.4234]},{"rowIndex":4,"columnIndex":3,"text":"$100.00","boundingBox":[4.3003,6.1243,4.9373,6.1243,4.9373,6.4234,4.3003,6.4234]},{"rowIndex":4,"columnIndex":4,"text":"$0.00","boundingBox":[4.9373,6.1243,5.6524,6.1243,5.6524,6.4234,4.9373,6.4234]},{"rowIndex":4,"columnIndex":5,"text":"0%","boundingBox":[5.6524,6.1243,6.14,6.1243,6.14,6.4234,5.6524,6.4234]},{"rowIndex":4,"columnIndex":6,"text":"$21.00","boundingBox":[6.14,6.1243,6.7381,6.1243,6.7381,6.4234,6.14,6.4234]},{"rowIndex":4,"columnIndex":7,"text":"$321.00","boundingBox":[6.7381,6.1243,7.4336,6.1243,7.4401,6.4175,6.7381,6.4234]}],"boundingBox":[1.0898,4.8211,7.429,4.8211,7.4288,6.4277,1.0887,6.4278]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"27, place de Brazaville Roubaix Nord 59100 France","boundingBox":[5.9835,2.3511,7.0796,2.3511,7.0796,3.2384,5.9835,3.2384],"text":"27, place de Brazaville Roubaix Nord 59100 France","confidence":0.435,"type":"string"},"InvoiceTotal":{"valueNumber":1572.69,"page":1,"boundingBox":[6.5814,8.2097,7.4108,8.2097,7.4108,8.3886,6.5814,8.3886],"text":"$1572.69","confidence":0.962,"type":"number"},"CustomerName":{"page":1,"valueString":"Pal","boundingBox":[1.5849,1.6519,1.7744,1.6519,1.7744,1.7789,1.5849,1.7789],"text":"Pal","confidence":0.256,"type":"string"},"VendorName":{"page":1,"valueString":"T.T TAILWIND TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND 
TRADERS","confidence":0.954,"type":"string"},"VendorAddressRecipient":{"page":1,"valueString":"Ifiaeh","boundingBox":[5.9879,2.1499,6.2462,2.1499,6.2462,2.2384,5.9879,2.2384],"text":"Ifiaeh","confidence":0.263,"type":"string"},"InvoiceId":{"page":1,"valueString":"71864","boundingBox":[1.1266,4.1481,1.4804,4.1481,1.4804,4.248,1.1266,4.248],"text":"71864","confidence":0.966,"type":"string"},"SubTotal":{"valueNumber":1469.8,"page":1,"boundingBox":[6.8462,6.8516,7.4152,6.8516,7.4152,6.974,6.8462,6.974],"text":"$1469.80","confidence":0.971,"type":"number"},"TotalTax":{"valueNumber":102.89,"page":1,"boundingBox":[6.9241,7.8208,7.4137,7.8208,7.4137,7.9432,6.9241,7.9432],"text":"$102.89","confidence":0.963,"type":"number"},"CustomerAddressRecipient":{"page":1,"valueString":"Willie","boundingBox":[1.1222,1.6519,1.5176,1.6519,1.5176,1.7789,1.1222,1.7789],"text":"Willie","confidence":0.22,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":845.3,\"text\":\"$845.30\",\"boundingBox\":[6.8637,5.1789,7.3548,5.1789,7.3548,5.3013,6.8637,5.3013],\"page\":1,\"confidence\":0.886},\"Description\":{\"type\":\"string\",\"valueString\":\"Wood Table\",\"text\":\"Wood Table\",\"boundingBox\":[2.0249,5.1859,2.7419,5.1859,2.7419,5.2891,2.0249,5.2891],\"page\":1,\"confidence\":0.858},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"36\",\"text\":\"36\",\"boundingBox\":[1.2114,5.1897,1.3439,5.1897,1.3439,5.2891,1.2114,5.2891],\"page\":1,\"confidence\":0.721},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.1897,1.7518,5.1897,1.7518,5.2876,1.6933,5.2876],\"page\":1,\"confidence\":0.767},\"Tax\":{\"type\":\"number\",\"valueNumber\":55.3,\"text\":\"$55.30\",\"boundingBox\":[6.2822,5.1792,6.6601,5.1792,6.6601,5.3013,6.2822,5.3013],\"page\":1,\"confidence\":0.781},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":395,\"text\":\"$395.00\",\"boundingBox\":[4.4092,5.1792,4.8601,5.1792,4.8601,5.3013,4.4092,5.3013],\"page\":1,\"confidence\":0.898}},\"text\":\"36 2 Wood Table $395.00 $0.00 0% $55.30 $845.30\",\"boundingBox\":[1.2114,5.1789,7.3548,5.1789,7.3548,5.3013,1.2114,5.3013],\"page\":1,\"confidence\":0.708}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":300.46,\"text\":\"$300.46\",\"boundingBox\":[6.8637,5.4789,7.3548,5.4789,7.3548,5.6013,6.8637,5.6013],\"page\":1,\"confidence\":0.898},\"Description\":{\"type\":\"string\",\"valueString\":\"Rechargeable screwdriver with extra battery\",\"text\":\"Rechargeable screwdriver with extra battery\",\"boundingBox\":[2.0341,5.4859,4.1487,5.4859,4.1487,5.7994,2.0341,5.7994],\"page\":1,\"confidence\":0.768},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"56\",\"text\":\"56\",\"boundingBox\":[1.2141,5.4897,1.3439,5.4897,1.3439,5.5891,1.2141,5.5891],\"page\":1,\"confidence\":0.727},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,5.4892,1.7341,5.4892,1.7341,5.5876,1.7012,5.5876],\"page\":1,\"confidence\":0.712},\"Tax\":{\"type\":\"number\",\"valueNumber\":19.66,\"text\":\"$19.66\",\"boundingBox\":[6.2822,5.4792,6.6602,5.4792,6.6602,5.6013,6.2822,5.6013],\"page\":1,\"confidence\":0.786},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":312,\"text\":\"$312.00\",\"boundingBox\":[4.4092,5.4792,4.8601,5.4792,4.8601,5.6013,4.4092,5.6013],\"page\":1,\"confidence\":0.899}},\"text\":\"56 1 Rechargeable screwdriver with extra $312.00 $31.20 10% $19.66 $300.46 
battery\",\"boundingBox\":[1.2141,5.4789,7.3548,5.4789,7.3548,5.7994,1.2141,5.7994],\"page\":1,\"confidence\":0.88}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":105.93,\"text\":\"$105.93\",\"boundingBox\":[6.8637,5.9204,7.3495,5.9204,7.3495,6.0428,6.8637,6.0428],\"page\":1,\"confidence\":0.89},\"Description\":{\"type\":\"string\",\"valueString\":\"Bathroom Sink Faucet\",\"text\":\"Bathroom Sink Faucet\",\"boundingBox\":[2.0356,5.9274,3.3231,5.9274,3.3231,6.0306,2.0356,6.0306],\"page\":1,\"confidence\":0.887},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"14\",\"text\":\"14\",\"boundingBox\":[1.2183,5.9307,1.3447,5.9307,1.3447,6.0291,1.2183,6.0291],\"page\":1,\"confidence\":0.726},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,5.9307,1.7341,5.9307,1.7341,6.0291,1.7012,6.0291],\"page\":1,\"confidence\":0.731},\"Tax\":{\"type\":\"number\",\"valueNumber\":6.93,\"text\":\"$6.93\",\"boundingBox\":[6.3552,5.9206,6.6556,5.9206,6.6556,6.0428,6.3552,6.0428],\"page\":1,\"confidence\":0.802},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":99,\"text\":\"$99.00\",\"boundingBox\":[4.4822,5.9206,4.8601,5.9206,4.8601,6.0428,4.4822,6.0428],\"page\":1,\"confidence\":0.899}},\"text\":\"14 1 Bathroom Sink Faucet $99.00 $0.00 0% $6.93 $105.93\",\"boundingBox\":[1.2183,5.9204,7.3495,5.9204,7.3495,6.0428,1.2183,6.0428],\"page\":1,\"confidence\":0.864}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":321,\"text\":\"$321.00\",\"boundingBox\":[6.8637,6.2204,7.3548,6.2204,7.3548,6.3428,6.8637,6.3428],\"page\":1,\"confidence\":0.9},\"Description\":{\"type\":\"string\",\"valueString\":\"Hammer\",\"text\":\"Hammer\",\"boundingBox\":[2.0356,6.233,2.5381,6.233,2.5381,6.3306,2.0356,6.3306],\"page\":1,\"confidence\":0.9},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"48\",\"text\":\"48\",\"boundingBox\":[1.2043,6.2312,1.3437,6.2312,1.3437,6.3305,1.2043,6.3305],\"page\":1,\"confidence\":0.77},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,6.2312,1.7491,6.2312,1.7491,6.3306,1.6943,6.3306],\"page\":1,\"confidence\":0.767},\"Tax\":{\"type\":\"number\",\"valueNumber\":21,\"text\":\"$21.00\",\"boundingBox\":[6.2822,6.2206,6.6601,6.2206,6.6601,6.3428,6.2822,6.3428],\"page\":1,\"confidence\":0.801},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":100,\"text\":\"$100.00\",\"boundingBox\":[4.4092,6.2206,4.8601,6.2206,4.8601,6.3428,4.4092,6.3428],\"page\":1,\"confidence\":0.899}},\"text\":\"48 3 Hammer $100.00 $0.00 0% $21.00 
$321.00\",\"boundingBox\":[1.2043,6.2204,7.3548,6.2204,7.3548,6.3428,1.2043,6.3428],\"page\":1,\"confidence\":0.805}"]}}}]}}},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:23Z","lastUpdatedDateTime":"2021-10-26T22:39:28Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":5,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.1002,4.8223,1.4447,4.8223,1.4447,5.0827,1.1002,5.0827],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4447,4.8223,1.9128,4.8223,1.9128,5.0827,1.4447,5.0827],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9128,4.8223,4.3051,4.8223,4.3051,5.0827,1.9128,5.0827],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"boundingBox":[4.3051,4.8223,4.9356,4.8223,4.9356,5.0827,4.3051,5.0827],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9356,4.8223,5.6507,4.8223,5.6507,5.0827,4.9356,5.0827],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6507,4.8223,6.1383,4.8223,6.1383,5.0827,5.6507,5.0827],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.1383,4.8223,6.7298,4.8223,6.7298,5.0827,6.1383,5.0827],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7298,4.8223,7.4254,4.8223,7.4254,5.0827,6.7298,5.0827],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"49","boundingBox":[1.1002,5.0827,1.4447,5.0827,1.4447,5.3803,1.1002,5.3803]},{"rowIndex":1,"columnIndex":1,"text":"2","boundingBox":[1.4447,5.0827,1.9128,5.0827,1.9128,5.3803,1.4447,5.3803]},{"rowIndex":1,"columnIndex":2,"text":"Screwdriver","boundingBox":[1.9128,5.0827,4.3051,5.0827,4.3051,5.3803,1.9128,5.3803]},{"rowIndex":1,"columnIndex":3,"text":"$110.00","boundingBox":[4.3051,5.0827,4.9356,5.0827,4.9356,5.3803,4.3051,5.3803]},{"rowIndex":1,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.0827,5.6507,5.0827,5.6507,5.3803,4.9356,5.3803]},{"rowIndex":1,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.0827,6.1383,5.0827,6.1383,5.3803,5.6507,5.3803]},{"rowIndex":1,"columnIndex":6,"text":"$15.40","boundingBox":[6.1383,5.0827,6.7298,5.0827,6.7363,5.3803,6.1383,5.3803]},{"rowIndex":1,"columnIndex":7,"text":"$235.40","boundingBox":[6.7298,5.0827,7.4254,5.0827,7.4254,5.3865,6.7363,5.3803]},{"rowIndex":2,"columnIndex":0,"text":"54","boundingBox":[1.1002,5.3803,1.4447,5.3803,1.4447,5.6841,1.1002,5.6841]},{"rowIndex":2,"columnIndex":1,"text":"1","boundingBox":[1.4447,5.3803,1.9128,5.3803,1.9128,5.6841,1.4447,5.6841]},{"rowIndex":2,"columnIndex":2,"text":"Yellow Rechargeable 
screwdriver","boundingBox":[1.9128,5.3803,4.3051,5.3803,4.3051,5.6841,1.9128,5.6841]},{"rowIndex":2,"columnIndex":3,"text":"$250.00","boundingBox":[4.3051,5.3803,4.9356,5.3803,4.9356,5.6841,4.3051,5.6841]},{"rowIndex":2,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.3803,5.6507,5.3803,5.6507,5.6841,4.9356,5.6841]},{"rowIndex":2,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.3803,6.1383,5.3803,6.1383,5.6841,5.6507,5.6841]},{"rowIndex":2,"columnIndex":6,"text":"$17.50","boundingBox":[6.1383,5.3803,6.7363,5.3803,6.7363,5.6841,6.1383,5.6841]},{"rowIndex":2,"columnIndex":7,"text":"$267.50","boundingBox":[6.7363,5.3803,7.4254,5.3865,7.4254,5.6841,6.7363,5.6841]},{"rowIndex":3,"columnIndex":0,"text":"22","boundingBox":[1.1002,5.6841,1.4447,5.6841,1.4447,5.9817,1.1002,5.9817]},{"rowIndex":3,"columnIndex":1,"text":"2","boundingBox":[1.4447,5.6841,1.9128,5.6841,1.9128,5.9817,1.4447,5.9817]},{"rowIndex":3,"columnIndex":2,"text":"Steel Passage Door Knob","boundingBox":[1.9128,5.6841,4.3051,5.6841,4.3051,5.9817,1.9128,5.9817]},{"rowIndex":3,"columnIndex":3,"text":"$10.00","boundingBox":[4.3051,5.6841,4.9356,5.6841,4.9356,5.9817,4.3051,5.9817]},{"rowIndex":3,"columnIndex":4,"text":"$2.00","boundingBox":[4.9356,5.6841,5.6507,5.6841,5.6507,5.9817,4.9356,5.9817]},{"rowIndex":3,"columnIndex":5,"text":"10%","boundingBox":[5.6507,5.6841,6.1383,5.6841,6.1383,5.9817,5.6507,5.9817]},{"rowIndex":3,"columnIndex":6,"text":"$1.26","boundingBox":[6.1383,5.6841,6.7363,5.6841,6.7363,5.9817,6.1383,5.9817]},{"rowIndex":3,"columnIndex":7,"text":"$19.26","boundingBox":[6.7363,5.6841,7.4254,5.6841,7.4254,5.9817,6.7363,5.9817]},{"rowIndex":4,"columnIndex":0,"text":"40","boundingBox":[1.1002,5.9817,1.4447,5.9817,1.4447,6.2793,1.1067,6.2855]},{"rowIndex":4,"columnIndex":1,"text":"2","boundingBox":[1.4447,5.9817,1.9128,5.9817,1.9128,6.2793,1.4447,6.2793]},{"rowIndex":4,"columnIndex":2,"text":"Extractor Steal","boundingBox":[1.9128,5.9817,4.3051,5.9817,4.3051,6.2793,1.9128,6.2793]},{"rowIndex":4,"columnIndex":3,"text":"$135.00","boundingBox":[4.3051,5.9817,4.9356,5.9817,4.9356,6.2793,4.3051,6.2793]},{"rowIndex":4,"columnIndex":4,"text":"$0.00","boundingBox":[4.9356,5.9817,5.6507,5.9817,5.6507,6.2793,4.9356,6.2793]},{"rowIndex":4,"columnIndex":5,"text":"0%","boundingBox":[5.6507,5.9817,6.1383,5.9817,6.1383,6.2793,5.6507,6.2793]},{"rowIndex":4,"columnIndex":6,"text":"$18.90","boundingBox":[6.1383,5.9817,6.7363,5.9817,6.7363,6.2793,6.1383,6.2793]},{"rowIndex":4,"columnIndex":7,"text":"$288.90","boundingBox":[6.7363,5.9817,7.4254,5.9817,7.4254,6.2793,6.7363,6.2793]}],"boundingBox":[1.092,4.811,7.431,4.8115,7.4303,6.2884,1.0906,6.2882]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"6058 Hill Street","boundingBox":[1.1274,2.1342,2.0287,2.1342,2.0287,2.2374,1.1274,2.2374],"text":"6058 Hill Street","confidence":0.4,"type":"string"},"InvoiceTotal":{"valueNumber":811.06,"page":1,"boundingBox":[6.6952,8.0682,7.413,8.0682,7.413,8.2471,6.6952,8.2471],"text":"$811.06","confidence":0.962,"type":"number"},"CustomerName":{"page":1,"valueString":"Misty Xie","boundingBox":[1.1354,1.6534,1.7819,1.6534,1.7819,1.8161,1.1354,1.8161],"text":"Misty Xie","confidence":0.38,"type":"string"},"VendorName":{"page":1,"valueString":"T.T TAILWIND TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND 
TRADERS","confidence":0.954,"type":"string"},"VendorAddressRecipient":{"page":1,"valueString":"Rloefos","boundingBox":[5.9879,2.1499,6.347,2.1499,6.347,2.2384,5.9879,2.2384],"text":"Rloefos","confidence":0.305,"type":"string"},"InvoiceId":{"page":1,"valueString":"80066","boundingBox":[1.126,4.1486,1.4796,4.1486,1.4796,4.248,1.126,4.248],"text":"80066","confidence":0.963,"type":"string"},"InvoiceDate":{"page":1,"boundingBox":[2.7256,4.1504,2.793,4.1504,2.793,4.2465,2.7256,4.2465],"text":"4","confidence":0.304,"type":"date"},"SubTotal":{"valueNumber":758,"page":1,"boundingBox":[6.9241,6.7101,7.4152,6.7101,7.4152,6.8325,6.9241,6.8325],"text":"$758.00","confidence":0.971,"type":"number"},"TotalTax":{"valueNumber":53.06,"page":1,"boundingBox":[7.002,7.6793,7.4153,7.6793,7.4153,7.8017,7.002,7.8017],"text":"$53.06","confidence":0.967,"type":"number"},"VendorAddress":{"page":1,"valueString":"376 Amador Valley Blvd.","boundingBox":[5.9843,2.3511,7.1959,2.3511,7.1959,2.464,5.9843,2.464],"text":"376 Amador Valley Blvd.","confidence":0.501,"type":"string"},"CustomerAddressRecipient":{"page":1,"valueString":"Misty Xie","boundingBox":[1.1354,1.6534,1.7819,1.6534,1.7819,1.8161,1.1354,1.8161],"text":"Misty Xie","confidence":0.38,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":235.4,\"text\":\"$235.40\",\"boundingBox\":[6.8637,5.1789,7.3548,5.1789,7.3548,5.3013,6.8637,5.3013],\"page\":1,\"confidence\":0.899},\"Description\":{\"type\":\"string\",\"valueString\":\"Screwdriver\",\"text\":\"Screwdriver\",\"boundingBox\":[2.0311,5.1859,2.7142,5.1859,2.7142,5.2891,2.0311,5.2891],\"page\":1,\"confidence\":0.902},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"49\",\"text\":\"49\",\"boundingBox\":[1.2043,5.1897,1.3427,5.1897,1.3427,5.2891,1.2043,5.2891],\"page\":1,\"confidence\":0.769},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.1897,1.7518,5.1897,1.7518,5.2876,1.6933,5.2876],\"page\":1,\"confidence\":0.8},\"Tax\":{\"type\":\"number\",\"valueNumber\":15.4,\"text\":\"$15.40\",\"boundingBox\":[6.2822,5.1792,6.6601,5.1792,6.6601,5.3013,6.2822,5.3013],\"page\":1,\"confidence\":0.693},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":110,\"text\":\"$110.00\",\"boundingBox\":[4.4092,5.1792,4.8601,5.1792,4.8601,5.3013,4.4092,5.3013],\"page\":1,\"confidence\":0.888}},\"text\":\"49 2 Screwdriver $110.00 $0.00 0% $15.40 $235.40\",\"boundingBox\":[1.2043,5.1789,7.3548,5.1789,7.3548,5.3013,1.2043,5.3013],\"page\":1,\"confidence\":0.7}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":267.5,\"text\":\"$267.50\",\"boundingBox\":[6.8637,5.4789,7.3548,5.4789,7.3548,5.6013,6.8637,5.6013],\"page\":1,\"confidence\":0.902},\"Description\":{\"type\":\"string\",\"valueString\":\"Yellow Rechargeable screwdriver\",\"text\":\"Yellow Rechargeable 
screwdriver\",\"boundingBox\":[2.0244,5.4859,3.97,5.4859,3.97,5.6193,2.0244,5.6193],\"page\":1,\"confidence\":0.828},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"54\",\"text\":\"54\",\"boundingBox\":[1.2141,5.4915,1.3447,5.4915,1.3447,5.5891,1.2141,5.5891],\"page\":1,\"confidence\":0.73},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,5.4892,1.7341,5.4892,1.7341,5.5876,1.7012,5.5876],\"page\":1,\"confidence\":0.746},\"Tax\":{\"type\":\"number\",\"valueNumber\":17.5,\"text\":\"$17.50\",\"boundingBox\":[6.2822,5.4792,6.6601,5.4792,6.6601,5.6013,6.2822,5.6013],\"page\":1,\"confidence\":0.697},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":250,\"text\":\"$250.00\",\"boundingBox\":[4.4092,5.4792,4.8601,5.4792,4.8601,5.6013,4.4092,5.6013],\"page\":1,\"confidence\":0.898}},\"text\":\"54 1 Yellow Rechargeable screwdriver $250.00 $0.00 0% $17.50 $267.50\",\"boundingBox\":[1.2141,5.4789,7.3548,5.4789,7.3548,5.6193,1.2141,5.6193],\"page\":1,\"confidence\":0.847}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":19.26,\"text\":\"$19.26\",\"boundingBox\":[6.9416,5.7789,7.3548,5.7789,7.3548,5.9013,6.9416,5.9013],\"page\":1,\"confidence\":0.902},\"Description\":{\"type\":\"string\",\"valueString\":\"Steel Passage Door Knob\",\"text\":\"Steel Passage Door Knob\",\"boundingBox\":[2.0311,5.7859,3.5123,5.7859,3.5123,5.9193,2.0311,5.9193],\"page\":1,\"confidence\":0.884},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"22\",\"text\":\"22\",\"boundingBox\":[1.2105,5.7897,1.342,5.7897,1.342,5.8876,1.2105,5.8876],\"page\":1,\"confidence\":0.731},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.7897,1.7518,5.7897,1.7518,5.8876,1.6933,5.8876],\"page\":1,\"confidence\":0.803},\"Tax\":{\"type\":\"number\",\"valueNumber\":1.26,\"text\":\"$1.26\",\"boundingBox\":[6.3552,5.7792,6.6602,5.7792,6.6602,5.9013,6.3552,5.9013],\"page\":1,\"confidence\":0.715},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":10,\"text\":\"$10.00\",\"boundingBox\":[4.4822,5.7792,4.8601,5.7792,4.8601,5.9013,4.4822,5.9013],\"page\":1,\"confidence\":0.885}},\"text\":\"22 2 Steel Passage Door Knob $10.00 $2.00 10% $1.26 $19.26\",\"boundingBox\":[1.2105,5.7789,7.3548,5.7789,7.3548,5.9193,1.2105,5.9193],\"page\":1,\"confidence\":0.87}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":288.9,\"text\":\"$288.90\",\"boundingBox\":[6.8637,6.0789,7.3548,6.0789,7.3548,6.2013,6.8637,6.2013],\"page\":1,\"confidence\":0.903},\"Description\":{\"type\":\"string\",\"valueString\":\"Extractor Steal\",\"text\":\"Extractor Steal\",\"boundingBox\":[2.0356,6.0859,2.8654,6.0859,2.8654,6.1891,2.0356,6.1891],\"page\":1,\"confidence\":0.9},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"40\",\"text\":\"40\",\"boundingBox\":[1.2043,6.0897,1.3437,6.0897,1.3437,6.1891,1.2043,6.1891],\"page\":1,\"confidence\":0.762},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,6.0897,1.7518,6.0897,1.7518,6.1876,1.6933,6.1876],\"page\":1,\"confidence\":0.825},\"Tax\":{\"type\":\"number\",\"valueNumber\":18.9,\"text\":\"$18.90\",\"boundingBox\":[6.2822,6.0792,6.6601,6.0792,6.6601,6.2013,6.2822,6.2013],\"page\":1,\"confidence\":0.694},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":135,\"text\":\"$135.00\",\"boundingBox\":[4.4092,6.0792,4.8601,6.0792,4.8601,6.2013,4.4092,6.2013],\"page\":1,\"confidence\":0.898}},\"text\":\"40 2 Extractor Steal $135.00 $0.00 
0% $18.90 $288.90\",\"boundingBox\":[1.2043,6.0789,7.3548,6.0789,7.3548,6.2013,1.2043,6.2013],\"page\":1,\"confidence\":0.714}"]}}}]}}},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2015/Invoice83878.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:24Z","lastUpdatedDateTime":"2021-10-26T22:39:27Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":4,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.1026,4.8206,1.4353,4.8206,1.4418,5.0808,1.1026,5.0808],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4353,4.8206,1.9115,4.8206,1.9115,5.0808,1.4418,5.0808],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9115,4.8206,4.2928,4.8206,4.2993,5.0865,1.9115,5.0808],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"boundingBox":[4.2928,4.8206,4.9322,4.8206,4.9322,5.0865,4.2993,5.0865],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9322,4.8206,5.6433,4.8206,5.6498,5.0808,4.9322,5.0865],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6433,4.8206,6.1261,4.8206,6.1326,5.0808,5.6498,5.0808],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.1261,4.8206,6.7263,4.8206,6.7328,5.0808,6.1326,5.0808],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7263,4.8206,7.4309,4.8206,7.4309,5.0808,6.7328,5.0808],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"47","boundingBox":[1.1026,5.0808,1.4418,5.0808,1.4418,5.3814,1.1026,5.3814]},{"rowIndex":1,"columnIndex":1,"text":"1","boundingBox":[1.4418,5.0808,1.9115,5.0808,1.9115,5.3814,1.4418,5.3814]},{"rowIndex":1,"columnIndex":2,"text":"Multi Function Drill","boundingBox":[1.9115,5.0808,4.2993,5.0865,4.2993,5.3814,1.9115,5.3814]},{"rowIndex":1,"columnIndex":3,"text":"$159.00","boundingBox":[4.2993,5.0865,4.9322,5.0865,4.9322,5.3814,4.2993,5.3814]},{"rowIndex":1,"columnIndex":4,"text":"$0.00","boundingBox":[4.9322,5.0865,5.6498,5.0808,5.6498,5.3814,4.9322,5.3814]},{"rowIndex":1,"columnIndex":5,"text":"0%","boundingBox":[5.6498,5.0808,6.1326,5.0808,6.1326,5.3814,5.6498,5.3814]},{"rowIndex":1,"columnIndex":6,"text":"$11.13","boundingBox":[6.1326,5.0808,6.7328,5.0808,6.7328,5.3814,6.1326,5.3814]},{"rowIndex":1,"columnIndex":7,"text":"$170.13","boundingBox":[6.7328,5.0808,7.4309,5.0808,7.4309,5.3814,6.7328,5.3814]},{"rowIndex":2,"columnIndex":0,"text":"53","boundingBox":[1.1026,5.3814,1.4418,5.3814,1.4418,5.682,1.1026,5.682]},{"rowIndex":2,"columnIndex":1,"text":"1","boundingBox":[1.4418,5.3814,1.9115,5.3814,1.9115,5.682,1.4418,5.682]},{"rowIndex":2,"columnIndex":2,"text":"Stainless multi-tool 
plier","boundingBox":[1.9115,5.3814,4.2993,5.3814,4.2993,5.682,1.9115,5.682]},{"rowIndex":2,"columnIndex":3,"text":"$90.00","boundingBox":[4.2993,5.3814,4.9322,5.3814,4.9322,5.682,4.2993,5.682]},{"rowIndex":2,"columnIndex":4,"text":"$0.00","boundingBox":[4.9322,5.3814,5.6498,5.3814,5.6498,5.682,4.9322,5.682]},{"rowIndex":2,"columnIndex":5,"text":"0%","boundingBox":[5.6498,5.3814,6.1326,5.3814,6.1326,5.682,5.6498,5.682]},{"rowIndex":2,"columnIndex":6,"text":"$6.30","boundingBox":[6.1326,5.3814,6.7328,5.3814,6.7328,5.682,6.1326,5.682]},{"rowIndex":2,"columnIndex":7,"text":"$96.30","boundingBox":[6.7328,5.3814,7.4309,5.3814,7.4309,5.682,6.7328,5.682]},{"rowIndex":3,"columnIndex":0,"text":"32","boundingBox":[1.1026,5.682,1.4418,5.682,1.4483,5.9827,1.1026,5.9827]},{"rowIndex":3,"columnIndex":1,"text":"3","boundingBox":[1.4418,5.682,1.9115,5.682,1.9115,5.9827,1.4483,5.9827]},{"rowIndex":3,"columnIndex":2,"text":"Artificial Tree","boundingBox":[1.9115,5.682,4.2993,5.682,4.3059,5.9827,1.9115,5.9827]},{"rowIndex":3,"columnIndex":3,"text":"$250.00","boundingBox":[4.2993,5.682,4.9322,5.682,4.9322,5.9827,4.3059,5.9827]},{"rowIndex":3,"columnIndex":4,"text":"$0.00","boundingBox":[4.9322,5.682,5.6498,5.682,5.6563,5.9827,4.9322,5.9827]},{"rowIndex":3,"columnIndex":5,"text":"0%","boundingBox":[5.6498,5.682,6.1326,5.682,6.1391,5.9827,5.6563,5.9827]},{"rowIndex":3,"columnIndex":6,"text":"$52.50","boundingBox":[6.1326,5.682,6.7328,5.682,6.7393,5.9827,6.1391,5.9827]},{"rowIndex":3,"columnIndex":7,"text":"$802.50","boundingBox":[6.7328,5.682,7.4309,5.682,7.4309,5.9827,6.7393,5.9827]}],"boundingBox":[1.0898,4.8177,7.4518,4.8177,7.4519,5.9923,1.089,5.9918]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"Straße 242 Stuttgart Saarland 70511 Germany","boundingBox":[5.9827,2.3499,7.0809,2.3499,7.0809,3.264,5.9827,3.264],"text":"Straße 242 Stuttgart Saarland 70511 Germany","confidence":0.371,"type":"string"},"ShippingAddress":{"page":1,"valueString":"4SJ","boundingBox":[3.2434,2.638,3.4241,2.638,3.4241,2.7374,3.2434,2.7374],"text":"4SJ","confidence":0.364,"type":"string"},"InvoiceTotal":{"valueNumber":1068.93,"page":1,"boundingBox":[6.5814,7.7682,7.4053,7.7682,7.4053,7.9471,6.5814,7.9471],"text":"$1068.93","confidence":0.962,"type":"number"},"CustomerName":{"page":1,"valueString":"Connie Liang","boundingBox":[1.1277,1.6534,2.0641,1.6534,2.0641,1.8161,1.1277,1.8161],"text":"Connie Liang","confidence":0.352,"type":"string"},"VendorName":{"page":1,"valueString":"T.T TAILWIND TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND TRADERS","confidence":0.954,"type":"string"},"InvoiceId":{"page":1,"valueString":"83878","boundingBox":[1.126,4.1486,1.4794,4.1486,1.4794,4.248,1.126,4.248],"text":"83878","confidence":0.968,"type":"string"},"InvoiceDate":{"page":1,"boundingBox":[2.7256,4.1448,3.7844,4.1448,3.7844,4.248,2.7256,4.248],"text":"4 November 2015","confidence":0.363,"valueDate":"2015-11-04","type":"date"},"SubTotal":{"valueNumber":999,"page":1,"boundingBox":[6.9241,6.4101,7.4152,6.4101,7.4152,6.5325,6.9241,6.5325],"text":"$999.00","confidence":0.973,"type":"number"},"TotalTax":{"valueNumber":69.93,"page":1,"boundingBox":[7.002,7.3793,7.41,7.3793,7.41,7.5017,7.002,7.5017],"text":"$69.93","confidence":0.968,"type":"number"},"CustomerAddressRecipient":{"page":1,"valueString":"Mrurc Potsdamer","boundingBox":[5.9879,2.1558,6.5191,2.1558,6.5191,2.4384,5.9879,2.4384],"text":"Mrurc 
Potsdamer","confidence":0.426,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":170.13,\"text\":\"$170.13\",\"boundingBox\":[6.8637,5.1789,7.3495,5.1789,7.3495,5.3013,6.8637,5.3013],\"page\":1,\"confidence\":0.902},\"Description\":{\"type\":\"string\",\"valueString\":\"Multi Function Drill\",\"text\":\"Multi Function Drill\",\"boundingBox\":[2.0356,5.1859,3.1514,5.1859,3.1514,5.2891,2.0356,5.2891],\"page\":1,\"confidence\":0.892},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"47\",\"text\":\"47\",\"boundingBox\":[1.2043,5.1915,1.3431,5.1915,1.3431,5.2876,1.2043,5.2876],\"page\":1,\"confidence\":0.702},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,5.1892,1.7341,5.1892,1.7341,5.2876,1.7012,5.2876],\"page\":1,\"confidence\":0.779},\"Tax\":{\"type\":\"number\",\"valueNumber\":11.13,\"text\":\"$11.13\",\"boundingBox\":[6.2822,5.1792,6.6556,5.1792,6.6556,5.3013,6.2822,5.3013],\"page\":1,\"confidence\":0.898},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":159,\"text\":\"$159.00\",\"boundingBox\":[4.4092,5.1792,4.8601,5.1792,4.8601,5.3013,4.4092,5.3013],\"page\":1,\"confidence\":0.899}},\"text\":\"47 1 Multi Function Drill $159.00 $0.00 0% $11.13 $170.13\",\"boundingBox\":[1.2043,5.1789,7.3495,5.1789,7.3495,5.3013,1.2043,5.3013],\"page\":1,\"confidence\":0.808}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":96.3,\"text\":\"$96.30\",\"boundingBox\":[6.9416,5.4789,7.3548,5.4789,7.3548,5.6013,6.9416,5.6013],\"page\":1,\"confidence\":0.934},\"Description\":{\"type\":\"string\",\"valueString\":\"Stainless multi-tool plier\",\"text\":\"Stainless multi-tool plier\",\"boundingBox\":[2.0311,5.4859,3.4686,5.4859,3.4686,5.6186,2.0311,5.6186],\"page\":1,\"confidence\":0.883},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"53\",\"text\":\"53\",\"boundingBox\":[1.2141,5.4897,1.3393,5.4897,1.3393,5.5891,1.2141,5.5891],\"page\":1,\"confidence\":0.695},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,5.4892,1.7341,5.4892,1.7341,5.5876,1.7012,5.5876],\"page\":1,\"confidence\":0.798},\"Tax\":{\"type\":\"number\",\"valueNumber\":6.3,\"text\":\"$6.30\",\"boundingBox\":[6.3552,5.4792,6.6601,5.4792,6.6601,5.6013,6.3552,5.6013],\"page\":1,\"confidence\":0.899},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":90,\"text\":\"$90.00\",\"boundingBox\":[4.4822,5.4792,4.8601,5.4792,4.8601,5.6013,4.4822,5.6013],\"page\":1,\"confidence\":0.9}},\"text\":\"53 1 Stainless multi-tool plier $90.00 $0.00 0% $6.30 $96.30\",\"boundingBox\":[1.2141,5.4789,7.3548,5.4789,7.3548,5.6186,1.2141,5.6186],\"page\":1,\"confidence\":0.804}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":802.5,\"text\":\"$802.50\",\"boundingBox\":[6.8637,5.7789,7.3548,5.7789,7.3548,5.9013,6.8637,5.9013],\"page\":1,\"confidence\":0.958},\"Description\":{\"type\":\"string\",\"valueString\":\"Artificial Tree\",\"text\":\"Artificial 
Tree\",\"boundingBox\":[2.0246,5.7844,2.7987,5.7844,2.7987,5.8891,2.0246,5.8891],\"page\":1,\"confidence\":0.898},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"32\",\"text\":\"32\",\"boundingBox\":[1.2114,5.7897,1.342,5.7897,1.342,5.8891,1.2114,5.8891],\"page\":1,\"confidence\":0.695},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.7897,1.7491,5.7897,1.7491,5.8891,1.6943,5.8891],\"page\":1,\"confidence\":0.8},\"Tax\":{\"type\":\"number\",\"valueNumber\":52.5,\"text\":\"$52.50\",\"boundingBox\":[6.2822,5.7792,6.6601,5.7792,6.6601,5.9013,6.2822,5.9013],\"page\":1,\"confidence\":0.898},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":250,\"text\":\"$250.00\",\"boundingBox\":[4.4092,5.7792,4.8601,5.7792,4.8601,5.9013,4.4092,5.9013],\"page\":1,\"confidence\":0.902}},\"text\":\"32 3 Artificial Tree $250.00 $0.00 0% $52.50 $802.50\",\"boundingBox\":[1.2114,5.7789,7.3548,5.7789,7.3548,5.9013,1.2114,5.9013],\"page\":1,\"confidence\":0.804}"]}}}]}}},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice102247.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:23Z","lastUpdatedDateTime":"2021-10-26T22:39:28Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":4,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.1003,4.8201,1.4451,4.8201,1.4451,5.0852,1.1003,5.0852],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4451,4.8201,1.9135,4.8201,1.9135,5.0852,1.4451,5.0852],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9135,4.8201,4.301,4.8201,4.301,5.0852,1.9135,5.0852],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"boundingBox":[4.301,4.8201,4.9385,4.8201,4.9385,5.0852,4.301,5.0852],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9385,4.8201,5.6541,4.8201,5.6541,5.0852,4.9385,5.0852],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6541,4.8201,6.129,4.8201,6.1355,5.0852,5.6541,5.0852],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.129,4.8201,6.7275,4.8201,6.734,5.0852,6.1355,5.0852],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7275,4.8201,7.4236,4.8201,7.4236,5.0852,6.734,5.0852],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"43","boundingBox":[1.1003,5.0852,1.4451,5.0852,1.4451,5.3848,1.1003,5.3848]},{"rowIndex":1,"columnIndex":1,"text":"2","boundingBox":[1.4451,5.0852,1.9135,5.0852,1.9135,5.3848,1.4451,5.3848]},{"rowIndex":1,"columnIndex":2,"text":"Big Metal 
Shelving","boundingBox":[1.9135,5.0852,4.301,5.0852,4.301,5.3848,1.9135,5.3848]},{"rowIndex":1,"columnIndex":3,"text":"$99.00","boundingBox":[4.301,5.0852,4.9385,5.0852,4.9385,5.3848,4.301,5.3848]},{"rowIndex":1,"columnIndex":4,"text":"$0.00","boundingBox":[4.9385,5.0852,5.6541,5.0852,5.6541,5.3848,4.9385,5.3848]},{"rowIndex":1,"columnIndex":5,"text":"0%","boundingBox":[5.6541,5.0852,6.1355,5.0852,6.1355,5.3848,5.6541,5.3848]},{"rowIndex":1,"columnIndex":6,"text":"$13.86","boundingBox":[6.1355,5.0852,6.734,5.0852,6.734,5.3848,6.1355,5.3848]},{"rowIndex":1,"columnIndex":7,"text":"$211.86","boundingBox":[6.734,5.0852,7.4236,5.0852,7.4236,5.3791,6.734,5.3848]},{"rowIndex":2,"columnIndex":0,"text":"30","boundingBox":[1.1003,5.3848,1.4451,5.3848,1.4451,5.6845,1.1003,5.6845]},{"rowIndex":2,"columnIndex":1,"text":"2","boundingBox":[1.4451,5.3848,1.9135,5.3848,1.9135,5.6845,1.4451,5.6845]},{"rowIndex":2,"columnIndex":2,"text":"Gardering","boundingBox":[1.9135,5.3848,4.301,5.3848,4.301,5.6845,1.9135,5.6845]},{"rowIndex":2,"columnIndex":3,"text":"$10.00","boundingBox":[4.301,5.3848,4.9385,5.3848,4.9385,5.6845,4.301,5.6845]},{"rowIndex":2,"columnIndex":4,"text":"$0.00","boundingBox":[4.9385,5.3848,5.6541,5.3848,5.6541,5.6845,4.9385,5.6845]},{"rowIndex":2,"columnIndex":5,"text":"0%","boundingBox":[5.6541,5.3848,6.1355,5.3848,6.1355,5.6845,5.6541,5.6845]},{"rowIndex":2,"columnIndex":6,"text":"$1.40","boundingBox":[6.1355,5.3848,6.734,5.3848,6.734,5.6845,6.1355,5.6845]},{"rowIndex":2,"columnIndex":7,"text":"$21.40","boundingBox":[6.734,5.3848,7.4236,5.3791,7.4236,5.6845,6.734,5.6845]},{"rowIndex":3,"columnIndex":0,"text":"26","boundingBox":[1.1003,5.6845,1.4451,5.6845,1.4451,5.9784,1.1003,5.9784]},{"rowIndex":3,"columnIndex":1,"text":"3","boundingBox":[1.4451,5.6845,1.9135,5.6845,1.9135,5.9784,1.4451,5.9784]},{"rowIndex":3,"columnIndex":2,"text":"Craftsman 100 ft. 
L x 5/8 in.","boundingBox":[1.9135,5.6845,4.301,5.6845,4.301,5.9784,1.9135,5.9784]},{"rowIndex":3,"columnIndex":3,"text":"$100.00","boundingBox":[4.301,5.6845,4.9385,5.6845,4.9385,5.9784,4.301,5.9784]},{"rowIndex":3,"columnIndex":4,"text":"$30.00","boundingBox":[4.9385,5.6845,5.6541,5.6845,5.6541,5.9842,4.9385,5.9784]},{"rowIndex":3,"columnIndex":5,"text":"10%","boundingBox":[5.6541,5.6845,6.1355,5.6845,6.1355,5.9842,5.6541,5.9842]},{"rowIndex":3,"columnIndex":6,"text":"$18.90","boundingBox":[6.1355,5.6845,6.734,5.6845,6.7405,5.9842,6.1355,5.9842]},{"rowIndex":3,"columnIndex":7,"text":"$288.90","boundingBox":[6.734,5.6845,7.4236,5.6845,7.4236,5.9842,6.7405,5.9842]}],"boundingBox":[1.0917,4.8203,7.4343,4.8206,7.4337,5.9877,1.0899,5.9869]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"Kampstr 9859","boundingBox":[1.1325,2.138,1.9359,2.138,1.9359,2.2669,1.1325,2.2669],"text":"Kampstr 9859","confidence":0.265,"type":"string"},"ShippingAddress":{"page":1,"valueString":"80074","boundingBox":[2.966,2.638,3.3203,2.638,3.3203,2.7374,2.966,2.7374],"text":"80074","confidence":0.274,"type":"string"},"InvoiceTotal":{"valueNumber":522.16,"page":1,"boundingBox":[6.6952,7.7682,7.413,7.7682,7.413,7.9471,6.6952,7.9471],"text":"$522.16","confidence":0.961,"type":"number"},"CustomerName":{"page":1,"valueString":"Colin Cai","boundingBox":[1.1277,1.6519,1.7581,1.6519,1.7581,1.7789,1.1277,1.7789],"text":"Colin Cai","confidence":0.443,"type":"string"},"VendorName":{"page":1,"valueString":"T.T TAILWIND TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND TRADERS","confidence":0.954,"type":"string"},"InvoiceId":{"page":1,"valueString":"102247","boundingBox":[1.1351,4.1481,1.5518,4.1481,1.5518,4.248,1.1351,4.248],"text":"102247","confidence":0.966,"type":"string"},"InvoiceDate":{"page":1,"boundingBox":[3.2074,4.1481,3.4868,4.1481,3.4868,4.248,3.2074,4.248],"text":"2016","confidence":0.466,"type":"date"},"SubTotal":{"valueNumber":488,"page":1,"boundingBox":[6.9241,6.4101,7.4152,6.4101,7.4152,6.5325,6.9241,6.5325],"text":"$488.00","confidence":0.972,"type":"number"},"TotalTax":{"valueNumber":34.16,"page":1,"boundingBox":[7.002,7.3793,7.4153,7.3793,7.4153,7.5017,7.002,7.5017],"text":"$34.16","confidence":0.965,"type":"number"},"VendorAddress":{"page":1,"valueString":"Phata 8858 V. Street London England W1Y 3RA United Kingdom","boundingBox":[5.9789,2.1511,6.7971,2.1511,6.7971,3.264,5.9789,3.264],"text":"Phata 8858 V. 
Street London England W1Y 3RA United Kingdom","confidence":0.371,"type":"string"},"CustomerAddressRecipient":{"page":1,"valueString":"Colin Cai","boundingBox":[1.1277,1.6519,1.7581,1.6519,1.7581,1.7789,1.1277,1.7789],"text":"Colin Cai","confidence":0.443,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":211.86,\"text\":\"$211.86\",\"boundingBox\":[6.8637,5.1789,7.3548,5.1789,7.3548,5.3013,6.8637,5.3013],\"page\":1,\"confidence\":0.893},\"Description\":{\"type\":\"string\",\"valueString\":\"Big Metal Shelving\",\"text\":\"Big Metal Shelving\",\"boundingBox\":[2.0356,5.1859,3.1214,5.1859,3.1214,5.3193,2.0356,5.3193],\"page\":1,\"confidence\":0.894},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"43\",\"text\":\"43\",\"boundingBox\":[1.2043,5.1897,1.3393,5.1897,1.3393,5.2891,1.2043,5.2891],\"page\":1,\"confidence\":0.73},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.1897,1.7518,5.1897,1.7518,5.2876,1.6933,5.2876],\"page\":1,\"confidence\":0.763},\"Tax\":{\"type\":\"number\",\"valueNumber\":13.86,\"text\":\"$13.86\",\"boundingBox\":[6.2822,5.1792,6.6602,5.1792,6.6602,5.3013,6.2822,5.3013],\"page\":1,\"confidence\":0.744},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":99,\"text\":\"$99.00\",\"boundingBox\":[4.4822,5.1792,4.8601,5.1792,4.8601,5.3013,4.4822,5.3013],\"page\":1,\"confidence\":0.893}},\"text\":\"43 2 Big Metal Shelving $99.00 $0.00 0% $13.86 $211.86\",\"boundingBox\":[1.2043,5.1789,7.3548,5.1789,7.3548,5.3193,1.2043,5.3193],\"page\":1,\"confidence\":0.807}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":21.4,\"text\":\"$21.40\",\"boundingBox\":[6.9416,5.4789,7.3548,5.4789,7.3548,5.6013,6.9416,5.6013],\"page\":1,\"confidence\":0.899},\"Description\":{\"type\":\"string\",\"valueString\":\"Gardering\",\"text\":\"Gardering\",\"boundingBox\":[2.0294,5.4859,2.6072,5.4859,2.6072,5.6193,2.0294,5.6193],\"page\":1,\"confidence\":0.955},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"30\",\"text\":\"30\",\"boundingBox\":[1.2114,5.4897,1.3437,5.4897,1.3437,5.5891,1.2114,5.5891],\"page\":1,\"confidence\":0.712},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.4897,1.7518,5.4897,1.7518,5.5876,1.6933,5.5876],\"page\":1,\"confidence\":0.763},\"Tax\":{\"type\":\"number\",\"valueNumber\":1.4,\"text\":\"$1.40\",\"boundingBox\":[6.3552,5.4792,6.6601,5.4792,6.6601,5.6013,6.3552,5.6013],\"page\":1,\"confidence\":0.78},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":10,\"text\":\"$10.00\",\"boundingBox\":[4.4822,5.4792,4.8601,5.4792,4.8601,5.6013,4.4822,5.6013],\"page\":1,\"confidence\":0.887}},\"text\":\"30 2 Gardering $10.00 $0.00 0% $1.40 $21.40\",\"boundingBox\":[1.2114,5.4789,7.3548,5.4789,7.3548,5.6193,1.2114,5.6193],\"page\":1,\"confidence\":0.701}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":288.9,\"text\":\"$288.90\",\"boundingBox\":[6.8637,5.7789,7.3548,5.7789,7.3548,5.9013,6.8637,5.9013],\"page\":1,\"confidence\":0.899},\"Description\":{\"type\":\"string\",\"valueString\":\"Craftsman 100 ft. L x 5/8 in.\",\"text\":\"Craftsman 100 ft. 
L x 5/8 in.\",\"boundingBox\":[2.0294,5.7844,3.6439,5.7844,3.6439,5.9032,2.0294,5.9032],\"page\":1,\"confidence\":0.887},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"26\",\"text\":\"26\",\"boundingBox\":[1.2105,5.7897,1.3439,5.7897,1.3439,5.8891,1.2105,5.8891],\"page\":1,\"confidence\":0.724},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.7897,1.7491,5.7897,1.7491,5.8891,1.6943,5.8891],\"page\":1,\"confidence\":0.769},\"Tax\":{\"type\":\"number\",\"valueNumber\":18.9,\"text\":\"$18.90\",\"boundingBox\":[6.2822,5.7792,6.6601,5.7792,6.6601,5.9013,6.2822,5.9013],\"page\":1,\"confidence\":0.718},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":100,\"text\":\"$100.00\",\"boundingBox\":[4.4092,5.7792,4.8601,5.7792,4.8601,5.9013,4.4092,5.9013],\"page\":1,\"confidence\":0.894}},\"text\":\"26 3 Craftsman 100 ft. L x 5/8 in. $100.00 $30.00 10% $18.90 $288.90\",\"boundingBox\":[1.2105,5.7789,7.3548,5.7789,7.3548,5.9032,1.2105,5.9032],\"page\":1,\"confidence\":0.835}"]}}}]}}},{"0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","2":{"status":"succeeded","createdDateTime":"2021-10-26T22:39:25Z","lastUpdatedDateTime":"2021-10-26T22:39:31Z","analyzeResult":{"version":"2.1.0","readResults":[{"angle":0,"page":1,"height":11,"unit":"inch","width":8.5}],"pageResults":[{"page":1,"tables":[{"rows":5,"columns":8,"cells":[{"rowIndex":0,"isHeader":true,"boundingBox":[1.102,4.8187,1.4403,4.8187,1.4403,5.0838,1.102,5.0838],"text":"Itm","columnIndex":0},{"rowIndex":0,"isHeader":true,"boundingBox":[1.4403,4.8187,1.9087,4.8187,1.9087,5.0838,1.4403,5.0838],"text":"Qty","columnIndex":1},{"rowIndex":0,"isHeader":true,"boundingBox":[1.9087,4.8187,4.3027,4.8187,4.3027,5.0838,1.9087,5.0838],"text":"Description","columnIndex":2},{"rowIndex":0,"isHeader":true,"boundingBox":[4.3027,4.8187,4.9402,4.8187,4.9402,5.0838,4.3027,5.0838],"text":"Price","columnIndex":3},{"rowIndex":0,"isHeader":true,"boundingBox":[4.9402,4.8187,5.6493,4.8187,5.6493,5.0838,4.9402,5.0838],"text":"Discount","columnIndex":4},{"rowIndex":0,"isHeader":true,"boundingBox":[5.6493,4.8187,6.1307,4.8187,6.1307,5.0838,5.6493,5.0838],"text":"(Pct)","columnIndex":5},{"rowIndex":0,"isHeader":true,"boundingBox":[6.1307,4.8187,6.7357,4.8187,6.7357,5.0838,6.1307,5.0838],"text":"Tax","columnIndex":6},{"rowIndex":0,"isHeader":true,"boundingBox":[6.7357,4.8187,7.4318,4.8248,7.4318,5.0838,6.7357,5.0838],"text":"LineTotal","columnIndex":7},{"rowIndex":1,"columnIndex":0,"text":"61","boundingBox":[1.102,5.0838,1.4403,5.0838,1.4403,5.3798,1.102,5.3798]},{"rowIndex":1,"columnIndex":1,"text":"2","boundingBox":[1.4403,5.0838,1.9087,5.0838,1.9087,5.3798,1.4403,5.3798]},{"rowIndex":1,"columnIndex":2,"text":"One sat on shoe 
gnome","boundingBox":[1.9087,5.0838,4.3027,5.0838,4.3027,5.3798,1.9087,5.3798]},{"rowIndex":1,"columnIndex":3,"text":"$54.00","boundingBox":[4.3027,5.0838,4.9402,5.0838,4.9402,5.3798,4.3027,5.3798]},{"rowIndex":1,"columnIndex":4,"text":"$10.80","boundingBox":[4.9402,5.0838,5.6493,5.0838,5.6493,5.3798,4.9402,5.3798]},{"rowIndex":1,"columnIndex":5,"text":"10%","boundingBox":[5.6493,5.0838,6.1307,5.0838,6.1307,5.3798,5.6493,5.3798]},{"rowIndex":1,"columnIndex":6,"text":"$6.80","boundingBox":[6.1307,5.0838,6.7357,5.0838,6.7357,5.3798,6.1307,5.3798]},{"rowIndex":1,"columnIndex":7,"text":"$104.00","boundingBox":[6.7357,5.0838,7.4318,5.0838,7.4318,5.3798,6.7357,5.3798]},{"rowIndex":2,"columnIndex":0,"text":"2","boundingBox":[1.102,5.3798,1.4403,5.3798,1.4403,5.682,1.102,5.682]},{"rowIndex":2,"columnIndex":1,"text":"2","boundingBox":[1.4403,5.3798,1.9087,5.3798,1.9087,5.682,1.4403,5.682]},{"rowIndex":2,"columnIndex":2,"text":"Refrigerator 1.7 cu. ft. 110 watts","boundingBox":[1.9087,5.3798,4.3027,5.3798,4.3027,5.682,1.9087,5.682]},{"rowIndex":2,"columnIndex":3,"text":"$200.00","boundingBox":[4.3027,5.3798,4.9402,5.3798,4.9402,5.682,4.3027,5.682]},{"rowIndex":2,"columnIndex":4,"text":"$0.00","boundingBox":[4.9402,5.3798,5.6493,5.3798,5.6493,5.682,4.9402,5.682]},{"rowIndex":2,"columnIndex":5,"text":"0%","boundingBox":[5.6493,5.3798,6.1307,5.3798,6.1307,5.682,5.6493,5.682]},{"rowIndex":2,"columnIndex":6,"text":"$28.00","boundingBox":[6.1307,5.3798,6.7357,5.3798,6.7357,5.682,6.1307,5.682]},{"rowIndex":2,"columnIndex":7,"text":"$428.00","boundingBox":[6.7357,5.3798,7.4318,5.3798,7.4318,5.682,6.7357,5.682]},{"rowIndex":3,"columnIndex":0,"text":"31","boundingBox":[1.102,5.682,1.4403,5.682,1.4403,5.978,1.102,5.978]},{"rowIndex":3,"columnIndex":1,"text":"3","boundingBox":[1.4403,5.682,1.9087,5.682,1.9087,5.978,1.4403,5.978]},{"rowIndex":3,"columnIndex":2,"text":"Celebrations C9","boundingBox":[1.9087,5.682,4.3027,5.682,4.3027,5.9842,1.9087,5.978]},{"rowIndex":3,"columnIndex":3,"text":"$10.00","boundingBox":[4.3027,5.682,4.9402,5.682,4.9402,5.9842,4.3027,5.9842]},{"rowIndex":3,"columnIndex":4,"text":"$3.00","boundingBox":[4.9402,5.682,5.6493,5.682,5.6493,5.9842,4.9402,5.9842]},{"rowIndex":3,"columnIndex":5,"text":"10%","boundingBox":[5.6493,5.682,6.1307,5.682,6.1307,5.9842,5.6493,5.9842]},{"rowIndex":3,"columnIndex":6,"text":"$1.89","boundingBox":[6.1307,5.682,6.7357,5.682,6.7357,5.9842,6.1307,5.9842]},{"rowIndex":3,"columnIndex":7,"text":"$28.89","boundingBox":[6.7357,5.682,7.4318,5.682,7.4318,5.9842,6.7357,5.9842]},{"rowIndex":4,"columnIndex":0,"text":"26","boundingBox":[1.102,5.978,1.4403,5.978,1.4468,6.2802,1.102,6.2864]},{"rowIndex":4,"columnIndex":1,"text":"1","boundingBox":[1.4403,5.978,1.9087,5.978,1.9152,6.2802,1.4468,6.2802]},{"rowIndex":4,"columnIndex":2,"text":"Craftsman 100 ft. 
L x 5/8 in.","boundingBox":[1.9087,5.978,4.3027,5.9842,4.3027,6.2802,1.9152,6.2802]},{"rowIndex":4,"columnIndex":3,"text":"$100.00","boundingBox":[4.3027,5.9842,4.9402,5.9842,4.9402,6.2802,4.3027,6.2802]},{"rowIndex":4,"columnIndex":4,"text":"$10.00","boundingBox":[4.9402,5.9842,5.6493,5.9842,5.6493,6.2802,4.9402,6.2802]},{"rowIndex":4,"columnIndex":5,"text":"10%","boundingBox":[5.6493,5.9842,6.1307,5.9842,6.1372,6.2802,5.6493,6.2802]},{"rowIndex":4,"columnIndex":6,"text":"$6.30","boundingBox":[6.1307,5.9842,6.7357,5.9842,6.7422,6.2802,6.1372,6.2802]},{"rowIndex":4,"columnIndex":7,"text":"$96.30","boundingBox":[6.7357,5.9842,7.4318,5.9842,7.4318,6.2802,6.7422,6.2802]}],"boundingBox":[1.09,4.8198,7.4327,4.8203,7.4323,6.2887,1.0887,6.2881]}]}],"documentResults":[{"docType":"prebuilt:invoice","pageRange":[1,1],"fields":{"CustomerAddress":{"page":1,"valueString":"2957 Tri-state Avenue Cambridge","boundingBox":[1.1272,2.1342,3.6131,2.1342,3.6131,2.2676,1.1272,2.2676],"text":"2957 Tri-state Avenue Cambridge","confidence":0.381,"type":"string"},"InvoiceTotal":{"valueNumber":657.19,"page":1,"boundingBox":[6.6952,8.0682,7.4109,8.0682,7.4109,8.2471,6.6952,8.2471],"text":"$657.19","confidence":0.962,"type":"number"},"CustomerName":{"page":1,"valueString":"Roy","boundingBox":[1.1354,1.6587,1.3971,1.6587,1.3971,1.8161,1.1354,1.8161],"text":"Roy","confidence":0.223,"type":"string"},"VendorName":{"page":1,"valueString":"T.T TAILWIND TRADERS","boundingBox":[4.909,1.4371,7.3107,1.4184,7.3155,2.0248,4.9137,2.0434],"text":"T.T TAILWIND TRADERS","confidence":0.952,"type":"string"},"VendorAddressRecipient":{"page":1,"valueString":"Rgerlakmog","boundingBox":[5.9879,2.1511,6.5796,2.1511,6.5796,2.264,5.9879,2.264],"text":"Rgerlakmog","confidence":0.254,"type":"string"},"InvoiceId":{"page":1,"valueString":"92549","boundingBox":[1.1263,4.1486,1.4784,4.1486,1.4784,4.248,1.1263,4.248],"text":"92549","confidence":0.965,"type":"string"},"InvoiceDate":{"page":1,"boundingBox":[2.7306,4.1486,2.7921,4.1486,2.7921,4.2479,2.7306,4.2479],"text":"8","confidence":0.276,"type":"date"},"SubTotal":{"valueNumber":614.2,"page":1,"boundingBox":[6.9241,6.7101,7.4152,6.7101,7.4152,6.8325,6.9241,6.8325],"text":"$614.20","confidence":0.97,"type":"number"},"TotalTax":{"valueNumber":42.99,"page":1,"boundingBox":[7.002,7.6793,7.4138,7.6793,7.4138,7.8017,7.002,7.8017],"text":"$42.99","confidence":0.969,"type":"number"},"VendorAddress":{"page":1,"valueString":"4186 Silver Oaks Pl. Newport Beach California 92625 United States","boundingBox":[5.9782,2.3511,6.9497,2.3511,6.9497,3.2384,5.9782,3.2384],"text":"4186 Silver Oaks Pl. 
Newport Beach California 92625 United States","confidence":0.401,"type":"string"},"Items":{"type":"array","valueArray":["{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":104,\"text\":\"$104.00\",\"boundingBox\":[6.8637,5.1789,7.3548,5.1789,7.3548,5.3013,6.8637,5.3013],\"page\":1,\"confidence\":0.888},\"Description\":{\"type\":\"string\",\"valueString\":\"One sat on shoe gnome\",\"text\":\"One sat on shoe gnome\",\"boundingBox\":[2.0294,5.1859,3.4491,5.1859,3.4491,5.3193,2.0294,5.3193],\"page\":1,\"confidence\":0.85},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"61\",\"text\":\"61\",\"boundingBox\":[1.2106,5.1892,1.3242,5.1892,1.3242,5.2891,1.2106,5.2891],\"page\":1,\"confidence\":0.705},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.1897,1.7518,5.1897,1.7518,5.2876,1.6933,5.2876],\"page\":1,\"confidence\":0.778},\"Tax\":{\"type\":\"number\",\"valueNumber\":6.8,\"text\":\"$6.80\",\"boundingBox\":[6.3552,5.1792,6.6601,5.1792,6.6601,5.3013,6.3552,5.3013],\"page\":1,\"confidence\":0.772},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":54,\"text\":\"$54.00\",\"boundingBox\":[4.4822,5.1792,4.8601,5.1792,4.8601,5.3013,4.4822,5.3013],\"page\":1,\"confidence\":0.839}},\"text\":\"61 2 One sat on shoe gnome $54.00 $10.80 10% $6.80 $104.00\",\"boundingBox\":[1.2106,5.1789,7.3548,5.1789,7.3548,5.3193,1.2106,5.3193],\"page\":1,\"confidence\":0.88}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":428,\"text\":\"$428.00\",\"boundingBox\":[6.8637,5.4789,7.3548,5.4789,7.3548,5.6013,6.8637,5.6013],\"page\":1,\"confidence\":0.888},\"Description\":{\"type\":\"string\",\"valueString\":\"Refrigerator 1.7 cu. ft. 110 watts\",\"text\":\"Refrigerator 1.7 cu. ft. 110 watts\",\"boundingBox\":[2.0356,5.4844,3.9092,5.4844,3.9092,5.6193,2.0356,5.6193],\"page\":1,\"confidence\":0.833},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"2\",\"text\":\"2\",\"boundingBox\":[1.247,5.4897,1.3055,5.4897,1.3055,5.5876,1.247,5.5876],\"page\":1,\"confidence\":0.566},\"Quantity\":{\"type\":\"number\",\"valueNumber\":2,\"text\":\"2\",\"boundingBox\":[1.6933,5.4897,1.7518,5.4897,1.7518,5.5876,1.6933,5.5876],\"page\":1,\"confidence\":0.781},\"Tax\":{\"type\":\"number\",\"valueNumber\":28,\"text\":\"$28.00\",\"boundingBox\":[6.2822,5.4792,6.6601,5.4792,6.6601,5.6013,6.2822,5.6013],\"page\":1,\"confidence\":0.799},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":200,\"text\":\"$200.00\",\"boundingBox\":[4.4092,5.4792,4.8601,5.4792,4.8601,5.6013,4.4092,5.6013],\"page\":1,\"confidence\":0.862}},\"text\":\"2 2 Refrigerator 1.7 cu. ft. 
110 watts $200.00 $0.00 0% $28.00 $428.00\",\"boundingBox\":[1.247,5.4789,7.3548,5.4789,7.3548,5.6193,1.247,5.6193],\"page\":1,\"confidence\":0.882}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":28.89,\"text\":\"$28.89\",\"boundingBox\":[6.9416,5.7789,7.3534,5.7789,7.3534,5.9013,6.9416,5.9013],\"page\":1,\"confidence\":0.896},\"Description\":{\"type\":\"string\",\"valueString\":\"Celebrations C9\",\"text\":\"Celebrations C9\",\"boundingBox\":[2.0294,5.7859,2.9552,5.7859,2.9552,5.8891,2.0294,5.8891],\"page\":1,\"confidence\":0.885},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"31\",\"text\":\"31\",\"boundingBox\":[1.2114,5.7892,1.3242,5.7892,1.3242,5.8891,1.2114,5.8891],\"page\":1,\"confidence\":0.696},\"Quantity\":{\"type\":\"number\",\"valueNumber\":3,\"text\":\"3\",\"boundingBox\":[1.6943,5.7897,1.7491,5.7897,1.7491,5.8891,1.6943,5.8891],\"page\":1,\"confidence\":0.831},\"Tax\":{\"type\":\"number\",\"valueNumber\":1.89,\"text\":\"$1.89\",\"boundingBox\":[6.3552,5.7792,6.659,5.7792,6.659,5.9013,6.3552,5.9013],\"page\":1,\"confidence\":0.801},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":10,\"text\":\"$10.00\",\"boundingBox\":[4.4822,5.7792,4.8601,5.7792,4.8601,5.9013,4.4822,5.9013],\"page\":1,\"confidence\":0.877}},\"text\":\"31 3 Celebrations C9 $10.00 $3.00 10% $1.89 $28.89\",\"boundingBox\":[1.2114,5.7789,7.3534,5.7789,7.3534,5.9013,1.2114,5.9013],\"page\":1,\"confidence\":0.804}","{\"type\":\"object\",\"valueObject\":{\"Amount\":{\"type\":\"number\",\"valueNumber\":96.3,\"text\":\"$96.30\",\"boundingBox\":[6.9416,6.0789,7.3548,6.0789,7.3548,6.2013,6.9416,6.2013],\"page\":1,\"confidence\":0.9},\"Description\":{\"type\":\"string\",\"valueString\":\"Craftsman 100 ft. L x 5/8 in.\",\"text\":\"Craftsman 100 ft. L x 5/8 in.\",\"boundingBox\":[2.0294,6.0844,3.6439,6.0844,3.6439,6.2032,2.0294,6.2032],\"page\":1,\"confidence\":0.853},\"ProductCode\":{\"type\":\"string\",\"valueString\":\"26\",\"text\":\"26\",\"boundingBox\":[1.2105,6.0897,1.3439,6.0897,1.3439,6.1891,1.2105,6.1891],\"page\":1,\"confidence\":0.741},\"Quantity\":{\"type\":\"number\",\"valueNumber\":1,\"text\":\"1\",\"boundingBox\":[1.7012,6.0892,1.7341,6.0892,1.7341,6.1876,1.7012,6.1876],\"page\":1,\"confidence\":0.799},\"Tax\":{\"type\":\"number\",\"valueNumber\":6.3,\"text\":\"$6.30\",\"boundingBox\":[6.3552,6.0792,6.6601,6.0792,6.6601,6.2013,6.3552,6.2013],\"page\":1,\"confidence\":0.778},\"UnitPrice\":{\"type\":\"number\",\"valueNumber\":100,\"text\":\"$100.00\",\"boundingBox\":[4.4092,6.0792,4.8601,6.0792,4.8601,6.2013,4.4092,6.2013],\"page\":1,\"confidence\":0.887}},\"text\":\"26 1 Craftsman 100 ft. L x 5/8 in. 
$100.00 $10.00 10% $6.30 $96.30\",\"boundingBox\":[1.2105,6.0789,7.3548,6.0789,7.3548,6.2032,1.2105,6.2032],\"page\":1,\"confidence\":0.882}"]}}}]}}}],"schema":[{"key":"0","name":"url","type":"string"},{"key":"1","name":"errors","type":"StructType(StructField(response,StringType,true), StructField(status,StructType(StructField(protocolVersion,StructType(StructField(protocol,StringType,true), StructField(major,IntegerType,false), StructField(minor,IntegerType,false)),true), StructField(statusCode,IntegerType,false), StructField(reasonPhrase,StringType,true)),true))"},{"key":"2","name":"invoices","type":"StructType(StructField(status,StringType,true), StructField(createdDateTime,StringType,true), StructField(lastUpdatedDateTime,StringType,true), StructField(analyzeResult,StructType(StructField(version,StringType,true), StructField(readResults,ArrayType(StructType(StructField(page,IntegerType,true), StructField(language,StringType,true), StructField(angle,DoubleType,true), StructField(width,DoubleType,true), StructField(height,DoubleType,true), StructField(unit,StringType,true), StructField(lines,ArrayType(StructType(StructField(boundingBox,ArrayType(DoubleType,true),true), StructField(text,StringType,true), StructField(words,ArrayType(StructType(StructField(boundingBox,ArrayType(DoubleType,true),true), StructField(text,StringType,true), StructField(confidence,DoubleType,true)),true),true)),true),true)),true),true), StructField(pageResults,ArrayType(StructType(StructField(page,IntegerType,true), StructField(keyValuePairs,ArrayType(StructType(StructField(key,StructType(StructField(text,StringType,true), StructField(boundingBox,ArrayType(DoubleType,true),true)),true), StructField(value,StructType(StructField(text,StringType,true), StructField(boundingBox,ArrayType(DoubleType,true),true)),true)),true),true), StructField(tables,ArrayType(StructType(StructField(rows,IntegerType,true), StructField(columns,IntegerType,true), StructField(cells,ArrayType(StructType(StructField(rowIndex,IntegerType,true), StructField(columnIndex,IntegerType,true), StructField(text,StringType,true), StructField(boundingBox,ArrayType(DoubleType,true),true), StructField(isHeader,BooleanType,true), StructField(elements,ArrayType(StringType,true),true)),true),true), StructField(boundingBox,ArrayType(DoubleType,true),true)),true),true)),true),true), StructField(documentResults,ArrayType(StructType(StructField(docType,StringType,true), StructField(pageRange,ArrayType(IntegerType,true),true), StructField(fields,MapType(StringType,StructType(StructField(type,StringType,true), StructField(page,IntegerType,true), StructField(confidence,DoubleType,true), StructField(boundingBox,ArrayType(DoubleType,true),true), StructField(text,StringType,true), StructField(valueString,StringType,true), StructField(valuePhoneNumber,StringType,true), StructField(valueNumber,DoubleType,true), StructField(valueDate,StringType,true), StructField(valueTime,StringType,true), StructField(valueObject,StringType,true), 
StructField(valueArray,ArrayType(StringType,true),true)),true),true)),true),true)),true))"}]},"isSummary":false,"language":"scala"},"persist_state":{"view":{"type":"details","tableOptions":{},"chartOptions":{"seriesFieldKeys":["0"],"categoryFieldKeys":["0"],"isStacked":false,"aggregationType":"count","chartType":"bar"}}}},"bdf21081-09a3-4c55-a766-f02bf5f7ca04":{"type":"Synapse.DataFrame","sync_state":{"table":{"rows":[],"schema":[{"key":"0","name":"ProductCode","type":"string"},{"key":"1","name":"Tax","type":"double"},{"key":"2","name":"Quantity","type":"double"},{"key":"3","name":"UnitPrice","type":"double"},{"key":"4","name":"Description","type":"string"},{"key":"5","name":"Amount","type":"double"},{"key":"6","name":"url","type":"string"},{"key":"7","name":"CustomerAddress","type":"string"},{"key":"8","name":"CustomerName","type":"string"},{"key":"9","name":"InvoiceDate","type":"string"},{"key":"10","name":"InvoiceId","type":"string"},{"key":"11","name":"InvoiceTotal","type":"double"},{"key":"12","name":"SubTotal","type":"double"},{"key":"13","name":"TotalTax","type":"double"},{"key":"14","name":"VendorAddress","type":"string"},{"key":"15","name":"VendorAddressRecipient","type":"string"},{"key":"16","name":"VendorName","type":"string"},{"key":"17","name":"CustomerAddressRecipient","type":"string"},{"key":"18","name":"ShippingAddress","type":"string"},{"key":"19","name":"ShippingAddressRecipient","type":"string"}]},"isSummary":false,"language":"scala"},"persist_state":{"view":{"type":"details","tableOptions":{},"chartOptions":{"seriesFieldKeys":["1"],"categoryFieldKeys":["0"],"isStacked":false,"aggregationType":"sum","chartType":"bar"}}}},"c63cd1a0-8216-496d-9d04-c754d3e89d94":{"type":"Synapse.DataFrame","sync_state":{"table":{"rows":[{"12":"Mackenzie Gray","8":"78.4","4":"22671","11":"T.T TAILWIND TRADERS","9":"22, rue du Puits Dixme","5":"1198.4","6":[{"ProductCode":"39","Tax":25.2,"Quantity":2,"UnitPrice":200,"Description":"Coffee Maker Red","Amount":385.2},{"ProductCode":"31","Tax":0.7,"Quantity":1,"UnitPrice":10,"Description":"Celebrations C9","Amount":10.7},{"ProductCode":"12","Tax":52.5,"Quantity":3,"UnitPrice":250,"Description":"Black Bathing System Classic 18 in. H x 60 in. 
W x 32.5","Amount":802.5}],"1":"34, rue des Grands Champs Versailles","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice22671.pdf","2":"Mackenzie Gray","7":"1120.0","3":"11"},{"8":"84.6","4":"28073","11":"T.T TAILWIND TRADERS","9":"Maubeuge","13":"San Gabriel","5":"1293.1","10":"Sarosgawk","6":[{"ProductCode":"17","Tax":15.4,"Quantity":2,"UnitPrice":110,"Description":"Blend Solid White Sheer Curtains","Amount":235.4},{"ProductCode":"56","Tax":43.68,"Quantity":2,"UnitPrice":312,"Description":"Rechargeable screwdriver with extra battery","Amount":667.68},{"ProductCode":"40","Tax":25.52,"Quantity":3,"UnitPrice":135,"Description":"Extractor Steal","Amount":390.02}],"1":"United States","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice28073.pdf","7":"1208.5","3":"2021-10-05"},{"8":"52.5","4":"64808","11":"T.T TAILWIND TRADERS","9":"Circle","13":"828, rue de Berri","5":"802.5","10":"Braeo","6":[{"ProductCode":"43","Tax":20.79,"Quantity":3,"UnitPrice":99,"Description":"Big Metal Shelving","Amount":317.79},{"ProductCode":"42","Tax":18.9,"Quantity":3,"UnitPrice":90,"Description":"Metal Shelving","Amount":288.9},{"ProductCode":"46","Tax":8.61,"Quantity":1,"UnitPrice":123,"Description":"Measuring Tape","Amount":131.61},{"ProductCode":"21","Tax":4.2,"Quantity":3,"UnitPrice":25,"Description":"Curtain Rod 48 in","Amount":64.2}],"1":"3923 Dew Drop","14":"Villeneuve-d'Ascq","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","2":"Villeneuve-d'Ascq","7":"750.0","3":"2021-03-12"},{"12":"Dluhbio","8":"123.06","4":"67164","11":"T.T TAILWIND TRADERS","13":"45000","5":"1881.06","6":[{"ProductCode":"36","Tax":82.95,"Quantity":3,"UnitPrice":395,"Description":"Wood Table","Amount":1267.95},{"ProductCode":"20","Tax":16.8,"Quantity":2,"UnitPrice":120,"Description":"White Window","Amount":256.8},{"ProductCode":"25","Tax":14.7,"Quantity":3,"UnitPrice":70,"Description":"Indoor Kit Gardering","Amount":224.7},{"ProductCode":"46","Tax":8.61,"Quantity":1,"UnitPrice":123,"Description":"Measuring Tape","Amount":131.61}],"1":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","2":"Clinton Gutierrez","7":"1758.0"},{"12":"Ashlee Raje","8":"85.2","4":"80110","11":"T.T TAILWIND TRADERS","9":"4559 Loop Beaverton Oregon 97005 States","13":"Street","5":"1302.4","10":"Alvotue","6":[{"ProductCode":"9","Tax":37.8,"Quantity":3,"UnitPrice":200,"Description":"Bathing System Classic 18 in. H x 60 in. 
W x 32.5","Amount":577.8},{"ProductCode":"59","Tax":19.32,"Quantity":3,"UnitPrice":92,"Description":"Two red garden gnomes","Amount":295.32},{"ProductCode":"58","Tax":10.58,"Quantity":3,"UnitPrice":56,"Description":"Single red garden gnome","Amount":161.78},{"ProductCode":"32","Tax":17.5,"Quantity":1,"UnitPrice":250,"Description":"Artificial Tree","Amount":267.5}],"1":"Julpum","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","2":"Ashlee Raje","7":"1217.2","3":"October"},{"12":"Willie","8":"102.89","4":"71864","11":"T.T TAILWIND TRADERS","5":"1572.69","10":"Ifiaeh","6":[{"ProductCode":"36","Tax":55.3,"Quantity":2,"UnitPrice":395,"Description":"Wood Table","Amount":845.3},{"ProductCode":"56","Tax":19.66,"Quantity":1,"UnitPrice":312,"Description":"Rechargeable screwdriver with extra battery","Amount":300.46},{"ProductCode":"14","Tax":6.93,"Quantity":1,"UnitPrice":99,"Description":"Bathroom Sink Faucet","Amount":105.93},{"ProductCode":"48","Tax":21,"Quantity":3,"UnitPrice":100,"Description":"Hammer","Amount":321}],"1":"27, place de Brazaville Roubaix Nord 59100 France","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","2":"Pal","7":"1469.8"},{"12":"Misty Xie","8":"53.06","4":"80066","11":"T.T TAILWIND TRADERS","9":"376 Amador Valley Blvd.","5":"811.06","10":"Rloefos","6":[{"ProductCode":"49","Tax":15.4,"Quantity":2,"UnitPrice":110,"Description":"Screwdriver","Amount":235.4},{"ProductCode":"54","Tax":17.5,"Quantity":1,"UnitPrice":250,"Description":"Yellow Rechargeable screwdriver","Amount":267.5},{"ProductCode":"22","Tax":1.26,"Quantity":2,"UnitPrice":10,"Description":"Steel Passage Door Knob","Amount":19.26},{"ProductCode":"40","Tax":18.9,"Quantity":2,"UnitPrice":135,"Description":"Extractor Steal","Amount":288.9}],"1":"6058 Hill Street","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","2":"Misty Xie","7":"758.0","3":"4"},{"12":"Mrurc Potsdamer","8":"69.93","4":"83878","11":"T.T TAILWIND TRADERS","13":"4SJ","5":"1068.93","6":[{"ProductCode":"47","Tax":11.13,"Quantity":1,"UnitPrice":159,"Description":"Multi Function Drill","Amount":170.13},{"ProductCode":"53","Tax":6.3,"Quantity":1,"UnitPrice":90,"Description":"Stainless multi-tool plier","Amount":96.3},{"ProductCode":"32","Tax":52.5,"Quantity":3,"UnitPrice":250,"Description":"Artificial Tree","Amount":802.5}],"1":"Straße 242 Stuttgart Saarland 70511 Germany","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2015/Invoice83878.pdf","2":"Connie Liang","7":"999.0","3":"2015-11-04"},{"12":"Colin Cai","8":"34.16","4":"102247","11":"T.T TAILWIND TRADERS","9":"Phata 8858 V. Street London England W1Y 3RA United Kingdom","13":"80074","5":"522.16","6":[{"ProductCode":"43","Tax":13.86,"Quantity":2,"UnitPrice":99,"Description":"Big Metal Shelving","Amount":211.86},{"ProductCode":"30","Tax":1.4,"Quantity":2,"UnitPrice":10,"Description":"Gardering","Amount":21.4},{"ProductCode":"26","Tax":18.9,"Quantity":3,"UnitPrice":100,"Description":"Craftsman 100 ft. L x 5/8 in.","Amount":288.9}],"1":"Kampstr 9859","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice102247.pdf","2":"Colin Cai","7":"488.0","3":"2016"},{"8":"42.99","4":"92549","11":"T.T TAILWIND TRADERS","9":"4186 Silver Oaks Pl. 
Newport Beach California 92625 United States","5":"657.19","10":"Rgerlakmog","6":[{"ProductCode":"61","Tax":6.8,"Quantity":2,"UnitPrice":54,"Description":"One sat on shoe gnome","Amount":104},{"ProductCode":"2","Tax":28,"Quantity":2,"UnitPrice":200,"Description":"Refrigerator 1.7 cu. ft. 110 watts","Amount":428},{"ProductCode":"31","Tax":1.89,"Quantity":3,"UnitPrice":10,"Description":"Celebrations C9","Amount":28.89},{"ProductCode":"26","Tax":6.3,"Quantity":1,"UnitPrice":100,"Description":"Craftsman 100 ft. L x 5/8 in.","Amount":96.3}],"1":"2957 Tri-state Avenue Cambridge","0":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","2":"Roy","7":"614.2","3":"8"}],"schema":[{"key":"0","name":"url","type":"string"},{"key":"1","name":"CustomerAddress","type":"string"},{"key":"2","name":"CustomerName","type":"string"},{"key":"3","name":"InvoiceDate","type":"string"},{"key":"4","name":"InvoiceId","type":"string"},{"key":"5","name":"InvoiceTotal","type":"double"},{"key":"6","name":"Items","type":"ArrayType(StructType(StructField(ProductCode,StringType,true), StructField(Tax,DoubleType,true), StructField(Quantity,DoubleType,true), StructField(UnitPrice,DoubleType,true), StructField(Description,StringType,true), StructField(Amount,DoubleType,true)),true)"},{"key":"7","name":"SubTotal","type":"double"},{"key":"8","name":"TotalTax","type":"double"},{"key":"9","name":"VendorAddress","type":"string"},{"key":"10","name":"VendorAddressRecipient","type":"string"},{"key":"11","name":"VendorName","type":"string"},{"key":"12","name":"CustomerAddressRecipient","type":"string"},{"key":"13","name":"ShippingAddress","type":"string"},{"key":"14","name":"ShippingAddressRecipient","type":"string"}]},"isSummary":false,"language":"scala"},"persist_state":{"view":{"type":"details","tableOptions":{},"chartOptions":{"seriesFieldKeys":["5"],"categoryFieldKeys":["0"],"isStacked":false,"aggregationType":"sum","chartType":"bar"}}}},"88f16e5a-cbc4-4aa3-8a39-456877298c4a":{"type":"Synapse.DataFrame","sync_state":{"table":{"rows":[{"12":"1120.0","8":"Mackenzie Gray","4":"Coffee Maker Red","11":"1198.4","9":"11","13":"78.4","16":"T.T TAILWIND TRADERS","5":"385.2","10":"22671","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice22671.pdf","1":"25.2","17":"Mackenzie Gray","14":"22, rue du Puits Dixme","0":"39","20":[{"to":"zh-Hans","text":"咖啡机红色"},{"to":"fr","text":"Cafetière Rouge"},{"to":"ru","text":"Кофеварка Красная"},{"to":"cy","text":"Gwneuthurwr Coffi Coch"}],"2":"2.0","7":"34, rue des Grands Champs Versailles","3":"200.0"},{"12":"1120.0","8":"Mackenzie Gray","4":"Celebrations C9","11":"1198.4","9":"11","13":"78.4","16":"T.T TAILWIND TRADERS","5":"10.7","10":"22671","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice22671.pdf","1":"0.7","17":"Mackenzie Gray","14":"22, rue du Puits Dixme","0":"31","20":[{"to":"zh-Hans","text":"庆祝活动 C9"},{"to":"fr","text":"Célébrations C9"},{"to":"ru","text":"Торжества C9"},{"to":"cy","text":"Dathliadau C9"}],"2":"1.0","7":"34, rue des Grands Champs Versailles","3":"10.0"},{"12":"1120.0","8":"Mackenzie Gray","4":"Black Bathing System Classic 18 in. H x 60 in. 
W x 32.5","11":"1198.4","9":"11","13":"78.4","16":"T.T TAILWIND TRADERS","5":"802.5","10":"22671","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice22671.pdf","1":"52.5","17":"Mackenzie Gray","14":"22, rue du Puits Dixme","0":"12","20":[{"to":"zh-Hans","text":"黑洗浴系统经典 18 在.H x 60 在。W x 32.5"},{"to":"fr","text":"Système de bain noir Classic 18 po. H x 60 po. L x 32,5"},{"to":"ru","text":"Черная система купания Classic 18 in. В x 60 в. Ш x 32,5"},{"to":"cy","text":"Clasur y System Ymdrochi Ddu 18 i mewn. H x 60 i mewn. W x 32.5"}],"2":"3.0","7":"34, rue des Grands Champs Versailles","3":"250.0"},{"12":"1208.5","4":"Blend Solid White Sheer Curtains","15":"Sarosgawk","11":"1293.1","9":"2021-10-05","13":"84.6","16":"T.T TAILWIND TRADERS","5":"235.4","10":"28073","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice28073.pdf","1":"15.4","14":"Maubeuge","0":"17","20":[{"to":"zh-Hans","text":"混合实心白色雪尔窗帘"},{"to":"fr","text":"Mélangez des rideaux transparents blancs solides"},{"to":"ru","text":"Смесь Сплошные белые шторы"},{"to":"cy","text":"Llenni Sheer Gwyn Solid Blend"}],"2":"2.0","18":"San Gabriel","7":"United States","3":"110.0"},{"12":"1208.5","4":"Rechargeable screwdriver with extra battery","15":"Sarosgawk","11":"1293.1","9":"2021-10-05","13":"84.6","16":"T.T TAILWIND TRADERS","5":"667.68","10":"28073","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice28073.pdf","1":"43.68","14":"Maubeuge","0":"56","20":[{"to":"zh-Hans","text":"可充电螺丝刀,带额外电池"},{"to":"fr","text":"Tournevis rechargeable avec batterie supplémentaire"},{"to":"ru","text":"Аккумуляторная отвертка с дополнительной батареей"},{"to":"cy","text":"Sgriwdreifer gellir ailgodi tâl tâl gyda batri ychwanegol"}],"2":"2.0","18":"San Gabriel","7":"United States","3":"312.0"},{"12":"1208.5","4":"Extractor Steal","15":"Sarosgawk","11":"1293.1","9":"2021-10-05","13":"84.6","16":"T.T TAILWIND TRADERS","5":"390.02","10":"28073","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2010/Invoice28073.pdf","1":"25.52","14":"Maubeuge","0":"40","20":[{"to":"zh-Hans","text":"提取器偷窃"},{"to":"fr","text":"Vol d’extracteur"},{"to":"ru","text":"Кража экстрактора"},{"to":"cy","text":"Dwyn Echdynnwr"}],"2":"3.0","18":"San Gabriel","7":"United States","3":"135.0"},{"12":"750.0","8":"Villeneuve-d'Ascq","19":"Villeneuve-d'Ascq","4":"Big Metal Shelving","15":"Braeo","11":"802.5","9":"2021-03-12","13":"52.5","16":"T.T TAILWIND TRADERS","5":"317.79","10":"64808","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","1":"20.79","14":"Circle","0":"43","20":[{"to":"zh-Hans","text":"大金属搁板"},{"to":"fr","text":"Grandes étagères métalliques"},{"to":"ru","text":"Большие металлические стеллажи"},{"to":"cy","text":"Silffoedd Metel Mawr"}],"2":"3.0","18":"828, rue de Berri","7":"3923 Dew Drop","3":"99.0"},{"12":"750.0","8":"Villeneuve-d'Ascq","19":"Villeneuve-d'Ascq","4":"Metal Shelving","15":"Braeo","11":"802.5","9":"2021-03-12","13":"52.5","16":"T.T TAILWIND TRADERS","5":"288.9","10":"64808","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","1":"18.9","14":"Circle","0":"42","20":[{"to":"zh-Hans","text":"金属搁板"},{"to":"fr","text":"Étagères métalliques"},{"to":"ru","text":"Металлические стеллажи"},{"to":"cy","text":"Silffoedd Metel"}],"2":"3.0","18":"828, rue de Berri","7":"3923 Dew Drop","3":"90.0"},{"12":"750.0","8":"Villeneuve-d'Ascq","19":"Villeneuve-d'Ascq","4":"Measuring 
Tape","15":"Braeo","11":"802.5","9":"2021-03-12","13":"52.5","16":"T.T TAILWIND TRADERS","5":"131.61","10":"64808","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","1":"8.61","14":"Circle","0":"46","20":[{"to":"zh-Hans","text":"卷尺"},{"to":"fr","text":"Mètre ruban"},{"to":"ru","text":"Мерная лента"},{"to":"cy","text":"Tâp Mesur"}],"2":"1.0","18":"828, rue de Berri","7":"3923 Dew Drop","3":"123.0"},{"12":"750.0","8":"Villeneuve-d'Ascq","19":"Villeneuve-d'Ascq","4":"Curtain Rod 48 in","15":"Braeo","11":"802.5","9":"2021-03-12","13":"52.5","16":"T.T TAILWIND TRADERS","5":"64.2","10":"64808","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice64808.pdf","1":"4.2","14":"Circle","0":"21","20":[{"to":"zh-Hans","text":"窗帘杆 48 在"},{"to":"fr","text":"Tringle à rideaux 48 po"},{"to":"ru","text":"Карниз 48 in"},{"to":"cy","text":"Rod Llennyrch 48 yn"}],"2":"3.0","18":"828, rue de Berri","7":"3923 Dew Drop","3":"25.0"},{"12":"1758.0","8":"Clinton Gutierrez","4":"Wood Table","11":"1881.06","13":"123.06","16":"T.T TAILWIND TRADERS","5":"1267.95","10":"67164","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","1":"82.95","17":"Dluhbio","0":"36","20":[{"to":"zh-Hans","text":"木桌"},{"to":"fr","text":"Table en bois"},{"to":"ru","text":"Деревянный стол"},{"to":"cy","text":"Tabl Pren"}],"2":"3.0","18":"45000","7":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","3":"395.0"},{"12":"1758.0","8":"Clinton Gutierrez","4":"White Window","11":"1881.06","13":"123.06","16":"T.T TAILWIND TRADERS","5":"256.8","10":"67164","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","1":"16.8","17":"Dluhbio","0":"20","20":[{"to":"zh-Hans","text":"白色窗口"},{"to":"fr","text":"Fenêtre blanche"},{"to":"ru","text":"Белое окно"},{"to":"cy","text":"Ffenestr Gwyn"}],"2":"2.0","18":"45000","7":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","3":"120.0"},{"12":"1758.0","8":"Clinton Gutierrez","4":"Indoor Kit Gardering","11":"1881.06","13":"123.06","16":"T.T TAILWIND TRADERS","5":"224.7","10":"67164","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","1":"14.7","17":"Dluhbio","0":"25","20":[{"to":"zh-Hans","text":"室内套件加德林"},{"to":"fr","text":"Kit Gardering intérieur"},{"to":"ru","text":"Гардеринг комплектов для помещений"},{"to":"cy","text":"Gardering Cit Dan Do"}],"2":"3.0","18":"45000","7":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","3":"70.0"},{"12":"1758.0","8":"Clinton Gutierrez","4":"Measuring Tape","11":"1881.06","13":"123.06","16":"T.T TAILWIND TRADERS","5":"131.61","10":"67164","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2013/Invoice67164.pdf","1":"8.61","17":"Dluhbio","0":"46","20":[{"to":"zh-Hans","text":"卷尺"},{"to":"fr","text":"Mètre ruban"},{"to":"ru","text":"Мерная лента"},{"to":"cy","text":"Tâp Mesur"}],"2":"1.0","18":"45000","7":"Platz des Landtags 404 Solingen Nordrhein-Westfalen 42651 Germany","3":"123.0"},{"12":"1217.2","8":"Ashlee Raje","4":"Bathing System Classic 18 in. H x 60 in. 
W x 32.5","15":"Alvotue","11":"1302.4","9":"October","13":"85.2","16":"T.T TAILWIND TRADERS","5":"577.8","10":"80110","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","1":"37.8","17":"Ashlee Raje","14":"4559 Loop Beaverton Oregon 97005 States","0":"9","20":[{"to":"zh-Hans","text":"沐浴系统经典 18 在.H x 60 在。W x 32.5"},{"to":"fr","text":"Système de bain Classic 18 po. H x 60 po. L x 32,5"},{"to":"ru","text":"Система купания Классическая 18 в. В x 60 в. Ш x 32,5"},{"to":"cy","text":"Clasurol y System Ymdrochi 18 i mewn. H x 60 i mewn. W x 32.5"}],"2":"3.0","18":"Street","7":"Julpum","3":"200.0"},{"12":"1217.2","8":"Ashlee Raje","4":"Two red garden gnomes","15":"Alvotue","11":"1302.4","9":"October","13":"85.2","16":"T.T TAILWIND TRADERS","5":"295.32","10":"80110","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","1":"19.32","17":"Ashlee Raje","14":"4559 Loop Beaverton Oregon 97005 States","0":"59","20":[{"to":"zh-Hans","text":"两个红色的花园侏儒"},{"to":"fr","text":"Deux nains de jardin rouges"},{"to":"ru","text":"Два красных садовых гнома"},{"to":"cy","text":"Dwy gnomes gardd goch"}],"2":"3.0","18":"Street","7":"Julpum","3":"92.0"},{"12":"1217.2","8":"Ashlee Raje","4":"Single red garden gnome","15":"Alvotue","11":"1302.4","9":"October","13":"85.2","16":"T.T TAILWIND TRADERS","5":"161.78","10":"80110","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","1":"10.58","17":"Ashlee Raje","14":"4559 Loop Beaverton Oregon 97005 States","0":"58","20":[{"to":"zh-Hans","text":"单红花园侏儒"},{"to":"fr","text":"Gnome de jardin rouge unique"},{"to":"ru","text":"Одиночный красный садовый гном"},{"to":"cy","text":"Gnome gardd goch sengl"}],"2":"3.0","18":"Street","7":"Julpum","3":"56.0"},{"12":"1217.2","8":"Ashlee Raje","4":"Artificial Tree","15":"Alvotue","11":"1302.4","9":"October","13":"85.2","16":"T.T TAILWIND TRADERS","5":"267.5","10":"80110","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80110.pdf","1":"17.5","17":"Ashlee Raje","14":"4559 Loop Beaverton Oregon 97005 States","0":"32","20":[{"to":"zh-Hans","text":"人造树"},{"to":"fr","text":"Arbre artificiel"},{"to":"ru","text":"Искусственное дерево"},{"to":"cy","text":"Coeden Artiffisial"}],"2":"1.0","18":"Street","7":"Julpum","3":"250.0"},{"12":"1469.8","8":"Pal","4":"Wood Table","15":"Ifiaeh","11":"1572.69","13":"102.89","16":"T.T TAILWIND TRADERS","5":"845.3","10":"71864","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","1":"55.3","17":"Willie","0":"36","20":[{"to":"zh-Hans","text":"木桌"},{"to":"fr","text":"Table en bois"},{"to":"ru","text":"Деревянный стол"},{"to":"cy","text":"Tabl Pren"}],"2":"2.0","7":"27, place de Brazaville Roubaix Nord 59100 France","3":"395.0"},{"12":"1469.8","8":"Pal","4":"Rechargeable screwdriver with extra battery","15":"Ifiaeh","11":"1572.69","13":"102.89","16":"T.T TAILWIND TRADERS","5":"300.46","10":"71864","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","1":"19.66","17":"Willie","0":"56","20":[{"to":"zh-Hans","text":"可充电螺丝刀,带额外电池"},{"to":"fr","text":"Tournevis rechargeable avec batterie supplémentaire"},{"to":"ru","text":"Аккумуляторная отвертка с дополнительной батареей"},{"to":"cy","text":"Sgriwdreifer gellir ailgodi tâl tâl gyda batri ychwanegol"}],"2":"1.0","7":"27, place de Brazaville Roubaix Nord 59100 France","3":"312.0"},{"12":"1469.8","8":"Pal","4":"Bathroom Sink 
Faucet","15":"Ifiaeh","11":"1572.69","13":"102.89","16":"T.T TAILWIND TRADERS","5":"105.93","10":"71864","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","1":"6.93","17":"Willie","0":"14","20":[{"to":"zh-Hans","text":"浴室水槽水龙头"},{"to":"fr","text":"Robinet lavabo de salle de bain"},{"to":"ru","text":"Смеситель для раковины в ванной комнате"},{"to":"cy","text":"Faucet Sinc Ystafell Ymolchi"}],"2":"1.0","7":"27, place de Brazaville Roubaix Nord 59100 France","3":"99.0"},{"12":"1469.8","8":"Pal","4":"Hammer","15":"Ifiaeh","11":"1572.69","13":"102.89","16":"T.T TAILWIND TRADERS","5":"321.0","10":"71864","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice71864.pdf","1":"21.0","17":"Willie","0":"48","20":[{"to":"zh-Hans","text":"锤"},{"to":"fr","text":"Marteau"},{"to":"ru","text":"Молоток"},{"to":"cy","text":"Morthwyl"}],"2":"3.0","7":"27, place de Brazaville Roubaix Nord 59100 France","3":"100.0"},{"12":"758.0","8":"Misty Xie","4":"Screwdriver","15":"Rloefos","11":"811.06","9":"4","13":"53.06","16":"T.T TAILWIND TRADERS","5":"235.4","10":"80066","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","1":"15.4","17":"Misty Xie","14":"376 Amador Valley Blvd.","0":"49","20":[{"to":"zh-Hans","text":"螺丝刀"},{"to":"fr","text":"Tournevis"},{"to":"ru","text":"Отвёртка"},{"to":"cy","text":"Sgriwdreifer"}],"2":"2.0","7":"6058 Hill Street","3":"110.0"},{"12":"758.0","8":"Misty Xie","4":"Yellow Rechargeable screwdriver","15":"Rloefos","11":"811.06","9":"4","13":"53.06","16":"T.T TAILWIND TRADERS","5":"267.5","10":"80066","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","1":"17.5","17":"Misty Xie","14":"376 Amador Valley Blvd.","0":"54","20":[{"to":"zh-Hans","text":"黄色可充电螺丝刀"},{"to":"fr","text":"Tournevis rechargeable jaune"},{"to":"ru","text":"Желтая аккумуляторная отвертка"},{"to":"cy","text":"Sgriwdreifer Ailwefradwy Melyn"}],"2":"1.0","7":"6058 Hill Street","3":"250.0"},{"12":"758.0","8":"Misty Xie","4":"Steel Passage Door Knob","15":"Rloefos","11":"811.06","9":"4","13":"53.06","16":"T.T TAILWIND TRADERS","5":"19.26","10":"80066","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","1":"1.26","17":"Misty Xie","14":"376 Amador Valley Blvd.","0":"22","20":[{"to":"zh-Hans","text":"钢通道门旋钮"},{"to":"fr","text":"Poignée de porte de passage en acier"},{"to":"ru","text":"Стальная дверная ручка прохода"},{"to":"cy","text":"Knob Drws Pasio Dur"}],"2":"2.0","7":"6058 Hill Street","3":"10.0"},{"12":"758.0","8":"Misty Xie","4":"Extractor Steal","15":"Rloefos","11":"811.06","9":"4","13":"53.06","16":"T.T TAILWIND TRADERS","5":"288.9","10":"80066","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2014/Invoice80066.pdf","1":"18.9","17":"Misty Xie","14":"376 Amador Valley Blvd.","0":"40","20":[{"to":"zh-Hans","text":"提取器偷窃"},{"to":"fr","text":"Vol d’extracteur"},{"to":"ru","text":"Кража экстрактора"},{"to":"cy","text":"Dwyn Echdynnwr"}],"2":"2.0","7":"6058 Hill Street","3":"135.0"},{"12":"999.0","8":"Connie Liang","4":"Multi Function Drill","11":"1068.93","9":"2015-11-04","13":"69.93","16":"T.T TAILWIND TRADERS","5":"170.13","10":"83878","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2015/Invoice83878.pdf","1":"11.13","17":"Mrurc Potsdamer","0":"47","20":[{"to":"zh-Hans","text":"多功能钻机"},{"to":"fr","text":"Perceuse multifonction"},{"to":"ru","text":"Многофункциональная дрель"},{"to":"cy","text":"Dril 
Aml-Swyddogaeth"}],"2":"1.0","18":"4SJ","7":"Straße 242 Stuttgart Saarland 70511 Germany","3":"159.0"},{"12":"999.0","8":"Connie Liang","4":"Stainless multi-tool plier","11":"1068.93","9":"2015-11-04","13":"69.93","16":"T.T TAILWIND TRADERS","5":"96.3","10":"83878","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2015/Invoice83878.pdf","1":"6.3","17":"Mrurc Potsdamer","0":"53","20":[{"to":"zh-Hans","text":"不锈钢多工具钳子"},{"to":"fr","text":"Pince multi-outils en acier inoxydable"},{"to":"ru","text":"Нержавеющая многофункциональная плоскогубцы"},{"to":"cy","text":"Plisgyn aml-offeryn di-staen"}],"2":"1.0","18":"4SJ","7":"Straße 242 Stuttgart Saarland 70511 Germany","3":"90.0"},{"12":"999.0","8":"Connie Liang","4":"Artificial Tree","11":"1068.93","9":"2015-11-04","13":"69.93","16":"T.T TAILWIND TRADERS","5":"802.5","10":"83878","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2015/Invoice83878.pdf","1":"52.5","17":"Mrurc Potsdamer","0":"32","20":[{"to":"zh-Hans","text":"人造树"},{"to":"fr","text":"Arbre artificiel"},{"to":"ru","text":"Искусственное дерево"},{"to":"cy","text":"Coeden Artiffisial"}],"2":"3.0","18":"4SJ","7":"Straße 242 Stuttgart Saarland 70511 Germany","3":"250.0"},{"12":"488.0","8":"Colin Cai","4":"Big Metal Shelving","11":"522.16","9":"2016","13":"34.16","16":"T.T TAILWIND TRADERS","5":"211.86","10":"102247","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice102247.pdf","1":"13.86","17":"Colin Cai","14":"Phata 8858 V. Street London England W1Y 3RA United Kingdom","0":"43","20":[{"to":"zh-Hans","text":"大金属搁板"},{"to":"fr","text":"Grandes étagères métalliques"},{"to":"ru","text":"Большие металлические стеллажи"},{"to":"cy","text":"Silffoedd Metel Mawr"}],"2":"2.0","18":"80074","7":"Kampstr 9859","3":"99.0"},{"12":"488.0","8":"Colin Cai","4":"Gardering","11":"522.16","9":"2016","13":"34.16","16":"T.T TAILWIND TRADERS","5":"21.4","10":"102247","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice102247.pdf","1":"1.4","17":"Colin Cai","14":"Phata 8858 V. Street London England W1Y 3RA United Kingdom","0":"30","20":[{"to":"zh-Hans","text":"守卫"},{"to":"fr","text":"Garde"},{"to":"ru","text":"Охрана"},{"to":"cy","text":"Gwarchod"}],"2":"2.0","18":"80074","7":"Kampstr 9859","3":"10.0"},{"12":"488.0","8":"Colin Cai","4":"Craftsman 100 ft. L x 5/8 in.","11":"522.16","9":"2016","13":"34.16","16":"T.T TAILWIND TRADERS","5":"288.9","10":"102247","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice102247.pdf","1":"18.9","17":"Colin Cai","14":"Phata 8858 V. Street London England W1Y 3RA United Kingdom","0":"26","20":[{"to":"zh-Hans","text":"工匠 100 英尺 L x 5/8 英寸。"},{"to":"fr","text":"Artisan 100 pi L x 5/8 po."},{"to":"ru","text":"Ремесленник 100 футов L x 5/8 дюйма"},{"to":"cy","text":"Crefftwr 100 troedfedd. L x 5/8 i mewn."}],"2":"3.0","18":"80074","7":"Kampstr 9859","3":"100.0"},{"12":"614.2","8":"Roy","4":"One sat on shoe gnome","15":"Rgerlakmog","11":"657.19","9":"8","13":"42.99","16":"T.T TAILWIND TRADERS","5":"104.0","10":"92549","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","1":"6.8","14":"4186 Silver Oaks Pl. 
Newport Beach California 92625 United States","0":"61","20":[{"to":"zh-Hans","text":"一个坐在鞋侏儒上"},{"to":"fr","text":"L’un d’eux était assis sur un gnome de chaussures"},{"to":"ru","text":"Один сидел на ботинке гнома"},{"to":"cy","text":"Roedd un yn eistedd ar gnome esgidiau"}],"2":"2.0","7":"2957 Tri-state Avenue Cambridge","3":"54.0"},{"12":"614.2","8":"Roy","4":"Refrigerator 1.7 cu. ft. 110 watts","15":"Rgerlakmog","11":"657.19","9":"8","13":"42.99","16":"T.T TAILWIND TRADERS","5":"428.0","10":"92549","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","1":"28.0","14":"4186 Silver Oaks Pl. Newport Beach California 92625 United States","0":"2","20":[{"to":"zh-Hans","text":"冰箱 1.7 cu. 英尺 110 瓦"},{"to":"fr","text":"Réfrigérateur 1,7 pi³ 110 watts"},{"to":"ru","text":"Холодильник 1,7 куб. фута 110 Вт"},{"to":"cy","text":"Oergell 1.7 cu. ft. 110 watt"}],"2":"2.0","7":"2957 Tri-state Avenue Cambridge","3":"200.0"},{"12":"614.2","8":"Roy","4":"Celebrations C9","15":"Rgerlakmog","11":"657.19","9":"8","13":"42.99","16":"T.T TAILWIND TRADERS","5":"28.89","10":"92549","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","1":"1.89","14":"4186 Silver Oaks Pl. Newport Beach California 92625 United States","0":"31","20":[{"to":"zh-Hans","text":"庆祝活动 C9"},{"to":"fr","text":"Célébrations C9"},{"to":"ru","text":"Торжества C9"},{"to":"cy","text":"Dathliadau C9"}],"2":"3.0","7":"2957 Tri-state Avenue Cambridge","3":"10.0"},{"12":"614.2","8":"Roy","4":"Craftsman 100 ft. L x 5/8 in.","15":"Rgerlakmog","11":"657.19","9":"8","13":"42.99","16":"T.T TAILWIND TRADERS","5":"96.3","10":"92549","6":"https://mmlsparkdemo.blob.core.windows.net/ignite2021/forms/2016/Invoice92549.pdf","1":"6.3","14":"4186 Silver Oaks Pl. Newport Beach California 92625 United States","0":"26","20":[{"to":"zh-Hans","text":"工匠 100 英尺 L x 5/8 英寸。"},{"to":"fr","text":"Artisan 100 pi L x 5/8 po."},{"to":"ru","text":"Ремесленник 100 футов L x 5/8 дюйма"},{"to":"cy","text":"Crefftwr 100 troedfedd. 
L x 5/8 i mewn."}],"2":"1.0","7":"2957 Tri-state Avenue Cambridge","3":"100.0"}],"schema":[{"key":"0","name":"ProductCode","type":"string"},{"key":"1","name":"Tax","type":"double"},{"key":"2","name":"Quantity","type":"double"},{"key":"3","name":"UnitPrice","type":"double"},{"key":"4","name":"Description","type":"string"},{"key":"5","name":"Amount","type":"double"},{"key":"6","name":"url","type":"string"},{"key":"7","name":"CustomerAddress","type":"string"},{"key":"8","name":"CustomerName","type":"string"},{"key":"9","name":"InvoiceDate","type":"string"},{"key":"10","name":"InvoiceId","type":"string"},{"key":"11","name":"InvoiceTotal","type":"double"},{"key":"12","name":"SubTotal","type":"double"},{"key":"13","name":"TotalTax","type":"double"},{"key":"14","name":"VendorAddress","type":"string"},{"key":"15","name":"VendorAddressRecipient","type":"string"},{"key":"16","name":"VendorName","type":"string"},{"key":"17","name":"CustomerAddressRecipient","type":"string"},{"key":"18","name":"ShippingAddress","type":"string"},{"key":"19","name":"ShippingAddressRecipient","type":"string"},{"key":"20","name":"Translations","type":"ArrayType(StructType(StructField(to,StringType,true), StructField(text,StringType,true), StructField(transliteration,StructType(StructField(script,StringType,true), StructField(text,StringType,true)),true), StructField(alignment,StructType(StructField(proj,StringType,true)),true), StructField(sentLen,StructType(StructField(srcSentLen,ArrayType(IntegerType,true),true), StructField(transSentLen,ArrayType(IntegerType,true),true)),true)),true)"}]},"isSummary":false,"language":"scala"},"persist_state":{"view":{"type":"details","tableOptions":{},"chartOptions":{"seriesFieldKeys":["1"],"categoryFieldKeys":["0"],"isStacked":false,"aggregationType":"sum","chartType":"bar"}}}}}},"application/vnd.databricks.v1+notebook":{"notebookName":"CognitiveServices - Create a Multilingual Search Engine from Forms","dashboards":[],"notebookMetadata":{"pythonIndentUnit":2},"language":"python","widgets":{},"notebookOrigID":945159649235142}},"nbformat":4,"nbformat_minor":0} +{ + "cells":[ + { + "cell_type":"code", + "execution_count":null, + "metadata":{ + + }, + "outputs":[ + + ], + "source":[ + "import os\n", + "key = os.environ['VISION_API_KEY']\n", + "search_key = os.environ['AZURE_SEARCH_KEY']\n", + "translator_key = os.environ['TRANSLATOR_KEY']\n", + "\n", + "search_service = \"mmlspark-azure-search\"\n", + "search_index = \"form-demo-index\"" + ] + }, + { + "cell_type":"code", + "execution_count":null, + "metadata":{ + + }, + "outputs":[ + + ], + "source":[ + "from pyspark.sql.functions import udf\n", + "from pyspark.sql.types import StringType\n", + "\n", + "def blob_to_url(blob):\n", + " [prefix, postfix] = blob.split(\"@\")\n", + " container = prefix.split(\"/\")[-1]\n", + " split_postfix = postfix.split(\"/\")\n", + " account = split_postfix[0]\n", + " filepath = \"/\".join(split_postfix[1:])\n", + " return \"https://{}/{}/{}\".format(account, container, filepath)\n", + "\n", + "\n", + "df2 = (spark.read.format(\"binaryFile\")\n", + " .load(\"wasbs://ignite2021@mmlsparkdemo.blob.core.windows.net/form_subset/*\")\n", + " .select(\"path\")\n", + " .limit(10)\n", + " .select(udf(blob_to_url, StringType())(\"path\").alias(\"url\"))\n", + " .cache()\n", + " )\n" + ] + }, + { + "cell_type":"code", + "execution_count":null, + "metadata":{ + + }, + "outputs":[ + + ], + "source":[ + "display(df2)" + ] + }, + { + "cell_type":"code", + "execution_count":null, + "metadata":{ + + }, + "outputs":[ + + 
], + "source":[ + "displayHTML(\"\"\"\n", + "\n", + "\"\"\")" + ] + }, + { + "cell_type":"code", + "execution_count":null, + "metadata":{ + + }, + "outputs":[ + + ], + "source":[ + "from synapse.ml.cognitive import AnalyzeInvoices\n", + "\n", + "analyzed_df = (AnalyzeInvoices()\n", + " .setSubscriptionKey(key)\n", + " .setLocation(\"eastus\")\n", + " .setImageUrlCol(\"url\")\n", + " .setOutputCol(\"invoices\")\n", + " .setErrorCol(\"errors\")\n", + " .setConcurrency(5)\n", + " .transform(df2)\n", + " .cache())\n" + ] + }, + { + "cell_type":"code", + "execution_count":null, + "metadata":{ + + }, + "outputs":[ + + ], + "source":[ + "display(analyzed_df)" + ] + }, + { + "cell_type":"code", + "execution_count":null, + "metadata":{ + + }, + "outputs":[ + + ], + "source":[ + "from synapse.ml.cognitive import FormOntologyLearner\n", + "\n", + "organized_df = (FormOntologyLearner()\n", + " .setInputCol(\"invoices\")\n", + " .setOutputCol(\"extracted\")\n", + " .fit(analyzed_df)\n", + " .transform(analyzed_df)\n", + " .select(\"url\", \"extracted.*\")\n", + " .cache())" + ] + }, + { + "cell_type":"code", + "execution_count":null, + "metadata":{ + + }, + "outputs":[ + + ], + "source":[ + "display(organized_df)" + ] + }, + { + "cell_type":"code", + "execution_count":null, + "metadata":{ + + }, + "outputs":[ + + ], + "source":[ + "from pyspark.sql.functions import explode, col\n", + "itemized_df = (organized_df\n", + " .select(\"*\", explode(col(\"Items\")).alias(\"Item\"))\n", + " .drop(\"Items\")\n", + " .select(\"Item.*\", \"*\")\n", + " .drop(\"Item\"))\n" + ] + }, + { + "cell_type":"code", + "execution_count":null, + "metadata":{ + + }, + "outputs":[ + + ], + "source":[ + "display(itemized_df)" + ] + }, + { + "cell_type":"code", + "execution_count":null, + "metadata":{ + + }, + "outputs":[ + + ], + "source":[ + "display(itemized_df.where(col(\"ProductCode\") == 48))" + ] + }, + { + "cell_type":"code", + "execution_count":null, + "metadata":{ + + }, + "outputs":[ + + ], + "source":[ + "from synapse.ml.cognitive import Translate\n", + "\n", + "translated_df = (Translate()\n", + " .setSubscriptionKey(translator_key)\n", + " .setLocation(\"eastus\")\n", + " .setTextCol(\"Description\")\n", + " .setErrorCol(\"TranslationError\")\n", + " .setOutputCol(\"output\")\n", + " .setToLanguage([\"zh-Hans\", \"fr\", \"ru\", \"cy\"])\n", + " .setConcurrency(5)\n", + " .transform(itemized_df)\n", + " .withColumn(\"Translations\", col(\"output.translations\")[0])\n", + " .drop(\"output\", \"TranslationError\")\n", + " .cache())\n" + ] + }, + { + "cell_type":"code", + "execution_count":null, + "metadata":{ + + }, + "outputs":[ + + ], + "source":[ + "display(translated_df)" + ] + }, + { + "cell_type":"code", + "execution_count":null, + "metadata":{ + + }, + "outputs":[ + + ], + "source":[ + "from synapse.ml.cognitive import *\n", + "from pyspark.sql.functions import monotonically_increasing_id, lit\n", + "\n", + "(translated_df\n", + " .withColumn(\"DocID\", monotonically_increasing_id().cast(\"string\"))\n", + " .withColumn(\"SearchAction\", lit(\"upload\"))\n", + " .writeToAzureSearch(\n", + " subscriptionKey=search_key,\n", + " actionCol=\"SearchAction\",\n", + " serviceName=search_service,\n", + " indexName=search_index,\n", + " keyCol=\"DocID\")\n", + ")\n" + ] + }, + { + "cell_type":"code", + "execution_count":null, + "metadata":{ + + }, + "outputs":[ + + ], + "source":[ + "import requests\n", + "url = 'https://{}.search.windows.net/indexes/{}/docs/search?api-version=2019-05-06'.format(search_service, 
search_index)\n", + "requests.post(url, json={\"search\": \"door\"}, headers = {\"api-key\": search_key}).json()" + ] + }, + { + "cell_type":"code", + "execution_count":null, + "metadata":{ + + }, + "outputs":[ + + ], + "source":[ + + ] + } + ], + "metadata":{ + "description":null, + "kernelspec":{ + "display_name":"Synapse PySpark", + "name":"synapse_pyspark" + }, + "language_info":{ + "name":"python" + }, + "save_output":true + }, + "nbformat":4, + "nbformat_minor":0 +} \ No newline at end of file diff --git a/pipeline.yaml b/pipeline.yaml index 191e0925c3..c3b965a7f6 100644 --- a/pipeline.yaml +++ b/pipeline.yaml @@ -461,18 +461,23 @@ jobs: sbt convertNotebooks - bash: | yarn install + cd website + yarn + yarn build + displayName: 'yarn install and build' + - bash: | git config --global user.name "${GH_NAME}" git config --global user.email "${GH_EMAIL}" git checkout -b main echo "machine github.com login ${GH_NAME} password ${GH_TOKEN}" > ~/.netrc cd website - yarn && GIT_USER="${GH_NAME}" yarn deploy + GIT_USER="${GH_NAME}" yarn deploy condition: and(succeeded(), eq(variables['Build.SourceBranch'], 'refs/heads/master')) env: GH_NAME: $(gh-name) GH_EMAIL: $(gh-email) GH_TOKEN: $(gh-token) - displayName: 'yarn install and build' + displayName: 'yarn deploy' - job: UnitTests diff --git a/website/docs/features/responsible_ai/Data Balance Analysis.md b/website/docs/features/responsible_ai/Data Balance Analysis.md index 19d7a0df32..e95ffb78bc 100644 --- a/website/docs/features/responsible_ai/Data Balance Analysis.md +++ b/website/docs/features/responsible_ai/Data Balance Analysis.md @@ -19,7 +19,7 @@ In summary, Data Balance Analysis, used as a step for building ML models has the ## Examples -* [Data Balance Analysis - Adult Census Income](../../../examples/responsible_ai/DataBalanceAnalysis%20-%20Adult%20Census%20Income) +* [Data Balance Analysis - Adult Census Income](../../../features/responsible_ai/DataBalanceAnalysis%20-%20Adult%20Census%20Income) ## Usage diff --git a/website/docs/features/responsible_ai/Model Interpretation on Spark.md b/website/docs/features/responsible_ai/Model Interpretation on Spark.md index e81aa9525d..d2c7cc5f4e 100644 --- a/website/docs/features/responsible_ai/Model Interpretation on Spark.md +++ b/website/docs/features/responsible_ai/Model Interpretation on Spark.md @@ -26,9 +26,9 @@ Both explainers extends from `org.apache.spark.ml.Transformer`. After setting up To see examples of model interpretability on Spark in action, take a look at these sample notebooks: -- [Tabular SHAP explainer](../../../examples/responsible_ai/Interpretability%20-%20Tabular%20SHAP%20explainer) +- [Tabular SHAP explainer](../../../features/responsible_ai/Interpretability%20-%20Tabular%20SHAP%20explainer) - [Image explainers](../../../features/responsible_ai/Interpretability%20-%20Image%20Explainers) -- [Text explainers](../../../examples/responsible_ai/Interpretability%20-%20Text%20Explainers) +- [Text explainers](../../../features/responsible_ai/Interpretability%20-%20Text%20Explainers) | | Tabular models | Vector models | Image models | Text models | |------------------------|-----------------------------|---------------------------|-------------------------|-----------------------| diff --git a/website/docs/reference/datasets.md b/website/docs/reference/datasets.md index bcbab0a935..524696b6dd 100644 --- a/website/docs/reference/datasets.md +++ b/website/docs/reference/datasets.md @@ -30,7 +30,7 @@ tab-separated file with 2 columns (`rating`, `text`) and 10000 rows. 
The contains free-form text strings in English language. You can use `synapse.ml.TextFeaturizer` to convert the text into feature vectors for machine learning models ([see -example](../../examples/text_analytics/TextAnalytics%20-%20Amazon%20Book%20Reviews/)). +example](../../features/other/TextAnalytics%20-%20Amazon%20Book%20Reviews/)). The example dataset is available [here](https://mmlspark.azureedge.net/datasets/BookReviewsFromAmazon10K.tsv); diff --git a/website/src/pages/index.js b/website/src/pages/index.js index 66ab0a4942..624ee5c3ee 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -15,7 +15,7 @@ const snippets = [ { label: "Cognitive Services", further: - "docs/features/CognitiveServices%20-%20Overview#text-analytics-sample", + "docs/features/cognitive_services/CognitiveServices%20-%20Overview#text-analytics-sample", config: `from synapse.ml.cognitive import * sentiment_df = (TextSentiment() diff --git a/website/versioned_docs/version-0.9.1/examples/about.md b/website/versioned_docs/version-0.9.1/examples/about.md deleted file mode 100644 index 5247c91adc..0000000000 --- a/website/versioned_docs/version-0.9.1/examples/about.md +++ /dev/null @@ -1,57 +0,0 @@ ---- -title: Examples -hide_title: true -sidebar_label: About ---- - -## Examples - -- Create a deep image classifier with transfer learning ([DeepLearning - Flower Image Classification]) -- Fit a LightGBM classification or regression model on a biochemical dataset - ([LightGBM Overview]), to learn more check out the [LightGBM documentation - page](../../features/lightgbm/about). -- Deploy a deep network as a distributed web service with [SynapseML - Serving](../../features/spark_serving/about) -- Use web services in Spark with [HTTP on Apache Spark](../../features/http/about) -- Use Bi-directional LSTMs from Keras for medical entity extraction - ([DeepLearning - BiLSTM Medical Entity Extraction]) -- Create a text analytics system on Amazon book reviews ([TextAnalytics - Amazon Book Reviews]) -- Perform distributed hyperparameter tuning to identify Breast Cancer - ([HyperParameterTuning - Fighting Breast Cancer]) -- Easily ingest images from HDFS into Spark `DataFrame` ([DeepLearning - CIFAR10 Convolutional Network]) -- Use OpenCV on Spark to manipulate images ([OpenCV - Pipeline Image Transformations]) -- Train classification and regression models easily via implicit featurization - of data ([Classification - Adult Census]) -- Train and evaluate a flight delay prediction system ([Regression - Flight Delays]) -- Finding anomalous data access patterns using the Access Anomalies package of CyberML ([CyberML - Anomalous Access Detection]) -- Model interpretation ([Interpretability - Tabular SHAP Explainer], [Interpretability - Image Explainers], [Interpretability - Text Explainers]) -- Do Data Balance Analysis to determine how well features and feature values are represented in your dataset ([DataBalanceAnalysis - Adult Census Income]) - - -[Classification - Adult Census]: ../classification/Classification%20-%20Adult%20Census "Classification - Adult Census" - -[Regression - Flight Delays]: ../regression/Regression%20-%20Flight%20Delays "Regression - Flight Delays" - -[LightGBM Overview]: ../../features/lightgbm/LightGBM%20-%20Overview "LightGBM Overview" - -[TextAnalytics - Amazon Book Reviews]: ../text_analytics/TextAnalytics%20-%20Amazon%20Book%20Reviews "TextAnalytics - Amazon Book Reviews" - -[HyperParameterTuning - Fighting Breast Cancer]: ../HyperParameterTuning%20-%20Fighting%20Breast%20Cancer 
"HyperParameterTuning - Fighting Breast Cancer" - -[DeepLearning - CIFAR10 Convolutional Network]: ../deep_learning/DeepLearning%20-%20CIFAR10%20Convolutional%20Network "DeepLearning - CIFAR10 Convolutional Network" - -[OpenCV - Pipeline Image Transformations]: ../OpenCV%20-%20Pipeline%20Image%20Transformations "OpenCV - Pipeline Image Transformations" - -[DeepLearning - BiLSTM Medical Entity Extraction]: ../deep_learning/DeepLearning%20-%20BiLSTM%20Medical%20Entity%20Extraction "DeepLearning - BiLSTM Medical Entity Extraction" - -[DeepLearning - Flower Image Classification]: ../deep_learning/DeepLearning%20-%20Flower%20Image%20Classification "DeepLearning - Flower Image Classification" - -[CyberML - Anomalous Access Detection]: ../CyberML%20-%20Anomalous%20Access%20Detection "CyberML - Anomalous Access Detection" - -[Interpretability - Tabular SHAP Explainer]: ../responsible_ai/Interpretability%20-%20Tabular%20SHAP%20explainer "Interpretability - Tabular SHAP Explainer" - -[Interpretability - Image Explainers]: ../../features/responsible_ai/Interpretability%20-%20Image%20Explainers "Interpretability - Image Explainers" - -[Interpretability - Text Explainers]: ../responsible_ai/Interpretability%20-%20Text%20Explainers "Interpretability - Text Explainers" - -[DataBalanceAnalysis - Adult Census Income]: ../responsible_ai/DataBalanceAnalysis%20-%20Adult%20Census%20Income "DataBalanceAnalysis - Adult Census Income" diff --git a/website/versioned_docs/version-0.9.1/examples/classification/Classification - Adult Census with Vowpal Wabbit.md b/website/versioned_docs/version-0.9.1/features/classification/Classification - Adult Census with Vowpal Wabbit.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/classification/Classification - Adult Census with Vowpal Wabbit.md rename to website/versioned_docs/version-0.9.1/features/classification/Classification - Adult Census with Vowpal Wabbit.md diff --git a/website/versioned_docs/version-0.9.1/examples/classification/Classification - Adult Census.md b/website/versioned_docs/version-0.9.1/features/classification/Classification - Adult Census.md similarity index 99% rename from website/versioned_docs/version-0.9.1/examples/classification/Classification - Adult Census.md rename to website/versioned_docs/version-0.9.1/features/classification/Classification - Adult Census.md index c65b2ad573..1541bd7c4f 100644 --- a/website/versioned_docs/version-0.9.1/examples/classification/Classification - Adult Census.md +++ b/website/versioned_docs/version-0.9.1/features/classification/Classification - Adult Census.md @@ -43,9 +43,7 @@ and so on. The parameter `numFeatures` controls the number of hashed features. 
```python from synapse.ml.train import TrainClassifier - from pyspark.ml.classification import LogisticRegression - model = TrainClassifier(model=LogisticRegression(), labelCol="income", numFeatures=256).fit(train) ``` diff --git a/website/versioned_docs/version-0.9.1/examples/classification/Classification - Before and After SynapseML.md b/website/versioned_docs/version-0.9.1/features/classification/Classification - Before and After SynapseML.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/classification/Classification - Before and After SynapseML.md rename to website/versioned_docs/version-0.9.1/features/classification/Classification - Before and After SynapseML.md diff --git a/website/versioned_docs/version-0.9.1/examples/classification/Classification - Twitter Sentiment with Vowpal Wabbit.md b/website/versioned_docs/version-0.9.1/features/classification/Classification - Twitter Sentiment with Vowpal Wabbit.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/classification/Classification - Twitter Sentiment with Vowpal Wabbit.md rename to website/versioned_docs/version-0.9.1/features/classification/Classification - Twitter Sentiment with Vowpal Wabbit.md diff --git a/website/versioned_docs/version-0.9.1/examples/cognitive_services/CognitiveServices - Celebrity Quote Analysis.md b/website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Celebrity Quote Analysis.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/cognitive_services/CognitiveServices - Celebrity Quote Analysis.md rename to website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Celebrity Quote Analysis.md diff --git a/website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.md b/website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.md new file mode 100644 index 0000000000..0397538a50 --- /dev/null +++ b/website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.md @@ -0,0 +1,165 @@ +--- +title: CognitiveServices - Create a Multilingual Search Engine from Forms +hide_title: true +status: stable +--- +```python +import os +key = os.environ['VISION_API_KEY'] +search_key = os.environ['AZURE_SEARCH_KEY'] +translator_key = os.environ['TRANSLATOR_KEY'] + +search_service = "mmlspark-azure-search" +search_index = "form-demo-index" +``` + + +```python +from pyspark.sql.functions import udf +from pyspark.sql.types import StringType + +def blob_to_url(blob): + [prefix, postfix] = blob.split("@") + container = prefix.split("/")[-1] + split_postfix = postfix.split("/") + account = split_postfix[0] + filepath = "/".join(split_postfix[1:]) + return "https://{}/{}/{}".format(account, container, filepath) + + +df2 = (spark.read.format("binaryFile") + .load("wasbs://ignite2021@mmlsparkdemo.blob.core.windows.net/forms/*") + .select("path") + .coalesce(24) + .limit(10) + .select(udf(blob_to_url, StringType())("path").alias("url")) + .cache() + ) + +``` + + +```python +display(df2) +``` + + +```python +displayHTML(""" + +""") +``` + + +```python +from synapse.ml.cognitive import AnalyzeInvoices + +analyzed_df = (AnalyzeInvoices() + .setSubscriptionKey(key) + .setLocation("eastus") + .setImageUrlCol("url") + .setOutputCol("invoices") + .setErrorCol("errors") + 
.setConcurrency(5) + .transform(df2) + .cache()) + +``` + + +```python +display(analyzed_df) +``` + + +```python +from synapse.ml.cognitive import FormOntologyLearner + +organized_df = (FormOntologyLearner() + .setInputCol("invoices") + .setOutputCol("extracted") + .fit(analyzed_df.limit(10)) + .transform(analyzed_df) + .select("url", "extracted.*") + .cache()) +``` + + +```python +display(organized_df) +``` + + +```python +from pyspark.sql.functions import explode, col +itemized_df = (organized_df + .select("*", explode(col("Items")).alias("Item")) + .drop("Items") + .select("Item.*", "*") + .drop("Item")) + +``` + + +```python +display(itemized_df) +``` + + +```python +display(itemized_df.where(col("ProductCode") == 6)) +``` + + +```python +from synapse.ml.cognitive import Translate + +translated_df = (Translate() + .setSubscriptionKey(translator_key) + .setLocation("eastus") + .setTextCol("Description") + .setErrorCol("TranslationError") + .setOutputCol("output") + .setToLanguage(["zh-Hans", "fr", "ru", "cy"]) + .setConcurrency(5) + .transform(itemized_df) + .withColumn("Translations", col("output.translations")[0]) + .drop("output", "TranslationError") + .cache()) + +``` + + +```python +display(translated_df) +``` + + +```python +from synapse.ml.cognitive import * +from pyspark.sql.functions import monotonically_increasing_id, lit + +(translated_df + .withColumn("DocID", monotonically_increasing_id().cast("string")) + .withColumn("SearchAction", lit("upload")) + .writeToAzureSearch( + subscriptionKey=search_key, + actionCol="SearchAction", + serviceName=search_service, + indexName=search_index, + keyCol="DocID") +) + +``` + + +```python +import requests +url = 'https://{}.search.windows.net/indexes/{}/docs/search?api-version=2019-05-06'.format(search_service, search_index) +requests.post(url, json={"search": "door"}, headers = {"api-key": search_key}).json() +``` + + +```python + +``` diff --git a/website/versioned_docs/version-0.9.1/features/CognitiveServices - Overview.md b/website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Overview.md similarity index 99% rename from website/versioned_docs/version-0.9.1/features/CognitiveServices - Overview.md rename to website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Overview.md index 59694c4598..13617cfb76 100644 --- a/website/versioned_docs/version-0.9.1/features/CognitiveServices - Overview.md +++ b/website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Overview.md @@ -85,20 +85,13 @@ To get started, we'll need to add this code to the project: ```python from pyspark.sql.functions import udf, col - from synapse.ml.io.http import HTTPTransformer, http_udf - from requests import Request - from pyspark.sql.functions import lit - from pyspark.ml import PipelineModel - from pyspark.sql.functions import col - import os - ``` @@ -122,22 +115,13 @@ if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": ```python from synapse.ml.cognitive import * - - # A general Cognitive Services key for Text Analytics, Computer Vision and Form Recognizer (or use separate keys that belong to each service) - service_key = os.environ["COGNITIVE_SERVICE_KEY"] - # A Bing Search v7 subscription key - bing_search_key = os.environ["BING_IMAGE_SEARCH_KEY"] - # An Anomaly Dectector subscription key - anomaly_key = os.environ["ANOMALY_API_KEY"] - # A Translator subscription key - translator_key = os.environ["TRANSLATOR_KEY"] ``` diff --git 
a/website/versioned_docs/version-0.9.1/examples/cognitive_services/CognitiveServices - Predictive Maintenance.md b/website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Predictive Maintenance.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/cognitive_services/CognitiveServices - Predictive Maintenance.md rename to website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Predictive Maintenance.md diff --git a/website/versioned_docs/version-0.9.1/features/http/HttpOnSpark - Working with Arbitrary Web APIs.md b/website/versioned_docs/version-0.9.1/features/http/HttpOnSpark - Working with Arbitrary Web APIs.md deleted file mode 100644 index 9560287c84..0000000000 --- a/website/versioned_docs/version-0.9.1/features/http/HttpOnSpark - Working with Arbitrary Web APIs.md +++ /dev/null @@ -1,48 +0,0 @@ ---- -title: HttpOnSpark - Working with Arbitrary Web APIs -hide_title: true -status: stable ---- -### Use "dogs as a service" in a distributed fashion with HTTP on Spark - -In this example we will use the simple HTTP Transformer to call a public webAPI that returns random images of dogs. The service does not use the json payload, but this is for example purposes. - -A call to the dog service returns json objects structured like: - -`{"status":"success","message":"https:\/\/images.dog.ceo\/breeds\/lhasa\/n02098413_2536.jpg"}` - -If you visit the link you can download the image: - - - - - - - -```python -import os - -if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - from pyspark.sql import SparkSession - spark = SparkSession.builder.getOrCreate() - -from pyspark.sql.functions import struct -from pyspark.sql.types import * -from synapse.ml.io.http import * - -df = spark.createDataFrame([("foo",) for x in range(20)], ["data"]) \ - .withColumn("inputs", struct("data")) - -response_schema = StructType().add("status", StringType()).add("message", StringType()) - -client = SimpleHTTPTransformer() \ - .setInputCol("inputs") \ - .setInputParser(JSONInputParser()) \ - .setOutputParser(JSONOutputParser().setDataType(response_schema)) \ - .setOutputCol("results") \ - .setUrl("https://dog.ceo/api/breeds/image/random") - -responses = client.transform(df) -responses.select("results").show(truncate = False) -``` diff --git a/website/versioned_docs/version-0.9.1/features/http/about.md b/website/versioned_docs/version-0.9.1/features/http/about.md deleted file mode 100644 index e209dd198f..0000000000 --- a/website/versioned_docs/version-0.9.1/features/http/about.md +++ /dev/null @@ -1,161 +0,0 @@ ---- -title: Http on Apache Spark -hide_title: true -sidebar_label: About ---- - -# HTTP on Apache Spark - -### A library for interacting with HTTP services from Apache Spark - -- **Flexible**: Encodes the entire HTTP protocol in Apache Spark for - full control of web requests -- **Performant**: Fully distributed across workers, built in support for - multi-threaded buffering, batching, and asynchronous request - concurrency. -- **Easy to Use**: High-level APIs for automatic parsing of requests, - abstracting all HTTP knowledge to under the hood. -- **Accessible from Multiple Languages**: Usable in Python and Scala. - Native integration with Scala's Apache HTTP Core. Native - integration with [Python Requests] library coming soon! -- **Composable**: Pipeline Stage APIs allow users to embed and compose - web services with SparkML machine learning models. 
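The "Composable" point above is easiest to see with a concrete, illustrative sketch (not part of the original page): the `SimpleHTTPTransformer` from the Usage section below can be dropped into an ordinary SparkML `Pipeline` next to other stages. The endpoint URL, the column names, and the `image_url` field are placeholder assumptions.

```python
# Illustrative sketch only: composing an HTTP stage with another SparkML stage.
# The endpoint and column names are placeholders, not part of the original docs.
from pyspark.ml import Pipeline
from pyspark.ml.feature import SQLTransformer
from pyspark.sql.functions import struct
from pyspark.sql.types import StructType, StringType
from synapse.ml.io.http import SimpleHTTPTransformer, JSONInputParser, JSONOutputParser

response_schema = StructType().add("status", StringType()).add("message", StringType())

http_stage = (SimpleHTTPTransformer()
    .setInputCol("inputs")
    .setInputParser(JSONInputParser())
    .setOutputParser(JSONOutputParser().setDataType(response_schema))
    .setOutputCol("results")
    .setUrl("https://dog.ceo/api/breeds/image/random"))

# A plain SparkML stage downstream of the web call, pulling one field out of
# the parsed JSON response (assumes the parsed struct lands in `results`).
select_stage = SQLTransformer(statement="SELECT *, results.message AS image_url FROM __THIS__")

df = (spark.createDataFrame([("foo",) for _ in range(5)], ["data"])
      .withColumn("inputs", struct("data")))

pipeline_model = Pipeline(stages=[http_stage, select_stage]).fit(df)
pipeline_model.transform(df).select("data", "image_url").show(truncate=False)
```

Because each stage is a standard `Transformer`, the web call behaves like any other pipeline stage from the caller's point of view.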
- -[Apache HTTP Core]: https://hc.apache.org/httpcomponents-core-ga/httpcore/apidocs/org/apache/http/package-summary.html - -[Python Requests]: http://docs.python-requests.org/en/master/ - -## Usage - -### Send a JSON POST request - -```python -import synapse.ml -from synapse.ml.io.http import SimpleHTTPTransformer, JSONOutputParser -from pyspark.sql.types import StructType, StringType - -df = sc.parallelize([(x, ) for x in range(100)]).toDF("data") - -client = SimpleHTTPTransformer() \ - .setInputCol("data") \ - .setOutputParser(JSONOutputParser() \ - .setDataType(StructType().add("replies", StringType))) \ - .setUrl("www.my_service_url.com/any_api_here") \ - .setOutputCol("results") - -responses = client.transform(df) -``` - -## High-Performance Functionality - -The Simple HTTP transformer provides options for batching request bodies -and asynchronous request sending. For simplicity and easier debugging, -these options are not enabled by default. - -- `maxBatchSize`: Parameter that enables buffered minibatching. If this - parameter is set, a background thread will fetch up to at most - `maxBatchSize` requests. These requests's are combined by creating an - array of their entity data. The method sends _up to_ `maxBatchSize` - requests, rapid iterator materialization will result in smaller - batches as the background thread does not have enough time to - materialize a full batch. In other words, each new request sends all - of the new data that has accumulated at this stage of the pipeline. - -- `concurrency`: This parameter allows one to send up to `concurrency` - requests simultaneously using Scala futures under the hood. If this - parameter is set to 1 (default), then no Scala futures are used. - -- `concurrentTimeout`: If `concurrency`>1, requests will fail if they do - not receive a response within `concurrentTimeout` seconds. - -- `handlingStrategy`: (`"basic"`, or `"advanced"`) advanced handling - uses exponential backoff on the retires and can handle responses that - instruct clients to throttle or retry again. - -```python -SimpleHTTPTransformer() \ - .setMaxBatchSize(100) \ - .setConcurrency(5) \ - .setConcurrentTimeout(30.0) \ - .setHandlingStrategy("advanced") -``` - -## Architecture - -HTTP on Spark encapsulates the entire HTTP protocol within Spark's -datatypes. Uses can create flexible web clients that communicate with a -wide variety of endpoints. SynapseML provides methods to convert between -Scala case classes, Spark types, and Apache HTTP Core types. A common -representation makes it easy to work with HTTP on spark from Scala, -Python, or any other spark compatible language. This common -representation is serializable allowing for complex operations like SQL -joins and repartitons. - -In HTTP on Spark, each partition manages a running web client that sends -requests. A schematic representation can be seen below: - -
- -## Schema - -This library adds Spark types that faithfully represent the HTTP -protocol for requests and responses. SynapseML provides several ways to -create these objects from the apache HTTP core library, and from a set -of case classes. - -The schema for a complete HTTP request looks like: - - request: struct (nullable = true) - +-- requestLine: struct (nullable = true) - | +-- method: string (nullable = true) - | +-- uri: string (nullable = true) - | +-- protoclVersion: struct (nullable = true) - | +-- protocol: string (nullable = true) - | +-- major: integer (nullable = false) - | +-- minor: integer (nullable = false) - +-- headers: array (nullable = true) - | +-- element: struct (containsNull = true) - | +-- name: string (nullable = true) - | +-- value: string (nullable = true) - +-- entity: struct (nullable = true) - +-- content: binary (nullable = true) - +-- contentEncoding: struct (nullable = true) - | +-- name: string (nullable = true) - | +-- value: string (nullable = true) - +-- contentLenth: long (nullable = false) - +-- contentType: struct (nullable = true) - | +-- name: string (nullable = true) - | +-- value: string (nullable = true) - +-- isChunked: boolean (nullable = false) - +-- isRepeatable: boolean (nullable = false) - +-- isStreaming: boolean (nullable = false) - -And the schema for a complete response looks like: - - response: struct (nullable = true) - +-- headers: array (nullable = true) - | +-- element: struct (containsNull = true) - | +-- name: string (nullable = true) - | +-- value: string (nullable = true) - +-- entity: struct (nullable = true) - | +-- content: binary (nullable = true) - | +-- contentEncoding: struct (nullable = true) - | | +-- name: string (nullable = true) - | | +-- value: string (nullable = true) - | +-- contentLenth: long (nullable = false) - | +-- contentType: struct (nullable = true) - | | +-- name: string (nullable = true) - | | +-- value: string (nullable = true) - | +-- isChunked: boolean (nullable = false) - | +-- isRepeatable: boolean (nullable = false) - | +-- isStreaming: boolean (nullable = false) - +-- statusLine: struct (nullable = true) - | +-- protocolVersion: struct (nullable = true) - | | +-- protocol: string (nullable = true) - | | +-- major: integer (nullable = false) - | | +-- minor: integer (nullable = false) - | +-- statusCode: integer (nullable = false) - | +-- reasonPhrase: string (nullable = true) - +-- locale: string (nullable = true) diff --git a/website/versioned_docs/version-0.9.1/features/lightgbm/LightGBM - Overview.md b/website/versioned_docs/version-0.9.1/features/lightgbm/LightGBM - Overview.md index 6802c3b748..e7f0c1a278 100644 --- a/website/versioned_docs/version-0.9.1/features/lightgbm/LightGBM - Overview.md +++ b/website/versioned_docs/version-0.9.1/features/lightgbm/LightGBM - Overview.md @@ -101,7 +101,6 @@ display(train_data.groupBy("Bankrupt?").count()) ```python from synapse.ml.lightgbm import LightGBMClassifier - model = LightGBMClassifier(objective="binary", featuresCol="features", labelCol="Bankrupt?", isUnbalance=True) ``` @@ -116,21 +115,13 @@ By calling "saveNativeModel", it allows you to extract the underlying lightGBM m ```python from synapse.ml.lightgbm import LightGBMClassificationModel - - if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - model.saveNativeModel("/models/lgbmclassifier.model") - model = LightGBMClassificationModel.loadNativeModelFromFile("/models/lgbmclassifier.model") - else: - model.saveNativeModel("/lgbmclassifier.model") - model = 
LightGBMClassificationModel.loadNativeModelFromFile("/lgbmclassifier.model") - ``` #### Feature Importances Visualization diff --git a/website/versioned_docs/version-0.9.1/features/onnx/ONNX - Inference on Spark.md b/website/versioned_docs/version-0.9.1/features/onnx/ONNX - Inference on Spark.md index 1394a88934..9f3230f761 100644 --- a/website/versioned_docs/version-0.9.1/features/onnx/ONNX - Inference on Spark.md +++ b/website/versioned_docs/version-0.9.1/features/onnx/ONNX - Inference on Spark.md @@ -30,63 +30,34 @@ Use LightGBM to train a model ```python from pyspark.ml.feature import VectorAssembler - from synapse.ml.lightgbm import LightGBMClassifier - - feature_cols = df.columns[1:] - featurizer = VectorAssembler( - inputCols=feature_cols, - outputCol='features' - ) - - train_data = featurizer.transform(df)['Bankrupt?', 'features'] - - model = ( - LightGBMClassifier(featuresCol="features", labelCol="Bankrupt?") - .setEarlyStoppingRound(300) - .setLambdaL1(0.5) - .setNumIterations(1000) - .setNumThreads(-1) - .setMaxDeltaStep(0.5) - .setNumLeaves(31) - .setMaxDepth(-1) - .setBaggingFraction(0.7) - .setFeatureFraction(0.7) - .setBaggingFreq(2) - .setObjective("binary") - .setIsUnbalance(True) - .setMinSumHessianInLeaf(20) - .setMinGainToSplit(0.01) - ) - - model = model.fit(train_data) ``` @@ -115,14 +86,9 @@ Load the ONNX payload into an `ONNXModel`, and inspect the model inputs and outp ```python from synapse.ml.onnx import ONNXModel - - onnx_ml = ONNXModel().setModelPayload(model_payload_ml) - - print("Model inputs:" + str(onnx_ml.getModelInputs())) - print("Model outputs:" + str(onnx_ml.getModelOutputs())) ``` diff --git a/website/versioned_docs/version-0.9.1/examples/OpenCV - Pipeline Image Transformations.md b/website/versioned_docs/version-0.9.1/features/opencv/OpenCV - Pipeline Image Transformations.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/OpenCV - Pipeline Image Transformations.md rename to website/versioned_docs/version-0.9.1/features/opencv/OpenCV - Pipeline Image Transformations.md diff --git a/website/versioned_docs/version-0.9.1/examples/AzureSearchIndex - Met Artworks.md b/website/versioned_docs/version-0.9.1/features/other/AzureSearchIndex - Met Artworks.md similarity index 99% rename from website/versioned_docs/version-0.9.1/examples/AzureSearchIndex - Met Artworks.md rename to website/versioned_docs/version-0.9.1/features/other/AzureSearchIndex - Met Artworks.md index e045e82d48..c32e6e57fc 100644 --- a/website/versioned_docs/version-0.9.1/examples/AzureSearchIndex - Met Artworks.md +++ b/website/versioned_docs/version-0.9.1/features/other/AzureSearchIndex - Met Artworks.md @@ -50,33 +50,19 @@ data = spark.read\ ```python from synapse.ml.cognitive import AnalyzeImage - from synapse.ml.stages import SelectColumns - - #define pipeline - describeImage = (AnalyzeImage() - .setSubscriptionKey(VISION_API_KEY) - .setLocation("eastus") - .setImageUrlCol("PrimaryImageUrl") - .setOutputCol("RawImageDescription") - .setErrorCol("Errors") - .setVisualFeatures(["Categories", "Description", "Faces", "ImageType", "Color", "Adult"]) - .setConcurrency(5)) - - df2 = describeImage.transform(data)\ - .select("*", "RawImageDescription.*").drop("Errors", "RawImageDescription") ``` @@ -87,17 +73,11 @@ Before writing the results to a Search Index, you must define a schema which mus ```python from synapse.ml.cognitive import * - df2.writeToAzureSearch( - subscriptionKey=AZURE_SEARCH_KEY, - actionCol="searchAction", - serviceName=search_service, - 
indexName=search_index, - keyCol="ObjectID") ``` diff --git a/website/versioned_docs/version-0.9.1/examples/ConditionalKNN - Exploring Art Across Cultures.md b/website/versioned_docs/version-0.9.1/features/other/ConditionalKNN - Exploring Art Across Cultures.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/ConditionalKNN - Exploring Art Across Cultures.md rename to website/versioned_docs/version-0.9.1/features/other/ConditionalKNN - Exploring Art Across Cultures.md diff --git a/website/versioned_docs/version-0.9.1/examples/CyberML - Anomalous Access Detection.md b/website/versioned_docs/version-0.9.1/features/other/CyberML - Anomalous Access Detection.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/CyberML - Anomalous Access Detection.md rename to website/versioned_docs/version-0.9.1/features/other/CyberML - Anomalous Access Detection.md diff --git a/website/versioned_docs/version-0.9.1/examples/deep_learning/DeepLearning - BiLSTM Medical Entity Extraction.md b/website/versioned_docs/version-0.9.1/features/other/DeepLearning - BiLSTM Medical Entity Extraction.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/deep_learning/DeepLearning - BiLSTM Medical Entity Extraction.md rename to website/versioned_docs/version-0.9.1/features/other/DeepLearning - BiLSTM Medical Entity Extraction.md diff --git a/website/versioned_docs/version-0.9.1/examples/deep_learning/DeepLearning - CIFAR10 Convolutional Network.md b/website/versioned_docs/version-0.9.1/features/other/DeepLearning - CIFAR10 Convolutional Network.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/deep_learning/DeepLearning - CIFAR10 Convolutional Network.md rename to website/versioned_docs/version-0.9.1/features/other/DeepLearning - CIFAR10 Convolutional Network.md diff --git a/website/versioned_docs/version-0.9.1/examples/deep_learning/DeepLearning - Flower Image Classification.md b/website/versioned_docs/version-0.9.1/features/other/DeepLearning - Flower Image Classification.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/deep_learning/DeepLearning - Flower Image Classification.md rename to website/versioned_docs/version-0.9.1/features/other/DeepLearning - Flower Image Classification.md diff --git a/website/versioned_docs/version-0.9.1/examples/deep_learning/DeepLearning - Transfer Learning.md b/website/versioned_docs/version-0.9.1/features/other/DeepLearning - Transfer Learning.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/deep_learning/DeepLearning - Transfer Learning.md rename to website/versioned_docs/version-0.9.1/features/other/DeepLearning - Transfer Learning.md diff --git a/website/versioned_docs/version-0.9.1/examples/HyperParameterTuning - Fighting Breast Cancer.md b/website/versioned_docs/version-0.9.1/features/other/HyperParameterTuning - Fighting Breast Cancer.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/HyperParameterTuning - Fighting Breast Cancer.md rename to website/versioned_docs/version-0.9.1/features/other/HyperParameterTuning - Fighting Breast Cancer.md diff --git a/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.md b/website/versioned_docs/version-0.9.1/features/other/TextAnalytics - Amazon Book Reviews with Word2Vec.md similarity index 99% rename from 
website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.md rename to website/versioned_docs/version-0.9.1/features/other/TextAnalytics - Amazon Book Reviews with Word2Vec.md index d88de1c922..59fcf6a1fe 100644 --- a/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec.md +++ b/website/versioned_docs/version-0.9.1/features/other/TextAnalytics - Amazon Book Reviews with Word2Vec.md @@ -73,49 +73,27 @@ Generate several models with different parameters from the training data. ```python from pyspark.ml.classification import LogisticRegression, RandomForestClassifier, GBTClassifier - from synapse.ml.train import TrainClassifier - import itertools - - lrHyperParams = [0.05, 0.2] - logisticRegressions = [LogisticRegression(regParam = hyperParam) - for hyperParam in lrHyperParams] - lrmodels = [TrainClassifier(model=lrm, labelCol="label").fit(ptrain) - for lrm in logisticRegressions] - - rfHyperParams = itertools.product([5, 10], [2, 3]) - randomForests = [RandomForestClassifier(numTrees=hyperParam[0], maxDepth=hyperParam[1]) - for hyperParam in rfHyperParams] - rfmodels = [TrainClassifier(model=rfm, labelCol="label").fit(ptrain) - for rfm in randomForests] - - gbtHyperParams = itertools.product([8, 16], [2, 3]) - gbtclassifiers = [GBTClassifier(maxBins=hyperParam[0], maxDepth=hyperParam[1]) - for hyperParam in gbtHyperParams] - gbtmodels = [TrainClassifier(model=gbt, labelCol="label").fit(ptrain) - for gbt in gbtclassifiers] - - trainedModels = lrmodels + rfmodels + gbtmodels ``` @@ -124,13 +102,9 @@ Find the best model for the given test dataset. ```python from synapse.ml.automl import FindBestModel - bestModel = FindBestModel(evaluationMetric="AUC", models=trainedModels).fit(ptest) - bestModel.getRocCurve().show() - bestModel.getBestModelMetrics().show() - bestModel.getAllModelMetrics().show() ``` @@ -139,16 +113,10 @@ Get the accuracy from the validation dataset. ```python from synapse.ml.train import ComputeModelStatistics - predictions = bestModel.transform(pvalidation) - metrics = ComputeModelStatistics().transform(predictions) - print("Best model's accuracy on validation set = " - + "{0:.2f}%".format(metrics.first()["accuracy"] * 100)) - print("Best model's AUC on validation set = " - + "{0:.2f}%".format(metrics.first()["AUC"] * 100)) ``` diff --git a/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews.md b/website/versioned_docs/version-0.9.1/features/other/TextAnalytics - Amazon Book Reviews.md similarity index 99% rename from website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews.md rename to website/versioned_docs/version-0.9.1/features/other/TextAnalytics - Amazon Book Reviews.md index ca9e1b635f..e0443d6044 100644 --- a/website/versioned_docs/version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews.md +++ b/website/versioned_docs/version-0.9.1/features/other/TextAnalytics - Amazon Book Reviews.md @@ -34,11 +34,8 @@ to generate 2²⁰ sparse features. ```python from synapse.ml.featurize.text import TextFeaturizer - textFeaturizer = TextFeaturizer() \ - .setInputCol("text").setOutputCol("features") \ - .setUseStopWordsRemover(True).setUseIDF(True).setMinDocFreq(5).setNumFeatures(1 << 16).fit(data) ``` @@ -63,19 +60,12 @@ Train several Logistic Regression models with different regularizations. 
```python train, test, validation = processedData.randomSplit([0.60, 0.20, 0.20]) - from pyspark.ml.classification import LogisticRegression - - lrHyperParams = [0.05, 0.1, 0.2, 0.4] - logisticRegressions = [LogisticRegression(regParam = hyperParam) for hyperParam in lrHyperParams] - - from synapse.ml.train import TrainClassifier - lrmodels = [TrainClassifier(model=lrm, labelCol="label").fit(train) for lrm in logisticRegressions] ``` @@ -84,16 +74,11 @@ Find the model with the best AUC on the test set. ```python from synapse.ml.automl import FindBestModel, BestModel - bestModel = FindBestModel(evaluationMetric="AUC", models=lrmodels).fit(test) - bestModel.getRocCurve().show() - bestModel.getBestModelMetrics().show() - bestModel.getAllModelMetrics().show() - ``` Use the optimized `ComputeModelStatistics` API to find the model accuracy. @@ -101,12 +86,8 @@ Use the optimized `ComputeModelStatistics` API to find the model accuracy. ```python from synapse.ml.train import ComputeModelStatistics - predictions = bestModel.transform(validation) - metrics = ComputeModelStatistics().transform(predictions) - print("Best model's accuracy on validation set = " - + "{0:.2f}%".format(metrics.first()["accuracy"] * 100)) ``` diff --git a/website/versioned_docs/version-0.9.1/examples/regression/Regression - Auto Imports.md b/website/versioned_docs/version-0.9.1/features/regression/Regression - Auto Imports.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/regression/Regression - Auto Imports.md rename to website/versioned_docs/version-0.9.1/features/regression/Regression - Auto Imports.md diff --git a/website/versioned_docs/version-0.9.1/examples/regression/Regression - Flight Delays with DataCleaning.md b/website/versioned_docs/version-0.9.1/features/regression/Regression - Flight Delays with DataCleaning.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/regression/Regression - Flight Delays with DataCleaning.md rename to website/versioned_docs/version-0.9.1/features/regression/Regression - Flight Delays with DataCleaning.md diff --git a/website/versioned_docs/version-0.9.1/examples/regression/Regression - Flight Delays.md b/website/versioned_docs/version-0.9.1/features/regression/Regression - Flight Delays.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/regression/Regression - Flight Delays.md rename to website/versioned_docs/version-0.9.1/features/regression/Regression - Flight Delays.md diff --git a/website/versioned_docs/version-0.9.1/examples/regression/Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor.md b/website/versioned_docs/version-0.9.1/features/regression/Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/regression/Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor.md rename to website/versioned_docs/version-0.9.1/features/regression/Regression - Vowpal Wabbit vs. LightGBM vs. 
Linear Regressor.md diff --git a/website/versioned_docs/version-0.9.1/features/responsible_ai/Data Balance Analysis.md b/website/versioned_docs/version-0.9.1/features/responsible_ai/Data Balance Analysis.md index 19d7a0df32..e95ffb78bc 100644 --- a/website/versioned_docs/version-0.9.1/features/responsible_ai/Data Balance Analysis.md +++ b/website/versioned_docs/version-0.9.1/features/responsible_ai/Data Balance Analysis.md @@ -19,7 +19,7 @@ In summary, Data Balance Analysis, used as a step for building ML models has the ## Examples -* [Data Balance Analysis - Adult Census Income](../../../examples/responsible_ai/DataBalanceAnalysis%20-%20Adult%20Census%20Income) +* [Data Balance Analysis - Adult Census Income](../../../features/responsible_ai/DataBalanceAnalysis%20-%20Adult%20Census%20Income) ## Usage diff --git a/website/versioned_docs/version-0.9.1/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.md b/website/versioned_docs/version-0.9.1/features/responsible_ai/DataBalanceAnalysis - Adult Census Income.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income.md rename to website/versioned_docs/version-0.9.1/features/responsible_ai/DataBalanceAnalysis - Adult Census Income.md diff --git a/website/versioned_docs/version-0.9.1/examples/responsible_ai/Interpretability - Explanation Dashboard.md b/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Explanation Dashboard.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/responsible_ai/Interpretability - Explanation Dashboard.md rename to website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Explanation Dashboard.md diff --git a/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md b/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md index 1051f28447..4eea455b27 100644 --- a/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md +++ b/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md @@ -12,33 +12,19 @@ First we import the packages and define some UDFs and a plotting function we wil ```python from synapse.ml.explainers import * - from synapse.ml.onnx import ONNXModel - from synapse.ml.opencv import ImageTransformer - from synapse.ml.io import * - from pyspark.ml import Pipeline - from pyspark.ml.classification import LogisticRegression - from pyspark.ml.feature import StringIndexer - from pyspark.sql.functions import * - from pyspark.sql.types import * - import numpy as np - import pyspark - import urllib.request - import matplotlib.pyplot as plt - import PIL, io - from PIL import Image diff --git a/website/versioned_docs/version-0.9.1/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.md b/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Snow Leopard Detection.md similarity index 98% rename from website/versioned_docs/version-0.9.1/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.md rename to website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Snow Leopard Detection.md index b191225293..5740c315ce 100644 --- a/website/versioned_docs/version-0.9.1/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection.md +++ 
b/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Snow Leopard Detection.md @@ -1,5 +1,5 @@ --- -title: ModelInterpretability - Snow Leopard Detection +title: Interpretability - Snow Leopard Detection hide_title: true status: stable --- @@ -134,59 +134,32 @@ train, test = images.randomSplit([.7,.3], seed=1) ```python from pyspark.ml import Pipeline - from pyspark.ml.feature import StringIndexer - from pyspark.ml.classification import LogisticRegression - from pyspark.sql.functions import udf - from synapse.ml.downloader import ModelDownloader - from synapse.ml.cntk import ImageFeaturizer - from synapse.ml.stages import UDFTransformer - from pyspark.sql.types import * - - def getIndex(row): - return float(row[1]) - - if os.environ.get("AZURE_SERVICE", None) == "Microsoft.ProjectArcadia": - network = ModelDownloader(spark, "abfss://synapse@mmlsparkeuap.dfs.core.windows.net/models/").downloadByName("ResNet50") - else: - network = ModelDownloader(spark, "dbfs:/Models/").downloadByName("ResNet50") - - model = Pipeline(stages=[ - StringIndexer(inputCol = "labels", outputCol="index"), - ImageFeaturizer(inputCol="image", outputCol="features", cutOutputLayers=1).setModel(network), - LogisticRegression(maxIter=5, labelCol="index", regParam=10.0), - UDFTransformer()\ - .setUDF(udf(getIndex, DoubleType()))\ - .setInputCol("probability")\ - .setOutputCol("leopard_prob") - ]) - - fitModel = model.fit(train) ``` @@ -195,60 +168,35 @@ fitModel = model.fit(train) ```python def plotConfusionMatrix(df, label, prediction, classLabels): - from synapse.ml.plot import confusionMatrix - import matplotlib.pyplot as plt - fig = plt.figure(figsize=(4.5, 4.5)) - confusionMatrix(df, label, prediction, classLabels) - display(fig) - - if os.environ.get("AZURE_SERVICE", None) != "Microsoft.ProjectArcadia": - plotConfusionMatrix(fitModel.transform(test), "index", "prediction", fitModel.stages[0].labels) ``` ```python import urllib.request - from synapse.ml.lime import ImageLIME - - test_image_url = "https://mmlspark.blob.core.windows.net/graphics/SnowLeopardAD/snow_leopard1.jpg" - with urllib.request.urlopen(test_image_url) as url: - barr = url.read() - test_subsample = spark.createDataFrame([(bytearray(barr),)], ["image"]) - - lime = ImageLIME()\ - .setModel(fitModel)\ - .setPredictionCol("leopard_prob")\ - .setOutputCol("weights")\ - .setInputCol("image")\ - .setCellSize(100.0)\ - .setModifier(50.0)\ - .setNSamples(300) - - result = lime.transform(test_subsample) ``` diff --git a/website/versioned_docs/version-0.9.1/examples/responsible_ai/Interpretability - Tabular SHAP explainer.md b/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Tabular SHAP explainer.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/responsible_ai/Interpretability - Tabular SHAP explainer.md rename to website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Tabular SHAP explainer.md diff --git a/website/versioned_docs/version-0.9.1/examples/responsible_ai/Interpretability - Text Explainers.md b/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Text Explainers.md similarity index 100% rename from website/versioned_docs/version-0.9.1/examples/responsible_ai/Interpretability - Text Explainers.md rename to website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Text Explainers.md diff --git a/website/versioned_docs/version-0.9.1/features/responsible_ai/Model Interpretation on 
Spark.md b/website/versioned_docs/version-0.9.1/features/responsible_ai/Model Interpretation on Spark.md index e81aa9525d..d2c7cc5f4e 100644 --- a/website/versioned_docs/version-0.9.1/features/responsible_ai/Model Interpretation on Spark.md +++ b/website/versioned_docs/version-0.9.1/features/responsible_ai/Model Interpretation on Spark.md @@ -26,9 +26,9 @@ Both explainers extends from `org.apache.spark.ml.Transformer`. After setting up To see examples of model interpretability on Spark in action, take a look at these sample notebooks: -- [Tabular SHAP explainer](../../../examples/responsible_ai/Interpretability%20-%20Tabular%20SHAP%20explainer) +- [Tabular SHAP explainer](../../../features/responsible_ai/Interpretability%20-%20Tabular%20SHAP%20explainer) - [Image explainers](../../../features/responsible_ai/Interpretability%20-%20Image%20Explainers) -- [Text explainers](../../../examples/responsible_ai/Interpretability%20-%20Text%20Explainers) +- [Text explainers](../../../features/responsible_ai/Interpretability%20-%20Text%20Explainers) | | Tabular models | Vector models | Image models | Text models | |------------------------|-----------------------------|---------------------------|-------------------------|-----------------------| diff --git a/website/versioned_docs/version-0.9.1/features/vw/Vowpal Wabbit - Overview.md b/website/versioned_docs/version-0.9.1/features/vw/Vowpal Wabbit - Overview.md index 795dd1a86b..ee48672341 100644 --- a/website/versioned_docs/version-0.9.1/features/vw/Vowpal Wabbit - Overview.md +++ b/website/versioned_docs/version-0.9.1/features/vw/Vowpal Wabbit - Overview.md @@ -353,8 +353,6 @@ axe.set_title("Vowpal Wabbit") ## Quantile Regression for Drug Discovery with VowpalWabbitRegressor - - #### Read dataset diff --git a/website/versioned_docs/version-0.9.1/reference/datasets.md b/website/versioned_docs/version-0.9.1/reference/datasets.md index bcbab0a935..524696b6dd 100644 --- a/website/versioned_docs/version-0.9.1/reference/datasets.md +++ b/website/versioned_docs/version-0.9.1/reference/datasets.md @@ -30,7 +30,7 @@ tab-separated file with 2 columns (`rating`, `text`) and 10000 rows. The contains free-form text strings in English language. You can use `synapse.ml.TextFeaturizer` to convert the text into feature vectors for machine learning models ([see -example](../../examples/text_analytics/TextAnalytics%20-%20Amazon%20Book%20Reviews/)). +example](../../features/other/TextAnalytics%20-%20Amazon%20Book%20Reviews/)). 
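As a rough sketch of that featurization workflow (an editorial illustration, not taken from the patch; the local file name and the featurizer settings are assumptions borrowed from the Amazon Book Reviews example earlier in this patch):

```python
# Hypothetical sketch: load the tab-separated (rating, text) reviews and turn
# the text column into feature vectors with TextFeaturizer. The path and the
# parameter values are assumptions, not part of the original docs.
from synapse.ml.featurize.text import TextFeaturizer

reviews = (spark.read
    .option("sep", "\t")
    .csv("BookReviewsFromAmazon10K.tsv")
    .toDF("rating", "text"))

featurizer = (TextFeaturizer()
    .setInputCol("text")
    .setOutputCol("features")
    .setUseStopWordsRemover(True)
    .setUseIDF(True)
    .setMinDocFreq(5)
    .setNumFeatures(1 << 16)
    .fit(reviews))

featurizer.transform(reviews).select("rating", "features").show(5)
```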
The example dataset is available [here](https://mmlspark.azureedge.net/datasets/BookReviewsFromAmazon10K.tsv); diff --git a/website/versioned_sidebars/version-0.9.1-sidebars.json b/website/versioned_sidebars/version-0.9.1-sidebars.json index 1ed5b57788..c77179702b 100644 --- a/website/versioned_sidebars/version-0.9.1-sidebars.json +++ b/website/versioned_sidebars/version-0.9.1-sidebars.json @@ -28,37 +28,25 @@ "type": "category", "label": "Features", "items": [ - { - "type": "doc", - "id": "version-0.9.1/features/CognitiveServices - Overview" - }, { "type": "category", - "label": "HTTP on Spark", + "label": "Cognitive Services", "items": [ { "type": "doc", - "id": "version-0.9.1/features/http/about" + "id": "version-0.9.1/features/cognitive_services/CognitiveServices - Celebrity Quote Analysis" }, { "type": "doc", - "id": "version-0.9.1/features/http/HttpOnSpark - Working with Arbitrary Web APIs" - } - ], - "collapsible": true, - "collapsed": true - }, - { - "type": "category", - "label": "LightGBM", - "items": [ + "id": "version-0.9.1/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms" + }, { "type": "doc", - "id": "version-0.9.1/features/lightgbm/about" + "id": "version-0.9.1/features/cognitive_services/CognitiveServices - Overview" }, { "type": "doc", - "id": "version-0.9.1/features/lightgbm/LightGBM - Overview" + "id": "version-0.9.1/features/cognitive_services/CognitiveServices - Predictive Maintenance" } ], "collapsible": true, @@ -72,10 +60,30 @@ "type": "doc", "id": "version-0.9.1/features/responsible_ai/Data Balance Analysis" }, + { + "type": "doc", + "id": "version-0.9.1/features/responsible_ai/DataBalanceAnalysis - Adult Census Income" + }, + { + "type": "doc", + "id": "version-0.9.1/features/responsible_ai/Interpretability - Explanation Dashboard" + }, { "type": "doc", "id": "version-0.9.1/features/responsible_ai/Interpretability - Image Explainers" }, + { + "type": "doc", + "id": "version-0.9.1/features/responsible_ai/Interpretability - Snow Leopard Detection" + }, + { + "type": "doc", + "id": "version-0.9.1/features/responsible_ai/Interpretability - Tabular SHAP explainer" + }, + { + "type": "doc", + "id": "version-0.9.1/features/responsible_ai/Interpretability - Text Explainers" + }, { "type": "doc", "id": "version-0.9.1/features/responsible_ai/Model Interpretation on Spark" @@ -102,15 +110,15 @@ }, { "type": "category", - "label": "Spark Serving", + "label": "LightGBM", "items": [ { "type": "doc", - "id": "version-0.9.1/features/spark_serving/about" + "id": "version-0.9.1/features/lightgbm/about" }, { "type": "doc", - "id": "version-0.9.1/features/spark_serving/SparkServing - Deploying a Classifier" + "id": "version-0.9.1/features/lightgbm/LightGBM - Overview" } ], "collapsible": true, @@ -131,58 +139,18 @@ ], "collapsible": true, "collapsed": true - } - ], - "collapsible": true, - "collapsed": true - }, - { - "type": "category", - "label": "Examples", - "items": [ - { - "type": "doc", - "id": "version-0.9.1/examples/about" - }, - { - "type": "doc", - "id": "version-0.9.1/examples/AzureSearchIndex - Met Artworks" - }, - { - "type": "doc", - "id": "version-0.9.1/examples/ConditionalKNN - Exploring Art Across Cultures" - }, - { - "type": "doc", - "id": "version-0.9.1/examples/CyberML - Anomalous Access Detection" - }, - { - "type": "doc", - "id": "version-0.9.1/examples/HyperParameterTuning - Fighting Breast Cancer" - }, - { - "type": "doc", - "id": "version-0.9.1/examples/OpenCV - Pipeline Image Transformations" }, { "type": 
"category", - "label": "Classification", + "label": "Spark Serving", "items": [ { "type": "doc", - "id": "version-0.9.1/examples/classification/Classification - Adult Census with Vowpal Wabbit" - }, - { - "type": "doc", - "id": "version-0.9.1/examples/classification/Classification - Adult Census" - }, - { - "type": "doc", - "id": "version-0.9.1/examples/classification/Classification - Before and After SynapseML" + "id": "version-0.9.1/features/spark_serving/about" }, { "type": "doc", - "id": "version-0.9.1/examples/classification/Classification - Twitter Sentiment with Vowpal Wabbit" + "id": "version-0.9.1/features/spark_serving/SparkServing - Deploying a Classifier" } ], "collapsible": true, @@ -190,15 +158,11 @@ }, { "type": "category", - "label": "Cognitive Services", + "label": "OpenCV", "items": [ { "type": "doc", - "id": "version-0.9.1/examples/cognitive_services/CognitiveServices - Celebrity Quote Analysis" - }, - { - "type": "doc", - "id": "version-0.9.1/examples/cognitive_services/CognitiveServices - Predictive Maintenance" + "id": "version-0.9.1/features/opencv/OpenCV - Pipeline Image Transformations" } ], "collapsible": true, @@ -206,23 +170,23 @@ }, { "type": "category", - "label": "Deep Learning", + "label": "Classification", "items": [ { "type": "doc", - "id": "version-0.9.1/examples/deep_learning/DeepLearning - BiLSTM Medical Entity Extraction" + "id": "version-0.9.1/features/classification/Classification - Adult Census with Vowpal Wabbit" }, { "type": "doc", - "id": "version-0.9.1/examples/deep_learning/DeepLearning - CIFAR10 Convolutional Network" + "id": "version-0.9.1/features/classification/Classification - Adult Census" }, { "type": "doc", - "id": "version-0.9.1/examples/deep_learning/DeepLearning - Flower Image Classification" + "id": "version-0.9.1/features/classification/Classification - Before and After SynapseML" }, { "type": "doc", - "id": "version-0.9.1/examples/deep_learning/DeepLearning - Transfer Learning" + "id": "version-0.9.1/features/classification/Classification - Twitter Sentiment with Vowpal Wabbit" } ], "collapsible": true, @@ -230,27 +194,23 @@ }, { "type": "category", - "label": "Responsible AI", + "label": "Regression", "items": [ { "type": "doc", - "id": "version-0.9.1/examples/responsible_ai/DataBalanceAnalysis - Adult Census Income" - }, - { - "type": "doc", - "id": "version-0.9.1/examples/responsible_ai/Interpretability - Explanation Dashboard" + "id": "version-0.9.1/features/regression/Regression - Auto Imports" }, { "type": "doc", - "id": "version-0.9.1/examples/responsible_ai/Interpretability - Tabular SHAP explainer" + "id": "version-0.9.1/features/regression/Regression - Flight Delays with DataCleaning" }, { "type": "doc", - "id": "version-0.9.1/examples/responsible_ai/Interpretability - Text Explainers" + "id": "version-0.9.1/features/regression/Regression - Flight Delays" }, { "type": "doc", - "id": "version-0.9.1/examples/responsible_ai/ModelInterpretability - Snow Leopard Detection" + "id": "version-0.9.1/features/regression/Regression - Vowpal Wabbit vs. LightGBM vs. 
Linear Regressor" } ], "collapsible": true, @@ -258,39 +218,47 @@ }, { "type": "category", - "label": "Regression", + "label": "Other", "items": [ { "type": "doc", - "id": "version-0.9.1/examples/regression/Regression - Auto Imports" + "id": "version-0.9.1/features/other/AzureSearchIndex - Met Artworks" }, { "type": "doc", - "id": "version-0.9.1/examples/regression/Regression - Flight Delays with DataCleaning" + "id": "version-0.9.1/features/other/ConditionalKNN - Exploring Art Across Cultures" }, { "type": "doc", - "id": "version-0.9.1/examples/regression/Regression - Flight Delays" + "id": "version-0.9.1/features/other/CyberML - Anomalous Access Detection" }, { "type": "doc", - "id": "version-0.9.1/examples/regression/Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor" - } - ], - "collapsible": true, - "collapsed": true - }, - { - "type": "category", - "label": "Text Analytics", - "items": [ + "id": "version-0.9.1/features/other/DeepLearning - BiLSTM Medical Entity Extraction" + }, + { + "type": "doc", + "id": "version-0.9.1/features/other/DeepLearning - CIFAR10 Convolutional Network" + }, + { + "type": "doc", + "id": "version-0.9.1/features/other/DeepLearning - Flower Image Classification" + }, + { + "type": "doc", + "id": "version-0.9.1/features/other/DeepLearning - Transfer Learning" + }, + { + "type": "doc", + "id": "version-0.9.1/features/other/HyperParameterTuning - Fighting Breast Cancer" + }, { "type": "doc", - "id": "version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews with Word2Vec" + "id": "version-0.9.1/features/other/TextAnalytics - Amazon Book Reviews with Word2Vec" }, { "type": "doc", - "id": "version-0.9.1/examples/text_analytics/TextAnalytics - Amazon Book Reviews" + "id": "version-0.9.1/features/other/TextAnalytics - Amazon Book Reviews" } ], "collapsible": true, diff --git a/yarn.lock b/yarn.lock new file mode 100644 index 0000000000..fb57ccd13a --- /dev/null +++ b/yarn.lock @@ -0,0 +1,4 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + From b2751eb94479bbd73d05363dba3e80fc2edd2c59 Mon Sep 17 00:00:00 2001 From: Serena Ruan <82044803+serena-ruan@users.noreply.github.com> Date: Fri, 12 Nov 2021 23:37:18 +0800 Subject: [PATCH 18/40] refactor: automate adding tons of codeblock helpers in website samples (#1253) * delete python imports and helpers in markdowns & modify doctest to auto add those * fix ) in markdown & format * fix mergePyCodeDir error * fix announcement color * test letting job fail on errors * fix pipeline * fix pipeline * fix SynapseE2e job * fixing pipeline * fix python version * fix sphinx version caused bug in publish artifacts --- environment.yaml | 2 +- pipeline.yaml | 17 +- project/CodegenPlugin.scala | 2 +- .../documentation/estimators/_LightGBM.md | 68 +--- website/docs/documentation/estimators/_VW.md | 45 +-- .../documentation/estimators/core/_AutoML.md | 41 -- .../estimators/core/_Featurize.md | 112 +----- .../estimators/core/_IsolationForest.md | 22 +- .../docs/documentation/estimators/core/_NN.md | 45 +-- .../estimators/core/_Recommendation.md | 45 +-- .../documentation/estimators/core/_Stages.md | 68 +--- .../documentation/estimators/core/_Train.md | 45 +-- .../documentation/transformers/_OpenCV.md | 44 +-- .../docs/documentation/transformers/_VW.md | 88 +---- .../cognitive/_AnomalyDetection.md | 69 +--- .../transformers/cognitive/_AzureSearch.md | 24 -- .../cognitive/_BingImageSearch.md | 24 -- .../transformers/cognitive/_ComputerVision.md | 184 +-------- .../transformers/cognitive/_Face.md | 115 +----- .../transformers/cognitive/_FormRecognizer.md | 178 +-------- .../transformers/cognitive/_SpeechToText.md | 44 +-- .../transformers/cognitive/_TextAnalytics.md | 134 +------ .../transformers/cognitive/_Translator.md | 159 +------- .../transformers/core/_Explainers.md | 176 +-------- .../transformers/core/_Featurize.md | 88 +---- .../documentation/transformers/core/_IO.md | 154 +------- .../documentation/transformers/core/_Image.md | 66 +--- .../transformers/core/_Stages.md | 374 ++---------------- .../core/_SuperpixelTransformer.md | 22 +- .../documentation/transformers/core/_Train.md | 44 +-- .../transformers/deep_learning/_ONNXModel.md | 17 +- website/doctest.py | 53 +++ website/src/pages/index.module.css | 1 - 33 files changed, 271 insertions(+), 2299 deletions(-) diff --git a/environment.yaml b/environment.yaml index 42dc41cf6e..93a6466902 100644 --- a/environment.yaml +++ b/environment.yaml @@ -13,7 +13,7 @@ dependencies: - r-devtools - pip: - wheel - - sphinx + - sphinx==4.2.0 - sphinx_rtd_theme - coverage - pytest diff --git a/pipeline.yaml b/pipeline.yaml index c3b965a7f6..31eab83508 100644 --- a/pipeline.yaml +++ b/pipeline.yaml @@ -56,6 +56,7 @@ jobs: azureSubscription: 'MMLSpark Build' keyVaultName: mmlspark-keys - bash: | + set -e source activate synapseml sbt packagePython sbt publishBlob publishDocs publishR publishPython @@ -71,7 +72,9 @@ jobs: PGP-PRIVATE: $(pgp-private) PGP-PUBLIC: $(pgp-public) PGP-PW: $(pgp-pw) - - bash: sbt publishBadges + - bash: | + set -e + sbt publishBadges condition: and(succeeded(), eq(variables['Build.SourceBranch'], 'refs/heads/master')) displayName: Publish Badges env: @@ -98,6 +101,7 @@ jobs: azureSubscription: 'MMLSpark Build' keyVaultName: mmlspark-keys - bash: | + set -e source activate synapseml sbt packagePython sbt publishBlob @@ -139,8 +143,9 @@ jobs: azureSubscription: 'MMLSpark Build' keyVaultName: mmlspark-keys - bash: | + set -e source activate synapseml - jupyter nbconvert --to script ./notebooks/*.ipynb* + jupyter 
nbconvert --to script ./notebooks/features/*/*.ipynb* sbt packagePython sbt publishBlob displayName: Publish Blob Artifacts @@ -246,6 +251,7 @@ jobs: echo '##vso[task.setvariable variable=tag]'$(git tag -l --points-at HEAD) displayName: 'Get Git Tag' - bash: | + set -e wget https://github.com/git-chglog/git-chglog/releases/download/0.8.0/git-chglog_linux_amd64 chmod +x git-chglog_linux_amd64 ./git-chglog_linux_amd64 -o CHANGELOG.md $TAG @@ -274,6 +280,7 @@ jobs: azureSubscription: 'MMLSpark Build' keyVaultName: mmlspark-keys - bash: | + set -e source activate synapseml sbt publishPypi condition: startsWith(variables['tag'], 'v') @@ -327,6 +334,7 @@ jobs: keyVaultName: mmlspark-keys condition: succeededOrFailed() - bash: | + set -e curl -s https://codecov.io/bash > .codecov chmod +x .codecov echo "Starting Codecov Upload" @@ -377,6 +385,7 @@ jobs: keyVaultName: mmlspark-keys condition: succeededOrFailed() - bash: | + set -e curl -s https://codecov.io/bash > .codecov chmod +x .codecov echo "Starting Codecov Upload" @@ -424,6 +433,7 @@ jobs: keyVaultName: mmlspark-keys condition: succeededOrFailed() - bash: | + set -e curl -s https://codecov.io/bash > .codecov chmod +x .codecov echo "Starting Codecov Upload" @@ -460,12 +470,14 @@ jobs: source activate synapseml sbt convertNotebooks - bash: | + set -e yarn install cd website yarn yarn build displayName: 'yarn install and build' - bash: | + set -e git config --global user.name "${GH_NAME}" git config --global user.email "${GH_EMAIL}" git checkout -b main @@ -594,6 +606,7 @@ jobs: keyVaultName: mmlspark-keys condition: succeededOrFailed() - bash: | + set -e curl -s https://codecov.io/bash > .codecov chmod +x .codecov echo "Starting Codecov Upload" diff --git a/project/CodegenPlugin.scala b/project/CodegenPlugin.scala index b1ddcccc14..dfc15c81d9 100644 --- a/project/CodegenPlugin.scala +++ b/project/CodegenPlugin.scala @@ -224,7 +224,7 @@ object CodegenPlugin extends AutoPlugin { artifactPath.in(packageBin).in(Compile).value.getParentFile }, mergePyCodeDir := { - join(baseDirectory.value.getParent, "target", "scala-2.12", "sbt-1.0", "generated") + join(baseDirectory.value.getParent, "target", "scala-2.12", "generated") }, codegenDir := { join(targetDir.value, "generated") diff --git a/website/docs/documentation/estimators/_LightGBM.md b/website/docs/documentation/estimators/_LightGBM.md index 3fdb32209f..26fea2654c 100644 --- a/website/docs/documentation/estimators/_LightGBM.md +++ b/website/docs/documentation/estimators/_LightGBM.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## LightGBMClassifier @@ -87,27 +69,8 @@ values={[ ]}> - + + @@ -159,27 +122,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/estimators/_VW.md b/website/docs/documentation/estimators/_VW.md index 5c658d789d..ca108aed1a 100644 --- a/website/docs/documentation/estimators/_VW.md +++ b/website/docs/documentation/estimators/_VW.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## VowpalWabbitRegressor @@ -84,27 +66,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/estimators/core/_AutoML.md b/website/docs/documentation/estimators/core/_AutoML.md index 0b7d131423..0f6a009c6a 100644 --- a/website/docs/documentation/estimators/core/_AutoML.md +++ b/website/docs/documentation/estimators/core/_AutoML.md @@ -2,26 +2,6 @@ import Tabs from '@theme/Tabs'; import 
TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - ## AutoML @@ -143,27 +123,6 @@ values={[ ]}> - diff --git a/website/docs/documentation/estimators/core/_Featurize.md b/website/docs/documentation/estimators/core/_Featurize.md index 60092a70da..3f5dbfbaea 100644 --- a/website/docs/documentation/estimators/core/_Featurize.md +++ b/website/docs/documentation/estimators/core/_Featurize.md @@ -2,26 +2,6 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - ## Featurize @@ -112,27 +92,8 @@ values={[ ]}> - + + @@ -189,27 +150,8 @@ values={[ ]}> - + + @@ -277,27 +219,8 @@ values={[ ]}> - + + @@ -352,27 +275,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/estimators/core/_IsolationForest.md b/website/docs/documentation/estimators/core/_IsolationForest.md index 8d66c167f2..7153fe995d 100644 --- a/website/docs/documentation/estimators/core/_IsolationForest.md +++ b/website/docs/documentation/estimators/core/_IsolationForest.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Isolation Forest diff --git a/website/docs/documentation/estimators/core/_NN.md b/website/docs/documentation/estimators/core/_NN.md index 5aa5dcae50..4b917bd8ff 100644 --- a/website/docs/documentation/estimators/core/_NN.md +++ b/website/docs/documentation/estimators/core/_NN.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## NN @@ -76,27 +58,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/estimators/core/_Recommendation.md b/website/docs/documentation/estimators/core/_Recommendation.md index 9135ee55bf..3c4c4b5924 100644 --- a/website/docs/documentation/estimators/core/_Recommendation.md +++ b/website/docs/documentation/estimators/core/_Recommendation.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Recommendation @@ -244,27 +226,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/estimators/core/_Stages.md b/website/docs/documentation/estimators/core/_Stages.md index a9c2822e1e..0d2b1a2c1a 100644 --- a/website/docs/documentation/estimators/core/_Stages.md +++ b/website/docs/documentation/estimators/core/_Stages.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Stages @@ -100,27 +82,8 @@ values={[ ]}> - + + @@ -186,27 +149,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/estimators/core/_Train.md b/website/docs/documentation/estimators/core/_Train.md index 7cb0e687a0..7bae6555ab 100644 --- a/website/docs/documentation/estimators/core/_Train.md +++ b/website/docs/documentation/estimators/core/_Train.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Train @@ -112,27 +94,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/transformers/_OpenCV.md b/website/docs/documentation/transformers/_OpenCV.md index 22f8e85360..49744e75f0 100644 --- a/website/docs/documentation/transformers/_OpenCV.md +++ b/website/docs/documentation/transformers/_OpenCV.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from 
"@theme/DocumentationTable"; - + + ## ImageTransformer @@ -88,26 +70,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/transformers/_VW.md b/website/docs/documentation/transformers/_VW.md index deb56683a8..1f46e7e9a1 100644 --- a/website/docs/documentation/transformers/_VW.md +++ b/website/docs/documentation/transformers/_VW.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## VectorZipper @@ -112,26 +94,8 @@ values={[ - + + @@ -178,26 +142,8 @@ values={[ - + + @@ -246,26 +192,8 @@ values={[ - + + diff --git a/website/docs/documentation/transformers/cognitive/_AnomalyDetection.md b/website/docs/documentation/transformers/cognitive/_AnomalyDetection.md index 0f5e9a12a1..7aaa87ebcf 100644 --- a/website/docs/documentation/transformers/cognitive/_AnomalyDetection.md +++ b/website/docs/documentation/transformers/cognitive/_AnomalyDetection.md @@ -2,27 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Anomaly Detection @@ -136,27 +117,8 @@ values={[ ]}> - + + @@ -254,27 +216,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/transformers/cognitive/_AzureSearch.md b/website/docs/documentation/transformers/cognitive/_AzureSearch.md index adbc038426..05dae98558 100644 --- a/website/docs/documentation/transformers/cognitive/_AzureSearch.md +++ b/website/docs/documentation/transformers/cognitive/_AzureSearch.md @@ -2,30 +2,6 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - ## Azure Search diff --git a/website/docs/documentation/transformers/cognitive/_BingImageSearch.md b/website/docs/documentation/transformers/cognitive/_BingImageSearch.md index a80fe56213..d3eb0b2000 100644 --- a/website/docs/documentation/transformers/cognitive/_BingImageSearch.md +++ b/website/docs/documentation/transformers/cognitive/_BingImageSearch.md @@ -2,30 +2,6 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - ## Bing Image Search diff --git a/website/docs/documentation/transformers/cognitive/_ComputerVision.md b/website/docs/documentation/transformers/cognitive/_ComputerVision.md index 03e87b1323..ab3c4b83a3 100644 --- a/website/docs/documentation/transformers/cognitive/_ComputerVision.md +++ b/website/docs/documentation/transformers/cognitive/_ComputerVision.md @@ -2,27 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Computer Vision @@ -99,27 +80,8 @@ values={[ ]}> - + + @@ -191,27 +153,8 @@ values={[ ]}> - + + @@ -280,27 +223,8 @@ values={[ ]}> - + + @@ -367,27 +291,8 @@ values={[ ]}> - + + @@ -450,27 +355,8 @@ values={[ ]}> - + + @@ -537,27 +423,8 @@ values={[ ]}> - + + @@ -618,27 +485,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/transformers/cognitive/_Face.md b/website/docs/documentation/transformers/cognitive/_Face.md index 33f4e0cf28..d2f0ed4616 100644 --- a/website/docs/documentation/transformers/cognitive/_Face.md +++ b/website/docs/documentation/transformers/cognitive/_Face.md @@ -2,27 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Face @@ -104,27 +85,8 @@ values={[ ]}> - + + @@ -216,27 +178,8 @@ values={[ ]}> - + + @@ -326,27 +269,8 @@ values={[ ]}> - + + @@ 
-401,27 +325,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/transformers/cognitive/_FormRecognizer.md b/website/docs/documentation/transformers/cognitive/_FormRecognizer.md index 655d7bb646..5ec86fa76d 100644 --- a/website/docs/documentation/transformers/cognitive/_FormRecognizer.md +++ b/website/docs/documentation/transformers/cognitive/_FormRecognizer.md @@ -2,27 +2,6 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - ## Form Recognizer @@ -104,27 +83,8 @@ values={[ ]}> - + + @@ -189,27 +149,8 @@ values={[ ]}> - + + @@ -272,27 +213,6 @@ values={[ ]}> - @@ -358,27 +278,6 @@ values={[ ]}> - @@ -444,27 +343,8 @@ values={[ ]}> - + + @@ -537,27 +417,8 @@ values={[ ]}> - + + @@ -624,27 +485,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/transformers/cognitive/_SpeechToText.md b/website/docs/documentation/transformers/cognitive/_SpeechToText.md index 56b6af5225..d5dba97d92 100644 --- a/website/docs/documentation/transformers/cognitive/_SpeechToText.md +++ b/website/docs/documentation/transformers/cognitive/_SpeechToText.md @@ -2,27 +2,6 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - ## Speech To Text @@ -105,27 +84,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/transformers/cognitive/_TextAnalytics.md b/website/docs/documentation/transformers/cognitive/_TextAnalytics.md index 7f3c9503de..882e1abf2d 100644 --- a/website/docs/documentation/transformers/cognitive/_TextAnalytics.md +++ b/website/docs/documentation/transformers/cognitive/_TextAnalytics.md @@ -2,27 +2,6 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - ## Text Analytics @@ -99,27 +78,6 @@ values={[ ]}> - @@ -186,27 +144,8 @@ values={[ ]}> - + + @@ -275,27 +214,8 @@ values={[ ]}> - + + @@ -359,27 +279,8 @@ values={[ ]}> - + + @@ -446,27 +347,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/transformers/cognitive/_Translator.md b/website/docs/documentation/transformers/cognitive/_Translator.md index 3e68c3d9e9..1e14de7703 100644 --- a/website/docs/documentation/transformers/cognitive/_Translator.md +++ b/website/docs/documentation/transformers/cognitive/_Translator.md @@ -2,27 +2,6 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - ## Translator @@ -106,27 +85,8 @@ values={[ ]}> - + + @@ -200,27 +160,8 @@ values={[ ]}> - + + @@ -286,27 +227,8 @@ values={[ ]}> - + + @@ -372,27 +294,8 @@ values={[ ]}> - + + @@ -464,27 +367,8 @@ values={[ ]}> - + + @@ -552,27 +436,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/transformers/core/_Explainers.md b/website/docs/documentation/transformers/core/_Explainers.md index 573250e81a..dbe6bcf076 100644 --- a/website/docs/documentation/transformers/core/_Explainers.md +++ b/website/docs/documentation/transformers/core/_Explainers.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Explainers @@ -96,26 +78,8 @@ values={[ ]}> - + + @@ -181,26 +145,8 @@ values={[ ]}> - + + @@ -273,26 +219,8 @@ values={[ ]}> - + + @@ -363,26 +291,8 @@ values={[ ]}> - + + @@ -443,26 +353,8 @@ values={[ ]}> - + + @@ -521,26 +413,8 @@ values={[ ]}> - + + @@ -621,26 +495,8 @@ values={[ ]}> - + + diff --git 
a/website/docs/documentation/transformers/core/_Featurize.md b/website/docs/documentation/transformers/core/_Featurize.md index bd61b8fb56..82beef09ba 100644 --- a/website/docs/documentation/transformers/core/_Featurize.md +++ b/website/docs/documentation/transformers/core/_Featurize.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Featurize @@ -92,26 +74,8 @@ values={[ ]}> - + + @@ -175,26 +139,8 @@ values={[ ]}> - + + @@ -271,26 +217,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/transformers/core/_IO.md b/website/docs/documentation/transformers/core/_IO.md index 8a7e38c526..19569f157a 100644 --- a/website/docs/documentation/transformers/core/_IO.md +++ b/website/docs/documentation/transformers/core/_IO.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## IO @@ -87,26 +69,8 @@ values={[ ]}> - + + @@ -158,26 +122,8 @@ values={[ ]}> - + + @@ -221,26 +167,8 @@ values={[ ]}> - + + @@ -286,26 +214,8 @@ values={[ ]}> - + + @@ -347,26 +257,8 @@ values={[ ]}> - + + @@ -409,26 +301,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/transformers/core/_Image.md b/website/docs/documentation/transformers/core/_Image.md index 6e8984a230..3d6c452145 100644 --- a/website/docs/documentation/transformers/core/_Image.md +++ b/website/docs/documentation/transformers/core/_Image.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Image @@ -90,26 +72,8 @@ values={[ ]}> - + + @@ -179,26 +143,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/transformers/core/_Stages.md b/website/docs/documentation/transformers/core/_Stages.md index 4a97eae2e5..b8cc3a0ac1 100644 --- a/website/docs/documentation/transformers/core/_Stages.md +++ b/website/docs/documentation/transformers/core/_Stages.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Stages @@ -105,26 +87,8 @@ values={[ ]}> - + + @@ -178,26 +142,8 @@ values={[ ]}> - + + @@ -259,26 +205,8 @@ values={[ ]}> - + + @@ -332,26 +260,8 @@ values={[ ]}> - + + @@ -416,26 +326,8 @@ values={[ ]}> - + + @@ -482,26 +374,8 @@ values={[ ]}> - + + @@ -543,26 +417,8 @@ values={[ ]}> - + + @@ -612,26 +468,8 @@ values={[ ]}> - + + @@ -681,26 +519,8 @@ values={[ ]}> - + + @@ -754,26 +574,8 @@ values={[ ]}> - + + @@ -845,26 +647,8 @@ values={[ ]}> - + + @@ -918,26 +702,8 @@ values={[ ]}> - + + @@ -1007,26 +773,8 @@ values={[ ]}> - + + @@ -1080,26 +828,8 @@ values={[ ]}> - + + @@ -1173,26 +903,8 @@ values={[ ]}> - + + @@ -1258,26 +970,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/transformers/core/_SuperpixelTransformer.md b/website/docs/documentation/transformers/core/_SuperpixelTransformer.md index 622ffea5e0..1c7c06890d 100644 --- a/website/docs/documentation/transformers/core/_SuperpixelTransformer.md +++ b/website/docs/documentation/transformers/core/_SuperpixelTransformer.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## LIME diff --git a/website/docs/documentation/transformers/core/_Train.md b/website/docs/documentation/transformers/core/_Train.md index e00aa728cf..7247fc87f2 100644 --- 
a/website/docs/documentation/transformers/core/_Train.md +++ b/website/docs/documentation/transformers/core/_Train.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Train @@ -92,26 +74,8 @@ values={[ ]}> - + + diff --git a/website/docs/documentation/transformers/deep_learning/_ONNXModel.md b/website/docs/documentation/transformers/deep_learning/_ONNXModel.md index e10f0fb789..84fd7f1df6 100644 --- a/website/docs/documentation/transformers/deep_learning/_ONNXModel.md +++ b/website/docs/documentation/transformers/deep_learning/_ONNXModel.md @@ -2,16 +2,6 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - ## ONNXModel @@ -24,14 +14,13 @@ values={[ ```py -import synapse.ml from synapse.ml.onnx import ONNXModel model_path = "PUT_YOUR_MODEL_PATH" onnx_ml = (ONNXModel() .setModelLocation(model_path) .setFeedDict({"float_input": "features"}) - .setFetchDict({"prediction": "output_label", "rawProbability": "output_probability"}) + .setFetchDict({"prediction": "output_label", "rawProbability": "output_probability"})) ``` @@ -41,10 +30,10 @@ onnx_ml = (ONNXModel() import com.microsoft.azure.synapse.ml.onnx._ val model_path = "PUT_YOUR_MODEL_PATH" -val onnx_ml = new ONNXModel() +val onnx_ml = (new ONNXModel() .setModelLocation(model_path) .setFeedDict(Map("float_input" -> "features")) - .setFetchDict(Map("prediction" -> "output_label", "rawProbability" -> "output_probability")) + .setFetchDict(Map("prediction" -> "output_label", "rawProbability" -> "output_probability"))) ``` diff --git a/website/doctest.py b/website/doctest.py index 94f85f508c..f8eab83225 100644 --- a/website/doctest.py +++ b/website/doctest.py @@ -1,8 +1,61 @@ +import io import os +import re + + +def add_python_helper_to_markdown(folder, md): + replacement = """ + +""" + with io.open(os.path.join(folder, md), "r+", encoding="utf-8") as f: + content = f.read() + f.truncate(0) + content = re.sub("", replacement, content) + f.seek(0, 0) + f.write(content) + f.close() + + +def iterate_over_documentation(folder): + + cur_folders = [folder] + while cur_folders: + cur_dir = cur_folders.pop(0) + for file in os.listdir(cur_dir): + if os.path.isdir(os.path.join(cur_dir, file)): + cur_folders.append(os.path.join(cur_dir, file)) + else: + if file.startswith("_"): + add_python_helper_to_markdown(cur_dir, file) def main(): cur_path = os.getcwd() folder = os.path.join(cur_path, "website", "docs", "documentation") + iterate_over_documentation(folder) os.chdir(folder) os.system("pytest --codeblocks --junit-xml={}".format(os.path.join(cur_path, "target", "website-test-result.xml"))) diff --git a/website/src/pages/index.module.css b/website/src/pages/index.module.css index 2e8b706904..e263691288 100644 --- a/website/src/pages/index.module.css +++ b/website/src/pages/index.module.css @@ -45,7 +45,6 @@ .announcementDark { background-color: transparent; - color: #fff; } .announcementInner { From c6660d0b78a598a6767c1860d221216dcec8438c Mon Sep 17 00:00:00 2001 From: Mark Hamilton Date: Mon, 15 Nov 2021 12:04:52 -0500 Subject: [PATCH 19/40] docs: clean up some spacing issues in website (#1256) --- .../Interpretability - Image Explainers.ipynb | 114 +++--- website/docusaurus.config.js | 2 +- website/src/pages/index.js | 27 +- .../documentation/estimators/_LightGBM.md | 68 +--- .../documentation/estimators/_VW.md | 45 +-- .../documentation/estimators/core/_AutoML.md | 41 -- 
.../estimators/core/_Featurize.md | 112 +----- .../estimators/core/_IsolationForest.md | 22 +- .../documentation/estimators/core/_NN.md | 45 +-- .../estimators/core/_Recommendation.md | 45 +-- .../documentation/estimators/core/_Stages.md | 68 +--- .../documentation/estimators/core/_Train.md | 45 +-- .../documentation/transformers/_OpenCV.md | 44 +-- .../documentation/transformers/_VW.md | 88 +---- .../cognitive/_AnomalyDetection.md | 69 +--- .../transformers/cognitive/_AzureSearch.md | 24 -- .../cognitive/_BingImageSearch.md | 24 -- .../transformers/cognitive/_ComputerVision.md | 184 +-------- .../transformers/cognitive/_Face.md | 115 +----- .../transformers/cognitive/_FormRecognizer.md | 178 +-------- .../transformers/cognitive/_SpeechToText.md | 44 +-- .../transformers/cognitive/_TextAnalytics.md | 134 +------ .../transformers/cognitive/_Translator.md | 159 +------- .../transformers/core/_Explainers.md | 176 +-------- .../transformers/core/_Featurize.md | 88 +---- .../documentation/transformers/core/_IO.md | 154 +------- .../documentation/transformers/core/_Image.md | 66 +--- .../transformers/core/_Stages.md | 374 ++---------------- .../core/_SuperpixelTransformer.md | 22 +- .../documentation/transformers/core/_Train.md | 44 +-- .../transformers/deep_learning/_ONNXModel.md | 17 +- ...a Multilingual Search Engine from Forms.md | 7 +- .../Interpretability - Image Explainers.md | 57 --- 33 files changed, 282 insertions(+), 2420 deletions(-) diff --git a/notebooks/features/responsible_ai/Interpretability - Image Explainers.ipynb b/notebooks/features/responsible_ai/Interpretability - Image Explainers.ipynb index aaabe25faf..50829ab5e9 100644 --- a/notebooks/features/responsible_ai/Interpretability - Image Explainers.ipynb +++ b/notebooks/features/responsible_ai/Interpretability - Image Explainers.ipynb @@ -29,33 +29,33 @@ "import urllib.request\n", "import matplotlib.pyplot as plt\n", "import PIL, io\n", - "from PIL import Image\r\n", - "\r\n", - "vec_slice = udf(lambda vec, indices: (vec.toArray())[indices].tolist(), ArrayType(FloatType()))\r\n", - "arg_top_k = udf(lambda vec, k: (-vec.toArray()).argsort()[:k].tolist(), ArrayType(IntegerType()))\r\n", - "\r\n", - "def downloadBytes(url: str):\r\n", - " with urllib.request.urlopen(url) as url:\r\n", - " barr = url.read()\r\n", - " return barr\r\n", - "\r\n", - "def rotate_color_channel(bgr_image_array, height, width, nChannels):\r\n", - " B, G, R, *_ = np.asarray(bgr_image_array).reshape(height, width, nChannels).T\r\n", - " rgb_image_array = np.array((R, G, B)).T\r\n", - " return rgb_image_array\r\n", - " \r\n", - "def plot_superpixels(image_rgb_array, sp_clusters, weights, green_threshold=99):\r\n", - " superpixels = sp_clusters\r\n", - " green_value = np.percentile(weights, green_threshold)\r\n", - " img = Image.fromarray(image_rgb_array, mode='RGB').convert(\"RGBA\")\r\n", - " image_array = np.asarray(img).copy()\r\n", - " for (sp, v) in zip(superpixels, weights):\r\n", - " if v > green_value:\r\n", - " for (x, y) in sp:\r\n", - " image_array[y, x, 1] = 255\r\n", - " image_array[y, x, 3] = 200\r\n", - " plt.clf()\r\n", - " plt.imshow(image_array)\r\n", + "from PIL import Image\n", + "\n", + "vec_slice = udf(lambda vec, indices: (vec.toArray())[indices].tolist(), ArrayType(FloatType()))\n", + "arg_top_k = udf(lambda vec, k: (-vec.toArray()).argsort()[:k].tolist(), ArrayType(IntegerType()))\n", + "\n", + "def downloadBytes(url: str):\n", + " with urllib.request.urlopen(url) as url:\n", + " barr = url.read()\n", + " return barr\n", + "\n", + 
"def rotate_color_channel(bgr_image_array, height, width, nChannels):\n", + " B, G, R, *_ = np.asarray(bgr_image_array).reshape(height, width, nChannels).T\n", + " rgb_image_array = np.array((R, G, B)).T\n", + " return rgb_image_array\n", + " \n", + "def plot_superpixels(image_rgb_array, sp_clusters, weights, green_threshold=99):\n", + " superpixels = sp_clusters\n", + " green_value = np.percentile(weights, green_threshold)\n", + " img = Image.fromarray(image_rgb_array, mode='RGB').convert(\"RGBA\")\n", + " image_array = np.asarray(img).copy()\n", + " for (sp, v) in zip(superpixels, weights):\n", + " if v > green_value:\n", + " for (x, y) in sp:\n", + " image_array[y, x, 1] = 255\n", + " image_array[y, x, 3] = 200\n", + " plt.clf()\n", + " plt.imshow(image_array)\n", " display()" ], "outputs": [], @@ -74,36 +74,36 @@ "cell_type": "code", "execution_count": null, "source": [ - "from synapse.ml.io import *\r\n", - "\r\n", - "image_df = spark.read.image().load(\"wasbs://publicwasb@mmlspark.blob.core.windows.net/explainers/images/david-lusvardi-dWcUncxocQY-unsplash.jpg\")\r\n", - "display(image_df)\r\n", - "\r\n", - "# Rotate the image array from BGR into RGB channels for visualization later.\r\n", - "row = image_df.select(\"image.height\", \"image.width\", \"image.nChannels\", \"image.data\").head()\r\n", - "locals().update(row.asDict())\r\n", - "rgb_image_array = rotate_color_channel(data, height, width, nChannels)\r\n", - "\r\n", - "# Download the ONNX model\r\n", - "modelPayload = downloadBytes(\"https://mmlspark.blob.core.windows.net/publicwasb/ONNXModels/resnet50-v2-7.onnx\")\r\n", - "\r\n", - "featurizer = (\r\n", - " ImageTransformer(inputCol=\"image\", outputCol=\"features\")\r\n", - " .resize(224, True)\r\n", - " .centerCrop(224, 224)\r\n", - " .normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225], color_scale_factor = 1/255)\r\n", - " .setTensorElementType(FloatType())\r\n", - ")\r\n", - "\r\n", - "onnx = (\r\n", - " ONNXModel()\r\n", - " .setModelPayload(modelPayload)\r\n", - " .setFeedDict({\"data\": \"features\"})\r\n", - " .setFetchDict({\"rawPrediction\": \"resnetv24_dense0_fwd\"})\r\n", - " .setSoftMaxDict({\"rawPrediction\": \"probability\"})\r\n", - " .setMiniBatchSize(1)\r\n", - ")\r\n", - "\r\n", + "from synapse.ml.io import *\n", + "\n", + "image_df = spark.read.image().load(\"wasbs://publicwasb@mmlspark.blob.core.windows.net/explainers/images/david-lusvardi-dWcUncxocQY-unsplash.jpg\")\n", + "display(image_df)\n", + "\n", + "# Rotate the image array from BGR into RGB channels for visualization later.\n", + "row = image_df.select(\"image.height\", \"image.width\", \"image.nChannels\", \"image.data\").head()\n", + "locals().update(row.asDict())\n", + "rgb_image_array = rotate_color_channel(data, height, width, nChannels)\n", + "\n", + "# Download the ONNX model\n", + "modelPayload = downloadBytes(\"https://mmlspark.blob.core.windows.net/publicwasb/ONNXModels/resnet50-v2-7.onnx\")\n", + "\n", + "featurizer = (\n", + " ImageTransformer(inputCol=\"image\", outputCol=\"features\")\n", + " .resize(224, True)\n", + " .centerCrop(224, 224)\n", + " .normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225], color_scale_factor = 1/255)\n", + " .setTensorElementType(FloatType())\n", + ")\n", + "\n", + "onnx = (\n", + " ONNXModel()\n", + " .setModelPayload(modelPayload)\n", + " .setFeedDict({\"data\": \"features\"})\n", + " .setFetchDict({\"rawPrediction\": \"resnetv24_dense0_fwd\"})\n", + " .setSoftMaxDict({\"rawPrediction\": \"probability\"})\n", + " 
.setMiniBatchSize(1)\n", + ")\n", + "\n", "model = Pipeline(stages=[featurizer, onnx]).fit(image_df)" ], "outputs": [], diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js index fe672fbfac..4499523ca3 100644 --- a/website/docusaurus.config.js +++ b/website/docusaurus.config.js @@ -3,7 +3,7 @@ const {all_examples} = require('./src/plugins/examples'); let version = "0.9.1"; module.exports = { - title: 'Synapse ML', + title: 'SynapseML', tagline: 'Simple and Distributed Machine Learning', url: 'https://microsoft.github.io', baseUrl: '/SynapseML/', diff --git a/website/src/pages/index.js b/website/src/pages/index.js index 624ee5c3ee..52906b6b40 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -61,13 +61,26 @@ interpretation_df = (TabularSHAP() config: `from synapse.ml.lightgbm import * quantile_df = (LightGBMRegressor() - .setApplication('quantile') - .setAlpha(0.3) - .setLearningRate(0.3) - .setNumIterations(100) - .setNumLeaves(31) - .fit(train_df) - .transform(test_df))`, + .setApplication('quantile') + .setAlpha(0.3) + .setLearningRate(0.3) + .setNumIterations(100) + .setNumLeaves(31) + .fit(train_df) + .transform(test_df))`, + }, + { + label: "OpenCV", + further: "docs/features/opencv/OpenCV%20-%20Pipeline%20Image%20Transformations", + config: `from synapse.ml.opencv import * + +image_df = (ImageTransformer() + .setInputCol("images") + .setOutputCol("transformed_images") + .resize(224, True) + .centerCrop(224, 224) + .normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225], color_scale_factor = 1/255) + .transform(input_df))`, }, ]; diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/_LightGBM.md b/website/versioned_docs/version-0.9.1/documentation/estimators/_LightGBM.md index a9927b744f..26fea2654c 100644 --- a/website/versioned_docs/version-0.9.1/documentation/estimators/_LightGBM.md +++ b/website/versioned_docs/version-0.9.1/documentation/estimators/_LightGBM.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## LightGBMClassifier @@ -87,27 +69,8 @@ values={[ ]}> - + + @@ -159,27 +122,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/_VW.md b/website/versioned_docs/version-0.9.1/documentation/estimators/_VW.md index 2e1348fef7..ca108aed1a 100644 --- a/website/versioned_docs/version-0.9.1/documentation/estimators/_VW.md +++ b/website/versioned_docs/version-0.9.1/documentation/estimators/_VW.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## VowpalWabbitRegressor @@ -84,27 +66,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_AutoML.md b/website/versioned_docs/version-0.9.1/documentation/estimators/core/_AutoML.md index b4d092ca52..0f6a009c6a 100644 --- a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_AutoML.md +++ b/website/versioned_docs/version-0.9.1/documentation/estimators/core/_AutoML.md @@ -2,26 +2,6 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - ## AutoML @@ -143,27 +123,6 @@ values={[ ]}> - diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Featurize.md b/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Featurize.md index 92157a5d34..3f5dbfbaea 100644 --- 
a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Featurize.md +++ b/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Featurize.md @@ -2,26 +2,6 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - ## Featurize @@ -112,27 +92,8 @@ values={[ ]}> - + + @@ -189,27 +150,8 @@ values={[ ]}> - + + @@ -277,27 +219,8 @@ values={[ ]}> - + + @@ -352,27 +275,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_IsolationForest.md b/website/versioned_docs/version-0.9.1/documentation/estimators/core/_IsolationForest.md index ae84a4936f..7153fe995d 100644 --- a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_IsolationForest.md +++ b/website/versioned_docs/version-0.9.1/documentation/estimators/core/_IsolationForest.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Isolation Forest diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_NN.md b/website/versioned_docs/version-0.9.1/documentation/estimators/core/_NN.md index 02092cdcf5..4b917bd8ff 100644 --- a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_NN.md +++ b/website/versioned_docs/version-0.9.1/documentation/estimators/core/_NN.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## NN @@ -76,27 +58,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Recommendation.md b/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Recommendation.md index 992ad410ca..3c4c4b5924 100644 --- a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Recommendation.md +++ b/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Recommendation.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Recommendation @@ -244,27 +226,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Stages.md b/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Stages.md index c1c910ad58..0d2b1a2c1a 100644 --- a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Stages.md +++ b/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Stages.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Stages @@ -100,27 +82,8 @@ values={[ ]}> - + + @@ -186,27 +149,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Train.md b/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Train.md index 0cda3a70b0..7bae6555ab 100644 --- a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Train.md +++ b/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Train.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Train @@ -112,27 +94,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/_OpenCV.md b/website/versioned_docs/version-0.9.1/documentation/transformers/_OpenCV.md 
index 3864bfe6d7..49744e75f0 100644 --- a/website/versioned_docs/version-0.9.1/documentation/transformers/_OpenCV.md +++ b/website/versioned_docs/version-0.9.1/documentation/transformers/_OpenCV.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## ImageTransformer @@ -88,26 +70,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/_VW.md b/website/versioned_docs/version-0.9.1/documentation/transformers/_VW.md index f647433eb9..1f46e7e9a1 100644 --- a/website/versioned_docs/version-0.9.1/documentation/transformers/_VW.md +++ b/website/versioned_docs/version-0.9.1/documentation/transformers/_VW.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## VectorZipper @@ -112,26 +94,8 @@ values={[ - + + @@ -178,26 +142,8 @@ values={[ - + + @@ -246,26 +192,8 @@ values={[ - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_AnomalyDetection.md b/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_AnomalyDetection.md index 185783181d..7aaa87ebcf 100644 --- a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_AnomalyDetection.md +++ b/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_AnomalyDetection.md @@ -2,27 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Anomaly Detection @@ -136,27 +117,8 @@ values={[ ]}> - + + @@ -254,27 +216,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_AzureSearch.md b/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_AzureSearch.md index bedacee18b..05dae98558 100644 --- a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_AzureSearch.md +++ b/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_AzureSearch.md @@ -2,30 +2,6 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - ## Azure Search diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_BingImageSearch.md b/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_BingImageSearch.md index 7855fbd15f..d3eb0b2000 100644 --- a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_BingImageSearch.md +++ b/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_BingImageSearch.md @@ -2,30 +2,6 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - ## Bing Image Search diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_ComputerVision.md b/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_ComputerVision.md index 671d7267da..ab3c4b83a3 100644 --- a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_ComputerVision.md +++ b/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_ComputerVision.md @@ -2,27 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Computer Vision @@ -99,27 +80,8 @@ values={[ ]}> - + + @@ -191,27 +153,8 @@ values={[ ]}> - 
+ + @@ -280,27 +223,8 @@ values={[ ]}> - + + @@ -367,27 +291,8 @@ values={[ ]}> - + + @@ -450,27 +355,8 @@ values={[ ]}> - + + @@ -537,27 +423,8 @@ values={[ ]}> - + + @@ -618,27 +485,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_Face.md b/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_Face.md index 625cf6f5c9..d2f0ed4616 100644 --- a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_Face.md +++ b/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_Face.md @@ -2,27 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Face @@ -104,27 +85,8 @@ values={[ ]}> - + + @@ -216,27 +178,8 @@ values={[ ]}> - + + @@ -326,27 +269,8 @@ values={[ ]}> - + + @@ -401,27 +325,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_FormRecognizer.md b/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_FormRecognizer.md index de5dd48ed9..5ec86fa76d 100644 --- a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_FormRecognizer.md +++ b/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_FormRecognizer.md @@ -2,27 +2,6 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - ## Form Recognizer @@ -104,27 +83,8 @@ values={[ ]}> - + + @@ -189,27 +149,8 @@ values={[ ]}> - + + @@ -272,27 +213,6 @@ values={[ ]}> - @@ -358,27 +278,6 @@ values={[ ]}> - @@ -444,27 +343,8 @@ values={[ ]}> - + + @@ -537,27 +417,8 @@ values={[ ]}> - + + @@ -624,27 +485,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_SpeechToText.md b/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_SpeechToText.md index 068c9ea148..d5dba97d92 100644 --- a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_SpeechToText.md +++ b/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_SpeechToText.md @@ -2,27 +2,6 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - ## Speech To Text @@ -105,27 +84,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_TextAnalytics.md b/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_TextAnalytics.md index 6d11f26d86..882e1abf2d 100644 --- a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_TextAnalytics.md +++ b/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_TextAnalytics.md @@ -2,27 +2,6 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - ## Text Analytics @@ -99,27 +78,6 @@ values={[ ]}> - @@ -186,27 +144,8 @@ values={[ ]}> - + + @@ -275,27 +214,8 @@ values={[ ]}> - + + @@ -359,27 +279,8 @@ values={[ ]}> - + + @@ -446,27 +347,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_Translator.md b/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_Translator.md index 557041141d..1e14de7703 100644 --- a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_Translator.md +++ 
b/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_Translator.md @@ -2,27 +2,6 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - ## Translator @@ -106,27 +85,8 @@ values={[ ]}> - + + @@ -200,27 +160,8 @@ values={[ ]}> - + + @@ -286,27 +227,8 @@ values={[ ]}> - + + @@ -372,27 +294,8 @@ values={[ ]}> - + + @@ -464,27 +367,8 @@ values={[ ]}> - + + @@ -552,27 +436,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Explainers.md b/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Explainers.md index bfc381e17f..dbe6bcf076 100644 --- a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Explainers.md +++ b/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Explainers.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Explainers @@ -96,26 +78,8 @@ values={[ ]}> - + + @@ -181,26 +145,8 @@ values={[ ]}> - + + @@ -273,26 +219,8 @@ values={[ ]}> - + + @@ -363,26 +291,8 @@ values={[ ]}> - + + @@ -443,26 +353,8 @@ values={[ ]}> - + + @@ -521,26 +413,8 @@ values={[ ]}> - + + @@ -621,26 +495,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Featurize.md b/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Featurize.md index dd8f7b0fcb..82beef09ba 100644 --- a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Featurize.md +++ b/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Featurize.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Featurize @@ -92,26 +74,8 @@ values={[ ]}> - + + @@ -175,26 +139,8 @@ values={[ ]}> - + + @@ -271,26 +217,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_IO.md b/website/versioned_docs/version-0.9.1/documentation/transformers/core/_IO.md index 91cd9e8b02..19569f157a 100644 --- a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_IO.md +++ b/website/versioned_docs/version-0.9.1/documentation/transformers/core/_IO.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## IO @@ -87,26 +69,8 @@ values={[ ]}> - + + @@ -158,26 +122,8 @@ values={[ ]}> - + + @@ -221,26 +167,8 @@ values={[ ]}> - + + @@ -286,26 +214,8 @@ values={[ ]}> - + + @@ -347,26 +257,8 @@ values={[ ]}> - + + @@ -409,26 +301,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Image.md b/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Image.md index a96de7be85..3d6c452145 100644 --- a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Image.md +++ b/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Image.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Image @@ -90,26 +72,8 @@ values={[ ]}> - + + @@ -179,26 +143,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Stages.md b/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Stages.md index 
82b57e7561..b8cc3a0ac1 100644 --- a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Stages.md +++ b/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Stages.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Stages @@ -105,26 +87,8 @@ values={[ ]}> - + + @@ -178,26 +142,8 @@ values={[ ]}> - + + @@ -259,26 +205,8 @@ values={[ ]}> - + + @@ -332,26 +260,8 @@ values={[ ]}> - + + @@ -416,26 +326,8 @@ values={[ ]}> - + + @@ -482,26 +374,8 @@ values={[ ]}> - + + @@ -543,26 +417,8 @@ values={[ ]}> - + + @@ -612,26 +468,8 @@ values={[ ]}> - + + @@ -681,26 +519,8 @@ values={[ ]}> - + + @@ -754,26 +574,8 @@ values={[ ]}> - + + @@ -845,26 +647,8 @@ values={[ ]}> - + + @@ -918,26 +702,8 @@ values={[ ]}> - + + @@ -1007,26 +773,8 @@ values={[ ]}> - + + @@ -1080,26 +828,8 @@ values={[ ]}> - + + @@ -1173,26 +903,8 @@ values={[ ]}> - + + @@ -1258,26 +970,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_SuperpixelTransformer.md b/website/versioned_docs/version-0.9.1/documentation/transformers/core/_SuperpixelTransformer.md index ae2583711b..1c7c06890d 100644 --- a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_SuperpixelTransformer.md +++ b/website/versioned_docs/version-0.9.1/documentation/transformers/core/_SuperpixelTransformer.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## LIME diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Train.md b/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Train.md index 5077cfaa31..7247fc87f2 100644 --- a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Train.md +++ b/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Train.md @@ -2,26 +2,8 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - + + ## Train @@ -92,26 +74,8 @@ values={[ ]}> - + + diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/deep_learning/_ONNXModel.md b/website/versioned_docs/version-0.9.1/documentation/transformers/deep_learning/_ONNXModel.md index 9e696c891e..84fd7f1df6 100644 --- a/website/versioned_docs/version-0.9.1/documentation/transformers/deep_learning/_ONNXModel.md +++ b/website/versioned_docs/version-0.9.1/documentation/transformers/deep_learning/_ONNXModel.md @@ -2,16 +2,6 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; import DocTable from "@theme/DocumentationTable"; - ## ONNXModel @@ -24,14 +14,13 @@ values={[ ```py -import synapse.ml from synapse.ml.onnx import ONNXModel model_path = "PUT_YOUR_MODEL_PATH" onnx_ml = (ONNXModel() .setModelLocation(model_path) .setFeedDict({"float_input": "features"}) - .setFetchDict({"prediction": "output_label", "rawProbability": "output_probability"}) + .setFetchDict({"prediction": "output_label", "rawProbability": "output_probability"})) ``` @@ -41,10 +30,10 @@ onnx_ml = (ONNXModel() import com.microsoft.azure.synapse.ml.onnx._ val model_path = "PUT_YOUR_MODEL_PATH" -val onnx_ml = new ONNXModel() +val onnx_ml = (new ONNXModel() .setModelLocation(model_path) .setFeedDict(Map("float_input" -> "features")) - .setFetchDict(Map("prediction" -> "output_label", "rawProbability" -> "output_probability")) + 
.setFetchDict(Map("prediction" -> "output_label", "rawProbability" -> "output_probability"))) ``` diff --git a/website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.md b/website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.md index 0397538a50..f8c47e8e84 100644 --- a/website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.md +++ b/website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.md @@ -28,9 +28,8 @@ def blob_to_url(blob): df2 = (spark.read.format("binaryFile") - .load("wasbs://ignite2021@mmlsparkdemo.blob.core.windows.net/forms/*") + .load("wasbs://ignite2021@mmlsparkdemo.blob.core.windows.net/form_subset/*") .select("path") - .coalesce(24) .limit(10) .select(udf(blob_to_url, StringType())("path").alias("url")) .cache() @@ -78,7 +77,7 @@ from synapse.ml.cognitive import FormOntologyLearner organized_df = (FormOntologyLearner() .setInputCol("invoices") .setOutputCol("extracted") - .fit(analyzed_df.limit(10)) + .fit(analyzed_df) .transform(analyzed_df) .select("url", "extracted.*") .cache()) @@ -107,7 +106,7 @@ display(itemized_df) ```python -display(itemized_df.where(col("ProductCode") == 6)) +display(itemized_df.where(col("ProductCode") == 48)) ``` diff --git a/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md b/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md index 4eea455b27..ca1048c138 100644 --- a/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md +++ b/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md @@ -27,58 +27,31 @@ import matplotlib.pyplot as plt import PIL, io from PIL import Image - - vec_slice = udf(lambda vec, indices: (vec.toArray())[indices].tolist(), ArrayType(FloatType())) - arg_top_k = udf(lambda vec, k: (-vec.toArray()).argsort()[:k].tolist(), ArrayType(IntegerType())) - - def downloadBytes(url: str): - with urllib.request.urlopen(url) as url: - barr = url.read() - return barr - - def rotate_color_channel(bgr_image_array, height, width, nChannels): - B, G, R, *_ = np.asarray(bgr_image_array).reshape(height, width, nChannels).T - rgb_image_array = np.array((R, G, B)).T - return rgb_image_array - - def plot_superpixels(image_rgb_array, sp_clusters, weights, green_threshold=99): - superpixels = sp_clusters - green_value = np.percentile(weights, green_threshold) - img = Image.fromarray(image_rgb_array, mode='RGB').convert("RGBA") - image_array = np.asarray(img).copy() - for (sp, v) in zip(superpixels, weights): - if v > green_value: - for (x, y) in sp: - image_array[y, x, 1] = 255 - image_array[y, x, 3] = 200 - plt.clf() - plt.imshow(image_array) - display() ``` @@ -90,64 +63,34 @@ The result shows 39.6% probability of "violin" (889), and 38.4% probability of " ```python from synapse.ml.io import * - - image_df = spark.read.image().load("wasbs://publicwasb@mmlspark.blob.core.windows.net/explainers/images/david-lusvardi-dWcUncxocQY-unsplash.jpg") - display(image_df) - - # Rotate the image array from BGR into RGB channels for visualization later. 
- row = image_df.select("image.height", "image.width", "image.nChannels", "image.data").head() - locals().update(row.asDict()) - rgb_image_array = rotate_color_channel(data, height, width, nChannels) - - # Download the ONNX model - modelPayload = downloadBytes("https://mmlspark.blob.core.windows.net/publicwasb/ONNXModels/resnet50-v2-7.onnx") - - featurizer = ( - ImageTransformer(inputCol="image", outputCol="features") - .resize(224, True) - .centerCrop(224, 224) - .normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225], color_scale_factor = 1/255) - .setTensorElementType(FloatType()) - ) - - onnx = ( - ONNXModel() - .setModelPayload(modelPayload) - .setFeedDict({"data": "features"}) - .setFetchDict({"rawPrediction": "resnetv24_dense0_fwd"}) - .setSoftMaxDict({"rawPrediction": "probability"}) - .setMiniBatchSize(1) - ) - - model = Pipeline(stages=[featurizer, onnx]).fit(image_df) ``` From e6da4d5583fd443e04cc89dfbf399b88851bb507 Mon Sep 17 00:00:00 2001 From: Mark Hamilton Date: Tue, 16 Nov 2021 00:18:35 -0500 Subject: [PATCH 20/40] chore: Update to SynapseML v0.9.4 (#1258) --- README.md | 26 ++-- .../CognitiveServices - Overview.ipynb | 72 +++++------ ...CyberML - Anomalous Access Detection.ipynb | 2 +- .../Regression - Auto Imports.ipynb | 10 +- ...on - Flight Delays with DataCleaning.ipynb | 6 +- website/docs/about.md | 4 +- website/docs/getting_started/installation.md | 18 +-- website/docs/reference/R-setup.md | 6 +- website/docs/reference/cyber.md | 24 ++-- website/docs/reference/docker.md | 8 +- website/doctest.py | 2 +- website/src/pages/index.js | 18 +-- .../{version-0.9.1 => version-0.9.4}/about.md | 4 +- .../documentation/estimators/_LightGBM.md | 0 .../documentation/estimators/_VW.md | 0 .../documentation/estimators/core/_AutoML.md | 0 .../estimators/core/_Featurize.md | 0 .../estimators/core/_IsolationForest.md | 0 .../documentation/estimators/core/_NN.md | 0 .../estimators/core/_Recommendation.md | 0 .../documentation/estimators/core/_Stages.md | 0 .../documentation/estimators/core/_Train.md | 0 .../estimators/estimators_core.md | 0 .../estimators/estimators_lightgbm.md | 0 .../documentation/estimators/estimators_vw.md | 0 .../documentation/transformers/_OpenCV.md | 0 .../documentation/transformers/_VW.md | 0 .../cognitive/_AnomalyDetection.md | 0 .../transformers/cognitive/_AzureSearch.md | 0 .../cognitive/_BingImageSearch.md | 0 .../transformers/cognitive/_ComputerVision.md | 0 .../transformers/cognitive/_Face.md | 0 .../transformers/cognitive/_FormRecognizer.md | 0 .../transformers/cognitive/_SpeechToText.md | 0 .../transformers/cognitive/_TextAnalytics.md | 0 .../transformers/cognitive/_Translator.md | 0 .../transformers/core/_Explainers.md | 0 .../transformers/core/_Featurize.md | 0 .../documentation/transformers/core/_IO.md | 0 .../documentation/transformers/core/_Image.md | 0 .../transformers/core/_Stages.md | 0 .../core/_SuperpixelTransformer.md | 0 .../documentation/transformers/core/_Train.md | 0 .../transformers/deep_learning/_ONNXModel.md | 0 .../transformers/transformers_cognitive.md | 0 .../transformers/transformers_core.md | 0 .../transformers_deep_learning.md | 0 .../transformers/transformers_opencv.md | 0 .../transformers/transformers_vw.md | 0 ...ation - Adult Census with Vowpal Wabbit.md | 0 .../Classification - Adult Census.md | 0 ...sification - Before and After SynapseML.md | 0 ... 
- Twitter Sentiment with Vowpal Wabbit.md | 0 ...tiveServices - Celebrity Quote Analysis.md | 0 ...a Multilingual Search Engine from Forms.md | 0 .../CognitiveServices - Overview.md | 72 +++++------ ...nitiveServices - Predictive Maintenance.md | 0 .../features/lightgbm/LightGBM - Overview.md | 0 .../features/lightgbm/about.md | 0 .../onnx/ONNX - Inference on Spark.md | 0 .../features/onnx/about.md | 0 ...OpenCV - Pipeline Image Transformations.md | 0 .../other/AzureSearchIndex - Met Artworks.md | 0 ...onalKNN - Exploring Art Across Cultures.md | 0 .../CyberML - Anomalous Access Detection.md | 2 +- ...ning - BiLSTM Medical Entity Extraction.md | 0 ...earning - CIFAR10 Convolutional Network.md | 0 ...pLearning - Flower Image Classification.md | 0 .../other/DeepLearning - Transfer Learning.md | 0 ...arameterTuning - Fighting Breast Cancer.md | 0 ...ics - Amazon Book Reviews with Word2Vec.md | 0 .../TextAnalytics - Amazon Book Reviews.md | 0 .../regression/Regression - Auto Imports.md | 10 +- ...ssion - Flight Delays with DataCleaning.md | 6 +- .../regression/Regression - Flight Delays.md | 0 ...abbit vs. LightGBM vs. Linear Regressor.md | 0 .../responsible_ai/Data Balance Analysis.md | 0 ...taBalanceAnalysis - Adult Census Income.md | 0 ...nterpretability - Explanation Dashboard.md | 0 .../Interpretability - Image Explainers.md | 0 ...terpretability - Snow Leopard Detection.md | 0 ...terpretability - Tabular SHAP explainer.md | 0 .../Interpretability - Text Explainers.md | 0 .../Model Interpretation on Spark.md | 0 .../SparkServing - Deploying a Classifier.md | 0 .../features/spark_serving/about.md | 0 .../features/vw/Vowpal Wabbit - Overview.md | 0 .../features/vw/about.md | 0 .../getting_started/first_example.md | 0 .../getting_started/first_model.md | 0 .../getting_started/installation.md | 18 +-- .../reference/R-setup.md | 6 +- .../reference/SAR.md | 0 .../reference/contributing_guide.md | 0 .../reference/cyber.md | 24 ++-- .../reference/datasets.md | 0 .../reference/developer-readme.md | 0 .../reference/docker.md | 8 +- .../reference/vagrant.md | 0 .../third-party-notices.txt | 0 ...ebars.json => version-0.9.4-sidebars.json} | 120 +++++++++--------- website/versions.json | 2 +- 102 files changed, 234 insertions(+), 234 deletions(-) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/about.md (94%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/estimators/_LightGBM.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/estimators/_VW.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/estimators/core/_AutoML.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/estimators/core/_Featurize.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/estimators/core/_IsolationForest.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/estimators/core/_NN.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/estimators/core/_Recommendation.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/estimators/core/_Stages.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/estimators/core/_Train.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/estimators/estimators_core.md (100%) rename website/versioned_docs/{version-0.9.1 => 
version-0.9.4}/documentation/estimators/estimators_lightgbm.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/estimators/estimators_vw.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/_OpenCV.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/_VW.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/cognitive/_AnomalyDetection.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/cognitive/_AzureSearch.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/cognitive/_BingImageSearch.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/cognitive/_ComputerVision.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/cognitive/_Face.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/cognitive/_FormRecognizer.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/cognitive/_SpeechToText.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/cognitive/_TextAnalytics.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/cognitive/_Translator.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/core/_Explainers.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/core/_Featurize.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/core/_IO.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/core/_Image.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/core/_Stages.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/core/_SuperpixelTransformer.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/core/_Train.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/deep_learning/_ONNXModel.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/transformers_cognitive.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/transformers_core.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/transformers_deep_learning.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/transformers_opencv.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/documentation/transformers/transformers_vw.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/classification/Classification - Adult Census with Vowpal Wabbit.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/classification/Classification - Adult Census.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/classification/Classification - Before and After SynapseML.md (100%) rename website/versioned_docs/{version-0.9.1 => 
version-0.9.4}/features/classification/Classification - Twitter Sentiment with Vowpal Wabbit.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/cognitive_services/CognitiveServices - Celebrity Quote Analysis.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/cognitive_services/CognitiveServices - Overview.md (84%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/cognitive_services/CognitiveServices - Predictive Maintenance.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/lightgbm/LightGBM - Overview.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/lightgbm/about.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/onnx/ONNX - Inference on Spark.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/onnx/about.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/opencv/OpenCV - Pipeline Image Transformations.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/other/AzureSearchIndex - Met Artworks.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/other/ConditionalKNN - Exploring Art Across Cultures.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/other/CyberML - Anomalous Access Detection.md (99%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/other/DeepLearning - BiLSTM Medical Entity Extraction.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/other/DeepLearning - CIFAR10 Convolutional Network.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/other/DeepLearning - Flower Image Classification.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/other/DeepLearning - Transfer Learning.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/other/HyperParameterTuning - Fighting Breast Cancer.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/other/TextAnalytics - Amazon Book Reviews with Word2Vec.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/other/TextAnalytics - Amazon Book Reviews.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/regression/Regression - Auto Imports.md (95%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/regression/Regression - Flight Delays with DataCleaning.md (96%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/regression/Regression - Flight Delays.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/regression/Regression - Vowpal Wabbit vs. LightGBM vs. 
Linear Regressor.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/responsible_ai/Data Balance Analysis.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/responsible_ai/DataBalanceAnalysis - Adult Census Income.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/responsible_ai/Interpretability - Explanation Dashboard.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/responsible_ai/Interpretability - Image Explainers.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/responsible_ai/Interpretability - Snow Leopard Detection.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/responsible_ai/Interpretability - Tabular SHAP explainer.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/responsible_ai/Interpretability - Text Explainers.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/responsible_ai/Model Interpretation on Spark.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/spark_serving/SparkServing - Deploying a Classifier.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/spark_serving/about.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/vw/Vowpal Wabbit - Overview.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/features/vw/about.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/getting_started/first_example.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/getting_started/first_model.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/getting_started/installation.md (93%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/reference/R-setup.md (98%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/reference/SAR.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/reference/contributing_guide.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/reference/cyber.md (90%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/reference/datasets.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/reference/developer-readme.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/reference/docker.md (98%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/reference/vagrant.md (100%) rename website/versioned_docs/{version-0.9.1 => version-0.9.4}/third-party-notices.txt (100%) rename website/versioned_sidebars/{version-0.9.1-sidebars.json => version-0.9.4-sidebars.json} (65%) diff --git a/README.md b/README.md index b02ed23ea7..7cf99bfe44 100644 --- a/README.md +++ b/README.md @@ -4,9 +4,9 @@ [![Build Status](https://msdata.visualstudio.com/A365/_apis/build/status/microsoft.SynapseML?branchName=master)](https://msdata.visualstudio.com/A365/_build/latest?definitionId=17563&branchName=master) [![codecov](https://codecov.io/gh/Microsoft/SynapseML/branch/master/graph/badge.svg)](https://codecov.io/gh/Microsoft/SynapseML) [![Gitter](https://badges.gitter.im/Microsoft/MMLSpark.svg)](https://gitter.im/Microsoft/MMLSpark?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) -[![Release Notes](https://img.shields.io/badge/release-notes-blue)](https://github.com/Microsoft/SynapseML/releases) [![Scala 
Docs](https://img.shields.io/static/v1?label=api%20docs&message=scala&color=blue&logo=scala)](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/index.html#package) [![PySpark Docs](https://img.shields.io/static/v1?label=api%20docs&message=python&color=blue&logo=python)](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/index.html) [![Academic Paper](https://img.shields.io/badge/academic-paper-7fdcf7)](https://arxiv.org/abs/1810.08744) +[![Release Notes](https://img.shields.io/badge/release-notes-blue)](https://github.com/Microsoft/SynapseML/releases) [![Scala Docs](https://img.shields.io/static/v1?label=api%20docs&message=scala&color=blue&logo=scala)](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/index.html#package) [![PySpark Docs](https://img.shields.io/static/v1?label=api%20docs&message=python&color=blue&logo=python)](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/index.html) [![Academic Paper](https://img.shields.io/badge/academic-paper-7fdcf7)](https://arxiv.org/abs/1810.08744) -[![Version](https://img.shields.io/badge/version-0.9.2-blue)](https://github.com/Microsoft/SynapseML/releases) [![Snapshot Version](https://mmlspark.blob.core.windows.net/icons/badges/master_version3.svg)](#sbt) +[![Version](https://img.shields.io/badge/version-0.9.4-blue)](https://github.com/Microsoft/SynapseML/releases) [![Snapshot Version](https://mmlspark.blob.core.windows.net/icons/badges/master_version3.svg)](#sbt) SynapseML is an ecosystem of tools aimed towards expanding the distributed computing framework @@ -24,8 +24,8 @@ sub-millisecond latency web services, backed by your Spark cluster. SynapseML requires Scala 2.12, Spark 3.0+, and Python 3.6+. See the API documentation [for -Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/index.html#package) and [for -PySpark](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/index.html). +Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/index.html#package) and [for +PySpark](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/index.html).
Table of Contents @@ -149,7 +149,7 @@ the above example, or from python: ```python import pyspark spark = pyspark.sql.SparkSession.builder.appName("MyApp") \ - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") \ + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.4") \ .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") \ .getOrCreate() import synapse.ml @@ -162,7 +162,7 @@ your `build.sbt`: ```scala resolvers += "SynapseML" at "https://mmlspark.azureedge.net/maven" -libraryDependencies += "com.microsoft.azure" % "synapseml" % "0.9.2" +libraryDependencies += "com.microsoft.azure" % "synapseml" % "0.9.4" ``` @@ -172,9 +172,9 @@ SynapseML can be conveniently installed on existing Spark clusters via the `--packages` option, examples: ```bash -spark-shell --packages com.microsoft.azure:synapseml:0.9.2 -pyspark --packages com.microsoft.azure:synapseml:0.9.2 -spark-submit --packages com.microsoft.azure:synapseml:0.9.2 MyApp.jar +spark-shell --packages com.microsoft.azure:synapseml:0.9.4 +pyspark --packages com.microsoft.azure:synapseml:0.9.4 +spark-submit --packages com.microsoft.azure:synapseml:0.9.4 MyApp.jar ``` This can be used in other Spark contexts too. For example, you can use SynapseML @@ -189,7 +189,7 @@ cloud](http://community.cloud.databricks.com), create a new [library from Maven coordinates](https://docs.databricks.com/user-guide/libraries.html#libraries-from-maven-pypi-or-spark-packages) in your workspace. -For the coordinates use: `com.microsoft.azure:synapseml:0.9.2` +For the coordinates use: `com.microsoft.azure:synapseml:0.9.4` with the resolver: `https://mmlspark.azureedge.net/maven`. Ensure this library is attached to your target cluster(s). @@ -197,7 +197,7 @@ Finally, ensure that your Spark cluster has at least Spark 3.12 and Scala 2.12. You can use SynapseML in both your Scala and PySpark notebooks. 
To get started with our example notebooks import the following databricks archive: -`https://mmlspark.blob.core.windows.net/dbcs/SynapseMLExamplesv0.9.2.dbc` +`https://mmlspark.blob.core.windows.net/dbcs/SynapseMLExamplesv0.9.4.dbc` ### Apache Livy and HDInsight @@ -210,7 +210,7 @@ Excluding certain packages from the library may be necessary due to current issu { "name": "synapseml", "conf": { - "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.2", + "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.4", "spark.jars.repositories": "https://mmlspark.azureedge.net/maven", "spark.jars.excludes": "org.scala-lang:scala-reflect,org.apache.spark:spark-tags_2.12,org.scalactic:scalactic_2.12,org.scalatest:scalatest_2.12" } @@ -224,7 +224,7 @@ In Azure Synapse, "spark.yarn.user.classpath.first" should be set to "true" to o { "name": "synapseml", "conf": { - "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.2", + "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.4", "spark.jars.repositories": "https://mmlspark.azureedge.net/maven", "spark.jars.excludes": "org.scala-lang:scala-reflect,org.apache.spark:spark-tags_2.12,org.scalactic:scalactic_2.12,org.scalatest:scalatest_2.12", "spark.yarn.user.classpath.first": "true" diff --git a/notebooks/features/cognitive_services/CognitiveServices - Overview.ipynb b/notebooks/features/cognitive_services/CognitiveServices - Overview.ipynb index 7e4b154126..dcfb024449 100644 --- a/notebooks/features/cognitive_services/CognitiveServices - Overview.ipynb +++ b/notebooks/features/cognitive_services/CognitiveServices - Overview.ipynb @@ -30,60 +30,60 @@ "\n", "### Vision\n", "[**Computer Vision**](https://azure.microsoft.com/en-us/services/cognitive-services/computer-vision/)\n", - "- Describe: provides description of an image in human readable language ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DescribeImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DescribeImage))\n", - "- Analyze (color, image type, face, adult/racy content): analyzes visual features of an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeImage))\n", - "- OCR: reads text from an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/OCR.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.OCR))\n", - "- Recognize Text: reads text from an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/RecognizeText.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.RecognizeText))\n", - "- Thumbnail: generates a thumbnail of user-specified size from the image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/GenerateThumbnails.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.GenerateThumbnails))\n", - "- Recognize domain-specific content: recognizes domain-specific content (celebrity, landmark) 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/RecognizeDomainSpecificContent.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.RecognizeDomainSpecificContent))\n", - "- Tag: identifies list of words that are relevant to the in0put image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/TagImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.TagImage))\n", + "- Describe: provides description of an image in human readable language ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/DescribeImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DescribeImage))\n", + "- Analyze (color, image type, face, adult/racy content): analyzes visual features of an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeImage))\n", + "- OCR: reads text from an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/OCR.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.OCR))\n", + "- Recognize Text: reads text from an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/RecognizeText.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.RecognizeText))\n", + "- Thumbnail: generates a thumbnail of user-specified size from the image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/GenerateThumbnails.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.GenerateThumbnails))\n", + "- Recognize domain-specific content: recognizes domain-specific content (celebrity, landmark) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/RecognizeDomainSpecificContent.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.RecognizeDomainSpecificContent))\n", + "- Tag: identifies list of words that are relevant to the input image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/TagImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.TagImage))\n", "\n", "[**Face**](https://azure.microsoft.com/en-us/services/cognitive-services/face/)\n", - "- Detect: detects human faces in an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DetectFace.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectFace))\n", - "- Verify: verifies whether two faces belong to a same person, or a face belongs to a person
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/VerifyFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.VerifyFaces))\n", - "- Identify: finds the closest matches of the specific query person face from a person group ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/IdentifyFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.IdentifyFaces))\n", - "- Find similar: finds similar faces to the query face in a face list ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/FindSimilarFace.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.FindSimilarFace))\n", - "- Group: divides a group of faces into disjoint groups based on similarity ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/GroupFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.GroupFaces))\n", + "- Detect: detects human faces in an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/DetectFace.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectFace))\n", + "- Verify: verifies whether two faces belong to a same person, or a face belongs to a person ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/VerifyFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.VerifyFaces))\n", + "- Identify: finds the closest matches of the specific query person face from a person group ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/IdentifyFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.IdentifyFaces))\n", + "- Find similar: finds similar faces to the query face in a face list ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/FindSimilarFace.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.FindSimilarFace))\n", + "- Group: divides a group of faces into disjoint groups based on similarity ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/GroupFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.GroupFaces))\n", "\n", "### Speech\n", "[**Speech Services**](https://azure.microsoft.com/en-us/services/cognitive-services/speech-services/)\n", - "- Speech-to-text: transcribes audio streams ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/SpeechToText.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.SpeechToText))\n", + "- Speech-to-text: transcribes audio streams 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/SpeechToText.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.SpeechToText))\n", "\n", "### Language\n", "[**Text Analytics**](https://azure.microsoft.com/en-us/services/cognitive-services/text-analytics/)\n", - "- Language detection: detects language of the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/LanguageDetector.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.LanguageDetector))\n", - "- Key phrase extraction: identifies the key talking points in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/KeyPhraseExtractor.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.KeyPhraseExtractor))\n", - "- Named entity recognition: identifies known entities and general named entities in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/NER.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.NER))\n", - "- Sentiment analysis: returns a score betwee 0 and 1 indicating the sentiment in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/TextSentiment.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.TextSentiment))\n", + "- Language detection: detects language of the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/LanguageDetector.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.LanguageDetector))\n", + "- Key phrase extraction: identifies the key talking points in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/KeyPhraseExtractor.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.KeyPhraseExtractor))\n", + "- Named entity recognition: identifies known entities and general named entities in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/NER.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.NER))\n", + "- Sentiment analysis: returns a score between 0 and 1 indicating the sentiment in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/TextSentiment.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.TextSentiment))\n", "\n", "[**Translator**](https://azure.microsoft.com/en-us/services/cognitive-services/translator/)\n", - "- Translate: Translates text.
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/Translate.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Translate))\n", - "- Transliterate: Converts text in one language from one script to another script. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/Transliterate.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Transliterate))\n", - "- Detect: Identifies the language of a piece of text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/Detect.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Detect))\n", - "- BreakSentence: Identifies the positioning of sentence boundaries in a piece of text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/BreakSentence.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.BreakSentence))\n", - "- Dictionary Lookup: Provides alternative translations for a word and a small number of idiomatic phrases. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DictionaryLookup.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DictionaryLookup))\n", - "- Dictionary Examples: Provides examples that show how terms in the dictionary are used in context. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DictionaryExamples.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DictionaryExamples))\n", - "- Document Translation: Translates documents across all supported languages and dialects while preserving document structure and data format. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DocumentTranslator.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DocumentTranslator))\n", + "- Translate: Translates text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/Translate.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Translate))\n", + "- Transliterate: Converts text in one language from one script to another script. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/Transliterate.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Transliterate))\n", + "- Detect: Identifies the language of a piece of text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/Detect.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Detect))\n", + "- BreakSentence: Identifies the positioning of sentence boundaries in a piece of text. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/BreakSentence.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.BreakSentence))\n", + "- Dictionary Lookup: Provides alternative translations for a word and a small number of idiomatic phrases. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/DictionaryLookup.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DictionaryLookup))\n", + "- Dictionary Examples: Provides examples that show how terms in the dictionary are used in context. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/DictionaryExamples.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DictionaryExamples))\n", + "- Document Translation: Translates documents across all supported languages and dialects while preserving document structure and data format. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/DocumentTranslator.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DocumentTranslator))\n", "\n", "### Azure Form Recognizer\n", "[**Form Recognizer**](https://azure.microsoft.com/en-us/services/form-recognizer/)\n", - "- Analyze Layout: Extract text and layout information from a given document. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeLayout.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeLayout))\n", - "- Analyze Receipts: Detects and extracts data from receipts using optical character recognition (OCR) and our receipt model, enabling you to easily extract structured data from receipts such as merchant name, merchant phone number, transaction date, transaction total, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeReceipts.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeReceipts))\n", - "- Analyze Business Cards: Detects and extracts data from business cards using optical character recognition (OCR) and our business card model, enabling you to easily extract structured data from business cards such as contact names, company names, phone numbers, emails, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeBusinessCards.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeBusinessCards))\n", - "- Analyze Invoices: Detects and extracts data from invoices using optical character recognition (OCR) and our invoice understanding deep learning models, enabling you to easily extract structured data from invoices such as customer, vendor, invoice ID, invoice due date, total, invoice amount due, tax amount, ship to, bill to, line items and more. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeInvoices.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeInvoices))\n", - "- Analyze ID Documents: Detects and extracts data from identification documents using optical character recognition (OCR) and our ID document model, enabling you to easily extract structured data from ID documents such as first name, last name, date of birth, document number, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeIDDocuments.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeIDDocuments))\n", - "- Analyze Custom Form: Extracts information from forms (PDFs and images) into structured data based on a model created from a set of representative training forms. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeCustomModel.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeCustomModel))\n", - "- Get Custom Model: Get detailed information about a custom model. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/GetCustomModel.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/ListCustomModels.html))\n", - "- List Custom Models: Get information about all custom models. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/ListCustomModels.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.ListCustomModels))\n", + "- Analyze Layout: Extract text and layout information from a given document. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeLayout.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeLayout))\n", + "- Analyze Receipts: Detects and extracts data from receipts using optical character recognition (OCR) and our receipt model, enabling you to easily extract structured data from receipts such as merchant name, merchant phone number, transaction date, transaction total, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeReceipts.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeReceipts))\n", + "- Analyze Business Cards: Detects and extracts data from business cards using optical character recognition (OCR) and our business card model, enabling you to easily extract structured data from business cards such as contact names, company names, phone numbers, emails, and more. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeBusinessCards.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeBusinessCards))\n", + "- Analyze Invoices: Detects and extracts data from invoices using optical character recognition (OCR) and our invoice understanding deep learning models, enabling you to easily extract structured data from invoices such as customer, vendor, invoice ID, invoice due date, total, invoice amount due, tax amount, ship to, bill to, line items and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeInvoices.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeInvoices))\n", + "- Analyze ID Documents: Detects and extracts data from identification documents using optical character recognition (OCR) and our ID document model, enabling you to easily extract structured data from ID documents such as first name, last name, date of birth, document number, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeIDDocuments.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeIDDocuments))\n", + "- Analyze Custom Form: Extracts information from forms (PDFs and images) into structured data based on a model created from a set of representative training forms. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeCustomModel.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeCustomModel))\n", + "- Get Custom Model: Get detailed information about a custom model. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/GetCustomModel.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/ListCustomModels.html))\n", + "- List Custom Models: Get information about all custom models. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/ListCustomModels.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.ListCustomModels))\n", "\n", "### Decision\n", "[**Anomaly Detector**](https://azure.microsoft.com/en-us/services/cognitive-services/anomaly-detector/)\n", - "- Anomaly status of latest point: generates a model using preceding points and determines whether the latest point is anomalous ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DetectLastAnomaly.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectLastAnomaly))\n", - "- Find anomalies: generates a model using an entire series and finds anomalies in the series ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/DetectAnomalies.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectAnomalies))\n", + "- Anomaly status of latest point: generates a model using preceding points and determines whether the latest point is anomalous ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/DetectLastAnomaly.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectLastAnomaly))\n", + "- Find anomalies: generates a model using an entire series and finds anomalies in the series ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/DetectAnomalies.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectAnomalies))\n", "\n", "### Search\n", - "- [Bing Image search](https://azure.microsoft.com/en-us/services/cognitive-services/bing-image-search-api/) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/com/microsoft/azure/synapse/ml/cognitive/BingImageSearch.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.BingImageSearch))\n", - "- [Azure Cognitive search](https://docs.microsoft.com/en-us/azure/search/search-what-is-azure-search) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/index.html#com.microsoft.azure.synapse.ml.cognitive.AzureSearchWriter$), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AzureSearchWriter))\n" + "- [Bing Image search](https://azure.microsoft.com/en-us/services/cognitive-services/bing-image-search-api/) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/BingImageSearch.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.BingImageSearch))\n", + "- [Azure Cognitive search](https://docs.microsoft.com/en-us/azure/search/search-what-is-azure-search) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/index.html#com.microsoft.azure.synapse.ml.cognitive.AzureSearchWriter$), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AzureSearchWriter))\n" ], "metadata": {} }, diff --git 
a/notebooks/features/other/CyberML - Anomalous Access Detection.ipynb b/notebooks/features/other/CyberML - Anomalous Access Detection.ipynb index 04980812d9..3e2fd563b7 100644 --- a/notebooks/features/other/CyberML - Anomalous Access Detection.ipynb +++ b/notebooks/features/other/CyberML - Anomalous Access Detection.ipynb @@ -34,7 +34,7 @@ "# Create an Azure Databricks cluster and install the following libs\n", "\n", "1. In Cluster Libraries install from library source Maven:\n", - "Coordinates: com.microsoft.azure:synapseml:0.9.2\n", + "Coordinates: com.microsoft.azure:synapseml:0.9.4\n", "Repository: https://mmlspark.azureedge.net/maven\n", "\n", "2. In Cluster Libraries install from PyPI the library called plotly" diff --git a/notebooks/features/regression/Regression - Auto Imports.ipynb b/notebooks/features/regression/Regression - Auto Imports.ipynb index f6be7fe319..d62d0fab85 100644 --- a/notebooks/features/regression/Regression - Auto Imports.ipynb +++ b/notebooks/features/regression/Regression - Auto Imports.ipynb @@ -15,15 +15,15 @@ "\n", "This sample demonstrates the use of several members of the synapseml library:\n", "- [`TrainRegressor`\n", - " ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.train.html?#module-synapse.ml.train.TrainRegressor)\n", + " ](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.train.html?#module-synapse.ml.train.TrainRegressor)\n", "- [`SummarizeData`\n", - " ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.stages.html?#module-synapse.ml.stages.SummarizeData)\n", + " ](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.stages.html?#module-synapse.ml.stages.SummarizeData)\n", "- [`CleanMissingData`\n", - " ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.featurize.html?#module-synapse.ml.featurize.CleanMissingData)\n", + " ](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.featurize.html?#module-synapse.ml.featurize.CleanMissingData)\n", "- [`ComputeModelStatistics`\n", - " ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.train.html?#module-synapse.ml.train.ComputeModelStatistics)\n", + " ](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.train.html?#module-synapse.ml.train.ComputeModelStatistics)\n", "- [`FindBestModel`\n", - " ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.automl.html?#module-synapse.ml.automl.FindBestModel)\n", + " ](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.automl.html?#module-synapse.ml.automl.FindBestModel)\n", "\n", "First, import the pandas package so that we can read and parse the datafile\n", "using `pandas.read_csv()`" diff --git a/notebooks/features/regression/Regression - Flight Delays with DataCleaning.ipynb b/notebooks/features/regression/Regression - Flight Delays with DataCleaning.ipynb index 1d397e83a4..bb8f87cd6d 100644 --- a/notebooks/features/regression/Regression - Flight Delays with DataCleaning.ipynb +++ b/notebooks/features/regression/Regression - Flight Delays with DataCleaning.ipynb @@ -16,11 +16,11 @@ "\n", "This sample demonstrates how to use the following APIs:\n", "- [`TrainRegressor`\n", - " ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.train.html?#module-synapse.ml.train.TrainRegressor)\n", + " ](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.train.html?#module-synapse.ml.train.TrainRegressor)\n", "- [`ComputePerInstanceStatistics`\n", - " 
](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.train.html?#module-synapse.ml.train.ComputePerInstanceStatistics)\n", + " ](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.train.html?#module-synapse.ml.train.ComputePerInstanceStatistics)\n", "- [`DataConversion`\n", - " ](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.featurize.html?#module-synapse.ml.featurize.DataConversion)\n", + " ](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.featurize.html?#module-synapse.ml.featurize.DataConversion)\n", "\n", "First, import the pandas package" ] diff --git a/website/docs/about.md b/website/docs/about.md index 61a1098d08..b8cc6330c8 100644 --- a/website/docs/about.md +++ b/website/docs/about.md @@ -25,8 +25,8 @@ sub-millisecond latency web services, backed by your Spark cluster. SynapseML requires Scala 2.12, Spark 3.0+, and Python 3.6+. See the API documentation [for -Scala](https://mmlspark.blob.core.windows.net/docs/0.9.2/scala/index.html#package) and [for -PySpark](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/index.html). +Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/index.html#package) and [for +PySpark](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/index.html). import Link from '@docusaurus/Link'; diff --git a/website/docs/getting_started/installation.md b/website/docs/getting_started/installation.md index 9e227bf505..de87ea67fe 100644 --- a/website/docs/getting_started/installation.md +++ b/website/docs/getting_started/installation.md @@ -12,7 +12,7 @@ the above example, or from python: ```python import pyspark spark = pyspark.sql.SparkSession.builder.appName("MyApp") \ - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") \ + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.4") \ .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") \ .getOrCreate() import synapse.ml @@ -25,7 +25,7 @@ your `build.sbt`: ```scala resolvers += "SynapseML" at "https://mmlspark.azureedge.net/maven" -libraryDependencies += "com.microsoft.azure" %% "synapseml" % "0.9.2" +libraryDependencies += "com.microsoft.azure" %% "synapseml" % "0.9.4" ``` @@ -35,9 +35,9 @@ SynapseML can be conveniently installed on existing Spark clusters via the `--packages` option, examples: ```bash -spark-shell --packages com.microsoft.azure:synapseml:0.9.2 -pyspark --packages com.microsoft.azure:synapseml:0.9.2 -spark-submit --packages com.microsoft.azure:synapseml:0.9.2 MyApp.jar +spark-shell --packages com.microsoft.azure:synapseml:0.9.4 +pyspark --packages com.microsoft.azure:synapseml:0.9.4 +spark-submit --packages com.microsoft.azure:synapseml:0.9.4 MyApp.jar ``` This can be used in other Spark contexts too. For example, you can use SynapseML @@ -52,7 +52,7 @@ cloud](http://community.cloud.databricks.com), create a new [library from Maven coordinates](https://docs.databricks.com/user-guide/libraries.html#libraries-from-maven-pypi-or-spark-packages) in your workspace. -For the coordinates use: `com.microsoft.azure:synapseml:0.9.2` +For the coordinates use: `com.microsoft.azure:synapseml:0.9.4` with the resolver: `https://mmlspark.azureedge.net/maven`. Ensure this library is attached to your target cluster(s). @@ -60,7 +60,7 @@ Finally, ensure that your Spark cluster has at least Spark 3.12 and Scala 2.12. You can use SynapseML in both your Scala and PySpark notebooks. 
To get started with our example notebooks import the following databricks archive: -`https://mmlspark.blob.core.windows.net/dbcs/SynapseMLExamplesv0.9.2.dbc` +`https://mmlspark.blob.core.windows.net/dbcs/SynapseMLExamplesv0.9.4.dbc` ### Apache Livy and HDInsight @@ -73,7 +73,7 @@ Excluding certain packages from the library may be necessary due to current issu { "name": "synapseml", "conf": { - "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.2", + "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.4", "spark.jars.repositories": "https://mmlspark.azureedge.net/maven", "spark.jars.excludes": "org.scala-lang:scala-reflect,org.apache.spark:spark-tags_2.12,org.scalactic:scalactic_2.12,org.scalatest:scalatest_2.12" } @@ -87,7 +87,7 @@ In Azure Synapse, "spark.yarn.user.classpath.first" should be set to "true" to o { "name": "synapseml", "conf": { - "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.2", + "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.4", "spark.jars.repositories": "https://mmlspark.azureedge.net/maven", "spark.jars.excludes": "org.scala-lang:scala-reflect,org.apache.spark:spark-tags_2.12,org.scalactic:scalactic_2.12,org.scalatest:scalatest_2.12", "spark.yarn.user.classpath.first": "true" diff --git a/website/docs/reference/R-setup.md b/website/docs/reference/R-setup.md index 539273f689..190c8c9697 100644 --- a/website/docs/reference/R-setup.md +++ b/website/docs/reference/R-setup.md @@ -18,7 +18,7 @@ To install the current SynapseML package for R use: ```R ... -devtools::install_url("https://mmlspark.azureedge.net/rrr/synapseml-0.9.2.zip") +devtools::install_url("https://mmlspark.azureedge.net/rrr/synapseml-0.9.4.zip") ... ``` @@ -31,7 +31,7 @@ It will take some time to install all dependencies. Then, run: library(sparklyr) library(dplyr) config <- spark_config() -config$sparklyr.defaultPackages <- "com.microsoft.azure:synapseml:0.9.2" +config$sparklyr.defaultPackages <- "com.microsoft.azure:synapseml:0.9.4" sc <- spark_connect(master = "local", config = config) ... ``` @@ -91,7 +91,7 @@ and then use spark_connect with method = "databricks": ```R install.packages("devtools") -devtools::install_url("https://mmlspark.azureedge.net/rrr/synapseml-0.9.2.zip") +devtools::install_url("https://mmlspark.azureedge.net/rrr/synapseml-0.9.4.zip") library(sparklyr) library(dplyr) sc <- spark_connect(method = "databricks") diff --git a/website/docs/reference/cyber.md b/website/docs/reference/cyber.md index 3570e04f7a..74d9a4b053 100644 --- a/website/docs/reference/cyber.md +++ b/website/docs/reference/cyber.md @@ -18,50 +18,50 @@ sidebar_label: CyberML (i.e., it returns a sample from the complement set). ## feature engineering: [indexers.py](https://github.com/microsoft/SynapseML/blob/master/core/src/main/python/synapse/ml/cyber/feature/indexers.py) -1. [IdIndexer](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.IdIndexer) +1. [IdIndexer](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.IdIndexer) is a SparkML [Estimator](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Estimator.html). Given a dataframe, it creates an IdIndexerModel (described next) for categorical features which contains the information to map each partition and column seen in the given dataframe to an id. for each partition or one consecutive range for all partition and column values. -2. 
[IdIndexerModel](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.IdIndexerModel) +2. [IdIndexerModel](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.IdIndexerModel) is a SparkML [Transformer](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Transformer.html). Given a dataframe maps each partition and column field to a consecutive integer id. Partitions or column values not encountered in the estimator are mapped to 0. The model can operate in two modes, either create consecutive integer id independently -3. [MultiIndexer](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.MultiIndexer) +3. [MultiIndexer](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.MultiIndexer) is a SparkML [Estimator](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Estimator.html). Uses multiple IdIndexer to generate a MultiIndexerModel (described next) for categorical features which contains multiple IdIndexers for multiple partitions and columns. -4. [MultiIndexerModel](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.MultiIndexerModel) +4. [MultiIndexerModel](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.MultiIndexerModel) is a SparkML [Transformer](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Transformer.html). Given a dataframe maps each partition and column field to a consecutive integer id. Partitions or column values not encountered in the estimator are mapped to 0. The model can operate in two modes, either create consecutive integer id independently ## feature engineering: [scalers.py](https://github.com/microsoft/SynapseML/blob/master/core/src/main/python/synapse/ml/cyber/feature/scalers.py) -1. [StandardScalarScaler](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.StandardScalarScaler) +1. [StandardScalarScaler](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.StandardScalarScaler) is a SparkML [Estimator](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Estimator.html). Given a dataframe it creates a StandardScalarScalerModel (described next) which normalizes any given dataframe according to the mean and standard deviation calculated on the dataframe given to the estimator. -2. [StandardScalarScalerModel](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.StandardScalarScalerModel) +2. [StandardScalarScalerModel](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.StandardScalarScalerModel) is a SparkML [Transformer](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Transformer.html). Given a dataframe with a value column x, the transformer changes its value as follows: x'=(x-mean)/stddev, i.e., if the transformer is given the same dataframe the estimator was given then the value column will have a mean of 0.0 and a standard deviation of 1.0. -3. 
[LinearScalarScaler](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.LinearScalarScaler) +3. [LinearScalarScaler](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.LinearScalarScaler) is a SparkML [Estimator](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Estimator.html). Given a dataframe it creates a LinearScalarScalerModel (described next) which normalizes any given dataframe according to the minimum and maximum values calculated on the dataframe given to the estimator. -4. [LinearScalarScalerModel](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.LinearScalarScalerModel) +4. [LinearScalarScalerModel](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.LinearScalarScalerModel) is a SparkML [Transformer](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Transformer.html). Given a dataframe with a value column x, the transformer changes its value such that if the transformer is given the same dataframe the estimator was given then the value column will be scaled linearly to the given ranges. ## access anomalies: [collaborative_filtering.py](https://github.com/microsoft/SynapseML/blob/master/core/src/main/python/synapse/ml/cyber/anomaly/collaborative_filtering.py) -1. [AccessAnomaly](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.AccessAnomaly) +1. [AccessAnomaly](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.AccessAnomaly) is a SparkML [Estimator](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Estimator.html). Given a dataframe the estimator generates an AccessAnomalyModel (next described) which can detect anomalous access of users to resources in such a way where the access @@ -69,14 +69,14 @@ sidebar_label: CyberML a resource from Finance. This is based solely on access patterns rather than explicit features. Internally this is based on Collaborative Filtering as implemented in Spark using Matrix Factorization with Alternating Least Squares. -2. [AccessAnomalyModel](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.AccessAnomalyModel) +2. [AccessAnomalyModel](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.AccessAnomalyModel) is a SparkML [Transformer](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Transformer.html). Given a dataframe the transformer computes a value between (-inf, inf) where positive values indicate an anomaly score. Anomaly scores are computed to have a mean of 1.0 and a standard deviation of 1.0 over the original dataframe given to the estimator. -3. [ModelNormalizeTransformer](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.ModelNormalizeTransformer) +3. 
[ModelNormalizeTransformer](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.ModelNormalizeTransformer) is a SparkML [Transformer](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Transformer.html). This is a transformer used internally by AccessAnomaly to normalize a model to generate anomaly scores with mean 0.0 and standard deviation of 1.0. -4. [AccessAnomalyConfig](https://mmlspark.blob.core.windows.net/docs/0.9.2/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.AccessAnomalyConfig) +4. [AccessAnomalyConfig](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.AccessAnomalyConfig) contains the default values for AccessAnomaly. diff --git a/website/docs/reference/docker.md b/website/docs/reference/docker.md index ae47e0a370..c83d59ab45 100644 --- a/website/docs/reference/docker.md +++ b/website/docs/reference/docker.md @@ -32,7 +32,7 @@ You can now select one of the sample notebooks and run it, or create your own. In the above, `mcr.microsoft.com/mmlspark/release` specifies the project and image name that you want to run. There is another component implicit here which is the _tsag_ (= version) that you want to use — specifying it explicitly looks like -`mcr.microsoft.com/mmlspark/release:0.9.2` for the `0.9.2` tag. +`mcr.microsoft.com/mmlspark/release:0.9.4` for the `0.9.4` tag. Leaving `mcr.microsoft.com/mmlspark/release` by itself has an implicit `latest` tag, so it is equivalent to `mcr.microsoft.com/mmlspark/release:latest`. The `latest` tag is identical to the @@ -48,7 +48,7 @@ that you will probably want to use can look as follows: docker run -it --rm \ -p 127.0.0.1:80:8888 \ -v ~/myfiles:/notebooks/myfiles \ - mcr.microsoft.com/mmlspark/release:0.9.2 + mcr.microsoft.com/mmlspark/release:0.9.4 ``` In this example, backslashes are used to break things up for readability; you @@ -58,7 +58,7 @@ path and line breaks looks a little different: docker run -it --rm ` -p 127.0.0.1:80:8888 ` -v C:\myfiles:/notebooks/myfiles ` - mcr.microsoft.com/mmlspark/release:0.9.2 + mcr.microsoft.com/mmlspark/release:0.9.4 Let's break this command and go over the meaning of each part: @@ -141,7 +141,7 @@ Let's break this command and go over the meaning of each part: model.write().overwrite().save('myfiles/myTrainedModel.mml') ``` -- **`mcr.microsoft.com/mmlspark/release:0.9.2`** +- **`mcr.microsoft.com/mmlspark/release:0.9.4`** Finally, this specifies an explicit version tag for the image that we want to run. 
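Every coordinate these hunks touch moves from 0.9.2 to 0.9.4, so a quick way to confirm that a cluster actually resolves the upgraded artifact is to start a session pinned to the new package and import the library. This is a minimal sketch, assuming the 0.9.4 jar is reachable from the resolver configured above; the app name is an illustrative placeholder.

```python
import pyspark

# Session pinned to the 0.9.4 coordinates and resolver used throughout this patch.
spark = (pyspark.sql.SparkSession.builder.appName("SynapseMLVersionCheck")
         .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.4")
         .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven")
         .getOrCreate())

import synapse.ml  # resolves only once the synapseml 0.9.4 jar is on the classpath

print(spark.conf.get("spark.jars.packages"))  # expected: com.microsoft.azure:synapseml:0.9.4
```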
diff --git a/website/doctest.py b/website/doctest.py index f8eab83225..939cc7b5bc 100644 --- a/website/doctest.py +++ b/website/doctest.py @@ -17,7 +17,7 @@ def add_python_helper_to_markdown(folder, md): os.environ["PYSPARK_DRIVER_PYTHON_OPTS"] = "notebook" spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.2") + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.4") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/src/pages/index.js b/website/src/pages/index.js index 52906b6b40..3eaf9cd779 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -269,7 +269,7 @@ function Home() { { "name": "synapseml", "conf": { - "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.2", + "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.4", "spark.jars.repositories": "https://mmlspark.azureedge.net/maven", "spark.jars.excludes": "org.scala-lang:scala-reflect,org.apache.spark:spark-tags_2.12,org.scalactic:scalactic_2.12,org.scalatest:scalatest_2.12", "spark.yarn.user.classpath.first": "true" @@ -277,7 +277,7 @@ function Home() { }`} lang="bash" > - Please also include `synapseml==0.9.2` in your + Please also include `synapseml==0.9.4` in your requirements.txt file for usage of PySpark. [ Install Python libraries in Synapse @@ -288,9 +288,9 @@ function Home() { SynapseML can be conveniently installed on existing Spark clusters via the --packages option, examples: This can be used in other Spark contexts too. For example, you @@ -317,7 +317,7 @@ spark-submit --packages com.microsoft.azure:synapseml:0.9.2 MyApp.jar`}

For the coordinates use: with the resolver: @@ -335,7 +335,7 @@ spark-submit --packages com.microsoft.azure:synapseml:0.9.2 MyApp.jar`} notebooks. To get started with our example notebooks import the following databricks archive: @@ -373,7 +373,7 @@ spark-submit --packages com.microsoft.azure:synapseml:0.9.2 MyApp.jar`} diff --git a/website/versioned_docs/version-0.9.1/about.md b/website/versioned_docs/version-0.9.4/about.md similarity index 94% rename from website/versioned_docs/version-0.9.1/about.md rename to website/versioned_docs/version-0.9.4/about.md index da2951d9b3..b8cc6330c8 100644 --- a/website/versioned_docs/version-0.9.1/about.md +++ b/website/versioned_docs/version-0.9.4/about.md @@ -25,8 +25,8 @@ sub-millisecond latency web services, backed by your Spark cluster. SynapseML requires Scala 2.12, Spark 3.0+, and Python 3.6+. See the API documentation [for -Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/index.html#package) and [for -PySpark](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/index.html). +Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/index.html#package) and [for +PySpark](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/index.html). import Link from '@docusaurus/Link'; diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/_LightGBM.md b/website/versioned_docs/version-0.9.4/documentation/estimators/_LightGBM.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/estimators/_LightGBM.md rename to website/versioned_docs/version-0.9.4/documentation/estimators/_LightGBM.md diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/_VW.md b/website/versioned_docs/version-0.9.4/documentation/estimators/_VW.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/estimators/_VW.md rename to website/versioned_docs/version-0.9.4/documentation/estimators/_VW.md diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_AutoML.md b/website/versioned_docs/version-0.9.4/documentation/estimators/core/_AutoML.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/estimators/core/_AutoML.md rename to website/versioned_docs/version-0.9.4/documentation/estimators/core/_AutoML.md diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Featurize.md b/website/versioned_docs/version-0.9.4/documentation/estimators/core/_Featurize.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/estimators/core/_Featurize.md rename to website/versioned_docs/version-0.9.4/documentation/estimators/core/_Featurize.md diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_IsolationForest.md b/website/versioned_docs/version-0.9.4/documentation/estimators/core/_IsolationForest.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/estimators/core/_IsolationForest.md rename to website/versioned_docs/version-0.9.4/documentation/estimators/core/_IsolationForest.md diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_NN.md b/website/versioned_docs/version-0.9.4/documentation/estimators/core/_NN.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/estimators/core/_NN.md rename to website/versioned_docs/version-0.9.4/documentation/estimators/core/_NN.md diff --git 
a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Recommendation.md b/website/versioned_docs/version-0.9.4/documentation/estimators/core/_Recommendation.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/estimators/core/_Recommendation.md rename to website/versioned_docs/version-0.9.4/documentation/estimators/core/_Recommendation.md diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Stages.md b/website/versioned_docs/version-0.9.4/documentation/estimators/core/_Stages.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/estimators/core/_Stages.md rename to website/versioned_docs/version-0.9.4/documentation/estimators/core/_Stages.md diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/core/_Train.md b/website/versioned_docs/version-0.9.4/documentation/estimators/core/_Train.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/estimators/core/_Train.md rename to website/versioned_docs/version-0.9.4/documentation/estimators/core/_Train.md diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/estimators_core.md b/website/versioned_docs/version-0.9.4/documentation/estimators/estimators_core.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/estimators/estimators_core.md rename to website/versioned_docs/version-0.9.4/documentation/estimators/estimators_core.md diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/estimators_lightgbm.md b/website/versioned_docs/version-0.9.4/documentation/estimators/estimators_lightgbm.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/estimators/estimators_lightgbm.md rename to website/versioned_docs/version-0.9.4/documentation/estimators/estimators_lightgbm.md diff --git a/website/versioned_docs/version-0.9.1/documentation/estimators/estimators_vw.md b/website/versioned_docs/version-0.9.4/documentation/estimators/estimators_vw.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/estimators/estimators_vw.md rename to website/versioned_docs/version-0.9.4/documentation/estimators/estimators_vw.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/_OpenCV.md b/website/versioned_docs/version-0.9.4/documentation/transformers/_OpenCV.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/_OpenCV.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/_OpenCV.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/_VW.md b/website/versioned_docs/version-0.9.4/documentation/transformers/_VW.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/_VW.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/_VW.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_AnomalyDetection.md b/website/versioned_docs/version-0.9.4/documentation/transformers/cognitive/_AnomalyDetection.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_AnomalyDetection.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/cognitive/_AnomalyDetection.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_AzureSearch.md 
b/website/versioned_docs/version-0.9.4/documentation/transformers/cognitive/_AzureSearch.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_AzureSearch.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/cognitive/_AzureSearch.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_BingImageSearch.md b/website/versioned_docs/version-0.9.4/documentation/transformers/cognitive/_BingImageSearch.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_BingImageSearch.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/cognitive/_BingImageSearch.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_ComputerVision.md b/website/versioned_docs/version-0.9.4/documentation/transformers/cognitive/_ComputerVision.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_ComputerVision.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/cognitive/_ComputerVision.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_Face.md b/website/versioned_docs/version-0.9.4/documentation/transformers/cognitive/_Face.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_Face.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/cognitive/_Face.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_FormRecognizer.md b/website/versioned_docs/version-0.9.4/documentation/transformers/cognitive/_FormRecognizer.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_FormRecognizer.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/cognitive/_FormRecognizer.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_SpeechToText.md b/website/versioned_docs/version-0.9.4/documentation/transformers/cognitive/_SpeechToText.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_SpeechToText.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/cognitive/_SpeechToText.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_TextAnalytics.md b/website/versioned_docs/version-0.9.4/documentation/transformers/cognitive/_TextAnalytics.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_TextAnalytics.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/cognitive/_TextAnalytics.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_Translator.md b/website/versioned_docs/version-0.9.4/documentation/transformers/cognitive/_Translator.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/cognitive/_Translator.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/cognitive/_Translator.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Explainers.md b/website/versioned_docs/version-0.9.4/documentation/transformers/core/_Explainers.md similarity index 100% rename from 
website/versioned_docs/version-0.9.1/documentation/transformers/core/_Explainers.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/core/_Explainers.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Featurize.md b/website/versioned_docs/version-0.9.4/documentation/transformers/core/_Featurize.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/core/_Featurize.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/core/_Featurize.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_IO.md b/website/versioned_docs/version-0.9.4/documentation/transformers/core/_IO.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/core/_IO.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/core/_IO.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Image.md b/website/versioned_docs/version-0.9.4/documentation/transformers/core/_Image.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/core/_Image.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/core/_Image.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Stages.md b/website/versioned_docs/version-0.9.4/documentation/transformers/core/_Stages.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/core/_Stages.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/core/_Stages.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_SuperpixelTransformer.md b/website/versioned_docs/version-0.9.4/documentation/transformers/core/_SuperpixelTransformer.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/core/_SuperpixelTransformer.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/core/_SuperpixelTransformer.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/core/_Train.md b/website/versioned_docs/version-0.9.4/documentation/transformers/core/_Train.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/core/_Train.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/core/_Train.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/deep_learning/_ONNXModel.md b/website/versioned_docs/version-0.9.4/documentation/transformers/deep_learning/_ONNXModel.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/deep_learning/_ONNXModel.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/deep_learning/_ONNXModel.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/transformers_cognitive.md b/website/versioned_docs/version-0.9.4/documentation/transformers/transformers_cognitive.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/transformers_cognitive.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/transformers_cognitive.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/transformers_core.md b/website/versioned_docs/version-0.9.4/documentation/transformers/transformers_core.md similarity index 100% 
rename from website/versioned_docs/version-0.9.1/documentation/transformers/transformers_core.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/transformers_core.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/transformers_deep_learning.md b/website/versioned_docs/version-0.9.4/documentation/transformers/transformers_deep_learning.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/transformers_deep_learning.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/transformers_deep_learning.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/transformers_opencv.md b/website/versioned_docs/version-0.9.4/documentation/transformers/transformers_opencv.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/transformers_opencv.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/transformers_opencv.md diff --git a/website/versioned_docs/version-0.9.1/documentation/transformers/transformers_vw.md b/website/versioned_docs/version-0.9.4/documentation/transformers/transformers_vw.md similarity index 100% rename from website/versioned_docs/version-0.9.1/documentation/transformers/transformers_vw.md rename to website/versioned_docs/version-0.9.4/documentation/transformers/transformers_vw.md diff --git a/website/versioned_docs/version-0.9.1/features/classification/Classification - Adult Census with Vowpal Wabbit.md b/website/versioned_docs/version-0.9.4/features/classification/Classification - Adult Census with Vowpal Wabbit.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/classification/Classification - Adult Census with Vowpal Wabbit.md rename to website/versioned_docs/version-0.9.4/features/classification/Classification - Adult Census with Vowpal Wabbit.md diff --git a/website/versioned_docs/version-0.9.1/features/classification/Classification - Adult Census.md b/website/versioned_docs/version-0.9.4/features/classification/Classification - Adult Census.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/classification/Classification - Adult Census.md rename to website/versioned_docs/version-0.9.4/features/classification/Classification - Adult Census.md diff --git a/website/versioned_docs/version-0.9.1/features/classification/Classification - Before and After SynapseML.md b/website/versioned_docs/version-0.9.4/features/classification/Classification - Before and After SynapseML.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/classification/Classification - Before and After SynapseML.md rename to website/versioned_docs/version-0.9.4/features/classification/Classification - Before and After SynapseML.md diff --git a/website/versioned_docs/version-0.9.1/features/classification/Classification - Twitter Sentiment with Vowpal Wabbit.md b/website/versioned_docs/version-0.9.4/features/classification/Classification - Twitter Sentiment with Vowpal Wabbit.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/classification/Classification - Twitter Sentiment with Vowpal Wabbit.md rename to website/versioned_docs/version-0.9.4/features/classification/Classification - Twitter Sentiment with Vowpal Wabbit.md diff --git a/website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Celebrity Quote Analysis.md 
b/website/versioned_docs/version-0.9.4/features/cognitive_services/CognitiveServices - Celebrity Quote Analysis.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Celebrity Quote Analysis.md rename to website/versioned_docs/version-0.9.4/features/cognitive_services/CognitiveServices - Celebrity Quote Analysis.md diff --git a/website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.md b/website/versioned_docs/version-0.9.4/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.md rename to website/versioned_docs/version-0.9.4/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms.md diff --git a/website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Overview.md b/website/versioned_docs/version-0.9.4/features/cognitive_services/CognitiveServices - Overview.md similarity index 84% rename from website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Overview.md rename to website/versioned_docs/version-0.9.4/features/cognitive_services/CognitiveServices - Overview.md index 13617cfb76..36c373fcad 100644 --- a/website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Overview.md +++ b/website/versioned_docs/version-0.9.4/features/cognitive_services/CognitiveServices - Overview.md @@ -13,60 +13,60 @@ status: stable ### Vision [**Computer Vision**](https://azure.microsoft.com/en-us/services/cognitive-services/computer-vision/) -- Describe: provides description of an image in human readable language ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DescribeImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DescribeImage)) -- Analyze (color, image type, face, adult/racy content): analyzes visual features of an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeImage)) -- OCR: reads text from an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/OCR.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.OCR)) -- Recognize Text: reads text from an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/RecognizeText.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.RecognizeText)) -- Thumbnail: generates a thumbnail of user-specified size from the image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/GenerateThumbnails.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.GenerateThumbnails)) -- Recognize domain-specific content: recognizes domain-specific content (celebrity, landmark) 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/RecognizeDomainSpecificContent.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.RecognizeDomainSpecificContent)) -- Tag: identifies list of words that are relevant to the in0put image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/TagImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.TagImage)) +- Describe: provides description of an image in human readable language ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/DescribeImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DescribeImage)) +- Analyze (color, image type, face, adult/racy content): analyzes visual features of an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeImage)) +- OCR: reads text from an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/OCR.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.OCR)) +- Recognize Text: reads text from an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/RecognizeText.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.RecognizeText)) +- Thumbnail: generates a thumbnail of user-specified size from the image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/GenerateThumbnails.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.GenerateThumbnails)) +- Recognize domain-specific content: recognizes domain-specific content (celebrity, landmark) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/RecognizeDomainSpecificContent.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.RecognizeDomainSpecificContent)) +- Tag: identifies list of words that are relevant to the in0put image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/TagImage.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.TagImage)) [**Face**](https://azure.microsoft.com/en-us/services/cognitive-services/face/) -- Detect: detects human faces in an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DetectFace.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectFace)) -- Verify: verifies whether two faces belong to a same person, or a face belongs to a person 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/VerifyFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.VerifyFaces)) -- Identify: finds the closest matches of the specific query person face from a person group ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/IdentifyFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.IdentifyFaces)) -- Find similar: finds similar faces to the query face in a face list ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/FindSimilarFace.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.FindSimilarFace)) -- Group: divides a group of faces into disjoint groups based on similarity ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/GroupFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.GroupFaces)) +- Detect: detects human faces in an image ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/DetectFace.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectFace)) +- Verify: verifies whether two faces belong to a same person, or a face belongs to a person ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/VerifyFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.VerifyFaces)) +- Identify: finds the closest matches of the specific query person face from a person group ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/IdentifyFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.IdentifyFaces)) +- Find similar: finds similar faces to the query face in a face list ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/FindSimilarFace.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.FindSimilarFace)) +- Group: divides a group of faces into disjoint groups based on similarity ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/GroupFaces.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.GroupFaces)) ### Speech [**Speech Services**](https://azure.microsoft.com/en-us/services/cognitive-services/speech-services/) -- Speech-to-text: transcribes audio streams ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/SpeechToText.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.SpeechToText)) +- Speech-to-text: transcribes audio streams 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/SpeechToText.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.SpeechToText)) ### Language [**Text Analytics**](https://azure.microsoft.com/en-us/services/cognitive-services/text-analytics/) -- Language detection: detects language of the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/LanguageDetector.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.LanguageDetector)) -- Key phrase extraction: identifies the key talking points in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/KeyPhraseExtractor.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.KeyPhraseExtractor)) -- Named entity recognition: identifies known entities and general named entities in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/NER.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.NER)) -- Sentiment analysis: returns a score betwee 0 and 1 indicating the sentiment in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/TextSentiment.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.TextSentiment)) +- Language detection: detects language of the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/LanguageDetector.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.LanguageDetector)) +- Key phrase extraction: identifies the key talking points in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/KeyPhraseExtractor.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.KeyPhraseExtractor)) +- Named entity recognition: identifies known entities and general named entities in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/NER.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.NER)) +- Sentiment analysis: returns a score betwee 0 and 1 indicating the sentiment in the input text ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/TextSentiment.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.TextSentiment)) [**Translator**](https://azure.microsoft.com/en-us/services/cognitive-services/translator/) -- Translate: Translates text. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/Translate.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Translate)) -- Transliterate: Converts text in one language from one script to another script. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/Transliterate.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Transliterate)) -- Detect: Identifies the language of a piece of text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/Detect.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Detect)) -- BreakSentence: Identifies the positioning of sentence boundaries in a piece of text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/BreakSentence.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.BreakSentence)) -- Dictionary Lookup: Provides alternative translations for a word and a small number of idiomatic phrases. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DictionaryLookup.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DictionaryLookup)) -- Dictionary Examples: Provides examples that show how terms in the dictionary are used in context. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DictionaryExamples.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DictionaryExamples)) -- Document Translation: Translates documents across all supported languages and dialects while preserving document structure and data format. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DocumentTranslator.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DocumentTranslator)) +- Translate: Translates text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/Translate.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Translate)) +- Transliterate: Converts text in one language from one script to another script. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/Transliterate.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Transliterate)) +- Detect: Identifies the language of a piece of text. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/Detect.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.Detect)) +- BreakSentence: Identifies the positioning of sentence boundaries in a piece of text. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/BreakSentence.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.BreakSentence)) +- Dictionary Lookup: Provides alternative translations for a word and a small number of idiomatic phrases. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/DictionaryLookup.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DictionaryLookup)) +- Dictionary Examples: Provides examples that show how terms in the dictionary are used in context. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/DictionaryExamples.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DictionaryExamples)) +- Document Translation: Translates documents across all supported languages and dialects while preserving document structure and data format. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/DocumentTranslator.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DocumentTranslator)) ### Azure Form Recognizer [**Form Recognizer**](https://azure.microsoft.com/en-us/services/form-recognizer/) -- Analyze Layout: Extract text and layout information from a given document. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeLayout.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeLayout)) -- Analyze Receipts: Detects and extracts data from receipts using optical character recognition (OCR) and our receipt model, enabling you to easily extract structured data from receipts such as merchant name, merchant phone number, transaction date, transaction total, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeReceipts.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeReceipts)) -- Analyze Business Cards: Detects and extracts data from business cards using optical character recognition (OCR) and our business card model, enabling you to easily extract structured data from business cards such as contact names, company names, phone numbers, emails, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeBusinessCards.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeBusinessCards)) -- Analyze Invoices: Detects and extracts data from invoices using optical character recognition (OCR) and our invoice understanding deep learning models, enabling you to easily extract structured data from invoices such as customer, vendor, invoice ID, invoice due date, total, invoice amount due, tax amount, ship to, bill to, line items and more. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeInvoices.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeInvoices)) -- Analyze ID Documents: Detects and extracts data from identification documents using optical character recognition (OCR) and our ID document model, enabling you to easily extract structured data from ID documents such as first name, last name, date of birth, document number, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeIDDocuments.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeIDDocuments)) -- Analyze Custom Form: Extracts information from forms (PDFs and images) into structured data based on a model created from a set of representative training forms. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeCustomModel.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeCustomModel)) -- Get Custom Model: Get detailed information about a custom model. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/GetCustomModel.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/ListCustomModels.html)) -- List Custom Models: Get information about all custom models. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/ListCustomModels.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.ListCustomModels)) +- Analyze Layout: Extract text and layout information from a given document. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeLayout.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeLayout)) +- Analyze Receipts: Detects and extracts data from receipts using optical character recognition (OCR) and our receipt model, enabling you to easily extract structured data from receipts such as merchant name, merchant phone number, transaction date, transaction total, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeReceipts.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeReceipts)) +- Analyze Business Cards: Detects and extracts data from business cards using optical character recognition (OCR) and our business card model, enabling you to easily extract structured data from business cards such as contact names, company names, phone numbers, emails, and more. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeBusinessCards.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeBusinessCards)) +- Analyze Invoices: Detects and extracts data from invoices using optical character recognition (OCR) and our invoice understanding deep learning models, enabling you to easily extract structured data from invoices such as customer, vendor, invoice ID, invoice due date, total, invoice amount due, tax amount, ship to, bill to, line items and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeInvoices.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeInvoices)) +- Analyze ID Documents: Detects and extracts data from identification documents using optical character recognition (OCR) and our ID document model, enabling you to easily extract structured data from ID documents such as first name, last name, date of birth, document number, and more. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeIDDocuments.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeIDDocuments)) +- Analyze Custom Form: Extracts information from forms (PDFs and images) into structured data based on a model created from a set of representative training forms. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/AnalyzeCustomModel.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AnalyzeCustomModel)) +- Get Custom Model: Get detailed information about a custom model. ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/GetCustomModel.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/ListCustomModels.html)) +- List Custom Models: Get information about all custom models. 
([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/ListCustomModels.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.ListCustomModels)) ### Decision [**Anomaly Detector**](https://azure.microsoft.com/en-us/services/cognitive-services/anomaly-detector/) -- Anomaly status of latest point: generates a model using preceding points and determines whether the latest point is anomalous ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DetectLastAnomaly.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectLastAnomaly)) -- Find anomalies: generates a model using an entire series and finds anomalies in the series ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/DetectAnomalies.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectAnomalies)) +- Anomaly status of latest point: generates a model using preceding points and determines whether the latest point is anomalous ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/DetectLastAnomaly.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectLastAnomaly)) +- Find anomalies: generates a model using an entire series and finds anomalies in the series ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/DetectAnomalies.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.DetectAnomalies)) ### Search -- [Bing Image search](https://azure.microsoft.com/en-us/services/cognitive-services/bing-image-search-api/) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/com/microsoft/azure/synapse/ml/cognitive/BingImageSearch.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.BingImageSearch)) -- [Azure Cognitive search](https://docs.microsoft.com/en-us/azure/search/search-what-is-azure-search) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/index.html#com.microsoft.azure.synapse.ml.cognitive.AzureSearchWriter$), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AzureSearchWriter)) +- [Bing Image search](https://azure.microsoft.com/en-us/services/cognitive-services/bing-image-search-api/) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/com/microsoft/azure/synapse/ml/cognitive/BingImageSearch.html), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.BingImageSearch)) +- [Azure Cognitive search](https://docs.microsoft.com/en-us/azure/search/search-what-is-azure-search) ([Scala](https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/index.html#com.microsoft.azure.synapse.ml.cognitive.AzureSearchWriter$), [Python](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cognitive.html#module-synapse.ml.cognitive.AzureSearchWriter)) ## Prerequisites diff --git a/website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices 
- Predictive Maintenance.md b/website/versioned_docs/version-0.9.4/features/cognitive_services/CognitiveServices - Predictive Maintenance.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/cognitive_services/CognitiveServices - Predictive Maintenance.md rename to website/versioned_docs/version-0.9.4/features/cognitive_services/CognitiveServices - Predictive Maintenance.md diff --git a/website/versioned_docs/version-0.9.1/features/lightgbm/LightGBM - Overview.md b/website/versioned_docs/version-0.9.4/features/lightgbm/LightGBM - Overview.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/lightgbm/LightGBM - Overview.md rename to website/versioned_docs/version-0.9.4/features/lightgbm/LightGBM - Overview.md diff --git a/website/versioned_docs/version-0.9.1/features/lightgbm/about.md b/website/versioned_docs/version-0.9.4/features/lightgbm/about.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/lightgbm/about.md rename to website/versioned_docs/version-0.9.4/features/lightgbm/about.md diff --git a/website/versioned_docs/version-0.9.1/features/onnx/ONNX - Inference on Spark.md b/website/versioned_docs/version-0.9.4/features/onnx/ONNX - Inference on Spark.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/onnx/ONNX - Inference on Spark.md rename to website/versioned_docs/version-0.9.4/features/onnx/ONNX - Inference on Spark.md diff --git a/website/versioned_docs/version-0.9.1/features/onnx/about.md b/website/versioned_docs/version-0.9.4/features/onnx/about.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/onnx/about.md rename to website/versioned_docs/version-0.9.4/features/onnx/about.md diff --git a/website/versioned_docs/version-0.9.1/features/opencv/OpenCV - Pipeline Image Transformations.md b/website/versioned_docs/version-0.9.4/features/opencv/OpenCV - Pipeline Image Transformations.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/opencv/OpenCV - Pipeline Image Transformations.md rename to website/versioned_docs/version-0.9.4/features/opencv/OpenCV - Pipeline Image Transformations.md diff --git a/website/versioned_docs/version-0.9.1/features/other/AzureSearchIndex - Met Artworks.md b/website/versioned_docs/version-0.9.4/features/other/AzureSearchIndex - Met Artworks.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/other/AzureSearchIndex - Met Artworks.md rename to website/versioned_docs/version-0.9.4/features/other/AzureSearchIndex - Met Artworks.md diff --git a/website/versioned_docs/version-0.9.1/features/other/ConditionalKNN - Exploring Art Across Cultures.md b/website/versioned_docs/version-0.9.4/features/other/ConditionalKNN - Exploring Art Across Cultures.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/other/ConditionalKNN - Exploring Art Across Cultures.md rename to website/versioned_docs/version-0.9.4/features/other/ConditionalKNN - Exploring Art Across Cultures.md diff --git a/website/versioned_docs/version-0.9.1/features/other/CyberML - Anomalous Access Detection.md b/website/versioned_docs/version-0.9.4/features/other/CyberML - Anomalous Access Detection.md similarity index 99% rename from website/versioned_docs/version-0.9.1/features/other/CyberML - Anomalous Access Detection.md rename to website/versioned_docs/version-0.9.4/features/other/CyberML - Anomalous Access Detection.md index 97723b1daf..2890275442 100644 --- 
a/website/versioned_docs/version-0.9.1/features/other/CyberML - Anomalous Access Detection.md +++ b/website/versioned_docs/version-0.9.4/features/other/CyberML - Anomalous Access Detection.md @@ -28,7 +28,7 @@ Note: the data does NOT contain information about departments, this information # Create an Azure Databricks cluster and install the following libs 1. In Cluster Libraries install from library source Maven: -Coordinates: com.microsoft.azure:synapseml:0.9.1 +Coordinates: com.microsoft.azure:synapseml:0.9.4 Repository: https://mmlspark.azureedge.net/maven 2. In Cluster Libraries install from PyPI the library called plotly diff --git a/website/versioned_docs/version-0.9.1/features/other/DeepLearning - BiLSTM Medical Entity Extraction.md b/website/versioned_docs/version-0.9.4/features/other/DeepLearning - BiLSTM Medical Entity Extraction.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/other/DeepLearning - BiLSTM Medical Entity Extraction.md rename to website/versioned_docs/version-0.9.4/features/other/DeepLearning - BiLSTM Medical Entity Extraction.md diff --git a/website/versioned_docs/version-0.9.1/features/other/DeepLearning - CIFAR10 Convolutional Network.md b/website/versioned_docs/version-0.9.4/features/other/DeepLearning - CIFAR10 Convolutional Network.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/other/DeepLearning - CIFAR10 Convolutional Network.md rename to website/versioned_docs/version-0.9.4/features/other/DeepLearning - CIFAR10 Convolutional Network.md diff --git a/website/versioned_docs/version-0.9.1/features/other/DeepLearning - Flower Image Classification.md b/website/versioned_docs/version-0.9.4/features/other/DeepLearning - Flower Image Classification.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/other/DeepLearning - Flower Image Classification.md rename to website/versioned_docs/version-0.9.4/features/other/DeepLearning - Flower Image Classification.md diff --git a/website/versioned_docs/version-0.9.1/features/other/DeepLearning - Transfer Learning.md b/website/versioned_docs/version-0.9.4/features/other/DeepLearning - Transfer Learning.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/other/DeepLearning - Transfer Learning.md rename to website/versioned_docs/version-0.9.4/features/other/DeepLearning - Transfer Learning.md diff --git a/website/versioned_docs/version-0.9.1/features/other/HyperParameterTuning - Fighting Breast Cancer.md b/website/versioned_docs/version-0.9.4/features/other/HyperParameterTuning - Fighting Breast Cancer.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/other/HyperParameterTuning - Fighting Breast Cancer.md rename to website/versioned_docs/version-0.9.4/features/other/HyperParameterTuning - Fighting Breast Cancer.md diff --git a/website/versioned_docs/version-0.9.1/features/other/TextAnalytics - Amazon Book Reviews with Word2Vec.md b/website/versioned_docs/version-0.9.4/features/other/TextAnalytics - Amazon Book Reviews with Word2Vec.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/other/TextAnalytics - Amazon Book Reviews with Word2Vec.md rename to website/versioned_docs/version-0.9.4/features/other/TextAnalytics - Amazon Book Reviews with Word2Vec.md diff --git a/website/versioned_docs/version-0.9.1/features/other/TextAnalytics - Amazon Book Reviews.md b/website/versioned_docs/version-0.9.4/features/other/TextAnalytics - Amazon Book 
Reviews.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/other/TextAnalytics - Amazon Book Reviews.md rename to website/versioned_docs/version-0.9.4/features/other/TextAnalytics - Amazon Book Reviews.md diff --git a/website/versioned_docs/version-0.9.1/features/regression/Regression - Auto Imports.md b/website/versioned_docs/version-0.9.4/features/regression/Regression - Auto Imports.md similarity index 95% rename from website/versioned_docs/version-0.9.1/features/regression/Regression - Auto Imports.md rename to website/versioned_docs/version-0.9.4/features/regression/Regression - Auto Imports.md index f6a5976d5a..6b0f080656 100644 --- a/website/versioned_docs/version-0.9.1/features/regression/Regression - Auto Imports.md +++ b/website/versioned_docs/version-0.9.4/features/regression/Regression - Auto Imports.md @@ -14,15 +14,15 @@ and evaluating the model on the Automobile Imports data set. This sample demonstrates the use of several members of the synapseml library: - [`TrainRegressor` - ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.train.html?#module-synapse.ml.train.TrainRegressor) + ](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.train.html?#module-synapse.ml.train.TrainRegressor) - [`SummarizeData` - ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.stages.html?#module-synapse.ml.stages.SummarizeData) + ](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.stages.html?#module-synapse.ml.stages.SummarizeData) - [`CleanMissingData` - ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.featurize.html?#module-synapse.ml.featurize.CleanMissingData) + ](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.featurize.html?#module-synapse.ml.featurize.CleanMissingData) - [`ComputeModelStatistics` - ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.train.html?#module-synapse.ml.train.ComputeModelStatistics) + ](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.train.html?#module-synapse.ml.train.ComputeModelStatistics) - [`FindBestModel` - ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.automl.html?#module-synapse.ml.automl.FindBestModel) + ](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.automl.html?#module-synapse.ml.automl.FindBestModel) First, import the pandas package so that we can read and parse the datafile using `pandas.read_csv()` diff --git a/website/versioned_docs/version-0.9.1/features/regression/Regression - Flight Delays with DataCleaning.md b/website/versioned_docs/version-0.9.4/features/regression/Regression - Flight Delays with DataCleaning.md similarity index 96% rename from website/versioned_docs/version-0.9.1/features/regression/Regression - Flight Delays with DataCleaning.md rename to website/versioned_docs/version-0.9.4/features/regression/Regression - Flight Delays with DataCleaning.md index 48b6738345..a90ad50773 100644 --- a/website/versioned_docs/version-0.9.1/features/regression/Regression - Flight Delays with DataCleaning.md +++ b/website/versioned_docs/version-0.9.4/features/regression/Regression - Flight Delays with DataCleaning.md @@ -15,11 +15,11 @@ instead of iterating over the columns and applying the `StringIndexer`. 
This sample demonstrates how to use the following APIs: - [`TrainRegressor` - ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.train.html?#module-synapse.ml.train.TrainRegressor) + ](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.train.html?#module-synapse.ml.train.TrainRegressor) - [`ComputePerInstanceStatistics` - ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.train.html?#module-synapse.ml.train.ComputePerInstanceStatistics) + ](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.train.html?#module-synapse.ml.train.ComputePerInstanceStatistics) - [`DataConversion` - ](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.featurize.html?#module-synapse.ml.featurize.DataConversion) + ](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.featurize.html?#module-synapse.ml.featurize.DataConversion) First, import the pandas package diff --git a/website/versioned_docs/version-0.9.1/features/regression/Regression - Flight Delays.md b/website/versioned_docs/version-0.9.4/features/regression/Regression - Flight Delays.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/regression/Regression - Flight Delays.md rename to website/versioned_docs/version-0.9.4/features/regression/Regression - Flight Delays.md diff --git a/website/versioned_docs/version-0.9.1/features/regression/Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor.md b/website/versioned_docs/version-0.9.4/features/regression/Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/regression/Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor.md rename to website/versioned_docs/version-0.9.4/features/regression/Regression - Vowpal Wabbit vs. LightGBM vs. 
Linear Regressor.md diff --git a/website/versioned_docs/version-0.9.1/features/responsible_ai/Data Balance Analysis.md b/website/versioned_docs/version-0.9.4/features/responsible_ai/Data Balance Analysis.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/responsible_ai/Data Balance Analysis.md rename to website/versioned_docs/version-0.9.4/features/responsible_ai/Data Balance Analysis.md diff --git a/website/versioned_docs/version-0.9.1/features/responsible_ai/DataBalanceAnalysis - Adult Census Income.md b/website/versioned_docs/version-0.9.4/features/responsible_ai/DataBalanceAnalysis - Adult Census Income.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/responsible_ai/DataBalanceAnalysis - Adult Census Income.md rename to website/versioned_docs/version-0.9.4/features/responsible_ai/DataBalanceAnalysis - Adult Census Income.md diff --git a/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Explanation Dashboard.md b/website/versioned_docs/version-0.9.4/features/responsible_ai/Interpretability - Explanation Dashboard.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Explanation Dashboard.md rename to website/versioned_docs/version-0.9.4/features/responsible_ai/Interpretability - Explanation Dashboard.md diff --git a/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md b/website/versioned_docs/version-0.9.4/features/responsible_ai/Interpretability - Image Explainers.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Image Explainers.md rename to website/versioned_docs/version-0.9.4/features/responsible_ai/Interpretability - Image Explainers.md diff --git a/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Snow Leopard Detection.md b/website/versioned_docs/version-0.9.4/features/responsible_ai/Interpretability - Snow Leopard Detection.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Snow Leopard Detection.md rename to website/versioned_docs/version-0.9.4/features/responsible_ai/Interpretability - Snow Leopard Detection.md diff --git a/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Tabular SHAP explainer.md b/website/versioned_docs/version-0.9.4/features/responsible_ai/Interpretability - Tabular SHAP explainer.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Tabular SHAP explainer.md rename to website/versioned_docs/version-0.9.4/features/responsible_ai/Interpretability - Tabular SHAP explainer.md diff --git a/website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Text Explainers.md b/website/versioned_docs/version-0.9.4/features/responsible_ai/Interpretability - Text Explainers.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/responsible_ai/Interpretability - Text Explainers.md rename to website/versioned_docs/version-0.9.4/features/responsible_ai/Interpretability - Text Explainers.md diff --git a/website/versioned_docs/version-0.9.1/features/responsible_ai/Model Interpretation on Spark.md b/website/versioned_docs/version-0.9.4/features/responsible_ai/Model Interpretation on Spark.md similarity index 100% rename from 
website/versioned_docs/version-0.9.1/features/responsible_ai/Model Interpretation on Spark.md rename to website/versioned_docs/version-0.9.4/features/responsible_ai/Model Interpretation on Spark.md diff --git a/website/versioned_docs/version-0.9.1/features/spark_serving/SparkServing - Deploying a Classifier.md b/website/versioned_docs/version-0.9.4/features/spark_serving/SparkServing - Deploying a Classifier.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/spark_serving/SparkServing - Deploying a Classifier.md rename to website/versioned_docs/version-0.9.4/features/spark_serving/SparkServing - Deploying a Classifier.md diff --git a/website/versioned_docs/version-0.9.1/features/spark_serving/about.md b/website/versioned_docs/version-0.9.4/features/spark_serving/about.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/spark_serving/about.md rename to website/versioned_docs/version-0.9.4/features/spark_serving/about.md diff --git a/website/versioned_docs/version-0.9.1/features/vw/Vowpal Wabbit - Overview.md b/website/versioned_docs/version-0.9.4/features/vw/Vowpal Wabbit - Overview.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/vw/Vowpal Wabbit - Overview.md rename to website/versioned_docs/version-0.9.4/features/vw/Vowpal Wabbit - Overview.md diff --git a/website/versioned_docs/version-0.9.1/features/vw/about.md b/website/versioned_docs/version-0.9.4/features/vw/about.md similarity index 100% rename from website/versioned_docs/version-0.9.1/features/vw/about.md rename to website/versioned_docs/version-0.9.4/features/vw/about.md diff --git a/website/versioned_docs/version-0.9.1/getting_started/first_example.md b/website/versioned_docs/version-0.9.4/getting_started/first_example.md similarity index 100% rename from website/versioned_docs/version-0.9.1/getting_started/first_example.md rename to website/versioned_docs/version-0.9.4/getting_started/first_example.md diff --git a/website/versioned_docs/version-0.9.1/getting_started/first_model.md b/website/versioned_docs/version-0.9.4/getting_started/first_model.md similarity index 100% rename from website/versioned_docs/version-0.9.1/getting_started/first_model.md rename to website/versioned_docs/version-0.9.4/getting_started/first_model.md diff --git a/website/versioned_docs/version-0.9.1/getting_started/installation.md b/website/versioned_docs/version-0.9.4/getting_started/installation.md similarity index 93% rename from website/versioned_docs/version-0.9.1/getting_started/installation.md rename to website/versioned_docs/version-0.9.4/getting_started/installation.md index 0a9660391f..de87ea67fe 100644 --- a/website/versioned_docs/version-0.9.1/getting_started/installation.md +++ b/website/versioned_docs/version-0.9.4/getting_started/installation.md @@ -12,7 +12,7 @@ the above example, or from python: ```python import pyspark spark = pyspark.sql.SparkSession.builder.appName("MyApp") \ - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.1") \ + .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.4") \ .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") \ .getOrCreate() import synapse.ml @@ -25,7 +25,7 @@ your `build.sbt`: ```scala resolvers += "SynapseML" at "https://mmlspark.azureedge.net/maven" -libraryDependencies += "com.microsoft.azure" %% "synapseml" % "0.9.1" +libraryDependencies += "com.microsoft.azure" %% "synapseml" % "0.9.4" ``` @@ -35,9 +35,9 @@ SynapseML can be conveniently 
installed on existing Spark clusters via the `--packages` option, examples: ```bash -spark-shell --packages com.microsoft.azure:synapseml:0.9.1 -pyspark --packages com.microsoft.azure:synapseml:0.9.1 -spark-submit --packages com.microsoft.azure:synapseml:0.9.1 MyApp.jar +spark-shell --packages com.microsoft.azure:synapseml:0.9.4 +pyspark --packages com.microsoft.azure:synapseml:0.9.4 +spark-submit --packages com.microsoft.azure:synapseml:0.9.4 MyApp.jar ``` This can be used in other Spark contexts too. For example, you can use SynapseML @@ -52,7 +52,7 @@ cloud](http://community.cloud.databricks.com), create a new [library from Maven coordinates](https://docs.databricks.com/user-guide/libraries.html#libraries-from-maven-pypi-or-spark-packages) in your workspace. -For the coordinates use: `com.microsoft.azure:synapseml:0.9.1` +For the coordinates use: `com.microsoft.azure:synapseml:0.9.4` with the resolver: `https://mmlspark.azureedge.net/maven`. Ensure this library is attached to your target cluster(s). @@ -60,7 +60,7 @@ Finally, ensure that your Spark cluster has at least Spark 3.12 and Scala 2.12. You can use SynapseML in both your Scala and PySpark notebooks. To get started with our example notebooks import the following databricks archive: -`https://mmlspark.blob.core.windows.net/dbcs/SynapseMLExamplesv0.9.1.dbc` +`https://mmlspark.blob.core.windows.net/dbcs/SynapseMLExamplesv0.9.4.dbc` ### Apache Livy and HDInsight @@ -73,7 +73,7 @@ Excluding certain packages from the library may be necessary due to current issu { "name": "synapseml", "conf": { - "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.1", + "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.4", "spark.jars.repositories": "https://mmlspark.azureedge.net/maven", "spark.jars.excludes": "org.scala-lang:scala-reflect,org.apache.spark:spark-tags_2.12,org.scalactic:scalactic_2.12,org.scalatest:scalatest_2.12" } @@ -87,7 +87,7 @@ In Azure Synapse, "spark.yarn.user.classpath.first" should be set to "true" to o { "name": "synapseml", "conf": { - "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.1", + "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.4", "spark.jars.repositories": "https://mmlspark.azureedge.net/maven", "spark.jars.excludes": "org.scala-lang:scala-reflect,org.apache.spark:spark-tags_2.12,org.scalactic:scalactic_2.12,org.scalatest:scalatest_2.12", "spark.yarn.user.classpath.first": "true" diff --git a/website/versioned_docs/version-0.9.1/reference/R-setup.md b/website/versioned_docs/version-0.9.4/reference/R-setup.md similarity index 98% rename from website/versioned_docs/version-0.9.1/reference/R-setup.md rename to website/versioned_docs/version-0.9.4/reference/R-setup.md index fbc7f8aa61..190c8c9697 100644 --- a/website/versioned_docs/version-0.9.1/reference/R-setup.md +++ b/website/versioned_docs/version-0.9.4/reference/R-setup.md @@ -18,7 +18,7 @@ To install the current SynapseML package for R use: ```R ... -devtools::install_url("https://mmlspark.azureedge.net/rrr/synapseml-0.9.1.zip") +devtools::install_url("https://mmlspark.azureedge.net/rrr/synapseml-0.9.4.zip") ... ``` @@ -31,7 +31,7 @@ It will take some time to install all dependencies. Then, run: library(sparklyr) library(dplyr) config <- spark_config() -config$sparklyr.defaultPackages <- "com.microsoft.azure:synapseml:0.9.1" +config$sparklyr.defaultPackages <- "com.microsoft.azure:synapseml:0.9.4" sc <- spark_connect(master = "local", config = config) ... 
``` @@ -91,7 +91,7 @@ and then use spark_connect with method = "databricks": ```R install.packages("devtools") -devtools::install_url("https://mmlspark.azureedge.net/rrr/synapseml-0.9.1.zip") +devtools::install_url("https://mmlspark.azureedge.net/rrr/synapseml-0.9.4.zip") library(sparklyr) library(dplyr) sc <- spark_connect(method = "databricks") diff --git a/website/versioned_docs/version-0.9.1/reference/SAR.md b/website/versioned_docs/version-0.9.4/reference/SAR.md similarity index 100% rename from website/versioned_docs/version-0.9.1/reference/SAR.md rename to website/versioned_docs/version-0.9.4/reference/SAR.md diff --git a/website/versioned_docs/version-0.9.1/reference/contributing_guide.md b/website/versioned_docs/version-0.9.4/reference/contributing_guide.md similarity index 100% rename from website/versioned_docs/version-0.9.1/reference/contributing_guide.md rename to website/versioned_docs/version-0.9.4/reference/contributing_guide.md diff --git a/website/versioned_docs/version-0.9.1/reference/cyber.md b/website/versioned_docs/version-0.9.4/reference/cyber.md similarity index 90% rename from website/versioned_docs/version-0.9.1/reference/cyber.md rename to website/versioned_docs/version-0.9.4/reference/cyber.md index cf2a98faf2..74d9a4b053 100644 --- a/website/versioned_docs/version-0.9.1/reference/cyber.md +++ b/website/versioned_docs/version-0.9.4/reference/cyber.md @@ -18,50 +18,50 @@ sidebar_label: CyberML (i.e., it returns a sample from the complement set). ## feature engineering: [indexers.py](https://github.com/microsoft/SynapseML/blob/master/core/src/main/python/synapse/ml/cyber/feature/indexers.py) -1. [IdIndexer](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.IdIndexer) +1. [IdIndexer](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.IdIndexer) is a SparkML [Estimator](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Estimator.html). Given a dataframe, it creates an IdIndexerModel (described next) for categorical features which contains the information to map each partition and column seen in the given dataframe to an id. for each partition or one consecutive range for all partition and column values. -2. [IdIndexerModel](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.IdIndexerModel) +2. [IdIndexerModel](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.IdIndexerModel) is a SparkML [Transformer](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Transformer.html). Given a dataframe maps each partition and column field to a consecutive integer id. Partitions or column values not encountered in the estimator are mapped to 0. The model can operate in two modes, either create consecutive integer id independently -3. [MultiIndexer](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.MultiIndexer) +3. [MultiIndexer](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.MultiIndexer) is a SparkML [Estimator](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Estimator.html). 
Uses multiple IdIndexer to generate a MultiIndexerModel (described next) for categorical features which contains multiple IdIndexers for multiple partitions and columns. -4. [MultiIndexerModel](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.MultiIndexerModel) +4. [MultiIndexerModel](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.indexers.MultiIndexerModel) is a SparkML [Transformer](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Transformer.html). Given a dataframe maps each partition and column field to a consecutive integer id. Partitions or column values not encountered in the estimator are mapped to 0. The model can operate in two modes, either create consecutive integer id independently ## feature engineering: [scalers.py](https://github.com/microsoft/SynapseML/blob/master/core/src/main/python/synapse/ml/cyber/feature/scalers.py) -1. [StandardScalarScaler](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.StandardScalarScaler) +1. [StandardScalarScaler](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.StandardScalarScaler) is a SparkML [Estimator](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Estimator.html). Given a dataframe it creates a StandardScalarScalerModel (described next) which normalizes any given dataframe according to the mean and standard deviation calculated on the dataframe given to the estimator. -2. [StandardScalarScalerModel](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.StandardScalarScalerModel) +2. [StandardScalarScalerModel](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.StandardScalarScalerModel) is a SparkML [Transformer](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Transformer.html). Given a dataframe with a value column x, the transformer changes its value as follows: x'=(x-mean)/stddev, i.e., if the transformer is given the same dataframe the estimator was given then the value column will have a mean of 0.0 and a standard deviation of 1.0. -3. [LinearScalarScaler](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.LinearScalarScaler) +3. [LinearScalarScaler](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.LinearScalarScaler) is a SparkML [Estimator](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Estimator.html). Given a dataframe it creates a LinearScalarScalerModel (described next) which normalizes any given dataframe according to the minimum and maximum values calculated on the dataframe given to the estimator. -4. [LinearScalarScalerModel](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.LinearScalarScalerModel) +4. 
[LinearScalarScalerModel](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.feature.html#synapse.ml.cyber.feature.scalers.LinearScalarScalerModel) is a SparkML [Transformer](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Transformer.html). Given a dataframe with a value column x, the transformer changes its value such that if the transformer is given the same dataframe the estimator was given then the value column will be scaled linearly to the given ranges. ## access anomalies: [collaborative_filtering.py](https://github.com/microsoft/SynapseML/blob/master/core/src/main/python/synapse/ml/cyber/anomaly/collaborative_filtering.py) -1. [AccessAnomaly](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.AccessAnomaly) +1. [AccessAnomaly](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.AccessAnomaly) is a SparkML [Estimator](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Estimator.html). Given a dataframe the estimator generates an AccessAnomalyModel (next described) which can detect anomalous access of users to resources in such a way where the access @@ -69,14 +69,14 @@ sidebar_label: CyberML a resource from Finance. This is based solely on access patterns rather than explicit features. Internally this is based on Collaborative Filtering as implemented in Spark using Matrix Factorization with Alternating Least Squares. -2. [AccessAnomalyModel](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.AccessAnomalyModel) +2. [AccessAnomalyModel](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.AccessAnomalyModel) is a SparkML [Transformer](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Transformer.html). Given a dataframe the transformer computes a value between (-inf, inf) where positive values indicate an anomaly score. Anomaly scores are computed to have a mean of 1.0 and a standard deviation of 1.0 over the original dataframe given to the estimator. -3. [ModelNormalizeTransformer](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.ModelNormalizeTransformer) +3. [ModelNormalizeTransformer](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.ModelNormalizeTransformer) is a SparkML [Transformer](https://spark.apache.org/docs/2.2.0/api/java/index.html?org/apache/spark/ml/Transformer.html). This is a transformer used internally by AccessAnomaly to normalize a model to generate anomaly scores with mean 0.0 and standard deviation of 1.0. -4. [AccessAnomalyConfig](https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.AccessAnomalyConfig) +4. [AccessAnomalyConfig](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/synapse.ml.cyber.anomaly.html#synapse.ml.cyber.anomaly.collaborative_filtering.AccessAnomalyConfig) contains the default values for AccessAnomaly. 
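The CyberML reference above is prose-only, so a short usage sketch may help tie the estimators together. The snippet below is illustrative and untested: it assumes an existing `spark` session and a toy access log whose column names (`tenant_id`, `user`, `res`) are taken from the CyberML example notebook referenced elsewhere in this patch series; only the `AccessAnomaly` class and its module path come from the reference above, everything else is invented for the example.

```python
# Illustrative sketch (not part of this patch): score access events with CyberML's
# collaborative-filtering-based AccessAnomaly estimator.
from synapse.ml.cyber.anomaly.collaborative_filtering import AccessAnomaly

# Toy access log of (tenant, user, resource) triples; real data would have many more rows.
access_df = spark.createDataFrame(
    [
        ("t1", "alice", "finance_db"),
        ("t1", "alice", "finance_reports"),
        ("t1", "bob", "hr_db"),
        ("t1", "bob", "hr_portal"),
    ],
    ["tenant_id", "user", "res"],
)

# fit() learns per-tenant user/resource embeddings via ALS matrix factorization,
# as described in the access-anomalies section above.
anomaly_model = AccessAnomaly(tenantCol="tenant_id").fit(access_df)

# transform() appends an anomaly-score column; higher values flag more unusual access.
scored = anomaly_model.transform(access_df)
scored.show()
```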
diff --git a/website/versioned_docs/version-0.9.1/reference/datasets.md b/website/versioned_docs/version-0.9.4/reference/datasets.md similarity index 100% rename from website/versioned_docs/version-0.9.1/reference/datasets.md rename to website/versioned_docs/version-0.9.4/reference/datasets.md diff --git a/website/versioned_docs/version-0.9.1/reference/developer-readme.md b/website/versioned_docs/version-0.9.4/reference/developer-readme.md similarity index 100% rename from website/versioned_docs/version-0.9.1/reference/developer-readme.md rename to website/versioned_docs/version-0.9.4/reference/developer-readme.md diff --git a/website/versioned_docs/version-0.9.1/reference/docker.md b/website/versioned_docs/version-0.9.4/reference/docker.md similarity index 98% rename from website/versioned_docs/version-0.9.1/reference/docker.md rename to website/versioned_docs/version-0.9.4/reference/docker.md index 46651b3cd6..c83d59ab45 100644 --- a/website/versioned_docs/version-0.9.1/reference/docker.md +++ b/website/versioned_docs/version-0.9.4/reference/docker.md @@ -32,7 +32,7 @@ You can now select one of the sample notebooks and run it, or create your own. In the above, `mcr.microsoft.com/mmlspark/release` specifies the project and image name that you want to run. There is another component implicit here which is the _tsag_ (= version) that you want to use — specifying it explicitly looks like -`mcr.microsoft.com/mmlspark/release:0.9.1` for the `0.9.1` tag. +`mcr.microsoft.com/mmlspark/release:0.9.4` for the `0.9.4` tag. Leaving `mcr.microsoft.com/mmlspark/release` by itself has an implicit `latest` tag, so it is equivalent to `mcr.microsoft.com/mmlspark/release:latest`. The `latest` tag is identical to the @@ -48,7 +48,7 @@ that you will probably want to use can look as follows: docker run -it --rm \ -p 127.0.0.1:80:8888 \ -v ~/myfiles:/notebooks/myfiles \ - mcr.microsoft.com/mmlspark/release:0.9.1 + mcr.microsoft.com/mmlspark/release:0.9.4 ``` In this example, backslashes are used to break things up for readability; you @@ -58,7 +58,7 @@ path and line breaks looks a little different: docker run -it --rm ` -p 127.0.0.1:80:8888 ` -v C:\myfiles:/notebooks/myfiles ` - mcr.microsoft.com/mmlspark/release:0.9.1 + mcr.microsoft.com/mmlspark/release:0.9.4 Let's break this command and go over the meaning of each part: @@ -141,7 +141,7 @@ Let's break this command and go over the meaning of each part: model.write().overwrite().save('myfiles/myTrainedModel.mml') ``` -- **`mcr.microsoft.com/mmlspark/release:0.9.1`** +- **`mcr.microsoft.com/mmlspark/release:0.9.4`** Finally, this specifies an explicit version tag for the image that we want to run. 
diff --git a/website/versioned_docs/version-0.9.1/reference/vagrant.md b/website/versioned_docs/version-0.9.4/reference/vagrant.md similarity index 100% rename from website/versioned_docs/version-0.9.1/reference/vagrant.md rename to website/versioned_docs/version-0.9.4/reference/vagrant.md diff --git a/website/versioned_docs/version-0.9.1/third-party-notices.txt b/website/versioned_docs/version-0.9.4/third-party-notices.txt similarity index 100% rename from website/versioned_docs/version-0.9.1/third-party-notices.txt rename to website/versioned_docs/version-0.9.4/third-party-notices.txt diff --git a/website/versioned_sidebars/version-0.9.1-sidebars.json b/website/versioned_sidebars/version-0.9.4-sidebars.json similarity index 65% rename from website/versioned_sidebars/version-0.9.1-sidebars.json rename to website/versioned_sidebars/version-0.9.4-sidebars.json index c77179702b..cf09c5e420 100644 --- a/website/versioned_sidebars/version-0.9.1-sidebars.json +++ b/website/versioned_sidebars/version-0.9.4-sidebars.json @@ -1,8 +1,8 @@ { - "version-0.9.1/docs": [ + "version-0.9.4/docs": [ { "type": "doc", - "id": "version-0.9.1/about" + "id": "version-0.9.4/about" }, { "type": "category", @@ -10,15 +10,15 @@ "items": [ { "type": "doc", - "id": "version-0.9.1/getting_started/installation" + "id": "version-0.9.4/getting_started/installation" }, { "type": "doc", - "id": "version-0.9.1/getting_started/first_example" + "id": "version-0.9.4/getting_started/first_example" }, { "type": "doc", - "id": "version-0.9.1/getting_started/first_model" + "id": "version-0.9.4/getting_started/first_model" } ], "collapsible": true, @@ -34,19 +34,19 @@ "items": [ { "type": "doc", - "id": "version-0.9.1/features/cognitive_services/CognitiveServices - Celebrity Quote Analysis" + "id": "version-0.9.4/features/cognitive_services/CognitiveServices - Celebrity Quote Analysis" }, { "type": "doc", - "id": "version-0.9.1/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms" + "id": "version-0.9.4/features/cognitive_services/CognitiveServices - Create a Multilingual Search Engine from Forms" }, { "type": "doc", - "id": "version-0.9.1/features/cognitive_services/CognitiveServices - Overview" + "id": "version-0.9.4/features/cognitive_services/CognitiveServices - Overview" }, { "type": "doc", - "id": "version-0.9.1/features/cognitive_services/CognitiveServices - Predictive Maintenance" + "id": "version-0.9.4/features/cognitive_services/CognitiveServices - Predictive Maintenance" } ], "collapsible": true, @@ -58,35 +58,35 @@ "items": [ { "type": "doc", - "id": "version-0.9.1/features/responsible_ai/Data Balance Analysis" + "id": "version-0.9.4/features/responsible_ai/Data Balance Analysis" }, { "type": "doc", - "id": "version-0.9.1/features/responsible_ai/DataBalanceAnalysis - Adult Census Income" + "id": "version-0.9.4/features/responsible_ai/DataBalanceAnalysis - Adult Census Income" }, { "type": "doc", - "id": "version-0.9.1/features/responsible_ai/Interpretability - Explanation Dashboard" + "id": "version-0.9.4/features/responsible_ai/Interpretability - Explanation Dashboard" }, { "type": "doc", - "id": "version-0.9.1/features/responsible_ai/Interpretability - Image Explainers" + "id": "version-0.9.4/features/responsible_ai/Interpretability - Image Explainers" }, { "type": "doc", - "id": "version-0.9.1/features/responsible_ai/Interpretability - Snow Leopard Detection" + "id": "version-0.9.4/features/responsible_ai/Interpretability - Snow Leopard Detection" }, { "type": "doc", - "id": 
"version-0.9.1/features/responsible_ai/Interpretability - Tabular SHAP explainer" + "id": "version-0.9.4/features/responsible_ai/Interpretability - Tabular SHAP explainer" }, { "type": "doc", - "id": "version-0.9.1/features/responsible_ai/Interpretability - Text Explainers" + "id": "version-0.9.4/features/responsible_ai/Interpretability - Text Explainers" }, { "type": "doc", - "id": "version-0.9.1/features/responsible_ai/Model Interpretation on Spark" + "id": "version-0.9.4/features/responsible_ai/Model Interpretation on Spark" } ], "collapsible": true, @@ -98,11 +98,11 @@ "items": [ { "type": "doc", - "id": "version-0.9.1/features/onnx/about" + "id": "version-0.9.4/features/onnx/about" }, { "type": "doc", - "id": "version-0.9.1/features/onnx/ONNX - Inference on Spark" + "id": "version-0.9.4/features/onnx/ONNX - Inference on Spark" } ], "collapsible": true, @@ -114,11 +114,11 @@ "items": [ { "type": "doc", - "id": "version-0.9.1/features/lightgbm/about" + "id": "version-0.9.4/features/lightgbm/about" }, { "type": "doc", - "id": "version-0.9.1/features/lightgbm/LightGBM - Overview" + "id": "version-0.9.4/features/lightgbm/LightGBM - Overview" } ], "collapsible": true, @@ -130,11 +130,11 @@ "items": [ { "type": "doc", - "id": "version-0.9.1/features/vw/about" + "id": "version-0.9.4/features/vw/about" }, { "type": "doc", - "id": "version-0.9.1/features/vw/Vowpal Wabbit - Overview" + "id": "version-0.9.4/features/vw/Vowpal Wabbit - Overview" } ], "collapsible": true, @@ -146,11 +146,11 @@ "items": [ { "type": "doc", - "id": "version-0.9.1/features/spark_serving/about" + "id": "version-0.9.4/features/spark_serving/about" }, { "type": "doc", - "id": "version-0.9.1/features/spark_serving/SparkServing - Deploying a Classifier" + "id": "version-0.9.4/features/spark_serving/SparkServing - Deploying a Classifier" } ], "collapsible": true, @@ -162,7 +162,7 @@ "items": [ { "type": "doc", - "id": "version-0.9.1/features/opencv/OpenCV - Pipeline Image Transformations" + "id": "version-0.9.4/features/opencv/OpenCV - Pipeline Image Transformations" } ], "collapsible": true, @@ -174,19 +174,19 @@ "items": [ { "type": "doc", - "id": "version-0.9.1/features/classification/Classification - Adult Census with Vowpal Wabbit" + "id": "version-0.9.4/features/classification/Classification - Adult Census with Vowpal Wabbit" }, { "type": "doc", - "id": "version-0.9.1/features/classification/Classification - Adult Census" + "id": "version-0.9.4/features/classification/Classification - Adult Census" }, { "type": "doc", - "id": "version-0.9.1/features/classification/Classification - Before and After SynapseML" + "id": "version-0.9.4/features/classification/Classification - Before and After SynapseML" }, { "type": "doc", - "id": "version-0.9.1/features/classification/Classification - Twitter Sentiment with Vowpal Wabbit" + "id": "version-0.9.4/features/classification/Classification - Twitter Sentiment with Vowpal Wabbit" } ], "collapsible": true, @@ -198,19 +198,19 @@ "items": [ { "type": "doc", - "id": "version-0.9.1/features/regression/Regression - Auto Imports" + "id": "version-0.9.4/features/regression/Regression - Auto Imports" }, { "type": "doc", - "id": "version-0.9.1/features/regression/Regression - Flight Delays with DataCleaning" + "id": "version-0.9.4/features/regression/Regression - Flight Delays with DataCleaning" }, { "type": "doc", - "id": "version-0.9.1/features/regression/Regression - Flight Delays" + "id": "version-0.9.4/features/regression/Regression - Flight Delays" }, { "type": "doc", - "id": 
"version-0.9.1/features/regression/Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor" + "id": "version-0.9.4/features/regression/Regression - Vowpal Wabbit vs. LightGBM vs. Linear Regressor" } ], "collapsible": true, @@ -222,43 +222,43 @@ "items": [ { "type": "doc", - "id": "version-0.9.1/features/other/AzureSearchIndex - Met Artworks" + "id": "version-0.9.4/features/other/AzureSearchIndex - Met Artworks" }, { "type": "doc", - "id": "version-0.9.1/features/other/ConditionalKNN - Exploring Art Across Cultures" + "id": "version-0.9.4/features/other/ConditionalKNN - Exploring Art Across Cultures" }, { "type": "doc", - "id": "version-0.9.1/features/other/CyberML - Anomalous Access Detection" + "id": "version-0.9.4/features/other/CyberML - Anomalous Access Detection" }, { "type": "doc", - "id": "version-0.9.1/features/other/DeepLearning - BiLSTM Medical Entity Extraction" + "id": "version-0.9.4/features/other/DeepLearning - BiLSTM Medical Entity Extraction" }, { "type": "doc", - "id": "version-0.9.1/features/other/DeepLearning - CIFAR10 Convolutional Network" + "id": "version-0.9.4/features/other/DeepLearning - CIFAR10 Convolutional Network" }, { "type": "doc", - "id": "version-0.9.1/features/other/DeepLearning - Flower Image Classification" + "id": "version-0.9.4/features/other/DeepLearning - Flower Image Classification" }, { "type": "doc", - "id": "version-0.9.1/features/other/DeepLearning - Transfer Learning" + "id": "version-0.9.4/features/other/DeepLearning - Transfer Learning" }, { "type": "doc", - "id": "version-0.9.1/features/other/HyperParameterTuning - Fighting Breast Cancer" + "id": "version-0.9.4/features/other/HyperParameterTuning - Fighting Breast Cancer" }, { "type": "doc", - "id": "version-0.9.1/features/other/TextAnalytics - Amazon Book Reviews with Word2Vec" + "id": "version-0.9.4/features/other/TextAnalytics - Amazon Book Reviews with Word2Vec" }, { "type": "doc", - "id": "version-0.9.1/features/other/TextAnalytics - Amazon Book Reviews" + "id": "version-0.9.4/features/other/TextAnalytics - Amazon Book Reviews" } ], "collapsible": true, @@ -274,23 +274,23 @@ "items": [ { "type": "doc", - "id": "version-0.9.1/documentation/transformers/transformers_cognitive" + "id": "version-0.9.4/documentation/transformers/transformers_cognitive" }, { "type": "doc", - "id": "version-0.9.1/documentation/transformers/transformers_core" + "id": "version-0.9.4/documentation/transformers/transformers_core" }, { "type": "doc", - "id": "version-0.9.1/documentation/transformers/transformers_opencv" + "id": "version-0.9.4/documentation/transformers/transformers_opencv" }, { "type": "doc", - "id": "version-0.9.1/documentation/transformers/transformers_vw" + "id": "version-0.9.4/documentation/transformers/transformers_vw" }, { "type": "doc", - "id": "version-0.9.1/documentation/transformers/transformers_deep_learning" + "id": "version-0.9.4/documentation/transformers/transformers_deep_learning" } ], "collapsible": true, @@ -302,15 +302,15 @@ "items": [ { "type": "doc", - "id": "version-0.9.1/documentation/estimators/estimators_core" + "id": "version-0.9.4/documentation/estimators/estimators_core" }, { "type": "doc", - "id": "version-0.9.1/documentation/estimators/estimators_lightgbm" + "id": "version-0.9.4/documentation/estimators/estimators_lightgbm" }, { "type": "doc", - "id": "version-0.9.1/documentation/estimators/estimators_vw" + "id": "version-0.9.4/documentation/estimators/estimators_vw" } ], "collapsible": true, @@ -322,35 +322,35 @@ "items": [ { "type": "doc", - "id": 
"version-0.9.1/reference/developer-readme" + "id": "version-0.9.4/reference/developer-readme" }, { "type": "doc", - "id": "version-0.9.1/reference/contributing_guide" + "id": "version-0.9.4/reference/contributing_guide" }, { "type": "doc", - "id": "version-0.9.1/reference/docker" + "id": "version-0.9.4/reference/docker" }, { "type": "doc", - "id": "version-0.9.1/reference/R-setup" + "id": "version-0.9.4/reference/R-setup" }, { "type": "doc", - "id": "version-0.9.1/reference/SAR" + "id": "version-0.9.4/reference/SAR" }, { "type": "doc", - "id": "version-0.9.1/reference/cyber" + "id": "version-0.9.4/reference/cyber" }, { "type": "doc", - "id": "version-0.9.1/reference/datasets" + "id": "version-0.9.4/reference/datasets" }, { "type": "doc", - "id": "version-0.9.1/reference/vagrant" + "id": "version-0.9.4/reference/vagrant" } ], "collapsible": true, diff --git a/website/versions.json b/website/versions.json index ee7e07f3af..587e1558e1 100644 --- a/website/versions.json +++ b/website/versions.json @@ -1,3 +1,3 @@ [ - "0.9.1" + "0.9.4" ] From 1c1edf6d71f632a4a10d5afbfa5c6525af452c8f Mon Sep 17 00:00:00 2001 From: Mark Hamilton Date: Tue, 16 Nov 2021 01:14:27 -0500 Subject: [PATCH 21/40] fix: fix install instructions (#1259) --- README.md | 16 ++++++++-------- .../CyberML - Anomalous Access Detection.ipynb | 2 +- website/docs/getting_started/installation.md | 14 +++++++------- website/docs/reference/R-setup.md | 2 +- website/doctest.py | 2 +- website/src/pages/index.js | 12 ++++++------ .../CyberML - Anomalous Access Detection.md | 2 +- .../getting_started/installation.md | 14 +++++++------- .../version-0.9.4/reference/R-setup.md | 2 +- 9 files changed, 33 insertions(+), 33 deletions(-) diff --git a/README.md b/README.md index 7cf99bfe44..4a8207187a 100644 --- a/README.md +++ b/README.md @@ -149,7 +149,7 @@ the above example, or from python: ```python import pyspark spark = pyspark.sql.SparkSession.builder.appName("MyApp") \ - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.4") \ + .config("spark.jars.packages", "com.microsoft.azure:synapseml_2.12:0.9.4") \ .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") \ .getOrCreate() import synapse.ml @@ -162,7 +162,7 @@ your `build.sbt`: ```scala resolvers += "SynapseML" at "https://mmlspark.azureedge.net/maven" -libraryDependencies += "com.microsoft.azure" % "synapseml" % "0.9.4" +libraryDependencies += "com.microsoft.azure" % "synapseml_2.12" % "0.9.4" ``` @@ -172,9 +172,9 @@ SynapseML can be conveniently installed on existing Spark clusters via the `--packages` option, examples: ```bash -spark-shell --packages com.microsoft.azure:synapseml:0.9.4 -pyspark --packages com.microsoft.azure:synapseml:0.9.4 -spark-submit --packages com.microsoft.azure:synapseml:0.9.4 MyApp.jar +spark-shell --packages com.microsoft.azure:synapseml_2.12:0.9.4 +pyspark --packages com.microsoft.azure:synapseml_2.12:0.9.4 +spark-submit --packages com.microsoft.azure:synapseml_2.12:0.9.4 MyApp.jar ``` This can be used in other Spark contexts too. For example, you can use SynapseML @@ -189,7 +189,7 @@ cloud](http://community.cloud.databricks.com), create a new [library from Maven coordinates](https://docs.databricks.com/user-guide/libraries.html#libraries-from-maven-pypi-or-spark-packages) in your workspace. -For the coordinates use: `com.microsoft.azure:synapseml:0.9.4` +For the coordinates use: `com.microsoft.azure:synapseml_2.12:0.9.4` with the resolver: `https://mmlspark.azureedge.net/maven`. 
Ensure this library is attached to your target cluster(s). @@ -210,7 +210,7 @@ Excluding certain packages from the library may be necessary due to current issu { "name": "synapseml", "conf": { - "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.4", + "spark.jars.packages": "com.microsoft.azure:synapseml_2.12:0.9.4", "spark.jars.repositories": "https://mmlspark.azureedge.net/maven", "spark.jars.excludes": "org.scala-lang:scala-reflect,org.apache.spark:spark-tags_2.12,org.scalactic:scalactic_2.12,org.scalatest:scalatest_2.12" } @@ -224,7 +224,7 @@ In Azure Synapse, "spark.yarn.user.classpath.first" should be set to "true" to o { "name": "synapseml", "conf": { - "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.4", + "spark.jars.packages": "com.microsoft.azure:synapseml_2.12:0.9.4", "spark.jars.repositories": "https://mmlspark.azureedge.net/maven", "spark.jars.excludes": "org.scala-lang:scala-reflect,org.apache.spark:spark-tags_2.12,org.scalactic:scalactic_2.12,org.scalatest:scalatest_2.12", "spark.yarn.user.classpath.first": "true" diff --git a/notebooks/features/other/CyberML - Anomalous Access Detection.ipynb b/notebooks/features/other/CyberML - Anomalous Access Detection.ipynb index 3e2fd563b7..2cb553adf1 100644 --- a/notebooks/features/other/CyberML - Anomalous Access Detection.ipynb +++ b/notebooks/features/other/CyberML - Anomalous Access Detection.ipynb @@ -34,7 +34,7 @@ "# Create an Azure Databricks cluster and install the following libs\n", "\n", "1. In Cluster Libraries install from library source Maven:\n", - "Coordinates: com.microsoft.azure:synapseml:0.9.4\n", + "Coordinates: com.microsoft.azure:synapseml_2.12:0.9.4\n", "Repository: https://mmlspark.azureedge.net/maven\n", "\n", "2. In Cluster Libraries install from PyPI the library called plotly" diff --git a/website/docs/getting_started/installation.md b/website/docs/getting_started/installation.md index de87ea67fe..7f7bec14d4 100644 --- a/website/docs/getting_started/installation.md +++ b/website/docs/getting_started/installation.md @@ -12,7 +12,7 @@ the above example, or from python: ```python import pyspark spark = pyspark.sql.SparkSession.builder.appName("MyApp") \ - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.4") \ + .config("spark.jars.packages", "com.microsoft.azure:synapseml_2.12:0.9.4") \ .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") \ .getOrCreate() import synapse.ml @@ -35,9 +35,9 @@ SynapseML can be conveniently installed on existing Spark clusters via the `--packages` option, examples: ```bash -spark-shell --packages com.microsoft.azure:synapseml:0.9.4 -pyspark --packages com.microsoft.azure:synapseml:0.9.4 -spark-submit --packages com.microsoft.azure:synapseml:0.9.4 MyApp.jar +spark-shell --packages com.microsoft.azure:synapseml_2.12:0.9.4 +pyspark --packages com.microsoft.azure:synapseml_2.12:0.9.4 +spark-submit --packages com.microsoft.azure:synapseml_2.12:0.9.4 MyApp.jar ``` This can be used in other Spark contexts too. For example, you can use SynapseML @@ -52,7 +52,7 @@ cloud](http://community.cloud.databricks.com), create a new [library from Maven coordinates](https://docs.databricks.com/user-guide/libraries.html#libraries-from-maven-pypi-or-spark-packages) in your workspace. -For the coordinates use: `com.microsoft.azure:synapseml:0.9.4` +For the coordinates use: `com.microsoft.azure:synapseml_2.12:0.9.4` with the resolver: `https://mmlspark.azureedge.net/maven`. Ensure this library is attached to your target cluster(s). 
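The substance of this patch is the added Scala binary-version suffix (`synapseml` becomes `synapseml_2.12`) in every Maven coordinate. As a quick way to confirm a session actually resolves the corrected artifact, a minimal sketch follows; it assumes a local PySpark session as in the docs above and that SynapseML's Python package is provided by the resolved jar rather than by a separate pip install.

```python
# Minimal sanity check (sketch, not part of this patch): start a session with the
# corrected coordinate and confirm the SynapseML Python package becomes importable.
import pyspark

spark = (
    pyspark.sql.SparkSession.builder.appName("SynapseMLCheck")
    .config("spark.jars.packages", "com.microsoft.azure:synapseml_2.12:0.9.4")
    .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven")
    .getOrCreate()
)

import synapse.ml  # typically fails if the _2.12 artifact did not resolve
print(synapse.ml.__name__)
```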
@@ -73,7 +73,7 @@ Excluding certain packages from the library may be necessary due to current issu { "name": "synapseml", "conf": { - "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.4", + "spark.jars.packages": "com.microsoft.azure:synapseml_2.12:0.9.4", "spark.jars.repositories": "https://mmlspark.azureedge.net/maven", "spark.jars.excludes": "org.scala-lang:scala-reflect,org.apache.spark:spark-tags_2.12,org.scalactic:scalactic_2.12,org.scalatest:scalatest_2.12" } @@ -87,7 +87,7 @@ In Azure Synapse, "spark.yarn.user.classpath.first" should be set to "true" to o { "name": "synapseml", "conf": { - "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.4", + "spark.jars.packages": "com.microsoft.azure:synapseml_2.12:0.9.4", "spark.jars.repositories": "https://mmlspark.azureedge.net/maven", "spark.jars.excludes": "org.scala-lang:scala-reflect,org.apache.spark:spark-tags_2.12,org.scalactic:scalactic_2.12,org.scalatest:scalatest_2.12", "spark.yarn.user.classpath.first": "true" diff --git a/website/docs/reference/R-setup.md b/website/docs/reference/R-setup.md index 190c8c9697..35832fe8a5 100644 --- a/website/docs/reference/R-setup.md +++ b/website/docs/reference/R-setup.md @@ -31,7 +31,7 @@ It will take some time to install all dependencies. Then, run: library(sparklyr) library(dplyr) config <- spark_config() -config$sparklyr.defaultPackages <- "com.microsoft.azure:synapseml:0.9.4" +config$sparklyr.defaultPackages <- "com.microsoft.azure:synapseml_2.12:0.9.4" sc <- spark_connect(master = "local", config = config) ... ``` diff --git a/website/doctest.py b/website/doctest.py index 939cc7b5bc..a38ac81480 100644 --- a/website/doctest.py +++ b/website/doctest.py @@ -17,7 +17,7 @@ def add_python_helper_to_markdown(folder, md): os.environ["PYSPARK_DRIVER_PYTHON_OPTS"] = "notebook" spark = (pyspark.sql.SparkSession.builder.appName("MyApp") - .config("spark.jars.packages", "com.microsoft.azure:synapseml:0.9.4") + .config("spark.jars.packages", "com.microsoft.azure:synapseml_2.12:0.9.4") .config("spark.jars.repositories", "https://mmlspark.azureedge.net/maven") .getOrCreate()) diff --git a/website/src/pages/index.js b/website/src/pages/index.js index 3eaf9cd779..36d9202ef1 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -269,7 +269,7 @@ function Home() { { "name": "synapseml", "conf": { - "spark.jars.packages": "com.microsoft.azure:synapseml:0.9.4", + "spark.jars.packages": "com.microsoft.azure:synapseml_2.12:0.9.4", "spark.jars.repositories": "https://mmlspark.azureedge.net/maven", "spark.jars.excludes": "org.scala-lang:scala-reflect,org.apache.spark:spark-tags_2.12,org.scalactic:scalactic_2.12,org.scalatest:scalatest_2.12", "spark.yarn.user.classpath.first": "true" @@ -288,9 +288,9 @@ function Home() { SynapseML can be conveniently installed on existing Spark clusters via the --packages option, examples: This can be used in other Spark contexts too. For example, you @@ -317,7 +317,7 @@ spark-submit --packages com.microsoft.azure:synapseml:0.9.4 MyApp.jar`}

For the coordinates use: with the resolver: @@ -373,7 +373,7 @@ spark-submit --packages com.microsoft.azure:synapseml:0.9.4 MyApp.jar`} Date: Tue, 16 Nov 2021 12:36:44 -0500 Subject: [PATCH 22/40] chore: add website telemetry (#1260) --- website/docusaurus.config.js | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js index 4499523ca3..1e976da739 100644 --- a/website/docusaurus.config.js +++ b/website/docusaurus.config.js @@ -1,6 +1,6 @@ const path = require('path'); const {all_examples} = require('./src/plugins/examples'); -let version = "0.9.1"; +let version = "0.9.4"; module.exports = { title: 'SynapseML', @@ -13,7 +13,7 @@ module.exports = { trailingSlash: true, customFields: { examples: all_examples(), - version: "0.9.1", + version: "0.9.4", }, themeConfig: { prism: { @@ -80,11 +80,11 @@ module.exports = { }, { label: 'Python API Reference', - to: 'https://mmlspark.blob.core.windows.net/docs/0.9.1/pyspark/index.html', + to: 'https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/index.html', }, { label: 'Scala API Reference', - to: 'https://mmlspark.blob.core.windows.net/docs/0.9.1/scala/index.html', + to: 'https://mmlspark.blob.core.windows.net/docs/0.9.4/scala/index.html', }, ], }, @@ -118,7 +118,11 @@ module.exports = { apiKey: 'edc58a221b8a7df52bf7058219bbf9c9', indexName: 'synapseML', contextualSearch: true, - } + }, + googleAnalytics: { + trackingID: 'G-RWPE0183E8', + anonymizeIP: true, + }, }, presets: [ [ From b2a9c37a0877eef2240723fb899614937164c9f9 Mon Sep 17 00:00:00 2001 From: Mark Hamilton Date: Tue, 16 Nov 2021 13:14:02 -0500 Subject: [PATCH 23/40] chore: fix website telemetry (#1261) --- website/docusaurus.config.js | 2 +- website/package.json | 8 +- website/yarn.lock | 2543 ++++++++-------------------------- 3 files changed, 618 insertions(+), 1935 deletions(-) diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js index 1e976da739..9e75fd3b0d 100644 --- a/website/docusaurus.config.js +++ b/website/docusaurus.config.js @@ -119,7 +119,7 @@ module.exports = { indexName: 'synapseML', contextualSearch: true, }, - googleAnalytics: { + gtag: { trackingID: 'G-RWPE0183E8', anonymizeIP: true, }, diff --git a/website/package.json b/website/package.json index 1f5823d669..d4a8b23916 100644 --- a/website/package.json +++ b/website/package.json @@ -15,10 +15,10 @@ "audit:fix": "npx yarn-audit-fix" }, "dependencies": { - "@docusaurus/core": "^2.0.0-beta.7", - "@docusaurus/preset-classic": "^2.0.0-beta.7", - "@docusaurus/theme-classic": "^2.0.0-beta.6", - "@docusaurus/theme-search-algolia": "^2.0.0-beta.7", + "@docusaurus/core": "^2.0.0-beta.9", + "@docusaurus/preset-classic": "^2.0.0-beta.9", + "@docusaurus/theme-classic": "^2.0.0-beta.9", + "@docusaurus/theme-search-algolia": "^2.0.0-beta.9", "ansi-html-community": "^0.0.8", "classnames": "^2.2.6", "react": "^16.8.4", diff --git a/website/yarn.lock b/website/yarn.lock index e0b7614f3e..e3153856eb 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -229,20 +229,20 @@ "@algolia/logger-common" "4.10.5" "@algolia/requester-common" "4.10.5" -"@babel/code-frame@7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.10.4.tgz#168da1a36e90da68ae8d49c0f1b48c7c6249213a" - integrity sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg== - dependencies: - "@babel/highlight" "^7.10.4" - -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", 
"@babel/code-frame@^7.14.5", "@babel/code-frame@^7.5.5": +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.14.5": version "7.14.5" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.14.5.tgz#23b08d740e83f49c5e59945fbf1b43e80bbf4edb" integrity sha512-9pzDqyc6OLDaqe+zbACgFkb6fKMNG6CObKpnYXChRsvYGyEdc7CA2BaqeOM+vOtCS5ndmJicPJhKAwYRI6UfFw== dependencies: "@babel/highlight" "^7.14.5" +"@babel/code-frame@^7.8.3": + version "7.16.0" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.16.0.tgz#0dfc80309beec8411e65e706461c408b0bb9b431" + integrity sha512-IF4EOMEV+bfYwOmNxGzSnjR2EmQod7f1UXOpZM3l4i4o4QNwzjtJAu/HxdjHq0aYBvdqMuQEY1eg0nqW9ZPORA== + dependencies: + "@babel/highlight" "^7.16.0" + "@babel/compat-data@^7.13.11", "@babel/compat-data@^7.14.5", "@babel/compat-data@^7.14.7": version "7.14.7" resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.14.7.tgz#7b047d7a3a89a67d2258dc61f604f098f1bc7e08" @@ -659,7 +659,7 @@ "@babel/traverse" "^7.14.5" "@babel/types" "^7.14.5" -"@babel/highlight@^7.10.4", "@babel/highlight@^7.14.5": +"@babel/highlight@^7.14.5": version "7.14.5" resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.14.5.tgz#6861a52f03966405001f6aa534a01a24d99e8cd9" integrity sha512-qf9u2WFWVV0MppaL877j2dBtQIDgmidgjGk5VIMw3OadXvYaXn66U1BFlH2t4+t3i+8PhedppRv+i40ABzd+gg== @@ -668,6 +668,15 @@ chalk "^2.0.0" js-tokens "^4.0.0" +"@babel/highlight@^7.16.0": + version "7.16.0" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.16.0.tgz#6ceb32b2ca4b8f5f361fb7fd821e3fddf4a1725a" + integrity sha512-t8MH41kUQylBtu2+4IQA3atqevA2lRgqA2wyVB/YiWmsDSuylZZuXOUy9ric30hfzauEFfdsuk/eXTRrGrfd0g== + dependencies: + "@babel/helper-validator-identifier" "^7.15.7" + chalk "^2.0.0" + js-tokens "^4.0.0" + "@babel/parser@^7.12.16", "@babel/parser@^7.12.7", "@babel/parser@^7.14.5", "@babel/parser@^7.14.6", "@babel/parser@^7.14.7": version "7.14.7" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.14.7.tgz#6099720c8839ca865a2637e6c85852ead0bdb595" @@ -1290,18 +1299,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.14.5" -"@babel/plugin-transform-runtime@^7.12.15": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.14.5.tgz#30491dad49c6059f8f8fa5ee8896a0089e987523" - integrity sha512-fPMBhh1AV8ZyneiCIA+wYYUH1arzlXR1UMcApjvchDhfKxhy2r2lReJv8uHEyihi4IFIGlr1Pdx7S5fkESDQsg== - dependencies: - "@babel/helper-module-imports" "^7.14.5" - "@babel/helper-plugin-utils" "^7.14.5" - babel-plugin-polyfill-corejs2 "^0.2.2" - babel-plugin-polyfill-corejs3 "^0.2.2" - babel-plugin-polyfill-regenerator "^0.2.2" - semver "^6.3.0" - "@babel/plugin-transform-runtime@^7.15.0": version "7.15.8" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.15.8.tgz#9d15b1e94e1c7f6344f65a8d573597d93c6cd886" @@ -1382,7 +1379,7 @@ "@babel/helper-create-regexp-features-plugin" "^7.14.5" "@babel/helper-plugin-utils" "^7.14.5" -"@babel/preset-env@^7.12.1", "@babel/preset-env@^7.12.16": +"@babel/preset-env@^7.12.1": version "7.14.7" resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.14.7.tgz#5c70b22d4c2d893b03d8c886a5c17422502b932a" integrity sha512-itOGqCKLsSUl0Y+1nSfhbuuOlTs0MJk2Iv7iSH+XT/mR8U1zRLO7NjWlYXB47yhK4J/7j+HYty/EhFZDYKa/VA== @@ -1572,14 +1569,6 @@ "@babel/helper-validator-option" "^7.14.5" "@babel/plugin-transform-typescript" "^7.14.5" 
-"@babel/runtime-corejs3@^7.12.13": - version "7.14.7" - resolved "https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.14.7.tgz#0ef292bbce40ca00f874c9724ef175a12476465c" - integrity sha512-Wvzcw4mBYbTagyBVZpAJWI06auSIj033T/yNE0Zn1xcup83MieCddZA7ls3kme17L4NOGBrQ09Q+nKB41RLWBA== - dependencies: - core-js-pure "^3.15.0" - regenerator-runtime "^0.13.4" - "@babel/runtime-corejs3@^7.15.4": version "7.15.4" resolved "https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.15.4.tgz#403139af262b9a6e8f9ba04a6fdcebf8de692bf1" @@ -1588,7 +1577,7 @@ core-js-pure "^3.16.0" regenerator-runtime "^0.13.4" -"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.2", "@babel/runtime@^7.10.3", "@babel/runtime@^7.12.1", "@babel/runtime@^7.12.5", "@babel/runtime@^7.8.4": +"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.2", "@babel/runtime@^7.10.3", "@babel/runtime@^7.12.1", "@babel/runtime@^7.8.4": version "7.14.6" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.14.6.tgz#535203bc0892efc7dec60bdc27b2ecf6e409062d" integrity sha512-/PCB2uJ7oM44tz8YhC4Z/6PeOKXp4K588f+5M3clr1M4zbqztlo0XEfJ2LEzj/FgwfgGcIdl8n7YYjTCI0BYwg== @@ -1681,95 +1670,10 @@ "@docsearch/css" "3.0.0-alpha.41" algoliasearch "^4.0.0" -"@docusaurus/core@2.0.0-beta.6": - version "2.0.0-beta.6" - resolved "https://registry.yarnpkg.com/@docusaurus/core/-/core-2.0.0-beta.6.tgz#9847ae211a04f1d2b057f8e5ba650e76b9c2df83" - integrity sha512-XMeI+lJKeJBGYBNOfO/Tc+5FMf21E5p1xZjfe75cgYcfZdERZ+W7aemXquwReno8xxHb4Rnfmi9dxkbOLDjqDA== - dependencies: - "@babel/core" "^7.12.16" - "@babel/generator" "^7.12.15" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" - "@babel/plugin-transform-runtime" "^7.12.15" - "@babel/preset-env" "^7.12.16" - "@babel/preset-react" "^7.12.13" - "@babel/preset-typescript" "^7.12.16" - "@babel/runtime" "^7.12.5" - "@babel/runtime-corejs3" "^7.12.13" - "@babel/traverse" "^7.12.13" - "@docusaurus/cssnano-preset" "2.0.0-beta.6" - "@docusaurus/react-loadable" "5.5.0" - "@docusaurus/types" "2.0.0-beta.6" - "@docusaurus/utils" "2.0.0-beta.6" - "@docusaurus/utils-common" "2.0.0-beta.6" - "@docusaurus/utils-validation" "2.0.0-beta.6" - "@slorber/static-site-generator-webpack-plugin" "^4.0.0" - "@svgr/webpack" "^5.5.0" - autoprefixer "^10.2.5" - babel-loader "^8.2.2" - babel-plugin-dynamic-import-node "2.3.0" - boxen "^5.0.1" - chalk "^4.1.1" - chokidar "^3.5.1" - clean-css "^5.1.5" - commander "^5.1.0" - copy-webpack-plugin "^9.0.0" - core-js "^3.9.1" - css-loader "^5.1.1" - css-minimizer-webpack-plugin "^3.0.1" - cssnano "^5.0.4" - del "^6.0.0" - detect-port "^1.3.0" - escape-html "^1.0.3" - eta "^1.12.1" - express "^4.17.1" - file-loader "^6.2.0" - fs-extra "^10.0.0" - github-slugger "^1.3.0" - globby "^11.0.2" - html-minifier-terser "^5.1.1" - html-tags "^3.1.0" - html-webpack-plugin "^5.3.2" - import-fresh "^3.3.0" - is-root "^2.1.0" - leven "^3.1.0" - lodash "^4.17.20" - mini-css-extract-plugin "^1.6.0" - module-alias "^2.2.2" - nprogress "^0.2.0" - postcss "^8.2.15" - postcss-loader "^5.3.0" - prompts "^2.4.1" - react-dev-utils "^11.0.1" - react-error-overlay "^6.0.9" - react-helmet "^6.1.0" - react-loadable "^5.5.0" - react-loadable-ssr-addon-v5-slorber "^1.0.1" - react-router "^5.2.0" - react-router-config "^5.1.1" - react-router-dom "^5.2.0" - remark-admonitions "^1.2.1" - resolve-pathname "^3.0.0" - rtl-detect "^1.0.3" - semver "^7.3.4" - serve-handler "^6.1.3" - shelljs "^0.8.4" - std-env "^2.2.1" - strip-ansi "^6.0.0" - terser-webpack-plugin "^5.1.3" - tslib "^2.2.0" - update-notifier "^5.1.0" 
- url-loader "^4.1.1" - wait-on "^5.3.0" - webpack "^5.40.0" - webpack-bundle-analyzer "^4.4.2" - webpack-dev-server "^3.11.2" - webpack-merge "^5.8.0" - webpackbar "^5.0.0-3" - -"@docusaurus/core@2.0.0-beta.7", "@docusaurus/core@^2.0.0-beta.7": - version "2.0.0-beta.7" - resolved "https://registry.yarnpkg.com/@docusaurus/core/-/core-2.0.0-beta.7.tgz#cdfcb57c6737c51122e6b1c009f58cc5570f896b" - integrity sha512-owJDhldpimQBOCr6YbGJiQRQufK3A9zmA3lyDnbtipAGV+sDRQNiYKUDtwnbESCchA4vhKj7suvcLvA4sSEVIQ== +"@docusaurus/core@2.0.0-beta.9", "@docusaurus/core@^2.0.0-beta.9": + version "2.0.0-beta.9" + resolved "https://registry.yarnpkg.com/@docusaurus/core/-/core-2.0.0-beta.9.tgz#59b57c5e60fe83ef9e3c6aa7000d470eb0c52656" + integrity sha512-Bf9c6+yftZfAJk2h4HyaDvzBp5TLhqYtfnfWKKNi0Gdw9vRLXhi7IaiGaLWIuNAIJLTi++Ql0BAn+C0OO8EsWA== dependencies: "@babel/core" "^7.12.16" "@babel/generator" "^7.12.15" @@ -1781,12 +1685,12 @@ "@babel/runtime" "^7.15.4" "@babel/runtime-corejs3" "^7.15.4" "@babel/traverse" "^7.12.13" - "@docusaurus/cssnano-preset" "2.0.0-beta.7" - "@docusaurus/react-loadable" "5.5.0" - "@docusaurus/types" "2.0.0-beta.7" - "@docusaurus/utils" "2.0.0-beta.7" - "@docusaurus/utils-common" "2.0.0-beta.7" - "@docusaurus/utils-validation" "2.0.0-beta.7" + "@docusaurus/cssnano-preset" "2.0.0-beta.9" + "@docusaurus/react-loadable" "5.5.2" + "@docusaurus/types" "2.0.0-beta.9" + "@docusaurus/utils" "2.0.0-beta.9" + "@docusaurus/utils-common" "2.0.0-beta.9" + "@docusaurus/utils-validation" "2.0.0-beta.9" "@slorber/static-site-generator-webpack-plugin" "^4.0.0" "@svgr/webpack" "^5.5.0" autoprefixer "^10.3.5" @@ -1806,28 +1710,26 @@ detect-port "^1.3.0" escape-html "^1.0.3" eta "^1.12.3" - express "^4.17.1" file-loader "^6.2.0" fs-extra "^10.0.0" github-slugger "^1.4.0" globby "^11.0.2" html-minifier-terser "^6.0.2" html-tags "^3.1.0" - html-webpack-plugin "^5.3.2" + html-webpack-plugin "^5.4.0" import-fresh "^3.3.0" is-root "^2.1.0" leven "^3.1.0" lodash "^4.17.20" mini-css-extract-plugin "^1.6.0" - module-alias "^2.2.2" nprogress "^0.2.0" postcss "^8.3.7" postcss-loader "^6.1.1" prompts "^2.4.1" - react-dev-utils "^11.0.1" + react-dev-utils "12.0.0-next.47" react-error-overlay "^6.0.9" react-helmet "^6.1.0" - react-loadable "^5.5.0" + react-loadable "npm:@docusaurus/react-loadable@5.5.2" react-loadable-ssr-addon-v5-slorber "^1.0.1" react-router "^5.2.0" react-router-config "^5.1.1" @@ -1845,63 +1747,30 @@ update-notifier "^5.1.0" url-loader "^4.1.1" wait-on "^6.0.0" - webpack "^5.40.0" + webpack "^5.61.0" webpack-bundle-analyzer "^4.4.2" - webpack-dev-server "^3.11.2" + webpack-dev-server "^4.4.0" webpack-merge "^5.8.0" webpackbar "^5.0.0-3" -"@docusaurus/cssnano-preset@2.0.0-beta.6": - version "2.0.0-beta.6" - resolved "https://registry.yarnpkg.com/@docusaurus/cssnano-preset/-/cssnano-preset-2.0.0-beta.6.tgz#0c277854f0262dca7bcb3daf99866e8a49e29118" - integrity sha512-RCizp2NAbADopkX5nUz1xrAbU6hGZzziQk9RdSDGJLzMgVCN6RDotq9odS8VgzNa9x2Lx3WN527UxeEbzc2GVQ== - dependencies: - cssnano-preset-advanced "^5.1.1" - postcss "^8.2.15" - postcss-sort-media-queries "^3.10.11" - -"@docusaurus/cssnano-preset@2.0.0-beta.7": - version "2.0.0-beta.7" - resolved "https://registry.yarnpkg.com/@docusaurus/cssnano-preset/-/cssnano-preset-2.0.0-beta.7.tgz#d39fc576f689dc74aa0fd0812cebd96c9ce38890" - integrity sha512-S1vMCRpIpxWbETcQ/7KzZmFct7WYM2IN1FQSHqCxF1UmAfDH5wwlxUhCe8aQu00f/JV/0Oy/HMOOCJ3cDnLjgw== +"@docusaurus/cssnano-preset@2.0.0-beta.9": + version "2.0.0-beta.9" + resolved 
"https://registry.yarnpkg.com/@docusaurus/cssnano-preset/-/cssnano-preset-2.0.0-beta.9.tgz#4ad9079c68b79744c08be6e48e51d2c12907f71f" + integrity sha512-oIdoiZ/i4LXRxmuLN2ZmvGpMqtwba+ck9TlaQDWC7wvHx+EA9mvvcewKWgc7e4dxPA00+777cQvrDctAreAqLw== dependencies: cssnano-preset-advanced "^5.1.4" postcss "^8.3.7" postcss-sort-media-queries "^4.1.0" -"@docusaurus/mdx-loader@2.0.0-beta.6": - version "2.0.0-beta.6" - resolved "https://registry.yarnpkg.com/@docusaurus/mdx-loader/-/mdx-loader-2.0.0-beta.6.tgz#a5aeee5be0d04bb273752c893366cc6cffeb2b32" - integrity sha512-yO6N+OESR77WZ/pXz7muOJGLletYYksx7s7wrwrr0x+A8tzdSwiHZ9op0NyjjpW5AnItU/WQQfcjv37qv4K6HA== - dependencies: - "@babel/parser" "^7.12.16" - "@babel/traverse" "^7.12.13" - "@docusaurus/core" "2.0.0-beta.6" - "@docusaurus/utils" "2.0.0-beta.6" - "@mdx-js/mdx" "^1.6.21" - "@mdx-js/react" "^1.6.21" - chalk "^4.1.1" - escape-html "^1.0.3" - file-loader "^6.2.0" - fs-extra "^10.0.0" - github-slugger "^1.3.0" - gray-matter "^4.0.3" - mdast-util-to-string "^2.0.0" - remark-emoji "^2.1.0" - stringify-object "^3.3.0" - unist-util-visit "^2.0.2" - url-loader "^4.1.1" - webpack "^5.40.0" - -"@docusaurus/mdx-loader@2.0.0-beta.7": - version "2.0.0-beta.7" - resolved "https://registry.yarnpkg.com/@docusaurus/mdx-loader/-/mdx-loader-2.0.0-beta.7.tgz#a86ca29fd393f01869e671afaaf80d41d728ac49" - integrity sha512-AGL+JDQy/mcJ9IEbCdVDdvqCBvbvG8EIomtopxpxE307gpC3QjQT4bLyUyYBIESDEoniwNsE0R4cABqp+zoIoA== +"@docusaurus/mdx-loader@2.0.0-beta.9": + version "2.0.0-beta.9" + resolved "https://registry.yarnpkg.com/@docusaurus/mdx-loader/-/mdx-loader-2.0.0-beta.9.tgz#e87a1ff22fdabcb6bea59beae8b2d999dfb6eb81" + integrity sha512-qb+/Ew69kaAIiot+1lJ13ozsyCY+7/VryzopDTgr60BDCsLUvuDzjNKreBqo1xdC4JxYD/hJMV7UAHkZ8rWB8Q== dependencies: "@babel/parser" "^7.12.16" "@babel/traverse" "^7.12.13" - "@docusaurus/core" "2.0.0-beta.7" - "@docusaurus/utils" "2.0.0-beta.7" + "@docusaurus/core" "2.0.0-beta.9" + "@docusaurus/utils" "2.0.0-beta.9" "@mdx-js/mdx" "^1.6.21" "@mdx-js/react" "^1.6.21" chalk "^4.1.2" @@ -1915,41 +1784,18 @@ stringify-object "^3.3.0" unist-util-visit "^2.0.2" url-loader "^4.1.1" - webpack "^5.40.0" - -"@docusaurus/plugin-content-blog@2.0.0-beta.6": - version "2.0.0-beta.6" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-blog/-/plugin-content-blog-2.0.0-beta.6.tgz#54ae1c96a8e95dbc58484157c259e8aaf47a3fcb" - integrity sha512-ohfMt7+rPiFQImc4Clpvc9m/1yWUQAjpG3e/coJywlJYbDXvi1pmH0VKkDUMBSe/35Wtz9457DYgNFG81lhV7Q== - dependencies: - "@docusaurus/core" "2.0.0-beta.6" - "@docusaurus/mdx-loader" "2.0.0-beta.6" - "@docusaurus/types" "2.0.0-beta.6" - "@docusaurus/utils" "2.0.0-beta.6" - "@docusaurus/utils-validation" "2.0.0-beta.6" - chalk "^4.1.1" - escape-string-regexp "^4.0.0" - feed "^4.2.2" - fs-extra "^10.0.0" - globby "^11.0.2" - js-yaml "^4.0.0" - loader-utils "^2.0.0" - lodash "^4.17.20" - reading-time "^1.3.0" - remark-admonitions "^1.2.1" - tslib "^2.2.0" - webpack "^5.40.0" - -"@docusaurus/plugin-content-blog@2.0.0-beta.7": - version "2.0.0-beta.7" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-blog/-/plugin-content-blog-2.0.0-beta.7.tgz#f40d9314f2ea07e0cea75e0eeaae79a259f8c937" - integrity sha512-Pz4peB7I38aCMogdqiDhp43ip22GtSUWSewMRwZFd5poxvhShAb7/TUOP85p4bHfTmEWPcsFxQkDBMiv2nFkbw== - dependencies: - "@docusaurus/core" "2.0.0-beta.7" - "@docusaurus/mdx-loader" "2.0.0-beta.7" - "@docusaurus/types" "2.0.0-beta.7" - "@docusaurus/utils" "2.0.0-beta.7" - "@docusaurus/utils-validation" "2.0.0-beta.7" + webpack "^5.61.0" + 
+"@docusaurus/plugin-content-blog@2.0.0-beta.9": + version "2.0.0-beta.9" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-blog/-/plugin-content-blog-2.0.0-beta.9.tgz#d72a32013232610552cbc45509ba2ddaea653690" + integrity sha512-KZ6UmUa/P4SSX8/xnZpwSt7krnAfRg3S/ghZ7zeIzcp12iumSZBmLNi5rIIXcsFVH0IPOnIofEoWEaEIwaNerg== + dependencies: + "@docusaurus/core" "2.0.0-beta.9" + "@docusaurus/mdx-loader" "2.0.0-beta.9" + "@docusaurus/types" "2.0.0-beta.9" + "@docusaurus/utils" "2.0.0-beta.9" + "@docusaurus/utils-validation" "2.0.0-beta.9" chalk "^4.1.2" escape-string-regexp "^4.0.0" feed "^4.2.2" @@ -1962,44 +1808,18 @@ remark-admonitions "^1.2.1" tslib "^2.3.1" utility-types "^3.10.0" - webpack "^5.40.0" - -"@docusaurus/plugin-content-docs@2.0.0-beta.6": - version "2.0.0-beta.6" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-docs/-/plugin-content-docs-2.0.0-beta.6.tgz#38fd58e42fe39e2a0cc738df077917a6fcd4e7ee" - integrity sha512-cM5WWogWmX+qKPKv332eDWGRVVT5OjskbmFKe2QimwoaON3Cv6XY8Fo2xdYopqGIU0r0z8dVtRmoGS0ji7zB7w== - dependencies: - "@docusaurus/core" "2.0.0-beta.6" - "@docusaurus/mdx-loader" "2.0.0-beta.6" - "@docusaurus/types" "2.0.0-beta.6" - "@docusaurus/utils" "2.0.0-beta.6" - "@docusaurus/utils-validation" "2.0.0-beta.6" - chalk "^4.1.1" - combine-promises "^1.1.0" - escape-string-regexp "^4.0.0" - execa "^5.0.0" - fs-extra "^10.0.0" - globby "^11.0.2" - import-fresh "^3.2.2" - js-yaml "^4.0.0" - loader-utils "^1.2.3" - lodash "^4.17.20" - remark-admonitions "^1.2.1" - shelljs "^0.8.4" - tslib "^2.2.0" - utility-types "^3.10.0" - webpack "^5.40.0" - -"@docusaurus/plugin-content-docs@2.0.0-beta.7": - version "2.0.0-beta.7" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-docs/-/plugin-content-docs-2.0.0-beta.7.tgz#546713a2ae773266d5fa385f8f25e9f2438194c9" - integrity sha512-O8Ky+xqiL4+LGIa2hT5WMknbbpw9Q0GaOfnPgX1XTBUwWFlwadMX1rMb4Vb1ki7qhqV1clVCcWZ2EdilkMqhUg== - dependencies: - "@docusaurus/core" "2.0.0-beta.7" - "@docusaurus/mdx-loader" "2.0.0-beta.7" - "@docusaurus/types" "2.0.0-beta.7" - "@docusaurus/utils" "2.0.0-beta.7" - "@docusaurus/utils-validation" "2.0.0-beta.7" + webpack "^5.61.0" + +"@docusaurus/plugin-content-docs@2.0.0-beta.9": + version "2.0.0-beta.9" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-docs/-/plugin-content-docs-2.0.0-beta.9.tgz#53ac2b43beb0f183c8a9b8fab6201e5e8f444a67" + integrity sha512-GC+CvqKuravPpK5fqlYJVmj9hc6nkd/c/rM2ONueFCqw2wyuH7esWL8RpMqgS0JM1qwwuRpi0Dd3R/zdOptHIQ== + dependencies: + "@docusaurus/core" "2.0.0-beta.9" + "@docusaurus/mdx-loader" "2.0.0-beta.9" + "@docusaurus/types" "2.0.0-beta.9" + "@docusaurus/utils" "2.0.0-beta.9" + "@docusaurus/utils-validation" "2.0.0-beta.9" chalk "^4.1.2" combine-promises "^1.1.0" escape-string-regexp "^4.0.0" @@ -2014,117 +1834,102 @@ shelljs "^0.8.4" tslib "^2.3.1" utility-types "^3.10.0" - webpack "^5.40.0" - -"@docusaurus/plugin-content-pages@2.0.0-beta.6": - version "2.0.0-beta.6" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-pages/-/plugin-content-pages-2.0.0-beta.6.tgz#254e6ee60a8a2b4d85c4fa8408388d585eea0507" - integrity sha512-N6wARzOA8gTFeBXZSKbAN5s1Ej6R/pVg+J946E8GCYefXTFikTNRQ8+OPhax4MRzgzoOvhTQbLbRCSoAzSmjig== - dependencies: - "@docusaurus/core" "2.0.0-beta.6" - "@docusaurus/mdx-loader" "2.0.0-beta.6" - "@docusaurus/types" "2.0.0-beta.6" - "@docusaurus/utils" "2.0.0-beta.6" - "@docusaurus/utils-validation" "2.0.0-beta.6" - globby "^11.0.2" - lodash "^4.17.20" - remark-admonitions "^1.2.1" - tslib "^2.1.0" - 
webpack "^5.40.0" - -"@docusaurus/plugin-content-pages@2.0.0-beta.7": - version "2.0.0-beta.7" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-pages/-/plugin-content-pages-2.0.0-beta.7.tgz#9167c5a9924d284b076d261b08b19b21ac8836fd" - integrity sha512-5NKtexqh5Ug75xDUQ8ZhgTZ5qDQKF+5/MvOsDdV6OPPq39Rc54RnXyOBpn2hmG4sOkITSav0Rx9WdSwXQ1S56w== - dependencies: - "@docusaurus/core" "2.0.0-beta.7" - "@docusaurus/mdx-loader" "2.0.0-beta.7" - "@docusaurus/types" "2.0.0-beta.7" - "@docusaurus/utils" "2.0.0-beta.7" - "@docusaurus/utils-validation" "2.0.0-beta.7" + webpack "^5.61.0" + +"@docusaurus/plugin-content-pages@2.0.0-beta.9": + version "2.0.0-beta.9" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-pages/-/plugin-content-pages-2.0.0-beta.9.tgz#115309f03bae2864bb9e4cd8fae646ea2e1f31dc" + integrity sha512-27nFHhPpZEWra6izyWgY+EkBspr3OAUUHojRXzMUKplYLZ5gIciM224PXbwLyECjpn51eaf8/2Ay+/H9BdTCBw== + dependencies: + "@docusaurus/core" "2.0.0-beta.9" + "@docusaurus/mdx-loader" "2.0.0-beta.9" + "@docusaurus/types" "2.0.0-beta.9" + "@docusaurus/utils" "2.0.0-beta.9" + "@docusaurus/utils-validation" "2.0.0-beta.9" globby "^11.0.2" lodash "^4.17.20" remark-admonitions "^1.2.1" tslib "^2.3.1" - webpack "^5.40.0" + webpack "^5.61.0" -"@docusaurus/plugin-debug@2.0.0-beta.7": - version "2.0.0-beta.7" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-debug/-/plugin-debug-2.0.0-beta.7.tgz#eb8f9b1297bbc82e9541c6ad528643e0766b1953" - integrity "sha1-64+bEpe7yC6VQcatUoZD4HZrGVM= sha512-yClzF4ATBclrnBf5hz5YlFmK7Ds63suWf9Sv6VaLsrW4nnP/TinpbBfXSPdbM1zqaDqzNGcYabm0jbA2bstXJA==" +"@docusaurus/plugin-debug@2.0.0-beta.9": + version "2.0.0-beta.9" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-debug/-/plugin-debug-2.0.0-beta.9.tgz#97920e6ba333f99537bd72ae97a8999beeb39a3b" + integrity sha512-uVnLfNE7YBMCWVcfoy6NgAxbqfG3bXfrLozM2RMafPmsCitaw+wrTdnba/irM364wPFFursF9lDrNLwSrYiRbw== dependencies: - "@docusaurus/core" "2.0.0-beta.7" - "@docusaurus/types" "2.0.0-beta.7" - "@docusaurus/utils" "2.0.0-beta.7" + "@docusaurus/core" "2.0.0-beta.9" + "@docusaurus/types" "2.0.0-beta.9" + "@docusaurus/utils" "2.0.0-beta.9" fs-extra "^10.0.0" react-json-view "^1.21.3" tslib "^2.3.1" -"@docusaurus/plugin-google-analytics@2.0.0-beta.7": - version "2.0.0-beta.7" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-2.0.0-beta.7.tgz#040e246bd34300cf88d8f0eed1d677693aae0ec0" - integrity "sha1-BA4ka9NDAM+I2PDu0dZ3aTquDsA= sha512-588Fwd5vlcS13osCm9SeQOL4BnEH5M680pLnFQ2oTfZ1RzLfNFJ0O2mP3c4ivZCsHqT8OguUwZBoSKQC3FtBzg==" +"@docusaurus/plugin-google-analytics@2.0.0-beta.9": + version "2.0.0-beta.9" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-2.0.0-beta.9.tgz#5584d20b2e64cc3f84978d42cb0edeeabaf49123" + integrity sha512-fYnunrefFko2jF/M973FONZjn9QHzNnt7+uMokR4frK/BX/zEyW2Yw6vh7dC0oo+ml5625Pv5OfwwlOJ9DRmHw== dependencies: - "@docusaurus/core" "2.0.0-beta.7" + "@docusaurus/core" "2.0.0-beta.9" -"@docusaurus/plugin-google-gtag@2.0.0-beta.7": - version "2.0.0-beta.7" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-2.0.0-beta.7.tgz#d8f4be826dbc7b53aaba27c65be86b32ba8bb7b9" - integrity "sha1-2PS+gm28e1OquifGW+hrMrqLt7k= sha512-52nm+VlFk4sGX6R9H3PMv5VVIZSEpzMhnAWTrcLNJyCj7TmX45Rr44+nkFdC1S5xg1Jy6u6farpD9VUK69oQfA==" +"@docusaurus/plugin-google-gtag@2.0.0-beta.9": + version "2.0.0-beta.9" + resolved 
"https://registry.yarnpkg.com/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-2.0.0-beta.9.tgz#70de97bb5a25bc10969941ec257d694db0abed79" + integrity sha512-AlVhbjN0OEiM8r8BncdiP82B9I7Dw3fN4cj2pPLtcOmvcRPQM2BfdzxbXPBUHgyT50Rd6hxS+R2Fl/s2RpUAHA== dependencies: - "@docusaurus/core" "2.0.0-beta.7" + "@docusaurus/core" "2.0.0-beta.9" -"@docusaurus/plugin-sitemap@2.0.0-beta.7": - version "2.0.0-beta.7" - resolved "https://registry.yarnpkg.com/@docusaurus/plugin-sitemap/-/plugin-sitemap-2.0.0-beta.7.tgz#a81fcbc65b606d98814ac5ce880b3d8124c32696" - integrity "sha1-qB/LxltgbZiBSsXOiAs9gSTDJpY= sha512-jCBwxHFDkKbUTa+rc1TGegBpaLM+F/C2N97jhnvT9i07kUClt5w+tWplc+jm1Z1Ei/qwiMC+VLAK7xXnnPWTWg==" +"@docusaurus/plugin-sitemap@2.0.0-beta.9": + version "2.0.0-beta.9" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-sitemap/-/plugin-sitemap-2.0.0-beta.9.tgz#4c944b6fc4e8fa4625a6fd985ec21d6455c31647" + integrity sha512-p6Qc1vo/yb1v767/u0E72inkGKayx77HDKsDOGrNj2IH0db0cMsskBLeKYcDfVz5+dtmFrR+lubINp7TyofkvA== dependencies: - "@docusaurus/core" "2.0.0-beta.7" - "@docusaurus/types" "2.0.0-beta.7" - "@docusaurus/utils" "2.0.0-beta.7" - "@docusaurus/utils-common" "2.0.0-beta.7" - "@docusaurus/utils-validation" "2.0.0-beta.7" + "@docusaurus/core" "2.0.0-beta.9" + "@docusaurus/types" "2.0.0-beta.9" + "@docusaurus/utils" "2.0.0-beta.9" + "@docusaurus/utils-common" "2.0.0-beta.9" + "@docusaurus/utils-validation" "2.0.0-beta.9" fs-extra "^10.0.0" sitemap "^7.0.0" tslib "^2.3.1" -"@docusaurus/preset-classic@^2.0.0-beta.7": - version "2.0.0-beta.7" - resolved "https://registry.yarnpkg.com/@docusaurus/preset-classic/-/preset-classic-2.0.0-beta.7.tgz#7340144e1ed486ef14cdbfd4366e2d7fc350ff13" - integrity "sha1-c0AUTh7Uhu8Uzb/UNm4tf8NQ/xM= sha512-lrIpgjrcea3AxroU7G1q1cMFxYNBJleNWCMJ+3Ed1cwn6yRmOQAtUJPQzhPX67Uay5Wl6d2K8/cPpMdAT3E0Fg==" - dependencies: - "@docusaurus/core" "2.0.0-beta.7" - "@docusaurus/plugin-content-blog" "2.0.0-beta.7" - "@docusaurus/plugin-content-docs" "2.0.0-beta.7" - "@docusaurus/plugin-content-pages" "2.0.0-beta.7" - "@docusaurus/plugin-debug" "2.0.0-beta.7" - "@docusaurus/plugin-google-analytics" "2.0.0-beta.7" - "@docusaurus/plugin-google-gtag" "2.0.0-beta.7" - "@docusaurus/plugin-sitemap" "2.0.0-beta.7" - "@docusaurus/theme-classic" "2.0.0-beta.7" - "@docusaurus/theme-search-algolia" "2.0.0-beta.7" - -"@docusaurus/react-loadable@5.5.0": - version "5.5.0" - resolved "https://registry.yarnpkg.com/@docusaurus/react-loadable/-/react-loadable-5.5.0.tgz#6d6f0c8fd9a434b62a1ab1f8645ee7bde5a9ec21" - integrity sha512-Ld/kwUE6yATIOTLq3JCsWiTa/drisajwKqBQ2Rw6IcT+sFsKfYek8F2jSH8f68AT73xX97UehduZeCSlnuCBIg== - dependencies: +"@docusaurus/preset-classic@^2.0.0-beta.9": + version "2.0.0-beta.9" + resolved "https://registry.yarnpkg.com/@docusaurus/preset-classic/-/preset-classic-2.0.0-beta.9.tgz#853e6fa376eb389a2f06c0c85f2bc823b26b3010" + integrity sha512-wm4x+jOKYaBL+7ckJwskyiITayNm3127e42kz4CtvmjjccpZu68JCfjehqkpnoPDTByBYnaeOKyga4azeAQLSA== + dependencies: + "@docusaurus/core" "2.0.0-beta.9" + "@docusaurus/plugin-content-blog" "2.0.0-beta.9" + "@docusaurus/plugin-content-docs" "2.0.0-beta.9" + "@docusaurus/plugin-content-pages" "2.0.0-beta.9" + "@docusaurus/plugin-debug" "2.0.0-beta.9" + "@docusaurus/plugin-google-analytics" "2.0.0-beta.9" + "@docusaurus/plugin-google-gtag" "2.0.0-beta.9" + "@docusaurus/plugin-sitemap" "2.0.0-beta.9" + "@docusaurus/theme-classic" "2.0.0-beta.9" + "@docusaurus/theme-search-algolia" "2.0.0-beta.9" + +"@docusaurus/react-loadable@5.5.2", 
"react-loadable@npm:@docusaurus/react-loadable@5.5.2": + version "5.5.2" + resolved "https://registry.yarnpkg.com/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz#81aae0db81ecafbdaee3651f12804580868fa6ce" + integrity sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ== + dependencies: + "@types/react" "*" prop-types "^15.6.2" -"@docusaurus/theme-classic@2.0.0-beta.7": - version "2.0.0-beta.7" - resolved "https://registry.yarnpkg.com/@docusaurus/theme-classic/-/theme-classic-2.0.0-beta.7.tgz#af51c439280ad2064d0a65b77bc0475590a74d06" - integrity "sha1-r1HEOSgK0gZNCmW3e8BHVZCnTQY= sha512-x93sqAlR5l0oZa12GetWrvwm6Olx23bA3nv1gYIAhaxHqo374vC6EWIsvjJCBdCSmmuPX6TaZm1rSDYFWc/37g==" - dependencies: - "@docusaurus/core" "2.0.0-beta.7" - "@docusaurus/plugin-content-blog" "2.0.0-beta.7" - "@docusaurus/plugin-content-docs" "2.0.0-beta.7" - "@docusaurus/plugin-content-pages" "2.0.0-beta.7" - "@docusaurus/theme-common" "2.0.0-beta.7" - "@docusaurus/types" "2.0.0-beta.7" - "@docusaurus/utils" "2.0.0-beta.7" - "@docusaurus/utils-common" "2.0.0-beta.7" - "@docusaurus/utils-validation" "2.0.0-beta.7" +"@docusaurus/theme-classic@2.0.0-beta.9", "@docusaurus/theme-classic@^2.0.0-beta.9": + version "2.0.0-beta.9" + resolved "https://registry.yarnpkg.com/@docusaurus/theme-classic/-/theme-classic-2.0.0-beta.9.tgz#d4d246c295e91938bb4ae9745104e352bc5dfe94" + integrity sha512-vTijCGrkFkaqzpOu7w1AaXOBFOo6wirkNEN0+TMkx3oTu95Yj7h98rt/9Z60f6L9HVjOFQ18h3fU6cWloNG+Bg== + dependencies: + "@docusaurus/core" "2.0.0-beta.9" + "@docusaurus/plugin-content-blog" "2.0.0-beta.9" + "@docusaurus/plugin-content-docs" "2.0.0-beta.9" + "@docusaurus/plugin-content-pages" "2.0.0-beta.9" + "@docusaurus/theme-common" "2.0.0-beta.9" + "@docusaurus/types" "2.0.0-beta.9" + "@docusaurus/utils" "2.0.0-beta.9" + "@docusaurus/utils-common" "2.0.0-beta.9" + "@docusaurus/utils-validation" "2.0.0-beta.9" "@mdx-js/mdx" "^1.6.21" "@mdx-js/react" "^1.6.21" chalk "^4.1.2" @@ -2142,164 +1947,73 @@ react-router-dom "^5.2.0" rtlcss "^3.3.0" -"@docusaurus/theme-classic@^2.0.0-beta.6": - version "2.0.0-beta.6" - resolved "https://registry.yarnpkg.com/@docusaurus/theme-classic/-/theme-classic-2.0.0-beta.6.tgz#4ae476e90cf875bb13eba5daedbd71d0cfdd8194" - integrity sha512-fMb6gAKUdaojInZabimIJE+yPWs8dQfmZII7v/LHmgxafh/FylmrBkKhyJfa2ix4QRibo9E01LGX44/aKzemxw== - dependencies: - "@docusaurus/core" "2.0.0-beta.6" - "@docusaurus/plugin-content-blog" "2.0.0-beta.6" - "@docusaurus/plugin-content-docs" "2.0.0-beta.6" - "@docusaurus/plugin-content-pages" "2.0.0-beta.6" - "@docusaurus/theme-common" "2.0.0-beta.6" - "@docusaurus/types" "2.0.0-beta.6" - "@docusaurus/utils" "2.0.0-beta.6" - "@docusaurus/utils-common" "2.0.0-beta.6" - "@docusaurus/utils-validation" "2.0.0-beta.6" - "@mdx-js/mdx" "^1.6.21" - "@mdx-js/react" "^1.6.21" - chalk "^4.1.1" - clsx "^1.1.1" - copy-text-to-clipboard "^3.0.1" - fs-extra "^10.0.0" - globby "^11.0.2" - infima "0.2.0-alpha.33" - lodash "^4.17.20" - parse-numeric-range "^1.2.0" - postcss "^8.2.15" - prism-react-renderer "^1.2.1" - prismjs "^1.23.0" - prop-types "^15.7.2" - react-router-dom "^5.2.0" - rtlcss "^3.1.2" - -"@docusaurus/theme-common@2.0.0-beta.6": - version "2.0.0-beta.6" - resolved "https://registry.yarnpkg.com/@docusaurus/theme-common/-/theme-common-2.0.0-beta.6.tgz#17cbf38400d752e264cdbebbc57a92f2bdfc7052" - integrity sha512-53nFWMjpFdyHEvBfQQQoDm9rNKgGangy7vSp1B/F3+uRyYAItE7O4l8MdOALXFALlddiiPYvCtI1qGx2dnzndA== - dependencies: - "@docusaurus/core" "2.0.0-beta.6" - 
"@docusaurus/plugin-content-blog" "2.0.0-beta.6" - "@docusaurus/plugin-content-docs" "2.0.0-beta.6" - "@docusaurus/plugin-content-pages" "2.0.0-beta.6" - "@docusaurus/types" "2.0.0-beta.6" - clsx "^1.1.1" - fs-extra "^10.0.0" - tslib "^2.1.0" - -"@docusaurus/theme-common@2.0.0-beta.7": - version "2.0.0-beta.7" - resolved "https://registry.yarnpkg.com/@docusaurus/theme-common/-/theme-common-2.0.0-beta.7.tgz#958b3b9b537e78b6f933bf0b2348650e2e8f8d37" - integrity sha512-50ySwGxsLDS2Jqh771ZkN72QJvLHu+mz+oh/sVerfaQZUmTHZ2AHm0F2Ugzwcwu7ZeuAt31qjHsCLC0ZgVy88A== - dependencies: - "@docusaurus/core" "2.0.0-beta.7" - "@docusaurus/plugin-content-blog" "2.0.0-beta.7" - "@docusaurus/plugin-content-docs" "2.0.0-beta.7" - "@docusaurus/plugin-content-pages" "2.0.0-beta.7" - "@docusaurus/types" "2.0.0-beta.7" +"@docusaurus/theme-common@2.0.0-beta.9": + version "2.0.0-beta.9" + resolved "https://registry.yarnpkg.com/@docusaurus/theme-common/-/theme-common-2.0.0-beta.9.tgz#a2bd5eb242baa38b110a191126f9054740267925" + integrity sha512-ZsFP+wH1CY6SBqkBGAdj9kHZHkV/7Y77Jw0rnEVbVU4zX2Jh6apWRCOJVaPrroDES8/9D6WWKQgQifeoJ2EeIA== + dependencies: + "@docusaurus/core" "2.0.0-beta.9" + "@docusaurus/plugin-content-blog" "2.0.0-beta.9" + "@docusaurus/plugin-content-docs" "2.0.0-beta.9" + "@docusaurus/plugin-content-pages" "2.0.0-beta.9" + "@docusaurus/types" "2.0.0-beta.9" clsx "^1.1.1" fs-extra "^10.0.0" tslib "^2.3.1" utility-types "^3.10.0" -"@docusaurus/theme-search-algolia@2.0.0-beta.7", "@docusaurus/theme-search-algolia@^2.0.0-beta.7": - version "2.0.0-beta.7" - resolved "https://registry.yarnpkg.com/@docusaurus/theme-search-algolia/-/theme-search-algolia-2.0.0-beta.7.tgz#d89e56cb98e3632f0b50a0ff72d34882efabe68b" - integrity sha512-N/5AVhs/nx1lcHeWG6ek3SjpARJ8UCGyWgcDDb0Li867YFle/b8Slai8ZgKUDrHlRl1+t3iE8G9w5+xAB+FdwA== +"@docusaurus/theme-search-algolia@2.0.0-beta.9", "@docusaurus/theme-search-algolia@^2.0.0-beta.9": + version "2.0.0-beta.9" + resolved "https://registry.yarnpkg.com/@docusaurus/theme-search-algolia/-/theme-search-algolia-2.0.0-beta.9.tgz#ccece22535b91e83757c21d895817c5f577fcc36" + integrity sha512-pbpA18kqr5H7A7snmHf4dzMYV+3nsTDYMhV9f2Tms7yP9cxW7ZMHJwaEKXh1myE58Nbkv84AF734TR1UgYrziw== dependencies: "@docsearch/react" "^3.0.0-alpha.39" - "@docusaurus/core" "2.0.0-beta.7" - "@docusaurus/theme-common" "2.0.0-beta.7" - "@docusaurus/utils" "2.0.0-beta.7" - "@docusaurus/utils-validation" "2.0.0-beta.7" + "@docusaurus/core" "2.0.0-beta.9" + "@docusaurus/theme-common" "2.0.0-beta.9" + "@docusaurus/utils" "2.0.0-beta.9" + "@docusaurus/utils-validation" "2.0.0-beta.9" algoliasearch "^4.10.5" algoliasearch-helper "^3.5.5" clsx "^1.1.1" eta "^1.12.3" lodash "^4.17.20" -"@docusaurus/types@2.0.0-beta.6": - version "2.0.0-beta.6" - resolved "https://registry.yarnpkg.com/@docusaurus/types/-/types-2.0.0-beta.6.tgz#f92a61cc42e5921d325114ebc7b30c5e8c368683" - integrity sha512-TrwxyI93XTZEhOmdEI8FPKDbGV61zE9PzXCdE1alwz1NOV+YXwcv+9sRTZEVLqBpr+TIja+IeeS6mxnyen/Ptg== - dependencies: - commander "^5.1.0" - joi "^17.4.0" - querystring "0.2.0" - webpack "^5.40.0" - webpack-merge "^5.8.0" - -"@docusaurus/types@2.0.0-beta.7": - version "2.0.0-beta.7" - resolved "https://registry.yarnpkg.com/@docusaurus/types/-/types-2.0.0-beta.7.tgz#78ea662e6879ef9d819e505e7338a4f861f0b53f" - integrity sha512-p65Fy/8yhnhOvEcqrDtSqfepEmBOxo6mFtCVFS/zQJgXCH10pvbN4SjIFIFFHh8AvbAgv1Dp2TBel2mdwQO2yA== +"@docusaurus/types@2.0.0-beta.9": + version "2.0.0-beta.9" + resolved 
"https://registry.yarnpkg.com/@docusaurus/types/-/types-2.0.0-beta.9.tgz#3561a0e3ce9bcb0892d02a025161bb854a189d10" + integrity sha512-7qK7PCwRImHzv9RMi5HJ7RoHKQ8r7oqZK79UucmzBXl5nyfZridBC7JQ+LG7GBqYVaIjfOHUflOOLIVn+gK2/g== dependencies: commander "^5.1.0" joi "^17.4.2" querystring "0.2.0" utility-types "^3.10.0" - webpack "^5.40.0" + webpack "^5.61.0" webpack-merge "^5.8.0" -"@docusaurus/utils-common@2.0.0-beta.6": - version "2.0.0-beta.6" - resolved "https://registry.yarnpkg.com/@docusaurus/utils-common/-/utils-common-2.0.0-beta.6.tgz#afd26a9f67b16479058ead66a310738c21293ae5" - integrity sha512-MKm6bJxvsYWRl072jLR60z+71tTWSxoERh2eTmCYlegFnu3Tby3HOC8I3jDcC6VpVuoDGsBGNoQbOgy2LqQbXQ== - dependencies: - "@docusaurus/types" "2.0.0-beta.6" - tslib "^2.2.0" - -"@docusaurus/utils-common@2.0.0-beta.7": - version "2.0.0-beta.7" - resolved "https://registry.yarnpkg.com/@docusaurus/utils-common/-/utils-common-2.0.0-beta.7.tgz#223096c825990c17da59f1009299cac3edcb00c7" - integrity sha512-qfuEZFe81TEJfNVx5rbwRSPID/guulpr0mTVOTpe8y4MozkqhWvK2j9xElVg9olJc+6WBd9IUN2NIq33g7QjYQ== +"@docusaurus/utils-common@2.0.0-beta.9": + version "2.0.0-beta.9" + resolved "https://registry.yarnpkg.com/@docusaurus/utils-common/-/utils-common-2.0.0-beta.9.tgz#3c61db4dd87b4644266e9c240024049cd991f338" + integrity sha512-ftVRifnVXW9eQjwOSuTzhEb9P55KSVfqEbQHgUlMm8KYXHC4NNdn4V+9sHmdJ8rDWNU+PA/+FMjGxWLVejMkxg== dependencies: - "@docusaurus/types" "2.0.0-beta.7" + "@docusaurus/types" "2.0.0-beta.9" tslib "^2.3.1" -"@docusaurus/utils-validation@2.0.0-beta.6": - version "2.0.0-beta.6" - resolved "https://registry.yarnpkg.com/@docusaurus/utils-validation/-/utils-validation-2.0.0-beta.6.tgz#7b98216de844138e9606a128c09182185ed84621" - integrity sha512-v0nk9bpawUd2JFDFyiHDmZuMG+/O1UvxtxvcRbvrxrul+rlzD7Q9CGxMgW3Grp2OCKQ4yFXRidBIccwqON5AVw== +"@docusaurus/utils-validation@2.0.0-beta.9": + version "2.0.0-beta.9" + resolved "https://registry.yarnpkg.com/@docusaurus/utils-validation/-/utils-validation-2.0.0-beta.9.tgz#7a4e4ab29627b618a784e8b59fbe4b4bab736594" + integrity sha512-8XZ2wdg+HPSVqgFzhfvntPLwX0+sCypvODatXR8A3YUraZYqQU0NK7SLqD1epLpmHjT/bztSq5DydoGoFRJdIA== dependencies: - "@docusaurus/utils" "2.0.0-beta.6" - chalk "^4.1.1" - joi "^17.4.0" - tslib "^2.1.0" - -"@docusaurus/utils-validation@2.0.0-beta.7": - version "2.0.0-beta.7" - resolved "https://registry.yarnpkg.com/@docusaurus/utils-validation/-/utils-validation-2.0.0-beta.7.tgz#b2cc4bf77b70b6956f00aa436aed8c610f2ecd8a" - integrity sha512-lcnsW1sJ+DUTkPS2aA3JysO6pZ9kqDAhesrG7Y8q2ivlwU/unED2N7/rubP0Yw0KYFr/1b+v/uOtyfNYZya3gQ== - dependencies: - "@docusaurus/utils" "2.0.0-beta.7" + "@docusaurus/utils" "2.0.0-beta.9" chalk "^4.1.2" joi "^17.4.2" tslib "^2.3.1" -"@docusaurus/utils@2.0.0-beta.6": - version "2.0.0-beta.6" - resolved "https://registry.yarnpkg.com/@docusaurus/utils/-/utils-2.0.0-beta.6.tgz#1438df9f28b632fe7a4f50663340b463cff07cab" - integrity sha512-S72/o7VDaTvrXJy+NpfuctghGGoMW30m94PMkrL3I6V+o5eE2Uzax7dbM++moclmHvi0/Khv+TXmRIQs6ZvwgQ== - dependencies: - "@docusaurus/types" "2.0.0-beta.6" - "@types/github-slugger" "^1.3.0" - chalk "^4.1.1" - escape-string-regexp "^4.0.0" - fs-extra "^10.0.0" - globby "^11.0.4" - gray-matter "^4.0.3" - lodash "^4.17.20" - micromatch "^4.0.4" - resolve-pathname "^3.0.0" - tslib "^2.2.0" - -"@docusaurus/utils@2.0.0-beta.7": - version "2.0.0-beta.7" - resolved "https://registry.yarnpkg.com/@docusaurus/utils/-/utils-2.0.0-beta.7.tgz#e92d11f0fb39aa8c0938025770901594782425a5" - integrity 
sha512-sL6IKOR/12btiHZS1yiCwlyFMbbkTUGSXSMSmDgumQRSDU63OaAvTLO7lWi2zDwFPXtFtfruAULcXc8XO4RjUA== +"@docusaurus/utils@2.0.0-beta.9": + version "2.0.0-beta.9" + resolved "https://registry.yarnpkg.com/@docusaurus/utils/-/utils-2.0.0-beta.9.tgz#b9d2b5e2baaed94b5041288fa759438e0164c408" + integrity sha512-f5TUY72Qux0wv1tjxsvjFDjfRnsWtQjsjR5Q/gJ5V021H9lycC9YCk0cEReg3bI3+IVL2iGvQqNnH3R1G7NcRw== dependencies: - "@docusaurus/types" "2.0.0-beta.7" + "@docusaurus/types" "2.0.0-beta.9" "@mdx-js/runtime" "^1.6.22" "@types/github-slugger" "^1.3.0" chalk "^4.1.2" @@ -2760,14 +2474,6 @@ resolved "https://registry.yarnpkg.com/@types/github-slugger/-/github-slugger-1.3.0.tgz#16ab393b30d8ae2a111ac748a015ac05a1fc5524" integrity sha512-J/rMZa7RqiH/rT29TEVZO4nBoDP9XJOjnbbIofg7GQKs4JIduEO3WLpte+6WeUz/TcrXKlY+bM7FYrp8yFB+3g== -"@types/glob@^7.1.1": - version "7.1.4" - resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.1.4.tgz#ea59e21d2ee5c517914cb4bc8e4153b99e566672" - integrity sha512-w+LsMxKyYQm347Otw+IfBXOv9UWVjpHpCDdbBMt8Kz/xbvCYNjP+0qPh91Km3iKfSRLBB0P7fAMf0KHrPu+MyA== - dependencies: - "@types/minimatch" "*" - "@types/node" "*" - "@types/hast@^2.0.0": version "2.3.2" resolved "https://registry.yarnpkg.com/@types/hast/-/hast-2.3.2.tgz#236201acca9e2695e42f713d7dd4f151dc2982e4" @@ -2775,17 +2481,24 @@ dependencies: "@types/unist" "*" -"@types/html-minifier-terser@^5.0.0": - version "5.1.2" - resolved "https://registry.yarnpkg.com/@types/html-minifier-terser/-/html-minifier-terser-5.1.2.tgz#693b316ad323ea97eed6b38ed1a3cc02b1672b57" - integrity sha512-h4lTMgMJctJybDp8CQrxTUiiYmedihHWkjnF/8Pxseu2S6Nlfcy8kwboQ8yejh456rP2yWoEVm1sS/FVsfM48w== +"@types/html-minifier-terser@^6.0.0": + version "6.0.0" + resolved "https://registry.yarnpkg.com/@types/html-minifier-terser/-/html-minifier-terser-6.0.0.tgz#563c1c6c132cd204e71512f9c0b394ff90d3fae7" + integrity sha512-NZwaaynfs1oIoLAV1vg18e7QMVDvw+6SQrdJc8w3BwUaoroVSf6EBj/Sk4PBWGxsq0dzhA2drbsuMC1/6C6KgQ== + +"@types/http-proxy@^1.17.5": + version "1.17.7" + resolved "https://registry.yarnpkg.com/@types/http-proxy/-/http-proxy-1.17.7.tgz#30ea85cc2c868368352a37f0d0d3581e24834c6f" + integrity sha512-9hdj6iXH64tHSLTY+Vt2eYOGzSogC+JQ2H7bdPWkuh7KXP5qLllWx++t+K9Wk556c3dkDdPws/SpMRi0sdCT1w== + dependencies: + "@types/node" "*" "@types/json-schema@*", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.7": version "7.0.8" resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.8.tgz#edf1bf1dbf4e04413ca8e5b17b3b7d7d54b59818" integrity sha512-YSBPTLTVm2e2OoQIDYx8HaeWJ5tTToLH67kXR7zYNGupXMEHa2++G8k+DczX2cFVgalypqtyZIcU19AFcmOpmg== -"@types/json-schema@^7.0.8": +"@types/json-schema@^7.0.4", "@types/json-schema@^7.0.8": version "7.0.9" resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.9.tgz#97edc9037ea0c38585320b28964dde3b39e4660d" integrity sha512-qcUXuemtEu+E5wZSJHNxUXeCZhAfXKQ41D+duX+VYPde7xyEVZci+/oXKJL13tnRs9lR2pr4fod59GT6/X1/yQ== @@ -2809,11 +2522,6 @@ dependencies: "@types/unist" "*" -"@types/minimatch@*": - version "3.0.5" - resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.5.tgz#1001cc5e6a3704b83c236027e77f2f58ea010f40" - integrity sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ== - "@types/node@*": version "16.3.1" resolved "https://registry.yarnpkg.com/@types/node/-/node-16.3.1.tgz#24691fa2b0c3ec8c0d34bfcfd495edac5593ebb4" @@ -2834,11 +2542,30 @@ resolved "https://registry.yarnpkg.com/@types/parse5/-/parse5-5.0.3.tgz#e7b5aebbac150f8b5fdd4a46e7f0bd8e65e19109" 
integrity sha512-kUNnecmtkunAoQ3CnjmMkzNU/gtxG8guhi+Fk2U/kOpIKjIMKnXGp4IJCgQJrXSgMsWYimYG4TGjz/UzbGEBTw== +"@types/prop-types@*": + version "15.7.4" + resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.4.tgz#fcf7205c25dff795ee79af1e30da2c9790808f11" + integrity sha512-rZ5drC/jWjrArrS8BR6SIr4cWpW09RNTYt9AMZo3Jwwif+iacXAqgVjm0B0Bv/S1jhDXKHqRVNCbACkJ89RAnQ== + "@types/q@^1.5.1": version "1.5.5" resolved "https://registry.yarnpkg.com/@types/q/-/q-1.5.5.tgz#75a2a8e7d8ab4b230414505d92335d1dcb53a6df" integrity sha512-L28j2FcJfSZOnL1WBjDYp2vUHCeIFlyYI/53EwD/rKUBQ7MtUUfbQWiyKJGpcnv4/WgrhWsFKrcPstcAt/J0tQ== +"@types/react@*": + version "17.0.35" + resolved "https://registry.yarnpkg.com/@types/react/-/react-17.0.35.tgz#217164cf830267d56cd1aec09dcf25a541eedd4c" + integrity sha512-r3C8/TJuri/SLZiiwwxQoLAoavaczARfT9up9b4Jr65+ErAUX3MIkU0oMOQnrpfgHme8zIqZLX7O5nnjm5Wayw== + dependencies: + "@types/prop-types" "*" + "@types/scheduler" "*" + csstype "^3.0.2" + +"@types/retry@^0.12.0": + version "0.12.1" + resolved "https://registry.yarnpkg.com/@types/retry/-/retry-0.12.1.tgz#d8f1c0d0dc23afad6dc16a9e993a0865774b4065" + integrity sha512-xoDlM2S4ortawSWORYqsdU+2rxdh4LRW9ytc3zmT37RIKQh6IHyKwwtKhKis9ah8ol07DCkZxPt8BBvPjC6v4g== + "@types/sax@^1.2.1": version "1.2.2" resolved "https://registry.yarnpkg.com/@types/sax/-/sax-1.2.2.tgz#0b8996ffb9ca0b0491e791a09b550d77fa852f5d" @@ -2846,6 +2573,11 @@ dependencies: "@types/node" "*" +"@types/scheduler@*": + version "0.16.2" + resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.2.tgz#1a62f89525723dde24ba1b01b092bf5df8ad4d39" + integrity sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew== + "@types/semver@^7.3.8": version "7.3.8" resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.3.8.tgz#508a27995498d7586dcecd77c25e289bfaf90c59" @@ -3015,6 +2747,11 @@ acorn-dynamic-import@^4.0.0: resolved "https://registry.yarnpkg.com/acorn-dynamic-import/-/acorn-dynamic-import-4.0.0.tgz#482210140582a36b83c3e342e1cfebcaa9240948" integrity sha512-d3OEjQV4ROpoflsnUA8HozoIR504TFxNivYEUi6uwz0IYhBkTDXGuWlNdMtybRt3nqVx/L6XqMt0FxkXuWKZhw== +acorn-import-assertions@^1.7.6: + version "1.8.0" + resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz#ba2b5939ce62c238db6d93d81c9b111b29b855e9" + integrity sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw== + acorn-jsx@^5.0.1: version "5.3.2" resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" @@ -3035,7 +2772,7 @@ acorn@^8.0.4, acorn@^8.4.1: resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.4.1.tgz#56c36251fc7cabc7096adc18f05afe814321a28c" integrity sha512-asabaBSkEKosYKMITunzX177CXxQ4Q8BSSzMTKD+FefUhipQC70gfW5SiUDhYQ3vk8G+81HqQk7Fv9OXwwn9KA== -address@1.1.2, address@^1.0.1: +address@^1.0.1, address@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/address/-/address-1.1.2.tgz#bf1116c9c758c51b7a933d296b72c221ed9428b6" integrity sha512-aT6camzM4xEA54YVJYSqxz1kv4IHnQZRtThJJHhUMRExaU5spC7jX5ugSwTaTgJliIgs4VhZOk7htClvQ/LmRA== @@ -3064,17 +2801,12 @@ aggregate-error@^3.0.0: clean-stack "^2.0.0" indent-string "^4.0.0" -ajv-errors@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/ajv-errors/-/ajv-errors-1.0.1.tgz#f35986aceb91afadec4102fbd85014950cefa64d" - integrity sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ== - 
-ajv-keywords@^3.1.0, ajv-keywords@^3.5.2: +ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: version "3.5.2" resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== -ajv@^6.1.0, ajv@^6.12.3, ajv@^6.12.4, ajv@^6.12.5: +ajv@^6.12.2, ajv@^6.12.3, ajv@^6.12.4, ajv@^6.12.5: version "6.12.6" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== @@ -3143,11 +2875,6 @@ ansi-align@^3.0.0: dependencies: string-width "^3.0.0" -ansi-colors@^3.0.0: - version "3.2.4" - resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.4.tgz#e3a3da4bfbae6c86a9c285625de124a234026fbf" - integrity sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA== - ansi-escapes@^4.3.1: version "4.3.2" resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" @@ -3155,17 +2882,17 @@ ansi-escapes@^4.3.1: dependencies: type-fest "^0.21.3" -ansi-html-community@0.0.8, ansi-html-community@^0.0.8: +ansi-html-community@^0.0.8: version "0.0.8" resolved "https://registry.yarnpkg.com/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" integrity "sha1-afvE1sy+OD+XNpNK40w/gpDxv0E= sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==" -ansi-regex@^2.0.0, ansi-regex@^3.0.0, ansi-regex@^4.1.0, ansi-regex@^5.0.0, ansi-regex@^5.0.1: +ansi-regex@^2.0.0, ansi-regex@^3.0.0, ansi-regex@^4.1.0, ansi-regex@^5.0.0, ansi-regex@^5.0.1, ansi-regex@^6.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== -ansi-styles@^3.2.0, ansi-styles@^3.2.1: +ansi-styles@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== @@ -3189,14 +2916,6 @@ ansistyles@~0.1.3: resolved "https://registry.yarnpkg.com/ansistyles/-/ansistyles-0.1.3.tgz#5de60415bda071bb37127854c864f41b23254539" integrity sha1-XeYEFb2gcbs3EnhUyGT0GyMlRTk= -anymatch@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" - integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== - dependencies: - micromatch "^3.1.4" - normalize-path "^2.1.1" - anymatch@~3.1.2: version "3.1.2" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" @@ -3253,16 +2972,6 @@ argparse@^2.0.1: resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== -arr-diff@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" - integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= - -arr-flatten@^1.1.0: - version "1.1.0" - resolved 
"https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" - integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== - arr-union@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" @@ -3278,13 +2987,6 @@ array-flatten@^2.1.0: resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.2.tgz#24ef80a28c1a893617e2149b0c6d0d788293b099" integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== -array-union@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39" - integrity sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk= - dependencies: - array-uniq "^1.0.1" - array-union@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" @@ -3295,16 +2997,6 @@ array-union@^3.0.1: resolved "https://registry.yarnpkg.com/array-union/-/array-union-3.0.1.tgz#da52630d327f8b88cfbfb57728e2af5cd9b6b975" integrity sha512-1OvF9IbWwaeiM9VhzYXVQacMibxpXOMYVNIvMtKRyX9SImBXpKcFr8XvFDeEslCyuH/t6KRt7HEO94AlP8Iatw== -array-uniq@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/array-uniq/-/array-uniq-1.0.3.tgz#af6ac877a25cc7f74e058894753858dfdb24fdb6" - integrity sha1-r2rId6Jcx/dOBYiUdThY39sk/bY= - -array-unique@^0.3.2: - version "0.3.2" - resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" - integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= - asap@^2.0.0, asap@~2.0.3: version "2.0.6" resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" @@ -3322,21 +3014,6 @@ assert-plus@1.0.0, assert-plus@^1.0.0: resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= -assign-symbols@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" - integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= - -async-each@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" - integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== - -async-limiter@~1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd" - integrity sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ== - async@^2.6.2: version "2.6.3" resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff" @@ -3349,12 +3026,12 @@ asynckit@^0.4.0: resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= -atob@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" - integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== +at-least-node@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" + integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== -autoprefixer@^10.2.0, autoprefixer@^10.2.5: +autoprefixer@^10.2.0: version "10.3.1" resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.3.1.tgz#954214821d3aa06692406c6a0a9e9d401eafbed2" integrity sha512-L8AmtKzdiRyYg7BUXJTzigmhbQRCXFKz6SA1Lqo0+AR2FBbQ4aTAPFSDlOutnFkjhiz8my4agGXog1xlMjPJ6A== @@ -3481,19 +3158,6 @@ base16@^1.0.0: resolved "https://registry.yarnpkg.com/base16/-/base16-1.0.0.tgz#e297f60d7ec1014a7a971a39ebc8a98c0b681e70" integrity sha1-4pf2DX7BAUp6lxo568ipjAtoHnA= -base@^0.11.1: - version "0.11.2" - resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" - integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== - dependencies: - cache-base "^1.0.1" - class-utils "^0.3.5" - component-emitter "^1.2.1" - define-property "^1.0.0" - isobject "^3.0.1" - mixin-deep "^1.2.0" - pascalcase "^0.1.1" - bash-glob@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/bash-glob/-/bash-glob-2.0.0.tgz#a8ef19450783403ed93fccca2dbe09f2cf6320dc" @@ -3544,23 +3208,11 @@ bin-links@^2.3.0: rimraf "^3.0.0" write-file-atomic "^3.0.3" -binary-extensions@^1.0.0: - version "1.13.1" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" - integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== - binary-extensions@^2.0.0, binary-extensions@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== -bindings@^1.5.0: - version "1.5.0" - resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.5.0.tgz#10353c9e945334bc0511a6d90b38fbc7c9c504df" - integrity sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ== - dependencies: - file-uri-to-path "1.0.0" - bluebird@^3.7.1: version "3.7.2" resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" @@ -3621,22 +3273,6 @@ brace-expansion@^1.1.7: balanced-match "^1.0.0" concat-map "0.0.1" -braces@^2.3.1, braces@^2.3.2: - version "2.3.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" - integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== - dependencies: - arr-flatten "^1.1.0" - array-unique "^0.3.2" - extend-shallow "^2.0.1" - fill-range "^4.0.0" - isobject "^3.0.1" - repeat-element "^1.1.2" - snapdragon "^0.8.1" - snapdragon-node "^2.0.1" - split-string "^3.0.2" - to-regex "^3.0.1" - braces@^3.0.1, braces@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" @@ -3644,7 +3280,7 @@ braces@^3.0.1, braces@~3.0.2: dependencies: fill-range "^7.0.1" -browserslist@4.14.2, browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.16.0, browserslist@^4.16.5, browserslist@^4.16.6, browserslist@^4.17.3: +browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.16.0, browserslist@^4.16.5, browserslist@^4.16.6, browserslist@^4.17.3: 
version "4.17.4" resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.17.4.tgz#72e2508af2a403aec0a49847ef31bd823c57ead4" integrity sha512-Zg7RpbZpIJRW3am9Lyckue7PLytvVxxhJj1CaJVlCWENsGEAOlnlt8X0ZxGRPp7Bt9o8tIRM5SEXy4BCPMJjLQ== @@ -3717,21 +3353,6 @@ cacache@^15.0.3, cacache@^15.0.5, cacache@^15.2.0, cacache@^15.3.0: tar "^6.0.2" unique-filename "^1.1.1" -cache-base@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" - integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== - dependencies: - collection-visit "^1.0.0" - component-emitter "^1.2.1" - get-value "^2.0.6" - has-value "^1.0.0" - isobject "^3.0.1" - set-value "^2.0.0" - to-object-path "^0.3.0" - union-value "^1.0.0" - unset-value "^1.0.0" - cacheable-request@^6.0.0: version "6.1.0" resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-6.1.0.tgz#20ffb8bd162ba4be11e9567d823db651052ca912" @@ -3758,7 +3379,7 @@ callsites@^3.0.0: resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== -camel-case@^4.1.1, camel-case@^4.1.2: +camel-case@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a" integrity sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw== @@ -3771,11 +3392,6 @@ camelcase-css@2.0.1: resolved "https://registry.yarnpkg.com/camelcase-css/-/camelcase-css-2.0.1.tgz#ee978f6947914cc30c6b44741b6ed1df7f043fd5" integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== -camelcase@^5.0.0: - version "5.3.1" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" - integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== - camelcase@^6.2.0: version "6.2.0" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.2.0.tgz#924af881c9d525ac9d87f40d964e5cea982a1809" @@ -3811,7 +3427,7 @@ ccount@^1.0.0, ccount@^1.0.3: resolved "https://registry.yarnpkg.com/ccount/-/ccount-1.1.0.tgz#246687debb6014735131be8abab2d93898f8d043" integrity sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg== -chalk@2.4.2, chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2: +chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== @@ -3828,7 +3444,7 @@ chalk@^4.0.0, chalk@^4.1.2: ansi-styles "^4.1.0" supports-color "^7.1.0" -chalk@^4.1.0, chalk@^4.1.1: +chalk@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.1.tgz#c80b3fab28bf6371e6863325eee67e618b77e6ad" integrity sha512-diHzdDKxcU+bAsUboHLPEDQiw0qEe0qd7SYUn3HgcFlWgbDcfLGswOHYeGrHKzG9z6UYf01d9VFMfZxPM1xZSg== @@ -3873,26 +3489,7 @@ cheerio@^0.22.0: lodash.reject "^4.4.0" lodash.some "^4.4.0" -chokidar@^2.1.8: - version "2.1.8" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917" - integrity 
sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg== - dependencies: - anymatch "^2.0.0" - async-each "^1.0.1" - braces "^2.3.2" - glob-parent "^3.1.0" - inherits "^2.0.3" - is-binary-path "^1.0.0" - is-glob "^4.0.0" - normalize-path "^3.0.0" - path-is-absolute "^1.0.0" - readdirp "^2.2.1" - upath "^1.1.1" - optionalDependencies: - fsevents "^1.2.7" - -chokidar@^3.5.1, chokidar@^3.5.2: +chokidar@^3.4.2, chokidar@^3.5.2: version "3.5.2" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.2.tgz#dba3976fcadb016f66fd365021d91600d01c1e75" integrity sha512-ekGhOnNVPgT77r4K/U3GDhu+FQ2S8TnK/s2KbIGXi0SZWuwkZ2QNyfWdZW+TVfn84DpEP7rLeCt2UI6bJ8GwbQ== @@ -3934,28 +3531,11 @@ cidr-regex@^3.1.1: dependencies: ip-regex "^4.1.0" -class-utils@^0.3.5: - version "0.3.6" - resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" - integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== - dependencies: - arr-union "^3.1.0" - define-property "^0.2.5" - isobject "^3.0.0" - static-extend "^0.1.1" - classnames@^2.2.6: version "2.3.1" resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.3.1.tgz#dfcfa3891e306ec1dad105d0e88f4417b8535e8e" integrity sha512-OlQdbZ7gLfGarSqxesMesDa5uz7KFbID8Kpq/SxIoNGDqY8lSYs0D+hhtBXhcdB3rcbXArFr7vlHheLk1voeNA== -clean-css@^4.2.3: - version "4.2.3" - resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-4.2.3.tgz#507b5de7d97b48ee53d84adb0160ff6216380f78" - integrity sha512-VcMWDN54ZN/DS+g58HYL5/n4Zrqe8vHJpGA8KdgUXFU4fuP/aHNw8eld9SyEIyabIMJX/0RaY/fplOo5hYLSFA== - dependencies: - source-map "~0.6.0" - clean-css@^5.1.5: version "5.1.5" resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-5.1.5.tgz#3b0af240dcfc9a3779a08c2332df3ebd4474f232" @@ -3991,15 +3571,6 @@ cli-table3@^0.6.0: optionalDependencies: colors "^1.1.2" -cliui@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5" - integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA== - dependencies: - string-width "^3.1.0" - strip-ansi "^5.2.0" - wrap-ansi "^5.1.0" - clone-deep@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" @@ -4052,14 +3623,6 @@ collapse-white-space@^1.0.2: resolved "https://registry.yarnpkg.com/collapse-white-space/-/collapse-white-space-1.0.6.tgz#e63629c0016665792060dbbeb79c42239d2c5287" integrity sha512-jEovNnrhMuqyCcjfEJA56v0Xq8SkIoPKDyaHahwo3POf4qcSXqMYuwNcOTzp74vTsR9Tn08z4MxWqAhcekogkQ== -collection-visit@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" - integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= - dependencies: - map-visit "^1.0.0" - object-visit "^1.0.0" - color-convert@^1.9.0: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" @@ -4104,6 +3667,11 @@ colorette@^1.2.2: resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.2.2.tgz#cbcc79d5e99caea2dbf10eb3a26fd8b3e6acfa94" integrity sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w== +colorette@^2.0.10: + version "2.0.16" + resolved 
"https://registry.yarnpkg.com/colorette/-/colorette-2.0.16.tgz#713b9af84fdb000139f04546bd4a93f62a5085da" + integrity sha512-hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g== + colors@^1.1.2, colors@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78" @@ -4139,11 +3707,6 @@ commander@^2.11.0, commander@^2.20.0: resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== -commander@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068" - integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA== - commander@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/commander/-/commander-5.1.0.tgz#46abbd1652f8e059bddaef99bbdcb2ad9cf179ae" @@ -4265,17 +3828,12 @@ cookie@0.4.0: resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba" integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg== -copy-descriptor@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" - integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= - copy-text-to-clipboard@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/copy-text-to-clipboard/-/copy-text-to-clipboard-3.0.1.tgz#8cbf8f90e0a47f12e4a24743736265d157bce69c" integrity sha512-rvVsHrpFcL4F2P8ihsoLdFHmd404+CMg71S756oRSeQgqk51U3kicGdnvfkrxva0xXH92SjGS62B0XIJsbh+9Q== -copy-webpack-plugin@^9.0.0, copy-webpack-plugin@^9.0.1: +copy-webpack-plugin@^9.0.1: version "9.0.1" resolved "https://registry.yarnpkg.com/copy-webpack-plugin/-/copy-webpack-plugin-9.0.1.tgz#b71d21991599f61a4ee00ba79087b8ba279bbb59" integrity sha512-14gHKKdYIxF84jCEgPgYXCPpldbwpxxLbCmA7LReY7gvbaT555DgeBWBgBZM116tv/fO6RRJrsivBqRyRlukhw== @@ -4304,11 +3862,6 @@ core-js-compat@^3.16.0, core-js-compat@^3.16.2: browserslist "^4.17.3" semver "7.0.0" -core-js-pure@^3.15.0: - version "3.15.2" - resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.15.2.tgz#c8e0874822705f3385d3197af9348f7c9ae2e3ce" - integrity sha512-D42L7RYh1J2grW8ttxoY1+17Y4wXZeKe7uyplAI3FkNQyI5OgBIAjUfFiTPfL1rs0qLpxaabITNbjKl1Sp82tA== - core-js-pure@^3.16.0: version "3.18.3" resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.18.3.tgz#7eed77dcce1445ab68fd68715856633e2fb3b90c" @@ -4319,16 +3872,22 @@ core-js@^3.18.0: resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.18.3.tgz#86a0bba2d8ec3df860fefcc07a8d119779f01509" integrity sha512-tReEhtMReZaPFVw7dajMx0vlsz3oOb8ajgPoHVYGxr8ErnZ6PcYEvvmjGmXlfpnxpkYSdOQttjB+MvVbCGfvLw== -core-js@^3.9.1: - version "3.15.2" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.15.2.tgz#740660d2ff55ef34ce664d7e2455119c5bdd3d61" - integrity sha512-tKs41J7NJVuaya8DxIOCnl8QuPHx5/ZVbFo1oKgVl1qHFBBrDctzQGtuLjPpRdNTWmKPH6oEvgN/MUID+l485Q== - core-util-is@1.0.2, core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= +cosmiconfig@^6.0.0: + version "6.0.0" + resolved 
"https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-6.0.0.tgz#da4fee853c52f6b1e6935f41c1a2fc50bd4a9982" + integrity sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.1.0" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.7.2" + cosmiconfig@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.0.0.tgz#ef9b44d773959cae63ddecd122de23853b60f8d3" @@ -4347,15 +3906,6 @@ cross-fetch@^3.0.4: dependencies: node-fetch "2.6.1" -cross-spawn@7.0.3, cross-spawn@^7.0.3: - version "7.0.3" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" - integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== - dependencies: - path-key "^3.1.0" - shebang-command "^2.0.0" - which "^2.0.1" - cross-spawn@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449" @@ -4365,16 +3915,14 @@ cross-spawn@^5.1.0: shebang-command "^1.2.0" which "^1.2.9" -cross-spawn@^6.0.0: - version "6.0.5" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" - integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== +cross-spawn@^7.0.3: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== dependencies: - nice-try "^1.0.4" - path-key "^2.0.1" - semver "^5.5.0" - shebang-command "^1.2.0" - which "^1.2.9" + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" crypto-random-string@^2.0.0: version "2.0.0" @@ -4414,19 +3962,6 @@ css-loader@^5.1.1: schema-utils "^3.0.0" semver "^7.3.5" -css-minimizer-webpack-plugin@^3.0.1: - version "3.0.2" - resolved "https://registry.yarnpkg.com/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.0.2.tgz#8fadbdf10128cb40227bff275a4bb47412534245" - integrity sha512-B3I5e17RwvKPJwsxjjWcdgpU/zqylzK1bPVghcmpFHRL48DXiBgrtqz1BJsn68+t/zzaLp9kYAaEDvQ7GyanFQ== - dependencies: - cssnano "^5.0.6" - jest-worker "^27.0.2" - p-limit "^3.0.2" - postcss "^8.3.5" - schema-utils "^3.0.0" - serialize-javascript "^6.0.0" - source-map "^0.6.1" - css-minimizer-webpack-plugin@^3.0.2: version "3.1.1" resolved "https://registry.yarnpkg.com/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-3.1.1.tgz#27bafa3b75054713565b2266c64b0228acd18634" @@ -4512,18 +4047,6 @@ cssesc@^3.0.0: resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== -cssnano-preset-advanced@^5.1.1: - version "5.1.3" - resolved "https://registry.yarnpkg.com/cssnano-preset-advanced/-/cssnano-preset-advanced-5.1.3.tgz#a2c6cf2fe39108b81e88810e3c399d1c0fe030ea" - integrity sha512-pS4+Q2Hoo/FevZs2JqA2BG8Vn5o5VeXgj+z6kGndKTq3RFYvlKeJ1ZPnLXo9zyYKwmSqWW0rWqtGxxmigIte0Q== - dependencies: - autoprefixer "^10.2.0" - cssnano-preset-default "^5.1.3" - postcss-discard-unused "^5.0.1" - postcss-merge-idents "^5.0.1" - postcss-reduce-idents "^5.0.1" - postcss-zindex "^5.0.1" - cssnano-preset-advanced@^5.1.4: version "5.1.4" resolved 
"https://registry.yarnpkg.com/cssnano-preset-advanced/-/cssnano-preset-advanced-5.1.4.tgz#b492d03756550f75cd8131d59105efdea88c2f7e" @@ -4611,7 +4134,7 @@ cssnano-utils@^2.0.1: resolved "https://registry.yarnpkg.com/cssnano-utils/-/cssnano-utils-2.0.1.tgz#8660aa2b37ed869d2e2f22918196a9a8b6498ce2" integrity sha512-i8vLRZTnEH9ubIyfdZCAdIdgnHAUeQeByEeQ2I7oTilvP9oHO6RScpeq3GsFUVqeB8uZgOQ9pw8utofNn32hhQ== -cssnano@^5.0.4, cssnano@^5.0.6: +cssnano@^5.0.6: version "5.0.6" resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-5.0.6.tgz#2a91ad34c6521ae31eab3da9c90108ea3093535d" integrity sha512-NiaLH/7yqGksFGsFNvSRe2IV/qmEBAeDE64dYeD8OBrgp6lE8YoMeQJMtsv5ijo6MPyhuoOvFhI94reahBRDkw== @@ -4637,6 +4160,11 @@ csso@^4.0.2, csso@^4.2.0: dependencies: css-tree "^1.1.2" +csstype@^3.0.2: + version "3.0.10" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.0.10.tgz#2ad3a7bed70f35b965707c092e5f30b327c290e5" + integrity sha512-2u44ZG2OcNUO9HDp/Jl8C07x6pU/eTR3ncV91SiK3dhG9TWvRVsCoJw14Ckx5DgWkzGA3waZWO3d7pgqpUI/XA== + dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" @@ -4644,7 +4172,7 @@ dashdash@^1.12.0: dependencies: assert-plus "^1.0.0" -debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.0: +debug@2.6.9, debug@^2.6.0: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== @@ -4658,7 +4186,7 @@ debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1: dependencies: ms "2.1.2" -debug@^3.1.1, debug@^3.2.6: +debug@^3.1.1: version "3.2.7" resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== @@ -4670,16 +4198,6 @@ debuglog@^1.0.1: resolved "https://registry.yarnpkg.com/debuglog/-/debuglog-1.0.1.tgz#aa24ffb9ac3df9a2351837cfb2d279360cd78492" integrity sha1-qiT/uaw9+aI1GDfPstJ5NgzXhJI= -decamelize@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" - integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= - -decode-uri-component@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" - integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= - decompress-response@^3.3.0: version "3.3.0" resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-3.3.0.tgz#80a4dd323748384bfa248083622aedec982adff3" @@ -4709,13 +4227,12 @@ deepmerge@^4.0.0, deepmerge@^4.2.2: resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== -default-gateway@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-4.2.0.tgz#167104c7500c2115f6dd69b0a536bb8ed720552b" - integrity sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA== +default-gateway@^6.0.0: + version "6.0.3" + resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-6.0.3.tgz#819494c888053bdb743edbf343d6cdf7f2943a71" + integrity 
sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg== dependencies: - execa "^1.0.0" - ip-regex "^2.1.0" + execa "^5.0.0" defaults@^1.0.3: version "1.0.3" @@ -4729,6 +4246,11 @@ defer-to-connect@^1.0.1: resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591" integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ== +define-lazy-prop@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" + integrity sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og== + define-properties@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" @@ -4736,41 +4258,6 @@ define-properties@^1.1.3: dependencies: object-keys "^1.0.12" -define-property@^0.2.5: - version "0.2.5" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" - integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= - dependencies: - is-descriptor "^0.1.0" - -define-property@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" - integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= - dependencies: - is-descriptor "^1.0.0" - -define-property@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" - integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== - dependencies: - is-descriptor "^1.0.2" - isobject "^3.0.1" - -del@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/del/-/del-4.1.1.tgz#9e8f117222ea44a31ff3a156c049b99052a9f0b4" - integrity sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ== - dependencies: - "@types/glob" "^7.1.1" - globby "^6.1.0" - is-path-cwd "^2.0.0" - is-path-in-cwd "^2.0.0" - p-map "^2.0.0" - pify "^4.0.1" - rimraf "^2.6.3" - del@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/del/-/del-6.0.0.tgz#0b40d0332cea743f1614f818be4feb717714c952" @@ -4817,7 +4304,7 @@ detect-node@^2.0.4: resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1" integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== -detect-port-alt@1.1.6: +detect-port-alt@^1.1.6: version "1.1.6" resolved "https://registry.yarnpkg.com/detect-port-alt/-/detect-port-alt-1.1.6.tgz#24707deabe932d4a3cf621302027c2b266568275" integrity sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q== @@ -5002,11 +4489,6 @@ electron-to-chromium@^1.3.867: resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.872.tgz#2311a82f344d828bab6904818adc4afb57b35369" integrity "sha1-IxGoLzRNgouraQSBitxK+1ezU2k= sha512-qG96atLFY0agKyEETiBFNhpRLSXGSXOBuhXWpbkYqrLKKASpRyRBUtfkn0ZjIf/yXfA7FA4nScVOMpXSHFlUCQ==" -"emoji-regex@>=6.0.0 <=6.1.1": - version "6.1.1" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-6.1.1.tgz#c6cd0ec1b0642e2a3c67a1137efc5e796da4f88e" - integrity sha1-xs0OwbBkLio8Z6ETfvxeeW2k+I4= - emoji-regex@^7.0.1: version "7.0.3" 
resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" @@ -5046,10 +4528,10 @@ end-of-stream@^1.1.0: dependencies: once "^1.4.0" -enhanced-resolve@^5.8.0: - version "5.8.2" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.8.2.tgz#15ddc779345cbb73e97c611cd00c01c1e7bf4d8b" - integrity sha512-F27oB3WuHDzvR2DOGNTaYy0D5o0cnrv8TeI482VM4kYgQd/FT9lUQwuNsJ0oOHtBUq7eiW5ytqzp7nBFknL+GA== +enhanced-resolve@^5.8.3: + version "5.8.3" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.8.3.tgz#6d552d465cce0423f5b3d718511ea53826a7b2f0" + integrity sha512-EGAbGvH7j7Xt2nc0E7D99La1OiEs8LnyimkRgwExpUMScN6O+3x9tIWs7PLQZVNx4YD+00skHXPXi1yQHpAmZA== dependencies: graceful-fs "^4.2.4" tapable "^2.2.0" @@ -5079,13 +4561,6 @@ err-code@^2.0.2: resolved "https://registry.yarnpkg.com/err-code/-/err-code-2.0.3.tgz#23c2f3b756ffdfc608d30e27c9a941024807e7f9" integrity sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA== -errno@^0.1.3: - version "0.1.8" - resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.8.tgz#8bb3e9c7d463be4976ff888f76b4809ebc2e811f" - integrity sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A== - dependencies: - prr "~1.0.1" - error-ex@^1.3.1: version "1.3.2" resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" @@ -5115,10 +4590,10 @@ es-abstract@^1.17.2, es-abstract@^1.18.0-next.2, es-abstract@^1.18.2: string.prototype.trimstart "^1.0.4" unbox-primitive "^1.0.1" -es-module-lexer@^0.7.1: - version "0.7.1" - resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-0.7.1.tgz#c2c8e0f46f2df06274cdaf0dd3f3b33e0a0b267d" - integrity sha512-MgtWFl5No+4S3TmhDmCz2ObFGm6lEpTnzbQi+Dd+pw4mlTIZTmM2iAs5gRlmx5zS9luzobCSBSI90JM/1/JgOw== +es-module-lexer@^0.9.0: + version "0.9.3" + resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-0.9.3.tgz#6f13db00cc38417137daf74366f535c8eb438f19" + integrity sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ== es-to-primitive@^1.2.1: version "1.2.1" @@ -5144,16 +4619,16 @@ escape-html@^1.0.3, escape-html@~1.0.3: resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= -escape-string-regexp@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" - integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== - escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= +escape-string-regexp@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + escape-string-regexp@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" @@ -5194,7 +4669,7 @@ esutils@^2.0.2: resolved 
"https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== -eta@^1.12.1, eta@^1.12.3: +eta@^1.12.3: version "1.12.3" resolved "https://registry.yarnpkg.com/eta/-/eta-1.12.3.tgz#2982d08adfbef39f9fa50e2fbd42d7337e7338b1" integrity sha512-qHixwbDLtekO/d51Yr4glcaUJCIjGVJyTzuqV4GPlgZo1YpgOKG+avQynErZIYrfM6JIJdtiG2Kox8tbb+DoGg== @@ -5226,26 +4701,6 @@ events@^3.2.0: resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== -eventsource@^1.0.7: - version "1.1.0" - resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-1.1.0.tgz#00e8ca7c92109e94b0ddf32dac677d841028cfaf" - integrity sha512-VSJjT5oCNrFvCS6igjzPAt5hBzQ2qPBFIbJ03zLI9SE0mxwZpMw6BfJrbFHm1a141AavMEB8JHmBhWAd66PfCg== - dependencies: - original "^1.0.0" - -execa@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" - integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== - dependencies: - cross-spawn "^6.0.0" - get-stream "^4.0.0" - is-stream "^1.1.0" - npm-run-path "^2.0.0" - p-finally "^1.0.0" - signal-exit "^3.0.0" - strip-eof "^1.0.0" - execa@^5.0.0: version "5.1.1" resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" @@ -5261,19 +4716,6 @@ execa@^5.0.0: signal-exit "^3.0.3" strip-final-newline "^2.0.0" -expand-brackets@^2.1.4: - version "2.1.4" - resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" - integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= - dependencies: - debug "^2.3.3" - define-property "^0.2.5" - extend-shallow "^2.0.1" - posix-character-classes "^0.1.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - express@^4.17.1: version "4.17.1" resolved "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134" @@ -5317,33 +4759,11 @@ extend-shallow@^2.0.1: dependencies: is-extendable "^0.1.0" -extend-shallow@^3.0.0, extend-shallow@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" - integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= - dependencies: - assign-symbols "^1.0.0" - is-extendable "^1.0.1" - extend@^3.0.0, extend@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== -extglob@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" - integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== - dependencies: - array-unique "^0.3.2" - define-property "^1.0.0" - expand-brackets "^2.1.4" - extend-shallow "^2.0.1" - fragment-cache "^0.2.1" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - extsprintf@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" @@ -5448,25 +4868,10 @@ file-loader@^6.2.0: loader-utils "^2.0.0" 
schema-utils "^3.0.0" -file-uri-to-path@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd" - integrity sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw== - -filesize@6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/filesize/-/filesize-6.1.0.tgz#e81bdaa780e2451d714d71c0d7a4f3238d37ad00" - integrity sha512-LpCHtPQ3sFx67z+uh2HnSyWSLLu5Jxo21795uRDuar/EOuYWXib5EmPaGIBuSnRqH2IODiKA2k5re/K9OnN/Yg== - -fill-range@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" - integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= - dependencies: - extend-shallow "^2.0.1" - is-number "^3.0.0" - repeat-string "^1.6.1" - to-regex-range "^2.1.0" +filesize@^6.1.0: + version "6.4.0" + resolved "https://registry.yarnpkg.com/filesize/-/filesize-6.4.0.tgz#914f50471dd66fdca3cefe628bd0cde4ef769bcd" + integrity sha512-mjFIpOHC4jbfcTfoh4rkWpI31mF7viw9ikj/JyLoKzqlwG/YsefKfvYlYhdYdg/9mtK2z1AzgN/0LvVQ3zdlSQ== fill-range@^7.0.1: version "7.0.1" @@ -5506,14 +4911,6 @@ find-cache-dir@^3.3.2: make-dir "^3.0.2" pkg-dir "^4.1.0" -find-up@4.1.0, find-up@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" - integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== - dependencies: - locate-path "^5.0.0" - path-exists "^4.0.0" - find-up@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" @@ -5521,6 +4918,14 @@ find-up@^3.0.0: dependencies: locate-path "^3.0.0" +find-up@^4.0.0, find-up@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + find-up@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" @@ -5555,28 +4960,29 @@ follow-redirects@^1.14.0: resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.4.tgz#838fdf48a8bbdd79e52ee51fb1c94e3ed98b9379" integrity sha512-zwGkiSXC1MUJG/qmeIFH2HBJx9u0V46QGUe3YR1fXG8bXQxq7fLj0RjLZQ5nubr9qNJUZrH+xUcwXEoXNpfS+g== -for-in@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" - integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= - forever-agent@~0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= -fork-ts-checker-webpack-plugin@4.1.6: - version "4.1.6" - resolved "https://registry.yarnpkg.com/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-4.1.6.tgz#5055c703febcf37fa06405d400c122b905167fc5" - integrity sha512-DUxuQaKoqfNne8iikd14SAkh5uw4+8vNifp6gmA73yYNS6ywLIWSLD/n/mBzHQRpW3J7rbATEakmiA8JvkTyZw== +fork-ts-checker-webpack-plugin@^6.0.5: + version "6.4.0" + resolved "https://registry.yarnpkg.com/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.4.0.tgz#057e477cf1d8b013b2ed2669437f818680289c4c" + integrity 
sha512-3I3wFkc4DbzaUDPWEi96wdYGu4EKtxBafhZYm0o4mX51d9bphAY4P3mBl8K5mFXFJqVzHfmdbm9kLGnm7vwwBg== dependencies: - "@babel/code-frame" "^7.5.5" - chalk "^2.4.1" - micromatch "^3.1.10" + "@babel/code-frame" "^7.8.3" + "@types/json-schema" "^7.0.5" + chalk "^4.1.0" + chokidar "^3.4.2" + cosmiconfig "^6.0.0" + deepmerge "^4.2.2" + fs-extra "^9.0.0" + glob "^7.1.6" + memfs "^3.1.2" minimatch "^3.0.4" - semver "^5.6.0" + schema-utils "2.7.0" + semver "^7.3.2" tapable "^1.0.0" - worker-rpc "^0.1.0" form-data@~2.3.2: version "2.3.3" @@ -5597,13 +5003,6 @@ fraction.js@^4.1.1: resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.1.1.tgz#ac4e520473dae67012d618aab91eda09bcb400ff" integrity sha512-MHOhvvxHTfRFpF1geTK9czMIZ6xclsEor2wkIGYYq+PxcQqT7vStJqjhe6S1TenZrMZzo+wlqOufBDVepUEgPg== -fragment-cache@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" - integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= - dependencies: - map-cache "^0.2.2" - fresh@0.5.2: version "0.5.2" resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" @@ -5618,6 +5017,16 @@ fs-extra@^10.0.0: jsonfile "^6.0.1" universalify "^2.0.0" +fs-extra@^9.0.0: + version "9.1.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" + integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== + dependencies: + at-least-node "^1.0.0" + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + fs-minipass@^2.0.0, fs-minipass@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb" @@ -5625,19 +5034,16 @@ fs-minipass@^2.0.0, fs-minipass@^2.1.0: dependencies: minipass "^3.0.0" +fs-monkey@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/fs-monkey/-/fs-monkey-1.0.3.tgz#ae3ac92d53bb328efe0e9a1d9541f6ad8d48e2d3" + integrity sha512-cybjIfiiE+pTWicSCLFHSrXZ6EilF30oh91FDP9S2B051prEa7QWfrVTQm10/dDpswBDXZugPa1Ogu8Yh+HV0Q== + fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= -fsevents@^1.2.7: - version "1.2.13" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.13.tgz#f325cb0455592428bcf11b383370ef70e3bfcc38" - integrity sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw== - dependencies: - bindings "^1.5.0" - nan "^2.12.1" - fsevents@~2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" @@ -5682,11 +5088,6 @@ gensync@^1.0.0-beta.1, gensync@^1.0.0-beta.2: resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== -get-caller-file@^2.0.1: - version "2.0.5" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" - integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== - get-intrinsic@^1.0.2, get-intrinsic@^1.1.1: version "1.1.1" resolved 
"https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.1.tgz#15f59f376f855c446963948f0d24cd3637b4abc6" @@ -5701,7 +5102,7 @@ get-own-enumerable-property-symbols@^3.0.0: resolved "https://registry.yarnpkg.com/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664" integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g== -get-stream@^4.0.0, get-stream@^4.1.0: +get-stream@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== @@ -5720,11 +5121,6 @@ get-stream@^6.0.0: resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== -get-value@^2.0.3, get-value@^2.0.6: - version "2.0.6" - resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" - integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= - getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" @@ -5732,19 +5128,12 @@ getpass@^0.1.1: dependencies: assert-plus "^1.0.0" -github-slugger@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/github-slugger/-/github-slugger-1.3.0.tgz#9bd0a95c5efdfc46005e82a906ef8e2a059124c9" - integrity sha512-gwJScWVNhFYSRDvURk/8yhcFBee6aFjye2a7Lhb2bUyRulpIoek9p0I9Kt7PT67d/nUlZbFu8L9RLiA0woQN8Q== - dependencies: - emoji-regex ">=6.0.0 <=6.1.1" - github-slugger@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/github-slugger/-/github-slugger-1.4.0.tgz#206eb96cdb22ee56fdc53a28d5a302338463444e" integrity sha512-w0dzqw/nt51xMVmlaV1+JRzN+oCa1KfcgGEWhxUG16wbdA+Xnt/yoFO8Z8x/V82ZcZ0wy6ln9QDup5avbhiDhQ== -glob-parent@^3.1.0, glob-parent@^5.1.2, glob-parent@^6.0.0, glob-parent@~5.1.2: +glob-parent@^5.1.2, glob-parent@^6.0.0, glob-parent@~5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== @@ -5756,7 +5145,7 @@ glob-to-regexp@^0.4.1: resolved "https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e" integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw== -glob@^7.0.0, glob@^7.0.3, glob@^7.1.3: +glob@^7.0.0, glob@^7.1.3: version "7.1.7" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90" integrity sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ== @@ -5787,7 +5176,7 @@ global-dirs@^3.0.0: dependencies: ini "2.0.0" -global-modules@2.0.0: +global-modules@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-2.0.0.tgz#997605ad2345f27f51539bea26574421215c7780" integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== @@ -5808,18 +5197,6 @@ globals@^11.1.0: resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" integrity 
sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== -globby@11.0.1: - version "11.0.1" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.1.tgz#9a2bf107a068f3ffeabc49ad702c79ede8cfd357" - integrity sha512-iH9RmgwCmUJHi2z5o2l3eTtGBtXek1OYlHrbcxOYugyHLmAsZrPj43OtHThd62Buh/Vv6VyCBD2bdyWcGNQqoQ== - dependencies: - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.1.1" - ignore "^5.1.4" - merge2 "^1.3.0" - slash "^3.0.0" - globby@^11.0.1, globby@^11.0.2, globby@^11.0.3, globby@^11.0.4: version "11.0.4" resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.4.tgz#2cbaff77c2f2a62e71e9b2813a67b97a3a3001a5" @@ -5844,17 +5221,6 @@ globby@^12.0.2: merge2 "^1.4.1" slash "^4.0.0" -globby@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c" - integrity sha1-9abXDoOV4hyFj7BInWTfAkJNUGw= - dependencies: - array-union "^1.0.1" - glob "^7.0.3" - object-assign "^4.0.1" - pify "^2.0.0" - pinkie-promise "^2.0.0" - got@^9.6.0: version "9.6.0" resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85" @@ -5872,7 +5238,7 @@ got@^9.6.0: to-readable-stream "^1.0.0" url-parse-lax "^3.0.0" -graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4: +graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4: version "4.2.6" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.6.tgz#ff040b2b0853b23c3d31027523706f1885d76bee" integrity sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ== @@ -5892,7 +5258,7 @@ gray-matter@^4.0.3: section-matter "^1.0.0" strip-bom-string "^1.0.0" -gzip-size@5.1.1: +gzip-size@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-5.1.1.tgz#cb9bee692f87c0612b232840a873904e4c135274" integrity sha512-FNHi6mmoHvs1mxZAds4PpdCS6QG8B4C1krxJsMutgxl5t3+GlRTzzI3NEkifXx2pVsOvJdOGSmIgDhQ55FwdPA== @@ -5950,37 +5316,6 @@ has-unicode@^2.0.0, has-unicode@^2.0.1: resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk= -has-value@^0.3.1: - version "0.3.1" - resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" - integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= - dependencies: - get-value "^2.0.3" - has-values "^0.1.4" - isobject "^2.0.0" - -has-value@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" - integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= - dependencies: - get-value "^2.0.6" - has-values "^1.0.0" - isobject "^3.0.0" - -has-values@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" - integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= - -has-values@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" - integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= - dependencies: - is-number "^3.0.0" - kind-of "^4.0.0" - has-yarn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/has-yarn/-/has-yarn-2.1.0.tgz#137e11354a7b5bf11aa5cb649cf0c6f3ff2b2e77" @@ -6138,23 +5473,10 @@ hsla-regex@^1.0.0: resolved 
"https://registry.yarnpkg.com/hsla-regex/-/hsla-regex-1.0.0.tgz#c1ce7a3168c8c6614033a4b5f7877f3b225f9c38" integrity sha1-wc56MWjIxmFAM6S194d/OyJfnDg= -html-entities@^1.3.1: - version "1.4.0" - resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-1.4.0.tgz#cfbd1b01d2afaf9adca1b10ae7dffab98c71d2dc" - integrity sha512-8nxjcBcd8wovbeKx7h3wTji4e6+rhaVuPNpMqwWgnHh+N9ToqsCs6XztWRBPQ+UtzsoMAdKZtUENoVzU/EMtZA== - -html-minifier-terser@^5.0.1, html-minifier-terser@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/html-minifier-terser/-/html-minifier-terser-5.1.1.tgz#922e96f1f3bb60832c2634b79884096389b1f054" - integrity sha512-ZPr5MNObqnV/T9akshPKbVgyOqLmy+Bxo7juKCfTfnjNniTAMdy4hz21YQqoofMBJD2kdREaqPPdThoR78Tgxg== - dependencies: - camel-case "^4.1.1" - clean-css "^4.2.3" - commander "^4.1.1" - he "^1.2.0" - param-case "^3.0.3" - relateurl "^0.2.7" - terser "^4.6.3" +html-entities@^2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-2.3.2.tgz#760b404685cb1d794e4f4b744332e3b00dcfe488" + integrity sha512-c3Ab/url5ksaT0WyleslpBEthOzWhrjQbg75y7XUsfSzi3Dgzt0l8w5e7DylRn15MTlMMD58dTfzddNS2kcAjQ== html-minifier-terser@^6.0.2: version "6.0.2" @@ -6179,15 +5501,15 @@ html-void-elements@^1.0.0: resolved "https://registry.yarnpkg.com/html-void-elements/-/html-void-elements-1.0.5.tgz#ce9159494e86d95e45795b166c2021c2cfca4483" integrity sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w== -html-webpack-plugin@^5.3.2: - version "5.3.2" - resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-5.3.2.tgz#7b04bf80b1f6fe84a6d3f66c8b79d64739321b08" - integrity sha512-HvB33boVNCz2lTyBsSiMffsJ+m0YLIQ+pskblXgN9fnjS1BgEcuAfdInfXfGrkdXV406k9FiDi86eVCDBgJOyQ== +html-webpack-plugin@^5.4.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-5.5.0.tgz#c3911936f57681c1f9f4d8b68c158cd9dfe52f50" + integrity sha512-sy88PC2cRTVxvETRgUHFrL4No3UxvcH8G1NepGhqaTT+GXN2kTamqasot0inS5hXeg1cMbFDt27zzo9p35lZVw== dependencies: - "@types/html-minifier-terser" "^5.0.0" - html-minifier-terser "^5.0.1" + "@types/html-minifier-terser" "^6.0.0" + html-minifier-terser "^6.0.2" lodash "^4.17.21" - pretty-error "^3.0.4" + pretty-error "^4.0.0" tapable "^2.0.0" htmlparser2@^3.9.1: @@ -6268,17 +5590,18 @@ http-proxy-agent@^4.0.1: agent-base "6" debug "4" -http-proxy-middleware@0.19.1: - version "0.19.1" - resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz#183c7dc4aa1479150306498c210cdaf96080a43a" - integrity sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q== +http-proxy-middleware@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-2.0.1.tgz#7ef3417a479fb7666a571e09966c66a39bd2c15f" + integrity sha512-cfaXRVoZxSed/BmkA7SwBVNI9Kj7HFltaE5rqYOub5kWzWZ+gofV2koVN1j2rMW7pEfSSlCHGJ31xmuyFyfLOg== dependencies: - http-proxy "^1.17.0" - is-glob "^4.0.0" - lodash "^4.17.11" - micromatch "^3.1.10" + "@types/http-proxy" "^1.17.5" + http-proxy "^1.18.1" + is-glob "^4.0.1" + is-plain-obj "^3.0.0" + micromatch "^4.0.2" -http-proxy@^1.17.0: +http-proxy@^1.18.1: version "1.18.1" resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== @@ -6354,12 +5677,12 @@ 
ignore@^5.1.4, ignore@^5.1.8: resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.8.tgz#f150a8b50a34289b33e22f5889abd4d8016f0e57" integrity sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw== -immer@8.0.1, immer@^9.0.6: +immer@^9.0.6: version "9.0.6" resolved "https://registry.yarnpkg.com/immer/-/immer-9.0.6.tgz#7a96bf2674d06c8143e327cbf73539388ddf1a73" integrity sha512-G95ivKpy+EvVAnAab4fVa4YGYn24J1SpEktnJX7JJ45Bd7xqME/SCplFzYFmTbrkwZbQ4xJK1xMTUYBkN6pWsQ== -import-fresh@^3.2.1, import-fresh@^3.2.2, import-fresh@^3.3.0: +import-fresh@^3.1.0, import-fresh@^3.2.1, import-fresh@^3.2.2, import-fresh@^3.3.0: version "3.3.0" resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== @@ -6372,14 +5695,6 @@ import-lazy@^2.1.0: resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-2.1.0.tgz#05698e3d45c88e8d7e9d92cb0584e77f096f3e43" integrity sha1-BWmOPUXIjo1+nZLLBYTnfwlvPkM= -import-local@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/import-local/-/import-local-2.0.0.tgz#55070be38a5993cf18ef6db7e961f5bee5c5a09d" - integrity sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ== - dependencies: - pkg-dir "^3.0.0" - resolve-cwd "^2.0.0" - imurmurhash@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" @@ -6395,11 +5710,6 @@ infer-owner@^1.0.4: resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467" integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== -infima@0.2.0-alpha.33: - version "0.2.0-alpha.33" - resolved "https://registry.yarnpkg.com/infima/-/infima-0.2.0-alpha.33.tgz#8d1a77ea916bedcebffa60dcd2dffbe382e09abf" - integrity sha512-iLZI8/vGTbbhbeFhlWv1zwvrqfNDLAayuEdqZqNqCyGuh0IW469dRIRm0FLZ98YyLikt2njzuKfy6xUrBWRXcg== - infima@0.2.0-alpha.34: version "0.2.0-alpha.34" resolved "https://registry.yarnpkg.com/infima/-/infima-0.2.0-alpha.34.tgz#14a900d79a4de2013e025ac95749a4592f16ef6e" @@ -6413,7 +5723,7 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@2, inherits@2.0.4, inherits@^2.0.0, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3: +inherits@2, inherits@2.0.4, inherits@^2.0.0, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.3: version "2.0.4" resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== @@ -6451,25 +5761,22 @@ inline-style-parser@0.1.1: resolved "https://registry.yarnpkg.com/inline-style-parser/-/inline-style-parser-0.1.1.tgz#ec8a3b429274e9c0a1f1c4ffa9453a7fef72cea1" integrity sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q== -internal-ip@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/internal-ip/-/internal-ip-4.3.0.tgz#845452baad9d2ca3b69c635a137acb9a0dad0907" - integrity sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg== +internal-ip@^6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/internal-ip/-/internal-ip-6.2.0.tgz#d5541e79716e406b74ac6b07b856ef18dc1621c1" + integrity 
sha512-D8WGsR6yDt8uq7vDMu7mjcR+yRMm3dW8yufyChmszWRjcSHuxLBkR3GdS2HZAjodsaGuCvXeEJpueisXJULghg== dependencies: - default-gateway "^4.2.0" - ipaddr.js "^1.9.0" + default-gateway "^6.0.0" + ipaddr.js "^1.9.1" + is-ip "^3.1.0" + p-event "^4.2.0" interpret@^1.0.0: version "1.4.0" resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.4.0.tgz#665ab8bc4da27a774a40584e812e3e0fa45b1a1e" integrity sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA== -ip-regex@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9" - integrity sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk= - -ip-regex@^4.1.0: +ip-regex@^4.0.0, ip-regex@^4.1.0: version "4.3.0" resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-4.3.0.tgz#687275ab0f57fa76978ff8f4dddc8a23d5990db5" integrity sha512-B9ZWJxHHOHUhUjCPrMpLD4xEq35bUTClHM1S6CBU5ixQnkZmwipwgc96vAd7AAGM9TGHvJR+Uss+/Ak6UphK+Q== @@ -6479,30 +5786,21 @@ ip@^1.1.0, ip@^1.1.5: resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo= -ipaddr.js@1.9.1, ipaddr.js@^1.9.0: +ipaddr.js@1.9.1, ipaddr.js@^1.9.1: version "1.9.1" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== +ipaddr.js@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-2.0.1.tgz#eca256a7a877e917aeb368b0a7497ddf42ef81c0" + integrity sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng== + is-absolute-url@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-3.0.3.tgz#96c6a22b6a23929b11ea0afb1836c36ad4a5d698" integrity sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q== -is-accessor-descriptor@^0.1.6: - version "0.1.6" - resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" - integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= - dependencies: - kind-of "^3.0.2" - -is-accessor-descriptor@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" - integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== - dependencies: - kind-of "^6.0.0" - is-alphabetical@1.0.4, is-alphabetical@^1.0.0: version "1.0.4" resolved "https://registry.yarnpkg.com/is-alphabetical/-/is-alphabetical-1.0.4.tgz#9e7d6b94916be22153745d184c298cbf986a686d" @@ -6533,13 +5831,6 @@ is-bigint@^1.0.1: resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.2.tgz#ffb381442503235ad245ea89e45b3dbff040ee5a" integrity sha512-0JV5+SOCQkIdzjBK9buARcV804Ddu7A0Qet6sHi3FimE9ne6m4BGQZfRn+NZiXbBk4F4XmHfDZIipLj9pX8dSA== -is-binary-path@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" - integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= - dependencies: - binary-extensions "^1.0.0" - is-binary-path@~2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" @@ -6554,11 +5845,6 @@ is-boolean-object@^1.1.0: dependencies: 
call-bind "^1.0.2" -is-buffer@^1.1.5: - version "1.1.6" - resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" - integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== - is-buffer@^2.0.0: version "2.0.5" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191" @@ -6609,20 +5895,6 @@ is-core-module@^2.5.0: dependencies: has "^1.0.3" -is-data-descriptor@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" - integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= - dependencies: - kind-of "^3.0.2" - -is-data-descriptor@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" - integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== - dependencies: - kind-of "^6.0.0" - is-date-object@^1.0.1: version "1.0.4" resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.4.tgz#550cfcc03afada05eea3dd30981c7b09551f73e5" @@ -6633,41 +5905,16 @@ is-decimal@^1.0.0: resolved "https://registry.yarnpkg.com/is-decimal/-/is-decimal-1.0.4.tgz#65a3a5958a1c5b63a706e1b333d7cd9f630d3fa5" integrity sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw== -is-descriptor@^0.1.0: - version "0.1.6" - resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" - integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== - dependencies: - is-accessor-descriptor "^0.1.6" - is-data-descriptor "^0.1.4" - kind-of "^5.0.0" - -is-descriptor@^1.0.0, is-descriptor@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" - integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== - dependencies: - is-accessor-descriptor "^1.0.0" - is-data-descriptor "^1.0.0" - kind-of "^6.0.2" - -is-docker@^2.0.0: +is-docker@^2.0.0, is-docker@^2.1.1: version "2.2.1" resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== -is-extendable@^0.1.0, is-extendable@^0.1.1: +is-extendable@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= -is-extendable@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" - integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== - dependencies: - is-plain-object "^2.0.4" - is-extglob@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" @@ -6710,6 +5957,13 @@ is-installed-globally@^0.4.0: global-dirs "^3.0.0" is-path-inside "^3.0.2" +is-ip@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/is-ip/-/is-ip-3.1.0.tgz#2ae5ddfafaf05cb8008a62093cf29734f657c5d8" + integrity 
sha512-35vd5necO7IitFPjd/YBeqwWnyDWbuLH9ZXQdMfDA8TEo7pv5X8yfrvVO3xbJbLUlERCMvf6X0hTUamQxCYJ9Q== + dependencies: + ip-regex "^4.0.0" + is-lambda@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/is-lambda/-/is-lambda-1.0.1.tgz#3d9877899e6a53efc0160504cde15f82e6f061d5" @@ -6730,13 +5984,6 @@ is-number-object@^1.0.4: resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.5.tgz#6edfaeed7950cff19afedce9fbfca9ee6dd289eb" integrity sha512-RU0lI/n95pMoUKu9v1BZP5MBcZuNSVJkMkAG2dJqC4z2GlkGUNeH68SuHuBKBD/XFe+LHZ+f9BKkLET60Niedw== -is-number@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" - integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= - dependencies: - kind-of "^3.0.2" - is-number@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" @@ -6752,25 +5999,11 @@ is-obj@^2.0.0: resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-2.0.0.tgz#473fb05d973705e3fd9620545018ca8e22ef4982" integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w== -is-path-cwd@^2.0.0, is-path-cwd@^2.2.0: +is-path-cwd@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-2.2.0.tgz#67d43b82664a7b5191fd9119127eb300048a9fdb" integrity sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ== -is-path-in-cwd@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz#bfe2dca26c69f397265a4009963602935a053acb" - integrity sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ== - dependencies: - is-path-inside "^2.1.0" - -is-path-inside@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-2.1.0.tgz#7c9810587d659a40d27bcdb4d5616eab059494b2" - integrity sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg== - dependencies: - path-is-inside "^1.0.2" - is-path-inside@^3.0.2: version "3.0.3" resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" @@ -6781,6 +6014,11 @@ is-plain-obj@^2.0.0: resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287" integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA== +is-plain-obj@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-3.0.0.tgz#af6f2ea14ac5a646183a5bbdb5baabbc156ad9d7" + integrity sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA== + is-plain-object@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" @@ -6811,16 +6049,11 @@ is-resolvable@^1.1.0: resolved "https://registry.yarnpkg.com/is-resolvable/-/is-resolvable-1.1.0.tgz#fb18f87ce1feb925169c9a407c19318a3206ed88" integrity sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg== -is-root@2.1.0, is-root@^2.1.0: +is-root@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-root/-/is-root-2.1.0.tgz#809e18129cf1129644302a4f8544035d51984a9c" integrity sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg== 
-is-stream@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" - integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= - is-stream@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.0.tgz#bde9c32680d6fae04129d6ac9d921ce7815f78e3" @@ -6848,7 +6081,7 @@ is-whitespace-character@^1.0.0: resolved "https://registry.yarnpkg.com/is-whitespace-character/-/is-whitespace-character-1.0.4.tgz#0858edd94a95594c7c9dd0b5c174ec6e45ee4aa7" integrity sha512-SDweEzfIZM0SJV0EUga669UTKlmL0Pq8Lno0QDQsPnvECB3IM2aP0gdx5TrU0A01MAPfViaZiI2V1QMZLaKK5w== -is-windows@^1.0.1, is-windows@^1.0.2: +is-windows@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== @@ -6858,12 +6091,7 @@ is-word-character@^1.0.0: resolved "https://registry.yarnpkg.com/is-word-character/-/is-word-character-1.0.4.tgz#ce0e73216f98599060592f62ff31354ddbeb0230" integrity sha512-5SMO8RVennx3nZrqtKwCGyyetPE9VDba5ugvKLaD4KopPG5kR4mQ7tNt/r7feL5yt5h3lpuBbIUmCOG2eSzXHA== -is-wsl@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" - integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= - -is-wsl@^2.1.1: +is-wsl@^2.1.1, is-wsl@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== @@ -6880,7 +6108,7 @@ isarray@0.0.1: resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" integrity sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8= -isarray@1.0.0, isarray@~1.0.0: +isarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= @@ -6890,14 +6118,7 @@ isexe@^2.0.0: resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= -isobject@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" - integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= - dependencies: - isarray "1.0.0" - -isobject@^3.0.0, isobject@^3.0.1: +isobject@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= @@ -6925,7 +6146,7 @@ jest-worker@^27.0.6: merge-stream "^2.0.0" supports-color "^8.0.0" -joi@^17.3.0, joi@^17.4.0: +joi@^17.4.0: version "17.4.1" resolved "https://registry.yarnpkg.com/joi/-/joi-17.4.1.tgz#15d2f23c8cbe4d1baded2dd190c58f8dbe11cca0" integrity sha512-gDPOwQ5sr+BUxXuPDGrC1pSNcVR/yGGcTI0aCnjYxZEa3za60K/iCQ+OFIkEHWZGVCUcUlXlFKvMmrlmxrG6UQ== @@ -7017,11 +6238,6 @@ json-stringify-safe@~5.0.1: resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= -json3@^3.3.3: - version "3.3.3" - resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.3.tgz#7fc10e375fc5ae42c4705a5cc0aa6f62be305b81" - integrity 
sha512-c7/8mbUsKigAbLkD5B010BK4D9LZm7A1pNItkEwiUZRpIN66exu/e7YQWysGun+TRKaJp8MhemM+VkfWv42aCA== - json5@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" @@ -7077,30 +6293,6 @@ keyv@^3.0.0: dependencies: json-buffer "3.0.0" -killable@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/killable/-/killable-1.0.1.tgz#4c8ce441187a061c7474fb87ca08e2a638194892" - integrity sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg== - -kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: - version "3.2.2" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" - integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= - dependencies: - is-buffer "^1.1.5" - -kind-of@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" - integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= - dependencies: - is-buffer "^1.1.5" - -kind-of@^5.0.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" - integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== - kind-of@^6.0.0, kind-of@^6.0.2: version "6.0.3" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" @@ -7258,16 +6450,7 @@ loader-runner@^4.2.0: resolved "https://registry.yarnpkg.com/loader-runner/-/loader-runner-4.2.0.tgz#d7022380d66d14c5fb1d496b89864ebcfd478384" integrity sha512-92+huvxMvYlMzMt0iIOukcwYBFpkYJdpl2xsZ7LrlayO7E8SOv+JJUEK17B/dJIHAOLMfh2dZZ/Y18WgmGtYNw== -loader-utils@2.0.0, loader-utils@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-2.0.0.tgz#e4cace5b816d425a166b5f097e10cd12b36064b0" - integrity sha512-rP4F0h2RaWSvPEkD7BLDFQnvSf+nK+wr3ESUjNTyAGobqrijmW92zc+SO6d4p4B1wh7+B/Jg1mkQe5NYUEHtHQ== - dependencies: - big.js "^5.2.2" - emojis-list "^3.0.0" - json5 "^2.1.2" - -loader-utils@^1.2.3, loader-utils@^1.4.0: +loader-utils@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.0.tgz#c579b5e34cb34b1a74edc6c1fb36bfa371d5a613" integrity sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA== @@ -7276,6 +6459,15 @@ loader-utils@^1.2.3, loader-utils@^1.4.0: emojis-list "^3.0.0" json5 "^1.0.1" +loader-utils@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-2.0.0.tgz#e4cace5b816d425a166b5f097e10cd12b36064b0" + integrity sha512-rP4F0h2RaWSvPEkD7BLDFQnvSf+nK+wr3ESUjNTyAGobqrijmW92zc+SO6d4p4B1wh7+B/Jg1mkQe5NYUEHtHQ== + dependencies: + big.js "^5.2.2" + emojis-list "^3.0.0" + json5 "^2.1.2" + locate-path@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" @@ -7400,16 +6592,11 @@ lodash.uniq@4.5.0, lodash.uniq@^4.5.0: resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M= -lodash@4.17.21, lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21: +lodash@4.17.21, lodash@^4.17.14, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" 
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== -loglevel@^1.6.8: - version "1.7.1" - resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.7.1.tgz#005fde2f5e6e47068f935ff28573e125ef72f197" - integrity sha512-Hesni4s5UkWkwCGJMQGAh71PaLUmKFM60dHvq0zi/vDhhrzuk+4GgNbTXJ12YYQJn6ZKBDNIjYcuQGKudvqrIw== - loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.2.0, loose-envify@^1.3.1, loose-envify@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" @@ -7485,18 +6672,6 @@ make-fetch-happen@^9.0.1, make-fetch-happen@^9.1.0: socks-proxy-agent "^6.0.0" ssri "^8.0.0" -map-cache@^0.2.2: - version "0.2.2" - resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" - integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= - -map-visit@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" - integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= - dependencies: - object-visit "^1.0.0" - markdown-escapes@^1.0.0: version "1.0.4" resolved "https://registry.yarnpkg.com/markdown-escapes/-/markdown-escapes-1.0.4.tgz#c95415ef451499d7602b91095f3c8e8975f78535" @@ -7555,19 +6730,18 @@ media-typer@0.3.0: resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= +memfs@^3.1.2, memfs@^3.2.2: + version "3.3.0" + resolved "https://registry.yarnpkg.com/memfs/-/memfs-3.3.0.tgz#4da2d1fc40a04b170a56622c7164c6be2c4cbef2" + integrity sha512-BEE62uMfKOavX3iG7GYX43QJ+hAeeWnwIAuJ/R6q96jaMtiLzhsxHJC8B1L7fK7Pt/vXDRwb3SG/yBpNGDPqzg== + dependencies: + fs-monkey "1.0.3" + memoize-one@^5.1.1: version "5.2.1" resolved "https://registry.yarnpkg.com/memoize-one/-/memoize-one-5.2.1.tgz#8337aa3c4335581839ec01c3d594090cebe8f00e" integrity sha512-zYiwtZUcYyXKo/np96AGZAckk+FWWsUdJ3cHGGmld7+AhvcWmQyGCYUh1hc4Q/pkOhb65dQR/pqCyK0cOaHz4Q== -memory-fs@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552" - integrity sha1-OpoguEYlI+RHz7x+i7gO1me/xVI= - dependencies: - errno "^0.1.3" - readable-stream "^2.0.1" - merge-descriptors@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" @@ -7588,31 +6762,7 @@ methods@~1.1.2: resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= -microevent.ts@~0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/microevent.ts/-/microevent.ts-0.1.1.tgz#70b09b83f43df5172d0205a63025bce0f7357fa0" - integrity sha512-jo1OfR4TaEwd5HOrt5+tAZ9mqT4jmpNAusXtyfNzqVm9uiSYFZlKM1wYL4oU7azZW/PxQW53wM0S6OR1JHNa2g== - -micromatch@^3.1.10, micromatch@^3.1.4: - version "3.1.10" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" - integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== - dependencies: - arr-diff "^4.0.0" - array-unique "^0.3.2" - braces "^2.3.1" - define-property "^2.0.2" - extend-shallow "^3.0.2" - extglob "^2.0.4" - fragment-cache "^0.2.1" - kind-of "^6.0.2" - nanomatch "^1.2.9" - object.pick "^1.3.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" 
- to-regex "^3.0.2" - -micromatch@^4.0.4: +micromatch@^4.0.2, micromatch@^4.0.4: version "4.0.4" resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.4.tgz#896d519dfe9db25fce94ceb7a500919bf881ebf9" integrity sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg== @@ -7630,6 +6780,11 @@ mime-db@1.50.0: resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.50.0.tgz#abd4ac94e98d3c0e185016c67ab45d5fde40c11f" integrity sha512-9tMZCDlYHqeERXEHO9f/hKfNXhre5dK2eE/krIvUjZbS2KPcqGDfNShIWS1uW9XOTKQKqK6qbeOci18rbfW77A== +mime-db@1.51.0: + version "1.51.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.51.0.tgz#d9ff62451859b18342d960850dc3cfb77e63fb0c" + integrity sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g== + mime-db@~1.33.0: version "1.33.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.33.0.tgz#a3492050a5cb9b63450541e39d9788d2272783db" @@ -7656,12 +6811,19 @@ mime-types@^2.1.27, mime-types@~2.1.17, mime-types@~2.1.24: dependencies: mime-db "1.48.0" +mime-types@^2.1.31: + version "2.1.34" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.34.tgz#5a712f9ec1503511a945803640fafe09d3793c24" + integrity sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A== + dependencies: + mime-db "1.51.0" + mime@1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== -mime@^2.3.1, mime@^2.4.4: +mime@^2.3.1: version "2.5.2" resolved "https://registry.yarnpkg.com/mime/-/mime-2.5.2.tgz#6e3dc6cc2b9510643830e5f19d5cb753da5eeabe" integrity sha512-tqkh47FzKeCPD2PUiPB6pkbMzsCasjxAfC62/Wap5qrUWcb+sFasXUC5I3gYM5iBM8v/Qpn4UK0x+j0iHyFPDg== @@ -7772,14 +6934,6 @@ minizlib@^2.0.0, minizlib@^2.1.1: minipass "^3.0.0" yallist "^4.0.0" -mixin-deep@^1.2.0: - version "1.3.2" - resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" - integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== - dependencies: - for-in "^1.0.2" - is-extendable "^1.0.1" - mkdirp-infer-owner@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/mkdirp-infer-owner/-/mkdirp-infer-owner-2.0.0.tgz#55d3b368e7d89065c38f32fd38e638f0ab61d316" @@ -7789,7 +6943,7 @@ mkdirp-infer-owner@^2.0.0: infer-owner "^1.0.4" mkdirp "^1.0.3" -mkdirp@^0.5.1, mkdirp@^0.5.5, mkdirp@~0.5.1: +mkdirp@^0.5.5, mkdirp@~0.5.1: version "0.5.5" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== @@ -7801,11 +6955,6 @@ mkdirp@^1.0.3, mkdirp@^1.0.4: resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== -module-alias@^2.2.2: - version "2.2.2" - resolved "https://registry.yarnpkg.com/module-alias/-/module-alias-2.2.2.tgz#151cdcecc24e25739ff0aa6e51e1c5716974c0e0" - integrity sha512-A/78XjoX2EmNvppVWEhM2oGk3x4lLxnkEA4jTbaK97QKSDjkIoOsKQlfylt/d3kKKi596Qy3NP5XrXJ6fZIC9Q== - ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" @@ -7844,11 
+6993,6 @@ mute-stream@~0.0.4: resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d" integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA== -nan@^2.12.1: - version "2.14.2" - resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.2.tgz#f5376400695168f4cc694ac9393d0c9585eeea19" - integrity sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ== - nanoid@^3.1.23: version "3.1.23" resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.1.23.tgz#f744086ce7c2bc47ee0a8472574d5c78e4183a81" @@ -7859,23 +7003,6 @@ nanoid@^3.1.28: resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.1.30.tgz#63f93cc548d2a113dc5dfbc63bfa09e2b9b64362" integrity sha512-zJpuPDwOv8D2zq2WRoMe1HsfZthVewpel9CAvTfc/2mBD1uUT/agc5f7GHGWXlYkFvi1mVxe4IjvP2HNrop7nQ== -nanomatch@^1.2.9: - version "1.2.13" - resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" - integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== - dependencies: - arr-diff "^4.0.0" - array-unique "^0.3.2" - define-property "^2.0.2" - extend-shallow "^3.0.2" - fragment-cache "^0.2.1" - is-windows "^1.0.2" - kind-of "^6.0.2" - object.pick "^1.3.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - negotiator@0.6.2, negotiator@^0.6.2: version "0.6.2" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" @@ -7886,11 +7013,6 @@ neo-async@^2.6.2: resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== -nice-try@^1.0.4: - version "1.0.5" - resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" - integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== - nmtree@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/nmtree/-/nmtree-1.0.6.tgz#953e057ad545e9e627f1275bd25fea4e92c1cf63" @@ -7977,13 +7099,6 @@ normalize-package-data@^3.0.0, normalize-package-data@^3.0.2: semver "^7.3.4" validate-npm-package-license "^3.0.1" -normalize-path@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" - integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= - dependencies: - remove-trailing-separator "^1.0.1" - normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" @@ -8088,13 +7203,6 @@ npm-registry-fetch@^11.0.0: minizlib "^2.0.0" npm-package-arg "^8.0.0" -npm-run-path@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" - integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= - dependencies: - path-key "^2.0.0" - npm-run-path@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" @@ -8225,19 +7333,10 @@ oauth-sign@~0.9.0: resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" integrity 
sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== -object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1: +object-assign@^4.1.0, object-assign@^4.1.1: version "4.1.1" - resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" - integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= - -object-copy@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" - integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= - dependencies: - copy-descriptor "^0.1.0" - define-property "^0.2.5" - kind-of "^3.0.3" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= object-inspect@^1.10.3: version "1.11.0" @@ -8257,13 +7356,6 @@ object-keys@^1.0.12, object-keys@^1.1.1: resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== -object-visit@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" - integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= - dependencies: - isobject "^3.0.0" - object.assign@^4.1.0, object.assign@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940" @@ -8283,13 +7375,6 @@ object.getownpropertydescriptors@^2.1.0: define-properties "^1.1.3" es-abstract "^1.18.0-next.2" -object.pick@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" - integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= - dependencies: - isobject "^3.0.1" - object.values@^1.1.0: version "1.1.4" resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.4.tgz#0d273762833e816b693a637d30073e7051535b30" @@ -8338,30 +7423,32 @@ open@^7.0.2: is-docker "^2.0.0" is-wsl "^2.1.1" +open@^8.0.9: + version "8.4.0" + resolved "https://registry.yarnpkg.com/open/-/open-8.4.0.tgz#345321ae18f8138f82565a910fdc6b39e8c244f8" + integrity sha512-XgFPPM+B28FtCCgSb9I+s9szOC1vZRSwgWsRUA5ylIxRTgKozqjOCrVOqGsYABPYK5qnfqClxZTFBa8PKt2v6Q== + dependencies: + define-lazy-prop "^2.0.0" + is-docker "^2.1.1" + is-wsl "^2.2.0" + opener@^1.5.2: version "1.5.2" resolved "https://registry.yarnpkg.com/opener/-/opener-1.5.2.tgz#5d37e1f35077b9dcac4301372271afdeb2a13598" integrity sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A== -opn@^5.5.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/opn/-/opn-5.5.0.tgz#fc7164fab56d235904c51c3b27da6758ca3b9bfc" - integrity sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA== - dependencies: - is-wsl "^1.1.0" - -original@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/original/-/original-1.0.2.tgz#e442a61cffe1c5fd20a65f3261c26663b303f25f" - integrity sha512-hyBVl6iqqUOJ8FqRe+l/gS8H+kKYjrEndd5Pm1MfBtsEKA038HkkdbAl/72EAXGyonD/PFsvmVG+EvcIpliMBg== - dependencies: - url-parse "^1.4.3" - p-cancelable@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-1.1.0.tgz#d078d15a3af409220c886f1d9a0ca2e441ab26cc" integrity 
sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw== +p-event@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/p-event/-/p-event-4.2.0.tgz#af4b049c8acd91ae81083ebd1e6f5cae2044c1b5" + integrity sha512-KXatOjCRXXkSePPb1Nbi0p0m+gQAwdlbhi4wQKJPI1HsMQS9g+Sqp2o+QHziPr7eYJyOZet836KoHEVM1mwOrQ== + dependencies: + p-timeout "^3.1.0" + p-finally@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" @@ -8416,11 +7503,6 @@ p-locate@^6.0.0: dependencies: p-limit "^4.0.0" -p-map@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" - integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== - p-map@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b" @@ -8428,12 +7510,20 @@ p-map@^4.0.0: dependencies: aggregate-error "^3.0.0" -p-retry@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-3.0.1.tgz#316b4c8893e2c8dc1cfa891f406c4b422bebf328" - integrity sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w== +p-retry@^4.5.0: + version "4.6.1" + resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-4.6.1.tgz#8fcddd5cdf7a67a0911a9cf2ef0e5df7f602316c" + integrity sha512-e2xXGNhZOZ0lfgR9kL34iGlU8N/KO0xZnQxVEwdeOvpqNDQfdnxIYizvWtK8RglUa3bGqI8g0R/BdfzLMxRkiA== dependencies: - retry "^0.12.0" + "@types/retry" "^0.12.0" + retry "^0.13.1" + +p-timeout@^3.1.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-3.2.0.tgz#c7e17abc971d2a7962ef83626b35d635acf23dfe" + integrity sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg== + dependencies: + p-finally "^1.0.0" p-try@^2.0.0: version "2.2.0" @@ -8500,7 +7590,7 @@ pacote@^12.0.0, pacote@^12.0.2: ssri "^8.0.1" tar "^6.1.0" -param-case@^3.0.3, param-case@^3.0.4: +param-case@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A== @@ -8546,11 +7636,6 @@ parse-json@^5.0.0: json-parse-even-better-errors "^2.3.0" lines-and-columns "^1.1.6" -parse-numeric-range@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/parse-numeric-range/-/parse-numeric-range-1.2.0.tgz#aa70b00f29624ed13e9f943e9461b306e386b0fa" - integrity sha512-1q2tXpAOplPxcl8vrIGPWz1dJxxfmdRkCFcpxxMBerDnGuuHalOWF/xj9L8Nn5XoTUoB/6F0CeQBp2fMgkOYFg== - parse-numeric-range@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/parse-numeric-range/-/parse-numeric-range-1.3.0.tgz#7c63b61190d61e4d53a1197f0c83c47bb670ffa3" @@ -8579,11 +7664,6 @@ pascal-case@^3.1.2: no-case "^3.0.4" tslib "^2.0.3" -pascalcase@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" - integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= - path-exists@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" @@ -8604,16 +7684,11 @@ path-is-absolute@^1.0.0: resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" integrity 
sha1-F0uSaHNVNP+8es5r9TpanhtcX18= -path-is-inside@1.0.2, path-is-inside@^1.0.2: +path-is-inside@1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM= -path-key@^2.0.0, path-key@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" - integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= - path-key@^3.0.0, path-key@^3.1.0: version "3.1.1" resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" @@ -8666,35 +7741,11 @@ picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.3: resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.0.tgz#f1f061de8f6a4bf022892e2d128234fb98302972" integrity sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw== -pify@^2.0.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" - integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw= - pify@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== -pinkie-promise@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" - integrity sha1-ITXW36ejWMBprJsXh3YogihFD/o= - dependencies: - pinkie "^2.0.0" - -pinkie@^2.0.0: - version "2.0.4" - resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" - integrity sha1-clVrgM+g1IqXToDnckjoDtT3+HA= - -pkg-dir@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3" - integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== - dependencies: - find-up "^3.0.0" - pkg-dir@^4.1.0: version "4.2.0" resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" @@ -8709,14 +7760,14 @@ pkg-dir@^6.0.1: dependencies: find-up "^6.1.0" -pkg-up@3.1.0: +pkg-up@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-3.1.0.tgz#100ec235cc150e4fd42519412596a28512a0def5" integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA== dependencies: find-up "^3.0.0" -portfinder@^1.0.26: +portfinder@^1.0.28: version "1.0.28" resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.28.tgz#67c4622852bd5374dd1dd900f779f53462fac778" integrity sha512-Se+2isanIcEqf2XMHjyUKskczxbPH7dQnlMjXX6+dybayyHvAf/TCgyMRlzf/B6QDhAEFOGes0pzRo3by4AbMA== @@ -8725,11 +7776,6 @@ portfinder@^1.0.26: debug "^3.1.1" mkdirp "^0.5.5" -posix-character-classes@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" - integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= - postcss-calc@^8.0.0: version "8.0.0" resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-8.0.0.tgz#a05b87aacd132740a5db09462a3612453e5df90a" @@ -8782,15 +7828,6 @@ postcss-discard-unused@^5.0.1: dependencies: postcss-selector-parser "^6.0.5" -postcss-loader@^5.3.0: - version "5.3.0" - resolved 
"https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-5.3.0.tgz#1657f869e48d4fdb018a40771c235e499ee26244" - integrity sha512-/+Z1RAmssdiSLgIZwnJHwBMnlABPgF7giYzTN2NOfr9D21IJZ4mQC1R2miwp80zno9M4zMD/umGI8cR+2EL5zw== - dependencies: - cosmiconfig "^7.0.0" - klona "^2.0.4" - semver "^7.3.4" - postcss-loader@^6.1.1: version "6.2.0" resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-6.2.0.tgz#714370a3f567141cf4cadcdf9575f5234d186bc5" @@ -9006,13 +8043,6 @@ postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector cssesc "^3.0.0" util-deprecate "^1.0.2" -postcss-sort-media-queries@^3.10.11: - version "3.11.12" - resolved "https://registry.yarnpkg.com/postcss-sort-media-queries/-/postcss-sort-media-queries-3.11.12.tgz#bfc449fadedfe2765ca4566c30b24694635ad182" - integrity sha512-PNhEOWR/btZ0bNNRqqdW4TWxBPQ1mu2I6/Zpco80vBUDSyEjtduUAorY0Vm68rvDlGea3+sgEnQ36iQ1A/gG8Q== - dependencies: - sort-css-media-queries "1.5.4" - postcss-sort-media-queries@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/postcss-sort-media-queries/-/postcss-sort-media-queries-4.1.0.tgz#c2ca1374189259049dc039a9e95ba72a31b57bc7" @@ -9070,13 +8100,13 @@ prepend-http@^2.0.0: resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= -pretty-error@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-3.0.4.tgz#94b1d54f76c1ed95b9c604b9de2194838e5b574e" - integrity sha512-ytLFLfv1So4AO1UkoBF6GXQgJRaKbiSiGFICaOPNwQ3CMvBvXpLRubeQWyPGnsbV/t9ml9qto6IeCsho0aEvwQ== +pretty-error@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/pretty-error/-/pretty-error-4.0.0.tgz#90a703f46dd7234adb46d0f84823e9d1cb8f10d6" + integrity sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw== dependencies: lodash "^4.17.20" - renderkid "^2.0.6" + renderkid "^3.0.0" pretty-time@^1.1.0: version "1.1.0" @@ -9133,10 +8163,10 @@ promise@^7.1.1: dependencies: asap "~2.0.3" -prompts@2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.0.tgz#4aa5de0723a231d1ee9121c40fdf663df73f61d7" - integrity sha512-awZAKrk3vN6CroQukBL+R9051a4R3zCZBlJm/HBfrSZ8iTpYix3VX1vU4mveiLpiwmOJT4wokTF9m6HUk4KqWQ== +prompts@^2.4.0: + version "2.4.2" + resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== dependencies: kleur "^3.0.3" sisteransi "^1.0.5" @@ -9156,7 +8186,7 @@ promzard@^0.3.0: dependencies: read "1" -prop-types@^15.5.0, prop-types@^15.6.2, prop-types@^15.7.2: +prop-types@^15.6.2, prop-types@^15.7.2: version "15.7.2" resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.7.2.tgz#52c41e75b8c87e72b9d9360e0206b99dcbffa6c5" integrity sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ== @@ -9180,11 +8210,6 @@ proxy-addr@~2.0.5: forwarded "0.2.0" ipaddr.js "1.9.1" -prr@~1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" - integrity sha1-0/wRS6BplaRexok/SEzrHXj19HY= - pseudomap@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" @@ -9255,11 +8280,6 @@ querystring@0.2.0: resolved 
"https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= -querystringify@^2.1.1: - version "2.2.0" - resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" - integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== - queue-microtask@^1.2.2: version "1.2.3" resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" @@ -9312,35 +8332,35 @@ react-base16-styling@^0.6.0: lodash.flow "^3.3.0" pure-color "^1.2.0" -react-dev-utils@^11.0.1: - version "11.0.4" - resolved "https://registry.yarnpkg.com/react-dev-utils/-/react-dev-utils-11.0.4.tgz#a7ccb60257a1ca2e0efe7a83e38e6700d17aa37a" - integrity sha512-dx0LvIGHcOPtKbeiSUM4jqpBl3TcY7CDjZdfOIcKeznE7BWr9dg0iPG90G5yfVQ+p/rGNMXdbfStvzQZEVEi4A== - dependencies: - "@babel/code-frame" "7.10.4" - address "1.1.2" - browserslist "4.14.2" - chalk "2.4.2" - cross-spawn "7.0.3" - detect-port-alt "1.1.6" - escape-string-regexp "2.0.0" - filesize "6.1.0" - find-up "4.1.0" - fork-ts-checker-webpack-plugin "4.1.6" - global-modules "2.0.0" - globby "11.0.1" - gzip-size "5.1.1" - immer "8.0.1" - is-root "2.1.0" - loader-utils "2.0.0" +react-dev-utils@12.0.0-next.47: + version "12.0.0-next.47" + resolved "https://registry.yarnpkg.com/react-dev-utils/-/react-dev-utils-12.0.0-next.47.tgz#e55c31a05eb30cfd69ca516e8b87d61724e880fb" + integrity sha512-PsE71vP15TZMmp/RZKOJC4fYD5Pvt0+wCoyG3QHclto0d4FyIJI78xGRICOOThZFROqgXYlZP6ddmeybm+jO4w== + dependencies: + "@babel/code-frame" "^7.10.4" + address "^1.1.2" + browserslist "^4.16.5" + chalk "^2.4.2" + cross-spawn "^7.0.3" + detect-port-alt "^1.1.6" + escape-string-regexp "^2.0.0" + filesize "^6.1.0" + find-up "^4.1.0" + fork-ts-checker-webpack-plugin "^6.0.5" + global-modules "^2.0.0" + globby "^11.0.1" + gzip-size "^5.1.1" + immer "^9.0.6" + is-root "^2.1.0" + loader-utils "^2.0.0" open "^7.0.2" - pkg-up "3.1.0" - prompts "2.4.0" - react-error-overlay "^6.0.9" - recursive-readdir "2.2.2" - shell-quote "1.7.2" - strip-ansi "6.0.0" - text-table "0.2.0" + pkg-up "^3.1.0" + prompts "^2.4.0" + react-error-overlay "7.0.0-next.54+1465357b" + recursive-readdir "^2.2.2" + shell-quote "^1.7.2" + strip-ansi "^6.0.0" + text-table "^0.2.0" react-dom@^16.8.4: version "16.14.0" @@ -9352,6 +8372,11 @@ react-dom@^16.8.4: prop-types "^15.6.2" scheduler "^0.19.1" +react-error-overlay@7.0.0-next.54+1465357b: + version "7.0.0-next.54" + resolved "https://registry.yarnpkg.com/react-error-overlay/-/react-error-overlay-7.0.0-next.54.tgz#c1eb5ab86aee15e9552e6d97897b08f2bd06d140" + integrity sha512-b96CiTnZahXPDNH9MKplvt5+jD+BkxDw7q5R3jnkUXze/ux1pLv32BBZmlj0OfCUeMqyz4sAmF+0ccJGVMlpXw== + react-error-overlay@^6.0.9: version "6.0.9" resolved "https://registry.yarnpkg.com/react-error-overlay/-/react-error-overlay-6.0.9.tgz#3c743010c9359608c375ecd6bc76f35d93995b0a" @@ -9399,13 +8424,6 @@ react-loadable-ssr-addon-v5-slorber@^1.0.1: dependencies: "@babel/runtime" "^7.10.3" -react-loadable@^5.5.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/react-loadable/-/react-loadable-5.5.0.tgz#582251679d3da86c32aae2c8e689c59f1196d8c4" - integrity sha512-C8Aui0ZpMd4KokxRdVAm2bQtI03k2RMRNzOB+IipV3yxFTSVICv7WoUr5L9ALB5BmKO1iHgZtWM8EvYG83otdg== - dependencies: - prop-types "^15.5.0" - react-player@^2.7.2: version "2.9.0" resolved 
"https://registry.yarnpkg.com/react-player/-/react-player-2.9.0.tgz#ef7fe7073434087565f00ff219824e1e02c4b046" @@ -9506,7 +8524,7 @@ read@1, read@^1.0.7, read@~1.0.1, read@~1.0.7: dependencies: mute-stream "~0.0.4" -readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6: +readable-stream@^2.0.1, readable-stream@^2.0.6: version "2.3.7" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== @@ -9538,15 +8556,6 @@ readdir-scoped-modules@^1.1.0: graceful-fs "^4.1.2" once "^1.3.0" -readdirp@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" - integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== - dependencies: - graceful-fs "^4.1.11" - micromatch "^3.1.10" - readable-stream "^2.0.2" - readdirp@~3.6.0: version "3.6.0" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" @@ -9554,7 +8563,7 @@ readdirp@~3.6.0: dependencies: picomatch "^2.2.1" -reading-time@^1.2.0, reading-time@^1.3.0: +reading-time@^1.2.0: version "1.3.0" resolved "https://registry.yarnpkg.com/reading-time/-/reading-time-1.3.0.tgz#d13e74431589a4a9038669f24d5acbc08bbb015d" integrity sha512-RJ8J5O6UvrclfZpcPSPuKusrdRfoY7uXXoYOOdeswZNtSkQaewT3919yz6RyloDBR+iwcUyz5zGOUjhgvfuv3g== @@ -9571,7 +8580,7 @@ rechoir@^0.6.2: dependencies: resolve "^1.1.6" -recursive-readdir@2.2.2: +recursive-readdir@^2.2.2: version "2.2.2" resolved "https://registry.yarnpkg.com/recursive-readdir/-/recursive-readdir-2.2.2.tgz#9946fb3274e1628de6e36b2f6714953b4845094f" integrity sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg== @@ -9609,14 +8618,6 @@ regenerator-transform@^0.14.2: dependencies: "@babel/runtime" "^7.8.4" -regex-not@^1.0.0, regex-not@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" - integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== - dependencies: - extend-shallow "^3.0.2" - safe-regex "^1.1.0" - regexp.prototype.flags@^1.2.0: version "1.3.1" resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.3.1.tgz#7ef352ae8d159e758c0eadca6f8fcb4eef07be26" @@ -9776,28 +8777,18 @@ remark-squeeze-paragraphs@4.0.0: dependencies: mdast-squeeze-paragraphs "^4.0.0" -remove-trailing-separator@^1.0.1: - version "1.1.0" - resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" - integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= - -renderkid@^2.0.6: - version "2.0.7" - resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-2.0.7.tgz#464f276a6bdcee606f4a15993f9b29fc74ca8609" - integrity sha512-oCcFyxaMrKsKcTY59qnCAtmDVSLfPbrv6A3tVbPdFMMrv5jaK10V6m40cKsoPNhAqN6rmHW9sswW4o3ruSrwUQ== +renderkid@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/renderkid/-/renderkid-3.0.0.tgz#5fd823e4d6951d37358ecc9a58b1f06836b6268a" + integrity sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg== dependencies: css-select "^4.1.3" dom-converter "^0.2.0" htmlparser2 "^6.1.0" lodash "^4.17.21" - strip-ansi "^3.0.1" - 
-repeat-element@^1.1.2: - version "1.1.4" - resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.4.tgz#be681520847ab58c7568ac75fbfad28ed42d39e9" - integrity sha512-LFiNfRcSu7KK3evMyYOuCzv3L10TW7yC1G2/+StMjK8Y6Vqd2MG7r/Qjw4ghtuCOjFvlnms/iMmLqpvW/ES/WQ== + strip-ansi "^6.0.1" -repeat-string@^1.5.4, repeat-string@^1.6.1: +repeat-string@^1.5.4: version "1.6.1" resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= @@ -9828,38 +8819,16 @@ request@^2.88.2: tunnel-agent "^0.6.0" uuid "^3.3.2" -require-directory@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" - integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= - "require-like@>= 0.1.1": version "0.1.2" resolved "https://registry.yarnpkg.com/require-like/-/require-like-0.1.2.tgz#ad6f30c13becd797010c468afa775c0c0a6b47fa" integrity sha1-rW8wwTvs15cBDEaK+ndcDAprR/o= -require-main-filename@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" - integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== - requires-port@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8= -resolve-cwd@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-2.0.0.tgz#00a9f7387556e27038eae232caa372a6a59b665a" - integrity sha1-AKn3OHVW4nA46uIyyqNypqWbZlo= - dependencies: - resolve-from "^3.0.0" - -resolve-from@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" - integrity sha1-six699nWiBvItuZTM17rywoYh0g= - resolve-from@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" @@ -9870,11 +8839,6 @@ resolve-pathname@^3.0.0: resolved "https://registry.yarnpkg.com/resolve-pathname/-/resolve-pathname-3.0.0.tgz#99d02224d3cf263689becbb393bc560313025dcd" integrity sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng== -resolve-url@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" - integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= - resolve@^1.1.6, resolve@^1.14.2, resolve@^1.3.2: version "1.20.0" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" @@ -9890,16 +8854,16 @@ responselike@^1.0.2: dependencies: lowercase-keys "^1.0.0" -ret@~0.1.10: - version "0.1.15" - resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" - integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== - retry@^0.12.0: version "0.12.0" resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" integrity sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs= +retry@^0.13.1: + version "0.13.1" + resolved "https://registry.yarnpkg.com/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" + integrity 
sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== + reusify@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" @@ -9915,13 +8879,6 @@ rgba-regex@^1.0.0: resolved "https://registry.yarnpkg.com/rgba-regex/-/rgba-regex-1.0.0.tgz#43374e2e2ca0968b0ef1523460b7d730ff22eeb3" integrity sha1-QzdOLiyglosO8VI0YLfXMP8i7rM= -rimraf@^2.6.3: - version "2.7.1" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" - integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== - dependencies: - glob "^7.1.3" - rimraf@^3.0.0, rimraf@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" @@ -9929,27 +8886,11 @@ rimraf@^3.0.0, rimraf@^3.0.2: dependencies: glob "^7.1.3" -rtl-detect@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/rtl-detect/-/rtl-detect-1.0.3.tgz#42145b9a4f9cf0b94c4542aba90d57f0d18559bf" - integrity sha512-2sMcZO60tL9YDEFe24gqddg3hJ+xSmJFN8IExcQUxeHxQzydQrN6GHPL+yAWgzItXSI7es53hcZC9pJneuZDKA== - rtl-detect@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/rtl-detect/-/rtl-detect-1.0.4.tgz#40ae0ea7302a150b96bc75af7d749607392ecac6" integrity sha512-EBR4I2VDSSYr7PkBmFy04uhycIpDKp+21p/jARYXlCSjQksTBQcJ0HFUPOO79EPPH5JS6VAhiIQbycf0O3JAxQ== -rtlcss@^3.1.2: - version "3.3.0" - resolved "https://registry.yarnpkg.com/rtlcss/-/rtlcss-3.3.0.tgz#fa9d29b071a863fe959704da6a93de3076aeeca4" - integrity sha512-XZ2KEatH2nU5yPlts1Wu8SGIuZ3ndN025HQX5MqtUCUiOn5WkCDbcpJ2VJWjpuFmM2cUTQ1xtH21fhMCSseI5A== - dependencies: - chalk "^4.1.0" - find-up "^5.0.0" - mkdirp "^1.0.4" - postcss "^8.2.4" - strip-json-comments "^3.1.1" - rtlcss@^3.3.0: version "3.4.0" resolved "https://registry.yarnpkg.com/rtlcss/-/rtlcss-3.4.0.tgz#81c5cab77050ea0d880147b8a78943330d4d1813" @@ -9968,13 +8909,6 @@ run-parallel@^1.1.9: dependencies: queue-microtask "^1.2.2" -rxjs@^6.6.3: - version "6.6.7" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.7.tgz#90ac018acabf491bf65044235d5863c4dab804c9" - integrity sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ== - dependencies: - tslib "^1.9.0" - rxjs@^7.1.0: version "7.4.0" resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.4.0.tgz#a12a44d7eebf016f5ff2441b87f28c9a51cebc68" @@ -9992,13 +8926,6 @@ safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.2, resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== -safe-regex@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" - integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= - dependencies: - ret "~0.1.10" - "safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" @@ -10017,14 +8944,14 @@ scheduler@^0.19.1: loose-envify "^1.1.0" object-assign "^4.1.1" -schema-utils@^1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/schema-utils/-/schema-utils-1.0.0.tgz#0b79a93204d7b600d4b2850d1f66c2a34951c770" - integrity sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g== +schema-utils@2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.0.tgz#17151f76d8eae67fbbf77960c33c676ad9f4efc7" + integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== dependencies: - ajv "^6.1.0" - ajv-errors "^1.0.0" - ajv-keywords "^3.1.0" + "@types/json-schema" "^7.0.4" + ajv "^6.12.2" + ajv-keywords "^3.4.1" schema-utils@^2.6.5: version "2.7.1" @@ -10066,7 +8993,7 @@ select-hose@^2.0.0: resolved "https://registry.yarnpkg.com/select-hose/-/select-hose-2.0.0.tgz#625d8658f865af43ec962bfc376a37359a4994ca" integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= -selfsigned@^1.10.8: +selfsigned@^1.10.11: version "1.10.11" resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.11.tgz#24929cd906fe0f44b6d01fb23999a739537acbe9" integrity sha512-aVmbPOfViZqOZPgRBT0+3u4yZFHpmnIghLMlAcb5/xhp5ZtB/RVnKhz5vl2M32CLXAqR4kha9zfhNg0Lf/sxKA== @@ -10085,7 +9012,7 @@ semver@7.0.0: resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e" integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A== -semver@^5.4.1, semver@^5.5.0, semver@^5.6.0: +semver@^5.4.1: version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== @@ -10170,7 +9097,7 @@ set-blocking@^2.0.0, set-blocking@~2.0.0: resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= -set-value@^2.0.0, set-value@^2.0.1, set-value@^4.0.1: +set-value@^4.0.1: version "4.1.0" resolved "https://registry.yarnpkg.com/set-value/-/set-value-4.1.0.tgz#aa433662d87081b75ad88a4743bd450f044e7d09" integrity sha512-zTEg4HL0RwVrqcWs3ztF+x1vkxfm0lP+MQQFPiMJTKVceBwEV0A569Ou8l9IYQG8jOZdMVI1hGsc0tmeD2o/Lw== @@ -10224,10 +9151,10 @@ shebang-regex@^3.0.0: resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== -shell-quote@1.7.2: - version "1.7.2" - resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.7.2.tgz#67a7d02c76c9da24f99d20808fcaded0e0e04be2" - integrity sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg== +shell-quote@^1.7.2: + version "1.7.3" + resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.7.3.tgz#aa40edac170445b9a431e17bb62c0b881b9c4123" + integrity sha512-Vpfqwm4EnqGdlsBFNmHhxhElJYrdfcxPThu+ryKS5J8L/fhAwLazFZtq+S+TWZ9ANj2piSQLGj6NQg+lKPmxrw== shelljs@^0.8.4: version "0.8.4" @@ -10282,48 +9209,6 @@ smart-buffer@^4.1.0: resolved "https://registry.yarnpkg.com/smart-buffer/-/smart-buffer-4.2.0.tgz#6e1d71fa4f18c05f7d0ff216dd16a481d0e8d9ae" integrity sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg== -snapdragon-node@^2.0.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" - integrity 
sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== - dependencies: - define-property "^1.0.0" - isobject "^3.0.0" - snapdragon-util "^3.0.1" - -snapdragon-util@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" - integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== - dependencies: - kind-of "^3.2.0" - -snapdragon@^0.8.1: - version "0.8.2" - resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" - integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== - dependencies: - base "^0.11.1" - debug "^2.2.0" - define-property "^0.2.5" - extend-shallow "^2.0.1" - map-cache "^0.2.2" - source-map "^0.5.6" - source-map-resolve "^0.5.0" - use "^3.1.0" - -sockjs-client@^1.5.0: - version "1.5.1" - resolved "https://registry.yarnpkg.com/sockjs-client/-/sockjs-client-1.5.1.tgz#256908f6d5adfb94dabbdbd02c66362cca0f9ea6" - integrity sha512-VnVAb663fosipI/m6pqRXakEOw7nvd7TUgdr3PlR/8V2I95QIdwT8L4nMxhyU8SmDBHYXU1TOElaKOmKLfYzeQ== - dependencies: - debug "^3.2.6" - eventsource "^1.0.7" - faye-websocket "^0.11.3" - inherits "^2.0.4" - json3 "^3.3.3" - url-parse "^1.5.1" - sockjs@^0.3.21: version "0.3.21" resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.21.tgz#b34ffb98e796930b60a0cfa11904d6a339a7d417" @@ -10350,11 +9235,6 @@ socks@^2.6.1: ip "^1.1.5" smart-buffer "^4.1.0" -sort-css-media-queries@1.5.4: - version "1.5.4" - resolved "https://registry.yarnpkg.com/sort-css-media-queries/-/sort-css-media-queries-1.5.4.tgz#24182b12002a13d01ba943ddf74f5098d7c244ce" - integrity sha512-YP5W/h4Sid/YP7Lp87ejJ5jP13/Mtqt2vx33XyhO+IAugKlufRPbOrPlIiEUuxmpNBSBd3EeeQpFhdu3RfI2Ag== - sort-css-media-queries@2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/sort-css-media-queries/-/sort-css-media-queries-2.0.4.tgz#b2badfa519cb4a938acbc6d3aaa913d4949dc908" @@ -10365,7 +9245,7 @@ sort-object-keys@^1.1.3: resolved "https://registry.yarnpkg.com/sort-object-keys/-/sort-object-keys-1.1.3.tgz#bff833fe85cab147b34742e45863453c1e190b45" integrity sha512-855pvK+VkU7PaKYPc+Jjnmt4EzejQHyhhF33q31qG8x7maDzkeFhAAThdCYay11CISO+qAMwjOBP+fPZe0IPyg== -source-list-map@^2.0.0, source-list-map@^2.0.1: +source-list-map@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" integrity sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw== @@ -10375,18 +9255,7 @@ source-map-js@^0.6.2: resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-0.6.2.tgz#0bb5de631b41cfbda6cfba8bd05a80efdfd2385e" integrity sha512-/3GptzWzu0+0MBQFrDKzw/DvvMTUORvgY6k6jd/VS6iCR4RDTKWH6v6WPwQoUO8667uQEf9Oe38DxAYWY5F/Ug== -source-map-resolve@^0.5.0: - version "0.5.3" - resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a" - integrity sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw== - dependencies: - atob "^2.1.2" - decode-uri-component "^0.2.0" - resolve-url "^0.2.1" - source-map-url "^0.4.0" - urix "^0.1.0" - -source-map-support@~0.5.12, source-map-support@~0.5.19: +source-map-support@~0.5.19: version "0.5.19" resolved 
"https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.19.tgz#a98b62f86dcaf4f67399648c085291ab9e8fed61" integrity sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw== @@ -10402,12 +9271,7 @@ source-map-support@~0.5.20: buffer-from "^1.0.0" source-map "^0.6.0" -source-map-url@^0.4.0: - version "0.4.1" - resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.1.tgz#0af66605a745a5a2f91cf1bbf8a7afbc283dec56" - integrity sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw== - -source-map@^0.5.0, source-map@^0.5.6: +source-map@^0.5.0: version "0.5.7" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= @@ -10481,13 +9345,6 @@ spdy@^4.0.2: select-hose "^2.0.0" spdy-transport "^3.0.0" -split-string@^3.0.2: - version "3.1.0" - resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" - integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== - dependencies: - extend-shallow "^3.0.0" - sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" @@ -10525,14 +9382,6 @@ state-toggle@^1.0.0: resolved "https://registry.yarnpkg.com/state-toggle/-/state-toggle-1.0.3.tgz#e123b16a88e143139b09c6852221bc9815917dfe" integrity sha512-d/5Z4/2iiCnHw6Xzghyhb+GcmF89bxwgXG60wjIiZaxnymbyOmI8Hk4VqHXiVVp6u2ysaskFfXg3ekCj4WNftQ== -static-extend@^0.1.1: - version "0.1.2" - resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" - integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= - dependencies: - define-property "^0.2.5" - object-copy "^0.1.0" - "statuses@>= 1.4.0 < 2", "statuses@>= 1.5.0 < 2", statuses@~1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" @@ -10571,7 +9420,7 @@ string-width@^1.0.1: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.1" -string-width@^3.0.0, string-width@^3.1.0: +string-width@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== @@ -10633,13 +9482,6 @@ stringify-package@^1.0.1: resolved "https://registry.yarnpkg.com/stringify-package/-/stringify-package-1.0.1.tgz#e5aa3643e7f74d0f28628b72f3dad5cecfc3ba85" integrity sha512-sa4DUQsYciMP1xhKWGuFM04fB0LG/9DlluZoSVywUMRNvzid6XucHK0/90xGxRoHrAaROrcHK1aPKaijCtSrhg== -strip-ansi@6.0.0, strip-ansi@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.0.tgz#0b1571dd7669ccd4f3e06e14ef1eed26225ae532" - integrity sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w== - dependencies: - ansi-regex "^5.0.0" - strip-ansi@^3.0.0, strip-ansi@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" @@ -10654,13 +9496,20 @@ strip-ansi@^3.0.0, strip-ansi@^3.0.1: dependencies: ansi-regex "^3.0.0" -strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: +strip-ansi@^5.1.0: version "5.2.0" resolved 
"https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== dependencies: ansi-regex "^4.1.0" +strip-ansi@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.0.tgz#0b1571dd7669ccd4f3e06e14ef1eed26225ae532" + integrity sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w== + dependencies: + ansi-regex "^5.0.0" + strip-ansi@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" @@ -10668,16 +9517,18 @@ strip-ansi@^6.0.1: dependencies: ansi-regex "^5.0.1" +strip-ansi@^7.0.0: + version "7.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.0.1.tgz#61740a08ce36b61e50e65653f07060d000975fb2" + integrity sha512-cXNxvT8dFNRVfhVME3JAe98mkXDYN2O1l7jmcwMnOslDeESg1rF/OZMtK0nRAhiari1unG5cD4jG3rapUAkLbw== + dependencies: + ansi-regex "^6.0.1" + strip-bom-string@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/strip-bom-string/-/strip-bom-string-1.0.0.tgz#e5211e9224369fbb81d633a2f00044dc8cedad92" integrity sha1-5SEekiQ2n7uB1jOi8ABE3IztrZI= -strip-eof@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" - integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= - strip-final-newline@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" @@ -10715,13 +9566,6 @@ supports-color@^5.3.0: dependencies: has-flag "^3.0.0" -supports-color@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" - integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== - dependencies: - has-flag "^3.0.0" - supports-color@^7.1.0: version "7.2.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" @@ -10834,15 +9678,6 @@ terser-webpack-plugin@^5.2.4: source-map "^0.6.1" terser "^5.7.2" -terser@^4.6.3: - version "4.8.0" - resolved "https://registry.yarnpkg.com/terser/-/terser-4.8.0.tgz#63056343d7c70bb29f3af665865a46fe03a0df17" - integrity sha512-EAPipTNeWsb/3wLPeup1tVPaXfIaU68xMnVdPafIL1TV05OhASArYyIfFvnvJCNrR2NIOvDVNNTFRa+Re2MWyw== - dependencies: - commander "^2.20.0" - source-map "~0.6.1" - source-map-support "~0.5.12" - terser@^5.7.0: version "5.7.1" resolved "https://registry.yarnpkg.com/terser/-/terser-5.7.1.tgz#2dc7a61009b66bb638305cb2a824763b116bf784" @@ -10861,7 +9696,7 @@ terser@^5.7.2: source-map "~0.7.2" source-map-support "~0.5.20" -text-table@0.2.0, text-table@^0.2.0, text-table@~0.2.0: +text-table@^0.2.0, text-table@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= @@ -10896,26 +9731,11 @@ to-fast-properties@^2.0.0: resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= -to-object-path@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" - 
integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= - dependencies: - kind-of "^3.0.2" - to-readable-stream@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/to-readable-stream/-/to-readable-stream-1.0.0.tgz#ce0aa0c2f3df6adf852efb404a783e77c0475771" integrity sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q== -to-regex-range@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" - integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= - dependencies: - is-number "^3.0.0" - repeat-string "^1.6.1" - to-regex-range@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" @@ -10923,16 +9743,6 @@ to-regex-range@^5.0.1: dependencies: is-number "^7.0.0" -to-regex@^3.0.1, to-regex@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" - integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== - dependencies: - define-property "^2.0.2" - extend-shallow "^3.0.2" - regex-not "^1.0.2" - safe-regex "^1.1.0" - toidentifier@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" @@ -10976,12 +9786,7 @@ ts-essentials@^2.0.3: resolved "https://registry.yarnpkg.com/ts-essentials/-/ts-essentials-2.0.12.tgz#c9303f3d74f75fa7528c3d49b80e089ab09d8745" integrity sha512-3IVX4nI6B5cc31/GFFE+i8ey/N2eA0CZDbo6n0yrz0zDX8ZJ8djmU1p+XRz7G3is0F3bB3pu2pAroFdAWQKU3w== -tslib@^1.9.0: - version "1.14.1" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" - integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== - -tslib@^2.0.3, tslib@^2.1.0, tslib@^2.2.0: +tslib@^2.0.3: version "2.3.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.0.tgz#803b8cdab3e12ba581a4ca41c8839bbb0dacb09e" integrity sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg== @@ -11125,16 +9930,6 @@ unified@^8.4.2: trough "^1.0.0" vfile "^4.0.0" -union-value@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" - integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== - dependencies: - arr-union "^3.1.0" - get-value "^2.0.6" - is-extendable "^0.1.1" - set-value "^2.0.1" - uniqs@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/uniqs/-/uniqs-2.0.0.tgz#ffede4b36b25290696e6e165d4a59edb998e6b02" @@ -11241,19 +10036,6 @@ unquote@~1.1.1: resolved "https://registry.yarnpkg.com/unquote/-/unquote-1.1.1.tgz#8fded7324ec6e88a0ff8b905e7c098cdc086d544" integrity sha1-j97XMk7G6IoP+LkF58CYzcCG1UQ= -unset-value@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" - integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= - dependencies: - has-value "^0.3.1" - isobject "^3.0.0" - -upath@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" - integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== - update-notifier@^5.1.0: version 
"5.1.0" resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-5.1.0.tgz#4ab0d7c7f36a231dd7316cf7729313f0214d9ad9" @@ -11281,11 +10063,6 @@ uri-js@^4.2.2: dependencies: punycode "^2.1.0" -urix@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" - integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= - url-loader@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/url-loader/-/url-loader-4.1.1.tgz#28505e905cae158cf07c92ca622d7f237e70a4e2" @@ -11302,14 +10079,6 @@ url-parse-lax@^3.0.0: dependencies: prepend-http "^2.0.0" -url-parse@^1.4.3, url-parse@^1.5.1: - version "1.5.3" - resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.3.tgz#71c1303d38fb6639ade183c2992c8cc0686df862" - integrity sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ== - dependencies: - querystringify "^2.1.1" - requires-port "^1.0.0" - url@^0.11.0: version "0.11.0" resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" @@ -11337,11 +10106,6 @@ use-latest@^1.0.0: dependencies: use-isomorphic-layout-effect "^1.0.0" -use@^3.1.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" - integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== - util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" @@ -11439,17 +10203,6 @@ vfile@^4.0.0: unist-util-stringify-position "^2.0.0" vfile-message "^2.0.0" -wait-on@^5.3.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/wait-on/-/wait-on-5.3.0.tgz#584e17d4b3fe7b46ac2b9f8e5e102c005c2776c7" - integrity sha512-DwrHrnTK+/0QFaB9a8Ol5Lna3k7WvUR4jzSKmz0YaPBpuN2sACyiPVKVfj6ejnjcajAcvn3wlbTyMIn9AZouOg== - dependencies: - axios "^0.21.1" - joi "^17.3.0" - lodash "^4.17.21" - minimist "^1.2.5" - rxjs "^6.6.3" - wait-on@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/wait-on/-/wait-on-6.0.0.tgz#7e9bf8e3d7fe2daecbb7a570ac8ca41e9311c7e7" @@ -11508,63 +10261,47 @@ webpack-bundle-analyzer@^4.4.2: sirv "^1.0.7" ws "^7.3.1" -webpack-dev-middleware@^3.7.2: - version "3.7.3" - resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-3.7.3.tgz#0639372b143262e2b84ab95d3b91a7597061c2c5" - integrity sha512-djelc/zGiz9nZj/U7PTBi2ViorGJXEWo/3ltkPbDyxCXhhEXkW0ce99falaok4TPj+AsxLiXJR0EBOb0zh9fKQ== +webpack-dev-middleware@^5.2.1: + version "5.2.1" + resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-5.2.1.tgz#97c948144349177856a3d2d9c612cc3fee180cf1" + integrity sha512-Kx1X+36Rn9JaZcQMrJ7qN3PMAuKmEDD9ZISjUj3Cgq4A6PtwYsC4mpaKotSRYH3iOF6HsUa8viHKS59FlyVifQ== dependencies: - memory-fs "^0.4.1" - mime "^2.4.4" - mkdirp "^0.5.1" + colorette "^2.0.10" + memfs "^3.2.2" + mime-types "^2.1.31" range-parser "^1.2.1" - webpack-log "^2.0.0" + schema-utils "^3.1.0" -webpack-dev-server@^3.11.2: - version "3.11.2" - resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-3.11.2.tgz#695ebced76a4929f0d5de7fd73fafe185fe33708" - integrity sha512-A80BkuHRQfCiNtGBS1EMf2ChTUs0x+B3wGDFmOeT4rmJOHhHTCH2naNxIHhmkr0/UillP4U3yeIyv1pNp+QDLQ== +webpack-dev-server@^4.4.0: + version "4.5.0" + resolved 
"https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-4.5.0.tgz#614b5112cfa4730a4801bb4ddebb3be5b0d70497" + integrity sha512-Ss4WptsUjYa+3hPI4iYZYEc8FrtnfkaPrm5WTjk9ux5kiCS718836srs0ppKMHRaCHP5mQ6g4JZGcfDdGbCjpQ== dependencies: - ansi-html-community "0.0.8" + ansi-html-community "^0.0.8" bonjour "^3.5.0" - chokidar "^2.1.8" + chokidar "^3.5.2" + colorette "^2.0.10" compression "^1.7.4" connect-history-api-fallback "^1.6.0" - debug "^4.1.1" - del "^4.1.1" + del "^6.0.0" express "^4.17.1" - html-entities "^1.3.1" - http-proxy-middleware "0.19.1" - import-local "^2.0.0" - internal-ip "^4.3.0" - ip "^1.1.5" - is-absolute-url "^3.0.3" - killable "^1.0.1" - loglevel "^1.6.8" - opn "^5.5.0" - p-retry "^3.0.1" - portfinder "^1.0.26" - schema-utils "^1.0.0" - selfsigned "^1.10.8" - semver "^6.3.0" + graceful-fs "^4.2.6" + html-entities "^2.3.2" + http-proxy-middleware "^2.0.0" + internal-ip "^6.2.0" + ipaddr.js "^2.0.1" + open "^8.0.9" + p-retry "^4.5.0" + portfinder "^1.0.28" + schema-utils "^3.1.0" + selfsigned "^1.10.11" serve-index "^1.9.1" sockjs "^0.3.21" - sockjs-client "^1.5.0" spdy "^4.0.2" - strip-ansi "^3.0.1" - supports-color "^6.1.0" + strip-ansi "^7.0.0" url "^0.11.0" - webpack-dev-middleware "^3.7.2" - webpack-log "^2.0.0" - ws "^6.2.1" - yargs "^13.3.2" - -webpack-log@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/webpack-log/-/webpack-log-2.0.0.tgz#5b7928e0637593f119d32f6227c1e0ac31e1b47f" - integrity sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg== - dependencies: - ansi-colors "^3.0.0" - uuid "^3.3.2" + webpack-dev-middleware "^5.2.1" + ws "^8.1.0" webpack-merge@^5.8.0: version "5.8.0" @@ -11582,18 +10319,15 @@ webpack-sources@^1.1.0, webpack-sources@^1.4.3: source-list-map "^2.0.0" source-map "~0.6.1" -webpack-sources@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-2.3.0.tgz#9ed2de69b25143a4c18847586ad9eccb19278cfa" - integrity sha512-WyOdtwSvOML1kbgtXbTDnEW0jkJ7hZr/bDByIwszhWd/4XX1A3XMkrbFMsuH4+/MfLlZCUzlAdg4r7jaGKEIgQ== - dependencies: - source-list-map "^2.0.1" - source-map "^0.6.1" +webpack-sources@^3.2.2: + version "3.2.2" + resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-3.2.2.tgz#d88e3741833efec57c4c789b6010db9977545260" + integrity sha512-cp5qdmHnu5T8wRg2G3vZZHoJPN14aqQ89SyQ11NpGH5zEMDCclt49rzo+MaRazk7/UeILhAI+/sEtcM+7Fr0nw== -webpack@^5.40.0: - version "5.44.0" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.44.0.tgz#97b13a02bd79fb71ac6301ce697920660fa214a1" - integrity sha512-I1S1w4QLoKmH19pX6YhYN0NiSXaWY8Ou00oA+aMcr9IUGeF5azns+IKBkfoAAG9Bu5zOIzZt/mN35OffBya8AQ== +webpack@^5.61.0: + version "5.64.1" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.64.1.tgz#fd59840c16f04fe315f2b2598a85026f12dfa1bb" + integrity sha512-b4FHmRgaaAjP+aVOVz41a9Qa5SmkUPQ+u8FntTQ1roPHahSComB6rXnLwc976VhUY4CqTaLu5mCswuHiNhOfVw== dependencies: "@types/eslint-scope" "^3.7.0" "@types/estree" "^0.0.50" @@ -11601,10 +10335,11 @@ webpack@^5.40.0: "@webassemblyjs/wasm-edit" "1.11.1" "@webassemblyjs/wasm-parser" "1.11.1" acorn "^8.4.1" + acorn-import-assertions "^1.7.6" browserslist "^4.14.5" chrome-trace-event "^1.0.2" - enhanced-resolve "^5.8.0" - es-module-lexer "^0.7.1" + enhanced-resolve "^5.8.3" + es-module-lexer "^0.9.0" eslint-scope "5.1.1" events "^3.2.0" glob-to-regexp "^0.4.1" @@ -11613,11 +10348,11 @@ webpack@^5.40.0: loader-runner "^4.2.0" mime-types "^2.1.27" neo-async "^2.6.2" - schema-utils "^3.0.0" + 
schema-utils "^3.1.0" tapable "^2.1.1" terser-webpack-plugin "^5.1.3" watchpack "^2.2.0" - webpack-sources "^2.3.0" + webpack-sources "^3.2.2" webpackbar@^5.0.0-3: version "5.0.0-3" @@ -11658,11 +10393,6 @@ which-boxed-primitive@^1.0.2: is-string "^1.0.5" is-symbol "^1.0.3" -which-module@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" - integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= - which@^1.2.9, which@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" @@ -11696,22 +10426,6 @@ wildcard@^2.0.0: resolved "https://registry.yarnpkg.com/wildcard/-/wildcard-2.0.0.tgz#a77d20e5200c6faaac979e4b3aadc7b3dd7f8fec" integrity sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw== -worker-rpc@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/worker-rpc/-/worker-rpc-0.1.1.tgz#cb565bd6d7071a8f16660686051e969ad32f54d5" - integrity sha512-P1WjMrUB3qgJNI9jfmpZ/htmBEjFh//6l/5y8SD9hg1Ef5zTTVVoRjTrTEzPrNBQvmhMxkoTsjOXN10GWU7aCg== - dependencies: - microevent.ts "~0.1.1" - -wrap-ansi@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" - integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q== - dependencies: - ansi-styles "^3.2.0" - string-width "^3.0.0" - strip-ansi "^5.0.0" - wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" @@ -11736,18 +10450,16 @@ write-file-atomic@^3.0.0, write-file-atomic@^3.0.3: signal-exit "^3.0.2" typedarray-to-buffer "^3.1.5" -ws@^6.2.1: - version "6.2.2" - resolved "https://registry.yarnpkg.com/ws/-/ws-6.2.2.tgz#dd5cdbd57a9979916097652d78f1cc5faea0c32e" - integrity sha512-zmhltoSR8u1cnDsD43TX59mzoMZsLKqUweyYBAIvTngR3shc0W6aOZylZmq/7hqyVxPdi+5Ud2QInblgyE72fw== - dependencies: - async-limiter "~1.0.0" - ws@^7.3.1: version "7.5.3" resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.3.tgz#160835b63c7d97bfab418fc1b8a9fced2ac01a74" integrity sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg== +ws@^8.1.0: + version "8.2.3" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.2.3.tgz#63a56456db1b04367d0b721a0b80cae6d8becbba" + integrity sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA== + xdg-basedir@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-4.0.0.tgz#4bc8d9984403696225ef83a1573cbbcb4e79db13" @@ -11765,11 +10477,6 @@ xtend@^4.0.0, xtend@^4.0.1: resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== -y18n@^4.0.0: - version "4.0.3" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.3.tgz#b5f259c82cd6e336921efd7bfd8bf560de9eeedf" - integrity sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ== - yallist@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" @@ -11780,35 +10487,11 @@ yallist@^4.0.0: resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity 
sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== -yaml@^1.10.0, yaml@^1.10.2: +yaml@^1.10.0, yaml@^1.10.2, yaml@^1.7.2: version "1.10.2" resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== -yargs-parser@^13.1.2: - version "13.1.2" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.2.tgz#130f09702ebaeef2650d54ce6e3e5706f7a4fb38" - integrity sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg== - dependencies: - camelcase "^5.0.0" - decamelize "^1.2.0" - -yargs@^13.3.2: - version "13.3.2" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd" - integrity sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw== - dependencies: - cliui "^5.0.0" - find-up "^3.0.0" - get-caller-file "^2.0.1" - require-directory "^2.1.1" - require-main-filename "^2.0.0" - set-blocking "^2.0.0" - string-width "^3.0.0" - which-module "^2.0.0" - y18n "^4.0.0" - yargs-parser "^13.1.2" - yarn-audit-fix@^7.1.2: version "7.1.2" resolved "https://registry.yarnpkg.com/yarn-audit-fix/-/yarn-audit-fix-7.1.2.tgz#bb9bdec44a8e7e560416165088f8db07ed63ab7e" From 7bfd0d70bace0f3461879e74919c780548e8eee1 Mon Sep 17 00:00:00 2001 From: Mark Hamilton Date: Wed, 17 Nov 2021 02:02:44 -0500 Subject: [PATCH 24/40] docs: update and improve readme (#1262) --- README.md | 121 +++++++++++------------------------------------------- 1 file changed, 24 insertions(+), 97 deletions(-) diff --git a/README.md b/README.md index 4a8207187a..6f50ecbbe5 100644 --- a/README.md +++ b/README.md @@ -9,17 +9,21 @@ [![Version](https://img.shields.io/badge/version-0.9.4-blue)](https://github.com/Microsoft/SynapseML/releases) [![Snapshot Version](https://mmlspark.blob.core.windows.net/icons/badges/master_version3.svg)](#sbt) -SynapseML is an ecosystem of tools aimed towards expanding the distributed computing framework -[Apache Spark](https://github.com/apache/spark) in several new directions. +SynapseML (previously MMLSpark) is an open source library to simplify the creation of scalable machine learning pipelines. +SynapseML builds on [Apache Spark](https://github.com/apache/spark) and SparkML to enable new kinds of +machine learning, analytics, and model deployment workflows. SynapseML adds many deep learning and data science tools to the Spark ecosystem, -including seamless integration of Spark Machine Learning pipelines with [Microsoft Cognitive Toolkit -(CNTK)](https://github.com/Microsoft/CNTK), [LightGBM](https://github.com/Microsoft/LightGBM) and +including seamless integration of Spark Machine Learning pipelines with the [Open Neural Network Exchange +(ONNX)](https://onnx.ai), +[LightGBM](https://github.com/Microsoft/LightGBM), +[The Cognitive Services](https://azure.microsoft.com/en-us/services/cognitive-services/), +[Vowpal Wabbit](https://vowpalwabbit.org/), and [OpenCV](http://www.opencv.org/). These tools enable powerful and highly-scalable predictive and analytical models for a variety of datasources. SynapseML also brings new networking capabilities to the Spark Ecosystem. With the HTTP on Spark project, users -can embed **any** web service into their SparkML models. 
In this vein, SynapseML provides easy to use -SparkML transformers for a wide variety of [Microsoft Cognitive Services](https://azure.microsoft.com/en-us/services/cognitive-services/). For production grade deployment, the Spark Serving project enables high throughput, +can embed **any** web service into their SparkML models. +For production grade deployment, the Spark Serving project enables high throughput, sub-millisecond latency web services, backed by your Spark cluster. SynapseML requires Scala 2.12, Spark 3.0+, and Python 3.6+. @@ -30,8 +34,8 @@ PySpark](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/index.html).

Table of Contents -- [Notable features](#notable-features) -- [A short example](#a-short-example) +- [Features](#features) +- [Documentation](#documentation-and-examples) - [Setup and installation](#setup-and-installation) - [Docker](#docker) - [GPU VM Setup](#gpu-vm-setup) @@ -46,97 +50,26 @@ PySpark](https://mmlspark.blob.core.windows.net/docs/0.9.4/pyspark/index.html).
-## Projects +## Features | | | | | |:-------------------------:|:-------------------------:|:-------------------------:|:-------------------------:| -| **Vowpal Wabbit on Spark** | **The Cognitive Services on Spark**| **LightGBM on Spark** | **Spark Serving** | +| [**Vowpal Wabbit on Spark**](https://microsoft.github.io/SynapseML/docs/features/vw/about/) | [**The Cognitive Services for Big Data**](https://microsoft.github.io/SynapseML/docs/features/cognitive_services/CognitiveServices%20-%20Overview/)| [**LightGBM on Spark**](https://microsoft.github.io/SynapseML/docs/features/lightgbm/about/) | [**Spark Serving**](https://microsoft.github.io/SynapseML/docs/features/spark_serving/about/) | | Fast, Sparse, and Effective Text Analytics | Leverage the Microsoft Cognitive Services at Unprecedented Scales in your existing SparkML pipelines | Train Gradient Boosted Machines with LightGBM | Serve any Spark Computation as a Web Service with Sub-Millisecond Latency | -| | | | | +| | | | | |:-------------------------:|:-------------------------:|:-------------------------:|:-------------------------:| -| **HTTP on Spark** | **CNTK on Spark** | **Model Interpretation on Spark**| **Spark Binding Autogeneration** | -| An Integration Between Spark and the HTTP Protocol, enabling Distributed Microservice Orchestration|Distributed Deep Learning with the Microsoft Cognitive Toolkit | Distributed, Model Agnostic, Interpretations for Classifiers | Automatically Generate Spark bindings for PySpark and SparklyR| +| [**HTTP on Spark**](https://microsoft.github.io/SynapseML/docs/features/cognitive_services/CognitiveServices%20-%20Overview/#arbitrary-web-apis) | [**ONNX on Spark**](https://microsoft.github.io/SynapseML/docs/features/onnx/about/) | [**Responsible AI**](https://microsoft.github.io/SynapseML/docs/features/responsible_ai/Model%20Interpretation%20on%20Spark/)| [**Spark Binding Autogeneration**](https://microsoft.github.io/SynapseML/docs/reference/developer-readme/#packagepython) | +| An Integration Between Spark and the HTTP Protocol, enabling Distributed Microservice Orchestration| Distributed and Hardware Accelerated Model Inference on Spark | Understand Opaque-box Models and Measure Dataset Biases | Automatically Generate Spark bindings for PySpark and SparklyR| -| | | | -|:--:|:--:|:--:| -| **Isolation Forest on Spark** | [**CyberML**](https://github.com/Microsoft/SynapseML/blob/master/notebooks/CyberML%20-%20Anomalous%20Access%20Detection.ipynb) | **Conditional KNN** | +| | | | +|:---:|:---:|:---:| +| [**Isolation Forest on Spark**](https://microsoft.github.io/SynapseML/docs/documentation/estimators/estimators_core/#isolationforest) | [**CyberML**](https://github.com/Microsoft/SynapseML/blob/master/notebooks/CyberML%20-%20Anomalous%20Access%20Detection.ipynb) | [**Conditional KNN**](https://microsoft.github.io/SynapseML/docs/features/other/ConditionalKNN%20-%20Exploring%20Art%20Across%20Cultures/) | | Distributed Nonlinear Outlier Detection | Machine Learning Tools for Cyber Security | Scalable KNN Models with Conditional Queries | +## Documentation and Examples - -## Examples - -- Create a deep image classifier with transfer learning ([example 9]) -- Fit a LightGBM classification or regression model on a biochemical dataset - ([example 3]), to learn more check out the [LightGBM documentation - page](docs/lightgbm.md). 
-- Deploy a deep network as a distributed web service with [SynapseML - Serving](docs/mmlspark-serving.md) -- Use web services in Spark with [HTTP on Apache Spark](docs/http.md) -- Use Bi-directional LSTMs from Keras for medical entity extraction - ([example 8]) -- Create a text analytics system on Amazon book reviews ([example 4]) -- Perform distributed hyperparameter tuning to identify Breast Cancer - ([example 5]) -- Easily ingest images from HDFS into Spark `DataFrame` ([example 6]) -- Use OpenCV on Spark to manipulate images ([example 7]) -- Train classification and regression models easily via implicit featurization - of data ([example 1]) -- Train and evaluate a flight delay prediction system ([example 2]) -- Finding anomalous data access patterns using the Access Anomalies package of CyberML ([example 11]) -- Model interpretation ([example 12], [example 13], [example 14]) - -See our [notebooks](notebooks/) for all examples. - -[example 1]: notebooks/Classification%20-%20Adult%20Census.ipynb "Adult Census Income Training" - -[example 2]: notebooks/Regression%20-%20Flight%20Delays.ipynb "Regression Example with Flight Delay Dataset" - -[example 3]: notebooks/LightGBM%20-%20Overview.ipynb "Quantile Regression with LightGBM" - -[example 4]: notebooks/TextAnalytics%20-%20Amazon%20Book%20Reviews.ipynb "Amazon Book Reviews - TextFeaturizer" - -[example 5]: notebooks/HyperParameterTuning%20-%20Fighting%20Breast%20Cancer.ipynb "Hyperparameter Tuning with SynapseML" - -[example 6]: notebooks/DeepLearning%20-%20CIFAR10%20Convolutional%20Network.ipynb "CIFAR10 CNTK CNN Evaluation" - -[example 7]: notebooks/OpenCV%20-%20Pipeline%20Image%20Transformations.ipynb "Pipeline Image Transformations" - -[example 8]: notebooks/DeepLearning%20-%20BiLSTM%20Medical%20Entity%20Extraction.ipynb "Medical Entity Extraction" - -[example 9]: notebooks/DeepLearning%20-%20Flower%20Image%20Classification.ipynb "Deep Flower Classification" - -[example 10]: notebooks/gpu/DeepLearning%20-%20Distributed%20CNTK%20training.ipynb "CIFAR10 CNTK CNN Training" - -[example 11]: notebooks/CyberML%20-%20Anomalous%20Access%20Detection.ipynb "Access Anomalies documenation, training and evaluation example" - -[example 12]: notebooks/Interpretability%20-%20Tabular%20SHAP%20explainer.ipynb "Interpretability - Tabular SHAP Explainer" - -[example 13]: notebooks/Interpretability%20-%20Image%20Explainers.ipynb "Interpretability - Image Explainers" - -[example 14]: notebooks/Interpretability%20-%20Text%20Explainers.ipynb "Interpretability - Text Explainers" - -## A short example - -Below is an excerpt from a simple example of using a pre-trained CNN to -classify images in the CIFAR-10 dataset. View the whole source code in notebook [example 9]. - -```python -... -import synapse.ml -# Initialize CNTKModel and define input and output columns -cntkModel = synapse.ml.cntk.CNTKModel() \ - .setInputCol("images").setOutputCol("output") \ - .setModelLocation(modelFile) -# Train on dataset with internal spark pipeline -scoredImages = cntkModel.transform(imagesWithLabels) -... -``` - -See [other sample notebooks](notebooks/) as well as the SynapseML -documentation for [Scala](http://mmlspark.azureedge.net/docs/scala/) and -[PySpark](http://mmlspark.azureedge.net/docs/pyspark/). +For quickstarts, documentation, demos, and examples please see our [website](https://aka.ms/spark). ## Setup and installation @@ -277,7 +210,7 @@ and some necessary custom wrappers may be missing. 
- [Conditional Image Retrieval](https://arxiv.org/abs/2007.07177) -- [SynapseML: Unifying Machine Learning Ecosystems at Massive Scales](https://arxiv.org/abs/1810.08744) +- [MMLSpark: Unifying Machine Learning Ecosystems at Massive Scales](https://arxiv.org/abs/1810.08744) - [Flexible and Scalable Deep Learning with SynapseML](https://arxiv.org/abs/1804.04031) @@ -293,7 +226,7 @@ and some necessary custom wrappers may be missing. - Explore [our collaboration with Apache Spark] on image analysis. -[website]: https://mmlspark.blob.core.windows.net/website/index.html "aka.ms/spark" +[website]: https://microsoft.github.io/SynapseML/ "aka.ms/spark" [the Spark+AI Summit 2018]: https://databricks.com/sparkaisummit/north-america/spark-summit-2018-keynotes#Intelligent-cloud "Developing for the Intelligent Cloud and Intelligent Edge" @@ -301,16 +234,12 @@ and some necessary custom wrappers may be missing. [the Spark+AI European Summit 2018]: https://youtu.be/N3ozCZXeOeU?t=472 -[our paper]: https://arxiv.org/abs/1804.04031 "Flexible and Scalable Deep Learning with SynapseML" - [help endangered species]: https://www.microsoft.com/en-us/ai/ai-lab-stories?activetab=pivot1:primaryr3 "Identifying snow leopards with AI" [our collaboration with The MET and MIT]: https://www.microsoft.com/en-us/ai/ai-lab-stories?activetab=pivot1:primaryr4 "Generative art at the MET" [our collaboration with Apache Spark]: https://blogs.technet.microsoft.com/machinelearning/2018/03/05/image-data-support-in-apache-spark/ "Image Data Support in Apache Spark" -[SynapseML in Azure Machine Learning]: https://docs.microsoft.com/en-us/azure/machine-learning/preview/how-to-use-mmlspark "How to Use Microsoft Machine Learning Library for Apache Spark" - [SynapseML at the Spark Summit]: https://databricks.com/session/mmlspark-lessons-from-building-a-sparkml-compatible-machine-learning-library-for-apache-spark "MMLSpark: Lessons from Building a SparkML-Compatible Machine Learning Library for Apache Spark" ## Contributing & feedback @@ -343,8 +272,6 @@ Issue](https://help.github.com/articles/creating-an-issue/). - [Microsoft Cognitive Toolkit](https://github.com/Microsoft/CNTK) -- [Azure Machine Learning - preview features](https://docs.microsoft.com/en-us/azure/machine-learning/preview) _Apache®, Apache Spark, and Spark® are either registered trademarks or trademarks of the Apache Software Foundation in the United States and/or other From 16a94189826e19b9a9eb0f8c5f0362a21ac8ff19 Mon Sep 17 00:00:00 2001 From: Mark Hamilton Date: Wed, 17 Nov 2021 14:48:38 -0500 Subject: [PATCH 25/40] docs: add website announcement bar (#1263) --- website/docusaurus.config.js | 4 ++++ website/src/css/custom.css | 22 ++++++++++++++++++++++ 2 files changed, 26 insertions(+) diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js index 9e75fd3b0d..8797aa319b 100644 --- a/website/docusaurus.config.js +++ b/website/docusaurus.config.js @@ -123,6 +123,10 @@ module.exports = { trackingID: 'G-RWPE0183E8', anonymizeIP: true, }, + announcementBar: { + id: 'announcementBar-1', // Increment on change + content: `⭐️ If you like SynapseML, consider giving it a star on
GitHub ⭐`, + }, }, presets: [ [ diff --git a/website/src/css/custom.css b/website/src/css/custom.css index 814516a9ba..ea32162ac5 100644 --- a/website/src/css/custom.css +++ b/website/src/css/custom.css @@ -5,6 +5,9 @@ */ :root { + + --site-primary-hue-saturation: 167, 68%; + --ifm-color-primary: #3578e5; --ifm-color-primary-dark: #1d68e1; --ifm-color-primary-darker: #1b62d4; @@ -173,3 +176,22 @@ html[data-theme='dark'] .DocSearch { .markdown > table td { min-width: 160px; } + +div[class^='announcementBar_'] { + --site-announcement-bar-stripe-color1: hsl( + var(--site-primary-hue-saturation), + 85% + ); + --site-announcement-bar-stripe-color2: hsl( + var(--site-primary-hue-saturation), + 95% + ); + background: repeating-linear-gradient( + 35deg, + var(--site-announcement-bar-stripe-color1), + var(--site-announcement-bar-stripe-color1) 20px, + var(--site-announcement-bar-stripe-color2) 10px, + var(--site-announcement-bar-stripe-color2) 40px + ); + font-weight: bold; +} From d8e64b3fcb624dbf7ae7f1a285e3f2619c09b45d Mon Sep 17 00:00:00 2001 From: Mark Hamilton Date: Wed, 17 Nov 2021 18:38:30 -0500 Subject: [PATCH 26/40] fix: remove some deps that cause sec issues (#1264) --- website/package.json | 3 +- website/yarn.lock | 1985 ++---------------------------------------- 2 files changed, 74 insertions(+), 1914 deletions(-) diff --git a/website/package.json b/website/package.json index d4a8b23916..02d77dfb4d 100644 --- a/website/package.json +++ b/website/package.json @@ -24,8 +24,7 @@ "react": "^16.8.4", "react-dom": "^16.8.4", "react-player": "^2.7.2", - "reading-time": "^1.2.0", - "yarn-audit-fix": "^7.1.2" + "reading-time": "^1.2.0" }, "resolutions": { "@docusaurus/core/**/set-value": "^4.0.1", diff --git a/website/yarn.lock b/website/yarn.lock index e3153856eb..c0f5eee58e 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -2028,11 +2028,6 @@ resolve-pathname "^3.0.0" tslib "^2.3.1" -"@gar/promisify@^1.0.1": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@gar/promisify/-/promisify-1.1.2.tgz#30aa825f11d438671d585bd44e7fd564535fc210" - integrity sha512-82cpyJyKRoQoRi+14ibCeGPu0CwypgtBAdBhq1WfvagpCZNKqwXbKwXllYSMG91DhmG4jt9gN8eP6lGOtozuaw== - "@hapi/hoek@^9.0.0": version "9.2.0" resolved "https://registry.yarnpkg.com/@hapi/hoek/-/hoek-9.2.0.tgz#f3933a44e365864f4dad5db94158106d511e8131" @@ -2045,11 +2040,6 @@ dependencies: "@hapi/hoek" "^9.0.0" -"@isaacs/string-locale-compare@^1.0.1", "@isaacs/string-locale-compare@^1.1.0": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@isaacs/string-locale-compare/-/string-locale-compare-1.1.0.tgz#291c227e93fd407a96ecd59879a35809120e432b" - integrity sha512-SQ7Kzhh9+D+ZW9MA0zkYv3VXhIDNx+LzM6EJ+/65I3QY+enU6Itte7E5XX7EWrqLW2FN4n06GWzBnPoC3th2aQ== - "@mdx-js/mdx@1.6.22", "@mdx-js/mdx@^1.6.21": version "1.6.22" resolved "https://registry.yarnpkg.com/@mdx-js/mdx/-/mdx-1.6.22.tgz#8a723157bf90e78f17dc0f27995398e6c731f1ba" @@ -2115,169 +2105,6 @@ "@nodelib/fs.scandir" "2.1.5" fastq "^1.6.0" -"@npmcli/arborist@^4.0.0", "@npmcli/arborist@^4.0.1": - version "4.0.1" - resolved "https://registry.yarnpkg.com/@npmcli/arborist/-/arborist-4.0.1.tgz#18230aca9d58acf920d61351ea42cda4d0364a91" - integrity sha512-EhHFbvwNbkVl2T0FYUyxt00pxLCuqMSloikOOpjGXGSHLZSkItQGxDM3ly4liKGEBuU1qJBRH3VlJJKCz0c6vQ== - dependencies: - "@isaacs/string-locale-compare" "^1.0.1" - "@npmcli/installed-package-contents" "^1.0.7" - "@npmcli/map-workspaces" "^2.0.0" - "@npmcli/metavuln-calculator" "^2.0.0" - "@npmcli/move-file" "^1.1.0" - "@npmcli/name-from-folder" "^1.0.1" - 
"@npmcli/node-gyp" "^1.0.1" - "@npmcli/package-json" "^1.0.1" - "@npmcli/run-script" "^2.0.0" - bin-links "^2.3.0" - cacache "^15.0.3" - common-ancestor-path "^1.0.1" - json-parse-even-better-errors "^2.3.1" - json-stringify-nice "^1.1.4" - mkdirp "^1.0.4" - mkdirp-infer-owner "^2.0.0" - npm-install-checks "^4.0.0" - npm-package-arg "^8.1.5" - npm-pick-manifest "^6.1.0" - npm-registry-fetch "^11.0.0" - pacote "^12.0.0" - parse-conflict-json "^1.1.1" - proc-log "^1.0.0" - promise-all-reject-late "^1.0.0" - promise-call-limit "^1.0.1" - read-package-json-fast "^2.0.2" - readdir-scoped-modules "^1.1.0" - rimraf "^3.0.2" - semver "^7.3.5" - ssri "^8.0.1" - treeverse "^1.0.4" - walk-up-path "^1.0.0" - -"@npmcli/ci-detect@^1.3.0", "@npmcli/ci-detect@^1.4.0": - version "1.4.0" - resolved "https://registry.yarnpkg.com/@npmcli/ci-detect/-/ci-detect-1.4.0.tgz#18478bbaa900c37bfbd8a2006a6262c62e8b0fe1" - integrity sha512-3BGrt6FLjqM6br5AhWRKTr3u5GIVkjRYeAFrMp3HjnfICrg4xOrVRwFavKT6tsp++bq5dluL5t8ME/Nha/6c1Q== - -"@npmcli/config@^2.3.0": - version "2.3.0" - resolved "https://registry.yarnpkg.com/@npmcli/config/-/config-2.3.0.tgz#364fbe942037e562a832a113206e14ccb651f7bc" - integrity sha512-yjiC1xv7KTmUTqfRwN2ZL7BHV160ctGF0fLXmKkkMXj40UOvBe45Apwvt5JsFRtXSoHkUYy1ouzscziuWNzklg== - dependencies: - ini "^2.0.0" - mkdirp-infer-owner "^2.0.0" - nopt "^5.0.0" - semver "^7.3.4" - walk-up-path "^1.0.0" - -"@npmcli/disparity-colors@^1.0.1": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@npmcli/disparity-colors/-/disparity-colors-1.0.1.tgz#b23c864c9658f9f0318d5aa6d17986619989535c" - integrity sha512-kQ1aCTTU45mPXN+pdAaRxlxr3OunkyztjbbxDY/aIcPS5CnCUrx+1+NvA6pTcYR7wmLZe37+Mi5v3nfbwPxq3A== - dependencies: - ansi-styles "^4.3.0" - -"@npmcli/fs@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@npmcli/fs/-/fs-1.0.0.tgz#589612cfad3a6ea0feafcb901d29c63fd52db09f" - integrity sha512-8ltnOpRR/oJbOp8vaGUnipOi3bqkcW+sLHFlyXIr08OGHmVJLB1Hn7QtGXbYcpVtH1gAYZTlmDXtE4YV0+AMMQ== - dependencies: - "@gar/promisify" "^1.0.1" - semver "^7.3.5" - -"@npmcli/git@^2.0.7", "@npmcli/git@^2.1.0": - version "2.1.0" - resolved "https://registry.yarnpkg.com/@npmcli/git/-/git-2.1.0.tgz#2fbd77e147530247d37f325930d457b3ebe894f6" - integrity sha512-/hBFX/QG1b+N7PZBFs0bi+evgRZcK9nWBxQKZkGoXUT5hJSwl5c4d7y8/hm+NQZRPhQ67RzFaj5UM9YeyKoryw== - dependencies: - "@npmcli/promise-spawn" "^1.3.2" - lru-cache "^6.0.0" - mkdirp "^1.0.4" - npm-pick-manifest "^6.1.1" - promise-inflight "^1.0.1" - promise-retry "^2.0.1" - semver "^7.3.5" - which "^2.0.2" - -"@npmcli/installed-package-contents@^1.0.6", "@npmcli/installed-package-contents@^1.0.7": - version "1.0.7" - resolved "https://registry.yarnpkg.com/@npmcli/installed-package-contents/-/installed-package-contents-1.0.7.tgz#ab7408c6147911b970a8abe261ce512232a3f4fa" - integrity sha512-9rufe0wnJusCQoLpV9ZPKIVP55itrM5BxOXs10DmdbRfgWtHy1LDyskbwRnBghuB0PrF7pNPOqREVtpz4HqzKw== - dependencies: - npm-bundled "^1.1.1" - npm-normalize-package-bin "^1.0.1" - -"@npmcli/map-workspaces@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@npmcli/map-workspaces/-/map-workspaces-2.0.0.tgz#e342efbbdd0dad1bba5d7723b674ca668bf8ac5a" - integrity sha512-QBJfpCY1NOAkkW3lFfru9VTdqvMB2TN0/vrevl5xBCv5Fi0XDVcA6rqqSau4Ysi4Iw3fBzyXV7hzyTBDfadf7g== - dependencies: - "@npmcli/name-from-folder" "^1.0.1" - glob "^7.1.6" - minimatch "^3.0.4" - read-package-json-fast "^2.0.1" - -"@npmcli/metavuln-calculator@^2.0.0": - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/@npmcli/metavuln-calculator/-/metavuln-calculator-2.0.0.tgz#70937b8b5a5cad5c588c8a7b38c4a8bd6f62c84c" - integrity sha512-VVW+JhWCKRwCTE+0xvD6p3uV4WpqocNYYtzyvenqL/u1Q3Xx6fGTJ+6UoIoii07fbuEO9U3IIyuGY0CYHDv1sg== - dependencies: - cacache "^15.0.5" - json-parse-even-better-errors "^2.3.1" - pacote "^12.0.0" - semver "^7.3.2" - -"@npmcli/move-file@^1.0.1", "@npmcli/move-file@^1.1.0": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@npmcli/move-file/-/move-file-1.1.2.tgz#1a82c3e372f7cae9253eb66d72543d6b8685c674" - integrity sha512-1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg== - dependencies: - mkdirp "^1.0.4" - rimraf "^3.0.2" - -"@npmcli/name-from-folder@^1.0.1": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@npmcli/name-from-folder/-/name-from-folder-1.0.1.tgz#77ecd0a4fcb772ba6fe927e2e2e155fbec2e6b1a" - integrity sha512-qq3oEfcLFwNfEYOQ8HLimRGKlD8WSeGEdtUa7hmzpR8Sa7haL1KVQrvgO6wqMjhWFFVjgtrh1gIxDz+P8sjUaA== - -"@npmcli/node-gyp@^1.0.1", "@npmcli/node-gyp@^1.0.2": - version "1.0.3" - resolved "https://registry.yarnpkg.com/@npmcli/node-gyp/-/node-gyp-1.0.3.tgz#a912e637418ffc5f2db375e93b85837691a43a33" - integrity sha512-fnkhw+fmX65kiLqk6E3BFLXNC26rUhK90zVwe2yncPliVT/Qos3xjhTLE59Df8KnPlcwIERXKVlU1bXoUQ+liA== - -"@npmcli/package-json@^1.0.1": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@npmcli/package-json/-/package-json-1.0.1.tgz#1ed42f00febe5293c3502fd0ef785647355f6e89" - integrity sha512-y6jnu76E9C23osz8gEMBayZmaZ69vFOIk8vR1FJL/wbEJ54+9aVG9rLTjQKSXfgYZEr50nw1txBBFfBZZe+bYg== - dependencies: - json-parse-even-better-errors "^2.3.1" - -"@npmcli/promise-spawn@^1.2.0", "@npmcli/promise-spawn@^1.3.2": - version "1.3.2" - resolved "https://registry.yarnpkg.com/@npmcli/promise-spawn/-/promise-spawn-1.3.2.tgz#42d4e56a8e9274fba180dabc0aea6e38f29274f5" - integrity sha512-QyAGYo/Fbj4MXeGdJcFzZ+FkDkomfRBrPM+9QYJSg+PxgAUL+LU3FneQk37rKR2/zjqkCV1BLHccX98wRXG3Sg== - dependencies: - infer-owner "^1.0.4" - -"@npmcli/run-script@^1.8.2": - version "1.8.6" - resolved "https://registry.yarnpkg.com/@npmcli/run-script/-/run-script-1.8.6.tgz#18314802a6660b0d4baa4c3afe7f1ad39d8c28b7" - integrity sha512-e42bVZnC6VluBZBAFEr3YrdqSspG3bgilyg4nSLBJ7TRGNCzxHa92XAHxQBLYg0BmgwO4b2mf3h/l5EkEWRn3g== - dependencies: - "@npmcli/node-gyp" "^1.0.2" - "@npmcli/promise-spawn" "^1.3.2" - node-gyp "^7.1.0" - read-package-json-fast "^2.0.1" - -"@npmcli/run-script@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@npmcli/run-script/-/run-script-2.0.0.tgz#9949c0cab415b17aaac279646db4f027d6f1e743" - integrity sha512-fSan/Pu11xS/TdaTpTB0MRn9guwGU8dye+x56mEVgBEd/QsybBbYcAL0phPXi8SGWFEChkQd6M9qL4y6VOpFig== - dependencies: - "@npmcli/node-gyp" "^1.0.2" - "@npmcli/promise-spawn" "^1.3.2" - node-gyp "^8.2.0" - read-package-json-fast "^2.0.1" - "@polka/url@^1.0.0-next.15": version "1.0.0-next.15" resolved "https://registry.yarnpkg.com/@polka/url/-/url-1.0.0-next.15.tgz#6a9d143f7f4f49db2d782f9e1c8839a29b43ae23" @@ -2426,11 +2253,6 @@ dependencies: defer-to-connect "^1.0.1" -"@tootallnate/once@1": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82" - integrity sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw== - "@trysound/sax@0.1.1": version "0.1.1" resolved "https://registry.yarnpkg.com/@trysound/sax/-/sax-0.1.1.tgz#3348564048e7a2d7398c935d466c0414ebb6a669" @@ -2457,18 +2279,6 @@ resolved 
"https://registry.yarnpkg.com/@types/estree/-/estree-0.0.50.tgz#1e0caa9364d3fccd2931c3ed96fdbeaa5d4cca83" integrity sha512-C6N5s2ZFtuZRj54k2/zyRhNDjJwwcViAM3Nbm8zjBpbqAdZ00mr0CFxvSKeO8Y/e03WVFLpQMdHYVfUd6SB+Hw== -"@types/find-cache-dir@^3.2.1": - version "3.2.1" - resolved "https://registry.yarnpkg.com/@types/find-cache-dir/-/find-cache-dir-3.2.1.tgz#7b959a4b9643a1e6a1a5fe49032693cc36773501" - integrity sha512-frsJrz2t/CeGifcu/6uRo4b+SzAwT4NYCVPu1GN8IB9XTzrpPkGuV0tmh9mN+/L0PklAlsC3u5Fxt0ju00LXIw== - -"@types/fs-extra@^9.0.13": - version "9.0.13" - resolved "https://registry.yarnpkg.com/@types/fs-extra/-/fs-extra-9.0.13.tgz#7594fbae04fe7f1918ce8b3d213f74ff44ac1f45" - integrity sha512-nEnwB++1u5lVDM2UI4c1+5R+FYaKfaAzS4OococimjVm3nQw3TuzH5UNsocrcTBbhnerblyHj4A49qXbIiZdpA== - dependencies: - "@types/node" "*" - "@types/github-slugger@^1.3.0": version "1.3.0" resolved "https://registry.yarnpkg.com/@types/github-slugger/-/github-slugger-1.3.0.tgz#16ab393b30d8ae2a111ac748a015ac05a1fc5524" @@ -2503,18 +2313,6 @@ resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.9.tgz#97edc9037ea0c38585320b28964dde3b39e4660d" integrity sha512-qcUXuemtEu+E5wZSJHNxUXeCZhAfXKQ41D+duX+VYPde7xyEVZci+/oXKJL13tnRs9lR2pr4fod59GT6/X1/yQ== -"@types/lodash-es@^4.17.5": - version "4.17.5" - resolved "https://registry.yarnpkg.com/@types/lodash-es/-/lodash-es-4.17.5.tgz#1c3fdd16849d84aea43890b1c60da379fb501353" - integrity sha512-SHBoI8/0aoMQWAgUHMQ599VM6ZiSKg8sh/0cFqqlQQMyY9uEplc0ULU5yQNzcvdR4ZKa0ey8+vFmahuRbOCT1A== - dependencies: - "@types/lodash" "*" - -"@types/lodash@*": - version "4.14.175" - resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.175.tgz#b78dfa959192b01fae0ad90e166478769b215f45" - integrity sha512-XmdEOrKQ8a1Y/yxQFOMbC47G/V2VDO1GvMRnl4O75M4GW/abC5tnfzadQYkqEveqRM1dEJGFFegfPNA2vvx2iw== - "@types/mdast@^3.0.0": version "3.0.4" resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.4.tgz#8ee6b5200751b6cadb9a043ca39612693ad6cb9e" @@ -2578,21 +2376,11 @@ resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.2.tgz#1a62f89525723dde24ba1b01b092bf5df8ad4d39" integrity sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew== -"@types/semver@^7.3.8": - version "7.3.8" - resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.3.8.tgz#508a27995498d7586dcecd77c25e289bfaf90c59" - integrity sha512-D/2EJvAlCEtYFEYmmlGwbGXuK886HzyCc3nZX/tkFTQdEU8jZDAgiv08P162yB17y4ZXZoq7yFAnW4GDBb9Now== - "@types/unist@*", "@types/unist@^2.0.0", "@types/unist@^2.0.2", "@types/unist@^2.0.3": version "2.0.5" resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.5.tgz#fdd299f23205c3455af88ce618dd65c14cb73e22" integrity sha512-wnra4Vw9dopnuybR6HBywJ/URYpYrKLoepBTEtgfJup8Ahoi2zJECPP2cwiXp7btTvOT2CULv87aQRA4eZSP6g== -"@types/yarnpkg__lockfile@^1.1.5": - version "1.1.5" - resolved "https://registry.yarnpkg.com/@types/yarnpkg__lockfile/-/yarnpkg__lockfile-1.1.5.tgz#9639020e1fb65120a2f4387db8f1e8b63efdf229" - integrity sha512-8NYnGOctzsI4W0ApsP/BIHD/LnxpJ6XaGf2AZmz4EyDYJMxtprN4279dLNI1CPZcwC9H18qYcaFv4bXi0wmokg== - "@webassemblyjs/ast@1.11.1": version "1.11.1" resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.1.tgz#2bfd767eae1a6996f432ff7e8d7fc75679c0b6a7" @@ -2724,16 +2512,6 @@ resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== 
-"@yarnpkg/lockfile@^1.1.0": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz#e77a97fbd345b76d83245edcd17d393b1b41fb31" - integrity sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ== - -abbrev@1, abbrev@~1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" - integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== - accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.7: version "1.3.7" resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" @@ -2777,22 +2555,6 @@ address@^1.0.1, address@^1.1.2: resolved "https://registry.yarnpkg.com/address/-/address-1.1.2.tgz#bf1116c9c758c51b7a933d296b72c221ed9428b6" integrity sha512-aT6camzM4xEA54YVJYSqxz1kv4IHnQZRtThJJHhUMRExaU5spC7jX5ugSwTaTgJliIgs4VhZOk7htClvQ/LmRA== -agent-base@6, agent-base@^6.0.2: - version "6.0.2" - resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" - integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== - dependencies: - debug "4" - -agentkeepalive@^4.1.3: - version "4.1.4" - resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.1.4.tgz#d928028a4862cb11718e55227872e842a44c945b" - integrity sha512-+V/rGa3EuU74H6wR04plBb7Ks10FbtUQgRj/FQOG7uUIEuaINI+AiqJR1k6t3SVNs7o7ZjIdus6706qqzVq8jQ== - dependencies: - debug "^4.1.0" - depd "^1.1.2" - humanize-ms "^1.2.1" - aggregate-error@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" @@ -2806,7 +2568,7 @@ ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-3.5.2.tgz#31f29da5ab6e00d1c2d329acf7b5929614d5014d" integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== -ajv@^6.12.2, ajv@^6.12.3, ajv@^6.12.4, ajv@^6.12.5: +ajv@^6.12.2, ajv@^6.12.4, ajv@^6.12.5: version "6.12.6" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== @@ -2887,7 +2649,7 @@ ansi-html-community@^0.0.8: resolved "https://registry.yarnpkg.com/ansi-html-community/-/ansi-html-community-0.0.8.tgz#69fbc4d6ccbe383f9736934ae34c3f8290f1bf41" integrity "sha1-afvE1sy+OD+XNpNK40w/gpDxv0E= sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==" -ansi-regex@^2.0.0, ansi-regex@^3.0.0, ansi-regex@^4.1.0, ansi-regex@^5.0.0, ansi-regex@^5.0.1, ansi-regex@^6.0.1: +ansi-regex@^4.1.0, ansi-regex@^5.0.0, ansi-regex@^5.0.1, ansi-regex@^6.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== @@ -2899,23 +2661,13 @@ ansi-styles@^3.2.1: dependencies: color-convert "^1.9.0" -ansi-styles@^4.0.0, ansi-styles@^4.1.0, ansi-styles@^4.3.0: +ansi-styles@^4.0.0, ansi-styles@^4.1.0: version "4.3.0" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" integrity 
sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== dependencies: color-convert "^2.0.1" -ansicolors@~0.3.2: - version "0.3.2" - resolved "https://registry.yarnpkg.com/ansicolors/-/ansicolors-0.3.2.tgz#665597de86a9ffe3aa9bfbe6cae5c6ea426b4979" - integrity sha1-ZlWX3oap/+Oqm/vmyuXG6kJrSXk= - -ansistyles@~0.1.3: - version "0.1.3" - resolved "https://registry.yarnpkg.com/ansistyles/-/ansistyles-0.1.3.tgz#5de60415bda071bb37127854c864f41b23254539" - integrity sha1-XeYEFb2gcbs3EnhUyGT0GyMlRTk= - anymatch@~3.1.2: version "3.1.2" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" @@ -2924,37 +2676,6 @@ anymatch@~3.1.2: normalize-path "^3.0.0" picomatch "^2.0.4" -aproba@^1.0.3: - version "1.2.0" - resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a" - integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw== - -"aproba@^1.0.3 || ^2.0.0", aproba@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/aproba/-/aproba-2.0.0.tgz#52520b8ae5b569215b354efc0caa3fe1e45a8adc" - integrity sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ== - -archy@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/archy/-/archy-1.0.0.tgz#f9c8c13757cc1dd7bc379ac77b2c62a5c2868c40" - integrity sha1-+cjBN1fMHde8N5rHeyxipcKGjEA= - -are-we-there-yet@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz#372e0e7bd279d8e94c653aaa1f67200884bf3e1c" - integrity sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw== - dependencies: - delegates "^1.0.0" - readable-stream "^3.6.0" - -are-we-there-yet@~1.1.2: - version "1.1.7" - resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.7.tgz#b15474a932adab4ff8a50d9adfa7e4e926f21146" - integrity sha512-nxwy40TuMiUGqMyRHgCSWZ9FM4VAoRP4xUYSTv5ImRog+h9yISPbVH7H8fASCIzYn9wlEv4zvFL7uKDMCFQm3g== - dependencies: - delegates "^1.0.0" - readable-stream "^2.0.6" - arg@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.0.tgz#a20e2bb5710e82950a516b3f933fee5ed478be90" @@ -2972,11 +2693,6 @@ argparse@^2.0.1: resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== -arr-union@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" - integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= - array-flatten@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" @@ -2992,28 +2708,11 @@ array-union@^2.1.0: resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== -array-union@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/array-union/-/array-union-3.0.1.tgz#da52630d327f8b88cfbfb57728e2af5cd9b6b975" - integrity sha512-1OvF9IbWwaeiM9VhzYXVQacMibxpXOMYVNIvMtKRyX9SImBXpKcFr8XvFDeEslCyuH/t6KRt7HEO94AlP8Iatw== - -asap@^2.0.0, asap@~2.0.3: +asap@~2.0.3: version "2.0.6" resolved 
"https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= -asn1@~0.2.3: - version "0.2.4" - resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" - integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== - dependencies: - safer-buffer "~2.1.0" - -assert-plus@1.0.0, assert-plus@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" - integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= - async@^2.6.2: version "2.6.3" resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff" @@ -3021,11 +2720,6 @@ async@^2.6.2: dependencies: lodash "^4.17.14" -asynckit@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" - integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= - at-least-node@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" @@ -3055,16 +2749,6 @@ autoprefixer@^10.3.5: picocolors "^0.2.1" postcss-value-parser "^4.1.0" -aws-sign2@~0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" - integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= - -aws4@^1.8.0: - version "1.11.0" - resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59" - integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA== - axios@^0.21.1: version "0.21.4" resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575" @@ -3158,57 +2842,17 @@ base16@^1.0.0: resolved "https://registry.yarnpkg.com/base16/-/base16-1.0.0.tgz#e297f60d7ec1014a7a971a39ebc8a98c0b681e70" integrity sha1-4pf2DX7BAUp6lxo568ipjAtoHnA= -bash-glob@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/bash-glob/-/bash-glob-2.0.0.tgz#a8ef19450783403ed93fccca2dbe09f2cf6320dc" - integrity sha512-53/NJ+t2UAkEYgQPO6aFjbx1Ue8vNNXCYaA4EljNKP1SR8A9dSQQoBmYWR8BLXO0/NDRJEMSJ4BxWihi//m3Kw== - dependencies: - bash-path "^1.0.1" - component-emitter "^1.2.1" - cross-spawn "^5.1.0" - each-parallel-async "^1.0.0" - extend-shallow "^2.0.1" - is-extglob "^2.1.1" - is-glob "^4.0.0" - -bash-path@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/bash-path/-/bash-path-1.0.3.tgz#dbc9efbdf18b1c11413dcb59b960e6aa56c84258" - integrity sha512-mGrYvOa6yTY/qNCiZkPFJqWmODK68y6kmVRAJ1NNbWlNoJrUrsFxu7FU2EKg7gbrer6ttrKkF2s/E/lhRy7/OA== - dependencies: - arr-union "^3.1.0" - is-windows "^1.0.1" - batch@0.6.1: version "0.6.1" resolved "https://registry.yarnpkg.com/batch/-/batch-0.6.1.tgz#dc34314f4e679318093fc760272525f94bf25c16" integrity sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY= -bcrypt-pbkdf@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" - integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= - dependencies: - tweetnacl "^0.14.3" - big.js@^5.2.2: version "5.2.2" resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== -bin-links@^2.3.0: - version 
"2.3.0" - resolved "https://registry.yarnpkg.com/bin-links/-/bin-links-2.3.0.tgz#1ff241c86d2c29b24ae52f49544db5d78a4eb967" - integrity sha512-JzrOLHLwX2zMqKdyYZjkDgQGT+kHDkIhv2/IK2lJ00qLxV4TmFoHi8drDBb6H5Zrz1YfgHkai4e2MGPqnoUhqA== - dependencies: - cmd-shim "^4.0.1" - mkdirp-infer-owner "^2.0.0" - npm-normalize-package-bin "^1.0.0" - read-cmd-shim "^2.0.0" - rimraf "^3.0.0" - write-file-atomic "^3.0.3" - -binary-extensions@^2.0.0, binary-extensions@^2.2.0: +binary-extensions@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== @@ -3314,11 +2958,6 @@ buffer-indexof@^1.0.0: resolved "https://registry.yarnpkg.com/buffer-indexof/-/buffer-indexof-1.1.1.tgz#52fabcc6a606d1a00302802648ef68f639da268c" integrity sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g== -builtins@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/builtins/-/builtins-1.0.3.tgz#cb94faeb61c8696451db36534e1422f94f0aee88" - integrity sha1-y5T662HIaWRR2zZTThQi+U8K7og= - bytes@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" @@ -3329,30 +2968,6 @@ bytes@3.1.0: resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== -cacache@^15.0.3, cacache@^15.0.5, cacache@^15.2.0, cacache@^15.3.0: - version "15.3.0" - resolved "https://registry.yarnpkg.com/cacache/-/cacache-15.3.0.tgz#dc85380fb2f556fe3dda4c719bfa0ec875a7f1eb" - integrity sha512-VVdYzXEn+cnbXpFgWs5hTT7OScegHVmLhJIR8Ufqk3iFD6A6j5iSX1KuBTfNEv4tdJWE2PzA6IVFtcLC7fN9wQ== - dependencies: - "@npmcli/fs" "^1.0.0" - "@npmcli/move-file" "^1.0.1" - chownr "^2.0.0" - fs-minipass "^2.0.0" - glob "^7.1.4" - infer-owner "^1.0.4" - lru-cache "^6.0.0" - minipass "^3.1.1" - minipass-collect "^1.0.2" - minipass-flush "^1.0.5" - minipass-pipeline "^1.2.2" - mkdirp "^1.0.3" - p-map "^4.0.0" - promise-inflight "^1.0.1" - rimraf "^3.0.2" - ssri "^8.0.1" - tar "^6.0.2" - unique-filename "^1.1.1" - cacheable-request@^6.0.0: version "6.1.0" resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-6.1.0.tgz#20ffb8bd162ba4be11e9567d823db651052ca912" @@ -3417,11 +3032,6 @@ caniuse-lite@^1.0.30001264, caniuse-lite@^1.0.30001265: resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001269.tgz#3a71bee03df627364418f9fd31adfc7aa1cc2d56" integrity sha512-UOy8okEVs48MyHYgV+RdW1Oiudl1H6KolybD6ZquD0VcrPSgj25omXO1S7rDydjpqaISCwA8Pyx+jUQKZwWO5w== -caseless@~0.12.0: - version "0.12.0" - resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" - integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= - ccount@^1.0.0, ccount@^1.0.3: version "1.1.0" resolved "https://registry.yarnpkg.com/ccount/-/ccount-1.1.0.tgz#246687debb6014735131be8abab2d93898f8d043" @@ -3436,14 +3046,6 @@ chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2: escape-string-regexp "^1.0.5" supports-color "^5.3.0" -chalk@^4.0.0, chalk@^4.1.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" - integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== - dependencies: - 
ansi-styles "^4.1.0" - supports-color "^7.1.0" - chalk@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.1.tgz#c80b3fab28bf6371e6863325eee67e618b77e6ad" @@ -3452,6 +3054,14 @@ chalk@^4.1.0: ansi-styles "^4.1.0" supports-color "^7.1.0" +chalk@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + character-entities-legacy@^1.0.0: version "1.1.4" resolved "https://registry.yarnpkg.com/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz#94bc1845dce70a5bb9d2ecc748725661293d8fc1" @@ -3504,11 +3114,6 @@ chokidar@^3.4.2, chokidar@^3.5.2: optionalDependencies: fsevents "~2.3.2" -chownr@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/chownr/-/chownr-2.0.0.tgz#15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece" - integrity sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ== - chrome-trace-event@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" @@ -3524,13 +3129,6 @@ ci-info@^3.0.0: resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.2.0.tgz#2876cb948a498797b5236f0095bc057d0dca38b6" integrity sha512-dVqRX7fLUm8J6FgHJ418XuIgDLZDkYcDFTeL6TA2gt5WlIZUQrrH6EZrNClwT/H0FateUsZkGIOPRrLbP+PR9A== -cidr-regex@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/cidr-regex/-/cidr-regex-3.1.1.tgz#ba1972c57c66f61875f18fd7dd487469770b571d" - integrity sha512-RBqYd32aDwbCMFJRL6wHOlDNYJsPNTt8vC82ErHF5vKt8QQzxm1FrkW8s/R5pVrXMf17sba09Uoy91PKiddAsw== - dependencies: - ip-regex "^4.1.0" - classnames@^2.2.6: version "2.3.1" resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.3.1.tgz#dfcfa3891e306ec1dad105d0e88f4417b8535e8e" @@ -3553,24 +3151,6 @@ cli-boxes@^2.2.1: resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-2.2.1.tgz#ddd5035d25094fce220e9cab40a45840a440318f" integrity sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw== -cli-columns@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/cli-columns/-/cli-columns-4.0.0.tgz#9fe4d65975238d55218c41bd2ed296a7fa555646" - integrity sha512-XW2Vg+w+L9on9wtwKpyzluIPCWXjaBahI7mTcYjx+BVIYD9c3yqcv/yKC7CmdCZat4rq2yiE1UMSJC5ivKfMtQ== - dependencies: - string-width "^4.2.3" - strip-ansi "^6.0.1" - -cli-table3@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/cli-table3/-/cli-table3-0.6.0.tgz#b7b1bc65ca8e7b5cef9124e13dc2b21e2ce4faee" - integrity sha512-gnB85c3MGC7Nm9I/FkiasNBOKjOiO1RNuXXarQms37q4QMpWdlbBgD/VnOStA2faG1dpXMv31RFApjX1/QdgWQ== - dependencies: - object-assign "^4.1.0" - string-width "^4.2.0" - optionalDependencies: - colors "^1.1.2" - clone-deep@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" @@ -3587,23 +3167,11 @@ clone-response@^1.0.2: dependencies: mimic-response "^1.0.0" -clone@^1.0.2: - version "1.0.4" - resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" - integrity sha1-2jCcwmPfFZlMaIypAheco8fNfH4= - clsx@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/clsx/-/clsx-1.1.1.tgz#98b3134f9abbdf23b2663491ace13c5c03a73188" integrity 
sha512-6/bPho624p3S2pMyvP5kKBPXnI3ufHLObBFCfgx+LkeR5lg2XYy2hqZqUf45ypD8COn2bhgGJSUE+l5dhNBieA== -cmd-shim@^4.0.1: - version "4.1.0" - resolved "https://registry.yarnpkg.com/cmd-shim/-/cmd-shim-4.1.0.tgz#b3a904a6743e9fede4148c6f3800bf2a08135bdd" - integrity sha512-lb9L7EM4I/ZRVuljLPEtUJOP+xiQVknZ4ZMpMgEp4JzNldPb27HU03hi6K1/6CoIuit/Zm/LQXySErFeXxDprw== - dependencies: - mkdirp-infer-owner "^2.0.0" - coa@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3" @@ -3613,11 +3181,6 @@ coa@^2.0.2: chalk "^2.4.1" q "^1.1.2" -code-point-at@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" - integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= - collapse-white-space@^1.0.2: version "1.0.6" resolved "https://registry.yarnpkg.com/collapse-white-space/-/collapse-white-space-1.0.6.tgz#e63629c0016665792060dbbeb79c42239d2c5287" @@ -3647,11 +3210,6 @@ color-name@~1.1.4: resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== -color-support@^1.1.2: - version "1.1.3" - resolved "https://registry.yarnpkg.com/color-support/-/color-support-1.1.3.tgz#93834379a1cc9a0c61f82f52f0d04322251bd5a2" - integrity sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg== - colord@^2.0.1: version "2.1.0" resolved "https://registry.yarnpkg.com/colord/-/colord-2.1.0.tgz#28cd9d6ac874dff97ef5ec1432c5c0b4e58e49c7" @@ -3672,37 +3230,17 @@ colorette@^2.0.10: resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.16.tgz#713b9af84fdb000139f04546bd4a93f62a5085da" integrity sha512-hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g== -colors@^1.1.2, colors@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78" - integrity sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA== - -columnify@~1.5.4: - version "1.5.4" - resolved "https://registry.yarnpkg.com/columnify/-/columnify-1.5.4.tgz#4737ddf1c7b69a8a7c340570782e947eec8e78bb" - integrity sha1-Rzfd8ce2mop8NAVweC6UfuyOeLs= - dependencies: - strip-ansi "^3.0.0" - wcwidth "^1.0.0" - combine-promises@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/combine-promises/-/combine-promises-1.1.0.tgz#72db90743c0ca7aab7d0d8d2052fd7b0f674de71" integrity sha512-ZI9jvcLDxqwaXEixOhArm3r7ReIivsXkpbyEWyeOhzz1QS0iSgBPnWvEqvIQtYyamGCYA88gFhmUrs9hrrQ0pg== -combined-stream@^1.0.6, combined-stream@~1.0.6: - version "1.0.8" - resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" - integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== - dependencies: - delayed-stream "~1.0.0" - comma-separated-tokens@^1.0.0: version "1.0.8" resolved "https://registry.yarnpkg.com/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz#632b80b6117867a158f1080ad498b2fbe7e3f5ea" integrity sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw== -commander@^2.11.0, commander@^2.20.0: +commander@^2.20.0: version "2.20.3" resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" 
integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== @@ -3722,26 +3260,16 @@ commander@^7.1.0: resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" integrity sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== -commander@^8.1.0, commander@^8.2.0: +commander@^8.1.0: version "8.2.0" resolved "https://registry.yarnpkg.com/commander/-/commander-8.2.0.tgz#37fe2bde301d87d47a53adeff8b5915db1381ca8" integrity sha512-LLKxDvHeL91/8MIyTAD5BFMNtoIwztGPMiM/7Bl8rIPmHCZXRxmSWr91h57dpOpnQ6jIUqEWdXE/uBYMfiVZDA== -common-ancestor-path@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/common-ancestor-path/-/common-ancestor-path-1.0.1.tgz#4f7d2d1394d91b7abdf51871c62f71eadb0182a7" - integrity sha512-L3sHRo1pXXEqX8VU28kfgUY+YGsk09hPqZiZmLacNib6XNTCM8ubYeT7ryXQw8asB1sKgcU5lkB7ONug08aB8w== - commondir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= -component-emitter@^1.2.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" - integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== - compressible@~2.0.16: version "2.0.18" resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" @@ -3789,11 +3317,6 @@ consola@^2.15.0: resolved "https://registry.yarnpkg.com/consola/-/consola-2.15.3.tgz#2e11f98d6a4be71ff72e0bdf07bd23e12cb61550" integrity sha512-9vAdYbHj6x2fLKC4+oPH0kFzY/orMZyG2Aj+kNylHxKGJ/Ed4dpNyAQYwJOdqO4zdM7XpVHmyejQDcQHrnuXbw== -console-control-strings@^1.0.0, console-control-strings@^1.1.0, console-control-strings@~1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" - integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4= - content-disposition@0.5.2: version "0.5.2" resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.2.tgz#0cf68bb9ddf5f2be7961c3a85178cb85dba78cb4" @@ -3872,7 +3395,7 @@ core-js@^3.18.0: resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.18.3.tgz#86a0bba2d8ec3df860fefcc07a8d119779f01509" integrity sha512-tReEhtMReZaPFVw7dajMx0vlsz3oOb8ajgPoHVYGxr8ErnZ6PcYEvvmjGmXlfpnxpkYSdOQttjB+MvVbCGfvLw== -core-util-is@1.0.2, core-util-is@~1.0.0: +core-util-is@~1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= @@ -3906,15 +3429,6 @@ cross-fetch@^3.0.4: dependencies: node-fetch "2.6.1" -cross-spawn@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449" - integrity sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk= - dependencies: - lru-cache "^4.0.1" - shebang-command "^1.2.0" - which "^1.2.9" - cross-spawn@^7.0.3: version "7.0.3" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" @@ -4165,13 +3679,6 @@ csstype@^3.0.2: resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.0.10.tgz#2ad3a7bed70f35b965707c092e5f30b327c290e5" integrity 
sha512-2u44ZG2OcNUO9HDp/Jl8C07x6pU/eTR3ncV91SiK3dhG9TWvRVsCoJw14Ckx5DgWkzGA3waZWO3d7pgqpUI/XA== -dashdash@^1.12.0: - version "1.14.1" - resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" - integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= - dependencies: - assert-plus "^1.0.0" - debug@2.6.9, debug@^2.6.0: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" @@ -4179,13 +3686,6 @@ debug@2.6.9, debug@^2.6.0: dependencies: ms "2.0.0" -debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1: - version "4.3.2" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.2.tgz#f0a49c18ac8779e31d4a0c6029dfb76873c7428b" - integrity sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw== - dependencies: - ms "2.1.2" - debug@^3.1.1: version "3.2.7" resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" @@ -4193,10 +3693,12 @@ debug@^3.1.1: dependencies: ms "^2.1.1" -debuglog@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/debuglog/-/debuglog-1.0.1.tgz#aa24ffb9ac3df9a2351837cfb2d279360cd78492" - integrity sha1-qiT/uaw9+aI1GDfPstJ5NgzXhJI= +debug@^4.1.0, debug@^4.1.1: + version "4.3.2" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.2.tgz#f0a49c18ac8779e31d4a0c6029dfb76873c7428b" + integrity sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw== + dependencies: + ms "2.1.2" decompress-response@^3.3.0: version "3.3.0" @@ -4234,13 +3736,6 @@ default-gateway@^6.0.0: dependencies: execa "^5.0.0" -defaults@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.3.tgz#c656051e9817d9ff08ed881477f3fe4019f3ef7d" - integrity sha1-xlYFHpgX2f8I7YgUd/P+QBnz730= - dependencies: - clone "^1.0.2" - defer-to-connect@^1.0.1: version "1.1.3" resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591" @@ -4272,17 +3767,7 @@ del@^6.0.0: rimraf "^3.0.2" slash "^3.0.0" -delayed-stream@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" - integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= - -delegates@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" - integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= - -depd@^1.1.2, depd@~1.1.2: +depd@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= @@ -4320,19 +3805,6 @@ detect-port@^1.3.0: address "^1.0.1" debug "^2.6.0" -dezalgo@^1.0.0: - version "1.0.3" - resolved "https://registry.yarnpkg.com/dezalgo/-/dezalgo-1.0.3.tgz#7f742de066fc748bc8db820569dddce49bf0d456" - integrity sha1-f3Qt4Gb8dIvI24IFad3c5Jvw1FY= - dependencies: - asap "^2.0.0" - wrappy "1" - -diff@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/diff/-/diff-5.0.0.tgz#7ed6ad76d859d030787ec35855f5b1daf31d852b" - integrity sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w== - dir-glob@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" @@ -4466,19 +3938,6 @@ duplexer@^0.1.1, duplexer@^0.1.2: resolved 
"https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6" integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== -each-parallel-async@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/each-parallel-async/-/each-parallel-async-1.0.0.tgz#91783e190000c7dd588336b2d468ebaf71980f7b" - integrity sha512-P/9kLQiQj0vZNzphvKKTgRgMnlqs5cJsxeAiuog1jrUnwv0Z3hVUwJDQiP7MnLb2I9S15nR9SRUceFT9IxtqRg== - -ecc-jsbn@~0.1.1: - version "0.1.2" - resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" - integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= - dependencies: - jsbn "~0.1.0" - safer-buffer "^2.1.0" - ee-first@1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" @@ -4514,13 +3973,6 @@ encodeurl@~1.0.2: resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= -encoding@^0.1.12: - version "0.1.13" - resolved "https://registry.yarnpkg.com/encoding/-/encoding-0.1.13.tgz#56574afdd791f54a8e9b2785c0582a2d26210fa9" - integrity sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A== - dependencies: - iconv-lite "^0.6.2" - end-of-stream@^1.1.0: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" @@ -4546,21 +3998,6 @@ entities@^2.0.0: resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== -env-paths@^2.2.0: - version "2.2.1" - resolved "https://registry.yarnpkg.com/env-paths/-/env-paths-2.2.1.tgz#420399d416ce1fbe9bc0a07c62fa68d67fd0f8f2" - integrity sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A== - -eol@^0.9.1: - version "0.9.1" - resolved "https://registry.yarnpkg.com/eol/-/eol-0.9.1.tgz#f701912f504074be35c6117a5c4ade49cd547acd" - integrity sha512-Ds/TEoZjwggRoz/Q2O7SE3i4Jm66mqTDfmdHdq/7DKVk3bro9Q8h6WdXKdPqFLMoqxrDK5SVRzHVPOS6uuGtrg== - -err-code@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/err-code/-/err-code-2.0.3.tgz#23c2f3b756ffdfc608d30e27c9a941024807e7f9" - integrity sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA== - error-ex@^1.3.1: version "1.3.2" resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" @@ -4759,27 +4196,17 @@ extend-shallow@^2.0.1: dependencies: is-extendable "^0.1.0" -extend@^3.0.0, extend@~3.0.2: +extend@^3.0.0: version "3.0.2" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== -extsprintf@1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" - integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= - -extsprintf@^1.2.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" - integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= - fast-deep-equal@^3.1.1: version "3.1.3" resolved 
"https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== -fast-glob@^3.1.1, fast-glob@^3.2.5, fast-glob@^3.2.7: +fast-glob@^3.1.1, fast-glob@^3.2.5: version "3.2.7" resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.7.tgz#fd6cb7a2d7e9aa7a7846111e85a196d6b2f766a1" integrity sha512-rYGMRwip6lUMvYD3BTScMwT1HtAs2d71SMv66Vrxs0IekGZEjhM0pcMfjQPnknBt2zeCwQMEupiN02ZP4DiT1Q== @@ -4802,11 +4229,6 @@ fast-url-parser@1.1.3: dependencies: punycode "^1.3.2" -fastest-levenshtein@^1.0.12: - version "1.0.12" - resolved "https://registry.yarnpkg.com/fastest-levenshtein/-/fastest-levenshtein-1.0.12.tgz#9990f7d3a88cc5a9ffd1f1745745251700d497e2" - integrity sha512-On2N+BpYJ15xIC974QNVuYGMOlEVt4s0EOI3wwMqOmK1fdDY+FN/zltPV8vosq4ad4c/gJ1KHScUn/6AWIgiow== - fastq@^1.6.0: version "1.11.1" resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.11.1.tgz#5d8175aae17db61947f8b162cfc7f63264d22807" @@ -4902,15 +4324,6 @@ find-cache-dir@^3.3.1: make-dir "^3.0.2" pkg-dir "^4.1.0" -find-cache-dir@^3.3.2: - version "3.3.2" - resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.2.tgz#b30c5b6eff0730731aea9bbd9dbecbd80256d64b" - integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig== - dependencies: - commondir "^1.0.1" - make-dir "^3.0.2" - pkg-dir "^4.1.0" - find-up@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" @@ -4934,14 +4347,6 @@ find-up@^5.0.0: locate-path "^6.0.0" path-exists "^4.0.0" -find-up@^6.1.0, find-up@^6.2.0: - version "6.2.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-6.2.0.tgz#f3b81d633fa83bebe64f83a8bab357f86d5914be" - integrity sha512-yWHzMzXCaFoABSnFTCPKNFlYoq4mSga9QLRRKOCLSJ33hSkzROB14ITbAWW0QDQDyuzsPQ33S1DsOWQb/oW1yA== - dependencies: - locate-path "^7.0.0" - path-exists "^5.0.0" - flux@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/flux/-/flux-4.0.1.tgz#7843502b02841d4aaa534af0b373034a1f75ee5c" @@ -4960,11 +4365,6 @@ follow-redirects@^1.14.0: resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.4.tgz#838fdf48a8bbdd79e52ee51fb1c94e3ed98b9379" integrity sha512-zwGkiSXC1MUJG/qmeIFH2HBJx9u0V46QGUe3YR1fXG8bXQxq7fLj0RjLZQ5nubr9qNJUZrH+xUcwXEoXNpfS+g== -forever-agent@~0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" - integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= - fork-ts-checker-webpack-plugin@^6.0.5: version "6.4.0" resolved "https://registry.yarnpkg.com/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.4.0.tgz#057e477cf1d8b013b2ed2669437f818680289c4c" @@ -4984,15 +4384,6 @@ fork-ts-checker-webpack-plugin@^6.0.5: semver "^7.3.2" tapable "^1.0.0" -form-data@~2.3.2: - version "2.3.3" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" - integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.6" - mime-types "^2.1.12" - forwarded@0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" @@ -5027,13 +4418,6 @@ fs-extra@^9.0.0: jsonfile "^6.0.1" universalify "^2.0.0" 
-fs-minipass@^2.0.0, fs-minipass@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb" - integrity sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg== - dependencies: - minipass "^3.0.0" - fs-monkey@1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/fs-monkey/-/fs-monkey-1.0.3.tgz#ae3ac92d53bb328efe0e9a1d9541f6ad8d48e2d3" @@ -5054,35 +4438,6 @@ function-bind@^1.1.1: resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== -gauge@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/gauge/-/gauge-3.0.1.tgz#4bea07bcde3782f06dced8950e51307aa0f4a346" - integrity sha512-6STz6KdQgxO4S/ko+AbjlFGGdGcknluoqU+79GOFCDqqyYj5OanQf9AjxwN0jCidtT+ziPMmPSt9E4hfQ0CwIQ== - dependencies: - aproba "^1.0.3 || ^2.0.0" - color-support "^1.1.2" - console-control-strings "^1.0.0" - has-unicode "^2.0.1" - object-assign "^4.1.1" - signal-exit "^3.0.0" - string-width "^1.0.1 || ^2.0.0" - strip-ansi "^3.0.1 || ^4.0.0" - wide-align "^1.1.2" - -gauge@~2.7.3: - version "2.7.4" - resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7" - integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c= - dependencies: - aproba "^1.0.3" - console-control-strings "^1.0.0" - has-unicode "^2.0.0" - object-assign "^4.1.0" - signal-exit "^3.0.0" - string-width "^1.0.1" - strip-ansi "^3.0.1" - wide-align "^1.1.0" - gensync@^1.0.0-beta.1, gensync@^1.0.0-beta.2: version "1.0.0-beta.2" resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" @@ -5121,13 +4476,6 @@ get-stream@^6.0.0: resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== -getpass@^0.1.1: - version "0.1.7" - resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" - integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= - dependencies: - assert-plus "^1.0.0" - github-slugger@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/github-slugger/-/github-slugger-1.4.0.tgz#206eb96cdb22ee56fdc53a28d5a302338463444e" @@ -5157,7 +4505,7 @@ glob@^7.0.0, glob@^7.1.3: once "^1.3.0" path-is-absolute "^1.0.0" -glob@^7.1.1, glob@^7.1.4, glob@^7.1.6, glob@^7.2.0: +glob@^7.1.6: version "7.2.0" resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.0.tgz#d15535af7732e02e948f4c41628bd910293f6023" integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q== @@ -5209,18 +4557,6 @@ globby@^11.0.1, globby@^11.0.2, globby@^11.0.3, globby@^11.0.4: merge2 "^1.3.0" slash "^3.0.0" -globby@^12.0.2: - version "12.0.2" - resolved "https://registry.yarnpkg.com/globby/-/globby-12.0.2.tgz#53788b2adf235602ed4cabfea5c70a1139e1ab11" - integrity sha512-lAsmb/5Lww4r7MM9nCCliDZVIKbZTavrsunAsHLr9oHthrZP1qi7/gAnHOsUs9bLvEt2vKVJhHmxuL7QbDuPdQ== - dependencies: - array-union "^3.0.1" - dir-glob "^3.0.1" - fast-glob "^3.2.7" - ignore "^5.1.8" - merge2 "^1.4.1" - slash "^4.0.0" - got@^9.6.0: version "9.6.0" resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85" @@ -5243,7 +4579,7 @@ graceful-fs@^4.1.2, 
graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4: resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.6.tgz#ff040b2b0853b23c3d31027523706f1885d76bee" integrity sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ== -graceful-fs@^4.2.3, graceful-fs@^4.2.6, graceful-fs@^4.2.8: +graceful-fs@^4.2.6: version "4.2.8" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.8.tgz#e412b8d33f5e006593cbd3cee6df9f2cebbe802a" integrity sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg== @@ -5278,19 +4614,6 @@ handle-thing@^2.0.0: resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.1.tgz#857f79ce359580c340d43081cc648970d0bb234e" integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg== -har-schema@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" - integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= - -har-validator@~5.1.3: - version "5.1.5" - resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.5.tgz#1f0803b9f8cb20c0fa13822df1ecddb36bde1efd" - integrity sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w== - dependencies: - ajv "^6.12.3" - har-schema "^2.0.0" - has-bigints@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.1.tgz#64fe6acb020673e3b78db035a5af69aa9d07b113" @@ -5311,11 +4634,6 @@ has-symbols@^1.0.1, has-symbols@^1.0.2: resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.2.tgz#165d3070c00309752a1236a479331e3ac56f1423" integrity sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw== -has-unicode@^2.0.0, has-unicode@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" - integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk= - has-yarn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/has-yarn/-/has-yarn-2.1.0.tgz#137e11354a7b5bf11aa5cb649cf0c6f3ff2b2e77" @@ -5446,13 +4764,6 @@ hoist-non-react-statics@^3.1.0: dependencies: react-is "^16.7.0" -hosted-git-info@^4.0.1, hosted-git-info@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-4.0.2.tgz#5e425507eede4fea846b7262f0838456c4209961" - integrity sha512-c9OGXbZ3guC/xOlCg1Ci/VgWlwsqDv1yMQL1CWqXDL0hDjXuNcq0zuR4xqPSuasI3kqFDhqSyTjREz5gzq0fXg== - dependencies: - lru-cache "^6.0.0" - hpack.js@^2.1.6: version "2.1.6" resolved "https://registry.yarnpkg.com/hpack.js/-/hpack.js-2.1.6.tgz#87774c0949e513f42e84575b3c45681fade2a0b2" @@ -5534,7 +4845,7 @@ htmlparser2@^6.1.0: domutils "^2.5.2" entities "^2.0.0" -http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.0: +http-cache-semantics@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz#49e91c5cbf36c9b94bcfcd71c23d5249ec74e390" integrity sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ== @@ -5581,15 +4892,6 @@ http-parser-js@>=0.5.1: resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.5.3.tgz#01d2709c79d41698bb01d4decc5e9da4e4a033d9" integrity sha512-t7hjvef/5HEK7RWTdUzVUhl8zkEu+LlaE0IYzdMuvbSDipxBRpOn4Uhw8ZyECEa808iVT8XCjzo6xmYt4CiLZg== -http-proxy-agent@^4.0.1: - version "4.0.1" - resolved 
"https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz#8a8c8ef7f5932ccf953c296ca8291b95aa74aa3a" - integrity sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg== - dependencies: - "@tootallnate/once" "1" - agent-base "6" - debug "4" - http-proxy-middleware@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-2.0.1.tgz#7ef3417a479fb7666a571e09966c66a39bd2c15f" @@ -5610,35 +4912,11 @@ http-proxy@^1.18.1: follow-redirects "^1.0.0" requires-port "^1.0.0" -http-signature@~1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" - integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= - dependencies: - assert-plus "^1.0.0" - jsprim "^1.2.2" - sshpk "^1.7.0" - -https-proxy-agent@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz#e2a90542abb68a762e0a0850f6c9edadfd8506b2" - integrity sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA== - dependencies: - agent-base "6" - debug "4" - human-signals@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== -humanize-ms@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/humanize-ms/-/humanize-ms-1.2.1.tgz#c46e3159a293f6b896da29316d8b6fe8bb79bbed" - integrity sha1-xG4xWaKT9riW2ikxbYtv6Lt5u+0= - dependencies: - ms "^2.0.0" - iconv-lite@0.4.24: version "0.4.24" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" @@ -5646,33 +4924,12 @@ iconv-lite@0.4.24: dependencies: safer-buffer ">= 2.1.2 < 3" -iconv-lite@^0.6.2: - version "0.6.3" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" - integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== - dependencies: - safer-buffer ">= 2.1.2 < 3.0.0" - icss-utils@^5.0.0, icss-utils@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/icss-utils/-/icss-utils-5.1.0.tgz#c6be6858abd013d768e98366ae47e25d5887b1ae" integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== -ignore-walk@^3.0.3: - version "3.0.4" - resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.4.tgz#c9a09f69b7c7b479a5d74ac1a3c0d4236d2a6335" - integrity sha512-PY6Ii8o1jMRA1z4F2hRkH/xN59ox43DavKvD3oDpfurRlOJyAHpifIwpbdv1n4jt4ov0jSpw3kQ4GhJnpBL6WQ== - dependencies: - minimatch "^3.0.4" - -ignore-walk@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-4.0.1.tgz#fc840e8346cf88a3a9380c5b17933cd8f4d39fa3" - integrity sha512-rzDQLaW4jQbh2YrOFlJdCtX8qgJTehFRYiUB2r1osqTeDzV/3+Jh8fz1oAPzUThf3iku8Ds4IDqawI5d8mUiQw== - dependencies: - minimatch "^3.0.4" - -ignore@^5.1.4, ignore@^5.1.8: +ignore@^5.1.4: version "5.1.8" resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.8.tgz#f150a8b50a34289b33e22f5889abd4d8016f0e57" integrity sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw== @@ -5705,11 +4962,6 @@ indent-string@^4.0.0: resolved 
"https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== -infer-owner@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467" - integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== - infima@0.2.0-alpha.34: version "0.2.0-alpha.34" resolved "https://registry.yarnpkg.com/infima/-/infima-0.2.0-alpha.34.tgz#14a900d79a4de2013e025ac95749a4592f16ef6e" @@ -5733,7 +4985,7 @@ inherits@2.0.3: resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= -ini@2.0.0, ini@^2.0.0: +ini@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ini/-/ini-2.0.0.tgz#e5fd556ecdd5726be978fa1001862eacb0a94bc5" integrity sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA== @@ -5743,19 +4995,6 @@ ini@^1.3.5, ini@~1.3.0: resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== -init-package-json@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/init-package-json/-/init-package-json-2.0.5.tgz#78b85f3c36014db42d8f32117252504f68022646" - integrity sha512-u1uGAtEFu3VA6HNl/yUWw57jmKEMx8SKOxHhxjGnOFUiIlFnohKDFg4ZrPpv9wWqk44nDxGJAtqjdQFm+9XXQA== - dependencies: - npm-package-arg "^8.1.5" - promzard "^0.3.0" - read "~1.0.1" - read-package-json "^4.1.1" - semver "^7.3.5" - validate-npm-package-license "^3.0.4" - validate-npm-package-name "^3.0.0" - inline-style-parser@0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/inline-style-parser/-/inline-style-parser-0.1.1.tgz#ec8a3b429274e9c0a1f1c4ffa9453a7fef72cea1" @@ -5776,12 +5015,12 @@ interpret@^1.0.0: resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.4.0.tgz#665ab8bc4da27a774a40584e812e3e0fa45b1a1e" integrity sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA== -ip-regex@^4.0.0, ip-regex@^4.1.0: +ip-regex@^4.0.0: version "4.3.0" resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-4.3.0.tgz#687275ab0f57fa76978ff8f4dddc8a23d5990db5" integrity sha512-B9ZWJxHHOHUhUjCPrMpLD4xEq35bUTClHM1S6CBU5ixQnkZmwipwgc96vAd7AAGM9TGHvJR+Uss+/Ak6UphK+Q== -ip@^1.1.0, ip@^1.1.5: +ip@^1.1.0: version "1.1.5" resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo= @@ -5862,13 +5101,6 @@ is-ci@^2.0.0: dependencies: ci-info "^2.0.0" -is-cidr@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/is-cidr/-/is-cidr-4.0.2.tgz#94c7585e4c6c77ceabf920f8cde51b8c0fda8814" - integrity sha512-z4a1ENUajDbEl/Q6/pVBpTR1nBjjEE1X7qb7bmWYanNnPoKAvUCPFKeXV6Fe4mgTkWKBqiHIcwsI3SndiO5FeA== - dependencies: - cidr-regex "^3.1.1" - is-color-stop@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-color-stop/-/is-color-stop-1.1.0.tgz#cfff471aee4dd5c9e158598fbe12967b5cdad345" @@ -5888,13 +5120,6 @@ is-core-module@^2.2.0: dependencies: has "^1.0.3" -is-core-module@^2.5.0: - version "2.8.0" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.8.0.tgz#0321336c3d0925e497fd97f5d95cb114a5ccd548" - integrity 
sha512-vd15qHsaqrRL7dtH6QNuy0ndJmRDrS9HAM1CAiSifNUFv4x1a0CCVsj18hJ1mShxIG6T2i1sO78MkP56r0nYRw== - dependencies: - has "^1.0.3" - is-date-object@^1.0.1: version "1.0.4" resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.4.tgz#550cfcc03afada05eea3dd30981c7b09551f73e5" @@ -5920,13 +5145,6 @@ is-extglob@^2.1.1: resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= -is-fullwidth-code-point@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" - integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= - dependencies: - number-is-nan "^1.0.0" - is-fullwidth-code-point@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" @@ -5937,7 +5155,7 @@ is-fullwidth-code-point@^3.0.0: resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== -is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1: +is-glob@^4.0.1, is-glob@~4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== @@ -5964,11 +5182,6 @@ is-ip@^3.1.0: dependencies: ip-regex "^4.0.0" -is-lambda@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-lambda/-/is-lambda-1.0.1.tgz#3d9877899e6a53efc0160504cde15f82e6f061d5" - integrity sha1-PZh3iZ5qU+/AFgUEzeFfgubwYdU= - is-negative-zero@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.1.tgz#3de746c18dda2319241a53675908d8f766f11c24" @@ -6071,7 +5284,7 @@ is-symbol@^1.0.2, is-symbol@^1.0.3: dependencies: has-symbols "^1.0.2" -is-typedarray@^1.0.0, is-typedarray@~1.0.0: +is-typedarray@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= @@ -6081,11 +5294,6 @@ is-whitespace-character@^1.0.0: resolved "https://registry.yarnpkg.com/is-whitespace-character/-/is-whitespace-character-1.0.4.tgz#0858edd94a95594c7c9dd0b5c174ec6e45ee4aa7" integrity sha512-SDweEzfIZM0SJV0EUga669UTKlmL0Pq8Lno0QDQsPnvECB3IM2aP0gdx5TrU0A01MAPfViaZiI2V1QMZLaKK5w== -is-windows@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" - integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== - is-word-character@^1.0.0: version "1.0.4" resolved "https://registry.yarnpkg.com/is-word-character/-/is-word-character-1.0.4.tgz#ce0e73216f98599060592f62ff31354ddbeb0230" @@ -6123,11 +5331,6 @@ isobject@^3.0.1: resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= -isstream@~0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" - integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= - jest-worker@^27.0.2: version "27.0.6" resolved 
"https://registry.yarnpkg.com/jest-worker/-/jest-worker-27.0.6.tgz#a5fdb1e14ad34eb228cfe162d9f729cdbfa28aed" @@ -6188,11 +5391,6 @@ js-yaml@^4.0.0: dependencies: argparse "^2.0.1" -jsbn@~0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" - integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= - jsesc@^2.5.1: version "2.5.2" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" @@ -6213,7 +5411,7 @@ json-parse-better-errors@^1.0.2: resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== -json-parse-even-better-errors@^2.3.0, json-parse-even-better-errors@^2.3.1: +json-parse-even-better-errors@^2.3.0: version "2.3.1" resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== @@ -6223,21 +5421,6 @@ json-schema-traverse@^0.4.1: resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== -json-schema@0.2.3: - version "0.2.3" - resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" - integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= - -json-stringify-nice@^1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/json-stringify-nice/-/json-stringify-nice-1.1.4.tgz#2c937962b80181d3f317dd39aa323e14f5a60a67" - integrity sha512-5Z5RFW63yxReJ7vANgW6eZFGWaQvnPE3WNmZoOJrSkGju2etKA2L5rrOa1sm877TVTFt57A80BH1bArcmlLfPw== - -json-stringify-safe@~5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" - integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= - json5@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" @@ -6261,31 +5444,6 @@ jsonfile@^6.0.1: optionalDependencies: graceful-fs "^4.1.6" -jsonparse@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-1.3.1.tgz#3f4dae4a91fac315f71062f8521cc239f1366280" - integrity sha1-P02uSpH6wxX3EGL4UhzCOfE2YoA= - -jsprim@^1.2.2: - version "1.4.1" - resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" - integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= - dependencies: - assert-plus "1.0.0" - extsprintf "1.3.0" - json-schema "0.2.3" - verror "1.10.0" - -just-diff-apply@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/just-diff-apply/-/just-diff-apply-3.0.0.tgz#a77348d24f0694e378b57293dceb65bdf5a91c4f" - integrity sha512-K2MLc+ZC2DVxX4V61bIKPeMUUfj1YYZ3h0myhchDXOW1cKoPZMnjIoNCqv9bF2n5Oob1PFxuR2gVJxkxz4e58w== - -just-diff@^3.0.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/just-diff/-/just-diff-3.1.1.tgz#d50c597c6fd4776495308c63bdee1b6839082647" - integrity sha512-sdMWKjRq8qWZEjDcVA6llnUT8RDEBIfOiGpYFPYa9u+2c39JCsejktSP7mj5eRid5EIvTzIpQ2kDOCw1Nq9BjQ== - keyv@^3.0.0: version "3.1.0" resolved 
"https://registry.yarnpkg.com/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9" @@ -6320,116 +5478,6 @@ leven@^3.1.0: resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== -libnpmaccess@^4.0.2: - version "4.0.3" - resolved "https://registry.yarnpkg.com/libnpmaccess/-/libnpmaccess-4.0.3.tgz#dfb0e5b0a53c315a2610d300e46b4ddeb66e7eec" - integrity sha512-sPeTSNImksm8O2b6/pf3ikv4N567ERYEpeKRPSmqlNt1dTZbvgpJIzg5vAhXHpw2ISBsELFRelk0jEahj1c6nQ== - dependencies: - aproba "^2.0.0" - minipass "^3.1.1" - npm-package-arg "^8.1.2" - npm-registry-fetch "^11.0.0" - -libnpmdiff@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/libnpmdiff/-/libnpmdiff-2.0.4.tgz#bb1687992b1a97a8ea4a32f58ad7c7f92de53b74" - integrity sha512-q3zWePOJLHwsLEUjZw3Kyu/MJMYfl4tWCg78Vl6QGSfm4aXBUSVzMzjJ6jGiyarsT4d+1NH4B1gxfs62/+y9iQ== - dependencies: - "@npmcli/disparity-colors" "^1.0.1" - "@npmcli/installed-package-contents" "^1.0.7" - binary-extensions "^2.2.0" - diff "^5.0.0" - minimatch "^3.0.4" - npm-package-arg "^8.1.1" - pacote "^11.3.0" - tar "^6.1.0" - -libnpmexec@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/libnpmexec/-/libnpmexec-3.0.1.tgz#bc2fddf1b7bd2c1b2c43b4b726ec4cf11920ad0a" - integrity sha512-VUZTpkKBRPv3Z9DIjbsiHhEQXmQ+OwSQ/yLCY9i6CFE8UIczWyE6wVxP5sJ5NSGtSTUs6I98WewQOL45OKMyxA== - dependencies: - "@npmcli/arborist" "^4.0.0" - "@npmcli/ci-detect" "^1.3.0" - "@npmcli/run-script" "^2.0.0" - chalk "^4.1.0" - mkdirp-infer-owner "^2.0.0" - npm-package-arg "^8.1.2" - pacote "^12.0.0" - proc-log "^1.0.0" - read "^1.0.7" - read-package-json-fast "^2.0.2" - walk-up-path "^1.0.0" - -libnpmfund@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/libnpmfund/-/libnpmfund-2.0.1.tgz#3c7e2be61e8c79e22c4918dde91ef57f64faf064" - integrity sha512-OhDbjB3gqdRyuQ56AhUtO49HZ7cZHSM7yCnhQa1lsNpmAmGPnjCImfx8SoWaAkUM7Ov8jngMR5JHKAr1ddjHTQ== - dependencies: - "@npmcli/arborist" "^4.0.0" - -libnpmhook@^6.0.2: - version "6.0.3" - resolved "https://registry.yarnpkg.com/libnpmhook/-/libnpmhook-6.0.3.tgz#1d7f0d7e6a7932fbf7ce0881fdb0ed8bf8748a30" - integrity sha512-3fmkZJibIybzmAvxJ65PeV3NzRc0m4xmYt6scui5msocThbEp4sKFT80FhgrCERYDjlUuFahU6zFNbJDHbQ++g== - dependencies: - aproba "^2.0.0" - npm-registry-fetch "^11.0.0" - -libnpmorg@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/libnpmorg/-/libnpmorg-2.0.3.tgz#4e605d4113dfa16792d75343824a0625c76703bc" - integrity sha512-JSGl3HFeiRFUZOUlGdiNcUZOsUqkSYrg6KMzvPZ1WVZ478i47OnKSS0vkPmX45Pai5mTKuwIqBMcGWG7O8HfdA== - dependencies: - aproba "^2.0.0" - npm-registry-fetch "^11.0.0" - -libnpmpack@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/libnpmpack/-/libnpmpack-3.0.0.tgz#b1cdf182106bc0d25910e79bb5c9b6c23cd71670" - integrity sha512-W6lt4blkR9YXu/qOrFknfnKBajz/1GvAc5q1XcWTGuBJn2DYKDWHtA7x1fuMQdn7hKDBOPlZ/Aqll+ZvAnrM6g== - dependencies: - "@npmcli/run-script" "^2.0.0" - npm-package-arg "^8.1.0" - pacote "^12.0.0" - -libnpmpublish@^4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/libnpmpublish/-/libnpmpublish-4.0.2.tgz#be77e8bf5956131bcb45e3caa6b96a842dec0794" - integrity sha512-+AD7A2zbVeGRCFI2aO//oUmapCwy7GHqPXFJh3qpToSRNU+tXKJ2YFUgjt04LPPAf2dlEH95s6EhIHM1J7bmOw== - dependencies: - normalize-package-data "^3.0.2" - npm-package-arg "^8.1.2" - npm-registry-fetch "^11.0.0" - semver "^7.1.3" - ssri "^8.0.1" - -libnpmsearch@^3.1.1: - 
version "3.1.2" - resolved "https://registry.yarnpkg.com/libnpmsearch/-/libnpmsearch-3.1.2.tgz#aee81b9e4768750d842b627a3051abc89fdc15f3" - integrity sha512-BaQHBjMNnsPYk3Bl6AiOeVuFgp72jviShNBw5aHaHNKWqZxNi38iVNoXbo6bG/Ccc/m1To8s0GtMdtn6xZ1HAw== - dependencies: - npm-registry-fetch "^11.0.0" - -libnpmteam@^2.0.3: - version "2.0.4" - resolved "https://registry.yarnpkg.com/libnpmteam/-/libnpmteam-2.0.4.tgz#9dbe2e18ae3cb97551ec07d2a2daf9944f3edc4c" - integrity sha512-FPrVJWv820FZFXaflAEVTLRWZrerCvfe7ZHSMzJ/62EBlho2KFlYKjyNEsPW3JiV7TLSXi3vo8u0gMwIkXSMTw== - dependencies: - aproba "^2.0.0" - npm-registry-fetch "^11.0.0" - -libnpmversion@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/libnpmversion/-/libnpmversion-2.0.1.tgz#20b1425d88cd99c66806a54b458d2d654066b550" - integrity sha512-uFGtNTe/m0GOIBQCE4ryIsgGNJdeShW+qvYtKNLCCuiG7JY3YEslL/maFFZbaO4wlQa/oj1t0Bm9TyjahvtgQQ== - dependencies: - "@npmcli/git" "^2.0.7" - "@npmcli/run-script" "^2.0.0" - json-parse-even-better-errors "^2.3.1" - semver "^7.3.5" - stringify-package "^1.0.1" - lilconfig@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.0.3.tgz#68f3005e921dafbd2a2afb48379986aa6d2579fd" @@ -6490,18 +5538,6 @@ locate-path@^6.0.0: dependencies: p-locate "^5.0.0" -locate-path@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-7.0.0.tgz#f0a60c8dd7ef0f737699eb9461b9567a92bc97da" - integrity sha512-+cg2yXqDUKfo4hsFxwa3G1cBJeA+gs1vD8FyV9/odWoUlQe/4syxHQ5DPtKjtfm6gnKbZzjCqzX03kXosvZB1w== - dependencies: - p-locate "^6.0.0" - -lodash-es@^4.17.21: - version "4.17.21" - resolved "https://registry.yarnpkg.com/lodash-es/-/lodash-es-4.17.21.tgz#43e626c46e6591b7750beb2b50117390c609e3ee" - integrity sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw== - lodash.assignin@^4.0.9: version "4.2.0" resolved "https://registry.yarnpkg.com/lodash.assignin/-/lodash.assignin-4.2.0.tgz#ba8df5fb841eb0a3e8044232b0e263a8dc6a28a2" @@ -6592,7 +5628,7 @@ lodash.uniq@4.5.0, lodash.uniq@^4.5.0: resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M= -lodash@4.17.21, lodash@^4.17.14, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21: +lodash@^4.17.14, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== @@ -6621,14 +5657,6 @@ lowercase-keys@^2.0.0: resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479" integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== -lru-cache@^4.0.1: - version "4.1.5" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd" - integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== - dependencies: - pseudomap "^1.0.2" - yallist "^2.1.2" - lru-cache@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" @@ -6650,28 +5678,6 @@ make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: dependencies: semver "^6.0.0" -make-fetch-happen@^9.0.1, make-fetch-happen@^9.1.0: - version "9.1.0" - resolved 
"https://registry.yarnpkg.com/make-fetch-happen/-/make-fetch-happen-9.1.0.tgz#53085a09e7971433e6765f7971bf63f4e05cb968" - integrity sha512-+zopwDy7DNknmwPQplem5lAZX/eCOzSvSNNcSKm5eVwTkOBzoktEfXsa9L23J/GIRhxRsaxzkPEhrJEpE2F4Gg== - dependencies: - agentkeepalive "^4.1.3" - cacache "^15.2.0" - http-cache-semantics "^4.1.0" - http-proxy-agent "^4.0.1" - https-proxy-agent "^5.0.0" - is-lambda "^1.0.1" - lru-cache "^6.0.0" - minipass "^3.1.3" - minipass-collect "^1.0.2" - minipass-fetch "^1.3.2" - minipass-flush "^1.0.5" - minipass-pipeline "^1.2.4" - negotiator "^0.6.2" - promise-retry "^2.0.1" - socks-proxy-agent "^6.0.0" - ssri "^8.0.0" - markdown-escapes@^1.0.0: version "1.0.4" resolved "https://registry.yarnpkg.com/markdown-escapes/-/markdown-escapes-1.0.4.tgz#c95415ef451499d7602b91095f3c8e8975f78535" @@ -6752,7 +5758,7 @@ merge-stream@^2.0.0: resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== -merge2@^1.3.0, merge2@^1.4.1: +merge2@^1.3.0: version "1.4.1" resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== @@ -6775,11 +5781,6 @@ mime-db@1.48.0, "mime-db@>= 1.43.0 < 2": resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.48.0.tgz#e35b31045dd7eada3aaad537ed88a33afbef2d1d" integrity sha512-FM3QwxV+TnZYQ2aRqhlKBMHxk10lTbMt3bBkMAp54ddrNeVSfcQYOOKuGuy3Ddrm38I04If834fOUSq1yzslJQ== -mime-db@1.50.0: - version "1.50.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.50.0.tgz#abd4ac94e98d3c0e185016c67ab45d5fde40c11f" - integrity sha512-9tMZCDlYHqeERXEHO9f/hKfNXhre5dK2eE/krIvUjZbS2KPcqGDfNShIWS1uW9XOTKQKqK6qbeOci18rbfW77A== - mime-db@1.51.0: version "1.51.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.51.0.tgz#d9ff62451859b18342d960850dc3cfb77e63fb0c" @@ -6797,13 +5798,6 @@ mime-types@2.1.18: dependencies: mime-db "~1.33.0" -mime-types@^2.1.12, mime-types@~2.1.19: - version "2.1.33" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.33.tgz#1fa12a904472fafd068e48d9e8401f74d3f70edb" - integrity sha512-plLElXp7pRDd0bNZHw+nMd52vRYjLwQjygaNg7ddJ2uJtTlmnTCjWuPKxVu6//AdaRuME84SvLW91sIkBqGT0g== - dependencies: - mime-db "1.50.0" - mime-types@^2.1.27, mime-types@~2.1.17, mime-types@~2.1.24: version "2.1.31" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.31.tgz#a00d76b74317c61f9c2db2218b8e9f8e9c5c9e6b" @@ -6846,102 +5840,31 @@ mini-create-react-context@^0.4.0: "@babel/runtime" "^7.12.1" tiny-warning "^1.0.3" -mini-css-extract-plugin@^1.6.0: - version "1.6.2" - resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-1.6.2.tgz#83172b4fd812f8fc4a09d6f6d16f924f53990ca8" - integrity sha512-WhDvO3SjGm40oV5y26GjMJYjd2UMqrLAGKy5YS2/3QKJy2F7jgynuHTir/tgUUOiNQu5saXHdc8reo7YuhhT4Q== - dependencies: - loader-utils "^2.0.0" - schema-utils "^3.0.0" - webpack-sources "^1.1.0" - -minimalistic-assert@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" - integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== - -minimatch@3.0.4, minimatch@^3.0.4: - version "3.0.4" - resolved 
"https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" - integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== - dependencies: - brace-expansion "^1.1.7" - -minimist@^1.2.0, minimist@^1.2.5: - version "1.2.5" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" - integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== - -minipass-collect@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/minipass-collect/-/minipass-collect-1.0.2.tgz#22b813bf745dc6edba2576b940022ad6edc8c617" - integrity sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA== - dependencies: - minipass "^3.0.0" - -minipass-fetch@^1.3.0, minipass-fetch@^1.3.2: - version "1.4.1" - resolved "https://registry.yarnpkg.com/minipass-fetch/-/minipass-fetch-1.4.1.tgz#d75e0091daac1b0ffd7e9d41629faff7d0c1f1b6" - integrity sha512-CGH1eblLq26Y15+Azk7ey4xh0J/XfJfrCox5LDJiKqI2Q2iwOLOKrlmIaODiSQS8d18jalF6y2K2ePUm0CmShw== - dependencies: - minipass "^3.1.0" - minipass-sized "^1.0.3" - minizlib "^2.0.0" - optionalDependencies: - encoding "^0.1.12" - -minipass-flush@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/minipass-flush/-/minipass-flush-1.0.5.tgz#82e7135d7e89a50ffe64610a787953c4c4cbb373" - integrity sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw== - dependencies: - minipass "^3.0.0" - -minipass-json-stream@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/minipass-json-stream/-/minipass-json-stream-1.0.1.tgz#7edbb92588fbfc2ff1db2fc10397acb7b6b44aa7" - integrity sha512-ODqY18UZt/I8k+b7rl2AENgbWE8IDYam+undIJONvigAz8KR5GWblsFTEfQs0WODsjbSXWlm+JHEv8Gr6Tfdbg== - dependencies: - jsonparse "^1.3.1" - minipass "^3.0.0" - -minipass-pipeline@^1.2.2, minipass-pipeline@^1.2.4: - version "1.2.4" - resolved "https://registry.yarnpkg.com/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz#68472f79711c084657c067c5c6ad93cddea8214c" - integrity sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A== - dependencies: - minipass "^3.0.0" - -minipass-sized@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/minipass-sized/-/minipass-sized-1.0.3.tgz#70ee5a7c5052070afacfbc22977ea79def353b70" - integrity sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g== +mini-css-extract-plugin@^1.6.0: + version "1.6.2" + resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-1.6.2.tgz#83172b4fd812f8fc4a09d6f6d16f924f53990ca8" + integrity sha512-WhDvO3SjGm40oV5y26GjMJYjd2UMqrLAGKy5YS2/3QKJy2F7jgynuHTir/tgUUOiNQu5saXHdc8reo7YuhhT4Q== dependencies: - minipass "^3.0.0" + loader-utils "^2.0.0" + schema-utils "^3.0.0" + webpack-sources "^1.1.0" -minipass@^3.0.0, minipass@^3.1.0, minipass@^3.1.1, minipass@^3.1.3: - version "3.1.5" - resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.1.5.tgz#71f6251b0a33a49c01b3cf97ff77eda030dff732" - integrity sha512-+8NzxD82XQoNKNrl1d/FSi+X8wAEWR+sbYAfIvub4Nz0d22plFG72CEVVaufV8PNf4qSslFTD8VMOxNVhHCjTw== - dependencies: - yallist "^4.0.0" +minimalistic-assert@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" + integrity 
sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== -minizlib@^2.0.0, minizlib@^2.1.1: - version "2.1.2" - resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931" - integrity sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg== +minimatch@3.0.4, minimatch@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== dependencies: - minipass "^3.0.0" - yallist "^4.0.0" + brace-expansion "^1.1.7" -mkdirp-infer-owner@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/mkdirp-infer-owner/-/mkdirp-infer-owner-2.0.0.tgz#55d3b368e7d89065c38f32fd38e638f0ab61d316" - integrity sha512-sdqtiFt3lkOaYvTXSRIUjkIdPTcxgv5+fgqYE/5qgwdw12cOrAuzzgzvVExIkH/ul1oeHN3bCLOWSG3XOqbKKw== - dependencies: - chownr "^2.0.0" - infer-owner "^1.0.4" - mkdirp "^1.0.3" +minimist@^1.2.0, minimist@^1.2.5: + version "1.2.5" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" + integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== mkdirp@^0.5.5, mkdirp@~0.5.1: version "0.5.5" @@ -6950,7 +5873,7 @@ mkdirp@^0.5.5, mkdirp@~0.5.1: dependencies: minimist "^1.2.5" -mkdirp@^1.0.3, mkdirp@^1.0.4: +mkdirp@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== @@ -6970,7 +5893,7 @@ ms@2.1.2: resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== -ms@^2.0.0, ms@^2.1.1, ms@^2.1.2: +ms@^2.1.1: version "2.1.3" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== @@ -6988,11 +5911,6 @@ multicast-dns@^6.0.1: dns-packet "^1.3.1" thunky "^1.0.2" -mute-stream@~0.0.4: - version "0.0.8" - resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d" - integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA== - nanoid@^3.1.23: version "3.1.23" resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.1.23.tgz#f744086ce7c2bc47ee0a8472574d5c78e4183a81" @@ -7003,7 +5921,7 @@ nanoid@^3.1.28: resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.1.30.tgz#63f93cc548d2a113dc5dfbc63bfa09e2b9b64362" integrity sha512-zJpuPDwOv8D2zq2WRoMe1HsfZthVewpel9CAvTfc/2mBD1uUT/agc5f7GHGWXlYkFvi1mVxe4IjvP2HNrop7nQ== -negotiator@0.6.2, negotiator@^0.6.2: +negotiator@0.6.2: version "0.6.2" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== @@ -7013,13 +5931,6 @@ neo-async@^2.6.2: resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" integrity 
sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== -nmtree@^1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/nmtree/-/nmtree-1.0.6.tgz#953e057ad545e9e627f1275bd25fea4e92c1cf63" - integrity sha512-SUPCoyX5w/lOT6wD/PZEymR+J899984tYEOYjuDqQlIOeX5NSb1MEsCcT0az+dhZD0MLAj5hGBZEpKQxuDdniA== - dependencies: - commander "^2.11.0" - no-case@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d" @@ -7045,60 +5956,11 @@ node-forge@^0.10.0: resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.10.0.tgz#32dea2afb3e9926f02ee5ce8794902691a676bf3" integrity sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA== -node-gyp@^7.1.0: - version "7.1.2" - resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-7.1.2.tgz#21a810aebb187120251c3bcec979af1587b188ae" - integrity sha512-CbpcIo7C3eMu3dL1c3d0xw449fHIGALIJsRP4DDPHpyiW8vcriNY7ubh9TE4zEKfSxscY7PjeFnshE7h75ynjQ== - dependencies: - env-paths "^2.2.0" - glob "^7.1.4" - graceful-fs "^4.2.3" - nopt "^5.0.0" - npmlog "^4.1.2" - request "^2.88.2" - rimraf "^3.0.2" - semver "^7.3.2" - tar "^6.0.2" - which "^2.0.2" - -node-gyp@^8.2.0: - version "8.3.0" - resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-8.3.0.tgz#ebc36a146d45095e1c6af6ccb0e47d1c8fc3fe69" - integrity "sha1-68NqFG1FCV4cavbMsOR9HI/D/mk= sha512-e+vmKyTiybKgrmvs4M2REFKCnOd+NcrAAnn99Yko6NQA+zZdMlRvbIUHojfsHrSQ1CddLgZnHicnEVgDHziJzA==" - dependencies: - env-paths "^2.2.0" - glob "^7.1.4" - graceful-fs "^4.2.6" - make-fetch-happen "^9.1.0" - nopt "^5.0.0" - npmlog "^4.1.2" - rimraf "^3.0.2" - semver "^7.3.5" - tar "^6.1.2" - which "^2.0.2" - node-releases@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.0.tgz#67dc74903100a7deb044037b8a2e5f453bb05400" integrity sha512-aA87l0flFYMzCHpTM3DERFSYxc6lv/BltdbRTOMZuxZ0cwZCD3mejE5n9vLhSJCN++/eOqr77G1IO5uXxlQYWA== -nopt@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/nopt/-/nopt-5.0.0.tgz#530942bb58a512fccafe53fe210f13a25355dc88" - integrity sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ== - dependencies: - abbrev "1" - -normalize-package-data@^3.0.0, normalize-package-data@^3.0.2: - version "3.0.3" - resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-3.0.3.tgz#dbcc3e2da59509a0983422884cd172eefdfa525e" - integrity sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA== - dependencies: - hosted-git-info "^4.0.1" - is-core-module "^2.5.0" - semver "^7.3.4" - validate-npm-package-license "^3.0.1" - normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" @@ -7119,90 +5981,6 @@ normalize-url@^6.0.1: resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== -npm-audit-report@^2.1.5: - version "2.1.5" - resolved "https://registry.yarnpkg.com/npm-audit-report/-/npm-audit-report-2.1.5.tgz#a5b8850abe2e8452fce976c8960dd432981737b5" - integrity sha512-YB8qOoEmBhUH1UJgh1xFAv7Jg1d+xoNhsDYiFQlEFThEBui0W1vIz2ZK6FVg4WZjwEdl7uBQlm1jy3MUfyHeEw== - dependencies: - chalk "^4.0.0" - -npm-bundled@^1.1.1: - 
version "1.1.2" - resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.1.2.tgz#944c78789bd739035b70baa2ca5cc32b8d860bc1" - integrity sha512-x5DHup0SuyQcmL3s7Rx/YQ8sbw/Hzg0rj48eN0dV7hf5cmQq5PXIeioroH3raV1QC1yh3uTYuMThvEQF3iKgGQ== - dependencies: - npm-normalize-package-bin "^1.0.1" - -npm-install-checks@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/npm-install-checks/-/npm-install-checks-4.0.0.tgz#a37facc763a2fde0497ef2c6d0ac7c3fbe00d7b4" - integrity sha512-09OmyDkNLYwqKPOnbI8exiOZU2GVVmQp7tgez2BPi5OZC8M82elDAps7sxC4l//uSUtotWqoEIDwjRvWH4qz8w== - dependencies: - semver "^7.1.1" - -npm-normalize-package-bin@^1.0.0, npm-normalize-package-bin@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz#6e79a41f23fd235c0623218228da7d9c23b8f6e2" - integrity sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA== - -npm-package-arg@^8.0.0, npm-package-arg@^8.0.1, npm-package-arg@^8.1.0, npm-package-arg@^8.1.1, npm-package-arg@^8.1.2, npm-package-arg@^8.1.5: - version "8.1.5" - resolved "https://registry.yarnpkg.com/npm-package-arg/-/npm-package-arg-8.1.5.tgz#3369b2d5fe8fdc674baa7f1786514ddc15466e44" - integrity sha512-LhgZrg0n0VgvzVdSm1oiZworPbTxYHUJCgtsJW8mGvlDpxTM1vSJc3m5QZeUkhAHIzbz3VCHd/R4osi1L1Tg/Q== - dependencies: - hosted-git-info "^4.0.1" - semver "^7.3.4" - validate-npm-package-name "^3.0.0" - -npm-packlist@^2.1.4: - version "2.2.2" - resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-2.2.2.tgz#076b97293fa620f632833186a7a8f65aaa6148c8" - integrity sha512-Jt01acDvJRhJGthnUJVF/w6gumWOZxO7IkpY/lsX9//zqQgnF7OJaxgQXcerd4uQOLu7W5bkb4mChL9mdfm+Zg== - dependencies: - glob "^7.1.6" - ignore-walk "^3.0.3" - npm-bundled "^1.1.1" - npm-normalize-package-bin "^1.0.1" - -npm-packlist@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-3.0.0.tgz#0370df5cfc2fcc8f79b8f42b37798dd9ee32c2a9" - integrity sha512-L/cbzmutAwII5glUcf2DBRNY/d0TFd4e/FnaZigJV6JD85RHZXJFGwCndjMWiiViiWSsWt3tiOLpI3ByTnIdFQ== - dependencies: - glob "^7.1.6" - ignore-walk "^4.0.1" - npm-bundled "^1.1.1" - npm-normalize-package-bin "^1.0.1" - -npm-pick-manifest@^6.0.0, npm-pick-manifest@^6.1.0, npm-pick-manifest@^6.1.1: - version "6.1.1" - resolved "https://registry.yarnpkg.com/npm-pick-manifest/-/npm-pick-manifest-6.1.1.tgz#7b5484ca2c908565f43b7f27644f36bb816f5148" - integrity sha512-dBsdBtORT84S8V8UTad1WlUyKIY9iMsAmqxHbLdeEeBNMLQDlDWWra3wYUx9EBEIiG/YwAy0XyNHDd2goAsfuA== - dependencies: - npm-install-checks "^4.0.0" - npm-normalize-package-bin "^1.0.1" - npm-package-arg "^8.1.2" - semver "^7.3.4" - -npm-profile@^5.0.3: - version "5.0.4" - resolved "https://registry.yarnpkg.com/npm-profile/-/npm-profile-5.0.4.tgz#73e5bd1d808edc2c382d7139049cc367ac43161b" - integrity sha512-OKtU7yoAEBOnc8zJ+/uo5E4ugPp09sopo+6y1njPp+W99P8DvQon3BJYmpvyK2Bf1+3YV5LN1bvgXRoZ1LUJBA== - dependencies: - npm-registry-fetch "^11.0.0" - -npm-registry-fetch@^11.0.0: - version "11.0.0" - resolved "https://registry.yarnpkg.com/npm-registry-fetch/-/npm-registry-fetch-11.0.0.tgz#68c1bb810c46542760d62a6a965f85a702d43a76" - integrity sha512-jmlgSxoDNuhAtxUIG6pVwwtz840i994dL14FoNVZisrmZW5kWd63IUTNv1m/hyRSGSqWjCUp/YZlS1BJyNp9XA== - dependencies: - make-fetch-happen "^9.0.1" - minipass "^3.1.3" - minipass-fetch "^1.3.0" - minipass-json-stream "^1.0.1" - minizlib "^2.0.0" - npm-package-arg "^8.0.0" - npm-run-path@^4.0.1: version "4.0.1" resolved 
"https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" @@ -7210,107 +5988,6 @@ npm-run-path@^4.0.1: dependencies: path-key "^3.0.0" -npm-user-validate@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/npm-user-validate/-/npm-user-validate-1.0.1.tgz#31428fc5475fe8416023f178c0ab47935ad8c561" - integrity sha512-uQwcd/tY+h1jnEaze6cdX/LrhWhoBxfSknxentoqmIuStxUExxjWd3ULMLFPiFUrZKbOVMowH6Jq2FRWfmhcEw== - -npm@8.1.0: - version "8.1.0" - resolved "https://registry.yarnpkg.com/npm/-/npm-8.1.0.tgz#2b4a44f00cab4ae7e8fb2d8180f2af92e1098b09" - integrity sha512-rPiiYY4t8pqK1rCDQy1bE6HRR63B2RwfYsIVc1/BNkeIilvqZMwGeTksSBK8WTozBz8c/tMd7jk0c4Q4f/F+UA== - dependencies: - "@isaacs/string-locale-compare" "^1.1.0" - "@npmcli/arborist" "^4.0.1" - "@npmcli/ci-detect" "^1.4.0" - "@npmcli/config" "^2.3.0" - "@npmcli/map-workspaces" "^2.0.0" - "@npmcli/package-json" "^1.0.1" - "@npmcli/run-script" "^2.0.0" - abbrev "~1.1.1" - ansicolors "~0.3.2" - ansistyles "~0.1.3" - archy "~1.0.0" - cacache "^15.3.0" - chalk "^4.1.2" - chownr "^2.0.0" - cli-columns "^4.0.0" - cli-table3 "^0.6.0" - columnify "~1.5.4" - fastest-levenshtein "^1.0.12" - glob "^7.2.0" - graceful-fs "^4.2.8" - hosted-git-info "^4.0.2" - ini "^2.0.0" - init-package-json "^2.0.5" - is-cidr "^4.0.2" - json-parse-even-better-errors "^2.3.1" - libnpmaccess "^4.0.2" - libnpmdiff "^2.0.4" - libnpmexec "^3.0.1" - libnpmfund "^2.0.1" - libnpmhook "^6.0.2" - libnpmorg "^2.0.2" - libnpmpack "^3.0.0" - libnpmpublish "^4.0.1" - libnpmsearch "^3.1.1" - libnpmteam "^2.0.3" - libnpmversion "^2.0.1" - make-fetch-happen "^9.1.0" - minipass "^3.1.3" - minipass-pipeline "^1.2.4" - mkdirp "^1.0.4" - mkdirp-infer-owner "^2.0.0" - ms "^2.1.2" - node-gyp "^8.2.0" - nopt "^5.0.0" - npm-audit-report "^2.1.5" - npm-install-checks "^4.0.0" - npm-package-arg "^8.1.5" - npm-pick-manifest "^6.1.1" - npm-profile "^5.0.3" - npm-registry-fetch "^11.0.0" - npm-user-validate "^1.0.1" - npmlog "^5.0.1" - opener "^1.5.2" - pacote "^12.0.2" - parse-conflict-json "^1.1.1" - qrcode-terminal "^0.12.0" - read "~1.0.7" - read-package-json "^4.1.1" - read-package-json-fast "^2.0.3" - readdir-scoped-modules "^1.1.0" - rimraf "^3.0.2" - semver "^7.3.5" - ssri "^8.0.1" - tar "^6.1.11" - text-table "~0.2.0" - tiny-relative-date "^1.3.0" - treeverse "^1.0.4" - validate-npm-package-name "~3.0.0" - which "^2.0.2" - write-file-atomic "^3.0.3" - -npmlog@^4.1.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b" - integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg== - dependencies: - are-we-there-yet "~1.1.2" - console-control-strings "~1.1.0" - gauge "~2.7.3" - set-blocking "~2.0.0" - -npmlog@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-5.0.1.tgz#f06678e80e29419ad67ab964e0fa69959c1eb8b0" - integrity sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw== - dependencies: - are-we-there-yet "^2.0.0" - console-control-strings "^1.1.0" - gauge "^3.0.0" - set-blocking "^2.0.0" - nprogress@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/nprogress/-/nprogress-0.2.0.tgz#cb8f34c53213d895723fcbab907e9422adbcafb1" @@ -7323,16 +6000,6 @@ nth-check@^1.0.2, nth-check@^2.0.0, nth-check@^2.0.1, nth-check@~1.0.1: dependencies: boolbase "^1.0.0" -number-is-nan@^1.0.0: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" - integrity sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0= - -oauth-sign@~0.9.0: - version "0.9.0" - resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" - integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== - object-assign@^4.1.0, object-assign@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" @@ -7468,13 +6135,6 @@ p-limit@^3.0.2, p-limit@^3.1.0: dependencies: yocto-queue "^0.1.0" -p-limit@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-4.0.0.tgz#914af6544ed32bfa54670b061cafcbd04984b644" - integrity sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ== - dependencies: - yocto-queue "^1.0.0" - p-locate@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" @@ -7496,13 +6156,6 @@ p-locate@^5.0.0: dependencies: p-limit "^3.0.2" -p-locate@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-6.0.0.tgz#3da9a49d4934b901089dca3302fa65dc5a05c04f" - integrity sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw== - dependencies: - p-limit "^4.0.0" - p-map@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b" @@ -7540,56 +6193,6 @@ package-json@^6.3.0: registry-url "^5.0.0" semver "^6.2.0" -pacote@^11.3.0: - version "11.3.5" - resolved "https://registry.yarnpkg.com/pacote/-/pacote-11.3.5.tgz#73cf1fc3772b533f575e39efa96c50be8c3dc9d2" - integrity sha512-fT375Yczn4zi+6Hkk2TBe1x1sP8FgFsEIZ2/iWaXY2r/NkhDJfxbcn5paz1+RTFCyNf+dPnaoBDJoAxXSU8Bkg== - dependencies: - "@npmcli/git" "^2.1.0" - "@npmcli/installed-package-contents" "^1.0.6" - "@npmcli/promise-spawn" "^1.2.0" - "@npmcli/run-script" "^1.8.2" - cacache "^15.0.5" - chownr "^2.0.0" - fs-minipass "^2.1.0" - infer-owner "^1.0.4" - minipass "^3.1.3" - mkdirp "^1.0.3" - npm-package-arg "^8.0.1" - npm-packlist "^2.1.4" - npm-pick-manifest "^6.0.0" - npm-registry-fetch "^11.0.0" - promise-retry "^2.0.1" - read-package-json-fast "^2.0.1" - rimraf "^3.0.2" - ssri "^8.0.1" - tar "^6.1.0" - -pacote@^12.0.0, pacote@^12.0.2: - version "12.0.2" - resolved "https://registry.yarnpkg.com/pacote/-/pacote-12.0.2.tgz#14ae30a81fe62ec4fc18c071150e6763e932527c" - integrity sha512-Ar3mhjcxhMzk+OVZ8pbnXdb0l8+pimvlsqBGRNkble2NVgyqOGE3yrCGi/lAYq7E7NRDMz89R1Wx5HIMCGgeYg== - dependencies: - "@npmcli/git" "^2.1.0" - "@npmcli/installed-package-contents" "^1.0.6" - "@npmcli/promise-spawn" "^1.2.0" - "@npmcli/run-script" "^2.0.0" - cacache "^15.0.5" - chownr "^2.0.0" - fs-minipass "^2.1.0" - infer-owner "^1.0.4" - minipass "^3.1.3" - mkdirp "^1.0.3" - npm-package-arg "^8.0.1" - npm-packlist "^3.0.0" - npm-pick-manifest "^6.0.0" - npm-registry-fetch "^11.0.0" - promise-retry "^2.0.1" - read-package-json-fast "^2.0.1" - rimraf "^3.0.2" - ssri "^8.0.1" - tar "^6.1.0" - param-case@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" @@ -7605,15 +6208,6 @@ parent-module@^1.0.0: dependencies: callsites "^3.0.0" -parse-conflict-json@^1.1.1: - version "1.1.1" - resolved 
"https://registry.yarnpkg.com/parse-conflict-json/-/parse-conflict-json-1.1.1.tgz#54ec175bde0f2d70abf6be79e0e042290b86701b" - integrity sha512-4gySviBiW5TRl7XHvp1agcS7SOe0KZOjC//71dzZVWJrY9hCrgtvl5v3SyIxCZ4fZF47TxD9nfzmxcx76xmbUw== - dependencies: - json-parse-even-better-errors "^2.3.0" - just-diff "^3.0.1" - just-diff-apply "^3.0.0" - parse-entities@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/parse-entities/-/parse-entities-2.0.0.tgz#53c6eb5b9314a1f4ec99fa0fdf7ce01ecda0cbe8" @@ -7674,11 +6268,6 @@ path-exists@^4.0.0: resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== -path-exists@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-5.0.0.tgz#a6aad9489200b21fab31e49cf09277e5116fb9e7" - integrity sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ== - path-is-absolute@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" @@ -7721,11 +6310,6 @@ path-type@^4.0.0: resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== -performance-now@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" - integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= - picocolors@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-0.2.1.tgz#570670f793646851d1ba135996962abad587859f" @@ -7753,13 +6337,6 @@ pkg-dir@^4.1.0: dependencies: find-up "^4.0.0" -pkg-dir@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-6.0.1.tgz#8ec964cecaef98a2bdb9c164733f90a5bcd2352d" - integrity sha512-C9R+PTCKGA32HG0n5I4JMYkdLL58ZpayVuncQHQrGeKa8o26A4o2x0u6BKekHG+Au0jv5ZW7Xfq1Cj6lm9Ag4w== - dependencies: - find-up "^6.1.0" - pkg-up@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-3.1.0.tgz#100ec235cc150e4fd42519412596a28512a0def5" @@ -8123,39 +6700,11 @@ prismjs@^1.23.0: resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.25.0.tgz#6f822df1bdad965734b310b315a23315cf999756" integrity sha512-WCjJHl1KEWbnkQom1+SzftbtXMKQoezOCYs5rECqMN+jP+apI7ftoflyqigqzopSO3hMhTEb0mFClA8lkolgEg== -proc-log@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/proc-log/-/proc-log-1.0.0.tgz#0d927307401f69ed79341e83a0b2c9a13395eb77" - integrity sha512-aCk8AO51s+4JyuYGg3Q/a6gnrlDO09NpVWePtjp7xwphcoQ04x5WAfCyugcsbLooWcMJ87CLkD4+604IckEdhg== - process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== -promise-all-reject-late@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/promise-all-reject-late/-/promise-all-reject-late-1.0.1.tgz#f8ebf13483e5ca91ad809ccc2fcf25f26f8643c2" - integrity sha512-vuf0Lf0lOxyQREH7GDIOUMLS7kz+gs8i6B+Yi8dC68a2sychGrHTJYghMBD6k7eUcH0H5P73EckCA48xijWqXw== - -promise-call-limit@^1.0.1: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/promise-call-limit/-/promise-call-limit-1.0.1.tgz#4bdee03aeb85674385ca934da7114e9bcd3c6e24" - integrity sha512-3+hgaa19jzCGLuSCbieeRsu5C2joKfYn8pY6JAuXFRVfF4IO+L7UPpFWNTeWT9pM7uhskvbPPd/oEOktCn317Q== - -promise-inflight@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/promise-inflight/-/promise-inflight-1.0.1.tgz#98472870bf228132fcbdd868129bad12c3c029e3" - integrity sha1-mEcocL8igTL8vdhoEputEsPAKeM= - -promise-retry@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/promise-retry/-/promise-retry-2.0.1.tgz#ff747a13620ab57ba688f5fc67855410c370da22" - integrity sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g== - dependencies: - err-code "^2.0.2" - retry "^0.12.0" - promise@^7.1.1: version "7.3.1" resolved "https://registry.yarnpkg.com/promise/-/promise-7.3.1.tgz#064b72602b18f90f29192b8b1bc418ffd1ebd3bf" @@ -8179,13 +6728,6 @@ prompts@^2.4.1: kleur "^3.0.3" sisteransi "^1.0.5" -promzard@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/promzard/-/promzard-0.3.0.tgz#26a5d6ee8c7dee4cb12208305acfb93ba382a9ee" - integrity sha1-JqXW7ox97kyxIggwWs+5O6OCqe4= - dependencies: - read "1" - prop-types@^15.6.2, prop-types@^15.7.2: version "15.7.2" resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.7.2.tgz#52c41e75b8c87e72b9d9360e0206b99dcbffa6c5" @@ -8210,16 +6752,6 @@ proxy-addr@~2.0.5: forwarded "0.2.0" ipaddr.js "1.9.1" -pseudomap@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" - integrity sha1-8FKijacOYYkX7wqKw0wa5aaChrM= - -psl@^1.1.28: - version "1.8.0" - resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" - integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== - pump@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" @@ -8238,7 +6770,7 @@ punycode@^1.3.2: resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= -punycode@^2.1.0, punycode@^2.1.1: +punycode@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== @@ -8260,21 +6792,11 @@ q@^1.1.2: resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" integrity sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc= -qrcode-terminal@^0.12.0: - version "0.12.0" - resolved "https://registry.yarnpkg.com/qrcode-terminal/-/qrcode-terminal-0.12.0.tgz#bb5b699ef7f9f0505092a3748be4464fe71b5819" - integrity sha512-EXtzRZmC+YGmGlDFbXKxQiMZNwCLEO6BANKXG4iCtSIM0yqc/pappSx3RIKr4r0uh5JsBckOXeKrB3Iz7mdQpQ== - qs@6.7.0: version "6.7.0" resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc" integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== -qs@~6.5.2: - version "6.5.2" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" - integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== - querystring@0.2.0: version "0.2.0" resolved 
"https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" @@ -8494,37 +7016,7 @@ react@^16.8.4: object-assign "^4.1.1" prop-types "^15.6.2" -read-cmd-shim@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/read-cmd-shim/-/read-cmd-shim-2.0.0.tgz#4a50a71d6f0965364938e9038476f7eede3928d9" - integrity sha512-HJpV9bQpkl6KwjxlJcBoqu9Ba0PQg8TqSNIOrulGt54a0uup0HtevreFHzYzkm0lpnleRdNBzXznKrgxglEHQw== - -read-package-json-fast@^2.0.1, read-package-json-fast@^2.0.2, read-package-json-fast@^2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/read-package-json-fast/-/read-package-json-fast-2.0.3.tgz#323ca529630da82cb34b36cc0b996693c98c2b83" - integrity sha512-W/BKtbL+dUjTuRL2vziuYhp76s5HZ9qQhd/dKfWIZveD0O40453QNyZhC0e63lqZrAQ4jiOapVoeJ7JrszenQQ== - dependencies: - json-parse-even-better-errors "^2.3.0" - npm-normalize-package-bin "^1.0.1" - -read-package-json@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/read-package-json/-/read-package-json-4.1.1.tgz#153be72fce801578c1c86b8ef2b21188df1b9eea" - integrity sha512-P82sbZJ3ldDrWCOSKxJT0r/CXMWR0OR3KRh55SgKo3p91GSIEEC32v3lSHAvO/UcH3/IoL7uqhOFBduAnwdldw== - dependencies: - glob "^7.1.1" - json-parse-even-better-errors "^2.3.0" - normalize-package-data "^3.0.0" - npm-normalize-package-bin "^1.0.0" - -read@1, read@^1.0.7, read@~1.0.1, read@~1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/read/-/read-1.0.7.tgz#b3da19bd052431a97671d44a42634adf710b40c4" - integrity sha1-s9oZvQUkMal2cdRKQmNK33ELQMQ= - dependencies: - mute-stream "~0.0.4" - -readable-stream@^2.0.1, readable-stream@^2.0.6: +readable-stream@^2.0.1: version "2.3.7" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== @@ -8537,7 +7029,7 @@ readable-stream@^2.0.1, readable-stream@^2.0.6: string_decoder "~1.1.1" util-deprecate "~1.0.1" -readable-stream@^3.0.6, readable-stream@^3.1.1, readable-stream@^3.6.0: +readable-stream@^3.0.6, readable-stream@^3.1.1: version "3.6.0" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== @@ -8546,16 +7038,6 @@ readable-stream@^3.0.6, readable-stream@^3.1.1, readable-stream@^3.6.0: string_decoder "^1.1.1" util-deprecate "^1.0.1" -readdir-scoped-modules@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/readdir-scoped-modules/-/readdir-scoped-modules-1.1.0.tgz#8d45407b4f870a0dcaebc0e28670d18e74514309" - integrity sha512-asaikDeqAQg7JifRsZn1NJZXo9E+VwlyCfbkZhwyISinqk5zNS6266HS5kah6P0SaQKGF6SkNnZVHUzHFYxYDw== - dependencies: - debuglog "^1.0.1" - dezalgo "^1.0.0" - graceful-fs "^4.1.2" - once "^1.3.0" - readdirp@~3.6.0: version "3.6.0" resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" @@ -8793,32 +7275,6 @@ repeat-string@^1.5.4: resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= -request@^2.88.2: - version "2.88.2" - resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3" - integrity 
sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw== - dependencies: - aws-sign2 "~0.7.0" - aws4 "^1.8.0" - caseless "~0.12.0" - combined-stream "~1.0.6" - extend "~3.0.2" - forever-agent "~0.6.1" - form-data "~2.3.2" - har-validator "~5.1.3" - http-signature "~1.2.0" - is-typedarray "~1.0.0" - isstream "~0.1.2" - json-stringify-safe "~5.0.1" - mime-types "~2.1.19" - oauth-sign "~0.9.0" - performance-now "^2.1.0" - qs "~6.5.2" - safe-buffer "^5.1.2" - tough-cookie "~2.5.0" - tunnel-agent "^0.6.0" - uuid "^3.3.2" - "require-like@>= 0.1.1": version "0.1.2" resolved "https://registry.yarnpkg.com/require-like/-/require-like-0.1.2.tgz#ad6f30c13becd797010c468afa775c0c0a6b47fa" @@ -8854,11 +7310,6 @@ responselike@^1.0.2: dependencies: lowercase-keys "^1.0.0" -retry@^0.12.0: - version "0.12.0" - resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" - integrity sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs= - retry@^0.13.1: version "0.13.1" resolved "https://registry.yarnpkg.com/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" @@ -8879,7 +7330,7 @@ rgba-regex@^1.0.0: resolved "https://registry.yarnpkg.com/rgba-regex/-/rgba-regex-1.0.0.tgz#43374e2e2ca0968b0ef1523460b7d730ff22eeb3" integrity sha1-QzdOLiyglosO8VI0YLfXMP8i7rM= -rimraf@^3.0.0, rimraf@^3.0.2: +rimraf@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== @@ -8921,12 +7372,12 @@ safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== -safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.2, safe-buffer@~5.2.0: +safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@~5.2.0: version "5.2.1" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== -"safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: +"safer-buffer@>= 2.1.2 < 3": version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== @@ -9022,7 +7473,7 @@ semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0: resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== -semver@^7.1.1, semver@^7.1.3, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5: +semver@^7.3.2, semver@^7.3.4, semver@^7.3.5: version "7.3.5" resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7" integrity sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ== @@ -9092,11 +7543,6 @@ serve-static@1.14.1: parseurl "~1.3.3" send "0.17.1" -set-blocking@^2.0.0, set-blocking@~2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" - integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= - set-value@^4.0.1: version "4.1.0" resolved "https://registry.yarnpkg.com/set-value/-/set-value-4.1.0.tgz#aa433662d87081b75ad88a4743bd450f044e7d09" @@ -9127,13 +7573,6 @@ shallow-clone@^3.0.0: dependencies: kind-of "^6.0.2" -shebang-command@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" - integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= - dependencies: - shebang-regex "^1.0.0" - shebang-command@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" @@ -9141,11 +7580,6 @@ shebang-command@^2.0.0: dependencies: shebang-regex "^3.0.0" -shebang-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" - integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= - shebang-regex@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" @@ -9165,7 +7599,7 @@ shelljs@^0.8.4: interpret "^1.0.0" rechoir "^0.6.2" -signal-exit@^3.0.0, signal-exit@^3.0.2, signal-exit@^3.0.3: +signal-exit@^3.0.2, signal-exit@^3.0.3: version "3.0.3" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c" integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA== @@ -9199,16 +7633,6 @@ slash@^3.0.0: resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== -slash@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" - integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== - -smart-buffer@^4.1.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/smart-buffer/-/smart-buffer-4.2.0.tgz#6e1d71fa4f18c05f7d0ff216dd16a481d0e8d9ae" - integrity sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg== - sockjs@^0.3.21: version "0.3.21" resolved "https://registry.yarnpkg.com/sockjs/-/sockjs-0.3.21.tgz#b34ffb98e796930b60a0cfa11904d6a339a7d417" @@ -9218,33 +7642,11 @@ sockjs@^0.3.21: uuid "^3.4.0" websocket-driver "^0.7.4" -socks-proxy-agent@^6.0.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-6.1.0.tgz#869cf2d7bd10fea96c7ad3111e81726855e285c3" - integrity sha512-57e7lwCN4Tzt3mXz25VxOErJKXlPfXmkMLnk310v/jwW20jWRVcgsOit+xNkN3eIEdB47GwnfAEBLacZ/wVIKg== - dependencies: - agent-base "^6.0.2" - debug "^4.3.1" - socks "^2.6.1" - -socks@^2.6.1: - version "2.6.1" - resolved "https://registry.yarnpkg.com/socks/-/socks-2.6.1.tgz#989e6534a07cf337deb1b1c94aaa44296520d30e" - integrity sha512-kLQ9N5ucj8uIcxrDwjm0Jsqk06xdpBjGNQtpXy4Q8/QY2k+fY7nZH8CARy+hkbG+SGAovmzzuauCpBlb8FrnBA== - dependencies: - ip "^1.1.5" - smart-buffer "^4.1.0" - sort-css-media-queries@2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/sort-css-media-queries/-/sort-css-media-queries-2.0.4.tgz#b2badfa519cb4a938acbc6d3aaa913d4949dc908" integrity 
sha512-PAIsEK/XupCQwitjv7XxoMvYhT7EAfyzI3hsy/MyDgTvc+Ft55ctdkctJLOy6cQejaIC+zjpUL4djFVm2ivOOw== -sort-object-keys@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/sort-object-keys/-/sort-object-keys-1.1.3.tgz#bff833fe85cab147b34742e45863453c1e190b45" - integrity sha512-855pvK+VkU7PaKYPc+Jjnmt4EzejQHyhhF33q31qG8x7maDzkeFhAAThdCYay11CISO+qAMwjOBP+fPZe0IPyg== - source-list-map@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/source-list-map/-/source-list-map-2.0.1.tgz#3993bd873bfc48479cca9ea3a547835c7c154b34" @@ -9296,32 +7698,6 @@ space-separated-tokens@^1.0.0: resolved "https://registry.yarnpkg.com/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz#85f32c3d10d9682007e917414ddc5c26d1aa6899" integrity sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA== -spdx-correct@^3.0.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9" - integrity sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w== - dependencies: - spdx-expression-parse "^3.0.0" - spdx-license-ids "^3.0.0" - -spdx-exceptions@^2.1.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz#3f28ce1a77a00372683eade4a433183527a2163d" - integrity sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A== - -spdx-expression-parse@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679" - integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q== - dependencies: - spdx-exceptions "^2.1.0" - spdx-license-ids "^3.0.0" - -spdx-license-ids@^3.0.0: - version "3.0.10" - resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.10.tgz#0d9becccde7003d6c658d487dd48a32f0bf3014b" - integrity sha512-oie3/+gKf7QtpitB0LYLETe+k8SifzsX4KixvpOsbI6S0kRiRQ5MKOio8eMSAKQ17N06+wdEOXRiId+zOxo0hA== - spdy-transport@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/spdy-transport/-/spdy-transport-3.0.0.tgz#00d4863a6400ad75df93361a1608605e5dcdcf31" @@ -9350,28 +7726,6 @@ sprintf-js@~1.0.2: resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= -sshpk@^1.7.0: - version "1.16.1" - resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877" - integrity sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== - dependencies: - asn1 "~0.2.3" - assert-plus "^1.0.0" - bcrypt-pbkdf "^1.0.0" - dashdash "^1.12.0" - ecc-jsbn "~0.1.1" - getpass "^0.1.1" - jsbn "~0.1.0" - safer-buffer "^2.0.2" - tweetnacl "~0.14.0" - -ssri@^8.0.0, ssri@^8.0.1: - version "8.0.1" - resolved "https://registry.yarnpkg.com/ssri/-/ssri-8.0.1.tgz#638e4e439e2ffbd2cd289776d5ca457c4f51a2af" - integrity sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ== - dependencies: - minipass "^3.1.1" - stable@^0.1.8: version "0.1.8" resolved "https://registry.yarnpkg.com/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" @@ -9394,32 +7748,6 @@ std-env@^2.2.1: dependencies: ci-info "^3.0.0" -string-width@^1.0.1: - version "1.0.2" - resolved 
"https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" - integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= - dependencies: - code-point-at "^1.0.0" - is-fullwidth-code-point "^1.0.0" - strip-ansi "^3.0.0" - -"string-width@^1.0.1 || ^2.0.0": - version "2.1.1" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" - integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== - dependencies: - is-fullwidth-code-point "^2.0.0" - strip-ansi "^4.0.0" - -"string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.2.3: - version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - string-width@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" @@ -9477,25 +7805,6 @@ stringify-object@^3.3.0: is-obj "^1.0.1" is-regexp "^1.0.0" -stringify-package@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/stringify-package/-/stringify-package-1.0.1.tgz#e5aa3643e7f74d0f28628b72f3dad5cecfc3ba85" - integrity sha512-sa4DUQsYciMP1xhKWGuFM04fB0LG/9DlluZoSVywUMRNvzid6XucHK0/90xGxRoHrAaROrcHK1aPKaijCtSrhg== - -strip-ansi@^3.0.0, strip-ansi@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" - integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= - dependencies: - ansi-regex "^2.0.0" - -"strip-ansi@^3.0.1 || ^4.0.0", strip-ansi@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" - integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= - dependencies: - ansi-regex "^3.0.0" - strip-ansi@^5.1.0: version "5.2.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" @@ -9617,21 +7926,6 @@ svgo@^2.3.0: csso "^4.2.0" stable "^0.1.8" -synp@^1.9.7: - version "1.9.7" - resolved "https://registry.yarnpkg.com/synp/-/synp-1.9.7.tgz#1d971c2eea208c5ed156a5d65238c4d43182672a" - integrity sha512-/LVvZKAqaNQN8ZMrxhURx9i7EEVmErUnieGQZ+7oqhA+/sAZnCScyhu9mmaKud7tXBwqd8VeOrcGLPNBowc3+w== - dependencies: - "@yarnpkg/lockfile" "^1.1.0" - bash-glob "^2.0.0" - colors "^1.4.0" - commander "^7.1.0" - eol "^0.9.1" - lodash "4.17.21" - nmtree "^1.0.6" - semver "^7.3.4" - sort-object-keys "^1.1.3" - tapable@^1.0.0: version "1.1.3" resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" @@ -9642,18 +7936,6 @@ tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0: resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.0.tgz#5c373d281d9c672848213d0e037d1c4165ab426b" integrity sha512-FBk4IesMV1rBxX2tfiK8RAmogtWn53puLOQlvO8XuwlgxcYbP4mVPS9Ph4aeamSyyVjOl24aYWAuc8U5kCVwMw== -tar@^6.0.2, tar@^6.1.0, tar@^6.1.11, tar@^6.1.2: - version "6.1.11" - resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.11.tgz#6760a38f003afa1b2ffd0ffe9e9abbd0eab3d621" - integrity sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA== - dependencies: - chownr "^2.0.0" - fs-minipass "^2.0.0" - minipass "^3.0.0" - minizlib "^2.1.1" - 
mkdirp "^1.0.3" - yallist "^4.0.0" - terser-webpack-plugin@^5.1.3: version "5.1.4" resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.1.4.tgz#c369cf8a47aa9922bd0d8a94fe3d3da11a7678a1" @@ -9696,7 +7978,7 @@ terser@^5.7.2: source-map "~0.7.2" source-map-support "~0.5.20" -text-table@^0.2.0, text-table@~0.2.0: +text-table@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= @@ -9716,11 +7998,6 @@ tiny-invariant@^1.0.2: resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.1.0.tgz#634c5f8efdc27714b7f386c35e6760991d230875" integrity sha512-ytxQvrb1cPc9WBEI/HSeYYoGD0kWnGEOR8RY6KomWLBVhqz0RgTwVO9dLrGz7dC+nN9llyI7OKAgRq8Vq4ZBSw== -tiny-relative-date@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/tiny-relative-date/-/tiny-relative-date-1.3.0.tgz#fa08aad501ed730f31cc043181d995c39a935e07" - integrity sha512-MOQHpzllWxDCHHaDno30hhLfbouoYlOI8YlMNtvKe1zXbjEVhbcEovQxvZrPvtiYW630GQDoMMarCnjfyfHA+A== - tiny-warning@^1.0.0, tiny-warning@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/tiny-warning/-/tiny-warning-1.0.3.tgz#94a30db453df4c643d0fd566060d60a875d84754" @@ -9753,19 +8030,6 @@ totalist@^1.0.0: resolved "https://registry.yarnpkg.com/totalist/-/totalist-1.1.0.tgz#a4d65a3e546517701e3e5c37a47a70ac97fe56df" integrity sha512-gduQwd1rOdDMGxFG1gEvhV88Oirdo2p+KjoYFU7k2g+i7n6AFFbDQ5kMPUsW0pNbfQsB/cwXvT1i4Bue0s9g5g== -tough-cookie@~2.5.0: - version "2.5.0" - resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2" - integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g== - dependencies: - psl "^1.1.28" - punycode "^2.1.1" - -treeverse@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/treeverse/-/treeverse-1.0.4.tgz#a6b0ebf98a1bca6846ddc7ecbc900df08cb9cd5f" - integrity sha512-whw60l7r+8ZU8Tu/Uc2yxtc4ZTZbR/PF3u1IPNKGQ6p8EICLb3Z2lAgoqw9bqYd8IkgnsaOcLzYHFckjqNsf0g== - trim-trailing-lines@^1.0.0: version "1.1.4" resolved "https://registry.yarnpkg.com/trim-trailing-lines/-/trim-trailing-lines-1.1.4.tgz#bd4abbec7cc880462f10b2c8b5ce1d8d1ec7c2c0" @@ -9801,18 +8065,6 @@ tslib@~2.1.0: resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.1.0.tgz#da60860f1c2ecaa5703ab7d39bc05b6bf988b97a" integrity sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A== -tunnel-agent@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" - integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= - dependencies: - safe-buffer "^5.0.1" - -tweetnacl@^0.14.3, tweetnacl@~0.14.0: - version "0.14.5" - resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" - integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= - type-fest@^0.20.2: version "0.20.2" resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" @@ -9935,20 +8187,6 @@ uniqs@^2.0.0: resolved "https://registry.yarnpkg.com/uniqs/-/uniqs-2.0.0.tgz#ffede4b36b25290696e6e165d4a59edb998e6b02" integrity sha1-/+3ks2slKQaW5uFl1KWe25mOawI= -unique-filename@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230" - integrity 
sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ== - dependencies: - unique-slug "^2.0.0" - -unique-slug@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-2.0.2.tgz#baabce91083fc64e945b0f3ad613e264f7cd4e6c" - integrity sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w== - dependencies: - imurmurhash "^0.1.4" - unique-string@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d" @@ -10136,26 +8374,11 @@ utils-merge@1.0.1: resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= -uuid@^3.3.2, uuid@^3.4.0: +uuid@^3.4.0: version "3.4.0" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== -validate-npm-package-license@^3.0.1, validate-npm-package-license@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" - integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== - dependencies: - spdx-correct "^3.0.0" - spdx-expression-parse "^3.0.0" - -validate-npm-package-name@^3.0.0, validate-npm-package-name@~3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/validate-npm-package-name/-/validate-npm-package-name-3.0.0.tgz#5fa912d81eb7d0c74afc140de7317f0ca7df437e" - integrity sha1-X6kS2B630MdK/BQN5zF/DKffQ34= - dependencies: - builtins "^1.0.3" - value-equal@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/value-equal/-/value-equal-1.0.1.tgz#1e0b794c734c5c0cade179c437d356d931a34d6c" @@ -10171,15 +8394,6 @@ vendors@^1.0.3: resolved "https://registry.yarnpkg.com/vendors/-/vendors-1.0.4.tgz#e2b800a53e7a29b93506c3cf41100d16c4c4ad8e" integrity sha512-/juG65kTL4Cy2su4P8HjtkTxk6VmJDiOPBufWniqQ6wknac6jNiXS9vU+hO3wgusiyqWlzTbVHi0dyJqRONg3w== -verror@1.10.0: - version "1.10.0" - resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" - integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= - dependencies: - assert-plus "^1.0.0" - core-util-is "1.0.2" - extsprintf "^1.2.0" - vfile-location@^3.0.0, vfile-location@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/vfile-location/-/vfile-location-3.2.0.tgz#d8e41fbcbd406063669ebf6c33d56ae8721d0f3c" @@ -10214,11 +8428,6 @@ wait-on@^6.0.0: minimist "^1.2.5" rxjs "^7.1.0" -walk-up-path@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/walk-up-path/-/walk-up-path-1.0.0.tgz#d4745e893dd5fd0dbb58dd0a4c6a33d9c9fec53e" - integrity sha512-hwj/qMDUEjCU5h0xr90KGCf0tg0/LgJbmOWgrWKYlcJZM7XvquvUJZ0G/HMGr7F7OQMOUuPHWP9JpriinkAlkg== - watchpack@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.2.0.tgz#47d78f5415fe550ecd740f99fe2882323a58b1ce" @@ -10234,13 +8443,6 @@ wbuf@^1.1.0, wbuf@^1.7.3: dependencies: minimalistic-assert "^1.0.0" -wcwidth@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" - integrity sha1-8LDc+RW8X/FSivrbLA4XtTLaL+g= - dependencies: - defaults "^1.0.3" - web-namespaces@^1.0.0, web-namespaces@^1.1.2: 
version "1.1.4" resolved "https://registry.yarnpkg.com/web-namespaces/-/web-namespaces-1.1.4.tgz#bc98a3de60dadd7faefc403d1076d529f5e030ec" @@ -10393,27 +8595,20 @@ which-boxed-primitive@^1.0.2: is-string "^1.0.5" is-symbol "^1.0.3" -which@^1.2.9, which@^1.3.1: +which@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== dependencies: isexe "^2.0.0" -which@^2.0.1, which@^2.0.2: +which@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== dependencies: isexe "^2.0.0" -wide-align@^1.1.0, wide-align@^1.1.2: - version "1.1.5" - resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.5.tgz#df1d4c206854369ecf3c9a4898f1b23fbd9d15d3" - integrity sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg== - dependencies: - string-width "^1.0.2 || 2 || 3 || 4" - widest-line@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-3.1.0.tgz#8292333bbf66cb45ff0de1603b136b7ae1496eca" @@ -10440,7 +8635,7 @@ wrappy@1: resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= -write-file-atomic@^3.0.0, write-file-atomic@^3.0.3: +write-file-atomic@^3.0.0: version "3.0.3" resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8" integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== @@ -10477,11 +8672,6 @@ xtend@^4.0.0, xtend@^4.0.1: resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== -yallist@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" - integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= - yallist@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" @@ -10492,40 +8682,11 @@ yaml@^1.10.0, yaml@^1.10.2, yaml@^1.7.2: resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== -yarn-audit-fix@^7.1.2: - version "7.1.2" - resolved "https://registry.yarnpkg.com/yarn-audit-fix/-/yarn-audit-fix-7.1.2.tgz#bb9bdec44a8e7e560416165088f8db07ed63ab7e" - integrity sha512-0sSOpdcyKBEIWLiedJvK+mjLl5C0DILsK3DkQSY1n2uE0O6R+J9gP/IaMdpMc0/KlY+qgHV3yxmLkxXngdpeDQ== - dependencies: - "@types/find-cache-dir" "^3.2.1" - "@types/fs-extra" "^9.0.13" - "@types/lodash-es" "^4.17.5" - "@types/semver" "^7.3.8" - "@types/yarnpkg__lockfile" "^1.1.5" - "@yarnpkg/lockfile" "^1.1.0" - chalk "^4.1.2" - commander "^8.2.0" - find-cache-dir "^3.3.2" - find-up "^6.2.0" - fs-extra "^10.0.0" - globby "^12.0.2" - lodash-es "^4.17.21" - npm "8.1.0" - pkg-dir "^6.0.1" - semver "^7.3.5" - synp "^1.9.7" - tslib "^2.3.1" - yocto-queue@^0.1.0: version "0.1.0" resolved 
"https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== -yocto-queue@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-1.0.0.tgz#7f816433fb2cbc511ec8bf7d263c3b58a1a3c251" - integrity sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g== - zwitch@^1.0.0: version "1.0.5" resolved "https://registry.yarnpkg.com/zwitch/-/zwitch-1.0.5.tgz#d11d7381ffed16b742f6af7b3f223d5cd9fe9920" From d0406c8e3a3d3a7c96cd46387b941b8695872d0f Mon Sep 17 00:00:00 2001 From: Jason Wang Date: Mon, 22 Nov 2021 22:07:07 -0800 Subject: [PATCH 27/40] fix: Removing broadcast hint (#1255) --- .../microsoft/azure/synapse/ml/explainers/KernelSHAPBase.scala | 2 +- .../com/microsoft/azure/synapse/ml/explainers/LIMEBase.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/KernelSHAPBase.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/KernelSHAPBase.scala index 9ae74e7138..74a88a4054 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/KernelSHAPBase.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/KernelSHAPBase.scala @@ -90,7 +90,7 @@ abstract class KernelSHAPBase(override val uid: String) (id, coefficientsMatrix, metrics.toSpark) }.toDF(idCol, this.getOutputCol, this.getMetricsCol) - preprocessed.hint("broadcast").join(fitted, Seq(idCol), "inner").drop(idCol) + preprocessed.join(fitted, Seq(idCol), "inner").drop(idCol) } override def copy(extra: ParamMap): Transformer = defaultCopy(extra) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/LIMEBase.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/LIMEBase.scala index 67afb5c103..39b3ba7d91 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/LIMEBase.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/explainers/LIMEBase.scala @@ -111,7 +111,7 @@ abstract class LIMEBase(override val uid: String) (id, coefficientsMatrix, metrics.toSpark) }.toDF(idCol, this.getOutputCol, this.getMetricsCol) - preprocessed.hint("broadcast").join(fitted, Seq(idCol), "inner").drop(idCol) + preprocessed.join(fitted, Seq(idCol), "inner").drop(idCol) } override def copy(extra: ParamMap): Transformer = this.defaultCopy(extra) From 1bbcb7b96db87e742344a54b403ab442786e737a Mon Sep 17 00:00:00 2001 From: Serena Ruan <82044803+serena-ruan@users.noreply.github.com> Date: Tue, 23 Nov 2021 15:54:57 +0800 Subject: [PATCH 28/40] fix: fix installation instruction (#1268) * fix: fix installation instruction * fix translator test --- README.md | 6 +++--- .../ml/cognitive/split1/TranslatorSuite.scala | 12 ++++++------ website/src/pages/index.js | 6 +++--- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 6f50ecbbe5..53f29e9305 100644 --- a/README.md +++ b/README.md @@ -105,9 +105,9 @@ SynapseML can be conveniently installed on existing Spark clusters via the `--packages` option, examples: ```bash -spark-shell --packages com.microsoft.azure:synapseml_2.12:0.9.4 -pyspark --packages com.microsoft.azure:synapseml_2.12:0.9.4 -spark-submit --packages com.microsoft.azure:synapseml_2.12:0.9.4 MyApp.jar +spark-shell --packages com.microsoft.azure:synapseml_2.12:0.9.4 --conf 
spark.jars.repositories=https://mmlspark.azureedge.net/maven +pyspark --packages com.microsoft.azure:synapseml_2.12:0.9.4 --conf spark.jars.repositories=https://mmlspark.azureedge.net/maven +spark-submit --packages com.microsoft.azure:synapseml_2.12:0.9.4 MyApp.jar --conf spark.jars.repositories=https://mmlspark.azureedge.net/maven ``` This can be used in other Spark contexts too. For example, you can use SynapseML diff --git a/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/TranslatorSuite.scala b/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/TranslatorSuite.scala index 9ef46e99a5..af408dd63b 100644 --- a/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/TranslatorSuite.scala +++ b/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/TranslatorSuite.scala @@ -62,7 +62,7 @@ class TranslateSuite extends TransformerFuzzing[Translate] test("Translate multiple pieces of text with language autodetection") { val result1 = getTranslationTextResult(translate.setToLanguage(Seq("zh-Hans")), textDf2).collect() - assert(result1(0).getSeq(0).mkString("\n") == "你好,你叫什么名字?\n再见") + assert(result1(0).getSeq(0).mkString("\n") == "您好,您叫什么名字?\n再见") val translate1: Translate = new Translate() .setSubscriptionKey(translatorKey) @@ -71,7 +71,7 @@ class TranslateSuite extends TransformerFuzzing[Translate] .setOutputCol("translation") .setConcurrency(5) val result3 = getTranslationTextResult(translate1.setToLanguage("zh-Hans"), emptyDf).collect() - assert(result3(0).getSeq(0).mkString("\n") == "嗨, 这是突触!") + assert(result3(0).getSeq(0).mkString("\n") == "嗨,这是突触!") val translate2: Translate = new Translate() .setSubscriptionKey(translatorKey) @@ -81,7 +81,7 @@ class TranslateSuite extends TransformerFuzzing[Translate] .setOutputCol("translation") .setConcurrency(5) val result4 = getTranslationTextResult(translate2, textDf6).collect() - assert(result4(0).getSeq(0).mkString("") == "嗨, 这是突触!") + assert(result4(0).getSeq(0).mkString("") == "嗨,这是突触!") assert(result4(1).get(0) == null) assert(result4(2).get(0) == null) } @@ -103,13 +103,13 @@ class TranslateSuite extends TransformerFuzzing[Translate] .withColumn("transliteration", col("translation.transliteration.text")) .withColumn("translation", col("translation.text")) .select("translation", "transliteration").collect() - assert(results.head.getSeq(0).mkString("\n") === "你好,你叫什么名字?") - assert(results.head.getSeq(1).mkString("\n") === "nǐ hǎo , nǐ jiào shén me míng zì ?") + assert(results.head.getSeq(0).mkString("\n") === "您好,您叫什么名字?") + assert(results.head.getSeq(1).mkString("\n") === "nín hǎo , nín jiào shén me míng zì ?") } test("Translate to multiple languages") { val result1 = getTranslationTextResult(translate.setToLanguage(Seq("zh-Hans", "de")), textDf1).collect() - assert(result1(0).getSeq(0).mkString("\n") == "你好,你叫什么名字?\nHallo, wie heißt du?") + assert(result1(0).getSeq(0).mkString("\n") == "您好,您叫什么名字?\nHallo, wie heißt du?") } test("Handle profanity") { diff --git a/website/src/pages/index.js b/website/src/pages/index.js index 36d9202ef1..b5c42322bc 100644 --- a/website/src/pages/index.js +++ b/website/src/pages/index.js @@ -288,9 +288,9 @@ function Home() { SynapseML can be conveniently installed on existing Spark clusters via the --packages option, examples: This can be used in other Spark contexts too. 
For example, you From 175fbc53b56bea7a76ff68d930e49f6666abc5e3 Mon Sep 17 00:00:00 2001 From: elswork Date: Wed, 24 Nov 2021 05:53:46 +0100 Subject: [PATCH 29/40] Fix a couple of links (#1266) --- website/docs/getting_started/installation.md | 4 ++-- .../version-0.9.4/getting_started/installation.md | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/website/docs/getting_started/installation.md b/website/docs/getting_started/installation.md index 7f7bec14d4..d960501b5c 100644 --- a/website/docs/getting_started/installation.md +++ b/website/docs/getting_started/installation.md @@ -105,7 +105,7 @@ docker run -it -p 8888:8888 -e ACCEPT_EULA=yes mcr.microsoft.com/mmlspark/releas ``` Navigate to in your web browser to run the sample -notebooks. See the [documentation](https://github.com/microsoft/SynapseML/blob/master/docs/docker.md) for more on Docker use. +notebooks. See the [documentation](reference/docker.md) for more on Docker use. > To read the EULA for using the docker image, run ``` bash @@ -116,7 +116,7 @@ docker run -it -p 8888:8888 mcr.microsoft.com/mmlspark/release eula ### Building from source SynapseML has recently transitioned to a new build infrastructure. -For detailed developer docs please see the [Developer Readme](https://github.com/microsoft/SynapseML/blob/master/docs/developer-readme.md) +For detailed developer docs please see the [Developer Readme](reference/developer-readme.md) If you are an existing SynapseML developer, you will need to reconfigure your development setup. We now support platform independent development and diff --git a/website/versioned_docs/version-0.9.4/getting_started/installation.md b/website/versioned_docs/version-0.9.4/getting_started/installation.md index 7f7bec14d4..d960501b5c 100644 --- a/website/versioned_docs/version-0.9.4/getting_started/installation.md +++ b/website/versioned_docs/version-0.9.4/getting_started/installation.md @@ -105,7 +105,7 @@ docker run -it -p 8888:8888 -e ACCEPT_EULA=yes mcr.microsoft.com/mmlspark/releas ``` Navigate to in your web browser to run the sample -notebooks. See the [documentation](https://github.com/microsoft/SynapseML/blob/master/docs/docker.md) for more on Docker use. +notebooks. See the [documentation](reference/docker.md) for more on Docker use. > To read the EULA for using the docker image, run ``` bash @@ -116,7 +116,7 @@ docker run -it -p 8888:8888 mcr.microsoft.com/mmlspark/release eula ### Building from source SynapseML has recently transitioned to a new build infrastructure. -For detailed developer docs please see the [Developer Readme](https://github.com/microsoft/SynapseML/blob/master/docs/developer-readme.md) +For detailed developer docs please see the [Developer Readme](reference/developer-readme.md) If you are an existing SynapseML developer, you will need to reconfigure your development setup. 
We now support platform independent development and From db03b01aad00dee4ee30d6e59a9557d568f2175c Mon Sep 17 00:00:00 2001 From: Dung Nguyen Date: Tue, 30 Nov 2021 23:21:40 +0700 Subject: [PATCH 30/40] feat: add predict_disable_shape_check in LightGBM (#1273) * feat: add predict_disable_shape_check in LightGBM * test: predict_disable_shape_check * fix: change to BooleanParam --- .../main/python/synapse/ml/lightgbm/mixin.py | 11 +++++++ .../ml/lightgbm/LightGBMClassifier.scala | 4 +-- .../synapse/ml/lightgbm/LightGBMRanker.scala | 2 +- .../ml/lightgbm/LightGBMRegressor.scala | 2 +- .../ml/lightgbm/booster/LightGBMBooster.scala | 20 +++++++------ .../ml/lightgbm/params/LightGBMParams.scala | 8 +++++ .../split1/VerifyLightGBMClassifier.scala | 29 +++++++++++++++++-- 7 files changed, 61 insertions(+), 15 deletions(-) diff --git a/lightgbm/src/main/python/synapse/ml/lightgbm/mixin.py b/lightgbm/src/main/python/synapse/ml/lightgbm/mixin.py index c2b58e1a1f..84e9ff8463 100644 --- a/lightgbm/src/main/python/synapse/ml/lightgbm/mixin.py +++ b/lightgbm/src/main/python/synapse/ml/lightgbm/mixin.py @@ -64,3 +64,14 @@ def getBoosterNumFeatures(self): The number of features. """ return self._java_obj.getBoosterNumFeatures() + + def setPredictDisableShapeCheck(self, value=None): + """ + Set shape check or not when predict. + """ + if not value: + value = False + else: + value = True + + self._java_obj.setPredictDisableShapeCheck(value) diff --git a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMClassifier.scala b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMClassifier.scala index 5ede62a535..887903dae3 100644 --- a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMClassifier.scala +++ b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMClassifier.scala @@ -163,11 +163,11 @@ class LightGBMClassificationModel(override val uid: String) override def numClasses: Int = getActualNumClasses override def predictRaw(features: Vector): Vector = { - Vectors.dense(getModel.score(features, true, true)) + Vectors.dense(getModel.score(features, true, true, getPredictDisableShapeCheck)) } override def predictProbability(features: Vector): Vector = { - Vectors.dense(getModel.score(features, false, true)) + Vectors.dense(getModel.score(features, false, true, getPredictDisableShapeCheck)) } override def copy(extra: ParamMap): LightGBMClassificationModel = defaultCopy(extra) diff --git a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRanker.scala b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRanker.scala index 92e897b27e..65d1dcb47c 100644 --- a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRanker.scala +++ b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRanker.scala @@ -144,7 +144,7 @@ class LightGBMRankerModel(override val uid: String) override def predict(features: Vector): Double = { logPredict( - getModel.score(features, false, false)(0) + getModel.score(features, false, false, getPredictDisableShapeCheck)(0) ) } diff --git a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRegressor.scala b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRegressor.scala index 359cf330f6..99f5c12f71 100644 --- a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRegressor.scala +++ b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRegressor.scala @@ 
-127,7 +127,7 @@ class LightGBMRegressionModel(override val uid: String) override def predict(features: Vector): Double = { logPredict( - getModel.score(features, false, false)(0) + getModel.score(features, false, false, getPredictDisableShapeCheck)(0) ) } diff --git a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/booster/LightGBMBooster.scala b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/booster/LightGBMBooster.scala index c19f7c9020..def6d9350c 100644 --- a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/booster/LightGBMBooster.scala +++ b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/booster/LightGBMBooster.scala @@ -387,14 +387,14 @@ class LightGBMBooster(val trainDataset: Option[LightGBMDataset] = None, val para } } - def score(features: Vector, raw: Boolean, classification: Boolean): Array[Double] = { + def score(features: Vector, raw: Boolean, classification: Boolean, disableShapeCheck: Boolean): Array[Double] = { val kind = if (raw) boosterHandler.rawScoreConstant else boosterHandler.normalScoreConstant features match { - case dense: DenseVector => predictForMat(dense.toArray, kind, + case dense: DenseVector => predictForMat(dense.toArray, kind, disableShapeCheck, boosterHandler.scoredDataLengthLongPtr.get().ptr, boosterHandler.scoredDataOutPtr.get().ptr) - case sparse: SparseVector => predictForCSR(sparse, kind, + case sparse: SparseVector => predictForCSR(sparse, kind, disableShapeCheck, boosterHandler.scoredDataLengthLongPtr.get().ptr, boosterHandler.scoredDataOutPtr.get().ptr) } predScoreToArray(classification, boosterHandler.scoredDataOutPtr.get().ptr, kind) @@ -403,9 +403,9 @@ class LightGBMBooster(val trainDataset: Option[LightGBMDataset] = None, val para def predictLeaf(features: Vector): Array[Double] = { val kind = boosterHandler.leafIndexPredictConstant features match { - case dense: DenseVector => predictForMat(dense.toArray, kind, + case dense: DenseVector => predictForMat(dense.toArray, kind, false, boosterHandler.leafIndexDataLengthLongPtr.get().ptr, boosterHandler.leafIndexDataOutPtr.get().ptr) - case sparse: SparseVector => predictForCSR(sparse, kind, + case sparse: SparseVector => predictForCSR(sparse, kind, false, boosterHandler.leafIndexDataLengthLongPtr.get().ptr, boosterHandler.leafIndexDataOutPtr.get().ptr) } predLeafToArray(boosterHandler.leafIndexDataOutPtr.get().ptr) @@ -414,9 +414,9 @@ class LightGBMBooster(val trainDataset: Option[LightGBMDataset] = None, val para def featuresShap(features: Vector): Array[Double] = { val kind = boosterHandler.contribPredictConstant features match { - case dense: DenseVector => predictForMat(dense.toArray, kind, + case dense: DenseVector => predictForMat(dense.toArray, kind, false, boosterHandler.shapDataLengthLongPtr.get().ptr, boosterHandler.shapDataOutPtr.get().ptr) - case sparse: SparseVector => predictForCSR(sparse, kind, + case sparse: SparseVector => predictForCSR(sparse, kind, false, boosterHandler.shapDataLengthLongPtr.get().ptr, boosterHandler.shapDataOutPtr.get().ptr) } shapToArray(boosterHandler.shapDataOutPtr.get().ptr) @@ -508,11 +508,12 @@ class LightGBMBooster(val trainDataset: Option[LightGBMDataset] = None, val para lazy val numTotalIterations: Int = numTotalModel / numModelPerIteration protected def predictForCSR(sparseVector: SparseVector, kind: Int, + disableShapeCheck: Boolean, dataLengthLongPtr: SWIGTYPE_p_long_long, dataOutPtr: SWIGTYPE_p_double): Unit = { val numCols = sparseVector.size - val datasetParams = "max_bin=255" + 
val datasetParams = s"max_bin=255 predict_disable_shape_check=${disableShapeCheck.toString}" val dataInt32bitType = boosterHandler.dataInt32bitType val data64bitType = boosterHandler.data64bitType @@ -526,6 +527,7 @@ class LightGBMBooster(val trainDataset: Option[LightGBMDataset] = None, val para } protected def predictForMat(row: Array[Double], kind: Int, + disableShapeCheck: Boolean, dataLengthLongPtr: SWIGTYPE_p_long_long, dataOutPtr: SWIGTYPE_p_double): Unit = { val data64bitType = boosterHandler.data64bitType @@ -533,7 +535,7 @@ class LightGBMBooster(val trainDataset: Option[LightGBMDataset] = None, val para val numCols = row.length val isRowMajor = 1 - val datasetParams = "max_bin=255" + val datasetParams = s"max_bin=255 predict_disable_shape_check=${disableShapeCheck.toString}" LightGBMUtils.validate( lightgbmlib.LGBM_BoosterPredictForMatSingle( diff --git a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/params/LightGBMParams.scala b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/params/LightGBMParams.scala index b7c27f8821..586b67595f 100644 --- a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/params/LightGBMParams.scala +++ b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/params/LightGBMParams.scala @@ -257,6 +257,14 @@ trait LightGBMPredictionParams extends Wrappable { def getFeaturesShapCol: String = $(featuresShapCol) def setFeaturesShapCol(value: String): this.type = set(featuresShapCol, value) + + val predictDisableShapeCheck = new BooleanParam(this, "predictDisableShapeCheck", + "control whether or not LightGBM raises an error " + + "when you try to predict on data with a different number of features than the training data") + setDefault(predictDisableShapeCheck -> false) + + def getPredictDisableShapeCheck: Boolean = $(predictDisableShapeCheck) + def setPredictDisableShapeCheck(value: Boolean): this.type = set(predictDisableShapeCheck, value) } /** Defines parameters for LightGBM models diff --git a/lightgbm/src/test/scala/com/microsoft/azure/synapse/ml/lightgbm/split1/VerifyLightGBMClassifier.scala b/lightgbm/src/test/scala/com/microsoft/azure/synapse/ml/lightgbm/split1/VerifyLightGBMClassifier.scala index 8c52c73951..30dade8f13 100644 --- a/lightgbm/src/test/scala/com/microsoft/azure/synapse/ml/lightgbm/split1/VerifyLightGBMClassifier.scala +++ b/lightgbm/src/test/scala/com/microsoft/azure/synapse/ml/lightgbm/split1/VerifyLightGBMClassifier.scala @@ -14,8 +14,8 @@ import com.microsoft.azure.synapse.ml.stages.MultiColumnAdapter import org.apache.commons.io.FileUtils import org.apache.spark.TaskContext import org.apache.spark.ml.evaluation.{BinaryClassificationEvaluator, MulticlassClassificationEvaluator} -import org.apache.spark.ml.feature.{StringIndexer, VectorAssembler} -import org.apache.spark.ml.linalg.{DenseVector, Vector} +import org.apache.spark.ml.feature.{LabeledPoint, StringIndexer, VectorAssembler} +import org.apache.spark.ml.linalg.{DenseVector, Vector, Vectors} import org.apache.spark.ml.tuning.{ParamGridBuilder, TrainValidationSplit} import org.apache.spark.ml.util.MLReadable import org.apache.spark.ml.{Estimator, Model} @@ -641,6 +641,31 @@ class VerifyLightGBMClassifier extends Benchmarks with EstimatorFuzzing[LightGBM assertFitWithoutErrors(baseModel, df) } + test("Verify LightGBM Classifier won't get stuck on " + + "number of features in data is not the same as it was in training data") { + val inputData = Seq( + LabeledPoint(1.0, Vectors.dense(1.0, 0.0, 3.0)), + LabeledPoint(0.0, 
Vectors.sparse(3, Array(0, 2), Array(1.0, 3.0))) + ) + val testData = Seq( + ( + "uuid-value", + Vectors.sparse(15, Array(2, 6, 8, 14), Array(1.0, 1.0, 1.0, 2.0)) + ) + ) + + val modelDF = spark.createDataFrame(inputData).toDF("labels", "features") + val testDF = spark.createDataFrame(testData).toDF("uuid", "features") + + val fitModel = baseModel.fit(modelDF) + val oldModelString = fitModel.getModel.modelStr.get + + val testModel = LightGBMClassificationModel.loadNativeModelFromString(oldModelString) + testModel.setPredictDisableShapeCheck(true) + + assert(testModel.transform(testDF).collect().length > 0) + } + def verifyLearnerOnBinaryCsvFile(fileName: String, labelColumnName: String, decimals: Int): Unit = { From eae0e1587508fd9d6c24c4da48ad35f0151659ac Mon Sep 17 00:00:00 2001 From: Jason Wang Date: Wed, 1 Dec 2021 08:34:21 -0800 Subject: [PATCH 31/40] fix: support Math expressions in markdown doc. (#1278) --- .gitignore | 3 +- ...alanceAnalysis - Adult Census Income.ipynb | 84 ++++++++++--------- .../responsible_ai/Data Balance Analysis.md | 56 +++++++------ website/docusaurus.config.js | 19 ++++- website/package.json | 5 +- website/yarn.lock | 74 +++++++++++++++- 6 files changed, 169 insertions(+), 72 deletions(-) diff --git a/.gitignore b/.gitignore index 522b7684ba..f111f5283b 100644 --- a/.gitignore +++ b/.gitignore @@ -56,4 +56,5 @@ node_modules/ # Misc .bsp -website/.docusaurus \ No newline at end of file +website/.docusaurus +null/ \ No newline at end of file diff --git a/notebooks/features/responsible_ai/DataBalanceAnalysis - Adult Census Income.ipynb b/notebooks/features/responsible_ai/DataBalanceAnalysis - Adult Census Income.ipynb index a7eb218fbd..36f4d10795 100644 --- a/notebooks/features/responsible_ai/DataBalanceAnalysis - Adult Census Income.ipynb +++ b/notebooks/features/responsible_ai/DataBalanceAnalysis - Adult Census Income.ipynb @@ -21,20 +21,24 @@ "\n", "---\n", "\n", - "Data Balance Analysis is relevant for overall understanding of datasets, but it becomes essential when thinking about building Machine Learning services out of such datasets. Having a well balanced data representation is critical when developing models in a responsible way, specially in terms of fairness. \n", + "Data Balance Analysis is relevant for overall understanding of datasets, but it becomes essential when thinking about building Machine Learning services out of such datasets. Having a well balanced data representation is critical when developing models in a responsible way, specially in terms of fairness.\n", "It is unfortunately all too easy to build an ML model that produces biased results for subsets of an overall population, by training or testing the model on biased ground truth data. There are multiple case studies of biased models assisting in granting loans, healthcare, recruitment opportunities and many other decision making tasks. In most of these examples, the data from which these models are trained was the common issue. 
These findings emphasize how important it is for model creators and auditors to analyze data balance: to measure training data across sub-populations and ensure the data has good coverage and a balanced representation of labels across sensitive categories and category combinations, and to check that test data is representative of the target population.\n", "\n", "In summary, Data Balance Analysis, used as a step for building ML models has the following benefits:\n", - "* **Reduces risks for unbalanced models (facilitate service fairness) and reduces costs of ML building** by identifying early on data representation gaps that prompt data scientists to seek mitigation steps (collect more data, follow a specific sampling mechanism, create synthetic data, etc.) before proceeding to train their models. \n", - "* **Enables easy e2e debugging of ML systems** in combination with [Fairlearn](https://fairlearn.org/) by providing a clear view if for an unbalanced model the issue is tied to the data or the model. \n", + "\n", + "* **Reduces risks for unbalanced models (facilitate service fairness) and reduces costs of ML building** by identifying early on data representation gaps that prompt data scientists to seek mitigation steps (collect more data, follow a specific sampling mechanism, create synthetic data, etc.) before proceeding to train their models.\n", + "* **Enables easy e2e debugging of ML systems** in combination with [Fairlearn](https://fairlearn.org/) by providing a clear view if for an unbalanced model the issue is tied to the data or the model.\n", "\n", "---\n", "\n", "Note: If you are running this notebook in a Spark environment such as Azure Synapse or Databricks, then you can easily visualize the imbalance measures using the built-in plotting features.\n", "\n", "Python dependencies:\n", - "* matplotlib==3.2.2\n", - "* numpy==1.19.2" + "\n", + "```text\n", + "matplotlib==3.2.2\n", + "numpy==1.19.2\n", + "```\n" ] }, { @@ -170,23 +174,23 @@ "\n", "Feature Balance Measures allow us to see whether each combination of sensitive feature is receiving the positive outcome (true prediction) at equal rates.\n", "\n", - "In this context, we define a feature balance measure, also referred to as the parity, for label y as the absolute difference between the association metrics of two different sensitive classes \\\\([x_A, x_B]\\\\), with respect to the association metric \\\\(A(x_i, y)\\\\). That is:\n", + "In this context, we define a feature balance measure, also referred to as the parity, for label y as the absolute difference between the association metrics of two different sensitive classes $[x_A, x_B]$, with respect to the association metric $A(x_i, y)$. That is:\n", "\n", - "$$parity(y \\vert x_A, x_B, A(\\cdot)) \\coloneqq A(x_A, y) - A(x_B, y) $$\n", + "$$parity(y \\vert x_A, x_B, A(\\cdot)) \\coloneqq A(x_A, y) - A(x_B, y)$$\n", "\n", "Using the dataset, we can see if the various sexes and races are receiving >50k income at equal or unequal rates.\n", "\n", "Note: Many of these metrics were influenced by this paper [Measuring Model Biases in the Absence of Ground Truth](https://arxiv.org/abs/2103.03417).\n", "\n", - "Measure | Family | Description | Interpretation/Formula | Reference\n", - "- | - | - | - | -\n", - "Demographic Parity | Fairness | Proportion of each segment of a protected class (e.g. gender) should receive the positive outcome at equal rates. | As close to 0 means better parity. \\\\(DP = P(Y \\vert A = \"Male\") - P(Y \\vert A = \"Female\")\\\\). 
Y = Positive label rate. | [Link](https://en.wikipedia.org/wiki/Fairness_%28machine_learning%29)\n", - "Pointwise Mutual Information (PMI), normalized PMI | Entropy | The PMI of a pair of feature values (ex: Gender=Male and Gender=Female) quantifies the discrepancy between the probability of their coincidence given their joint distribution and their individual distributions (assuming independence). | Range (normalized) [-1, 1]. -1 for no co-occurences. 0 for co-occurences at random. 1 for complete co-occurences. | [Link](https://en.wikipedia.org/wiki/Pointwise_mutual_information)\n", - "Sorensen-Dice Coefficient (SDC) | Intersection-over-Union | Used to gauge the similarity of two samples. Related to F1 score. | Equals twice the number of elements common to both sets divided by the sum of the number of elements in each set. | [Link](https://en.wikipedia.org/wiki/S%C3%B8rensen%E2%80%93Dice_coefficient)\n", - "Jaccard Index | Intersection-over-Union | Similar to SDC, guages the similarity and diversity of sample sets. | Equals the size of the intersection divided by the size of the union of the sample sets. | [Link](https://en.wikipedia.org/wiki/Jaccard_index)\n", - "Kendall Rank Correlation | Correlation and Statistical Tests | Used to measure the ordinal association between two measured quantities. | High when observations have a similar rank and low when observations have a dissimilar rank between the two variables. | [Link](https://en.wikipedia.org/wiki/Kendall_rank_correlation_coefficient)\n", - "Log-Likelihood Ratio | Correlation and Statistical Tests | Calculates the degree to which data supports one variable versus another. Log of the likelihood ratio, which gives the probability of correctly predicting the label in ratio to probability of incorrectly predicting label. | If likelihoods are similar, it should be close to 0. | [Link](https://en.wikipedia.org/wiki/Likelihood_function#Likelihood_ratio)\n", - "t-test | Correlation and Statistical Tests | Used to compare the means of two groups (pairwise). | Value looked up in t-Distribution tell if statistically significant or not. | [Link](https://en.wikipedia.org/wiki/Student's_t-test)" + "| Association Metric | Family | Description | Interpretation/Formula | Reference |\n", + "|----------------------------------------------------|-----------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------|\n", + "| Demographic Parity | Fairness | Proportion of each segment of a protected class (e.g. gender) should receive the positive outcome at equal rates. | As close to 0 means better parity. $DP = P(Y \\vert A = \"Male\") - P(Y \\vert A = \"Female\")$. | [Link](https://en.wikipedia.org/wiki/Fairness_%28machine_learning%29) |\n", + "| Pointwise Mutual Information (PMI), normalized PMI | Entropy | The PMI of a pair of feature values (ex: Gender=Male and Gender=Female) quantifies the discrepancy between the probability of their coincidence given their joint distribution and their individual distributions (assuming independence). | Range (normalized) $[-1, 1]$. -1 for no co-occurences. 0 for co-occurences at random. 1 for complete co-occurences. 
| [Link](https://en.wikipedia.org/wiki/Pointwise_mutual_information) |\n", + "| Sorensen-Dice Coefficient (SDC) | Intersection-over-Union | Used to gauge the similarity of two samples. Related to F1 score. | Equals twice the number of elements common to both sets divided by the sum of the number of elements in each set. | [Link](https://en.wikipedia.org/wiki/S%C3%B8rensen%E2%80%93Dice_coefficient) |\n", + "| Jaccard Index | Intersection-over-Union | Similar to SDC, guages the similarity and diversity of sample sets. | Equals the size of the intersection divided by the size of the union of the sample sets. | [Link](https://en.wikipedia.org/wiki/Jaccard_index) |\n", + "| Kendall Rank Correlation | Correlation and Statistical Tests | Used to measure the ordinal association between two measured quantities. | High when observations have a similar rank and low when observations have a dissimilar rank between the two variables. | [Link](https://en.wikipedia.org/wiki/Kendall_rank_correlation_coefficient) |\n", + "| Log-Likelihood Ratio | Correlation and Statistical Tests | Calculates the degree to which data supports one variable versus another. Log of the likelihood ratio, which gives the probability of correctly predicting the label in ratio to probability of incorrectly predicting label. | If likelihoods are similar, it should be close to 0. | [Link](https://en.wikipedia.org/wiki/Likelihood_function#Likelihood_ratio) |\n", + "| t-test | Correlation and Statistical Tests | Used to compare the means of two groups (pairwise). | Value looked up in t-Distribution tell if statistically significant or not. | [Link](https://en.wikipedia.org/wiki/Student's_t-test) |" ] }, { @@ -375,25 +379,29 @@ "* \"Other\" appears 2 times\n", "\n", "Assuming the uniform distribution:\n", - "$$ReferenceCount \\coloneqq \\frac{numRows}{numFeatureValues}$$\n", - "$$ReferenceProbability \\coloneqq \\frac{1}{numFeatureValues}$$\n", - "\n", - "Feature Value | Observed Count | Reference Count | Observed Probability | Reference Probabiliy\n", - "- | - | - | - | -\n", - "Male | 4 | 9/3 = 3 | 4/9 = 0.44 | 3/9 = 0.33\n", - "Female | 3 | 9/3 = 3 | 3/9 = 0.33 | 3/9 = 0.33\n", - "Other | 2 | 9/3 = 3 | 2/9 = 0.22 | 3/9 = 0.33\n", + "$$\n", + "ReferenceCount \\coloneqq \\frac{numRows}{numFeatureValues}\n", + "$$\n", + "$$\n", + "ReferenceProbability \\coloneqq \\frac{1}{numFeatureValues}\n", + "$$\n", + "\n", + "| Feature Value | Observed Count | Reference Count | Observed Probability | Reference Probability |\n", + "|---------------|----------------|-----------------|----------------------|-----------------------|\n", + "| Male | 4 | 9/3 = 3 | 4/9 = 0.44 | 3/9 = 0.33 |\n", + "| Female | 3 | 9/3 = 3 | 3/9 = 0.33 | 3/9 = 0.33 |\n", + "| Other | 2 | 9/3 = 3 | 2/9 = 0.22 | 3/9 = 0.33 |\n", "\n", "We can use distance measures to find out how far our observed and reference distributions of these feature values are. Some of these distance measures include:\n", "\n", - "Measure | Description | Interpretation | Reference\n", - "- | - | - | -\n", - "KL Divergence | Measure of how one probability distribution is different from a second, reference probability distribution. Measure of the information gained when one revises one's beliefs from the prior probability distribution Q to the posterior probability distribution P. In other words, it is the amount of information lost when Q is used to approximate P. | Non-negative. 0 means P = Q. 
| [Link](https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence)\n", - "JS Distance | Measuring the similarity between two probability distributions. Symmetrized and smoothed version of the Kullback–Leibler (KL) divergence. Square root of JS Divergence. | Range [0, 1]. 0 means perfectly same to balanced distribution. | [Link](https://en.wikipedia.org/wiki/Jensen%E2%80%93Shannon_divergence)\n", - "Wasserstein Distance | This distance is also known as the earth mover’s distance, since it can be seen as the minimum amount of “work” required to transform u into v, where “work” is measured as the amount of distribution weight that must be moved, multiplied by the distance it has to be moved. | Non-negative. 0 means P = Q. | [Link](https://en.wikipedia.org/wiki/Wasserstein_metric)\n", - "Infinity Norm Distance | Distance between two vectors is the greatest of their differences along any coordinate dimension. Also called Chebyshev distance or chessboard distance. | Non-negative. 0 means same distribution. | [Link](https://en.wikipedia.org/wiki/Chebyshev_distance)\n", - "Total Variation Distance | It is equal to half the L1 (Manhattan) distance between the two distributions. Take the difference between the two proportions in each category, add up the absolute values of all the differences, and then divide the sum by 2. | Non-negative. 0 means same distribution. | [Link](https://en.wikipedia.org/wiki/Total_variation_distance_of_probability_measures)\n", - "Chi-Squared Test | The chi-square test tests the null hypothesis that the categorical data has the given frequencies given expected frequencies in each category. | p-value gives evidence against null-hypothesis that difference in observed and expected frequencies is by random chance. | [Link](https://en.wikipedia.org/wiki/Chi-squared_test)" + "| Measure | Description | Interpretation | Reference |\n", + "|--------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------|\n", + "| KL Divergence | Measure of how one probability distribution is different from a second, reference probability distribution. Measure of the information gained when one revises one's beliefs from the prior probability distribution Q to the posterior probability distribution P. In other words, it is the amount of information lost when Q is used to approximate P. | Non-negative. 0 means P = Q. | [Link](https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence) |\n", + "| JS Distance | Measuring the similarity between two probability distributions. Symmetrized and smoothed version of the Kullback–Leibler (KL) divergence. Square root of JS Divergence. | Range [0, 1]. 0 means perfectly same to balanced distribution. 
| [Link](https://en.wikipedia.org/wiki/Jensen%E2%80%93Shannon_divergence) |\n", + "| Wasserstein Distance | This distance is also known as the earth mover’s distance, since it can be seen as the minimum amount of “work” required to transform u into v, where “work” is measured as the amount of distribution weight that must be moved, multiplied by the distance it has to be moved. | Non-negative. 0 means P = Q. | [Link](https://en.wikipedia.org/wiki/Wasserstein_metric) |\n", + "| Infinity Norm Distance | Distance between two vectors is the greatest of their differences along any coordinate dimension. Also called Chebyshev distance or chessboard distance. | Non-negative. 0 means same distribution. | [Link](https://en.wikipedia.org/wiki/Chebyshev_distance) |\n", + "| Total Variation Distance | It is equal to half the L1 (Manhattan) distance between the two distributions. Take the difference between the two proportions in each category, add up the absolute values of all the differences, and then divide the sum by 2. | Non-negative. 0 means same distribution. | [Link](https://en.wikipedia.org/wiki/Total_variation_distance_of_probability_measures) |\n", + "| Chi-Squared Test | The chi-square test tests the null hypothesis that the categorical data has the given frequencies given expected frequencies in each category. | p-value gives evidence against null-hypothesis that difference in observed and expected frequencies is by random chance. | [Link](https://en.wikipedia.org/wiki/Chi-squared_test) |" ] }, { @@ -534,11 +542,11 @@ "\n", "These measures look at distribution of records across all combinations of sensitive columns. For example, if Sex and Race are sensitive columns, it shall try to quantify imbalance across all combinations - (Male, Black), (Female, White), (Male, Asian-Pac-Islander), etc.\n", "\n", - "Measure | Description | Interpretation | Reference\n", - "- | - | - | -\n", - "Atkinson Index | It presents the percentage of total income that a given society would have to forego in order to have more equal shares of income between its citizens. This measure depends on the degree of society aversion to inequality (a theoretical parameter decided by the researcher), where a higher value entails greater social utility or willingness by individuals to accept smaller incomes in exchange for a more equal distribution. An important feature of the Atkinson index is that it can be decomposed into within-group and between-group inequality. | Range [0, 1]. 0 if perfect equality. 1 means maximum inequality. In our case, it is the proportion of records for a sensitive columns’ combination. | [Link](https://en.wikipedia.org/wiki/Atkinson_index)\n", - "Theil T Index | GE(1) = Theil's T and is more sensitive to differences at the top of the distribution. The Theil index is a statistic used to measure economic inequality. The Theil index measures an entropic \"distance\" the population is away from the \"ideal\" egalitarian state of everyone having the same income. | If everyone has the same income, then T_T equals 0. If one person has all the income, then T_T gives the result (ln N). 0 means equal income and larger values mean higher level of disproportion. | [Link](https://en.wikipedia.org/wiki/Theil_index)\n", - "Theil L Index | GE(0) = Theil's L and is more sensitive to differences at the lower end of the distribution. Logarithm of (mean income)/(income i), over all the incomes included in the summation. It is also referred to as the mean log deviation measure. 
Because a transfer from a larger income to a smaller one will change the smaller income's ratio more than it changes the larger income's ratio, the transfer-principle is satisfied by this index. | Same interpretation as Theil T Index. | [Link](https://en.wikipedia.org/wiki/Theil_index)" + "| Measure | Description | Interpretation | Reference |\n", + "|----------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------|\n", + "| Atkinson Index | It presents the percentage of total income that a given society would have to forego in order to have more equal shares of income between its citizens. This measure depends on the degree of society aversion to inequality (a theoretical parameter decided by the researcher), where a higher value entails greater social utility or willingness by individuals to accept smaller incomes in exchange for a more equal distribution. An important feature of the Atkinson index is that it can be decomposed into within-group and between-group inequality. | Range $[0, 1]$. 0 if perfect equality. 1 means maximum inequality. In our case, it is the proportion of records for a sensitive columns’ combination. | [Link](https://en.wikipedia.org/wiki/Atkinson_index) |\n", + "| Theil T Index | GE(1) = Theil's T and is more sensitive to differences at the top of the distribution. The Theil index is a statistic used to measure economic inequality. The Theil index measures an entropic \"distance\" the population is away from the \"ideal\" egalitarian state of everyone having the same income. | If everyone has the same income, then T_T equals 0. If one person has all the income, then T_T gives the result $ln(N)$. 0 means equal income and larger values mean higher level of disproportion. | [Link](https://en.wikipedia.org/wiki/Theil_index) |\n", + "| Theil L Index | GE(0) = Theil's L and is more sensitive to differences at the lower end of the distribution. Logarithm of (mean income)/(income i), over all the incomes included in the summation. It is also referred to as the mean log deviation measure. Because a transfer from a larger income to a smaller one will change the smaller income's ratio more than it changes the larger income's ratio, the transfer-principle is satisfied by this index. | Same interpretation as Theil T Index. 
| [Link](https://en.wikipedia.org/wiki/Theil_index) |" ] }, { @@ -638,4 +646,4 @@ }, "nbformat": 4, "nbformat_minor": 0 -} \ No newline at end of file +} diff --git a/website/docs/features/responsible_ai/Data Balance Analysis.md b/website/docs/features/responsible_ai/Data Balance Analysis.md index e95ffb78bc..52d60c9c21 100644 --- a/website/docs/features/responsible_ai/Data Balance Analysis.md +++ b/website/docs/features/responsible_ai/Data Balance Analysis.md @@ -101,23 +101,23 @@ Note: If you are running this notebook in a Spark environment such as Azure Syna Feature Balance Measures allow us to see whether each combination of sensitive feature is receiving the positive outcome (true prediction) at balanced probability. -In this context, we define a feature balance measure, also referred to as the parity, for label y as the difference between the association metrics of two different sensitive classes \\([x_A, x_B]\\), with respect to the association metric \\(A(x_i, y)\\). That is: +In this context, we define a feature balance measure, also referred to as the parity, for label y as the difference between the association metrics of two different sensitive classes $[x_A, x_B]$, with respect to the association metric $A(x_i, y)$. That is: -$$parity(y \vert x_A, x_B, A(\cdot)) \coloneqq A(x_A, y) - A(x_B, y) $$ +$parity(y \vert x_A, x_B, A(\cdot)) \coloneqq A(x_A, y) - A(x_B, y)$ Using the dataset, we can see if the various sexes and races are receiving >50k income at equal or unequal rates. Note: Many of these metrics were influenced by this paper [Measuring Model Biases in the Absence of Ground Truth](https://arxiv.org/abs/2103.03417). -Association Metric | Family | Description | Interpretation/Formula | Reference -| - | - | - | - | - -Demographic Parity | Fairness | Proportion of each segment of a protected class (e.g. gender) should receive the positive outcome at equal rates. | As close to 0 means better parity. \\(DP = P(Y \vert A = "Male") - P(Y \vert A = "Female")\\). Y = Positive label rate. | [Link](https://en.wikipedia.org/wiki/Fairness_%28machine_learning%29) -Pointwise Mutual Information (PMI), normalized PMI | Entropy | The PMI of a pair of feature values (ex: Gender=Male and Gender=Female) quantifies the discrepancy between the probability of their coincidence given their joint distribution and their individual distributions (assuming independence). | Range (normalized) [-1, 1]. -1 for no co-occurences. 0 for co-occurences at random. 1 for complete co-occurences. | [Link](https://en.wikipedia.org/wiki/Pointwise_mutual_information) -Sorensen-Dice Coefficient (SDC) | Intersection-over-Union | Used to gauge the similarity of two samples. Related to F1 score. | Equals twice the number of elements common to both sets divided by the sum of the number of elements in each set. | [Link](https://en.wikipedia.org/wiki/S%C3%B8rensen%E2%80%93Dice_coefficient) -Jaccard Index | Intersection-over-Union | Similar to SDC, guages the similarity and diversity of sample sets. | Equals the size of the intersection divided by the size of the union of the sample sets. | [Link](https://en.wikipedia.org/wiki/Jaccard_index) -Kendall Rank Correlation | Correlation and Statistical Tests | Used to measure the ordinal association between two measured quantities. | High when observations have a similar rank and low when observations have a dissimilar rank between the two variables. 
| [Link](https://en.wikipedia.org/wiki/Kendall_rank_correlation_coefficient) -Log-Likelihood Ratio | Correlation and Statistical Tests | Calculates the degree to which data supports one variable versus another. Log of the likelihood ratio, which gives the probability of correctly predicting the label in ratio to probability of incorrectly predicting label. | If likelihoods are similar, it should be close to 0. | [Link](https://en.wikipedia.org/wiki/Likelihood_function#Likelihood_ratio) -t-test | Correlation and Statistical Tests | Used to compare the means of two groups (pairwise). | Value looked up in t-Distribution tell if statistically significant or not. | [Link](https://en.wikipedia.org/wiki/Student's_t-test) +| Association Metric | Family | Description | Interpretation/Formula | Reference | +|----------------------------------------------------|-----------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------| +| Demographic Parity | Fairness | Proportion of each segment of a protected class (e.g. gender) should receive the positive outcome at equal rates. | As close to 0 means better parity. $DP = P(Y \vert A = "Male") - P(Y \vert A = "Female")$. | [Link](https://en.wikipedia.org/wiki/Fairness_%28machine_learning%29) | +| Pointwise Mutual Information (PMI), normalized PMI | Entropy | The PMI of a pair of feature values (ex: Gender=Male and Gender=Female) quantifies the discrepancy between the probability of their coincidence given their joint distribution and their individual distributions (assuming independence). | Range (normalized) $[-1, 1]$. -1 for no co-occurences. 0 for co-occurences at random. 1 for complete co-occurences. | [Link](https://en.wikipedia.org/wiki/Pointwise_mutual_information) | +| Sorensen-Dice Coefficient (SDC) | Intersection-over-Union | Used to gauge the similarity of two samples. Related to F1 score. | Equals twice the number of elements common to both sets divided by the sum of the number of elements in each set. | [Link](https://en.wikipedia.org/wiki/S%C3%B8rensen%E2%80%93Dice_coefficient) | +| Jaccard Index | Intersection-over-Union | Similar to SDC, guages the similarity and diversity of sample sets. | Equals the size of the intersection divided by the size of the union of the sample sets. | [Link](https://en.wikipedia.org/wiki/Jaccard_index) | +| Kendall Rank Correlation | Correlation and Statistical Tests | Used to measure the ordinal association between two measured quantities. | High when observations have a similar rank and low when observations have a dissimilar rank between the two variables. | [Link](https://en.wikipedia.org/wiki/Kendall_rank_correlation_coefficient) | +| Log-Likelihood Ratio | Correlation and Statistical Tests | Calculates the degree to which data supports one variable versus another. Log of the likelihood ratio, which gives the probability of correctly predicting the label in ratio to probability of incorrectly predicting label. | If likelihoods are similar, it should be close to 0. 
| [Link](https://en.wikipedia.org/wiki/Likelihood_function#Likelihood_ratio) | +| t-test | Correlation and Statistical Tests | Used to compare the means of two groups (pairwise). | Value looked up in t-Distribution tell if statistically significant or not. | [Link](https://en.wikipedia.org/wiki/Student's_t-test) | ### Distribution Balance Measures @@ -130,8 +130,12 @@ For example, let's assume we have a dataset with 9 rows and a Gender column, and * "Other" appears 2 times Assuming the uniform distribution: -$$ReferenceCount \coloneqq \frac{numRows}{numFeatureValues}$$ -$$ReferenceProbability \coloneqq \frac{1}{numFeatureValues}$$ +$$ +ReferenceCount \coloneqq \frac{numRows}{numFeatureValues} +$$ +$$ +ReferenceProbability \coloneqq \frac{1}{numFeatureValues} +$$ Feature Value | Observed Count | Reference Count | Observed Probability | Reference Probabiliy | - | - | - | - | - @@ -141,14 +145,14 @@ Other | 2 | 9/3 = 3 | 2/9 = 0.22 | 3/9 = 0.33 We can use distance measures to find out how far our observed and reference distributions of these feature values are. Some of these distance measures include: -Measure | Description | Interpretation | Reference -| - | - | - | - -KL Divergence | Measure of how one probability distribution is different from a second, reference probability distribution. Measure of the information gained when one revises one's beliefs from the prior probability distribution Q to the posterior probability distribution P. In other words, it is the amount of information lost when Q is used to approximate P. | Non-negative. 0 means P = Q. | [Link](https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence) -JS Distance | Measuring the similarity between two probability distributions. Symmetrized and smoothed version of the Kullback–Leibler (KL) divergence. Square root of JS Divergence. | Range [0, 1]. 0 means perfectly same to balanced distribution. | [Link](https://en.wikipedia.org/wiki/Jensen%E2%80%93Shannon_divergence) -Wasserstein Distance | This distance is also known as the earth mover’s distance, since it can be seen as the minimum amount of “work” required to transform u into v, where “work” is measured as the amount of distribution weight that must be moved, multiplied by the distance it has to be moved. | Non-negative. 0 means P = Q. | [Link](https://en.wikipedia.org/wiki/Wasserstein_metric) -Infinity Norm Distance | Distance between two vectors is the greatest of their differences along any coordinate dimension. Also called Chebyshev distance or chessboard distance. | Non-negative. 0 means same distribution. | [Link](https://en.wikipedia.org/wiki/Chebyshev_distance) -Total Variation Distance | It is equal to half the L1 (Manhattan) distance between the two distributions. Take the difference between the two proportions in each category, add up the absolute values of all the differences, and then divide the sum by 2. | Non-negative. 0 means same distribution. | [Link](https://en.wikipedia.org/wiki/Total_variation_distance_of_probability_measures) -Chi-Squared Test | The chi-square test tests the null hypothesis that the categorical data has the given frequencies given expected frequencies in each category. | p-value gives evidence against null-hypothesis that difference in observed and expected frequencies is by random chance. 
| [Link](https://en.wikipedia.org/wiki/Chi-squared_test) +| Measure | Description | Interpretation | Reference | +|--------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------| +| KL Divergence | Measure of how one probability distribution is different from a second, reference probability distribution. Measure of the information gained when one revises one's beliefs from the prior probability distribution Q to the posterior probability distribution P. In other words, it is the amount of information lost when Q is used to approximate P. | Non-negative. 0 means P = Q. | [Link](https://en.wikipedia.org/wiki/Kullback%E2%80%93Leibler_divergence) | +| JS Distance | Measuring the similarity between two probability distributions. Symmetrized and smoothed version of the Kullback–Leibler (KL) divergence. Square root of JS Divergence. | Range [0, 1]. 0 means perfectly same to balanced distribution. | [Link](https://en.wikipedia.org/wiki/Jensen%E2%80%93Shannon_divergence) | +| Wasserstein Distance | This distance is also known as the earth mover’s distance, since it can be seen as the minimum amount of “work” required to transform u into v, where “work” is measured as the amount of distribution weight that must be moved, multiplied by the distance it has to be moved. | Non-negative. 0 means P = Q. | [Link](https://en.wikipedia.org/wiki/Wasserstein_metric) | +| Infinity Norm Distance | Distance between two vectors is the greatest of their differences along any coordinate dimension. Also called Chebyshev distance or chessboard distance. | Non-negative. 0 means same distribution. | [Link](https://en.wikipedia.org/wiki/Chebyshev_distance) | +| Total Variation Distance | It is equal to half the L1 (Manhattan) distance between the two distributions. Take the difference between the two proportions in each category, add up the absolute values of all the differences, and then divide the sum by 2. | Non-negative. 0 means same distribution. | [Link](https://en.wikipedia.org/wiki/Total_variation_distance_of_probability_measures) | +| Chi-Squared Test | The chi-square test tests the null hypothesis that the categorical data has the given frequencies given expected frequencies in each category. | p-value gives evidence against null-hypothesis that difference in observed and expected frequencies is by random chance. | [Link](https://en.wikipedia.org/wiki/Chi-squared_test) | ### Aggregate Balance Measures @@ -156,11 +160,11 @@ Aggregate Balance Measures allow us to obtain a higher notion of inequality. The These measures look at distribution of records across all combinations of sensitive columns. For example, if Sex and Race are specified as sensitive features, it then tries to quantify imbalance across all combinations of the two specified features - (Male, Black), (Female, White), (Male, Asian-Pac-Islander), etc. 
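The table below lists these aggregate measures. As a rough, self-contained sketch of how such quantities can be computed from per-combination record counts (the toy data, column names, and helper functions here are invented for illustration and are not the SynapseML implementation), consider:

```python
# Sketch: compute aggregate balance measures over the record counts of each
# combination of sensitive columns (toy example, not the SynapseML implementation).
import numpy as np
import pandas as pd

def atkinson_index(counts, epsilon=1.0):
    # Atkinson index with inequality-aversion parameter epsilon.
    x = np.asarray(counts, dtype=float)
    mu = x.mean()
    if epsilon == 1.0:
        # For epsilon = 1 the index is 1 - (geometric mean) / (arithmetic mean).
        return 1.0 - np.exp(np.log(x).mean()) / mu
    return 1.0 - np.mean((x / mu) ** (1.0 - epsilon)) ** (1.0 / (1.0 - epsilon))

def theil_t(counts):
    # Theil T = mean((x/mu) * ln(x/mu)); 0 for a perfectly even distribution.
    x = np.asarray(counts, dtype=float)
    mu = x.mean()
    return float(np.mean((x / mu) * np.log(x / mu)))

def theil_l(counts):
    # Theil L (mean log deviation) = mean(ln(mu/x)).
    x = np.asarray(counts, dtype=float)
    mu = x.mean()
    return float(np.mean(np.log(mu / x)))

# Toy dataset: count the records that fall into each (sex, race) combination.
df = pd.DataFrame({
    "sex":  ["Male", "Male", "Female", "Female", "Male", "Other"],
    "race": ["White", "Black", "White", "Asian-Pac-Islander", "White", "White"],
})
combo_counts = df.groupby(["sex", "race"]).size().to_numpy()

print(atkinson_index(combo_counts), theil_t(combo_counts), theil_l(combo_counts))
```

If every combination held the same number of records, all three values would be 0; the more concentrated the records, the larger the values become.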
-Measure | Description | Interpretation | Reference -| - | - | - | - -Atkinson Index | It presents the percentage of total income that a given society would have to forego in order to have more equal shares of income between its citizens. This measure depends on the degree of society aversion to inequality (a theoretical parameter decided by the researcher), where a higher value entails greater social utility or willingness by individuals to accept smaller incomes in exchange for a more equal distribution. An important feature of the Atkinson index is that it can be decomposed into within-group and between-group inequality. | Range [0, 1]. 0 if perfect equality. 1 means maximum inequality. In our case, it is the proportion of records for a sensitive columns’ combination. | [Link](https://en.wikipedia.org/wiki/Atkinson_index) -Theil T Index | GE(1) = Theil's T and is more sensitive to differences at the top of the distribution. The Theil index is a statistic used to measure economic inequality. The Theil index measures an entropic "distance" the population is away from the "ideal" egalitarian state of everyone having the same income. | If everyone has the same income, then T_T equals 0. If one person has all the income, then T_T gives the result (ln N). 0 means equal income and larger values mean higher level of disproportion. | [Link](https://en.wikipedia.org/wiki/Theil_index) -Theil L Index | GE(0) = Theil's L and is more sensitive to differences at the lower end of the distribution. Logarithm of (mean income)/(income i), over all the incomes included in the summation. It is also referred to as the mean log deviation measure. Because a transfer from a larger income to a smaller one will change the smaller income's ratio more than it changes the larger income's ratio, the transfer-principle is satisfied by this index. | Same interpretation as Theil T Index. | [Link](https://en.wikipedia.org/wiki/Theil_index) +| Measure | Description | Interpretation | Reference | +|----------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------| +| Atkinson Index | It presents the percentage of total income that a given society would have to forego in order to have more equal shares of income between its citizens. This measure depends on the degree of society aversion to inequality (a theoretical parameter decided by the researcher), where a higher value entails greater social utility or willingness by individuals to accept smaller incomes in exchange for a more equal distribution. An important feature of the Atkinson index is that it can be decomposed into within-group and between-group inequality. | Range $[0, 1]$. 0 if perfect equality. 1 means maximum inequality. In our case, it is the proportion of records for a sensitive columns’ combination. 
| [Link](https://en.wikipedia.org/wiki/Atkinson_index) | +| Theil T Index | GE(1) = Theil's T and is more sensitive to differences at the top of the distribution. The Theil index is a statistic used to measure economic inequality. The Theil index measures an entropic "distance" the population is away from the "ideal" egalitarian state of everyone having the same income. | If everyone has the same income, then T_T equals 0. If one person has all the income, then T_T gives the result $ln(N)$. 0 means equal income and larger values mean higher level of disproportion. | [Link](https://en.wikipedia.org/wiki/Theil_index) | +| Theil L Index | GE(0) = Theil's L and is more sensitive to differences at the lower end of the distribution. Logarithm of (mean income)/(income i), over all the incomes included in the summation. It is also referred to as the mean log deviation measure. Because a transfer from a larger income to a smaller one will change the smaller income's ratio more than it changes the larger income's ratio, the transfer-principle is satisfied by this index. | Same interpretation as Theil T Index. | [Link](https://en.wikipedia.org/wiki/Theil_index) | ## Mitigation diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js index 8797aa319b..dcf1151ab5 100644 --- a/website/docusaurus.config.js +++ b/website/docusaurus.config.js @@ -1,5 +1,7 @@ +const math = require('remark-math') +const katex = require('rehype-katex') const path = require('path'); -const {all_examples} = require('./src/plugins/examples'); +const { all_examples } = require('./src/plugins/examples'); let version = "0.9.4"; module.exports = { @@ -15,6 +17,13 @@ module.exports = { examples: all_examples(), version: "0.9.4", }, + stylesheets: [ + { + href: "https://cdn.jsdelivr.net/npm/katex@0.13.11/dist/katex.min.css", + integrity: "sha384-Um5gpz1odJg5Z4HAmzPtgZKdTBHZdw8S29IecapCSB31ligYPhHQZMIlWLYQGVoc", + crossorigin: "anonymous", + }, + ], themeConfig: { prism: { theme: require('./src/plugins/prism_themes/github'), @@ -31,9 +40,9 @@ module.exports = { src: 'img/logo.svg', }, items: [ - {to: 'docs/about', label: 'Docs', position: 'left'}, - {to: 'blog', label: 'Blog', position: 'left'}, - {to: 'videos', label: 'Videos', position: 'left'}, + { to: 'docs/about', label: 'Docs', position: 'left' }, + { to: 'blog', label: 'Blog', position: 'left' }, + { to: 'videos', label: 'Videos', position: 'left' }, { type: 'docsVersionDropdown', position: 'right', @@ -134,6 +143,8 @@ module.exports = { { docs: { sidebarPath: require.resolve('./sidebars.js'), + remarkPlugins: [math], + rehypePlugins: [katex], }, theme: { customCss: require.resolve('./src/css/custom.css'), diff --git a/website/package.json b/website/package.json index 02d77dfb4d..b36a2fb7c4 100644 --- a/website/package.json +++ b/website/package.json @@ -21,10 +21,13 @@ "@docusaurus/theme-search-algolia": "^2.0.0-beta.9", "ansi-html-community": "^0.0.8", "classnames": "^2.2.6", + "hast-util-is-element": "1.1.0", "react": "^16.8.4", "react-dom": "^16.8.4", "react-player": "^2.7.2", - "reading-time": "^1.2.0" + "reading-time": "^1.2.0", + "rehype-katex": "4", + "remark-math": "3" }, "resolutions": { "@docusaurus/core/**/set-value": "^4.0.1", diff --git a/website/yarn.lock b/website/yarn.lock index c0f5eee58e..a657c4635b 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -2313,6 +2313,11 @@ resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.9.tgz#97edc9037ea0c38585320b28964dde3b39e4660d" integrity 
sha512-qcUXuemtEu+E5wZSJHNxUXeCZhAfXKQ41D+duX+VYPde7xyEVZci+/oXKJL13tnRs9lR2pr4fod59GT6/X1/yQ== +"@types/katex@^0.11.0": + version "0.11.1" + resolved "https://registry.yarnpkg.com/@types/katex/-/katex-0.11.1.tgz#34de04477dcf79e2ef6c8d23b41a3d81f9ebeaf5" + integrity sha512-DUlIj2nk0YnJdlWgsFuVKcX27MLW0KbKmGVoUHmFr+74FYYNUDAaj9ZqTADvsbE8rfxuVmSFc7KczYn5Y09ozg== + "@types/mdast@^3.0.0": version "3.0.4" resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.4.tgz#8ee6b5200751b6cadb9a043ca39612693ad6cb9e" @@ -3240,7 +3245,7 @@ comma-separated-tokens@^1.0.0: resolved "https://registry.yarnpkg.com/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz#632b80b6117867a158f1080ad498b2fbe7e3f5ea" integrity sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw== -commander@^2.20.0: +commander@^2.19.0, commander@^2.20.0: version "2.20.3" resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== @@ -4682,6 +4687,11 @@ hast-util-from-parse5@^6.0.0: vfile-location "^3.2.0" web-namespaces "^1.0.0" +hast-util-is-element@1.1.0, hast-util-is-element@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/hast-util-is-element/-/hast-util-is-element-1.1.0.tgz#3b3ed5159a2707c6137b48637fbfe068e175a425" + integrity sha512-oUmNua0bFbdrD/ELDSSEadRVtWZOf3iF6Lbv81naqsIV99RnSCieTbWuWCY8BAeEfKJTKl0gRdokv+dELutHGQ== + hast-util-parse-selector@^2.0.0: version "2.2.5" resolved "https://registry.yarnpkg.com/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz#d57c23f4da16ae3c63b3b6ca4616683313499c3a" @@ -4714,6 +4724,15 @@ hast-util-to-parse5@^6.0.0: xtend "^4.0.0" zwitch "^1.0.0" +hast-util-to-text@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/hast-util-to-text/-/hast-util-to-text-2.0.1.tgz#04f2e065642a0edb08341976084aa217624a0f8b" + integrity sha512-8nsgCARfs6VkwH2jJU9b8LNTuR4700na+0h3PqCaEk4MAnMDeu5P0tP8mjk9LLNGxIeQRLbiDbZVw6rku+pYsQ== + dependencies: + hast-util-is-element "^1.0.0" + repeat-string "^1.0.0" + unist-util-find-after "^3.0.0" + hastscript@^5.0.0: version "5.1.2" resolved "https://registry.yarnpkg.com/hastscript/-/hastscript-5.1.2.tgz#bde2c2e56d04c62dd24e8c5df288d050a355fb8a" @@ -5444,6 +5463,13 @@ jsonfile@^6.0.1: optionalDependencies: graceful-fs "^4.1.6" +katex@^0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/katex/-/katex-0.12.0.tgz#2fb1c665dbd2b043edcf8a1f5c555f46beaa0cb9" + integrity sha512-y+8btoc/CK70XqcHqjxiGWBOeIL8upbS0peTPXTvgrh21n1RiWWcIpSWM+4uXq+IAgNh9YYQWdc7LVDPDAEEAg== + dependencies: + commander "^2.19.0" + keyv@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9" @@ -7165,6 +7191,18 @@ regjsparser@^0.7.0: dependencies: jsesc "~0.5.0" +rehype-katex@4: + version "4.0.0" + resolved "https://registry.yarnpkg.com/rehype-katex/-/rehype-katex-4.0.0.tgz#ce11a5db0bff014350e7a9cfd30147d314b14330" + integrity sha512-0mgBqYugQyIW0eUl6RDOZ28Cat2YzrnWGaYgKCMQnJw6ClmKgLqXBnkDAPGh2mwxvkkKwQOUMUpSLpA5rt7rzA== + dependencies: + "@types/katex" "^0.11.0" + hast-util-to-text "^2.0.0" + katex "^0.12.0" + rehype-parse "^7.0.0" + unified "^9.0.0" + unist-util-visit "^2.0.0" + rehype-parse@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/rehype-parse/-/rehype-parse-6.0.2.tgz#aeb3fdd68085f9f796f1d3137ae2b85a98406964" @@ -7174,6 +7212,14 @@ rehype-parse@^6.0.2: 
parse5 "^5.0.0" xtend "^4.0.0" +rehype-parse@^7.0.0: + version "7.0.1" + resolved "https://registry.yarnpkg.com/rehype-parse/-/rehype-parse-7.0.1.tgz#58900f6702b56767814afc2a9efa2d42b1c90c57" + integrity sha512-fOiR9a9xH+Le19i4fGzIEowAbwG7idy2Jzs4mOrFWBSJ0sNUgy0ev871dwWnbOo371SjgjG4pwzrbgSVrKxecw== + dependencies: + hast-util-from-parse5 "^6.0.0" + parse5 "^6.0.0" + relateurl@^0.2.7: version "0.2.7" resolved "https://registry.yarnpkg.com/relateurl/-/relateurl-0.2.7.tgz#54dbf377e51440aca90a4cd274600d3ff2d888a9" @@ -7202,6 +7248,11 @@ remark-footnotes@2.0.0: resolved "https://registry.yarnpkg.com/remark-footnotes/-/remark-footnotes-2.0.0.tgz#9001c4c2ffebba55695d2dd80ffb8b82f7e6303f" integrity sha512-3Clt8ZMH75Ayjp9q4CorNeyjwIxHFcTkaektplKGl2A1jNGEUey8cKL0ZC5vJwfcD5GFGsNLImLG/NGzWIzoMQ== +remark-math@3: + version "3.0.1" + resolved "https://registry.yarnpkg.com/remark-math/-/remark-math-3.0.1.tgz#85a02a15b15cad34b89a27244d4887b3a95185bb" + integrity sha512-epT77R/HK0x7NqrWHdSV75uNLwn8g9qTyMqCRCDujL0vj/6T6+yhdrR7mjELWtkse+Fw02kijAaBuVcHBor1+Q== + remark-mdx-remove-exports@^1.6.22: version "1.6.22" resolved "https://registry.yarnpkg.com/remark-mdx-remove-exports/-/remark-mdx-remove-exports-1.6.22.tgz#9e34f3d02c9c54b02ca0a1fde946449338d06ecb" @@ -7270,7 +7321,7 @@ renderkid@^3.0.0: lodash "^4.17.21" strip-ansi "^6.0.1" -repeat-string@^1.5.4: +repeat-string@^1.0.0, repeat-string@^1.5.4: version "1.6.1" resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= @@ -8182,6 +8233,18 @@ unified@^8.4.2: trough "^1.0.0" vfile "^4.0.0" +unified@^9.0.0: + version "9.2.2" + resolved "https://registry.yarnpkg.com/unified/-/unified-9.2.2.tgz#67649a1abfc3ab85d2969502902775eb03146975" + integrity sha512-Sg7j110mtefBD+qunSLO1lqOEKdrwBFBrR6Qd8f4uwkhWNlbkaqwHse6e7QvD3AP/MNoJdEDLaf8OxYyoWgorQ== + dependencies: + bail "^1.0.0" + extend "^3.0.0" + is-buffer "^2.0.0" + is-plain-obj "^2.0.0" + trough "^1.0.0" + vfile "^4.0.0" + uniqs@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/uniqs/-/uniqs-2.0.0.tgz#ffede4b36b25290696e6e165d4a59edb998e6b02" @@ -8199,6 +8262,13 @@ unist-builder@2.0.3, unist-builder@^2.0.0: resolved "https://registry.yarnpkg.com/unist-builder/-/unist-builder-2.0.3.tgz#77648711b5d86af0942f334397a33c5e91516436" integrity sha512-f98yt5pnlMWlzP539tPc4grGMsFaQQlP/vM396b00jngsiINumNmsY8rkXjfoi1c6QaM8nQ3vaGDuoKWbe/1Uw== +unist-util-find-after@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/unist-util-find-after/-/unist-util-find-after-3.0.0.tgz#5c65fcebf64d4f8f496db46fa8fd0fbf354b43e6" + integrity sha512-ojlBqfsBftYXExNu3+hHLfJQ/X1jYY/9vdm4yZWjIbf0VuWF6CRufci1ZyoD/wV2TYMKxXUoNuoqwy+CkgzAiQ== + dependencies: + unist-util-is "^4.0.0" + unist-util-generated@^1.0.0: version "1.1.6" resolved "https://registry.yarnpkg.com/unist-util-generated/-/unist-util-generated-1.1.6.tgz#5ab51f689e2992a472beb1b35f2ce7ff2f324d4b" From 82719fe14050cf5fde3575fdf2b49e0d9964cbd4 Mon Sep 17 00:00:00 2001 From: Ilya Matiach Date: Wed, 1 Dec 2021 11:45:14 -0500 Subject: [PATCH 32/40] fix: add logging for number of columns and rows when creating datasets, set useSingleDatasetMode=True by default (#1222) --- .../synapse/ml/lightgbm/SharedState.scala | 73 ++++++++++++------- .../ml/lightgbm/TaskTrainingMethods.scala | 9 +-- .../lightgbm/dataset/DatasetAggregator.scala | 15 +++- .../ml/lightgbm/params/LightGBMParams.scala | 2 +- .../split1/VerifyLightGBMClassifier.scala | 3 + 
.../split2/VerifyLightGBMRegressor.scala | 2 +- 6 files changed, 65 insertions(+), 39 deletions(-) diff --git a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/SharedState.scala b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/SharedState.scala index 45d103730b..9dc200754d 100644 --- a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/SharedState.scala +++ b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/SharedState.scala @@ -12,37 +12,25 @@ import org.slf4j.Logger import java.util.concurrent.CountDownLatch -class SharedState(columnParams: ColumnParams, - schema: StructType, - trainParams: TrainParams) { - val mainExecutorWorker: Long = LightGBMUtils.getTaskId - val useSingleDataset: Boolean = trainParams.executionParams.useSingleDatasetMode +class SharedDatasetState(columnParams: ColumnParams, + schema: StructType, + trainParams: TrainParams, + sharedState: SharedState) { val chunkSize: Int = trainParams.executionParams.chunkSize + val useSingleDataset: Boolean = trainParams.executionParams.useSingleDatasetMode val matrixType: String = trainParams.executionParams.matrixType lazy val denseAggregatedColumns: BaseDenseAggregatedColumns = new DenseSyncAggregatedColumns(chunkSize) lazy val sparseAggregatedColumns: BaseSparseAggregatedColumns = new SparseSyncAggregatedColumns(chunkSize) - def getArrayType(rowsIter: Iterator[Row], matrixType: String): (Iterator[Row], Boolean) = { - if (matrixType == "auto") { - sampleRowsForArrayType(rowsIter, columnParams) - } else if (matrixType == "sparse") { - (rowsIter: Iterator[Row], true) - } else if (matrixType == "dense") { - (rowsIter: Iterator[Row], false) - } else { - throw new Exception(s"Invalid parameter matrix type specified: ${matrixType}") - } - } - def prep(iter: Iterator[Row]): BaseChunkedColumns = { val (concatRowsIter: Iterator[Row], isSparseHere: Boolean) = getArrayType(iter, matrixType) val peekableIter = new PeekingIterator(concatRowsIter) // Note: the first worker sets "is sparse", other workers read it - linkIsSparse(isSparseHere) + sharedState.linkIsSparse(isSparseHere) - if (!isSparse.get) { + if (!sharedState.isSparse.get) { new DenseChunkedColumns(peekableIter, columnParams, schema, chunkSize) } else { new SparseChunkedColumns(peekableIter, columnParams, schema, chunkSize, useSingleDataset) @@ -50,7 +38,7 @@ class SharedState(columnParams: ColumnParams, } def merge(ts: BaseChunkedColumns): BaseAggregatedColumns = { - val isSparseVal = isSparse.get + val isSparseVal = sharedState.isSparse.get val aggregatedColumns = if (!isSparseVal) { if (useSingleDataset) denseAggregatedColumns else new DenseAggregatedColumns(chunkSize) @@ -68,6 +56,41 @@ class SharedState(columnParams: ColumnParams, aggregatedColumns } + @volatile var arrayProcessedSignal: CountDownLatch = new CountDownLatch(0) + + def incrementArrayProcessedSignal(log: Logger): Int = { + this.synchronized { + val count = arrayProcessedSignal.getCount.toInt + 1 + arrayProcessedSignal = new CountDownLatch(count) + log.info(s"Task incrementing ArrayProcessedSignal to $count") + count + } + } + + def getArrayType(rowsIter: Iterator[Row], matrixType: String): (Iterator[Row], Boolean) = { + if (matrixType == "auto") { + sampleRowsForArrayType(rowsIter, columnParams) + } else if (matrixType == "sparse") { + (rowsIter: Iterator[Row], true) + } else if (matrixType == "dense") { + (rowsIter: Iterator[Row], false) + } else { + throw new Exception(s"Invalid parameter matrix type specified: ${matrixType}") + } + } +} + +class 
SharedState(columnParams: ColumnParams, + schema: StructType, + trainParams: TrainParams) { + val mainExecutorWorker: Long = LightGBMUtils.getTaskId + val useSingleDataset: Boolean = trainParams.executionParams.useSingleDatasetMode + val chunkSize: Int = trainParams.executionParams.chunkSize + val matrixType: String = trainParams.executionParams.matrixType + + val datasetState: SharedDatasetState = new SharedDatasetState(columnParams, schema, trainParams, this) + val validationDatasetState: SharedDatasetState = new SharedDatasetState(columnParams, schema, trainParams, this) + @volatile var isSparse: Option[Boolean] = None def linkIsSparse(isSparse: Boolean): Unit = { @@ -80,15 +103,9 @@ class SharedState(columnParams: ColumnParams, } } - @volatile var arrayProcessedSignal: CountDownLatch = new CountDownLatch(0) - def incrementArrayProcessedSignal(log: Logger): Int = { - this.synchronized { - val count = arrayProcessedSignal.getCount.toInt + 1 - arrayProcessedSignal = new CountDownLatch(count) - log.info(s"Task incrementing ArrayProcessedSignal to $count") - count - } + datasetState.incrementArrayProcessedSignal(log) + validationDatasetState.incrementArrayProcessedSignal(log) } @volatile var doneSignal: CountDownLatch = new CountDownLatch(0) diff --git a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/TaskTrainingMethods.scala b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/TaskTrainingMethods.scala index bd09b4d784..4161586349 100644 --- a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/TaskTrainingMethods.scala +++ b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/TaskTrainingMethods.scala @@ -38,13 +38,12 @@ object TaskTrainingMethods { validationData: Option[Broadcast[Array[Row]]], sharedState: SharedState): (BaseAggregatedColumns, Option[BaseAggregatedColumns]) = { val aggregatedColumns = { - val prepAggregatedColumns = sharedState.prep(inputRows) - sharedState.merge(prepAggregatedColumns) + val prepAggregatedColumns = sharedState.datasetState.prep(inputRows) + sharedState.datasetState.merge(prepAggregatedColumns) } - val aggregatedValidationColumns = validationData.map { data => - val prepAggregatedColumns = sharedState.prep(data.value.toIterator) - sharedState.merge(prepAggregatedColumns) + val prepAggregatedColumns = sharedState.validationDatasetState.prep(data.value.toIterator) + sharedState.validationDatasetState.merge(prepAggregatedColumns) } (aggregatedColumns, aggregatedValidationColumns) } diff --git a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/dataset/DatasetAggregator.scala b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/dataset/DatasetAggregator.scala index 33ba5cda66..b5abca6a8f 100644 --- a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/dataset/DatasetAggregator.scala +++ b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/dataset/DatasetAggregator.scala @@ -7,6 +7,7 @@ import com.microsoft.azure.synapse.ml.lightgbm.dataset.DatasetUtils.getRowAsDoub import com.microsoft.azure.synapse.ml.lightgbm.swig._ import com.microsoft.azure.synapse.ml.lightgbm.{ColumnParams, LightGBMUtils} import com.microsoft.ml.lightgbm.{SWIGTYPE_p_int, lightgbmlib, lightgbmlibConstants} +import org.apache.spark.internal.Logging import org.apache.spark.ml.linalg.SQLDataTypes.VectorType import org.apache.spark.ml.linalg.{DenseVector, SparseVector} import org.apache.spark.sql.Row @@ -181,7 +182,7 @@ private[lightgbm] final class DenseChunkedColumns(rowsIter: 
PeekingIterator[Row] } -private[lightgbm] abstract class BaseAggregatedColumns(val chunkSize: Int) { +private[lightgbm] abstract class BaseAggregatedColumns(val chunkSize: Int) extends Logging { protected var labels: FloatSwigArray = _ protected var weights: Option[FloatSwigArray] = None protected var initScores: Option[DoubleSwigArray] = None @@ -327,14 +328,17 @@ private[lightgbm] abstract class BaseDenseAggregatedColumns(chunkSize: Int) exte def getFeatures: DoubleSwigArray = features - def generateDataset(referenceDataset: Option[LightGBMDataset], datasetParams: String): LightGBMDataset = { + def generateDataset(referenceDataset: Option[LightGBMDataset], + datasetParams: String): LightGBMDataset = { val pointer = lightgbmlib.voidpp_handle() try { + val numRows = rowCount.get().toInt + logInfo(s"LightGBM task generating dense dataset with $numRows rows and $numCols columns") // Generate the dataset for features LightGBMUtils.validate(lightgbmlib.LGBM_DatasetCreateFromMat( lightgbmlib.double_to_voidp_ptr(features.array), lightgbmlibConstants.C_API_DTYPE_FLOAT64, - rowCount.get().toInt, + numRows, numCols, 1, datasetParams, @@ -434,9 +438,12 @@ private[lightgbm] abstract class BaseSparseAggregatedColumns(chunkSize: Int) } } - def generateDataset(referenceDataset: Option[LightGBMDataset], datasetParams: String): LightGBMDataset = { + def generateDataset(referenceDataset: Option[LightGBMDataset], + datasetParams: String): LightGBMDataset = { indexPointerArrayIncrement(getIndexPointers.array) val pointer = lightgbmlib.voidpp_handle() + val numRows = indptrCount.get() - 1 + logInfo(s"LightGBM task generating sparse dataset with $numRows rows and $numCols columns") // Generate the dataset for features LightGBMUtils.validate(lightgbmlib.LGBM_DatasetCreateFromCSR( lightgbmlib.int_to_voidp_ptr(indexPointers.array), diff --git a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/params/LightGBMParams.scala b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/params/LightGBMParams.scala index 586b67595f..dc092aa0c9 100644 --- a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/params/LightGBMParams.scala +++ b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/params/LightGBMParams.scala @@ -61,7 +61,7 @@ trait LightGBMExecutionParams extends Wrappable { val useSingleDatasetMode = new BooleanParam(this, "useSingleDatasetMode", "Use single dataset execution mode to create a single native dataset per executor (singleton) " + "to reduce memory and communication overhead. 
Note this is disabled when running spark in local mode.") - setDefault(useSingleDatasetMode -> false) + setDefault(useSingleDatasetMode -> true) def getUseSingleDatasetMode: Boolean = $(useSingleDatasetMode) def setUseSingleDatasetMode(value: Boolean): this.type = set(useSingleDatasetMode, value) diff --git a/lightgbm/src/test/scala/com/microsoft/azure/synapse/ml/lightgbm/split1/VerifyLightGBMClassifier.scala b/lightgbm/src/test/scala/com/microsoft/azure/synapse/ml/lightgbm/split1/VerifyLightGBMClassifier.scala index 30dade8f13..338bb424f0 100644 --- a/lightgbm/src/test/scala/com/microsoft/azure/synapse/ml/lightgbm/split1/VerifyLightGBMClassifier.scala +++ b/lightgbm/src/test/scala/com/microsoft/azure/synapse/ml/lightgbm/split1/VerifyLightGBMClassifier.scala @@ -329,11 +329,14 @@ class VerifyLightGBMClassifier extends Benchmarks with EstimatorFuzzing[LightGBM } } val scoredDF1 = baseModel + .setUseSingleDatasetMode(false) .fit(pimaDF) .transform(pimaDF) + // Note: run for more iterations than non-custom objective to prevent flakiness // Note we intentionally overfit here on the training data and don't do a split val scoredDF2 = baseModel + .setUseSingleDatasetMode(false) .setFObj(new LogLikelihood()) .setNumIterations(300) .fit(pimaDF) diff --git a/lightgbm/src/test/scala/com/microsoft/azure/synapse/ml/lightgbm/split2/VerifyLightGBMRegressor.scala b/lightgbm/src/test/scala/com/microsoft/azure/synapse/ml/lightgbm/split2/VerifyLightGBMRegressor.scala index cbca56fc8a..5317701127 100644 --- a/lightgbm/src/test/scala/com/microsoft/azure/synapse/ml/lightgbm/split2/VerifyLightGBMRegressor.scala +++ b/lightgbm/src/test/scala/com/microsoft/azure/synapse/ml/lightgbm/split2/VerifyLightGBMRegressor.scala @@ -27,7 +27,7 @@ class VerifyLightGBMRegressor extends Benchmarks verifyLearnerOnRegressionCsvFile("energyefficiency2012_data.train.csv", "Y1", 0, Some(Seq("X1", "X2", "X3", "X4", "X5", "X6", "X7", "X8", "Y2"))) verifyLearnerOnRegressionCsvFile("airfoil_self_noise.train.csv", "Scaled sound pressure level", 1) - verifyLearnerOnRegressionCsvFile("Buzz.TomsHardware.train.csv", "Mean Number of display (ND)", -3) + verifyLearnerOnRegressionCsvFile("Buzz.TomsHardware.train.csv", "Mean Number of display (ND)", -4) verifyLearnerOnRegressionCsvFile("machine.train.csv", "ERP", -2) // TODO: Spark doesn't seem to like the column names here because of '.', figure out how to read in the data // verifyLearnerOnRegressionCsvFile("slump_test.train.csv", "Compressive Strength (28-day)(Mpa)", 2) From c65423c4cfd123c82eb860f9240851f655229b6a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 1 Dec 2021 11:47:06 -0500 Subject: [PATCH 33/40] build: bump algoliasearch-helper from 3.6.1 to 3.6.2 in /website (#1270) Bumps [algoliasearch-helper](https://github.com/algolia/algoliasearch-helper-js) from 3.6.1 to 3.6.2. - [Release notes](https://github.com/algolia/algoliasearch-helper-js/releases) - [Changelog](https://github.com/algolia/algoliasearch-helper-js/blob/develop/CHANGELOG) - [Commits](https://github.com/algolia/algoliasearch-helper-js/compare/3.6.1...3.6.2) --- updated-dependencies: - dependency-name: algoliasearch-helper dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Mark Hamilton --- website/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index a657c4635b..1b901d0ad0 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -2584,9 +2584,9 @@ ajv@^6.12.2, ajv@^6.12.4, ajv@^6.12.5: uri-js "^4.2.2" algoliasearch-helper@^3.5.5: - version "3.6.1" - resolved "https://registry.yarnpkg.com/algoliasearch-helper/-/algoliasearch-helper-3.6.1.tgz#b2964de29f69d8266d714e74cf097c06447ffaf2" - integrity sha512-uJkB/t/bU0tYE88ZoJJCGCNSD+9mTdsp/Yp5dG+qviIBYoUUZfKWwFcZBhW+eiqpO+EadTu7Xj3RYYnrvBOUyA== + version "3.6.2" + resolved "https://registry.yarnpkg.com/algoliasearch-helper/-/algoliasearch-helper-3.6.2.tgz#45e19b12589cfa0c611b573287f65266ea2cc14a" + integrity sha512-Xx0NOA6k4ySn+R2l3UMSONAaMkyfmrZ3AP1geEMo32MxDJQJesZABZYsldO9fa6FKQxH91afhi4hO1G0Zc2opg== dependencies: events "^1.1.1" From 3898ad976009105d468fbc39863e957e1431b2cc Mon Sep 17 00:00:00 2001 From: Serena Ruan <82044803+serena-ruan@users.noreply.github.com> Date: Thu, 2 Dec 2021 17:12:02 +0800 Subject: [PATCH 34/40] fix: hotfix for Rtests (#1283) --- environment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/environment.yaml b/environment.yaml index 93a6466902..c19438bd05 100644 --- a/environment.yaml +++ b/environment.yaml @@ -9,7 +9,7 @@ dependencies: - pip - r-base - r-dplyr - - r-sparklyr + - r-sparklyr=1.7.2 - r-devtools - pip: - wheel From 6ea8a9a4ae49f73829da0e8d0d029abc2a3ed51f Mon Sep 17 00:00:00 2001 From: Stuart Leeks Date: Thu, 2 Dec 2021 18:01:26 +0000 Subject: [PATCH 35/40] feat: Add TextAnalyze transformer to add support for Text Analytics `/analyze` endpoint (#1267) * Initial implementation work * Avoid using fixed task result to determine document count * Add options for TextAnalyze tasks to run * Add example notebook for TextAnalyze * fix style * Fix typo in build helpers * chore: some TA spring cleaning * minor rename of notebook --- .../ml/cognitive/CognitiveServiceBase.scala | 8 +- .../synapse/ml/cognitive/ComputerVision.scala | 3 +- .../synapse/ml/cognitive/TextAnalytics.scala | 313 +++++++++++++++--- .../TextAnalyticsAnalyzeSchemas.scala | 68 ++++ .../ml/cognitive/TextAnalyticsSchemas.scala | 7 +- .../cognitive/split1/TextAnalyticsSuite.scala | 260 +++++++++++++-- .../synapse/ml/io/http/HTTPTransformer.scala | 17 +- .../ml/io/split2/DistributedHTTPSuite.scala | 2 +- .../ml/nbtest/DatabricksUtilities.scala | 2 +- .../CognitiveServices - Analyze Text.ipynb | 118 +++++++ 10 files changed, 710 insertions(+), 88 deletions(-) create mode 100644 cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/TextAnalyticsAnalyzeSchemas.scala create mode 100644 notebooks/features/cognitive_services/CognitiveServices - Analyze Text.ipynb diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/CognitiveServiceBase.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/CognitiveServiceBase.scala index 4f684687bd..0d7eb466a3 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/CognitiveServiceBase.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/CognitiveServiceBase.scala @@ -332,10 +332,4 @@ abstract class CognitiveServicesBaseNoHandler(val uid: String) extends Transform } abstract class CognitiveServicesBase(uid: String) extends - CognitiveServicesBaseNoHandler(uid) with 
HasHandler { - setDefault(handler -> HandlingUtils.advancedUDF(100)) //scalastyle:ignore magic.number - - override def handlingFunc(client: CloseableHttpClient, - request: HTTPRequestData): HTTPResponseData = - getHandler(client, request) -} + CognitiveServicesBaseNoHandler(uid) with HasHandler diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/ComputerVision.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/ComputerVision.scala index f990e82a8e..0519242e2a 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/ComputerVision.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/ComputerVision.scala @@ -232,7 +232,7 @@ trait BasicAsyncReply extends HasAsyncReply { request: HTTPRequestData): HTTPResponseData = { val response = HandlingUtils.advanced(getBackoffs: _*)(client, request) if (response.statusLine.statusCode == 202) { - val location = new URI(response.headers.filter(_.name == "Operation-Location").head.value) + val location = new URI(response.headers.filter(_.name.toLowerCase() == "operation-location").head.value) val maxTries = getMaxPollingRetries val key = request.headers.find(_.name == "Ocp-Apim-Subscription-Key").map(_.value) val it = (0 to maxTries).toIterator.flatMap { _ => @@ -255,7 +255,6 @@ trait BasicAsyncReply extends HasAsyncReply { } } - trait HasAsyncReply extends Params { val backoffs: IntArrayParam = new IntArrayParam( this, "backoffs", "array of backoffs to use in the handler") diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/TextAnalytics.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/TextAnalytics.scala index 57e9185b9d..d398f25b8e 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/TextAnalytics.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/TextAnalytics.scala @@ -4,24 +4,28 @@ package com.microsoft.azure.synapse.ml.cognitive import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions -import com.microsoft.azure.synapse.ml.io.http.SimpleHTTPTransformer +import com.microsoft.azure.synapse.ml.io.http.{HasHandler, SimpleHTTPTransformer} import com.microsoft.azure.synapse.ml.logging.BasicLogging import com.microsoft.azure.synapse.ml.stages.{DropColumns, Lambda, UDFTransformer} import org.apache.http.client.methods.{HttpPost, HttpRequestBase} import org.apache.http.entity.{AbstractHttpEntity, StringEntity} import org.apache.spark.injections.UDFUtils -import org.apache.spark.ml.param.ServiceParam +import org.apache.spark.ml.param._ import org.apache.spark.ml.util._ import org.apache.spark.ml.{ComplexParamsReadable, NamespaceInjections, PipelineModel, Transformer} import org.apache.spark.sql.Row +import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema +import org.apache.spark.sql.expressions.UserDefinedFunction import org.apache.spark.sql.functions._ import org.apache.spark.sql.types._ import spray.json.DefaultJsonProtocol._ import spray.json._ import java.net.URI +import java.util +import scala.collection.JavaConverters._ -abstract class TextAnalyticsBase(override val uid: String) extends CognitiveServicesBase(uid) +abstract class TextAnalyticsBase(override val uid: String) extends CognitiveServicesBaseNoHandler(uid) with HasCognitiveServiceInput with HasInternalJsonOutputParser with HasSetLocation with HasSetLinkedService { @@ -89,40 +93,20 @@ abstract class TextAnalyticsBase(override val uid: String) extends CognitiveServ override 
protected def prepareEntity: Row => Option[AbstractHttpEntity] = { _ => None } - override protected def getInternalTransformer(schema: StructType): PipelineModel = { - val dynamicParamColName = DatasetExtensions.findUnusedColumnName("dynamic", schema) - - val missingRequiredParams = this.getRequiredParams.filter { - p => this.get(p).isEmpty && this.getDefault(p).isEmpty - } - assert(missingRequiredParams.isEmpty, - s"Missing required params: ${missingRequiredParams.map(s => s.name).mkString("(", ", ", ")")}") - - def reshapeToArray(parameterName: String): Option[(Transformer, String, String)] = { - val reshapedColName = DatasetExtensions.findUnusedColumnName(parameterName, schema) - getVectorParamMap.get(parameterName).flatMap { - case c if schema(c).dataType == StringType => - Some((Lambda(_.withColumn(reshapedColName, array(col(getVectorParam(parameterName))))), - getVectorParam(parameterName), - reshapedColName)) - case _ => None - } + protected def reshapeToArray(schema: StructType, parameterName: String): Option[(Transformer, String, String)] = { + val reshapedColName = DatasetExtensions.findUnusedColumnName(parameterName, schema) + getVectorParamMap.get(parameterName).flatMap { + case c if schema(c).dataType == StringType => + Some((Lambda(_.withColumn(reshapedColName, array(col(getVectorParam(parameterName))))), + getVectorParam(parameterName), + reshapedColName)) + case _ => None } + } - val reshapeCols = Seq(reshapeToArray("text"), reshapeToArray("language")).flatten - - val newColumnMapping = reshapeCols.map { - case (_, oldCol, newCol) => (oldCol, newCol) - }.toMap - - val columnsToGroup = getVectorParamMap.map { case (_, oldCol) => - val newCol = newColumnMapping.getOrElse(oldCol, oldCol) - col(newCol).alias(oldCol) - }.toSeq - + protected def unpackBatchUDF: UserDefinedFunction = { val innerFields = innerResponseDataType.fields.filter(_.name != "id") - - val unpackBatchUDF = UDFUtils.oldUdf({ rowOpt: Row => + UDFUtils.oldUdf({ rowOpt: Row => Option(rowOpt).map { row => val documents = row.getSeq[Row](1).map(doc => (doc.getString(0).toInt, doc)).toMap @@ -141,6 +125,32 @@ abstract class TextAnalyticsBase(override val uid: String) extends CognitiveServ }.add("error-message", StringType) ) ) + } + + override protected def getInternalTransformer(schema: StructType): PipelineModel = { + val dynamicParamColName = DatasetExtensions.findUnusedColumnName("dynamic", schema) + val badColumns = getVectorParamMap.values.toSet.diff(schema.fieldNames.toSet) + assert(badColumns.isEmpty, + s"Could not find dynamic input columns: $badColumns in columns: ${schema.fieldNames.toSet}") + + val missingRequiredParams = this.getRequiredParams.filter { + p => this.get(p).isEmpty && this.getDefault(p).isEmpty + } + assert(missingRequiredParams.isEmpty, + s"Missing required params: ${missingRequiredParams.map(s => s.name).mkString("(", ", ", ")")}") + + val reshapeCols = Seq( + reshapeToArray(schema, "text"), + reshapeToArray(schema, "language")).flatten + + val newColumnMapping = reshapeCols.map { + case (_, oldCol, newCol) => (oldCol, newCol) + }.toMap + + val columnsToGroup = getVectorParamMap.map { case (_, oldCol) => + val newCol = newColumnMapping.getOrElse(oldCol, oldCol) + col(newCol).alias(oldCol) + }.toSeq val stages = reshapeCols.map(_._1).toArray ++ Array( Lambda(_.withColumn( @@ -151,7 +161,7 @@ abstract class TextAnalyticsBase(override val uid: String) extends CognitiveServ .setOutputCol(getOutputCol) .setInputParser(getInternalInputParser(schema)) 
.setOutputParser(getInternalOutputParser(schema)) - .setHandler(getHandler) + .setHandler(handlingFunc) .setConcurrency(getConcurrency) .setConcurrentTimeout(get(concurrentTimeout)) .setErrorCol(getErrorCol), @@ -204,7 +214,7 @@ trait HasStringIndexType extends HasServiceParams { object TextSentimentV2 extends ComplexParamsReadable[TextSentimentV2] class TextSentimentV2(override val uid: String) - extends TextAnalyticsBase(uid) with BasicLogging { + extends TextAnalyticsBase(uid) with BasicLogging with HasHandler { logClass() def this() = this(Identifiable.randomUID("TextSentimentV2")) @@ -218,7 +228,7 @@ class TextSentimentV2(override val uid: String) object LanguageDetectorV2 extends ComplexParamsReadable[LanguageDetectorV2] class LanguageDetectorV2(override val uid: String) - extends TextAnalyticsBase(uid) with BasicLogging { + extends TextAnalyticsBase(uid) with BasicLogging with HasHandler { logClass() def this() = this(Identifiable.randomUID("LanguageDetectorV2")) @@ -231,7 +241,7 @@ class LanguageDetectorV2(override val uid: String) object EntityDetectorV2 extends ComplexParamsReadable[EntityDetectorV2] class EntityDetectorV2(override val uid: String) - extends TextAnalyticsBase(uid) with BasicLogging { + extends TextAnalyticsBase(uid) with BasicLogging with HasHandler { logClass() def this() = this(Identifiable.randomUID("EntityDetectorV2")) @@ -243,7 +253,7 @@ class EntityDetectorV2(override val uid: String) object NERV2 extends ComplexParamsReadable[NERV2] -class NERV2(override val uid: String) extends TextAnalyticsBase(uid) with BasicLogging { +class NERV2(override val uid: String) extends TextAnalyticsBase(uid) with BasicLogging with HasHandler { logClass() def this() = this(Identifiable.randomUID("NERV2")) @@ -256,7 +266,7 @@ class NERV2(override val uid: String) extends TextAnalyticsBase(uid) with BasicL object KeyPhraseExtractorV2 extends ComplexParamsReadable[KeyPhraseExtractorV2] class KeyPhraseExtractorV2(override val uid: String) - extends TextAnalyticsBase(uid) with BasicLogging { + extends TextAnalyticsBase(uid) with BasicLogging with HasHandler { logClass() def this() = this(Identifiable.randomUID("KeyPhraseExtractorV2")) @@ -266,10 +276,12 @@ class KeyPhraseExtractorV2(override val uid: String) def urlPath: String = "/text/analytics/v2.0/keyPhrases" } +trait TAV3Mixins extends HasModelVersion with HasShowStats with HasStringIndexType with BasicLogging with HasHandler + object TextSentiment extends ComplexParamsReadable[TextSentiment] class TextSentiment(override val uid: String) - extends TextAnalyticsBase(uid) with HasModelVersion with HasShowStats with HasStringIndexType with BasicLogging { + extends TextAnalyticsBase(uid) with TAV3Mixins { logClass() def this() = this(Identifiable.randomUID("TextSentiment")) @@ -295,7 +307,7 @@ class TextSentiment(override val uid: String) object KeyPhraseExtractor extends ComplexParamsReadable[KeyPhraseExtractor] class KeyPhraseExtractor(override val uid: String) - extends TextAnalyticsBase(uid) with HasModelVersion with HasShowStats with BasicLogging { + extends TextAnalyticsBase(uid) with TAV3Mixins { logClass() def this() = this(Identifiable.randomUID("KeyPhraseExtractor")) @@ -308,7 +320,8 @@ class KeyPhraseExtractor(override val uid: String) object NER extends ComplexParamsReadable[NER] class NER(override val uid: String) - extends TextAnalyticsBase(uid) with HasModelVersion with HasShowStats with HasStringIndexType with BasicLogging { + extends TextAnalyticsBase(uid) with HasModelVersion + with HasShowStats with 
HasStringIndexType with BasicLogging with HasHandler { logClass() def this() = this(Identifiable.randomUID("NER")) @@ -321,7 +334,7 @@ class NER(override val uid: String) object PII extends ComplexParamsReadable[PII] class PII(override val uid: String) - extends TextAnalyticsBase(uid) with HasModelVersion with HasShowStats with HasStringIndexType with BasicLogging { + extends TextAnalyticsBase(uid) with TAV3Mixins { logClass() def this() = this(Identifiable.randomUID("PII")) @@ -344,7 +357,7 @@ class PII(override val uid: String) object LanguageDetector extends ComplexParamsReadable[LanguageDetector] class LanguageDetector(override val uid: String) - extends TextAnalyticsBase(uid) with HasModelVersion with HasShowStats with BasicLogging { + extends TextAnalyticsBase(uid) with HasModelVersion with HasShowStats with BasicLogging with HasHandler { logClass() def this() = this(Identifiable.randomUID("LanguageDetector")) @@ -357,7 +370,7 @@ class LanguageDetector(override val uid: String) object EntityDetector extends ComplexParamsReadable[EntityDetector] class EntityDetector(override val uid: String) - extends TextAnalyticsBase(uid) with HasModelVersion with HasShowStats with HasStringIndexType with BasicLogging { + extends TextAnalyticsBase(uid) with TAV3Mixins { logClass() def this() = this(Identifiable.randomUID("EntityDetector")) @@ -366,3 +379,211 @@ class EntityDetector(override val uid: String) def urlPath: String = "/text/analytics/v3.1/entities/linking" } + + +class TextAnalyzeTaskParam(parent: Params, + name: String, + doc: String, + isValid: Seq[TAAnalyzeTask] => Boolean = (_: Seq[TAAnalyzeTask]) => true) + (@transient implicit val dataFormat: JsonFormat[TAAnalyzeTask]) + extends JsonEncodableParam[Seq[TAAnalyzeTask]](parent, name, doc, isValid) { + type ValueType = TAAnalyzeTask + + override def w(value: Seq[TAAnalyzeTask]): ParamPair[Seq[TAAnalyzeTask]] = super.w(value) + + def w(value: java.util.ArrayList[util.HashMap[String, Any]]): ParamPair[Seq[TAAnalyzeTask]] = + super.w(value.asScala.toArray.map(hashMapToTAAnalyzeTask)) + + def hashMapToTAAnalyzeTask(value: util.HashMap[String, Any]): TAAnalyzeTask = { + if (!value.containsKey("parameters")) { + throw new IllegalArgumentException("Task optiosn must include 'parameters' value") + } + if (value.size() > 1) { + throw new IllegalArgumentException("Task options should only include 'parameters' value") + } + val valParameters = value.get("parameters").asInstanceOf[util.HashMap[String, Any]] + val parameters = valParameters.asScala.toMap.map { x => (x._1, x._2.toString) } + TAAnalyzeTask(parameters) + } +} + +object TextAnalyze extends ComplexParamsReadable[TextAnalyze] + +class TextAnalyze(override val uid: String) extends TextAnalyticsBase(uid) + with HasCognitiveServiceInput with HasInternalJsonOutputParser with HasSetLocation + with HasSetLinkedService with BasicAsyncReply { + + import TAJSONFormat._ + + def this() = this(Identifiable.randomUID("TextAnalyze")) + + val entityRecognitionTasks = new TextAnalyzeTaskParam( + this, + "entityRecognitionTasks", + "the entity recognition tasks to perform on submitted documents" + ) + + def getEntityRecognitionTasks: Seq[TAAnalyzeTask] = $(entityRecognitionTasks) + + def setEntityRecognitionTasks(v: Seq[TAAnalyzeTask]): this.type = set(entityRecognitionTasks, v) + + setDefault(entityRecognitionTasks -> Seq[TAAnalyzeTask]()) + + val entityRecognitionPiiTasks = new TextAnalyzeTaskParam( + this, + "entityRecognitionPiiTasks", + "the entity recognition pii tasks to perform on submitted 
documents" + ) + + def getEntityRecognitionPiiTasks: Seq[TAAnalyzeTask] = $(entityRecognitionPiiTasks) + + def setEntityRecognitionPiiTasks(v: Seq[TAAnalyzeTask]): this.type = set(entityRecognitionPiiTasks, v) + + setDefault(entityRecognitionPiiTasks -> Seq[TAAnalyzeTask]()) + + val entityLinkingTasks = new TextAnalyzeTaskParam( + this, + "entityLinkingTasks", + "the entity linking tasks to perform on submitted documents" + ) + + def getEntityLinkingTasks: Seq[TAAnalyzeTask] = $(entityLinkingTasks) + + def setEntityLinkingTasks(v: Seq[TAAnalyzeTask]): this.type = set(entityLinkingTasks, v) + + setDefault(entityLinkingTasks -> Seq[TAAnalyzeTask]()) + + val keyPhraseExtractionTasks = new TextAnalyzeTaskParam( + this, + "keyPhraseExtractionTasks", + "the key phrase extraction tasks to perform on submitted documents" + ) + + def getKeyPhraseExtractionTasks: Seq[TAAnalyzeTask] = $(keyPhraseExtractionTasks) + + def setKeyPhraseExtractionTasks(v: Seq[TAAnalyzeTask]): this.type = set(keyPhraseExtractionTasks, v) + + setDefault(keyPhraseExtractionTasks -> Seq[TAAnalyzeTask]()) + + val sentimentAnalysisTasks = new TextAnalyzeTaskParam( + this, + "sentimentAnalysisTasks", + "the sentiment analysis tasks to perform on submitted documents" + ) + + def getSentimentAnalysisTasks: Seq[TAAnalyzeTask] = $(sentimentAnalysisTasks) + + def setSentimentAnalysisTasks(v: Seq[TAAnalyzeTask]): this.type = set(sentimentAnalysisTasks, v) + + setDefault(sentimentAnalysisTasks -> Seq[TAAnalyzeTask]()) + + override protected def responseDataType: StructType = TAAnalyzeResponse.schema + + def urlPath: String = "/text/analytics/v3.1/analyze" + + override protected def prepareEntity: Row => Option[AbstractHttpEntity] = { _ => None } + + override protected def inputFunc(schema: StructType): Row => Option[HttpRequestBase] = { + { row: Row => + if (shouldSkip(row)) { + None + } else if (getValue(row, text).forall(Option(_).isEmpty)) { + None + } else { + import TAJSONFormat._ + val post = new HttpPost(getUrl) + getValueOpt(row, subscriptionKey).foreach(post.setHeader("Ocp-Apim-Subscription-Key", _)) + post.setHeader("Content-Type", "application/json") + val texts = getValue(row, text) + + val languages: Option[Seq[String]] = (getValueOpt(row, language) match { + case Some(Seq(lang)) => Some(Seq.fill(texts.size)(lang)) + case s => s + }) + + val documents = texts.zipWithIndex.map { case (t, i) => + TADocument(languages.flatMap(ls => Option(ls(i))), i.toString, Option(t).getOrElse("")) + } + val displayName = "SynapseML" + val analysisInput = TAAnalyzeAnalysisInput(documents) + val tasks = TAAnalyzeTasks( + entityRecognitionTasks = getEntityRecognitionTasks, + entityLinkingTasks = getEntityLinkingTasks, + entityRecognitionPiiTasks = getEntityRecognitionPiiTasks, + keyPhraseExtractionTasks = getKeyPhraseExtractionTasks, + sentimentAnalysisTasks = getSentimentAnalysisTasks + ) + val json = TAAnalyzeRequest(displayName, analysisInput, tasks).toJson.compactPrint + post.setEntity(new StringEntity(json, "UTF-8")) + Some(post) + } + } + } + + // TODO refactor to remove duplicate from TextAnalyticsBase + private def getTaskRows(tasksRow: GenericRowWithSchema, taskName: String, documentIndex: Int): Option[Seq[Row]] = { + val namedTaskRow = tasksRow + .getAs[Seq[GenericRowWithSchema]](taskName) + if (namedTaskRow == null) { + None + } else { + val taskResults = namedTaskRow + .map(x => x.getAs[GenericRowWithSchema]("results")) + val rows = taskResults.map(result => { + val documents = result.getAs[Seq[GenericRowWithSchema]]("documents") 
+ val errors = result.getAs[Seq[GenericRowWithSchema]]("errors") + val doc = documents.find { d => d.getAs[String]("id").toInt == documentIndex } + val error = errors.find { e => e.getAs[String]("id").toInt == documentIndex } + val resultRow = Row.fromSeq(Seq(doc, error)) // result/errors per task, per document + resultRow + }) + Some(rows) + } + } + + override protected def unpackBatchUDF: UserDefinedFunction = { + val innerResponseDataType = TAAnalyzeResults.schema + + UDFUtils.oldUdf({ rowOpt: Row => + Option(rowOpt).map { row => + val tasks = row.getAs[GenericRowWithSchema]("tasks") + + // Determine the total number of documents (successful docs + errors) + // We need to handle the fact that entityRecognition might not have been specified + // - i.e. find the first task with results + val taskNames = Seq( + "entityRecognitionTasks", + "entityLinkingTasks", + "entityRecognitionPiiTasks", + "keyPhraseExtractionTasks", + "sentimentAnalysisTasks") + val taskSet = taskNames.map(name => tasks.getAs[Seq[GenericRowWithSchema]](name)) + .filter(r => r != null) + .head + val results = taskSet.map(x => x.getAs[GenericRowWithSchema]("results")) + val docCount = results.head.getAs[Seq[GenericRowWithSchema]]("documents").size + val errorCount = results.head.getAs[Seq[GenericRowWithSchema]]("errors").size + + val rows: Seq[Row] = (0 until (docCount + errorCount)).map(i => { + val entityRecognitionRows = getTaskRows(tasks, "entityRecognitionTasks", i) + val entityLinkingRows = getTaskRows(tasks, "entityLinkingTasks", i) + val entityRecognitionPiiRows = getTaskRows(tasks, "entityRecognitionPiiTasks", i) + val keyPhraseRows = getTaskRows(tasks, "keyPhraseExtractionTasks", i) + val sentimentAnalysisRows = getTaskRows(tasks, "sentimentAnalysisTasks", i) + + val taaResult = Seq( + entityRecognitionRows, + entityLinkingRows, + entityRecognitionPiiRows, + keyPhraseRows, + sentimentAnalysisRows) + val resultRow = Row.fromSeq(taaResult) + resultRow + }) + rows + } + }, ArrayType(innerResponseDataType) + ) + } + +} diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/TextAnalyticsAnalyzeSchemas.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/TextAnalyticsAnalyzeSchemas.scala new file mode 100644 index 0000000000..fdd3afd529 --- /dev/null +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/TextAnalyticsAnalyzeSchemas.scala @@ -0,0 +1,68 @@ +// Copyright (C) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See LICENSE in project root for information. 
+ +package com.microsoft.azure.synapse.ml.cognitive + +import com.microsoft.azure.synapse.ml.core.schema.SparkBindings + +// Text Analytics /analyze endpoint schemas + +case class TAAnalyzeAnalysisInput(documents: Seq[TADocument]) + +object TAAnalyzeAnalysisInput extends SparkBindings[TAAnalyzeAnalysisInput] + +case class TAAnalyzeTask(parameters: Map[String, String]) + +object TAAnalyzeTask extends SparkBindings[TAAnalyzeTask] + +case class TAAnalyzeTasks(entityRecognitionTasks: Seq[TAAnalyzeTask], + entityLinkingTasks: Seq[TAAnalyzeTask], + entityRecognitionPiiTasks: Seq[TAAnalyzeTask], + keyPhraseExtractionTasks: Seq[TAAnalyzeTask], + sentimentAnalysisTasks: Seq[TAAnalyzeTask]) + +object TAAnalyzeTasks extends SparkBindings[TAAnalyzeTasks] + +case class TAAnalyzeRequest(displayName: String, + analysisInput: TAAnalyzeAnalysisInput, + tasks: TAAnalyzeTasks) + +object TAAnalyzeRequest extends SparkBindings[TAAnalyzeRequest] + + +case class TAAnalyzeResponseTaskResults[T](documents: Seq[T], + errors: Seq[TAError], + modelVersion: String) + +case class TAAnalyzeResponseTask[T](state: String, + results: TAAnalyzeResponseTaskResults[T]) + +case class TAAnalyzeResponseTasks(completed: Int, + failed: Int, + inProgress: Int, + total: Int, + entityRecognitionTasks: Option[Seq[TAAnalyzeResponseTask[NERDocV3]]], + entityLinkingTasks: Option[Seq[TAAnalyzeResponseTask[DetectEntitiesScoreV3]]], + entityRecognitionPiiTasks: Option[Seq[TAAnalyzeResponseTask[PIIDocV3]]], + keyPhraseExtractionTasks: Option[Seq[TAAnalyzeResponseTask[KeyPhraseScoreV3]]], + sentimentAnalysisTasks: Option[Seq[TAAnalyzeResponseTask[SentimentScoredDocumentV3]]] + ) + +// API call response +case class TAAnalyzeResponse(status: String, + errors: Option[Seq[TAError]], + displayName: String, + tasks: TAAnalyzeResponseTasks) + +object TAAnalyzeResponse extends SparkBindings[TAAnalyzeResponse] + +case class TAAnalyzeResultTaskResults[T](result: Option[T], + error: Option[TAError]) + +case class TAAnalyzeResult(entityRecognition: Option[Seq[TAAnalyzeResultTaskResults[NERDocV3]]], + entityLinking: Option[Seq[TAAnalyzeResultTaskResults[DetectEntitiesScoreV3]]], + entityRecognitionPii: Option[Seq[TAAnalyzeResultTaskResults[PIIDocV3]]], + keyPhraseExtraction: Option[Seq[TAAnalyzeResultTaskResults[KeyPhraseScoreV3]]], + sentimentAnalysis: Option[Seq[TAAnalyzeResultTaskResults[SentimentScoredDocumentV3]]]) + +object TAAnalyzeResults extends SparkBindings[TAAnalyzeResult] diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/TextAnalyticsSchemas.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/TextAnalyticsSchemas.scala index dd326157a7..253406af62 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/TextAnalyticsSchemas.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/TextAnalyticsSchemas.scala @@ -5,6 +5,8 @@ package com.microsoft.azure.synapse.ml.cognitive import com.microsoft.azure.synapse.ml.core.schema.SparkBindings import spray.json.RootJsonFormat +import org.apache.spark.ml.param.Params +import org.apache.spark.ml.param.ParamValidators // General Text Analytics Schemas @@ -36,7 +38,10 @@ object TAJSONFormat { implicit val DocumentFormat: RootJsonFormat[TADocument] = jsonFormat3(TADocument.apply) implicit val RequestFormat: RootJsonFormat[TARequest] = jsonFormat1(TARequest.apply) - + implicit val AnalysisInputsFormat: RootJsonFormat[TAAnalyzeAnalysisInput] = jsonFormat1(TAAnalyzeAnalysisInput.apply) + implicit val 
AnalysisTaskFormat: RootJsonFormat[TAAnalyzeTask] = jsonFormat1(TAAnalyzeTask.apply) + implicit val AnalysisTasksFormat: RootJsonFormat[TAAnalyzeTasks] = jsonFormat5(TAAnalyzeTasks.apply) + implicit val AnalyzeRequestFormat: RootJsonFormat[TAAnalyzeRequest] = jsonFormat3(TAAnalyzeRequest.apply) } // SentimentV3 Schemas diff --git a/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/TextAnalyticsSuite.scala b/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/TextAnalyticsSuite.scala index 1dd40bae8b..fa5c600d3f 100644 --- a/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/TextAnalyticsSuite.scala +++ b/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/TextAnalyticsSuite.scala @@ -11,11 +11,11 @@ import com.microsoft.azure.synapse.ml.stages.FixedMiniBatchTransformer import org.apache.spark.ml.util.MLReadable import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema import org.apache.spark.sql.functions.col -import org.apache.spark.sql.{DataFrame, Row} +import org.apache.spark.sql.{DataFrame, Dataset, Row} trait TextEndpoint { - lazy val textKey = sys.env.getOrElse("TEXT_API_KEY", Secrets.CognitiveApiKey) - lazy val textApiLocation = sys.env.getOrElse("TEXT_API_LOCATION", "eastus") + lazy val textKey: String = sys.env.getOrElse("TEXT_API_KEY", Secrets.CognitiveApiKey) + lazy val textApiLocation: String = sys.env.getOrElse("TEXT_API_LOCATION", "eastus") } class LanguageDetectorSuite extends TransformerFuzzing[LanguageDetectorV2] with TextEndpoint { @@ -42,7 +42,7 @@ class LanguageDetectorSuite extends TransformerFuzzing[LanguageDetectorV2] with .getItem("name")) .select("lang") .collect().toList - assert(replies(0).getString(0) == "English" && replies(2).getString(0) == "Spanish") + assert(replies.head.getString(0) == "English" && replies(2).getString(0) == "Spanish") } test("Is serializable ") { @@ -50,7 +50,7 @@ class LanguageDetectorSuite extends TransformerFuzzing[LanguageDetectorV2] with .withColumn("lang", col("replies").getItem(0) .getItem("detectedLanguages").getItem(0) .getItem("name")) - .select("text2","lang") + .select("text2", "lang") .sort("text2") .collect().toList assert(replies(2).getString(1) == "English" && replies(3).getString(1) == "Spanish") @@ -94,7 +94,7 @@ class LanguageDetectorV3Suite extends TransformerFuzzing[LanguageDetector] with .getItem("name")) .select("lang") .collect().toList - assert(replies(0).getString(0) == "English" && replies(2).getString(0) == "Spanish") + assert(replies.head.getString(0) == "English" && replies(2).getString(0) == "Spanish") } override def testObjects(): Seq[TestObject[LanguageDetector]] = @@ -165,6 +165,7 @@ class EntityDetectorSuiteV3 extends TransformerFuzzing[EntityDetector] with Text } trait TextSentimentBaseSuite extends TestBase with TextEndpoint { + import spark.implicits._ lazy val df: DataFrame = Seq( @@ -191,13 +192,13 @@ class TextSentimentV3Suite extends TransformerFuzzing[TextSentiment] with TextSe col("replies").alias("scoredDocuments") ).collect().toList - assert(List(4,5).forall(results(_).get(0) == null)) + assert(List(4, 5).forall(results(_).get(0) == null)) assert( results(0).getSeq[Row](0).head.getString(0) == "positive" && - results(2).getSeq[Row](0).head.getString(0) == "negative") + results(2).getSeq[Row](0).head.getString(0) == "negative") } - test("batch usage"){ + test("batch usage") { val t = new TextSentiment() .setSubscriptionKey(textKey) .setLocation("eastus") @@ -229,11 +230,11 @@ class 
TextSentimentSuite extends TransformerFuzzing[TextSentimentV2] with TextSe col("replies").getItem(0).getItem("score")) .select("score").collect().toList - assert(List(4,5).forall(results(_).get(0) == null)) - assert(results(0).getFloat(0) > .5 && results(2).getFloat(0) < .5) + assert(List(4, 5).forall(results(_).get(0) == null)) + assert(results.head.getFloat(0) > .5 && results(2).getFloat(0) < .5) } - test("batch usage"){ + test("batch usage") { val t = new TextSentimentV2() .setSubscriptionKey(textKey) .setLocation("eastus") @@ -276,7 +277,7 @@ class KeyPhraseExtractorSuite extends TransformerFuzzing[KeyPhraseExtractorV2] w println(results) - assert(results(0).getSeq[String](0).toSet === Set("world", "input text")) + assert(results.head.getSeq[String](0).toSet === Set("world", "input text")) assert(results(2).getSeq[String](0).toSet === Set("carretera", "tráfico", "día")) } @@ -290,6 +291,7 @@ class KeyPhraseExtractorV3Suite extends TransformerFuzzing[KeyPhraseExtractor] w import spark.implicits._ + //noinspection ScalaStyle lazy val df: DataFrame = Seq( ("en", "Hello world. This is some input text that I love."), ("fr", "Bonjour tout le monde"), @@ -310,7 +312,7 @@ class KeyPhraseExtractorV3Suite extends TransformerFuzzing[KeyPhraseExtractor] w println(results) - assert(results(0).getSeq[String](0).toSet === Set("Hello world", "input text")) + assert(results.head.getSeq[String](0).toSet === Set("Hello world", "input text")) assert(results(2).getSeq[String](0).toSet === Set("mucho tráfico", "día", "carretera", "ayer")) } @@ -321,12 +323,13 @@ class KeyPhraseExtractorV3Suite extends TransformerFuzzing[KeyPhraseExtractor] w } class NERSuite extends TransformerFuzzing[NERV2] with TextEndpoint { + import spark.implicits._ lazy val df: DataFrame = Seq( - ("1", "en", "Jeff bought three dozen eggs because there was a 50% discount."), - ("2", "en", "The Great Depression began in 1929. By 1933, the GDP in America fell by 25%.") - ).toDF("id", "language", "text") + ("en", "Jeff bought three dozen eggs because there was a 50% discount."), + ("en", "The Great Depression began in 1929. 
By 1933, the GDP in America fell by 25%.") + ).toDF("language", "text") lazy val n: NERV2 = new NERV2() .setSubscriptionKey(textKey) @@ -360,12 +363,13 @@ class NERSuite extends TransformerFuzzing[NERV2] with TextEndpoint { } class NERSuiteV3 extends TransformerFuzzing[NER] with TextEndpoint { + import spark.implicits._ lazy val df: DataFrame = Seq( - ("1", "en", "I had a wonderful trip to Seattle last week."), - ("2", "en", "I visited Space Needle 2 times.") - ).toDF("id", "language", "text") + ("en", "I had a wonderful trip to Seattle last week."), + ("en", "I visited Space Needle 2 times.") + ).toDF("language", "text") lazy val n: NER = new NER() .setSubscriptionKey(textKey) @@ -399,14 +403,15 @@ class NERSuiteV3 extends TransformerFuzzing[NER] with TextEndpoint { } class PIISuiteV3 extends TransformerFuzzing[PII] with TextEndpoint { + import spark.implicits._ lazy val df: DataFrame = Seq( - ("1", "en", "My SSN is 859-98-0987"), - ("2", "en", + ("en", "My SSN is 859-98-0987"), + ("en", "Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check."), - ("3", "en", "Is 998.214.865-68 your Brazilian CPF number?") - ).toDF("id", "language", "text") + ("en", "Is 998.214.865-68 your Brazilian CPF number?") + ).toDF("language", "text") lazy val n: PII = new PII() .setSubscriptionKey(textKey) @@ -422,7 +427,7 @@ class PIISuiteV3 extends TransformerFuzzing[PII] with TextEndpoint { .getItem(0) .getItem("redactedText")) .select("redactedText") - val redactedText = redactedTexts.collect().head(0).toString() + val redactedText = redactedTexts.collect().head(0).toString assert(redactedText === "My SSN is ***********") val matches = results.withColumn("match", @@ -447,3 +452,208 @@ class PIISuiteV3 extends TransformerFuzzing[PII] with TextEndpoint { override def reader: MLReadable[_] = PII } + +class TextAnalyzeSuite extends TransformerFuzzing[TextAnalyze] with TextEndpoint { + + import spark.implicits._ + + lazy val dfBasic: DataFrame = Seq( + ("en", "I had a wonderful trip to Seattle last week and visited Microsoft."), + ("invalid", "This is irrelevant as the language is invalid") + ).toDF("language", "text") + + lazy val dfBatched: DataFrame = Seq( + ( + Seq("en", "invalid"), + Seq("I had a wonderful trip to Seattle last week and visited Microsoft.", + "This is irrelevant as the language is invalid") + ) + ).toDF("language", "text") + + lazy val n: TextAnalyze = new TextAnalyze() + .setSubscriptionKey(textKey) + .setLocation(textApiLocation) + .setLanguageCol("language") + .setOutputCol("response") + .setErrorCol("error") + .setEntityRecognitionTasks(Seq(TAAnalyzeTask(Map("model-version" -> "latest")))) + .setEntityLinkingTasks(Seq(TAAnalyzeTask(Map("model-version" -> "latest")))) + .setEntityRecognitionPiiTasks(Seq(TAAnalyzeTask(Map("model-version" -> "latest")))) + .setKeyPhraseExtractionTasks(Seq(TAAnalyzeTask(Map("model-version" -> "latest")))) + .setSentimentAnalysisTasks(Seq(TAAnalyzeTask(Map("model-version" -> "latest")))) + + def getEntityRecognitionResults(results: Dataset[Row], resultIndex: Int): Array[Row] = { + results.withColumn("entityRecognition", + col("response") + .getItem(resultIndex) + .getItem("entityRecognition") + .getItem(0) + .getItem("result") + .getItem("entities") + .getItem(0) + ).select("entityRecognition") + .collect() + } + + def getEntityRecognitionErrors(results: Dataset[Row], resultIndex: Int): Array[Row] = { + results.withColumn("error", + col("response") + .getItem(resultIndex) + .getItem("entityRecognition") + 
.getItem(0) + .getItem("error") + .getItem("error") + ).select("error") + .collect() + } + + def getEntityRecognitionPiiResults(results: Dataset[Row], resultIndex: Int): Array[Row] = { + results.withColumn("entityRecognitionPii", + col("response") + .getItem(resultIndex) + .getItem("entityRecognitionPii") + .getItem(0) + .getItem("result") + .getItem("entities") + .getItem(0) + ).select("entityRecognitionPii") + .collect() + } + + def getKeyPhraseResults(results: Dataset[Row], resultIndex: Int): Array[Row] = { + results.withColumn("keyPhrase", + col("response") + .getItem(0) + .getItem("keyPhraseExtraction") + .getItem(0) + .getItem("result") + .getItem("keyPhrases") + .getItem(0) + ).select("keyPhrase") + .collect() + } + + def getSentimentAnalysisResults(results: Dataset[Row], resultIndex: Int): Array[Row] = { + results.withColumn("sentimentAnalysis", + col("response") + .getItem(0) + .getItem("sentimentAnalysis") + .getItem(0) + .getItem("result") + .getItem("sentiment") + ).select("sentimentAnalysis") + .collect() + } + + def getEntityLinkingResults(results: Dataset[Row], resultIndex: Int): Array[Row] = { + results.withColumn("entityLinking", + col("response") + .getItem(0) + .getItem("entityLinking") + .getItem(0) + .getItem("result") + .getItem("entities") + .getItem(0) + ).select("entityLinking") + .collect() + } + + test("Basic Usage") { + val results = n.transform(dfBasic).cache() + // Validate first row (successful execution) + + // entity recognition + val entityRows = getEntityRecognitionResults(results, resultIndex = 0) + val entityRow = entityRows(0) + val entityResult = entityRow(0).asInstanceOf[GenericRowWithSchema] + + assert(entityResult.getAs[String]("text") === "trip") + assert(entityResult.getAs[Int]("offset") === 18) + assert(entityResult.getAs[Int]("length") === 4) + assert(entityResult.getAs[Double]("confidenceScore") > 0.66) + assert(entityResult.getAs[String]("category") === "Event") + + // entity recognition pii + val entityPiiRows = getEntityRecognitionPiiResults(results, resultIndex = 0) + val entityPiiRow = entityPiiRows(0) + val entityPiiResult = entityPiiRow(0).asInstanceOf[GenericRowWithSchema] + assert(entityPiiResult.getAs[String]("text") === "last week") + assert(entityPiiResult.getAs[Int]("offset") === 34) + assert(entityPiiResult.getAs[Int]("length") === 9) + assert(entityPiiResult.getAs[Double]("confidenceScore") > 0.79) + assert(entityPiiResult.getAs[String]("category") === "DateTime") + + // key phrases + val keyPhraseRows = getKeyPhraseResults(results, resultIndex = 0) + val keyPhraseRow = keyPhraseRows(0).asInstanceOf[GenericRowWithSchema] + assert(keyPhraseRow.getAs[String](0) === "wonderful trip") + + // text sentiment + val sentimentAnalysisRows = getSentimentAnalysisResults(results, resultIndex = 0) + val sentimentAnalysisRow = sentimentAnalysisRows(0).asInstanceOf[GenericRowWithSchema] + assert(sentimentAnalysisRow.getAs[String](0) === "positive") + + // entity linking + val entityLinkingRows = getEntityLinkingResults(results, resultIndex = 0) + val entityLinkingRow = entityLinkingRows(0).asInstanceOf[GenericRowWithSchema] + val entityLinkingResult = entityLinkingRow(0).asInstanceOf[GenericRowWithSchema] + assert(entityLinkingResult.getAs[String]("name") === "Seattle") + + // Validate second row has error + val entityRows2 = getEntityRecognitionErrors(results, resultIndex = 0) + val entityRow2 = entityRows2(1).asInstanceOf[GenericRowWithSchema] + assert(entityRow2.getAs[String]("error").contains("\"code\":\"UnsupportedLanguageCode\"")) + } + 
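+  // Note on indexing: dfBatched submits both documents in a single row, so that row's "response"
+  // column holds one TAAnalyzeResult per document - resultIndex 0 addresses the valid English
+  // document and resultIndex 1 the invalid-language document. In dfBasic each row carries a single
+  // document, so resultIndex is always 0 and the per-row results are distinguished after collect().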
+ test("Batched Usage") { + val results = n.transform(dfBatched).cache() + // First batch entry + + // entity recognition + val entityRows = getEntityRecognitionResults(results, resultIndex = 0) + val entityRow = entityRows(0) + val entityResult = entityRow(0).asInstanceOf[GenericRowWithSchema] + + assert(entityResult.getAs[String]("text") === "trip") + assert(entityResult.getAs[Int]("offset") === 18) + assert(entityResult.getAs[Int]("length") === 4) + assert(entityResult.getAs[Double]("confidenceScore") > 0.66) + assert(entityResult.getAs[String]("category") === "Event") + + // entity recognition pii + val entityPiiRows = getEntityRecognitionPiiResults(results, resultIndex = 0) + val entityPiiRow = entityPiiRows(0) + val entityPiiResult = entityPiiRow(0).asInstanceOf[GenericRowWithSchema] + assert(entityPiiResult.getAs[String]("text") === "last week") + assert(entityPiiResult.getAs[Int]("offset") === 34) + assert(entityPiiResult.getAs[Int]("length") === 9) + assert(entityPiiResult.getAs[Double]("confidenceScore") > 0.79) + assert(entityPiiResult.getAs[String]("category") === "DateTime") + + // key phrases + val keyPhraseRows = getKeyPhraseResults(results, resultIndex = 0) + val keyPhraseRow = keyPhraseRows(0).asInstanceOf[GenericRowWithSchema] + assert(keyPhraseRow.getAs[String](0) === "wonderful trip") + + // text sentiment + val sentimentAnalysisRows = getSentimentAnalysisResults(results, resultIndex = 0) + val sentimentAnalysisRow = sentimentAnalysisRows(0).asInstanceOf[GenericRowWithSchema] + assert(sentimentAnalysisRow.getAs[String](0) === "positive") + + // entity linking + val entityLinkingRows = getEntityLinkingResults(results, resultIndex = 0) + val entityLinkingRow = entityLinkingRows(0).asInstanceOf[GenericRowWithSchema] + val entityLinkingResult = entityLinkingRow(0).asInstanceOf[GenericRowWithSchema] + assert(entityLinkingResult.getAs[String]("name") === "Seattle") + + // Second batch entry + val entityRows2 = getEntityRecognitionErrors(results, resultIndex = 1) + val entityRow2 = entityRows2(0).asInstanceOf[GenericRowWithSchema] + assert(entityRow2.getAs[String]("error").contains("\"code\":\"UnsupportedLanguageCode\"")) + } + + override def testObjects(): Seq[TestObject[TextAnalyze]] = + Seq(new TestObject[TextAnalyze](n, dfBasic, dfBatched)) + + override def reader: MLReadable[_] = TextAnalyze +} diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/io/http/HTTPTransformer.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/io/http/HTTPTransformer.scala index c905468fab..cc117fc082 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/io/http/HTTPTransformer.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/io/http/HTTPTransformer.scala @@ -7,6 +7,7 @@ import com.microsoft.azure.synapse.ml.codegen.Wrappable import com.microsoft.azure.synapse.ml.core.contracts.{HasInputCol, HasOutputCol} import com.microsoft.azure.synapse.ml.io.http.HandlingUtils.HandlerFunc import com.microsoft.azure.synapse.ml.logging.BasicLogging +import org.apache.http.impl.client.CloseableHttpClient import org.apache.spark.injections.UDFUtils import org.apache.spark.ml.param._ import org.apache.spark.ml.util.Identifiable @@ -28,6 +29,12 @@ trait HasHandler extends Params { def setHandler(v: HandlerFunc): HasHandler.this.type = { set(handler, UDFUtils.oldUdf(v, StringType)) } + + setDefault(handler -> HandlingUtils.advancedUDF(100)) //scalastyle:ignore magic.number + + def handlingFunc(client: CloseableHttpClient, + request: HTTPRequestData): HTTPResponseData = + 
getHandler(client, request) } trait HTTPParams extends Wrappable { @@ -88,19 +95,19 @@ class HTTPTransformer(val uid: String) with ComplexParamsWritable with BasicLogging { logClass() - setDefault(handler -> HandlingUtils.advancedUDF(100,500,1000)) //scalastyle:ignore magic.number + setDefault(handler -> HandlingUtils.advancedUDF(100, 500, 1000)) //scalastyle:ignore magic.number def this() = this(Identifiable.randomUID("HTTPTransformer")) val clientHolder = SharedVariable { getConcurrency match { - case 1 => new SingleThreadedHTTPClient(getHandler, (getTimeout*1000).toInt) + case 1 => new SingleThreadedHTTPClient(getHandler, (getTimeout * 1000).toInt) case n if n > 1 => val dur = get(concurrentTimeout) - .map(ct => Duration.fromNanos((ct* math.pow(10, 9)).toLong)) //scalastyle:ignore magic.number + .map(ct => Duration.fromNanos((ct * math.pow(10, 9)).toLong)) //scalastyle:ignore magic.number .getOrElse(Duration.Inf) val ec = ExecutionContext.global - new AsyncHTTPClient(getHandler,n, dur, (getTimeout*1000).toInt)(ec) + new AsyncHTTPClient(getHandler, n, dur, (getTimeout * 1000).toInt)(ec) } } @@ -134,7 +141,7 @@ class HTTPTransformer(val uid: String) def transformSchema(schema: StructType): StructType = { assert(schema(getInputCol).dataType == HTTPSchema.Request) - schema.add(getOutputCol, HTTPSchema.Response, nullable=true) + schema.add(getOutputCol, HTTPSchema.Response, nullable = true) } } diff --git a/core/src/test/scala/com/microsoft/azure/synapse/ml/io/split2/DistributedHTTPSuite.scala b/core/src/test/scala/com/microsoft/azure/synapse/ml/io/split2/DistributedHTTPSuite.scala index a8526841b8..8065137e80 100644 --- a/core/src/test/scala/com/microsoft/azure/synapse/ml/io/split2/DistributedHTTPSuite.scala +++ b/core/src/test/scala/com/microsoft/azure/synapse/ml/io/split2/DistributedHTTPSuite.scala @@ -74,7 +74,7 @@ trait HTTPTestUtils extends TestBase with WithFreeUrl with HasHttpClient { new BasicResponseHandler().handleResponse(res) } res.close() - //println("request suceeded") + //println("request succeeded") (out, (t1 - t0).toDouble / 1e6) } diff --git a/core/src/test/scala/com/microsoft/azure/synapse/ml/nbtest/DatabricksUtilities.scala b/core/src/test/scala/com/microsoft/azure/synapse/ml/nbtest/DatabricksUtilities.scala index 9fef06abbc..51576096aa 100644 --- a/core/src/test/scala/com/microsoft/azure/synapse/ml/nbtest/DatabricksUtilities.scala +++ b/core/src/test/scala/com/microsoft/azure/synapse/ml/nbtest/DatabricksUtilities.scala @@ -277,7 +277,7 @@ object DatabricksUtilities extends HasHttpClient { val error = finalState match { case Some("SUCCESS") => - if (logLevel >= 1) println(s"Notebook $nbName Suceeded") + if (logLevel >= 1) println(s"Notebook $nbName Succeeded") None case Some(state) => Some(new RuntimeException(s"Notebook $nbName failed with state $state. 
" + diff --git a/notebooks/features/cognitive_services/CognitiveServices - Analyze Text.ipynb b/notebooks/features/cognitive_services/CognitiveServices - Analyze Text.ipynb new file mode 100644 index 0000000000..db990c490d --- /dev/null +++ b/notebooks/features/cognitive_services/CognitiveServices - Analyze Text.ipynb @@ -0,0 +1,118 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Cognitive Services - Analyze Text\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "if os.environ.get(\"AZURE_SERVICE\", None) == \"Microsoft.ProjectArcadia\":\n", + " from pyspark.sql import SparkSession\n", + " spark = SparkSession.builder.getOrCreate()\n", + " from notebookutils.mssparkutils.credentials import getSecret\n", + " os.environ['TEXT_API_KEY'] = getSecret(\"mmlspark-keys\", \"mmlspark-cs-key\")\n", + "\n", + "#put your service keys here\n", + "key = os.environ['TEXT_API_KEY']\n", + "location = 'eastus'" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "df = spark.createDataFrame(data=[\n", + " [\"en\", \"Hello Seattle\"],\n", + " [\"en\", \"There once was a dog who lived in London and thought she was a human\"]\n", + " ], \n", + " schema=[\"language\",\"text\"])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "display(df)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from synapse.ml.cognitive import *\n", + "\n", + "text_analyze = (TextAnalyze()\n", + " .setLocation(location)\n", + " .setSubscriptionKey(key)\n", + " .setTextCol(\"text\")\n", + " .setOutputCol(\"textAnalysis\")\n", + " .setErrorCol(\"error\")\n", + " .setLanguageCol(\"language\")\n", + " # set the tasks to perform\n", + " .setEntityRecognitionTasks([{\"parameters\": { \"model-version\": \"latest\"}}])\n", + " .setKeyPhraseExtractionTasks([{\"parameters\": { \"model-version\": \"latest\"}}])\n", + " # Uncomment these lines to add more tasks\n", + " # .setEntityRecognitionPiiTasks([{\"parameters\": { \"model-version\": \"latest\"}}])\n", + " # .setEntityLinkingTasks([{\"parameters\": { \"model-version\": \"latest\"}}])\n", + " # .setSentimentAnalysisTasks([{\"parameters\": { \"model-version\": \"latest\"}}])\n", + " )\n", + "\n", + "df_results = text_analyze.transform(df)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "display(df_results)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from pyspark.sql.functions import col\n", + "\n", + "# reformat and display for easier viewing\n", + "display(\n", + " df_results.select(\"language\", \"text\", \"error\", col(\"textAnalysis\").getItem(0)) # we are not batching so only have a single result\n", + " .select(\"language\", \"text\", \"error\", \"textAnalysis[0].*\") # explode the Text Analytics tasks into columns\n", + ")" + ] + } + ], + "metadata": { + "description": null, + "kernelspec": { + "display_name": "Synapse PySpark", + "name": "synapse_pyspark" + }, + "language_info": { + "name": "python" + }, + "save_output": true + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file From ff2aa020693b6521da988250376557bf128301ef Mon Sep 17 00:00:00 2001 From: Ilya Matiach Date: Fri, 3 Dec 2021 
11:43:19 -0500 Subject: [PATCH 36/40] perf: improve lightgbm training performance 4x-10x by setting num_threads to be cores-1 (#1282) --- .../azure/synapse/ml/lightgbm/LightGBMBase.scala | 8 ++++++-- .../azure/synapse/ml/lightgbm/LightGBMClassifier.scala | 2 +- .../azure/synapse/ml/lightgbm/LightGBMRanker.scala | 2 +- .../azure/synapse/ml/lightgbm/LightGBMRegressor.scala | 2 +- 4 files changed, 9 insertions(+), 5 deletions(-) diff --git a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMBase.scala b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMBase.scala index 08f30a7c06..0ad6ac64de 100644 --- a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMBase.scala +++ b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMBase.scala @@ -247,8 +247,12 @@ trait LightGBMBase[TrainedModel <: Model[TrainedModel]] extends Estimator[Traine * * @return ExecutionParams object containing parameters related to LightGBM execution. */ - protected def getExecutionParams: ExecutionParams = { - ExecutionParams(getChunkSize, getMatrixType, getNumThreads, getUseSingleDatasetMode) + protected def getExecutionParams(numTasksPerExec: Int): ExecutionParams = { + val execNumThreads = + if (getUseSingleDatasetMode) get(numThreads).getOrElse(numTasksPerExec - 1) + else getNumThreads + + ExecutionParams(getChunkSize, getMatrixType, execNumThreads, getUseSingleDatasetMode) } protected def getColumnParams: ColumnParams = { diff --git a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMClassifier.scala b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMClassifier.scala index 887903dae3..1bbc397849 100644 --- a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMClassifier.scala +++ b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMClassifier.scala @@ -54,7 +54,7 @@ class LightGBMClassifier(override val uid: String) getIsUnbalance, getVerbosity, categoricalIndexes, actualNumClasses, getBoostFromAverage, getBoostingType, get(lambdaL1), get(lambdaL2), get(isProvideTrainingMetric), get(metric), get(minGainToSplit), get(maxDeltaStep), getMaxBinByFeature, get(minDataInLeaf), getSlotNames, - getDelegate, getDartParams, getExecutionParams, getObjectiveParams) + getDelegate, getDartParams, getExecutionParams(numTasksPerExec), getObjectiveParams) } def getModel(trainParams: TrainParams, lightGBMBooster: LightGBMBooster): LightGBMClassificationModel = { diff --git a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRanker.scala b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRanker.scala index 65d1dcb47c..3a18d26bdf 100644 --- a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRanker.scala +++ b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRanker.scala @@ -60,7 +60,7 @@ class LightGBMRanker(override val uid: String) getVerbosity, categoricalIndexes, getBoostingType, get(lambdaL1), get(lambdaL2), getMaxPosition, getLabelGain, get(isProvideTrainingMetric), get(metric), getEvalAt, get(minGainToSplit), get(maxDeltaStep), getMaxBinByFeature, get(minDataInLeaf), getSlotNames, getDelegate, getDartParams, - getExecutionParams, getObjectiveParams) + getExecutionParams(numTasksPerExec), getObjectiveParams) } def getModel(trainParams: TrainParams, lightGBMBooster: LightGBMBooster): LightGBMRankerModel = { diff --git 
a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRegressor.scala b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRegressor.scala index 99f5c12f71..6b070ccfe9 100644 --- a/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRegressor.scala +++ b/lightgbm/src/main/scala/com/microsoft/azure/synapse/ml/lightgbm/LightGBMRegressor.scala @@ -70,7 +70,7 @@ class LightGBMRegressor(override val uid: String) getBoostFromAverage, getBoostingType, get(lambdaL1), get(lambdaL2), get(isProvideTrainingMetric), get(metric), get(minGainToSplit), get(maxDeltaStep), getMaxBinByFeature, get(minDataInLeaf), getSlotNames, getDelegate, - getDartParams, getExecutionParams, getObjectiveParams) + getDartParams, getExecutionParams(numTasksPerExec), getObjectiveParams) } def getModel(trainParams: TrainParams, lightGBMBooster: LightGBMBooster): LightGBMRegressionModel = { From a58ad762236c200c1fd6b7f739a85e736bf27d38 Mon Sep 17 00:00:00 2001 From: Serena Ruan <82044803+serena-ruan@users.noreply.github.com> Date: Mon, 6 Dec 2021 16:14:56 +0800 Subject: [PATCH 37/40] feat: add multivariate anomaly detection (#1272) * feat: add multivariate anomaly detection * update correct format of classes * update diagnosticsInfo as ServiceParam * fix modelReader * fix fuzzingtest * update retry time * move MAD test to a separate folder since the training process takes a long time * ignore serialization fuzzing test for MAD since it takes too long; fix deleting modelId error * move MAD suite back to split1 * update waiting time * ignore experiment fuzzing test as well * address comments * test sparklyr version Co-authored-by: Mark Hamilton --- .../ml/cognitive/FormOntologyLearner.scala | 42 +- .../MultivariateAnomalyDetection.scala | 397 ++++++++++++++++++ .../MultivariateAnomalyDetectorSchemas.scala | 93 ++++ .../MultivariateAnamolyDetectionSuite.scala | 200 +++++++++ .../ml/core/test/fuzzing/FuzzingTest.scala | 3 + 5 files changed, 716 insertions(+), 19 deletions(-) create mode 100644 cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/MultivariateAnomalyDetection.scala create mode 100644 cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/MultivariateAnomalyDetectorSchemas.scala create mode 100644 cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/MultivariateAnamolyDetectionSuite.scala diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/FormOntologyLearner.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/FormOntologyLearner.scala index 4dd0fe5218..a53d787b64 100644 --- a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/FormOntologyLearner.scala +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/FormOntologyLearner.scala @@ -53,21 +53,23 @@ class FormOntologyLearner(override val uid: String) extends Estimator[FormOntolo } override def fit(dataset: Dataset[_]): FormOntologyTransformer = { - val fromRow = AnalyzeResponse.makeFromRowConverter - - def combine(st1: StructType, st2: StructType): StructType = { - FormOntologyLearner.combineDataTypes(st1, st2).asInstanceOf[StructType] - } - - val mergedSchema = dataset.toDF() - .select(col(getInputCol)) - .map(extractOntology(fromRow))(Encoders.kryo[StructType]) - .reduce(combine _) - - new FormOntologyTransformer() - .setInputCol(getInputCol) - .setOutputCol(getOutputCol) - .setOntology(mergedSchema) + logFit({ + val fromRow = AnalyzeResponse.makeFromRowConverter + + def 
combine(st1: StructType, st2: StructType): StructType = { + FormOntologyLearner.combineDataTypes(st1, st2).asInstanceOf[StructType] + } + + val mergedSchema = dataset.toDF() + .select(col(getInputCol)) + .map(extractOntology(fromRow))(Encoders.kryo[StructType]) + .reduce(combine _) + + new FormOntologyTransformer() + .setInputCol(getInputCol) + .setOutputCol(getOutputCol) + .setOntology(mergedSchema) + }) } override def copy(extra: ParamMap): Estimator[FormOntologyTransformer] = defaultCopy(extra) @@ -109,11 +111,13 @@ class FormOntologyTransformer(override val uid: String) extends Model[FormOntolo } override def transform(dataset: Dataset[_]): DataFrame = { - val fromRow = AnalyzeResponse.makeFromRowConverter - val convertToOntologyUDF = UDFUtils.oldUdf(convertToOntology(fromRow) _, getOntology) + logTransform[DataFrame]({ + val fromRow = AnalyzeResponse.makeFromRowConverter + val convertToOntologyUDF = UDFUtils.oldUdf(convertToOntology(fromRow) _, getOntology) - dataset.toDF() - .withColumn(getOutputCol, convertToOntologyUDF(col(getInputCol))) + dataset.toDF() + .withColumn(getOutputCol, convertToOntologyUDF(col(getInputCol))) + }) } override def transformSchema(schema: StructType): StructType = { diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/MultivariateAnomalyDetection.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/MultivariateAnomalyDetection.scala new file mode 100644 index 0000000000..a47f197b7d --- /dev/null +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/MultivariateAnomalyDetection.scala @@ -0,0 +1,397 @@ +// Copyright (C) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See LICENSE in project root for information. + +package com.microsoft.azure.synapse.ml.cognitive + +import com.microsoft.azure.synapse.ml.build.BuildInfo +import com.microsoft.azure.synapse.ml.codegen.Wrappable +import com.microsoft.azure.synapse.ml.cognitive.MADJsonProtocol._ +import com.microsoft.azure.synapse.ml.core.contracts.HasOutputCol +import com.microsoft.azure.synapse.ml.core.schema.DatasetExtensions +import com.microsoft.azure.synapse.ml.io.http.HandlingUtils.{convertAndClose, sendWithRetries} +import com.microsoft.azure.synapse.ml.io.http._ +import com.microsoft.azure.synapse.ml.logging.BasicLogging +import com.microsoft.azure.synapse.ml.stages.{DropColumns, Lambda} +import org.apache.commons.io.IOUtils +import org.apache.http.client.methods.HttpGet +import org.apache.http.entity.{AbstractHttpEntity, ContentType, StringEntity} +import org.apache.http.impl.client.CloseableHttpClient +import org.apache.spark.ml._ +import org.apache.spark.ml.param.{Param, ParamMap, ServiceParam} +import org.apache.spark.ml.util._ +import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema +import org.apache.spark.sql.functions.{col, lit, struct} +import org.apache.spark.sql.types._ +import org.apache.spark.sql.{DataFrame, Dataset, Row} +import spray.json._ + +import java.net.URI +import java.util.concurrent.TimeoutException +import scala.concurrent.blocking +import scala.language.existentials + + +trait HasMADSource extends HasServiceParams { + val source = new ServiceParam[String](this, "source", "The blob link to the input data. " + + "It should be a zipped folder containing csv files. Each csv file should has two columns with header 'timestamp'" + + " and 'value' (case sensitive). The file name will be used as the variable name. 
The variables used for" + + " detection should be exactly the same as for training. Please refer to the sample data to prepare your" + + " own data accordingly.", isRequired = true) + + def setSource(v: String): this.type = setScalarParam(source, v) + + def setSourceCol(v: String): this.type = setVectorParam(source, v) + + def getSource: String = getScalarParam(source) + + def getSourceCol: String = getVectorParam(source) +} + +trait HasMADStartTime extends HasServiceParams { + val startTime = new ServiceParam[String](this, "startTime", "A required field, start time" + + " of data to be used for detection/generating multivariate anomaly detection model, should be date-time.", + isRequired = true) + + def setStartTime(v: String): this.type = setScalarParam(startTime, v) + + def setStartTimeCol(v: String): this.type = setVectorParam(startTime, v) + + def getStartTime: String = getScalarParam(startTime) + + def getStartTimeCol: String = getVectorParam(startTime) + +} + +trait HasMADEndTime extends HasServiceParams { + val endTime = new ServiceParam[String](this, "endTime", "A required field, end time of data" + + " to be used for detection/generating multivariate anomaly detection model, should be date-time.", + isRequired = true) + + def setEndTime(v: String): this.type = setScalarParam(endTime, v) + + def setEndTimeCol(v: String): this.type = setVectorParam(endTime, v) + + def getEndTime: String = getScalarParam(endTime) + + def getEndTimeCol: String = getVectorParam(endTime) +} + +trait MADBase extends HasAsyncReply with HasMADSource with HasMADStartTime with HasMADEndTime + with HasCognitiveServiceInput with HasInternalJsonOutputParser with HasSetLocation with Wrappable + with HTTPParams with HasOutputCol with HasURL with ComplexParamsWritable + with HasSubscriptionKey with HasErrorCol with BasicLogging { + + setDefault( + outputCol -> (this.uid + "_output"), + errorCol -> (this.uid + "_error")) + + protected def queryForResult(key: Option[String], + client: CloseableHttpClient, + location: URI): Option[HTTPResponseData] = { + val get = new HttpGet() + get.setURI(location) + key.foreach(get.setHeader("Ocp-Apim-Subscription-Key", _)) + get.setHeader("User-Agent", s"synapseml/${BuildInfo.version}${HeaderValues.PlatformInfo}") + val resp = convertAndClose(sendWithRetries(client, get, getBackoffs)) + get.releaseConnection() + val fields = IOUtils.toString(resp.entity.get.content, "UTF-8").parseJson.asJsObject.fields + val status = if (fields.keySet.contains("modelInfo")) { + fields("modelInfo").asInstanceOf[JsObject].fields + .get("status").map(_.convertTo[String]).get.toLowerCase() + } else if (fields.keySet.contains("summary")) { + fields("summary").asInstanceOf[JsObject] + .fields.get("status").map(_.convertTo[String]).get.toLowerCase() + } else { + "None" + } + status match { + case "ready" | "failed" => Some(resp) + case "created" | "running" => None + case s => throw new RuntimeException(s"Received unknown status code: $s") + } + } + + protected def handlingFunc(client: CloseableHttpClient, + request: HTTPRequestData): HTTPResponseData = { + val response = HandlingUtils.advanced(getBackoffs: _*)(client, request) + if (response.statusLine.statusCode == 201) { + val location = new URI(response.headers.filter(_.name == "Location").head.value) + val maxTries = getMaxPollingRetries + val key = request.headers.find(_.name == "Ocp-Apim-Subscription-Key").map(_.value) + val it = (0 to maxTries).toIterator.flatMap { _ => + queryForResult(key, client, location).orElse({ + blocking { + 
Thread.sleep(getPollingDelay.toLong) + } + None + }) + } + if (it.hasNext) { + it.next() + } else { + throw new TimeoutException( + s"Querying for results did not complete within $maxTries tries") + } + } else { + response + } + } +} + +object MultivariateAnomalyEstimator extends ComplexParamsReadable[MultivariateAnomalyEstimator] with Serializable + +class MultivariateAnomalyEstimator(override val uid: String) extends Estimator[DetectMultivariateAnomaly] + with MADBase { + logClass() + + def this() = this(Identifiable.randomUID("MultivariateAnomalyModel")) + + def urlPath: String = "anomalydetector/v1.1-preview/multivariate/models" + + val slidingWindow = new ServiceParam[Int](this, "slidingWindow", "An optional field, indicates" + + " how many history points will be used to determine the anomaly score of one subsequent point.", { + case Left(x) => (x >= 28) && (x <= 2880) + case Right(_) => true + }, isRequired = true) + + def setSlidingWindow(v: Int): this.type = setScalarParam(slidingWindow, v) + + def setSlidingWindowCol(v: String): this.type = setVectorParam(slidingWindow, v) + + def getSlidingWindow: Int = getScalarParam(slidingWindow) + + def getSlidingWindowCol: String = getVectorParam(slidingWindow) + + val alignMode = new ServiceParam[String](this, "alignMode", "An optional field, indicates how " + + "we align different variables into the same time-range which is required by the model.{Inner, Outer}", { + case Left(s) => Set("inner", "outer")(s.toLowerCase) + case Right(_) => true + }) + + def setAlignMode(v: String): this.type = setScalarParam(alignMode, v.toLowerCase.capitalize) + + def setAlignModeCol(v: String): this.type = setVectorParam(alignMode, v) + + def getAlignMode: String = getScalarParam(alignMode) + + def getAlignModeCol: String = getVectorParam(alignMode) + + val fillNAMethod = new ServiceParam[String](this, "fillNAMethod", "An optional field, indicates how missed " + + "values will be filled with. 
Can not be set to NotFill, when alignMode is Outer.{Previous, Subsequent," + + " Linear, Zero, Fixed}", { + case Left(s) => Set("previous", "subsequent", "linear", "zero", "fixed")(s.toLowerCase) + case Right(_) => true + }) + + def setFillNAMethod(v: String): this.type = setScalarParam(fillNAMethod, v.toLowerCase.capitalize) + + def setFillNAMethodCol(v: String): this.type = setVectorParam(fillNAMethod, v) + + def getFillNAMethod: String = getScalarParam(fillNAMethod) + + def getFillNAMethodCol: String = getVectorParam(fillNAMethod) + + val paddingValue = new ServiceParam[Int](this, "paddingValue", "optional field, is only useful" + + " if FillNAMethod is set to Fixed.") + + def setPaddingValue(v: Int): this.type = setScalarParam(paddingValue, v) + + def setPaddingValueCol(v: String): this.type = setVectorParam(paddingValue, v) + + def getPaddingValue: Int = getScalarParam(paddingValue) + + def getPaddingValueCol: String = getVectorParam(paddingValue) + + val displayName = new ServiceParam[String](this, "displayName", "optional field," + + " name of the model") + + def setDisplayName(v: String): this.type = setScalarParam(displayName, v) + + def setDisplayNameCol(v: String): this.type = setVectorParam(displayName, v) + + def getDisplayName: String = getScalarParam(displayName) + + def getDisplayNameCol: String = getVectorParam(displayName) + + val diagnosticsInfo = new ServiceParam[DiagnosticsInfo](this, "diagnosticsInfo", + "diagnosticsInfo for training a multivariate anomaly detection model") + + def setDiagnosticsInfo(v: DiagnosticsInfo): this.type = setScalarParam(diagnosticsInfo, v) + + def getDiagnosticsInfo: DiagnosticsInfo = getScalarParam(diagnosticsInfo) + + override protected def prepareEntity: Row => Option[AbstractHttpEntity] = { + r => + Some(new StringEntity(Map("source" -> getValue(r, source).toJson, + "startTime" -> getValue(r, startTime).toJson, + "endTime" -> getValue(r, endTime).toJson, + "slidingWindow" -> getValue(r, slidingWindow).toJson, + "alignPolicy" -> Map("alignMode" -> getValueOpt(r, alignMode).toJson, + "fillNAMethod" -> getValueOpt(r, fillNAMethod).toJson, + "paddingValue" -> getValueOpt(r, paddingValue).toJson).toJson, + "displayName" -> getValueOpt(r, displayName).toJson) + .toJson.compactPrint, ContentType.APPLICATION_JSON)) + } + + override def responseDataType: DataType = MAEResponse.schema + + protected def getInternalTransformer(schema: StructType): PipelineModel = { + val dynamicParamColName = DatasetExtensions.findUnusedColumnName("dynamic", schema) + val badColumns = getVectorParamMap.values.toSet.diff(schema.fieldNames.toSet) + assert(badColumns.isEmpty, + s"Could not find dynamic columns: $badColumns in columns: ${schema.fieldNames.toSet}") + + val missingRequiredParams = this.getRequiredParams.filter { + p => this.get(p).isEmpty && this.getDefault(p).isEmpty + } + assert(missingRequiredParams.isEmpty, + s"Missing required params: ${missingRequiredParams.map(s => s.name).mkString("(", ", ", ")")}") + + val dynamicParamCols = getVectorParamMap.values.toList.map(col) match { + case Nil => Seq(lit(false).alias("placeholder")) + case l => l + } + + val stages = Array( + Lambda(_.withColumn(dynamicParamColName, struct(dynamicParamCols: _*))), + new SimpleHTTPTransformer() + .setInputCol(dynamicParamColName) + .setOutputCol(getOutputCol) + .setInputParser(getInternalInputParser(schema)) + .setOutputParser(getInternalOutputParser(schema)) + .setHandler(handlingFunc) + .setConcurrency(getConcurrency) + .setConcurrentTimeout(get(concurrentTimeout)) + 
.setErrorCol(getErrorCol), + new DropColumns().setCol(dynamicParamColName) + ) + + NamespaceInjections.pipelineModel(stages) + } + + override def fit(dataset: Dataset[_]): DetectMultivariateAnomaly = { + logFit({ + import MADJsonProtocol._ + + val df = getInternalTransformer(dataset.schema) + .transform(dataset) + .withColumn("diagnosticsInfo", col(getOutputCol) + .getField("modelInfo").getField("diagnosticsInfo")) + .withColumn("modelId", col(getOutputCol).getField("modelId")) + .select(getOutputCol, "modelId", "diagnosticsInfo") + .collect() + this.setDiagnosticsInfo(df.head.get(2).asInstanceOf[GenericRowWithSchema] + .json.parseJson.convertTo[DiagnosticsInfo]) + val modelId = df.head.getString(1) + new DetectMultivariateAnomaly() + .setSubscriptionKey(getSubscriptionKey) + .setLocation(getUrl.split("/".toCharArray)(2).split(".".toCharArray).head) + .setModelId(modelId) + .setSourceCol(getSourceCol) + .setStartTime(getStartTime) + .setEndTime(getEndTime) + }) + } + + override def copy(extra: ParamMap): Estimator[DetectMultivariateAnomaly] = defaultCopy(extra) + + override def transformSchema(schema: StructType): StructType = { + getInternalTransformer(schema).transformSchema(schema) + } +} + +object DetectMultivariateAnomaly extends ComplexParamsReadable[DetectMultivariateAnomaly] with Serializable + +class DetectMultivariateAnomaly(override val uid: String) extends Model[DetectMultivariateAnomaly] + with MADBase { + logClass() + + def this() = this(Identifiable.randomUID("DetectMultivariateAnomaly")) + + def urlPath: String = "anomalydetector/v1.1-preview/multivariate/models/" + + val modelId = new ServiceParam[String](this, "modelId", "Format - uuid. Model identifier.", + isRequired = true) + + def setModelId(v: String): this.type = setScalarParam(modelId, v) + + def setModelIdCol(v: String): this.type = setVectorParam(modelId, v) + + def getModelId: String = getScalarParam(modelId) + + def getModelIdCol: String = getVectorParam(modelId) + + override protected def prepareEntity: Row => Option[AbstractHttpEntity] = { row => + Some(new StringEntity(DMARequest( + getValue(row, source), + getValue(row, startTime), + getValue(row, endTime) + ).toJson.compactPrint)) + } + + override protected def prepareUrl: Row => String = { + val urlParams: Array[ServiceParam[Any]] = + getUrlParams.asInstanceOf[Array[ServiceParam[Any]]]; + // This semicolon is needed to avoid argument confusion + { row: Row => + val base = getUrl + s"${getValue(row, modelId)}/detect" + val appended = if (!urlParams.isEmpty) { + "?" 
+ URLEncodingUtils.format(urlParams.flatMap(p => + getValueOpt(row, p).map(v => p.name -> p.toValueString(v)) + ).toMap) + } else { + "" + } + base + appended + } + } + + override def responseDataType: DataType = DMAResponse.schema + + protected def getInternalTransformer(schema: StructType): PipelineModel = { + val dynamicParamColName = DatasetExtensions.findUnusedColumnName("dynamic", schema) + val badColumns = getVectorParamMap.values.toSet.diff(schema.fieldNames.toSet) + assert(badColumns.isEmpty, + s"Could not find dynamic columns: $badColumns in columns: ${schema.fieldNames.toSet}") + + val missingRequiredParams = this.getRequiredParams.filter { + p => this.get(p).isEmpty && this.getDefault(p).isEmpty + } + assert(missingRequiredParams.isEmpty, + s"Missing required params: ${missingRequiredParams.map(s => s.name).mkString("(", ", ", ")")}") + + val dynamicParamCols = getVectorParamMap.values.toList.map(col) match { + case Nil => Seq(lit(false).alias("placeholder")) + case l => l + } + + val stages = Array( + Lambda(_.withColumn(dynamicParamColName, struct(dynamicParamCols: _*))), + new SimpleHTTPTransformer() + .setInputCol(dynamicParamColName) + .setOutputCol(getOutputCol) + .setInputParser(getInternalInputParser(schema)) + .setOutputParser(getInternalOutputParser(schema)) + .setHandler(handlingFunc) + .setConcurrency(getConcurrency) + .setConcurrentTimeout(get(concurrentTimeout)) + .setErrorCol(getErrorCol), + new DropColumns().setCol(dynamicParamColName) + ) + + NamespaceInjections.pipelineModel(stages) + } + + override def transform(dataset: Dataset[_]): DataFrame = { + logTransform[DataFrame]( + getInternalTransformer(dataset.schema).transform(dataset) + ) + } + + override def copy(extra: ParamMap): DetectMultivariateAnomaly = defaultCopy(extra) + + override def transformSchema(schema: StructType): StructType = { + getInternalTransformer(schema).transformSchema(schema) + } + +} diff --git a/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/MultivariateAnomalyDetectorSchemas.scala b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/MultivariateAnomalyDetectorSchemas.scala new file mode 100644 index 0000000000..cdcf9d635d --- /dev/null +++ b/cognitive/src/main/scala/com/microsoft/azure/synapse/ml/cognitive/MultivariateAnomalyDetectorSchemas.scala @@ -0,0 +1,93 @@ +// Copyright (C) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See LICENSE in project root for information. 
+ +package com.microsoft.azure.synapse.ml.cognitive + +import com.microsoft.azure.synapse.ml.core.schema.SparkBindings +import spray.json.{DefaultJsonProtocol, RootJsonFormat} + +// DMA stands for DetectMultivariateAnomaly +object DMARequest extends SparkBindings[DMARequest] + +case class DMARequest(source: String, + startTime: String, + endTime: String) + +object DMAResponse extends SparkBindings[DMAResponse] + +case class DMAResponse(resultId: String, + summary: DMASummary, + results: Seq[DMAResult]) + +case class DMASummary(status: String, + errors: Option[Seq[DMAError]], + variableStates: Option[Seq[DMAVariableState]], + setupInfo: DMASetupInfo) + +case class DMAError(code: String, message: String) + +case class DMAVariableState(variable: Option[String], + filledNARatio: Option[Double], + effectiveCount: Option[Int], + startTime: Option[String], + endTime: Option[String], + errors: Option[Seq[DMAError]]) + +case class DMASetupInfo(source: String, + startTime: String, + endTime: String) + +case class DMAResult(timestamp: String, value: Option[DMAValue], errors: Option[Seq[DMAError]]) + +case class DMAValue(contributors: Option[Seq[DMAContributor]], + isAnomaly: Boolean, + severity: Double, + score: Double) + +case class DMAContributor(contributionScore: Option[Double], variable: Option[String]) + +// MAE stands for MultivariateAnomalyEstimator +object MAERequest extends SparkBindings[MAERequest] + +case class MAERequest(source: String, + startTime: String, + endTime: String, + slidingWindow: Option[Int], + alignPolicy: Option[AlignPolicy], + displayName: Option[String]) + +object MAEResponse extends SparkBindings[MAEResponse] + +case class MAEResponse(modelId: String, + createdTime: String, + lastUpdatedTime: String, + modelInfo: MAEModelInfo) + +case class MAEModelInfo(slidingWindow: Option[Int], + alignPolicy: Option[AlignPolicy], + source: String, + startTime: String, + endTime: String, + displayName: Option[String], + status: String, + errors: Option[Seq[DMAError]], + diagnosticsInfo: Option[DiagnosticsInfo]) + +case class AlignPolicy(alignMode: Option[String], fillNAMethod: Option[String], paddingValue: Option[Int]) + +case class DiagnosticsInfo(modelState: Option[ModelState], variableStates: Option[Seq[DMAVariableState]]) + +case class ModelState(epochIds: Option[Seq[Int]], + trainLosses: Option[Seq[Double]], + validationLosses: Option[Seq[Double]], + latenciesInSeconds: Option[Seq[Double]]) + +object MADJsonProtocol extends DefaultJsonProtocol { + implicit val DMAReqEnc: RootJsonFormat[DMARequest] = jsonFormat3(DMARequest.apply) + implicit val EEnc: RootJsonFormat[DMAError] = jsonFormat2(DMAError.apply) + implicit val VSEnc: RootJsonFormat[DMAVariableState] = jsonFormat6(DMAVariableState.apply) + implicit val MSEnc: RootJsonFormat[ModelState] = jsonFormat4(ModelState.apply) + implicit val DIEnc: RootJsonFormat[DiagnosticsInfo] = jsonFormat2(DiagnosticsInfo.apply) + implicit val APEnc: RootJsonFormat[AlignPolicy] = jsonFormat3(AlignPolicy.apply) + implicit val MAEReqEnc: RootJsonFormat[MAERequest] = jsonFormat6(MAERequest.apply) +} diff --git a/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/MultivariateAnamolyDetectionSuite.scala b/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/MultivariateAnamolyDetectionSuite.scala new file mode 100644 index 0000000000..93f3f64645 --- /dev/null +++ b/cognitive/src/test/scala/com/microsoft/azure/synapse/ml/cognitive/split1/MultivariateAnamolyDetectionSuite.scala @@ -0,0 +1,200 @@ +// Copyright 
(C) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See LICENSE in project root for information. + +package com.microsoft.azure.synapse.ml.cognitive.split1 + +import com.microsoft.azure.synapse.ml.cognitive._ +import com.microsoft.azure.synapse.ml.core.env.StreamUtilities.using +import com.microsoft.azure.synapse.ml.core.test.base.TestBase +import com.microsoft.azure.synapse.ml.core.test.fuzzing.{EstimatorFuzzing, TestObject} +import org.apache.commons.io.IOUtils +import org.apache.http.client.methods.{HttpDelete, HttpEntityEnclosingRequestBase, HttpGet, HttpRequestBase} +import org.apache.spark.ml.util.MLReadable +import org.apache.spark.sql.DataFrame +import org.apache.spark.sql.functions.col +import org.scalactic.Equality +import spray.json._ + +import java.net.URI + +case class MADListModelsResponse(models: Seq[MADModel], + currentCount: Int, + maxCount: Int, + nextLink: Option[String]) + +case class MADModel(modelId: String, + createdTime: String, + lastUpdatedTime: String, + status: String, + displayName: Option[String], + variablesCount: Int) + +object MADListModelsProtocol extends DefaultJsonProtocol { + implicit val MADModelEnc: RootJsonFormat[MADModel] = jsonFormat6(MADModel) + implicit val MADLMRespEnc: RootJsonFormat[MADListModelsResponse] = jsonFormat4(MADListModelsResponse) +} + +import com.microsoft.azure.synapse.ml.cognitive.split1.MADListModelsProtocol._ + +object MADUtils extends AnomalyKey { + + import com.microsoft.azure.synapse.ml.cognitive.RESTHelpers._ + + def madSend(request: HttpRequestBase, path: String, + params: Map[String, String] = Map()): String = { + + val paramString = if (params.isEmpty) { + "" + } else { + "?" + URLEncodingUtils.format(params) + } + request.setURI(new URI(path + paramString)) + + retry(List(100, 500, 1000), { () => + request.addHeader("Ocp-Apim-Subscription-Key", anomalyKey) + request.addHeader("Content-Type", "application/json") + using(Client.execute(request)) { response => + if (!response.getStatusLine.getStatusCode.toString.startsWith("2")) { + val bodyOpt = request match { + case er: HttpEntityEnclosingRequestBase => IOUtils.toString(er.getEntity.getContent, "UTF-8") + case _ => "" + } + if (response.getStatusLine.getStatusCode.toString.equals("429")) { + val retryTime = response.getHeaders("Retry-After").head.getValue.toInt * 1000 + Thread.sleep(retryTime.toLong) + } + throw new RuntimeException(s"Failed: response: $response " + s"requestUrl: ${request.getURI}" + + s"requestBody: $bodyOpt") + } + if (response.getStatusLine.getReasonPhrase == "No Content") { + "" + } + else if (response.getStatusLine.getReasonPhrase == "Created") { + response.getHeaders("Location").head.getValue + } + else { + IOUtils.toString(response.getEntity.getContent, "UTF-8") + } + }.get + }) + } + + def madDelete(path: String, params: Map[String, String] = Map()): String = { + madSend(new HttpDelete(), "https://westus2.api.cognitive.microsoft.com/anomalydetector/" + + "v1.1-preview/multivariate/models/" + path, params) + } + + def madListModels(params: Map[String, String] = Map()): String = { + madSend(new HttpGet(), "https://westus2.api.cognitive.microsoft.com/anomalydetector/" + + "v1.1-preview/multivariate/models", params) + } +} + +trait MADUtils extends TestBase with AnomalyKey { + + import spark.implicits._ + + lazy val df: DataFrame = Seq( + "https://mmlspark.blob.core.windows.net/datasets/sample_data_5_3000.zip" + ).toDF("source") + + lazy val startTime: String = "2021-01-01T00:00:00Z" + + lazy val endTime: String = 
"2021-01-02T12:00:00Z" +} + +class MultiAnomalyEstimatorSuite extends EstimatorFuzzing[MultivariateAnomalyEstimator] with MADUtils { + + import MADUtils._ + + override def assertDFEq(df1: DataFrame, df2: DataFrame)(implicit eq: Equality[DataFrame]): Unit = { + def prep(df: DataFrame) = { + df.select("source") + } + + super.assertDFEq(prep(df1), prep(df2))(eq) + } + + def mae: MultivariateAnomalyEstimator = new MultivariateAnomalyEstimator() + .setSubscriptionKey(anomalyKey) + .setLocation("westus2") + .setOutputCol("result") + .setSourceCol("source") + .setStartTime(startTime) + .setEndTime(endTime) + .setConcurrency(5) + + test("Basic Usage") { + val mam = mae.setSlidingWindow(200).setAlignMode("outer").setFillNAMethod("linear") + val model = mam.fit(df) + val diagnosticsInfo = mam.getDiagnosticsInfo + assert(diagnosticsInfo.variableStates.get.length.equals(5)) + + val result = model + .setSourceCol("source").setStartTime(startTime).setEndTime(endTime) + .setOutputCol("result") + .transform(df) + .withColumn("status", col("result.summary.status")) + .withColumn("variableStates", col("result.summary.variableStates")) + .select("status", "variableStates") + .collect() + + assert(result.head.getString(0).equals("READY")) + assert(result.head.getSeq(1).length.equals(5)) + + madDelete(model.getModelId) + } + + test("Throw errors if alignMode is not set correctly") { + val caught = intercept[IllegalArgumentException] { + mae.setSlidingWindow(200).setAlignMode("alignMode").fit(df) + } + assert(caught.getMessage.contains("parameter alignMode given invalid value")) + } + + test("Throw errors if slidingWindow is not between 28 and 2880") { + val caught = intercept[IllegalArgumentException] { + mae.setSlidingWindow(20).fit(df) + } + assert(caught.getMessage.contains("parameter slidingWindow given invalid value")) + } + + test("Throw errors if required fields not set") { + val caught = intercept[AssertionError] { + new MultivariateAnomalyEstimator() + .setSubscriptionKey(anomalyKey) + .setLocation("westus2") + .setOutputCol("result") + .fit(df) + } + assert(caught.getMessage.contains("Missing required params")) + assert(caught.getMessage.contains("slidingWindow")) + assert(caught.getMessage.contains("source")) + assert(caught.getMessage.contains("startTime")) + assert(caught.getMessage.contains("endTime")) + } + + override def testSerialization(): Unit = { + println("ignore the Serialization Fuzzing test because fitting process takes more than 3 minutes") + } + + override def testExperiments(): Unit = { + println("ignore the Experiment Fuzzing test because fitting process takes more than 3 minutes") + } + + override def afterAll(): Unit = { + val models = madListModels().parseJson.convertTo[MADListModelsResponse].models.map(_.modelId) + for (modelId <- models) { + madDelete(modelId) + } + super.afterAll() + } + + override def testObjects(): Seq[TestObject[MultivariateAnomalyEstimator]] = + Seq(new TestObject(mae.setSlidingWindow(200), df)) + + override def reader: MLReadable[_] = MultivariateAnomalyEstimator + + override def modelReader: MLReadable[_] = DetectMultivariateAnomaly +} + diff --git a/src/test/scala/com/microsoft/azure/synapse/ml/core/test/fuzzing/FuzzingTest.scala b/src/test/scala/com/microsoft/azure/synapse/ml/core/test/fuzzing/FuzzingTest.scala index 944ec449b3..98710d285e 100644 --- a/src/test/scala/com/microsoft/azure/synapse/ml/core/test/fuzzing/FuzzingTest.scala +++ b/src/test/scala/com/microsoft/azure/synapse/ml/core/test/fuzzing/FuzzingTest.scala @@ -61,6 +61,7 @@ class 
FuzzingTest extends TestBase { "com.microsoft.azure.synapse.ml.train.TrainedClassifierModel", "com.microsoft.azure.synapse.ml.lightgbm.LightGBMRankerModel", "com.microsoft.azure.synapse.ml.cognitive.FormOntologyTransformer", + "com.microsoft.azure.synapse.ml.cognitive.DetectMultivariateAnomaly", "com.microsoft.azure.synapse.ml.automl.BestModel" //TODO add proper interfaces to all of these ) val applicableStages = pipelineStages.filter(t => !exemptions(t.getClass.getName)) @@ -109,6 +110,7 @@ class FuzzingTest extends TestBase { "com.microsoft.azure.synapse.ml.vw.VowpalWabbitClassificationModel", "com.microsoft.azure.synapse.ml.vw.VowpalWabbitContextualBanditModel", "com.microsoft.azure.synapse.ml.cognitive.FormOntologyTransformer", + "com.microsoft.azure.synapse.ml.cognitive.DetectMultivariateAnomaly", "com.microsoft.azure.synapse.ml.vw.VowpalWabbitRegressionModel" ) val applicableStages = pipelineStages.filter(t => !exemptions(t.getClass.getName)) @@ -156,6 +158,7 @@ class FuzzingTest extends TestBase { "com.microsoft.azure.synapse.ml.lightgbm.LightGBMRankerModel", "com.microsoft.azure.synapse.ml.lightgbm.LightGBMRegressionModel", "com.microsoft.azure.synapse.ml.cognitive.FormOntologyTransformer", + "com.microsoft.azure.synapse.ml.cognitive.DetectMultivariateAnomaly", "com.microsoft.azure.synapse.ml.train.ComputePerInstanceStatistics" ) val applicableStages = pipelineStages.filter(t => !exemptions(t.getClass.getName)) From b3e91b7caaa2221fc0286171889c0298c63652c2 Mon Sep 17 00:00:00 2001 From: Stuart Leeks Date: Mon, 6 Dec 2021 11:21:12 +0000 Subject: [PATCH 38/40] feat: Add non-array value propagation to FlattenBatch --- .../ml/stages/MiniBatchTransformer.scala | 31 ++++++++++++------- .../ml/stages/MiniBatchTransformerSuite.scala | 22 ++++++++++++- 2 files changed, 41 insertions(+), 12 deletions(-) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformer.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformer.scala index b4eec0b6ba..1cba8ba44a 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformer.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformer.scala @@ -189,20 +189,23 @@ class FlattenBatch(val uid: String) def this() = this(Identifiable.randomUID("FlattenBatch")) - def transpose(nestedSeq: Seq[Seq[Any]]): Seq[Seq[Any]] = { + def transpose(nestedSeq: Seq[Any]): Seq[Seq[Any]] = { val innerLength = nestedSeq.filter { case null => false - case _ => true - }.head.length + case _ : Seq[Any] => true + case _ => false + }.head.asInstanceOf[Seq[Any]].length assert(nestedSeq.forall{ case null => true - case innerSeq => innerSeq.lengthCompare(innerLength) == 0 + case innerSeq : Seq[Any] => innerSeq.lengthCompare(innerLength) == 0 + case _ => true }) (0 until innerLength).map(i => nestedSeq.map{ case null => null - case innerSeq => innerSeq(i) + case innerSeq : Seq[Any] => innerSeq(i) + case any => any }) } @@ -216,7 +219,13 @@ class FlattenBatch(val uid: String) val transposed: Seq[Seq[Any]] = transpose( (0 until rowOfLists.length) .filterNot(rowOfLists.isNullAt) - .map(rowOfLists.getSeq)) + .map(i => { + val fieldSchema = rowOfLists.schema.fields(i) + fieldSchema.dataType match { + case _ : ArrayType => rowOfLists.getSeq(i) + case _ => rowOfLists.get(i) + } + })) transposed.map { values => new GenericRowWithSchema(values.toArray, outputSchema) } @@ -228,11 +237,11 @@ class FlattenBatch(val uid: String) override def copy(extra: ParamMap): this.type = 
defaultCopy(extra) override def transformSchema(schema: StructType): StructType = { - assert(schema.fields.forall(sf => sf.dataType match { - case _: ArrayType => true - case _ => false + StructType(schema.map(f => { + f.dataType match { + case arrayField : ArrayType => StructField(f.name, f.dataType.asInstanceOf[ArrayType].elementType) + case nonArrayField => StructField(f.name, f.dataType) + } })) - StructType(schema.map(f => StructField(f.name, f.dataType.asInstanceOf[ArrayType].elementType))) } - } diff --git a/core/src/test/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformerSuite.scala b/core/src/test/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformerSuite.scala index f9f3d5b6b1..82577e8e1c 100644 --- a/core/src/test/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformerSuite.scala +++ b/core/src/test/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformerSuite.scala @@ -9,11 +9,12 @@ import org.apache.spark.injections.UDFUtils import org.apache.spark.ml.param.DataFrameEquality import org.apache.spark.ml.util.MLReadable import org.apache.spark.sql.catalyst.encoders.RowEncoder -import org.apache.spark.sql.functions.col +import org.apache.spark.sql.functions.{col, lit} import org.apache.spark.sql.types.{ArrayType, IntegerType, StringType, StructType} import org.apache.spark.sql.{DataFrame, Dataset} import org.scalactic.Equality import org.scalatest.Assertion +import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema trait MiniBatchTestUtils extends TestBase with DataFrameEquality { import spark.implicits._ @@ -133,6 +134,25 @@ class FlattenBatchSuite extends TransformerFuzzing[FlattenBatch] { assert(new FlattenBatch().transform(nullifiedDf).count() == 1000) } + test("propagate non-array") { + val batchedDf = new FixedMiniBatchTransformer().setBatchSize(3).transform(df) + val fixedValueDf = batchedDf.withColumn("fixed_col", lit("fixed value")) + + val result = new FlattenBatch().transform(fixedValueDf) + assert(result.count() == 1000) + + val rows = result.collect() + val row1 = rows(0).asInstanceOf[GenericRowWithSchema] + assert(row1(0) == 1) + assert(row1(1) == "foo") + assert(row1(2) == "fixed value") + val row2 = rows(1).asInstanceOf[GenericRowWithSchema] + assert(row2(0) == 2) + assert(row2(1) == "foo") + assert(row2(2) == "fixed value") + + } + override def testObjects(): Seq[TestObject[FlattenBatch]] = Seq( new TestObject[FlattenBatch]( new FlattenBatch(), From 87520984301b025f39d6d82a54390acebb4b053c Mon Sep 17 00:00:00 2001 From: Stuart Leeks Date: Mon, 6 Dec 2021 11:21:21 +0000 Subject: [PATCH 39/40] drop - temp test speedup --- .../azure/synapse/ml/core/test/fuzzing/Fuzzing.scala | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/core/src/test/scala/com/microsoft/azure/synapse/ml/core/test/fuzzing/Fuzzing.scala b/core/src/test/scala/com/microsoft/azure/synapse/ml/core/test/fuzzing/Fuzzing.scala index ec8f1ed5cd..f561c2e207 100644 --- a/core/src/test/scala/com/microsoft/azure/synapse/ml/core/test/fuzzing/Fuzzing.scala +++ b/core/src/test/scala/com/microsoft/azure/synapse/ml/core/test/fuzzing/Fuzzing.scala @@ -213,9 +213,9 @@ trait ExperimentFuzzing[S <: PipelineStage] extends TestBase with DataFrameEqual } } - test("Experiment Fuzzing") { - testExperiments() - } + // test("Experiment Fuzzing") { + // testExperiments() + // } } @@ -291,9 +291,9 @@ trait SerializationFuzzing[S <: PipelineStage with MLWritable] extends TestBase } } - test("Serialization Fuzzing") { - testSerialization() - } + // 
test("Serialization Fuzzing") { + // testSerialization() + // } } From 5fd33a1b2b1ddd089b30e38f69a9ddade4992254 Mon Sep 17 00:00:00 2001 From: Stuart Leeks Date: Mon, 6 Dec 2021 15:25:44 +0000 Subject: [PATCH 40/40] improve null handling --- .../synapse/ml/stages/MiniBatchTransformer.scala | 15 +++++++++------ .../ml/stages/MiniBatchTransformerSuite.scala | 15 ++++++++++++++- 2 files changed, 23 insertions(+), 7 deletions(-) diff --git a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformer.scala b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformer.scala index 1cba8ba44a..90c6ea454f 100644 --- a/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformer.scala +++ b/core/src/main/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformer.scala @@ -218,13 +218,16 @@ class FlattenBatch(val uid: String) it.flatMap { rowOfLists => val transposed: Seq[Seq[Any]] = transpose( (0 until rowOfLists.length) - .filterNot(rowOfLists.isNullAt) .map(i => { - val fieldSchema = rowOfLists.schema.fields(i) - fieldSchema.dataType match { - case _ : ArrayType => rowOfLists.getSeq(i) - case _ => rowOfLists.get(i) - } + if (rowOfLists.isNullAt(i)) { + null + } else { + val fieldSchema = rowOfLists.schema.fields(i) + fieldSchema.dataType match { + case _ : ArrayType => rowOfLists.getSeq(i) + case _ => rowOfLists.get(i) + } + } })) transposed.map { values => new GenericRowWithSchema(values.toArray, outputSchema) diff --git a/core/src/test/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformerSuite.scala b/core/src/test/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformerSuite.scala index 82577e8e1c..343e6eceeb 100644 --- a/core/src/test/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformerSuite.scala +++ b/core/src/test/scala/com/microsoft/azure/synapse/ml/stages/MiniBatchTransformerSuite.scala @@ -131,7 +131,20 @@ class FlattenBatchSuite extends TransformerFuzzing[FlattenBatch] { val batchedDf = new FixedMiniBatchTransformer().setBatchSize(3).transform(df) val nullifiedDf = batchedDf.withColumn( "nullCol", UDFUtils.oldUdf(FlattenBatchUtils.nullify _, ArrayType(IntegerType))(col("in1"))) - assert(new FlattenBatch().transform(nullifiedDf).count() == 1000) + + val result = new FlattenBatch().transform(nullifiedDf) + assert(result.count() == 1000) + + val rows = result.collect() + val row1 = rows(0).asInstanceOf[GenericRowWithSchema] + assert(row1(0) == 1) + assert(row1(1) == "foo") + assert(row1(2) == 1) + val row7 = rows(6).asInstanceOf[GenericRowWithSchema] + assert(row7(0) == 7) + assert(row7(1) == "foo") + assert(row7(2) == null) + } test("propagate non-array") {