This repository has been archived by the owner on Nov 16, 2023. It is now read-only.

Commit 5b00da8

Refactor utilities into separate folder
Kevin Wang committed Jul 5, 2018
1 parent 960f2fc commit 5b00da8
Showing 29 changed files with 2,059 additions and 661 deletions.
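
The commit replaces the single utilities.py helper module with a utilities folder, and every notebook switches from `import utilities` to `import utilities as utils`, reaching the helpers as utils.config.*, utils.cluster.*, utils.job.* and utils.dataset.*. The sys.path entry also changes from '../..' to '../../..', so the package now sits one directory level higher, relative to each notebook, than the old utilities.py did. A minimal sketch of the layout this implies follows; the file names inside the folder are inferred from the attributes the notebooks use and are an assumption, not the commit's verbatim contents.

# Inferred layout (assumption, reconstructed from the calls in the notebooks below):
#
# utilities/
#     __init__.py
#     config.py     # Configuration, create_batchai_client, create_resource_group
#     cluster.py    # print_cluster_status
#     job.py        # wait_for_job_completion
#     dataset.py    # download_file

# utilities/__init__.py -- re-exports the submodules so that a bare
# `import utilities as utils` makes utils.config, utils.cluster, utils.job
# and utils.dataset available, as the notebooks expect.
from . import cluster, config, dataset, job
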
@@ -59,12 +59,12 @@
"from azure.storage.blob import BlockBlobService\n",
"import azure.mgmt.batchai.models as models\n",
"\n",
"# utilities.py contains helper functions used by different notebooks\n",
"sys.path.append('../..')\n",
"import utilities\n",
"# The BatchAI/utilities folder contains helper functions used by different notebooks\n",
"sys.path.append('../../..')\n",
"import utilities as utils\n",
"\n",
"cfg = utilities.Configuration('../../configuration.json')\n",
"client = utilities.create_batchai_client(cfg)"
"cfg = utils.config.Configuration('../../configuration.json')\n",
"client = utils.config.create_batchai_client(cfg)"
]
},
{
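
The cell above now loads its settings through utils.config.Configuration and builds the management client with utils.config.create_batchai_client. A hypothetical sketch of what utilities/config.py might contain is shown below; the credential flow and the field names read from configuration.json (subscription_id, aad_client_id, aad_secret, aad_tenant, resource_group, location) are assumptions, not the repository's actual code.

# Hypothetical sketch of utilities/config.py -- field names and structure are assumed.
import json

from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.batchai import BatchAIManagementClient


class Configuration:
    """Loads subscription, credential and region settings from a JSON file."""
    def __init__(self, path):
        with open(path) as f:
            data = json.load(f)
        self.subscription_id = data['subscription_id']
        self.aad_client_id = data['aad_client_id']
        self.aad_secret = data['aad_secret']
        self.aad_tenant = data['aad_tenant']
        self.resource_group = data['resource_group']
        self.location = data['location']
        # Workspace-aware recipes would also read a 'workspace' entry here.
        self.workspace = data.get('workspace')


def create_batchai_client(cfg):
    """Builds a Batch AI management client from the loaded configuration."""
    credentials = ServicePrincipalCredentials(client_id=cfg.aad_client_id,
                                              secret=cfg.aad_secret,
                                              tenant=cfg.aad_tenant)
    return BatchAIManagementClient(credentials=credentials,
                                   subscription_id=cfg.subscription_id)
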
@@ -82,7 +82,7 @@
},
"outputs": [],
"source": [
"utilities.create_resource_group(cfg)\n",
"utils.config.create_resource_group(cfg)\n",
"_ = client.workspaces.create(cfg.resource_group, cfg.workspace, cfg.location).result()"
]
},
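
This cell calls utils.config.create_resource_group(cfg) before creating the workspace. A plausible sketch of that helper, assuming it wraps the Azure Resource Management client and reuses the service-principal fields from the configuration (the real implementation may differ):

# Hypothetical sketch of create_resource_group (utilities/config.py) -- an assumption.
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.resource import ResourceManagementClient


def create_resource_group(cfg):
    """Creates the configured resource group if it does not already exist."""
    credentials = ServicePrincipalCredentials(client_id=cfg.aad_client_id,
                                              secret=cfg.aad_secret,
                                              tenant=cfg.aad_tenant)
    resource_client = ResourceManagementClient(credentials, cfg.subscription_id)
    # create_or_update is idempotent, so repeated calls across notebooks are safe.
    resource_client.resource_groups.create_or_update(cfg.resource_group,
                                                     {'location': cfg.location})
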
@@ -139,12 +139,12 @@
"outputs": [],
"source": [
"model_url = 'http://download.tensorflow.org/models/inception_v3_2016_08_28.tar.gz'\n",
"utilities.download_file(model_url, 'inception_v3.tar.gz')\n",
"utils.dataset.download_file(model_url, 'inception_v3.tar.gz')\n",
"with tarfile.open('inception_v3.tar.gz', \"r:gz\") as tar:\n",
" tar.extractall()\n",
"\n",
"images_url = 'https://batchaisamples.blob.core.windows.net/samples/imagenet_samples.zip?st=2017-09-29T18%3A29%3A00Z&se=2099-12-31T08%3A00%3A00Z&sp=rl&sv=2016-05-31&sr=c&sig=PmhL%2BYnYAyNTZr1DM2JySvrI12e%2F4wZNIwCtf7TRI%2BM%3D'\n",
"utilities.download_file(images_url, 'imagenet_samples.zip')\n",
"utils.dataset.download_file(images_url, 'imagenet_samples.zip')\n",
"with zipfile.ZipFile('imagenet_samples.zip', 'r') as z:\n",
" z.extractall('.')"
]
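
Several recipes fetch models and datasets over HTTP with utils.dataset.download_file before staging them. A minimal sketch of such a helper, assuming the `requests` library is available (the repository's version may use urllib or add retry logic):

# Minimal sketch of a streaming download helper in the spirit of utilities/dataset.py.
import requests


def download_file(url, destination):
    """Streams the resource at `url` to `destination` on disk."""
    print('Downloading {0} ...'.format(url))
    response = requests.get(url, stream=True)
    response.raise_for_status()
    with open(destination, 'wb') as f:
        for chunk in response.iter_content(chunk_size=1 << 20):  # 1 MiB chunks
            f.write(chunk)
    print('Saved {0}'.format(destination))
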
@@ -318,7 +318,7 @@
"source": [
"### Monitor Cluster Creation\n",
"\n",
"Monitor the just created cluster. utilities.py contains a helper function to print out detail status of the cluster."
"Monitor the just created cluster. The `utilities` module contains a helper function to print out detail status of the cluster."
]
},
{
@@ -330,7 +330,7 @@
"outputs": [],
"source": [
"cluster = client.clusters.get(cfg.resource_group, cfg.workspace, cluster_name)\n",
"utilities.print_cluster_status(cluster)"
"utils.cluster.print_cluster_status(cluster)"
]
},
{
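
The monitoring cell fetches the cluster and hands it to utils.cluster.print_cluster_status. A hypothetical sketch is shown below, assuming the Cluster model exposes allocation_state, current_node_count, node_state_counts and errors as the Batch AI SDK of that period did; the wording of the printed summary is an assumption.

# Hypothetical sketch of print_cluster_status (utilities/cluster.py).
def print_cluster_status(cluster):
    print('Cluster state: {0}; allocated node count: {1}'.format(
        cluster.allocation_state, cluster.current_node_count))
    counts = cluster.node_state_counts
    print('Nodes -- idle: {0} | running: {1} | preparing: {2} | unusable: {3} | leaving: {4}'.format(
        counts.idle_node_count, counts.running_node_count, counts.preparing_node_count,
        counts.unusable_node_count, counts.leaving_node_count))
    for error in (cluster.errors or []):
        print('Cluster error: {0}: {1}'.format(error.code, error.message))
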
@@ -452,7 +452,7 @@
},
"outputs": [],
"source": [
"utilities.wait_for_job_completion(client, cfg.resource_group, cfg.workspace, \n",
"utils.job.wait_for_job_completion(client, cfg.resource_group, cfg.workspace, \n",
" experiment_name, job_name, cluster_name, 'stdouterr', 'stdout.txt')"
]
},
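
The training cells block on utils.job.wait_for_job_completion, whose arguments (resource group, workspace, experiment, job, cluster, output directory id and file name) follow the workspace-based Batch AI API. A simplified polling sketch is below; the real helper also tails the named output file while it waits, and the jobs.get signature and ExecutionState members are assumptions about that SDK version.

# Simplified sketch of wait_for_job_completion (utilities/job.py) -- an assumption,
# not the repository's actual implementation.
import time

import azure.mgmt.batchai.models as models


def wait_for_job_completion(client, resource_group, workspace, experiment,
                            job_name, cluster_name, output_directory_id, output_file):
    # cluster_name, output_directory_id and output_file are used by the real helper
    # to stream logs while the job runs; this sketch only polls the job state.
    while True:
        job = client.jobs.get(resource_group, workspace, experiment, job_name)
        print('Job state: {0}'.format(job.execution_state))
        if job.execution_state in (models.ExecutionState.succeeded,
                                   models.ExecutionState.failed):
            break
        time.sleep(15)
    print('Job {0} finished in state {1}'.format(job_name, job.execution_state))
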
@@ -60,13 +60,13 @@
"from azure.storage.file import FileService\n",
"import azure.mgmt.batchai.models as models\n",
"\n",
"# utilities.py contains helper functions used by different notebooks \n",
"sys.path.append('../..')\n",
"import utilities\n",
"# The BatchAI/utilities folder contains helper functions used by different notebooks \n",
"sys.path.append('../../..')\n",
"import utilities as utils\n",
"\n",
"cfg = utilities.Configuration('../../configuration.json')\n",
"client = utilities.create_batchai_client(cfg)\n",
"utilities.create_resource_group(cfg)"
"cfg = utils.config.Configuration('../../configuration.json')\n",
"client = utils.config.create_batchai_client(cfg)\n",
"utils.config.create_resource_group(cfg)"
]
},
{
@@ -165,7 +165,7 @@
"source": [
"### Monitor Cluster Creation\n",
"\n",
"Monitor the just created cluster. utilities.py contains a helper function to print out detail status of the cluster."
"Monitor the just created cluster. The `utilities` module contains a helper function to print out detail status of the cluster."
]
},
{
@@ -175,7 +175,7 @@
"outputs": [],
"source": [
"cluster = client.clusters.get(cfg.resource_group, cluster_name)\n",
"utilities.print_cluster_status(cluster)"
"utils.cluster.print_cluster_status(cluster)"
]
},
{
@@ -204,7 +204,7 @@
"source": [
"mnist_dataset_url = 'https://batchaisamples.blob.core.windows.net/samples/mnist_dataset.zip?st=2017-09-29T18%3A29%3A00Z&se=2099-12-31T08%3A00%3A00Z&sp=rl&sv=2016-05-31&sr=c&sig=PmhL%2BYnYAyNTZr1DM2JySvrI12e%2F4wZNIwCtf7TRI%2BM%3D'\n",
"if not os.path.exists('Train-28x28_cntk_text.txt') or not os.path.exists('Test-28x28_cntk_text.txt'):\n",
" utilities.download_file(mnist_dataset_url, 'mnist_dataset.zip')\n",
" utils.dataset.download_file(mnist_dataset_url, 'mnist_dataset.zip')\n",
" print('Extracting MNIST dataset...')\n",
" with zipfile.ZipFile('mnist_dataset.zip', 'r') as z:\n",
" z.extractall('.')\n",
@@ -424,7 +424,7 @@
"metadata": {},
"outputs": [],
"source": [
"utilities.wait_for_job_completion(client, cfg.resource_group, job_name, cluster_name, 'stdouterr', 'stderr.txt')"
"utils.job.wait_for_job_completion(client, cfg.resource_group, job_name, cluster_name, 'stdouterr', 'stderr.txt')"
]
},
{
@@ -446,7 +446,7 @@
" models.JobsListOutputFilesOptions(outputdirectoryid='stdouterr')) \n",
"for f in list(files):\n",
" if f.download_url:\n",
" utilities.download_file(f.download_url, f.name)\n",
" utils.dataset.download_file(f.download_url, f.name)\n",
"print('All files downloaded')"
]
},
@@ -60,13 +60,13 @@
"from azure.storage.file import FileService, FilePermissions\n",
"import azure.mgmt.batchai.models as models\n",
"\n",
"# utilities.py contains helper functions used by different notebooks \n",
"sys.path.append('../..')\n",
"import utilities\n",
"# The BatchAI/utilities folder contains helper functions used by different notebooks \n",
"sys.path.append('../../..')\n",
"import utilities as utils\n",
"\n",
"cfg = utilities.Configuration('../../configuration.json')\n",
"client = utilities.create_batchai_client(cfg)\n",
"utilities.create_resource_group(cfg)"
"cfg = utils.config.Configuration('../../configuration.json')\n",
"client = utils.config.create_batchai_client(cfg)\n",
"utils.config.create_resource_group(cfg)"
]
},
{
@@ -171,7 +171,7 @@
"source": [
"### Monitor Cluster Creation\n",
"\n",
"Monitor the just created cluster. utilities.py contains a helper function to print out detail status of the cluster."
"Monitor the just created cluster. The `utilities` module contains a helper function to print out detail status of the cluster."
]
},
{
@@ -181,7 +181,7 @@
"outputs": [],
"source": [
"cluster = client.clusters.get(cfg.resource_group, cluster_name)\n",
"utilities.print_cluster_status(cluster)"
"utils.cluster.print_cluster_status(cluster)"
]
},
{
@@ -210,7 +210,7 @@
"source": [
"mnist_dataset_url = 'https://batchaisamples.blob.core.windows.net/samples/mnist_dataset.zip?st=2017-09-29T18%3A29%3A00Z&se=2099-12-31T08%3A00%3A00Z&sp=rl&sv=2016-05-31&sr=c&sig=PmhL%2BYnYAyNTZr1DM2JySvrI12e%2F4wZNIwCtf7TRI%2BM%3D'\n",
"if not os.path.exists('Train-28x28_cntk_text.txt') or not os.path.exists('Test-28x28_cntk_text.txt'):\n",
" utilities.download_file(mnist_dataset_url, 'mnist_dataset.zip')\n",
" utils.dataset.download_file(mnist_dataset_url, 'mnist_dataset.zip')\n",
" print('Extracting MNIST dataset...')\n",
" with zipfile.ZipFile('mnist_dataset.zip', 'r') as z:\n",
" z.extractall('.')\n",
@@ -430,7 +430,7 @@
"metadata": {},
"outputs": [],
"source": [
"utilities.wait_for_job_completion(client, cfg.resource_group, job_name, cluster_name, 'stdOuterr', 'stderr.txt')"
"utils.job.wait_for_job_completion(client, cfg.resource_group, job_name, cluster_name, 'stdOuterr', 'stderr.txt')"
]
},
{
@@ -450,7 +450,7 @@
"source": [
"files = client.jobs.list_output_files(cfg.resource_group, job_name, models.JobsListOutputFilesOptions(\"stdOuterr\")) \n",
"for f in list(files):\n",
" utilities.download_file(f.download_url, f.name)\n",
" utils.dataset.download_file(f.download_url, f.name)\n",
"print(\"All files downloaded\")"
]
},
22 changes: 11 additions & 11 deletions recipes/CNTK/CNTK-GPU-BrainScript/CNTK-GPU-BrainScript.ipynb
@@ -60,13 +60,13 @@
"from azure.storage.file import FileService\n",
"import azure.mgmt.batchai.models as models\n",
"\n",
"# utilities.py contains helper functions used by different notebooks \n",
"sys.path.append('../..')\n",
"import utilities\n",
"# The BatchAI/utilities folder contains helper functions used by different notebooks \n",
"sys.path.append('../../..')\n",
"import utilities as utils\n",
"\n",
"cfg = utilities.Configuration('../../configuration.json')\n",
"client = utilities.create_batchai_client(cfg)\n",
"utilities.create_resource_group(cfg)"
"cfg = utils.config.Configuration('../../configuration.json')\n",
"client = utils.config.create_batchai_client(cfg)\n",
"utils.config.create_resource_group(cfg)"
]
},
{
@@ -165,7 +165,7 @@
"source": [
"### Monitor Cluster Creation\n",
"\n",
"Monitor the just created cluster. utilities.py contains a helper function to print out detail status of the cluster."
"Monitor the just created cluster. The `utilities` module contains a helper function to print out detail status of the cluster."
]
},
{
@@ -175,7 +175,7 @@
"outputs": [],
"source": [
"cluster = client.clusters.get(cfg.resource_group, cluster_name)\n",
"utilities.print_cluster_status(cluster)"
"utils.cluster.print_cluster_status(cluster)"
]
},
{
@@ -204,7 +204,7 @@
"source": [
"mnist_dataset_url = 'https://batchaisamples.blob.core.windows.net/samples/mnist_dataset.zip?st=2017-09-29T18%3A29%3A00Z&se=2099-12-31T08%3A00%3A00Z&sp=rl&sv=2016-05-31&sr=c&sig=PmhL%2BYnYAyNTZr1DM2JySvrI12e%2F4wZNIwCtf7TRI%2BM%3D'\n",
"if not os.path.exists('Train-28x28_cntk_text.txt') or not os.path.exists('Test-28x28_cntk_text.txt'):\n",
" utilities.download_file(mnist_dataset_url, 'mnist_dataset.zip')\n",
" utils.dataset.download_file(mnist_dataset_url, 'mnist_dataset.zip')\n",
" print('Extracting MNIST dataset...')\n",
" with zipfile.ZipFile('mnist_dataset.zip', 'r') as z:\n",
" z.extractall('.')\n",
@@ -426,7 +426,7 @@
},
"outputs": [],
"source": [
"utilities.wait_for_job_completion(client, cfg.resource_group, job_name, cluster_name, 'stdouterr', 'stderr.txt')"
"utils.job.wait_for_job_completion(client, cfg.resource_group, job_name, cluster_name, 'stdouterr', 'stderr.txt')"
]
},
{
@@ -448,7 +448,7 @@
" job_name, models.JobsListOutputFilesOptions(outputdirectoryid='stdouterr')) \n",
"for f in list(files):\n",
" if f.download_url:\n",
" utilities.download_file(f.download_url, f.name)\n",
" utils.dataset.download_file(f.download_url, f.name)\n",
"print('All files downloaded')"
]
},
@@ -63,13 +63,13 @@
"from azure.storage.file import FileService\n",
"import azure.mgmt.batchai.models as models\n",
"\n",
"# utilities.py contains helper functions used by different notebooks\n",
"sys.path.append('../..')\n",
"import utilities\n",
"# The BatchAI/utilities folder contains helper functions used by different notebooks\n",
"sys.path.append('../../..')\n",
"import utilities as utils\n",
"\n",
"cfg = utilities.Configuration('../../configuration.json')\n",
"client = utilities.create_batchai_client(cfg)\n",
"utilities.create_resource_group(cfg)"
"cfg = utils.config.Configuration('../../configuration.json')\n",
"client = utils.config.create_batchai_client(cfg)\n",
"utils.config.create_resource_group(cfg)"
]
},
{
@@ -87,7 +87,7 @@
},
"outputs": [],
"source": [
"utilities.create_resource_group(cfg)\n",
"utils.config.create_resource_group(cfg)\n",
"_ = client.workspaces.create(cfg.resource_group, cfg.workspace, cfg.location).result()"
]
},
@@ -155,7 +155,7 @@
"source": [
"### Monitor Cluster Creation\n",
"\n",
"Monitor the just created cluster. utilities.py contains a helper function to print out detail status of the cluster."
"Monitor the just created cluster. The `utilities` module contains a helper function to print out detail status of the cluster."
]
},
{
@@ -167,7 +167,7 @@
"outputs": [],
"source": [
"cluster = client.clusters.get(cfg.resource_group, cfg.workspace, cluster_name)\n",
"utilities.print_cluster_status(cluster)"
"utils.cluster.print_cluster_status(cluster)"
]
},
{
@@ -217,9 +217,9 @@
},
"outputs": [],
"source": [
"utilities.download_file('https://raw.githubusercontent.com/Microsoft/CNTK/v2.3/Examples/Image/Classification/ResNet/Python/resnet_models.py', 'resnet_models.py')\n",
"utilities.download_file('https://raw.githubusercontent.com/Microsoft/CNTK/v2.3/Examples/Image/Classification/ResNet/Python/TrainResNet_CIFAR10_Distributed.py', 'TrainResNet_CIFAR10_Distributed.py')\n",
"utilities.download_file('https://raw.githubusercontent.com/Microsoft/CNTK/v2.3/Examples/Image/Classification/ResNet/Python/TrainResNet_CIFAR10.py', 'TrainResNet_CIFAR10.py')\n",
"utils.dataset.download_file('https://raw.githubusercontent.com/Microsoft/CNTK/v2.3/Examples/Image/Classification/ResNet/Python/resnet_models.py', 'resnet_models.py')\n",
"utils.dataset.download_file('https://raw.githubusercontent.com/Microsoft/CNTK/v2.3/Examples/Image/Classification/ResNet/Python/TrainResNet_CIFAR10_Distributed.py', 'TrainResNet_CIFAR10_Distributed.py')\n",
"utils.dataset.download_file('https://raw.githubusercontent.com/Microsoft/CNTK/v2.3/Examples/Image/Classification/ResNet/Python/TrainResNet_CIFAR10.py', 'TrainResNet_CIFAR10.py')\n",
"print('Done')"
]
},
@@ -379,7 +379,7 @@
},
"outputs": [],
"source": [
"utilities.wait_for_job_completion(client, cfg.resource_group, cfg.workspace, \n",
"utils.job.wait_for_job_completion(client, cfg.resource_group, cfg.workspace, \n",
" experiment_name, job_name, cluster_name, 'stdouterr', 'stdout.txt')"
]
},
@@ -60,12 +60,12 @@
"from azure.storage.file import FileService\n",
"import azure.mgmt.batchai.models as models\n",
"\n",
"# utilities.py contains helper functions used by different notebooks\n",
"sys.path.append('../..')\n",
"import utilities\n",
"# The BatchAI/utilities folder contains helper functions used by different notebooks\n",
"sys.path.append('../../..')\n",
"import utilities as utils\n",
"\n",
"cfg = utilities.Configuration('../../configuration.json')\n",
"client = utilities.create_batchai_client(cfg)"
"cfg = utils.config.Configuration('../../configuration.json')\n",
"client = utils.config.create_batchai_client(cfg)"
]
},
{
@@ -83,7 +83,7 @@
},
"outputs": [],
"source": [
"utilities.create_resource_group(cfg)\n",
"utils.config.create_resource_group(cfg)\n",
"_ = client.workspaces.create(cfg.resource_group, cfg.workspace, cfg.location).result()"
]
},
@@ -217,7 +217,7 @@
"source": [
"### Monitor Cluster Creation\n",
"\n",
"Monitor the just created cluster. utilities.py contains a helper function to print out detail status of the cluster."
"Monitor the just created cluster. The `utilities` module contains a helper function to print out detail status of the cluster."
]
},
{
@@ -229,7 +229,7 @@
"outputs": [],
"source": [
"cluster = client.clusters.get(cfg.resource_group, cfg.workspace, cluster_name)\n",
"utilities.print_cluster_status(cluster)"
"utils.cluster.print_cluster_status(cluster)"
]
},
{
@@ -343,7 +343,7 @@
},
"outputs": [],
"source": [
"utilities.wait_for_job_completion(client, cfg.resource_group, cfg.workspace, \n",
"utils.job.wait_for_job_completion(client, cfg.resource_group, cfg.workspace, \n",
" experiment_name, job_name, cluster_name, 'stdouterr', 'stdout.txt')"
]
},
