Skip to content

Commit c6d0b79

Browse files
vishalbollu authored and deliahu committed
Lowercase python code (#602)
1 parent e24cd2b commit c6d0b79

File tree

8 files changed

+41
-46
lines changed

8 files changed

+41
-46
lines changed

manager/uninstall.sh

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,8 +16,8 @@
1616

1717
set -e
1818

19-
echo -e "Spinning down the cluster ... (this will take a few minutes)\n"
19+
echo -e "spinning down the cluster ... (this will take a few minutes)\n"
2020

2121
eksctl delete cluster --name=$CORTEX_CLUSTER_NAME --region=$CORTEX_REGION
2222

23-
echo -e "\n✓ Spun down the cluster"
23+
echo -e "\n✓ spun down the cluster"

pkg/operator/workloads/api_workload.go

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -249,7 +249,7 @@ type downloadContainerArg struct {
249249
HideUnzippingLog bool `json:"hide_unzipping_log"` // if true, don't log when unzipping
250250
}
251251

252-
const downloaderLastLog = "pulling the %s Serving image"
252+
const downloaderLastLog = "pulling the %s serving image"
253253

254254
func tfAPISpec(
255255
ctx *context.Context,
@@ -279,7 +279,7 @@ func tfAPISpec(
279279
}
280280

281281
downloadConfig := downloadContainerConfig{
282-
LastLog: fmt.Sprintf(downloaderLastLog, "TensorFlow"),
282+
LastLog: fmt.Sprintf(downloaderLastLog, "tensorflow"),
283283
DownloadArgs: []downloadContainerArg{
284284
{
285285
From: ctx.APIs[api.Name].TensorFlow.Model,
@@ -477,7 +477,7 @@ func predictorAPISpec(
477477
}
478478

479479
downloadConfig := downloadContainerConfig{
480-
LastLog: fmt.Sprintf(downloaderLastLog, "Predictor"),
480+
LastLog: fmt.Sprintf(downloaderLastLog, "predictor"),
481481
DownloadArgs: []downloadContainerArg{
482482
{
483483
From: config.AWS.S3Path(ctx.ProjectKey),
@@ -637,7 +637,7 @@ func onnxAPISpec(
637637
}
638638

639639
downloadConfig := downloadContainerConfig{
640-
LastLog: fmt.Sprintf(downloaderLastLog, "ONNX"),
640+
LastLog: fmt.Sprintf(downloaderLastLog, "onnx"),
641641
DownloadArgs: []downloadContainerArg{
642642
{
643643
From: ctx.APIs[api.Name].ONNX.Model,

pkg/workloads/cortex/lib/api_utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@
2222

2323

2424
API_SUMMARY_MESSAGE = (
25-
"send a POST request to this endpoint with a sample in JSON to make a prediction"
25+
"send a post request to this endpoint with a sample in json to make a prediction"
2626
)
2727

2828

pkg/workloads/cortex/lib/context.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ def __init__(self, **kwargs):
8484

8585
if self.api_version != consts.CORTEX_VERSION:
8686
raise ValueError(
87-
"API version mismatch (Context: {}, Image: {})".format(
87+
"api version mismatch (context: {}, image: {})".format(
8888
self.api_version, consts.CORTEX_VERSION
8989
)
9090
)

pkg/workloads/cortex/lib/storage/s3.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -121,7 +121,7 @@ def _read_bytes_from_s3_single(self, key, allow_missing=False, ext_bucket=None):
121121
except Exception as e:
122122
raise CortexException(
123123
'key "{}" in bucket "{}" could not be accessed; '.format(key, bucket)
124-
+ "it may not exist, or you may not have suffienct permissions"
124+
+ "it may not exist, or you may not have sufficient permissions"
125125
) from e
126126

127127
return byte_array.strip()

pkg/workloads/cortex/onnx_serve/api.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -308,24 +308,24 @@ def start(args):
308308
except Exception as e:
309309
cx_logger().warn("an error occurred while attempting to load classes", exc_info=True)
310310

311-
cx_logger().info("{} API is live".format(api["name"]))
311+
cx_logger().info("{} api is live".format(api["name"]))
312312
serve(app, listen="*:{}".format(args.port))
313313

314314

315315
def main():
316316
parser = argparse.ArgumentParser()
317317
na = parser.add_argument_group("required named arguments")
318-
na.add_argument("--workload-id", required=True, help="Workload ID")
319-
na.add_argument("--port", type=int, required=True, help="Port (on localhost) to use")
318+
na.add_argument("--workload-id", required=True, help="workload id")
319+
na.add_argument("--port", type=int, required=True, help="port (on localhost) to use")
320320
na.add_argument(
321321
"--context",
322322
required=True,
323-
help="S3 path to context (e.g. s3://bucket/path/to/context.json)",
323+
help="s3 path to context (e.g. s3://bucket/path/to/context.json)",
324324
)
325-
na.add_argument("--api", required=True, help="Resource id of api to serve")
326-
na.add_argument("--model-dir", required=True, help="Directory to download the model to")
327-
na.add_argument("--cache-dir", required=True, help="Local path for the context cache")
328-
na.add_argument("--project-dir", required=True, help="Local path for the project zip file")
325+
na.add_argument("--api", required=True, help="resource id of api to serve")
326+
na.add_argument("--model-dir", required=True, help="directory to download the model to")
327+
na.add_argument("--cache-dir", required=True, help="local path for the context cache")
328+
na.add_argument("--project-dir", required=True, help="local path for the project zip file")
329329

330330
parser.set_defaults(func=start)
331331

pkg/workloads/cortex/predictor_serve/api.py

Lines changed: 11 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -101,12 +101,7 @@ def predict():
101101

102102
@app.route("/predict", methods=["GET"])
103103
def get_summary():
104-
return jsonify(
105-
{
106-
"model_signature": extract_signature(local_cache["input_metadata"]),
107-
"message": api_utils.API_SUMMARY_MESSAGE,
108-
}
109-
)
104+
return jsonify({"message": api_utils.API_SUMMARY_MESSAGE})
110105

111106

112107
@app.errorhandler(Exception)
@@ -126,7 +121,7 @@ def start(args):
126121
if api.get("predictor") is None:
127122
raise CortexException(api["name"], "predictor key not configured")
128123

129-
cx_logger().info("loading the Predictor from {}".format(api["predictor"]["path"]))
124+
cx_logger().info("loading the predictor from {}".format(api["predictor"]["path"]))
130125
local_cache["predictor"] = ctx.get_predictor_impl(api["name"], args.project_dir)
131126

132127
if util.has_function(local_cache["predictor"], "init"):
@@ -138,7 +133,7 @@ def start(args):
138133
args.model_dir, os.path.basename(os.path.normpath(prefix))
139134
)
140135

141-
cx_logger().info("calling the Predictor's init() function")
136+
cx_logger().info("calling the predictor's init() function")
142137
local_cache["predictor"].init(model_path, api["predictor"]["metadata"])
143138
except Exception as e:
144139
raise UserRuntimeException(api["predictor"]["path"], "init", str(e)) from e
@@ -154,24 +149,24 @@ def start(args):
154149
except Exception as e:
155150
cx_logger().warn("an error occurred while attempting to load classes", exc_info=True)
156151

157-
cx_logger().info("{} API is live".format(api["name"]))
152+
cx_logger().info("{} api is live".format(api["name"]))
158153
serve(app, listen="*:{}".format(args.port))
159154

160155

161156
def main():
162157
parser = argparse.ArgumentParser()
163158
na = parser.add_argument_group("required named arguments")
164-
na.add_argument("--workload-id", required=True, help="Workload ID")
165-
na.add_argument("--port", type=int, required=True, help="Port (on localhost) to use")
159+
na.add_argument("--workload-id", required=True, help="workload id")
160+
na.add_argument("--port", type=int, required=True, help="port (on localhost) to use")
166161
na.add_argument(
167162
"--context",
168163
required=True,
169-
help="S3 path to context (e.g. s3://bucket/path/to/context.json)",
164+
help="s3 path to context (e.g. s3://bucket/path/to/context.json)",
170165
)
171-
na.add_argument("--api", required=True, help="Resource id of api to serve")
172-
na.add_argument("--model-dir", required=True, help="Directory to download the model to")
173-
na.add_argument("--cache-dir", required=True, help="Local path for the context cache")
174-
na.add_argument("--project-dir", required=True, help="Local path for the project zip file")
166+
na.add_argument("--api", required=True, help="resource id of api to serve")
167+
na.add_argument("--model-dir", required=True, help="directory to download the model to")
168+
na.add_argument("--cache-dir", required=True, help="local path for the context cache")
169+
na.add_argument("--project-dir", required=True, help="local path for the project zip file")
175170

176171
parser.set_defaults(func=start)
177172

pkg/workloads/cortex/tf_api/api.py

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -233,7 +233,7 @@ def predict():
233233
try:
234234
sample = request.get_json()
235235
except Exception as e:
236-
return "Malformed JSON", status.HTTP_400_BAD_REQUEST
236+
return "malformed json", status.HTTP_400_BAD_REQUEST
237237

238238
ctx = local_cache["ctx"]
239239
api = local_cache["api"]
@@ -310,8 +310,8 @@ def get_summary():
310310
return jsonify(response)
311311

312312

313-
tf_expected_dir_structure = """TensorFlow model directories must have the following structure:
314-
1523423423/ (Version prefix, usually a timestamp)
313+
tf_expected_dir_structure = """tensorflow model directories must have the following structure:
314+
1523423423/ (version prefix, usually a timestamp)
315315
├── saved_model.pb
316316
└── variables/
317317
├── variables.index
@@ -429,7 +429,7 @@ def start(args):
429429
except Exception as e:
430430
if i > 6:
431431
cx_logger().warn(
432-
"unable to read model metadata - model is still loading. Retrying..."
432+
"unable to read model metadata - model is still loading, retrying..."
433433
)
434434
if i == limit - 1:
435435
cx_logger().exception("retry limit exceeded")
@@ -445,27 +445,27 @@ def start(args):
445445
local_cache["parsed_signature"] = parsed_signature
446446
cx_logger().info("model_signature: {}".format(local_cache["parsed_signature"]))
447447

448-
cx_logger().info("{} API is live".format(api["name"]))
448+
cx_logger().info("{} api is live".format(api["name"]))
449449
serve(app, listen="*:{}".format(args.port))
450450

451451

452452
def main():
453453
parser = argparse.ArgumentParser()
454454
na = parser.add_argument_group("required named arguments")
455-
na.add_argument("--workload-id", required=True, help="Workload ID")
456-
na.add_argument("--port", type=int, required=True, help="Port (on localhost) to use")
455+
na.add_argument("--workload-id", required=True, help="workload id")
456+
na.add_argument("--port", type=int, required=True, help="port (on localhost) to use")
457457
na.add_argument(
458-
"--tf-serve-port", type=int, required=True, help="Port (on localhost) where TF Serving runs"
458+
"--tf-serve-port", type=int, required=True, help="port (on localhost) where tf serving runs"
459459
)
460460
na.add_argument(
461461
"--context",
462462
required=True,
463-
help="S3 path to context (e.g. s3://bucket/path/to/context.json)",
463+
help="s3 path to context (e.g. s3://bucket/path/to/context.json)",
464464
)
465-
na.add_argument("--api", required=True, help="Resource id of api to serve")
466-
na.add_argument("--model-dir", required=True, help="Directory to download the model to")
467-
na.add_argument("--cache-dir", required=True, help="Local path for the context cache")
468-
na.add_argument("--project-dir", required=True, help="Local path for the project zip file")
465+
na.add_argument("--api", required=True, help="resource id of api to serve")
466+
na.add_argument("--model-dir", required=True, help="directory to download the model to")
467+
na.add_argument("--cache-dir", required=True, help="local path for the context cache")
468+
na.add_argument("--project-dir", required=True, help="local path for the project zip file")
469469
parser.set_defaults(func=start)
470470

471471
args = parser.parse_args()

0 commit comments

Comments (0)