Skip to content
This repository has been archived by the owner on Apr 18, 2023. It is now read-only.

Commit

Permalink
[workload] Fixed some errors when loading model files of Super Resolution, Emotion Analysis and Facial Landmark Detection examples.
Browse files Browse the repository at this point in the history
  • Loading branch information
BruceDai committed Oct 9, 2019
1 parent 95ce8d8 commit 84355a7
Show file tree
Hide file tree
Showing 4 changed files with 20 additions and 14 deletions.
4 changes: 2 additions & 2 deletions workload/resources/main.js
Expand Up @@ -9,7 +9,7 @@ const BenchmarkClass = {
};

async function main() {
let ctx = showCanvasElement.getContext("2d");
let ctx = showCanvasElement.getContext("2d");
ctx.drawImage(imageElement, 0, 0);
inputElement.setAttribute('disabled', true);
pickBtnElement.setAttribute('class', 'btn btn-primary disabled');
Expand All @@ -31,7 +31,7 @@ async function main() {
let selectedOpt = preferSelectElement.options[preferSelectElement.selectedIndex];
let backendName = configuration[key];
if (configuration[key].indexOf('WebNN') === 0) {
backendName += ` + ${getNativeAPI(selectedOpt.value)}(${selectedOpt.text})`;
backendName += ` (Preference: ${selectedOpt.text})`;
}
logger.log(`${key.padStart(12)}: ${backendName}`);
} else {
Expand Down
2 changes: 1 addition & 1 deletion workload/resources/utils.js
Expand Up @@ -146,7 +146,7 @@ async function loadModelAndLabels(model, label=null) {
arrayBuffer = await this.loadUrl(url + model, true);
}
bytes = new Uint8Array(arrayBuffer);
if (label.toLowerCase().startsWith("https://") || label.toLowerCase().startsWith("http://")) {
if (label != null && (label.toLowerCase().startsWith("https://") || label.toLowerCase().startsWith("http://"))) {
text = await this.loadUrl(label);
} else {
text = label ? await this.loadUrl(url + label) : null;
Expand Down
17 changes: 10 additions & 7 deletions workload/resources/utils_ea.js
Expand Up @@ -4,7 +4,7 @@ class EABenchmark extends Benchmark {
super(...arguments);
this.faceDetector = null;
this.modelName = modelName;
this.modelInfoDict = getModelInfoDict(emotionAnalysisModels, 'Simple CNN 7(TFlite)');
this.modelInfoDict = getModelInfoDict(emotionAnalysisModels, 'Simple CNN 7 (TFlite)');
this.model = null;
this.inputTensor = null;
this.inputSize = null;
Expand All @@ -18,18 +18,21 @@ class EABenchmark extends Benchmark {
let model = faceDetectionModels.filter(f => f.modelName == this.modelName);
inputCanvas.setAttribute("width", model[0].inputSize[1]);
inputCanvas.setAttribute("height", model[0].inputSize[0]);
model[0].modelFile = '../examples' + model[0].modelFile.slice(2);
let modelFile = model[0].modelFile;
if (!modelFile.toLowerCase().startsWith("https://") && !modelFile.toLowerCase().startsWith("http://")) {
model[0].modelFile = '../examples/util/' + model[0].modelFile;
}
this.faceDetector = new FaceDetecor(inputCanvas);
await this.faceDetector.loadModel(model[0]);
await this.faceDetector.init(this.backend.replace('WebNN', 'WebML'), preferSelect.value);
model[0].modelFile = '..' + model[0].modelFile.slice(11);
model[0].modelFile = modelFile;
}

async getFaceDetectResult() {
let detectResult = await this.faceDetector.getFaceBoxes(imageElement);
return detectResult;
}

async setInputOutput(box) {
let inputCanvas = document.createElement('canvas');
let width = this.modelInfoDict.inputSize[1];
Expand All @@ -45,8 +48,8 @@ class EABenchmark extends Benchmark {
inputCanvas.setAttribute("width", width);
inputCanvas.setAttribute("height", height);
let canvasContext = inputCanvas.getContext('2d');
canvasContext.drawImage(imageElement, box[0], box[2],
box[1] - box[0], box[3] - box[2], 0, 0,
canvasContext.drawImage(imageElement, box[0], box[2],
box[1] - box[0], box[3] - box[2], 0, 0,
width, height);
let pixels = canvasContext.getImageData(0, 0, width, height).data;
if (norm) {
Expand Down Expand Up @@ -143,7 +146,7 @@ class EABenchmark extends Benchmark {
// drawImage
let ctx = canvas.getContext('2d');
ctx.drawImage(image, 0, 0, canvas.width, canvas.height);

// drawFaceBox
face_boxes.forEach((box, i) => {
let xmin = box[0] / image.height * canvas.height;
Expand Down
11 changes: 7 additions & 4 deletions workload/resources/utils_fld.js
Expand Up @@ -17,11 +17,14 @@ class FLDBenchmark extends Benchmark {
let model = faceDetectionModels.filter(f => f.modelName == this.modelName);
inputCanvas.setAttribute("width", model[0].inputSize[1]);
inputCanvas.setAttribute("height", model[0].inputSize[0]);
model[0].modelFile = '../examples' + model[0].modelFile.slice(2);
let modelFile = model[0].modelFile;
if (!modelFile.toLowerCase().startsWith("https://") && !modelFile.toLowerCase().startsWith("http://")) {
model[0].modelFile = '../examples/util/' + model[0].modelFile;
}
this.faceDetector = new FaceDetecor(inputCanvas);
await this.faceDetector.loadModel(model[0]);
await this.faceDetector.init(this.backend.replace('WebNN', 'WebML'), preferSelect.value);
model[0].modelFile = '..' + model[0].modelFile.slice(11);
model[0].modelFile = modelFile;
}

async getFaceDetectResult() {
Expand All @@ -46,8 +49,8 @@ class FLDBenchmark extends Benchmark {
inputCanvas.setAttribute("width", width);
inputCanvas.setAttribute("height", height);
let canvasContext = inputCanvas.getContext('2d');
canvasContext.drawImage(imageElement, box[0], box[2],
box[1]-box[0], box[3]-box[2], 0, 0,
canvasContext.drawImage(imageElement, box[0], box[2],
box[1]-box[0], box[3]-box[2], 0, 0,
inputCanvas.width,
inputCanvas.height);
let pixels = canvasContext.getImageData(0, 0, width, height).data;
Expand Down

0 comments on commit 84355a7

Please sign in to comment.