Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 40 additions & 0 deletions face-detection/demos/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
# Demos

Try our demos and get inspired with what you can do with face-detection models!

## Table of Contents
1. [Live Camera Demo](#live-camera-demo)

2. [Upload a Video Demo](#upload-a-video-demo)

3. [How to Run a Demo](#how-to-run-a-demo)

-------------------------------------------------------------------------------

## Live Camera Demo
This demo uses your camera to get a live video stream and tracks your face in real-time.
You can try out different runtimes to see the difference. It
works on laptops, iPhones and Android phones.

[MediaPipeFaceDetector model entry](https://storage.googleapis.com/tfjs-models/demos/face-detection/index.html?model=mediapipe_face_detector)

## Upload a Video Demo
This demo allows you to upload a video (in .mp4 format) to run with the model.
Once the video is processed, it automatically downloads the video with face detection.

[MediaPipeFaceDetector model entry](https://storage.googleapis.com/tfjs-models/demos/face-detection-upload-video/index.html?model=mediapipe_face_detector)

## How to Run a Demo
If you want to run any of the demos locally, follow these steps:

1. Go to the demo folder, e.g. `cd live_video`

2. Remove cache etc. `rm -rf .cache dist node_modules`

3. Build dependency. `yarn build-dep`

4. Install dependencies. `yarn`

5. Run the demo. `yarn watch`

6. The demo runs at `localhost:1234`. (Remember to provide a URL model parameter, e.g. `localhost:1234/?model=mediapipe_face_detector`)
16 changes: 16 additions & 0 deletions face-detection/demos/live_video/.babelrc
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
{
"presets": [
[
"env",
{
"esmodules": false,
"targets": {
"browsers": [
"> 3%"
]
}
}
]
],
"plugins": ["@babel/plugin-transform-runtime"]
}
63 changes: 63 additions & 0 deletions face-detection/demos/live_video/index.html
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
<!-- Copyright 2022 Google LLC. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================-->
<!DOCTYPE html>
<html>

<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width,initial-scale=1,maximum-scale=1.0, user-scalable=no">
<style>
body {
margin: 0;
}

#stats {
position: relative;
width: 100%;
height: 80px;
}

#main {
position: relative;
margin: 0;
}

#canvas-wrapper {
position: relative;
}
</style>
</head>

<body>
<div id="stats"></div>
<div id="main">
<div class="container">
<div class="canvas-wrapper">
<canvas id="output"></canvas>
<video id="video" playsinline style="
-webkit-transform: scaleX(-1);
transform: scaleX(-1);
visibility: hidden;
width: auto;
height: auto;
">
</video>
</div>
</div>
</div>
</body>
<script src="https://cdnjs.cloudflare.com/ajax/libs/dat-gui/0.7.6/dat.gui.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/stats.js/r16/Stats.min.js"></script>
<script src="src/index.js"></script>

</html>
65 changes: 65 additions & 0 deletions face-detection/demos/live_video/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
{
"name": "facedetection_demo",
"version": "0.0.1",
"description": "Demo for face detection api",
"main": "index.js",
"license": "Apache-2.0",
"private": true,
"engines": {
"node": ">=8.9.0"
},
"dependencies": {
"@mediapipe/face_detection": "~0.4.0",
"@tensorflow-models/face-detection": "file:../../dist",
"@tensorflow/tfjs-backend-wasm": "^3.12.0",
"@tensorflow/tfjs-backend-webgl": "^3.12.0",
"@tensorflow/tfjs-converter": "^3.12.0",
"@tensorflow/tfjs-core": "^3.12.0",
"scatter-gl": "0.0.8"
},
"scripts": {
"watch": "cross-env NODE_ENV=development parcel index.html --no-hmr --open",
"build": "cross-env NODE_ENV=production parcel build index.html --public-url ./",
"lint": "eslint .",
"build-dep": "cd ../../ && yarn && yarn build",
"link-core": "yalc link @tensorflow/tfjs-core",
"link-webgl": "yalc link @tensorflow/tfjs-backend-webgl"
},
"browser": {
"crypto": false
},
"devDependencies": {
"@babel/core": "^7.7.5",
"@babel/plugin-transform-runtime": "^7.7.6",
"@babel/preset-env": "^7.7.6",
"babel-plugin-external-helpers": "^6.22.0",
"babel-preset-env": "^1.7.0",
"clang-format": "~1.2.2",
"cross-env": "^5.2.0",
"eslint": "^4.19.1",
"eslint-config-google": "^0.9.1",
"parcel-bundler": "1.12.5",
"parcel-plugin-static-files-copy": "^2.5.1",
"yalc": "~1.0.0-pre.50"
},
"resolutions": {
"is-svg": "4.3.1"
},
"eslintConfig": {
"extends": "google",
"rules": {
"require-jsdoc": 0,
"valid-jsdoc": 0
},
"env": {
"es6": true
},
"parserOptions": {
"ecmaVersion": 8,
"sourceType": "module"
}
},
"eslintIgnore": [
"dist/"
]
}
93 changes: 93 additions & 0 deletions face-detection/demos/live_video/src/camera.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
/**
* @license
* Copyright 2022 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/

import {VIDEO_SIZE} from './shared/params';
import {drawResults, isMobile} from './shared/util';

export class Camera {
  constructor() {
    this.video = document.getElementById('video');
    this.canvas = document.getElementById('output');
    this.ctx = this.canvas.getContext('2d');
  }

  /**
   * Initiate a Camera instance and wait for the camera stream to be ready.
   * Resolves once video metadata has loaded, playback has started, and the
   * canvas has been sized/mirrored to match the video feed.
   * @param cameraParam From app `STATE.camera`; expects `{targetFPS, sizeOption}`.
   * @returns {Promise<Camera>} A ready-to-draw Camera instance.
   * @throws {Error} If `navigator.mediaDevices.getUserMedia` is unavailable.
   */
  static async setupCamera(cameraParam) {
    if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
      throw new Error(
          'Browser API navigator.mediaDevices.getUserMedia not available');
    }

    const {targetFPS, sizeOption} = cameraParam;
    const $size = VIDEO_SIZE[sizeOption];
    const videoConfig = {
      'audio': false,
      'video': {
        facingMode: 'user',
        // Only setting the video to a specified size for large screen, on
        // mobile devices accept the default size.
        width: isMobile() ? VIDEO_SIZE['360 X 270'].width : $size.width,
        height: isMobile() ? VIDEO_SIZE['360 X 270'].height : $size.height,
        frameRate: {
          ideal: targetFPS,
        },
      },
    };

    const stream = await navigator.mediaDevices.getUserMedia(videoConfig);

    const camera = new Camera();
    camera.video.srcObject = stream;

    await new Promise((resolve) => {
      camera.video.onloadedmetadata = () => {
        // Bug fix: the original `resolve(video)` referenced an undeclared
        // identifier and only worked via the implicit `window.video` global
        // created from the element id. Resolve with the camera's own element.
        resolve(camera.video);
      };
    });

    // `play()` returns a Promise; await it so autoplay rejections surface
    // instead of being silently dropped as a floating promise.
    await camera.video.play();

    const videoWidth = camera.video.videoWidth;
    const videoHeight = camera.video.videoHeight;
    // Must set below two lines, otherwise video element doesn't show.
    camera.video.width = videoWidth;
    camera.video.height = videoHeight;

    camera.canvas.width = videoWidth;
    camera.canvas.height = videoHeight;
    const canvasContainer = document.querySelector('.canvas-wrapper');
    // Assign via cssText: setting `.style` directly to a string relies on
    // a non-standard setter behavior.
    canvasContainer.style.cssText =
        `width: ${videoWidth}px; height: ${videoHeight}px`;

    // Because the image from camera is mirrored, need to flip horizontally.
    camera.ctx.translate(camera.video.videoWidth, 0);
    camera.ctx.scale(-1, 1);

    return camera;
  }

  /** Draw the current video frame onto the output canvas. */
  drawCtx() {
    this.ctx.drawImage(
        this.video, 0, 0, this.video.videoWidth, this.video.videoHeight);
  }

  /**
   * Render detection results on top of the current frame.
   * @param faces Detected faces from the model.
   * @param boundingBox Whether to draw bounding boxes.
   * @param keypoints Whether to draw facial keypoints.
   */
  drawResults(faces, boundingBox, keypoints) {
    drawResults(this.ctx, faces, boundingBox, keypoints);
  }
}
Loading