<!DOCTYPE html>
<html lang="en">
<head>

  <meta charset="utf-8">
  <title>Beyond Reality Face SDK - BRFv5 - Face Tracking for Browser/JavaScript - Minimal Webcam Example</title>

  <style>
    html, body { width: 100%; height: 100%; background-color: #ffffff; margin: 0; padding: 0; overflow: hidden; }
  </style>

</head>
<body>

  <!--
    This is a minimal modules example. It is much shorter than minimal_no_modules.html because most
    of the functionality is already provided by the modules.
  -->
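  <!--
    The hidden video element receives the camera stream; its frames get drawn (mirrored) onto the
    canvas, which is also used to visualize the tracking results.
  -->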
  <video id="_webcam" style="display: none;" playsinline></video>
  <canvas id="_imageData"></canvas>
  <script type="module">

    import { brfv5 }                    from './js/brfv5/brfv5__init.js'
    import { loadBRFv5Model }           from './js/brfv5/brfv5__init.js'

    import { configureCameraInput }     from './js/brfv5/brfv5__configure.js'
    import { configureFaceTracking }    from './js/brfv5/brfv5__configure.js'
    import { configureNumFacesToTrack } from './js/brfv5/brfv5__configure.js'

    import { startCamera }              from './js/utils/utils__camera.js'

    import { drawInputMirrored }        from './js/utils/utils__canvas.js'
    import { drawCircles }              from './js/utils/utils__canvas.js'
    import { drawRect, drawRects }      from './js/utils/utils__canvas.js'
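    // The imported helpers are part of this example's js/ folder: brfv5__init.js loads the SDK
    // and its model files, brfv5__configure.js wraps common configuration calls, and the utils__
    // modules handle camera access and canvas drawing.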

    const _appId     = 'brfv5.browser.minimal.modules' // (mandatory): 8 to 64 characters, a-z . 0-9 allowed

    const _webcam    = document.getElementById('_webcam')
    const _imageData = document.getElementById('_imageData')

    // These variables will be set from the camera stream and the library.
    let _brfv5Manager = null
    let _brfv5Config  = null
    let _width        = 0
    let _height       = 0

    // loadBRFv5Model and startCamera run in parallel thanks to Promises. Both call
    // configureTracking, which only executes once both Promises have resolved. Once configured,
    // trackFaces does the tracking work and draws the results.
    startCamera(_webcam, { width: 640, height: 480, frameRate: 30, facingMode: 'user' }).then(({ video }) => {

      console.log('startCamera: done: ' + video.videoWidth + 'x' + video.videoHeight)

      _width            = video.videoWidth
      _height           = video.videoHeight
      _imageData.width  = _width
      _imageData.height = _height

      configureTracking()

    }).catch((e) => { if(e) { console.error('Camera failed: ', e) } })
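    // Load the face tracking model (the '68l' variant, presumably the 68 landmark model). Judging
    // by the other BRFv5 examples, the numeric argument is the number of model chunks to download
    // from the models folder; progress is logged while loading.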
    loadBRFv5Model('68l', 8, './js/brfv5/models/', _appId,
      (progress) => { console.log(progress) }).then(({ brfv5Manager, brfv5Config }) => {

      console.log('loadBRFv5Model: done')

      _brfv5Manager = brfv5Manager
      _brfv5Config  = brfv5Config

      configureTracking()

    }).catch((e) => { console.error('BRFv5 failed: ', e) })
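    // Configure tracking once both the camera and the model are ready. The extra arguments to
    // configureFaceTracking are presumably the number of tracking passes per frame and whether
    // free head rotation is enabled; see brfv5__configure.js for their exact meaning.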
    const configureTracking = () => {

      if(_brfv5Config !== null && _width > 0) {

        configureCameraInput(_brfv5Config, _width, _height)
        configureNumFacesToTrack(_brfv5Config, 1)
        configureFaceTracking(_brfv5Config, 3, true)

        _brfv5Manager.configure(_brfv5Config)

        trackFaces()
      }
    }
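    // Per frame: draw the mirrored camera image onto the canvas, hand the pixel data to BRFv5,
    // then draw either the tracked landmarks or the detection rectangles on top of it.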
    const trackFaces = () => {

      if(!_brfv5Manager || !_brfv5Config || !_imageData) { return }

      const ctx = _imageData.getContext('2d')

      drawInputMirrored(ctx, _width, _height, _webcam)
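      // Feed the current frame to BRFv5; the results are available via getFaces() afterwards.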
      _brfv5Manager.update(ctx.getImageData(0, 0, _width, _height))

      let doDrawFaceDetection = !_brfv5Config.enableFaceTracking

      if(_brfv5Config.enableFaceTracking) {
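        // Scale the landmark radius relative to a 480px reference resolution.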
        const sizeFactor = Math.min(_width, _height) / 480.0
        const faces      = _brfv5Manager.getFaces()

        for(let i = 0; i < faces.length; i++) {

          const face = faces[i]

          if(face.state === brfv5.BRFv5State.FACE_TRACKING) {
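            // Draw the tracking region of interest, the landmarks and the face bounds.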
            drawRect(ctx, _brfv5Config.faceTrackingConfig.regionOfInterest, '#00a0ff', 2.0)

            drawCircles(ctx, face.landmarks, '#00a0ff', 2.0 * sizeFactor)
            drawRect(ctx, face.bounds, '#ffffff', 1.0)

          } else {

            doDrawFaceDetection = true
          }
        }
      }

      if(doDrawFaceDetection) {

        // Only draw face detection results if face detection was performed.
        drawRect(ctx, _brfv5Config.faceDetectionConfig.regionOfInterest, '#ffffff', 2.0)
        drawRects(ctx, _brfv5Manager.getDetectedRects(), '#00a0ff', 1.0)
        drawRects(ctx, _brfv5Manager.getMergedRects(), '#ffffff', 3.0)
      }
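      // Keep the loop going: track again on the next animation frame.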
      requestAnimationFrame(trackFaces)
    }

  </script>
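  <!--
    Note: Since this page uses ES modules and getUserMedia, it has to be served via http(s),
    e.g. from a local development server; opening the file directly via file:// won't work.
  -->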
</body>
</html>