Skip to content

Commit 787c4ff

Browse files
author
Ayaan Farooqui
committed
prototype ready
1 parent 90d8c1e commit 787c4ff

File tree

3 files changed

+110
-321
lines changed

3 files changed

+110
-321
lines changed

src/App.js

Lines changed: 2 additions & 97 deletions
Original file line numberDiff line numberDiff line change
@@ -1,105 +1,10 @@
1-
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
2-
import * as mpHolistic from "@mediapipe/holistic";
3-
import * as tf from '@tensorflow/tfjs';
4-
import { CircularProgress } from "@mui/material";
5-
import { Camera } from '@mediapipe/camera_utils';
1+
import Detector from "./detector/Detector";
62

73
function App() {
84

9-
const [showLoadingSpinner, setShowLoadingSpinner] = useState(false)
10-
const canvasElementRef = useRef();
11-
const videoElementRef = useRef();
12-
const controlsRef = useRef();
13-
const [canvasCtx, setCanvasCtx] = useState(null);
14-
const [model, setModel] = useState();
15-
16-
const onResults = useCallback((results) => {
17-
try {
18-
let pose = tf.zeros([33, 4]), face = tf.zeros([468, 3]), lh = tf.zeros([21, 3]), rh = tf.zeros([21, 3]);
19-
if (results.poseLandmarks) {
20-
for (let res of results.poseLandmarks)
21-
pose = tf.reshape(tf.tensor2d([[res.x, res.y, res.z, res.visibility]]), [-1]);
22-
}
23-
if (results.faceLandmarks) {
24-
for (let res of results.faceLandmarks)
25-
face = tf.reshape(tf.tensor2d([[res.x, res.y, res.z, res.visibility]]), [-1]);
26-
}
27-
if (results.leftHandLandmarks) {
28-
for (let res of results.leftHandLandmarks)
29-
lh = tf.reshape(tf.tensor2d([[res.x, res.y, res.z, res.visibility]]).flatten(), [-1]);
30-
}
31-
if (results.rightHandLandmarks) {
32-
for (let res of results.rightHandLandmarks)
33-
rh = tf.reshape(tf.tensor2d([[res.x, res.y, res.z, res.visibility]]).flatten(), [-1]);
34-
}
35-
console.log("pose1", pose.shape, face.shape, lh.shape, rh.shape);
36-
}
37-
catch (err) {
38-
console.log(err)
39-
}
40-
}, [])
41-
42-
useEffect(() => {
43-
if (typeof canvasElementRef.current !== typeof undefined && typeof canvasElementRef.current !== typeof undefined && typeof controlsRef.current !== typeof undefined) {
44-
console.log("activating")
45-
// Set the canvas context
46-
setCanvasCtx(canvasElementRef.current.getContext('2d'))
47-
// ------------------------------
48-
}
49-
}, [canvasElementRef, videoElementRef, controlsRef])
50-
51-
useEffect(() => {
52-
if (canvasCtx !== null) {
53-
tf.loadLayersModel('jsonmodel/model.json')
54-
.then(model => {
55-
setModel(model)
56-
})
57-
.catch(err => {
58-
console.log(err)
59-
})
60-
const holistic = new mpHolistic.Holistic({
61-
locateFile: (file) => {
62-
return `https://cdn.jsdelivr.net/npm/@mediapipe/holistic@` +
63-
`${mpHolistic.VERSION}/${file}`;
64-
}
65-
})
66-
holistic.setOptions({ minDetectionConfidence: 0.5, minTrackingConfidence: 0.5 })
67-
// set function to run on result of holistic model
68-
holistic.onResults(onResults);
69-
// ------------------------------------------------
70-
// Start the camera using mediapipe camera utility
71-
if (typeof videoElementRef.current !== "undefined" && videoElementRef.current !== null) {
72-
const camera = new Camera(videoElementRef.current, {
73-
onFrame: async () => {
74-
await holistic.send({ image: videoElementRef.current });
75-
},
76-
width: 480,
77-
height: 480
78-
});
79-
camera.start();
80-
}
81-
// --------------------------------------------------
82-
}
83-
}, [canvasCtx, onResults])
84-
855
return (
866
<div className="App">
87-
<div className="container">
88-
<p >Webcam Input</p>
89-
<video ref={videoElementRef} ></video>
90-
91-
<canvas ref={canvasElementRef} width="480px" height="480px">
92-
93-
</canvas>
94-
<div className="loading">
95-
{
96-
showLoadingSpinner && <CircularProgress />
97-
}
98-
</div>
99-
<div ref={controlsRef} >
100-
</div>
101-
</div>
102-
7+
<Detector />
1038
</div>
1049
)
10510
}

src/detector/Detector.js

Lines changed: 61 additions & 199 deletions
Original file line numberDiff line numberDiff line change
@@ -1,215 +1,77 @@
1-
import DeviceDetector from "device-detector-js";
2-
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
1+
import { useEffect, useRef, useState } from "react";
32
import * as mpHolistic from "@mediapipe/holistic";
4-
import { connect, removeLandmarks } from "./detector/helperFunctions";
5-
import * as controls from "@mediapipe/control_utils";
6-
import * as drawingUtils from "@mediapipe/drawing_utils";
3+
import * as tf from '@tensorflow/tfjs';
74
import { CircularProgress } from "@mui/material";
5+
import { Camera } from '@mediapipe/camera_utils';
6+
import { onResults } from "./helperFunctions";
87

9-
export default function Detector() {
10-
const [showLoadingSpinner, setShowLoadingSpinner] = useState(false)
11-
const canvasElementRef = useRef();
12-
const videoElementRef = useRef();
13-
const [canvasCtx, setCanvasCtx] = useState(null);
14-
const fpsControl = useMemo(() => new controls.FPS(), []);
15-
const holistic = useMemo(() => new mpHolistic.Holistic(), []);
16-
17-
let activeEffect = 'mask';
18-
19-
const onResults = useCallback((results) => {
20-
console.log("hello1")
21-
if (canvasCtx !== null) {
22-
// Hide the spinner.
23-
setShowLoadingSpinner(true)
24-
25-
// Remove landmarks we don't want to draw.
26-
removeLandmarks(results);
27-
28-
// Update the frame rate.
29-
fpsControl.tick();
30-
31-
// Draw the overlays.
32-
canvasCtx.save();
33-
canvasCtx.clearRect(0, 0, canvasElementRef.width, canvasElementRef.height);
34-
35-
if (results.segmentationMask) {
36-
canvasCtx.drawImage(
37-
results.segmentationMask, 0, 0, canvasElementRef.width,
38-
canvasElementRef.height);
8+
function Detector() {
399

40-
// Only overwrite existing pixels.
41-
if (activeEffect === 'mask' || activeEffect === 'both') {
42-
canvasCtx.globalCompositeOperation = 'source-in';
43-
// This can be a color or a texture or whatever...
44-
canvasCtx.fillStyle = '#00FF007F';
45-
canvasCtx.fillRect(0, 0, canvasElementRef.width, canvasElementRef.height);
46-
} else {
47-
canvasCtx.globalCompositeOperation = 'source-out';
48-
canvasCtx.fillStyle = '#0000FF7F';
49-
canvasCtx.fillRect(0, 0, canvasElementRef.width, canvasElementRef.height);
50-
}
51-
52-
// Only overwrite missing pixels.
53-
canvasCtx.globalCompositeOperation = 'destination-atop';
54-
canvasCtx.drawImage(
55-
results.image, 0, 0, canvasElementRef.width, canvasElementRef.height);
56-
57-
canvasCtx.globalCompositeOperation = 'source-over';
58-
} else {
59-
canvasCtx.drawImage(
60-
results.image, 0, 0, canvasElementRef.width, canvasElementRef.height);
61-
}
62-
63-
// Connect elbows to hands. Do this first so that the other graphics will draw
64-
// on top of these marks.
65-
canvasCtx.lineWidth = 5;
66-
if (results.poseLandmarks) {
67-
if (results.rightHandLandmarks) {
68-
canvasCtx.strokeStyle = 'white';
69-
connect(canvasCtx, [[
70-
results.poseLandmarks[mpHolistic.POSE_LANDMARKS.RIGHT_ELBOW],
71-
results.rightHandLandmarks[0]
72-
]]);
73-
}
74-
if (results.leftHandLandmarks) {
75-
canvasCtx.strokeStyle = 'white';
76-
connect(canvasCtx, [[
77-
results.poseLandmarks[mpHolistic.POSE_LANDMARKS.LEFT_ELBOW],
78-
results.leftHandLandmarks[0]
79-
]]);
80-
}
81-
}
82-
83-
// Pose...
84-
drawingUtils.drawConnectors(
85-
canvasCtx, results.poseLandmarks, mpHolistic.POSE_CONNECTIONS,
86-
{ color: 'white' });
87-
drawingUtils.drawLandmarks(
88-
canvasCtx,
89-
Object.values(mpHolistic.POSE_LANDMARKS_LEFT)
90-
.map(index => results.poseLandmarks[index]),
91-
{ visibilityMin: 0.65, color: 'white', fillColor: 'rgb(255,138,0)' });
92-
drawingUtils.drawLandmarks(
93-
canvasCtx,
94-
Object.values(mpHolistic.POSE_LANDMARKS_RIGHT)
95-
.map(index => results.poseLandmarks[index]),
96-
{ visibilityMin: 0.65, color: 'white', fillColor: 'rgb(0,217,231)' });
97-
98-
// Hands...
99-
drawingUtils.drawConnectors(
100-
canvasCtx, results.rightHandLandmarks, mpHolistic.HAND_CONNECTIONS,
101-
{ color: 'white' });
102-
drawingUtils.drawLandmarks(canvasCtx, results.rightHandLandmarks, {
103-
color: 'white',
104-
fillColor: 'rgb(0,217,231)',
105-
lineWidth: 2,
106-
radius: (data) => {
107-
return drawingUtils.lerp(data.from.z, -0.15, .1, 10, 1);
108-
}
109-
});
110-
drawingUtils.drawConnectors(
111-
canvasCtx, results.leftHandLandmarks, mpHolistic.HAND_CONNECTIONS,
112-
{ color: 'white' });
113-
drawingUtils.drawLandmarks(canvasCtx, results.leftHandLandmarks, {
114-
color: 'white',
115-
fillColor: 'rgb(255,138,0)',
116-
lineWidth: 2,
117-
radius: (data) => {
118-
return drawingUtils.lerp(data.from.z, -0.15, .1, 10, 1);
119-
}
120-
});
10+
const [showLoadingSpinner, setShowLoadingSpinner] = useState(true)
11+
const videoElementRef = useRef();
12+
const [holisticModel, setHolisticModel] = useState(null);
12113

122-
// Face...
123-
drawingUtils.drawConnectors(
124-
canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_TESSELATION,
125-
{ color: '#C0C0C070', lineWidth: 1 });
126-
drawingUtils.drawConnectors(
127-
canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_RIGHT_EYE,
128-
{ color: 'rgb(0,217,231)' });
129-
drawingUtils.drawConnectors(
130-
canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_RIGHT_EYEBROW,
131-
{ color: 'rgb(0,217,231)' });
132-
drawingUtils.drawConnectors(
133-
canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_LEFT_EYE,
134-
{ color: 'rgb(255,138,0)' });
135-
drawingUtils.drawConnectors(
136-
canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_LEFT_EYEBROW,
137-
{ color: 'rgb(255,138,0)' });
138-
drawingUtils.drawConnectors(
139-
canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_FACE_OVAL,
140-
{ color: '#E0E0E0', lineWidth: 5 });
141-
drawingUtils.drawConnectors(
142-
canvasCtx, results.faceLandmarks, mpHolistic.FACEMESH_LIPS,
143-
{ color: '#E0E0E0', lineWidth: 5 });
14414

145-
canvasCtx.restore();
146-
}
147-
}, [activeEffect, canvasCtx, fpsControl])
14815

14916
useEffect(() => {
150-
if (typeof canvasElementRef.current !== typeof undefined) {
151-
setCanvasCtx(canvasElementRef.current.getContext('2d'))
152-
console.log("hello0")
153-
holistic.onResults(onResults);
17+
// Start the camera using mediapipe camera utility
18+
if (typeof videoElementRef.current !== "undefined" && videoElementRef.current !== null && holisticModel !== null) {
19+
const camera = new Camera(videoElementRef.current, {
20+
onFrame: async () => {
21+
await holisticModel.send({ image: videoElementRef.current });
22+
},
23+
width: 480,
24+
height: 480
25+
});
26+
camera.start();
15427
}
155-
}, [canvasElementRef, holistic, onResults])
28+
// --------------------------------------------------
29+
}, [videoElementRef, holisticModel])
15630

15731
useEffect(() => {
158-
// call function to test the browser compatibility
159-
testSupport([
160-
{ client: 'Chrome' },
161-
]);
32+
// load our custom model and set it
33+
tf.loadLayersModel('jsonmodel/model.json')
34+
.then(fetched_model => {
35+
console.log("fetched custom model")
36+
// initialize the holistic model
37+
const holistic = new mpHolistic.Holistic({
38+
locateFile: (file) => {
39+
return `https://cdn.jsdelivr.net/npm/@mediapipe/holistic@` +
40+
`${mpHolistic.VERSION}/${file}`;
41+
}
42+
})
43+
holistic.setOptions({ minDetectionConfidence: 0.5, minTrackingConfidence: 0.5 })
44+
holistic.onResults((results) => onResults(results, fetched_model));
45+
holistic.initialize()
46+
.then(res => {
47+
console.log("Initialized Mp holistic model")
48+
setShowLoadingSpinner(false)
49+
setHolisticModel(holistic)
50+
})
51+
// -----------------------------
52+
})
53+
.catch(err => {
54+
console.log(err)
55+
})
56+
// ------------------------------------------
16257
}, [])
16358

164-
const testSupport = (supportedDevices) => {
165-
/*
166-
* this function tests the browser support for running the application
167-
* */
168-
const deviceDetector = new DeviceDetector();
169-
const detectedDevice = deviceDetector.parse(navigator.userAgent);
170-
171-
let isSupported = false;
172-
for (const device of supportedDevices) {
173-
if (device.client !== undefined) {
174-
const re = new RegExp(`^${device.client}$`);
175-
if (!re.test(detectedDevice.client.name)) {
176-
continue;
177-
}
178-
}
179-
if (device.os !== undefined) {
180-
const re = new RegExp(`^${device.os}$`);
181-
if (!re.test(detectedDevice.os.name)) {
182-
continue;
183-
}
184-
}
185-
isSupported = true;
186-
break;
187-
}
188-
if (!isSupported) {
189-
alert(`This demo, running on ${detectedDevice.client.name}/${detectedDevice.os.name}, ` +
190-
`is not well supported at this time, continue at your own risk.`);
191-
}
192-
else {
193-
alert(`This demo, running on ${detectedDevice.client.name}/${detectedDevice.os.name}, ` +
194-
`is well supported, please enjoy.`);
195-
}
196-
}
197-
198-
return (
199-
<div className="App">
200-
<div className="container">
201-
<video className="input_video" ref={videoElementRef}></video>
202-
<div className="canvas-container">
203-
<canvas className="output_canvas" ref={canvasElementRef} width="1280px" height="720px">
204-
</canvas>
205-
</div>
206-
<div className="loading">
207-
{
208-
showLoadingSpinner && <CircularProgress />
209-
}
59+
if (showLoadingSpinner)
60+
return (
61+
<div className="loading">
62+
<CircularProgress />
63+
</div>
64+
)
65+
else
66+
return (
67+
<div className="App">
68+
<div className="container">
69+
<p >Webcam Input</p>
70+
<video ref={videoElementRef} ></video>
21071
</div>
72+
21173
</div>
74+
)
75+
}
21276

213-
</div>
214-
);
215-
};
77+
export default Detector;

0 commit comments

Comments (0)