chore(liveness): try increase video time chunks to 1000ms (#760)
* chore(liveness): try increase video time chunks to 1000ms

* chore(liveness): reorder events to prevent pause after countdown

* fix unit tests
thaddmt authored Mar 3, 2023
1 parent 9b9378e commit 3dfd13b
Showing 3 changed files with 75 additions and 65 deletions.
@@ -23,7 +23,7 @@ const ENDPOINT =
process.env.NEXT_PUBLIC_STREAMING_API_URL ||
'wss://streaming-rekognition.us-east-1.amazonaws.com:443';
const REGION = process.env.NEXT_PUBLIC_BACKEND_API_REGION || 'us-east-1';
-export const TIME_SLICE = 200;
+export const TIME_SLICE = 1000;

export interface Credentials {
accessKeyId: string;
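For context, TIME_SLICE is the millisecond timeslice passed to MediaRecorder.start(), so raising it from 200ms to 1000ms means fewer, larger dataavailable chunks per second of video. A minimal sketch of the pattern, with illustrative wiring rather than the SDK's actual recorder internals:

```ts
// Sketch of how a timeslice drives MediaRecorder chunking (the handler
// wiring here is illustrative, not the SDK's actual internals).
const TIME_SLICE = 1000; // ms between 'dataavailable' events

function startChunkedRecording(stream: MediaStream): MediaRecorder {
  const recorder = new MediaRecorder(stream);
  recorder.ondataavailable = (event: BlobEvent) => {
    // At 1000ms this fires roughly once per second, versus five
    // times per second at the previous 200ms slice.
    if (event.data.size > 0) {
      // hand the chunk to the streaming client here
    }
  };
  recorder.start(TIME_SLICE);
  return recorder;
}
```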
44 changes: 22 additions & 22 deletions packages/ui/src/machines/liveness/__tests__/index.test.ts
@@ -153,8 +153,6 @@ describe('Liveness Machine', () => {
await flushPromises(); // detectFaceDistanceBeforeRecording
jest.advanceTimersToNextTimer(); // checkFaceDistanceBeforeRecording
service.send({ type: 'START_RECORDING' });
-await flushPromises(); // startRecording
-jest.advanceTimersToNextTimer(); // checkRecordingStarted
}

async function advanceMinFaceMatches() {
@@ -164,9 +162,9 @@

async function transitionToUploading(service) {
await transitionToRecording(service);
-await flushPromises(); // checkFaceDetected
-jest.advanceTimersToNextTimer(); // ovalMatching
-await flushPromises(); // checkMatch
+await flushPromises(); // detectInitialFaceAndDrawOval
+jest.advanceTimersToNextTimer(); // checkFaceDetected
+jest.advanceTimersToNextTimer(); // checkRecordingStarted
await advanceMinFaceMatches(); // detectFaceAndMatchOval
await flushPromises(); // flashFreshnessColors
}
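With this change, transitionToUploading first drains the detectInitialFaceAndDrawOval invoke with flushPromises, then steps the two zero-delay after transitions (checkFaceDetected, then checkRecordingStarted) via advanceTimersToNextTimer. For reference, a sketch of a flushPromises helper commonly paired with Jest fake timers; the suite's actual helper may differ:

```ts
// Assumed flushPromises helper: route through the real (unfaked)
// setImmediate so pending promise callbacks settle while Jest fake
// timers are installed, before the next timer is advanced.
const { setImmediate: realSetImmediate } = jest.requireActual('timers');
const flushPromises = (): Promise<void> =>
  new Promise((resolve) => realSetImmediate(resolve));
```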
@@ -421,8 +419,6 @@ describe('Liveness Machine', () => {
await flushPromises(); // detectFaceDistanceBeforeRecording
jest.advanceTimersToNextTimer(); // checkFaceDistanceBeforeRecording
service.send({ type: 'START_RECORDING' });
-await flushPromises(); // startRecording
-jest.advanceTimersToNextTimer(); // checkRecordingStarted

expect(service.state.value).toEqual({ recording: 'ovalDrawing' });
});
@@ -442,9 +438,6 @@
expect(service.state.context.videoAssociatedParams.videoMediaStream).toBe(
mockVideoMediaStream
);
-expect(
-service.state.context.videoAssociatedParams.recordingStartTimestampMs
-).toBeDefined();
expect(
service.state.context.livenessStreamProvider.getResponseStream
).toHaveBeenCalledTimes(1);
@@ -459,12 +452,15 @@

it('should reach ovalMatching state after detectInitialFaceAndDrawOval success and respect ovalMatchingTimeout', async () => {
await transitionToRecording(service);

+await flushPromises();

expect(service.state.value).toEqual({ recording: 'checkFaceDetected' });

-jest.advanceTimersToNextTimer();
-expect(service.state.value).toEqual({ recording: 'ovalMatching' });
+jest.advanceTimersToNextTimer(); // checkFaceDetected
+jest.advanceTimersToNextTimer(); // checkRecordingStarted
+expect(service.state.value).toEqual({
+recording: 'ovalMatching',
+});
expect(
service.state.context.faceMatchAssociatedParams.faceMatchState
).toBe(FaceMatchState.FACE_IDENTIFIED);
@@ -547,10 +543,12 @@ describe('Liveness Machine', () => {
expect(mockcomponentProps.onError).toHaveBeenCalledWith(error);
});

-it('should reach checkFaceDetected state and send client sessionInformation', async () => {
+it('should reach ovalMatching state and send client sessionInformation', async () => {
await transitionToRecording(service);
await flushPromises();
-expect(service.state.value).toEqual({ recording: 'checkFaceDetected' });
+jest.advanceTimersToNextTimer(); // checkFaceDetected
+jest.advanceTimersToNextTimer(); // checkRecordingStarted
+expect(service.state.value).toEqual({ recording: 'ovalMatching' });
expect(
expect(mockLivenessStreamProvider.sendClientInfo).toHaveBeenCalledTimes(
1
@@ -574,8 +572,9 @@

it('should reach flashFreshnessColors state after detectFaceAndMatchOval success', async () => {
await transitionToRecording(service);
-await flushPromises(); // checkFaceDetected
-jest.advanceTimersToNextTimer(); // ovalMatching
+await flushPromises(); // detectInitialFaceAndDrawOval
+jest.advanceTimersToNextTimer(); // checkFaceDetected
+jest.advanceTimersToNextTimer(); // checkRecordingStarted

await advanceMinFaceMatches(); // detectFaceAndMatchOval

@@ -592,9 +591,9 @@

it('should reach waitForDisconnect state after flashFreshnessColors', async () => {
await transitionToRecording(service);
-await flushPromises(); // checkFaceDetected
-jest.advanceTimersToNextTimer(); // ovalMatching
-await flushPromises(); // checkMatch
+await flushPromises(); // detectInitialFaceAndDrawOval
+jest.advanceTimersToNextTimer(); // checkFaceDetected
+jest.advanceTimersToNextTimer(); // checkRecordingStarted
await advanceMinFaceMatches(); // detectFaceAndMatchOval
await flushPromises(); // flashFreshnessColors

@@ -621,8 +620,9 @@
);

await transitionToRecording(service);
-await flushPromises(); // checkFaceDetected
-jest.advanceTimersToNextTimer(); // ovalMatching
+await flushPromises(); // detectInitialFaceAndDrawOval
+jest.advanceTimersToNextTimer(); // checkFaceDetected
+jest.advanceTimersToNextTimer(); // checkRecordingStarted

await flushPromises();
expect(service.state.value).toEqual({ recording: 'checkMatch' });
94 changes: 52 additions & 42 deletions packages/ui/src/machines/liveness/index.ts
@@ -238,18 +238,8 @@ export const livenessMachine = createMachine<LivenessContext, LivenessEvent>(
},
recording: {
entry: ['clearErrorState', 'startRecording'],
-initial: 'checkRecordingStarted',
+initial: 'ovalDrawing',
states: {
-checkRecordingStarted: {
-after: {
-200: {
-target: 'ovalDrawing',
-cond: 'hasRecordingStarted',
-actions: ['updateRecordingStartTimestampMs'],
-},
-201: { target: 'checkRecordingStarted' },
-},
-},
ovalDrawing: {
entry: ['sendTimeoutAfterOvalDrawingDelay'],
invoke: {
@@ -270,12 +260,22 @@ export const livenessMachine = createMachine<LivenessContext, LivenessEvent>(
checkFaceDetected: {
after: {
0: {
-target: 'ovalMatching',
+target: 'checkRecordingStarted',
cond: 'hasSingleFace',
},
100: { target: 'ovalDrawing' },
},
},
+checkRecordingStarted: {
+after: {
+0: {
+target: 'ovalMatching',
+cond: 'hasRecordingStarted',
+actions: ['updateRecordingStartTimestampMs'],
+},
+100: { target: 'checkRecordingStarted' },
+},
+},
ovalMatching: {
entry: ['cancelOvalDrawingTimeout'],
invoke: {
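The net effect of this hunk: the machine confirms a single face first and only then waits on the recorder, so oval drawing is no longer blocked behind recording startup and the UI avoids a pause right after the countdown. A stripped-down sketch of the reordered polling states, with the machine's named guards ('hasSingleFace', 'hasRecordingStarted') inlined for illustration:

```ts
import { createMachine } from 'xstate';

interface SketchContext {
  faceCount: number;
  recorderStarted: boolean;
}

// Assumed shape only; the real machine invokes services and actions
// that are omitted here.
const recordingOrder = createMachine<SketchContext>({
  id: 'recordingOrder',
  initial: 'ovalDrawing',
  context: { faceCount: 1, recorderStarted: true },
  states: {
    // 1. Draw the oval first; no longer gated on the recorder.
    ovalDrawing: { after: { 0: 'checkFaceDetected' } },
    // 2. Confirm exactly one face before anything else.
    checkFaceDetected: {
      after: {
        0: { target: 'checkRecordingStarted', cond: (ctx) => ctx.faceCount === 1 },
        100: { target: 'ovalDrawing' }, // retry the face scan
      },
    },
    // 3. Only now wait for MediaRecorder to report it has started.
    checkRecordingStarted: {
      after: {
        0: { target: 'ovalMatching', cond: (ctx) => ctx.recorderStarted },
        100: { target: 'checkRecordingStarted' }, // poll again in 100ms
      },
    },
    ovalMatching: { type: 'final' },
  },
});
```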
@@ -477,17 +477,56 @@ export const livenessMachine = createMachine<LivenessContext, LivenessEvent>(
}),
updateRecordingStartTimestampMs: assign({
videoAssociatedParams: (context) => {
+const {
+challengeId,
+videoAssociatedParams: { videoMediaStream },
+ovalAssociatedParams: { initialFace },
+livenessStreamProvider,
+} = context;
const {
recordingStartApiTimestamp,
recorderStartTimestamp,
firstChunkTimestamp,
-} = context.livenessStreamProvider.videoRecorder;
+} = livenessStreamProvider.videoRecorder;
const calculatedRecordingStart = firstChunkTimestamp - TIME_SLICE;
const mediaRecorderOnStartCalled = recorderStartTimestamp;
const timestamp = Math.max(
recordingStartApiTimestamp,
Math.min(calculatedRecordingStart, mediaRecorderOnStartCalled)
);

+// Send client info for initial face position
+const { width, height } = videoMediaStream
+.getTracks()[0]
+.getSettings();
+const flippedInitialFaceLeft =
+width - initialFace.left - initialFace.width;
+
+context.livenessStreamProvider.sendClientInfo({
+DeviceInformation: {
+ClientSDKVersion: '1.0.0',
+VideoHeight: height,
+VideoWidth: width,
+},
+Challenge: {
+FaceMovementAndLightChallenge: {
+ChallengeId: challengeId,
+VideoStartTimestamp: timestamp,
+InitialFace: {
+InitialFaceDetectedTimestamp: initialFace.timestampMs,
+BoundingBox: getBoundingBox({
+deviceHeight: height,
+deviceWidth: width,
+height: initialFace.height,
+width: initialFace.width,
+top: initialFace.top,
+left: flippedInitialFaceLeft,
+}),
+},
+},
+},
+});

return {
...context.videoAssociatedParams,
recordingStartTimestampMs: timestamp,
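Worked numbers for the clamp above: the start time is estimated as the earlier of the recorder's onstart timestamp and the first chunk's timestamp minus one timeslice, floored at the moment the start() API was called. All values below are illustrative:

```ts
const TIME_SLICE = 1000;
const recordingStartApiTimestamp = 10_000; // when recorder.start() was called
const recorderStartTimestamp = 10_250;     // when MediaRecorder fired 'start'
const firstChunkTimestamp = 11_300;        // first 'dataavailable' chunk arrived

// Estimate the true start from the first chunk, take the earlier of the
// two recorder-side signals, but never go before the start() call itself.
const calculatedRecordingStart = firstChunkTimestamp - TIME_SLICE; // 10_300
const timestamp = Math.max(
  recordingStartApiTimestamp,
  Math.min(calculatedRecordingStart, recorderStartTimestamp)
); // => 10_250
```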
Expand Down Expand Up @@ -1002,7 +1041,6 @@ export const livenessMachine = createMachine<LivenessContext, LivenessEvent>(
const scaleFactor = videoScaledWidth / videoEl.videoWidth;

// generate oval details from initialFace and video dimensions
-const { width, height } = videoMediaStream.getTracks()[0].getSettings();
const ovalDetails = getOvalDetailsFromSessionInformation({
sessionInformation: serverSessionInformation,
videoWidth: videoEl.width,
@@ -1022,34 +1060,6 @@ export const livenessMachine = createMachine<LivenessContext, LivenessEvent>(
drawLivenessOvalInCanvas(canvasEl, ovalDetails, scaleFactor);
ovalDrawnTimestamp = Date.now();

-// Send client info for initial face position
-const flippedInitialFaceLeft =
-width - initialFace.left - initialFace.width;
-context.livenessStreamProvider.sendClientInfo({
-DeviceInformation: {
-ClientSDKVersion: '1.0.0',
-VideoHeight: height,
-VideoWidth: width,
-},
-Challenge: {
-FaceMovementAndLightChallenge: {
-ChallengeId: challengeId,
-VideoStartTimestamp: recordingStartTimestampMs,
-InitialFace: {
-InitialFaceDetectedTimestamp: initialFace.timestampMs,
-BoundingBox: getBoundingBox({
-deviceHeight: height,
-deviceWidth: width,
-height: initialFace.height,
-width: initialFace.width,
-top: initialFace.top,
-left: flippedInitialFaceLeft,
-}),
-},
-},
-},
-});

return {
faceMatchState,
ovalDetails,
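One detail carried over in the moved sendClientInfo block: the initial face's left edge is flipped horizontally before being packed into the bounding box, which appears to convert the mirrored preview coordinates back to the recorded frame. Illustrative arithmetic:

```ts
// A box at `left` in the mirrored frame sits at width - left - boxWidth
// in the raw video (all values illustrative).
const width = 640; // video track width from getSettings()
const initialFace = { left: 100, width: 200 };
const flippedInitialFaceLeft = width - initialFace.left - initialFace.width; // 340
```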
