Commit
Eye targeting is not working with near hand pointers
microsoft#4431

In a previous change (microsoft#4270), a gaze provider state machine was added to bring the head gaze pointer behavior in line with how the shell works. This had the side effect of also impacting eye-gaze state. This change brings the old eye-gaze behavior back by updating the state machine to be aware of the type of gaze being provided.

Also added tests for this new behavior, along with tests showing that transitions between the two gaze modes (for example, when the eye tracking system gains or loses tracking) work correctly.
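
For reference, a minimal usage sketch of the updated state machine (a fragment, not part of this commit; it assumes the types from the diff below and that the pointer counts and eye-gaze flag come from the caller's input system):

    // Sketch: driving GazePointerVisibilityStateMachine from a caller's update loop.
    var gsm = new GazePointerVisibilityStateMachine();

    // Head gaze: raising a hand (one near pointer) hides the gaze pointer until "select" is said.
    gsm.UpdateState(1 /*numNearPointersActive*/, 0 /*numFarPointersActive*/, false /*isEyeGazeValid*/);
    bool hiddenForHeadGaze = !gsm.IsGazePointerActive;   // true

    // Eye gaze: only far pointers hide the gaze pointer; near pointers leave it visible.
    gsm.UpdateState(1 /*numNearPointersActive*/, 0 /*numFarPointersActive*/, true /*isEyeGazeValid*/);
    bool visibleForEyeGaze = gsm.IsGazePointerActive;    // true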
wiwei committed May 20, 2019
1 parent fe2c774 commit a4f70d7
Showing 3 changed files with 171 additions and 18 deletions.
@@ -878,7 +878,10 @@ private void ReconcilePointers()
}
if (gazePointer != null)
{
gazePointerStateMachine.UpdateState(NumNearPointersActive, NumFarPointersActive);
gazePointerStateMachine.UpdateState(
NumNearPointersActive,
NumFarPointersActive,
InputSystem.EyeGazeProvider.IsEyeGazeValid);

// The gaze cursor's visibility is controlled by IsInteractionEnabled
gazePointer.IsInteractionEnabled = gazePointerStateMachine.IsGazePointerActive;
@@ -4,31 +4,76 @@
using System;

namespace Microsoft.MixedReality.Toolkit.Input
{
/// <summary>
/// Helper class for managing the visibility of the gaze pointer to match Windows Mixed Reality and HoloLens 2.
/// When the application starts, the gaze pointer is visible. Then, when articulated hands / motion controllers
/// appear, hide the gaze cursor. Whenever the user says "select", make the gaze cursor appear.
/// </summary>
/// <remarks>
/// Behavior differs depending on whether eye gaze or head gaze is in use; see the comments on
/// GazePointerState for more details.
/// </remarks>
public class GazePointerVisibilityStateMachine : IMixedRealitySpeechHandler
{
private enum GazePointerState
{
Initial, // When the application starts up, the gaze pointer should be active
GazePointerActive, // Gaze pointer is active when no hands are visible, after "select"
GazePointerInactive // Gaze pointer is inactive as soon as motion controller or articulated hand pointers appear
// When the application starts up, the gaze pointer should be active
Initial,

// If head gaze is in use, then the gaze pointer is active when no hands are visible, or after "select" is said.
// If eye gaze is in use, then the gaze pointer is active when no far pointers are active.
GazePointerActive,

// If head gaze is in use, then the gaze pointer is inactive as soon as motion controller or
// articulated hand pointers appear.
// If eye gaze is in use, then the gaze pointer is inactive when far pointers are active.
GazePointerInactive
}
private GazePointerState gazePointerState = GazePointerState.Initial;
private bool activateGazeKeywordIsSet = false;
private bool eyeGazeValid = false;

public bool IsGazePointerActive
{
get { return gazePointerState != GazePointerState.GazePointerInactive; }
}

public void UpdateState(int numNearPointersActive, int numFarPointersActive)
/// <summary>
/// Updates the state machine based on the number of near pointers, the number of far pointers,
/// and whether or not eye gaze is valid.
/// </summary>
public void UpdateState(int numNearPointersActive, int numFarPointersActive, bool isEyeGazeValid)
{
if (eyeGazeValid != isEyeGazeValid)
{
activateGazeKeywordIsSet = false;
eyeGazeValid = isEyeGazeValid;
}

if (isEyeGazeValid)
{
UpdateStateEyeGaze(numNearPointersActive, numFarPointersActive);
}
else
{
UpdateStateHeadGaze(numNearPointersActive, numFarPointersActive);
}
}

private void UpdateStateEyeGaze(int numNearPointersActive, int numFarPointersActive)
{
// If any far pointers are active while eye gaze is valid, then
// the eye gaze pointer should be disabled.
bool isEyeGazePointerActive = numFarPointersActive == 0;

gazePointerState = isEyeGazePointerActive ?
GazePointerState.GazePointerActive :
GazePointerState.GazePointerInactive;
}

private void UpdateStateHeadGaze(int numNearPointersActive, int numFarPointersActive)
{
GazePointerState newState = gazePointerState;
bool isMotionControllerOrHandUp = numFarPointersActive > 0 || numNearPointersActive > 0;
switch (gazePointerState)
{
@@ -38,33 +38,32 @@ public void UpdateState(int numNearPointersActive, int numFarPointersActive)
// There is some pointer other than the gaze pointer in the scene, assume
// this is from a motion controller or articulated hand, and that we should
// hide the gaze pointer
newState = GazePointerState.GazePointerInactive;
gazePointerState = GazePointerState.GazePointerInactive;
}
break;
case GazePointerState.GazePointerActive:
if (isMotionControllerOrHandUp)
{
newState = GazePointerState.GazePointerInactive;
activateGazeKeywordIsSet = false;
gazePointerState = GazePointerState.GazePointerInactive;
}
break;
case GazePointerState.GazePointerInactive:
// Go from inactive to active if we say the word "select"
if (activateGazeKeywordIsSet)
{
newState = GazePointerState.GazePointerActive;
activateGazeKeywordIsSet = false;
gazePointerState = GazePointerState.GazePointerActive;
}
break;
default:
break;
}
gazePointerState = newState;
}

public void OnSpeechKeywordRecognized(SpeechEventData eventData)
{
if (eventData.Command.Keyword.Equals("select", StringComparison.CurrentCultureIgnoreCase))
if (!eyeGazeValid && eventData.Command.Keyword.Equals("select", StringComparison.CurrentCultureIgnoreCase))
{
activateGazeKeywordIsSet = true;
}
@@ -12,7 +12,7 @@ namespace Microsoft.MixedReality.Toolkit.Tests.InputSystem
class GazePointerStateMachineTests
{
[Test]
public void TestHandAndSpeechBehaviour()
public void TestHeadGazeHandAndSpeechBehaviour()
{
TestUtilities.InitializeMixedRealityToolkitScene(true);

Expand All @@ -21,7 +21,7 @@ public void TestHandAndSpeechBehaviour()
Assert.IsTrue(gsm.IsGazePointerActive, "Gaze pointer should be visible on start");

// After hand is raised, no pointer should show up;
gsm.UpdateState(1, 0);
gsm.UpdateState(1 /*numNearPointersActive*/, 0 /*numFarPointersActive*/, false);
Assert.IsFalse(gsm.IsGazePointerActive, "After hand is raised, gaze pointer should go away");

// After select called, pointer should show up again but only if no hands are up
@@ -34,16 +34,122 @@ public void TestHandAndSpeechBehaviour()
gsm.OnSpeechKeywordRecognized(data);
Assert.IsFalse(gsm.IsGazePointerActive, "After select is called but hands are up, gaze pointer should not show up");

gsm.UpdateState(0, 0);
gsm.UpdateState(0 /*numNearPointersActive*/, 0 /*numFarPointersActive*/, false);
gsm.OnSpeechKeywordRecognized(data);
gsm.UpdateState(0, 0);
gsm.UpdateState(0 /*numNearPointersActive*/, 0 /*numFarPointersActive*/, false);
Assert.IsTrue(gsm.IsGazePointerActive, "When no hands present and select called, gaze pointer should show up");

// Say select while gaze pointer is active, then raise hand. Gaze pointer should go away
gsm.OnSpeechKeywordRecognized(data);
gsm.UpdateState(1, 0);
gsm.UpdateState(1, 0);
gsm.UpdateState(1 /*numNearPointersActive*/, 0 /*numFarPointersActive*/, false);
gsm.UpdateState(1 /*numNearPointersActive*/, 0 /*numFarPointersActive*/, false);
Assert.IsFalse(gsm.IsGazePointerActive, "After select called with hands present, then hand up, gaze pointer should go away");
}

[Test]
public void TestEyeGazeHandAndSpeechBehaviour()
{
TestUtilities.InitializeMixedRealityToolkitScene(true);

// Initial state: gaze pointer active
var gsm = new GazePointerVisibilityStateMachine();
Assert.IsTrue(gsm.IsGazePointerActive, "Gaze pointer should be visible on start");

// With the hand raised, eye gaze pointer should still exist because only far interaction causes the
// eye gaze pointer to go away.
gsm.UpdateState(1 /*numNearPointersActive*/, 0 /*numFarPointersActive*/, true);
Assert.IsTrue(gsm.IsGazePointerActive, "With near interaction, gaze pointer should continue to exist");

// With far interaction active, eye gaze pointer should be hidden.
gsm.UpdateState(0 /*numNearPointersActive*/, 1 /*numFarPointersActive*/, true);
Assert.IsFalse(gsm.IsGazePointerActive, "With far interaction, gaze pointer should go away");

// Reset the state and validate that it goes back to being visible.
gsm.UpdateState(0 /*numNearPointersActive*/, 0 /*numFarPointersActive*/, true);
Assert.IsTrue(gsm.IsGazePointerActive, "Gaze pointer should be visible when no near or far pointers");

// Saying "select" should have no impact on the state of eye gaze-based interactions.
SpeechEventData data = new SpeechEventData(EventSystem.current);
data.Initialize(new BaseGenericInputSource("test input source", new IMixedRealityPointer[0], InputSourceType.Voice),
Utilities.RecognitionConfidenceLevel.High,
System.TimeSpan.MinValue,
System.DateTime.Now,
new SpeechCommands("select", KeyCode.Alpha1, MixedRealityInputAction.None));
gsm.OnSpeechKeywordRecognized(data);
Assert.IsTrue(gsm.IsGazePointerActive, "Saying 'select' should have no impact on eye gaze");

// With far and near interaction active, eye gaze pointer should be hidden (because far interaction wins over
// the eye gaze regardless of near interaction state).
gsm.UpdateState(1 /*numNearPointersActive*/, 1 /*numFarPointersActive*/, true);
Assert.IsFalse(gsm.IsGazePointerActive, "With far and near interaction, gaze pointer should go away");
}

[Test]
public void TestEyeGazeToHeadGazeTransition()
{
TestUtilities.InitializeMixedRealityToolkitScene(true);

// Initial state: gaze pointer active
var gsm = new GazePointerVisibilityStateMachine();
Assert.IsTrue(gsm.IsGazePointerActive, "Gaze pointer should be visible on start");

// With the hand raised, eye gaze pointer should still exist because only far interaction causes the
// eye gaze pointer to go away.
gsm.UpdateState(1 /*numNearPointersActive*/, 0 /*numFarPointersActive*/, true);
Assert.IsTrue(gsm.IsGazePointerActive, "With near interaction, gaze pointer should continue to exist");

// With far interaction active, eye gaze pointer should be hidden.
gsm.UpdateState(0 /*numNearPointersActive*/, 1 /*numFarPointersActive*/, true);
Assert.IsFalse(gsm.IsGazePointerActive, "With far interaction, gaze pointer should go away");

// Send a "select" command right now, to show that this cached select value doesn't affect the
// state machine once eye gaze degrades into head gaze.
SpeechEventData data = new SpeechEventData(EventSystem.current);
data.Initialize(new BaseGenericInputSource("test input source", new IMixedRealityPointer[0], InputSourceType.Voice),
Utilities.RecognitionConfidenceLevel.High,
System.TimeSpan.MinValue,
System.DateTime.Now,
new SpeechCommands("select", KeyCode.Alpha1, MixedRealityInputAction.None));
gsm.OnSpeechKeywordRecognized(data);
Assert.IsFalse(gsm.IsGazePointerActive, "Select should have no impact while eye gaze is active");

// From this point on, we're simulating what happens when eye gaze degrades into head gaze.
// Note that gaze pointer should still be hidden at this point despite no hands being visible
// because "select" wasn't spoken after the degradation happened.
// A user saying "select" 10 minutes before shouldn't have that "select" invocation carry over
// 10 minutes later.
gsm.UpdateState(0 /*numNearPointersActive*/, 0 /*numFarPointersActive*/, false);
Assert.IsFalse(gsm.IsGazePointerActive, "Gaze pointer should be inactive");

// Saying select at this point should now show the eye gaze pointer.
data = new SpeechEventData(EventSystem.current);
data.Initialize(new BaseGenericInputSource("test input source", new IMixedRealityPointer[0], InputSourceType.Voice),
Utilities.RecognitionConfidenceLevel.High,
System.TimeSpan.MinValue,
System.DateTime.Now,
new SpeechCommands("select", KeyCode.Alpha1, MixedRealityInputAction.None));
gsm.OnSpeechKeywordRecognized(data);
gsm.UpdateState(0 /*numNearPointersActive*/, 0 /*numFarPointersActive*/, false);
Assert.IsTrue(gsm.IsGazePointerActive, "Gaze pointer should be active");
}

[Test]
public void TestHeadGazeToEyeGazeTransition()
{
TestUtilities.InitializeMixedRealityToolkitScene(true);

// Initial state: gaze pointer active
var gsm = new GazePointerVisibilityStateMachine();
Assert.IsTrue(gsm.IsGazePointerActive, "Gaze pointer should be visible on start");

// The eye pointer should go away because a hand was raised and head gaze is active.
gsm.UpdateState(1 /*numNearPointersActive*/, 0 /*numFarPointersActive*/, false);
Assert.IsFalse(gsm.IsGazePointerActive, "With near interaction and head gaze, gaze pointer should be inactive");

// After transitioning to eye gaze, the gaze pointer should now be active because near interaction
// doesn't affect the visibility of eye-gaze style pointers.
gsm.UpdateState(1 /*numNearPointersActive*/, 0 /*numFarPointersActive*/, true);
Assert.IsTrue(gsm.IsGazePointerActive, "With near interaction and eye gaze, gaze pointer should be active");
}
}
}
