Integrate Facial Tracking with Your Avatar
Note: Before you start, please install or update the latest VIVE Software from SteamVR or OOBE, and check that SR_Runtime is running.
OpenXR Facial Tracking Plugin Setup
Supported Unity Engine version: 2020.2+
- Enable the OpenXR plug-in under Edit > Project Settings > XR Plug-in Management:

- Click the exclamation mark next to “OpenXR”, then choose “Fix All”.


- Add interaction profiles for your device; the following takes the Vive Controller profile as an example.

- Enable the Facial Tracking extensions in the OpenXR feature settings.

Update the avatar in your app
- Import your avatar (this guide uses the avatar provided by our FacialTracking sample as an example):
Assets > Samples > VIVE Wave OpenXR Plugin - Windows > 1.0.4 > FacialTracking Example > ViveSR > Models > version2 > Avatar_Shieh_V2.fbx

Note: It is recommended to negate the avatar's Z scale value so that its left and right sides are consistent with the user's, for example as sketched below.
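A minimal sketch (assuming avatarRoot is a hypothetical reference to the imported avatar's root Transform; you can equally set the Z scale directly in the Inspector):

// Hypothetical snippet, not part of the sample: flip the Z scale so the avatar mirrors the user.
Vector3 s = avatarRoot.localScale;
avatarRoot.localScale = new Vector3(s.x, s.y, -s.z);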


- Create a new script that declares the corresponding enum types as follows.
- OpenXR eye blend shapes:
public enum OpenXREyeShape
{
    XR_EYE_SHAPE_NONE_HTC = -1,
    XR_EYE_EXPRESSION_LEFT_BLINK_HTC = 0,
    XR_EYE_EXPRESSION_LEFT_WIDE_HTC = 1,
    XR_EYE_EXPRESSION_RIGHT_BLINK_HTC = 2,
    XR_EYE_EXPRESSION_RIGHT_WIDE_HTC = 3,
    XR_EYE_EXPRESSION_LEFT_SQUEEZE_HTC = 4,
    XR_EYE_EXPRESSION_RIGHT_SQUEEZE_HTC = 5,
    XR_EYE_EXPRESSION_LEFT_DOWN_HTC = 6,
    XR_EYE_EXPRESSION_RIGHT_DOWN_HTC = 7,
    XR_EYE_EXPRESSION_LEFT_OUT_HTC = 8,
    XR_EYE_EXPRESSION_RIGHT_IN_HTC = 9,
    XR_EYE_EXPRESSION_LEFT_IN_HTC = 10,
    XR_EYE_EXPRESSION_RIGHT_OUT_HTC = 11,
    XR_EYE_EXPRESSION_LEFT_UP_HTC = 12,
    XR_EYE_EXPRESSION_RIGHT_UP_HTC = 13,
    XR_EYE_EXPRESSION_MAX_ENUM_HTC = 14,
}
- OpenXR lip blend shapes:
public enum OpenXRLipShape
{
    XR_LIP_SHAPE_NONE_HTC = -1,
    XR_LIP_SHAPE_JAW_RIGHT_HTC = 0,
    XR_LIP_SHAPE_JAW_LEFT_HTC = 1,
    XR_LIP_SHAPE_JAW_FORWARD_HTC = 2,
    XR_LIP_SHAPE_JAW_OPEN_HTC = 3,
    XR_LIP_SHAPE_MOUTH_APE_SHAPE_HTC = 4,
    XR_LIP_SHAPE_MOUTH_UPPER_RIGHT_HTC = 5,
    XR_LIP_SHAPE_MOUTH_UPPER_LEFT_HTC = 6,
    XR_LIP_SHAPE_MOUTH_LOWER_RIGHT_HTC = 7,
    XR_LIP_SHAPE_MOUTH_LOWER_LEFT_HTC = 8,
    XR_LIP_SHAPE_MOUTH_UPPER_OVERTURN_HTC = 9,
    XR_LIP_SHAPE_MOUTH_LOWER_OVERTURN_HTC = 10,
    XR_LIP_SHAPE_MOUTH_POUT_HTC = 11,
    XR_LIP_SHAPE_MOUTH_SMILE_RIGHT_HTC = 12,
    XR_LIP_SHAPE_MOUTH_SMILE_LEFT_HTC = 13,
    XR_LIP_SHAPE_MOUTH_SAD_RIGHT_HTC = 14,
    XR_LIP_SHAPE_MOUTH_SAD_LEFT_HTC = 15,
    XR_LIP_SHAPE_CHEEK_PUFF_RIGHT_HTC = 16,
    XR_LIP_SHAPE_CHEEK_PUFF_LEFT_HTC = 17,
    XR_LIP_SHAPE_CHEEK_SUCK_HTC = 18,
    XR_LIP_SHAPE_MOUTH_UPPER_UPRIGHT_HTC = 19,
    XR_LIP_SHAPE_MOUTH_UPPER_UPLEFT_HTC = 20,
    XR_LIP_SHAPE_MOUTH_LOWER_DOWNRIGHT_HTC = 21,
    XR_LIP_SHAPE_MOUTH_LOWER_DOWNLEFT_HTC = 22,
    XR_LIP_SHAPE_MOUTH_UPPER_INSIDE_HTC = 23,
    XR_LIP_SHAPE_MOUTH_LOWER_INSIDE_HTC = 24,
    XR_LIP_SHAPE_MOUTH_LOWER_OVERLAY_HTC = 25,
    XR_LIP_SHAPE_TONGUE_LONGSTEP1_HTC = 26,
    XR_LIP_SHAPE_TONGUE_LEFT_HTC = 27,
    XR_LIP_SHAPE_TONGUE_RIGHT_HTC = 28,
    XR_LIP_SHAPE_TONGUE_UP_HTC = 29,
    XR_LIP_SHAPE_TONGUE_DOWN_HTC = 30,
    XR_LIP_SHAPE_TONGUE_ROLL_HTC = 31,
    XR_LIP_SHAPE_TONGUE_LONGSTEP2_HTC = 32,
    XR_LIP_SHAPE_TONGUE_UPRIGHT_MORPH_HTC = 33,
    XR_LIP_SHAPE_TONGUE_UPLEFT_MORPH_HTC = 34,
    XR_LIP_SHAPE_TONGUE_DOWNRIGHT_MORPH_HTC = 35,
    XR_LIP_SHAPE_TONGUE_DOWNLEFT_MORPH_HTC = 36,
    XR_LIP_SHAPE_MAX_ENUM_HTC = 37,
}
- Your avatar blend shapes (the sample's SkinnedMeshRendererShape enum), as sketched below.
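The full enum from the sample is not reproduced here. The following partial sketch is an assumption for illustration: the member names are the ones used by the mapping code later in this guide, and each numeric value is a placeholder that must equal the corresponding blend-shape index on your own avatar's SkinnedMeshRenderer.

// Hypothetical sketch — values must match your avatar's blend-shape indices.
public enum SkinnedMeshRendererShape
{
    Eye_Left_Blink = 0,
    Eye_Left_Wide = 1,
    Eye_Right_Blink = 2,
    Eye_Right_Wide = 3,
    Eye_Left_Squeeze = 4,
    Eye_Right_Squeeze = 5,
    // ... Eye_Left_Down, Eye_Right_Down, Eye_Left_Left, Eye_Right_Left,
    // Eye_Left_Right, Eye_Right_Right, Eye_Left_Up, Eye_Right_Up ...
    // ... Jaw_Right, Jaw_Left, Jaw_Forward, Jaw_Open, and the remaining
    // mouth, cheek, and tongue shapes used by the lip mapping below ...
}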
- Create a new script for eye tracking.
- Add the following namespaces to your script.
using UnityEngine;
using UnityEngine.XR.OpenXR;
using VIVE.FacialTracking;
using System;
using System.Collections.Generic; // needed for Dictionary
using System.Runtime.InteropServices;
- Add the following properties:
private static XrFacialExpressionsHTC EyeExpression_;
private static float[] blendshapes = new float[60];
// Map OpenXR eye shapes to avatar blend shapes
private static Dictionary<OpenXREyeShape, SkinnedMeshRendererShape> ShapeMap;
public SkinnedMeshRenderer HeadskinnedMeshRenderer;
- Attach the script to your avatar and set its options in the Inspector (e.g., assign HeadskinnedMeshRenderer to the avatar's head mesh).


- In Start function:
- Set the mapping relations between OpenXR eye shapes and avatar blend shapes.
ShapeMap = new Dictionary<OpenXREyeShape, SkinnedMeshRendererShape>();
ShapeMap.Add(OpenXREyeShape.XR_EYE_EXPRESSION_LEFT_BLINK_HTC, SkinnedMeshRendererShape.Eye_Left_Blink);
ShapeMap.Add(OpenXREyeShape.XR_EYE_EXPRESSION_LEFT_WIDE_HTC, SkinnedMeshRendererShape.Eye_Left_Wide);
ShapeMap.Add(OpenXREyeShape.XR_EYE_EXPRESSION_RIGHT_BLINK_HTC, SkinnedMeshRendererShape.Eye_Right_Blink);
ShapeMap.Add(OpenXREyeShape.XR_EYE_EXPRESSION_RIGHT_WIDE_HTC, SkinnedMeshRendererShape.Eye_Right_Wide);
ShapeMap.Add(OpenXREyeShape.XR_EYE_EXPRESSION_LEFT_SQUEEZE_HTC, SkinnedMeshRendererShape.Eye_Left_Squeeze);
ShapeMap.Add(OpenXREyeShape.XR_EYE_EXPRESSION_RIGHT_SQUEEZE_HTC, SkinnedMeshRendererShape.Eye_Right_Squeeze);
ShapeMap.Add(OpenXREyeShape.XR_EYE_EXPRESSION_LEFT_DOWN_HTC, SkinnedMeshRendererShape.Eye_Left_Down);
ShapeMap.Add(OpenXREyeShape.XR_EYE_EXPRESSION_RIGHT_DOWN_HTC, SkinnedMeshRendererShape.Eye_Right_Down);
// "Out" on the left eye and "in" on the right eye both look left, and vice versa.
ShapeMap.Add(OpenXREyeShape.XR_EYE_EXPRESSION_LEFT_OUT_HTC, SkinnedMeshRendererShape.Eye_Left_Left);
ShapeMap.Add(OpenXREyeShape.XR_EYE_EXPRESSION_RIGHT_IN_HTC, SkinnedMeshRendererShape.Eye_Right_Left);
ShapeMap.Add(OpenXREyeShape.XR_EYE_EXPRESSION_LEFT_IN_HTC, SkinnedMeshRendererShape.Eye_Left_Right);
ShapeMap.Add(OpenXREyeShape.XR_EYE_EXPRESSION_RIGHT_OUT_HTC, SkinnedMeshRendererShape.Eye_Right_Right);
ShapeMap.Add(OpenXREyeShape.XR_EYE_EXPRESSION_LEFT_UP_HTC, SkinnedMeshRendererShape.Eye_Left_Up);
ShapeMap.Add(OpenXREyeShape.XR_EYE_EXPRESSION_RIGHT_UP_HTC, SkinnedMeshRendererShape.Eye_Right_Up);
- To start eye tracking detection:
// FacialTrackingFeature is the facial tracking OpenXR feature class from the VIVE.FacialTracking namespace.
var feature = OpenXRSettings.Instance.GetFeature<FacialTrackingFeature>();
XrFacialTrackerCreateInfoHTC m_expressioncreateInfo = new XrFacialTrackerCreateInfoHTC(
    XrStructureType.XR_TYPE_FACIAL_TRACKER_CREATE_INFO_HTC,
    IntPtr.Zero,
    XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_EYE_DEFAULT_HTC);
int res = feature.xrCreateFacialTrackerHTC(m_expressioncreateInfo, out feature.m_expressionHandle);
if (res == (int)XrResult.XR_SUCCESS || res == (int)XrResult.XR_SESSION_LOSS_PENDING)
{
    Debug.Log("Initial Eye success : " + res);
}
else
{
    Debug.LogError("Initial Eye fail : " + res);
}
- In Update function:
- Get eye tracking detection results:
EyeExpression_.expressionCount = 60;
EyeExpression_.type = XrStructureType.XR_TYPE_FACIAL_EXPRESSIONS_HTC;
EyeExpression_.blendShapeWeightings = Marshal.AllocCoTaskMem(sizeof(float) * EyeExpression_.expressionCount);
var feature = OpenXRSettings.Instance.GetFeature<FacialTrackingFeature>();
int res = feature.xrGetFacialExpressionsHTC(feature.m_expressionHandle, ref EyeExpression_);
if (res == (int)XrResult.XR_SUCCESS)
{
    Marshal.Copy(EyeExpression_.blendShapeWeightings, blendshapes, 0, EyeExpression_.expressionCount);
}
else
{
    Debug.LogError("Get eye tracking data failed with error code : " + res);
}
// Free the unmanaged buffer so it is not leaked on every Update call.
Marshal.FreeCoTaskMem(EyeExpression_.blendShapeWeightings);
- Update avatar blend shapes:
for (int i = (int)OpenXREyeShape.XR_EYE_EXPRESSION_LEFT_BLINK_HTC; i < (int)OpenXREyeShape.XR_EYE_EXPRESSION_MAX_ENUM_HTC; i++)
{
    // OpenXR weights are normalized to 0–1; Unity blend-shape weights range from 0 to 100.
    HeadskinnedMeshRenderer.SetBlendShapeWeight((int)ShapeMap[(OpenXREyeShape)i], blendshapes[i] * 100f);
}
- In OnDestroy function:
- Stop eye tracking detection:
var feature = OpenXRSettings.Instance.GetFeature<FacialTrackingFeature>();
int res = feature.xrDestroyFacialTrackerHTC(feature.m_expressionHandle);
if (res == (int)XrResult.XR_SUCCESS)
{
    Debug.Log("Release Eye engine success : " + res);
}
else
{
    Debug.LogError("Release Eye engine fail : " + res);
}

- Create a new script for lip tracking.
- Add the following namespaces to your script.
using UnityEngine;
using UnityEngine.XR.OpenXR;
using VIVE.FacialTracking;
using System;
using System.Collections.Generic; // needed for Dictionary
using System.Runtime.InteropServices;
- Add the following properties:
public SkinnedMeshRenderer HeadskinnedMeshRenderer;
private static XrFacialExpressionsHTC LipExpression_;
private static float[] blendshapes = new float[60];
// Map OpenXR lip shapes to avatar lip blend shapes
private static Dictionary<OpenXRLipShape, SkinnedMeshRendererShape> ShapeMap;
- Attach the script to your avatar and set its options in the Inspector.
- In Start function:
- Set the mapping relations between OpenXR lip shapes and avatar blend shapes.
ShapeMap = new Dictionary<OpenXRLipShape, SkinnedMeshRendererShape>();
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_JAW_RIGHT_HTC, SkinnedMeshRendererShape.Jaw_Right);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_JAW_LEFT_HTC, SkinnedMeshRendererShape.Jaw_Left);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_JAW_FORWARD_HTC, SkinnedMeshRendererShape.Jaw_Forward);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_JAW_OPEN_HTC, SkinnedMeshRendererShape.Jaw_Open);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_MOUTH_APE_SHAPE_HTC, SkinnedMeshRendererShape.Mouth_Ape_Shape);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_MOUTH_UPPER_RIGHT_HTC, SkinnedMeshRendererShape.Mouth_Upper_Right);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_MOUTH_UPPER_LEFT_HTC, SkinnedMeshRendererShape.Mouth_Upper_Left);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_MOUTH_LOWER_RIGHT_HTC, SkinnedMeshRendererShape.Mouth_Lower_Right);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_MOUTH_LOWER_LEFT_HTC, SkinnedMeshRendererShape.Mouth_Lower_Left);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_MOUTH_UPPER_OVERTURN_HTC, SkinnedMeshRendererShape.Mouth_Upper_Overturn);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_MOUTH_LOWER_OVERTURN_HTC, SkinnedMeshRendererShape.Mouth_Lower_Overturn);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_MOUTH_POUT_HTC, SkinnedMeshRendererShape.Mouth_Pout);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_MOUTH_SMILE_RIGHT_HTC, SkinnedMeshRendererShape.Mouth_Smile_Right);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_MOUTH_SMILE_LEFT_HTC, SkinnedMeshRendererShape.Mouth_Smile_Left);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_MOUTH_SAD_RIGHT_HTC, SkinnedMeshRendererShape.Mouth_Sad_Right);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_MOUTH_SAD_LEFT_HTC, SkinnedMeshRendererShape.Mouth_Sad_Left);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_CHEEK_PUFF_RIGHT_HTC, SkinnedMeshRendererShape.Cheek_Puff_Right);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_CHEEK_PUFF_LEFT_HTC, SkinnedMeshRendererShape.Cheek_Puff_Left);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_CHEEK_SUCK_HTC, SkinnedMeshRendererShape.Cheek_Suck);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_MOUTH_UPPER_UPRIGHT_HTC, SkinnedMeshRendererShape.Mouth_Upper_UpRight);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_MOUTH_UPPER_UPLEFT_HTC, SkinnedMeshRendererShape.Mouth_Upper_UpLeft);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_MOUTH_LOWER_DOWNRIGHT_HTC, SkinnedMeshRendererShape.Mouth_Lower_DownRight);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_MOUTH_LOWER_DOWNLEFT_HTC, SkinnedMeshRendererShape.Mouth_Lower_DownLeft);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_MOUTH_UPPER_INSIDE_HTC, SkinnedMeshRendererShape.Mouth_Upper_Inside);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_MOUTH_LOWER_INSIDE_HTC, SkinnedMeshRendererShape.Mouth_Lower_Inside);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_MOUTH_LOWER_OVERLAY_HTC, SkinnedMeshRendererShape.Mouth_Lower_Overlay);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_TONGUE_LONGSTEP1_HTC, SkinnedMeshRendererShape.Tongue_LongStep1);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_TONGUE_LEFT_HTC, SkinnedMeshRendererShape.Tongue_Left);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_TONGUE_RIGHT_HTC, SkinnedMeshRendererShape.Tongue_Right);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_TONGUE_UP_HTC, SkinnedMeshRendererShape.Tongue_Up);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_TONGUE_DOWN_HTC, SkinnedMeshRendererShape.Tongue_Down);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_TONGUE_ROLL_HTC, SkinnedMeshRendererShape.Tongue_Roll);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_TONGUE_LONGSTEP2_HTC, SkinnedMeshRendererShape.Tongue_LongStep2);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_TONGUE_UPRIGHT_MORPH_HTC, SkinnedMeshRendererShape.Tongue_UpRight_Morph);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_TONGUE_UPLEFT_MORPH_HTC, SkinnedMeshRendererShape.Tongue_UpLeft_Morph);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_TONGUE_DOWNRIGHT_MORPH_HTC, SkinnedMeshRendererShape.Tongue_DownRight_Morph);
ShapeMap.Add(OpenXRLipShape.XR_LIP_SHAPE_TONGUE_DOWNLEFT_MORPH_HTC, SkinnedMeshRendererShape.Tongue_DownLeft_Morph);
- To start lip tracking detection:
var feature = OpenXRSettings.Instance.GetFeature<FacialTrackingFeature>();
XrFacialTrackerCreateInfoHTC m_expressioncreateInfo = new XrFacialTrackerCreateInfoHTC(
    XrStructureType.XR_TYPE_FACIAL_TRACKER_CREATE_INFO_HTC,
    IntPtr.Zero,
    XrFacialTrackingTypeHTC.XR_FACIAL_TRACKING_TYPE_LIP_DEFAULT_HTC);
int res = feature.xrCreateFacialTrackerHTC(m_expressioncreateInfo, out feature.m_expressionHandle_Lip);
if (res == (int)XrResult.XR_SUCCESS || res == (int)XrResult.XR_SESSION_LOSS_PENDING)
{
    Debug.Log("Initial Lip success : " + res);
}
else
{
    Debug.LogError("Initial Lip fail : " + res);
}
- In Update function:
- Get lip tracking detection results:
LipExpression_.expressionCount = 60;
LipExpression_.type = XrStructureType.XR_TYPE_FACIAL_EXPRESSIONS_HTC;
LipExpression_.blendShapeWeightings = Marshal.AllocCoTaskMem(sizeof(float) * LipExpression_.expressionCount);
var feature = OpenXRSettings.Instance.GetFeature<FacialTrackingFeature>();
int res = feature.xrGetFacialExpressionsHTC(feature.m_expressionHandle_Lip, ref LipExpression_);
if (res == (int)XrResult.XR_SUCCESS)
{
    Marshal.Copy(LipExpression_.blendShapeWeightings, blendshapes, 0, LipExpression_.expressionCount);
}
else
{
    Debug.LogError("Get lip tracking data failed with error code : " + res);
}
// Free the unmanaged buffer so it is not leaked on every Update call.
Marshal.FreeCoTaskMem(LipExpression_.blendShapeWeightings);
- Update avatar blend shapes:
for (int i = (int)OpenXRLipShape.XR_LIP_SHAPE_JAW_RIGHT_HTC; i < (int)OpenXRLipShape.XR_LIP_SHAPE_MAX_ENUM_HTC; i++)
{
    HeadskinnedMeshRenderer.SetBlendShapeWeight((int)ShapeMap[(OpenXRLipShape)i], blendshapes[i] * 100f);
}
- In OnDestroy function:
- Stop lip tracking detection:
var feature = OpenXRSettings.Instance.GetFeature<FacialTrackingFeature>();
int res = feature.xrDestroyFacialTrackerHTC(feature.m_expressionHandle_Lip);
if (res == (int)XrResult.XR_SUCCESS)
{
    Debug.Log("Release Lip engine success : " + res);
}
else
{
    Debug.LogError("Release Lip engine fail : " + res);
}
Lip tracking result:
Tips
- You can run eye tracking and lip tracking on the avatar at the same time to achieve full facial tracking; a combined sketch follows.
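For instance, a minimal sketch of a shared helper (hypothetical, not part of the sample; it assumes you expose each script's ShapeMap dictionary and latest weight array):

// Hypothetical helper: drive one SkinnedMeshRenderer from either tracker's results.
// 'map' is the eye or lip ShapeMap built in Start; 'weights' is the matching blendshapes array.
static void ApplyShapes<TShape>(SkinnedMeshRenderer head,
    Dictionary<TShape, SkinnedMeshRendererShape> map, float[] weights) where TShape : Enum
{
    foreach (var pair in map)
    {
        head.SetBlendShapeWeight((int)pair.Value, weights[Convert.ToInt32(pair.Key)] * 100f);
    }
}
// In Update, after fetching both result arrays:
// ApplyShapes(HeadskinnedMeshRenderer, eyeShapeMap, eyeBlendshapes);
// ApplyShapes(HeadskinnedMeshRenderer, lipShapeMap, lipBlendshapes);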

- To make the avatar more vivid, you can roughly infer the position of the left pupil and update the avatar using the following blend shapes; the same approach applies to the right pupil:
XR_EYE_EXPRESSION_LEFT_UP_HTC, XR_EYE_EXPRESSION_LEFT_DOWN_HTC, XR_EYE_EXPRESSION_LEFT_IN_HTC, XR_EYE_EXPRESSION_LEFT_OUT_HTC
For example, add the code below to the eye tracking script.
- Add the following properties:
public GameObject leftEye;
public GameObject rightEye;
private GameObject[] EyeAnchors;
- Set the options in the Inspector (e.g., assign leftEye and rightEye to the avatar's eye objects).


- Add the following code in the Start function:
- Create anchors for the left and right eyes.
EyeAnchors = new GameObject[2];
EyeAnchors[0] = new GameObject();
EyeAnchors[0].name = "EyeAnchor_" + 0;
EyeAnchors[0].transform.SetParent(gameObject.transform);
EyeAnchors[0].transform.localPosition = leftEye.transform.localPosition;
EyeAnchors[0].transform.localRotation = leftEye.transform.localRotation;
EyeAnchors[0].transform.localScale = leftEye.transform.localScale;
EyeAnchors[1] = new GameObject();
EyeAnchors[1].name = "EyeAnchor_" + 1;
EyeAnchors[1].transform.SetParent(gameObject.transform);
EyeAnchors[1].transform.localPosition = rightEye.transform.localPosition;
EyeAnchors[1].transform.localRotation = rightEye.transform.localRotation;
EyeAnchors[1].transform.localScale = rightEye.transform.localScale;
- Add the following code in the Update function:
- Calculate the eye gaze direction and update the eye rotation.
Vector3 GazeDirectionCombinedLocal = Vector3.zero;
// Left eye: horizontal gaze from the in/out weights, vertical gaze from the up/down weights.
if (blendshapes[(int)OpenXREyeShape.XR_EYE_EXPRESSION_LEFT_IN_HTC] > blendshapes[(int)OpenXREyeShape.XR_EYE_EXPRESSION_LEFT_OUT_HTC])
{
    GazeDirectionCombinedLocal.x = -blendshapes[(int)OpenXREyeShape.XR_EYE_EXPRESSION_LEFT_IN_HTC];
}
else
{
    GazeDirectionCombinedLocal.x = blendshapes[(int)OpenXREyeShape.XR_EYE_EXPRESSION_LEFT_OUT_HTC];
}
if (blendshapes[(int)OpenXREyeShape.XR_EYE_EXPRESSION_LEFT_UP_HTC] > blendshapes[(int)OpenXREyeShape.XR_EYE_EXPRESSION_LEFT_DOWN_HTC])
{
    GazeDirectionCombinedLocal.y = blendshapes[(int)OpenXREyeShape.XR_EYE_EXPRESSION_LEFT_UP_HTC];
}
else
{
    GazeDirectionCombinedLocal.y = -blendshapes[(int)OpenXREyeShape.XR_EYE_EXPRESSION_LEFT_DOWN_HTC];
}
GazeDirectionCombinedLocal.z = -1.0f;
Vector3 target = EyeAnchors[0].transform.TransformPoint(GazeDirectionCombinedLocal);
leftEye.transform.LookAt(target);
// Right eye: mirror of the left eye (the in/out weights swap sign on the x axis).
if (blendshapes[(int)OpenXREyeShape.XR_EYE_EXPRESSION_RIGHT_IN_HTC] > blendshapes[(int)OpenXREyeShape.XR_EYE_EXPRESSION_RIGHT_OUT_HTC])
{
    GazeDirectionCombinedLocal.x = blendshapes[(int)OpenXREyeShape.XR_EYE_EXPRESSION_RIGHT_IN_HTC];
}
else
{
    GazeDirectionCombinedLocal.x = -blendshapes[(int)OpenXREyeShape.XR_EYE_EXPRESSION_RIGHT_OUT_HTC];
}
if (blendshapes[(int)OpenXREyeShape.XR_EYE_EXPRESSION_RIGHT_UP_HTC] > blendshapes[(int)OpenXREyeShape.XR_EYE_EXPRESSION_RIGHT_DOWN_HTC])
{
    GazeDirectionCombinedLocal.y = blendshapes[(int)OpenXREyeShape.XR_EYE_EXPRESSION_RIGHT_UP_HTC];
}
else
{
    GazeDirectionCombinedLocal.y = -blendshapes[(int)OpenXREyeShape.XR_EYE_EXPRESSION_RIGHT_DOWN_HTC];
}
GazeDirectionCombinedLocal.z = -1.0f;
target = EyeAnchors[1].transform.TransformPoint(GazeDirectionCombinedLocal);
rightEye.transform.LookAt(target);
- Results
