Eye-tracking data of user watching a 360 video?



Hello, I'm very new to both the Vive Pro Eye and development.

I want to extract gaze data from an audience while they watch a 360 video.

Do I need to import the 360 videos into Unity, or is there another way to access the eye-tracking data?

I'm sorry if the question is too vague. 


@eugie You will likely need to use Unity to build a 360 video player and integrate the eye tracking SDK to access the data you need. There are guides and sample projects available to help you get started with a 360 video player in Unity.
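
As a rough starting point, a setup along the lines below plays an equirectangular clip on the skybox and polls the combined gaze ray once per frame. This is only a sketch, not an official sample: the class name, fields, and texture size are illustrative, it assumes a skybox material using Unity's Skybox/Panoramic shader, and GetGazeRay is the polling call from the SRanipal Unity package (check the exact signature in your SDK version).

using UnityEngine;
using UnityEngine.Video;
using ViveSR.anipal.Eye;

public class Gaze360Player : MonoBehaviour
{
    public VideoClip clip;              // 360 clip imported into Unity
    public Material panoramicSkybox;    // material using the Skybox/Panoramic shader

    private VideoPlayer player;
    private RenderTexture target;

    void Start()
    {
        // Route the video into a RenderTexture and display it on the skybox.
        target = new RenderTexture(3840, 1920, 0);
        player = gameObject.AddComponent<VideoPlayer>();
        player.clip = clip;
        player.renderMode = VideoRenderMode.RenderTexture;
        player.targetTexture = target;
        panoramicSkybox.mainTexture = target;
        RenderSettings.skybox = panoramicSkybox;
        player.Play();
    }

    void Update()
    {
        if (SRanipal_Eye_Framework.Status != SRanipal_Eye_Framework.FrameworkStatus.WORKING) return;

        // Poll the combined gaze ray (head-relative) and log it alongside the video time.
        Ray gazeRay;
        if (SRanipal_Eye.GetGazeRay(GazeIndex.COMBINE, out gazeRay))
        {
            Debug.Log(player.time + " s  origin: " + gazeRay.origin + "  direction: " + gazeRay.direction);
        }
    }
}

Polling once per frame in Update() is the simplest way to pair gaze with the current video time; the callback approach only becomes necessary if you need the full ~120 Hz eye data rate rather than one sample per rendered frame.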


@Corvus Thank you very much! I managed to make the 360 video player within Unity, but the eye-tracker data is coming out null.

When I printed out each step, the problem seemed to be in the callback registration.

The eye data callback keeps coming back 'false'.

I attached my code below. Thank you. 

 

using UnityEngine;
using ViveSR.anipal.Eye;
using System.Runtime.InteropServices;
using UnityEngine.UI;

/// <summary>
/// Example usage for eye tracking callback
/// Note: The callback runs on a separate thread and reports data at ~120 Hz.
/// Unity is not thread-safe, so do not call any UnityEngine API from within the callback thread.
/// </summary>
public class CallbackExample : MonoBehaviour
{
    private static EyeData eyeData = new EyeData();
    private static bool eye_callback_registered = false;

    public Text uiText;
    private float updateSpeed = 0;
    private static float lastTime, currentTime;


    private void Update()
    {
/*        print("-----");
        print(SRanipal_Eye_Framework.Status);
        print(SRanipal_Eye_Framework.FrameworkStatus.WORKING);
        print("-----");*/
        if (SRanipal_Eye_Framework.Status != SRanipal_Eye_Framework.FrameworkStatus.WORKING) return;

        print("------");
        print(SRanipal_Eye_Framework.Instance.EnableEyeDataCallback);
        print(eye_callback_registered);
        print("-----");
        if (SRanipal_Eye_Framework.Instance.EnableEyeDataCallback == true && eye_callback_registered == false)
        {
            print("1111");
            SRanipal_Eye.WrapperRegisterEyeDataCallback(Marshal.GetFunctionPointerForDelegate((SRanipal_Eye.CallbackBasic)EyeCallback));
            eye_callback_registered = true;
        }
        else if (SRanipal_Eye_Framework.Instance.EnableEyeDataCallback == false && eye_callback_registered == true)
        {
            print("2222");
            SRanipal_Eye.WrapperUnRegisterEyeDataCallback(Marshal.GetFunctionPointerForDelegate((SRanipal_Eye.CallbackBasic)EyeCallback));
            eye_callback_registered = false;
        }

        // Show the interval between the two most recent callback samples (timestamps are in ms).
        updateSpeed = currentTime - lastTime;
        uiText.text = updateSpeed.ToString() + " ms";
    }

    private void OnDisable()
    {
        Release();
    }

    void OnApplicationQuit()
    {
        Release();
    }

    /// <summary>
    /// Release callback thread when disabled or quit
    /// </summary>
    private static void Release()
    {
        if (eye_callback_registered == true)
        {
            SRanipal_Eye.WrapperUnRegisterEyeDataCallback(Marshal.GetFunctionPointerForDelegate((SRanipal_Eye.CallbackBasic)EyeCallback));
            eye_callback_registered = false;
        }
    }

    /// <summary>
    /// Required class for IL2CPP scripting backend support
    /// </summary>
    internal class MonoPInvokeCallbackAttribute : System.Attribute
    {
        public MonoPInvokeCallbackAttribute() { }
    }

    /// <summary>
    /// Eye tracking data callback thread.
    /// Reports data at ~120 Hz.
    /// MonoPInvokeCallback attribute required for IL2CPP scripting backend
    /// </summary>
    /// <param name="eye_data">Reference to latest eye_data</param>
    [MonoPInvokeCallback]
    private static void EyeCallback(ref EyeData eye_data)
    {
        Debug.Log("callback started");
        // Gets data from anipal's Eye module
        eyeData = eye_data;
        lastTime = currentTime;
        currentTime = eyeData.timestamp;


        /*        // The time when the frame was captured, in milliseconds.
                timeStamp = eyeData.timestamp;

                // The point in the eye from which the gaze ray originates, in millimeters (right-handed coordinate system).
                gazeOriginLeft = eyeData.verbose_data.left.gaze_origin_mm;
                gazeOriginRight = eyeData.verbose_data.right.gaze_origin_mm;
                Debug.Log("gazeOriginLeft: " + gazeOriginLeft);

                // The normalized gaze direction of the eye as a unit vector (right-handed coordinate system).
                gazeDirectionLeft = eyeData.verbose_data.left.gaze_direction_normalized;
                gazeDirectionRight = eyeData.verbose_data.right.gaze_direction_normalized;
                gazeDirectionCombined = eyeData.verbose_data.combined.eye_data.gaze_direction_normalized;
                Debug.Log("gaze_direction_left: " + gazeDirectionLeft);

                // The diameter of the pupil in millimeters.
                pupilDiameterLeft = eyeData.verbose_data.left.pupil_diameter_mm;
                pupilDiameterRight = eyeData.verbose_data.right.pupil_diameter_mm;
                pupilDiameterCombined = eyeData.verbose_data.combined.eye_data.pupil_diameter_mm;
                Debug.Log("pupilDiameterLeft: " + pupilDiameterLeft);

                // A value representing how open the eye is in [0,1]
                eyeOpenLeft = eyeData.verbose_data.left.eye_openness;
                eyeOpenRight = eyeData.verbose_data.right.eye_openness;
                eyeOpenCombined = eyeData.verbose_data.combined.eye_data.eye_openness;
                Debug.Log("eyeOpenLeft: " + eyeOpenLeft);

                // The normalized position of a pupil in [0,1]
                pupilPositionLeft = eyeData.verbose_data.left.pupil_position_in_sensor_area;
                pupilPositionRight = eyeData.verbose_data.right.pupil_position_in_sensor_area;
                pupilPositionCombined = eyeData.verbose_data.combined.eye_data.pupil_position_in_sensor_area;
                Debug.Log("pupilPositionLeft: " + pupilPositionLeft);

                lock (DebugWriter)
                {
                    CSVWriter.Write();
                }*/

    }
}
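
For completeness, since the callback runs off Unity's main thread, the commented-out logging at the end would need to avoid the UnityEngine API; plain file I/O is fine there. A minimal thread-safe sketch (CsvGazeLog and the file name are hypothetical placeholders, not part of the SDK) could look like this:

using System.IO;
using ViveSR.anipal.Eye;

internal static class CsvGazeLog
{
    private static readonly object writeLock = new object();
    private static readonly StreamWriter writer = new StreamWriter("gaze_log.csv", true);

    public static void Write(EyeData data)
    {
        // Combined-eye sample, matching the fields used in the commented-out block above.
        var combined = data.verbose_data.combined.eye_data;
        lock (writeLock)
        {
            writer.WriteLine(data.timestamp + "," +
                combined.gaze_origin_mm.x + "," + combined.gaze_origin_mm.y + "," + combined.gaze_origin_mm.z + "," +
                combined.gaze_direction_normalized.x + "," + combined.gaze_direction_normalized.y + "," + combined.gaze_direction_normalized.z);
            writer.Flush();
        }
    }
}

EyeCallback could then call CsvGazeLog.Write(eye_data) in place of the commented-out CSVWriter.Write().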

 

