NRSDK Scripting API

Session Events

public class SessionEventsListener : MonoBehaviour
 {
     void Awake()
     {
         // Tracking mode changed event.
         NRSessionManager.OnChangeTrackingMode += OnChangeTrackingMode;
         // Glasses disconnected event.
         NRSessionManager.OnGlassesDisconnect += OnGlassesDisconnect;
         // Glasses state changed event: PutOn, PutOff.
         NRSessionManager.OnGlassesStateChanged += OnGlassesStateChanged;
         // Tracking state lost event.
         NRSessionManager.OnHMDLostTracking += OnHMDLostTracking;
         // Tracking state ready event.
         NRSessionManager.OnHMDPoseReady += OnHMDPoseReady;
         // Session kernel error event, such as NRRGBCameraDeviceNotFindError, NRPermissionDenyError, NRUnSupportedHandtrackingCalculationError.
         NRSessionManager.OnKernalError += OnKernalError;
     }

     /// <summary>
     /// Session kernel error event.
     /// </summary>
     /// <param name="exception">NRRGBCameraDeviceNotFindError, NRPermissionDenyError, NRUnSupportedHandtrackingCalculationError</param>
     private void OnKernalError(NRKernalError exception)
     {
         NRDebugger.Info("[SessionEventsListener] OnKernalError.");
     }

     private void OnHMDPoseReady()
     {
         NRDebugger.Info("[SessionEventsListener] OnHMDPoseReady.");
     }

     private void OnHMDLostTracking()
     {
         NRDebugger.Info("[SessionEventsListener] OnHMDLostTracking.");
     }

     private void OnGlassesStateChanged(NRDevice.GlassesEventType eventtype)
     {
         NRDebugger.Info("[SessionEventsListener] OnGlassesStateChanged:" + eventtype.ToString());
     }

     private void OnGlassesDisconnect(GlassesDisconnectReason reason)
     {
         NRDebugger.Info("[SessionEventsListener] OnGlassesDisconnect:" + reason.ToString());
     }

     private void OnChangeTrackingMode(NRHMDPoseTracker.TrackingType origin, NRHMDPoseTracker.TrackingType target)
     {
         NRDebugger.Info("[SessionEventsListener] OnChangeTrackingMode, from:{0} to:{1}", origin, target);
     }
 }
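
If the listener can be destroyed before the application quits, the same handlers should be removed again. A minimal sketch of an OnDestroy method (added to the class above) that unsubscribes everything registered in Awake:

     void OnDestroy()
     {
         // Unsubscribe to avoid callbacks firing on a destroyed listener.
         NRSessionManager.OnChangeTrackingMode -= OnChangeTrackingMode;
         NRSessionManager.OnGlassesDisconnect -= OnGlassesDisconnect;
         NRSessionManager.OnGlassesStateChanged -= OnGlassesStateChanged;
         NRSessionManager.OnHMDLostTracking -= OnHMDLostTracking;
         NRSessionManager.OnHMDPoseReady -= OnHMDPoseReady;
         NRSessionManager.OnKernalError -= OnKernalError;
     }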

Nreal Light Properties

Glasses SN (Experimental feature)

string sn = NRDevice.Instance.NativeGlassesController?.GetGlassesSN();
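
A minimal usage sketch that guards against the glasses controller not being ready yet (the component name GlassesSNLogger is just for illustration):

using NRKernal;
using UnityEngine;

public class GlassesSNLogger : MonoBehaviour
{
    void Start()
    {
        // NativeGlassesController may be null before the glasses are connected.
        string sn = NRDevice.Instance.NativeGlassesController?.GetGlassesSN();
        NRDebugger.Info("Glasses SN: " + (sn ?? "unavailable"));
    }
}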

Device Parameters

// display projection matrix
bool result;
var matrix_data = NRFrame.GetEyeProjectMatrix(out result, 0.03f, 100f);
// left display
Matrix4x4 left_display_matrix = matrix_data.LEyeMatrix;
// right display
Matrix4x4 right_display_matrix = matrix_data.REyeMatrix;


// camera external parameters
var eyePoseFromHead = NRFrame.EyePoseFromHead;
// left display local pose from "NRCameraRig"
Pose left_display = eyePoseFromHead.LEyePose;
// right display local pose from "NRCameraRig"
Pose right_display = eyePoseFromHead.REyePose;
// rgb camera local pose from "NRCameraRig"
Pose rgb_camera = eyePoseFromHead.RGBEyePose;


// rgb camera intrinsic matrix
NativeMat3f rgb_in_mat = NRFrame.GetRGBCameraIntrinsicMatrix();

// rgb camera distortion
NRDistortionParams rgb_distortion = NRFrame.GetRGBCameraDistortion();
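
As a usage sketch, the per-eye projection matrices can be applied to the eye cameras of the rig. The leftEyeCamera and rightEyeCamera fields below are hypothetical references you would wire up yourself:

using NRKernal;
using UnityEngine;

public class EyeProjectionApplier : MonoBehaviour
{
    public Camera leftEyeCamera;    // hypothetical reference to the left eye camera
    public Camera rightEyeCamera;   // hypothetical reference to the right eye camera

    void Start()
    {
        bool result;
        var matrixData = NRFrame.GetEyeProjectMatrix(out result, 0.03f, 100f);
        if (!result)
        {
            return;
        }

        leftEyeCamera.projectionMatrix = matrixData.LEyeMatrix;
        rightEyeCamera.projectionMatrix = matrixData.REyeMatrix;
    }
}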

Camera Properties

Gray Camera Data (Preview feature)

var grayCameraDataProvider = new GrayCameraDataProvider(NativeGrayEye.LEFT);
grayCameraDataProvider.Play();

After initializing grayCameraDataProvider, you can access the gray camera data through grayCameraDataProvider.RawData or grayCameraDataProvider.Texture.

The implementation of GrayCameraDataProvider is as follows.

using NRKernal;
using NRKernal.Preview;
using System;
using UnityEngine;

public class GrayCameraDataProvider : CameraModelView
{
    private NativeGrayEye m_Eye;
    public NativeGrayEye Eye
    {
        get { return m_Eye; }
    }

    private Texture2D m_Texture;
    public Texture2D Texture
    {
        get { return m_Texture; }
    }

    private byte[] m_RawData;
    public byte[] RawData
    {
        get { return m_RawData; }
    }

    private ulong m_TimeStamp;
    public ulong TimeStamp
    {
        get { return m_TimeStamp; }
    }

    public GrayCameraDataProvider(NativeGrayEye eye)
    {
        m_Eye = eye;

        m_NativeCameraProxy = CameraProxyFactoryExtension.CreateGrayCameraProxy(m_Eye);
        m_NativeCameraProxy.Regist(this);

        InitTexture();
    }

    private void InitTexture()
    {
        if (m_Texture == null)
        {
            m_Texture = new Texture2D(Width, Height, TextureFormat.R8, false);
        }
    }

    protected override void OnRawDataUpdate(FrameRawData frame)
    {
        base.OnRawDataUpdate(frame);

        // Copy the raw data of the specified eye
        int size = frame.data.Length / 2;
        int offset = m_Eye == NativeGrayEye.LEFT ? 0 : size;
        if (m_RawData == null)
        {
            m_RawData = new byte[size];
        }
        Array.Copy(frame.data, offset, m_RawData, 0, size);

        // Fill in texture with raw data
        m_Texture.LoadRawTextureData(m_RawData);
        m_Texture.Apply();

        m_TimeStamp = frame.timeStamp;
    }
}
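
A sketch of how the provider might be consumed from a MonoBehaviour. It assumes the CameraModelView base class exposes a Stop() method (as the SDK's other camera textures do) and uses a RawImage as a hypothetical display target:

using NRKernal.Preview;
using UnityEngine;
using UnityEngine.UI;

public class GrayCameraPreview : MonoBehaviour
{
    public RawImage previewImage;   // hypothetical UI target for the camera texture

    private GrayCameraDataProvider m_Provider;

    void Start()
    {
        m_Provider = new GrayCameraDataProvider(NativeGrayEye.LEFT);
        m_Provider.Play();
    }

    void Update()
    {
        if (m_Provider != null && m_Provider.Texture != null)
        {
            // The texture is refreshed in OnRawDataUpdate each frame.
            previewImage.texture = m_Provider.Texture;
        }
    }

    void OnDestroy()
    {
        // Assumption: CameraModelView provides Stop() to release the camera.
        m_Provider?.Stop();
    }
}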

Intrinsic parameters

Intrinsic parameters are specific to a camera. They include information such as the focal length (fx, fy) and the optical center (cx, cy).

using NRKernal.Preview;

// Get fc, cc of the left eye
var matrix = NRFrameExtension.GetGrayCameraIntrinsicMatrix(NativeGrayEye.LEFT);

// Get kc, camera_model of the left eye
var distortionParams = NRFrameExtension.GetGrayCameraDistortion(NativeGrayEye.LEFT);
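
As a worked illustration of what fx, fy, cx, cy mean, the sketch below projects a point given in the camera frame onto the image plane with a plain pinhole model. The values would come from the intrinsic matrix above; nothing here is an SDK call:

using UnityEngine;

public static class PinholeProjection
{
    // Project a camera-space point (z > 0, in meters) to pixel coordinates
    // using the focal lengths (fx, fy) and the optical center (cx, cy).
    public static Vector2 ProjectPoint(Vector3 point, float fx, float fy, float cx, float cy)
    {
        float u = fx * (point.x / point.z) + cx;
        float v = fy * (point.y / point.z) + cy;
        return new Vector2(u, v);
    }
}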

Extrinsic parameters

Extrinsic parameters correspond to the rotation and translation that transform the coordinates of a 3D point from one coordinate system to another.

Pose leyePose = NRFrameExtension.GrayEyePoseFromHead.LEyePose;

// The following utility (or ConversionUtility) converts a pose to a matrix
NativeMat4f extrinsicMat = Utility.PoseToTransformMatrix(leyePose);

public class Utility
{
    public static NativeMat4f PoseToTransformMatrix(Pose pose)
    {
        NativeMat4f mat4f = NativeMat4f.identity;

        Vector3 p = pose.position;
        Quaternion q = pose.rotation;
        q.z = -q.z;
        q.w = -q.w;

        float qxx = q.x * q.x;
        float qyy = q.y * q.y;
        float qzz = q.z * q.z;
        float qxz = q.x * q.z;
        float qxy = q.x * q.y;
        float qyz = q.y * q.z;
        float qwx = q.w * q.x;
        float qwy = q.w * q.y;
        float qwz = q.w * q.z;

        mat4f[0, 0] = 1f - 2f * (qyy + qzz);
        mat4f[0, 1] = 2f * (qxy + qwz);
        mat4f[0, 2] = 2f * (qxz - qwy);

        mat4f[1, 0] = 2f * (qxy - qwz);
        mat4f[1, 1] = 1f - 2f * (qxx + qzz);
        mat4f[1, 2] = 2f * (qyz + qwx);

        mat4f[2, 0] = 2f * (qxz + qwy);
        mat4f[2, 1] = 2f * (qyz - qwx);
        mat4f[2, 2] = 1f - 2f * (qxx + qyy);

        mat4f[3, 0] = p.x;
        mat4f[3, 1] = p.y;
        mat4f[3, 2] = -p.z;

        mat4f[0, 3] = .0f;
        mat4f[1, 3] = .0f;
        mat4f[2, 3] = .0f;
        mat4f[3, 3] = 1.0f;

        return mat4f;
    }
}
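
If you only need the pose in Unity's own coordinate convention (without the handedness adjustments performed by PoseToTransformMatrix above), the pose can also be turned into a Matrix4x4 directly. A separate sketch:

using NRKernal;
using NRKernal.Preview;
using UnityEngine;

public class GrayEyeExtrinsicExample : MonoBehaviour
{
    void Start()
    {
        Pose leyePose = NRFrameExtension.GrayEyePoseFromHead.LEyePose;

        // Head-from-left-gray-camera transform, staying in Unity's convention.
        Matrix4x4 headFromLeftEye = Matrix4x4.TRS(leyePose.position, leyePose.rotation, Vector3.one);

        // Transform a point one meter in front of the camera into the head frame.
        Vector3 pointInCamera = new Vector3(0f, 0f, 1f);
        Vector3 pointInHead = headFromLeftEye.MultiplyPoint3x4(pointInCamera);
        NRDebugger.Info("Point in head frame: " + pointInHead);
    }
}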

IMU RawData

private NRIMUDataProvider m_NRIMUProvider;

void Start()
{
    m_NRIMUProvider = new NRIMUDataProvider(OnUpdate);
    m_NRIMUProvider.Start();
}

private void OnUpdate(IMUDataFrame frame)
{
    // frame is the IMU raw data pushed by the provider.
}

// Alternatively, you can poll the latest frame in "Update".
void Update()
{
    if (m_NRIMUProvider == null)
    {
        return;
    }

    var frame = m_NRIMUProvider.GetCurrentFrame();
    // frame is the IMU raw data.
}

Image Tracking

To use the image tracking feature, set Image Tracking Mode to ENABLE in the NRKernalSessionConfig config file.

using NRKernal;
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Events;

public class MarkerObserver : MonoBehaviour
{
    public Action<NRTrackableImage> onTrackingBegin;
    public Action<NRTrackableImage> onTracking;
    public Action<NRTrackableImage> onTrackingEnd;

    /// <summary>
    /// Hold all tracking images database indexes.
    /// </summary>
    private HashSet<int> m_TrackingImageIndexes = new HashSet<int>();

    private List<NRTrackableImage> m_TrackableImages = new List<NRTrackableImage>();

    void Update()
    {
        NRFrame.GetTrackables<NRTrackableImage>(m_TrackableImages, NRTrackableQueryFilter.All);
        foreach (var trackableImage in m_TrackableImages)
        {
            int databaseIndex = trackableImage.GetDataBaseIndex();

            if (trackableImage.GetTrackingState() == TrackingState.Tracking)
            {
                if (!m_TrackingImageIndexes.Contains(databaseIndex))
                {
                    m_TrackingImageIndexes.Add(databaseIndex);
                    onTrackingBegin?.Invoke(trackableImage);
                }
                else
                {
                    onTracking?.Invoke(trackableImage);
                }
            }
            else
            {
                if (m_TrackingImageIndexes.Contains(databaseIndex))
                {
                    m_TrackingImageIndexes.Remove(databaseIndex);
                    onTrackingEnd?.Invoke(trackableImage);
                }
            }
        }
    }
}
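
A minimal consumer of the observer above could anchor a prefab on the detected image. MarkerVisualizer and markerPrefab are hypothetical names for illustration, and the tracked pose is read through GetCenterPose() on the trackable:

using NRKernal;
using UnityEngine;

public class MarkerVisualizer : MonoBehaviour
{
    public MarkerObserver observer;     // the observer component defined above
    public GameObject markerPrefab;     // hypothetical prefab to place on the tracked image

    private GameObject m_Instance;

    void Start()
    {
        observer.onTrackingBegin += OnTrackingBegin;
        observer.onTracking += OnTracking;
        observer.onTrackingEnd += OnTrackingEnd;
    }

    private void OnTrackingBegin(NRTrackableImage image)
    {
        if (m_Instance == null)
        {
            m_Instance = Instantiate(markerPrefab);
        }
        m_Instance.SetActive(true);
        OnTracking(image);
    }

    private void OnTracking(NRTrackableImage image)
    {
        // Follow the pose of the tracked image while it is visible.
        Pose pose = image.GetCenterPose();
        m_Instance.transform.SetPositionAndRotation(pose.position, pose.rotation);
    }

    private void OnTrackingEnd(NRTrackableImage image)
    {
        m_Instance.SetActive(false);
    }
}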

Native API

NativeHMD

bool GetCameraIntrinsicMatrix(int eye, ref NativeMat3f CameraIntrinsicMatix)