/// <summary>
/// Gets the timestamp of the hologram that will be composited for the current frame of the compositor.
/// </summary>
/// <returns>The hologram timestamp corresponding to the current video frame, in Unity's timeline.</returns>
public float GetHologramTime()
{
    float hologramTime = Time.time;
#if UNITY_EDITOR
    if (isVideoFrameProviderInitialized)
    {
        if (poseCache.poses.Count > 0)
        {
            // Poses are available: map the composited video frame's camera time into Unity's timeline.
            hologramTime = timeSynchronizer.GetUnityTimeFromCameraTime(GetTimeFromFrame(CurrentCompositeFrame));
        }
        else
        {
            // No poses yet: quantize the current time to whole video-frame durations,
            // then back off by however many captured frames are still queued ahead of
            // the frame being composited.
            float frameDuration = GetVideoFrameDuration();
            int frameIndex = (int)(hologramTime / frameDuration);
            frameIndex -= UnityCompositorInterface.GetCaptureFrameIndex() - CurrentCompositeFrame;
            hologramTime = frameIndex * frameDuration;
        }
    }
#endif
    return hologramTime;
}
/// <summary>
/// Unity audio callback: while recording, converts the incoming float samples to
/// 16-bit PCM, buffers several callbacks' worth, and hands the bytes to the native
/// compositor stamped with the capture-frame start time.
/// </summary>
/// <param name="data">Interleaved float audio samples, nominally in [-1, 1].</param>
/// <param name="channels">Channel count of the interleaved data (unused here; the layout is passed through as-is).</param>
// This function is not/not always called on the main thread.
private void OnAudioFilterRead(float[] data, int channels)
{
    if (!UnityCompositorInterface.IsRecording())
    {
        return;
    }

    //Create new stream
    if (audioMemoryStream == null)
    {
        audioMemoryStream = new MemoryStream();
        audioStreamWriter = new BinaryWriter(audioMemoryStream);
        double audioSettingsTime = AudioSettings.dspTime; // Audio time in seconds, more accurate than Time.time
        double captureFrameTime = UnityCompositorInterface.GetCaptureFrameIndex() * UnityCompositorInterface.GetColorDuration() / 10000000.0; // Capture Frame Time in seconds
        DebugLog($"Obtained Audio Sample, AudioSettingsTime:{audioSettingsTime}, CaptureFrameTime:{captureFrameTime}");
        audioStartTime = captureFrameTime;
        numCachedAudioFrames = 0;
    }

    //Put data into stream
    for (int i = 0; i < data.Length; i++)
    {
        // Rescale float to short range for encoding. Clamp first: samples can
        // exceed [-1, 1], and an out-of-range float-to-short cast yields an
        // unspecified value in unchecked C# rather than saturating.
        float sample = data[i];
        if (sample > 1.0f)
        {
            sample = 1.0f;
        }
        else if (sample < -1.0f)
        {
            sample = -1.0f;
        }
        short audioEntry = (short)(sample * short.MaxValue);
        audioStreamWriter.Write(audioEntry);
    }

    numCachedAudioFrames++;

    //Send to compositor (buffer a few calls to reduce potential timing errors between packages)
    if (numCachedAudioFrames >= MAX_NUM_CACHED_AUDIO_FRAMES)
    {
        audioStreamWriter.Flush();
        byte[] outBytes = audioMemoryStream.ToArray();

        // Dispose the writer (which also closes the underlying MemoryStream) instead of
        // abandoning it, and clear both references so the next callback starts a fresh
        // buffer and no dangling writer points at the orphaned stream.
        audioStreamWriter.Dispose();
        audioStreamWriter = null;
        audioMemoryStream = null;

        // The Unity compositor assumes that the audioStartTime will be in capture frame sample time.
        // Above we default to capture frame time compared to AudioSettings.dspTime.
        // Any interpolation between these two time sources needs to be done in the editor before handing sample time values to the compositor.
        UnityCompositorInterface.SetAudioData(outBytes, outBytes.Length, audioStartTime);
    }
}
/// <summary>
/// Per-frame editor-side compositor update: advances the composite frame index toward the
/// latest captured frame, keeps the spectator camera transform in sync with the frame being
/// composited, lazily initializes the video frame provider, and pumps the native compositor.
/// </summary>
private void Update()
{
#if UNITY_EDITOR
    UpdateStatsElement(framerateStatistics, 1.0f / Time.deltaTime);

    int captureFrameIndex = UnityCompositorInterface.GetCaptureFrameIndex();
    int prevCompositeFrame = CurrentCompositeFrame;

    //Set our current frame towards the latest captured frame. Dont get too close to it, and dont fall too far behind
    // step < 8: already close to the capture head, hold position this frame.
    // step > 16: fallen far behind, jump forward while keeping a 16-frame cushion.
    // otherwise: advance by a single frame.
    // NOTE(review): 8/16 look like tuned buffering margins — confirm against the capture pipeline's queue depth.
    int step = (captureFrameIndex - CurrentCompositeFrame);
    if (step < 8)
    {
        step = 0;
    }
    else if (step > 16)
    {
        step -= 16;
    }
    else
    {
        step = 1;
    }
    CurrentCompositeFrame += step;
    UnityCompositorInterface.SetCompositeFrameIndex(CurrentCompositeFrame);

    #region Spectator View Transform
    if (IsCalibrationDataLoaded && transform.parent != null)
    {
        //Update time synchronizer with the newest capture-frame time and latest received pose.
        {
            float captureTime = GetTimeFromFrame(captureFrameIndex);
            SpectatorViewPoseCache.PoseData poseData = poseCache.GetLatestPose();
            if (poseData != null)
            {
                timeSynchronizer.Update(UnityCompositorInterface.GetCaptureFrameIndex(), captureTime, poseData.Index, poseData.TimeStamp);
            }
        }

        if (overrideCameraPose)
        {
            // Manual override: place the camera rig at a user-specified pose.
            transform.parent.localPosition = overrideCameraPosition;
            transform.parent.localRotation = overrideCameraRotation;
        }
        else
        {
            //Set camera transform for the currently composited frame
            // (prevCompositeFrame is the index from before this frame's step was applied).
            float cameraTime = GetTimeFromFrame(prevCompositeFrame);
            float poseTime = timeSynchronizer.GetPoseTimeFromCameraTime(cameraTime);
            Quaternion currRot;
            Vector3 currPos;
            poseTime += VideoTimestampToHolographicTimestampOffset;
            if (captureFrameIndex <= 0) //No frames captured yet, lets use the very latest camera transform
            {
                poseTime = float.MaxValue;
            }
            poseCache.GetPose(poseTime, out currPos, out currRot);
            transform.parent.localPosition = currPos;
            transform.parent.localRotation = currRot;
        }
    }
    #endregion

    if (!isVideoFrameProviderInitialized)
    {
        isVideoFrameProviderInitialized = UnityCompositorInterface.InitializeFrameProviderOnDevice(CaptureDevice);
        if (isVideoFrameProviderInitialized)
        {
            // Provider just came online: restart compositing state from scratch.
            CurrentCompositeFrame = 0;
            timeSynchronizer.Reset();
            poseCache.Reset();
        }
    }

    UnityCompositorInterface.UpdateCompositor();
#endif
}
/// <summary>
/// Per-frame editor-side compositor update: advances the composite frame index toward the
/// latest captured frame, keeps the spectator camera transform in sync with the frame being
/// composited, lazily initializes the capture/output frame providers (checking device support
/// first), and pumps the native compositor.
/// </summary>
private void Update()
{
#if UNITY_EDITOR
    UpdateStatsElement(framerateStatistics, 1.0f / Time.deltaTime);

    int captureFrameIndex = UnityCompositorInterface.GetCaptureFrameIndex();
    int prevCompositeFrame = CurrentCompositeFrame;

    //Set our current frame towards the latest captured frame. Dont get too close to it, and dont fall too far behind
    // step < 8: already close to the capture head, hold position this frame.
    // step > 16: fallen far behind, jump forward while keeping a 16-frame cushion.
    // otherwise: advance by a single frame.
    // NOTE(review): 8/16 look like tuned buffering margins — confirm against the capture pipeline's queue depth.
    int step = (captureFrameIndex - CurrentCompositeFrame);
    if (step < 8)
    {
        step = 0;
    }
    else if (step > 16)
    {
        step -= 16;
    }
    else
    {
        step = 1;
    }
    CurrentCompositeFrame += step;
    UnityCompositorInterface.SetCompositeFrameIndex(CurrentCompositeFrame);

    #region Spectator View Transform
    if (IsCalibrationDataLoaded && transform.parent != null)
    {
        //Update time synchronizer with the newest capture-frame time and latest received pose.
        {
            float captureTime = GetTimeFromFrame(captureFrameIndex);
            SpectatorViewPoseCache.PoseData poseData = poseCache.GetLatestPose();
            if (poseData != null)
            {
                timeSynchronizer.Update(UnityCompositorInterface.GetCaptureFrameIndex(), captureTime, poseData.Index, poseData.TimeStamp);
            }
        }

        if (overrideCameraPose)
        {
            // Manual override: place the camera rig at a user-specified pose.
            transform.parent.localPosition = overrideCameraPosition;
            transform.parent.localRotation = overrideCameraRotation;
        }
        else
        {
            //Set camera transform for the currently composited frame
            // (prevCompositeFrame is the index from before this frame's step was applied).
            float cameraTime = GetTimeFromFrame(prevCompositeFrame);
            float poseTime = timeSynchronizer.GetPoseTimeFromCameraTime(cameraTime);
            Quaternion currRot;
            Vector3 currPos;
            poseTime += VideoTimestampToHolographicTimestampOffset;
            if (captureFrameIndex <= 0) //No frames captured yet, lets use the very latest camera transform
            {
                poseTime = float.MaxValue;
            }
            poseCache.GetPose(poseTime, out currPos, out currRot);
            transform.parent.localPosition = currPos;
            transform.parent.localRotation = currRot;
        }
    }
    #endregion

    if (!isVideoFrameProviderInitialized)
    {
        // Only attempt initialization when the plugin build supports both selected devices.
        if (UnityCompositorInterface.IsFrameProviderSupported(CaptureDevice) && UnityCompositorInterface.IsOutputFrameProviderSupported(OutputDevice))
        {
            isVideoFrameProviderInitialized = UnityCompositorInterface.InitializeFrameProviderOnDevice(CaptureDevice, OutputDevice);
            if (isVideoFrameProviderInitialized)
            {
                // Provider just came online: restart compositing state from scratch.
                CurrentCompositeFrame = 0;
                timeSynchronizer.Reset();
                poseCache.Reset();
                if (UnityCompositorInterface.IsCameraCalibrationInformationAvailable())
                {
                    // Calibration data is embedded in the device: broadcast locally and
                    // connect the observer to this machine over loopback.
                    stationaryCameraBroadcaster.SetActive(true);
                    HolographicCameraObserver.Instance.ConnectTo("127.0.0.1");
                }
            }
        }
        else
        {
            Debug.LogWarning($"The current device selection, Capture: {CaptureDevice}, Output: {OutputDevice}, is not supported by your build of SpectatorView.Compositor.UnityPlugin.dll.");
        }
    }

    UnityCompositorInterface.UpdateCompositor();
#endif
}