public bool AddFrame(Texture2D texture, MediaTime time)
{
    CoreMediaEncoderLog("AddFrame(tex, time)");
    if (Encoder.IsCreated)
    {
        return Encoder.Target.AddFrame(texture, time);
    }
    return false;
}
protected override void WriteFrame(AsyncGPUReadbackRequest r, double timestamp)
{
    double currentTime = ((AudioInputBase)m_Inputs[1]).audioTime;

    if (currentTime - timestamp > 2)
    {
        Debug.Log($"(MovieRecorder) Received heavily delayed frame. Requested at [{timestamp}]. Received at [{currentTime}].");
    }

    var format = Settings.GetCurrentEncoder().GetTextureFormat(Settings);

    if (Settings.FrameRatePlayback == FrameRatePlayback.Variable)
    {
        // The closest media frame to the actual frame's timestamp.
        // Convert m_FrameRate using floating-point division. The overloaded cast-to-double operator uses integer division.
        MediaTime time = new MediaTime
        {
            count = (long)Math.Round(timestamp * m_FrameRate.numerator / m_FrameRate.denominator),
            rate  = m_FrameRate
        };

        if (time.count > lastFrame) // If two render frames fall on the same encoding frame, ignore.
        {
            Settings.m_EncoderManager.AddFrame(m_EncoderHandle, r.width, r.height, 0, format, r.GetData<byte>(), time);
        }

        lastFrame = time.count;
    }
    else
    {
        Settings.m_EncoderManager.AddFrame(m_EncoderHandle, r.width, r.height, 0, format, r.GetData<byte>());
    }

    WarnOfConcurrentRecorders();
}
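In the variable frame rate path, the GPU readback timestamp (in seconds) is mapped to the nearest frame index in the encoder's rational time base, and a frame is only written when that index advances past the last one submitted. The mapping itself is just the rounding shown below; TimestampToFrameCount is a hypothetical helper name, not part of the Recorder code.

// Nearest frame index for a timestamp in seconds, given a numerator/denominator frame rate.
// Multiplying before dividing in double precision avoids the integer division mentioned above.
static long TimestampToFrameCount(double timestampSeconds, uint numerator, uint denominator)
{
    return (long)System.Math.Round(timestampSeconds * numerator / denominator);
}

// Example: 0.5 s at NTSC 30000/1001 fps -> round(0.5 * 30000 / 1001) = round(14.985...) = 15.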
extern private static bool Internal_MediaDecoder_SetPosition(IntPtr decoder, MediaTime time);
extern private static bool Internal_MediaDecoder_GetNextFrame(IntPtr decoder, Texture2D texture, out MediaTime time);

public bool SetPosition(MediaTime time)
{
    return Internal_MediaDecoder_SetPosition(m_Ptr, time);
}

public bool GetNextFrame(Texture2D tex, out MediaTime time)
{
    return Internal_MediaDecoder_GetNextFrame(m_Ptr, tex, out time);
}
Example #7
public bool AddFrame(MediaEncoderHandle handle, Texture2D texture, MediaTime time)
{
    DisposeCheck(handle);
    return m_Encoders[handle.m_VersionHandle.Index].m_encoderInterface.AddFrame(texture, time);
}
Example #8
public bool AddFrame(MediaEncoderHandle handle, int width, int height, int rowBytes, TextureFormat format, NativeArray<byte> data, MediaTime time)
{
    DisposeCheck(handle);
    return m_Encoders[handle.m_VersionHandle.Index].m_encoderInterface.AddFrame(width, height, rowBytes, format, data, time);
}
public bool AddFrame(int width, int height, int rowBytes, TextureFormat format, NativeArray<byte> data, MediaTime time)
{
    CoreMediaEncoderLog("AddFrame(w,h,r,f,d,t)");
    if (Encoder.IsCreated)
    {
        return Encoder.Target.AddFrame(width, height, rowBytes, format, data, time);
    }
    return false;
}
public bool SetPosition(MediaTime time)
{
    ThrowIfDisposed();
    return Internal_MediaDecoder_SetPosition(m_Ptr, time);
}

extern private static bool Internal_MediaDecoder_GetNextFrame(IntPtr decoder, [NotNull("NullExceptionObject")] Texture2D texture, out MediaTime time);