public override void RecordFrame(RecordingSession session) { if (m_Inputs.Count != 1) { throw new Exception("Unsupported number of sources"); } var input = (BaseRenderTextureInput)m_Inputs[0]; var frame = input.outputRT; if (!m_ctx) { var settings = m_Settings.m_GifEncoderSettings; settings.width = frame.width; settings.height = frame.height; m_ctx = fcAPI.fcGifCreateContext(ref settings); var fileName = m_Settings.m_BaseFileName.BuildFileName(session, recordedFramesCount, frame.width, frame.height, "gif"); var path = Path.Combine(m_Settings.m_DestinationPath.GetFullPath(), fileName); m_stream = fcAPI.fcCreateFileStream(path); fcAPI.fcGifAddOutputStream(m_ctx, m_stream); } fcAPI.fcLock(frame, TextureFormat.RGB24, (data, fmt) => { fcAPI.fcGifAddFramePixels(m_ctx, data, fmt, session.RecorderTime); }); }
public override void RecordFrame(RecordingSession session) { if (m_Inputs.Count != 1) { throw new Exception("Unsupported number of sources"); } var input = (BaseRenderTextureInput)m_Inputs[0]; var frame = input.outputRT; if (!m_ctx) { var settings = m_Settings.m_WebmEncoderSettings; settings.video = true; settings.audio = false; settings.videoWidth = frame.width; settings.videoHeight = frame.height; settings.videoTargetFramerate = 60; // ? m_ctx = fcAPI.fcWebMCreateContext(ref settings); m_stream = fcAPI.fcCreateFileStream(BuildOutputPath(session)); fcAPI.fcWebMAddOutputStream(m_ctx, m_stream); } fcAPI.fcLock(frame, TextureFormat.RGB24, (data, fmt) => { fcAPI.fcWebMAddVideoFramePixels(m_ctx, data, fmt, session.m_CurrentFrameStartTS); }); }
public override void RecordFrame(RecordingSession session) { if (m_Inputs.Count != 1) { throw new Exception("Unsupported number of sources"); } var input = (BaseRenderTextureInput)m_Inputs[0]; var frame = input.outputRT; if (!m_ctx) { var gifSettings = m_Settings.gifEncoderSettings; gifSettings.width = frame.width; gifSettings.height = frame.height; m_ctx = fcAPI.fcGifCreateContext(ref gifSettings); var path = m_Settings.fileNameGenerator.BuildAbsolutePath(session); m_stream = fcAPI.fcCreateFileStream(path); fcAPI.fcGifAddOutputStream(m_ctx, m_stream); } fcAPI.fcLock(frame, TextureFormat.RGB24, (data, fmt) => { fcAPI.fcGifAddFramePixels(m_ctx, data, fmt, session.recorderTime); }); }
public override void RecordFrame(RecordingSession session) { if (m_Inputs.Count != 1) { throw new Exception("Unsupported number of sources"); } var input = (BaseRenderTextureInput)m_Inputs[0]; var frame = input.outputRT; if (!m_ctx) { var settings = m_Settings.m_WebmEncoderSettings; settings.video = true; settings.audio = false; settings.videoWidth = frame.width; settings.videoHeight = frame.height; if (m_Settings.m_AutoSelectBR) { settings.videoTargetBitrate = (int)(((frame.width * frame.height / 1000.0) / 245 + 1.16) * (settings.videoTargetFramerate / 48.0 + 0.5) * 1000000); } settings.videoTargetFramerate = (int)Math.Ceiling(m_Settings.m_FrameRate); m_ctx = fcAPI.fcWebMCreateContext(ref settings); var fileName = m_Settings.m_BaseFileName.BuildFileName(session, recordedFramesCount, settings.videoWidth, settings.videoHeight, "webm"); var path = Path.Combine(m_Settings.m_DestinationPath.GetFullPath(), fileName); m_stream = fcAPI.fcCreateFileStream(path); fcAPI.fcWebMAddOutputStream(m_ctx, m_stream); } fcAPI.fcLock(frame, TextureFormat.RGB24, (data, fmt) => { fcAPI.fcWebMAddVideoFramePixels(m_ctx, data, fmt, session.recorderTime); }); }