/// <summary>
/// Check whether the device supports GPU encoding.
/// </summary>
/// <returns>True if the GPU supports video encoding at the requested resolution.</returns>
public bool IsSupported()
{
    ResolutionPresetSettings();

    // Check GPU capability for video encoding
    EncoderStatus status = GPUEncoder_GetCaptureCapability();
    if (status != EncoderStatus.OK)
    {
        return false;
    }

    // MAX video encoding resolution
    // AMD:    4096 x 2048
    // NVIDIA: 4096 x 4096
    if (GraphicsCard.AMD == GPUEncoder_CheckGPUManufacturer() &&
        (frameWidth > 4096 || frameHeight > 2048))
    {
        return false;
    }
    else if (GraphicsCard.NVIDIA == GPUEncoder_CheckGPUManufacturer() &&
             (frameWidth > 4096 || frameHeight > 4096))
    {
        return false;
    }
    else if (GraphicsCard.UNSUPPORTED_DEVICE == GPUEncoder_CheckGPUManufacturer())
    {
        return false;
    }

    return true;
}
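// For context, a minimal sketch of how this check might gate capture from a
// host script. The encoder field name and the fallback behavior are
// illustrative assumptions, not part of the API above.
if (!gpuEncoder.IsSupported())
{
    // Fall back to a CPU encoder or surface an error to the user.
    Debug.LogWarning("GPU encoding is not supported on this device.");
    return;
}
gpuEncoder.StartCapture();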
public async Task<EncoderStatus> Start(string arguments, CancellationTokenSourceWrapper tokenSource, Action<string> processedData)
{
    try
    {
        EncoderStatus encoderStatus;
        await _process.Start(arguments, processedData, tokenSource.Token);

        if (_tempFileManager.ExistsEncodingFile())
        {
            encoderStatus = CreateEncoderStatus(EncoderState.Completed, String.Empty);
        }
        else
        {
            tokenSource.Cancel();
            encoderStatus = CreateEncoderStatus(EncoderState.Failed, "Output file was not created.");
        }

        return encoderStatus;
    }
    catch (OperationCanceledException ex)
    {
        tokenSource.Cancel();
        return CreateEncoderStatus(EncoderState.Cancelled, ex.Message);
    }
    catch (Exception ex)
    {
        tokenSource.Cancel();
        return CreateEncoderStatus(EncoderState.Failed, ex.Message);
    }
}
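// A hedged usage sketch (inside an async caller). The ffmpeg arguments,
// the wrapper construction, and the log action are illustrative
// assumptions, not taken from the project above.
var tokenSource = new CancellationTokenSourceWrapper();
EncoderStatus result = await ffmpeg.Start(
    "-i input.mp4 -c:v libx264 output.mp4",
    tokenSource,
    line => Console.WriteLine(line));

if (result.EncoderState != EncoderState.Completed)
{
    Console.WriteLine($"Encoding did not complete: {result.ErrorMessage}");
}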
public void Initialize()
{
    _pipelineMediator = new Mock<IStepMediator>();
    _webClient = new Mock<IEncodeWebClient>();
    _ffmpegProcess = new Mock<IFfmpeg>();
    _watchDogTimer = new Mock<IWatchDogTimer>();
    _dataReceiveHandler = new Mock<IDataReceivedHandler>();
    _encodeStringBuilder = new Mock<IEncodeStringBuilder>();
    _tokenSource = new Mock<CancellationTokenSourceWrapper>();

    _pipelineStep = new EncodeStep(_pipelineMediator.Object,
                                   _webClient.Object,
                                   _ffmpegProcess.Object,
                                   _watchDogTimer.Object);

    _encoderStatus = new EncoderStatus
    {
        EncoderState = EncoderState.Failed,
        ErrorMessage = "errorMessage"
    };

    _stepData = new CreatorStepData
    {
        EncoderState = EncoderState.Completed,
        EncodeStringBuilder = _encodeStringBuilder.Object,
        DataReceivedHandler = _dataReceiveHandler.Object
    };

    _encodeStringBuilder.Setup(m => m.GetContentType()).Returns(ContentType);
    _encodeStringBuilder.Setup(m => m.GetFfmpegArguments()).Returns(Arguments);
    _ffmpegProcess
        .Setup(m => m.Start(Arguments, _tokenSource.Object, _dataReceiveHandler.Object.ProcessData))
        .Returns(() =>
        {
            var tcs = new TaskCompletionSource<EncoderStatus>();
            tcs.SetResult(_encoderStatus);
            return tcs.Task;
        });

    _pipelineStep.SetData(_stepData);
}
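// A sketch of a test that could follow this setup. The attribute style
// (MSTest) and the expected mediator interaction are assumptions inferred
// from the mocks configured above, not the project's actual tests.
[TestMethod]
public void Execute_WhenFfmpegReportsFailure_SendsFailedStepData()
{
    _pipelineStep.Execute(_tokenSource.Object);

    // The mocked IFfmpeg returns a Failed status, so the step is expected
    // to forward Failed step data to the mediator exactly once.
    _pipelineMediator.Verify(
        m => m.Send(
            It.Is<EncodeStepData>(d => d.EncoderState == EncoderState.Failed),
            _pipelineStep),
        Times.Once);
}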
/// <summary>
/// Check whether the device supports GPU encoding.
/// </summary>
/// <returns>True if the GPU supports video encoding at the requested resolution.</returns>
public bool IsSupported()
{
    // NVIDIA RTX cards are explicitly rejected by this build.
    if (SystemInfo.graphicsDeviceVendor == "NVIDIA" &&
        SystemInfo.graphicsDeviceName.Contains("RTX"))
    {
        return false;
    }

    // Check GPU capability for video encoding
    EncoderStatus status = GPUEncoder_GetCaptureCapability();
    if (status != EncoderStatus.OK)
    {
        return false;
    }

    // MAX video encoding resolution
    // AMD:    4096 x 2048
    // NVIDIA: 4096 x 4096
    if (GraphicsCard.AMD == GPUEncoder_CheckGPUManufacturer() &&
        (frameWidth > 4096 || frameHeight > 2048))
    {
        return false;
    }
    else if (GraphicsCard.NVIDIA == GPUEncoder_CheckGPUManufacturer() &&
             (frameWidth > 4096 || frameHeight > 4096))
    {
        return false;
    }
    //else if (GraphicsCard.UNSUPPORTED_DEVICE == GPUEncoder_CheckGPUManufacturer())
    //    return false;

    return true;
}
public static void CompressWithoutTransparency(Surface surface,
                                               EncoderOptions options,
                                               AvifProgressCallback avifProgress,
                                               ref uint progressDone,
                                               uint progressTotal,
                                               CICPColorData colorInfo,
                                               out CompressedAV1Image color)
{
    BitmapData bitmapData = new BitmapData
    {
        scan0 = surface.Scan0.Pointer,
        width = (uint)surface.Width,
        height = (uint)surface.Height,
        stride = (uint)surface.Stride
    };

    ProgressContext progressContext = new ProgressContext(avifProgress, progressDone, progressTotal);

    using (CompressedAV1DataAllocator allocator = new CompressedAV1DataAllocator(1))
    {
        IntPtr colorImage;
        CompressedAV1OutputAlloc outputAllocDelegate = new CompressedAV1OutputAlloc(allocator.Allocate);
        EncoderStatus status;

        // Dispatch to the native library that matches the process architecture.
        if (IntPtr.Size == 8)
        {
            status = AvifNative_64.CompressImage(ref bitmapData,
                                                 options,
                                                 progressContext,
                                                 ref colorInfo,
                                                 outputAllocDelegate,
                                                 out colorImage,
                                                 IntPtr.Zero);
        }
        else
        {
            status = AvifNative_86.CompressImage(ref bitmapData,
                                                 options,
                                                 progressContext,
                                                 ref colorInfo,
                                                 outputAllocDelegate,
                                                 out colorImage,
                                                 IntPtr.Zero);
        }

        // Keep the delegate alive until the native call has returned.
        GC.KeepAlive(outputAllocDelegate);

        if (status != EncoderStatus.Ok)
        {
            HandleError(status, allocator.ExceptionInfo);
        }

        color = new CompressedAV1Image(allocator.GetCompressedAV1Data(colorImage),
                                       surface.Width,
                                       surface.Height,
                                       options.yuvFormat);
    }

    progressDone = progressContext.progressDone;
    GC.KeepAlive(avifProgress);
}
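// The 32/64-bit dispatch above is a common pattern for dual-architecture
// native interop. A minimal self-contained sketch of the same idea; the
// DLL names and the Add entry point are hypothetical.
using System;
using System.Runtime.InteropServices;

internal static class NativeMath
{
    // The entry point name is the same in both images; only the DLL
    // differs by architecture.
    [DllImport("native_64.dll", EntryPoint = "Add")]
    private static extern int Add64(int a, int b);

    [DllImport("native_86.dll", EntryPoint = "Add")]
    private static extern int Add86(int a, int b);

    // Route to the matching native image at run time, mirroring the
    // AvifNative_64 / AvifNative_86 split above.
    public static int Add(int a, int b)
    {
        return IntPtr.Size == 8 ? Add64(a, b) : Add86(a, b);
    }
}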
private EncoderStatus StartFfmpeg(CancellationTokenSourceWrapper tokenSource, string arguments)
{
    // The watchdog is given the token source so a hanging process can be
    // detected (see IsOverflowing) and cancelled.
    _watchDogTimer.Start(tokenSource);

    // Block synchronously (.Result) until the ffmpeg task completes.
    EncoderStatus encoderStatus = _ffmpeg.Start(arguments, tokenSource, StepData.DataReceivedHandler.ProcessData).Result;

    _watchDogTimer.Stop();
    return encoderStatus;
}
public override void Execute(CancellationTokenSourceWrapper tokenSource)
{
    string contentType = StepData.EncodeStringBuilder.GetContentType();
    string arguments = StepData.EncodeStringBuilder.GetFfmpegArguments();

    RegisterProcessCallback();

    EncoderStatus encoderStatus = StartFfmpeg(tokenSource, arguments);
    EncodeStepData nextStepData = CreateStepData(encoderStatus, contentType);

    SetStatus();
    Mediator.Send(nextStepData, this);
}
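// Execute hands its result to a mediator rather than to the next step
// directly. These declarations are assumed shapes, inferred from the calls
// above and from CreateStepData below; they are not the project's actual
// definitions.
public interface IStepMediator
{
    // Forwards the finished step's data to whichever step comes next.
    void Send(EncodeStepData data, object sender);
}

public class EncodeStepData
{
    public EncoderState EncoderState { get; set; }
    public string ErrorMessage { get; set; }
    public string ContentType { get; set; }
}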
private EncodeStepData CreateStepData(EncoderStatus encoderStatus, string contentType)
{
    if (_watchDogTimer.IsOverflowing)
    {
        return new EncodeStepData
        {
            EncoderState = EncoderState.Hanging,
            ErrorMessage = "Ffmpeg is Hanging"
        };
    }

    return new EncodeStepData
    {
        EncoderState = encoderStatus.EncoderState,
        ErrorMessage = encoderStatus.ErrorMessage,
        ContentType = contentType
    };
}
private static void HandleError(EncoderStatus status, ExceptionDispatchInfo exceptionDispatchInfo)
{
    if (exceptionDispatchInfo != null)
    {
        // Rethrow an exception captured on another code path, preserving
        // its original stack trace.
        exceptionDispatchInfo.Throw();
    }
    else
    {
        switch (status)
        {
            case EncoderStatus.Ok:
                break;
            case EncoderStatus.NullParameter:
                throw new FormatException("A required encoder parameter was null.");
            case EncoderStatus.OutOfMemory:
                throw new OutOfMemoryException();
            case EncoderStatus.UnknownYUVFormat:
                throw new FormatException("The YUV format is not supported by the encoder.");
            case EncoderStatus.CodecInitFailed:
                throw new FormatException("Unable to initialize AV1 encoder.");
            case EncoderStatus.EncodeFailed:
                throw new FormatException("The AV1 encode failed.");
            case EncoderStatus.UserCancelled:
                throw new OperationCanceledException();
            default:
                throw new FormatException("An unknown error occurred when encoding the image.");
        }
    }
}
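// HandleError rethrows via ExceptionDispatchInfo so an exception captured
// elsewhere (here, in the allocator callback) keeps its original stack
// trace. A minimal self-contained sketch of that capture/rethrow pattern,
// independent of the encoder types:
using System;
using System.Runtime.ExceptionServices;

ExceptionDispatchInfo captured = null;

try
{
    throw new InvalidOperationException("failure inside a callback");
}
catch (Exception ex)
{
    // Capture instead of rethrowing immediately, e.g. because the
    // exception must cross a native boundary first.
    captured = ExceptionDispatchInfo.Capture(ex);
}

// Later, back on the managed side: rethrow with the original stack trace.
captured?.Throw();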
/// <summary>
/// Configuration for video (VOD) recording.
/// </summary>
private bool StartVodCapture()
{
    EncoderStatus status = EncoderStatus.OK;

    // Use the custom file name when one was set; otherwise fall back to a
    // timestamped, randomized file name in the save folder.
    if (customFileName != null)
    {
        videoSavePath = string.Format("{0}{1}.mp4", saveFolderFullPath, customFileName);
    }
    else
    {
        videoSavePath = string.Format("{0}capture_{1}x{2}_{3}_{4}.mp4",
                                      saveFolderFullPath,
                                      outputFrameWidth,
                                      outputFrameHeight,
                                      Utils.GetTimeString(),
                                      Utils.GetRandomString(5));
    }

    // Video encoding configuration settings
    status = GPUEncoder_SetVodCaptureSettings(
        width: outputFrameWidth,
        height: outputFrameHeight,
        frameRate: frameRate,
        bitRate: bitrate * 1000, // in bps
        fullSavePath: videoSavePath,
        is360: captureMode == CaptureMode._360,
        verticalFlip: captureSource == CaptureSource.SCREEN,
        horizontalFlip: false,
        projectionType: projectionType,
        stereoMode: StereoMode.NONE);
    if (status != EncoderStatus.OK)
    {
        OnError(EncoderErrorCode.VOD_FAILED_TO_START, status);
        return false;
    }

    // Pick attached audio device resources for audio capture
    status = GPUEncoder_SetMicAndAudioRenderDeviceByVRDeviceType(attachedHMD);
    if (status != EncoderStatus.OK)
    {
        OnError(EncoderErrorCode.VOD_FAILED_TO_START, status);
        return false;
    }

    // Enable audio output capture (e.g. speakers)
    status = GPUEncoder_SetAudioEnabledDuringCapture(captureAudio);
    if (status != EncoderStatus.OK)
    {
        OnError(EncoderErrorCode.VOD_FAILED_TO_START, status);
        return false;
    }

    // Enable audio input capture (e.g. microphone)
    status = GPUEncoder_SetMicEnabledDuringCapture(captureMicrophone);
    if (status != EncoderStatus.OK)
    {
        OnError(EncoderErrorCode.VOD_FAILED_TO_START, status);
        return false;
    }

    // Start VOD capture
    status = GPUEncoder_StartVodCapture();
    if (status != EncoderStatus.OK)
    {
        OnError(EncoderErrorCode.VOD_FAILED_TO_START, status);
        return false;
    }

    OnEncoderStarted(EncoderCaptureType.VOD);
    return true;
}
/// <summary>
/// Configuration for live streaming.
/// </summary>
private bool StartLiveStreaming()
{
    EncoderStatus status = EncoderStatus.OK;

    if (string.IsNullOrEmpty(liveStreamUrl))
    {
        OnError(EncoderErrorCode.INVALID_STREAM_URI, null);
        return false;
    }

    // Video encoding and live-stream configuration settings
    status = GPUEncoder_SetLiveCaptureSettings(
        width: outputFrameWidth,
        height: outputFrameHeight,
        frameRate: frameRate,
        bitRate: bitrate * 1000, // in bps
        flushCycleStart: encodingInitialFlushCycle,
        flushCycleAfter: encodingSecondaryFlushCycle,
        streamUrl: liveStreamUrl,
        is360: captureMode == CaptureMode._360,
        verticalFlip: captureSource == CaptureSource.SCREEN,
        horizontalFlip: false,
        projectionType: projectionType,
        stereoMode: StereoMode.NONE);
    if (status != EncoderStatus.OK)
    {
        OnError(EncoderErrorCode.LIVE_FAILED_TO_START, status);
        return false;
    }

    // Pick attached audio device resources for audio capture
    status = GPUEncoder_SetMicAndAudioRenderDeviceByVRDeviceType(attachedHMD);
    if (status != EncoderStatus.OK)
    {
        OnError(EncoderErrorCode.LIVE_FAILED_TO_START, status);
        return false;
    }

    // Enable audio output capture (e.g. speakers)
    status = GPUEncoder_SetAudioEnabledDuringCapture(captureAudio);
    if (status != EncoderStatus.OK)
    {
        OnError(EncoderErrorCode.LIVE_FAILED_TO_START, status);
        return false;
    }

    // Enable audio input capture (e.g. microphone)
    status = GPUEncoder_SetMicEnabledDuringCapture(captureMicrophone);
    if (status != EncoderStatus.OK)
    {
        OnError(EncoderErrorCode.LIVE_FAILED_TO_START, status);
        return false;
    }

    // Start live capture
    status = GPUEncoder_StartLiveCapture();
    if (status != EncoderStatus.OK)
    {
        OnError(EncoderErrorCode.LIVE_FAILED_TO_START, status);
        return false;
    }

    OnEncoderStarted(EncoderCaptureType.LIVE);
    return true;
}
/// <summary>
/// Start video capture.
/// </summary>
public bool StartCapture()
{
    EncoderStatus status = EncoderStatus.OK;

    // Check camera setup
    if (captureSource == CaptureSource.CAMERA)
    {
        if (captureMode == CaptureMode.REGULAR && !regularCamera)
        {
            status = EncoderStatus.CAMERA_SET_FAILED;
        }
        else if (captureMode == CaptureMode._360 && !regularCamera)
        {
            // NOTE: this branch also checks regularCamera; if 360 capture
            // uses a dedicated cubemap camera, that is likely the field
            // that should be validated here.
            status = EncoderStatus.CAMERA_SET_FAILED;
        }

        if (stereoMode != StereoMode.NONE && !stereoCamera)
        {
            status = EncoderStatus.CAMERA_SET_FAILED;
        }
    }

    if (status != EncoderStatus.OK)
    {
        OnError(EncoderErrorCode.CAMERA_SET_FAILED, status);
        return false;
    }

    if (captureStarted || GPUEncoder_GetEncoderStatus() != EncoderStatus.OK)
    {
        OnError(EncoderErrorCode.CAPTURE_ALREADY_IN_PROGRESS, null);
        return false;
    }

    if (captureMode != CaptureMode.REGULAR && captureSource == CaptureSource.RENDERTEXTURE)
    {
        Debug.LogFormat(LOG_FORMAT, "CaptureMode should be set to REGULAR for render texture capture");
        captureMode = CaptureMode.REGULAR;
    }

    if (captureMode == CaptureMode._360 && projectionType == ProjectionType.NONE)
    {
        Debug.LogFormat(LOG_FORMAT, "ProjectionType should be set for 360 capture; setting type to EQUIRECT to generate the texture properly");
        projectionType = ProjectionType.EQUIRECT;
    }
    else if (captureMode == CaptureMode.REGULAR)
    {
        // Non-360 capture doesn't have a projection type
        projectionType = ProjectionType.NONE;
    }

    // Check GPU capability for video encoding
    status = GPUEncoder_GetCaptureCapability();
    if (status != EncoderStatus.OK)
    {
        OnError(EncoderErrorCode.VOD_FAILED_TO_START, status);
        return false;
    }

    // MAX video encoding resolution
    // AMD:    4096 x 2048
    // NVIDIA: 4096 x 4096
    if (GraphicsCard.AMD == GPUEncoder_CheckGPUManufacturer() &&
        (frameWidth > 4096 || frameHeight > 2048))
    {
        Debug.LogFormat(LOG_FORMAT, "Max video encoding resolution on AMD is 4096 x 2048");
        OnError(EncoderErrorCode.UNSUPPORTED_SPEC, null);
        return false;
    }
    else if (GraphicsCard.NVIDIA == GPUEncoder_CheckGPUManufacturer() &&
             (frameWidth > 4096 || frameHeight > 4096))
    {
        Debug.LogFormat(LOG_FORMAT, "Max video encoding resolution on NVIDIA is 4096 x 4096");
        OnError(EncoderErrorCode.UNSUPPORTED_SPEC, null);
        return false;
    }
    //else if (GraphicsCard.UNSUPPORTED_DEVICE == GPUEncoder_CheckGPUManufacturer())
    //{
    //    Debug.LogFormat(LOG_FORMAT,
    //        "Unsupported GPU device, or GPUEncoder_GetCaptureCapability was not called to check device support");
    //    OnError(EncoderErrorCode.UNSUPPORTED_SPEC, null);
    //    return false;
    //}

    // Create RenderTextures which will be used for video encoding
    CreateRenderTextures();

    // Create textures for stereo
    CreateStereoTextures();

    if (captureType == CaptureType.LIVE)
    {
        // Start live streaming video capture
        return StartLiveStreaming();
    }
    else
    {
        // Start VOD video capture
        return StartVodCapture();
    }
}
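// A minimal sketch of how a host script might drive this capture API from
// Unity. The component name (GPUCapture), the serialized field, and the
// hotkey are illustrative assumptions.
using UnityEngine;

public class CaptureDriver : MonoBehaviour
{
    [SerializeField] private GPUCapture capture;

    private void Update()
    {
        // Start capture with a hotkey; StartCapture returns false and
        // reports through OnError when the device or settings are rejected.
        if (Input.GetKeyDown(KeyCode.F9) && !capture.StartCapture())
        {
            Debug.Log("GPU capture could not start; see OnError for details.");
        }
    }
}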
/// <summary>
/// Start video capture.
/// </summary>
public bool StartCapture()
{
    EncoderStatus status = EncoderStatus.OK;

    //regularCamera.enabled = false;
    //cubemapCamera.enabled = false;

    if (captureStarted || GPUEncoder_GetEncoderStatus() != EncoderStatus.OK)
    {
        OnError(EncoderErrorCode.CAPTURE_ALREADY_IN_PROGRESS, null);
        return false;
    }

    if (captureMode != CaptureMode.REGULAR && inputTexture != null)
    {
        Debug.LogFormat(LOG_FORMAT, "CaptureMode should be set to REGULAR for a user-supplied render texture");
        captureMode = CaptureMode.REGULAR;
    }

    if (captureMode == CaptureMode._360 && projectionType == ProjectionType.NONE)
    {
        Debug.LogFormat(LOG_FORMAT, "ProjectionType should be set for 360 capture; setting type to EQUIRECT to generate the texture properly");
        projectionType = ProjectionType.EQUIRECT;
    }
    else if (captureMode == CaptureMode.REGULAR)
    {
        // Non-360 capture doesn't have a projection type
        projectionType = ProjectionType.NONE;
    }

    if (antiAliasing != 1)
    {
        Debug.LogFormat(LOG_FORMAT, "GPU encoding does not support anti-aliasing settings; resetting anti-aliasing to 1");
        antiAliasing = 1;
    }

    // Resolution preset settings
    ResolutionPresetSettings();

    // Check GPU capability for video encoding
    status = GPUEncoder_GetCaptureCapability();
    if (status != EncoderStatus.OK)
    {
        OnError(EncoderErrorCode.VOD_FAILED_TO_START, status);
        return false;
    }

    // MAX video encoding resolution
    // AMD:    4096 x 2048
    // NVIDIA: 4096 x 4096
    if (GraphicsCard.AMD == GPUEncoder_CheckGPUManufacturer() &&
        (frameWidth > 4096 || frameHeight > 2048))
    {
        Debug.LogFormat(LOG_FORMAT, "Max video encoding resolution on AMD is 4096 x 2048");
        OnError(EncoderErrorCode.UNSUPPORTED_SPEC, null);
        return false;
    }
    else if (GraphicsCard.NVIDIA == GPUEncoder_CheckGPUManufacturer() &&
             (frameWidth > 4096 || frameHeight > 4096))
    {
        Debug.LogFormat(LOG_FORMAT, "Max video encoding resolution on NVIDIA is 4096 x 4096");
        OnError(EncoderErrorCode.UNSUPPORTED_SPEC, null);
        return false;
    }
    else if (GraphicsCard.UNSUPPORTED_DEVICE == GPUEncoder_CheckGPUManufacturer())
    {
        Debug.LogFormat(LOG_FORMAT,
            "Unsupported GPU device, or GPUEncoder_GetCaptureCapability was not called to check device support");
        OnError(EncoderErrorCode.UNSUPPORTED_SPEC, null);
        return false;
    }

    // Create RenderTextures which will be used for video encoding
    CreateRenderTextures();

    // Create textures for stereo
    CreateStereoTextures();

    // if (videoCaptureType == VideoCaptureType.LIVE)
    // {
    //     // Start live streaming video capture
    //     return StartLiveStreaming();
    // }
    // else
    {
        // Start VOD video capture
        return StartVodCapture();
    }
}