/// <summary>
/// Records video from the selected camera for 5 seconds and stores it as c:\temp\example.mp4.
/// </summary>
/// <param name="args">Command-line arguments (unused).</param>
static void Main(string[] args)
{
    // Required NuGet packages:
    //   Install-Package Emgu.CV
    //   Install-Package Emgu.CV.runtime.windows
    //   Install-Package FFMediaToolkit
    //   Install-Package DirectShowLib

    var camIndex = SelectCameraIndex();

    _captureDevice = new VideoCapture(camIndex, VideoCapture.API.DShow)
    {
        FlipVertical = true
    };
    _captureDevice.ImageGrabbed += CaptureDeviceImageGrabbed;

    var settings = new VideoEncoderSettings(
        width: _captureDevice.Width,
        height: _captureDevice.Height,
        framerate: 15,
        codec: VideoCodec.H264)
    {
        EncoderPreset = EncoderPreset.Fast,
        CRF = 17 // Constant Rate Factor: lower = higher quality; 17 is near visually lossless
    };

    // FFmpeg binaries; download from https://github.com/BtbN/FFmpeg-Builds/releases
    FFmpegLoader.FFmpegPath = @"C:\Users\fiach\source\repos\Webcam\FFmpeg\";

    _videoOutput = MediaBuilder.CreateContainer(@"c:\temp\example.mp4").WithVideo(settings).Create();
    try
    {
        _captureDevice.Start();
        Thread.Sleep(TimeSpan.FromSeconds(5)); // record for 5 seconds
        _captureDevice.Stop();
    }
    finally
    {
        // Dispose even if capture throws, so the camera is released and the
        // container is finalized (the MP4 is unplayable without this).
        _captureDevice.Dispose();
        _videoOutput.Dispose();
    }
}
// Binds the given encoder settings to the dialog controls.
// The combo boxes are populated before any SelectedItem is assigned.
public void Setup(VideoEncoderSettings encoderSettings, VideoEncoderDescription descr)
{
    this.EncoderSettings = encoderSettings;

    // Populate selectable lists first so the assignments below can find their items.
    LoadEncoderProfilesItems();
    LoadRateModeItems();
    LoadAspectRatioItems();

    Text = descr.Name;
    formatTextBox.Text = EncoderSettings.EncoderFormat.ToString();
    encProfileComboBox.SelectedItem = EncoderSettings.Profile;
    bitrateModeComboBox.SelectedItem = EncoderSettings.BitrateMode;
    MaxBitrateNumeric.Value = EncoderSettings.MaxBitrate;
    bitrateNumeric.Value = EncoderSettings.Bitrate;
    fpsNumeric.Value = EncoderSettings.FrameRate.Num;
    latencyModeCheckBox.Checked = EncoderSettings.LowLatency;
    qualityNumeric.Value = EncoderSettings.Quality;
    gopSizeNumeric.Value = EncoderSettings.GOPSize;

    // Fall back to 1:1 when no aspect ratio is configured, then select the
    // matching list entry (or the first entry when there is no exact match).
    var ratio = EncoderSettings.AspectRatio ?? AspectRatio.AspectRatio_1_1;
    var selected = aspectRatios.FirstOrDefault(r => r.Num == ratio.Num && r.Den == ratio.Den)
                   ?? aspectRatios.FirstOrDefault();
    aspectRatioComboBox.SelectedItem = selected;
}
// Creates the video receiver, configures it with the given parameters,
// subscribes to buffer updates and starts playback.
internal void Play(VideoEncoderSettings inputPars, VideoEncoderSettings outputPars, NetworkSettings networkPars)
{
    logger.Debug("RemoteDesktopClient::Play(...)");

    var receiver = new VideoReceiver();
    receiver.Setup(inputPars, outputPars, networkPars);
    receiver.UpdateBuffer += VideoReceiver_UpdateBuffer;

    VideoReceiver = receiver;
    VideoReceiver.Play();
}
// Configures the video receive/render pipeline from the advertised channel info.
// Does nothing when the channel carries no video media info.
private void SetupVideo(ScreencastChannelInfo videoChannelInfo)
{
    logger.Verb("ScreenCastControl::SetupVideo(...)");

    if (!(videoChannelInfo.MediaInfo is VideoChannelInfo videoInfo))
    {
        return;
    }

    // Over TCP the stream is pulled from the server address itself,
    // not from the address advertised in the channel info.
    var addr = videoChannelInfo.Transport == TransportMode.Tcp
        ? ServerAddr
        : videoChannelInfo.Address;

    var encoderSettings = new VideoEncoderSettings
    {
        Width = videoInfo.Resolution.Width,
        Height = videoInfo.Resolution.Height,
        FrameRate = new MediaRatio(videoInfo.Fps, 1),
    };

    var networkSettings = new NetworkSettings
    {
        LocalAddr = addr,
        LocalPort = videoChannelInfo.Port,
        TransportMode = videoChannelInfo.Transport,
        SSRC = videoChannelInfo.SSRC,
    };

    VideoReceiver = new VideoReceiverEx();
    VideoReceiver.DataReceived += VideoReceiver_DataReceived;
    VideoReceiver.Setup(networkSettings);

    videoRendererSink = new D3D9RendererSink();
    encoderSettings.LowLatency = true;
    encoderSettings.UseHardware = true;
    videoRendererSink.Setup(encoderSettings, VideoHandle);
}
// Configures an FFmpeg-backed x264 encoder matching the source dimensions and rate.
public void Setup(MfVideoArgs args)
{
    var settings = new VideoEncoderSettings
    {
        EncoderId = "libx264",
        FrameRate = MfTool.LongToInts(args.FrameRate),
        Width = args.Width,
        Height = args.Height,
    };

    encoder = new FFmpegLib.H264Encoder();
    encoder.Setup(settings);
    encoder.DataEncoded += Encoder_DataEncoded;
}
/// <summary>
/// Adds a new video stream to the container. Usable only in encoding, before locking file.
/// </summary>
/// <param name="config">The stream configuration.</param>
/// <exception cref="InvalidOperationException">
/// Thrown when the output file already exists or a video stream was already added.
/// </exception>
public void AddVideoStream(VideoEncoderSettings config)
{
    // Guard clauses: only one video stream is supported, and it must be
    // registered before the output file is created.
    if (IsFileCreated)
    {
        throw new InvalidOperationException("The stream must be added before creating a file.");
    }

    if (Video != null)
    {
        throw new InvalidOperationException("The video stream was already created.");
    }

    Video = OutputStreamFactory.CreateVideo(this, config);
}
// Builds the default media settings: a screen-capture device description, an H.264
// video encoder, a PCMU audio encoder, and the stream settings that wrap them.
private void InitMediaSettings()
{
    // Full-HD desktop-duplication capture with the mouse cursor, 30 fps, hardware path.
    screenCaptureDeviceDescr = new ScreenCaptureDevice
    {
        Resolution = new Size(1920, 1080),
        CaptureMouse = true,
        AspectRatio = true, // keep the source aspect ratio when scaling
        CaptureType = VideoCaptureType.DXGIDeskDupl,
        UseHardware = true,
        Fps = 30,
    };

    // H.264 Main profile, CBR 2500 kbps (peak 5000), 30 fps, low-latency mode.
    videoEncoderSettings = new VideoEncoderSettings
    {
        Resolution = new Size(1920, 1080),
        Encoder = VideoEncoderMode.H264,
        Profile = H264Profile.Main,
        BitrateMode = BitrateControlMode.CBR,
        Bitrate = 2500,
        MaxBitrate = 5000,
        FrameRate = 30,
        LowLatency = true,
    };

    videoSettings = new VideoStreamSettings
    {
        Enabled = true,
        SessionId = "video_" + Guid.NewGuid().ToString(),
        NetworkParams = new NetworkSettings(),
        CaptureDescription = null, // assigned later when a capture source is selected
        EncodingParams = videoEncoderSettings,
    };

    // G.711 µ-law, mono, 8 kHz.
    audioEncoderSettings = new AudioEncoderSettings
    {
        SampleRate = 8000,
        Channels = 1,
        Encoding = "PCMU",
    };

    audioSettings = new AudioStreamSettings
    {
        Enabled = true,
        SessionId = "audio_" + Guid.NewGuid().ToString(),
        NetworkParams = new NetworkSettings(),
        CaptureParams = new AudioCaptureSettings(),
        EncodingParams = audioEncoderSettings,
    };
}
/// <summary>
/// Stops video recording and writes the captured frames to videoCapture.mp4
/// in the configured output directory.
/// </summary>
public void StopRecording()
{
    _videoCaptureDevice.SignalToStop();
    // NOTE(review): SignalToStop only requests the capture thread to stop; frames may
    // still be appended to _frames while it is enumerated below. Consider waiting for
    // the device to actually stop (e.g. WaitForStop) — TODO confirm against the device type.
    _dispatcherTimer.Stop();

    var settings = new VideoEncoderSettings(width: _width, height: _height, framerate: 1, codec: VideoCodec.H264)
    {
        EncoderPreset = EncoderPreset.Fast,
        CRF = 17 // Constant Rate Factor: 17 is near visually lossless
    };

    // Path.Combine avoids doubled or missing separators regardless of how
    // _filePath is terminated (the old code concatenated "\\videoCapture.mp4").
    var outputPath = System.IO.Path.Combine(_filePath, "videoCapture.mp4");

    using (var file = MediaBuilder.CreateContainer(outputPath).WithVideo(settings).Create())
    {
        foreach (var bitmap in _frames)
        {
            file.Video.AddFrame(ToImageData(Convert(bitmap)));
        }
    }
}
// Initializes the HTTP video streamer: creates the HTTP streamer and opens the
// FFmpeg encoder with the given settings. Throws InvalidOperationException when
// called in any state other than Closed; on failure, undoes partial
// initialization and rethrows.
public void Setup(VideoEncoderSettings encPars, NetworkSettings networkParams)
{
    logger.Debug("VideoHttpStreamer::Setup(...) " + encPars.Resolution.Width + "x" + encPars.Resolution.Height + " " + encPars.EncoderId);

    if (State != MediaState.Closed)
    {
        throw new InvalidOperationException("Invalid state " + State);
    }

    try
    {
        this.networkParams = networkParams;
        httpStreamer = new Networks.HttpStreamer();

        encoder = new FFmpegVideoEncoder();
        encoder.Open(encPars);
        encoder.DataEncoded += Encoder_DataEncoded;

        state = MediaState.Initialized;
    }
    catch (Exception ex)
    {
        logger.Error(ex);
        errorCode = 100503; // project-specific setup-failure code

        // Roll back the partially-opened encoder before rethrowing.
        if (encoder != null)
        {
            encoder.DataEncoded -= Encoder_DataEncoded;
            encoder.Close();
            encoder = null;
        }
        state = MediaState.Closed;

        throw;
    }
}
/// <summary>
/// Creates a new video stream for the specified <see cref="OutputContainer"/>.
/// </summary>
/// <param name="container">The media container.</param>
/// <param name="config">The stream settings.</param>
/// <returns>The new video stream.</returns>
public static OutputStream <VideoFrame> CreateVideo(OutputContainer container, VideoEncoderSettings config)
{
    // Fall back to the container format's default video codec when none is configured.
    var codecId = config.Codec ?? container.Pointer->oformat->video_codec;

    if (codecId == AVCodecID.AV_CODEC_ID_NONE)
    {
        throw new InvalidOperationException("The media container doesn't support video!");
    }

    var codec = ffmpeg.avcodec_find_encoder(codecId);

    if (codec == null)
    {
        throw new InvalidOperationException($"Cannot find an encoder with the {codecId}!");
    }

    if (codec->type != AVMediaType.AVMEDIA_TYPE_VIDEO)
    {
        throw new InvalidOperationException($"The {codecId} encoder doesn't support video!");
    }

    var videoStream = ffmpeg.avformat_new_stream(container.Pointer, codec);
    var codecContext = videoStream->codec;

    codecContext->codec_id = codecId;
    codecContext->codec_type = AVMediaType.AVMEDIA_TYPE_VIDEO;
    codecContext->bit_rate = config.Bitrate;
    codecContext->width = config.VideoWidth;
    codecContext->height = config.VideoHeight;
    // time_base is the reciprocal of the frame rate (1/fps).
    codecContext->time_base.den = config.Framerate;
    codecContext->time_base.num = 1;
    codecContext->gop_size = config.KeyframeRate;
    codecContext->pix_fmt = (AVPixelFormat)config.VideoFormat;

    // Some container formats (e.g. MP4) require global headers instead of in-stream headers.
    if ((container.Pointer->oformat->flags & ffmpeg.AVFMT_GLOBALHEADER) != 0)
    {
        codecContext->flags |= ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER;
    }

    var dict = new FFDictionary(config.CodecOptions);
    var ptr = dict.Pointer;

    // NOTE(review): the avcodec_open2 return code is ignored here, so a failed
    // open only surfaces later during encoding — consider checking the result.
    ffmpeg.avcodec_open2(codecContext, codec, &ptr);

    dict.Update(ptr);

    return(new OutputStream <VideoFrame>(videoStream, container));
}
// Builds the receive pipeline for the advertised video channel: decoder input and
// renderer output parameters, network settings, the receiver itself, and the D3D11
// renderer bound to the receiver's shared texture. No-op without video media info.
private void SetupVideo(ScreencastChannelInfo videoChannelInfo)
{
    tracer.Verb("ScreenCastControl::SetupVideo(...)");

    var videoInfo = videoChannelInfo.MediaInfo as VideoChannelInfo;
    if (videoInfo == null)
    {
        return;
    }

    // Over TCP the stream is fetched from the server address itself.
    var addr = videoChannelInfo.Address;
    if (videoChannelInfo.Transport == TransportMode.Tcp)
    {
        addr = ServerAddr;
    }

    var inputPars = new VideoEncoderSettings
    {
        Width = videoInfo.Resolution.Width,
        Height = videoInfo.Resolution.Height,
        FrameRate = new MediaRatio(videoInfo.Fps, 1),
    };

    var outputPars = new VideoEncoderSettings
    {
        Width = videoInfo.Resolution.Width,
        Height = videoInfo.Resolution.Height,
        FrameRate = new MediaRatio(videoInfo.Fps, 1),
    };

    var networkPars = new NetworkSettings
    {
        LocalAddr = addr,
        LocalPort = videoChannelInfo.Port,
        TransportMode = videoChannelInfo.Transport,
        SSRC = videoChannelInfo.SSRC,
    };

    VideoReceiver = new VideoReceiver();
    VideoReceiver.UpdateBuffer += VideoReceiver_UpdateBuffer;
    VideoReceiver.Setup(inputPars, outputPars, networkPars);

    d3dProvider = new D3D11RendererProvider();
    d3dProvider.Init(VideoReceiver.sharedTexture);
}
// Starts remote-desktop playback with the source/destination resolutions and
// transport selected in the UI. On failure, logs and cleans up.
private void playButton_Click(object sender, EventArgs e)
{
    logger.Debug("playButton_Click(...)");

    var address = addressTextBox.Text;
    if (string.IsNullOrEmpty(address))
    {
        return;
    }
    var port = (int)portNumeric.Value;

    try
    {
        remoteClient = new RemoteDesktopClient();
        remoteClient.UpdateBuffer += RemoteClient_UpdateBuffer;

        // Decoder input resolution (what the server sends).
        var inputPars = new VideoEncoderSettings
        {
            Resolution = new Size((int)srcWidthNumeric.Value, (int)srcHeightNumeric.Value),
            FrameRate = 30,
        };

        // Renderer output resolution (what is displayed locally).
        var outputPars = new VideoEncoderSettings
        {
            Resolution = new Size((int)destWidthNumeric.Value, (int)destHeightNumeric.Value),
            FrameRate = 30,
        };

        var networkPars = new NetworkSettings
        {
            LocalAddr = address,
            LocalPort = port,
            TransportMode = GetTransportMode(),
        };

        remoteClient.Play(inputPars, outputPars, networkPars);

        ShowVideoForm(@"rtp://" + address + ":" + port);
    }
    catch (Exception ex)
    {
        logger.Error(ex);
        CleanUp();
    }
}
// Initializes the HTTP screen streamer: computes an aspect-correct destination size,
// configures the screen-capture device and an MJPEG encoder, and wires up the HTTP
// streamer. Throws InvalidOperationException when not in the Closed state; cleans up
// and rethrows on setup failure.
public void Setup(HttpScreenStreamerArgs args)
{
    logger.Debug("HttpScreenStreamer::Setup() " + args.ToString());

    if (state != MediaState.Closed)
    {
        throw new InvalidOperationException("Invalid state " + State);
    }

    errorCode = 0;

    var srcRect = args.CaptureRegion;
    var destSize = args.Resolution;

    // Fit the destination size to the source aspect ratio:
    // shrink the height for landscape sources, the width for portrait ones.
    var ratio = srcRect.Width / (double)srcRect.Height;
    int destWidth = destSize.Width;
    int destHeight = (int)(destWidth / ratio);
    if (ratio < 1)
    {
        destHeight = destSize.Height;
        destWidth = (int)(destHeight * ratio);
    }
    destSize = new Size(destWidth, destHeight);

    var captureType = args.CaptureTypes;
    var captureProp = new ScreenCaptureProperties
    {
        CaptureType = captureType,
        Fps = (int)args.Fps,
        CaptureMouse = args.CaptureMouse,
        AspectRatio = true,
        UseHardware = false,
    };

    ScreenCaptureDevice captureParams = new ScreenCaptureDevice
    {
        CaptureRegion = srcRect,
        Resolution = destSize,
        Properties = captureProp,
    };

    if (captureType == VideoCaptureType.GDI
        || captureType == VideoCaptureType.GDILayered
        || captureType == VideoCaptureType.GDIPlus
        || captureType == VideoCaptureType.Datapath)
    {
        // These capture types grab at native size; scaling is done on the encoder.
        captureParams.Resolution = new Size(srcRect.Width, srcRect.Height);
    }

    VideoEncoderSettings encodingParams = new VideoEncoderSettings
    {
        EncoderFormat = VideoCodingFormat.JPEG,
        Width = destSize.Width,
        Height = destSize.Height,
        FrameRate = new MediaRatio(captureParams.Properties.Fps, 1),
        EncoderId = "mjpeg",
    };

    NetworkSettings networkParams = new NetworkSettings
    {
        RemoteAddr = args.Addres, // NOTE(review): "Addres" is a typo in the args type itself
        RemotePort = args.Port,
    };

    try
    {
        httpScreenSource = new ScreenSource();
        httpScreenSource.Setup(captureParams);
        httpScreenSource.CaptureStopped += HttpScreenSource_CaptureStopped;

        httpStreamer = new VideoHttpStreamer(httpScreenSource);
        httpStreamer.Setup(encodingParams, networkParams);
        httpStreamer.StreamerStopped += HttpStreamer_StreamerStopped;

        state = MediaState.Initialized;
    }
    catch (Exception ex)
    {
        logger.Error(ex);
        errorCode = 100503; // project-specific setup-failure code
        Close();
        throw;
    }
}
// Initializes the video streamer: creates the H.264 RTP session and the sender for
// the requested transport, starts the sender, then opens the video encoder and wires
// it to the video source. Cleans up and rethrows on failure.
public void Setup(VideoEncoderSettings encoderSettings, NetworkSettings networkSettings)
{
    logger.Debug("ScreenStreamer::Setup()");

    this.Id = "VideoStreamer_" + Guid.NewGuid().ToString();
    this.EncoderSettings = encoderSettings;
    this.NetworkSettings = networkSettings;

    try
    {
        H264Session = new H264Session();

        if (networkSettings.TransportMode == TransportMode.Tcp)
        {
            RtpSender = new RtpTcpSender(H264Session);
        }
        else if (networkSettings.TransportMode == TransportMode.Udp)
        {
            RtpSender = new RtpUdpSender(H264Session);
        }
        else
        {
            // NOTE(review): FormatException looks like the wrong exception type here
            // (NotSupportedException would fit better), but callers may depend on it.
            throw new FormatException("NotSupportedFormat " + networkSettings.TransportMode);
        }

        RtpSender.Setup(networkSettings);
        // Propagate the session's SSRC back through the caller-visible settings.
        networkSettings.SSRC = H264Session.SSRC;
        RtpSender.Start();

        videoEncoder = new VideoEncoder(videoSource);
        videoEncoder.Open(encoderSettings);
        videoEncoder.DataEncoded += VideoEncoder_DataEncoded;

        videoSource.BufferUpdated += ScreenSource_BufferUpdated;

        state = StreamerState.Initialized;
    }
    catch (Exception ex)
    {
        logger.Error(ex);
        CleanUp();
        throw;
    }
}
// Opens the encoding pipeline: creates a D3D11 device on the video source's adapter,
// configures a Media Foundation video processor (format conversion / scaling) and an
// H.264 encoder (FFmpeg-based or MF, depending on EncoderId), then starts both.
public void Open(VideoEncoderSettings encoderSettings)
{
    logger.Debug("VideoEncoder::Setup(...)");

    var hwBuffer = videoSource.SharedTexture;
    var hwDescr = hwBuffer.Description;
    var srcSize = new Size(hwDescr.Width, hwDescr.Height);
    var srcFormat = MfTool.GetVideoFormatGuidFromDXGIFormat(hwDescr.Format);

    var destSize = encoderSettings.Resolution;

    var adapterId = videoSource.AdapterId;
    using (var adapter = DxTool.FindAdapter1(adapterId))
    {
        var descr = adapter.Description;
        int adapterVenId = descr.VendorId;

        logger.Info("Adapter: " + descr.Description + " " + adapterVenId);

        var flags = DeviceCreationFlags.VideoSupport | DeviceCreationFlags.BgraSupport;
        device = new SharpDX.Direct3D11.Device(adapter, flags);
        using (var multiThread = device.QueryInterface <SharpDX.Direct3D11.Multithread>())
        {
            // Required because MF worker threads access the device concurrently.
            multiThread.SetMultithreadProtected(true);
        }
    }

    var profile = MfTool.GetMfH264Profile(encoderSettings.Profile);
    var bitrateMode = MfTool.GetMfBitrateMode(encoderSettings.BitrateMode);
    var aspectRatio = encoderSettings.AspectRatio;

    var encArgs = new MfVideoArgs
    {
        Width = destSize.Width,
        Height = destSize.Height,
        Format = VideoFormatGuids.NV12,
        FrameRate = MfTool.PackToLong(encoderSettings.FrameRate),
        MaxBitrate = encoderSettings.MaxBitrate * 1000, // kbps -> bps
        AvgBitrate = encoderSettings.Bitrate * 1000,    // kbps -> bps
        LowLatency = encoderSettings.LowLatency,
        AdapterId = videoSource.AdapterId,
        Profile = profile,
        BitrateMode = bitrateMode,
        GopSize = encoderSettings.GOPSize,
        Quality = encoderSettings.Quality,
        EncoderId = encoderSettings.EncoderId,
        AspectRatio = MfTool.PackToLong(aspectRatio)
    };

    // The processor converts from the source texture format/size to the
    // encoder's NV12 input at destination size.
    processor = new MfVideoProcessor(device);
    var inProcArgs = new MfVideoArgs
    {
        Width = srcSize.Width,
        Height = srcSize.Height,
        Format = srcFormat,
    };

    var outProcArgs = new MfVideoArgs
    {
        Width = encArgs.Width,
        Height = encArgs.Height,
        Format = encArgs.Format,
    };

    processor.Setup(inProcArgs, outProcArgs);

    // Staging texture in the source format/size used to feed the processor.
    bufTexture = new Texture2D(device,
        new Texture2DDescription
        {
            Format = hwDescr.Format,
            Width = srcSize.Width,
            Height = srcSize.Height,
            MipLevels = 1,
            ArraySize = 1,
            SampleDescription = { Count = 1 },
        });

    processor?.Start();

    // "libx264"/"h264_nvenc" go through the FFmpeg-based encoder; anything else
    // uses the Media Foundation encoder bound to the D3D11 device.
    var encoderName = encoderSettings.EncoderId;
    if (encoderName == "libx264" || encoderName == "h264_nvenc")
    {
        encoder = new MfFFMpegVideoEncoder();
    }
    else
    {
        encoder = new MfH264EncoderEx(device);
    }

    encoder.Setup(encArgs);
    encoder.DataEncoded += Encoder_DataEncoded;
    encoder.Start();
}
// Initializes the D3D9 renderer sink: creates the presentation clock, the video
// renderer (NV12), and an H.264 DXVA2 decoder. The hardware decoding path is used
// only when UseHardware is set.
public void Setup(VideoEncoderSettings settings, IntPtr hwnd)
{
    logger.Debug("D3D9RendererSink::Setup()");

    this.hWnd = hwnd;
    this.EncoderSettings = settings;
    var avgTimePerFrame = MfTool.FrameRateToAverageTimePerFrame(EncoderSettings.FrameRate);
    this.EncoderSettings.AverageTimePerFrame = avgTimePerFrame;

    MediaFactory.CreatePresentationClock(out presentationClock);
    PresentationTimeSource timeSource = null;
    try
    {
        MediaFactory.CreateSystemTimeSource(out timeSource);
        // The clock keeps its own reference; the local time source can be released.
        presentationClock.TimeSource = timeSource;
    }
    finally
    {
        timeSource?.Dispose();
    }

    videoRenderer = new MfVideoRenderer();
    // TODO: this should be (re)configured once the decoder reports its actual output format.
    videoRenderer.Setup(new VideoRendererArgs
    {
        hWnd = hWnd,
        FourCC = new SharpDX.Multimedia.FourCC("NV12"),
        Resolution = settings.Resolution,
        FrameRate = settings.FrameRate,
    });

    videoRenderer.RendererStarted += VideoRenderer_RendererStarted;
    videoRenderer.RendererStopped += VideoRenderer_RendererStopped;

    videoRenderer.SetPresentationClock(presentationClock);
    videoRenderer.Resize(new System.Drawing.Rectangle(0, 0, 100, 100));

    // The decoder only receives the renderer's D3D device manager when hardware
    // decoding is requested; otherwise it runs in software.
    SharpDX.MediaFoundation.DirectX.Direct3DDeviceManager d3dManager = null;
    if (EncoderSettings.UseHardware)
    {
        d3dManager = videoRenderer.D3DDeviceManager;
    }

    decoder = new MfH264Dxva2Decoder(d3dManager);
    var inputArgs = new MfVideoArgs
    {
        Width = EncoderSettings.Resolution.Width,
        Height = EncoderSettings.Resolution.Height,
        FrameRate = MfTool.PackToLong(EncoderSettings.FrameRate),
        LowLatency = EncoderSettings.LowLatency,
    };
    decoder.Setup(inputArgs);
}
// Initializes the screen receiver: creates the D3D11 device and the shared output
// texture, sets up the H.264 decoder and the video processor (decoder format ->
// ARGB32 at output size), and opens the RTP receiver for the requested transport.
// inputPars describes the incoming stream; outputPars the rendered output.
public void Setup(VideoEncoderSettings inputPars, VideoEncoderSettings outputPars, NetworkSettings networkPars)
{
    logger.Debug("ScreenReceiver::Setup(...)");

    var inputArgs = new MfVideoArgs
    {
        Width = inputPars.Resolution.Width,
        Height = inputPars.Resolution.Height,
        FrameRate = MfTool.PackToLong(inputPars.FrameRate),
    };

    var outputArgs = new MfVideoArgs
    {
        Width = outputPars.Resolution.Width,
        Height = outputPars.Resolution.Height,
        FrameRate = MfTool.PackToLong(outputPars.FrameRate),
    };

    // The device is created on the first adapter found.
    int adapterIndex = 0;
    using (var dxgiFactory = new SharpDX.DXGI.Factory1())
    {
        using (var adapter = dxgiFactory.GetAdapter1(adapterIndex))
        {
            device = new Device(adapter,
                                DeviceCreationFlags.VideoSupport |
                                DeviceCreationFlags.BgraSupport);

            using (var multiThread = device.QueryInterface <SharpDX.Direct3D11.Multithread>())
            {
                // Required: decoder/processor access the device from worker threads.
                multiThread.SetMultithreadProtected(true);
            }
        }
    }

    // Shared texture that downstream renderers read the decoded frames from.
    sharedTexture = new Texture2D(device,
        new Texture2DDescription
        {
            CpuAccessFlags = CpuAccessFlags.None,
            BindFlags = BindFlags.RenderTarget | BindFlags.ShaderResource,
            Format = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
            Width = outputArgs.Width,
            Height = outputArgs.Height,
            MipLevels = 1,
            ArraySize = 1,
            SampleDescription = { Count = 1, Quality = 0 },
            Usage = ResourceUsage.Default,
            OptionFlags = ResourceOptionFlags.Shared,
        });

    decoder = new MfH264Decoder(device);
    decoder.Setup(inputArgs);

    // The processor converts whatever the decoder outputs to ARGB32 at the output size.
    var decoderType = decoder.OutputMediaType;
    var decFormat = decoderType.Get(MediaTypeAttributeKeys.Subtype);
    var decFrameSize = MfTool.GetFrameSize(decoderType);

    processor = new MfVideoProcessor(device);
    var inProcArgs = new MfVideoArgs
    {
        Width = decFrameSize.Width,
        Height = decFrameSize.Height,
        Format = decFormat,
    };

    var outProcArgs = new MfVideoArgs
    {
        Width = outputArgs.Width,
        Height = outputArgs.Height,
        Format = VideoFormatGuids.Argb32,
    };

    processor.Setup(inProcArgs, outProcArgs);

    h264Session = new H264Session();
    if (networkPars.TransportMode == TransportMode.Tcp)
    {
        rtpReceiver = new RtpTcpReceiver(h264Session);
    }
    else if (networkPars.TransportMode == TransportMode.Udp)
    {
        rtpReceiver = new RtpUdpReceiver(h264Session);
    }
    else
    {
        throw new Exception("networkPars.TransportMode");
    }

    h264Session.SSRC = networkPars.SSRC;
    rtpReceiver.Open(networkPars);
    rtpReceiver.RtpPacketReceived += RtpReceiver_RtpPacketReceived;
}
/// <summary>
/// Creates a new video stream for the specified <see cref="OutputContainer"/>.
/// </summary>
/// <param name="container">The media container.</param>
/// <param name="config">The stream settings.</param>
/// <returns>The new video stream.</returns>
public static OutputStream <VideoFrame> CreateVideo(OutputContainer container, VideoEncoderSettings config)
{
    // Use the container format's default codec unless one was configured explicitly.
    var codecId = config.Codec == VideoCodec.Default ? container.Pointer->oformat->video_codec : (AVCodecID)config.Codec;

    if (codecId == AVCodecID.AV_CODEC_ID_NONE)
    {
        throw new InvalidOperationException("The media container doesn't support video!");
    }

    var codec = ffmpeg.avcodec_find_encoder(codecId);

    if (codec == null)
    {
        throw new InvalidOperationException($"Cannot find an encoder with the {codecId}!");
    }

    if (codec->type != AVMediaType.AVMEDIA_TYPE_VIDEO)
    {
        throw new InvalidOperationException($"The {codecId} encoder doesn't support video!");
    }

    var videoStream = ffmpeg.avformat_new_stream(container.Pointer, codec);
    videoStream->time_base = config.TimeBase;
    videoStream->r_frame_rate = config.FramerateRational;

    var codecContext = videoStream->codec;
    codecContext->codec_id = codecId;
    codecContext->codec_type = AVMediaType.AVMEDIA_TYPE_VIDEO;
    codecContext->width = config.VideoWidth;
    codecContext->height = config.VideoHeight;
    codecContext->time_base = videoStream->time_base;
    codecContext->framerate = videoStream->r_frame_rate;
    codecContext->gop_size = config.KeyframeRate;
    codecContext->pix_fmt = (AVPixelFormat)config.VideoFormat;

    // Some container formats (e.g. MP4) require global headers instead of in-stream headers.
    if ((container.Pointer->oformat->flags & ffmpeg.AVFMT_GLOBALHEADER) != 0)
    {
        codecContext->flags |= ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER;
    }

    var dict = new FFDictionary(config.CodecOptions);

    // Prefer constant-quality (CRF) rate control when the codec supports it;
    // otherwise fall back to a fixed bitrate.
    if (config.CRF.HasValue && config.Codec.IsMatch(VideoCodec.H264, VideoCodec.H265, VideoCodec.VP9, VideoCodec.VP8))
    {
        dict["crf"] = config.CRF.Value.ToString();
    }
    else
    {
        codecContext->bit_rate = config.Bitrate;
    }

    if (config.Codec.IsMatch(VideoCodec.H264, VideoCodec.H265))
    {
        dict["preset"] = config.EncoderPreset.GetDescription();
    }

    var ptr = dict.Pointer;
    // NOTE(review): the avcodec_open2 return code is ignored; a failed open
    // only surfaces later during encoding — consider checking the result.
    ffmpeg.avcodec_open2(codecContext, codec, &ptr);
    dict.Update(ptr);

    return(new OutputStream <VideoFrame>(videoStream, container));
}
// Background worker for the screen-cast client: connects to the WCF ScreenCaster
// service, negotiates video/audio channels, starts the receivers, then pings the
// server once a second until stopped. Always closes the channel on exit.
//
// BUG FIX: the input frame-rate ratio was constructed as
// `new MediaRatio(videoInfo.Fps, }` — the denominator argument was missing,
// which did not even parse. It is now `new MediaRatio(videoInfo.Fps, 1)`,
// matching every other call site in this file.
private void ClientProc()
{
    var address = "net.tcp://" + ServerAddr + "/ScreenCaster";
    if (this.ServerPort > 0)
    {
        address = "net.tcp://" + ServerAddr + ":" + ServerPort + "/ScreenCaster";
    }

    try
    {
        var uri = new Uri(address);
        this.ClientId = RngProvider.GetRandomNumber().ToString();

        // Transport security disabled — the channel is unauthenticated and unencrypted.
        NetTcpSecurity security = new NetTcpSecurity
        {
            Mode = SecurityMode.None,
        };

        var binding = new NetTcpBinding
        {
            ReceiveTimeout = TimeSpan.MaxValue, // keep-alive is done via PostMessage pings
            SendTimeout = TimeSpan.FromSeconds(10),
            Security = security,
        };

        factory = new ChannelFactory<IScreenCastService>(binding, new EndpointAddress(uri));
        var channel = factory.CreateChannel();

        try
        {
            var channelInfos = channel.GetChannelInfos();
            if (channelInfos == null)
            {
                logger.Error("channelInfos == null");
                return;
            }

            TransportMode transportMode = TransportMode.Udp;

            var videoChannelInfo = channelInfos.FirstOrDefault(c => c.MediaInfo is VideoChannelInfo);
            if (videoChannelInfo != null)
            {
                transportMode = videoChannelInfo.Transport;
                if (transportMode == TransportMode.Tcp)
                {
                    // TCP channels are exclusive — refuse to join an occupied one.
                    if (videoChannelInfo.ClientsCount > 0)
                    {
                        throw new Exception("Server is busy");
                    }
                }

                // Over TCP the stream is pulled from the server address itself.
                var videoAddr = videoChannelInfo.Address;
                if (transportMode == TransportMode.Tcp)
                {
                    videoAddr = ServerAddr;
                }
                var videoPort = videoChannelInfo.Port;

                var videoInfo = videoChannelInfo.MediaInfo as VideoChannelInfo;
                if (videoInfo != null)
                {
                    var inputPars = new VideoEncoderSettings
                    {
                        Resolution = videoInfo.Resolution,
                        FrameRate = new MediaRatio(videoInfo.Fps, 1),
                    };

                    var outputPars = new VideoEncoderSettings
                    {
                        Resolution = videoInfo.Resolution,
                        FrameRate = videoInfo.Fps,
                    };

                    var networkPars = new NetworkSettings
                    {
                        LocalAddr = videoAddr,
                        LocalPort = videoPort,
                        TransportMode = transportMode,
                        SSRC = videoChannelInfo.SSRC,
                    };

                    VideoReceiver = new VideoReceiver();
                    VideoReceiver.Setup(inputPars, outputPars, networkPars);
                    VideoReceiver.UpdateBuffer += VideoReceiver_UpdateBuffer;
                }
            }

            var audioChannelInfo = channelInfos.FirstOrDefault(c => c.MediaInfo is AudioChannelInfo);
            if (audioChannelInfo != null)
            {
                var audioInfo = audioChannelInfo.MediaInfo as AudioChannelInfo;
                if (audioInfo != null)
                {
                    var audioAddr = audioChannelInfo.Address;
                    transportMode = audioChannelInfo.Transport;
                    if (transportMode == TransportMode.Tcp)
                    {
                        audioAddr = ServerAddr;
                    }
                    if (transportMode == TransportMode.Tcp)
                    {
                        if (audioChannelInfo.ClientsCount > 0)
                        {
                            throw new Exception("Server is busy");
                        }
                    }
                    var audioPort = audioChannelInfo.Port;

                    AudioReceiver = new AudioReceiver();
                    var networkPars = new NetworkSettings
                    {
                        LocalAddr = audioAddr,
                        LocalPort = audioPort,
                        TransportMode = transportMode,
                        SSRC = audioChannelInfo.SSRC,
                    };

                    // Playback goes to the first available DirectSound device;
                    // fall back to an empty id (system default) on failure.
                    var audioDeviceId = "";
                    try
                    {
                        var devices = DirectSoundOut.Devices;
                        var device = devices.FirstOrDefault();
                        audioDeviceId = device?.Guid.ToString() ?? "";
                    }
                    catch (Exception ex)
                    {
                        logger.Error(ex);
                    }

                    var audioPars = new AudioEncoderSettings
                    {
                        SampleRate = audioInfo.SampleRate,
                        Channels = audioInfo.Channels,
                        Encoding = "ulaw",
                        DeviceId = audioDeviceId,
                    };

                    AudioReceiver.Setup(audioPars, networkPars);
                }
            }

            if (VideoReceiver != null)
            {
                VideoReceiver.Play();
            }

            if (AudioReceiver != null)
            {
                AudioReceiver.Play();
            }

            running = true;
            State = ClientState.Connected;
            OnStateChanged(State);

            // Keep-alive loop: ping the server once a second until stopped.
            while (running)
            {
                channel.PostMessage(new ServerRequest { Command = "Ping" });
                syncEvent.WaitOne(1000);
            }
        }
        finally
        {
            running = false;
            State = ClientState.Disconnected;
            OnStateChanged(State);

            // Close the channel gracefully, or abort it when faulted.
            try
            {
                var c = (IClientChannel)channel;
                if (c.State != CommunicationState.Faulted)
                {
                    c.Close();
                }
                else
                {
                    c.Abort();
                }
            }
            catch (Exception ex)
            {
                logger.Error(ex);
            }
        }
    }
    catch (Exception ex)
    {
        logger.Error(ex);
        State = ClientState.Faulted;
        OnStateChanged(State);
    }
    finally
    {
        Close();
    }
}
// Starts MJPEG-over-HTTP streaming of the currently selected screen region,
// fitting the requested destination size to the source aspect ratio.
private void httpStartButton_Click(object sender, EventArgs e)
{
    var srcRect = HttpGetCurrentScreen();

    // Fit the requested output size to the source aspect ratio:
    // shrink the height for landscape sources, the width for portrait ones.
    var requestedSize = new Size((int)httpDestWidthNumeric.Value, (int)httpDestHeightNumeric.Value);
    var aspect = srcRect.Width / (double)srcRect.Height;
    int width = requestedSize.Width;
    int height = (int)(width / aspect);
    if (aspect < 1)
    {
        height = requestedSize.Height;
        width = (int)(height * aspect);
    }
    var destSize = new Size(width, height);

    var fps = httpFpsNumeric.Value;
    var addr = httpAddrTextBox.Text;
    var port = (int)httpPortNumeric.Value;
    VideoCaptureType captureType = (VideoCaptureType)captureTypesComboBox.SelectedItem;

    httpScreenSource = new ScreenSource();

    ScreenCaptureDevice captureParams = new ScreenCaptureDevice
    {
        CaptureRegion = srcRect,
        Resolution = destSize,
    };
    captureParams.Properties.CaptureType = captureType;
    captureParams.Properties.Fps = (int)fps;
    captureParams.Properties.CaptureMouse = true;
    captureParams.Properties.AspectRatio = true;
    captureParams.Properties.UseHardware = false;

    if (captureType == VideoCaptureType.GDI || captureType == VideoCaptureType.GDIPlus)
    {
        // GDI captures at native size; scaling is done on the encoder instead.
        captureParams.Resolution = new Size(srcRect.Width, srcRect.Height);
    }

    httpScreenSource.Setup(captureParams);
    httpStreamer = new VideoHttpStreamer(httpScreenSource);

    NetworkSettings networkParams = new NetworkSettings
    {
        RemoteAddr = addr,
        RemotePort = port,
    };

    VideoEncoderSettings encodingParams = new VideoEncoderSettings
    {
        Width = destSize.Width,
        Height = destSize.Height,
        FrameRate = new MediaRatio((int)fps, 1),
        EncoderId = "mjpeg",
    };

    httpStreamer.Setup(encodingParams, networkParams);
    httpStreamer.Start();
    httpScreenSource.Start();

    statisticForm.Location = srcRect.Location;
    // statisticForm.Start();
}
// Builds the default streaming session: TCP unicast named after the machine,
// Full-HD H.264 video enabled, G.711 µ-law audio present but disabled.
public static StreamSession Default()
{
    var session = new StreamSession
    {
        StreamName = Environment.MachineName,
        NetworkIpAddress = "0.0.0.0",
        MutlicastAddress = "239.0.0.1",
        CommunicationPort = 0, // 0 = pick a free port
        IsMulticast = false,
        TransportMode = TransportMode.Tcp,
    };

    // H.264 Main profile, CBR 2500 kbps (peak 5000), 30 fps, low latency.
    var videoEncoderSettings = new VideoEncoderSettings
    {
        Width = 1920,
        Height = 1080,
        EncoderFormat = VideoCodingFormat.H264,
        Profile = H264Profile.Main,
        BitrateMode = BitrateControlMode.CBR,
        Bitrate = 2500,
        MaxBitrate = 5000,
        FrameRate = new MediaRatio(30, 1),
        LowLatency = true,
    };

    var videoSettings = new VideoStreamSettings
    {
        Enabled = true,
        NetworkSettings = new NetworkSettings(),
        CaptureDevice = null, // assigned once a capture source is chosen
        EncoderSettings = videoEncoderSettings,
        StreamFlags = VideoStreamFlags.UseEncoderResoulutionFromSource,
    };

    // G.711 µ-law, mono, 8 kHz — present but disabled by default.
    var audioEncoderSettings = new AudioEncoderSettings
    {
        SampleRate = 8000,
        Channels = 1,
        Encoding = "PCMU",
    };

    var audioSettings = new AudioStreamSettings
    {
        Enabled = false,
        NetworkSettings = new NetworkSettings(),
        CaptureDevice = new AudioCaptureDevice(),
        EncoderSettings = audioEncoderSettings,
    };

    session.AudioSettings = audioSettings;
    session.VideoSettings = videoSettings;

    return session;
}
// Background worker for the remote-desktop client: connects to the WCF
// RemoteDesktop service, starts a session against the server's primary screen,
// begins playback and input forwarding, then pings the server once a second
// until stopped. Closes the channel and the client on exit.
private void ClientProc()
{
    var address = "net.tcp://" + ServerAddr + "/RemoteDesktop";
    try
    {
        var uri = new Uri(address);

        // Transport security disabled — the channel is unauthenticated and unencrypted.
        NetTcpSecurity security = new NetTcpSecurity
        {
            Mode = SecurityMode.None,
        };

        var binding = new NetTcpBinding
        {
            ReceiveTimeout = TimeSpan.MaxValue, // keep-alive is done via PostMessage pings
            SendTimeout = TimeSpan.FromSeconds(10),
            Security = security,
        };

        factory = new ChannelFactory <IRemoteDesktopService>(binding, new EndpointAddress(uri));
        var channel = factory.CreateChannel();

        try
        {
            this.ClientId = RngProvider.GetRandomNumber().ToString();
            var connectReq = new RemoteDesktopRequest
            {
                SenderId = ClientId,
            };

            var connectionResponse = channel.Connect(connectReq);
            if (!connectionResponse.IsSuccess)
            {
                logger.Error("connectionResponse " + connectionResponse.FaultCode);
                return;
            }

            this.ServerId = connectionResponse.ServerId;
            this.ServerName = connectionResponse.HostName;

            // Stream the server's primary screen, scaled to 1920x1080.
            var screens = connectionResponse.Screens;
            var primaryScreen = screens.FirstOrDefault(s => s.IsPrimary);

            var startRequest = new StartSessionRequest
            {
                SenderId = this.ClientId,
                SrcRect = primaryScreen.Bounds,
                DestAddr = "",
                DestPort = 1234,
                DstSize = new Size(1920, 1080),
                EnableInputSimulator = true,
            };

            var startResponse = channel.Start(startRequest);
            if (!startResponse.IsSuccess)
            {
                logger.Error("startResponse " + startResponse.FaultCode);
                return;
            }

            var inputPars = new VideoEncoderSettings
            {
                Resolution = startRequest.DstSize,
                FrameRate = 30,
            };

            var outputPars = new VideoEncoderSettings
            {
                Resolution = startRequest.DstSize,
                FrameRate = 30,
            };

            var transport = TransportMode.Udp;
            var networkPars = new NetworkSettings
            {
                LocalAddr = ServerAddr,
                LocalPort = 1234,
                TransportMode = transport,
            };

            this.Play(inputPars, outputPars, networkPars);

            // Forward local keyboard/mouse input to the server.
            InputManager = new InputManager();
            InputManager.Start(ServerAddr, 8888);

            running = true;
            State = ClientState.Connected;
            OnStateChanged(State);

            // Keep-alive loop: ping the server once a second until stopped.
            while (running)
            {
                channel.PostMessage("Ping", null);
                syncEvent.WaitOne(1000);
            }
        }
        finally
        {
            running = false;
            State = ClientState.Disconnected;
            OnStateChanged(State);

            // Close the channel gracefully, or abort it when faulted.
            try
            {
                var c = (IClientChannel)channel;
                if (c.State != CommunicationState.Faulted)
                {
                    c.Close();
                }
                else
                {
                    c.Abort();
                }
            }
            catch (Exception ex)
            {
                logger.Error(ex);
            }
        }
    }
    catch (Exception ex)
    {
        logger.Error(ex);
        State = ClientState.Faulted;
        OnStateChanged(State);
        Close();
    }
}