Example #1
 public RecordFileTest()
 {
     videoEncoder       = Resolve <IVideoEncoder>();
     videoDeviceManager = Resolve <IVideoDeviceManager>();
     aacEncoder         = Resolve <IAacEncoder>();
     audioDeviceManager = Resolve <IAudioDeviceManager>();
 }
        /// <summary>
        /// Creates the object that implements the IQuality interface
        /// </summary>
        public static IQuality createQualityControl(Configuration configuration, IBaseFilter filterVideoEncoder,
                                                    IBaseFilter filterCapture, IBaseFilter filterMultiplexer,
                                                    IBaseFilter filterVideoCompressor)
        {
            ICodecAPI codecAPI = checkCodecAPI(filterVideoEncoder, filterCapture, filterMultiplexer, filterVideoCompressor);

            if (codecAPI != null)
            {
                return(new CodecAPIControl(configuration, codecAPI));
            }

            IVideoEncoder videoEncoder = checkVideoEncoder(filterVideoEncoder, filterCapture, filterMultiplexer,
                                                           filterVideoCompressor);

            if (videoEncoder != null)
            {
                return(new VideoEncoderControl(configuration, videoEncoder));
            }

#pragma warning disable 618,612
            IEncoderAPI encoderAPI = checkEncoderAPI(filterVideoEncoder, filterCapture, filterMultiplexer,
                                                     filterVideoCompressor);
            if (encoderAPI != null)
            {
                return(new EncoderAPIControl(configuration, encoderAPI));
            }
#pragma warning restore 618,612

            return(null);
        }
Example #3
 /// <summary>
 /// Creates a new instance of <see cref="EncodingVideoStreamWrapper"/>.
 /// </summary>
 /// <param name="baseStream">Video stream to be wrapped.</param>
 /// <param name="encoder">Encoder to be used.</param>
 /// <param name="ownsEncoder">Whether to dispose the encoder.</param>
 public EncodingVideoStreamWrapper(IAviVideoStreamInternal baseStream, IVideoEncoder encoder, bool ownsEncoder)
     : base(baseStream)
 {
     this.encoder     = encoder;
     this.ownsEncoder = ownsEncoder;
     encodedBuffer    = new byte[encoder.MaxEncodedSize];
 }

        private static IVideoEncoder checkVideoEncoder(IBaseFilter filterVideoEncoder, IBaseFilter filterCapture,
                                                       IBaseFilter filterMultiplexer, IBaseFilter filterVideoCompressor)
        {
            IVideoEncoder videoEncoder = null;

            if (filterVideoEncoder != null)
            {
                videoEncoder = filterVideoEncoder as IVideoEncoder;
            }

            if (videoEncoder == null && filterCapture != null)
            {
                videoEncoder = filterCapture as IVideoEncoder;
            }

            if (videoEncoder == null && filterMultiplexer != null)
            {
                videoEncoder = filterMultiplexer as IVideoEncoder;
            }

            if (videoEncoder == null && filterVideoCompressor != null)
            {
                videoEncoder = filterVideoCompressor as IVideoEncoder;
            }

            return(videoEncoder);
        }
Example #5
        public VideoTestPatternSource(IVideoEncoder encoder = null)
        {
            if (encoder != null)
            {
                _videoEncoder  = encoder;
                _formatManager = new MediaFormatManager <VideoFormat>(SupportedFormats);
            }

            var assem          = typeof(VideoTestPatternSource).GetTypeInfo().Assembly;
            var testPatternStm = assem.GetManifestResourceStream(TEST_PATTERN_RESOURCE_PATH);

            if (testPatternStm == null)
            {
                OnVideoSourceError?.Invoke(
                    $"Test pattern embedded resource could not be found, {TEST_PATTERN_RESOURCE_PATH}.");
            }
            else
            {
                _testI420Buffer = new byte[TEST_PATTERN_WIDTH * TEST_PATTERN_HEIGHT * 3 / 2];
                testPatternStm.Read(_testI420Buffer, 0, _testI420Buffer.Length);
                testPatternStm.Close();
                _sendTestPatternTimer = new Timer(GenerateTestPattern, null, Timeout.Infinite, Timeout.Infinite);
                _frameSpacing         = 1000 / DEFAULT_FRAMES_PER_SECOND;
            }
        }
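A minimal usage sketch of the test pattern source above. The StartVideo and OnVideoSourceRawSample members are assumed from SIPSorcery's IVideoSource contract; they do not appear in the snippet itself.

 // Minimal usage sketch: play the embedded I420 test pattern with no encoder attached.
 // StartVideo and OnVideoSourceRawSample are assumed from the IVideoSource contract.
 var testPatternSource = new VideoTestPatternSource();

 testPatternSource.OnVideoSourceRawSample += (durationMs, width, height, sample, pixelFormat) =>
 {
     // Raw I420 frames of the test pattern arrive here at the default frame rate.
 };

 await testPatternSource.StartVideo();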
Example #6
 /// <summary>
 /// Initializes a new instance of the <see cref="VideoEncoderControl"/> class.
 /// </summary>
 /// <param name="configuration">The encoder settings to use.</param>
 /// <param name="videoEncoder">The IVideoEncoder interface to the filter that must be used to control the quality.</param>
 public VideoEncoderControl(Configuration configuration, IVideoEncoder videoEncoder)
   : base(configuration)
 {
   _videoEncoder = videoEncoder;
   Log.Log.WriteFile("analog: IVideoEncoder supported by: " +
                     FilterGraphTools.GetFilterName(_videoEncoder as IBaseFilter) + "; Checking capabilities ");
   CheckCapabilities();
 }
Example #7
 public Record(IVideoEncoder videoEncoder, IAacEncoder aacEncoder)
 {
     State   = RecAndLiveState.NotStart;
     _handle = VsNetRecordSdk.FileMuxer_AllocInstance();
     VsNetRecordSdk.FileMuxer_EnableSync(_handle, true);
     _videoEncoder = videoEncoder;
     _aacEncoder   = aacEncoder;
 }
Example #8
 /// <summary>
 /// Initializes a new instance of the <see cref="VideoEncoderControl"/> class.
 /// </summary>
 /// <param name="configuration">The encoder settings to use.</param>
 /// <param name="videoEncoder">The IVideoEncoder interface to the filter that must be used to control the quality.</param>
 public VideoEncoderControl(Configuration configuration, IVideoEncoder videoEncoder)
     : base(configuration)
 {
     _videoEncoder = videoEncoder;
     Log.Log.WriteFile("analog: IVideoEncoder supported by: " +
                       FilterGraphTools.GetFilterName(_videoEncoder as IBaseFilter) + "; Checking capabilities ");
     CheckCapabilities();
 }
Example #9
        public LiveBroadcast(IVideoEncoder videoEncoder, IAacEncoder aacEncoder)
        {
            State         = RecAndLiveState.NotStart;
            _videoEncoder = videoEncoder;
            _aacEncoder   = aacEncoder;

            _handle = VsNetRtmpSenderSdk.RtmpSender_AllocInstance();
            VsNetRtmpSenderSdk.RtmpSender_SendInThread(_handle, true);
        }
Example #10
        /// <summary>
        /// Creates a new instance of <see cref="EncodingVideoStreamWrapper"/>.
        /// </summary>
        /// <param name="baseStream">Video stream to be wrapped.</param>
        /// <param name="encoder">Encoder to be used.</param>
        /// <param name="ownsEncoder">Whether to dispose the encoder.</param>
        public EncodingVideoStreamWrapper(IAviVideoStreamInternal baseStream, IVideoEncoder encoder, bool ownsEncoder)
            : base(baseStream)
        {
            Argument.IsNotNull(encoder, nameof(encoder));

            this.encoder     = encoder;
            this.ownsEncoder = ownsEncoder;
            encodedBuffer    = new byte[encoder.MaxEncodedSize];
        }
Example #11
        /// <summary>
        /// Creates a new instance of <see cref="SingleThreadedVideoEncoderWrapper"/>.
        /// </summary>
        /// <param name="encoderFactory">
        /// Factory for creating an encoder instance.
        /// It will be invoked on the same thread as all subsequent operations of the <see cref="IVideoEncoder"/> interface.
        /// </param>
        public SingleThreadedVideoEncoderWrapper(Func <IVideoEncoder> encoderFactory)
        {
            Argument.IsNotNull(encoderFactory, nameof(encoderFactory));

            scheduler = new SingleThreadTaskScheduler();

            // TODO: Create encoder on the first frame
            encoder = SchedulerInvoke(encoderFactory)
                      ?? throw new InvalidOperationException("Encoder factory has created no instance.");
        }
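A minimal usage sketch of the wrapper above. MyHardwareEncoder is a hypothetical, non-thread-safe IVideoEncoder implementation used only for illustration, and EncodeFrame is assumed to follow SharpAvi's array-based IVideoEncoder signature.

 // The factory runs on the wrapper's dedicated thread, and every IVideoEncoder
 // call below is marshalled onto that same thread, so the wrapped encoder
 // never sees concurrent access.
 IVideoEncoder safeEncoder = new SingleThreadedVideoEncoderWrapper(() => new MyHardwareEncoder());

 var frame   = new byte[640 * 480 * 4];               // top-down BGR32 input frame
 var encoded = new byte[safeEncoder.MaxEncodedSize];  // MaxEncodedSize comes from IVideoEncoder
 int length  = safeEncoder.EncodeFrame(frame, 0, encoded, 0, out bool isKeyFrame);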
Example #12
 public IAviVideoStream AddEncodingVideoStream(IVideoEncoder encoder, int width, int height, bool ownsEncoder)
 {
     return(AddStream <IAviVideoStreamInternal>(index =>
     {
         var stream = new AviVideoStream(index, this, width, height, BitsPerPixel.Bpp32);
         var encodingStream = new EncodingVideoStreamWrapper(stream, encoder, ownsEncoder);
         var asyncStream = new AsyncVideoStreamWrapper(encodingStream);
         return asyncStream;
     }));
 }
Example #13
        /// <summary>
        /// Creates a new instance of <see cref="EncodingVideoStreamWrapper"/>.
        /// </summary>
        /// <param name="baseStream">Video stream to be wrapped.</param>
        /// <param name="encoder">Encoder to be used.</param>
        /// <param name="ownsEncoder">Whether to dispose the encoder.</param>
        public EncodingVideoStreamWrapper(IAviVideoStreamInternal baseStream, IVideoEncoder encoder, bool ownsEncoder)
            : base(baseStream)
        {
            Contract.Requires(baseStream != null);
            Contract.Requires(encoder != null);

            this.encoder     = encoder;
            this.ownsEncoder = ownsEncoder;
            encodedBuffer    = new byte[encoder.MaxEncodedSize];
        }
Example #14
        /// <summary>
        /// Creates a new instance of <see cref="EncodingVideoStreamWrapper"/>.
        /// </summary>
        /// <param name="baseStream">Video stream to be wrapped.</param>
        /// <param name="encoder">Encoder to be used.</param>
        /// <param name="ownsEncoder">Whether to dispose the encoder.</param>
        public EncodingVideoStreamWrapper(IAviVideoStreamInternal baseStream, IVideoEncoder encoder, bool ownsEncoder)
            : base(baseStream)
        {
            Contract.Requires(baseStream != null);
            Contract.Requires(encoder != null);

            this.encoder = encoder;
            this.ownsEncoder = ownsEncoder;
            encodedBuffer = new byte[encoder.MaxEncodedSize];
        }
Example #15
        public VideoRecorder(string fileName)
        {
            string path = Path.GetFullPath(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Video"));

            _outputFilePath = Path.Combine(path, fileName);

            _configurator = new VideoConfigurator()
            {
                Quality = VideoQuality.High, FramePerSecond = 30
            };
            _selectedEncoder = EncoderProvider.GetAvailableEncoder(_configurator);
        }
Example #16
        /// <summary>Adds new encoding video stream.</summary>
        /// <param name="encoder">Encoder to be used.</param>
        /// <param name="ownsEncoder">Whether encoder should be disposed with the writer.</param>
        /// <param name="width">Frame's width.</param>
        /// <param name="height">Frame's height.</param>
        /// <returns>Newly added video stream.</returns>
        /// <remarks>
        /// <para>
        /// Stream is initialized to be encoded with the specified encoder.
        /// Method <see cref="IAviVideoStream.WriteFrame"/> expects data in the same format as the encoder does,
        /// that is a top-down BGR32 bitmap. It is passed to the encoder and the encoded result is written
        /// to the stream.
        /// Parameters <c>isKeyFrame</c> and <c>length</c> are ignored by encoding streams,
        /// as encoders determine on their own which frames are keys, and the size of input bitmaps is fixed.
        /// </para>
        /// <para>
        /// Properties <see cref="IAviVideoStream.Codec"/> and <see cref="IAviVideoStream.BitsPerPixel"/>
        /// are defined by the encoder, and cannot be modified.
        /// </para>
        /// </remarks>
        public IAviVideoStream AddEncodingVideoStream(IVideoEncoder encoder, bool ownsEncoder = true, int width = 1, int height = 1)
        {
            Contract.Requires(encoder != null);
            Contract.Requires(Streams.Count < 100);
            Contract.Ensures(Contract.Result <IAviVideoStream>() != null);

            return(AddStream <IAviVideoStreamInternal>(index =>
            {
                var stream = new AviVideoStream(index, this, width, height, BitsPerPixel.Bpp32);
                var encodingStream = new EncodingVideoStreamWrapper(stream, encoder, ownsEncoder);
                var asyncStream = new AsyncVideoStreamWrapper(encodingStream);
                return asyncStream;
            }));
        }
Example #17
        /// <summary>Adds new encoding video stream.</summary>
        /// <param name="encoder">Encoder to be used.</param>
        /// <param name="ownsEncoder">Whether encoder should be disposed with the writer.</param>
        /// <param name="width">Frame's width.</param>
        /// <param name="height">Frame's height.</param>
        /// <returns>Newly added video stream.</returns>
        /// <remarks>
        /// <para>
        /// Stream is initialized to be encoded with the specified encoder.
        /// Method <see cref="IAviVideoStream.WriteFrame"/> expects data in the same format as the encoder does,
        /// that is a top-down BGR32 bitmap. It is passed to the encoder and the encoded result is written
        /// to the stream.
        /// Parameters <c>isKeyFrame</c> and <c>length</c> are ignored by encoding streams,
        /// as encoders determine on their own which frames are keys, and the size of input bitmaps is fixed.
        /// </para>
        /// <para>
        /// Properties <see cref="IAviVideoStream.Codec"/> and <see cref="IAviVideoStream.BitsPerPixel"/>
        /// are defined by the encoder, and cannot be modified.
        /// </para>
        /// </remarks>
        public IAviVideoStream AddEncodingVideoStream(IVideoEncoder encoder, bool ownsEncoder = true, int width = 1, int height = 1)
        {
            Argument.IsNotNull(encoder, nameof(encoder));
            Argument.IsPositive(width, nameof(width));
            Argument.IsPositive(height, nameof(height));

            return(AddStream <IAviVideoStreamInternal>(index =>
            {
                var stream = new AviVideoStream(index, this, width, height, BitsPerPixel.Bpp32);
                var encodingStream = new EncodingVideoStreamWrapper(stream, encoder, ownsEncoder);
                var asyncStream = new AsyncVideoStreamWrapper(encodingStream);
                return asyncStream;
            }));
        }
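An end-to-end sketch of the stream returned by AddEncodingVideoStream above, assuming the containing writer class is SharpAvi's AviWriter and using a hypothetical SomeBgr32Encoder as the IVideoEncoder implementation:

        // Sketch only: AviWriter and SomeBgr32Encoder are assumptions, not taken from the snippet.
        var writer = new AviWriter("output.avi") { FramesPerSecond = 30 };
        var stream = writer.AddEncodingVideoStream(new SomeBgr32Encoder(640, 480),
                                                   ownsEncoder: true, width: 640, height: 480);

        // WriteFrame receives a top-down BGR32 bitmap; the isKeyFrame flag and length arguments
        // are effectively ignored because the encoder decides which frames are keys.
        var frame = new byte[640 * 480 * 4];
        stream.WriteFrame(true, frame, 0, frame.Length);

        writer.Close();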
        /// <summary>
        /// Attempts to create a new video source from a local video capture device.
        /// </summary>
        /// <param name="videoEncoder">A video encoder that can be used to encode and decode video frames.</param>
        /// <param name="width">Optional. If specified the video capture device will be requested to initialise with this frame
        /// width. If the attempt fails an exception is thrown. If not specified the device's default frame width will
        /// be used.</param>
        /// <param name="height">Optional. If specified the video capture device will be requested to initialise with this frame
        /// height. If the attempt fails an exception is thrown. If not specified the device's default frame height will
        /// be used.</param>
        /// <param name="fps">Optional. If specified the video capture device will be requested to initialise with this frame
        /// rate. If the attempt fails an exception is thrown. If not specified the device's default frame rate will
        /// be used.</param>
        public WindowsVideoEndPoint(IVideoEncoder videoEncoder,
                                    string videoDeviceID = null,
                                    uint width           = 0,
                                    uint height          = 0,
                                    uint fps             = 0)
        {
            _videoEncoder  = videoEncoder;
            _videoDeviceID = videoDeviceID;
            _width         = width;
            _height        = height;
            _fpsNumerator  = fps;

            _mediaCapture         = new MediaCapture();
            _mediaCapture.Failed += VideoCaptureDevice_Failed;
            _videoFormatManager   = new MediaFormatManager <VideoFormat>(videoEncoder.SupportedFormats);
        }
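A minimal usage sketch of the endpoint above. TestVp8Encoder is a hypothetical IVideoEncoder implementation, and StartVideo is assumed from SIPSorcery's IVideoSource contract.

        // Sketch only: TestVp8Encoder stands in for any real IVideoEncoder implementation.
        var winVideoEP = new WindowsVideoEndPoint(new TestVp8Encoder(), videoDeviceID: null,
                                                  width: 640, height: 480, fps: 30);

        // With non-zero width/height/fps the capture device is asked for 640x480 at 30 fps;
        // leaving them at 0 falls back to the device defaults, as described above.
        await winVideoEP.StartVideo();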
 public VideoLiveAndRecordProvider(ILoggerFacade logger, IAudioDeviceManager audioDeviceManager, int defaultMicrophoneID)
 {
     _logger             = logger;
     _aacEncoder         = new AacEncoder();
     _videoEncoder       = new H264VideoEncoder();
     _audioDeviceManager = audioDeviceManager;
     _useAudioDevice     = _audioDeviceManager.GetAudioDeviceById(defaultMicrophoneID);
     if (_useAudioDevice == null)
     {
         _useAudioDevice = _audioDeviceManager.GetAudioDevices().First();
     }
     _aacEncoder.SetAudioDataSource(_useAudioDevice);
     _videoRecord        = new Record.Record(_videoEncoder, _aacEncoder);
     _videoLiveBroadcast = new LiveBroadcast.LiveBroadcast(_videoEncoder, _aacEncoder);
     _videoLiveBroadcast.OnNetworkInterruption          += _videoLiveBroadcast_OnNetworkInterruption;
     _videoLiveBroadcast.OnNetworkReconnectionFailed    += _videoLiveBroadcast_OnNetworkReconnectionFailed;
     _videoLiveBroadcast.OnNetworkReconnectionSucceeded += _videoLiveBroadcast_OnNetworkReconnectionSucceeded;
 }
        /// <summary>
        /// Creates a new instance of <see cref="SingleThreadedVideoEncoderWrapper"/>.
        /// </summary>
        /// <param name="encoderFactory">
        /// Factory for creating an encoder instance.
        /// It will be invoked on the same thread as all subsequent operations of the <see cref="IVideoEncoder"/> interface.
        /// </param>
        public SingleThreadedVideoEncoderWrapper(Func<IVideoEncoder> encoderFactory)
        {
            Contract.Requires(encoderFactory != null);

            this.thread = new Thread(RunDispatcher)
                {
                    IsBackground = true,
                    Name = typeof(SingleThreadedVideoEncoderWrapper).Name
                };
            var dispatcherCreated = new AutoResetEvent(false);
            thread.Start(dispatcherCreated);
            dispatcherCreated.WaitOne();
            this.dispatcher = Dispatcher.FromThread(thread);

            // TODO: Create encoder on the first frame
            this.encoder = (IVideoEncoder)dispatcher.Invoke(encoderFactory);
            if (encoder == null)
                throw new InvalidOperationException("Encoder factory has created no instance.");
        }
        /// <summary>
        /// Creates a new instance of <see cref="SingleThreadedVideoEncoderWrapper"/>.
        /// </summary>
        /// <param name="encoderFactory">
        /// Factory for creating an encoder instance.
        /// It will be invoked on the same thread as all subsequent operations of the <see cref="IVideoEncoder"/> interface.
        /// </param>
        public SingleThreadedVideoEncoderWrapper(Func <IVideoEncoder> encoderFactory)
        {
            Contract.Requires(encoderFactory != null);

            this.thread = new Thread(RunDispatcher)
            {
                IsBackground = true,
                Name         = typeof(SingleThreadedVideoEncoderWrapper).Name
            };
            var dispatcherCreated = new AutoResetEvent(false);

            thread.Start(dispatcherCreated);
            dispatcherCreated.WaitOne();
            this.dispatcher = Dispatcher.FromThread(thread);

            // TODO: Create encoder on the first frame
            this.encoder = DispatcherInvokeAndPropagateException(encoderFactory);
            if (encoder == null)
            {
                throw new InvalidOperationException("Encoder factory has created no instance.");
            }
        }
Example #22
 public DesktopWindowCollectorTest()
 {
     videoEncoder       = Resolve <IVideoEncoder>();
     aacEncoder         = Resolve <IAacEncoder>();
     audioDeviceManager = Resolve <IAudioDeviceManager>();
 }
Example #23
 public DecoderVideoSink(IVideoEncoder videoDecoder)
 {
     _videoDecoder  = videoDecoder;
     _formatManager = new MediaFormatManager <VideoFormat>(videoDecoder.SupportedFormats);
 }
 public Recorder(string outputFilePath, IVideoEncoder videoEncoder, VideoConfigurator configurator)
 {
     _outputFilePath  = outputFilePath;
     _selectedEncoder = videoEncoder;
     _configurator    = configurator;
 }