Example #1
        public VideoTestPatternSource(IVideoEncoder encoder = null)
        {
            if (encoder != null)
            {
                _videoEncoder  = encoder;
                _formatManager = new MediaFormatManager<VideoFormat>(SupportedFormats);
            }

            var assem          = typeof(VideoTestPatternSource).GetTypeInfo().Assembly;
            var testPatternStm = assem.GetManifestResourceStream(TEST_PATTERN_RESOURCE_PATH);

            if (testPatternStm == null)
            {
                OnVideoSourceError?.Invoke(
                    $"Test pattern embedded resource could not be found, {TEST_PATTERN_RESOURCE_PATH}.");
            }
            else
            {
                // An I420 (YUV 4:2:0) frame occupies width * height * 3/2 bytes.
                _testI420Buffer = new byte[TEST_PATTERN_WIDTH * TEST_PATTERN_HEIGHT * 3 / 2];
                testPatternStm.Read(_testI420Buffer, 0, _testI420Buffer.Length);
                testPatternStm.Close();
                // The timer starts disabled; frame generation begins once the source is started.
                _sendTestPatternTimer = new Timer(GenerateTestPattern, null, Timeout.Infinite, Timeout.Infinite);
                _frameSpacing         = 1000 / DEFAULT_FRAMES_PER_SECOND;
            }
        }
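A usage note: the constructor above only primes the timer; no frames are produced until the source is started. Below is a minimal, hedged sketch that assumes the StartVideo method and OnVideoSourceRawSample event from SIPSorcery's IVideoSource interface (the delegate parameters shown are an assumption):

        // Sketch only: start the test pattern and observe raw I420 frames.
        var testPattern = new VideoTestPatternSource();
        testPattern.OnVideoSourceRawSample += (durationMs, width, height, sample, pixelFormat) =>
        {
            // One frame per timer tick, paced by _frameSpacing.
            Console.WriteLine($"Test pattern frame {width}x{height}, {sample.Length} bytes.");
        };
        await testPattern.StartVideo();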
Example #2
#pragma warning restore CS0067

        public AudioExtrasSource()
        {
            _audioEncoder       = new AudioEncoder();
            _audioFormatManager = new MediaFormatManager<AudioFormat>(_audioEncoder.SupportedFormats);
            _audioOpts          = new AudioSourceOptions {
                AudioSource = AudioSourcesEnum.None
            };
        }
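Because the parameterless constructor defaults the source to AudioSourcesEnum.None, the end point stays silent until a source is selected. A hedged sketch, assuming the SetSource and StartAudio members of AudioExtrasSource:

        // Sketch only: switch from None to generated music, then start.
        var audioSource = new AudioExtrasSource();
        audioSource.SetSource(new AudioSourceOptions { AudioSource = AudioSourcesEnum.Music });
        await audioSource.StartAudio();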
Example #3
#pragma warning disable CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable.
        public FFmpegAudioSource(IAudioEncoder audioEncoder)
#pragma warning restore CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable.
        {
            if (audioEncoder == null)
            {
                throw new ApplicationException("Audio encoder provided is null");
            }

            _audioFormatManager = new MediaFormatManager<AudioFormat>(_supportedAudioFormats);
            _audioEncoder       = audioEncoder;
        }
Example #4
        /// <summary>
        /// Attempts to create a new video source from a local video capture device.
        /// </summary>
        /// <param name="videoEncoder">A video encoder that can be used to encode and decode video frames.</param>
        /// <param name="width">Optional. If specified the video capture device will be requested to initialise with this frame
        /// width. If the attempt fails an exception is thrown. If not specified the device's default frame width will
        /// be used.</param>
        /// <param name="height">Optional. If specified the video capture device will be requested to initialise with this frame
        /// height. If the attempt fails an exception is thrown. If not specified the device's default frame height will
        /// be used.</param>
        /// <param name="fps">Optional. If specified the video capture device will be requested to initialise with this frame
        /// rate. If the attempt fails an exception is thrown. If not specified the device's default frame rate will
        /// be used.</param>
        public WindowsVideoEndPoint(IVideoEncoder videoEncoder,
                                    string videoDeviceID = null,
                                    uint width           = 0,
                                    uint height          = 0,
                                    uint fps             = 0)
        {
            _videoEncoder  = videoEncoder;
            _videoDeviceID = videoDeviceID;
            _width         = width;
            _height        = height;
            _fpsNumerator  = fps;

            _mediaCapture         = new MediaCapture();
            _mediaCapture.Failed += VideoCaptureDevice_Failed;
            _videoFormatManager   = new MediaFormatManager<VideoFormat>(videoEncoder.SupportedFormats);
        }
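A typical construction, sketched under the assumption that VpxVideoEncoder (SIPSorceryMedia.Encoders) is available; any IVideoEncoder implementation works, and per the remarks above the optional arguments throw if the device cannot honour them:

        // Sketch only: request the default capture device at 640x480, 30 fps.
        var winVideo = new WindowsVideoEndPoint(new VpxVideoEncoder(), null, 640, 480, 30);
        await winVideo.StartVideo(); // StartVideo assumed from IVideoSource.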
Example #5
        /// <summary>
        /// Creates a new audio end point that captures and renders audio to/from the default system devices.
        /// </summary>
        /// <param name="audioEncoder">An audio encoder that can be used to encode and decode
        /// specific audio codecs.</param>
        /// <param name="externalSource">Optional. An external source to use in combination with the source
        /// provided by this end point. The application will need to signal which source is active.</param>
        /// <param name="disableSource">Set to true to disable the use of the audio source functionality, i.e.
        /// don't capture input from the microphone.</param>
        /// <param name="disableSink">Set to true to disable the use of the audio sink functionality, i.e.
        /// don't playback audio to the speaker.</param>
        public WindowsAudioEndPoint(IAudioEncoder audioEncoder,
                                    int audioOutDeviceIndex = AUDIO_OUTPUTDEVICE_INDEX,
                                    int audioInDeviceIndex  = AUDIO_INPUTDEVICE_INDEX,
                                    bool disableSource      = false,
                                    bool disableSink        = false)
        {
            logger = SIPSorcery.LogFactory.CreateLogger<WindowsAudioEndPoint>();

            _audioFormatManager = new MediaFormatManager<AudioFormat>(audioEncoder.SupportedFormats);
            _audioEncoder       = audioEncoder;

            _audioOutDeviceIndex = audioOutDeviceIndex;
            _disableSource       = disableSource;
            _disableSink         = disableSink;

            if (!_disableSink)
            {
                // GetHashCode on the enum yields its underlying integer value, i.e. the playback rate in Hz.
                InitPlaybackDevice(_audioOutDeviceIndex, DefaultAudioPlaybackRate.GetHashCode());
            }

            if (!_disableSource)
            {
                if (WaveInEvent.DeviceCount > 0)
                {
                    if (WaveInEvent.DeviceCount > audioInDeviceIndex)
                    {
                        _waveInEvent = new WaveInEvent();
                        _waveInEvent.BufferMilliseconds = AUDIO_SAMPLE_PERIOD_MILLISECONDS;
                        _waveInEvent.NumberOfBuffers    = INPUT_BUFFERS;
                        _waveInEvent.DeviceNumber       = audioInDeviceIndex;
                        _waveInEvent.WaveFormat         = _waveSourceFormat;
                        _waveInEvent.DataAvailable     += LocalAudioSampleAvailable;
                    }
                    else
                    {
                        logger.LogWarning($"The requested audio input device index {audioInDeviceIndex} exceeds the maximum index of {WaveInEvent.DeviceCount - 1}.");
                        OnAudioSourceError?.Invoke($"The requested audio input device index {audioInDeviceIndex} exceeds the maximum index of {WaveInEvent.DeviceCount - 1}.");
                    }
                }
                else
                {
                    logger.LogWarning("No audio capture devices are available.");
                    OnAudioSourceError?.Invoke("No audio capture devices are available.");
                }
            }
        }
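The end point is normally handed to a media session rather than driven directly. A hedged sketch, assuming the ToMediaEndPoints helper and the VoIPMediaSession class that appear in SIPSorcery's examples:

        // Sketch only: default devices feeding a VoIP media session.
        var winAudio = new WindowsAudioEndPoint(new AudioEncoder());
        var mediaSession = new VoIPMediaSession(winAudio.ToMediaEndPoints());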
Example #6
        public FFmpegFileSource(string path, bool repeat, IAudioEncoder audioEncoder)
        {
            if (!File.Exists(path))
            {
                throw new ApplicationException($"Requested path for FFmpeg file source could not be found: {path}.");
            }

            _audioFormatManager = new MediaFormatManager<AudioFormat>(_supportedAudioFormats);
            _videoFormatManager = new MediaFormatManager<VideoFormat>(_supportedVideoFormats);

            _audioEncoder      = audioEncoder;
            _fileSourceDecoder = new FileSourceDecoder(path, repeat);
            _videoEncoder      = new FFmpegVideoEncoder();
            _fileSourceDecoder.OnVideoFrame += FileSourceDecoder_OnVideoFrame;
            _fileSourceDecoder.OnAudioFrame += FileSourceDecoder_OnAudioFrame;
            _fileSourceDecoder.OnEndOfFile  += () =>
            {
                logger.LogDebug($"File source decode complete for {path}.");
                OnEndOfFile?.Invoke();
                _fileSourceDecoder.Dispose();
            };
        }
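A hedged sketch of playing a file once; the file name is hypothetical and StartVideo is assumed from the IVideoSource interface:

        // Sketch only: decode a local file and react when it completes.
        var fileSource = new FFmpegFileSource("demo.mp4", repeat: false, new AudioEncoder());
        fileSource.OnEndOfFile += () => Console.WriteLine("Playback finished.");
        await fileSource.StartVideo();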
Example #7
#pragma warning restore CS0067

        /// <summary>
        /// Creates a new audio end point that captures and renders audio to/from the default system devices via PortAudio.
        /// </summary>
        public PortAudioEndPoint(IAudioEncoder audioEncoder)
        {
            _audioEncoder       = audioEncoder;
            _audioFormatManager = new MediaFormatManager<AudioFormat>(_audioEncoder.SupportedFormats);

            var apiType = PortAudioHostApiType.DirectSound;

            if (Environment.OSVersion.Platform == PlatformID.Unix)
            {
                apiType = PortAudioHostApiType.Alsa;
            }

            var hostApi = PortAudioHostApi.SupportedHostApis.First(x => x.HostApiType == apiType);

            _portAudioOutputDevice = hostApi.DefaultOutputDevice;

            _outputDevicePump = new PortAudioDevicePump(_portAudioOutputDevice, AUDIO_CHANNEL_COUNT,
                                                        new PortAudioSampleFormat(PortAudioSampleFormat.PortAudioNumberFormat.Signed, AUDIO_BYTES_PER_SAMPLE),
                                                        TimeSpan.FromMilliseconds(SAMPLING_PERIOD_MILLISECONDS), AUDIO_SAMPLING_RATE, ReadAudioDataCalback);

            _portAudioInputDevice = hostApi.DefaultInputDevice;

            _inputDevicePump = new PortAudioDevicePump(_portAudioInputDevice, AUDIO_CHANNEL_COUNT,
                                                       new PortAudioSampleFormat(PortAudioSampleFormat.PortAudioNumberFormat.Signed, AUDIO_BYTES_PER_SAMPLE),
                                                       TimeSpan.FromMilliseconds(SAMPLING_PERIOD_MILLISECONDS), AUDIO_SAMPLING_RATE, WriteDataCallback);
        }
Example #8
        public DecoderVideoSink(IVideoEncoder videoDecoder)
        {
            _videoDecoder  = videoDecoder;
            _formatManager = new MediaFormatManager<VideoFormat>(videoDecoder.SupportedFormats);
        }
Example #9
#pragma warning restore CS0067

        /// <summary>
        /// Creates a new video source that can encode and decode samples.
        /// </summary>
        public Vp8NetVideoEncoderEndPoint()
        {
            _formatManager = new MediaFormatManager<VideoFormat>(SupportedFormats);
            _vp8Codec      = new VP8Codec();
        }
Example #10
#pragma warning restore CS0067

        /// <summary>
        /// Creates a new video source that can encode and decode samples.
        /// </summary>
        public VideoEncoderEndPoint()
        {
            _formatManager = new MediaFormatManager<VideoFormat>(SupportedFormats);
            _videoEncoder  = new VpxVideoEncoder();
        }
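Both encoder end points are pass-through converters: raw frames are pushed in, encoded samples come out. A hedged sketch, assuming the ExternalVideoSourceRawSample method and OnVideoSourceEncodedSample event used in SIPSorcery's WebRTC demos (the delegate parameters are an assumption):

        // Sketch only: observe encoded output from the encoder end point.
        var videoEP = new VideoEncoderEndPoint();
        videoEP.OnVideoSourceEncodedSample += (durationRtpUnits, sample) =>
            Console.WriteLine($"Encoded sample, {sample.Length} bytes.");
        // Raw frames from any capture source are then pushed in, e.g.:
        // videoEP.ExternalVideoSourceRawSample(durationMs, width, height, buffer, pixelFormat);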
Example #11
#pragma warning disable CS0067
        //public event EncodedSampleDelegate? OnVideoSourceEncodedSample;
        //public event RawExtVideoSampleDelegate? OnVideoSourceRawExtSample;
        //public event RawVideoSampleDelegate? OnVideoSourceRawSample;
        //public event SourceErrorDelegate? OnVideoSourceError;
#pragma warning restore CS0067

        public FFmpegVideoEndPoint()
        {
            _videoFormatManager = new MediaFormatManager<VideoFormat>(_supportedFormats);
            _ffmpegEncoder      = new FFmpegVideoEncoder();
        }
Example #12
        public FFmpegVideoSource()
        {
            _videoFormatManager = new MediaFormatManager<VideoFormat>(_supportedVideoFormats);
            _videoEncoder       = new FFmpegVideoEncoder();
        }
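The FFmpeg-backed classes above all assume the native FFmpeg libraries have been loaded. A hedged sketch using the package's FFmpegInit.Initialise helper; the log level and library path are illustrative assumptions:

        // Sketch only: load the native FFmpeg binaries once at start-up.
        FFmpegInit.Initialise(FfmpegLogLevelEnum.AV_LOG_VERBOSE, @"C:\ffmpeg\bin");
        var videoSource = new FFmpegVideoSource();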