// Play button handler: reads the connection/format values from the UI
// controls and starts a new audio receiver with them.
private void audioPlayButton_Click(object sender, EventArgs e)
{
    // Network parameters come straight from the address/port/transport controls.
    var network = new NetworkSettings
    {
        LocalAddr = audioAddrTextBox.Text,
        LocalPort = (int)audioPortNumeric.Value,
        TransportMode = (TransportMode)transportComboBox.SelectedItem,
    };

    // Encoder parameters; playback targets the currently selected
    // DirectSound device (empty id when none is selected).
    var encoder = new AudioEncoderSettings
    {
        SampleRate = (int)sampleRateNumeric.Value,
        Channels = (int)channelsNumeric.Value,
        Encoding = "ulaw",
        DeviceId = currentDirectSoundDeviceInfo?.Guid.ToString() ?? "",
    };

    audioReceiver = new AudioReceiver();
    audioReceiver.SetWaveformPainter(this.waveformPainter1);
    audioReceiver.Setup(encoder, network);
    audioReceiver.Play();
}
// Writer that feeds one input slot (iSource) of a shared mixing source.
// Encoder settings are derived from the mixer's PCM configuration.
public MixingWriter(MixingSource mixer, int iSource)
{
    this.mixer = mixer;
    this.iSource = iSource;
    this.m_settings = new AudioEncoderSettings(mixer.PCM);

    // Start at the beginning of the stream, no buffer yet, unity gain.
    this.samplePos = 0;
    this.mixbuff = null;
    this.volume = 1.0f;
}
// Creates a LAME MP3 encoder writing to the given path/stream.
// FIX: the old message claimed "16 & 32 bits" were supported while the check
// rejects everything but 16-bit (32-bit support is commented out); the
// paramName also referred to a nonexistent "format" parameter.
public LAMEEncoder(string path, Stream IO, AudioEncoderSettings settings)
{
    // Only 16-bit PCM input is currently accepted.
    if (settings.PCM.BitsPerSample != 16)// && pcm.BitsPerSample != 32)
    {
        throw new ArgumentOutOfRangeException(nameof(settings), "Only 16 bits samples supported");
    }

    m_settings = settings;
    _path = path;
    _IO = IO;
}
// Prepares the audio receiver for the given screencast channel.
// Does nothing when the channel carries no audio media info.
private void SetupAudio(ScreencastChannelInfo audioChannelInfo)
{
    tracer.Verb("ScreenCastControl::SetupAudio(...)");

    var audioInfo = audioChannelInfo.MediaInfo as AudioChannelInfo;
    if (audioInfo == null)
    {
        return;
    }

    // TCP streams are reached through the known server address rather
    // than the address advertised in the channel info.
    var addr = audioChannelInfo.Transport == TransportMode.Tcp
        ? ServerAddr
        : audioChannelInfo.Address;

    AudioReceiver = new AudioReceiver();

    var networkPars = new NetworkSettings
    {
        LocalAddr = addr,
        LocalPort = audioChannelInfo.Port,
        TransportMode = audioChannelInfo.Transport,
        SSRC = audioChannelInfo.SSRC,
    };

    // Best effort: pick the first DirectSound output device; fall back to
    // an empty id (device enumeration can throw on broken audio stacks).
    var deviceId = "";
    try
    {
        deviceId = NAudio.Wave.DirectSoundOut.Devices.FirstOrDefault()?.Guid.ToString() ?? "";
    }
    catch (Exception ex)
    {
        tracer.Error(ex);
    }

    var audioPars = new AudioEncoderSettings
    {
        SampleRate = audioInfo.SampleRate,
        Channels = audioInfo.Channels,
        Encoding = "ulaw",
        DeviceId = deviceId,
    };

    AudioReceiver.Setup(audioPars, networkPars);
}
// Populates the default capture/encoder/stream settings used until the
// user overrides them.
private void InitMediaSettings()
{
    var fullHd = new Size(1920, 1080);

    // Capture defaults: DXGI desktop duplication, 1080p @ 30 fps, mouse included.
    screenCaptureDeviceDescr = new ScreenCaptureDevice
    {
        Resolution = fullHd,
        CaptureMouse = true,
        AspectRatio = true,
        CaptureType = VideoCaptureType.DXGIDeskDupl,
        UseHardware = true,
        Fps = 30,
    };

    // Video encoder defaults: H.264 Main profile, CBR 2.5 Mbit (5 Mbit peak), low latency.
    videoEncoderSettings = new VideoEncoderSettings
    {
        Resolution = fullHd,
        Encoder = VideoEncoderMode.H264,
        Profile = H264Profile.Main,
        BitrateMode = BitrateControlMode.CBR,
        Bitrate = 2500,
        MaxBitrate = 5000,
        FrameRate = 30,
        LowLatency = true,
    };

    videoSettings = new VideoStreamSettings
    {
        Enabled = true,
        SessionId = "video_" + Guid.NewGuid().ToString(),
        NetworkParams = new NetworkSettings(),
        CaptureDescription = null,
        EncodingParams = videoEncoderSettings,
    };

    // Audio defaults: G.711 u-law mono at 8 kHz.
    audioEncoderSettings = new AudioEncoderSettings
    {
        SampleRate = 8000,
        Channels = 1,
        Encoding = "PCMU",
    };

    audioSettings = new AudioStreamSettings
    {
        Enabled = true,
        SessionId = "audio_" + Guid.NewGuid().ToString(),
        NetworkParams = new NetworkSettings(),
        CaptureParams = new AudioCaptureSettings(),
        EncodingParams = audioEncoderSettings,
    };
}
// Renders PCM audio via DirectSound on the given control's window.
// Uses a cyclic secondary buffer of 'delay' milliseconds with three
// notification points so the writer can refill thirds of the buffer
// as the play cursor leaves them.
public DirectSoundOut(Control owner, AudioPCMConfig pcm, int delay)
{
    this.m_settings = new AudioEncoderSettings(pcm);
    //buffer = new CyclicBuffer(44100*4/10);
    //output = new CycilcBufferOutputStream(buffer);
    //input = new CycilcBufferInputStream(buffer);
    dSound = new Device();
    dSound.SetCooperativeLevel(owner, CooperativeLevel.Priority);

    // Describe the wave format straight from the PCM configuration.
    format.AverageBytesPerSecond = pcm.SampleRate * pcm.BlockAlign;
    format.BitsPerSample = (short)pcm.BitsPerSample;
    format.BlockAlign = (short)pcm.BlockAlign;
    format.Channels = (short)pcm.ChannelCount;
    format.SamplesPerSecond = pcm.SampleRate;
    format.FormatTag = WaveFormatTag.Pcm;

    // Secondary buffer size in bytes for 'delay' milliseconds of audio.
    SecBufByteSize = delay * pcm.SampleRate * pcm.BlockAlign / 1000;
    description.Format = format;
    description.BufferBytes = SecBufByteSize;
    description.CanGetCurrentPosition = true;
    description.ControlPositionNotify = true;
    //description.ControlVolume = true;
    description.GlobalFocus = true;
    secondaryBuffer = new SecondaryBuffer(description, dSound);
    //secondaryBuffer.Volume = 100;
    notify = new Notify(secondaryBuffer);

    // Signal at the start and at each third of the buffer.
    BufferPositionNotify[] bufferPositions = new BufferPositionNotify[3];
    bufferPositions[0].Offset = 0;
    bufferPositions[0].EventNotifyHandle = SecBufNotifyAtBegin.Handle;
    bufferPositions[1].Offset = SecBufByteSize / 3;
    bufferPositions[1].EventNotifyHandle = SecBufNotifyAtOneThird.Handle;
    bufferPositions[2].Offset = 2 * SecBufByteSize / 3;
    bufferPositions[2].EventNotifyHandle = SecBufNotifyAtTwoThirds.Handle;
    notify.SetNotificationPositions(bufferPositions);

    // Staging stream sized to one full secondary buffer.
    pcmStream = new MemoryStream(SecBufByteSize);
    SecBufWaitHandles = new WaitHandle[] { SecBufNotifyAtBegin, SecBufNotifyAtOneThird, SecBufNotifyAtTwoThirds };
    //wavoutput = new WAVWriter("", output, pcm);
}
/// <summary>
/// Creates a new WASAPI Output
/// </summary>
/// <param name="device">Device to use</param>
/// <param name="shareMode">Shared or exclusive audio client mode.</param>
/// <param name="useEventSync">true if sync is done with event. false use sleep.</param>
/// <param name="latency">Requested latency in milliseconds.</param>
/// <param name="pcm">PCM layout (sample rate, bit depth, channels) to render.</param>
/// <exception cref="NotSupportedException">The device does not support the requested PCM format.</exception>
public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency, AudioPCMConfig pcm)
{
    this.m_settings = new AudioEncoderSettings(pcm);
    this.audioClient = device.AudioClient;
    this.shareMode = shareMode;
    this.isUsingEventSync = useEventSync;
    this.latencyMilliseconds = latency;

    // Ask the device whether it can render this exact format; a close match
    // is not accepted — any mismatch is treated as unsupported.
    this.outputFormat = new NAudio.Wave.WaveFormatExtensible(pcm.SampleRate, pcm.BitsPerSample, pcm.ChannelCount);
    NAudio.Wave.WaveFormatExtensible closestSampleRateFormat;
    if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
    {
        throw new NotSupportedException("PCM format mismatch");
    }

    // Initialize the audio client before querying the real buffer size.
    Init();
    bufferFrameCount = audioClient.BufferSize;

    // Double-buffered reads, each sized to one device buffer.
    readBuffers = new AudioBuffer[2];
    readBuffers[0] = new AudioBuffer(pcm, bufferFrameCount);
    readBuffers[1] = new AudioBuffer(pcm, bufferFrameCount);
    //if (this.shareMode == AudioClientShareMode.Exclusive)
    //    this.latencyMilliseconds = (int)(this.audioClient.DefaultDevicePeriod / 10000);
}
// LossyWAV pre-processor: splits audio into a lossy stream (audioDest) and a
// correction stream (lwcdfDest). 'quality' (0..10) is interpolated into the
// noise-threshold / SNR / bits-to-keep / clipping tables below.
// Throws when either destination's bit depth is incompatible with Settings.PCM.
public LossyWAVWriter(IAudioDest audioDest, IAudioDest lwcdfDest, double quality, AudioEncoderSettings settings)
{
    _audioDest = audioDest;
    _lwcdfDest = lwcdfDest;
    m_settings = settings;

    // The lossy output may not exceed our bit depth; the correction stream
    // must match it exactly.
    if (_audioDest != null && _audioDest.Settings.PCM.BitsPerSample > Settings.PCM.BitsPerSample)
    {
        throw new Exception("audio parameters mismatch");
    }
    if (_lwcdfDest != null && _lwcdfDest.Settings.PCM.BitsPerSample != Settings.PCM.BitsPerSample)
    {
        throw new Exception("audio parameters mismatch");
    }

    // Quality tables indexed by floor(quality); fractional quality is
    // linearly interpolated via interpolate_param below.
    int quality_integer = (int)Math.Floor(quality);
    fft_analysis_string = new string[4] { "0100010", "0110010", "0111010", "0111110" };
    bool[] quality_auto_fft32_on = { false, false, false, true, true, true, true, true, true, true, true };
    double[] quality_noise_threshold_shifts = { 20, 16, 9, 6, 3, 0, -2.4, -4.8, -7.2, -9.6, -12 };
    double[] quality_signal_to_noise_ratio = { -18, -22, -23.5, -23.5, -23.5, -25, -28, -31, -34, -37, -40 };
    double[] quality_dynamic_minimum_bits_to_keep = { 2.5, 2.75, 3.00, 3.25, 3.50, 3.75, 4.0, 4.25, 4.5, 4.75, 5.00 };
    double[] quality_maximum_clips_per_channel = { 3, 3, 3, 3, 2, 1, 0, 0, 0, 0, 0 };

    this_analysis_number = 2;
    impulse = quality_auto_fft32_on[quality_integer];
    linkchannels = false;

    // Interpolated parameters are rounded to 3 decimal places.
    noise_threshold_shift = Math.Round(interpolate_param(quality_noise_threshold_shifts, quality) * 1000) / 1000;
    snr_value = Math.Round(interpolate_param(quality_signal_to_noise_ratio, quality) * 1000) / 1000;
    dynamic_minimum_bits_to_keep = Math.Round(interpolate_param(quality_dynamic_minimum_bits_to_keep, quality) * 1000) / 1000;
    maximum_clips_per_channel = (int)Math.Round(interpolate_param(quality_maximum_clips_per_channel, quality));

    scaling_factor = 1.0;
    // Noise shaping scales linearly with quality and is off at quality 0.
    shaping_factor = Math.Min(1, quality / 10);
    shaping_is_on = shaping_factor > 0;

    _audioBuffer = new AudioBuffer(Settings.PCM, 256);
}
// Convenience overload: encode to a file path with no explicit output stream.
public LAMEEncoder(string path, AudioEncoderSettings settings)
    : this(path, null, settings)
{
}
// No-op audio destination: stores the settings and discards all audio.
// 'path' is accepted for interface compatibility but never used here.
public DummyWriter(string path, AudioEncoderSettings settings)
{
    m_settings = settings;
}
// Convenience overload: VBR encode to a file path with no explicit output stream.
public LAMEEncoderVBR(string path, AudioEncoderSettings settings)
    : base(path, null, settings)
{
}
// Configures the outgoing audio stream: opens an encoder between the capture
// format and the requested settings, then wires up and starts an RTP sender
// for the configured transport. On success IsStreaming becomes true and the
// caller's networkSettings.SSRC is filled in.
public void Setup(AudioEncoderSettings encoderSettings, NetworkSettings networkSettings)
{
    // FIX: log message previously said "Start(...)" although this method is Setup().
    logger.Debug("AudioStreamer::Setup(...) ");

    this.Id = "AudioStreamer_" + Guid.NewGuid().ToString();
    this.EncoderSettings = encoderSettings;
    this.NetworkSettings = networkSettings;

    try
    {
        var waveFormat = audioSource.WaveFormat;

        // Resample/encode from the capture format to the requested settings.
        audioResampler = new AudioEncoder();
        var captureParams = new AudioEncoderSettings
        {
            SampleRate = waveFormat.SampleRate,
            Channels = waveFormat.Channels,
        };
        audioResampler.Open(captureParams, encoderSettings);

        session = new PCMUSession();
        if (networkSettings.TransportMode == TransportMode.Tcp)
        {
            RtpSender = new RtpTcpSender(session);
        }
        else if (networkSettings.TransportMode == TransportMode.Udp)
        {
            RtpSender = new RtpUdpSender(session);
        }
        else
        {
            throw new FormatException("NotSupportedFormat " + networkSettings.TransportMode);
        }

        audioSource.DataAvailable += AudioSource_DataAvailable;
        RtpSender.Setup(networkSettings);
        // Publish the session's SSRC back through the caller's settings object.
        networkSettings.SSRC = session.SSRC;
        RtpSender.Start();

        IsStreaming = true;
        OnStateChanged();
    }
    catch (Exception ex)
    {
        // NOTE(review): failures are swallowed after logging (no rethrow), so
        // callers cannot distinguish success from failure — confirm this is
        // intentional; AudioReceiver.Setup rethrows in the same situation.
        logger.Error(ex);
        Close();
    }
}
// Sets up decoding and DirectSound playback for an incoming RTP audio stream,
// then opens the RTP receiver for the configured transport.
// Throws when no DirectSound device is available; cleans up and rethrows on
// any failure.
public void Setup(AudioEncoderSettings inputPars, NetworkSettings networkPars)
{
    logger.Debug("AudioReceiver::Setup(...)");
    try
    {
        decoder = new AudioDecoder();
        decoder.Open(inputPars);

        // Playback format is fixed: 8 kHz, 16-bit, mono (matches the PCMU payload).
        waveFormat = new WaveFormat(8000, 16, 1);

        var _deviceId = inputPars.DeviceId;
        Guid deviceId = Guid.Empty;
        if (!string.IsNullOrEmpty(_deviceId))
        {
            Guid.TryParse(_deviceId, out deviceId);
        }

        // Prefer the requested device; otherwise fall back to the first available one.
        DirectSoundDeviceInfo deviceInfo = null;
        var DSDevices = DirectSoundOut.Devices;
        if (DSDevices != null && DSDevices.Count() > 0)
        {
            //DirectSoundOut.DSDEVID_DefaultPlayback
            deviceInfo = DSDevices.FirstOrDefault(d => d.Guid == deviceId) ?? DSDevices.FirstOrDefault();
        }

        // FIX: removed dead "if (deviceId == null)" check — deviceId is a
        // non-nullable Guid, so that comparison was always false; the
        // missing-device case is handled by the deviceInfo branch below.
        if (deviceInfo != null)
        {
            logger.Info(deviceInfo.Description + " " + deviceInfo.ModuleName + " " + deviceInfo.Guid);

            wavePlayer = new NAudio.Wave.DirectSoundOut(deviceInfo.Guid);
            wavePlayer.PlaybackStopped += WavePlayer_PlaybackStopped;

            // 300 ms of jitter buffer; overflow drops data rather than throwing.
            waveBuffer = new BufferedWaveProvider(waveFormat)
            {
                BufferDuration = TimeSpan.FromMilliseconds(300),
                DiscardOnBufferOverflow = true
            };

            // Chain: buffer -> volume control -> level metering -> player.
            volumeProvider = new VolumeSampleProvider(waveBuffer.ToSampleProvider());
            var meteringSampleProvider = new MeteringSampleProvider(volumeProvider);
            meteringSampleProvider.StreamVolume += PostVolumeMeter_StreamVolume;
            wavePlayer.Init(meteringSampleProvider);
            bufferLost = false;
        }
        else
        {
            throw new Exception("DirectSound devices is not available...");
        }

        session = new PCMUSession();
        if (networkPars.TransportMode == TransportMode.Tcp)
        {
            rtpReceiver = new RtpTcpReceiver(session);
        }
        else
        {
            rtpReceiver = new RtpUdpReceiver(session);
        }

        session.SSRC = networkPars.SSRC;
        rtpReceiver.Open(networkPars);
        rtpReceiver.RtpPacketReceived += RtpReceiver_RtpPacketReceived;
    }
    catch (Exception ex)
    {
        logger.Debug(ex);
        CleanUp();
        throw;
    }
}
// Audio destination streaming to an Icecast server; encoder settings are
// derived from the given PCM configuration.
public IcecastWriter(AudioPCMConfig pcm, IcecastSettingsData settings)
{
    this.m_settings = new AudioEncoderSettings(pcm);
    this.settings = settings;
}
/// <summary>
/// Creates a new audio stream for the specified <see cref="OutputContainer"/>.
/// </summary>
/// <param name="container">The media container.</param>
/// <param name="config">The stream settings.</param>
/// <returns>The new audio stream.</returns>
/// <exception cref="InvalidOperationException">
/// The container has no default audio codec, the encoder cannot be found,
/// or the found encoder is not an audio encoder.
/// </exception>
public static OutputStream<AudioFrame> CreateAudio(OutputContainer container, AudioEncoderSettings config)
{
    // Resolve the codec id: container default, unless the config overrides it.
    var codecId = config.Codec == AudioCodec.Default ? container.Pointer->oformat->audio_codec : (AVCodecID)config.Codec;

    if (codecId == AVCodecID.AV_CODEC_ID_NONE)
    {
        throw new InvalidOperationException("The media container doesn't support audio!");
    }

    var codec = ffmpeg.avcodec_find_encoder(codecId);

    if (codec == null)
    {
        throw new InvalidOperationException($"Cannot find an encoder with the {codecId}!");
    }

    if (codec->type != AVMediaType.AVMEDIA_TYPE_AUDIO)
    {
        throw new InvalidOperationException($"The {codecId} encoder doesn't support audio!");
    }

    // Create the stream and fill the codec context from the settings.
    var audioStream = ffmpeg.avformat_new_stream(container.Pointer, codec);
    var codecContext = audioStream->codec;
    codecContext->time_base = config.TimeBase;
    codecContext->codec_id = codecId;
    codecContext->codec_type = AVMediaType.AVMEDIA_TYPE_AUDIO;
    codecContext->bit_rate = config.Bitrate;
    codecContext->sample_rate = config.SampleRate;
    codecContext->frame_size = config.SamplesPerFrame;
    codecContext->sample_fmt = (AVSampleFormat)config.SampleFormat;
    codecContext->channels = config.Channels;
    // Channel layout derived from the channel count (e.g. mono/stereo defaults).
    codecContext->channel_layout = (ulong)ffmpeg.av_get_default_channel_layout(config.Channels);

    // Some container formats require codec parameters in a global header.
    if ((container.Pointer->oformat->flags & ffmpeg.AVFMT_GLOBALHEADER) != 0)
    {
        codecContext->flags |= ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER;
    }

    // Open the encoder with any extra codec options; the dictionary is updated
    // with options the codec did not consume.
    // NOTE(review): the avcodec_open2 return code is not checked — a failed
    // open is silently ignored here; confirm whether that is intentional.
    var dict = new FFDictionary(config.CodecOptions);
    var ptr = dict.Pointer;

    ffmpeg.avcodec_open2(codecContext, codec, &ptr);

    dict.Update(ptr);

    return (new OutputStream<AudioFrame>(audioStream, container));
}
// Builds a StreamSession pre-populated with the application defaults:
// TCP transport, video enabled (1080p H.264 CBR), audio present but disabled.
public static StreamSession Default()
{
    // Video encoder defaults: H.264 Main profile, CBR 2.5 Mbit (5 Mbit peak),
    // 30 fps, low-latency mode.
    var videoEncoder = new VideoEncoderSettings
    {
        Width = 1920,
        Height = 1080,
        EncoderFormat = VideoCodingFormat.H264,
        Profile = H264Profile.Main,
        BitrateMode = BitrateControlMode.CBR,
        Bitrate = 2500,
        MaxBitrate = 5000,
        FrameRate = new MediaRatio(30, 1),
        LowLatency = true,
    };

    // Audio encoder defaults: G.711 u-law mono at 8 kHz.
    var audioEncoder = new AudioEncoderSettings
    {
        SampleRate = 8000,
        Channels = 1,
        Encoding = "PCMU",
    };

    return new StreamSession
    {
        StreamName = Environment.MachineName,
        NetworkIpAddress = "0.0.0.0",
        MutlicastAddress = "239.0.0.1",
        CommunicationPort = 0,
        IsMulticast = false,
        TransportMode = TransportMode.Tcp,
        VideoSettings = new VideoStreamSettings
        {
            Enabled = true,
            NetworkSettings = new NetworkSettings(),
            CaptureDevice = null,
            EncoderSettings = videoEncoder,
            StreamFlags = VideoStreamFlags.UseEncoderResoulutionFromSource,
        },
        AudioSettings = new AudioStreamSettings
        {
            Enabled = false,
            NetworkSettings = new NetworkSettings(),
            CaptureDevice = new AudioCaptureDevice(),
            EncoderSettings = audioEncoder,
        },
    };
}
// VBR variant of the LAME encoder; all work is delegated to the base class.
public LAMEEncoderVBR(string path, Stream IO, AudioEncoderSettings settings)
    : base(path, IO, settings)
{
}
// Client worker: connects to the screen-cast WCF service, sets up video and
// audio receivers from the advertised channel infos, then pings the server
// once a second until stopped. State transitions and cleanup happen in the
// finally blocks regardless of how the loop exits.
// FIX: "FrameRate = new MediaRatio(videoInfo.Fps," was left unterminated in
// the original source (syntax error); completed as MediaRatio(fps, 1) to
// match the MediaRatio(30, 1) usage elsewhere in this file.
private void ClientProc()
{
    // Append the port only when one was explicitly supplied.
    var address = "net.tcp://" + ServerAddr + "/ScreenCaster";
    if (this.ServerPort > 0)
    {
        address = "net.tcp://" + ServerAddr + ":" + ServerPort + "/ScreenCaster";
    }

    try
    {
        var uri = new Uri(address);
        this.ClientId = RngProvider.GetRandomNumber().ToString();

        NetTcpSecurity security = new NetTcpSecurity
        {
            Mode = SecurityMode.None,
        };

        var binding = new NetTcpBinding
        {
            // The channel stays open for the whole session; only sends time out.
            ReceiveTimeout = TimeSpan.MaxValue,
            SendTimeout = TimeSpan.FromSeconds(10),
            Security = security,
        };

        factory = new ChannelFactory<IScreenCastService>(binding, new EndpointAddress(uri));
        var channel = factory.CreateChannel();

        try
        {
            var channelInfos = channel.GetChannelInfos();
            if (channelInfos == null)
            {
                logger.Error("channelInfos == null");
                return;
            }

            TransportMode transportMode = TransportMode.Udp;

            var videoChannelInfo = channelInfos.FirstOrDefault(c => c.MediaInfo is VideoChannelInfo);
            if (videoChannelInfo != null)
            {
                transportMode = videoChannelInfo.Transport;
                if (transportMode == TransportMode.Tcp)
                {
                    // TCP channels are exclusive — refuse when someone is connected.
                    if (videoChannelInfo.ClientsCount > 0)
                    {
                        throw new Exception("Server is busy");
                    }
                }

                // In TCP mode connect back to the server address, not the
                // address advertised in the channel info.
                var videoAddr = videoChannelInfo.Address;
                if (transportMode == TransportMode.Tcp)
                {
                    videoAddr = ServerAddr;
                }

                var videoPort = videoChannelInfo.Port;
                var videoInfo = videoChannelInfo.MediaInfo as VideoChannelInfo;
                if (videoInfo != null)
                {
                    var inputPars = new VideoEncoderSettings
                    {
                        Resolution = videoInfo.Resolution,
                        FrameRate = new MediaRatio(videoInfo.Fps, 1),
                    };

                    var outputPars = new VideoEncoderSettings
                    {
                        Resolution = videoInfo.Resolution,
                        FrameRate = videoInfo.Fps,
                    };

                    var networkPars = new NetworkSettings
                    {
                        LocalAddr = videoAddr,
                        LocalPort = videoPort,
                        TransportMode = transportMode,
                        SSRC = videoChannelInfo.SSRC,
                    };

                    VideoReceiver = new VideoReceiver();
                    VideoReceiver.Setup(inputPars, outputPars, networkPars);
                    VideoReceiver.UpdateBuffer += VideoReceiver_UpdateBuffer;
                }
            }

            var audioChannelInfo = channelInfos.FirstOrDefault(c => c.MediaInfo is AudioChannelInfo);
            if (audioChannelInfo != null)
            {
                var audioInfo = audioChannelInfo.MediaInfo as AudioChannelInfo;
                if (audioInfo != null)
                {
                    var audioAddr = audioChannelInfo.Address;
                    transportMode = audioChannelInfo.Transport;
                    if (transportMode == TransportMode.Tcp)
                    {
                        audioAddr = ServerAddr;
                    }

                    if (transportMode == TransportMode.Tcp)
                    {
                        if (audioChannelInfo.ClientsCount > 0)
                        {
                            throw new Exception("Server is busy");
                        }
                    }

                    var audioPort = audioChannelInfo.Port;
                    AudioReceiver = new AudioReceiver();

                    var networkPars = new NetworkSettings
                    {
                        LocalAddr = audioAddr,
                        LocalPort = audioPort,
                        TransportMode = transportMode,
                        SSRC = audioChannelInfo.SSRC,
                    };

                    // Best effort: default to the first DirectSound playback device.
                    var audioDeviceId = "";
                    try
                    {
                        var devices = DirectSoundOut.Devices;
                        var device = devices.FirstOrDefault();
                        audioDeviceId = device?.Guid.ToString() ?? "";
                    }
                    catch (Exception ex)
                    {
                        logger.Error(ex);
                    }

                    var audioPars = new AudioEncoderSettings
                    {
                        SampleRate = audioInfo.SampleRate,
                        Channels = audioInfo.Channels,
                        Encoding = "ulaw",
                        DeviceId = audioDeviceId,
                    };

                    AudioReceiver.Setup(audioPars, networkPars);
                }
            }

            if (VideoReceiver != null)
            {
                VideoReceiver.Play();
            }

            if (AudioReceiver != null)
            {
                AudioReceiver.Play();
            }

            running = true;
            State = ClientState.Connected;
            OnStateChanged(State);

            // Keep-alive loop: ping the server roughly once a second until
            // stopped (syncEvent wakes us early on shutdown).
            while (running)
            {
                channel.PostMessage(new ServerRequest { Command = "Ping" });
                syncEvent.WaitOne(1000);
            }
        }
        finally
        {
            running = false;
            State = ClientState.Disconnected;
            OnStateChanged(State);

            // Close the channel gracefully; a faulted channel can only be aborted.
            try
            {
                var c = (IClientChannel)channel;
                if (c.State != CommunicationState.Faulted)
                {
                    c.Close();
                }
                else
                {
                    c.Abort();
                }
            }
            catch (Exception ex)
            {
                logger.Error(ex);
            }
        }
    }
    catch (Exception ex)
    {
        logger.Error(ex);
        State = ClientState.Faulted;
        OnStateChanged(State);
    }
    finally
    {
        Close();
    }
}