/// <summary>
/// Creates an input bound to the given audio endpoint and starts capturing from it.
/// </summary>
/// <param name="ID">The MMDevice ID of the input device to capture from.</param>
/// <exception cref="ArgumentException">Thrown when the device cannot start WASAPI capture.</exception>
public Input(string ID)
{
    // Remember which endpoint this input wraps
    deviceID = ID;

    // Resolve the endpoint from its ID
    MMDeviceEnumerator devices = new MMDeviceEnumerator();
    device = devices.GetDevice(ID);

    // Capture from the device via WASAPI
    waveIn = new WasapiCapture(device);

    // Bytes per sample, derived from the capture format's bit depth
    sampleByteSize = waveIn.WaveFormat.BitsPerSample / 8;

    // Receive captured buffers from the device
    waveIn.DataAvailable += waveIn_DataAvailable;

    // Buffer incoming audio; drop data rather than throw if the buffer fills up
    bufferedWaveProvider = new BufferedWaveProvider(waveIn.WaveFormat);
    bufferedWaveProvider.DiscardOnBufferOverflow = true;

    // Expose the buffered audio as samples and meter the volume pre-fader
    sampleChannel = new SampleChannel(bufferedWaveProvider);
    sampleChannel.PreVolumeMeter += sampleProvider_StreamVolume;

    // Start recording; surface unsupported devices as ArgumentException but
    // keep the original failure as the inner exception for diagnostics
    // (the previous bare catch discarded the root cause entirely).
    try
    {
        waveIn.StartRecording();
    }
    catch (Exception ex)
    {
        throw new ArgumentException("This input device is not supported.", ex);
    }
}
/// <summary>
/// Initializes a reader over an in-memory WAV stream, exposing it as float samples.
/// </summary>
/// <param name="WavStream">The WAV stream to read from.</param>
public AudioStreamReader(Stream WavStream)
{
    lockObject = new object();

    CreateReaderStream(WavStream);

    // Bytes per sample frame in the source vs. the float output
    var sourceFormat = readerStream.WaveFormat;
    sourceBytesPerSample = (sourceFormat.BitsPerSample / 8) * sourceFormat.Channels;

    sampleChannel = new SampleChannel(readerStream, false);
    destBytesPerSample = sampleChannel.WaveFormat.Channels * 4;

    length = SourceToDest(readerStream.Length);
}
/// <summary>
/// Initializes a new instance of AudioFileReader
/// </summary>
/// <param name="fileName">The file to open</param>
public AudioFileReader(string fileName)
{
    // Create the lock before anything else so the instance is never
    // observable without it.
    this.lockObject = new object();
    this.fileName = fileName;
    CreateReaderStream(fileName);
    this.sourceBytesPerSample = (readerStream.WaveFormat.BitsPerSample / 8) * readerStream.WaveFormat.Channels;
    this.sampleChannel = new SampleChannel(readerStream, false);
    // The SampleChannel is created WITHOUT forcing stereo, so its output
    // channel count follows the source: 4 bytes per float sample per channel.
    // (The previous hard-coded 8 assumed stereo and broke Length/Position
    // scaling for mono sources.)
    this.destBytesPerSample = 4 * sampleChannel.WaveFormat.Channels;
    this.length = SourceToDest(readerStream.Length);
}
/// <summary>
/// Initializes a new instance of AudioFileReader
/// </summary>
/// <param name="fileName">The file to open</param>
public AudioFileReader(string fileName)
{
    lockObject = new object();
    this.fileName = fileName;

    CreateReaderStream(fileName);

    // Frame sizes for converting between source byte positions and the
    // float-sample positions exposed by this reader.
    var srcFormat = readerStream.WaveFormat;
    sourceBytesPerSample = (srcFormat.BitsPerSample / 8) * srcFormat.Channels;

    sampleChannel = new SampleChannel(readerStream, false);
    destBytesPerSample = sampleChannel.WaveFormat.Channels * 4;

    length = SourceToDest(readerStream.Length);
}
/// <summary>
/// Opens the given audio file and builds the playback chain:
/// file reader -> volume-controlled sample channel -> post-volume meter.
/// </summary>
/// <param name="fileName">Path of the audio file to open.</param>
/// <returns>The metered sample provider to hand to the output device.</returns>
private ISampleProvider CreateInputStream(string fileName)
{
    audioFileReader = new AudioFileReader(fileName);

    var channel = new SampleChannel(audioFileReader, true);
    channel.PreVolumeMeter += OnPreVolumeMeter;

    // Let callers adjust playback volume through the channel
    setVolumeDelegate = vol => channel.Volume = vol;

    var meter = new MeteringSampleProvider(channel);
    meter.StreamVolume += OnPostVolumeMeter;
    return meter;
}
/// <summary>
/// Initializes the main form and starts quiet playback of the embedded MP3 resource.
/// </summary>
public frmMain()
{
    InitializeComponent();

    // Decode the embedded MP3 resource from memory
    var stream = new MemoryStream(Properties.Resources.music);
    channel = new SampleChannel(new Mp3FileReader(stream));
    channel.Volume = 0.02f;

    _player = new WaveOut { DesiredLatency = 500 };
    _player.Init(channel);
    _player.Play();
    _player.PlaybackStopped += _player_PlaybackStopped;
}
/// <summary>
/// Creates an audio device that wraps the given file, metering volume
/// pre-fader through the sample channel.
/// </summary>
/// <param name="fileName">Path of the audio file to load.</param>
public AudioDevice(string fileName) : this()
{
    ISampleProvider reader = new AudioFileReader(fileName);
    fileWaveStream = (WaveStream)reader;

    // Wrap the samples back into a wave provider and meter them
    SampleToWaveProvider waveProvider = new SampleToWaveProvider(reader);
    sampleChannel = new SampleChannel(waveProvider, true);
    sampleChannel.PreVolumeMeter += OnPreVolumeMeter;
}
/// <summary>
/// Builds the input chain for the given file: plugin wave stream ->
/// volume-controlled sample channel -> post-volume meter.
/// </summary>
/// <param name="fileName">Path of the file to open.</param>
/// <returns>The metered sample provider ready for playback.</returns>
public ISampleProvider CreateInputStream(string fileName)
{
    // 'new' never returns null, so the previous "plugin == null" guard
    // (and its throw) was unreachable dead code and has been removed.
    var plugin = new WaveInputFilePlugin();
    fileWaveStream = plugin.CreateWaveStream(fileName);
    var waveChannel = new NAudio.Wave.SampleProviders.SampleChannel(fileWaveStream);
    // Let callers adjust playback volume through the channel
    setVolumeDelegate = (vol) => waveChannel.Volume = vol;
    waveChannel.PreVolumeMeter += OnPreVolumeMeter;
    var postVolumeMeter = new MeteringSampleProvider(waveChannel);
    postVolumeMeter.StreamVolume += OnPostVolumeMeter;
    return postVolumeMeter;
}
/// <summary>
/// Opens a .wav or .mp3 file and wires its samples into the aggregator.
/// </summary>
/// <param name="fileName">Path of the file to open.</param>
/// <returns>A sample provider that raises a Sample event per sample read.</returns>
/// <exception cref="InvalidOperationException">Thrown for unsupported extensions.</exception>
private ISampleProvider CreateInputStream(string fileName)
{
    // Ordinal, case-insensitive comparison: ".WAV"/".Mp3" are valid too, and
    // extension matching must not depend on the current culture.
    if (fileName.EndsWith(".wav", StringComparison.OrdinalIgnoreCase))
    {
        fileStream = OpenWavStream(fileName);
    }
    else if (fileName.EndsWith(".mp3", StringComparison.OrdinalIgnoreCase))
    {
        fileStream = new Mp3FileReader(fileName);
    }
    else
    {
        throw new InvalidOperationException("Unsupported extension");
    }
    var inputStream = new SampleChannel(fileStream);
    var sampleStream = new NotifyingSampleProvider(inputStream);
    // Feed the left channel of every sample into the visualization aggregator
    sampleStream.Sample += (s, e) => aggregator.Add(e.Left);
    return sampleStream;
}
//WARNFIX
//private readonly WaveFormat waveFormat;

/// <summary>
/// Initializes a new instance of AudioFileReaderRB and selects a chunk
/// converter capable of handling the source format.
/// </summary>
/// <param name="fileName">The file to open</param>
public AudioFileReaderRB(string fileName)
{
    lockObject = new object();
    this.fileName = fileName;
    CreateReaderStream(fileName);

    // Frame sizes for source-byte <-> float-sample position conversion
    var srcFormat = readerStream.WaveFormat;
    sourceBytesPerSample = (srcFormat.BitsPerSample / 8) * srcFormat.Channels;

    sampleChannel = new SampleChannel(readerStream, false);
    destBytesPerSample = 4 * sampleChannel.WaveFormat.Channels;
    length = SourceToDest(readerStream.Length);

    PadWithZeroes = true;
    pan = 0;

    // Pick the first chunk converter that understands the source format.
    var converters = new RockBox.SampleConverters.ISampleChunkConverter[]
    {
        new RockBox.SampleConverters.Mono8SampleChunkConverter(),
        new RockBox.SampleConverters.Stereo8SampleChunkConverter(),
        new RockBox.SampleConverters.Mono16SampleChunkConverter(),
        new RockBox.SampleConverters.Stereo16SampleChunkConverter(),
        new RockBox.SampleConverters.Mono24SampleChunkConverter(),
        new RockBox.SampleConverters.Stereo24SampleChunkConverter(),
        new RockBox.SampleConverters.MonoFloatSampleChunkConverter(),
        new RockBox.SampleConverters.StereoFloatSampleChunkConverter(),
    };
    foreach (var converter in converters)
    {
        if (converter.Supports(srcFormat))
        {
            sampleProvider = converter;
            break;
        }
    }
    if (sampleProvider == null)
    {
        throw new ArgumentException("Unsupported sourceStream format");
    }
}
/// <summary>
/// Streams an MP3 over HTTP: pulls frames from the response, decompresses
/// them with ACM, feeds the buffered provider / sample channel (driving the
/// pre-volume meter), and raises DataAvailable per decoded block. Runs until
/// stopped, the stream ends, or the application shuts down.
/// </summary>
private void StreamMP3()
{
    HttpWebRequest request = null;
    try
    {
        var resp = ConnectionFactory.GetResponse(_source,false, out request);
        var buffer = new byte[16384 * 4]; // needs to be big enough to hold a decompressed frame
        IMp3FrameDecompressor decompressor = null;
        using (var responseStream = resp.GetResponseStream())
        {
            var readFullyStream = new ReadFullyStream(responseStream);
            while (!_stopEvent.WaitOne(10, false) && !MainForm.ShuttingDown)
            {
                // back off while the playout buffer is nearly full
                if (_bufferedWaveProvider != null && _bufferedWaveProvider.BufferLength - _bufferedWaveProvider.BufferedBytes < _bufferedWaveProvider.WaveFormat.AverageBytesPerSecond/4)
                {
                    //Debug.WriteLine("Buffer getting full, taking a break");
                    Thread.Sleep(100);
                }
                else
                {
                    var da = DataAvailable;
                    if (da != null)
                    {
                        Mp3Frame frame;
                        try
                        {
                            frame = Mp3Frame.LoadFromStream(readFullyStream);
                        }
                        catch (EndOfStreamException)
                        {
                            // reached the end of the MP3 file / stream
                            break;
                        }
                        catch (WebException)
                        {
                            // probably we have aborted download from the GUI thread
                            break;
                        }
                        if (decompressor == null || _bufferedWaveProvider == null)
                        {
                            // don't think these details matter too much - just help ACM select the right codec
                            // however, the buffered provider doesn't know what sample rate it is working at
                            // until we have a frame
                            WaveFormat waveFormat = new Mp3WaveFormat(frame.SampleRate, frame.ChannelMode == ChannelMode.Mono ? 1 : 2, frame.FrameLength, frame.BitRate);
                            RecordingFormat = new WaveFormat(frame.SampleRate, 16, frame.ChannelMode == ChannelMode.Mono ? 1 : 2);
                            decompressor = new AcmMp3FrameDecompressor(waveFormat);
                            _bufferedWaveProvider = new BufferedWaveProvider(decompressor.OutputFormat) {BufferDuration = TimeSpan.FromSeconds(5)};
                            _sampleChannel = new SampleChannel(_bufferedWaveProvider);
                            _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                        }
                        int decompressed = decompressor.DecompressFrame(frame, buffer, 0);
                        _bufferedWaveProvider.AddSamples(buffer, 0, decompressed);
                        // drain the sample channel purely to drive the pre-volume meter.
                        // NOTE(review): this requests buffer.Length samples although only
                        // 'decompressed' bytes were just added - appears to rely on Read
                        // returning only what is buffered; confirm.
                        var sampleBuffer = new float[buffer.Length];
                        _sampleChannel.Read(sampleBuffer, 0, buffer.Length);
                        da.Invoke(this, new DataAvailableEventArgs((byte[]) buffer.Clone()));
                        if (WaveOutProvider != null && Listening)
                        {
                            WaveOutProvider.AddSamples(buffer, 0, buffer.Length);
                        }
                    }
                }
                if (_stopEvent.WaitOne(0, false))
                    break;
            }
            AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
            // was doing this in a finally block, but for some reason
            // we are hanging on response stream .Dispose so never get there
            if (decompressor != null)
            {
                decompressor.Dispose();
                decompressor = null;
            }
        }
    }
    catch (Exception ex)
    {
        var af = AudioFinished;
        af?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
        MainForm.LogExceptionToFile(ex,"MP3Stream");
    }
    finally
    {
        // best-effort abort of any in-flight request
        try
        {
            request?.Abort();
        }
        catch { }
        request = null;
    }
}
// Worker thread
/// <summary>
/// Background receive loop for a Kinect network multipart stream. Connects
/// over HTTP, scans for "--myboundary" packet boundaries, and dispatches each
/// packet by its Content-type header: image/jpeg -> NewFrame, audio/raw ->
/// buffered wave provider + DataAvailable, alert/text -> AlertHandler.
/// Reconnects after errors until stopped or the application shuts down.
/// </summary>
private void WorkerThread()
{
    // buffer to read stream
    var buffer = new byte[BufSize];
    var encoding = new ASCIIEncoding();
    var res = ReasonToFinishPlaying.StoppedByUser;
    while (!_stopEvent.WaitOne(0, false) && !MainForm.ShuttingDown)
    {
        // reset reload event
        _reloadEvent.Reset();
        // HTTP web request
        HttpWebRequest request = null;
        // web responce
        WebResponse response = null;
        // stream for MJPEG downloading
        Stream stream = null;
        // boundary betweeen images (string and binary versions)
        try
        {
            // create request
            request = (HttpWebRequest)WebRequest.Create(_source);
            // set user agent
            if (_userAgent != null)
            {
                request.UserAgent = _userAgent;
            }
            // set proxy
            if (_proxy != null)
            {
                request.Proxy = _proxy;
            }
            if (_usehttp10)
                request.ProtocolVersion = HttpVersion.Version10;
            // set timeout value for the request
            request.Timeout = request.ServicePoint.ConnectionLeaseTimeout = request.ServicePoint.MaxIdleTime = _requestTimeout;
            request.AllowAutoRedirect = true;
            // set login and password
            if ((_login != null) && (_password != null) && (_login != string.Empty))
                request.Credentials = new NetworkCredential(_login, _password);
            // set connection group name
            if (_useSeparateConnectionGroup)
                request.ConnectionGroupName = GetHashCode().ToString();
            // get response
            response = request.GetResponse();
            // get response stream
            stream = response.GetResponseStream();
            stream.ReadTimeout = _requestTimeout;
            byte[] boundary = encoding.GetBytes("--myboundary");
            byte[] sep = encoding.GetBytes("\r\n\r\n");
            // loop
            int startPacket = -1;
            int endPacket = -1;
            int ttl = 0;
            bool hasaudio = false;
            while ((!_stopEvent.WaitOne(0, false)) && (!_reloadEvent.WaitOne(0, false)))
            {
                int read;
                if ((read = stream.Read(buffer, ttl, ReadSize)) == 0)
                    throw new ApplicationException();
                ttl += read;
                // find the opening boundary first, then the closing one after it
                if (startPacket==-1)
                {
                    startPacket = ByteArrayUtils.Find(buffer, boundary, 0, ttl);
                }
                else
                {
                    if (endPacket == -1)
                    {
                        endPacket = ByteArrayUtils.Find(buffer, boundary, startPacket + boundary.Length, ttl-(startPacket + boundary.Length));
                    }
                }
                var nf = NewFrame;
                if (startPacket>-1 && endPacket>startPacket)
                {
                    // parse the packet's MIME header to extract its Content-type
                    int br = ByteArrayUtils.Find(buffer, sep, startPacket, 100);
                    if (br != -1)
                    {
                        var arr = new byte[br];
                        // NOTE(review): arr is sized br but only br - startPacket bytes
                        // are copied, leaving the tail zeroed - confirm intended.
                        System.Array.Copy(buffer, startPacket, arr, 0, br - startPacket);
                        string s = Encoding.ASCII.GetString(arr);
                        int k = s.IndexOf("Content-type: ", StringComparison.Ordinal);
                        if (k!=-1)
                        {
                            s = s.Substring(k+14);
                            s = s.Substring(0,s.IndexOf("\r\n", StringComparison.Ordinal));
                            s = s.Trim();
                        }
                        switch (s)
                        {
                            case "image/jpeg":
                                try
                                {
                                    using (var ms = new MemoryStream(buffer, br + 4, endPacket - br - 8))
                                    {
                                        using (var bmp = (Bitmap)Image.FromStream(ms))
                                        {
                                            var dae = new NewFrameEventArgs(bmp);
                                            nf.Invoke(this, dae);
                                        }
                                    }
                                }
                                catch (Exception ex)
                                {
                                    //sometimes corrupted packets come through...
                                    MainForm.LogExceptionToFile(ex,"KinectNetwork");
                                }
                                break;
                            case "audio/raw":
                                if (!hasaudio)
                                {
                                    hasaudio = true;
                                    //fixed 16khz 1 channel format
                                    RecordingFormat = new WaveFormat(16000, 16, 1);
                                    _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500) };
                                    _sampleChannel = new SampleChannel(_waveProvider);
                                    _sampleChannel.PreVolumeMeter +=SampleChannelPreVolumeMeter;
                                    // notify listeners exactly once that audio is present
                                    if (HasAudioStream != null)
                                    {
                                        HasAudioStream(this, EventArgs.Empty);
                                        HasAudioStream = null;
                                    }
                                }
                                var da = DataAvailable;
                                if (da != null)
                                {
                                    int l = endPacket - br - 8;
                                    var data = new byte[l];
                                    int d;
                                    using (var ms = new MemoryStream(buffer, br+4, l))
                                    {
                                        d = ms.Read(data, 0, l);
                                    }
                                    if (d > 0)
                                    {
                                        _waveProvider.AddSamples(data, 0, data.Length);
                                        if (Listening)
                                        {
                                            // NOTE(review): WaveOutProvider is not null-checked
                                            // here, unlike other sources in this file - confirm
                                            // it is always assigned while Listening is true.
                                            WaveOutProvider.AddSamples(data, 0, data.Length);
                                        }
                                        //forces processing of volume level without piping it out
                                        var sampleBuffer = new float[data.Length];
                                        int r = _sampleChannel.Read(sampleBuffer, 0, data.Length);
                                        da(this, new DataAvailableEventArgs((byte[]) data.Clone(),r));
                                    }
                                }
                                break;
                            case "alert/text":
                                // code to handle alert notifications goes here
                                if (AlertHandler != null)
                                {
                                    int dl = endPacket - br - 8;
                                    var data2 = new byte[dl];
                                    using (var ms = new MemoryStream(buffer, br + 4, dl))
                                    {
                                        ms.Read(data2, 0, dl);
                                    }
                                    string alerttype = Encoding.ASCII.GetString(data2);
                                    AlertHandler(this, new AlertEventArgs(alerttype));
                                }
                                break;
                        }
                    }
                    // shift the unconsumed tail of the buffer to the front
                    ttl -= endPacket;
                    System.Array.Copy(buffer, endPacket, buffer, 0, ttl);
                    startPacket = -1;
                    endPacket = -1;
                }
            }
        }
        catch (ApplicationException)
        {
            // do nothing for Application Exception, which we raised on our own
            // wait for a while before the next try
            Thread.Sleep(250);
        }
        catch (ThreadAbortException)
        {
            break;
        }
        catch (Exception ex)
        {
            // provide information to clients
            MainForm.LogExceptionToFile(ex, "KinectNetwork");
            res = ReasonToFinishPlaying.DeviceLost;
            break;
            // wait for a while before the next try
            //Thread.Sleep(250);
        }
        finally
        {
            request?.Abort();
            stream?.Flush();
            stream?.Close();
            response?.Close();
        }
        // need to stop ?
        if (_stopEvent.WaitOne(0, false))
            break;
    }
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(res));
}
/// <summary>
/// VLC sound-format callback: on first invocation, builds the 16-bit
/// recording format plus the buffered provider and metered sample channel.
/// </summary>
/// <param name="sf">The sound format reported by the stream.</param>
/// <returns>The same format, unchanged.</returns>
private SoundFormat SoundFormatCallback(SoundFormat sf)
{
    if (!_needsSetup)
        return sf;

    _recordingFormat = new WaveFormat(sf.Rate, 16, sf.Channels);
    _waveProvider = new BufferedWaveProvider(RecordingFormat);
    _sampleChannel = new SampleChannel(_waveProvider);
    _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
    _needsSetup = false;

    return sf;
}
/// <summary>
/// Resolves the plugin for the file, opens its wave stream, and returns a
/// metered, volume-controllable sample provider over it.
/// </summary>
/// <param name="fileName">Path of the file to open.</param>
/// <returns>The post-volume metered sample provider.</returns>
/// <exception cref="InvalidOperationException">No plugin handles the extension.</exception>
private ISampleProvider CreateInputStream(string fileName)
{
    var plugin = GetPluginForFile(fileName);
    if (plugin == null)
        throw new InvalidOperationException("Unsupported file extension");

    fileWaveStream = plugin.CreateWaveStream(fileName);

    var channel = new SampleChannel(fileWaveStream, true);
    // Let callers adjust playback volume through the channel
    setVolumeDelegate = vol => channel.Volume = vol;
    channel.PreVolumeMeter += OnPreVolumeMeter;

    var meter = new MeteringSampleProvider(channel);
    meter.StreamVolume += OnPostVolumeMeter;
    return meter;
}
/// <summary>
/// Start audio source.
/// </summary>
/// <remarks>Starts the audio source and returns execution to the caller. The
/// source spins up a background thread and notifies about new frames through
/// the <see cref="DataAvailable"/> event.</remarks>
/// <exception cref="ArgumentException">audio source is not specified.</exception>
public void Start()
{
    if (IsRunning)
        return;

    if (_stream == null)
        throw new ArgumentException("Audio source is not specified.");

    // buffered provider + metered sample channel for level reporting
    _waveProvider = new BufferedWaveProvider(RecordingFormat);
    _sampleChannel = new SampleChannel(_waveProvider);
    _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;

    _stopEvent = new ManualResetEvent(false);
    _thread = new Thread(DirectStreamListener) { Name = "DirectStream Audio Receiver" };
    _thread.Start();
}
/// <summary>
/// Main ffmpeg receive loop: opens the source with VideoFileReader (access
/// serialized by a process-wide mutex), wires up audio metering when the
/// stream has channels, then pumps decoded frames into the video/audio
/// queues consumed by the eventing thread until stopped, timed out, or
/// errored, finally tearing everything down via ShutDown.
/// </summary>
private void FfmpegListener()
{
    _reasonToStop = ReasonToFinishPlaying.StoppedByUser;
    _vfr = null;
    bool open = false;
    string errmsg = "";
    _eventing = null;
    _stopping = false;
    try
    {
        // ffmpeg open/close is serialized process-wide through this mutex
        Program.FfmpegMutex.WaitOne();
        _vfr = new VideoFileReader();
        //ensure http/https is lower case for string compare in ffmpeg library
        int i = _source.IndexOf("://", StringComparison.Ordinal);
        if (i > -1)
        {
            _source = _source.Substring(0, i).ToLower() + _source.Substring(i);
        }
        _vfr.Timeout = Timeout;
        _vfr.AnalyzeDuration = AnalyzeDuration;
        _vfr.Cookies = Cookies;
        _vfr.UserAgent = UserAgent;
        _vfr.Headers = Headers;
        _vfr.Flags = -1;
        _vfr.NoBuffer = true;
        _vfr.RTSPMode = RTSPMode;
        _vfr.Open(_source);
        open = true;
    }
    catch (Exception ex)
    {
        Logger.LogExceptionToFile(ex, "FFMPEG");
    }
    finally
    {
        try
        {
            Program.FfmpegMutex.ReleaseMutex();
        }
        catch (ObjectDisposedException)
        {
            //can happen on shutdown
        }
    }
    if (_vfr == null || !_vfr.IsOpen || !open)
    {
        ShutDown("Could not open stream" + ": " + _source);
        return;
    }
    bool hasaudio = false;
    if (_vfr.Channels > 0)
    {
        // the stream carries audio: 16-bit buffered pipeline + pre-volume meter
        hasaudio = true;
        RecordingFormat = new WaveFormat(_vfr.SampleRate, 16, _vfr.Channels);
        _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500) };
        SampleChannel = new SampleChannel(_waveProvider);
        SampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
    }
    Duration = _vfr.Duration;
    _videoQueue = new ConcurrentQueue<Bitmap>();
    _audioQueue = new ConcurrentQueue<byte[]>();
    _eventing = new Thread(EventManager) { Name = "ffmpeg eventing", IsBackground = true };
    _eventing.Start();
    try
    {
        while (!_stopEvent.WaitOne(5) && !MainForm.ShuttingDown)
        {
            // stop pumping once nobody is subscribed for frames
            var nf = NewFrame;
            if (nf == null)
                break;
            object frame = _vfr.ReadFrame();
            switch (_vfr.LastFrameType)
            {
                case 0:
                    //null packet
                    if ((DateTime.UtcNow - LastFrame).TotalMilliseconds > Timeout)
                        throw new TimeoutException("Timeout reading from video stream");
                    break;
                case 1:
                    // audio frame
                    LastFrame = DateTime.UtcNow;
                    if (hasaudio)
                    {
                        var data = frame as byte[];
                        if (data?.Length > 0)
                        {
                            ProcessAudio(data);
                        }
                    }
                    break;
                case 2:
                    // video frame; queue capped at 20 entries to bound memory
                    LastFrame = DateTime.UtcNow;
                    var bmp = frame as Bitmap;
                    if (bmp != null)
                    {
                        if (_videoQueue.Count<20)
                            _videoQueue.Enqueue(bmp);
                    }
                    break;
            }
        }
    }
    catch (Exception e)
    {
        Logger.LogExceptionToFile(e, "FFMPEG");
        errmsg = e.Message;
    }
    _stopEvent.Set();
    _eventing.Join();
    if (SampleChannel != null)
    {
        SampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
        SampleChannel = null;
    }
    if (_waveProvider?.BufferedBytes > 0)
        _waveProvider?.ClearBuffer();
    ShutDown(errmsg);
}
/// <summary>
/// Stops playback and releases the output device and the provider chain.
/// </summary>
internal void Stop()
{
    if (waveOut != null)
    {
        waveOut.Stop();
        // best-effort dispose; some output drivers throw during teardown
        try
        {
            waveOut.Dispose();
        }
        catch
        {
        }
        waveOut = null;
    }

    waveProvider = null;
    sampleChannel = null;
    sampleStream = null;
}
/// <summary>
/// Receive loop for an iSpyServer audio feed: reads A-law chunks over HTTP,
/// decodes them to PCM, pushes them through the buffered provider and sample
/// channel (driving the pre-volume meter), and raises DataAvailable until
/// stopped or the stream is lost. Cleans up the channel and buffers on exit.
/// </summary>
private void SpyServerListener()
{
    var data = new byte[3200];
    try
    {
        var request = (HttpWebRequest)WebRequest.Create(_source);
        request.Timeout = 10000;
        request.ReadWriteTimeout = 5000;
        var response = request.GetResponse();
        using (Stream stream = response.GetResponseStream())
        {
            if (stream == null)
                throw new Exception("Stream is null");
            stream.ReadTimeout = 5000;
            while (!_stopEvent.WaitOne(0, false) && !MainForm.ShuttingDown)
            {
                var da = DataAvailable;
                if (da != null)
                {
                    int recbytesize = stream.Read(data, 0, 3200);
                    if (recbytesize == 0)
                        throw new Exception("lost stream");
                    byte[] dec;
                    ALawDecoder.ALawDecode(data, recbytesize, out dec);
                    if (_sampleChannel != null)
                    {
                        _waveProvider.AddSamples(dec, 0, dec.Length);
                        // drain the channel to drive the pre-volume meter
                        var sampleBuffer = new float[dec.Length];
                        int read = _sampleChannel.Read(sampleBuffer, 0, dec.Length);
                        da(this, new DataAvailableEventArgs((byte[])dec.Clone(), read));
                        if (Listening)
                        {
                            // NOTE(review): 'read' is a sample count returned by the
                            // float channel, but AddSamples takes a byte count -
                            // confirm this is intended rather than dec.Length.
                            WaveOutProvider?.AddSamples(dec, 0, read);
                        }
                    }
                }
                else
                {
                    break;
                }
                // need to stop ?
                if (_stopEvent.WaitOne(0, false))
                    break;
            }
        }
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
    }
    catch (Exception e)
    {
        var af = AudioFinished;
        af?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
        MainForm.LogExceptionToFile(e,"ispyServer");
    }
    // unhook the meter and flush any residual buffered audio
    if (_sampleChannel != null)
    {
        _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
        _sampleChannel = null;
    }
    if (_waveProvider?.BufferedBytes > 0)
        _waveProvider.ClearBuffer();
    if (WaveOutProvider?.BufferedBytes > 0)
        WaveOutProvider?.ClearBuffer();
}
/// <summary>
/// Start audio source.
/// </summary>
/// <remarks>Starts audio source and return execution to caller. audio source
/// object creates background thread and notifies about new frames with the
/// help of <see cref="DataAvailable"/> event.</remarks>
/// <exception cref="ArgumentException">audio source is not specified.</exception>
public void Start()
{
    if (IsRunning)
        return;

    // Locate the capture device whose product name matches the source.
    // (The original kept a second counter that always equalled the loop
    // index; the index is used directly here.)
    int selind = -1;
    for (int n = 0; n < WaveIn.DeviceCount; n++)
    {
        if (WaveIn.GetCapabilities(n).ProductName == _source)
            selind = n;
    }

    if (selind == -1)
    {
        // device no longer connected or not configured: fall back to the
        // first device if any exist, otherwise report the device as lost
        if (WaveIn.DeviceCount > 0)
            selind = 0;
        else
        {
            //if (AudioSourceError != null)
            //    AudioSourceError(this, new AudioSourceErrorEventArgs("not connected"));
            AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
            return;
        }
    }

    _waveIn = new WaveInEvent { BufferMilliseconds = 200, DeviceNumber = selind, WaveFormat = RecordingFormat };
    _waveIn.DataAvailable += WaveInDataAvailable;
    _waveIn.RecordingStopped += WaveInRecordingStopped;

    _waveProvider = new WaveInProvider(_waveIn);
    _sampleChannel = new SampleChannel(_waveProvider);

    // only meter when someone is subscribed for level changes
    if (LevelChanged != null)
    {
        _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
    }

    _waveIn.StartRecording();
}
/// <summary>
/// Tears down the VLC player: unhooks the volume meter and media events,
/// stops playback, disposes the media object, and clears any buffered audio.
/// Any failure during teardown is logged rather than propagated.
/// </summary>
private void DisposePlayer()
{
    try
    {
        // detach the meter first so no further level callbacks fire
        if (_sampleChannel != null)
        {
            _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
            _sampleChannel = null;
        }
        _mMedia.Events.DurationChanged -= EventsDurationChanged;
        _mMedia.Events.StateChanged -= EventsStateChanged;
        _mPlayer.Stop();
        _mMedia.Dispose();
        _mMedia = null;
        // best-effort flush of residual buffered audio
        if (_waveProvider?.BufferedBytes > 0)
        {
            try
            {
                _waveProvider?.ClearBuffer();
            }
            catch
            {
                // ignored
            }
        }
    }
    catch (Exception ex)
    {
        Logger.LogExceptionToFile(ex, "VLC");
    }
    _waveProvider = null;
    Listening = false;
}
/// <summary>
/// Finds the connected Kinect sensor matching the configured unique id,
/// enables the requested streams (skeleton plus color/depth/infrared per
/// StreamMode), starts the sensor and its audio source, sets up the audio
/// metering pipeline, and launches the audio background thread. On failure
/// the sensor reference is cleared and the running flag reset.
/// </summary>
public void Start()
{
    if (_sensor != null)
        Stop();
    foreach (var potentialSensor in KinectSensor.KinectSensors)
    {
        if (potentialSensor.Status == KinectStatus.Connected && _uniqueKinectId == potentialSensor.UniqueKinectId)
        {
            _sensor = potentialSensor;
            break;
        }
    }
    if (_sensor==null)
    {
        Logger.LogMessageToFile("Sensor not found: "+_uniqueKinectId,"KinectStream");
        _isrunning = false;
        return;
    }
    if (_skeleton)
    {
        _sensor.SkeletonStream.Enable();
        _sensor.SkeletonFrameReady += SensorSkeletonFrameReady;
    }
    switch (StreamMode)
    {
        case 0://color
            _sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
            _sensor.ColorFrameReady += SensorColorFrameReady;
            break;
        case 1://depth
            _sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
            _sensor.DepthFrameReady += SensorDepthFrameReady;
            // Allocate space to put the depth pixels we'll receive
            _depthPixels = new short[_sensor.DepthStream.FramePixelDataLength];
            // Allocate space to put the color pixels we'll create
            _colorPixels = new byte[_sensor.DepthStream.FramePixelDataLength * sizeof(int)];
            break;
        case 2://infrared
            _sensor.ColorStream.Enable(ColorImageFormat.InfraredResolution640x480Fps30);
            _sensor.ColorFrameReady += SensorColorFrameReady;
            break;
    }
    // Start the sensor
    try
    {
        _sensor.Start();
        _audioStream = _sensor.AudioSource.Start();
        // fixed 16kHz, 16-bit, mono audio format
        RecordingFormat = new WaveFormat(16000, 16, 1);
        _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500) };
        _sampleChannel = new SampleChannel(_waveProvider);
        _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
        // notify listeners exactly once that audio is available
        if (HasAudioStream != null)
        {
            HasAudioStream(this, EventArgs.Empty);
            HasAudioStream = null;
        }
        _isrunning = true;
        _stopEvent = new ManualResetEvent(false);
        // create and start new thread
        var thread = new Thread(AudioThread) { Name = "kinect audio", IsBackground = true};
        thread.Start();
    }
    catch (Exception ex)//IOException)
    {
        Logger.LogExceptionToFile(ex, "KinectStream");
        _sensor = null;
        _isrunning = false;
    }
}
/// <summary>
/// Start audio source.
/// </summary>
/// <remarks>Starts audio source and return execution to caller. audio source
/// object creates background thread and notifies about new frames with the
/// help of <see cref="DataAvailable"/> event.</remarks>
/// <exception cref="ArgumentException">audio source is not specified.</exception>
public void Start()
{
    if (string.IsNullOrEmpty(_source))
        throw new ArgumentException("Audio source is not specified.");
    if (_started)
        return;
    lock (_lock)
    {
        // re-check under the lock: another thread may have started us
        if (_started)
            return;

        // Locate the capture device whose product name matches the source.
        // (The original kept a second counter that always equalled the loop
        // index; the index is used directly here.)
        int selind = -1;
        for (var n = 0; n < WaveIn.DeviceCount; n++)
        {
            if (WaveIn.GetCapabilities(n).ProductName == _source)
                selind = n;
        }
        if (selind == -1)
        {
            AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
            return;
        }

        _started = true;
        _waveIn = new WaveInEvent { BufferMilliseconds = 200, DeviceNumber = selind, WaveFormat = RecordingFormat };
        _waveIn.DataAvailable += WaveInDataAvailable;
        _waveIn.RecordingStopped += WaveInRecordingStopped;
        _waveProvider = new WaveInProvider(_waveIn);
        _sampleChannel = new SampleChannel(_waveProvider);
        _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
        _waveIn.StartRecording();
    }
}
/// <summary>
/// Enables local-microphone monitoring: builds the recording format from the
/// mic's configured settings, resolves the capture device by product name,
/// wires the metering pipeline, and starts recording. Updates the
/// NoSource/active flags and the UI on success or failure.
/// </summary>
public void Enable()
{
    _processing = true;
    _sampleRate = Micobject.settings.samples;
    _bitsPerSample = Micobject.settings.bits;
    _channels = Micobject.settings.channels;
    RecordingFormat = new WaveFormat(_sampleRate, _bitsPerSample, _channels);
    //local device
    int i = 0, selind = -1;
    for (int n = 0; n < WaveIn.DeviceCount; n++)
    {
        if (WaveIn.GetCapabilities(n).ProductName == Micobject.settings.sourcename)
            selind = i;
        i++;
    }
    if (selind == -1)
    {
        //device no longer connected
        Micobject.settings.active = false;
        NoSource = true;
        _processing = false;
        return;
    }
    _waveIn = new WaveIn { BufferMilliseconds = 40, DeviceNumber = selind, WaveFormat = RecordingFormat };
    _waveIn.DataAvailable += WaveInDataAvailable;
    _waveIn.RecordingStopped += WaveInRecordingStopped;
    _waveProvider = new WaveInProvider(_waveIn);
    _sampleChannel = new SampleChannel(_waveProvider);
    // meter stream levels downstream of the sample channel
    _meteringProvider = new MeteringSampleProvider(_sampleChannel);
    _meteringProvider.StreamVolume += _meteringProvider_StreamVolume;
    try
    {
        _waveIn.StartRecording();
    }
    catch (Exception ex)
    {
        // surface the failure to the user and leave the mic disabled
        MainForm.LogExceptionToFile(ex);
        MessageBox.Show(LocRM.GetString("AudioMonitoringError") + ": " + ex.Message, LocRM.GetString("Error"));
        _processing = false;
        return;
    }
    NoSource = false;
    Micobject.settings.active = true;
    MainForm.NeedsSync = true;
    Invalidate();
    _processing = false;
}
/// <summary>
/// VLC sound-format callback: performs one-time setup of the recording
/// format, buffered provider and metering chain, then reports that an audio
/// stream is present.
/// </summary>
/// <param name="sf">The sound format reported by the stream.</param>
/// <returns>The same format, unchanged.</returns>
private SoundFormat SoundFormatCallback(SoundFormat sf)
{
    if (!_needsSetup)
        return sf;

    _recordingFormat = new WaveFormat(sf.Rate, 16, sf.Channels);
    _waveProvider = new BufferedWaveProvider(RecordingFormat);
    _sampleChannel = new SampleChannel(_waveProvider);
    _meteringProvider = new MeteringSampleProvider(_sampleChannel);
    _meteringProvider.StreamVolume += MeteringProviderStreamVolume;
    _needsSetup = false;

    if (HasAudioStream != null)
        HasAudioStream(this, EventArgs.Empty);

    return sf;
}
/// <summary>
/// Replaces any previously loaded file with the given MP3 and prepares the
/// player with a metered sample chain.
/// </summary>
/// <param name="fileName">Path of the MP3 file to load.</param>
public void LoadMp3File(string fileName)
{
    // release the previous reader, if any
    aReader?.Dispose();
    aReader = new AudioFileReader(fileName);

    var channel = new SampleChannel(aReader, true);
    volumeMeter = new MeteringSampleProvider(channel);
    player.Init(volumeMeter);
}
/// <summary>
/// Start audio source.
/// </summary>
/// <remarks>Starts the audio source and returns execution to the caller. The
/// source spins up a background thread and notifies about new frames through
/// the <see cref="DataAvailable"/> event.</remarks>
/// <exception cref="ArgumentException">audio source is not specified.</exception>
public void Start()
{
    if (IsRunning)
        return;

    if (string.IsNullOrEmpty(_source))
        throw new ArgumentException("Audio source is not specified.");

    // buffered provider + metered sample channel for level reporting
    _waveProvider = new BufferedWaveProvider(RecordingFormat);
    _sampleChannel = new SampleChannel(_waveProvider);
    _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;

    _stopEvent = new ManualResetEvent(false);
    _thread = new Thread(SpyServerListener) { Name = "iSpyServer Audio Receiver (" + _source + ")" };
    _thread.Start();
}
/// <summary>
/// Opens the file through its format plugin and returns a metered,
/// volume-controllable sample provider over it.
/// </summary>
/// <param name="fileName">Path of the file to open.</param>
/// <returns>The metered sample provider.</returns>
/// <exception cref="InvalidOperationException">No plugin handles the extension.</exception>
private ISampleProvider CreateInputStream(string fileName)
{
    IInputFileFormatPlugin plugin = GetPluginForFile(fileName);
    if (plugin == null)
        throw new InvalidOperationException("Unsupported file extension");

    _fileWaveStream = plugin.CreateWaveStream(fileName);

    var channel = new SampleChannel(_fileWaveStream, true);
    // Let callers adjust playback volume through the channel
    _setVolumeDelegate = vol => channel.Volume = vol;

    return new MeteringSampleProvider(channel);
}
/// <summary>
/// Finds the connected Kinect sensor matching the configured unique id,
/// enables the skeleton (optional) and color streams, starts the sensor and
/// its audio source, builds the audio metering pipeline, and launches the
/// audio background thread. On failure the sensor reference is cleared and
/// the running flag reset.
/// </summary>
public void Start()
{
    if (_sensor != null)
        Stop();
    foreach (var potentialSensor in KinectSensor.KinectSensors)
    {
        if (potentialSensor.Status == KinectStatus.Connected && _uniqueKinectId == potentialSensor.UniqueKinectId)
        {
            _sensor = potentialSensor;
            break;
        }
    }
    if (_sensor==null)
    {
        Log.Warn("Sensor not found: "+_uniqueKinectId);
        _isrunning = false;
        return;
    }
    if (_skeleton)
    {
        _sensor.SkeletonStream.Enable();
        _sensor.SkeletonFrameReady += SensorSkeletonFrameReady;
    }
    // depth-stream support is currently disabled
    //if (_depth)
    //{
    //    _sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
    //    _sensor.DepthFrameReady += SensorDepthFrameReady;
    //    // Allocate space to put the depth pixels we'll receive
    //    this.depthPixels = new short[_sensor.DepthStream.FramePixelDataLength];
    //    // Allocate space to put the color pixels we'll create
    //    this.colorPixels = new byte[_sensor.DepthStream.FramePixelDataLength * sizeof(int)];
    //    // This is the bitmap we'll display on-screen
    //    _colorBitmap = new WriteableBitmap(this.sensor.DepthStream.FrameWidth, this.sensor.DepthStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);
    //}
    //else
    //{
    _sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
    _sensor.ColorFrameReady += SensorColorFrameReady;
    //}
    // Turn on the skeleton stream to receive skeleton frames
    // Start the sensor
    try
    {
        _sensor.Start();
        _audioStream = _sensor.AudioSource.Start();
        // fixed 16kHz, 16-bit, mono audio format
        RecordingFormat = new WaveFormat(16000, 16, 1);
        WaveOutProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true };
        _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true };
        _sampleChannel = new SampleChannel(_waveProvider);
        _meteringProvider = new MeteringSampleProvider(_sampleChannel);
        _meteringProvider.StreamVolume += MeteringProviderStreamVolume;
        if (HasAudioStream != null)
            HasAudioStream(this, EventArgs.Empty);
        _isrunning = true;
        _stopEvent = new ManualResetEvent(false);
        // create and start new thread
        var thread = new Thread(AudioThread) { Name = "kinect audio" };
        thread.Start();
    }
    catch (Exception ex)//IOException)
    {
        Log.Error("",ex);//MainForm.LogExceptionToFile(ex);
        _sensor = null;
        _isrunning = false;
    }
}
/// <summary>
/// Generates the input stream based on what kind of file it is.
/// </summary>
/// <param name="fileName">Name of the file about to be played.</param>
/// <exception cref="InvalidOperationException">Thrown for unsupported extensions.</exception>
private void CreateInputStream(string fileName)
{
    SampleChannel inputStream;
    // Ordinal, case-insensitive extension check: ".WAV"/".Mp3" must work and
    // the result must not depend on the current culture.
    if (fileName.EndsWith(".wav", StringComparison.OrdinalIgnoreCase))
    {
        readerStream = new WaveFileReader(fileName);
        // normalize non-PCM wavs to PCM
        if (readerStream.WaveFormat.Encoding != WaveFormatEncoding.Pcm)
        {
            readerStream = WaveFormatConversionStream.CreatePcmStream(readerStream);
            readerStream = new BlockAlignReductionStream(readerStream);
        }
        // normalize bit depth to 16-bit
        if (readerStream.WaveFormat.BitsPerSample != 16)
        {
            var format = new WaveFormat(readerStream.WaveFormat.SampleRate, 16, readerStream.WaveFormat.Channels);
            readerStream = new WaveFormatConversionStream(format, readerStream);
        }
        inputStream = new SampleChannel(readerStream);
    }
    else if (fileName.EndsWith(".mp3", StringComparison.OrdinalIgnoreCase))
    {
        readerStream = new Mp3FileReader(fileName);
        inputStream = new SampleChannel(readerStream);
    }
    else
    {
        throw new InvalidOperationException("Unsupported extension");
    }
    // feed every sample pair to the visualization aggregator
    sampleProvider = new NotifyingSampleProvider(inputStream);
    sampleProvider.Sample += (s, e) => aggregator.Add(e.Left, e.Right);
}
/// <summary>
/// (Re)starts audio output for the given chat codec: builds the buffered
/// provider -> sample channel -> notifying sample chain, feeds samples into
/// the aggregator, and begins playback.
/// </summary>
/// <param name="codec">The codec whose record format drives the output chain.</param>
private void Start(Audio.Codecs.INetworkChatCodec codec)
{
    ShouldTryRestartOutput = false;
    Stop();

    waveOut = GetWavePlayer();

    waveProvider = new BufferedWaveProvider(codec.RecordFormat);
    sampleChannel = new SampleChannel(waveProvider, false);
    sampleStream = new NotifyingSampleProvider(sampleChannel);
    // pipe the left channel into the level aggregator
    sampleStream.Sample += (s, e) => aggregator.Add(e.Left);

    waveOut.Init(sampleStream);
    waveOut.Play();

    if (LevelManager == null)
        LevelManager = new AudioLevelManagerDisconnected();

    OutputFormat = codec.RecordFormat.ToString();
}
/// <summary>
/// VLC sound-format callback: on first invocation, captures the real channel
/// count (capping anything above stereo at 2 channels), builds the 16-bit
/// metering pipeline, and announces the audio stream exactly once.
/// </summary>
/// <param name="sf">The sound format reported by the stream.</param>
/// <returns>The same format, unchanged.</returns>
private SoundFormat SoundFormatCallback(SoundFormat sf)
{
    if (!_needsSetup)
        return sf;

    _realChannels = sf.Channels;
    var chan = _realChannels > 1 ? 2 : _realChannels; //downmix

    _recordingFormat = new WaveFormat(sf.Rate, 16, chan);
    _waveProvider = new BufferedWaveProvider(RecordingFormat);
    _sampleChannel = new SampleChannel(_waveProvider);
    _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
    _needsSetup = false;

    if (HasAudioStream != null)
    {
        HasAudioStream(this, EventArgs.Empty);
        HasAudioStream = null;
    }
    return sf;
}