Example no. 1
 /// <summary>
 /// Initialises a new instance of SampleChannel
 /// </summary>
 /// <param name="waveProvider">Source wave provider, must be PCM or IEEE</param>
 /// <param name="forceStereo">force mono inputs to become stereo</param>
 public SampleChannel(IWaveProvider waveProvider, bool forceStereo)
 {
     var provider = SampleProviderConverters.ConvertWaveProviderIntoSampleProvider(waveProvider);
     bool needsStereoUpmix = forceStereo && provider.WaveFormat.Channels == 1;
     if (needsStereoUpmix)
     {
         provider = new MonoToStereoSampleProvider(provider);
     }
     this.waveFormat = provider.WaveFormat;
     // the meter sits before the volume stage so waveform drawing sees pre-gain samples
     this.preVolumeMeter = new MeteringSampleProvider(provider);
     this.volumeProvider = new VolumeSampleProvider(this.preVolumeMeter);
 }
Example no. 2
        /// <summary>
        /// Initialises a new instance of SampleChannel
        /// </summary>
        /// <param name="waveProvider">Source wave provider, must be PCM or IEEE</param>
        /// <param name="forceStereo">force mono inputs to become stereo</param>
        public SampleChannel(IWaveProvider waveProvider, bool forceStereo)
        {
            ISampleProvider source = SampleProviderConverters.ConvertWaveProviderIntoSampleProvider(waveProvider);

            bool isMono = source.WaveFormat.Channels == 1;
            if (isMono && forceStereo)
            {
                source = new MonoToStereoSampleProvider(source);
            }
            waveFormat = source.WaveFormat;
            // meter first, then volume: the meter reports levels before any gain is applied
            // (useful for drawing waveforms)
            preVolumeMeter = new MeteringSampleProvider(source);
            volumeProvider = new VolumeSampleProvider(preVolumeMeter);
        }
        /// <summary>
        /// Initialises a new instance of SampleChannel, converting the source to
        /// 32-bit float samples and up-mixing mono to stereo.
        /// </summary>
        /// <param name="waveProvider">Source wave provider, must be PCM or IEEE float</param>
        /// <exception cref="InvalidOperationException">PCM bit depth is not 8, 16 or 24</exception>
        /// <exception cref="ArgumentException">Encoding is neither PCM nor IEEE float</exception>
        public SampleChannel(IWaveProvider waveProvider)
        {
            ISampleProvider sampleProvider;

            if (waveProvider.WaveFormat.Encoding == WaveFormatEncoding.Pcm)
            {
                // go to float
                switch (waveProvider.WaveFormat.BitsPerSample)
                {
                    case 8:
                        sampleProvider = new Pcm8BitToSampleProvider(waveProvider);
                        break;
                    case 16:
                        sampleProvider = new Pcm16BitToSampleProvider(waveProvider);
                        break;
                    case 24:
                        sampleProvider = new Pcm24BitToSampleProvider(waveProvider);
                        break;
                    default:
                        // same exception type as before, but the message now says
                        // what was actually unsupported
                        throw new InvalidOperationException(
                            "Unsupported PCM bit depth: " + waveProvider.WaveFormat.BitsPerSample);
                }
            }
            else if (waveProvider.WaveFormat.Encoding == WaveFormatEncoding.IeeeFloat)
            {
                sampleProvider = new WaveToSampleProvider(waveProvider);
            }
            else
            {
                throw new ArgumentException(
                    "Unsupported source encoding: " + waveProvider.WaveFormat.Encoding);
            }
            // this overload always up-mixes mono sources to stereo
            if (sampleProvider.WaveFormat.Channels == 1)
            {
                sampleProvider = new MonoToStereoSampleProvider(sampleProvider);
            }
            this.waveFormat = sampleProvider.WaveFormat;
            // let's put the meter before the volume (useful for drawing waveforms)
            this.preVolumeMeter = new MeteringSampleProvider(sampleProvider);
            this.volumeProvider = new VolumeSampleProvider(preVolumeMeter);
        }
Example no. 4
 /// <summary>
 /// Initialises a new instance of SampleChannel
 /// </summary>
 /// <param name="waveProvider">Source wave provider, must be PCM or IEEE</param>
 public SampleChannel(IWaveProvider waveProvider)
 {
     ISampleProvider sampleProvider;
     var format = waveProvider.WaveFormat;
     if (format.Encoding == WaveFormatEncoding.Pcm)
     {
         // go to float
         switch (format.BitsPerSample)
         {
             case 8:
                 sampleProvider = new Pcm8BitToSampleProvider(waveProvider);
                 break;
             case 16:
                 sampleProvider = new Pcm16BitToSampleProvider(waveProvider);
                 break;
             case 24:
                 sampleProvider = new Pcm24BitToSampleProvider(waveProvider);
                 break;
             default:
                 throw new InvalidOperationException("Unsupported operation");
         }
     }
     else if (format.Encoding == WaveFormatEncoding.IeeeFloat)
     {
         sampleProvider = new WaveToSampleProvider(waveProvider);
     }
     else
     {
         throw new ArgumentException("Unsupported source encoding");
     }
     // mono sources are always widened to stereo by this overload
     if (sampleProvider.WaveFormat.Channels == 1)
     {
         sampleProvider = new MonoToStereoSampleProvider(sampleProvider);
     }
     this.waveFormat = sampleProvider.WaveFormat;
     // the meter is placed before the volume stage (useful for drawing waveforms)
     this.preVolumeMeter = new MeteringSampleProvider(sampleProvider);
     this.volumeProvider = new VolumeSampleProvider(preVolumeMeter);
 }
Example no. 5
        /// <summary>
        /// Locates the Kinect sensor matching _uniqueKinectId, enables the requested
        /// streams, starts audio capture and spawns the audio worker thread.
        /// Sets _isrunning to reflect whether startup succeeded.
        /// </summary>
        public void Start()
        {
            // restart cleanly if a sensor was already attached
            if (_sensor != null)
                Stop();

            // pick the connected sensor whose unique id matches our configuration
            foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected && _uniqueKinectId == potentialSensor.UniqueKinectId)
                {
                    _sensor = potentialSensor;
                    break;
                }
            }
            if (_sensor==null)
            {
                Log.Warn("Sensor not found: "+_uniqueKinectId);
                _isrunning = false;
                return;
            }

            if (_skeleton)
            {
                _sensor.SkeletonStream.Enable();
                _sensor.SkeletonFrameReady += SensorSkeletonFrameReady;
            }

            // depth-stream support is currently disabled; only the colour stream is wired up
            //if (_depth)
            //{
            //    _sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
            //    _sensor.DepthFrameReady += SensorDepthFrameReady;
            //    // Allocate space to put the depth pixels we'll receive
            //    this.depthPixels = new short[_sensor.DepthStream.FramePixelDataLength];

            //    // Allocate space to put the color pixels we'll create
            //    this.colorPixels = new byte[_sensor.DepthStream.FramePixelDataLength * sizeof(int)];

            //    // This is the bitmap we'll display on-screen
            //    _colorBitmap = new WriteableBitmap(this.sensor.DepthStream.FrameWidth, this.sensor.DepthStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);

            //}
            //else
            //{
                _sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
                _sensor.ColorFrameReady += SensorColorFrameReady;
            //}

            // Turn on the skeleton stream to receive skeleton frames

            // Start the sensor
            try
            {
                _sensor.Start();
                _audioStream = _sensor.AudioSource.Start();

                // NOTE(review): assumes the Kinect mic delivers 16kHz 16-bit mono - confirm
                RecordingFormat = new WaveFormat(16000, 16, 1);

                // DiscardOnBufferOverflow keeps slow consumers from stalling capture
                WaveOutProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true };
                _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true };

                // metering chain used for level reporting via MeteringProviderStreamVolume
                _sampleChannel = new SampleChannel(_waveProvider);
                _meteringProvider = new MeteringSampleProvider(_sampleChannel);
                _meteringProvider.StreamVolume += MeteringProviderStreamVolume;

                if (HasAudioStream != null)
                    HasAudioStream(this, EventArgs.Empty);

                _isrunning = true;

                _stopEvent = new ManualResetEvent(false);

                // create and start new thread
                var thread = new Thread(AudioThread) { Name = "kinect audio" };
                thread.Start();
            }
            catch (Exception ex)//IOException)
            {
                // startup failed: log and leave the source in a stopped state
                Log.Error("",ex);//MainForm.LogExceptionToFile(ex);
                _sensor = null;
                _isrunning = false;
            }
        }
Example no. 6
        /// <summary>
        /// Sound-format callback: on the first call after a (re)configuration it
        /// builds the wave provider / metering chain for the reported format, then
        /// always echoes the format back unchanged.
        /// </summary>
        private SoundFormat SoundFormatCallback(SoundFormat sf)
        {
            if (!_needsSetup)
            {
                return sf;
            }

            // 16-bit at the source's reported rate and channel count
            _recordingFormat = new WaveFormat(sf.Rate, 16, sf.Channels);
            _waveProvider = new BufferedWaveProvider(RecordingFormat);
            _sampleChannel = new SampleChannel(_waveProvider);

            _meteringProvider = new MeteringSampleProvider(_sampleChannel);
            _meteringProvider.StreamVolume += MeteringProviderStreamVolume;
            _needsSetup = false;
            if (HasAudioStream != null)
                HasAudioStream(this, EventArgs.Empty);

            return sf;
        }
        /// <summary>
        /// Opens the given file via its format plugin and returns a metered
        /// sample-provider chain ready for playback.
        /// </summary>
        private ISampleProvider CreateInputStream(string fileName)
        {
            var plugin = GetPluginForFile(fileName);
            if (plugin == null)
            {
                throw new InvalidOperationException("Unsupported file extension");
            }
            fileWaveStream = plugin.CreateWaveStream(fileName);

            // force stereo so mono files still drive both output channels
            var channel = new SampleChannel(fileWaveStream, true);
            channel.PreVolumeMeter += OnPreVolumeMeter;
            setVolumeDelegate = vol => channel.Volume = vol;

            var meter = new MeteringSampleProvider(channel);
            meter.StreamVolume += OnPostVolumeMeter;
            return meter;
        }
Example no. 8
        /// <summary>
        /// Opens the given file via its format plugin and returns a metered,
        /// volume-controllable sample provider.
        /// </summary>
        private ISampleProvider CreateInputStream(string fileName)
        {
            var plugin = GetPluginForFile(fileName);
            if (plugin == null)
            {
                throw new InvalidOperationException("Unsupported file extension");
            }
            _fileWaveStream = plugin.CreateWaveStream(fileName);

            // stereo-forced channel; volume changes are routed through the delegate
            var channel = new SampleChannel(_fileWaveStream, true);
            _setVolumeDelegate = vol => channel.Volume = vol;
            return new MeteringSampleProvider(channel);
        }
Example no. 9
        /// <summary>
        /// Starts monitoring the configured local audio capture device.
        /// Builds the recording format from the mic settings, locates the device by
        /// product name, wires up level metering and begins recording.
        /// </summary>
        public void Enable()
        {
            _processing = true;
            _sampleRate = Micobject.settings.samples;
            _bitsPerSample = Micobject.settings.bits;
            _channels = Micobject.settings.channels;

            RecordingFormat = new WaveFormat(_sampleRate, _bitsPerSample, _channels);

            // local device: find it by product name
            // (a redundant second counter that always equalled the loop index was removed)
            int selind = -1;
            for (int n = 0; n < WaveIn.DeviceCount; n++)
            {
                if (WaveIn.GetCapabilities(n).ProductName == Micobject.settings.sourcename)
                    selind = n; // no break: deliberately keeps the LAST match, as before
            }
            if (selind == -1)
            {
                // device no longer connected
                Micobject.settings.active = false;
                NoSource = true;
                _processing = false;
                return;
            }

            _waveIn = new WaveIn { BufferMilliseconds = 40, DeviceNumber = selind, WaveFormat = RecordingFormat };
            _waveIn.DataAvailable += WaveInDataAvailable;
            _waveIn.RecordingStopped += WaveInRecordingStopped;

            _waveProvider = new WaveInProvider(_waveIn);
            _sampleChannel = new SampleChannel(_waveProvider);

            // meter the converted float samples for level reporting
            _meteringProvider = new MeteringSampleProvider(_sampleChannel);
            _meteringProvider.StreamVolume += _meteringProvider_StreamVolume;

            try
            {
                _waveIn.StartRecording();
            }
            catch (Exception ex)
            {
                // recording could not start: report and leave the source disabled
                MainForm.LogExceptionToFile(ex);
                MessageBox.Show(LocRM.GetString("AudioMonitoringError") + ": " + ex.Message, LocRM.GetString("Error"));
                _processing = false;
                return;
            }

            NoSource = false;
            Micobject.settings.active = true;

            MainForm.NeedsSync = true;
            Invalidate();
            _processing = false;
        }
Example no. 10
        /// <summary>
        /// Opens an audio file and returns a metered sample-provider chain with
        /// pre- and post-volume level notifications wired up.
        /// </summary>
        private ISampleProvider CreateInputStream(string fileName)
        {
            audioFileReader = new AudioFileReader(fileName);

            // force stereo so mono files play on both output channels
            var channel = new SampleChannel(audioFileReader, true);
            channel.PreVolumeMeter += OnPreVolumeMeter;
            setVolumeDelegate = vol => channel.Volume = vol;

            var meter = new MeteringSampleProvider(channel);
            meter.StreamVolume += OnPostVolumeMeter;
            return meter;
        }
Example no. 11
        // Worker thread: downloads a multipart HTTP (MJPEG-style) stream and dispatches
        // each part by content type - jpeg video frames, raw audio, or alert text -
        // until the stop or reload event is signalled.
        private void WorkerThread()
        {
            // buffer to read stream
            var buffer = new byte[BufSize];
            var encoding = new ASCIIEncoding();

            while (!_stopEvent.WaitOne(0, false))
            {
                // reset reload event
                _reloadEvent.Reset();

                // HTTP web request
                HttpWebRequest request = null;
                // web response
                WebResponse response = null;
                // stream for MJPEG downloading
                Stream stream = null;
                // boundary between images (string and binary versions)
                byte[] boundary;

                try
                {
                    // create request
                    request = (HttpWebRequest)WebRequest.Create(_source);
                    // set user agent
                    if (_userAgent != null)
                    {
                        request.UserAgent = _userAgent;
                    }

                    // set proxy
                    if (_proxy != null)
                    {
                        request.Proxy = _proxy;
                    }

                    if (_usehttp10)
                        request.ProtocolVersion = HttpVersion.Version10;

                    // set timeout value for the request
                    request.Timeout = _requestTimeout;
                    request.AllowAutoRedirect = true;

                    // set login and password
                    if ((_login != null) && (_password != null) && (_login != string.Empty))
                        request.Credentials = new NetworkCredential(_login, _password);
                    // set connection group name
                    if (_useSeparateConnectionGroup)
                        request.ConnectionGroupName = GetHashCode().ToString();
                    // get response
                    response = request.GetResponse();

                    // get response stream
                    stream = response.GetResponseStream();
                    stream.ReadTimeout = _requestTimeout;

                    // NOTE(review): boundary is hard-coded rather than parsed from the
                    // Content-Type header - assumes the server always uses "--myboundary"
                    boundary = encoding.GetBytes("--myboundary");
                    byte[] sep = encoding.GetBytes("\r\n\r\n");

                    // loop

                    // startPacket/endPacket are the offsets of the current part's opening
                    // and closing boundary within 'buffer'; ttl counts buffered bytes
                    int startPacket = -1;
                    int endPacket = -1;
                    int ttl = 0;

                    bool hasaudio = false;

                    while ((!_stopEvent.WaitOne(0, false)) && (!_reloadEvent.WaitOne(0, false)))
                    {

                        int read;
                        if ((read = stream.Read(buffer, ttl, ReadSize)) == 0)
                            throw new ApplicationException();

                        ttl += read;

                        // locate the opening boundary first, then the closing one
                        if (startPacket==-1)
                        {
                            startPacket = ByteArrayUtils.Find(buffer, boundary, 0, ttl);
                        }
                        else
                        {
                            if (endPacket == -1)
                            {
                                endPacket = ByteArrayUtils.Find(buffer, boundary, startPacket + boundary.Length, ttl-(startPacket + boundary.Length));
                            }
                        }

                        // once both boundaries are known, a complete part is buffered
                        if (startPacket>-1 && endPacket>startPacket)
                        {
                            // find the blank line separating part headers from payload
                            // (searched only within the first 100 bytes of the part)
                            int br = ByteArrayUtils.Find(buffer, sep, startPacket, 100);

                            if (br != -1)
                            {
                                // NOTE(review): arr is sized 'br' but only br-startPacket
                                // bytes are copied, so the decoded string carries trailing
                                // NUL characters; matching below appears to work because it
                                // operates on the leading text - confirm this is intended
                                var arr = new byte[br];
                                Array.Copy(buffer, startPacket, arr, 0, br-startPacket);
                                string s = Encoding.ASCII.GetString(arr);
                                int k = s.IndexOf("Content-type: ");
                                if (k!=-1)
                                {
                                    s = s.Substring(k+14);
                                    s = s.Substring(0,s.IndexOf("\r\n"));
                                    s = s.Trim();
                                }
                                switch (s)
                                {
                                    case "image/jpeg":
                                        // payload starts 4 bytes past the header separator;
                                        // the -8 presumably trims trailing bytes before the
                                        // next boundary - confirm against the stream format
                                        Bitmap bitmap;
                                        using (var ms = new MemoryStream(buffer, br+4, endPacket-br-8))
                                        {
                                            bitmap = (Bitmap) Image.FromStream(ms);
                                        }
                                        // notify client
                                        NewFrame(this, new NewFrameEventArgs(bitmap));
                                        // release the image
                                        bitmap.Dispose();
                                        bitmap = null;

                                        break;
                                    case "audio/raw":
                                        // lazily build the audio pipeline on the first audio part
                                        if (!hasaudio)
                                        {
                                            hasaudio = true;
                                            // assumes the source sends 16kHz 16-bit mono PCM - TODO confirm
                                            RecordingFormat = new WaveFormat(16000, 16, 1);

                                            WaveOutProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true };
                                            _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true };

                                            _sampleChannel = new SampleChannel(_waveProvider);
                                            _meteringProvider = new MeteringSampleProvider(_sampleChannel);
                                            _meteringProvider.StreamVolume += MeteringProviderStreamVolume;
                                            if (HasAudioStream != null)
                                                HasAudioStream(this, EventArgs.Empty);
                                        }

                                        if (DataAvailable != null)
                                        {
                                            int l = endPacket - br - 8;
                                            var data = new byte[l];
                                            using (var ms = new MemoryStream(buffer, br+4, l))
                                            {
                                                ms.Read(data, 0, l);
                                            }
                                            _waveProvider.AddSamples(data, 0, data.Length);

                                            if (Listening)
                                            {
                                                WaveOutProvider.AddSamples(data, 0, data.Length);
                                            }

                                            //forces processing of volume level without piping it out
                                            // NOTE(review): sampleBuffer is sized by the BYTE count
                                            // and Read is asked for data.Length samples - for 16-bit
                                            // audio that is twice the available sample count; verify
                                            var sampleBuffer = new float[data.Length];

                                            _meteringProvider.Read(sampleBuffer, 0, data.Length);
                                            DataAvailable(this, new DataAvailableEventArgs((byte[]) data.Clone()));
                                        }

                                        break;
                                    case "alert/text":
                                        // code to handle alert notifications goes here
                                        if (AlertHandler != null)
                                        {
                                            int dl = endPacket - br - 8;
                                            var data2 = new byte[dl];
                                            using (var ms = new MemoryStream(buffer, br + 4, dl))
                                            {
                                                ms.Read(data2, 0, dl);
                                            }
                                            string alerttype = Encoding.ASCII.GetString(data2);
                                            AlertHandler(this, new AlertEventArgs(alerttype));
                                        }
                                        break;
                                }
                            }

                            // shift the unconsumed tail to the front of the buffer
                            ttl -= endPacket;
                            Array.Copy(buffer, endPacket, buffer, 0, ttl);
                            startPacket = -1;
                            endPacket = -1;
                        }

                    }
                }
                catch (ApplicationException)
                {
                    // do nothing for Application Exception, which we raised on our own
                    // wait for a while before the next try
                    Thread.Sleep(250);
                }
                catch (ThreadAbortException)
                {
                    break;
                }
                catch (Exception exception)
                {
                    // provide information to clients
                    if (VideoSourceError != null)
                    {
                        VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
                    }
                    // wait for a while before the next try
                    Thread.Sleep(250);
                }
                finally
                {
                    // abort request
                    if (request != null)
                    {
                        request.Abort();
                        request = null;
                    }
                    // close response stream
                    if (stream != null)
                    {
                        stream.Close();
                        stream = null;
                    }
                    // close response
                    if (response != null)
                    {
                        response.Close();
                        response = null;
                    }
                }

                // need to stop ?
                if (_stopEvent.WaitOne(0, false))
                    break;
            }

            if (PlayingFinished != null)
            {
                PlayingFinished(this, ReasonToFinishPlaying.StoppedByUser);
            }
        }
Example no. 12
        /// <summary>
        /// Play button handler: ensures a file is selected, builds the wave-out
        /// device and metered sample chain for it, lazily creates the spectrum/filter
        /// plots on first use, and starts playback.
        /// </summary>
        private void button9_Click( object sender, EventArgs e )
        {
            // check if we have a selected file; if not, open the file picker first
            if ( String.IsNullOrEmpty( audioFileName ) ) {
                button8_Click( sender, e );
            }

            // user cancelled the picker - nothing to play
            if ( String.IsNullOrEmpty( audioFileName ) ) {
                return;
            }

            // create wave out device
            CreateWaveOut();

            // create the sample-provider chain for the selected file
            ISampleProvider sampleProvider = null;

            try {

                // read the file
                fileWaveStream = new Mp3FileReader( audioFileName );

                // create the wave channel at half volume
                var waveChannel =  new SampleChannel( this.fileWaveStream );
                waveChannel.Volume = 0.5f;

                // create the metering sampler
                sampleProvider = new MeteringSampleProvider( waveChannel );

            } catch ( Exception createException ) {
                MessageBox.Show( String.Format( "{0}", createException.Message ), "Error Loading File" );
                return;
            }

            try {

                // init the wave out with the sample provider and the processing callback
                waveOut.Init( new SampleToWaveProvider( sampleProvider), callback);

            } catch ( Exception initException ) {
                MessageBox.Show( String.Format( "{0}", initException.Message ), "Error Initializing Output" );
                return;
            }

            // plots are created once and reused on subsequent plays
            if ( plot_signal_spectrum == null )
            {

                #region Plot Creation

                signal_spectrum = new FxVectorF(256);

                // insert the plot of the time filter
                filterPlot = new PloterElement(signal_spectrum);
                filterPlot.Position.X = 0;
                filterPlot.Position.Y = 410;
                filterPlot.Origin = new FxVector2f(10, 100);
                filterPlot.FitPlots();
                canvas_audio.AddElements(filterPlot);

                // create the plot for the spectrum
                plot_signal_spectrum = new PloterElement( signal_spectrum );
                plot_signal_spectrum.Position.X = 0;
                plot_signal_spectrum.Position.Y = 10;
                plot_signal_spectrum.Origin = new FxVector2f(10, 100);
                plot_signal_spectrum.FitPlots();
                plot_signal_spectrum.AddPlot(signal_spectrum, PlotType.Lines, Color.Aqua);

                // add the signal to the canvas
                canvas_audio.AddElements(plot_signal_spectrum);

                // create the plot for the original (unfiltered) spectrum
                plot_signal_spectrum_original = new PloterElement(signal_spectrum);
                plot_signal_spectrum_original.Position.X = 600;
                plot_signal_spectrum_original.Position.Y = 10;
                plot_signal_spectrum_original.Origin = new FxVector2f(10, 100);
                plot_signal_spectrum_original.FitPlots();

                // add the signal to the canvas
                canvas_audio.AddElements(plot_signal_spectrum_original);

                // create the plot for the filter spectrum
                plot_filter_spectrum = new PloterElement(signal_spectrum);
                plot_filter_spectrum.Position.X = 600;
                plot_filter_spectrum.Position.Y = 410;
                plot_filter_spectrum.Origin = new FxVector2f(10, 100);
                plot_filter_spectrum.FitPlots();

                // add the signal to the canvas
                canvas_audio.AddElements(plot_filter_spectrum);
                #endregion

                // add an initial band-pass filter
                UpdateFilter(BiQuadFilter.BandPassFilterConstantPeakGain(44100, 20000, 0.5f));

            }

            // start play
            waveOut.Play();
        }
Example no. 13
        /// <summary>
        /// Plays the current/next queued song: opens the mp3, builds the
        /// channel/metering chain, applies the stored volume, starts the output
        /// device and updates the duration/title labels. On read failure it
        /// reports the error and advances to the next song.
        /// </summary>
        protected override void playSong()
        {
            try {

                // prefer a pre-queued song; otherwise pull the next one from the list
                currentSong = nextSong;
                if (currentSong == null) {

                    currentSong = getNextSong();
                    if (currentSong == null) {
                        MessageBox.Show("No songs to play.");
                        return;
                    }
                }

                // release any previously opened reader before reusing the field
                if (fileReader != null)
                    fileReader.Dispose();
                fileReader = new Mp3FileReader(currentSong.filepath);

            } catch (Exception) {
                MessageBox.Show("Error reading " + currentSong + ".\n" +
                    "File does not exist or cannot be read.");
                if (fileReader != null)
                    fileReader.Dispose();
                // skip ahead rather than leaving playback stalled
                loadNextSong(null, null);
                return;
            }

            startGUIThread();

            // pan-aware 32-bit channel; no zero padding so playback ends with the file
            waveChannel = new WaveChannel32(fileReader, 1, panSlider.Pan);
            waveChannel.PadWithZeroes = false;

            SampleChannel sampleChannel = new SampleChannel(waveChannel);
            sampleChannel.PreVolumeMeter += onPreVolumeMeter;
            // route future volume changes through the delegate, then apply the current level
            volumeDelegate = (vol) => sampleChannel.Volume = vol;
            volumeDelegate(absoluteVolume * mixedVolume);

            //Debug.WriteLine("absoluteVolume: " + absoluteVolume + " mixedVolume: " + mixedVolume);
            //Debug.WriteLine("absoluteVolume * mixedVolume = " + absoluteVolume * mixedVolume);

            // post-volume meter drives onPostVolumeMeter level updates
            postVolumeMeter = new MeteringSampleProvider(sampleChannel);
            postVolumeMeter.StreamVolume += onPostVolumeMeter;

            deviceOut.Init(postVolumeMeter);
            deviceOut.Play();

            // show total duration as mm:ss
            label_EndTime.Text = String.Format("{0:00}:{1:00}",
                        (int)fileReader.TotalTime.TotalMinutes,
                        fileReader.TotalTime.Seconds);

            label_SongTitle.Text = currentSong.ToString();
        }