public void Test_VerifyAverageBetweenDataContextSwitch()
        {
            SampleAggregator uut = new SampleAggregator();

            uint count = 200;
            uint value = 10;

            var samples = GenerateSamples(count, value, value);

            List<AggregatedSample> aggregatedSamples = new List<AggregatedSample>();
            uut.DataAggregatedEvent +=
                sampleString =>
                {
                    aggregatedSamples.Add(ConvertToSample(sampleString));
                };

            uut.AggregateData(samples, FeatureVectorType.AverageContextSwitch);

            Assert.AreEqual<int>((int)(count / value), aggregatedSamples.Count);
            uint expectedValue = (value * (value - 1)) / (value - 1);  // sum of the (value - 1) samples between context switches divided by their count, i.e. the per-sample average
            foreach (AggregatedSample s in aggregatedSamples)
            {
                Assert.AreEqual<uint>(expectedValue, s.EventValue1);
                Assert.AreEqual<uint>(expectedValue, s.EventValue2);
                Assert.AreEqual<uint>(expectedValue, s.EventValue3);
                Assert.AreEqual<uint>(expectedValue, s.EventValue4);
                Assert.AreEqual<uint>(expectedValue, s.EventValue5);
                Assert.AreEqual<uint>(expectedValue, s.EventValue6);
            }
        }
Example No. 2
 /// <summary>
 /// Creates a waveform view of the associated <code>Audio</code>, scaled to the given time info.
 /// </summary>
 /// <param name="timeinfo">Time info used to scale the waveform view.</param>
 public Waveform(TimeInfo timeinfo)
     : base(timeinfo)
 {
     samples   = new SampleAggregator();
     BackColor = Color.Gray;
     Visible   = false;
 }
Example No. 3
        private void RenderFile()
        {
            SampleAggregator.RaiseRestart();
            using (WaveFileReader reader = new WaveFileReader(this.voiceRecorderState.ActiveFile))
            {
                this.samplesPerSecond = reader.WaveFormat.SampleRate;
                SampleAggregator.NotificationCount = reader.WaveFormat.SampleRate / 10;

                byte[]     buffer     = new byte[1024];
                WaveBuffer waveBuffer = new WaveBuffer(buffer);
                waveBuffer.ByteBufferCount = buffer.Length;
                int bytesRead;
                do
                {
                    bytesRead = reader.Read(waveBuffer, 0, buffer.Length);
                    int samples = bytesRead / 2;
                    for (int sample = 0; sample < samples; sample++)
                    {
                        if (bytesRead > 0)
                        {
                            sampleAggregator.Add(waveBuffer.ShortBuffer[sample] / 32768f);
                        }
                    }
                } while (bytesRead > 0);
                int totalSamples = (int)reader.Length / 2;
                TotalWaveFormSamples = totalSamples / sampleAggregator.NotificationCount;
                SelectAll();
            }
            audioPlayer.LoadFile(this.voiceRecorderState.ActiveFile);
        }
        public AudioRecorder()
            : this(sampleRate : 16000, channels : 1, device : 1)
        {
            sampleAggregator = new SampleAggregator();

            RecordingFormat = new WaveFormat(SelectedSampleRate, SelectedChannel + 1);
        }
Example No. 5
 private void OnExamQsSampleAggregator(ExamQsSampleAggregatorMessage message)
 {
     //Application.Current.Dispatcher.Invoke(new Action(() =>
     //{
     QsSampleAggregator = message.SampleAggregator;
     //}));
 }
Example No. 6
 /// <summary>
 /// Creates a waveform view of the associated <code>Audio</code>, scaled to the given time info.
 /// </summary>
 /// <param name="timeinfo">Time info used to scale the waveform view.</param>
 public Waveform(TimeInfo timeinfo)
     : base(timeinfo)
 {
     samples   = new SampleAggregator();
     BackColor = Color.Gray;
     Visible   = false;
     Ruler.SelectedMarkMove += waveForm_SelectedMarkMove;
 }
Example No. 7
 /// <summary>
 /// Creates a waveform view of the associated <code>Audio</code>, scaled to the given time info.
 /// </summary>
 /// <param name="timeinfo">Time info used to scale the waveform view.</param>
 public Waveform(TimeInfo timeinfo)
     : base(timeinfo)
 {
     samples   = new SampleAggregator();
     BackColor = Color.Gray;
     Visible   = false;
     _timeLineGlobalEventManager = TimeLineGlobalEventManager.Manager;
     _timeLineGlobalEventManager.AlignmentActivity += WaveFormSelectedTimeLineGlobalMove;
 }
Example No. 8
 private static void recordStart()
 {
     sampleAggregator = new SampleAggregator(Convert.ToInt32(fftLengthDefault / Math.Pow(2, Lightning.delay)));
     sampleAggregator.FftCalculated += new EventHandler <FftEventArgs>(fft);
     sampleAggregator.PerformFFT     = true;
     waveIn = new WasapiLoopbackCapture(Volume.getListenDevice());
     waveIn.DataAvailable += OnDataAvailable;
     waveIn.StartRecording();
 }
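The OnDataAvailable handler wired up above is not shown in this example. A minimal, hypothetical sketch, assuming the loopback capture delivers 32-bit IEEE-float samples (the usual WasapiLoopbackCapture format) and that the aggregator exposes the same Add(float) method used elsewhere in these examples:

 // Hypothetical sketch, not part of the original example: feed every captured
 // float sample into the aggregator so FftCalculated can fire.
 private static void OnDataAvailable(object sender, WaveInEventArgs e)
 {
     // Assumes 32-bit IEEE-float capture: every 4 bytes is one sample.
     for (int index = 0; index < e.BytesRecorded; index += 4)
     {
         sampleAggregator.Add(BitConverter.ToSingle(e.Buffer, index));
     }
 }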
Example No. 9
        private void waveformGenerateWorker_DoWork()
        {
#if (MARKERS)
            var span = Markers.EnterSpan("waveformGen");
#endif

            using (Mp3FileReader waveformMp3Stream = new Mp3FileReader(song.FileName))
                using (WaveChannel32 waveformInputStream = new WaveChannel32(waveformMp3Stream))
                {
                    waveformInputStream.Sample += waveStream_Sample;

                    int    frameCount = (int)((float)waveformInputStream.Length / frameLength);
                    byte[] readBuffer = new byte[frameLength];
                    waveformAggregator = new SampleAggregator(frameLength);

                    int currentPointIndex = 0;

                    float[] waveformArray    = new float[frameCount * 2];
                    float   waveformLeftMax  = 0;
                    float   waveformRightMax = 0;

                    while (currentPointIndex < frameCount * 2)
                    {
                        waveformInputStream.Read(readBuffer, 0, readBuffer.Length);

                        var leftMaxVolume  = waveformAggregator.LeftMaxVolume;
                        var rightMaxVolume = waveformAggregator.RightMaxVolume;
                        waveformArray[currentPointIndex++] = leftMaxVolume;
                        waveformArray[currentPointIndex++] = rightMaxVolume;

                        if (leftMaxVolume > waveformLeftMax)
                        {
                            waveformLeftMax = leftMaxVolume;
                        }
                        if (rightMaxVolume > waveformRightMax)
                        {
                            waveformRightMax = rightMaxVolume;
                        }

                        waveformAggregator.Clear();

                        tkn.ThrowIfCancellationRequested();
                    }

                    byte[] waveformBytes = new byte[waveformArray.Length];
                    float  factor        = 31f / Math.Max(Math.Abs(waveformLeftMax), Math.Abs(waveformRightMax));
                    for (int ndx = 0; ndx < waveformArray.Length; ndx++)
                    {
                        waveformBytes[ndx] = (byte)Math.Abs(Math.Abs(waveformArray[ndx]) * factor);
                    }

                    song.WaveformData = waveformBytes;
                }
#if (MARKERS)
            span.Leave();
#endif
        }
Example No. 10
 public void Initalize(MMDevice audioDevice, int fftLength, int bufferLenght)
 {
     this.audioDevice  = audioDevice;
     this.bufferLenght = bufferLenght;
     Capture           = new WasapiLoopbackCapture(audioDevice);
     SampleAggregator  = new SampleAggregator(fftLength);
     SampleAggregator.FftCalculated += new EventHandler <FftEventArgs>(FftCalculated);
     SampleAggregator.PerformFFT     = true;
     //capture.ShareMode = AudioClientShareMode.Shared;
 }
Example No. 12
        public void Dispose()
        {
            OutputDevice?.Stop();
            OutputDevice?.Dispose();
            OutputDevice = null;

            Aggregator = null;
            FadeInOut  = null;

            AudioFile?.Close();
            AudioFile?.Dispose();
            AudioFile = null;
        }
Example No. 13
        private void InitAggregator(int sampleRate)
        {
            // the aggregator collects audio sample metrics
            // and publishes the results at suitable intervals.
            // Used by the OnlyR volume meter
            if (_sampleAggregator != null)
            {
                _sampleAggregator.ReportEvent -= AggregatorReportHandler;
            }

            _sampleAggregator              = new SampleAggregator(sampleRate, RequiredReportingIntervalMs);
            _sampleAggregator.ReportEvent += AggregatorReportHandler;
        }
Example No. 14
 protected override void Dispose(bool disposing)
 {
     if (audio != null)
     {
         audio.Dispose();
         audio = null;
     }
     if (samples != null)
     {
         samples.Clear();
         samples = null;
         //samples = new SampleAggregator();
     }
     base.Dispose(disposing);
 }
 private void OpenFile(string fileName)
 {
     try
     {
         var inputStream = new AudioFileReader(fileName);
         _fileStream = inputStream;
         var aggregator = new SampleAggregator(inputStream);
         aggregator.NotificationCount = inputStream.WaveFormat.SampleRate / 100;
         aggregator.PerformFFT        = true;
         _playbackDevice.Init(aggregator);
     }
     catch
     {
         CloseFile();
         throw;
     }
 }
Example No. 16
    /// <summary>
    /// Initializes playback: creates the sample aggregator and output device, opens the input stream, and wires up their events.
    /// </summary>
    private void CreateEvent()
    {
        DisposeEvent();

        sampleAggregator = new SampleAggregator((int)m_bufferLength);
        waveoutPlayer    = new WaveOut()
        {
            DesiredLatency = 100
        };
        waveoutPlayer.PlaybackStopped += OnPlaybackStopped;
        waveStream  = InitFileReader(currentSongPath);
        inputStream = new WaveChannel32(waveStream);
        waveoutPlayer.Init(inputStream);

        m_totalTime         = (float)inputStream.TotalTime.TotalSeconds;
        inputStream.Sample += SampleRead;
    }
Example No. 17
        protected override void Dispose(bool disposing)
        {
            //Only delete the Audio if the Dispose call is explicit.
            if ((audio != null) && (disposing == true))
            {
                audio.Dispose();
                audio = null;
            }

            if (samples != null)
            {
                samples.Clear();
                samples = null;
                //samples = new SampleAggregator();
            }
            base.Dispose(disposing);
        }
Example No. 18
        /// <summary>
        /// Uploads the audio to obtain timestamps (Baidu does not support MP3, so it must be converted to WAV first).
        /// </summary>
        /// <param name="sound_path">Audio file path</param>
        /// <param name="word_path">Text file path</param>
        /// <param name="language">Language</param>
        /// <param name="splitTime">Interval time</param>
        public void GetTimeSpanByNAudio(string sound_path, string word_path, string language, double splitTime = 1.5)
        {
            try
            {
                Task task_max = Task.Factory.StartNew(() =>
                {
                    try
                    {
                        if (sound_path.Contains(".mp3"))
                        {
                            sound_path = NAudioHelper.GetWavPath(sound_path);
                        }
                        var inputStream  = new AudioFileReader(sound_path);
                        string file_type = Path.GetExtension(sound_path).Substring(1);

                        var aggregator = new SampleAggregator(inputStream);
                        aggregator.NotificationCount  = inputStream.WaveFormat.SampleRate / 100;
                        aggregator.MaximumCalculated += (s, a) =>
                        {
                            MaximumCalculated(a, file_type, inputStream, splitTime);
                        };

                        //IWavePlayer playbackDevice = new WaveOut { DesiredLatency = 200 };
                        IWavePlayer playbackDevice = new DirectSoundOut(DirectSoundOut.DSDEVID_DefaultPlayback);
                        playbackDevice.Init(aggregator);
                        playbackDevice.PlaybackStopped += (s, a) =>
                        {
                            PlaybackStopped(a, sound_path, word_path, language, out isFinish, splitTime, inputStream, s as IWavePlayer);
                        };
                        playbackDevice.Play();
                    }
                    catch (Exception ex)
                    {
                        LogHelper.Error(ex.Message);
                    }
                });
                Task.WaitAny(task_max);
            }
            catch (Exception ex)
            {
                LogHelper.Error(ex.Message);
            }
        }
Example No. 19
        internal void Init()
        {
            this._mediaFoundationReader = new MediaFoundationReader(this._songManager.GetNextSong());
            this._sampleAggregator      = new SampleAggregator(this._mediaFoundationReader.ToSampleProvider())
            {
                NotificationCount = this._mediaFoundationReader.WaveFormat.SampleRate / 1024,
                PerformFFT        = true,
            };



            this._sampleAggregator.FftCalculated     += _sampleAggregator_FftCalculated;
            this._sampleAggregator.MaximumCalculated += _sampleAggregator_MaximumCalculated;
            //this._inputStream = new AudioReader();

            this._playbackDevice.PlaybackStopped += _playbackDevice_PlaybackStopped;
            this._playbackDevice.Init(this._sampleAggregator);
        }
Example No. 20
        public static async void Play()
        {
            //set the buffering state on the main window
            ViewModelManager.MainWindowViewModel.SetBufferState();
            //load the music asynchronously
            await Task.Run(new Action(() =>
            {
                PrePlay();
            }));

            //set the playing state on the main window
            ViewModelManager.MainWindowViewModel.SetPlayState();

            try
            {
                if (State == PlaybackState.Stopped)
                {
                    reader     = new MediaFoundationReader(Source);
                    channel    = new SampleChannel(reader);
                    aggregator = new SampleAggregator(channel);
                    aggregator.NotificationCount = reader.WaveFormat.SampleRate / 100;
                    aggregator.PerformFFT        = true;
                    aggregator.FftCalculated    += DrawFFT;
                    player.Init(aggregator);
                }

                player.Play();
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message);
            }


            //set up current-progress tracking
            PlayerNotification.Start();

            if (!File.Exists(DownloadManager.MusicCachePath + PlayMusic.Id + ".tmp") && PlayMusic.Origin != Enum.MusicSource.Local)
            {
                DownloadManager.DownloadFileAsync(Source, DownloadManager.MusicCachePath, PlayMusic.Id + ".tmp");
            }
        }
Example No. 21
        public void OpenFile(string path)
        {
            Stop();
            IsOpen = false;

            if (ActiveStream != null)
            {
                SelectionBegin  = TimeSpan.Zero;
                SelectionEnd    = TimeSpan.Zero;
                ChannelPosition = 0;
            }

            StopAndCloseStream();

            if (System.IO.File.Exists(path))
            {
                try
                {
                    waveOutDevice = new WaveOut()
                    {
                        DesiredLatency = 100
                    };
                    ActiveStream = new Mp3FileReader(path);
                    inputStream  = new WaveChannel32(ActiveStream);
                    Console.WriteLine(inputStream.TotalTime.ToString());
                    sampleAggregator               = new SampleAggregator(frameLength);
                    inputStream.Sample            += inputStream_Sample;
                    waveOutDevice.PlaybackStopped += waveOutDevice_PlaybackStopped;
                    waveOutDevice.Init(inputStream);
                    ChannelLength = inputStream.TotalTime.TotalSeconds;

                    GenerateWaveformData(path);
                    IsOpen  = true;
                    CanPlay = true;
                }
                catch (Exception ex)
                {
                    ActiveStream = null;
                    CanPlay      = false;
                }
            }
        }
Example No. 22
 private void OpenFileNoDes(string fileName)
 {
     try
     {
         var inputStream = new AudioFileReader(fileName);
         fileStream = inputStream;
         var aggregator = new SampleAggregator(inputStream);
         aggregator.NotificationCount  = inputStream.WaveFormat.SampleRate / 100;
         aggregator.PerformFFT         = true;
         aggregator.FftCalculated     += (s, a) => OnFftCalculated(a);
         aggregator.MaximumCalculated += (s, a) => OnMaximumCalculated(a);
         playbackDevice.Init(aggregator);
     }
     catch (Exception e)
     {
         Log4NetHelper.ErrorFormat("OpenFileNoDes文件打开失败. cause by:{0}", e.Message);
         //MessageBox.Show(e.Message, "Problem opening file");
         CloseFile();
     }
 }
Example No. 23
 private void OpenFile(string fileName)
 {
     try
     {
         var inputStream = new AudioFileReader(fileName);
         this.fileStream = inputStream;
         var aggregator = new SampleAggregator(inputStream)
         {
             NotificationCount = inputStream.WaveFormat.SampleRate / 100,
             PerformFFT        = true
         };
         aggregator.FftCalculated     += (s, a) => this.OnFftCalculated(a);
         aggregator.MaximumCalculated += (s, a) => this.OnMaximumCalculated(a);
         this.playbackDevice.Init(aggregator);
     }
     catch (Exception e)
     {
         MessageBox.Show(e.Message, "Problem opening file");
         this.CloseFile();
     }
 }
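Several of the examples above subscribe OnFftCalculated and OnMaximumCalculated without showing the handlers. A minimal, hypothetical sketch, assuming the demo-style event args (FftEventArgs.Result as an array of NAudio.Dsp.Complex, and MaxSampleEventArgs carrying MaxSample/MinSample):

 // Hypothetical handlers, not taken from any of the projects above.
 private void OnFftCalculated(FftEventArgs e)
 {
     // Convert each FFT bin to a magnitude; only the first half is meaningful for real input.
     var magnitudes = new float[e.Result.Length / 2];
     for (int i = 0; i < magnitudes.Length; i++)
     {
         magnitudes[i] = (float)Math.Sqrt(e.Result[i].X * e.Result[i].X + e.Result[i].Y * e.Result[i].Y);
     }
     // ...hand 'magnitudes' to a spectrum display...
 }

 private void OnMaximumCalculated(MaxSampleEventArgs e)
 {
     // Use the peak of the last notification block to drive a simple volume meter.
     float peak = Math.Max(e.MaxSample, Math.Abs(e.MinSample));
     // ...update the meter with 'peak'...
 }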
Example No. 24
 public AudioClip(string fileName)
 {
     if (File.Exists(fileName))
     {
         AudioFile  = new AudioFileReader2(fileName);
         Aggregator = new SampleAggregator(AudioFile, 1024)
         {
             PerformFFT = false
         };
         FadeInOut    = new FadeInOutSampleProvider(Aggregator, false);
         OutputDevice = new WaveOutEvent()
         {
             NumberOfBuffers = 10,
             DesiredLatency  = 85
         };
         OutputDevice.Init(FadeInOut);
     }
     else
     {
         this = Empty;
     }
 }
Example No. 25
        protected override void Dispose(bool disposing)
        {
            //Only delete the Audio if the Dispose call is explicit.
            if ((audio != null) && (disposing == true))
            {
                audio.Dispose();
                audio = null;
            }

            if (samples != null)
            {
                samples.Clear();
                samples = null;
                //samples = new SampleAggregator();
            }

            if (disposing)
            {
                _timeLineGlobalEventManager.AlignmentActivity -= WaveFormSelectedTimeLineGlobalMove;
            }

            base.Dispose(disposing);
        }
Example No. 26
 private void OpenFile(string fileName)
 {
     try
     {
         string outFile = Path.GetTempFileName();
         DESHelper.DESFileClass.DecryptFile(fileName, outFile, "www.17kouyu.com");
         var inputStream = new AudioFileReader(outFile);
         //var inputStream = new AudioFileReader(fileName);
         fileStream = inputStream;
         var aggregator = new SampleAggregator(inputStream);
         aggregator.NotificationCount  = inputStream.WaveFormat.SampleRate / 100;
         aggregator.PerformFFT         = true;
         aggregator.FftCalculated     += (s, a) => OnFftCalculated(a);
         aggregator.MaximumCalculated += (s, a) => OnMaximumCalculated(a);
         playbackDevice.Init(aggregator);
     }
     catch (Exception e)
     {
         Log4NetHelper.ErrorFormat("OpenFile文件打开失败. cause by:{0}", e.Message);
         //MessageBox.Show(e.Message, "Problem opening file");
         CloseFile();
     }
 }
Example No. 27
        //TODO Look here

        private void OpenFile(string fileName)
        {
            try
            {
                var inputStream = new AudioFileReader(fileName);
                fileStream = inputStream;
                var aggregator = new SampleAggregator(inputStream);
                aggregator.NotificationCount = inputStream.WaveFormat.SampleRate / 100;
                aggregator.PerformFFT        = true;
                playbackDevice.Init(aggregator);
                minutesPosition.Text       = fileStream.TotalTime.Minutes.ToString();
                secondsPosition.Text       = fileStream.TotalTime.Seconds.ToString("00");
                slider.Maximum             = fileStream.TotalTime.TotalSeconds;
                minutesAcyualPosition.Text = "0";
                secondsAcyualPosition.Text = "00";
                Action <object, EventArgs> action = (object send, EventArgs f) =>
                {
                    try
                    {
                        slider.Value = fileStream.CurrentTime.TotalSeconds;
                        minutesAcyualPosition.Text = fileStream.CurrentTime.Minutes.ToString();
                        secondsAcyualPosition.Text = fileStream.CurrentTime.Seconds.ToString("00");
                        timeTest();
                    }
                    catch
                    {
                    }
                };
                timer.Tick    += new EventHandler(action);
                timer.Interval = TimeSpan.FromSeconds(1);
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message, "Problem opening file");
                CloseFile();
            }
        }
Example No. 28
        public void OpenFile(string path)
        {
            Stop();

            if (ActiveStream != null)
            {
                SelectionBegin = TimeSpan.Zero;
                SelectionEnd = TimeSpan.Zero;
                ChannelPosition = 0;
            }

            StopAndCloseStream();

            if (System.IO.File.Exists(path))
            {
                try
                {
                    waveOutDevice = new WaveOut()
                    {
                        DesiredLatency = 100
                    };
                    ActiveStream = new Mp3FileReader(path);
                    inputStream = new WaveChannel32(ActiveStream);
                    sampleAggregator = new SampleAggregator(fftDataSize);
                    inputStream.Sample += inputStream_Sample;
                    waveOutDevice.Init(inputStream);
                    ChannelLength = inputStream.TotalTime.TotalSeconds;
                    FileTag = TagLib.File.Create(path);
                    GenerateWaveformData(path);
                    CanPlay = true;
                }
                catch
                {
                    ActiveStream = null;
                    CanPlay = false;
                }
            }
        }
        public void Test_VerifyBinningWorks()
        {
            SampleAggregator uut = new SampleAggregator();

            uint count = 1000;
            uint value = 10;
            var samples = GenerateDifferentSamples(count, value, 50);

            List<string> aggregatedSamples = new List<string>();
            uut.DataAggregatedEvent +=
                sampleString =>
                {
                    aggregatedSamples.Add(sampleString);
                };

            uut.AggregateData(samples, FeatureVectorType.Histogram);
        }
Example No. 30
        //  MediaPlayer Player = new MediaPlayer();
        public MainWindow()
        {
            InitializeComponent();

            Global.KirmiziGamJeomEvent += Global_KirmiziGamJeomEvent;
            Global.MaviGamJeomEvent    += Global_MaviGamJeomEvent;
            System.Windows.Threading.DispatcherTimer dispatcherTimer = new System.Windows.Threading.DispatcherTimer();
            dispatcherTimer.Tick    += dispatcherTimer_Tick;
            dispatcherTimer.Interval = new TimeSpan(0, 0, 1);
            dispatcherTimer.Start();

            MolaSureEvent += MainWindow_MolaSureEvent;

            ////------------Player
            var inputStream = new AudioFileReader(SoundLocation1);

            fileStream1 = inputStream;
            var aggregator = new SampleAggregator(inputStream);

            aggregator.NotificationCount = inputStream.WaveFormat.SampleRate / 100;
            aggregator.PerformFFT        = true;
            Player1 = new WaveOut {
                DesiredLatency = 500
            };
            Player1.Init(aggregator);
            ////------------------Player
            ////------------Player
            var inputStream2 = new AudioFileReader(SoundLocation2);

            fileStream2 = inputStream2;
            var aggregator2 = new SampleAggregator(inputStream2);

            aggregator2.NotificationCount = inputStream2.WaveFormat.SampleRate / 100;
            aggregator2.PerformFFT        = true;
            Player2 = new WaveOut {
                DesiredLatency = 500
            };
            Player2.Init(aggregator2);
            ////------------------Player
            ////------------Player
            var inputStream3 = new AudioFileReader(SoundLocation3);

            fileStream3 = inputStream3;
            var aggregator3 = new SampleAggregator(inputStream3);

            aggregator3.NotificationCount = inputStream3.WaveFormat.SampleRate / 100;
            aggregator3.PerformFFT        = true;
            Player3 = new WaveOut {
                DesiredLatency = 500
            };
            Player3.Init(aggregator3);
            ////------------------Player

            ////------------Player
            var inputStreamgs = new AudioFileReader(GeriSayim);

            fileStreamgs = inputStreamgs;
            var aggregatorgs = new SampleAggregator(inputStreamgs);

            aggregatorgs.NotificationCount = inputStreamgs.WaveFormat.SampleRate / 100;
            aggregatorgs.PerformFFT        = true;
            Playergs = new WaveOut {
                DesiredLatency = 500
            };
            Playergs.Init(aggregatorgs);
            ////------------------Player
            //   SoundLocation = dir + "\\" + SoundLocation;
            var Ayar = Global.db.Global_Veriler.FirstOrDefault();

            EventManager.RegisterClassHandler(typeof(Window),
                                              Keyboard.KeyUpEvent, new KeyEventHandler(keyUp), true);
            AyarDegistir();

            Global.KirmiziSkorDegistiEvent += Global_KirmiziSkorDegistiEvent;
            Global.MaviSkorDegistiEvent    += Global_MaviSkorDegistiEvent;
            Global.RaundDegistiEvent       += Global_RaundDegistiEvent;
            var    productId = 0x0011;
            Thread Th        = new Thread(Timer);

            Th.IsBackground = true;
            Th.Start();
            int VendorId = 0x0079;

            Global.Cihaz = HidDevices.Enumerate(VendorId, productId).FirstOrDefault();
            var _device = Global.Cihaz;

            btnSureyiDurdur.Content    = "Süreyi Başlat";
            btnSureyiDurdur.IsEnabled  = false;
            btnMaciBitir.IsEnabled     = false;
            btnRaundBaslat.IsEnabled   = false;
            btnScoreDuzeltme.IsEnabled = false;
            btnSayacArttir.Visibility  = Visibility.Hidden;
            btnSayacDusur.Visibility   = Visibility.Hidden;
            var vr = Global.db.Maclar;

            if (Ayar != null)
            {
                var converter   = new System.Windows.Media.BrushConverter();
                var KirmiziRenk = (Brush)converter.ConvertFromString(Ayar.kirmizirenk);
                var MaviRenk    = (Brush)converter.ConvertFromString(Ayar.mavirenk);
                var SkorRenk    = (Brush)converter.ConvertFromString(Ayar.skorrenk);
                Global.AraSuresi          = Ayar.arasuresi;
                Global.MaxPuanSayi        = Ayar.maxpuansayi;
                Global.MaxPuan            = Ayar.maxpuan;
                Global.RaundSayisi        = Ayar.raundsayisi;
                Global.OnikiFarkPuani     = Ayar.onikifarkpuani;
                Global.SureArtan          = Ayar.sureartan;
                Global.TimeOutSureArtan   = Ayar.timeoutsureartan;
                Global.VucutPuani         = Ayar.vucutpuani;
                Global.KafaPuani          = Ayar.kafapuani;
                Global.RaundSuresi        = Ayar.raundsuresi.Value;
                Global.KirmiziRenk        = KirmiziRenk;
                Global.MaviRenk           = MaviRenk;
                Global.SkorRenk           = SkorRenk;
                Global.SesEfekti          = true;
                lblMaviSkor.Foreground    = SkorRenk;
                lblKirmiziSkor.Foreground = SkorRenk;
            }
            Global.KirmiziAd       = "Hasan";
            Global.MaviAd          = "Ahmet";
            lblMaviIsim.Content    = Global.MaviAd;
            lblKirmiziIsim.Content = Global.KirmiziAd;
            if (Global.Cihaz != null)
            {
                Thread.Sleep(300);
                _device.OpenDevice();

                _device.Inserted += DeviceAttachedHandler;
                _device.Removed  += DeviceRemovedHandler;

                _device.MonitorDeviceEvents = true;
            }
            pnlCezalar.IsEnabled = false;
        }
Example No. 31
        private void WaveFormGenerateWork(object sender, DoWorkEventArgs e)
        {
            WaveformGenerationParams waveformParams      = e.Argument as WaveformGenerationParams;
            AudioFileReader          waveformMp3Stream   = new AudioFileReader(waveformParams.Path);
            WaveChannel32            waveformInputStream = new WaveChannel32(waveformMp3Stream);

            waveformInputStream.Sample += waveStream_Sample;

            int frameLength    = fftDataSize;
            int frameCount     = (int)((double)waveformInputStream.Length / (double)frameLength);
            int waveformLength = frameCount * 2;

            byte[] readBuffer = new byte[frameLength];
            waveformAggregator = new SampleAggregator(frameLength);

            float maxLeftPointLevel  = float.MinValue;
            float maxRightPointLevel = float.MinValue;
            int   currentPointIndex  = 0;

            float[]      waveformCompressedPoints = new float[waveformParams.Points];
            List <float> waveformData             = new List <float>();
            List <int>   waveMaxPointIndexes      = new List <int>();

            for (int i = 1; i <= waveformParams.Points; i++)
            {
                waveMaxPointIndexes.Add((int)Math.Round(waveformLength * ((double)i / (double)waveformParams.Points), 0));
            }
            int readCount = 0;

            while (currentPointIndex * 2 < waveformParams.Points)
            {
                waveformInputStream.Read(readBuffer, 0, readBuffer.Length);

                waveformData.Add(waveformAggregator.LeftMaxVolume);
                waveformData.Add(waveformAggregator.RightMaxVolume);

                if (waveformAggregator.LeftMaxVolume > maxLeftPointLevel)
                {
                    maxLeftPointLevel = waveformAggregator.LeftMaxVolume;
                }
                if (waveformAggregator.RightMaxVolume > maxRightPointLevel)
                {
                    maxRightPointLevel = waveformAggregator.RightMaxVolume;
                }

                if (readCount > waveMaxPointIndexes[currentPointIndex])
                {
                    waveformCompressedPoints[(currentPointIndex * 2)]     = maxLeftPointLevel;
                    waveformCompressedPoints[(currentPointIndex * 2) + 1] = maxRightPointLevel;
                    maxLeftPointLevel  = float.MinValue;
                    maxRightPointLevel = float.MinValue;
                    currentPointIndex++;
                }
                if (readCount % 3000 == 0)
                {
                    float[] clonedData = (float[])waveformCompressedPoints.Clone();
                    App.Current.Dispatcher.Invoke(new Action(() =>
                    {
                        WaveformData = clonedData;
                    }));
                }

                if (waveformGenerateWorker.CancellationPending)
                {
                    e.Cancel = true;
                    break;
                }
                readCount++;
            }

            float[] finalClonedData = (float[])waveformCompressedPoints.Clone();
            App.Current.Dispatcher.Invoke(new Action(() =>
            {
                fullLevelData = waveformData.ToArray();
                WaveformData  = finalClonedData;
            }));
            waveformInputStream.Close();
            waveformInputStream.Dispose();
            waveformInputStream = null;
            waveformMp3Stream.Close();
            waveformMp3Stream.Dispose();
            waveformMp3Stream = null;
        }
Example No. 32
 public AudioRecorder()
 {
     this.sampleAggregator = new SampleAggregator();
     this.recordingFormat  = new WaveFormat(16000, 1);
     this.sampleAggregator.NotificationCount = this.recordingFormat.SampleRate / 10;
 }
Example No. 33
 public ExamQsSampleAggregatorMessage(SampleAggregator sampleAggregator)
 {
     this.SampleAggregator = sampleAggregator;
 }
Example No. 34
        private void waveformGenerateWorker_DoWork(object sender, DoWorkEventArgs e)
        {
            WaveformGenerationParams waveformParams = e.Argument as WaveformGenerationParams;
            Mp3FileReader waveformMp3Stream = new Mp3FileReader(waveformParams.Path);
            WaveChannel32 waveformInputStream = new WaveChannel32(waveformMp3Stream);
            waveformInputStream.Sample += waveStream_Sample;

            int frameLength = fftDataSize;
            int frameCount = (int)((double)waveformInputStream.Length / (double)frameLength);
            int waveformLength = frameCount * 2;
            byte[] readBuffer = new byte[frameLength];
            waveformAggregator = new SampleAggregator(frameLength);

            float maxLeftPointLevel = float.MinValue;
            float maxRightPointLevel = float.MinValue;
            int currentPointIndex = 0;
            float[] waveformCompressedPoints = new float[waveformParams.Points];
            List<float> waveformData = new List<float>();
            List<int> waveMaxPointIndexes = new List<int>();

            for (int i = 1; i <= waveformParams.Points; i++)
            {
                waveMaxPointIndexes.Add((int)Math.Round(waveformLength * ((double)i / (double)waveformParams.Points), 0));
            }
            int readCount = 0;
            while (currentPointIndex * 2 < waveformParams.Points && waveformInputStream.Position < (waveformInputStream.Length - 1024))
            {
                waveformInputStream.Read(readBuffer, 0, readBuffer.Length);

                waveformData.Add(waveformAggregator.LeftMaxVolume);
                waveformData.Add(waveformAggregator.RightMaxVolume);

                if (waveformAggregator.LeftMaxVolume > maxLeftPointLevel)
                    maxLeftPointLevel = waveformAggregator.LeftMaxVolume;
                if (waveformAggregator.RightMaxVolume > maxRightPointLevel)
                    maxRightPointLevel = waveformAggregator.RightMaxVolume;

                if (readCount > waveMaxPointIndexes[currentPointIndex])
                {
                    waveformCompressedPoints[(currentPointIndex * 2)] = maxLeftPointLevel;
                    waveformCompressedPoints[(currentPointIndex * 2) + 1] = maxRightPointLevel;
                    maxLeftPointLevel = float.MinValue;
                    maxRightPointLevel = float.MinValue;
                    currentPointIndex++;
                }
                if (readCount % 3000 == 0)
                {
                    float[] clonedData = (float[])waveformCompressedPoints.Clone();
                    App.Current.Dispatcher.Invoke(new Action(() =>
                    {
                        WaveformData = clonedData;
                    }));
                }

                if (waveformGenerateWorker.CancellationPending)
                {
                    e.Cancel = true;
                    break;
                }
                readCount++;
            }

            float[] finalClonedData = (float[])waveformCompressedPoints.Clone();
            App.Current.Dispatcher.Invoke(new Action(() =>
            {
                fullLevelData = waveformData.ToArray();
                WaveformData = finalClonedData;
            }));
            waveformInputStream.Close();
            waveformInputStream.Dispose();
            waveformInputStream = null;
            waveformMp3Stream.Close();
            waveformMp3Stream.Dispose();
            waveformMp3Stream = null;
        }
Example No. 35
        private void waveformGenerateWorker_DoWork(object sender, DoWorkEventArgs e)
        {
            WaveformGenerationParams waveformParams      = e.Argument as WaveformGenerationParams;
            Mp3FileReader            waveformMp3Stream   = new Mp3FileReader(waveformParams.Path);
            WaveChannel32            waveformInputStream = new WaveChannel32(waveformMp3Stream);

            waveformInputStream.Sample += waveStream_Sample;

            int frameCount = (int)((double)waveformInputStream.Length / (double)frameLength);

            byte[] readBuffer = new byte[frameLength];
            waveformAggregator = new SampleAggregator(frameLength);

            int currentPointIndex = 0;

            float[] waveformArray    = new float[frameCount * 2];
            float   waveformLeftMax  = 0;
            float   waveformRightMax = 0;
            int     readCount        = 0;

            while (currentPointIndex < frameCount * 2)
            {
                waveformInputStream.Read(readBuffer, 0, readBuffer.Length);

                waveformArray[currentPointIndex++] = waveformAggregator.LeftMaxVolume;
                waveformArray[currentPointIndex++] = waveformAggregator.RightMaxVolume;

                if (waveformAggregator.LeftMaxVolume > waveformLeftMax)
                {
                    waveformLeftMax = waveformAggregator.LeftMaxVolume;
                }
                if (waveformAggregator.RightMaxVolume > waveformRightMax)
                {
                    waveformRightMax = waveformAggregator.RightMaxVolume;
                }

                waveformAggregator.Clear();

                if (waveformGenerateWorker.CancellationPending)
                {
                    e.Cancel = true;
                    break;
                }
                readCount++;
            }
            byte[] waveformBytes = new byte[waveformArray.Length];
            float  factor        = 31f / Math.Max(Math.Abs(waveformLeftMax), Math.Abs(waveformRightMax));

            for (int ndx = 0; ndx < waveformArray.Length; ndx++)
            {
                waveformBytes[ndx] = (byte)Math.Abs(Math.Abs(waveformArray[ndx]) * factor);
            }

            //UI.Invoke(new Action(() => { WaveformData = waveformBytes; }));
            waveformData = waveformBytes;

            waveformInputStream.Close();
            waveformInputStream.Dispose();
            waveformInputStream = null;
            waveformMp3Stream.Close();
            waveformMp3Stream.Dispose();
            waveformMp3Stream = null;
        }
        public void Test_VerifySumBetweenDataContextSwitchWithSampleCountDimension()
        {
            SampleAggregator uut = new SampleAggregator();

            uint count = 520;
            uint value = 10;

            var samples = GenerateSamples(count, value, value);

            List<AggregatedSample> aggregatedSamples = new List<AggregatedSample>();
            uut.DataAggregatedEvent +=
                sampleString =>
                {
                    aggregatedSamples.Add(ConvertToSample(sampleString));
                };

            uut.AggregateData(samples, FeatureVectorType.SummedContextSwitchWithTotalCountIncluded);

            Assert.AreEqual<int>((int)(count / value), aggregatedSamples.Count);
            uint expectedValue = value * (value - 1);
            foreach (AggregatedSample s in aggregatedSamples)
            {
                Assert.AreEqual<uint>(expectedValue, s.EventValue1);
                Assert.AreEqual<uint>(expectedValue, s.EventValue2);
                Assert.AreEqual<uint>(expectedValue, s.EventValue3);
                Assert.AreEqual<uint>(expectedValue, s.EventValue4);
                Assert.AreEqual<uint>(expectedValue, s.EventValue5);
                Assert.AreEqual<uint>(expectedValue, s.EventValue6);
                Assert.AreEqual<uint>(value - 1, s.SampleCountBetweenContextSwitches);
            }
        }