Example #1
        public int EnqueueSamples(int channels, int rate, byte[] samples, int frames)
        {
            if (buffer == null)
            {
                buffer = new NAudio.Wave.BufferedWaveProvider(new NAudio.Wave.WaveFormat(rate, channels));
                //NAudio.Wave.DirectSoundOut dso = new NAudio.Wave.DirectSoundOut(70);
                //NAudio.Wave.AsioOut dso = new NAudio.Wave.AsioOut();
                NAudio.Wave.WaveOut dso = new NAudio.Wave.WaveOut();
                dso.Init(buffer);
                dso.Play();

                player = dso;
            }
            //  Only accept the block if the whole thing fits in the remaining buffer space.
            int space = buffer.BufferLength - buffer.BufferedBytes;

            if (space > samples.Length)
            {
                buffer.AddSamples(samples, 0, samples.Length);

                if (times == 0)
                {
                    Console.WriteLine("Enqueue");
                }

                times = (times + 1) % 100;
                return(frames);
            }
            return(0);
        }
Example #2
        private void backgroundWorker1_DoWork(object sender, DoWorkEventArgs e)
        {
            while (!stop)
            {
                if (Frames.Count < 8 || AudioBuffer.BufferedBytes < 8192 * 4)
                {
retry:
                    byte[] audio;
                    Bitmap b = Video.GetNextFrame(out audio);
                    if (audio != null)
                    {
                        short[] data   = AudioConverter.GetWaveData(audio, 0, audio.Length);
                        byte[]  result = new byte[data.Length * 2];
                        IOUtil.WriteS16sLE(result, 0, data);
                        AudioBuffer.AddSamples(result, 0, result.Length);
                        goto retry;
                    }
                    if (b == null)
                    {
                        stop = true;
                        if ((Video.Header.Flags & 4) == 4)
                        {
                            Player.Stop();
                            Player.Dispose();
                            Player      = null;
                            AudioBuffer = null;
                        }
                    }
                    else
                    {
                        Frames.Enqueue(b);
                    }
                }
            }
        }
Example #3
 private void backgroundWorker1_DoWork(object sender, DoWorkEventArgs e)
 {
     while (!stop)
     {
         if (Frames.Count < 8 || AudioBuffer.BufferedBytes < 8192 * 4)
         {
         retry:
             byte[] audio;
             Bitmap b = Video.GetNextFrame(out audio);
             if (audio != null)
             {
                 short[] data = AudioConverter.GetWaveData(audio, 0, audio.Length);
                 byte[] result = new byte[data.Length * 2];
                 IOUtil.WriteS16sLE(result, 0, data);
                 AudioBuffer.AddSamples(result, 0, result.Length);
                 goto retry;
             }
             if (b == null)
             {
                 stop = true;
                 if ((Video.Header.Flags & 4) == 4)
                 {
                     Player.Stop();
                     Player.Dispose();
                     Player = null;
                     AudioBuffer = null;
                 }
             }
             else Frames.Enqueue(b);
         }
     }
 }
Example #4
 private void PlaybackManager_OnPlaybackStateChange(NAudio.Wave.BufferedWaveProvider sender, PlaybackManager.StreamingPlaybackState state)
 {
     Dispatcher.Invoke(() =>
     {
         dataGrid.Items.Refresh();
     });
 }
Example #5
        private void button1_Click(object sender, EventArgs e)
        {
            try
            {
                wavein = new NAudio.Wave.WaveIn()
                {
                    DeviceNumber = devicelist.SelectedIndex - 1
                };
                wavein.WaveFormat         = new NAudio.Wave.WaveFormat(48000, 2);
                wavein.BufferMilliseconds = 200;

                //wavein.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(wi_DataAvailable);

                wavein.DataAvailable += wi_DataAvailable;
                bwp = new NAudio.Wave.BufferedWaveProvider(wavein.WaveFormat);
                bwp.DiscardOnBufferOverflow = true;
                String[] serverIP = textBox1.Text.Split(':');
                tcpclnt_sound = new TcpClient(serverIP[0], Int32.Parse(serverIP[1]) + 1);
                stm_s         = tcpclnt_sound.GetStream();
                wavein.StartRecording();
                logbox.AppendText("Command \"" + "@@ speak_tcp" + "\" sent.\n");
            }
            catch (Exception ex)
            {
                logbox.AppendText(ex.Message, Color.Red);
                crash++;
            }
        }
Example #6
 private static void InitAudio()
 {
     _audioProvider = new NAudio.Wave.BufferedWaveProvider(new NAudio.Wave.WaveFormat(44100, 2));
     _audioProvider.BufferDuration = TimeSpan.FromSeconds(300);
     _audioSink = new NAudio.Wave.WaveOut();
     _audioSink.Init(_audioProvider);
 }
Example #7
 /// <summary>
 /// Init the audio playback
 /// </summary>
 public void Init()
 {
     NAudio.Wave.WaveFormat format = new NAudio.Wave.WaveFormat(44100, 16, 1);
     provider = new NAudio.Wave.BufferedWaveProvider(format);
     waveOut  = new NAudio.Wave.WaveOut();
     waveOut.Init(provider);
     waveOut.Play();
 }
Example #8
 /// <summary>
 /// Dispose and destroy the audio playback object
 /// </summary>
 public void Destroy()
 {
     waveOut.Stop();
     provider.ClearBuffer();
     waveOut.Dispose();
     waveOut  = null;
     provider = null;
 }
Example #9
        private void RunAudioThread(object jobObj)
        {
            var job = (AudioOut.Job)jobObj;

            using (var audioOut = new NAudio.Wave.WaveOut())
            {
                var audioBuffer = new NAudio.Wave.BufferedWaveProvider(new NAudio.Wave.WaveFormat());   // parameterless WaveFormat() defaults to 44.1 kHz, 16-bit, stereo

                audioOut.DesiredLatency = 100;
                audioOut.Init(audioBuffer);
                audioOut.Play();

                var bufferSize   = 5000;
                var sampleBuffer = new float[bufferSize];
                var byteBuffer   = new byte[bufferSize * 4];
                while (true)
                {
                    while (audioBuffer.BufferedBytes < bufferSize * 2)
                    {
                        for (var i = 0; i < sampleBuffer.Length; i++)
                        {
                            sampleBuffer[i] = 0;
                        }

                        var sampleNum = job.GetNextSamples(sampleBuffer);
                        if (sampleNum == 0)
                        {
                            goto end;
                        }

                        //  Convert each float sample to 16-bit little-endian PCM and write the
                        //  same value to both stereo channels.
                        for (var i = 0; i < sampleNum; i++)
                        {
                            var sampleU = unchecked ((ushort)(short)(sampleBuffer[i] * 0x4000));

                            byteBuffer[i * 4 + 0] = (byte)((sampleU >> 0) & 0xff);
                            byteBuffer[i * 4 + 1] = (byte)((sampleU >> 8) & 0xff);
                            byteBuffer[i * 4 + 2] = (byte)((sampleU >> 0) & 0xff);
                            byteBuffer[i * 4 + 3] = (byte)((sampleU >> 8) & 0xff);
                        }

                        audioBuffer.AddSamples(byteBuffer, 0, sampleNum * 4);
                    }

                    System.Threading.Thread.Sleep(50);
                }

end:
                audioOut.Stop();
            }

            lock (audioThreads)
                audioThreads.Remove(System.Threading.Thread.CurrentThread);
        }
Example #10
        private void FMV_Load(object sender, EventArgs e)
        {
            /*double ticks = 10000000.0 / (Video.Header.FrameRate / 256.0);
             * float exp = (float)(ticks - Math.Floor(ticks));
             * if (exp != 0)
             * {
             *      int i = 0;
             *      float result;
             *      do
             *      {
             *              i++;
             *              result = exp * i;
             *      }
             *      while((float)(result - Math.Floor(result)) != 0);
             * }*/
            //TODO: Calculate timing based on fps
            if ((Video.Header.Flags & 4) == 4)
            {
                AudioConverter = new IMAADPCMDecoder();
                AudioBuffer    = new NAudio.Wave.BufferedWaveProvider(new NAudio.Wave.WaveFormat((int)Video.Header.AudioRate, 16, 1));
                AudioBuffer.DiscardOnBufferOverflow = true;
                AudioBuffer.BufferLength            = 8192 * 16;
                Player = new NAudio.Wave.WaveOut();
                Player.DesiredLatency = 150;
                Player.Init(AudioBuffer);
                Player.Play();
            }
            new System.Threading.Thread(new System.Threading.ThreadStart(delegate
            {
                int state = 0;
                while (!stop)
                {
                    if (Frames.Count != 0)
                    {
                        pictureBox1.Image = Frames.Dequeue();
                        switch (state)
                        {
                        case 0: System.Threading.Thread.Sleep(TimeSpan.FromTicks(666666)); break;

                        case 1: System.Threading.Thread.Sleep(TimeSpan.FromTicks(666667)); break;

                        case 2: System.Threading.Thread.Sleep(TimeSpan.FromTicks(666667)); break;
                        }
                        state = (state + 1) % 3;
                    }
                }
                System.Threading.Thread.CurrentThread.Abort();
            })).Start();
            backgroundWorker1.RunWorkerAsync();
        }
Example #11
        /// <summary>
        /// Stop playing
        /// </summary>
        public void Stop()
        {
            if (dso != null)
            {
                dso.Stop();
            }
            if (buffer != null)
            {
                buffer.ClearBuffer();
            }

            //  Force NAudio reset when we next play something.
            buffer = null;
            dso    = null;
        }
Example #12
 private void FMV_FormClosing(object sender, FormClosingEventArgs e)
 {
     if (!stop)
     {
         stop = true;
         if ((Video.Header.Flags & 4) == 4)
         {
             Player.Stop();
             Player.Dispose();
             Player      = null;
             AudioBuffer = null;
         }
     }
     Video.Close();
 }
Example #13
 public int EnqueueSamples(int channels, int rate, byte[] samples, int frames)
 {
     if (buffer == null)
     {
         buffer = new NAudio.Wave.BufferedWaveProvider(new NAudio.Wave.WaveFormat(rate, channels));
         NAudio.Wave.DirectSoundOut dso = new NAudio.Wave.DirectSoundOut(70);
         dso.Init(buffer);
         dso.Play();
     }
     int space = buffer.BufferLength - buffer.BufferedBytes;
     if (space > samples.Length)
     {
         buffer.AddSamples(samples, 0, samples.Length);
         return frames;
     }
     return 0;
 }
Example #14
        public WaveInAudioStream()
        {
            this.audioIn = new NAudio.Wave.WaveInEvent();
            this.audioIn.BufferMilliseconds = 64; // 1024 samples at 16kHz
            this.audioIn.NumberOfBuffers = 4;
            this.audioIn.WaveFormat = new NAudio.Wave.WaveFormat(rate: 16000, bits: 16, channels: 1);

            this.sampleBuffer = new NAudio.Wave.BufferedWaveProvider(this.audioIn.WaveFormat);
            this.sampleBuffer.DiscardOnBufferOverflow = true;

            this.audioIn.DataAvailable += delegate(object sender, NAudio.Wave.WaveInEventArgs args)
            {
                bool shouldSignalReader = false;
                byte[] b = null;
                TaskCompletionSource<byte[]> currentReadSource = null;
                lock (this)
                {
                    this.sampleBuffer.AddSamples(args.Buffer, 0, args.BytesRecorded);
                    if (this.readPending && this.sampleBuffer.BufferedBytes >= this.readSize)
                    {
                        this.readPending = false;
                        shouldSignalReader = true;
                        currentReadSource = this.readSource;
                        this.readSource = null;
                        b = new byte[this.readSize];
                        this.sampleBuffer.Read(b, 0, this.readSize);
                        // At this point (after the lock is released), we're ready for another ReadSamplesAsync.
                    }
                }
                if (shouldSignalReader)
                {
                    // We don't access this.readSource directly since another ReadSamplesAsync may have started after
                    // the lock was released.
                    currentReadSource.SetResult(b);
                }
            };
        }
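
The comments in the DataAvailable handler above refer to a ReadSamplesAsync method that is not part of this listing. A minimal sketch of what that companion method might look like, assuming the same readPending, readSize, readSource and sampleBuffer fields used by the constructor (hypothetical, for illustration only):

        // Hypothetical companion to the constructor above (not part of the original listing).
        // Returns immediately if enough audio is already buffered; otherwise registers a
        // pending read that the DataAvailable handler completes once readSize bytes arrive.
        public Task<byte[]> ReadSamplesAsync(int size)
        {
            lock (this)
            {
                if (this.sampleBuffer.BufferedBytes >= size)
                {
                    byte[] b = new byte[size];
                    this.sampleBuffer.Read(b, 0, size);
                    return Task.FromResult(b);
                }

                // Not enough data yet; the DataAvailable handler will complete this read.
                this.readSize    = size;
                this.readPending = true;
                this.readSource  = new TaskCompletionSource<byte[]>();
                return this.readSource.Task;
            }
        }

Taking the lock before checking BufferedBytes keeps the check and the registration of the pending read atomic with respect to the DataAvailable handler.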
Example #15
        public int EnqueueSamples(int channels, int rate, byte[] samples, int frames)
        {
            if (buffer == null)
            {
                buffer = new NAudio.Wave.BufferedWaveProvider(new NAudio.Wave.WaveFormat(rate, channels));
                //NAudio.Wave.DirectSoundOut dso = new NAudio.Wave.DirectSoundOut(70);
                //NAudio.Wave.AsioOut dso = new NAudio.Wave.AsioOut();
                NAudio.Wave.WaveOut dso = new NAudio.Wave.WaveOut();
                dso.Init(buffer);
                dso.Play();

                player = dso;
            }
            //  Only accept the block if the whole thing fits in the remaining buffer space.
            int space = buffer.BufferLength - buffer.BufferedBytes;
            if (space > samples.Length)
            {
                buffer.AddSamples(samples, 0, samples.Length);

                if (times == 0)
                    Console.WriteLine("Enqueue");

                times = (times + 1) % 100;
                return frames;
            }
            return 0;
        }
Example #16
        /// <summary>
        /// Add music samples to the buffer. Not all of the samples are required to be accepted into the player buffer.
        /// </summary>
        /// <param name="channels"></param>
        /// <param name="rate"></param>
        /// <param name="samples"></param>
        /// <param name="frames"></param>
        /// <returns></returns>
        public int EnqueueSamples(int channels, int rate, byte[] samples, int frames)
        {
            //  If we don't yet have a buffer, allocate one and start playing from it as a background activity
            if (buffer == null)
            {
                buffer = new NAudio.Wave.BufferedWaveProvider(new NAudio.Wave.WaveFormat(rate, channels));
                dso    = new NAudio.Wave.DirectSoundOut(70);
                dso.Init(buffer);
                dso.Play();
            }

            //  Do we have room in the buffer to add all the new samples?
            int space = buffer.BufferLength - buffer.BufferedBytes;

            if (space > samples.Length)
            {
                //  Add them all
                buffer.AddSamples(samples, 0, samples.Length);
                return(frames);
            }

            //  None added as there was insufficient room for them all
            return(0);
        }
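
As the summary above notes, a call can accept nothing when the buffer is full, so the caller has to offer a rejected block again later. A minimal caller sketch under that contract; the player and decoder objects and their members are assumptions made purely for illustration:

        //  Hypothetical caller (not part of the original listing): keep offering the same
        //  decoded block until EnqueueSamples reports that it was accepted.
        while (decoder.HasMoreAudio)
        {
            byte[] block  = decoder.CurrentBlock;                  // assumed: interleaved 16-bit PCM
            int    frames = block.Length / (2 * decoder.Channels); // bytes per frame = 2 * channels

            if (player.EnqueueSamples(decoder.Channels, decoder.SampleRate, block, frames) == frames)
            {
                decoder.Advance();                                 // block queued; move to the next one
            }
            else
            {
                System.Threading.Thread.Sleep(10);                 // buffer full; retry the same block shortly
            }
        }

Because EnqueueSamples either takes the whole block or none of it, the caller never has to split a block across calls.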
Example #17
 private void FMV_FormClosing(object sender, FormClosingEventArgs e)
 {
     if (!stop)
     {
         stop = true;
         if ((Video.Header.Flags & 4) == 4)
         {
             Player.Stop();
             Player.Dispose();
             Player = null;
             AudioBuffer = null;
         }
     }
     Video.Close();
 }
Example #18
 private void FMV_Load(object sender, EventArgs e)
 {
     /*double ticks = 10000000.0 / (Video.Header.FrameRate / 256.0);
     float exp = (float)(ticks - Math.Floor(ticks));
     if (exp != 0)
     {
         int i = 0;
         float result;
         do
         {
             i++;
             result = exp * i;
         }
         while((float)(result - Math.Floor(result)) != 0);
     }*/
     //TODO: Calculate timing based on fps
     if ((Video.Header.Flags & 4) == 4)
     {
         AudioConverter = new ADPCM();
         AudioBuffer = new NAudio.Wave.BufferedWaveProvider(new NAudio.Wave.WaveFormat((int)Video.Header.AudioRate, 16, 1));
         AudioBuffer.DiscardOnBufferOverflow = true;
         AudioBuffer.BufferLength = 8192 * 16;
         Player = new NAudio.Wave.WaveOut();
         Player.DesiredLatency = 150;
         Player.Init(AudioBuffer);
         Player.Play();
     }
     new System.Threading.Thread(new System.Threading.ThreadStart(delegate
         {
             int state = 0;
             while (!stop)
             {
                 if (Frames.Count != 0)
                 {
                     pictureBox1.Image = Frames.Dequeue();
                     switch (state)
                     {
                         case 0: System.Threading.Thread.Sleep(TimeSpan.FromTicks(666666)); break;
                         case 1: System.Threading.Thread.Sleep(TimeSpan.FromTicks(666667)); break;
                         case 2: System.Threading.Thread.Sleep(TimeSpan.FromTicks(666667)); break;
                     }
                     state = (state + 1) % 3;
                 }
             }
             System.Threading.Thread.CurrentThread.Abort();
         })).Start();
     backgroundWorker1.RunWorkerAsync();
 }
Example #19
        private void THP_Load(object sender, EventArgs e)
        {
            pictureBox1.Image = File.GetFrame(0).ToBitmap();
            Width             = (int)((IO.Misc.THP.THPComponents.THPVideoInfo)File.Components.THPInfos[0]).Width + 20;
            Height            = (int)((IO.Misc.THP.THPComponents.THPVideoInfo)File.Components.THPInfos[0]).Height + 29;
            audio             = File.Components.THPInfos[1] != null;
            if (audio)
            {
                bb = new NAudio.Wave.BufferedWaveProvider(new NAudio.Wave.WaveFormat((int)((IO.Misc.THP.THPComponents.THPAudioInfo)File.Components.THPInfos[1]).Frequentie, 2));
                ww = new NAudio.Wave.WaveOut();
                bb.DiscardOnBufferOverflow = true;
                ww.Init(bb);
                ww.Play();
                //	WaveLib.WaveNative.waveOutOpen(out WaveOut, 0, new WaveLib.WaveFormat((int)((IO.Misc.THP.THPComponents.THPAudioInfo)File.Components.THPInfos[1]).Frequentie, 16, 2), new WaveLib.WaveNative.WaveDelegate(WaveCallBack), 0, 0);
                //w = new WaveLib.WaveOutBuffer(WaveOut, (int)File.Header.MaxBufferSize);
                //w = new WaveLib.WaveOutPlayer(0, new WaveLib.WaveFormat((int)((IO.Misc.THP.THPComponents.THPAudioInfo)File.Components.THPInfos[1]).Frequentie, 16, 2), (int)File.Header.MaxBufferSize, 1, new WaveLib.BufferFillEventHandler(BufferFiller));

                /*h.dwBytesRecorded = 0;
                 * h.dwUser = IntPtr.Zero;
                 * h.dwFlags = 0;
                 * h.dwLoops = 0;
                 * h.lpNext = IntPtr.Zero;
                 * h.reserved = 0;
                 *
                 * unsafe
                 * {
                 *      WaveLib.WaveNative.waveOutPrepareHeader(WaveOut, ref h, sizeof(WaveLib.WaveNative.WaveHdr));
                 * }*/
            }
            //for (int i = 0; i < 10; i++)
            //{
            //	t.Enqueue(File.GetFrame(frame++));
            //}
            //backgroundWorker1.RunWorkerAsync();
            //timer1.Interval = (int)(1000f / File.Header.FPS - 0.5f);
            //timer1.Enabled = true;
            //	Timer = SetTimer(Handle, IntPtr.Zero, /*(uint)(1000f / File.Header.FPS-0.68336f)*/(uint)(1000f / File.Header.FPS - (1000f / File.Header.FPS / 10f)), IntPtr.Zero);
            backgroundWorker2.RunWorkerAsync();
            if (audio)
            {
                backgroundWorker1.RunWorkerAsync();
            }

            if (audio)
            {
                //backgroundWorker1.RunWorkerAsync();
                //bb = new NAudio.Wave.BufferedWaveProvider(new NAudio.Wave.WaveFormat((int)((IO.Misc.THP.THPComponents.THPAudioInfo)File.Components.THPInfos[1]).Frequentie, 2));
                //ww = new NAudio.Wave.WaveOut();
                //bb.DiscardOnBufferOverflow = true;
                //ww.Init(bb);
                //ww.Play();
                //	WaveLib.WaveNative.waveOutOpen(out WaveOut, 0, new WaveLib.WaveFormat((int)((IO.Misc.THP.THPComponents.THPAudioInfo)File.Components.THPInfos[1]).Frequentie, 16, 2), new WaveLib.WaveNative.WaveDelegate(WaveCallBack), 0, 0);
                //w = new WaveLib.WaveOutBuffer(WaveOut, (int)File.Header.MaxBufferSize);
                //w = new WaveLib.WaveOutPlayer(0, new WaveLib.WaveFormat((int)((IO.Misc.THP.THPComponents.THPAudioInfo)File.Components.THPInfos[1]).Frequentie, 16, 2), (int)File.Header.MaxAudioSamples * 2 * 2, 1, new WaveLib.BufferFillEventHandler(BufferFiller));

                /*h.dwBytesRecorded = 0;
                 * h.dwUser = IntPtr.Zero;
                 * h.dwFlags = 0;
                 * h.dwLoops = 0;
                 * h.lpNext = IntPtr.Zero;
                 * h.reserved = 0;
                 *
                 * unsafe
                 * {
                 *      WaveLib.WaveNative.waveOutPrepareHeader(WaveOut, ref h, sizeof(WaveLib.WaveNative.WaveHdr));
                 * }*/
            }
        }