コード例 #1
0
 /// <summary>
 /// Emits one Morse "dah": fills the shared buffer with a dah-length tone
 /// and plays it once at maximum volume.
 /// </summary>
 public void EmitDah()
 {
     // Prepare the shared buffer/wave state for a dah-length unit.
     setBufferAndWave(dahunit);

     // Synthesize one unit of tone and load it into the whole buffer.
     var dahTone = GenerateOneUnitSound(dahunit);
     buffer.Write(0, dahTone, LockFlag.EntireBuffer);

     // Play once at full volume.
     buffer.Volume = (int)Volume.Max;
     buffer.Play(0, BufferPlayFlags.Default);
 }
コード例 #2
0
        /// <summary>
        /// Consumes the bytes delivered by a buffer-notification event and writes
        /// them into the streaming buffer at the given offset, padding any
        /// shortfall with silence.
        /// </summary>
        /// <param name="nextPlaceForBytes">Offset in the streaming buffer to write at.</param>
        /// <param name="byteWindowSize">Number of bytes the buffer requested.</param>
        /// <param name="ea">Event payload carrying the newly produced sound bytes.</param>
        /// <returns>Which task the control thread should perform next.</returns>
        private NextNotificationTask HandleNewBytesInControlThread(int nextPlaceForBytes, int byteWindowSize, BufferNotificationEventArgs ea)
        {
            const LockFlag flags = LockFlag.None;
            int received = ea.NewSoundByte.Length;

            if (received > byteWindowSize)
            {
                // The handler overfilled the request; stop playback and report.
                SB.Stop();
                throw new ApplicationException("An event handler provided the streaming buffer with " + received + " bytes of sound, but it only requested " + byteWindowSize + " bytes.");
            }

            if (received == byteWindowSize)
            {
                // Exact fit: write straight through.
                SB.Write(nextPlaceForBytes, ea.NewSoundByte, flags);
                return NextNotificationTask.FillSectionWithNewSound;
            }

            // Partial delivery: write what we got, then pad the rest with silence.
            if (received > 0)
            {
                SB.Write(nextPlaceForBytes, ea.NewSoundByte, flags);
            }
            SB.Write(nextPlaceForBytes + received, new byte[byteWindowSize - received], flags);

            return ea.SoundFinished
                ? NextNotificationTask.FillSectionWithSilence
                : NextNotificationTask.FillSectionWithNewSound;
        }
コード例 #3
0
        /// <summary>
        /// Copies one frame's worth of audio (0xf00 bytes) from <paramref name="buffer"/>
        /// into the 0x9400-byte ring buffer <c>buf2</c>, advancing the stream-in
        /// cursor and dropping the frame on overflow.
        /// Refactor: the original `&lt;` and `==` branches were duplicates — ending
        /// exactly at the ring end is just the wrap case `(pos + frame) % size == 0` —
        /// so they are merged, and the magic numbers are named.
        /// </summary>
        /// <param name="buffer">Source sample bytes; at least 0xf00 bytes are read.</param>
        /// <param name="samples_this_frame">Not used in this body; kept for the callback signature.</param>
        private static void osd_update_audio_stream(byte[] buffer, int samples_this_frame)
        {
            const int RingSize  = 0x9400; // total size of the DirectSound ring buffer
            const int FrameSize = 0xf00;  // bytes written per frame

            int play_position, write_position;
            buf2.GetCurrentPosition(out play_position, out write_position);

            // Unwrap write_position so it is always >= play_position.
            if (write_position < play_position)
            {
                write_position += RingSize;
            }

            // Unwrap the stream-in cursor, then step it past the write cursor
            // one frame at a time (each step is a buffer underflow).
            int stream_in = stream_buffer_in;
            if (stream_in < write_position)
            {
                stream_in += RingSize;
            }
            while (stream_in < write_position)
            {
                //buffer_underflows++;
                stream_in += FrameSize;
            }

            // Writing this frame would run past the play cursor: overflow, drop it.
            if (stream_in + FrameSize > play_position + RingSize)
            {
                //buffer_overflows++;
                return;
            }

            stream_buffer_in = stream_in % RingSize;

            if (stream_buffer_in + FrameSize <= RingSize)
            {
                // Frame fits without wrapping (possibly ending exactly at the ring end).
                byte[] chunk = new byte[FrameSize];
                Array.Copy(buffer, chunk, FrameSize);
                buf2.Write(stream_buffer_in, chunk, LockFlag.None);
                stream_buffer_in = (stream_buffer_in + FrameSize) % RingSize;
            }
            else
            {
                // Frame straddles the ring end: split into tail + head writes.
                int length1 = RingSize - stream_buffer_in;
                int length2 = FrameSize - length1;
                byte[] tail = new byte[length1];
                byte[] head = new byte[length2];
                Array.Copy(buffer, tail, length1);
                Array.Copy(buffer, length1, head, 0, length2);
                buf2.Write(stream_buffer_in, tail, LockFlag.None);
                buf2.Write(0, head, LockFlag.None);
                stream_buffer_in = length2;
            }
        }
コード例 #4
0
        /// <summary>
        /// Background playback loop: primes the looping DirectSound buffer with
        /// generated samples, then repeatedly refills the span the play cursor
        /// has consumed since the last write.
        /// NOTE(review): the lock on _thread is held for the entire loop and is
        /// released only inside Monitor.Wait while halted — confirm no other
        /// thread needs _thread during normal playback.
        /// </summary>
        private void SoundPlayback()
        {
            lock (_thread)
            {
                if (!_running)
                {
                    return;
                }

                // Set up the initial sound buffer to be the full length
                var bufferLength = _samplesPerUpdate * 2 * _channels;
                var soundData    = new short[bufferLength];

                // Prime it with the first x seconds of data
                _pullAudio(soundData, soundData.Length);
                _soundBuffer.Write(0, soundData, LockFlag.None);

                // Start it playing
                _soundBuffer.Play(0, BufferPlayFlags.Looping);

                var lastWritten = 0;
                while (_running)
                {
                    if (_halted)
                    {
                        // Park until un-halted; the Pulse wakes whoever requested the halt.
                        Monitor.Pulse(_thread);
                        Monitor.Wait(_thread);
                    }

                    // Wait on one of the notification events with a 3ms timeout
                    WaitHandle.WaitAny(_fillEvent, 3, true);

                    // Get the current play position (divide by two because we are using 16 bit samples)
                    if (_soundBuffer != null)
                    {
                        var tmp = _soundBuffer.PlayPosition / 2;

                        // Generate new sounds from lastWritten to tmp in the sound buffer
                        if (tmp == lastWritten)
                        {
                            // Play cursor hasn't moved yet; nothing to refill.
                            continue;
                        }
                        else
                        {
                            // Gap size in samples, wrapped around the circular buffer.
                            soundData = new short[(tmp - lastWritten + bufferLength) % bufferLength];
                        }

                        _pullAudio(soundData, soundData.Length);

                        // Write in the generated data
                        _soundBuffer.Write(lastWritten * 2, soundData, LockFlag.None);

                        // Save the position we were at
                        lastWritten = tmp;
                    }
                }
            }
        }
コード例 #5
0
        /// <summary>
        /// Play button handler: validates the requested left/right frequencies
        /// (20 Hz – 20 kHz), fills the buffer with interleaved stereo sine
        /// samples, and starts looping playback.
        /// </summary>
        private void btn_play_Click(object sender, EventArgs e)//press play button;
        {
            // Set initial amplitude and frequency
            try
            {
                bool   Is_input_OK = true;
                double frequency_L = 1000;
                double frequency_R = 1000;
                if (syncLR == false)
                {
                    frequency_L = Convert.ToDouble(tB_L.Text);
                    frequency_R = Convert.ToDouble(tB_R.Text);
                }
                else
                {
                    // BUGFIX: the left frequency was never read from the textbox in
                    // sync mode, so both channels silently played the 1000 Hz default.
                    frequency_L = Convert.ToDouble(tB_L.Text);
                    frequency_R = frequency_L;
                    tB_R.Text   = tB_L.Text;
                }
                // Audible-range check for both channels.
                if (frequency_L > 20000 || frequency_L < 20)
                {
                    Is_input_OK = false;
                }
                if (frequency_R > 20000 || frequency_R < 20)
                {
                    Is_input_OK = false;
                }
                if (!Is_input_OK)
                {
                    MessageBox.Show("输入频率范围在:20-20000");
                    return;
                }
                double amplitude = short.MaxValue;
                double two_pi    = 2 * Math.PI;
                // Interleave left/right sine samples across the buffer.
                for (int i = 0; i < buffer.Length / 2; i++)
                {
                    buffer[2 * i]     = (short)(amplitude * Math.Sin(i * two_pi * frequency_L / waveFormat.SamplesPerSecond));
                    buffer[2 * i + 1] = (short)(amplitude * Math.Sin(i * two_pi * frequency_R / waveFormat.SamplesPerSecond));
                }

                // vol is a 0..10 step; 0 maps to silence (-10000 mB), otherwise -600 mB per step below max.
                bufferSound.Volume = (vol == 0)?-10000:-600 * (10 - vol);//(int)Volume.Max;
                bufferSound.Write(0, buffer, LockFlag.None);
                bufferSound.Play(0, BufferPlayFlags.Looping);

                isSync.Enabled = false;//disable sync checkbox;
            }
            catch
            {
                MessageBox.Show("输入错误!");
            }

            if (bufferSound.Status.Playing)
            {
                btn_play.Enabled = false; Sweep.Enabled = false; btn_stop.Enabled = true;
            }
        }
コード例 #6
0
        /*
         * Receive audio data coming on port 1550 and feed it to the speakers to be played.
         */
        private void UDP_Receive()
        {
            try {
                bStop = false;
                IPEndPoint remoteEP = new IPEndPoint(IPAddress.Any, 0);

                while (!bStop)
                {
                    //Receive data.
                    byte[] byteData = udpClient.Receive(ref remoteEP);

                    //Decompress with the A-law vocoder. G711 compresses 2:1, and
                    //ALawDecode allocates the doubled output via its out parameter,
                    //so the old pre-allocation here was dead work (as was the
                    //unused c_ip local) and has been removed.
                    byte[] byteDecodedData;
                    ALawDecoder.ALawDecode(byteData, out byteDecodedData);

                    //Play the data received to the user.
                    playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
                    playbackBuffer.Write(0, byteDecodedData, LockFlag.None);
                    playbackBuffer.Play(0, BufferPlayFlags.Default);
                }
            } catch (Exception) {
                //Best-effort receive loop: errors end the loop silently (original behavior).
                //MessageBox.Show(ex.Message, "VoiceChat-Receive ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
            } finally {
                //Signal the owner that this receive thread has exited.
                nUdpClientFlag += 1;
            }
        }
コード例 #7
0
 /// <summary>
 /// Receive loop: plays each incoming UDP datagram through a fresh secondary
 /// buffer until flagSrarting is cleared, then disposes the buffer and closes
 /// the socket.
 /// </summary>
 private void Receive()
 {
     try
     {
         playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
         while (flagSrarting)
         {
             // BUGFIX(cleanup): the original allocated a length+1 array and then
             // immediately overwrote the reference with the received datagram;
             // the allocation was dead work, so the datagram is used directly.
             byte[] array = udpRecive.Receive(ref recive_Com);
             playbackBuffer.Dispose();
             playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
             playbackBuffer.Write(0, array, LockFlag.None);
             playbackBuffer.Play(0, BufferPlayFlags.Default);
         }
         playbackBuffer.Dispose();
         udpRecive.Close();
     }
     catch (Exception ex)
     {
         // Decompiled VB error-handling scaffolding: record then clear the error.
         ProjectData.SetProjectError(ex);
         ProjectData.ClearProjectError();
     }
 }
コード例 #8
0
        /// <summary>
        /// Plays a raw PCM file: builds a mono WaveFormat from the configured
        /// sample rate / bit depth, sizes a secondary buffer to the whole file,
        /// fills it, and starts playback once.
        /// </summary>
        /// <param name="filename">Path of the raw PCM data file.</param>
        private void Play(string filename)
        {
            WaveFormat format = new WaveFormat();

            format.AverageBytesPerSecond = bitspersample / 8 * samplerate;
            format.BitsPerSample         = bitspersample;
            format.BlockAlign            = bitspersample / 8;
            format.Channels         = 1;
            format.FormatTag        = WaveFormatTag.Pcm;
            format.SamplesPerSecond = samplerate;

            // BUGFIX: open the file directly as a stream. The original wrapped it
            // in a StreamReader (a *text* reader) only to use its BaseStream, and
            // leaked the handle if an exception was thrown before Close().
            using (FileStream stream = File.OpenRead(filename))
            {
                BufferDescription desc = new BufferDescription(format);

                desc.BufferBytes   = (int)stream.Length;
                desc.ControlVolume = true;
                desc.GlobalFocus   = true;

                buffer = new SecondaryBuffer(desc, dev);
                //buffer = new SecondaryBuffer(filename, dev);
                buffer.Write(0, stream, (int)stream.Length, LockFlag.None);
            }

            //m_BufferBytes = buffer.Caps.BufferBytes;

            buffer.Play(0, BufferPlayFlags.Default);
        }
コード例 #9
0
ファイル: CallManager.cs プロジェクト: TJoskowiak/TIP_Projekt
        /*
         * Receive audio data coming on port 1550 and feed it to the speakers to be played.
         */
        private void Receive()
        {
            try
            {
                bStop = false;
                IPEndPoint remoteEP = new IPEndPoint(IPAddress.Any, 0);

                while (!bStop)
                {
                    //Receive data.
                    byte[] byteData = udpClient.Receive(ref remoteEP);

                    //Decrypt, then G.711 A-law decode. ALawDecode allocates the
                    //decoded (2x) output via its out parameter, so the old
                    //pre-allocated double-size buffer was dead work.
                    var decryptedData = AES_Crypto.Decrypt(byteData, CallCurrentPass, CallCurrentSalt);
                    byte[] byteDecodedData;
                    ALawDecoder.ALawDecode(decryptedData, out byteDecodedData);

                    //Play the data received to the user.
                    playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
                    playbackBuffer.Write(0, byteDecodedData, LockFlag.None);
                    playbackBuffer.Play(0, BufferPlayFlags.Default);
                }
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message, "VoiceChat-Receive ()", MessageBoxButton.OK, MessageBoxImage.Error, MessageBoxResult.OK);
            }
            finally
            {
                //Signal the owner that this receive thread has exited.
                nUdpClientFlag += 1;
            }
        }
コード例 #10
0
ファイル: VideoWindow.cs プロジェクト: ShahidKhosa/video-chat
        /*
         * Receive audio data coming on port 1550 and feed it to the speakers to be played.
         */
        private void Receive()
        {
            // BUGFIX: the original retried by calling Receive() recursively from
            // its own catch block, which grows the stack without bound under
            // persistent failures. A loop keeps the restart-on-error behavior
            // without the stack-overflow risk.
            while (true)
            {
                try
                {
                    udpAudioReceiving = new UdpClient(1550);
                    IPEndPoint remoteEP = new IPEndPoint(IPAddress.Any, 0);

                    audioReceive = true;
                    while (audioReceive)
                    {
                        //Receive data.
                        byte[] byteData = udpAudioReceiving.Receive(ref remoteEP);

                        //Play the data received to the user.
                        playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
                        playbackBuffer.Write(0, byteData, LockFlag.None);
                        playbackBuffer.Play(0, BufferPlayFlags.Default);
                    }
                    return;
                }
                catch (Exception)
                {
                    // Original behavior: restart receiving after any failure.
                    //MessageBox.Show(ex.Message, "StudentSide:VideoWindow:Receive()", MessageBoxButtons.OK, MessageBoxIcon.Error);
                }
            }
        }
コード例 #11
0
ファイル: VCES.cs プロジェクト: ShahidKhosa/video-chat
        /// <summary>
        /// Receives raw audio datagrams on the shared UDP client and plays each
        /// one immediately through a fresh secondary buffer until bStop is set.
        /// On error the loop ends with a message box; the exit flag counter is
        /// always incremented.
        /// </summary>
        private void Receive()
        {
            try
            {
                bStop = false;
                IPEndPoint sender = new IPEndPoint(IPAddress.Any, 0);

                for (; !bStop; )
                {
                    // Block for the next datagram of audio.
                    byte[] packet = udpClient.Receive(ref sender);

                    // Hand it straight to the speakers.
                    playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
                    playbackBuffer.Write(0, packet, LockFlag.None);
                    playbackBuffer.Play(0, BufferPlayFlags.Default);
                }
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message, "VoiceChat-Receive ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            finally
            {
                nUdpClientFlag += 1;
            }
        }
コード例 #12
0
 /// <summary>
 /// Writes the next sector of generated data into the sound buffer and
 /// advances the circular write position.
 /// </summary>
 private void FillData()
 {
     lock (buffer)
     {
         buffer.Write(writePosition, GetData(), LockFlag.None);
         // BUGFIX: advance the cursor inside the lock — writePosition is shared
         // state, and the original updated it after releasing the lock, racing
         // with any concurrent fill.
         writePosition = (writePosition + SectorSize) % BufferSize;
     }
 }
コード例 #13
0
ファイル: DXSound.cs プロジェクト: biddyweb/communicator
        /// <summary>
        /// Copies the supplied PCM stream into a freshly created secondary
        /// buffer and starts playing it. A null stream is ignored; any
        /// previously created buffer is disposed first.
        /// </summary>
        /// <param name="buffer">Sample data to play; read from position 0.</param>
        public void StartPlaying(MemoryStream buffer)
        {
            if (buffer == null)
            {
                return;
            }

            // Read the sample data from the beginning.
            buffer.Position = 0;
            int byteCount = Convert.ToInt32(buffer.Length);

            // Release the buffer from any previous playback.
            if (mSecondaryBuffer != null)
            {
                mSecondaryBuffer.Dispose();
            }

            // Describe a deferred-location buffer exactly as large as the data.
            BufferDescription desc = new BufferDescription(mWaveFormat);
            desc.DeferLocation = true;
            desc.BufferBytes   = byteCount;

            // Create the buffer, load the samples, and start playback.
            mSecondaryBuffer = new SecondaryBuffer(desc, mDevice);
            mSecondaryBuffer.Write(0, buffer, byteCount, LockFlag.EntireBuffer);
            mSecondaryBuffer.Play(0, BufferPlayFlags.Default);
        }
コード例 #14
0
ファイル: SoundManager.cs プロジェクト: itamargreen/metalx
 /// <summary>
 /// Refills the front half of the secondary buffer from the MP3 stream.
 /// Silently does nothing when the stream or buffer is unavailable.
 /// </summary>
 void fillFore()
 {
     // Nothing to stream, or the output buffer is gone (the original checked
     // mp3Stream twice; the checks are merged here).
     if (mp3Stream == null || secondaryBuffer == null || secondaryBuffer.Disposed)
     {
         return;
     }
     try
     {
         // BUGFIX: only write the bytes actually read. The original always
         // wrote halfSize, replaying stale data from buff's tail on a short read.
         int read = mp3Stream.Read(buff, 0, halfSize);
         if (read > 0)
         {
             secondaryBuffer.Write(0, new System.IO.MemoryStream(buff, 0, read), read, LockFlag.None);
         }
     }
     catch
     {
         // Best-effort fill: swallow transient decode/write errors (original behavior).
     }
 }
コード例 #15
0
        /// <summary>
        /// Background playback loop: primes the looping sound buffer with
        /// generated samples, then repeatedly refills the span the play cursor
        /// has consumed since the last write.
        /// NOTE(review): the lock on this.thread is held for the entire loop and
        /// released only inside Monitor.Wait while halted — confirm no other
        /// thread needs this.thread during normal playback.
        /// </summary>
        private void SoundPlayback()
        {
            lock (this.thread)
            {
                if (!this.running)
                {
                    return;
                }

                // Set up the initial sound buffer to be the full length
                int     bufferLength = this.samplesPerUpdate * 2 * this.channels;
                short[] soundData    = new short[bufferLength];

                // Prime it with the first x seconds of data
                this.pullAudio(soundData, soundData.Length);
                this.soundBuffer.Write(0, soundData, LockFlag.None);

                // Start it playing
                this.soundBuffer.Play(0, BufferPlayFlags.Looping);

                int lastWritten = 0;
                while (this.running)
                {
                    if (this.halted)
                    {
                        // Park until un-halted; the Pulse wakes whoever requested the halt.
                        Monitor.Pulse(this.thread);
                        Monitor.Wait(this.thread);
                    }

                    // Wait on one of the notification events
                    WaitHandle.WaitAny(this.fillEvent, 3, true);

                    // Get the current play position (divide by two because we are using 16 bit samples)
                    int tmp = this.soundBuffer.PlayPosition / 2;

                    // Generate new sounds from lastWritten to tmp in the sound buffer
                    if (tmp == lastWritten)
                    {
                        // Play cursor hasn't moved yet; nothing to refill.
                        continue;
                    }
                    else
                    {
                        // Gap size in samples, wrapped around the circular buffer.
                        soundData = new short[(tmp - lastWritten + bufferLength) % bufferLength];
                    }

                    this.pullAudio(soundData, soundData.Length);

                    // Write in the generated data
                    soundBuffer.Write(lastWritten * 2, soundData, LockFlag.None);

                    // Save the position we were at
                    lastWritten = tmp;
                }
            }
        }
コード例 #16
0
 /// <summary>
 /// Takes received audio bytes, accumulates them in the circular memory
 /// stream, and plays the accumulated span through a new secondary buffer
 /// once enough data is buffered.
 /// </summary>
 /// <param name="intRecv">Number of valid bytes in the array.</param>
 /// <param name="bytRecv">Byte array containing the audio data.</param>
 public void GetVoiceData(int intRecv, byte[] bytRecv)
 {
     // intPosWrite marks the end of the most recently written data;
     // intPosPlay marks where the next playback span starts.
     if (intPosWrite + intRecv <= memstream.Capacity)
     {// The incoming bytes still fit between the write offset and the end of the stream.
         if ((intPosWrite - intPosPlay >= 0 && intPosWrite - intPosPlay < intNotifySize) || (intPosWrite - intPosPlay < 0 && intPosWrite - intPosPlay + memstream.Capacity < intNotifySize))
         {
             // Less than one notify-size span buffered (wrap-aware): accumulate only.
             memstream.Write(bytRecv, 0, intRecv);
             intPosWrite += intRecv;
         }
         else if (intPosWrite - intPosPlay >= 0)
         {                                                                     // Enough buffered and the span is contiguous: play it.
             buffDiscript.BufferBytes = intPosWrite - intPosPlay;              // Buffer size = distance from play pointer to write pointer.
             SecondaryBuffer sec = new SecondaryBuffer(buffDiscript, PlayDev); // Build a buffer sized for exactly this span.
             memstream.Position = intPosPlay;                                  // Seek to where this playback span begins.
             sec.Write(0, memstream, intPosWrite - intPosPlay, LockFlag.FromWriteCursor);
             sec.Play(0, BufferPlayFlags.Default);
             memstream.Position = intPosWrite;// Restore the stream position to where writing should continue.
             intPosPlay         = intPosWrite;
         }
         else if (intPosWrite - intPosPlay < 0)
         {
             // The span wraps around the end of the stream: play tail then head.
             buffDiscript.BufferBytes = intPosWrite - intPosPlay + memstream.Capacity; // Wrapped distance from play pointer to write pointer.
             SecondaryBuffer sec = new SecondaryBuffer(buffDiscript, PlayDev);         // Build a buffer sized for exactly this span.
             memstream.Position = intPosPlay;
             sec.Write(0, memstream, memstream.Capacity - intPosPlay, LockFlag.FromWriteCursor);
             memstream.Position = 0;
             sec.Write(memstream.Capacity - intPosPlay, memstream, intPosWrite, LockFlag.FromWriteCursor);
             sec.Play(0, BufferPlayFlags.Default);
             memstream.Position = intPosWrite;
             intPosPlay         = intPosWrite;
         }
     }
     else
     {                                                     // Incoming data would overrun the stream capacity.
         int irest = memstream.Capacity - intPosWrite;     // Bytes of room left in memstream.
         memstream.Write(bytRecv, 0, irest);               // Fill the stream to the end first.
         memstream.Position = 0;                           // Then wrap to position 0 for the remainder,
         memstream.Write(bytRecv, irest, intRecv - irest); // overwriting the oldest data.
         intPosWrite = intRecv - irest;                    // Write pointer = next position to write (not last written), so no -1 needed.
     }
 }
コード例 #17
0
        /// <summary>
        /// Fills a 3-second 8-bit/8 kHz mono secondary buffer with generated
        /// samples and plays it once.
        /// NOTE(review): the "sine wave" section below mutates the outer loop
        /// variable i, so each outer iteration actually advances i by 2 and the
        /// two sine bytes overwrite the random sample written just above — the
        /// "white noise" line is effectively dead while that section is enabled.
        /// </summary>
        private void buttonWhiteNoise_Click(object sender, System.EventArgs e)
        {
            WaveFormat format = new WaveFormat();

            format.BitsPerSample = 8;
            format.Channels      = 1;
            format.BlockAlign    = 1;

            format.FormatTag             = WaveFormatTag.Pcm;
            format.SamplesPerSecond      = 8000; //sampling frequency of your data;
            format.AverageBytesPerSecond = format.SamplesPerSecond * format.BlockAlign;

            // buffer description: 3 seconds (24000 bytes); note rawsamples below
            // is only 22050 bytes, so the buffer tail is never written.
            BufferDescription desc = new BufferDescription(format);

            desc.DeferLocation = true;
            desc.BufferBytes   = 3 * format.AverageBytesPerSecond;

            // create the buffer
            //Device ApplicationDevice = new Device();

            SecondaryBuffer secondaryBuffer = new SecondaryBuffer(desc, applicationDevice);


            //generate random data (white noise)
            byte[] rawsamples = new byte[22050];
            Random rnd1       = new System.Random();

            for (int i = 0; i < 22050; i++)
            {
                //-----------------------------------------------
                //Completely random
                //add a new audio sample to array
                rawsamples[i] = (byte)rnd1.Next(255);
                //-----------------------------------------------


                //-----------------------------------------------
                //-- Sine wave? (comment out for white noise)
                int convert = (int)(Math.Sin(i) * Math.PI);
                for (int index = 0; index < 2; index++)
                {
                    // index 0 leaves i unchanged, index 1 advances it: writes the
                    // two bytes of convert at i and i+1, stepping the outer loop by 2.
                    i            += index;
                    rawsamples[i] = (byte)(convert >> (index * 8));
                }
                //-----------------------------------------------
            }

            //load audio samples to secondary buffer
            secondaryBuffer.Write(0, rawsamples, LockFlag.EntireBuffer);

            //play audio buffer
            secondaryBuffer.Play(0, BufferPlayFlags.Default);
        }
コード例 #18
0
 //following use for client receiving
 /// <summary>
 /// Accumulates received audio bytes in the circular memory stream and, once
 /// enough is buffered, plays the accumulated span through a new secondary buffer.
 /// </summary>
 /// <param name="intRecv">Number of valid bytes in bytRecv.</param>
 /// <param name="bytRecv">Received audio data.</param>
 public void getVoiceData(int intRecv, byte[] bytRecv)
 {
     // intPosWrite marks the end of written data; intPosPlay marks where the
     // next playback span starts.
     if (intPosWrite + intRecv <= memstream.Capacity)
     {
         if ((intPosWrite - intPosPlay >= 0 && intPosWrite - intPosPlay < intNotifySize) || (intPosWrite - intPosPlay < 0 && intPosWrite - intPosPlay + memstream.Capacity < intNotifySize))
         {
             // Less than one notify-size span buffered (wrap-aware): accumulate only.
             memstream.Write(bytRecv, 0, intRecv);
             intPosWrite += intRecv;
         }
         else if (intPosWrite - intPosPlay >= 0)
         {
             // Contiguous span: play [intPosPlay, intPosWrite) in one write.
             buffDiscript.BufferBytes = intPosWrite - intPosPlay;
             SecondaryBuffer sec = new SecondaryBuffer(buffDiscript, playDev);
             memstream.Position = intPosPlay;
             sec.Write(0, memstream, intPosWrite - intPosPlay, LockFlag.FromWriteCursor);
             sec.Play(0, BufferPlayFlags.Default);
             // Restore the stream position for the next write, and mark the span played.
             memstream.Position = intPosWrite;
             intPosPlay         = intPosWrite;
         }
         else if (intPosWrite - intPosPlay < 0)
         {
             // Wrapped span: play tail [intPosPlay, Capacity) then head [0, intPosWrite).
             buffDiscript.BufferBytes = intPosWrite - intPosPlay + memstream.Capacity;
             SecondaryBuffer sec = new SecondaryBuffer(buffDiscript, playDev);
             memstream.Position = intPosPlay;
             sec.Write(0, memstream, memstream.Capacity - intPosPlay, LockFlag.FromWriteCursor);
             memstream.Position = 0;
             sec.Write(memstream.Capacity - intPosPlay, memstream, intPosWrite, LockFlag.FromWriteCursor);
             sec.Play(0, BufferPlayFlags.Default);
             memstream.Position = intPosWrite;
             intPosPlay         = intPosWrite;
         }
     }
     else
     {
         // Incoming data overruns capacity: fill to the end, then wrap to 0,
         // overwriting the oldest data.
         int irest = memstream.Capacity - intPosWrite;
         memstream.Write(bytRecv, 0, irest);
         memstream.Position = 0;
         memstream.Write(bytRecv, irest, intRecv - irest);
         // Write pointer = next position to write, so no off-by-one adjustment.
         intPosWrite = intRecv - irest;
     }
 }
コード例 #19
0
 /// <summary>
 /// Play data stream from one position to another.
 /// </summary>
 /// <param name="startPos">Byte offset where playback begins; snapped to a frame boundary.</param>
 /// <param name="endPos">Byte offset where playback stops. Use 0 to play until the end.</param>
 private void PlayDataStream(long startPos, long endPos)
 {
     //if (mState != PlayerState.Stopped) throw new Exception("Player is not stopped.");
     // Snap both positions onto PCM frame (block-align) boundaries.
     startPos              = CalculationFunctions.AdaptToFrame(startPos, mCurrentAudioMediaData.getPCMFormat().getBlockAlign());
     endPos                = CalculationFunctions.AdaptToFrame(endPos, mCurrentAudioMediaData.getPCMFormat().getBlockAlign());
     mPlayed               = startPos;
     mPlayUntil            = endPos == 0 ? mCurrentAudioMediaData.getPCMLength() : endPos;
     mAtEndOfAsset         = false;
     mAtLastRefresh        = false;
     mAudioStream          = mCurrentAudioMediaData.getAudioData();
     mAudioStream.Position = startPos;
     // CONSISTENCY: was the bare literal 0 (implicitly converted to the enum);
     // use the named member like every other Write call.
     mSoundBuffer.Write(0, mAudioStream, mBufferSize, LockFlag.None);
     mPlayed += mBufferSize;
     ChangeState(PlayerState.Playing);
     mMonitoringTimer.Enabled = true;
     mSoundBuffer.Play(0, BufferPlayFlags.Looping);
     mBufferCheck = 1;
     //initialise and start thread for refreshing buffer
     mRefreshThread = new Thread(new ThreadStart(RefreshBuffer));
     mRefreshThread.Start();
 }
コード例 #20
0
        /// <summary>
        ///     Looks after refilling the audio buffer. Runs forever on a worker
        ///     thread, copying freshly requested sample data into the circular
        ///     streamed buffer just behind the play cursor.
        /// </summary>
        public void StreamingThread()
        {
            // Set stream position to the start :).
            _sample.DataStream.Position = _sample.DataChunkPosition;

            while (true)
            {
                if (_buffer != null && _sample != null && _isPaused == false && (_isPlaying == true || _firstStreamingChunkLoaded == false))
                {
                    // Mark that we have loaded the first chunk.
                    _firstStreamingChunkLoaded = true;

                    // Work out how much to read: the gap between the play cursor
                    // and our streaming cursor, wrapped around the ring buffer.
                    int bytes = _buffer.PlayPosition <= _streamingPosition ?
                                _buffer.PlayPosition + _streamedBufferSize - _streamingPosition :
                                _buffer.PlayPosition - _streamingPosition;

                    bytes = Math.Min(bytes, _streamThreshold);

                    if (_streamFinishing == true && _buffer.Status.Playing == false)
                    {
                        // Final chunk has drained; stop and exit the thread.
                        ((ISampleBuffer)this).Stop();
                        break;
                    }

                    if (bytes > 0 && _streamFinishing == false)
                    {
                        // Ask for the data.
                        byte[] data = _sample.StreamedDataRequired(_sample, bytes, ref _streamingDataPosition, _isLooping);
                        if (data.Length < bytes && _isLooping == false)
                        {
                            // Source exhausted: switch to non-looping playback so
                            // the buffer stops when it reaches the end.
                            _streamFinishing = true;
                            _buffer.Play(0, 0);
                        }

                        // Copy data to the buffer. BUGFIX: wrap the stream in a
                        // using block so it is disposed even if Write throws.
                        using (Stream dataStream = new MemoryStream(data))
                        {
                            dataStream.Position = 0;
                            _buffer.Write(_streamingPosition, dataStream, data.Length, LockFlag.None);
                        }

                        // Advance and wrap the streaming cursor.
                        _streamingPosition += data.Length;
                        if (_streamingPosition >= _streamedBufferSize)
                        {
                            _streamingPosition -= _streamedBufferSize;
                        }
                    }
                }

                // Sleep until we are needed again :P.
                Thread.Sleep(_buffer == null ? 50 : BytesToMS(_streamedBufferSize) / 6);
            }
        }
コード例 #21
0
    /// <summary>
    /// Decodes a received G.711 A-law packet and plays it through a fresh
    /// secondary buffer. All errors are swallowed (best-effort playback).
    /// </summary>
    /// <param name="byteData">A-law compressed audio bytes.</param>
    public void PlayReceivedVoice(byte[] byteData)
    {
        try
        {
            // ALawDecode allocates the decoded (2x) output via its out
            // parameter, so the old pre-allocation here was dead work.
            byte[] byteDecodedData;
            ALawDecoder.ALawDecode(byteData, out byteDecodedData);
            _playbackBuffer = new SecondaryBuffer(_playbackBufferDescription, _device);
            _playbackBuffer.Write(0, byteDecodedData, LockFlag.None);
            _playbackBuffer.Play(0, BufferPlayFlags.Default);
        }
        catch
        {
            // Deliberate swallow: dropping one packet beats crashing the call.
        }
    }
コード例 #22
0
        /// <summary>
        /// Streams the WMA file through a double-buffered DirectSound buffer:
        /// writes the first 128 KiB half, then after each buffer notification
        /// writes the next chunk into whichever half is not currently playing.
        /// NOTE(review): the value returned by str.Read is only used as the loop
        /// condition — a final short read still writes the full 128 KiB array,
        /// including stale bytes from the previous chunk; confirm this tail is
        /// inaudible or handled elsewhere.
        /// </summary>
        private void WMDataFill()
        {
            //int bufferwriteoffset = 0;
            using (WmaStream str = new WmaStream(file)) {
                byte[] buffer = new byte[131072];

                // Prime the first half before playback notifications start.
                int firstread = str.Read(buffer, 0, buffer.Length);
                sound.Write(0, buffer, LockFlag.None);
                //firstread = str.Read(buffer, 0, buffer.Length);
                //sound.Write(131072, buffer, LockFlag.None);

                try {
                    int count = 0;
                    int read;
                    while ((read = str.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        // Wait for a half-boundary notification, then refill the
                        // half the play cursor just left.
                        BufferNotificationEvent.WaitOne();
                        if (sound.PlayPosition < 131071)
                        {
                            sound.Write(131072, buffer, LockFlag.None);
                        }
                        else
                        {
                            sound.Write(0, buffer, LockFlag.None);
                        }
                        count++;
                    }
                }
                finally {
                    str.Close();
                    str.Dispose();
                }

                // Let the final half play out before tearing down.
                BufferNotificationEvent.WaitOne();
                End();
            }
        }
コード例 #23
0
    /// <summary>
    /// Decodes a received G.711 A-law packet and plays it through a fresh
    /// secondary buffer. All errors are swallowed (best-effort playback).
    /// </summary>
    /// <param name="byteData">A-law compressed audio bytes.</param>
    public void PlayReceivedVoice(byte[] byteData)
    {
        try
        {
            // G711 compresses 2:1, and ALawDecode allocates the doubled output
            // via its out parameter — the old pre-allocation here was dead work.
            byte[] byteDecodedData;
            ALawDecoder.ALawDecode(byteData, out byteDecodedData);      // G.711 decoding
            playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
            playbackBuffer.Write(0, byteDecodedData, LockFlag.None);    // 0 = starting-point offset
            playbackBuffer.Play(0, BufferPlayFlags.Default);            // 0 = priority for hardware voice mixing
        }
        catch
        {
            // Deliberate swallow: dropping one packet beats crashing the call.
        }
    }
コード例 #24
0
        /*
         * Receive audio data coming on port 1550 and feed it to the speakers to be played.
         */
        private void Receive()
        {
            try
            {
                bStop = false;
                IPEndPoint remoteEP = new IPEndPoint(IPAddress.Any, 0);

                // Loop until asked to stop or the main window has gone away.
                while (!bStop && Process.GetCurrentProcess().MainWindowHandle != IntPtr.Zero)
                {
                    //Receive data.
                    byte[] byteData = udpClient.Receive(ref remoteEP);

                    //The A-law/u-law vocoder branches were commented out, leaving
                    //two dead allocations before aliasing the raw packet; the
                    //data is simply played back uncompressed.
                    byte[] byteDecodedData = byteData;

                    //Play the data received to the user.
                    playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
                    playbackBuffer.Write(0, byteDecodedData, LockFlag.None);
                    playbackBuffer.Play(0, BufferPlayFlags.Default);
                }
            }
            catch (Exception)
            {
                //Best-effort: receive errors end the loop silently (original behavior).
                //MessageBox.Show("Wystąpił problem podczas odbierania pakietów!", "Błąd", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            finally
            {
                //Signal the owner that this receive thread has exited.
                nUdpClientFlag += 1;
            }
        }
コード例 #25
0
ファイル: WorkBench.cs プロジェクト: Zazcallabah/jsdemos
        /// <summary>
        /// Synthesizes a 5-second 500 Hz sine tone (16-bit mono PCM, 44.1 kHz),
        /// plays it at full volume, and blocks for 10 seconds.
        /// </summary>
        public void TestMethod1()
        {
            // Set up wave format: 16-bit mono PCM at 44.1 kHz.
            WaveFormat waveFormat = new WaveFormat();
            waveFormat.FormatTag = WaveFormatTag.Pcm;
            waveFormat.Channels = 1;
            waveFormat.BitsPerSample = 16;
            waveFormat.SamplesPerSecond = 44100;
            waveFormat.BlockAlign = (short) ( waveFormat.Channels * waveFormat.BitsPerSample / 8 );
            waveFormat.AverageBytesPerSecond = waveFormat.BlockAlign * waveFormat.SamplesPerSecond;

            // Set up buffer description.
            BufferDescription bufferDesc = new BufferDescription( waveFormat );
            bufferDesc.Control3D = false;
            bufferDesc.ControlEffects = false;
            bufferDesc.ControlFrequency = true;
            bufferDesc.ControlPan = true;
            bufferDesc.ControlVolume = true;
            bufferDesc.DeferLocation = true;
            bufferDesc.GlobalFocus = true;

            Device d = new Device();
            d.SetCooperativeLevel( new System.Windows.Forms.Control(), CooperativeLevel.Priority );

            // 5 seconds of samples.
            int samples = 5 * waveFormat.SamplesPerSecond * waveFormat.Channels;

            // FIX: samples are signed 16-bit PCM, so use short[] rather than
            // char[]. char is an unsigned 16-bit type, so casting the negative
            // half of the sine wave to char wrapped it and distorted the tone.
            short[] buffer = new short[samples];

            // Set buffer length (BlockAlign == bytes per 16-bit mono sample).
            bufferDesc.BufferBytes = buffer.Length * waveFormat.BlockAlign;

            // Set initial amplitude and frequency.
            double frequency = 500;
            double amplitude = short.MaxValue / 3;
            double two_pi = 2 * Math.PI;

            // Synthesize the sine wave sample by sample.
            for( int i = 0; i < buffer.Length; i++ )
            {
                buffer[i] = (short) ( amplitude * Math.Sin( i * two_pi * frequency / waveFormat.SamplesPerSecond ) );
            }

            // Load, play at maximum volume, and let the tone run.
            SecondaryBuffer bufferSound = new SecondaryBuffer( bufferDesc, d );
            bufferSound.Volume = (int) Volume.Max;
            bufferSound.Write( 0, buffer, LockFlag.None );
            bufferSound.Play( 0, BufferPlayFlags.Default );
            System.Threading.Thread.Sleep( 10000 );
        }
コード例 #26
0
        /// <summary>
        /// Creates a looping DirectSound buffer sized by getSoundDuration()
        /// (8-bit mono PCM at 8 kHz), fills it with white noise, and starts it.
        /// </summary>
        private void startPlaybackDirectSound()
        {
            // 8-bit mono PCM at 8 kHz.
            WaveFormat format = new WaveFormat();

            format.BitsPerSample = 8;
            format.Channels      = 1;
            format.BlockAlign    = 1;

            format.FormatTag             = WaveFormatTag.Pcm;
            format.SamplesPerSecond      = 8000; //sampling frequency of your data;
            format.AverageBytesPerSecond = format.SamplesPerSecond * format.BlockAlign;

            // Buffer description sized to hold the requested sound duration.
            BufferDescription desc = new BufferDescription(format);

            desc.DeferLocation = true;
            desc.ControlVolume = true;
            double bytesEstimate = getSoundDuration() * ((double)format.AverageBytesPerSecond);

            desc.BufferBytes = Convert.ToInt32(bytesEstimate);

            // Create the buffer on the existing device and apply the volume setting.
            activeSoundBuffer = new SecondaryBuffer(desc, soundDevice);
            setVolume();

            //generate random data (white noise)
            byte[] rawsamples = new byte[desc.BufferBytes];
            Random rnd        = new System.Random();

            for (int i = 0; i < desc.BufferBytes; i++)
            {
                // FIX: Random.Next's upper bound is exclusive, so Next(256)
                // covers the full byte range 0-255. The original Next(255)
                // could never produce 255.
                rawsamples[i] = (byte)rnd.Next(256);
            }

            //load audio samples to secondary buffer
            activeSoundBuffer.Write(0, rawsamples, LockFlag.EntireBuffer);

            activeSoundBuffer.Play(0, BufferPlayFlags.Looping);
        }
コード例 #27
0
ファイル: UDP_Radio.cs プロジェクト: zxdreamer/ICE_JX
 /// <summary>
 /// Receives datagrams addressed to this machine's IP/port and streams them
 /// into the looping DirectSound secondary buffer for playback.
 /// Loops forever; intended to run on a dedicated thread.
 /// </summary>
 public void ReciveMsg()
 {
     while (true)
     {
         try
         {
             EndPoint point  = new IPEndPoint(IPAddress.Any, 0);      // holds the sender's IP and port
             int      length = server.ReceiveFrom(buffer, ref point); // blocking receive of one datagram
             // NOTE(review): Play is restarted before every write, and the
             // write uses offset 0 with LockFlag.FromWriteCursor — presumably
             // this relies on the device write cursor advancing during looped
             // playback; confirm against the buffer's configuration.
             secBuffer.Play(0, BufferPlayFlags.Looping);
             secBuffer.Write(0, buffer, LockFlag.FromWriteCursor);
         }
         catch
         {
             // On any socket/playback failure, stop the buffer but keep listening.
             secBuffer.Stop();
         };
         Array.Clear(buffer, 0, 2048);  // assumes buffer is 2048 bytes — TODO confirm
     }
 }
コード例 #28
0
        /// <summary>
        /// Plays the currently loaded SPC or SND sample through a fresh
        /// volume-controllable secondary buffer.
        /// </summary>
        private void button1_Click(object sender, System.EventArgs e)
        {
            // Nothing loaded — nothing to play.
            if (spc == null && snd == null)
            {
                return;
            }

            Device dev = getDev();

            // Volume-controllable buffer description sized to the raw wave data.
            BufferDescription desc = new BufferDescription(WavFile.getFormat());
            desc.ControlVolume = true;
            desc.Flags         = BufferDescriptionFlags.ControlVolume;

            // Prefer the SND sample's wave data; fall back to the SPC's.
            byte[] raw;
            if (snd != null)
            {
                raw = snd.getWave().rawBytes();
            }
            else
            {
                raw = spc.getWave().rawBytes();
            }
            desc.BufferBytes = raw.Length;

            // Fill the buffer and play it from the start at 0 dB attenuation.
            SecondaryBuffer buf = new SecondaryBuffer(desc, dev);
            buf.Write(0, raw, LockFlag.EntireBuffer);
            buf.SetCurrentPosition(0);
            buf.Volume = 0;
            buf.Play(0, BufferPlayFlags.Default);
        }
コード例 #29
0
ファイル: DirectSound.cs プロジェクト: alfishe/ZXMAK2
        /// <summary>
        /// Playback thread body: starts the looping DirectSound buffer, then
        /// repeatedly refills the sub-buffers lying between the last written
        /// slot and the slot currently being played, using samples pulled from
        /// OnBufferRequest, until _isFinished is set.
        /// </summary>
        private void WavePlayThreadProc()
        {
            try
            {
                _soundBuffer.Play(0, BufferPlayFlags.Looping);
                try
                {
                    // Scratch buffer holding one sub-buffer of 32-bit samples,
                    // pinned so a raw pointer can be handed to OnBufferRequest.
                    var playingBuffer = new uint[_bufferSize];
                    fixed(uint *lpBuffer = playingBuffer)
                    {
                        // Pre-fill with silence.
                        for (var i = 0; i < playingBuffer.Length; i++)
                        {
                            lpBuffer[i] = _zeroValue;
                        }
                        const int sampleSize        = 4;  // bytes per 32-bit sample
                        var       rawBufferLength   = _bufferSize * sampleSize;
                        var       lastWrittenBuffer = -1;

                        do
                        {
                            // Wait until signaled (presumably by buffer position
                            // notifications — confirm against the buffer setup),
                            // then fill every sub-buffer from the one after the
                            // last written up to, but not including, the one
                            // currently under the play cursor.
                            _fillEvent.WaitOne();
                            var nextIndex    = (lastWrittenBuffer + 1) % _bufferCount;
                            var playPos      = _soundBuffer.PlayPosition % (_bufferCount * rawBufferLength);
                            var playingIndex = playPos / rawBufferLength;
                            for (var i = nextIndex; i != playingIndex && !_isFinished; i = ++i % _bufferCount)
                            {
                                OnBufferRequest(lpBuffer, playingBuffer.Length);
                                var writePos = i * rawBufferLength;
                                _soundBuffer.Write(writePos, playingBuffer, LockFlag.None);
                                lastWrittenBuffer = i;
                            }
                        } while (!_isFinished);
                    }
                }
                finally
                {
                    // Always stop the hardware buffer, even on error/shutdown.
                    _soundBuffer.Stop();
                }
            }
            catch (Exception ex)
            {
                Logger.Error(ex);
            }
        }
コード例 #30
0
ファイル: SoundPlayer.cs プロジェクト: ewin66/AdHocDesktop
        /// <summary>
        /// Pulls captured audio from the circular buffer and writes it into
        /// the DirectSound playback buffer, keeping our write offset chasing
        /// the device write cursor in notify-size-aligned chunks.
        /// </summary>
        void Play()
        {
            int playPos;
            int writePos;
            int lockSize;

            try
            {
                m_Buffer.GetCurrentPosition(out playPos, out writePos);

                // Bytes between our last write offset and the device write cursor.
                lockSize = writePos - nextWriteOffset;
                if (lockSize < 0)
                {
                    lockSize += m_BufferBytes; // wrapped around the circular buffer
                }

                // Block align lock size so that we always write on a boundary.
                lockSize -= (lockSize % notifySize);

                if (0 == lockSize)
                {
                    return; // nothing to write yet
                }
                // (Removed a dead, empty `if (lockSize == m_BufferBytes)` branch.)

                byte[] writeBytes = new byte[lockSize];

                if (circularBuffer.Read(writeBytes) > 0)
                {
                    m_Buffer.Write(nextWriteOffset, writeBytes, LockFlag.None);

                    // Move the write offset along, wrapping at the buffer end.
                    nextWriteOffset += lockSize;
                    nextWriteOffset %= m_BufferBytes; // Circular buffer
                }
            }
            catch (Exception)
            {
                // Best-effort streaming: a transient buffer error skips this cycle.
            }
        }
コード例 #31
0
        //Receive audio data coming on port 1550 and feed it to the speakers to be played.
        private void Receive()
        {
            try
            {
                bStop = false;
                IPEndPoint remoteEP = new IPEndPoint(IPAddress.Any, 0);

                while (!bStop)
                {
                    //Receive one audio datagram (blocking).
                    byte[] byteData = udpClient.Receive(ref remoteEP);

                    //Decompress using the configured vocoder. ALawDecode
                    //allocates its own (double-sized) output buffer via the
                    //out parameter, so the original pre-allocation was dead
                    //and has been removed.
                    byte[] byteDecodedData;
                    if (vocoder == Vocoder.ALaw)
                    {
                        ALawDecoder.ALawDecode(byteData, out byteDecodedData);
                    }
                    else
                    {
                        //No vocoder: play the raw bytes as-is.
                        byteDecodedData = byteData;
                    }

                    //Play the data received to the user.
                    playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
                    playbackBuffer.Write(0, byteDecodedData, LockFlag.None);
                    playbackBuffer.Play(0, BufferPlayFlags.Default);
                }
            }
            catch (Exception)
            {
                //Best-effort: socket closure ends the receive loop silently.
            }
            finally
            {
                nUdpClientFlag += 1;  //signal that this receive thread has exited
            }
        }
コード例 #32
0
ファイル: SoundWaves.cs プロジェクト: Alexrerx/yaalp
        /// <summary>
        /// Plays a block of raw mono PCM samples (8-bit or 16-bit) at the
        /// given sample rate through a one-shot secondary buffer.
        /// </summary>
        public void playbytes(Byte[] soundbytes, int bitsPerSample, int sampleRate)
        {
            // Mono PCM only; 8-bit => 1 byte/sample, 16-bit => 2 bytes/sample.
            if (bitsPerSample != 8 && bitsPerSample != 16)
                throw new NotSupportedException("Supports 8bit or 16bit");
            int bytesPerSample = bitsPerSample / 8;

            // Describe the incoming raw PCM stream.
            WaveFormat format = new WaveFormat();
            format.FormatTag = WaveFormatTag.Pcm;
            format.Channels = 1;
            format.BitsPerSample = (short)bitsPerSample;
            format.BlockAlign = (short)bytesPerSample;
            format.SamplesPerSecond = sampleRate; //sampling frequency of your data
            format.AverageBytesPerSecond = sampleRate * bytesPerSample;

            // Deferred buffer sized exactly to the sample data.
            BufferDescription desc = new BufferDescription(format);
            desc.DeferLocation = true;
            desc.BufferBytes = soundbytes.Length;

            // Load and play once.
            SecondaryBuffer currentBuffer = new SecondaryBuffer(desc, deviceSound);
            currentBuffer.Write(0, soundbytes, LockFlag.EntireBuffer);
            currentBuffer.Play(0, BufferPlayFlags.Default);
        }
コード例 #33
0
ファイル: frmRemoteCam.cs プロジェクト: ender35/MenuOnRobot
 /// <summary>
 /// Writes newly arrived audio data into the secondary playback buffer so it
 /// is ready to play. Called when the player signals it is ready for data.
 /// </summary>
 /// <param name="secBuf">the secondary (playback) buffer</param>
 void m_objVP_ReadyForNewData(SecondaryBuffer secBuf)
 {
     try
     {
         int curPlayPos, curWritePos;
         secBuf.GetCurrentPosition(out curPlayPos, out curWritePos);
         //                Write( "curWritePos[" + curWritePos.ToString() + "]"
         //                     + "curPlay[" 	  + curPlayPos.ToString()  + "]");
         // Write up to the next write-notification point.
         int lockSize;// = m_nNextWritePos - curWritePos;
         #region 测试代码
         //curWritePos = (curWritePos / m_objVP.nNotifySize) * m_objVP.nNotifySize;
         // Test shortcut: always write exactly one notify-sized chunk.
         lockSize = m_objVP.nNotifySize;
         #endregion
         if (lockSize < 0) lockSize += m_objVP.nBufSize;  // wrap around the circular buffer
         if (0 != lockSize)
         {
             byte[] data = new byte[lockSize];
             if (m_objBuf.Read(data) > 0)
             {
                 // Write at the device write cursor and advance the expected
                 // next write position by one notify-sized chunk.
                 secBuf.Write(curWritePos, data, LockFlag.FromWriteCursor);
                 m_nNextWritePos = (m_nNextWritePos + m_objVP.nNotifySize) % m_objVP.nBufSize;
                 // Write( "curWritePos[" + curWritePos.ToString() + "]"
                 //      + "dataLength["  + data.Length.ToString() + "]");
             } // end of if
         } // end of if
     }
     catch
     {
         // Best-effort: playback glitches are ignored.
     }
 }
コード例 #34
0
ファイル: Sound.cs プロジェクト: BackupTheBerlios/agex-svn
        /// <summary>
        /// Loads a sound resource from <paramref name="filename"/>. A complete
        /// wave file (with headers) is wrapped in a DirectSound buffer
        /// directly; raw PCM data (e.g. from a decoder) gets a PCM format
        /// description built from the WaveFile fields first. 3D sounds get an
        /// additional Buffer3D wrapper.
        /// </summary>
        /// <param name="filename">path of the wave / raw audio resource</param>
        /// <param name="ID">resource identifier forwarded to the base class</param>
        /// <param name="type">Sound.THREED_SOUND enables 3D buffer features</param>
        public Sound(string filename, int ID, short type)
            : base(filename, ID)
        {
            // get the file data
            WaveFile wf = FileManager.Instance.Load(filename);

            if(wf.WavFile != null) // we have a wave file with headers
            {
                // set up the buffer properties
                soundDesc = new BufferDescription();
                soundDesc.GlobalFocus = false;
                soundDesc.ControlVolume = true;

                // enable 3D features for 3D sounds
                if(type == Sound.THREED_SOUND)
                {
                    soundDesc.Control3D = true;
                    soundDesc.Mute3DAtMaximumDistance = true;
                }

                // load the wave file from the stream into the buffer
                sound = new SecondaryBuffer(wf.WavFile, soundDesc, ((DirectSoundManager)SoundManager.Instance).Device);

            } else { // we have only raw PCM encoded sound data (usually from a decoder)

                // convert the format settings
                WaveFormat wfo = new WaveFormat();
                wfo.BitsPerSample = wf.Bits;
                wfo.Channels = wf.Channels;
                wfo.SamplesPerSecond = wf.Frequency;
                wfo.BlockAlign = (short)(wf.Bits*wf.Channels / 8);
                wfo.FormatTag = WaveFormatTag.Pcm;
                wfo.AverageBytesPerSecond = wf.Frequency * wfo.BlockAlign;

                // set up buffer properties; buffer sized to the raw data
                soundDesc = new BufferDescription(wfo);
                soundDesc.GlobalFocus = false;
                soundDesc.ControlVolume = true;
                soundDesc.BufferBytes = (int)wf.Data.Length;

                // enable 3D features for 3D sounds
                if(type == Sound.THREED_SOUND)
                {
                    soundDesc.Control3D = true;
                    soundDesc.Mute3DAtMaximumDistance = true;
                }

                // initialise the buffer and copy the (raw data) stream into it
                sound = new SecondaryBuffer(soundDesc, ((DirectSoundManager)SoundManager.Instance).Device);
                sound.Write(0, wf.Data, (int)wf.Data.Length, LockFlag.EntireBuffer);
            }

            // create a 3D buffer for 3D sounds (deferred settings: applied on commit)
            if(type == Sound.THREED_SOUND)
            {
                threeDsound = new Buffer3D(sound);
                threeDsound.Mode = Mode3D.Normal;
                threeDsound.Deferred = true;
            }
        }
コード例 #35
0
ファイル: VoiceChat.cs プロジェクト: netonjm/VoiceChat
        /*
          * Receive audio data coming on port 1550 and feed it to the speakers to be played.
          * In Server mode each received packet is also relayed to every connected party.
          */
        private void Receive()
        {
            try
             {

                 IsThreadReceiveEnd = false;

                 byte[] byteData;
                 bStop = false;
                 IPEndPoint remoteEP = new IPEndPoint(IPAddress.Any, 0);

                 if (eMode == Mode.Server)
                 {
                     LogAppend("Server Started");
                     LogUsersConnected();
                 }
                 else
                     LogAppend("Client Audio Connected");

                 while (!bStop)
                 {
                     //Receive data.
                     try
                     {

                         //bytes_received = udp_socket.ReceiveFrom(data, ref ep);

                         try
                         {
                             // NOTE(review): returning here exits the method;
                             // the outer finally still runs, but the trailing
                             // IsThreadReceiveEnd = true is skipped — confirm
                             // that is intended.
                             byteData = udpClient.Receive(ref remoteEP);
                         }
                         catch (Exception)
                         {
                             return;
                         }

                         //G711 compresses the data by 50%, so we allocate a buffer of double
                         //the size to store the decompressed data.
                         byte[] byteDecodedData = new byte[byteData.Length * 2];

                         if (vocoder == Vocoder.ALaw)
                             ALawDecoder.ALawDecode(byteData, out byteDecodedData); //Vocoder.ALaw
                         else if (vocoder == Vocoder.uLaw)
                             MuLawDecoder.MuLawDecode(byteData, out byteDecodedData); //Vocoder.uLaw
                         else
                         {
                             byteDecodedData = new byte[byteData.Length];
                             byteDecodedData = byteData;
                         }

                         // NOTE(review): the server relays the already-DECODED
                         // bytes to the other parties; receiving clients that
                         // use a vocoder will attempt to decode them again —
                         // confirm this round-trip is intended.
                         if (eMode == Mode.Server)
                         {
                             lock (otherPartyIPs)
                             {
                                 for (int i = 0; i < otherPartyIPs.Count; i++)
                                     udpClient.Send(byteDecodedData, byteDecodedData.Length, otherPartyIPs[i].Address.ToString(), 1550);

                             }

                         }

                         //Play the data received to the user.
                         playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
                         playbackBuffer.Write(0, byteDecodedData, LockFlag.None);
                         playbackBuffer.Play(0, BufferPlayFlags.Default);
                     }
                     catch (Exception)
                     {
                         // Best-effort: a bad packet or playback failure is skipped.
                     }

                 }

                 if (eMode == Mode.Server)
                 {
                     LogAppend("Server Stopped");
                     LogUsersConnected();
                 }
                 else
                     LogAppend("Client Audio Disconnected");

             }
             catch (Exception ex)
             {

                 LogAppend("Voice Receive > " + ex.Message);
                 //MessageBox.Show(ex.Message, "VoiceChat-Receive ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
             }
             finally
             {
                 nUdpClientFlag += 1;
             }

             IsThreadReceiveEnd = true;
        }
コード例 #36
0
ファイル: VoiceRecorder.cs プロジェクト: pokk/CSharpVAST
        private int intNotifySize = 5000;  // 設置通知大小

        #region 以自節數組中獲取音頻數據,並進行播放    <param name="intRecv">字節數組長度</param>    <param name="bytRecv">包含音頻數據的字節數組</param>  ------------------------------------- (3-3)
        /// <summary>
        /// Accumulates incoming audio bytes in the circular memstream and, once
        /// at least intNotifySize bytes are pending, plays the pending span
        /// through a freshly sized secondary buffer.
        /// </summary>
        /// <param name="intRecv">number of valid bytes in bytRecv</param>
        /// <param name="bytRecv">byte array containing the audio data</param>
        /// <param name="listener">false marks eavesdropper data, which gets extra attenuation</param>
        /// <param name="pan_val">stereo pan: -10000 full left .. 10000 full right</param>
        /// <param name="vol">playback volume (attenuation)</param>
        /// <param name="ShowVoice">picture box whose image is refreshed while playing</param>
        public void GetVoiceData(int intRecv, byte[] bytRecv, bool listener, int pan_val, int vol, PictureBox ShowVoice)
        {
            //new Bitmap(Properties.Resources.Voice);
            // intPosWrite marks the end of the newest written data;
            // intPosPlay marks where this round of playback starts.
            if (intPosWrite + intRecv <= memstream.Capacity)
            {
                // The incoming data still fits before the end of the stream.
                if ((intPosWrite - intPosPlay >= 0 && intPosWrite - intPosPlay < intNotifySize) || (intPosWrite - intPosPlay < 0 && intPosWrite - intPosPlay + memstream.Capacity < intNotifySize))
                {
                    // Not enough buffered yet: just accumulate.
                    memstream.Write(bytRecv, 0, intRecv);
                    intPosWrite += intRecv;
                }
                else if (intPosWrite - intPosPlay >= 0)
                {
                    // Enough contiguous data buffered: play it in one chunk.
                    buffDiscript.BufferBytes = intPosWrite - intPosPlay;  // buffer sized to the play..write distance
                    SecondaryBuffer sec = new SecondaryBuffer(buffDiscript, PlayDev);  // buffer sized for this chunk
                    sec.Volume = vol;  // requested volume
                    if (!listener)  // eavesdropper data
                        if (sec.Volume == -500)  // person standing behind
                            sec.Volume = -1000;  // attenuate further
                    sec.Pan = pan_val;  // -10000 full left .. 10000 full right
                    memstream.Position = intPosPlay;  // seek to where this playback round starts
                    sec.Write(0, memstream, intPosWrite - intPosPlay, LockFlag.FromWriteCursor);
                    sec.Play(0, BufferPlayFlags.Default);  // start playback
                    memstream.Position = intPosWrite;  // restore the position for the next write
                    intPosPlay = intPosWrite;
                    ShowVoice.Image = new Bitmap(Properties.Resources.Voice);
                }
                else if (intPosWrite - intPosPlay < 0)
                {
                    // Pending data wraps past the end of the stream:
                    // write the tail segment first, then the head segment.
                    buffDiscript.BufferBytes = intPosWrite - intPosPlay + memstream.Capacity;  // play..write distance across the wrap
                    SecondaryBuffer sec = new SecondaryBuffer(buffDiscript, PlayDev);  // buffer sized for this chunk
                    sec.Volume = vol;  // requested volume
                    if (!listener)  // eavesdropper data
                        if (sec.Volume == -500)  // person standing behind
                            sec.Volume = -1000;  // attenuate further
                    sec.Pan = pan_val;  // left / right channel
                    memstream.Position = intPosPlay;
                    sec.Write(0, memstream, memstream.Capacity - intPosPlay, LockFlag.FromWriteCursor);
                    memstream.Position = 0;
                    sec.Write(memstream.Capacity - intPosPlay, memstream, intPosWrite, LockFlag.FromWriteCursor);
                    sec.Play(0, BufferPlayFlags.Default);  // start playback
                    memstream.Position = intPosWrite;
                    intPosPlay = intPosWrite;
                    ShowVoice.Image = new Bitmap(Properties.Resources.Voice);
                }
            }
            else
            {
                // The incoming data would overflow memstream:
                // fill to the end, then wrap and overwrite from position 0.
                int irest = memstream.Capacity - intPosWrite;  // bytes left before the end of memstream
                memstream.Write(bytRecv, 0, irest);  // finish out the stream
                memstream.Position = 0;  // new data starts again at position 0
                memstream.Write(bytRecv, irest, intRecv - irest);  // overwrite the oldest data
                intPosWrite = intRecv - irest;  // write pointer = next position to write, so no -1 needed
            }
        }
コード例 #37
0
ファイル: Sounds.cs プロジェクト: Skinny1001/PlayUO
 /// <summary>
 /// Loads sound <paramref name="SoundID"/> from the index/data streams and
 /// returns it as a ready-to-play secondary buffer, or null when there is no
 /// device, the id is negative, or the entry cannot be resolved.
 /// </summary>
 public SecondaryBuffer ReadFromDisk(int SoundID)
 {
     if (m_Device == null)
     {
         return null;
     }
     if (SoundID < 0)
     {
         return null;
     }
     // Each index record is 12 bytes: offset, length, and a third field
     // that is read but unused here.
     m_Index.BaseStream.Seek((long) (SoundID * 12), SeekOrigin.Begin);
     int num = m_Index.ReadInt32();
     int num2 = m_Index.ReadInt32();
     int num3 = m_Index.ReadInt32();
     if ((num < 0) || (num2 <= 0))
     {
         // Missing entry: try translating the id to an alias, then re-read.
         if (!this.Translate(ref SoundID))
         {
             return null;
         }
         m_Index.BaseStream.Seek((long) (SoundID * 12), SeekOrigin.Begin);
         num = m_Index.ReadInt32();
         num2 = m_Index.ReadInt32();
         num3 = m_Index.ReadInt32();
     }
     if ((num < 0) || (num2 <= 0))
     {
         return null;
     }
     // Skip the 40-byte header that precedes the raw sample data.
     num2 -= 40;
     m_Stream.Seek((long) (num + 40), SeekOrigin.Begin);
     // NOTE(review): decompiled code — the set_Xxx calls are property setters,
     // and the trailing 2 in Write is presumably LockFlag.EntireBuffer;
     // confirm before editing.
     BufferDescription description = new BufferDescription(m_Format);
     description.set_BufferBytes(num2);
     description.set_ControlPan(true);
     description.set_ControlVolume(true);
     SecondaryBuffer buffer = new SecondaryBuffer(description, m_Device);
     buffer.Write(0, m_Stream, num2, 2);
     return buffer;
 }
コード例 #38
0
 /// <summary>
 /// Plays the currently loaded SPC or SND sample through a fresh
 /// volume-controllable secondary buffer.
 /// </summary>
 private void button1_Click(object sender, System.EventArgs e)
 {
     // Only play when either an SPC or an SND sample has been loaded.
     if (spc == null && snd == null)
     {
         return;
     }

     // Prefer the SND sample's wave data; fall back to the SPC's.
     byte[] raw = (snd != null) ? snd.getWave().rawBytes() : spc.getWave().rawBytes();

     Device dev = getDev();
     BufferDescription desc = new BufferDescription(WavFile.getFormat());
     desc.ControlVolume = true;
     desc.Flags = BufferDescriptionFlags.ControlVolume;
     desc.BufferBytes = raw.Length;

     // Fill the buffer and play it from the start at 0 dB attenuation.
     SecondaryBuffer buf = new SecondaryBuffer(desc, dev);
     buf.Write(0, raw, LockFlag.EntireBuffer);
     buf.SetCurrentPosition(0);
     buf.Volume = 0;
     buf.Play(0, BufferPlayFlags.Default);
 }
コード例 #39
0
        /// <summary>
        /// Creates a looping DirectSound buffer sized by getSoundDuration()
        /// (8-bit mono PCM at 8 kHz), fills it with white noise, and starts it.
        /// </summary>
        private void startPlaybackDirectSound()
        {
            // 8-bit mono PCM at 8 kHz.
            WaveFormat format = new WaveFormat();
            format.BitsPerSample = 8;
            format.Channels = 1;
            format.BlockAlign = 1;

            format.FormatTag = WaveFormatTag.Pcm;
            format.SamplesPerSecond = 8000; //sampling frequency of your data;
            format.AverageBytesPerSecond = format.SamplesPerSecond * format.BlockAlign;

            // Buffer description sized to hold the requested sound duration.
            BufferDescription desc = new BufferDescription(format);
            desc.DeferLocation = true;
            desc.ControlVolume = true;
            double bytesEstimate = getSoundDuration() * ((double)format.AverageBytesPerSecond);
            desc.BufferBytes = Convert.ToInt32(bytesEstimate);

            // Create the buffer on the existing device and apply the volume setting.
            activeSoundBuffer = new SecondaryBuffer(desc, soundDevice);
            setVolume();

            //generate random data (white noise)
            byte[] rawsamples = new byte[desc.BufferBytes];
            Random rnd = new System.Random();

            for (int i = 0; i < desc.BufferBytes; i++)
            {
                // FIX: Random.Next's upper bound is exclusive, so Next(256)
                // covers the full byte range 0-255. The original Next(255)
                // could never produce 255.
                rawsamples[i] = (byte)rnd.Next(256);
            }

            //load audio samples to secondary buffer
            activeSoundBuffer.Write(0, rawsamples, LockFlag.EntireBuffer);

            activeSoundBuffer.Play(0, BufferPlayFlags.Looping);
        }
コード例 #40
0
        /// <summary>
        /// Builds a secondary buffer holding the given PCM payload, using the
        /// audio settings held in avs. Returns null when buffer creation fails
        /// (the error is reported to the user via a message box).
        /// </summary>
        /// <param name="buffer">raw PCM audio bytes to load</param>
        public SecondaryBuffer PreparaAudio(byte[] buffer)
        {
            // Describe the PCM stream from the capture settings in avs.
            WaveFormat format = new WaveFormat();
            format.FormatTag = WaveFormatTag.Pcm;
            format.BitsPerSample = avs.BitsPerSample;
            format.Channels = avs.ChannelsCount;
            format.BlockAlign = Convert.ToInt16(avs.BytesPerSample * avs.ChannelsCount);
            format.SamplesPerSecond = avs.AudioSampleRate;
            format.AverageBytesPerSecond = avs.AvgBytesPerSec;

            // Deferred, volume-controllable buffer sized to the payload.
            BufferDescription desc = new BufferDescription(format);
            desc.DeferLocation = true;
            desc.ControlVolume = true;
            desc.BufferBytes = buffer.Length;

            SecondaryBuffer secondaryBuffer = null;
            try
            {
                // Create the buffer and load the audio samples into it.
                secondaryBuffer = new SecondaryBuffer(desc, applicationDevice);
                secondaryBuffer.Write(0, buffer, LockFlag.EntireBuffer);
            }
            catch (OutOfMemoryException)
            {
                MessageBox.Show("Error en el envío de sonido por falta de memoria.\nUna posible solución sería rebajar el valor del campo Multiplicador.");
            }
            catch (Exception x)
            {
                MessageBox.Show("Error en el envío de sonido (" + x.ToString() + "): " + x.Message);
            }

            return secondaryBuffer;
        }
コード例 #41
0
        /*
         * Receive audio data coming on port 1550 and feed it to the speakers to be played.
         */
        private void Receive()
        {
            try
            {
                bStop = false;
                IPEndPoint remoteEP = new IPEndPoint(IPAddress.Any, 0);

                while (!bStop)
                {
                    //Receive one audio datagram (blocking).
                    byte[] byteData = udpClient.Receive(ref remoteEP);

                    //Decompress using the configured vocoder. The G.711
                    //decoders allocate their own (double-sized) output buffer
                    //via the out parameter, so the original pre-allocation was
                    //dead and has been removed.
                    byte[] byteDecodedData;
                    if (vocoder == Vocoder.ALaw)
                    {
                        ALawDecoder.ALawDecode(byteData, out byteDecodedData);
                    }
                    else if (vocoder == Vocoder.uLaw)
                    {
                        MuLawDecoder.MuLawDecode(byteData, out byteDecodedData);
                    }
                    else
                    {
                        //No vocoder: play the raw bytes as-is.
                        byteDecodedData = byteData;
                    }

                    //Play the data received to the user.
                    playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
                    playbackBuffer.Write(0, byteDecodedData, LockFlag.None);
                    playbackBuffer.Play(0, BufferPlayFlags.Default);
                }
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message, "VoiceChat-Receive ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            finally
            {
                nUdpClientFlag += 1;  //signal that this receive thread has exited
            }
        }
コード例 #42
0
ファイル: RtpAudioFacade.cs プロジェクト: ajf8/marine-radio
 /// <summary>
 /// Plays one received audio frame, decoding it from G.711 A-law first when
 /// compression is enabled. On failure the sound device is reacquired.
 /// </summary>
 /// <param name="data">raw (or A-law compressed) frame bytes</param>
 private void PlayFrame(byte[] data)
 {
     try
     {
         // Fresh secondary buffer per frame.
         playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);

         byte[] samples = data;
         if (ConfSingleton.Instance.Compression)
         {
             // ALawDecode allocates the decompressed output itself.
             ALawDecoder.ALawDecode(data, out samples);
         }

         playbackBuffer.Write(0, samples, LockFlag.None);
         playbackBuffer.Play(0, BufferPlayFlags.Default);
     }
     catch (Exception)
     {
         // Playback failed — the device may have gone away; reacquire it.
         RefreshSoundDevice();
     }
 }
コード例 #43
0
        /// <summary>
        ///		Rewrites the audio data into the buffer and resets the format.
        ///		Builds a PCM WaveFormat from the sample's properties, creates a
        ///		new DirectSound secondary buffer (streamed or static, 3D or flat
        ///		depending on <c>_flags</c>), and writes the sample data into it.
        /// </summary>
        public void RefreshSampleBuffer()
        {
            // Nothing to (re)build without sample data.
            if (_sample == null)
                return;

            // Create the format description used by the buffer.
            WaveFormat format = new WaveFormat();
            format.BitsPerSample = (short)_sample.BitsPerSample;
            format.Channels = (short)_sample.ChannelCount;
            format.FormatTag = WaveFormatTag.Pcm;
            format.SamplesPerSecond = _sample.SampleRate;
            // Standard PCM derivations: bytes per frame, then bytes per second.
            format.BlockAlign = (short)(format.Channels * (format.BitsPerSample / 8));
            format.AverageBytesPerSecond = format.SamplesPerSecond * format.BlockAlign;

            // Only mono sounds can be used for 3D; silently drop the
            // positional flag for multi-channel samples.
            if ((_flags & SoundFlags.Positional) != 0 && format.Channels != 1)
                _flags &= ~SoundFlags.Positional;

            // Store some sample details.
            _frequency = _sample.SampleRate;
            _streamedBufferSize = MSToBytes(1000, format); // 1 second of audio.
            _streamThreshold = MSToBytes(400, format); // 400 millisecond threshold

            // Create the buffer description. Streamed sounds get a fixed-size
            // ring buffer; static sounds get a buffer sized to the whole sample.
            BufferDescription desc = new BufferDescription(format);
            desc.BufferBytes = ((_flags & SoundFlags.Streamed) != 0) ? _streamedBufferSize : _sample.Data.Length;
            if ((_flags & SoundFlags.Positional) != 0)
            {
                // 3D buffers cannot also have volume/pan/frequency controls here;
                // use full HRTF processing for positioning.
                desc.StickyFocus = true;
                desc.Control3D = true;
                desc.Mute3DAtMaximumDistance = true;
                desc.Guid3DAlgorithm = DSoundHelper.Guid3DAlgorithmHrtfFull;
            }
            else
            {
                desc.ControlVolume = true;
                desc.ControlPan = true;
                desc.ControlFrequency = true;
            }

            // Create DirectSound buffer.
            _buffer = new SecondaryBuffer(desc, _dx9Driver.Device);

            // Create the 3D buffer wrapper for positional sounds.
            // NOTE(review): Max/MinDistance 300/30 look like world units —
            // confirm against the engine's coordinate scale.
            if ((_flags & SoundFlags.Positional) != 0)
            {
                _3dBuffer = new Buffer3D(_buffer);
                _3dBuffer.Mode = Mode3D.Normal;
                _3dBuffer.MaxDistance = 300;
                _3dBuffer.MinDistance = 30;
            }

            // Write in the samples data (a streamed buffer only receives its
            // first chunk here; the rest is fed in later).
            if (_sample.Data.Length > 0)
            {
                _buffer.Write(0, _sample.Data, LockFlag.EntireBuffer);
                _bufferLength = _sample.Data.Length;
            }
        }
コード例 #44
0
ファイル: Main.cs プロジェクト: possan/randomjunk
        /// <summary>
        ///     Generates the sound data, creates a DirectSound device and a
        ///     secondary buffer for 16-bit mono 44.1 kHz PCM, writes the
        ///     pre-rendered samples and starts playback once.
        /// </summary>
        public void Run()
        {
            // Render/generate the sound data (fills S_B).
            S_RNDR();

            sounddevice = new Device();
            sounddevice.SetCooperativeLevel(Program.F1, CooperativeLevel.Normal);

            // 16-bit mono PCM at 44.1 kHz. Derive the dependent fields from
            // the base parameters instead of hard-coding them (2 and 88200)
            // so the format stays internally consistent.
            WaveFormat wf = new WaveFormat();
            wf.FormatTag = WaveFormatTag.Pcm;
            wf.SamplesPerSecond = 44100;
            wf.BitsPerSample = 16;
            wf.Channels = 1;
            wf.BlockAlign = (short)(wf.Channels * wf.BitsPerSample / 8);    // = 2
            wf.AverageBytesPerSecond = wf.SamplesPerSecond * wf.BlockAlign; // = 88200

            // Buffer sized to hold the entire rendered sound; GlobalFocus so
            // it keeps playing when the window loses focus.
            BufferDescription bufferDesc = new BufferDescription(wf);
            bufferDesc.BufferBytes = S_B.Length;
            bufferDesc.GlobalFocus = true;
            bufferDesc.ControlEffects = true;

            buffer = new SecondaryBuffer(bufferDesc, sounddevice);

            // Write the pre-rendered samples and play them once.
            buffer.Write(0, S_B, LockFlag.None);
            buffer.Play(0, BufferPlayFlags.Default);
        }
コード例 #45
0
ファイル: SoundOut.cs プロジェクト: sanyaade-g2g-repos/knack
		/// <summary>
		///		Reads the output configuration from the settings store, creates a
		///		DirectSound device and secondary buffer, optionally wires up the
		///		timer-driven streaming mode, and pre-fills the buffer with silence.
		/// </summary>
		public SoundOut()
		{
			// Basic PCM parameters, all loaded from Settings.
			samplesPerSecond = Settings.Instance.GetInt(
				"/Settings/Output/Sound/General/SamplesPerSecond");
			bitsPerSample = Settings.Instance.GetInt(
				"/Settings/Output/Sound/General/BitsPerSample");
			channels = Settings.Instance.GetInt(
				"/Settings/Output/Sound/General/Channels");
			// One block = BufferSamples frames, converted to bytes.
			bufferBlockBytes = Utils.Settings.Instance.GetInt(
			    "/Settings/Output/Sound/DirectSound/BufferSamples")
				*(bitsPerSample/8)*channels;
			// Whole ring buffer = Buffers blocks.
			bufferBytes = Settings.Instance.GetInt(
				"/Settings/Output/Sound/DirectSound/Buffers")
				*bufferBlockBytes;
			sleepTime = Settings.Instance.GetInt(
				"/Settings/Output/Sound/DirectSound/ThreadSleepTime");
			
			// NOTE(review): a throwaway Form is used as the cooperative-level
			// owner window — verify this is intentional (no real top-level window
			// available here?).
			soundDevice = new Device();
			soundDevice.SetCooperativeLevel(new Form(), CooperativeLevel.Normal);  
			// NOTE(review): blockData is a short[] sized with a BYTE count
			// (bufferBlockBytes elements = 2x that many bytes) — confirm the
			// intended block size; the silence pre-fill below depends on it.
			blockData = new short[bufferBlockBytes];
			floatLeftChannel = new float[bufferBlockBytes/2];
			floatRightChannel = new float[bufferBlockBytes/2];
			
			// Describe the PCM wave format from the loaded parameters.
			soundWaveFormat.SamplesPerSecond = samplesPerSecond;
			soundWaveFormat.Channels = (short) channels;
			soundWaveFormat.BitsPerSample = (short) bitsPerSample;
			soundWaveFormat.BlockAlign = (short)(soundWaveFormat.Channels * (soundWaveFormat.BitsPerSample / 8));
			soundWaveFormat.AverageBytesPerSecond = soundWaveFormat.BlockAlign * soundWaveFormat.SamplesPerSecond;
			soundWaveFormat.FormatTag = WaveFormatTag.Pcm;
			System.Console.WriteLine(soundWaveFormat.ToString());
			soundBufferDescription = new BufferDescription();
			soundBufferDescription.GlobalFocus = true;
			soundBufferDescription.LocateInSoftware = true;
			soundBufferDescription.BufferBytes = bufferBytes;
			soundBufferDescription.CanGetCurrentPosition = true;
			soundBufferDescription.ControlVolume = true;  
			soundBufferDescription.Format = soundWaveFormat;
			
			soundBuffer = new SecondaryBuffer(soundBufferDescription,soundDevice);
			// The driver may round the buffer size; use the actual allocated size.
			bufferBytes = soundBuffer.Caps.BufferBytes;
			
			// Timer-driven streaming: the elapsed handler refills the buffer.
			if (streamingType == StreamingType.Timer) {
				soundTimer = new System.Timers.Timer(sleepTime);
				soundTimer.Enabled = false;
				soundTimer.Elapsed += new System.Timers.ElapsedEventHandler(SoundTimerElapsed);
			}
			
			// Pre-fill the whole buffer with silence, block by block.
			// NOTE(review): the write offset is blockData.Length*i (an element
			// count) while Write expects a byte offset — confirm blocks are not
			// meant to be spaced 2x apart.
			blockData.Initialize();
			for (int i = 0; i<bufferBytes/bufferBlockBytes; i++)  {
				soundBuffer.Write(blockData.Length*i, blockData, LockFlag.EntireBuffer);  
			}
			
		}
コード例 #46
0
        /// <summary>
        ///     Streaming playback thread: creates a one-second looping DirectSound
        ///     buffer for 16-bit mono PCM, sets notification events at the start and
        ///     midpoint, and continuously refills the buffer with freshly generated
        ///     samples until the form signals stop.
        /// </summary>
        /// <param name="osn">The owning <c>EmulatorForm</c> (passed as object for the thread start).</param>
        private static void PlayThread(object osn)
        {
            EmulatorForm myform = (EmulatorForm)osn;

            SecondaryBuffer SecBuf;
            // Signalled when playback crosses the midpoint / wraps to the start.
            AutoResetEvent SecBufNotifyAtHalf = new AutoResetEvent(false);
            AutoResetEvent SecBufNotifyAtBeginning = new AutoResetEvent(false);
            
            // One second of 16-bit mono audio: BlockAlign = 2 bytes per sample.
            int SamplingRate = (int)myform._samplingRate;
            int HoldThisManySamples = (int)(1 * SamplingRate);
            int BlockAlign = 2;
            int SecBufByteSize = HoldThisManySamples * BlockAlign;

            WaveFormat MyWaveFormat = new WaveFormat();

            // Set the format
            MyWaveFormat.AverageBytesPerSecond = (int)(myform._samplingRate * BlockAlign);
            MyWaveFormat.BitsPerSample = (short)16;
            MyWaveFormat.BlockAlign = (short)BlockAlign;
            MyWaveFormat.Channels = (short)1;
            MyWaveFormat.SamplesPerSecond = (int)myform._samplingRate;
            MyWaveFormat.FormatTag = WaveFormatTag.Pcm;

            BufferDescription MyDescription;

            // Set BufferDescription
            MyDescription = new BufferDescription();

            MyDescription.Format = MyWaveFormat;
            MyDescription.BufferBytes = HoldThisManySamples * BlockAlign;
            MyDescription.CanGetCurrentPosition = true;
            MyDescription.ControlPositionNotify = true;
            MyDescription.GlobalFocus = true;

            // Create the buffer
            SecBuf = new SecondaryBuffer(MyDescription,myform._directSoundDevice);

            Notify MyNotify;

            MyNotify = new Notify(SecBuf);

            // Wake the refill loop when the play cursor reaches the buffer
            // start (offset 0) or its midpoint.
            BufferPositionNotify[] MyBufferPositions = new BufferPositionNotify[2];

            MyBufferPositions[0].Offset = 0;
            MyBufferPositions[0].EventNotifyHandle = SecBufNotifyAtBeginning.Handle;
            MyBufferPositions[1].Offset = (HoldThisManySamples / 2) * BlockAlign;
            MyBufferPositions[1].EventNotifyHandle = SecBufNotifyAtHalf.Handle;

            MyNotify.SetNotificationPositions(MyBufferPositions);

            WaitHandle[] SecBufWaitHandles = { SecBufNotifyAtBeginning, SecBufNotifyAtHalf };
            
            Int16[] buffer;

            // Prime the whole buffer with one second of samples and start
            // looping playback.
            buffer = myform._sn.GenerateSamples((uint)HoldThisManySamples, "");
            SecBuf.Write(0, buffer, LockFlag.None);
            SecBuf.Play(0, BufferPlayFlags.Looping);
            
            int SecBufNextWritePosition = 0;

            while (myform._bufferPlaying)
            {
                int WriteCount = 0,
                    PlayPosition = SecBuf.PlayPosition,
                    WritePosition = SecBuf.WritePosition;

                // Compute how many bytes of the ring buffer are safe to refill:
                // from our next write position up to the play cursor, accounting
                // for wrap-around.
                if (SecBufNextWritePosition < PlayPosition
                    && (WritePosition >= PlayPosition || WritePosition < SecBufNextWritePosition))
                    WriteCount = PlayPosition - SecBufNextWritePosition;
                else if (SecBufNextWritePosition > WritePosition
                    && WritePosition >= PlayPosition)
                    WriteCount = (SecBufByteSize - SecBufNextWritePosition) + PlayPosition;
               // System.Diagnostics.Debug.WriteLine("WC: "+WriteCount.ToString());
                if (WriteCount > 0)
                {
                    // Cap each refill at 1000 bytes to keep latency low.
                    WriteCount = (int)Math.Min(WriteCount,1000);

                    // WriteCount is in bytes; GenerateSamples takes a sample
                    // count, hence /2 (16-bit samples).
                    // NOTE(review): if WriteCount is ever odd, WriteCount/2
                    // truncates but the position still advances by WriteCount —
                    // confirm WriteCount is always even here.
                    buffer = myform._sn.GenerateSamples((uint)WriteCount/2, "");
                    
                    SecBuf.Write(
                        SecBufNextWritePosition,
                        buffer,
                        LockFlag.None);

                    SecBufNextWritePosition = (SecBufNextWritePosition + WriteCount) % SecBufByteSize;
                }
                else
                {
                    // Nothing to write yet: sleep until a position notification
                    // fires (or 5 s as a safety timeout).
                    WaitHandle.WaitAny(SecBufWaitHandles, new TimeSpan(0, 0, 5), true);
                }
            }

            // Stop requested: release DirectSound objects.
            SecBuf.Dispose();
            MyDescription.Dispose();
            MyNotify.Dispose();

        }