Example 1
        public byte[] Encode()
        {
            int ms = 20;
            int channels = 2;
            int sampleRate = 48000;

            int blockSize = 48 * 2 * channels * ms; //bytes per frame: (48000 samples/s / 1000) * 2 bytes per sample * channels * frame length in ms
            byte[] buffer = new byte[blockSize]; //a nicely sized pcm buffer to work with.
            var outFormat = new WaveFormat(sampleRate, 16, channels);
            
            if(__filename.EndsWith(".mp3"))
            {
                using (var mp3Reader = new Mp3FileReader(__filename))
                {
                    using (var resampler = new WaveFormatConversionStream(outFormat, mp3Reader))
                    {
                        int byteCount;
                        using (BinaryWriter bw = new BinaryWriter(new MemoryStream()))
                        {
                            while ((byteCount = resampler.Read(buffer, 0, blockSize)) > 0)
                            {
                                //now to encode
                                byte[] opusOutput = new byte[buffer.Length]; //extra bytes but that's okay
                                int opusEncoded = encoder.EncodeFrame(buffer, 0, opusOutput);
                                bw.Write((ushort)opusEncoded);
                                bw.Write(opusOutput, 0, opusEncoded);
                            }
                            MemoryStream baseStream = bw.BaseStream as MemoryStream;
                            return baseStream.ToArray();
                        }
                    }
                }
            }
            return null;
        }
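The byte array returned by Encode above is a sequence of length-prefixed Opus frames (a ushort frame length followed by the frame bytes). As an illustration only, and assuming that same layout, the frames could be walked back out like this:

        static IEnumerable<byte[]> ReadOpusFrames(byte[] encoded)
        {
            using (var ms = new MemoryStream(encoded))
            using (var br = new BinaryReader(ms))
            {
                while (ms.Position < ms.Length)
                {
                    ushort frameLength = br.ReadUInt16();   // the 2-byte length prefix written by Encode()
                    yield return br.ReadBytes(frameLength); // one encoded Opus frame
                }
            }
        }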
Example 2
        private static void SynthToCam(string fileName, CameraWindow cw)
        {
            using (var waveStream = new MemoryStream())
            {

                //write some silence to the stream to allow camera to initialise properly
                var silence = new byte[1 * 22050];
                waveStream.Write(silence, 0, silence.Length);

                //read in and convert the wave stream into our format
                using (var reader = new WaveFileReader(fileName))
                {
                    var newFormat = new WaveFormat(11025, 16, 1);
                    byte[] buff = new byte[22050];

                    using (var conversionStream = new WaveFormatConversionStream(newFormat, reader))
                    {
                        do
                        {
                            int i = conversionStream.Read(buff, 0, 22050);
                            waveStream.Write(buff, 0, i);
                            if (i < 22050)
                                break;
                        } while (true);
                    }
                }

                //write some silence to the stream to allow camera to end properly
                waveStream.Write(silence, 0, silence.Length);

                waveStream.Seek(0, SeekOrigin.Begin);

                var ds = new DirectStream(waveStream) { RecordingFormat = new WaveFormat(11025, 16, 1) };
                var talkTarget = TalkHelper.GetTalkTarget(cw.Camobject, ds);

                ds.Start();
                talkTarget.Start();
                while (ds.IsRunning)
                {
                    Thread.Sleep(100);
                }
                ds.Stop();
                talkTarget.Stop();
                talkTarget = null;
                ds = null;

                waveStream.Close();
            }
        }
Example 3
        static Task DoVoice(DiscordVoiceClient vc, string file)
        {
            return Task.Run(() =>
            {
                try
                {
                    int ms = 20;
                    int channels = 2;
                    int sampleRate = 48000;

                    int blockSize = 48 * 2 * channels * ms; //(sample rate / 1000) * 2 bytes per sample * channels * milliseconds
                    byte[] buffer = new byte[blockSize];
                    var outFormat = new WaveFormat(sampleRate, 16, channels);

                    vc.SetSpeaking(true);
                    using (var mp3Reader = new Mp3FileReader(file))
                    {
                        using (var resampler = new WaveFormatConversionStream(outFormat, mp3Reader))
                        {
                            int byteCount;
                            while ((byteCount = resampler.Read(buffer, 0, blockSize)) > 0)
                            {
                                if (vc.Connected)
                                {
                                    //sequence = await vc.SendSmallOpusAudioPacket(buffer, sampleRate, byteCount, sequence).ConfigureAwait(false);
                                    vc.SendVoice(buffer);
                                    //sequence = vc.SendSmallOpusAudioPacket(buffer, 48000, buffer.Length, sequence);
                                    //Task.Delay(19).Wait();
                                }
                                else
                                    break;
                            }
                            Console.ForegroundColor = ConsoleColor.Yellow;
                            Console.WriteLine("Voice finished enqueuing");
                            Console.ForegroundColor = ConsoleColor.White;
                        }
                    }
                }
                catch (Exception ex)
                {
                    Console.WriteLine(ex.Message);
                }
            });
        }
Example 4
        /// <summary>
        /// Converts a WMA file to a WAV (PCM) stream
        /// </summary>
        /// <param name="pathToWma">Path to the .wma file to convert</param>
        /// <param name="outputStream">Stream to receive the converted PCM data</param>
        /// <param name="sampleRate">Target sample rate</param>
        /// <param name="bitDepth">Target bits per sample</param>
        /// <param name="numChannels">Target channel count</param>
        /// <returns>The WaveFormat of the converted audio</returns>
        private static WaveFormat wmaToWav(string pathToWma, Stream outputStream, int sampleRate, int bitDepth, int numChannels)
        {
            if (!Path.GetExtension(pathToWma).ToLowerInvariant().Contains("wma"))
                throw new ArgumentException("Must be a .wma file!");

            using (var reader = new WMAFileReader(pathToWma))
            {
                var targetFormat = new NAudio.Wave.WaveFormat(sampleRate, bitDepth, numChannels);
                var pcmStream = new WaveFormatConversionStream(targetFormat, reader);
                var buffer = new byte[pcmStream.Length];
                pcmStream.Read(buffer, 0, (int)pcmStream.Length);
                outputStream.Write(buffer, 0, buffer.Length);
                outputStream.Position = 0;

                pcmStream.Close();

                return targetFormat;
            }
        }
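A hypothetical call to wmaToWav above, converting into an in-memory stream and then wrapping the PCM bytes so other NAudio components can consume them ("music.wma" and "music.wav" are placeholder paths):

        using (var pcm = new MemoryStream())
        {
            WaveFormat format = wmaToWav("music.wma", pcm, 44100, 16, 2);
            using (var source = new RawSourceWaveStream(pcm, format))
            {
                WaveFileWriter.CreateWaveFile("music.wav", source);
            }
        }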
Example 5
        public static void ComputeAllFeatures(string wavpath, string resultpath, System.Windows.Forms.TextBox progress)
        {
            StreamWriter writer = new StreamWriter("d:\\features.txt");
            for (int i = 1; i <= 674; i++)
            {

                WaveStream readerStream = new WaveFileReader(@"D:\Z projekti\Emocije\code\sustav(pun)\wavs\" + i.ToString()+ ".wav");
                WaveFormat format = new WaveFormat(readerStream.WaveFormat.SampleRate, 8, 1);
                readerStream = new WaveFormatConversionStream(format, readerStream);

                int length = (int)readerStream.Length;
                byte[] buffer = new byte[length];
                readerStream.Read(buffer, 0, length);
                readerStream.Dispose(); // release the file handle before the next iteration

                Classifier = new Classifiers.GoodClassifier();
                Classifier.SamplingFrequency = 11025;
                Classifier.SubWindowLength = 256; // 44100 [samples per second] * 0.025 [25 millisecond interval]
                Classifier.SubWindowShift = 165; // 44100 [samples per second] * 0.015 [15 millisecond interval]

                Classifier.SuperWindowLength =  (int)Math.Floor((double)length / (Classifier.SubWindowShift))-10; // 44100 [samples per second] / 1102 [SubFeatures per second] * 2 [seconds]
                Classifier.SuperWindowShift = 5; // 44100 [samples per second] / 1102 [SubFeatures per second] * 1 [seconds]

                Classifier.ClassificationComplete += new AbstractClassifier.ClassifComplete(Classifier_ClassificationComplete);
                Classifier.SubFeaturesComputed += new AbstractClassifier.SubFeaturesComp(Classifier_SubFeaturesComputed);
                Classifier.SuperFeaturesComputed += new AbstractClassifier.SuperFeaturesComp(Classifier_SuperFeaturesComputed);
                Classifier.EnqueueData(buffer);

                System.Windows.Forms.Application.DoEvents();
                string line = "";
                foreach (double d in Classifier.AllFeatures.First())
                {
                    line += d.ToString(CultureInfo.InvariantCulture) + ",";
                }
                line = line.TrimEnd(',');
                line += "\r\n";
                writer.WriteLine(line);
                writer.Flush();
                progress.Text = i.ToString();
            }
            writer.Close();
        }
Example 6
        private void GetAudioSamples()
        {
            ////var pcmStream = WaveFormatConversionStream.CreatePcmStream(new Mp3FileReader("whitelight.mp3"));
            //var pcmStream = new WaveFileReader("whitelight-ulaw.wav");
            //byte[] sampleBuffer = new byte[160];
            //int bytesRead = pcmStream.Read(sampleBuffer, 0, 160);
            ////int bytesRead = m_rawRTPPayloadReader.BaseStream.Read(sampleBuffer, 0, 160);
            //while (bytesRead > 0)
            //{
            //    m_rtpChannel.AddSample(sampleBuffer);
            //    bytesRead = pcmStream.Read(sampleBuffer, 0, 160);
            //    //bytesRead = m_rawRTPPayloadReader.BaseStream.Read(sampleBuffer, 0, 160);
            //}

            var pcmFormat = new WaveFormat(8000, 16, 1);
            var ulawFormat = WaveFormat.CreateMuLawFormat(8000, 1);

            using (WaveFormatConversionStream pcmStm = new WaveFormatConversionStream(pcmFormat, new Mp3FileReader("whitelight.mp3")))
            {
                using (WaveFormatConversionStream ulawStm = new WaveFormatConversionStream(ulawFormat, pcmStm))
                {
                    byte[] buffer = new byte[160];
                    int bytesRead = ulawStm.Read(buffer, 0, 160);

                    while (bytesRead > 0)
                    {
                        byte[] sample = new byte[bytesRead];
                        Array.Copy(buffer, sample, bytesRead);
                        m_rtpChannel.Send(sample, 20);

                        bytesRead = ulawStm.Read(buffer, 0, 160);
                    }
                }
            }

            logger.Debug("Finished adding audio samples.");
        }
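A single WaveFormatConversionStream normally performs one ACM conversion step, which is why the example above chains two of them: MP3 to 8 kHz mono PCM, then PCM to mu-law. A minimal sketch of the same chain written to a file instead of an RTP channel (file names are placeholders):

            var pcmFormat = new WaveFormat(8000, 16, 1);
            var ulawFormat = WaveFormat.CreateMuLawFormat(8000, 1);
            using (var mp3 = new Mp3FileReader("whitelight.mp3"))
            using (var pcmStm = new WaveFormatConversionStream(pcmFormat, mp3))
            using (var ulawStm = new WaveFormatConversionStream(ulawFormat, pcmStm))
            {
                WaveFileWriter.CreateWaveFile("whitelight-ulaw.wav", ulawStm);
            }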
Example 7
 private void AppendAllOfFile_stream(WaveStream readerWave, double? speedChange)
 {
     InitWriterIfNull(readerWave);
     speedChange = speedChange ?? 1;
     using (var pcmStream = WaveFormatConversionStream.CreatePcmStream(readerWave))
     using (var downsampledStream = new WaveFormatConversionStream(new WaveFormat(
         (int)(format.SampleRate / speedChange),
         format.BitsPerSample,
         format.Channels), pcmStream)) 
     {
         const int readThisManyBytes = 16384;
         byte[] buffer = new byte[readThisManyBytes];
         int bufferL = 0;
         //ByteBufferCompressor bbbb = new ByteBufferCompressor(readThisManyBytes, speedChange);
         while (downsampledStream.Position < downsampledStream.Length) {
             bufferL = downsampledStream.Read(buffer, 0, readThisManyBytes);
             if (bufferL == 0)
                 break;
             writer.Write(buffer, 0, bufferL);
             //var buffer2L = bbbb.Transform(buffer, bufferL);
             //writer.Write(bbbb.GetNewBuffer(), 0, buffer2L);
         }
     }
 }
Example 8
        private byte[] RealMix(ReceivedRtp item1, ReceivedRtp item2)
        {
            if (item1 == null || item2 == null) return null;

            if (item1.size == 0 || item2.size == 0) return null;

            byte[] wavSrc1 = new byte[item1.size - headersize];
            byte[] wavSrc2 = new byte[item2.size - headersize];

            Array.Copy(item1.buff, headersize, wavSrc1, 0, (item1.size - headersize));
            Array.Copy(item2.buff, headersize, wavSrc2, 0, (item2.size - headersize));

            WaveMixerStream32 mixer = new WaveMixerStream32();
            // mixer.AutoStop = true;
            MemoryStream memstrem = new MemoryStream(wavSrc1);
            RawSourceWaveStream rawsrcstream = new RawSourceWaveStream(memstrem, this.codec);
            WaveFormatConversionStream conversionstream = new WaveFormatConversionStream(pcmFormat16, rawsrcstream);
            WaveChannel32 channelstream = new WaveChannel32(conversionstream);
            mixer.AddInputStream(channelstream);

            memstrem = new MemoryStream(wavSrc2);
            rawsrcstream = new RawSourceWaveStream(memstrem, this.codec);
            conversionstream = new WaveFormatConversionStream(pcmFormat16, rawsrcstream);
            channelstream = new WaveChannel32(conversionstream);
            mixer.AddInputStream(channelstream);
            mixer.Position = 0;

            Wave32To16Stream to16 = new Wave32To16Stream(mixer);
            var convStm = new WaveFormatConversionStream(pcmFormat8, to16);
            byte[] mixedbytes = new byte[(int)convStm.Length];
            int chk = convStm.Read(mixedbytes, 0, (int)convStm.Length);
            //Buffer.BlockCopy(tobyte, 0, writingBuffer, 0, tobyte.Length);

            memstrem.Close();
            rawsrcstream.Close();
            conversionstream.Close();
            channelstream.Close();

            convStm.Close(); convStm.Dispose(); convStm = null;
            to16.Close(); to16.Dispose(); to16 = null;
            mixer.Close(); mixer.Dispose(); mixer = null;

            return mixedbytes;
        }
Example 9
        private void AudioSourceDataAvailable(object sender, DataAvailableEventArgs e)
        {
            try
            {
                lock (_obj)
                {
                    if (_bTalking && _avstream != null)
                    {
                        byte[] bSrc = e.RawData;
                        int totBytes = bSrc.Length;

                        if (!_audioSource.RecordingFormat.Equals(_waveFormat))
                        {
                            var ws = new TalkHelperStream(bSrc, totBytes, _audioSource.RecordingFormat);
                            var helpStm = new WaveFormatConversionStream(_waveFormat, ws);
                            totBytes = helpStm.Read(bSrc, 0, 25000);
                            ws.Close();
                            ws.Dispose();
                            helpStm.Close();
                            helpStm.Dispose();
                        }
                        var enc = new byte[totBytes / 2];
                        ALawEncoder.ALawEncode(bSrc, totBytes, enc);

                        try {
                            _avstream.Write(enc, 0, enc.Length);
                            _avstream.Flush();
                        }
                        catch (SocketException)
                        {
                            StopTalk();
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Log.Error("",ex);//MainForm.LogExceptionToFile(ex);
                StopTalk();
            }
        }
Example 10
        private void AudioSourceDataAvailable(object sender, DataAvailableEventArgs e)
        {
            try
            {
                lock (_obj)
                {
                    if (_bTalking && _avstream != null)
                    {
                        byte[] bSrc = e.RawData;
                        int totBytes = bSrc.Length;
                        int j = -1;
                        if (!_audioSource.RecordingFormat.Equals(_waveFormat))
                        {
                            using (var ws = new TalkHelperStream(bSrc, totBytes, _audioSource.RecordingFormat))
                            {

                                var bDst = new byte[44100];
                                totBytes = 0;
                                using (var helpStm = new WaveFormatConversionStream(_waveFormat, ws))
                                {
                                    while (j != 0)
                                    {
                                        j = helpStm.Read(bDst, totBytes, 10000);
                                        totBytes += j;
                                    }
                                    helpStm.Close();
                                }
                                ws.Close();
                                bSrc = bDst;
                            }
                        }

                        if (_needsencodeinit)
                        {
                            _enc.EncodeInit(BitConverter.ToInt16(e.RawData, 0), BitConverter.ToInt16(e.RawData, 2));
                            _needsencodeinit = false;
                        }

                        var buff = new byte[25000];
                        int c;
                        unsafe
                        {
                            fixed (byte* src = bSrc)
                            {
                                fixed (byte* dst = buff)
                                {
                                    c = (int)_enc.EncodeFoscam(src, totBytes, dst);
                                }
                            }
                        }
                        Buffer.BlockCopy(buff,0,_talkBuffer,_talkDatalen,c);
                        _talkDatalen += c;

                        var dtms = (int) (Helper.Now - _dt).TotalMilliseconds;
                        int i = 0;
                        j = 0;
                        try
                        {
                            while (j + 160 < _talkDatalen)
                            {
                                //need to write out in 160 byte packets for 40ms
                                byte[] cmd = SInit(TalkData, MoIPAvFlag);

                                cmd = AddNext(cmd, dtms + (i*40));
                                cmd = AddNext(cmd, _seq);
                                cmd = AddNext(cmd, (int) (Helper.Now - _dt).TotalSeconds);
                                cmd = AddNext(cmd, (byte) 0x0);
                                cmd = AddNext(cmd, 160);

                                var pkt = new byte[160];
                                Buffer.BlockCopy(_talkBuffer, j, pkt, 0, 160);
                                cmd = AddNext(cmd, pkt, 160);
                                Encode(ref cmd);

                                _avstream.Write(cmd, 0, cmd.Length);
                                j += 160;
                                _seq++;
                                i++;
                            }
                            if (j < _talkDatalen)
                            {
                                Buffer.BlockCopy(_talkBuffer, j, _talkBuffer, 0, _talkDatalen-j);
                                _talkDatalen = _talkDatalen - j;
                            }
                        }
                        catch (SocketException)
                        {
                            StopTalk(true);
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                MainForm.LogExceptionToFile(ex);
                StopTalk(true);
            }
        }
Example 11
        private void ProcessMixingFinal(RcvData data, int dataSize)
        {
            string processingFn = string.Format("e:\\{0}_{1}_{2}.wav", data.seqnum, data.extension, data.peernumber);

            List<RecInfos> ls0 = lExtension0.FindAll(
                        delegate(RecInfos list)
                        {
                            return list.rcvData.Equals(data) && list.isExtension == 0;
                        });

            List<RecInfos> ls1 = lExtension1.FindAll(
                        delegate(RecInfos list)
                        {
                            return list.rcvData.Equals(data) && list.isExtension == 1;
                        });

            IsExtensionComparer isExtensionCompare = new IsExtensionComparer();
            ls0.Sort(isExtensionCompare);
            ls1.Sort(isExtensionCompare);

            int count = 0;
            int count0 = ls0.Count();
            int count1 = ls1.Count();

            if (count0 - count1 < 0)
                count = count0;
            else
                count = count1;

            for (int i = 0; i < count; i++)
            {
                if (ls0[i].seq == ls1[i].seq)
                {
                    // mixing

                    byte[] wavSrc0 = new byte[160];
                    byte[] wavSrc1 = new byte[160];

                    Array.Copy(ls0[i].voice, 12, wavSrc0, 0, wavSrc0.Length);
                    Array.Copy(ls1[i].voice, 12, wavSrc1, 0, wavSrc1.Length);

                    WaveMixerStream32 mixer = new WaveMixerStream32();
                    //mixer.AutoStop = true;

                    WaveChannel32 channelStm = null;

                    for (int j = 0; j < 2; j++)
                    {
                        MemoryStream memStm = null;
                        BufferedStream bufStm = null;
                        RawSourceWaveStream rawSrcStm = null;
                        WaveFormatConversionStream conversionStm = null;

                        if (j == 0)
                            memStm = new MemoryStream(wavSrc0);
                        else
                            memStm = new MemoryStream(wavSrc1);

                        bufStm = new BufferedStream(memStm);
                        rawSrcStm = new RawSourceWaveStream(bufStm, mulawFormat);
                        conversionStm = new WaveFormatConversionStream(pcmFormat, rawSrcStm);

                        channelStm = new WaveChannel32(conversionStm);
                        mixer.AddInputStream(channelStm);
                    }
                    mixer.Position = 0;

                    if (File.Exists(processingFn))
                    {
                        var wavefilestream = new WaveFileReader(processingFn);
                        byte[] wavefilebyte = new byte[(int)wavefilestream.Length];
                        int chk0 = wavefilestream.Read(wavefilebyte, 0, wavefilebyte.Length);

                        Wave32To16Stream to16 = new Wave32To16Stream(mixer);
                        var conversionStm = new WaveFormatConversionStream(pcmFormat, to16);
                        byte[] tobyte = new byte[(int)conversionStm.Length];
                        int chk1 = conversionStm.Read(tobyte, 0, (int)conversionStm.Length);

                        byte[] desByte = new byte[wavefilebyte.Length + tobyte.Length];

                        conversionStm.Close();
                        wavefilestream.Close();

                        Buffer.BlockCopy(wavefilebyte, 0, desByte, 0, wavefilebyte.Length);
                        Buffer.BlockCopy(tobyte, 0, desByte, wavefilebyte.Length, tobyte.Length);

                        using (MemoryStream memStm = new MemoryStream(desByte))
                        using (BufferedStream buffStm = new BufferedStream(memStm))
                        using (RawSourceWaveStream rawSrcStm = new RawSourceWaveStream(buffStm, pcmFormat))
                        {
                            WaveFileWriter.CreateWaveFile(processingFn, rawSrcStm);
                        }
                    }
                    else
                    {
                        var mixedStm = new Wave32To16Stream(mixer);
                        var convStm = new WaveFormatConversionStream(pcmFormat, mixedStm);
                        WaveFileWriter.CreateWaveFile(processingFn, convStm);
                        convStm.Close();
                        mixedStm.Close();
                    }

                    mixer.Close();

                    // delete
                    lExtension0.Remove(ls0[i]);
                    lExtension1.Remove(ls1[i]);
                }
                else if (ls0[i].seq - ls1[i].seq < 0)
                {
                    // mix only ls0
                    // append the byte[] to the ls0 original > convert the original byte[] to a wave stream > save as a wave file

                    if (File.Exists(processingFn))
                    {
                        //wavefilestream = new WaveFileReader(processingFn);
                    }
                    else
                    {

                    }

                    // delete
                    lExtension0.Remove(ls0[i]);
                    ls1.Insert(i + 1, ls1[i]);
                }
                else if (ls0[i].seq - ls1[i].seq > 0)
                {
                    // mix only ls1
                    // append the byte[] to the ls1 original > convert the original byte[] to a wave stream > save as a wave file

                    if (File.Exists(processingFn))
                    {
                        //wavefilestream = new WaveFileReader(processingFn);
                    }
                    else
                    {

                    }

                    // delete
                    lExtension1.Remove(ls1[i]);
                    ls0.Insert(i + 1, ls0[i]);
                }
            }
        }
Example 12
        public void AudioDeviceDataAvailable(object sender, DataAvailableEventArgs e)
        {
            if (Levels == null || IsReconnect)
                return;
            try
            {
                lock (_lockobject)
                {
                    Helper.FrameAction fa;
                    if (!Recording)
                    {
                        var dt = Helper.Now.AddSeconds(0 - Micobject.settings.buffer);
                        while (Buffer.Count > 0)
                        {
                            if (Buffer.TryPeek(out fa))
                            {
                                if (fa.TimeStamp < dt)
                                {
                                    if (Buffer.TryDequeue(out fa))
                                        fa.Nullify();
                                }
                                else
                                {
                                    break;
                                }
                            }
                        }
                    }
                    fa = new Helper.FrameAction(e.RawData, e.BytesRecorded, Levels.Max(), Helper.Now);
                    Buffer.Enqueue(fa);

                }

                if (Micobject.settings.needsupdate)
                {
                    Micobject.settings.samples = AudioSource.RecordingFormat.SampleRate;
                    Micobject.settings.channels = AudioSource.RecordingFormat.Channels;
                    Micobject.settings.needsupdate = false;
                }

                OutSockets.RemoveAll(p => p.TcpClient.Client.Connected == false);
                if (OutSockets.Count>0)
                {
                    if (_mp3Writer == null)
                    {
                        _audioStreamFormat = new WaveFormat(22050, 16, Micobject.settings.channels);
                        var wf = new WaveFormat(_audioStreamFormat.SampleRate, _audioStreamFormat.BitsPerSample, _audioStreamFormat.Channels);
                        _mp3Writer = new LameMP3FileWriter(_outStream, wf, LAMEPreset.STANDARD);
                    }

                    byte[] bSrc = e.RawData;
                    int totBytes = bSrc.Length;

                    var ws = new TalkHelperStream(bSrc, totBytes, AudioSource.RecordingFormat);
                    var helpStm = new WaveFormatConversionStream(_audioStreamFormat, ws);
                    totBytes = helpStm.Read(_bResampled, 0, 22050);

                    ws.Close();
                    ws.Dispose();
                    helpStm.Close();
                    helpStm.Dispose();

                    _mp3Writer.Write(_bResampled, 0, totBytes);

                    var bterm = Encoding.ASCII.GetBytes("\r\n");

                    if (_outStream.Length > 0)
                    {
                        var bout = new byte[(int) _outStream.Length];

                        _outStream.Seek(0, SeekOrigin.Begin);
                        _outStream.Read(bout, 0, (int) _outStream.Length);

                        _outStream.SetLength(0);
                        _outStream.Seek(0, SeekOrigin.Begin);

                        foreach (var s in OutSockets)
                        {
                            var b = Encoding.ASCII.GetBytes(bout.Length.ToString("X") + "\r\n");
                            try
                            {
                                s.Stream.Write(b, 0, b.Length);
                                s.Stream.Write(bout, 0, bout.Length);
                                s.Stream.Write(bterm, 0, bterm.Length);
                            }
                            catch
                            {
                                OutSockets.Remove(s);
                                break;
                            }
                        }
                    }

                }
                else
                {
                    if (_mp3Writer != null)
                    {
                        _mp3Writer.Close();
                        _mp3Writer = null;
                    }
                }

                if (DataAvailable != null)
                {
                    DataAvailable(this, new NewDataAvailableArgs((byte[])e.RawData.Clone()));
                }

                if (_reconnectTime != DateTime.MinValue)
                {
                    Micobject.settings.active = true;
                    _errorTime = _reconnectTime = DateTime.MinValue;
                    DoAlert("reconnect");
                }
                _errorTime = DateTime.MinValue;

            }
            catch (Exception ex)
            {
                if (ErrorHandler != null)
                    ErrorHandler(ex.Message);
            }
        }
Example 13
        public static byte[] Resample(byte[] pcm, int fromRate, short fromDepth, short fromChannels, int toRate, short toDepth, short toChannels)
        {
            using (MemoryStream mem = new MemoryStream(pcm))
            {
                using (RawSourceWaveStream stream = new RawSourceWaveStream(mem, new WaveFormat(fromRate, fromDepth, fromChannels)))
                {
                    var outFormat = new WaveFormat(toRate, toDepth, toChannels);
                    using (var resampler = new WaveFormatConversionStream(outFormat, stream))
                    {

                        // scale the buffer by the ratio of output to input byte rates
                        int resampled_length = (int)((long)pcm.Length * toRate * toDepth * toChannels / ((long)fromRate * fromDepth * fromChannels));

                        byte[] ret = new byte[resampled_length];
                        resampler.Read(ret, 0, resampled_length);
                        return ret;
                    }
                }
            }
        }
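A hypothetical call to Resample above, downsampling raw 44.1 kHz 16-bit mono PCM (no WAV header) to 8 kHz for telephony-style processing ("capture.pcm" is a placeholder path):

            byte[] pcm44k = File.ReadAllBytes("capture.pcm");
            byte[] pcm8k = Resample(pcm44k, 44100, 16, 1, 8000, 16, 1);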
Example 14
        private static void SynthToCam(string fileName, CameraWindow cw)
        {
            using (var waveStream = new MemoryStream())
            {

                //write some silence to the stream to allow camera to initialise properly
                var silence = new byte[1 * 22050];
                waveStream.Write(silence, 0, silence.Length);

                //read in and convert the wave stream into our format
                using (var reader = new WaveFileReader(fileName))
                {
                    var newFormat = new WaveFormat(11025, 16, 1);
                    byte[] buff = new byte[22050];

                    using (var conversionStream = new WaveFormatConversionStream(newFormat, reader))
                    {
                        do
                        {
                            int i = conversionStream.Read(buff, 0, 22050);
                            waveStream.Write(buff, 0, i);
                            if (i < 22050)
                                break;
                        } while (true);
                    }
                }

                //write some silence to the stream to allow camera to end properly
                waveStream.Write(silence, 0, silence.Length);

                waveStream.Seek(0, SeekOrigin.Begin);

                ITalkTarget talkTarget;

                var ds = new DirectStream(waveStream) { RecordingFormat = new WaveFormat(11025, 16, 1) };
                switch (cw.Camobject.settings.audiomodel)
                {
                    case "Foscam":
                        ds.Interval = 40;
                        ds.PacketSize = 882; // (40ms packet at 22050 bytes per second)
                        talkTarget = new TalkFoscam(cw.Camobject.settings.audioip, cw.Camobject.settings.audioport,
                                                    cw.Camobject.settings.audiousername,
                                                    cw.Camobject.settings.audiopassword, ds);
                        break;
                    case "NetworkKinect":
                        ds.Interval = 40;
                        ds.PacketSize = 882;
                        talkTarget = new TalkNetworkKinect(cw.Camobject.settings.audioip, cw.Camobject.settings.audioport, ds);
                        break;
                    case "iSpyServer":
                        ds.Interval = 40;
                        ds.PacketSize = 882;
                        talkTarget = new TalkiSpyServer(cw.Camobject.settings.audioip,
                                                        cw.Camobject.settings.audioport,
                                                        ds);
                        break;
                    case "Axis":
                        talkTarget = new TalkAxis(cw.Camobject.settings.audioip, cw.Camobject.settings.audioport,
                                                    cw.Camobject.settings.audiousername,
                                                    cw.Camobject.settings.audiopassword, ds);
                        break;
                    default:
                        //local playback
                        talkTarget = new TalkLocal(ds);

                        break;
                }
                ds.Start();
                talkTarget.Start();
                while (ds.IsRunning)
                {
                    Thread.Sleep(100);
                }
                ds.Stop();
                if (talkTarget != null)
                    talkTarget.Stop();
                talkTarget = null;
                ds = null;

                waveStream.Close();
            }
        }
Example 15
 /// <summary>
 /// Resample a WAV stream to a new wave format
 /// </summary>
 /// <param name="wavIn">input stream containing WAV data</param>
 /// <param name="waveFormat">target wave format</param>
 /// <returns>the resampled audio data as a byte array</returns>
 public static byte[] ResampleWav(Stream wavIn, WaveFormat waveFormat)
 {
     using (var reader = new WaveFileReader(wavIn))
     {
         using (var conversionStream = new WaveFormatConversionStream(waveFormat, reader))
         {
             using (MemoryStream ms = new MemoryStream())
             {
                 int bytes = 0;
                 byte[] buffer = new byte[16*1024];
                 while ((bytes = conversionStream.Read(buffer, 0, buffer.Length)) != 0) {
                     ms.Write(buffer, 0, bytes);
                 }
                 return ms.ToArray();
             }
         }
     }
 }
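A hypothetical call to ResampleWav above, converting an on-disk WAV file to 16 kHz 16-bit mono ("speech.wav" is a placeholder path):

      using (var input = File.OpenRead("speech.wav"))
      {
          byte[] pcm16k = ResampleWav(input, new WaveFormat(16000, 16, 1));
      }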
Example 16
        private void Resample()
        {
            // If this causes problems, check at http://mark-dot-net.blogspot.de/2014/05/how-to-resample-audio-with-naudio.html

            using (RawSourceWaveStream source = new RawSourceWaveStream(new MemoryStream(preprocessed), new WaveFormat(SourceSampleRate, 1)))
            using (WaveFormatConversionStream resampler = new WaveFormatConversionStream(new WaveFormat(TargetSampleRate, 1), source))
            {
                resampled = new byte[resampler.Length];
                resampler.Read(resampled, 0, resampled.Length);
            }
        }
Example 17
 private void CanCreateConversionStream(WaveFormat inputFormat, WaveFormat outputFormat)
 {
     WaveStream inputStream = new NullWaveStream(inputFormat, 10000);
     using (WaveFormatConversionStream stream = new WaveFormatConversionStream(
         outputFormat, inputStream))
     {
         byte[] buffer = new byte[stream.WaveFormat.AverageBytesPerSecond];
         int totalRead = 0;
         int bytesRead;
         do
         {
             bytesRead = stream.Read(buffer, 0, buffer.Length);
             totalRead += bytesRead;
         } while (bytesRead > 0);
         Debug.WriteLine(String.Format("Converted {0}", totalRead));
         Assert.AreEqual(inputStream.Length, inputStream.Position);
     }
 }
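A hypothetical call to CanCreateConversionStream above, checking that ACM can convert 8 kHz 16-bit mono PCM to mu-law in a single step:

      CanCreateConversionStream(new WaveFormat(8000, 16, 1), WaveFormat.CreateMuLawFormat(8000, 1));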
Example 18
        private void AudioSourceDataAvailable(object sender, DataAvailableEventArgs e)
        {
            try
            {
                lock (_obj)
                {
                    if (_bTalking && _avstream != null)
                    {
                        byte[] bSrc = e.RawData;
                        int totBytes = bSrc.Length;
                        int j = -1;
                        if (!_audioSource.RecordingFormat.Equals(_waveFormat))
                        {
                            using (var ws = new TalkHelperStream(bSrc, totBytes, _audioSource.RecordingFormat))
                            {

                                var bDst = new byte[44100];
                                totBytes = 0;
                                using (var helpStm = new WaveFormatConversionStream(_waveFormat, ws))
                                {
                                    while (j != 0)
                                    {
                                        j = helpStm.Read(bDst, totBytes, 10000);
                                        totBytes += j;
                                    }
                                    helpStm.Close();
                                }
                                ws.Close();
                                bSrc = bDst;
                            }
                        }
                        var enc = _muLawCodec.Encode(bSrc, 0, totBytes);
                        //ALawEncoder.ALawEncode(bSrc, totBytes, enc);

                        Buffer.BlockCopy(enc, 0, _talkBuffer, _talkDatalen, enc.Length);
                        _talkDatalen += enc.Length;

                        j = 0;
                        try
                        {
                            while (j + 240 < _talkDatalen)
                            {
                                //need to write out in 240 byte packets
                                var pkt = new byte[240];
                                Buffer.BlockCopy(_talkBuffer, j, pkt, 0, 240);

                               // _avstream.Write(_hdr, 0, _hdr.Length);
                                _avstream.Write(pkt, 0, 240);
                                j += 240;
                            }
                            if (j < _talkDatalen)
                            {
                                Buffer.BlockCopy(_talkBuffer, j, _talkBuffer, 0, _talkDatalen - j);
                                _talkDatalen = _talkDatalen - j;
                            }
                        }
                        catch (SocketException)
                        {
                            StopTalk();
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                MainForm.LogExceptionToFile(ex);
                StopTalk();
            }
        }
Example 19
        void AudioDeviceDataAvailable(object sender, DataAvailableEventArgs e)
        {
            if (Levels == null)
                return;
            try
            {
                if (WriterBuffer == null)
                {
                    var dt = DateTime.Now.AddSeconds(0 - Micobject.settings.buffer);
                    AudioBuffer.RemoveAll(p => p.TimeStamp < dt);
                    AudioBuffer.Add(new AudioAction(e.RawData, Levels.Max(), DateTime.Now));
                }
                else
                {
                    WriterBuffer.Enqueue(new AudioAction(e.RawData, Levels.Max(), DateTime.Now));
                }

                if (Micobject.settings.needsupdate)
                {
                    Micobject.settings.samples = AudioSource.RecordingFormat.SampleRate;
                    Micobject.settings.channels = AudioSource.RecordingFormat.Channels;
                    Micobject.settings.needsupdate = false;
                }

                OutSockets.RemoveAll(p => p.Connected == false);
                if (OutSockets.Count>0)
                {
                    if (_mp3Writer == null)
                    {
                        //_as = new AudioStreamer();
                        //_as.Open(AudioCodec.AAC, AudioSource.RecordingFormat.BitsPerSample * AudioSource.RecordingFormat.SampleRate * AudioSource.RecordingFormat.Channels, AudioSource.RecordingFormat.SampleRate, AudioSource.RecordingFormat.Channels);

                        _audioStreamFormat = new WaveFormat(22050, 16, Micobject.settings.channels);
                        var wf = new MP3Stream.WaveFormat(_audioStreamFormat.SampleRate, _audioStreamFormat.BitsPerSample, _audioStreamFormat.Channels);
                        _mp3Writer = new Mp3Writer(_outStream, wf, false);
                    }
                    //unsafe
                    //{
                    //    fixed (byte* p = e.RawData)
                    //    {
                    //        int byteLength = 0;
                    //        int* l = &byteLength;
                    //        byte* outStream = _as.WriteAudio(p, e.RawData.Length,  l);
                    //        byteLength = *l;

                    //        if (byteLength > 0)
                    //        {
                    //            var toSend = new byte[byteLength];
                    //            for (var i = 0; i < byteLength;i++ )
                    //            {
                    //                if (i==0)
                    //                    Debug.Write(toSend[0]);
                    //                toSend[i] = *(outStream + i);

                    //            }

                    //            foreach (Socket s in OutSockets)
                    //            {
                    //                s.Send(Encoding.ASCII.GetBytes(byteLength.ToString("X") + "\r\n"));
                    //                s.Send(toSend);
                    //                s.Send(Encoding.ASCII.GetBytes("\r\n"));
                    //            }
                    //        }
                    //    }
                    //}
                    byte[] bSrc = e.RawData;
                    int totBytes = bSrc.Length;

                    var ws = new TalkHelperStream(bSrc, totBytes, AudioSource.RecordingFormat);
                    var helpStm = new WaveFormatConversionStream(_audioStreamFormat, ws);
                    totBytes = helpStm.Read(_bResampled, 0, 25000);

                    ws.Close();
                    ws.Dispose();
                    helpStm.Close();
                    helpStm.Dispose();

                    _mp3Writer.Write(_bResampled, 0, totBytes);

                    if (_outStream.Length > 0)
                    {
                        var bout = new byte[(int) _outStream.Length];

                        _outStream.Seek(0, SeekOrigin.Begin);
                        _outStream.Read(bout, 0, (int) _outStream.Length);

                        _outStream.SetLength(0);
                        _outStream.Seek(0, SeekOrigin.Begin);

                        foreach (Socket s in OutSockets)
                        {
                            s.Send(Encoding.ASCII.GetBytes(bout.Length.ToString("X") + "\r\n"));
                            s.Send(bout);
                            s.Send(Encoding.ASCII.GetBytes("\r\n"));
                        }
                    }

                }
                else
                {
                    if (_mp3Writer != null)
                    {
                        _mp3Writer.Close();
                        _mp3Writer = null;
                    }

                    //if (_as!=null)
                    //{
                    //    _as.Close();
                    //    _as.Dispose();
                    //    _as = null;
                    //}
                }

                if (DataAvailable != null)
                {
                    DataAvailable(this, new NewDataAvailableArgs((byte[])e.RawData.Clone()));
                }

            }
            catch (Exception ex)
            {
                Log.Error("",ex);//MainForm.LogExceptionToFile(ex);
            }
        }
Example 20
        private void ProcessMixing2(RcvData data, int dataSize)
        {
            string processingFn = string.Format("d:\\{0}_{1}_{2}.wav", data.seqnum, data.extension, data.peernumber);

            List<RecInfos> ls0 = lExtension0.FindAll(
                        delegate(RecInfos list)
                        {
                            return list.rcvData.Equals(data) && list.isExtension == 0;
                        });

            List<RecInfos> ls1 = lExtension1.FindAll(
                        delegate(RecInfos list)
                        {
                            return list.rcvData.Equals(data) && list.isExtension == 1;
                        });

            IsExtensionComparer isExtensionCompare = new IsExtensionComparer();
            ls0.Sort(isExtensionCompare);
            ls1.Sort(isExtensionCompare);

            int count = 0;
            int count0 = ls0.Count();
            int count1 = ls1.Count();

            if (count0 - count1 < 0)
                count = count0;
            else
                count = count1;

            byte[] buffWriting = new byte[320 * count];

            for (int i = 0; i < count; i++)
            {
                if (ls0[i].seq == ls1[i].seq)
                {
                    // mixing
                    // the byte length may differ depending on the codec; check each case before building this for real
                    byte[] wavSrc0 = new byte[160];
                    byte[] wavSrc1 = new byte[160];

                    Array.Copy(ls0[i].voice, 12, wavSrc0, 0, wavSrc0.Length);
                    Array.Copy(ls1[i].voice, 12, wavSrc1, 0, wavSrc1.Length);

                    WaveMixerStream32 mixer = new WaveMixerStream32();
                    //mixer.AutoStop = true;

                    WaveChannel32 channelStm = null;

                    MemoryStream memStm = null;
                    BufferedStream bufStm = null;
                    RawSourceWaveStream rawSrcStm = null;
                    WaveFormatConversionStream conversionStm = null;

                    for (int j = 0; j < 2; j++)
                    {
                        if (j == 0)
                            memStm = new MemoryStream(wavSrc0);
                        else
                            memStm = new MemoryStream(wavSrc1);

                        bufStm = new BufferedStream(memStm);
                        rawSrcStm = new RawSourceWaveStream(bufStm, mulawFormat);
                        conversionStm = new WaveFormatConversionStream(pcmFormat, rawSrcStm);

                        channelStm = new WaveChannel32(conversionStm);
                        mixer.AddInputStream(channelStm);
                    }
                    mixer.Position = 0;

                    Wave32To16Stream to16 = new Wave32To16Stream(mixer);
                    var convStm = new WaveFormatConversionStream(pcmFormat, to16);
                    byte[] tobyte = new byte[(int)convStm.Length];
                    int chk = convStm.Read(tobyte, 0, (int)convStm.Length);
                    Buffer.BlockCopy(tobyte, 0, buffWriting, i * tobyte.Length, tobyte.Length);

                    conversionStm.Close();
                    rawSrcStm.Close();
                    bufStm.Close();
                    memStm.Close();

                    convStm.Close();
                    to16.Close();
                    channelStm.Close();
                    mixer.Close();

                    // delete
                    lExtension0.Remove(ls0[i]);
                    lExtension1.Remove(ls1[i]);
                }
                else if (ls0[i].seq - ls1[i].seq < 0)
                {
                    // mix only ls0
                    // append the byte[] to the ls0 original > convert the original byte[] to a wave stream > save as a wave file

                    // mixing
                    // the byte length may differ depending on the codec; check each case before building this for real
                    byte[] wavSrc0 = new byte[160];
                    byte[] wavSrc1 = new byte[160];

                    Array.Copy(ls0[i].voice, 12, wavSrc0, 0, wavSrc0.Length);
                    Array.Copy(ls1[i].voice, 12, wavSrc1, 0, wavSrc1.Length);

                    WaveMixerStream32 mixer = new WaveMixerStream32();
                    //mixer.AutoStop = true;

                    WaveChannel32 channelStm = null;

                    MemoryStream memStm = null;
                    BufferedStream bufStm = null;
                    RawSourceWaveStream rawSrcStm = null;
                    WaveFormatConversionStream conversionStm = null;

                    for (int j = 0; j < 2; j++)
                    {
                        if (j == 0)
                            memStm = new MemoryStream(wavSrc0);
                        else
                            memStm = new MemoryStream(wavSrc1);

                        bufStm = new BufferedStream(memStm);
                        rawSrcStm = new RawSourceWaveStream(bufStm, mulawFormat);
                        conversionStm = new WaveFormatConversionStream(pcmFormat, rawSrcStm);

                        channelStm = new WaveChannel32(conversionStm);
                        mixer.AddInputStream(channelStm);
                    }
                    mixer.Position = 0;

                    Wave32To16Stream to16 = new Wave32To16Stream(mixer);
                    var convStm = new WaveFormatConversionStream(pcmFormat, to16);
                    byte[] tobyte = new byte[(int)convStm.Length];
                    int chk = convStm.Read(tobyte, 0, (int)convStm.Length);
                    Buffer.BlockCopy(tobyte, 0, buffWriting, i * tobyte.Length, tobyte.Length);

                    conversionStm.Close();
                    rawSrcStm.Close();
                    bufStm.Close();
                    memStm.Close();

                    convStm.Close();
                    to16.Close();
                    channelStm.Close();
                    mixer.Close();

                    // delete
                    lExtension0.Remove(ls0[i]);
                    ls1.Insert(i + 1, ls1[i]);
                }
                else if (ls0[i].seq - ls1[i].seq > 0)
                {
                    // mix only ls1
                    // append the byte[] to the ls1 original > convert the original byte[] to a wave stream > save as a wave file

                    // mixing
                    // the byte length may differ depending on the codec; check each case before building this for real
                    byte[] wavSrc0 = new byte[160];
                    byte[] wavSrc1 = new byte[160];

                    Array.Copy(ls0[i].voice, 12, wavSrc0, 0, wavSrc0.Length);
                    Array.Copy(ls1[i].voice, 12, wavSrc1, 0, wavSrc1.Length);

                    WaveMixerStream32 mixer = new WaveMixerStream32();
                    //mixer.AutoStop = true;

                    WaveChannel32 channelStm = null;

                    MemoryStream memStm = null;
                    BufferedStream bufStm = null;
                    RawSourceWaveStream rawSrcStm = null;
                    WaveFormatConversionStream conversionStm = null;

                    for (int j = 0; j < 2; j++)
                    {
                        if (j == 0)
                            memStm = new MemoryStream(wavSrc0);
                        else
                            memStm = new MemoryStream(wavSrc1);

                        bufStm = new BufferedStream(memStm);
                        rawSrcStm = new RawSourceWaveStream(bufStm, mulawFormat);
                        conversionStm = new WaveFormatConversionStream(pcmFormat, rawSrcStm);

                        channelStm = new WaveChannel32(conversionStm);
                        mixer.AddInputStream(channelStm);
                    }
                    mixer.Position = 0;

                    Wave32To16Stream to16 = new Wave32To16Stream(mixer);
                    var convStm = new WaveFormatConversionStream(pcmFormat, to16);
                    byte[] tobyte = new byte[(int)convStm.Length];
                    int chk = convStm.Read(tobyte, 0, (int)convStm.Length);
                    Buffer.BlockCopy(tobyte, 0, buffWriting, i * tobyte.Length, tobyte.Length);

                    conversionStm.Close();
                    rawSrcStm.Close();
                    bufStm.Close();
                    memStm.Close();

                    convStm.Close();
                    to16.Close();
                    channelStm.Close();
                    mixer.Close();

                    // delete
                    lExtension1.Remove(ls1[i]);
                    ls0.Insert(i + 1, ls0[i]);
                }
            }

            // turn the accumulated buffers into bytes and write them out as a wave file
            WaveFileWriting(buffWriting, processingFn);
        }
Example 21
        private void AudioSourceDataAvailable(object sender, DataAvailableEventArgs e)
        {
            try
            {
                lock (_obj)
                {
                    if (_bTalking && _avstream != null)
                    {
                        byte[] bSrc = e.RawData;
                        int totBytes = bSrc.Length;

                        if (!_audioSource.RecordingFormat.Equals(_waveFormat))
                        {
                            using (var ws = new TalkHelperStream(bSrc, totBytes, _audioSource.RecordingFormat))
                            {
                                int j = -1;
                                var bDst = new byte[44100];
                                totBytes = 0;
                                using (var helpStm = new WaveFormatConversionStream(_waveFormat, ws))
                                {
                                    while (j != 0)
                                    {
                                        j = helpStm.Read(bDst, totBytes, 10000);
                                        totBytes += j;
                                    }
                                    helpStm.Close();
                                }
                                ws.Close();
                                bSrc = bDst;
                            }
                        }
                        var enc = new byte[totBytes / 2];
                        ALawEncoder.ALawEncode(bSrc, totBytes, enc);

                        try
                        {
                            _avstream.Write(enc, 0, enc.Length);
                        }
                        catch (SocketException)
                        {
                            StopTalk();
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                MainForm.LogExceptionToFile(ex);
                StopTalk();
            }
        }
Example 22
        private static WaveFormat reencodeWav(string pathToWav, Stream outputStream, int sampleRate, int bitDepth, int numChannels)
        {
            using (var reader = new WaveFileReader(pathToWav))
            {
                var targetFormat = new NAudio.Wave.WaveFormat(sampleRate, bitDepth, numChannels);
                var pcmStream = new WaveFormatConversionStream(targetFormat, reader);
                var buffer = new byte[pcmStream.Length];
                pcmStream.Read(buffer, 0, (int)pcmStream.Length);

                outputStream.Write(buffer, 0, buffer.Length);
                outputStream.Position = 0;

                pcmStream.Close();

                return targetFormat;
            }
        }