Code Example #1
File: Streaming.cs Project: WildGenie/MonitoreoXP
 // Receive an audio stream over UDP and play it on the selected output device.
 public void recibirStreaming(int numberDevice, string ipEmisor, int puerto)
 {
     // 8 kHz, 16-bit, mono playback on the chosen device.
     waveOut = new WaveOut(WaveOut.Devices[numberDevice], 8000, 16, 1);
     servidorUdp = new UdpServer();
     servidorUdp.Bindings = new IPEndPoint[] { new IPEndPoint(IPAddress.Parse(ipEmisor), puerto) };
     servidorUdp.PacketReceived += new PacketReceivedHandler(recibirPaquetes_ServidorUdp);
     servidorUdp.Start();
 }
Code Example #2
 public Player(Stream stream)
 {
     Stream = stream;
     Decoder = new OggVorbisDecoder(stream);
     if (Decoder.Channels != 1)
         throw new InvalidDataException("Only mono sounds supported");
     WaveOut = new WaveOut(WaveOut.Devices[0], Decoder.SamplesPerSecond, 16, Decoder.Channels);
     Thread = new Thread(ThreadFunc);
     Thread.Start();
 }
Code Example #3
        public void JoinConference(int MicIndex)
        {
            if (!InConference)
            {
                audioWaveOut = new WaveOut(WaveOut.Devices[0], 8000, 16, 1);
                audioServer = new UdpServer();
                audioServer.Bindings = new IPEndPoint[] { new IPEndPoint(IPAddress.Parse(Main.User.MyIPAddress), audioPort) };
                audioServer.PacketReceived += new PacketReceivedHandler(AudioServer_PacketReceived);
                audioServer.Start();

                audioWaveIn = new WaveIn(WaveIn.Devices[MicIndex], 8000, 16, 1, 400);
                audioWaveIn.BufferFull += new BufferFullHandler(audioWaveIn_BufferFull);
                audioWaveIn.Start();
                InConference = true;
            }
        }
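Examples #3 and #7 wire up PacketReceived and BufferFull handlers (AudioServer_PacketReceived, audioWaveIn_BufferFull) whose bodies are not included in this listing. The sketch below shows one plausible shape for them, assuming the handlers receive the raw PCM bytes: the delegate signatures of PacketReceivedHandler and BufferFullHandler, the packet.Data property, and the remoteEndPoint field are assumptions, and the outgoing leg uses the standard .NET UdpClient rather than the library's own send API, which these examples do not show. Playback uses WaveOut.Play exactly as in Example #6.

 // Hypothetical handler bodies; only WaveOut.Play (Example #6) is taken from the listing.
 void AudioServer_PacketReceived(UdpPacket packet)            // assumed delegate signature
 {
     // Hand the received PCM bytes straight to the output device.
     audioWaveOut.Play(packet.Data, 0, packet.Data.Length);   // packet.Data: assumed property
 }

 void audioWaveIn_BufferFull(byte[] buffer)                   // assumed delegate signature
 {
     // Send the captured 8 kHz, 16-bit mono buffer to the remote peer.
     using (var client = new System.Net.Sockets.UdpClient())
         client.Send(buffer, buffer.Length, remoteEndPoint);  // remoteEndPoint: assumed IPEndPoint field
 }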
Code Example #4
File: SoundManager.cs Project: ITPuppy/myRepo
        public void StartRecordAndSend()
        {
            if (_waveIn != null || _waveOut != null)
            {
                throw new Exception("Call is already started");
            }

            // int waveInDevice = (Int32)Application.UserAppDataRegistry.GetValue("WaveIn", 0);
            //  int waveOutDevice = (Int32)Application.UserAppDataRegistry.GetValue("WaveOut", 0);

            int waveOutDevice = 0;

            _waveIn = new WaveIn(WaveIn.Devices[0], 8000, 16, 1, 400);

            _waveIn.BufferFull += new BufferFullHandler(_waveIn_BufferFull);
            _waveIn.Start();

            _waveOut = new WaveOut(WaveOut.Devices[waveOutDevice], 8000, 16, 1);
        }
Code Example #5
 public SoundReciever(IPEndPoint endPoint, WaveOut outputDevice)
 {
     _waveOut = outputDevice;
     _udpEndpoint = endPoint;
 }
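The SoundReciever constructor above only captures its dependencies; the receive loop itself is not part of the example. A minimal sketch of such a loop follows, assuming a blocking UdpClient bound to the stored endpoint and the WaveOut.Play overload seen in Example #6; the Start method and everything other than _waveOut and _udpEndpoint are hypothetical.

 // Hypothetical receive loop; only _waveOut and _udpEndpoint come from the constructor above.
 public void Start()
 {
     var client = new System.Net.Sockets.UdpClient(_udpEndpoint);   // bind to the configured endpoint
     var thread = new System.Threading.Thread(() =>
     {
         var remote = new IPEndPoint(IPAddress.Any, 0);
         while (true)
         {
             byte[] data = client.Receive(ref remote);   // blocks until a datagram arrives
             _waveOut.Play(data, 0, data.Length);        // Play(byte[], int, int) as in Example #6
         }
     });
     thread.IsBackground = true;
     thread.Start();
 }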
Code Example #6
File: formDevice.cs Project: solsetmary/Makhzan
        private void timerMic_Tick(object sender, EventArgs e)
        {
            timerMic.Enabled = false;
            var outDevices = WaveOut.Devices;
            if (outDevices.Length == 0)
            {
                MessageBox.Show("There are no output sound devices installed.");
                return;
            }

            var outDevice = outDevices[speakerID];
            var waveOut = new WaveOut(outDevice, samplespersec, bitspersample, wavechannel);

            new Thread(delegate()
            {
                try
                {
                    //int ittr = 0;
                    byte[] buffer = null;
                    byte[] bufferOld = null;
                    string waveMD5 = "";
                    string waveMD5Old = "";

                    while (isPlayVoice)
                    {
                        //ittr++;
                        System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
                        sw.Start();
                        if (!(clientMic == null))
                        {
                            if (isCalculateImageDelay)
                            {
                                isCalculateImageDelay = false;
                                syncTimeing = 1000; // ((waveBuffer * wavechannel) / (samplespersec * (bitspersample / 8))) * 1000;
                                syncTimeing = waveBuffer * wavechannel;
                                syncTimeing = syncTimeing / samplespersec;
                                syncTimeing = syncTimeing / (bitspersample / 8);
                                syncTimeing = (syncTimeing * 1000) + syncTimeing * 800;
                                isImageDelay = true;
                                swDelay.Start();
                            }

                            waveMD5 = clientMic.getWaveMD5();

                            if (waveMD5 != waveMD5Old)
                            {
                                buffer = clientMic.getMicBuffer();
                            }
                        }
                        else
                            return;

                        if (buffer == null)
                            return;

                        if (waveMD5 != waveMD5Old)
                        {
                            waveOut.Play(buffer, 0, buffer.Length);
                        }
                        waveMD5Old = waveMD5;

                        /*if (!ByteArrayCompare(buffer, bufferOld))
                        {
                            waveOut.Play(buffer, 0, buffer.Length);
                        }*/
                        bufferOld = buffer;

                        sw.Stop();
                        double d = Math.Floor(sw.Elapsed.TotalMilliseconds);
                        int l = buffer.Length;// / 1024;
                        waveBuffer = l;
                        if (lblCamerasNr.InvokeRequired)
                        {
                            lblCamerasNr.BeginInvoke((MethodInvoker)delegate
                            {
                                lblCamerasNr.Text = string.Format("Stream info: {0} byte , {1} ms", l, d);
                                lblCamerasNr.Refresh();
                            });
                        }
                        else
                        {
                            lblCamerasNr.Text = string.Format("Stream info: {0} byte , {1} ms", l, d);
                            lblCamerasNr.Refresh();
                        }
                    }
                }
                catch
                {
                }
            }).Start();
        }
Code Example #7
        private void btn_StartAudio_Click(object sender, EventArgs e)
        {
            if (audioIsRunning)
            {
                audioIsRunning = false;
                audioServer.Dispose();
                audioServer = null;
                audioWaveOut.Dispose();
                audioWaveOut = null;

                audioWaveIn.Dispose();
                audioWaveIn = null;
                btn_StartAudio.BackColor = Color.Green;
                ddl_AudioDevices.Enabled = true;
            }
            else
            {
                audioIsRunning = true;
                audioWaveOut = new WaveOut(WaveOut.Devices[0], 8000, 16, 1);
                audioServer = new UdpServer();
                audioServer.Bindings = new IPEndPoint[] { new IPEndPoint(IPAddress.Parse(GetMyIP()), audioPort) };
                audioServer.PacketReceived += new PacketReceivedHandler(AudioServer_PacketReceived);
                audioServer.Start();

                audioWaveIn = new WaveIn(WaveIn.Devices[ddl_AudioDevices.SelectedIndex], 8000, 16, 1, 400);
                audioWaveIn.BufferFull += new BufferFullHandler(audioWaveIn_BufferFull);
                audioWaveIn.Start();
                btn_StartAudio.BackColor = Color.Red;
                ddl_AudioDevices.Enabled = false;
            }
        }