Example #1
0
        /// <summary>
        /// Window Loaded handler: initializes labels, focus, video monitoring,
        /// the welcome message, voice playback, network handlers, and (conditionally)
        /// webcam transmission. Statement order follows the original flow.
        /// </summary>
        private void Window_Loaded(object sender, RoutedEventArgs e)
        {
            lblWebCam1.Content = LabelContents.WEB_CAM_LBL;
            lblWebCam2.Content = LabelContents.WEB_CAM_LBL;


            // Set keyboard focus to the message edit box.

            this.messageEditBox.Focus();


            // Continuously monitor whether video frames are arriving.

            CheckVideoFrame();


            // Display the welcome message.

            ShowErrorOrMessage(ManagerMessage.MESSAGE_WELCOME);


            // Enable voice chat: stop any previous session, then open the
            // default output device (-1) with 44.1 kHz / 16-bit / stereo PCM,
            // 3 buffers of 3000 bytes each.
            Stop();
            WaveFormat fmt = new WaveFormat(44100, 16, 2);

            m_Player           = new WaveOutPlayer(-1, fmt, 3000, 3, new BufferFillEventHandler(Filler));
            m_bVoicePlayerFlag = true;


            // Notify: register network receive callbacks (TCP text + UDP video).

            Main._ClientEngine.AttachHandler(OnReceive);

            Main._VideoEngine.AttachUdpHandler(OnVideoReceive);


            // When the user's Kind is 1 (per the original comment: a female
            // user), disable mark controls and start webcam transmission.
            // NOTE(review): Kind == 1 semantics inferred from the original
            // comment only — confirm against UserInfo's definition.

            if (Main._UserInfo.Kind == 1)
            {
                this.numberMark.IsEnabled  = false;
                this.btnSendMark.IsEnabled = false;
                SetVideoChat();
            }
        }
Example #2
0
        /// <summary>
        /// Starts (or restarts) audio playback, resetting decoder state first.
        /// </summary>
        /// <param name="looped">Whether playback should repeat when the wave data ends.</param>
        public void Play(bool looped)
        {
            try
            {
                // Tear down any playback already in progress before reopening.
                Stop();

                // Reset per-session decode/playback state.
                _looped         = looped;
                _lastBlock      = -1;
                _leftOverBuffer = null;
                _state          = new DviAdpcmDecoder.AdpcmState();

                // -1 selects the default output device; 3 buffers sized to
                // four wave blocks each, refilled via Filler.
                _player = new WaveOutPlayer(-1, _format, _wave.BlockSize * 4, 3, Filler);
            }
            catch
            {
                MessageBox.Show("Audio play error.",
                                "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
Example #3
0
 /// <summary>
 /// Opens the default playback and capture devices for voice chat.
 /// On failure everything opened so far is closed and the exception rethrown.
 /// </summary>
 private void Start()
 {
     // Release devices from any previous session before reopening.
     Stop();
     try
     {
         // 44.1 kHz, 16-bit, stereo PCM.
         WaveFormat fmt = new WaveFormat(44100, 16, 2);

         // Device -1 = system default. Three 16 KB buffers trade latency
         // against stutter: smaller buffers give lower latency but may
         // stutter on slow machines. Filler feeds playback; Send_Voice
         // receives completed capture buffers.
         m_Player   = new WaveOutPlayer(-1, fmt, 16384, 3, new BufferFillEventHandler(Filler));
         m_Recorder = new WaveInRecorder(-1, fmt, 16384, 3, new BufferDoneEventHandler(Send_Voice));
     }
     catch
     {
         // Roll back any partially-opened device, then let the caller decide.
         Stop();
         throw;
     }
 }
Example #4
0
        /// <summary>
        /// Enumerates the system's audio output devices into the
        /// cbOutputDevices combo box and selects the configured device.
        /// </summary>
        private void GetOutputDevices()
        {
            // FIX: the original called wp.Dispose() only on the happy path,
            // leaking the native handle if any device query threw. 'using'
            // guarantees disposal on every exit path.
            using (WaveOutPlayer wp = new WaveOutPlayer())
            {
                uint dv = wp.GetDeviceCount();

                if (dv != 0)
                {
                    // GetOutputDeviceName fills 'prod' via the ref parameter.
                    string prod = "";
                    for (uint x = 0; x < dv; x++)
                    {
                        wp.GetOutputDeviceName(x, ref prod);
                        cbOutputDevices.Items.Add(prod);
                    }
                    cbOutputDevices.SelectedIndex = 0;

                    // Re-query the configured device so the combo displays it.
                    // NOTE(review): assumes this.OutputDevice is a valid index
                    // into the enumerated devices — confirm range handling.
                    wp.GetOutputDeviceName((uint)this.OutputDevice, ref prod);
                    cbOutputDevices.Text = prod;
                }
            }
        }
        /// <summary>
        /// Lazily creates the wave output player (if audio is enabled) and
        /// clears the stopped flag. Safe to call repeatedly: an existing
        /// player is reused.
        /// </summary>
        private void InternalStart()
        {
            // The format is (re)built even when audio is disabled — callers
            // may read 'format' regardless, so this assignment stays before
            // the guard below. NOTE(review): confirm whether that is relied on.
            format = new WaveFormat(FORMAT_SAMPLES_PER_SECOND, 8 * bytesPerSample / FORMAT_CHANNELS, FORMAT_CHANNELS);
            if (this.audioEnabled == false)
            {
                return;
            }

            try
            {
                if (player == null)
                {
                    // -1 = default output device; buffer size scales with the
                    // sample width so each buffer holds PLAY_BUFFER_SAMPLES samples.
                    player = new WaveOutPlayer(-1, format, PLAY_BUFFER_SAMPLES * this.bytesPerSample, PLAY_BUFFER_COUNT, new WaveLib.BufferFillEventHandler(this.FillerCallback));
                }
                stopped = false;
            }
            catch
            {
                // TODO: Shouldn't get this if initialization worked right.
                // NOTE(review): this empty catch silently swallows device-open
                // failures, leaving 'player' null and 'stopped' unchanged —
                // consider at least logging here.
            }
        }
Example #6
0
        /// <summary>
        /// Starts voice chat: raises the volume, opens the capture device,
        /// and opens the playback device unless one already exists.
        /// On failure everything is stopped and the exception rethrown.
        /// </summary>
        private void Start()
        {
            // Close any devices from a previous session first.
            Stop();
            try
            {
                SoundUtils.SetVolumePercent(100);

                // 44.1 kHz / 16-bit / stereo PCM.
                WaveFormat fmt = new WaveFormat(44100, 16, 2);

                // m_bVoicePlayerFlag set => a player was presumably created
                // elsewhere, so skip creating another (confirm against the
                // flag's other writers).
                if (m_bVoicePlayerFlag == false)
                {
                    m_Player = new WaveOutPlayer(-1, fmt, 3000, 3, new BufferFillEventHandler(Filler));
                }
                m_Recorder = new WaveInRecorder(-1, fmt, 3000, 3, new BufferDoneEventHandler(Voice_Out));

                // From here on, this instance owns the player.
                m_bVoicePlayerFlag = false;
            }
            catch
            {
                // Roll back any partially-opened device, then propagate.
                Stop();
                throw;
            }
        }
Example #7
0
 /// <summary>
 /// Opens the default audio capture device (and, when _isPlayer is set, the
 /// playback device) and appends the session parameters to the console log.
 /// </summary>
 private void Start()
 {
     // Release devices from any previous session before reopening.
     Stop();
     try
     {
         // 44.1 kHz, 16-bit, stereo PCM.
         _waveFormat = new WaveFormat(44100, 16, 2);

         // Device -1 = system default; three 16384-byte buffers each.
         _recorder = new WaveInRecorder(-1, _waveFormat, 16384, 3, new BufferDoneEventHandler(DataArrived));
         if (_isPlayer)
         {
             _player = new WaveOutPlayer(-1, _waveFormat, 16384, 3, new BufferFillEventHandler(Filler));
         }

         // Log text is unchanged from the original (literal values folded in).
         consoletext += DateTime.Now + " : Audio input device polling started\r\n";
         consoletext += DateTime.Now + " : Device = -1\r\n";
         consoletext += DateTime.Now + " : Channels = 2\r\n";
         consoletext += DateTime.Now + " : Bits per sample = 16\r\n";
         consoletext += DateTime.Now + " : Samples per second = 44100\r\n";
         // NOTE(review): logged frame size (8192) does not match the buffer
         // size passed above (16384) — confirm which value is intended.
         consoletext += DateTime.Now + " : Frame size = 8192\r\n";
     }
     catch (Exception ex)
     {
         // FIX: the original empty catch silently swallowed device-open
         // failures (its logging line was commented out). Surface the error
         // in the console log so device problems are visible.
         consoletext += DateTime.Now + " : " + ex + "\r\n";
     }
 }