Example #1
 // Stop and reset the capture sources
 private void CloseVideoSource()
 {
     if (videoSource != null)
     {
         if (videoSource.IsRunning)
         {
             videoSource.SignalToStop();
             videoSource.WaitForStop();
             videoSource = null;
             Console.Out.WriteLine("videoSource Disposed");
         }
     }
     if (audioSource != null)
     {
         if (audioSource.IsRunning)
         {
             audioSource.SignalToStop();
             audioSource.WaitForStop();
         }
         audioSource.Dispose();
         audioSource = null;
         Console.Out.WriteLine("audioSource Disposed");
     }
     finalizeXAudio2();
 }
Example #2
        public void ChangeCapturedDevices(AudioCaptureDevice audioDevice, VideoCaptureDevice videoDevice)
        {
            try
            {
                SelectedAudioDevice = audioDevice;
                SelectedVideoDevice = videoDevice;

                // Remember our initial capture state.
                bool wasCaptured = CaptureSource.State == CaptureState.Started;
                if (wasCaptured)
                {
                    CaptureSource.Stop();
                }

                CaptureSource.AudioCaptureDevice = audioDevice;
                CaptureSource.VideoCaptureDevice = videoDevice ?? CaptureSource.VideoCaptureDevice;
                ConfigureAudioCaptureDevice(CaptureSource.AudioCaptureDevice);
                ConfigureVideoCaptureDevice(CaptureSource.VideoCaptureDevice);

                // Restore capture state to how it originally was.
                if (wasCaptured)
                {
                    CaptureSelectedInputDevices();
                }
            }
            catch (Exception ex)
            {
                ClientLogger.ErrorException(ex, "Error updating captured devices");
                MessageService.ShowErrorHint(ex.Message);
            }
        }
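CaptureSelectedInputDevices() is referenced above but not shown. A minimal sketch, assuming it only needs to (re)request webcam/microphone access and restart the shared Silverlight CaptureSource:

        private void CaptureSelectedInputDevices()
        {
            // Ask Silverlight for device access if we do not already have it, then start capturing.
            if (CaptureDeviceConfiguration.AllowedDeviceAccess ||
                CaptureDeviceConfiguration.RequestDeviceAccess())
            {
                CaptureSource.Start();
            }
        }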
Example #3
        public void Start()
        {
            _Writer.Width      = _CaptureRegion.Width;
            _Writer.Height     = _CaptureRegion.Height;
            _Writer.FrameRate  = _FrameRate;
            _Writer.BitRate    = _BitRate;
            _Writer.VideoCodec = VideoCodec.Default;

            _Writer.Open(_FilePath + _FileName);

            if (_RecordingScreen)
            {
                _ScreenSource.FrameInterval = 20;
                _ScreenSource.NewFrame     += Screen_NewFrame;
                _ScreenSource.Start();
            }

            if (_RecordingMicrophone)
            {
                _AudioSource           = new AudioCaptureDevice(_CurrenrAudioDevice);
                _AudioSource.NewFrame += new EventHandler <Accord.Audio.NewFrameEventArgs>(Microphone_NewFrame);
                _AudioSource.Start();
            }
            if (_RecordingAudio)
            {
                Trace.WriteLine("start recoder Speaker");
                _SpeakerSource           = new AudioCaptureDevice(_CurrenrSpeakerDevice);
                _SpeakerSource.NewFrame += new EventHandler <Accord.Audio.NewFrameEventArgs>(Speaker_NewFrame);
                _SpeakerSource.Start();
            }
        }
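The matching teardown is not shown here. A minimal Stop() sketch, assuming the same fields as Start() and the SignalToStop/WaitForStop pattern used in the other examples on this page:

        public void Stop()
        {
            if (_RecordingScreen)
            {
                // Stop the screen stream and detach its handler.
                _ScreenSource.SignalToStop();
                _ScreenSource.NewFrame -= Screen_NewFrame;
            }

            if (_RecordingMicrophone && _AudioSource != null)
            {
                _AudioSource.SignalToStop();
                _AudioSource.WaitForStop();
            }

            if (_RecordingAudio && _SpeakerSource != null)
            {
                _SpeakerSource.SignalToStop();
                _SpeakerSource.WaitForStop();
            }

            // Flush and close the output file.
            _Writer.Close();
        }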
Example #4
 public MainViewModel()
 {
     ListenAddress       = "localhost:8002/SpeexStreamer";
     StartCommand        = new DelegateCommand <object>(o => SelectedAudioDevice != null, DoStartPlay);
     ListenCommand       = new DelegateCommand <object>(o => !string.IsNullOrWhiteSpace(ListenAddress), DoStartListen);
     SelectedAudioDevice = CaptureDeviceConfiguration.GetDefaultAudioCaptureDevice();
 }
Example #5
        /// <summary>
        ///   Starts recording. Only works if the player has
        ///   already been started and is grabbing frames.
        /// </summary>
        ///
        public void StartRecording()
        {
            if (IsRecording || !IsPlaying)
            {
                return;
            }

            Rectangle area     = CaptureRegion;
            string    fileName = newFileName();

            int height       = area.Height;
            int width        = area.Width;
            int framerate    = 1000 / screenStream.FrameInterval;
            int videoBitRate = 1200 * 1000;
            int audioBitRate = 320 * 1000;

            OutputPath         = Path.Combine(main.CurrentDirectory, fileName);
            RecordingStartTime = DateTime.MinValue;
            videoWriter        = new VideoFileWriter();

            // Create audio devices which have been checked
            var audioDevices = new List <AudioCaptureDevice>();

            foreach (var audioViewModel in AudioCaptureDevices)
            {
                if (!audioViewModel.Checked)
                {
                    continue;
                }

                var device = new AudioCaptureDevice(audioViewModel.DeviceInfo);
                device.AudioSourceError += device_AudioSourceError;
                device.Format            = SampleFormat.Format16Bit;
                device.SampleRate        = Settings.Default.SampleRate;
                device.DesiredFrameSize  = 2 * 4098;
                device.Start();

                audioDevices.Add(device);
            }

            if (audioDevices.Count > 0) // Check if we need to record audio
            {
                audioDevice = new AudioSourceMixer(audioDevices);
                audioDevice.AudioSourceError += device_AudioSourceError;
                audioDevice.NewFrame         += audioDevice_NewFrame;
                audioDevice.Start();

                videoWriter.Open(OutputPath, width, height, framerate, VideoCodec.H264, videoBitRate,
                                 AudioCodec.MP3, audioBitRate, audioDevice.SampleRate, audioDevice.Channels);
            }
            else
            {
                videoWriter.Open(OutputPath, width, height, framerate, VideoCodec.H264, videoBitRate);
            }

            HasRecorded = false;
            IsRecording = true;
        }
Example #6
        private void initalizeAudioStuff()
        {
            try
            {
                AudioSource = new AudioCaptureDevice();
                AudioDeviceInfo info = null;
                var             adc  = new AudioDeviceCollection(AudioDeviceCategory.Capture);
                foreach (var ad in adc)
                {
                    string desc = ad.Description;
                    if (desc.IndexOf("Audio") > -1)
                    {
                        info = ad;
                    }
                }
                if (info == null)
                {
                    AudioSource = new AudioCaptureDevice();
                }
                else
                {
                    AudioSource = new AudioCaptureDevice(info);
                }

                //AudioCaptureDevice source = new AudioCaptureDevice();
                AudioSource.DesiredFrameSize = 4096;
                AudioSource.SampleRate       = 44100;
                //int sampleRate = 44100;
                //int sampleRate = 22050;

                AudioSource.NewFrame += AudioSource_NewFrame;
                // AudioSource.Format = SampleFormat.Format64BitIeeeFloat;
                AudioSource.AudioSourceError += AudioSource_AudioSourceError;
                // AudioSource.Start();
            }
            catch
            {
                // Ignore device-enumeration errors; AudioSource normally still holds the default instance created above.
            }

            // Create buffer for wavechart control
            current = new float[AudioSource.DesiredFrameSize];

            // Create stream to store file
            stream  = new MemoryStream();
            encoder = new WaveEncoder(stream);

            frames  = 0;
            samples = 0;



            // Start
            AudioSource.Start();
        }
Example #7
        /// <summary>
        ///   Starts recording audio from the sound card
        /// </summary>
        ///
        private void BtnRecord_Click(object sender, EventArgs e)
        {
            // Create capture device
            // [1] Create the local audio capture device
            #region Local audio capture device

            /*
             * The audio source for a local audio capture device (i.e. the microphone).
             * Note: this Accord.Audio.IAudioSource captures audio data obtained from a local
             * capture device, such as a microphone. Audio is captured with DirectSound
             * through SlimDX. For instructions on how to list the capture devices,
             * see the Accord.DirectSound.AudioDeviceCollection documentation page.
             */
            #endregion
            source = new AudioCaptureDevice()
            {
                // Gets or sets the desired frame size.
                // Listen on 22050 Hz
                DesiredFrameSize = 4096,
                SampleRate       = 22050,

                #region We will be reading 16-bit PCM
                // We will be reading 16-bit PCM (Pulse Code Modulation)
                //https://baike.baidu.com/item/pcm%E7%BC%96%E7%A0%81/10865033?fr=aladdin
                #endregion

                Format = SampleFormat.Format16Bit
            };

            // Wire up some events
            // Register the audio source events
            source.NewFrame         += source_NewFrame;
            source.AudioSourceError += source_AudioSourceError;

            // Create buffer for wavechart control
            // The number of samples to read per frame.
            Current = new float[source.DesiredFrameSize];

            // Create stream to store file
            stream  = new MemoryStream();
            encoder = new WaveEncoder(stream);

            // Start
            source.Start();
            updateButtons();
        }
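The source_NewFrame handler is not shown above. A minimal sketch of the usual Accord.NET pattern, assuming the Current buffer and the WaveEncoder created in BtnRecord_Click (what the real handler does beyond this is an assumption):

        private void source_NewFrame(object sender, Accord.Audio.NewFrameEventArgs eventArgs)
        {
            // Copy the incoming samples into the buffer used by the wavechart control.
            eventArgs.Signal.CopyTo(Current);

            // Append the raw frame to the in-memory WAV stream.
            encoder.Encode(eventArgs.Signal);
        }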
Example #8
        public Recorder(int xlocation, int ylocation, int width, int height, AudioDeviceInfo audioDevice)
        {
            audioEncoder  = new WaveEncoder(@"D:\Projects\test_audio.wav");
            audio         = new AudioCaptureDevice(audioDevice);
            captureStream = new ScreenCaptureStream(new System.Drawing.Rectangle(xlocation, ylocation, width, height));
            video         = new VideoFileWriter();

            audio.DesiredFrameSize = 4096;
            audio.SampleRate       = 44100;
            audio.NewFrame        += Audio_NewFrame;

            captureStream.FrameInterval = 1;
            captureStream.NewFrame     += CaptureStream_NewFrame;
        }
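The constructor above only wires up the handlers. A hypothetical Start() for this Recorder, assuming the fields shown; the output path, frame rate, and codec below are placeholders for illustration:

        public void Start()
        {
            // Open the video file using the capture region's dimensions (path, frame rate and codec are assumptions).
            video.Open(@"D:\Projects\test_video.avi",
                       captureStream.Region.Width, captureStream.Region.Height,
                       25, VideoCodec.MPEG4);

            captureStream.Start();
            audio.Start();
        }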
Example #9
        private void StartVoiceCapture(string fileName)
        {
            AudioCapture = new AudioCaptureDevice(new Guid(Properties.UserSettings.Default.Microphone));
            // Specify capturing options
            AudioCapture.DesiredFrameSize = 4096;
            AudioCapture.SampleRate       = 44100;

            AudioCapture.NewFrame         += NewAudioFrame;
            AudioCapture.AudioSourceError += AudioError;

            _audioStream = new FileStream(baseDirectory + "AppFiles/TempFiles/" + fileName, FileMode.Create);
            _waveEncoder = new WaveEncoder(_audioStream);

            AudioCapture.Start();
        }
Example #10
        private void StopVoiceCapture()
        {
            AudioCapture.SignalToStop();

            FrameRate    = (double)BitmapsCount / _waveEncoder.Duration * 1000;
            BitmapsCount = 0;

            AudioCapture.NewFrame         -= NewAudioFrame;
            AudioCapture.AudioSourceError -= AudioError;
            AudioCapture = null;
            _waveEncoder = null;
            _audioStream.Close();
            _audioStream = null;
            stopAll     -= StopVoiceCapture;
        }
Example #11
        private AudioCaptureDevice GetCurrentAudioCaptureDevice()
        {
            AudioCaptureDevice captureSettings = null;

            var item = audioSourceComboBox.SelectedItem;

            if (item != null)
            {
                var tag = ((item as ComboBoxItem)?.Tag);
                if (tag != null)
                {
                    captureSettings = tag as AudioCaptureDevice;
                }
            }

            return(captureSettings);
        }
Example #12
        private void UpdateDevices()
        {
            AudioCaptureDevice            currentDefault   = CaptureDeviceConfiguration.GetDefaultAudioCaptureDevice();
            SilverlightAudioCaptureDevice currentDefaultEx = null;

            lock (this.devices)
            {
                var newDevices = new HashSet <AudioCaptureDevice> (CaptureDeviceConfiguration.GetAvailableAudioCaptureDevices(),
                                                                   AudioCaptureDeviceEqualityComparer.Instance);
                foreach (var kvp in this.devices)
                {
                    if (currentDefault != null && kvp.Key.FriendlyName == currentDefault.FriendlyName)
                    {
                        currentDefaultEx = kvp.Value;
                    }

                    if (newDevices.Remove(kvp.Key))
                    {
                        kvp.Value.IsAvailable = true;
                        continue;
                    }

                    this.devices[kvp.Key].IsAvailable = false;
                }

                foreach (AudioCaptureDevice device in newDevices)
                {
                    var d = new SilverlightAudioCaptureDevice(device);
                    if (currentDefault != null && device.FriendlyName == currentDefault.FriendlyName)
                    {
                        currentDefaultEx = d;
                    }

                    this.devices.Add(device, d);
                }
            }

            if (DefaultDevice != currentDefaultEx)
            {
                DefaultDevice = currentDefaultEx;
                OnDefaultDeviceChanged(EventArgs.Empty);
                OnPropertyChanged(new PropertyChangedEventArgs("DefaultDevice"));
            }
        }
Example #13
        public void StartCapturing()
        {
            if (videoCaptureDevices.Count == 0)
            {
                return;
            }

            videoCaptureDevice = new VideoCaptureDevice(videoCaptureDevices[0].MonikerString);

            audioCaptureDevice = GetProperAudioDevice();

            ProbeVideoDevice();

            videoCaptureDevice.NewFrame += CaptureVideoDeviceNewFrame;
            videoCaptureDevice.Start();

            audioCaptureDevice.NewFrame += AudioCaptureDeviceNewFrame;
            audioCaptureDevice.Start();
        }
Example #14
        /// <summary>
        /// Create capture buffer, output wave file and stream recorded samples to disk every 50 milliseconds
        /// </summary>
        public void StreamAudio()
        {
            AudioListener.Position    = new Vector3D(0, 0, 0);
            AudioListener.Velocity    = new Vector3D(0, 0, 0);
            AudioListener.Orientation = new Orientation(new Vector3D(1, 1, 0), new Vector3D(0, 1, 0));
            Byte[] recordedData = null;

            AudioFormatEnum HQcaptureFormat     = AudioFormatEnum.Stereo16;
            int             HQcaptureFrequency  = 44100;
            int             HQcaptureBufferSize = 1028000;

            //Console.WriteLine("Creating File {0}", Environment.CurrentDirectory + "\\test.wav");

            if (_FileName == "")
            {
                _FileName = "undefined.wav";
            }

            WaveFileWriter wave = new WaveFileWriter();

            wave.CreateFile(_FileName, HQcaptureFormat);

            using (AudioCaptureDevice g = new AudioCaptureDevice(null, HQcaptureFormat, HQcaptureFrequency, HQcaptureBufferSize))
            {
                //Console.WriteLine("Started Recording (press Enter To Stop)");
                g.Start();

                while (OpenALRecoding)
                {
                    Thread.Sleep(50);
                    int samplecount = g.AvaliabeSampleCount;
                    recordedData = g.CaptureSamples();
                    wave.WriteCaptured(recordedData);
                }

                g.Stop();
                //Console.WriteLine("Stopped Recording");
            }
            wave.CloseFile();
            //Console.WriteLine("File Saved");
        }
Example #15
        /// <summary>
        /// The idea here is to select a format which is closest to the format we actually want,
        /// and allows us to do the simplest possible downsampling (or none at all).
        /// </summary>
        /// <param name="device">The selected audio capture device</param>
        public static void SelectBestAudioFormat(AudioCaptureDevice device)
        {
            if (device != null && device.SupportedFormats.Count > 0)
            {
                // Some devices return a "SamplesPerSecond" of 0 at this stage. Damn Microsoft.
                var possibleAudioFormats = device.SupportedFormats.Where(format =>
                                                                         format.BitsPerSample == AudioConstants.BitsPerSample &&
                                                                         format.WaveFormat == WaveFormatType.Pcm).ToList();

                var formats = new StringBuilder();
                foreach (var format in device.SupportedFormats)
                {
                    formats.AppendFormat("BitsPerSample={0}, Channels={1}, SamplesPerSecond={2}\r\n", format.BitsPerSample, format.Channels, format.SamplesPerSecond);
                }
                ClientLogger.Debug("Possible audio formats: " + formats);

                // This will select any format that is an exact match of the desired format.
                var bestAudioFormat = possibleAudioFormats
                                      .FirstOrDefault(format => format.SamplesPerSecond == AudioConstants.WidebandSamplesPerSecond &&
                                                      format.Channels == AudioConstants.Channels &&
                                                      format.BitsPerSample == AudioConstants.BitsPerSample);

                // This will prefer formats that are exact multiples of the desired format, and which have the same number of channels.
                if (bestAudioFormat == null)
                {
                    bestAudioFormat = possibleAudioFormats
                                      .OrderBy(format =>
                                               (format.SamplesPerSecond != 0)
                                                        ? (format.SamplesPerSecond % AudioConstants.WidebandSamplesPerSecond) + format.Channels - AudioConstants.Channels
                                                        : int.MaxValue)
                                      .FirstOrDefault();
                }
                Debug.Assert(bestAudioFormat != null, "No appropriate audio format was found; possible formats = \r\n" + formats);
                ClientLogger.Debug("Selected audio format: BitsPerSample={0}, Channels={1}, SamplesPerSecond={2}", bestAudioFormat.BitsPerSample, bestAudioFormat.Channels, bestAudioFormat.SamplesPerSecond);
                device.DesiredFormat = bestAudioFormat;
            }
            else
            {
                ClientLogger.Debug("No audio capture device was found.");
            }
        }
Example #16
        public static void BeginAudioCapture(AudioCaptureDevice audioCaptureDevice)
        {
            try
            {
                StopAudioCapture();

                // Determines the audio buffer size in relation to the parent buffer. A bigger multiplier means a smaller proportion.
                // A larger multiplier also means dumping and copying to a new parent buffer less frequently, at the cost of a larger chunk of virtual memory.
                // e.g. if the multiplier is 4, the audio buffer for a single soundboard sample will take up 25% of the larger buffer.
                int audioBufferMultiplier = 5;

                if (audioCaptureDevice.MMDeviceInstance != null)
                {
                    WasapiLoopbackCapture = new WasapiLoopbackCapture(audioCaptureDevice.MMDeviceInstance);

                    int audioSampleSize = ApplicationConfiguration.Instance.SoundboardSampleSeconds * WasapiLoopbackCapture.WaveFormat.AverageBytesPerSecond;
                    int audioBufferMax  = audioSampleSize * audioBufferMultiplier;

                    WasapiLoopbackCapture.DataAvailable += (sender, e) =>
                    {
                        // Copy a clip-sized chunk of audio to a new large buffer upon filling this one up
                        if (_audioByteBuffer.Count + e.BytesRecorded > audioBufferMax)
                        {
                            List <byte> retainedBytes = _audioByteBuffer.GetRange(_audioByteBuffer.Count - audioSampleSize, audioSampleSize);
                            _audioByteBuffer.Clear();
                            _audioByteBuffer.AddRange(retainedBytes);
                        }

                        byte[] capturedBytes = new byte[e.BytesRecorded];
                        Array.Copy(e.Buffer, 0, capturedBytes, 0, e.BytesRecorded);
                        _audioByteBuffer.AddRange(capturedBytes);
                    };

                    WasapiLoopbackCapture.StartRecording();
                }
            }
            catch (Exception ex)
            {
                ApplicationLogger.Log(ex.Message, ex.StackTrace);
            }
        }
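StopAudioCapture() is called at the top of BeginAudioCapture but is not shown. A minimal sketch, assuming it only has to tear down the static NAudio WasapiLoopbackCapture instance used above:

        public static void StopAudioCapture()
        {
            if (WasapiLoopbackCapture != null)
            {
                // Stop the loopback capture and release its resources.
                WasapiLoopbackCapture.StopRecording();
                WasapiLoopbackCapture.Dispose();
                WasapiLoopbackCapture = null;
            }
        }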
Example #17
        public RecognitionDemo(string region, string key, string locale, int millisecondsPerFrame)
        {
            _disposed             = false;
            _millisecondsPerFrame = millisecondsPerFrame;
            SpeechConfig config = SpeechConfig.FromSubscription(key, region);

            config.SpeechRecognitionLanguage = locale;
            config.OutputFormat = OutputFormat.Detailed;
            _audioInput         = CreateAudioInputStream();
            _recognizer         = new SpeechRecognizer(config, AudioConfig.FromStreamInput(_audioInput));
            _audioCapture       = CreateAudioCaptureDevice();
            _audio      = new FileStream("audio.raw", FileMode.Create);
            _transcript = new StreamWriter(new FileStream("transcript.txt", FileMode.Create), Encoding.UTF8);
            _stopwatch  = new Stopwatch();

            _framesCaptured = 0;
            _intermediateResultsReceived = 0;
            _finalResultsReceived        = 0;
            _identicalResults            = 0;
            _lastResult = null;
        }
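CreateAudioInputStream() is not shown. A minimal sketch using the push-stream types from Microsoft.CognitiveServices.Speech.Audio; the 16 kHz / 16-bit / mono PCM format is an assumption and must match whatever the capture device actually delivers:

        private static PushAudioInputStream CreateAudioInputStream()
        {
            // Raw PCM pushed by the capture callback; the format describes the pushed bytes.
            return AudioInputStream.CreatePushStream(
                AudioStreamFormat.GetWaveFormatPCM(16000, 16, 1));
        }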
Example #18
        public static List <AudioCaptureDevice> GetAudioCaptureDevices()
        {
            List <AudioCaptureDevice> captureDevices = new List <AudioCaptureDevice>();

            var mmdevices = GetMMDevices();

            foreach (var d in mmdevices)
            {
                AudioCaptureDevice captureDevice = null;
                var client = d.AudioClient;
                if (client != null)
                {
                    var mixFormat = client.MixFormat;
                    if (mixFormat != null)
                    {
                        captureDevice = new AudioCaptureDevice
                        {
                            DeviceId = d.ID,
                            Name     = d.FriendlyName,

                            BitsPerSample = mixFormat.BitsPerSample,
                            SampleRate    = mixFormat.SampleRate,
                            Channels      = mixFormat.Channels,
                            Description   = $"{mixFormat.BitsPerSample} bit PCM: {mixFormat.SampleRate / 1000}kHz {mixFormat.Channels} channels",

                            //Properties = prop,
                        };

                        captureDevices.Add(captureDevice);
                    }
                }

                d?.Dispose();
            }
            mmdevices.Clear();


            return(captureDevices);
        }
Example #19
        public void AudioCaptureDevice()
        {
            ReadOnlyCollection <AudioCaptureDevice> roc = CaptureDeviceConfiguration.GetAvailableAudioCaptureDevices();

            Assert.IsNotNull(roc, "GetAvailableAudioCaptureDevices");
            // results are not cached
            Assert.AreNotSame(roc, CaptureDeviceConfiguration.GetAvailableAudioCaptureDevices(), "!same-collection");

            AudioCaptureDevice acd = CaptureDeviceConfiguration.GetDefaultAudioCaptureDevice();

            if (acd != null)
            {
                // a new instance of AudioCaptureDevice is returned
                Assert.IsFalse(roc.Contains(acd), "Contains(Default)");
                Assert.AreNotSame(acd, CaptureDeviceConfiguration.GetDefaultAudioCaptureDevice(), "!same");
            }
            else
            {
                // no default then collection should be empty
                Assert.AreEqual(0, roc.Count, "Empty");
            }
        }
Example #20
        public void SetRecorder(Guid? guid = null)
        {
            Guid? deviceGUID = guid;
            var  capture    = new AudioDeviceCollection(AudioDeviceCategory.Capture);

            foreach (var d in capture)
            {
                //Console.WriteLine($"{d.Guid}: {d.Description}");
                if (d.Description.IndexOf("Audio") != -1)
                {
                    deviceGUID = d.Guid;
                    break;
                }
            }
            Guid _guid = deviceGUID ?? throw new Exception("Guid of capture device is not set");

            _microphone            = new AudioCaptureDevice(_guid);
            _microphone.SampleRate = 44100;
            _microphone.Channels   = 2;
            _microphone.Format     = SampleFormat.Format32BitIeeeFloat;
            _microphone.NewFrame  += source_NewFrame;
        }
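SetRecorder only configures the device; nothing starts it. A hypothetical pair of helpers for the same _microphone field, following the SignalToStop/WaitForStop pattern used elsewhere on this page:

        public void StartRecorder()
        {
            _microphone.Start();
        }

        public void StopRecorder()
        {
            _microphone.SignalToStop();
            _microphone.WaitForStop();
            _microphone.NewFrame -= source_NewFrame;
        }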
Example #21
 private void ConfigureAudioCaptureDevice(AudioCaptureDevice device)
 {
     // Set the audio properties.
     if (device != null)
     {
         MediaDeviceConfig.SelectBestAudioFormat(device);
         device.AudioFrameSize = AudioConstants.MillisecondsPerFrame;                 // 20 milliseconds
         if (device.DesiredFormat == null)
         {
             ClientLogger.Error(CommonStrings.Media_NoAudioFormat);
             MessageService.ShowErrorHint(CommonStrings.Media_NoAudioFormat);
         }
     }
     else
     {
         // Only show an error if there really is no microphone attached.
         var audioDevices = CaptureDeviceConfiguration.GetAvailableAudioCaptureDevices();
         if (audioDevices.Count == 0)
         {
             ClientLogger.Debug(CommonStrings.Media_NoAudioDevice);
             MessageService.ShowErrorHint(CommonStrings.Media_NoAudioDevice);
         }
     }
 }
Example #22
        public void Setup(AudioCaptureDevice captDeviceSettings)
        {
            CaptDeviceSettings = captDeviceSettings;

            captureProps = (WasapiCaptureProperties)CaptDeviceSettings.Properties;

            exclusiveModeCheckBox.Checked = captureProps.ExclusiveMode;

            var bufferMsec = captureProps.BufferMilliseconds;

            if (bufferMsec > bufferSizeNumeric.Maximum)
            {
                bufferMsec = (int)bufferSizeNumeric.Maximum;
            }
            else if (bufferMsec < bufferSizeNumeric.Minimum)
            {
                bufferMsec = (int)bufferSizeNumeric.Minimum;
            }
            bufferSizeNumeric.Value = bufferMsec;


            eventSyncModeCheckBox.Checked = captureProps.EventSyncMode;
            //showCaptureBorderCheckBox.Checked = captureProps.ShowDebugBorder;
        }
Example #23
        public static ObservableCollection <SupportedFormat> GetFriendlySupportedFormats(this AudioCaptureDevice device)
        {
            var list = new ObservableCollection <SupportedFormat>();

            foreach (var format in device.SupportedFormats)
            {
                if (format.Channels == 2 && format.BitsPerSample == 16)
                {
                    list.Add(new SupportedFormat(format));
                }
            }
            return(list);
        }
Example #24
        // Start/Stop button ("開始" = Start, "停止" = Stop, "停止中" = Stopped)
        private void button1_Click(object sender, EventArgs e)
        {
            if (button1.Text == "開始")
            {
                if (DeviceExist)
                {
                    if (this.videoWindow == null)
                    {
                        this.videoWindow = new Form2();
                    }

                    this.CloseVideoSource();

                    videoSource = new VideoCaptureDevice(videoDevices[comboBox1.SelectedIndex].MonikerString);
                    if ((videoCapabilities != null) && (videoCapabilities.Length != 0))
                    {
                        videoSource.VideoResolution = videoSource.VideoCapabilities[comboBox3.SelectedIndex];
                        label1.Text           = videoSource.VideoResolution.FrameSize + " ";
                        this.videoWindow.Size = videoSource.VideoResolution.FrameSize;
                    }
                    if ((videoCapabilities2 != null) && (videoCapabilities2.Length != 0))
                    {
                        videoSource.DesiredFrameRate = (int)comboBox4.SelectedItem;
                        label1.Text += " " + videoSource.VideoResolution.FrameRate;
                    }

                    this.videoWindow.videoSourcePlayer1.VideoSource = videoSource;
                    this.videoWindow.videoSourcePlayer1.Start();

                    button1.Text = "停止";

                    this.videoWindow.Show();

                    //timer1.Enabled = true;
                }
                else
                {
                    label1.Text = "No Video Devices";
                }
                if (checkBox1.Checked)
                {
                    this.audioSource = new AudioCaptureDevice((AudioDeviceInfo)comboBox2.SelectedItem);
                    initXAudio2();
                    this.audioSource.NewFrame += source_NewFrame;
                    this.audioSource.Start();
                    new Thread(playSound).Start();
                }
            }
            else
            {
                if (this.videoWindow.videoSourcePlayer1.VideoSource != null)
                {
                    // stop camera
                    this.videoWindow.videoSourcePlayer1.SignalToStop();
                    this.videoWindow.videoSourcePlayer1.VideoSource = null;
                    this.CloseVideoSource();
                    this.videoWindow.Hide();
                    label1.Text  = "停止中";
                    button1.Text = "開始";
                }
            }
        }
Example #25
        /// <summary>
        ///   Starts recording. Only works if the player has
        ///   already been started and is grabbing frames.
        /// </summary>
        ///
        public void StartRecording()
        {
            if (IsRecording || !IsPlaying)
            {
                return;
            }

            Rectangle area     = CaptureRegion;
            string    fileName = NewFileName();

            int      height         = area.Height;
            int      width          = area.Width;
            Rational framerate      = new Rational(1000, screenStream.FrameInterval);
            int      videoBitRate   = 1200 * 1000;
            int      audioBitRate   = 320 * 1000;
            int      audioFrameSize = 10 * 4096;

            OutputPath                           = Path.Combine(main.CurrentDirectory, fileName);
            RecordingStartTime                   = DateTime.MinValue;
            videoWriter                          = new VideoFileWriter();
            videoWriter.BitRate                  = videoBitRate;
            videoWriter.FrameRate                = framerate;
            videoWriter.Width                    = width;
            videoWriter.Height                   = height;
            videoWriter.VideoCodec               = VideoCodec.H264;
            videoWriter.VideoOptions["crf"]      = "18"; // visually lossless
            videoWriter.VideoOptions["preset"]   = "veryfast";
            videoWriter.VideoOptions["tune"]     = "zerolatency";
            videoWriter.VideoOptions["x264opts"] = "no-mbtree:sliced-threads:sync-lookahead=0";

            // Create audio devices which have been checked
            var audioDevices = new List <AudioCaptureDevice>();

            foreach (var audioViewModel in AudioCaptureDevices)
            {
                if (!audioViewModel.Checked)
                {
                    continue;
                }

                var device = new AudioCaptureDevice(audioViewModel.DeviceInfo);
                device.AudioSourceError += Device_AudioSourceError;
                device.Format            = SampleFormat.Format32BitIeeeFloat;
                device.SampleRate        = Settings.Default.SampleRate;
                device.DesiredFrameSize  = audioFrameSize;
                device.Start();

                audioDevices.Add(device);
            }

            if (audioDevices.Count > 0) // Check if we need to record audio
            {
                audioMixer = new AudioSourceMixer(audioDevices);
                audioMixer.AudioSourceError += Device_AudioSourceError;
                audioMixer.NewFrame         += AudioDevice_NewFrame;
                audioMixer.Start();

                videoWriter.AudioBitRate = audioBitRate;
                videoWriter.AudioCodec   = AudioCodec.Aac;
                videoWriter.AudioLayout  = audioMixer.NumberOfChannels == 1 ? AudioLayout.Mono : AudioLayout.Stereo;
                videoWriter.FrameSize    = audioFrameSize;
                videoWriter.SampleRate   = audioMixer.SampleRate;
            }

            //this.lastFrameTime = DateTime.MinValue;

            videoWriter.Open(OutputPath);

            HasRecorded = false;
            IsRecording = true;
        }
Example #26
 public AudioSourceItem(AudioCaptureDevice d)
 {
     this.Device = d;
 }