private void UserControlLoaded(object sender, RoutedEventArgs e)
{
    // Initialize the webcam.
    captureSource = new CaptureSource();
    captureSource.VideoCaptureDevice = CaptureDeviceConfiguration.GetDefaultVideoCaptureDevice();

    // Desired format is 640 x 480 at 60 fps (good tracking results and performance).
    captureSource.VideoCaptureDevice.DesiredFormat = new VideoFormat(PixelFormatType.Unknown, 640, 480, 60);
    captureSource.CaptureImageCompleted += CaptureSourceCaptureImageCompleted;

    // Fill the Viewport rectangle with the VideoBrush.
    var vidBrush = new VideoBrush();
    vidBrush.SetSource(captureSource);
    Viewport.Fill = vidBrush;

    // Construct the detector.
    arDetector = new BitmapMarkerDetector { Threshold = 200, JitteringThreshold = 1 };

    // Load the marker patterns. Each has 16x16 segments and a width of 80 millimeters.
    slarMarker = Marker.LoadFromResource("data/Marker_SLAR_16x16segments_80width.pat", 16, 16, 80);
    lMarker = Marker.LoadFromResource("data/Marker_L_16x16segments_80width.pat", 16, 16, 80);
}
void Load()
{
    if (CaptureDeviceConfiguration.AllowedDeviceAccess || CaptureDeviceConfiguration.RequestDeviceAccess())
    {
        var devices = CaptureDeviceConfiguration.GetAvailableVideoCaptureDevices();
        foreach (var device in devices)
        {
            var videoItem = new VideoItem();
            videoItem.Name = device.FriendlyName;

            var source = new CaptureSource();
            source.VideoCaptureDevice = device;

            var videoBrush = new VideoBrush();
            videoBrush.SetSource(source);
            videoItem.Brush = videoBrush;

            this.sources.Add(source);
            this.sourceItems.Add(videoItem);
        }

        this.videoItems.ItemsSource = this.sourceItems;
        this.StartAll();
    }
}
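Load() ends by calling StartAll(), which is not shown. A minimal sketch, assuming sources is the List<CaptureSource> populated in the loop above:

// Hypothetical helper: start every CaptureSource collected in Load().
private void StartAll()
{
    foreach (var source in this.sources)
    {
        if (source.State != CaptureState.Started)
        {
            source.Start();
        }
    }
}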
private void InitializeCaptureSource()
{
    if (mCaptureSource == null)
    {
        // Setup the capture source (for recording video).
        mCaptureSource = new CaptureSource
        {
            VideoCaptureDevice = CaptureDeviceConfiguration.GetDefaultVideoCaptureDevice()
        };
        if (mCaptureSource.VideoCaptureDevice != null)
        {
            MediaDeviceConfig.SelectBestVideoFormat(mCaptureSource.VideoCaptureDevice);

            // Guard against a missing audio device before touching its format.
            if (mCaptureSource.AudioCaptureDevice != null && mCaptureSource.AudioCaptureDevice.DesiredFormat != null)
            {
                mCaptureSource.AudioCaptureDevice.AudioFrameSize = AudioConstants.MillisecondsPerFrame; // 20 milliseconds
                mVideoSink = new VideoSinkAdapter(mCaptureSource, mRecorder, mVideoQualityController);
                ClientLogger.Debug("CaptureSource initialized.");
            }
            else
            {
                ClientLogger.Debug("No suitable audio format was found.");
            }
            panelWebcam.DataContext = mCaptureSource;
        }
        else
        {
            // Do something more here eventually, once we figure out what the user experience should be.
            ClientLogger.Debug("No video capture device was found.");
        }
    }
}
private void DoStartPlay(object obj)
{
    CaptureDeviceConfiguration.RequestDeviceAccess();
    if (captureSource != null)
    {
        captureSource.Stop();
        captureSource = null;
    }

    // Desired format is 8 kHz, 16-bit, mono.
    var queriedAudioFormats = from format in SelectedAudioDevice.SupportedFormats
                              where format.SamplesPerSecond == 8000 &&
                                    format.BitsPerSample == 16 &&
                                    format.Channels == 1
                              select format;
    SelectedAudioDevice.DesiredFormat = queriedAudioFormats.FirstOrDefault();
    SelectedAudioDevice.AudioFrameSize = 40;

    captureSource = new CaptureSource { AudioCaptureDevice = SelectedAudioDevice };
    audioSink = new SpeexEncoderAudioSink(new Uri(@"http://" + ListenAddress)) { CaptureSource = captureSource };
    captureSource.Start();
}
public void StartWebCam()
{
    _captureSource = new CaptureSource();
    _captureSource.CaptureImageCompleted += new EventHandler<CaptureImageCompletedEventArgs>(_captureSource_CaptureImageCompleted);
    _captureSource.VideoCaptureDevice = CaptureDeviceConfiguration.GetDefaultVideoCaptureDevice();
    try
    {
        // Start capturing.
        if (_captureSource.State != CaptureState.Started)
        {
            // Create a video brush and fill the WebcamVideo rectangle with it.
            var vidBrush = new VideoBrush();
            vidBrush.Stretch = Stretch.Uniform;
            vidBrush.SetSource(_captureSource);
            WebcamVideo.Fill = vidBrush;

            // Ask the user for permission and start the capturing.
            if (CaptureDeviceConfiguration.RequestDeviceAccess())
            {
                _captureSource.Start();
            }
        }
    }
    catch (InvalidOperationException)
    {
        InfoTextBox.Text = "Web cam already started - if not, I can't find it...";
    }
    catch (Exception)
    {
        InfoTextBox.Text = "Could not start web cam, do you have one?";
    }
}
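StartWebCam subscribes to CaptureImageCompleted, but neither the trigger nor the handler is shown. A minimal sketch, assuming a hypothetical SnapshotImage control for the result:

// Ask the running CaptureSource for a still frame; the result arrives
// asynchronously via CaptureImageCompleted.
public void TakeSnapshot()
{
    if (_captureSource.State == CaptureState.Started)
    {
        _captureSource.CaptureImageAsync();
    }
}

void _captureSource_CaptureImageCompleted(object sender, CaptureImageCompletedEventArgs e)
{
    // e.Result is a WriteableBitmap of the captured frame.
    SnapshotImage.Source = e.Result;
}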
public GraphicsCaptureItemWrapper GetOrCreateCaptureItem(CaptureSource source, bool isWindow)
{
    var items = isWindow ? _windows : _displays;
    var found = items.FirstOrDefault(s => s.Equals(source));
    if (found == null)
    {
        found = items.FirstOrDefault(s => s.Name == source.Name);
    }
    if (found != null)
    {
        var handle = new IntPtr(found.CaptureId);
        if (!_captureCache.TryGetValue(handle, out var item))
        {
            item = _screenCaptureManager.CreateGraphicsCaptureItem(handle, isWindow);
            _captureCache[handle] = item;
        }
        if (item.Wrapped.Size.Height == 0 || item.Wrapped.Size.Width == 0)
        {
            Log.Warning($"{source.CaptureId} - {source.Name} has empty size");
            return null;
        }
        return item;
    }
    return null;
}
private void InitializeCaptureSource()
{
    if (_captureSource != null)
    {
        return;
    }

    // Setup the capture source (for recording audio).
    _captureSource = new CaptureSource();
    _captureSource.AudioCaptureDevice = CaptureDeviceConfiguration.GetDefaultAudioCaptureDevice();
    if (_captureSource.AudioCaptureDevice != null)
    {
        MediaDeviceConfig.SelectBestAudioFormat(_captureSource.AudioCaptureDevice);
        if (_captureSource.AudioCaptureDevice.DesiredFormat != null)
        {
            var mediaStats = new MediaStatistics();
            var mediaEnvironment = new MediaEnvironment(mediaStats);
            _captureSource.AudioCaptureDevice.AudioFrameSize = AudioFormat.Default.MillisecondsPerFrame; // 20 milliseconds
            _audioSinkAdapter = new MultipleControllerAudioSinkAdapter(GetMediaConfig(), _captureSource, 2000);
            _mediaStreamSource = new MultipleControllerAudioMediaStreamSource(2000);
            ClientLogger.Debug("CaptureSource initialized.");
        }
        else
        {
            ClientLogger.Debug("No suitable audio format was found.");
        }
    }
    else
    {
        // Do something more here eventually, once we figure out what the user experience should be.
        ClientLogger.Debug("No audio capture device was found.");
    }
}
private SceneRect GetRect(CaptureSource model)
{
    SceneRect rect = null;
    if (model.W > 0 && model.H > 0)
    {
        var ratio = (double)model.W / (double)model.H;
        var baseExtent = 0.5;
        var baseRatio = 16.0 / 9.0;
        if (ratio > baseRatio)
        {
            rect = new SceneRect { W = baseExtent, H = baseExtent * baseRatio / ratio };
        }
        else
        {
            rect = new SceneRect { W = baseExtent * ratio / baseRatio, H = baseExtent };
        }
    }
    return rect;
}
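A quick worked example of the math above (GetRect is private, so this is purely illustrative): a 4:3 source is narrower than the 16:9 base, so the width shrinks while the height keeps the base extent.

// Hypothetical usage: a 640x480 (4:3) source inside the 16:9 base box.
// ratio = 4/3 < 16/9, so W = 0.5 * (4/3) / (16/9) = 0.375 and H = 0.5.
var rect = GetRect(new CaptureSource { W = 640, H = 480 });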
void MainPage_Loaded(object sender, RoutedEventArgs e)
{
    this.source = new CaptureSource();
    this.deviceList.ItemsSource = CaptureDeviceConfiguration.GetAvailableVideoCaptureDevices();
    this.deviceList.SelectedIndex = 0;
    this.source.VideoCaptureDevice = (VideoCaptureDevice)this.deviceList.SelectedItem;
}
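The handler only reads the initial selection. A minimal sketch of a matching SelectionChanged handler, assuming the device should only be swapped while the source is stopped:

// Hypothetical handler: switch the capture device when the user picks another entry.
void deviceList_SelectionChanged(object sender, SelectionChangedEventArgs e)
{
    if (this.source.State == CaptureState.Stopped && this.deviceList.SelectedItem != null)
    {
        this.source.VideoCaptureDevice = (VideoCaptureDevice)this.deviceList.SelectedItem;
    }
}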
protected override void OnNavigatedTo(NavigationEventArgs e)
{
    base.OnNavigatedTo(e);
    Client = Connect.Client;
    changlanguage();
    threadInvite = new Thread(Invite); // Instantiate the threads.
    threadShare = new Thread(new ThreadStart(GetPreview));
    if (captureSource == null)
    {
        // Create the camera object.
        captureSource = new CaptureSource();
        videoCaptureDevice = CaptureDeviceConfiguration.GetDefaultVideoCaptureDevice();

        // Add event handlers for captureSource.
        captureSource.CaptureFailed += new EventHandler<ExceptionRoutedEventArgs>(OnCaptureFailed);

        // Initialize the camera if it exists on the phone.
        if (videoCaptureDevice != null)
        {
            TheVideoBrush.SetSource(captureSource);
            captureSource.Start();
        }
        else
        {
            MessageBox.Show("Your camera device is not supported.");
        }
    }
}
public static VideoCapturer Create(CaptureSource source)
{
    VideoCapturer cap;
    switch (source)
    {
        case CaptureSource.CameraBack:
            cap = new VideoCapturer(source);
            break;
        case CaptureSource.CameraFront:
            cap = new VideoCapturer(source);
            break;
        case CaptureSource.Screen:
            cap = new VideoCapturer(source);
            break;
        case CaptureSource.RenderTexture:
            throw new NotImplementedException("CaptureSource.RenderTexture is not yet supported");
        default:
            throw new NotSupportedException("CaptureSource not valid");
    }
    return cap;
}
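Note that CaptureSource here is an enum rather than the Silverlight class used elsewhere on this page. Typical usage would simply be:

// Create a capturer for the front camera; Screen and CameraBack work the same way.
var capturer = VideoCapturer.Create(CaptureSource.CameraFront);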
private static void VideoSouceChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
    CaptureSource video = (CaptureSource)e.NewValue;
    VideoBrush vb = (VideoBrush)d;
    vb.SetSource(video);
}
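This callback implies a dependency-property registration that isn't shown. A minimal sketch, assuming a hypothetical VideoSource attached property declared on a hypothetical VideoBrushExtensions class (the misspelled handler name is kept as-is, since the registration must reference it):

// Hypothetical registration wiring the callback above to a VideoSource attached property.
public static readonly DependencyProperty VideoSourceProperty =
    DependencyProperty.RegisterAttached(
        "VideoSource",
        typeof(CaptureSource),
        typeof(VideoBrushExtensions),
        new PropertyMetadata(null, VideoSouceChanged));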
private void InitializeCaptureSource()
{
    if (captureSource != null)
    {
        captureSource.Stop();
    }
    captureSource = new CaptureSource();
    captureSource.AudioCaptureDevice = (AudioCaptureDevice)listBoxAudioSources.SelectedItem;
    MediaDeviceConfig.SelectBestAudioFormat(captureSource.AudioCaptureDevice);
    captureSource.AudioCaptureDevice.DesiredFormat = captureSource.AudioCaptureDevice.SupportedFormats
        .First(format => format.BitsPerSample == AudioConstants.BitsPerSample &&
                         format.WaveFormat == WaveFormatType.Pcm &&
                         format.Channels == 1 &&
                         format.SamplesPerSecond == sampleRate);
    captureSource.AudioCaptureDevice.AudioFrameSize = AudioFormat.Default.MillisecondsPerFrame; // 20 milliseconds

    audioSink = new TestAudioSinkAdapter(captureSource, new NullAudioController());
    audioSink.RawFrameAvailable += audioSink_RawFrameAvailable;
    audioSink.ProcessedFrameAvailable += audioSink_FrameArrived;

    ClientLogger.Debug("Checking device access.");
    if (CaptureDeviceConfiguration.AllowedDeviceAccess || CaptureDeviceConfiguration.RequestDeviceAccess())
    {
        savedFramesForDebug = new List<byte[]>();
        captureSource.Start();
        ClientLogger.Debug("CaptureSource started.");
    }
}
private void InitializeCaptureSource()
{
    if (_captureSource == null)
    {
        // Setup the capture source (for recording audio).
        _captureSource = new CaptureSource();
        _captureSource.AudioCaptureDevice = CaptureDeviceConfiguration.GetDefaultAudioCaptureDevice();
        if (_captureSource.AudioCaptureDevice != null)
        {
            MediaDeviceConfig.SelectBestAudioFormat(_captureSource.AudioCaptureDevice);
            if (_captureSource.AudioCaptureDevice.DesiredFormat != null)
            {
                _captureSource.AudioCaptureDevice.AudioFrameSize = AudioFormat.Default.MillisecondsPerFrame; // 20 milliseconds
                _audioSink = new AudioSinkAdapter(_captureSource, null, MediaConfig.Default, new TestMediaEnvironment(), AudioFormat.Default);
                _recorder = new RecorderBase(_captureSource, _audioSink, speakersAudioVisualizer);
                chkSynchronizeRecording.DataContext = _audioSink;
                ClientLogger.Debug("CaptureSource initialized.");
            }
            else
            {
                ClientLogger.Debug("No suitable audio format was found.");
            }
            panelMicrophone.DataContext = _captureSource;
        }
        else
        {
            // Do something more here eventually, once we figure out what the user experience should be.
            ClientLogger.Debug("No audio capture device was found.");
        }
    }
}
public RecorderBase(CaptureSource captureSource, AudioSinkAdapter audioSinkAdapter, AudioVisualizer audioVisualizer)
{
    mCaptureSource = captureSource;
    mAudioSinkAdapter = audioSinkAdapter;
    mAudioVisualizer = audioVisualizer;
    VisualizationRate = 1;
}
protected void Dispose(bool disposing)
{
    if (!_disposed)
    {
        if (disposing)
        {
            // Stop any active processes.
            if (_captureTimer != null)
            {
                _captureTimer.Stop();
            }
            Stop();
            if (CaptureSource != null)
            {
                CaptureSource.Stop();
            }

            // Release all references to help with the double-reference problem
            // that keeps stuff from getting garbage collected.
            CaptureSource = null;
            AudioSink = null;
            VideoSink = null;
        }
        _disposed = true;
    }
}
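The matching public Dispose() isn't shown; under the standard dispose pattern it would look like this:

// Standard IDisposable entry point pairing with Dispose(bool) above.
public void Dispose()
{
    Dispose(true);
    GC.SuppressFinalize(this);
}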
public void ChangeCapturedDevices(AudioCaptureDevice audioDevice, VideoCaptureDevice videoDevice)
{
    try
    {
        SelectedAudioDevice = audioDevice;
        SelectedVideoDevice = videoDevice;

        // Remember our initial capture state.
        bool wasCaptured = CaptureSource.State == CaptureState.Started;
        if (wasCaptured)
        {
            CaptureSource.Stop();
        }

        CaptureSource.AudioCaptureDevice = audioDevice;
        CaptureSource.VideoCaptureDevice = videoDevice ?? CaptureSource.VideoCaptureDevice;
        ConfigureAudioCaptureDevice(CaptureSource.AudioCaptureDevice);
        ConfigureVideoCaptureDevice(CaptureSource.VideoCaptureDevice);

        // Restore capture state to how it originally was.
        if (wasCaptured)
        {
            CaptureSelectedInputDevices();
        }
    }
    catch (Exception ex)
    {
        ClientLogger.ErrorException(ex, "Error updating captured devices");
        MessageService.ShowErrorHint(ex.Message);
    }
}
void MainPage_Loaded(object sender, RoutedEventArgs e)
{
    captureSource = new CaptureSource
    {
        VideoCaptureDevice = CaptureDeviceConfiguration.GetDefaultVideoCaptureDevice()
    };
    var videoBrush = new VideoBrush();
    videoBrush.SetSource(captureSource);
    Viewport.Fill = videoBrush;

    markerDetector = new CaptureSourceMarkerDetector();
    var marker = Marker.LoadFromResource("Bola.pat", 64, 64, 80);
    markerDetector.Initialize(captureSource, 1d, 4000d, marker);
    markerDetector.MarkersDetected += (obj, args) =>
    {
        Dispatcher.BeginInvoke(() =>
        {
            var results = args.DetectionResults;
            if (results.HasResults)
            {
                var centerAtOrigin = Matrix3DFactory.CreateTranslation(-Imagem.ActualWidth * 0.5, -Imagem.ActualHeight * 0.5, 0);
                var scale = Matrix3DFactory.CreateScale(0.5, -0.5, 0.5);
                var world = centerAtOrigin * scale * results[0].Transformation;
                var vp = Matrix3DFactory.CreateViewportTransformation(Viewport.ActualWidth, Viewport.ActualHeight);
                var m = Matrix3DFactory.CreateViewportProjection(world, Matrix3D.Identity, markerDetector.Projection, vp);
                Imagem.Projection = new Matrix3DProjection { ProjectionMatrix = m };
            }
        });
    };
}
void InitializeVideoRecorder(Rectangle viewfinderRectangle)
{
    if (captureSource == null)
    {
        // Create the VideoRecorder objects.
        captureSource = new CaptureSource();
        fileSink = new FileSink();
        videoCaptureDevice = CaptureDeviceConfiguration.GetDefaultVideoCaptureDevice();

        // Add event handlers for captureSource.
        captureSource.CaptureFailed += new EventHandler<ExceptionRoutedEventArgs>(OnCaptureFailed);

        // Initialize the camera if it exists on the device.
        if (videoCaptureDevice != null)
        {
            // Create the VideoBrush for the viewfinder.
            videoRecorderBrush = new VideoBrush();
            videoRecorderBrush.SetSource(captureSource);

            // Display the viewfinder image on the rectangle.
            viewfinderRectangle.Fill = videoRecorderBrush;

            // Start video capture and display it on the viewfinder.
            captureSource.Start();
        }
        else
        {
            // A camera is not supported on this device.
        }
    }
}
private void cmdStartRecord_Click(object sender, RoutedEventArgs e)
{
    if (CaptureDeviceConfiguration.AllowedDeviceAccess || CaptureDeviceConfiguration.RequestDeviceAccess())
    {
        if (audioSink == null)
        {
            capture = new CaptureSource();
            capture.AudioCaptureDevice = CaptureDeviceConfiguration.GetDefaultAudioCaptureDevice();
            audioSink = new MemoryStreamAudioSink();
            audioSink.CaptureSource = capture;
        }
        else
        {
            audioSink.CaptureSource.Stop();
        }
        audioSink.CaptureSource.Start();
        cmdStartRecord.IsEnabled = false;

        // Add a delay to make sure the recording is initialized.
        // (Otherwise, a user may cause an error by attempting to stop it immediately.)
        System.Threading.Thread.Sleep(TimeSpan.FromSeconds(0.5));
        cmdStopRecord.IsEnabled = true;
        lblStatus.Text = "Now recording ...";
    }
}
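The matching stop handler isn't shown. A minimal sketch using only the members that appear above:

// Hypothetical counterpart: stop capturing and flip the button states back.
private void cmdStopRecord_Click(object sender, RoutedEventArgs e)
{
    audioSink.CaptureSource.Stop();
    cmdStartRecord.IsEnabled = true;
    cmdStopRecord.IsEnabled = false;
    lblStatus.Text = "Recording stopped.";
}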
public void start()
{
    if (captureSource == null)
    {
        // Create the VideoRecorder objects.
        captureSource = new CaptureSource();
        fileSink = new FileSink();
        videoCaptureDevice = CaptureDeviceConfiguration.GetDefaultVideoCaptureDevice();

        // Add event handlers for captureSource.
        captureSource.CaptureFailed += new EventHandler<ExceptionRoutedEventArgs>(OnCaptureFailed);

        // Initialize the camera if it exists on the device.
        if (videoCaptureDevice != null)
        {
            // Create the VideoBrush for the viewfinder, rotated 90 degrees so the
            // landscape camera feed displays upright in the portrait viewfinder.
            videoRecorderBrush = new VideoBrush();
            videoRecorderBrush.SetSource(captureSource);
            videoRecorderBrush.RelativeTransform = new CompositeTransform() { CenterX = 0.5, CenterY = 0.5, Rotation = 90 };
            viewfinderRectangle.Fill = videoRecorderBrush;

            // Start video capture and display it on the viewfinder.
            captureSource.Start();
            System.Diagnostics.Debug.WriteLine("Started");
            _isRunning = true;
        }
    }
}
private static void CaptureSourceChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
    CaptureSource cs = (CaptureSource)d;
    MessageBox.Show("test"); // presumably a debug leftover
    cs.VideoCaptureDevice = CaptureDeviceConfiguration.GetDefaultVideoCaptureDevice();
}
public void InitializeVideoRecorder()
{
    if (captureSource == null)
    {
        // Create the VideoRecorder objects.
        captureSource = new CaptureSource();
        fileSink = new FileSink();
        videoCaptureDevice = CaptureDeviceConfiguration.GetDefaultVideoCaptureDevice();
        captureSource.CaptureImageCompleted += captureSource_CaptureImageCompleted;

        // Add event handlers for captureSource.
        captureSource.CaptureFailed += new EventHandler<ExceptionRoutedEventArgs>(OnCaptureFailed);

        // Initialize the camera if it exists on the device.
        if (videoCaptureDevice != null)
        {
            // Create the VideoBrush for the viewfinder.
            videoRecorderBrush = new VideoBrush();
            videoRecorderBrush.SetSource(captureSource);

            // Display the viewfinder image on the rectangle.
            viewfinderRectangle.Fill = videoRecorderBrush;

            // Start video capture and display it on the viewfinder.
            captureSource.Start();

            // Set the button state and the message.
            UpdateUI(ButtonState.Initialized, "Tap record to start recording...");
        }
        else
        {
            // Disable buttons when the camera is not supported by the device.
            UpdateUI(ButtonState.CameraNotSupported, "A camera is not supported on this device.");
        }
    }
}
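InitializeVideoRecorder creates a FileSink but never attaches it. A minimal sketch of the usual Windows Phone recording step, assuming the sink may only be (dis)connected while the CaptureSource is stopped; the file name and the ButtonState.Recording value are assumptions:

// Hypothetical recording start: reconnect the FileSink while stopped,
// point it at isolated storage, then resume capturing.
private void StartVideoRecording()
{
    if (captureSource.VideoCaptureDevice != null && captureSource.State == CaptureState.Started)
    {
        captureSource.Stop();
        fileSink.CaptureSource = captureSource;
        fileSink.IsolatedStorageFileName = "CameraMovie.mp4";
        captureSource.Start();
        UpdateUI(ButtonState.Recording, "Recording...");
    }
}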
public override void CreateFilteredMat()
{
    FilteredMat = CaptureSource.Clone();
    ImageFilterHelper.whiteToDarkPixel(FilteredMat, 150);
    ImageFilterHelper.killDarkPixel(FilteredMat, 40);
    ImageFilterHelper.saturation(FilteredMat, 0, 255, 1);
}
private void InitializeCaptureSource()
{
    if (captureSource == null)
    {
        mediaElement = new MediaElement();
        audioStreamSource = new TestAudioStreamSource(this);
        mediaElement.SetSource(audioStreamSource);

        // Set the audio properties.
        captureSource = new CaptureSource();
        captureSource.AudioCaptureDevice = CaptureDeviceConfiguration.GetDefaultAudioCaptureDevice();
        if (captureSource.AudioCaptureDevice != null)
        {
            MediaDeviceConfig.SelectBestAudioFormat(captureSource.AudioCaptureDevice);
            if (captureSource.AudioCaptureDevice.DesiredFormat != null)
            {
                captureSource.AudioCaptureDevice.AudioFrameSize = AudioFormat.Default.MillisecondsPerFrame; // 20 milliseconds
                audioSink = new TestAudioSinkAdapter(captureSource);
                audioSink.ProcessedFrameAvailable += audioSink_FrameArrived;
                ClientLogger.Debug("CaptureSource initialized.");
            }
            else
            {
                ClientLogger.Debug("No suitable audio format was found.");
            }
        }
        else
        {
            // Do something more here eventually, once we figure out what the user experience should be.
            ClientLogger.Debug("No audio capture device was found.");
        }
    }
}
public void StopSendingAudioToRoom()
{
    _captureSource.Stop();
    _captureSource = null;
    _mediaController.Dispose();
    _mediaController = null;
    _audioSinkAdapter = null;
}
public TestMultipleDestinationAudioSinkAdapter(
    CaptureSource captureSource,
    SourceMediaController mediaController,
    Dictionary<Guid, DestinationMediaController> mediaControllers,
    MediaConfig mediaConfig)
    : base(captureSource, mediaController, mediaConfig, new TestMediaEnvironment(), AudioFormat.Default)
{
    this.mediaControllers = mediaControllers;
}
public AudioSinkAdapter(CaptureSource captureSource, IAudioController audioController, MediaConfig mediaConfig, IMediaEnvironment mediaEnvironment, AudioFormat playedAudioFormat)
{
    ClientLogger.Debug(GetType().Name + " created.");
    CaptureSource = captureSource;
    AudioController = audioController;
    _mediaConfig = mediaConfig;
    _mediaEnvironment = mediaEnvironment;
    _playedAudioFormat = playedAudioFormat;
    _logger = new AudioSinkAdapterLogger();
}
private void stop_Click(object sender, EventArgs e)
{
    if (videoRecorder != null && videoRecorder.State == CaptureState.Started)
    {
        videoRecorder.Stop();
        videoRecorder = null;
        fileWriter = null;
        videoContainer.Fill = new SolidColorBrush(Colors.Gray);
    }
}
public MultipleControllerAudioSinkAdapter(MediaConfig mediaConfig, CaptureSource captureSource, int frequency)
{
    ClientLogger.Debug("MultipleControllerAudioSinkAdapter created");
    this.mediaConfig = mediaConfig;
    AudioControllers = new List<IAudioController>();
    AudioContexts = new List<AudioContext>();
    CaptureSource = captureSource;
    RawAudioFormat = new AudioFormat(CaptureSource.AudioCaptureDevice.DesiredFormat.SamplesPerSecond);
    oscillator = new Oscillator();
    oscillator.Frequency = frequency;
}