// Timer tick for the image-capture loop.
// When the number of buffered frames reaches _countImage + 1 (i.e. the frame for the
// current slot has arrived), the newest frame is normalized to 256x256 and processed;
// then the next capture cycle is scheduled.
void _timerImg_Tick(object sender, EventArgs e)
{
    if (_images.Count == _countImage + 1)
    {
        //VIDEO----------------------------------------------------------------------------------------------------------------------
        // Stop the timer while the (potentially slow) conversion + run() executes,
        // so ticks do not re-enter this handler mid-processing.
        _timerImg.Stop();
        //WriteableBitmap img = new WriteableBitmap(objManip.convertBitmapToGray(_images[_countImage], strColor,out histTotal));
        // Resize the latest frame to 256x256; strColor selects the channel mode:
        // "RGB", "R", "G", "B" or " " (per the original inline note).
        img = new WriteableBitmap(objManip.convertImgAnySizeTo256x256(_images[_countImage], 255, strColor)); //"RGB" "R" "G" "B" " "
        run();
    }
    //START NEW CICKLE----------------------------------------------------------------------------------------------------------------------
    // Advance to the next slot; after a full pack of _intPackImage frames,
    // reset the counter and drop the buffered frames.
    _countImage += 1;
    if (_countImage == _intPackImage)
    {
        _countImage = 0;
        _images.Clear();
    }
    // Only request another frame while the capture source is actually running.
    if (_captureSource.State == System.Windows.Media.CaptureState.Started)
    {
        _captureSource.CaptureImageAsync(); // Start Image Async...
        _timerImg.Start();                  // Start Timer
    }
    //START NEW CICKLE----------------------------------------------------------------------------------------------------------------------
}
// Hooks the per-frame update loop onto CompositionTarget.Rendering.
// Each rendered frame either requests a webcam snapshot for AR marker detection
// (when doAR is true) or animates a fallback rotation; an FPS readout is
// refreshed every 30 frames.
// NOTE(review): each call to this method adds another Rendering handler —
// presumably it is called exactly once during initialization; verify callers.
private void InitializeUpdateMethod()
{
    doAR = false;
    // NOTE(review): DateTime.Now is used for FPS timing; a Stopwatch would be
    // immune to clock adjustments — confirm before changing.
    DateTime startTime = DateTime.Now;
    int framesCount = 0;
    CompositionTarget.Rendering += (s, ea) =>
    {
        if (doAR)
        {
            // Take webcam snapshot.
            // CaptureImageAsync calls the event CaptureImageCompleted += (s, e) => DetectMarkers(e.Result);
            captureSource.CaptureImageAsync();
        }
        else
        {
            // No AR tracking: spin the scene around Z at a fixed offset from the camera.
            var trans = Matrix.CreateTranslation(new Vector(0, 0, -400));
            timedRotation += 2;
            trans = Matrix.CreateRotationZ(timedRotation) * trans;
            ApplyFinalTransformation(trans);
        }
        // Update FPS text and reset counter every 30 frames
        if (++framesCount >= 30)
        {
            TimeSpan elapsed = DateTime.Now - startTime;
            Txt.Text = String.Format("{0:###0.00} fps", framesCount / elapsed.TotalSeconds);
            startTime = DateTime.Now;
            framesCount = 0;
        }
    };
}
/// <summary>
/// Handler for the snapshot button: requests an asynchronous still capture
/// from the webcam. The frame is delivered through the CaptureSource's
/// CaptureImageCompleted event.
/// </summary>
private void SnapshotButton_Click(object sender, RoutedEventArgs e)
{
    // Capture only when a camera is attached and actively streaming;
    // calling CaptureImageAsync in any other state is invalid.
    if (myCaptureSource.VideoCaptureDevice != null && myCaptureSource.State == CaptureState.Started)
    {
        myCaptureSource.CaptureImageAsync();
    }
    // Removed: a commented-out WebClient fallback explicitly marked
    // "TESTING CODE - REMOVE ON DEPLOY TO PRODUCTION". Dead commented code
    // should not ship; see source history if it needs to be revived.
}
/// <summary>
/// Handler for the Take button: kicks off an asynchronous webcam snapshot.
/// The captured frame arrives via the CaptureSource's completion event.
/// </summary>
private void TakeButton_Click(object sender, RoutedEventArgs e)
{
    // Guard clause: snapshots are only valid on a running capture source.
    if (_captureSource == null || _captureSource.State != CaptureState.Started)
    {
        return;
    }

    _captureSource.CaptureImageAsync();
}
// Set recording state: start recording.
// Rewires the running capture source into _fileSink (the capture source must be
// stopped before a FileSink can be attached), then restarts it on the UI thread
// and begins the recording timer. Also resets the upload bookkeeping fields.
private void StartVideoRecording()
{
    // Fresh upload session: new file id, empty part list, zeroed progress.
    _fileId = TLLong.Random();
    _isPartReady = true;
    _uploadingLength = 0;
    _uploadableParts.Clear();
    try
    {
        // Connect _fileSink to _captureSource.
        // The source must be stopped first; the viewfinder is blacked out
        // while the sink is being attached.
        if (_captureSource.VideoCaptureDevice != null && _captureSource.State == CaptureState.Started)
        {
            _captureSource.Stop();
            Viewfinder.Fill = new SolidColorBrush(Colors.Black);
            // Connect the input and output of _fileSink.
            _fileSink.CaptureSource = _captureSource;
            _fileSink.IsolatedStorageFileName = _videoFileName;
        }
        // NOTE(review): the 100 ms sleep appears to give the Stop() above time to
        // settle before restarting on the dispatcher — confirm; a state-change
        // callback would be more robust than a fixed delay.
        ThreadPool.QueueUserWorkItem(state =>
        {
            Thread.Sleep(100);
            Deployment.Current.Dispatcher.BeginInvoke(() =>
            {
                try
                {
                    // Begin recording.
                    if (_captureSource.VideoCaptureDevice != null && _captureSource.State == CaptureState.Stopped)
                    {
                        Viewfinder.Fill = null;
                        _captureSource.Start();
                        Viewfinder.Fill = ViewfinderBrush;
                        // Reset the on-screen duration counter and start timing.
                        _timerCounter = 0;
                        _startTime = DateTime.Now;
                        _timer.Start();
                    }
                    // Grab a still frame (presumably used as the video thumbnail —
                    // TODO confirm against the CaptureImageCompleted handler).
                    _captureSource.CaptureImageAsync();
                    // Set the button states and the message.
                    UpdateUI(ButtonState.Recording, AppResources.Recording);
                }
                catch (Exception e)
                {
                    DebugText.Text = AppResources.Error.ToUpperInvariant() + ": " + e.Message;
                }
            });
        });
    }
    // If recording fails, display an error.
    catch (Exception e)
    {
        DebugText.Text = AppResources.Error.ToUpperInvariant() + ": " + e.Message;
    }
}
/// <summary>
/// Handler for the snapshot button: wires the completion callback and
/// requests an asynchronous still capture from the running capture source.
/// </summary>
private void btSnapshot_Click(object sender, RoutedEventArgs e)
{
    if (capture.State == CaptureState.Started)
    {
        // Fix: the original subscribed a new handler on every click, so the
        // Nth snapshot invoked capture_CaptureImageCompleted N times.
        // Removing first makes the subscription idempotent (-= of a handler
        // that is not attached is a no-op).
        capture.CaptureImageCompleted -= capture_CaptureImageCompleted;
        capture.CaptureImageCompleted += capture_CaptureImageCompleted;
        capture.CaptureImageAsync();
    }
}
/// <summary>
/// Handler for the snapshot command: wires the completion callback and
/// requests an asynchronous still capture from the running capture source.
/// </summary>
private void cmdSnapshot_Click(object sender, RoutedEventArgs e)
{
    if (capture.State == CaptureState.Started)
    {
        // Fix: the original added the handler on every click, stacking
        // duplicate subscriptions so one capture fired the callback once per
        // prior click. Unsubscribing first is a safe no-op when not attached.
        capture.CaptureImageCompleted -= capture_CaptureImageCompleted;
        capture.CaptureImageCompleted += capture_CaptureImageCompleted;
        capture.CaptureImageAsync();
    }
}
/// <summary>
/// Handler for the capture button: asks the webcam for an asynchronous
/// snapshot. The frame is delivered via CaptureImageCompleted.
/// </summary>
private void CaptureButton_Click(object sender, RoutedEventArgs e)
{
    // A snapshot is only possible while a camera device exists and is streaming.
    bool cameraReady = captureSource.VideoCaptureDevice != null
                       && captureSource.State == CaptureState.Started;
    if (cameraReady)
    {
        captureSource.CaptureImageAsync();
    }
}
/// <summary>
/// Switches the app into Octo recognition mode and captures a frame to send
/// to the recognition server. Shows the busy indicator while the async
/// capture/upload is in flight; the completion handler is expected to clear it.
/// </summary>
private void ButtonRecognizeOcto_Click(object sender, RoutedEventArgs e)
{
    try
    {
        busyIndicator.BusyContent = "Imaged captured & send to server";
        busyIndicator.IsBusy = true;
        _appMode = AppMode.RecognitionOcto;
        _captureSource.CaptureImageAsync();
    }
    // Fix: the exception variables were declared but never used (CS0168
    // warnings); catch the types without binding a name.
    catch (InvalidOperationException)
    {
        // CaptureImageAsync throws this when the capture source is not started.
        TextBoxInfo.Text = "Start the web cam first";
        busyIndicator.IsBusy = false;
    }
    catch (Exception)
    {
        TextBoxInfo.Text = "Undetermined exception";
        busyIndicator.IsBusy = false;
    }
}
/// <summary>
/// Handler for the capture button: requests webcam access, starts streaming,
/// reveals the AR scene, and begins capturing a frame on every render tick.
/// </summary>
private void BtnCaptureClick(object sender, RoutedEventArgs e)
{
    // Fix: the original subscribed a new Rendering lambda on every click, so
    // repeated clicks piled up handlers and issued duplicate captures per
    // frame. Bail out if the capture source is already running.
    if (captureSource.State == CaptureState.Started)
    {
        return;
    }

    // Request webcam access and start the capturing
    if (CaptureDeviceConfiguration.RequestDeviceAccess())
    {
        captureSource.Start();
        Earth.Scale = Sun.Scale = 50;
        Sun.IsVisible = true;
        ArCtrls.Visibility = Visibility.Visible;
        // Capture periodically (once per rendered frame)
        CompositionTarget.Rendering += (s, e2) => captureSource.CaptureImageAsync();
    }
}
// Start the video recording.
// Disables the record controls to avoid duplicate taps, grabs a still frame
// (presumably for a thumbnail — TODO confirm against the capture-completed
// handler), starts the progress timer, then begins the actual recording.
private void StartRecording_Click(object sender, EventArgs e)
{
    try
    {
        // Avoid duplicate taps.
        recordIconButton.IsEnabled = false;
        addOrRemoveAppBarButton(recordIconButton, false);
        addOrRemoveAppBarButton(sendIconButton, false);
        captureSource.CaptureImageAsync();
        progressTimer.Start();
        StartVideoRecording();
    }
    catch (Exception ex)
    {
        // Best-effort: failures are logged for diagnostics rather than surfaced.
        Debug.WriteLine("Record Video :: StartRecording_Click , Exception:" + ex.StackTrace);
    }
}
/// <summary>
/// Toggles the webcam. When stopped (blnStart == false) it acquires the
/// default video/audio devices, starts capturing, begins the image timer and
/// flips the button to "WebCam STOP"; when running it stops the capture and
/// flips the button back to "WebCam START".
/// </summary>
private void btnWebCamStart_Click(object sender, RoutedEventArgs e)
{
    // Fix: was the non-short-circuit `&` operator; `&&` is the intended
    // logical-AND for this guard.
    if (_captureSource != null && blnStart == false)
    {
        try
        {
            _captureSource.Stop(); // stop whatever device may be capturing
            _captureSource.VideoCaptureDevice = CaptureDeviceConfiguration.GetDefaultVideoCaptureDevice();
            _captureSource.AudioCaptureDevice = CaptureDeviceConfiguration.GetDefaultAudioCaptureDevice();
        }
        catch
        {
            // Device acquisition failed; mark the source unusable.
            _captureSource = null;
        }
        try
        {
            // Fix: null-check before use — the catch above may have nulled
            // _captureSource, and the original relied on a swallowed
            // NullReferenceException here.
            if (_captureSource != null
                && (CaptureDeviceConfiguration.AllowedDeviceAccess || CaptureDeviceConfiguration.RequestDeviceAccess()))
            {
                _captureSource.Start();
                if (_captureSource.State == System.Windows.Media.CaptureState.Started)
                {
                    // capture the current frame and add it to our observable collection
                    _images.Clear();
                    _captureSource.CaptureImageAsync();
                    _timerImg.Start();
                    blnStart = true;
                    btnWebCamStart.Content = "WebCam STOP";
                }
                return;
            }
        }
        catch
        {
            // Best-effort start: ignore failures, leaving the camera stopped.
        }
    }
    else if (_captureSource != null)
    {
        // Fix: guard against a null source — the original branch threw an
        // unhandled NullReferenceException when _captureSource was null.
        _captureSource.Stop();
        blnStart = false;
        btnWebCamStart.Content = "WebCam START";
    }
}
/// <summary>
/// Handler for the snapshot button. With a camera attached and streaming it
/// requests an async webcam capture; with no camera at all it falls back to
/// POSTing to the server-side save endpoint instead.
/// </summary>
private void SnapshotButton_Click(object sender, RoutedEventArgs e)
{
    if (myCaptureSource.VideoCaptureDevice == null)
    {
        // No camera available: open an async POST to the fallback endpoint;
        // the image payload is written in webClient_OpenWriteCompleted.
        Uri serviceUri = new Uri("../Camera/_SaveCameraImage", UriKind.Relative);
        var webClient = new WebClient();
        webClient.Headers["Content-Type"] = "application/x-www-form-urlencoded";
        webClient.OpenWriteCompleted += webClient_OpenWriteCompleted;
        webClient.OpenWriteAsync(serviceUri, "POST");
    }
    else if (myCaptureSource.State == CaptureState.Started)
    {
        myCaptureSource.CaptureImageAsync();
    }
}
/// <summary>
/// Loaded handler: wires up the webcam, the viewport video brush, the AR
/// marker detector, and a per-frame loop that either captures a webcam frame
/// (while streaming) or resets the game's world matrix.
/// </summary>
private void UserControlLoaded(object sender, RoutedEventArgs e)
{
    // Initialize the webcam
    captureSource = new CaptureSource { VideoCaptureDevice = CaptureDeviceConfiguration.GetDefaultVideoCaptureDevice() };

    // Desired format is 640 x 480 (good tracking results and performance)
    captureSource.VideoCaptureDevice.DesiredFormat = new VideoFormat(PixelFormatType.Unknown, 640, 480, 60);
    captureSource.CaptureImageCompleted += CaptureSourceCaptureImageCompleted;

    // Fill the Viewport Rectangle with the VideoBrush
    var vidBrush = new VideoBrush();
    vidBrush.SetSource(captureSource);
    Viewport.Fill = vidBrush;

    // Construct the Detector
    arDetector = new BitmapMarkerDetector { Threshold = 200, JitteringThreshold = 1 };

    // Load the marker patterns. It has 16x16 segments and a width of 80 millimeters
    slarMarker = Marker.LoadFromResource("data/Marker_SLAR_16x16segments_80width.pat", 16, 16, 80);

    // Capture or transform periodically
    CompositionTarget.Rendering += (s, e2) =>
    {
        if (captureSource.State == CaptureState.Started)
        {
            captureSource.CaptureImageAsync();
        }
        else
        {
            Game.SetWorldMatrix(Balder.Math.Matrix.Identity);
        }
        // Fix: removed a dead, empty `if (Game.ParticleSystem.Particles != null
        // && Game.ParticleSystem.Particles.Count > 0) { }` block that had no body.
    };
}
// Timer tick for the send pipeline.
// Each tick consumes one captured frame: it is normalized to 256x256, shown in
// the per-slot preview rectangles (rS0..rS3), and buffered. Once a full pack of
// _intPackImage frames is collected, they are combined into one 1024x256 gray
// image, wavelet-transformed (Haar or CDF 9/7, chosen by the rbH radio button),
// compressed, and staged in outByte for sending; finally the next capture is
// requested.
private void _timerSend_Tick(object sender, EventArgs e)
{
    if (_images.Count == 1)
    {
        // _timerSend.Stop();
        // Normalize the captured frame and show it in the "send" preview.
        TransformMethods.TransferTo objT = new TransferTo();
        _imagesSend.Add(objT.convertImgAnySizeTo256x256(_images[0]));
        rSend.Fill = new ImageBrush() { ImageSource = _imagesSend[_countImage] };
        // Mirror the frame into the preview rectangle for its slot (0..3).
        switch (_countImage)
        {
            case 0: rS0.Fill = new ImageBrush() { ImageSource = _imagesSend[_countImage] }; break;
            case 1: rS1.Fill = new ImageBrush() { ImageSource = _imagesSend[_countImage] }; break;
            case 2: rS2.Fill = new ImageBrush() { ImageSource = _imagesSend[_countImage] }; break;
            case 3: rS3.Fill = new ImageBrush() { ImageSource = _imagesSend[_countImage] }; break;
            default:; break;
        }
        _countImage += 1;
        _images.Clear();
    }
    // Wavelet transformers: Haar and CDF 9/7 — the radio button below picks one.
    TransformMethods.WaveLetHaar objH = new WaveLetHaar();
    TransformMethods.WaveLetCDF97 objD = new WaveLetCDF97();
    //START NEW CICKLE----------------------------------------------------------------------------------------------------------------------
    if (_countImage == _intPackImage)
    {
        // A full pack is ready: combine, transform, compress, and time the whole step.
        DateTime dt0 = DateTime.Now;
        TransformMethods.TransferTo objT = new TransferTo();
        imgOut1024x256 = objT.convertGrayImgToGray1024x256(_imagesSend);
        rSendCombine.Fill = new ImageBrush() { ImageSource = imgOut1024x256 };
        //========================================================================
        // Forward wavelet transform in place on the signed-byte image.
        sbyte[,] sbHaar;
        sbHaar = objT.convertGrayImgToSByte256(imgOut1024x256);
        if (rbH.IsChecked == true)
        {
            objH.Haar2DByteForward(ref sbHaar, bLoss, vHaar, hHaar);
        }
        else
        {
            objD.CDF2DForward(ref sbHaar, bLoss, vHaar, hHaar);
        }
        rSendHaar.Fill = new ImageBrush() { ImageSource = objT.convertSByte256ToGrayImg(sbHaar) };
        // Entropy-code the coefficients; report compressed vs raw size.
        TransformMethods.MyArc objA = new MyArc();
        List<byte> outBt = objA.archiveWithTable(sbHaar);
        // NOTE(review): this literal was split across lines in the extracted
        // source; reconstructed as a single string — verify against the original.
        lblSize.Content = outBt.Count.ToString() + " bytes ... (" + (256 * 256 * 4).ToString() + ")";
        outByte = new List<byte>(outBt);
        flagReady = true; // signal the sender that a compressed pack is staged
        DateTime dt1 = DateTime.Now;
        lblDelta.Content = dt1.Subtract(dt0).Milliseconds.ToString() + " ms";
        _countImage = 0;
        _imagesSend.Clear();
    }
    if (_captureSource.State == System.Windows.Media.CaptureState.Started)
    {
        _captureSource.CaptureImageAsync(); // Start Image Async...
        // _timerSend.Start(); // Start Timer
    }
    //START NEW CICKLE----------------------------------------------------------------------------------------------------------------------
}