// Webcam snapshot is ready: hand the frame straight to the AR marker detector.
private void CaptureSourceCaptureImageCompleted(object sender, CaptureImageCompletedEventArgs e)
{
    // Perform AR marker detection on the captured frame.
    DetectMarkers(e.Result);
}
/// <summary>
/// Persists the captured webcam frame as a JPEG thumbnail in isolated storage,
/// replacing any previous thumbnail stored under the same name.
/// </summary>
/// <param name="sender">The capture source raising the event.</param>
/// <param name="e">Carries the captured <c>WriteableBitmap</c>.</param>
private void captureSource_CaptureImageCompleted(object sender, CaptureImageCompletedEventArgs e)
{
    using (IsolatedStorageFile isoStore = IsolatedStorageFile.GetUserStoreForApplication())
    {
        System.Windows.Media.Imaging.WriteableBitmap wb = e.Result;

        // Overwrite semantics: remove the stale thumbnail before creating a new one.
        if (isoStore.FileExists(thumbnailFileName))
            isoStore.DeleteFile(thumbnailFileName);

        // FIX: the stream was previously closed with an explicit Close(), so an
        // exception inside SaveJpeg leaked the isolated-storage file handle.
        // The using block guarantees disposal on every path.
        using (IsolatedStorageFileStream file = isoStore.CreateFile(thumbnailFileName))
        {
            // Full-size frame, orientation 0, JPEG quality 85.
            System.Windows.Media.Imaging.Extensions.SaveJpeg(wb, file, wb.PixelWidth, wb.PixelHeight, 0, 85);
        }
    }
}
// Scans the captured webcam frame for a QR code. On success the result is
// listed, a sound is played, and the frame is shown; on failure another
// capture is requested while the source is still running. Finally the
// BarcodeRead event is raised for any attached (JavaScript) listener.
void captureSource_CaptureImageCompleted(object sender, CaptureImageCompletedEventArgs e)
{
    com.google.zxing.qrcode.QRCodeReader qrRead = new com.google.zxing.qrcode.QRCodeReader();

    // Platform-neutral luminance view over the captured frame's pixels.
    RGBLuminanceSource luminance = new RGBLuminanceSource(e.Result, e.Result.PixelWidth, e.Result.PixelHeight);

    // Binarize (black and white) so the ZXing reader can decode the image.
    com.google.zxing.common.HybridBinarizer binarizer = new com.google.zxing.common.HybridBinarizer(luminance);
    com.google.zxing.BinaryBitmap binBitmap = new com.google.zxing.BinaryBitmap(binarizer);

    com.google.zxing.Result results = default(com.google.zxing.Result);
    try
    {
        // Barcode found: record it, replay the shutter sound, show the frame.
        results = qrRead.decode(binBitmap);
        capturedBarcodes.Items.Insert(0, new ScannedImage(results.Text, e.Result));
        capturedBarcodes.SelectedIndex = 0;
        mediaElement1.Stop();
        mediaElement1.Play();
        ImageBrush brush = new ImageBrush();
        brush.ImageSource = e.Result;
        capturedImage.Fill = brush;
    }
    catch (com.google.zxing.ReaderException)
    {
        // No barcode in this frame: keep scanning while the source is live.
        if (captureSource.State == CaptureState.Started)
        {
            captureSource.CaptureImageAsync();
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        StartButton_Click(this, new RoutedEventArgs());
    }

    // FIX: previously this block ran unconditionally, so every frame WITHOUT
    // a barcode dereferenced the null `results` (results.Text) and the
    // resulting NullReferenceException was silently swallowed by the catch.
    // Only notify listeners when a barcode was actually decoded.
    if (results != null)
    {
        try
        {
            BarcodeRead(this, new CustomEventHandler() { Barcode = results.Text });
        }
        catch (Exception)
        {
            // No JavaScript event attached on the host page — best effort.
        }
    }
}
// Appends the freshly captured frame to the image strip.
void capture_CaptureImageCompleted(object sender, CaptureImageCompletedEventArgs e)
{
    stackImages.Children.Add(new Image { Source = e.Result });
}
// A snapshot was taken: stash it for the next page, then navigate there.
private void CaptureSourceOnCaptureImageCompleted(object sender, CaptureImageCompletedEventArgs args)
{
    // FIX: store the bitmap BEFORE navigating. Even though Navigate is
    // typically queued, storing first removes any ordering dependency so
    // QuestionPage can never load and read StateBag.CurrentImage while it
    // still holds the previous (or null) value.
    StateBag.CurrentImage = args.Result;
    NavigationService.Navigate(new Uri("/QuestionPage.xaml", UriKind.Relative));
}
// Records the new webcam frame and makes it the active selection.
private void ImageCapturedFromWebCam(object sender, CaptureImageCompletedEventArgs e)
{
    var frame = e.Result;
    Snapshots.Add(frame);
    SelectedSnapshot = frame;

    // The command's executability may depend on the updated snapshot state.
    TakePicture.TriggerCanExecuteChanged();
}
// Paints the captured WriteableBitmap into the preview image brush.
private void CaptureSource_CaptureImageCompleted(object sender, CaptureImageCompletedEventArgs e)
{
    var snapshot = e.Result;
    capturedImage.ImageSource = snapshot;
}
// Snapshot complete: paint the frame behind the overlays, hide the mute icon
// while the composite layout is rendered to a bitmap, then restore it.
private void _capture_CaptureImageCompleted(object sender, CaptureImageCompletedEventArgs e)
{
    // FIX: removed the unused local `PageOrientation temp = Orientation;` —
    // its value was never read.
    this.rectVideo.Fill = new ImageBrush { ImageSource = e.Result };

    if (muteImage.Visibility == System.Windows.Visibility.Visible)
    {
        // Hide the mute icon so it is not baked into the captured layout.
        muteImage.Visibility = System.Windows.Visibility.Collapsed;
    }

    // Renders the layout (image-brush capture plus the images laid on top).
    LayOutToBitMapImage();

    if (!_isSoundLive)
    {
        // Sound is muted: bring the mute indicator back for the live view.
        muteImage.Visibility = System.Windows.Visibility.Visible;
    }
}
// Downscales the captured frame to 320x240 and ships its pixels to the
// recognition client — either for classification or for training, depending
// on the current application mode.
private void captureCompleted(object sender, CaptureImageCompletedEventArgs e)
{
    var shrunk = e.Result.Resize(320, 240, WriteableBitmapExtensions.Interpolation.Bilinear);
    var pixels = new ObservableCollection<int>(shrunk.Pixels);

    if (_appMode == AppMode.RecognitionOcto)
    {
        _client.RecognizeFromOctoAsync(pixels, shrunk.PixelWidth);
    }
    else if (_appMode == AppMode.TrainingOcto)
    {
        _client.AddToOctoSetAsync(pixels, shrunk.PixelWidth, TextBoxLabel.Text);
    }
}
// Displays the captured frame in the screenshot image control.
void _cSource_CaptureImageCompleted(object sender, CaptureImageCompletedEventArgs e)
{
    _screnshotImage.Source = e.Result;
}
// Callback for _src.CaptureImageAsync(): saves the captured image off the
// UI thread via a BackgroundWorker and records when the save finished.
void _src_srcImageCompleted(object sender, CaptureImageCompletedEventArgs e)
{
    // Convert the raw captured bitmap into the ImageTools format up front
    // via the ToImage extension method.
    ImageTools.Image converted = e.Result.ToImage();

    var worker = new BackgroundWorker();

    // The closure carries the image plus the current file-name/number fields.
    // NOTE(review): nextImgName/nextImgNum are read when the worker RUNS, not
    // when it is queued — confirm they cannot change in between.
    worker.DoWork += (s, ev) => saveImage(converted, nextImgName, nextImgNum);

    // When the worker completes, stamp the time of the last image taken.
    worker.RunWorkerCompleted += (s, r) => lastImageTaken = DateTime.Now;

    worker.RunWorkerAsync();
}
// Marshals the captured frame onto the UI thread, stores it as the current
// frame, then stops and releases the capture source.
void source_CaptureImageCompleted(object sender, CaptureImageCompletedEventArgs e)
{
    var frame = e.Result;
    Dispatcher.BeginInvoke(() =>
    {
        SetCurrentFrame(frame, "webcam.jpg");

        // Done with the webcam: stop and drop the source reference.
        m_source.Stop();
        m_source = null;

        FinishWebcam();
    });
}
// Webcam frame ready: optionally use it as the sun's dynamic reflection
// texture (when the "glass" option is checked), then run AR marker detection.
private void CaptureSourceCaptureImageCompleted(object sender, CaptureImageCompletedEventArgs e)
{
    var writeableBitmap = e.Result;

    if (Sun != null)
    {
        // FIX: IsChecked is a nullable bool; ".Value" throws if the checkbox
        // ever reports the indeterminate (null) state. "== true" is null-safe
        // and behaves identically for true/false.
        if (ChkGlass.IsChecked == true)
        {
            // Build a texture from the webcam snapshot and install it as the
            // reflection map.
            var tex = new Texture2D(GraphicsDeviceManager.Current.GraphicsDevice,
                                    writeableBitmap.PixelWidth,
                                    writeableBitmap.PixelHeight,
                                    false,
                                    SurfaceFormat.Color);
            writeableBitmap.CopyTo(tex);
            Sun.ReflectionTexture = tex;
        }
        else
        {
            Sun.ReflectionTexture = null;
        }
    }

    // Perform AR marker detection on the raw frame.
    DetectMarkers(writeableBitmap);
}
// Uses each webcam frame both as a 512x512 reflection map for the teapot
// material and as input to AR marker detection.
private void captureSource_CaptureImageCompleted(object sender, CaptureImageCompletedEventArgs e)
{
    var frame = e.Result;

    if (TeapotMaterial != null)
    {
        // Fixed-size reflection map; nearest-neighbor keeps the resize cheap.
        var reflection = frame.Resize(512, 512, WriteableBitmapExtensions.Interpolation.NearestNeighbor);
        TeapotMaterial.ReflectionMap = new WriteableBitmapMap(reflection);
    }

    // Perform AR marker detection on the original frame.
    DetectMarkers(frame);
}
/// <summary>
/// On image capture complete: persists the frame as a JPEG and either uses it
/// as the in-progress video's preview or hands it to the picture-save pipeline.
/// </summary>
/// <param name="sender">The capture source raising the event.</param>
/// <param name="e">Carries the captured bitmap or a capture error.</param>
private void triggerCaptureImage(object sender, CaptureImageCompletedEventArgs e)
{
    // Nothing to persist if the capture itself failed.
    // NOTE(review): the error is silently dropped — consider logging it.
    if (e.Error != null)
    {
        return;
    }

    var name = this.getFileName("jpg");

    Picture picture = new Picture();
    picture.Path = name;
    picture.Time = DateTime.Now;
    // LandscapeRight frames are flagged so they can be rotated on display.
    picture.Rotated = this.Orientation == PageOrientation.LandscapeRight;
    picture.Backup = new BackupInfo();

    var wb = e.Result;

    // FIX: the stream was previously closed with an explicit Close(), so an
    // exception inside SaveJpeg leaked the storage stream. The using block
    // disposes it on every path.
    using (var stream = Storage.CreateFile(picture.Path))
    {
        // Full-size frame, orientation 0, JPEG quality 60.
        wb.SaveJpeg(stream, wb.PixelWidth, wb.PixelHeight, 0, 60);
    }

    if (this.IsRecording)
    {
        // While recording video, the snapshot becomes the clip's preview image.
        this.video.Preview = name;
    }
    else
    {
        this.triggerPictureSave(picture);
    }
}
// Re-publishes the captured frame through this component's ImageCaptured event.
void _captureSource_CaptureImageCompleted(object sender, CaptureImageCompletedEventArgs e)
{
    // Snapshot the delegate first so an unsubscribe racing the null check
    // cannot cause a NullReferenceException.
    var handler = ImageCaptured;
    if (handler != null)
    {
        var payload = new ImageCapturedArgs { Bitmap = e.Result };
        handler(this, payload);
    }
}
// Wraps the captured frame in a MediaItem (display bitmap plus encoded JPEG
// bytes) and appends it to the local media list.
void TakePictureCompleted(object sender, CaptureImageCompletedEventArgs e)
{
    var frame = e.Result;
    var item = new MediaItem()
    {
        Name = GetNextFileName(false),
        DisplayImage = frame,
        Data = MediaItem.GetJpg(frame)
    };
    this.LocalMediaItems.Add(item);
}
// Writes the captured frame into the thumbnail stream as a 75x75 JPEG
// (orientation 0, quality 100).
// NOTE(review): thumbnailStream is owned elsewhere — confirm it is disposed
// by whoever created it.
void mySource_CaptureImageCompleted(object sender, CaptureImageCompletedEventArgs e)
{
    var frame = e.Result;
    frame.SaveJpeg(thumbnailStream, 75, 75, 0, 100);
}
// Forwards the capture-completed notification to TakePictureCompleted
// subscribers, passing the original event args through unchanged.
void CaptureImageCompleted(object sender, CaptureImageCompletedEventArgs e)
{
    var handler = this.TakePictureCompleted;
    if (handler != null)
    {
        handler(this, e);
    }
}