Example #1
        private void canvas_Loaded(object sender, RoutedEventArgs e)
        {
            texInterop = new TextureGraphInterop();

            // Set window bounds in dips
            texInterop.WindowBounds = new Windows.Foundation.Size(
                (float)canvas.ActualWidth,
                (float)canvas.ActualHeight
                );

            // Set native resolution in pixels
            texInterop.NativeResolution = new Windows.Foundation.Size(
                (float)Math.Floor(canvas.ActualWidth * Application.Current.Host.Content.ScaleFactor / 100.0f + 0.5f),
                (float)Math.Floor(canvas.ActualHeight * Application.Current.Host.Content.ScaleFactor / 100.0f + 0.5f)
                );

            // Set render resolution to the full native resolution
            texInterop.RenderResolution = texInterop.NativeResolution;

            // Hook up the native component to the DrawingSurface
            canvas.SetContentProvider(texInterop.CreateContentProvider());
            canvas.SetManipulationHandler(texInterop);

            var previewSize = new Windows.Foundation.Size(800, 448);

            cam = new Camera(previewSize, CameraSensorLocation.Back);
            im = new ImageProcessing(detector);

            // When we have an input frame, call ImageProcessing::processFrame
            cam.OnFrameReady += im.processFrame;

            // When we have processed a frame, output it to the textureInterop
            im.frameProcessed += texInterop.setTexturePtr;
        }
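The native-resolution math above (dips times ScaleFactor / 100, rounded to the nearest pixel) recurs in several of these samples. A minimal sketch of it as a helper, assuming the WP8 Silverlight ScaleFactor API; the helper name is ours, not from the original:

        // Hypothetical helper illustrating the dips-to-pixels rounding used above.
        private static double DipsToPixels(double dips)
        {
            double scale = Application.Current.Host.Content.ScaleFactor / 100.0;
            return Math.Floor(dips * scale + 0.5); // round to the nearest whole pixel
        }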
Example #2
        async void MainPage_Loaded(object sender, System.Windows.RoutedEventArgs e)
        {
            Size targetMediaElementSize = new Size(640, 480);
            double aspectRatio = 4.0/3.0;

            // 1. Open camera 
            if (m_camera == null)
            {
                var captureRes = PhotoCaptureDevice.GetAvailableCaptureResolutions(CameraSensorLocation.Back);
                Size selectedCaptureRes = captureRes.Where(res => Math.Abs(aspectRatio - res.Width/res.Height ) <= 0.1)
                                                    .OrderBy(res => res.Width)
                                                    .Last();
                m_camera = await PhotoCaptureDevice.OpenAsync(CameraSensorLocation.Back, selectedCaptureRes);
                m_camera.SetProperty(KnownCameraGeneralProperties.EncodeWithOrientation, m_camera.SensorLocation == CameraSensorLocation.Back ? m_camera.SensorRotationInDegrees : -m_camera.SensorRotationInDegrees);
               
                var previewRes = PhotoCaptureDevice.GetAvailablePreviewResolutions(CameraSensorLocation.Back);
                Size selectedPreviewRes = previewRes.Where(res => Math.Abs(aspectRatio - res.Width/res.Height ) <= 0.1) 
                                                    .Where(res => (res.Height >= targetMediaElementSize.Height) && (res.Width >= targetMediaElementSize.Width))
                                                    .OrderBy(res => res.Width)
                                                    .First();
                await m_camera.SetPreviewResolutionAsync(selectedPreviewRes);
                cameraEffect.CaptureDevice = m_camera;
            }

            // Always create a new source, otherwise the MediaElement will not start.
            source = new CameraStreamSource(cameraEffect, targetMediaElementSize);
            MyCameraMediaElement.SetSource(source);

            m_timer = new DispatcherTimer();
            m_timer.Interval = new TimeSpan(0, 0, 0, 1, 0); // Tick every 1s.
            m_timer.Tick += m_timer_Tick;
            m_timer.Start();
        }
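Both the capture and preview resolutions above are chosen by filtering on aspect ratio with a 0.1 tolerance and sorting by width. A minimal sketch of that selection pattern as a reusable helper; SelectResolution is a hypothetical name, it requires System.Linq, and like the original it throws if nothing matches:

        // Hypothetical helper capturing the aspect-ratio selection pattern used above.
        private static Windows.Foundation.Size SelectResolution(
            IEnumerable<Windows.Foundation.Size> candidates, double aspectRatio, bool largest)
        {
            var matching = candidates
                .Where(res => Math.Abs(aspectRatio - res.Width / res.Height) <= 0.1)
                .OrderBy(res => res.Width);

            return largest ? matching.Last() : matching.First();
        }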
Example #3
    private async Task initCameraAsync(CameraSensorLocation sensorLocation) {

      if (cam != null) {
        cam.Dispose();
        cam = null;
      }


      Windows.Foundation.Size res = new Windows.Foundation.Size(640, 480);

      cam = await PhotoCaptureDevice.OpenAsync(sensorLocation, res);
      await cam.SetPreviewResolutionAsync(res);

      viewfinder.SetSource(cam);

      viewfinderTransform.Rotation = sensorLocation == CameraSensorLocation.Back ?
                                       cam.SensorRotationInDegrees : -cam.SensorRotationInDegrees;

      imgFilterTransform.Rotation = sensorLocation == CameraSensorLocation.Back ?
                                       cam.SensorRotationInDegrees : -cam.SensorRotationInDegrees;

      // Prepare for the live black-and-white preview
      bmp = new WriteableBitmap((int)cam.PreviewResolution.Width,  (int)cam.PreviewResolution.Height);
      timer = new DispatcherTimer {
        Interval = TimeSpan.FromMilliseconds(10)
      };
      timer.Tick += timer_Tick;
      timer.Start();

    }
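The snippet wires timer_Tick but does not show it. A minimal sketch of such a handler, copying the preview buffer into the WriteableBitmap; GetPreviewBufferArgb is the real PhotoCaptureDevice API, while the grayscale conversion is an assumption based on the black-and-white preview comment above:

    // Sketch of the tick handler the snippet assumes; the grayscale pass is an assumption.
    void timer_Tick(object sender, EventArgs e) {
      cam.GetPreviewBufferArgb(bmp.Pixels);         // copy the current preview frame

      for (int i = 0; i < bmp.Pixels.Length; i++) { // convert each ARGB pixel to gray
        int p = bmp.Pixels[i];
        int gray = (((p >> 16) & 0xFF) + ((p >> 8) & 0xFF) + (p & 0xFF)) / 3;
        bmp.Pixels[i] = (0xFF << 24) | (gray << 16) | (gray << 8) | gray;
      }

      bmp.Invalidate(); // trigger a redraw of the bound Image element
    }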
Example #4
        // Just like last time with the LineGraph, we need to hook up our TextureGraph
        private void videoCanvas_Loaded(object sender, RoutedEventArgs e)
        {
            texGraph = new TextureGraphInterop();

            // Set window bounds in dips
            texGraph.WindowBounds = new Windows.Foundation.Size(
                (float)videoCanvas.ActualWidth,
                (float)videoCanvas.ActualHeight
                );

            // Set native resolution in pixels
            texGraph.NativeResolution = new Windows.Foundation.Size(
                (float)Math.Floor(videoCanvas.ActualWidth * Application.Current.Host.Content.ScaleFactor / 100.0f + 0.5f),
                (float)Math.Floor(videoCanvas.ActualHeight * Application.Current.Host.Content.ScaleFactor / 100.0f + 0.5f)
                );

            // Set render resolution to the full native resolution
            texGraph.RenderResolution = texGraph.NativeResolution;

            // Hook up the native component to the DrawingSurface
            videoCanvas.SetContentProvider(texGraph.CreateContentProvider());
            videoCanvas.SetManipulationHandler(texGraph);

            // Set the capture size of libvideo
            Windows.Foundation.Size captureSize = new Windows.Foundation.Size(1280, 720);

            // Construct libvideo's Camera object
            cam = new Camera(captureSize, CameraSensorLocation.Back);

            // When we have an input frame, call TextureGraphInterop::setTexturePtr
            cam.OnFrameReady += texGraph.setTexturePtr;
        }
Example #5
        /// <summary>
        /// Opens and sets up the camera if not already. Creates a new
        /// CameraStreamSource with an effect and shows it on the screen via
        /// the media element.
        /// </summary>
        private async void Initialize()
        {
            Size mediaElementSize = new Size(MediaElementWidth, MediaElementHeight);

            if (camera == null)
            {
                // Resolve the capture resolution and open the camera
                var captureResolutions =
                    PhotoCaptureDevice.GetAvailableCaptureResolutions(CameraSensorLocation.Back);

                Size selectedCaptureResolution =
                    captureResolutions.Where(
                        resolution => Math.Abs(AspectRatio - resolution.Width / resolution.Height) <= 0.1)
                            .OrderBy(resolution => resolution.Width).Last();

                camera = await PhotoCaptureDevice.OpenAsync(
                    CameraSensorLocation.Back, selectedCaptureResolution);

                // Set the image orientation prior to encoding
                camera.SetProperty(KnownCameraGeneralProperties.EncodeWithOrientation,
                    camera.SensorLocation == CameraSensorLocation.Back
                    ? camera.SensorRotationInDegrees : -camera.SensorRotationInDegrees);

                // Resolve and set the preview resolution
                var previewResolutions =
                    PhotoCaptureDevice.GetAvailablePreviewResolutions(CameraSensorLocation.Back);

                Size selectedPreviewResolution =
                    previewResolutions.Where(
                        resolution => Math.Abs(AspectRatio - resolution.Width / resolution.Height) <= 0.1)
                            .Where(resolution => (resolution.Height >= mediaElementSize.Height)
                                   && (resolution.Width >= mediaElementSize.Width))
                                .OrderBy(resolution => resolution.Width).First();

                await camera.SetPreviewResolutionAsync(selectedPreviewResolution);

                cameraEffect.CaptureDevice = camera;
            }


            if (mediaElement == null)
            {
                mediaElement = new MediaElement();
                mediaElement.Stretch = Stretch.UniformToFill;
                mediaElement.BufferingTime = new TimeSpan(0);
                mediaElement.Tap += OnMyCameraMediaElementTapped;
                source = new CameraStreamSource(cameraEffect, mediaElementSize);
                mediaElement.SetSource(source);
                MediaElementContainer.Children.Add(mediaElement);
                
            } 
            
            // Show the index and the name of the current effect
            if (cameraEffect is NokiaSketchEffect)
            {
                NokiaSketchEffect effects = cameraEffect as NokiaSketchEffect;
            }
            
        }
Example #6
 public void Maximize()
 {
     var size = new Windows.Foundation.Size(Current().Pixels.Width, Current().Pixels.Height);
     size.Height -= 100;
     size.Width -= 100;
     var av = ApplicationView.GetForCurrentView();
     av.TryResizeView(size);
 }
Example #7
        private async void ProcessCurrentVideoFrame(DispatcherTimer timer)
        {
            if (captureManager.CameraStreamState != Windows.Media.Devices.CameraStreamState.Streaming)
            {
                return;
            }
            if (!await frameProcessingSemaphore.WaitAsync(250))
            {
                return;
            }

            try
            {
                IEnumerable <DetectedFace> faces = null;

                // Create a VideoFrame object specifying the pixel format we want our capture image to be (NV12 bitmap in this case).
                // GetPreviewFrame will convert the native webcam frame into this format.
                const BitmapPixelFormat InputPixelFormat = BitmapPixelFormat.Nv12;
                using (VideoFrame previewFrame = new VideoFrame(InputPixelFormat, (int)this.videoProperties.Width, (int)this.videoProperties.Height))
                {
                    await this.captureManager.GetPreviewFrameAsync(previewFrame);

                    // The returned VideoFrame should be in the supported NV12 format but we need to verify this.
                    if (FaceDetector.IsBitmapPixelFormatSupported(previewFrame.SoftwareBitmap.BitmapPixelFormat))
                    {
                        faces = await this.faceTracker.ProcessNextFrameAsync(previewFrame);

                        if (this.FilterOutSmallFaces)
                        {
                            // We filter out small faces here.
                            faces = faces.Where(f => CoreUtil.IsFaceBigEnoughForDetection((int)f.FaceBox.Height, (int)this.videoProperties.Height));
                        }

                        this.NumFacesOnLastFrame = faces.Count();

                        if (this.EnableAutoCaptureMode)
                        {
                            this.UpdateAutoCaptureState(faces);
                        }

                        // Create our visualization using the frame dimensions and face results but run it on the UI thread.
                        var previewFrameSize = new Windows.Foundation.Size(previewFrame.SoftwareBitmap.PixelWidth, previewFrame.SoftwareBitmap.PixelHeight);
                        var ignored          = this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                        {
                            this.ShowFaceTrackingVisualization(previewFrameSize, faces);
                        });
                    }
                }
            }
            catch (Exception x)
            {
                Debug.WriteLine(x.Message);
            }
            finally
            {
                frameProcessingSemaphore.Release();
            }
        }
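The handler above relies on a shared semaphore and a timer that the snippet does not declare. A minimal sketch of that supporting state, assuming a SemaphoreSlim from System.Threading; the 66 ms interval (roughly 15 fps) and the helper name are assumptions:

        // Sketch of the state this handler assumes; interval and helper name are assumptions.
        private readonly SemaphoreSlim frameProcessingSemaphore = new SemaphoreSlim(1);
        private DispatcherTimer frameTimer;

        private void StartFrameTimer()
        {
            frameTimer = new DispatcherTimer { Interval = TimeSpan.FromMilliseconds(66) };
            frameTimer.Tick += (s, e) => ProcessCurrentVideoFrame(frameTimer);
            frameTimer.Start();
        }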
Example #8
        public static Windows.Foundation.Size GetCurrentDisplaySize()
        {
            var dispInfo = DisplayInformation.GetForCurrentView();
            var size     = new Windows.Foundation.Size(
                dispInfo.ScreenWidthInRawPixels * dispInfo.RawPixelsPerViewPixel,
                dispInfo.ScreenHeightInRawPixels * dispInfo.RawPixelsPerViewPixel);

            return(size);
        }
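Note that the method above multiplies the raw pixel counts by RawPixelsPerViewPixel. For comparison, converting the raw screen size down to view (logical) pixels divides instead; a minimal sketch:

        // Sketch for comparison: raw pixels divided by RawPixelsPerViewPixel gives view pixels.
        public static Windows.Foundation.Size GetCurrentDisplaySizeInViewPixels()
        {
            var dispInfo = DisplayInformation.GetForCurrentView();

            return new Windows.Foundation.Size(
                dispInfo.ScreenWidthInRawPixels / dispInfo.RawPixelsPerViewPixel,
                dispInfo.ScreenHeightInRawPixels / dispInfo.RawPixelsPerViewPixel);
        }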
Example #9
        /// <summary>
        /// Applies the framework element constraints like the size and max size, using the provided measured size.
        /// </summary>
        /// <param name="view"></param>
        public static void OnMeasureOverride <T>(T view, _Size measuredSize)
            where T : View, IFrameworkElement
        {
            var updated = IFrameworkElementHelper
                          .SizeThatFits(view, new _Size(measuredSize.Width, measuredSize.Height).PhysicalToLogicalPixels())
                          .LogicalToPhysicalPixels();

            Windows.UI.Xaml.Controls.Layouter.SetMeasuredDimensions(view, (int)updated.Width, (int)updated.Height);
        }
Example #10
        static UWPPoint AdjustToMakeVisible(UWPPoint point, UWPSize itemSize, ScrollViewer scrollViewer)
        {
            if (IsVertical(scrollViewer))
            {
                return(AdjustToMakeVisibleVertical(point, itemSize, scrollViewer));
            }

            return(AdjustToMakeVisibleHorizontal(point, itemSize, scrollViewer));
        }
Example #11
        public void Vector2ToSizeTest()
        {
            Vector2 vector = new Vector2(23, 42);

            Windows.Foundation.Size result = vector.ToSize();

            Assert.AreEqual(23.0, result.Width);
            Assert.AreEqual(42.0, result.Height);
        }
Example #12
        public void Vector2FromSizeTest()
        {
            var size = new Windows.Foundation.Size(23, 42);

            Vector2 result = size.ToVector2();

            Assert.AreEqual(23.0f, result.X);
            Assert.AreEqual(42.0f, result.Y);
        }
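The two tests above exercise Size/Vector2 conversion extensions without showing them. A minimal sketch of equivalent extensions; the actual implementations under test may differ:

        // Illustrative equivalents of the extensions these tests exercise.
        public static class SizeVectorExtensions
        {
            public static Windows.Foundation.Size ToSize(this System.Numerics.Vector2 vector) =>
                new Windows.Foundation.Size(vector.X, vector.Y);

            public static System.Numerics.Vector2 ToVector2(this Windows.Foundation.Size size) =>
                new System.Numerics.Vector2((float)size.Width, (float)size.Height);
        }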
Example #13
        protected override Windows.Foundation.Size ArrangeOverride(Windows.Foundation.Size finalSize)
        {
            foreach (var child in Children)
            {
                child.Arrange(new Windows.Foundation.Rect(new Windows.Foundation.Point(0, 0), finalSize));
            }

            return(finalSize);
        }
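ArrangeOverride above stretches every child to the final size, which is typical of an overlay panel. The matching MeasureOverride usually reports the largest child; a minimal sketch, not part of the original snippet:

        // Sketch of a matching MeasureOverride for an overlay-style panel (an assumption).
        protected override Windows.Foundation.Size MeasureOverride(Windows.Foundation.Size availableSize)
        {
            var desired = new Windows.Foundation.Size(0, 0);

            foreach (var child in Children)
            {
                child.Measure(availableSize);
                desired.Width  = Math.Max(desired.Width, child.DesiredSize.Width);
                desired.Height = Math.Max(desired.Height, child.DesiredSize.Height);
            }

            return desired;
        }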
Example #14
        protected override Windows.Foundation.Size ArrangeOverride(Windows.Foundation.Size finalSize)
        {
            if (_formsEmptyView != null)
            {
                _formsEmptyView.Layout(new Rectangle(0, 0, finalSize.Width, finalSize.Height));
            }

            return(base.ArrangeOverride(finalSize));
        }
Example #15
        static UWPPoint AdjustToEnd(UWPPoint point, UWPSize itemSize, ScrollViewer scrollViewer)
        {
            if (IsVertical(scrollViewer))
            {
                return(AdjustToEndVertical(point, itemSize, scrollViewer));
            }

            return(AdjustToEndHorizontal(point, itemSize, scrollViewer));
        }
Example #16
 /// <summary>
 /// Renders a writeable bitmap preview of the given frame.
 /// </summary>
 /// <param name="frame">Frame to render.</param>
 /// <param name="size">Preview size in pixels.</param>
 /// <returns>Rendered frame preview.</returns>
 public static async Task <WriteableBitmap> RenderPreviewAsync(Frame frame, Windows.Foundation.Size size)
 {
     using (var bitmap = new Bitmap(frame.Dimensions, Internal.Utilities.FrameFormatToColorMode(frame.Format), frame.Pitch, frame.Buffer.AsBuffer()))
         using (var source = new BitmapImageSource(bitmap))
             using (var renderer = new WriteableBitmapRenderer(source, new WriteableBitmap((int)size.Width, (int)size.Height), OutputOption.Stretch))
             {
                 return(await renderer.RenderAsync());
             }
 }
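A minimal usage sketch for the renderer above; 'frame' is assumed to come from the camera pipeline and 'previewImage' is an assumed Image element, neither of which appears in the original:

 // Usage sketch; the frame source and previewImage are assumptions.
 private async Task ShowPreviewAsync(Frame frame)
 {
     WriteableBitmap preview = await RenderPreviewAsync(frame, new Windows.Foundation.Size(320, 240));
     previewImage.Source = preview;
 }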
Example #17
 void UpdateFloatingObjectSize()
 {
     if (this._sheet != null)
     {
         double height = 0.0;
         for (int i = this.StartRow; i < this.EndRow; i++)
         {
             height += this._sheet.GetActualRowHeight(i, SheetArea.Cells);
         }
         if (height != 0.0)
         {
             if (this._sheet.GetActualRowHeight(this.StartRow, SheetArea.Cells) > 0.0)
             {
                 height -= this.StartRowOffset;
             }
             if (this._sheet.GetActualRowHeight(this.EndRow, SheetArea.Cells) > 0.0)
             {
                 height += this.EndRowOffset;
             }
         }
         else if (this.StartRow == this.EndRow)
         {
             height = this.EndRowOffset - this.StartRowOffset;
         }
         double width = 0.0;
         for (int j = this.StartColumn; j < this.EndColumn; j++)
         {
             width += this._sheet.GetActualColumnWidth(j, SheetArea.Cells);
         }
         if (width != 0.0)
         {
             if (this._sheet.GetActualColumnWidth(this.StartColumn, SheetArea.Cells) > 0.0)
             {
                 width -= this.StartColumnOffset;
             }
             if (this._sheet.GetActualColumnWidth(this.EndColumn, SheetArea.Cells) > 0.0)
             {
                 width += this.EndColumnOffset;
             }
         }
         else if (this.StartColumn == this.EndColumn)
         {
             width = this.EndColumnOffset - this.StartColumnOffset;
         }
         if (width < 0.0)
         {
             width = 0.0;
         }
         if (height < 0.0)
         {
             height = 0.0;
         }
         this._size = new Windows.Foundation.Size(width, height);
         this.RaisePropertyChanged("Size");
     }
 }
Example #18
        /// <summary>
        /// Initializes a new MediaCapture instance and starts the Preview streaming to the CamPreview UI element.
        /// </summary>
        /// <returns>Async Task object returning true if initialization and streaming were successful and false if an exception occurred.</returns>
        //private async Task<bool> StartWebcamStreaming()
        //{
        //    bool successful = true;

        //    try
        //    {
        //        this.mediaCapture = new MediaCapture();

        //        // For this scenario, we only need Video (not microphone) so specify this in the initializer.
        //        // NOTE: the appxmanifest only declares "webcam" under capabilities and if this is changed to include
        //        // microphone (default constructor) you must add "microphone" to the manifest or initialization will fail.
        //        MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings();
        //        settings.StreamingCaptureMode = StreamingCaptureMode.Video;
        //        await this.mediaCapture.InitializeAsync(settings);
        //        this.mediaCapture.Failed += this.MediaCapture_CameraStreamFailed;

        //        // Cache the media properties as we'll need them later.
        //        var deviceController = this.mediaCapture.VideoDeviceController;
        //        this.videoProperties = deviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;

        //        // Immediately start streaming to our CaptureElement UI.
        //        // NOTE: CaptureElement's Source must be set before streaming is started.
        //        this.CamPreview.Source = this.mediaCapture;
        //        await this.mediaCapture.StartPreviewAsync();

        //        // Ensure the Semaphore is in the signalled state.
        //        this.frameProcessingSemaphore.Release();

        //        // Use a 66 millisecond interval for our timer, i.e. roughly 15 frames per second
        //        TimeSpan timerInterval = TimeSpan.FromMilliseconds(66);
        //        this.frameProcessingTimer = Windows.System.Threading.ThreadPoolTimer.CreatePeriodicTimer(new Windows.System.Threading.TimerElapsedHandler(ProcessCurrentVideoFrame), timerInterval);
        //    }
        //    catch (System.UnauthorizedAccessException)
        //    {
        //        // If the user has disabled their webcam this exception is thrown; provide a descriptive message to inform the user of this fact.
        //        //this.rootPage.NotifyUser("Webcam is disabled or access to the webcam is disabled for this app.\nEnsure Privacy Settings allow webcam usage.", NotifyType.ErrorMessage);

        //        successful = false;
        //    }
        //    catch (Exception ex)
        //    {
        //        //this.rootPage.NotifyUser(ex.ToString(), NotifyType.ErrorMessage);
        //        successful = false;
        //    }

        //    return successful;
        //}

        /// <summary>
        /// Safely stops webcam streaming (if running) and releases MediaCapture object.
        /// </summary>
        //private async void ShutdownWebCam()
        //{
        //    if (this.frameProcessingTimer != null)
        //    {
        //        this.frameProcessingTimer.Cancel();
        //    }

        //    if (this.mediaCapture != null)
        //    {
        //        if (this.mediaCapture.CameraStreamState == Windows.Media.Devices.CameraStreamState.Streaming)
        //        {
        //            try
        //            {
        //                await this.mediaCapture.StopPreviewAsync();
        //            }
        //            catch (Exception)
        //            {
        //                ;   // Since we're going to destroy the MediaCapture object there's nothing to do here
        //            }
        //        }
        //        this.mediaCapture.Dispose();
        //    }

        //    this.frameProcessingTimer = null;
        //    this.CamPreview.Source = null;
        //    this.mediaCapture = null;
        //    this.CameraStreamingButton.IsEnabled = true;

        //}

        /// <summary>
        /// This method is invoked by a ThreadPoolTimer to execute the FaceTracker and Visualization logic at approximately 15 frames per second.
        /// </summary>
        /// <remarks>
        /// Keep in mind this method is called from a Timer and is not synchronized with the camera stream. Also, the processing time of FaceTracker
        /// will vary depending on the size of each frame and the number of faces being tracked. That is, a large image with several tracked faces may
        /// take longer to process.
        /// </remarks>
        /// <param name="timer">Timer object invoking this call</param>
        private async void ProcessCurrentVideoFrame(ThreadPoolTimer timer)
        {
            //if (this.currentState != ScenarioState.Streaming)
            //{
            //    return;
            //}

            // If a lock is being held it means we're still waiting for processing work on the previous frame to complete.
            // In this situation, don't wait on the semaphore but exit immediately.
            if (!frameProcessingSemaphore.Wait(0))
            {
                return;
            }

            try
            {
                IList <DetectedFace> faces = null;

                // Create a VideoFrame object specifying the pixel format we want our capture image to be (NV12 bitmap in this case).
                // GetPreviewFrame will convert the native webcam frame into this format.
                const BitmapPixelFormat InputPixelFormat = BitmapPixelFormat.Nv12;
                using (VideoFrame previewFrame = new VideoFrame(InputPixelFormat, (int)this.videoProperties.Width, (int)this.videoProperties.Height))
                {
                    await this.mediaCapture.GetPreviewFrameAsync(previewFrame);

                    // The returned VideoFrame should be in the supported NV12 format but we need to verify this.
                    if (FaceDetector.IsBitmapPixelFormatSupported(previewFrame.SoftwareBitmap.BitmapPixelFormat))
                    {
                        faces = await this.faceTracker.ProcessNextFrameAsync(previewFrame);
                    }
                    else
                    {
                        throw new System.NotSupportedException("PixelFormat '" + InputPixelFormat.ToString() + "' is not supported by FaceDetector");
                    }

                    // Create our visualization using the frame dimensions and face results but run it on the UI thread.
                    var previewFrameSize = new Windows.Foundation.Size(previewFrame.SoftwareBitmap.PixelWidth, previewFrame.SoftwareBitmap.PixelHeight);

                    var ignored = rootPage.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                    {
                        this.SetupVisualization(previewFrameSize, faces);
                    });
                }
            }
            catch (Exception ex)
            {
                //var ignored = rootPage.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                //{
                //    this.rootPage.NotifyUser(ex.ToString(), NotifyType.ErrorMessage);
                //});
            }
            finally
            {
                frameProcessingSemaphore.Release();
            }
        }
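ProcessCurrentVideoFrame above assumes a faceTracker field that the snippet never initializes. A minimal sketch of that setup; FaceTracker.CreateAsync is the real factory in Windows.Media.FaceAnalysis, while the helper name is ours:

        // Sketch of the initialization this handler assumes; the helper name is hypothetical.
        private FaceTracker faceTracker;

        private async Task InitializeFaceTrackerAsync()
        {
            this.faceTracker = await FaceTracker.CreateAsync();
        }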
Example #19
        public void Maximize()
        {
            var size = new Windows.Foundation.Size(Current().Pixels.Width, Current().Pixels.Height);

            size.Height -= 100;
            size.Width  -= 100;
            var av = ApplicationView.GetForCurrentView();

            av.TryResizeView(size);
        }
Example #20
        void UpdateBounds()
        {
            Windows.Foundation.Size masterSize = Control.MasterSize;
            Windows.Foundation.Size detailSize = Control.DetailSize;


            Element.MasterBounds = new Rectangle(0, 0, masterSize.Width, masterSize.Height);
            Element.DetailBounds = new Rectangle(0, 0, detailSize.Width, detailSize.Height);
            RefreshInsidePagesSize();
        }
Example #21
        // Code to execute when the application is activated (brought to foreground)
        // This code will not execute when the application is first launched
        private void Application_Activated(object sender, ActivatedEventArgs e)
        {
            var resolution = new Windows.Foundation.Size(640, 480);
            var task       = PhotoCaptureDevice.OpenAsync(CameraSensorLocation.Back, new Windows.Foundation.Size(640, 480)).AsTask();

            task.Wait();

            Camera = task.Result;
            Camera.SetPreviewResolutionAsync(resolution).AsTask().Wait();
        }
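Application_Activated above blocks with Wait() until the camera opens. For comparison, a minimal async sketch of the same initialization; it avoids blocking the thread, but the camera is not guaranteed to be ready when the handler returns:

        // Async sketch of the same initialization (an alternative, not the original's approach).
        private async void Application_ActivatedAsync(object sender, ActivatedEventArgs e)
        {
            var resolution = new Windows.Foundation.Size(640, 480);

            Camera = await PhotoCaptureDevice.OpenAsync(CameraSensorLocation.Back, resolution);
            await Camera.SetPreviewResolutionAsync(resolution);
        }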
Example #22
        protected override Windows.Foundation.Size MeasureOverride(Windows.Foundation.Size availableSize)
        {
            var result = base.MeasureOverride(availableSize);

            if (!double.IsInfinity(availableSize.Width))
            {
                result.Width = availableSize.Width;
            }
            return(result);
        }
Example #23
        public void PlatformToSystemException(float width, float height)
        {
#if __IOS__
            var platform = new CoreGraphics.CGSize(width, height);
            Assert.Throws <ArgumentOutOfRangeException>(() => platform.ToSystemSize());
#elif WINDOWS_UWP
            var platform = new Windows.Foundation.Size(width, height);
            Assert.Throws <ArgumentOutOfRangeException>(() => platform.ToSystemSize());
#endif
        }
Example #24
        partial void PlatformApplyChanges()
        {
            var dinfo   = Windows.Graphics.Display.DisplayInformation.GetForCurrentView();
            var scale   = dinfo.RawPixelsPerViewPixel;
            var size    = new Windows.Foundation.Size(_preferredBackBufferWidth / scale, _preferredBackBufferHeight / scale);
            var appView = ApplicationView.GetForCurrentView();
            var success = appView.TryResizeView(size);

            System.Diagnostics.Debug.Assert(success, "TryResizeView failed");
        }
Example #25
        /// <summary>
        /// Invoked when the application is launched normally by the end user.  Other entry points
        /// will be used such as when the application is launched to open a specific file.
        /// </summary>
        /// <param name="args">Details about the launch request and process.</param>
        protected override void OnLaunched(LaunchActivatedEventArgs args)
        {
            this.EnableLogging();

            Frame rootFrame = Window.Current.Content as Frame;

            // Do not repeat app initialization when the Window already has content,
            // just ensure that the window is active
            if (rootFrame == null)
            {
                // Create a Frame to act as the navigation context and navigate to the first page
                rootFrame = new Frame();

                rootFrame.NavigationFailed += OnNavigationFailed;

                if (args.PreviousExecutionState == ApplicationExecutionState.Terminated)
                {
                    // N/A - state is saved in Settings and will be loaded automatically.
                }
                else if (args.PreviousExecutionState == ApplicationExecutionState.ClosedByUser)
                {
                    Models.CleanerSettings.Instance.HtmlSource = string.Empty;
                }

                // Place the frame in the current Window
                Window.Current.Content = rootFrame;
            }

            Logger.Log(LogLevel.Information, nameof(App), $"Started from previous state {args.PreviousExecutionState}.");

            if (!args.PrelaunchActivated)
            {
                if (rootFrame.Content == null)
                {
                    // When the navigation stack isn't restored navigate to the first page,
                    // configuring the new page by passing required information as a navigation
                    // parameter
                    rootFrame.Navigate(typeof(MainPage), args.Arguments);
                }

                float DPI = Windows.Graphics.Display.DisplayInformation.GetForCurrentView().LogicalDpi;
                ApplicationView.PreferredLaunchWindowingMode = ApplicationViewWindowingMode.PreferredLaunchViewSize;

                // Height="450" Width="800"
                var desiredSize = new Windows.Foundation.Size(800f * 96.0f / DPI, 450f * 96.0f / DPI);
                ApplicationView.PreferredLaunchViewSize = desiredSize;

                // Ensure the current window is active
                Window.Current.Activate();
                ApplicationView.GetForCurrentView().TryResizeView(desiredSize);

                //var main = (MainPage)((Frame)Window.Current.Content).Content;
                //rootFrame.KeyDown += main.Page_KeyDown;
            }
        }
Example #26
        private async void ProcessCurrentVideoFrame(ThreadPoolTimer timer)
        {
            if (captureManager.CameraStreamState != CameraStreamState.Streaming ||
                !frameProcessingSemaphore.Wait(0))
            {
                return;
            }

            try
            {
                IEnumerable <DetectedFace> faces = null;

                // Create a VideoFrame object specifying the pixel format we want our capture image to be (NV12 bitmap in this case).
                // GetPreviewFrame will convert the native webcam frame into this format.
                const BitmapPixelFormat InputPixelFormat = BitmapPixelFormat.Nv12;

                using (VideoFrame currentFrame = new VideoFrame(InputPixelFormat, (int)videoProperties.Width, (int)videoProperties.Height))
                {
                    await captureManager.GetPreviewFrameAsync(currentFrame);

                    // The returned VideoFrame should be in the supported NV12 format but we need to verify this.
                    if (FaceDetector.IsBitmapPixelFormatSupported(currentFrame.SoftwareBitmap.BitmapPixelFormat))
                    {
                        faces = await faceTracker.ProcessNextFrameAsync(currentFrame);

                        if (FilterOutSmallFaces)
                        {
                            // We filter out small faces here.
                            faces = faces.Where(f => CoreUtil.IsFaceBigEnoughForDetection((int)f.FaceBox.Height, (int)videoProperties.Height));
                        }

                        NumFacesOnLastFrame = faces.Count();

                        if (EnableAutoCaptureMode)
                        {
                            UpdateAutoCaptureState(faces);
                        }

                        // Create our visualization using the frame dimensions and face results but run it on the UI thread.
                        var currentFrameSize = new Windows.Foundation.Size(currentFrame.SoftwareBitmap.PixelWidth, currentFrame.SoftwareBitmap.PixelHeight);

                        var rgbaBitmap = SoftwareBitmap.Convert(currentFrame.SoftwareBitmap, BitmapPixelFormat.Rgba8);

                        HandleFaces(currentFrameSize, faces, rgbaBitmap);
                    }
                }
            }
            catch (Exception)
            {
            }
            finally
            {
                frameProcessingSemaphore.Release();
            }
        }
Example #27
        public async Task AdjustSize_Expected_Result(Stretch stretch, double availableWidth, double availableHeight, double expectedWidth, double expectedHeight)
        {
            var imageNaturalSize   = new Size(1000, 500);
            var availableSize      = new Size(availableWidth, availableHeight);
            var expectedOutputSize = new Size(expectedWidth, expectedHeight);

            ImageSizeHelper
            .AdjustSize(stretch, availableSize, imageNaturalSize)
            .Should()
            .Be(expectedOutputSize, 0.5, $"Invalid output for image size {imageNaturalSize} when available is {availableSize} using stretch {stretch}");
        }
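The test above drives ImageSizeHelper.AdjustSize across Stretch modes. A minimal sketch of the Stretch.Uniform case shows the underlying math; the real helper under test also covers None, Fill, and UniformToFill:

        // Sketch of the Stretch.Uniform math the helper under test implements.
        static Size AdjustSizeUniform(Size available, Size natural)
        {
            double scale = Math.Min(available.Width / natural.Width,
                                    available.Height / natural.Height);

            return new Size(natural.Width * scale, natural.Height * scale);
        }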
Example #28
        protected override Windows.Foundation.Size MeasureOverride(Windows.Foundation.Size availableSize)
        {
            if (CrossPlatformMeasure == null)
            {
                return(base.MeasureOverride(availableSize));
            }

            var measure = CrossPlatformMeasure(availableSize.Width, availableSize.Height);

            return(measure.ToNative());
        }
Example #29
        private void SetupVisualization(Windows.Foundation.Size framePixelSize, IList <DetectedFace> foundFaces)
        {
            this.VisualizationCanvas.Children.Clear();



            double actualWidth  = this.VisualizationCanvas.ActualWidth;
            double actualHeight = this.VisualizationCanvas.ActualHeight;


            TextBlock texto = new TextBlock();

            if (this.currentState == ScenarioState.Streaming && foundFaces != null && actualWidth != 0 && actualHeight != 0)
            {
                double    widthScale  = framePixelSize.Width / actualWidth;
                double    heightScale = framePixelSize.Height / actualHeight;
                Rectangle box         = new Rectangle();



                int i = 0;
                foreach (DetectedFace face in foundFaces)
                {
                    if (IdentidadEncontrada != "Catalina" && IdentidadEncontrada != "Rochy")
                    {
                        box.Width           = (int)face.FaceBox.Width / (int)widthScale;
                        box.Height          = (int)(face.FaceBox.Height / heightScale);
                        box.Fill            = this.fillBrush;
                        box.Stroke          = this.lineBrush;
                        box.StrokeThickness = this.lineThickness;
                        box.Margin          = new Thickness((uint)(face.FaceBox.X / widthScale), (uint)(face.FaceBox.Y / heightScale), 0, 0);


                        texto.Text       = IdentidadEncontrada;
                        texto.Margin     = new Thickness((uint)(face.FaceBox.X / widthScale), (uint)(face.FaceBox.Y / heightScale) - 15, 0, 0);
                        texto.Foreground = this.lineBrush;
                        this.VisualizationCanvas.Children.Add(box);

                        this.VisualizationCanvas.Children.Add(texto);
                    }
                    else
                    {
                        imageBrush.ImageSource = imageSource;
                        imageUnic.Source       = imageSource;
                        imageUnic.Margin       = new Thickness((uint)(face.FaceBox.X / widthScale), (uint)(face.FaceBox.Y / heightScale), 0, 0);


                        imageUnic.Width  = ((int)face.FaceBox.Width / (int)widthScale) * 1.15;
                        imageUnic.Height = ((int)face.FaceBox.Height / heightScale) * 1.15;
                        this.VisualizationCanvas.Children.Add(imageUnic);
                    }
                }
            }
        }
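The box and label placement above divides frame-space coordinates by the frame-to-canvas scale factors. A minimal sketch of that mapping as a helper; ToCanvasRect is a hypothetical name, not part of the original:

        // Hypothetical helper making the frame-to-canvas mapping explicit.
        private static Windows.Foundation.Rect ToCanvasRect(BitmapBounds faceBox, double widthScale, double heightScale)
        {
            return new Windows.Foundation.Rect(
                faceBox.X / widthScale,
                faceBox.Y / heightScale,
                faceBox.Width / widthScale,
                faceBox.Height / heightScale);
        }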
Example #30
        // Provide touch focus in the viewfinder.
        async void focus_Tapped(object sender, GestureEventArgs e)
        {
            if (cam == null)
            {
                return;
            }


            Point uiTapPoint = e.GetPosition(viewfinderCanvas);

            if (PhotoCaptureDevice.IsFocusRegionSupported(cam.SensorLocation))
            {
                Size _focusRegionSize = new Size(100, 100);

                // Get tap coordinates as a foundation point
                Windows.Foundation.Point tapPoint = new Windows.Foundation.Point(uiTapPoint.X, uiTapPoint.Y);

                // The viewfinder is rotated 90 degrees relative to the preview frame,
                // so the canvas height maps to the preview width and vice versa
                double xRatio = viewfinderCanvas.ActualHeight / cam.PreviewResolution.Width;
                double yRatio = viewfinderCanvas.ActualWidth / cam.PreviewResolution.Height;

                // adjust to center focus on the tap point
                Windows.Foundation.Point displayOrigin = new Windows.Foundation.Point(
                    tapPoint.Y - _focusRegionSize.Width / 2, (viewfinderCanvas.ActualWidth - tapPoint.X) - _focusRegionSize.Height / 2);

                // adjust for resolution difference between preview image and the canvas
                Windows.Foundation.Point viewFinderOrigin = new Windows.Foundation.Point(displayOrigin.X / xRatio, displayOrigin.Y / yRatio);

                Rect focusrect = new Rect(viewFinderOrigin, _focusRegionSize);

                // clip to preview resolution
                Rect viewPortRect = new Rect(0, 0, cam.PreviewResolution.Width, cam.PreviewResolution.Height);
                focusrect.Intersect(viewPortRect);

                cam.FocusRegion = focusrect;

                // show a focus indicator
                //focusBrackets.SetValue(Shape.StrokeProperty, new SolidColorBrush(Colors.Blue));
                focusBrackets.Visibility = Visibility.Visible;
                focusBrackets.SetValue(Canvas.LeftProperty, uiTapPoint.X - _focusRegionSize.Width / 2);
                focusBrackets.SetValue(Canvas.TopProperty, uiTapPoint.Y - _focusRegionSize.Height / 2);

                CameraFocusStatus status = await cam.FocusAsync();

                if (status == CameraFocusStatus.Locked)
                {
                    //focusBrackets.SetValue(Shape.StrokeProperty, new SolidColorBrush(Colors.Green));
                    cam.SetProperty(KnownCameraPhotoProperties.LockedAutoFocusParameters, AutoFocusParameters.Focus);
                }
                else
                {
                    cam.SetProperty(KnownCameraPhotoProperties.LockedAutoFocusParameters, AutoFocusParameters.None);
                }
            }
        }
Example #31
        protected override Windows.Foundation.Size MeasureOverride(Windows.Foundation.Size availableSize)
        {
            _textBlock.Measure(availableSize);

            // This deliberately does something wrong so we can demo fixing it
            WRect  bounds      = ApplicationView.GetForCurrentView().VisibleBounds;
            double scaleFactor = DisplayInformation.GetForCurrentView().RawPixelsPerViewPixel;
            var    size        = new Size(bounds.Width * scaleFactor, bounds.Height * scaleFactor);

            return(new Windows.Foundation.Size(size.Width, _textBlock.DesiredSize.Height));
        }
Example #32
        public MobileScreenTrigger()
        {
            Windows.UI.Xaml.Window.Current.SizeChanged += Window_SizeChanged;

            Windows.Foundation.Size size = new Windows.Foundation.Size()
            {
                Width  = Windows.UI.Xaml.Window.Current.Bounds.Width,
                Height = Windows.UI.Xaml.Window.Current.Bounds.Height
            };
            UpdateTrigger(size);
        }
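The constructor above wires Window_SizeChanged and seeds UpdateTrigger with the initial window size, but shows neither member. A minimal sketch, assuming MobileScreenTrigger derives from StateTriggerBase; the 720-pixel threshold is an assumption for illustration:

        // Sketch of the members the constructor assumes; the threshold is an assumption.
        private void Window_SizeChanged(object sender, Windows.UI.Core.WindowSizeChangedEventArgs e)
        {
            UpdateTrigger(e.Size);
        }

        private void UpdateTrigger(Windows.Foundation.Size size)
        {
            SetActive(size.Width <= 720); // StateTriggerBase.SetActive toggles the trigger
        }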
Example #33
        protected override WSize MeasureOverride(WSize availableSize)
        {
            LoadCarouselView();

            if (_item != null)
            {
                SetDataContext(_item);
                _item = null;
            }

            return(base.MeasureOverride(availableSize));
        }
Example #34
 internal virtual void Init(string name, double x, double y, double width, double height)
 {
     this._name              = name;
     this._location          = new Windows.Foundation.Point(x, y);
     this._size              = new Windows.Foundation.Size(width, height);
     this._dynamicMove       = true;
     this._dynamicSize       = true;
     this._sizeWithSameRatio = false;
     this._canPrint          = true;
     this._visible           = true;
     this._suspendState.Reset();
 }
Example #35
        protected override Windows.Foundation.Size ArrangeOverride(Windows.Foundation.Size finalSize)
        {
            var size   = base.ArrangeOverride(finalSize);
            var header = GetTemplateChild("HeaderArea") as UIElement;

            if (header != null)
            {
                var p = header.TransformToVisual(this).TransformPoint(new Windows.Foundation.Point(0, header.DesiredSize.Height * .5));
                HeaderBaseline = p.Y - size.Height * .5;
            }
            return(size);
        }
Example #36
 void AdjustSize()
 {
     Windows.Foundation.Size sheetBounds = this.GetSheetBounds();
     if (this._size.Width > sheetBounds.Width)
     {
         this._size.Width = sheetBounds.Width;
     }
     if (this._size.Height > sheetBounds.Height)
     {
         this._size.Height = sheetBounds.Height;
     }
 }
Example #37
		public SizeRequest GetDesiredSize(double widthConstraint, double heightConstraint)
		{
			var constraint = new Windows.Foundation.Size(widthConstraint, heightConstraint);

			double oldWidth = Width;
			double oldHeight = Height;

			Height = double.NaN;
			Width = double.NaN;

			Measure(constraint);
			var result = new Size(Math.Ceiling(DesiredSize.Width), Math.Ceiling(DesiredSize.Height));

			Width = oldWidth;
			Height = oldHeight;

			return new SizeRequest(result);
		}
Example #38
        private void InitializeCamera()
        {
            Windows.Foundation.Size captureResolution;

            var deviceName = DeviceStatus.DeviceName;

            if (deviceName.Contains("RM-875") || deviceName.Contains("RM-876") || deviceName.Contains("RM-877"))
            {
                captureResolution = new Windows.Foundation.Size(7712, 4352); // 16:9
                //captureResolution = new Windows.Foundation.Size(7136, 5360); // 4:3
            }
            else if (deviceName.Contains("RM-937") || deviceName.Contains("RM-938") || deviceName.Contains("RM-939"))
            {
                captureResolution = new Windows.Foundation.Size(5376, 3024); // 16:9
                //captureResolution = new Windows.Foundation.Size(4992, 3744); // 4:3
            }
            else
            {
                captureResolution = PhotoCaptureDevice.GetAvailableCaptureResolutions(SENSOR_LOCATION).First();
            }

            var task = PhotoCaptureDevice.OpenAsync(SENSOR_LOCATION, captureResolution).AsTask();

            task.Wait();

            _device = task.Result;
            _device.SetProperty(KnownCameraGeneralProperties.PlayShutterSoundOnCapture, true);

            //if (_flashButton != null)
            //{
            //    SetFlashState(_flashState);
            //}

            //AdaptToOrientation();

            ViewfinderVideoBrush.SetSource(_device);

            if (PhotoCaptureDevice.IsFocusSupported(SENSOR_LOCATION))
            {
                Microsoft.Devices.CameraButtons.ShutterKeyHalfPressed += CameraButtons_ShutterKeyHalfPressed;
            }

            Microsoft.Devices.CameraButtons.ShutterKeyPressed += CameraButtons_ShutterKeyPressed;
        }
Example #39
    private async Task initCameraAsync(CameraSensorLocation sensorLocation) {

      if (_cam != null) {
        _cam.Dispose();
        _cam = null;
      }


      var res = new Windows.Foundation.Size(640, 480);

      _cam = await PhotoCaptureDevice.OpenAsync(sensorLocation, res);
      await _cam.SetPreviewResolutionAsync(res);

      viewfinder.SetSource(_cam);

      viewfinderTransform.Rotation = sensorLocation == CameraSensorLocation.Back ?
                                       _cam.SensorRotationInDegrees : -_cam.SensorRotationInDegrees;



    }
Example #40
		/// <summary>
		/// Resolves the available resolutions for the device defined by the given
		/// media capture instance.
		/// </summary>
		/// <param name="mediaCapture">An initialised media capture instance.</param>
		/// <param name="mediaStreamType">The type of the media stream (e.g. video or photo).</param>
		/// <returns>The list of available resolutions or an empty list, if not available.</returns>
		private List<Size> ResolveCameraResolutions(MediaCapture mediaCapture, MediaStreamType mediaStreamType)
		{
			List<Size> resolutions = new List<Size>();
			IReadOnlyList<IMediaEncodingProperties> mediaStreamPropertiesList = null;

			try
			{
				mediaStreamPropertiesList = mediaCapture.VideoDeviceController.GetAvailableMediaStreamProperties(mediaStreamType);
			}
			catch (Exception e)
			{
				Debug.WriteLine(DebugTag + "ResolveCameraResolutions(): " + e.ToString());
				return resolutions;
			}

			foreach (var mediaStreamProperties in mediaStreamPropertiesList)
			{
				Size size = new Size(0,0);
				bool sizeSet = false;

				var streamProperties = mediaStreamProperties as VideoEncodingProperties;
				if (streamProperties != null)
				{
					VideoEncodingProperties properties = streamProperties;
					size = new Size(properties.Width, properties.Height);
					sizeSet = true;
				}
				else
				{
					var encodingProperties = mediaStreamProperties as ImageEncodingProperties;
					if (encodingProperties != null)
					{
						ImageEncodingProperties properties = encodingProperties;
						size = new Size(properties.Width, properties.Height);
						sizeSet = true;
					}
				}

				if (sizeSet)
				{
					if (!resolutions.Contains(size))
					{
						resolutions.Add(size);
					}
				}
			}

			return resolutions;
		}
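A minimal usage sketch for the resolver above, assuming an initialised MediaCapture instance; it requires System.Linq and returns an empty Size when no resolutions are available:

		// Usage sketch; the helper name is hypothetical.
		private Size ResolveHighestPreviewResolution(MediaCapture mediaCapture)
		{
			List<Size> resolutions = ResolveCameraResolutions(mediaCapture, MediaStreamType.VideoPreview);

			return resolutions.OrderBy(size => size.Width * size.Height).LastOrDefault();
		}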
Example #41
        void selectPicture()
        {
            try
            {
                PhotoChooserTask task = new PhotoChooserTask();
                task.Completed += async (s, res) =>
                {
                    if (res.TaskResult == TaskResult.OK)
                    {
                        if (HRImagesource != null)
                        {
                            HRImagesource.Dispose();
                            HRImagesource = null;
                        }
                        HRImagesource = new StreamImageSource(res.ChosenPhoto);
                        var info = await HRImagesource.GetInfoAsync();
                        ImageSize = info.ImageSize;

                        //create LR image
                        using (var renderer = new WriteableBitmapRenderer(HRImagesource, LRImageSource))
                            await renderer.RenderAsync();

                        requestProcessing();
                    }

                };
                task.Show();
            }
            catch (Exception)
            {

                throw;
            }
        }
Example #42
        private async Task<Tuple<ProcessResult, WriteableBitmap>> ProcessFrameAsync(OpticalReaderLib.Frame frame)
        {
            //System.Diagnostics.Debug.WriteLine("Start processing");
            
            var rectSize = new Windows.Foundation.Size(
                ReaderBorder.ActualWidth / Canvas.ActualWidth * frame.Dimensions.Width / _zoom,
                ReaderBorder.ActualHeight / Canvas.ActualHeight * frame.Dimensions.Height / _zoom);

            var rectOrigin = new Windows.Foundation.Point(
                frame.Dimensions.Width / 2 - rectSize.Width / 2,
                frame.Dimensions.Height / 2 - rectSize.Height / 2);

            var area = new Windows.Foundation.Rect(rectOrigin, rectSize);

            ProcessResult result = null;

            try
            {
                result = await OpticalReaderTask.Instance.Processor.ProcessAsync(frame, area, _rotation);
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine(String.Format("Processing frame failed: {0}\n{1}", ex.Message, ex.StackTrace));
            }

            //System.Diagnostics.Debug.WriteLine("Stop processing");

            InterestAreaPolygon.Points = null;

            if (result != null)
            {
                _lastSuccess = DateTime.Now;

                var thumbnail = GenerateThumbnail();

                var interestPointCollection = new PointCollection();

                foreach (var point in result.InterestPoints)
                {
                    interestPointCollection.Add(new System.Windows.Point(point.X, point.Y));
                }

                InterestAreaPolygon.Points = interestPointCollection;

                return new Tuple<ProcessResult, WriteableBitmap>(result, thumbnail);
            }
            else
            {
                var sinceLastSuccess = DateTime.Now - _lastSuccess;

                if (sinceLastSuccess > OpticalReaderTask.Instance.FocusInterval)
                {
                    try
                    {
                        var status = await _device.FocusAsync();

                        _lastSuccess = DateTime.Now;

                        // todo use camera focus lock status
                    }
                    catch (Exception ex)
                    {
                        System.Diagnostics.Debug.WriteLine(String.Format("Focusing camera failed: {0}\n{1}", ex.Message, ex.StackTrace));
                    }
                }

                return null;
            }
        }
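The interest area above is a centered rectangle whose size shrinks with the zoom factor. A minimal sketch of the same math as a helper; CenteredArea is a hypothetical name:

        // Hypothetical helper expressing the centered interest-area math used above.
        private static Windows.Foundation.Rect CenteredArea(Windows.Foundation.Size frame, double widthFraction, double heightFraction)
        {
            var size = new Windows.Foundation.Size(frame.Width * widthFraction, frame.Height * heightFraction);
            var origin = new Windows.Foundation.Point((frame.Width - size.Width) / 2, (frame.Height - size.Height) / 2);

            return new Windows.Foundation.Rect(origin, size);
        }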
Example #43
        public void Vector2FromSizeTest()
        {
            var size = new Windows.Foundation.Size(23, 42);

            Vector2 result = size.ToVector2();

            Assert.AreEqual(23.0f, result.X);
            Assert.AreEqual(42.0f, result.Y);
        }
Example #44
        /// <summary>
        /// Initializes camera.
        /// </summary>
        /// <param name="sensorLocation">Camera sensor to initialize</param>
        private async Task InitializeCamera(CameraSensorLocation sensorLocation)
        {
            IReadOnlyList<Windows.Foundation.Size> availablePreviewResolutions = PhotoCaptureDevice.GetAvailablePreviewResolutions(sensorLocation);

            Windows.Foundation.Size previewResolution = new Windows.Foundation.Size(int.MaxValue, int.MaxValue);
            for (int i = 0; i < availablePreviewResolutions.Count; i++)
            {
                double ratio = availablePreviewResolutions[i].Width / availablePreviewResolutions[i].Height;
                if (ratio > 1.32 && ratio < 1.34 && PerfectCamera.DataContext.Instance.CameraRatio == CameraRatio.Ratio_4x3)
                {
                    if (previewResolution.Width > availablePreviewResolutions[i].Width)
                    {
                        previewResolution = availablePreviewResolutions[i];
                    }
                }
                else if (ratio > 1.7 && ratio < 1.8 && PerfectCamera.DataContext.Instance.CameraRatio == CameraRatio.Ratio_16x9)
                {
                    if (previewResolution.Width > availablePreviewResolutions[i].Width)
                    {
                        previewResolution = availablePreviewResolutions[i];
                    }
                }
            }

            PerfectCamera.DataContext.Instance.PreviewResolution = previewResolution;

            IReadOnlyList<Windows.Foundation.Size> availableResolutions = PhotoCaptureDevice.GetAvailableCaptureResolutions(sensorLocation);

            //find 4:3 (2048 x 1536) or 16:9 (1280x720)
            Windows.Foundation.Size captureResolution = new Windows.Foundation.Size(0, 0);
            for (int i = 0; i < availableResolutions.Count; i++)
            {
                double ratio = availableResolutions[i].Width / availableResolutions[i].Height;
                if (ratio > 1.32 && ratio < 1.34 && PerfectCamera.DataContext.Instance.CameraRatio == CameraRatio.Ratio_4x3)
                {
                    if (captureResolution.Width < availableResolutions[i].Width)
                    {
                        captureResolution = availableResolutions[i];
                    }
                }
                else if (ratio > 1.7 && ratio < 1.8 && PerfectCamera.DataContext.Instance.CameraRatio == CameraRatio.Ratio_16x9)
                {
                    if (captureResolution.Width < availableResolutions[i].Width)
                    {
                        captureResolution = availableResolutions[i];
                    }
                }
            }
            

            PhotoCaptureDevice device = await PhotoCaptureDevice.OpenAsync(sensorLocation, captureResolution);

            await device.SetPreviewResolutionAsync(previewResolution);
            await device.SetCaptureResolutionAsync(captureResolution);

            Camera = device;

            if (PerfectCamera.DataContext.Instance.CameraType == PerfectCameraType.Selfie)
            {
                _cameraEffect = new Effects()
                {
                    PhotoCaptureDevice = Camera
                };

                _cameraStreamSource = new CameraStreamSource(_cameraEffect, previewResolution);
            }

            if (Camera != null)
            {
                if (Camera.SensorLocation == CameraSensorLocation.Front)
                {
                    FlashButton.IsHitTestVisible = false;
                    FlashButton.Opacity = 0.5;
                }
                else
                {
                    FlashButton.IsHitTestVisible = true;
                    FlashButton.Opacity = 1.0;
                }
            }

            SetOrientation(this.Orientation);
        }
Example #45
        private void OnFrameFormatUpdate(FrameFormat obj)
        {
            if (!IsInCallMode)
            {
                return;
            }

            if(obj.IsLocal)
            {
                LocalSwapChainPanelHandle = obj.SwapChainHandle;
                var s = new Windows.Foundation.Size();
                if (ApplicationView.GetForCurrentView().Orientation == ApplicationViewOrientation.Landscape)
                {
                    s.Width = (float)obj.Width;
                    s.Height = (float)obj.Height;
                }
                else
                {
                    s.Width = (float)obj.Height;
                    s.Height = (float)obj.Width;
                }
                LocalNativeVideoSize = s;
            }
            else
            {
                RemoteSwapChainPanelHandle = obj.SwapChainHandle;
                var s = new Windows.Foundation.Size();
                s.Width = (float)obj.Width;
                s.Height = (float)obj.Height;
                RemoteNativeVideoSize = s;
            }
        }
Example #46
        // Code to execute when the application is activated (brought to foreground)
        // This code will not execute when the application is first launched
        private void Application_Activated(object sender, ActivatedEventArgs e)
        {
            var resolution = new Windows.Foundation.Size(640, 480);
            var task = PhotoCaptureDevice.OpenAsync(CameraSensorLocation.Back, new Windows.Foundation.Size(640, 480)).AsTask();

            task.Wait();

            Camera = task.Result;
            Camera.SetPreviewResolutionAsync(resolution).AsTask().Wait();
        }
Example #47
		public SizeRequest GetDesiredSize(double widthConstraint, double heightConstraint)
		{
			if (_canvas.Children.Count == 0)
				return new SizeRequest();

			var constraint = new Windows.Foundation.Size(widthConstraint, heightConstraint);
			var child = (FrameworkElement)_canvas.Children[0];

			var oldWidth = child.Width;
			var oldHeight = child.Height;

			child.Height = double.NaN;
			child.Width = double.NaN;

			child.Measure(constraint);
			var result = new Size(Math.Ceiling(child.DesiredSize.Width), Math.Ceiling(child.DesiredSize.Height));

			child.Width = oldWidth;
			child.Height = oldHeight;

			return new SizeRequest(result);
		}
Example #48
0
        /// <summary>
        /// Initializes camera.
        /// </summary>
        /// <param name="sensorLocation">Camera sensor to initialize</param>
        private async Task InitializeCamera(CameraSensorLocation sensorLocation)
        {
            Windows.Foundation.Size initialResolution =
                new Windows.Foundation.Size(FilterEffects.DataContext.DefaultPreviewResolutionWidth,
                                            FilterEffects.DataContext.DefaultPreviewResolutionHeight);
            Windows.Foundation.Size previewResolution =
                new Windows.Foundation.Size(FilterEffects.DataContext.DefaultPreviewResolutionWidth,
                                            FilterEffects.DataContext.DefaultPreviewResolutionHeight);

            // Find out the largest 4:3 capture resolution available on device
            IReadOnlyList<Windows.Foundation.Size> availableResolutions =
                PhotoCaptureDevice.GetAvailableCaptureResolutions(sensorLocation);

            Windows.Foundation.Size captureResolution = new Windows.Foundation.Size(0, 0);

            for (int i = 0; i < availableResolutions.Count; i++)
            {
                double ratio = availableResolutions[i].Width / availableResolutions[i].Height;
                if (ratio > 1.32 && ratio < 1.34)
                {
                    if (captureResolution.Width < availableResolutions[i].Width)
                    {
                        captureResolution = availableResolutions[i];
                    }
                }
            }
 
            PhotoCaptureDevice device =
                await PhotoCaptureDevice.OpenAsync(sensorLocation, initialResolution);

            await device.SetPreviewResolutionAsync(previewResolution);
            await device.SetCaptureResolutionAsync(captureResolution);

            _photoCaptureDevice = device;

            SetOrientation(this.Orientation);
        }
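The for-loop keeps the widest resolution whose width/height ratio falls between 1.32 and 1.34, i.e. approximately 4:3. A LINQ sketch of the same selection, assuming System.Linq is imported (LastOrDefault yields a 0 x 0 Size when nothing matches, just like the loop's initial value):

        Windows.Foundation.Size largest43 = availableResolutions
            .Where(r => r.Width / r.Height > 1.32 && r.Width / r.Height < 1.34)
            .OrderBy(r => r.Width)
            .LastOrDefault();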
Example #49
0
        /// <summary>
        /// Create and apply edge path using calculated ER parameters stored in edge
        /// </summary>
        /// <param name="useCurrentCoords">Use current vertices coordinates or final coorfinates (for.ex if move animation is active final coords will be its destination)</param>
        /// <param name="externalRoutingPoints">Provided custom routing points will be used instead of stored ones.</param>
        /// <param name="updateLabel">Should edge label be updated in this pass</param>
        public virtual void PrepareEdgePath(bool useCurrentCoords = false, Measure.Point[] externalRoutingPoints = null, bool updateLabel = true)
        {
            //do not calculate invisible edges
            if ((Visibility != Visibility.Visible && !IsHiddenEdgesUpdated) || Source == null || Target == null || ManualDrawing || !IsTemplateLoaded) return;

            #region Get the inputs
            //get the size of the source
            var sourceSize = new Size
            {
                Width = Source.ActualWidth,
                Height = Source.ActualHeight
            };
            if (CustomHelper.IsInDesignMode(this)) sourceSize = new Size(80, 20);

            //get the position center of the source
            var sourcePos = new Point
            {
                X = (useCurrentCoords ? GraphAreaBase.GetX(Source) : GraphAreaBase.GetFinalX(Source)) + sourceSize.Width * .5,
                Y = (useCurrentCoords ? GraphAreaBase.GetY(Source) : GraphAreaBase.GetFinalY(Source)) + sourceSize.Height * .5
            };

            //get the size of the target
            var targetSize = new Size
            {
                Width = Target.ActualWidth,
                Height = Target.ActualHeight
            };
            if (CustomHelper.IsInDesignMode(this))
                targetSize = new Size(80, 20);

            //get the position center of the target
            var targetPos = new Point
            {
                X = (useCurrentCoords ? GraphAreaBase.GetX(Target) : GraphAreaBase.GetFinalX(Target)) + targetSize.Width * .5,
                Y = (useCurrentCoords ? GraphAreaBase.GetY(Target) : GraphAreaBase.GetFinalY(Target)) + targetSize.Height * .5
            };

            var routedEdge = Edge as IRoutingInfo;
            if (routedEdge == null)
                throw new GX_InvalidDataException("Edge must implement IRoutingInfo interface");

            //get the route information
            var routeInformation = externalRoutingPoints ?? routedEdge.RoutingPoints;

            // Get the TopLeft position of the Source Vertex.
            var sourcePos1 = new Point
            {
                X = (useCurrentCoords ? GraphAreaBase.GetX(Source) : GraphAreaBase.GetFinalX(Source)),
                Y = (useCurrentCoords ? GraphAreaBase.GetY(Source) : GraphAreaBase.GetFinalY(Source))
            };
            // Get the TopLeft position of the Target Vertex.
            var targetPos1 = new Point
            {
                X = (useCurrentCoords ? GraphAreaBase.GetX(Target) : GraphAreaBase.GetFinalX(Target)),
                Y = (useCurrentCoords ? GraphAreaBase.GetY(Target) : GraphAreaBase.GetFinalY(Target))
            };

            var hasEpSource = _edgePointerForSource != null;
            var hasEpTarget = _edgePointerForTarget != null;
            #endregion

            //if self looped edge
            if (IsSelfLooped)
            {
                PrepareSelfLoopedEdge(sourcePos1);
                return;
            }

            //check if we have some edge route data
            var hasRouteInfo = routeInformation != null && routeInformation.Length > 1;

            //calculate source and target edge attach points
            if (RootArea != null && !hasRouteInfo && RootArea.EnableParallelEdges && ParallelEdgeOffset != 0)
            {
                sourcePos = GetParallelOffset(Source, Target, ParallelEdgeOffset);
                targetPos = GetParallelOffset(Target, Source, -ParallelEdgeOffset);
            }

            /* Rectangular shapes implementation by bleibold */

            var gEdge = Edge as IGraphXCommonEdge;
            Point p1;
            Point p2;

            //calculate edge source (p1) and target (p2) endpoints based on different settings
            if (gEdge != null && gEdge.SourceConnectionPointId.HasValue)
            {
                var sourceCp = Source.GetConnectionPointById(gEdge.SourceConnectionPointId.Value, true);
                if (sourceCp == null)
                    throw new GX_ObjectNotFoundException(string.Format("Can't find source vertex VCP by edge source connection point Id({1}) : {0}", Source, gEdge.SourceConnectionPointId));
                if (sourceCp.Shape == VertexShape.None) p1 = sourceCp.RectangularSize.Center();
                else
                {
                    var targetCpPos = gEdge.TargetConnectionPointId.HasValue ? Target.GetConnectionPointById(gEdge.TargetConnectionPointId.Value, true).RectangularSize.Center() : (hasRouteInfo ? routeInformation[1].ToWindows() : (targetPos));
                    p1 = GeometryHelper.GetEdgeEndpoint(sourceCp.RectangularSize.Center(), sourceCp.RectangularSize, targetCpPos, sourceCp.Shape);
                }
            }
            else
                p1 = GeometryHelper.GetEdgeEndpoint(sourcePos, new SysRect(sourcePos1, sourceSize), (hasRouteInfo ? routeInformation[1].ToWindows() : (targetPos)), Source.VertexShape);

            if (gEdge != null && gEdge.TargetConnectionPointId.HasValue)
            {
                var targetCp = Target.GetConnectionPointById(gEdge.TargetConnectionPointId.Value, true);
                if (targetCp == null)
                    throw new GX_ObjectNotFoundException(string.Format("Can't find target vertex VCP by edge target connection point Id({1}) : {0}", Target, gEdge.TargetConnectionPointId));
                if (targetCp.Shape == VertexShape.None) p2 = targetCp.RectangularSize.Center();
                else
                {
                    var sourceCpPos = gEdge.SourceConnectionPointId.HasValue ? Source.GetConnectionPointById(gEdge.SourceConnectionPointId.Value, true).RectangularSize.Center() : hasRouteInfo ? routeInformation[routeInformation.Length - 2].ToWindows() : (sourcePos);
                    p2 = GeometryHelper.GetEdgeEndpoint(targetCp.RectangularSize.Center(), targetCp.RectangularSize, sourceCpPos, targetCp.Shape);
                }
            }
            else
                p2 = GeometryHelper.GetEdgeEndpoint(targetPos, new SysRect(targetPos1, targetSize), hasRouteInfo ? routeInformation[routeInformation.Length - 2].ToWindows() : (sourcePos), Target.VertexShape);

            SourceConnectionPoint = p1;
            TargetConnectionPoint = p2;

            _linegeometry = new PathGeometry();
            PathFigure lineFigure;

            //if we have route and route consist of 2 or more points
            if (RootArea != null && hasRouteInfo)
            {
                //replace start and end points with accurate ones
                var routePoints = routeInformation.ToWindows().ToList();
                routePoints.Remove(routePoints.First());
                routePoints.Remove(routePoints.Last());
                routePoints.Insert(0, p1);
                routePoints.Add(p2);

                if (RootArea.EdgeCurvingEnabled)
                {
                    var oPolyLineSegment = GeometryHelper.GetCurveThroughPoints(routePoints.ToArray(), 0.5, RootArea.EdgeCurvingTolerance);

                    if (hasEpTarget)
                    {
                        UpdateTargetEpData(oPolyLineSegment.Points[oPolyLineSegment.Points.Count - 1], oPolyLineSegment.Points[oPolyLineSegment.Points.Count - 2]);
                        oPolyLineSegment.Points.RemoveAt(oPolyLineSegment.Points.Count - 1);
                    }
                    if (hasEpSource) UpdateSourceEpData(oPolyLineSegment.Points.First(), oPolyLineSegment.Points[1]);

                    lineFigure = GeometryHelper.GetPathFigureFromPathSegments(routePoints[0], true, true, oPolyLineSegment);
#if WPF
                    //freeze and create resulting geometry
                    GeometryHelper.TryFreeze(oPolyLineSegment);
#endif
                }
                else
                {
                    if (hasEpSource) UpdateSourceEpData(routePoints.First(), routePoints[1]);
                    if (hasEpTarget)
                        routePoints[routePoints.Count - 1] = routePoints[routePoints.Count - 1].Subtract(UpdateTargetEpData(p2, routePoints[routePoints.Count - 2]));

                    var pcol = new PointCollection();
                    foreach (var item in routePoints)
                        pcol.Add(item);

                    lineFigure = new PathFigure { StartPoint = p1, Segments = new PathSegmentCollection { new PolyLineSegment { Points = pcol } }, IsClosed = false };
                }

            }
            else // no route defined
            {
                if (hasEpSource) UpdateSourceEpData(p1, p2);
                if (hasEpTarget)
                    p2 = p2.Subtract(UpdateTargetEpData(p2, p1));

                lineFigure = new PathFigure { StartPoint = p1, Segments = new PathSegmentCollection { new LineSegment() { Point = p2 } }, IsClosed = false };
            }
            ((PathGeometry)_linegeometry).Figures.Add(lineFigure);
#if WPF
            GeometryHelper.TryFreeze(lineFigure);
            GeometryHelper.TryFreeze(_linegeometry);
#endif
            if (ShowLabel && _edgeLabelControl != null && _updateLabelPosition && updateLabel)
                _edgeLabelControl.UpdatePosition();
        }
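GetEdgeEndpoint's job is to clip the center-to-center line against the vertex shape so the edge meets the border instead of the midpoint. A simplified sketch of that clipping for the rectangular case (an illustration only, not GraphX's actual implementation):

        // Point where a ray from the rectangle's center toward 'target'
        // crosses the rectangle border (assumes target lies outside).
        static Point RectEdgePoint(SysRect rect, Point target)
        {
            double cx = rect.X + rect.Width / 2, cy = rect.Y + rect.Height / 2;
            double dx = target.X - cx, dy = target.Y - cy;
            if (dx == 0 && dy == 0) return new Point(cx, cy);

            // Scale the direction vector until it touches the nearer border.
            double sx = dx != 0 ? (rect.Width / 2) / Math.Abs(dx) : double.PositiveInfinity;
            double sy = dy != 0 ? (rect.Height / 2) / Math.Abs(dy) : double.PositiveInfinity;
            double s = Math.Min(sx, sy);
            return new Point(cx + dx * s, cy + dy * s);
        }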
Example #50
0
        /// <summary>
        /// Initializes camera. Once initialized the instance is set to the
        /// DataContext.Device property for further usage from this or other
        /// pages.
        /// </summary>
        /// <param name="sensorLocation">Camera sensor to initialize.</param>
        private async Task InitializeCamera(CameraSensorLocation sensorLocation)
        {
            // Find out the largest capture resolution available on device
            IReadOnlyList<Windows.Foundation.Size> availableResolutions =
                PhotoCaptureDevice.GetAvailableCaptureResolutions(sensorLocation);

            Windows.Foundation.Size captureResolution = new Windows.Foundation.Size(0, 0);

            for (int i = 0; i < availableResolutions.Count; ++i)
            {
                if (captureResolution.Width < availableResolutions[i].Width)
                {
                    Debug.WriteLine("MainPage.InitializeCamera(): New capture resolution: " + availableResolutions[i]);
                    captureResolution = availableResolutions[i];
                }
            }
            
            PhotoCaptureDevice device =
                await PhotoCaptureDevice.OpenAsync(sensorLocation, DefaultCameraResolution);

            await device.SetPreviewResolutionAsync(DefaultCameraResolution);
            await device.SetCaptureResolutionAsync(captureResolution);

            device.SetProperty(KnownCameraGeneralProperties.EncodeWithOrientation,
                          device.SensorLocation == CameraSensorLocation.Back ?
                          device.SensorRotationInDegrees : -device.SensorRotationInDegrees);

            _dataContext.Device = device;

        }
Example #51
0
        internal void SetClientSize(int width, int height)
        {
            if (_appView.IsFullScreenMode)
                return;

            if (_viewBounds.Width == width &&
                _viewBounds.Height == height)
                return;

            var viewSize = new Windows.Foundation.Size(width / _dinfo.RawPixelsPerViewPixel, height / _dinfo.RawPixelsPerViewPixel);

            //_appView.SetPreferredMinSize(viewSize);
            if (!_appView.TryResizeView(viewSize))
            {
                // TODO: What now?
            }
        }
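The division by RawPixelsPerViewPixel converts raw pixels into view (DIP) units, which is what TryResizeView expects. A minimal sketch of both conversions, assuming a DisplayInformation instance obtained via GetForCurrentView():

        // Convert between raw pixels and view (DIP) units.
        static Windows.Foundation.Size PixelsToDips(DisplayInformation dinfo, double w, double h)
        {
            return new Windows.Foundation.Size(w / dinfo.RawPixelsPerViewPixel,
                                               h / dinfo.RawPixelsPerViewPixel);
        }

        static Windows.Foundation.Size DipsToPixels(DisplayInformation dinfo, double w, double h)
        {
            return new Windows.Foundation.Size(w * dinfo.RawPixelsPerViewPixel,
                                               h * dinfo.RawPixelsPerViewPixel);
        }

Example #52
0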
        /// <summary>
        /// This method is invoked by a ThreadPoolTimer to execute the FaceTracker and Visualization logic at approximately 15 frames per second.
        /// </summary>
        /// <remarks>
        /// Keep in mind this method is called from a Timer and not synchronized with the camera stream. Also, the processing time of FaceTracker
        /// will vary depending on the size of each frame and the number of faces being tracked. That is, a large image with several tracked faces may
        /// take longer to process.
        /// </remarks>
        /// <param name="timer">Timer object invoking this call</param>
        private async void ProcessCurrentVideoFrame(ThreadPoolTimer timer)
        {
            if (this.currentState != ScenarioState.Streaming)
            {
                return;
            }

            // If a lock is being held it means we're still waiting for processing work on the previous frame to complete.
            // In this situation, don't wait on the semaphore but exit immediately.
            if (!frameProcessingSemaphore.Wait(0))
            {
                return;
            }

            try
            {
                IList<DetectedFace> faces = null;

                // Create a VideoFrame object specifying the pixel format we want our capture image to be (NV12 bitmap in this case).
                // GetPreviewFrame will convert the native webcam frame into this format.
                const BitmapPixelFormat InputPixelFormat = BitmapPixelFormat.Nv12;
                using (VideoFrame previewFrame = new VideoFrame(InputPixelFormat, (int)this.videoProperties.Width, (int)this.videoProperties.Height))
                {
                    await this.mediaCapture.GetPreviewFrameAsync(previewFrame);

                    // The returned VideoFrame should be in the supported NV12 format but we need to verify this.
                    if (FaceDetector.IsBitmapPixelFormatSupported(previewFrame.SoftwareBitmap.BitmapPixelFormat))
                    {
                        faces = await this.faceTracker.ProcessNextFrameAsync(previewFrame);
                    }
                    else
                    {
                        throw new System.NotSupportedException("PixelFormat '" + InputPixelFormat.ToString() + "' is not supported by FaceDetector");
                    }

                    // Create our visualization using the frame dimensions and face results but run it on the UI thread.
                    var previewFrameSize = new Windows.Foundation.Size(previewFrame.SoftwareBitmap.PixelWidth, previewFrame.SoftwareBitmap.PixelHeight);
                    var ignored = this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                    {
                        this.SetupVisualization(previewFrameSize, faces);
                    });
                }
            }
            catch (Exception ex)
            {
                var ignored = this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                {
                    this.rootPage.NotifyUser(ex.ToString(), NotifyType.ErrorMessage);
                });
            }
            finally
            {
                frameProcessingSemaphore.Release();
            }

        }
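The frameProcessingSemaphore.Wait(0) call is the load-shedding trick in this sample: it tries to enter the semaphore without blocking, so a timer tick that fires while the previous frame is still in flight returns immediately instead of queuing up. A standalone sketch of the pattern, assuming System.Threading is imported and ProcessFrameAsync stands in for any per-tick work:

        private readonly SemaphoreSlim _gate = new SemaphoreSlim(1);

        private async void OnTick(object state)
        {
            // Wait(0) returns false at once if the previous tick still holds
            // the semaphore; this tick is dropped rather than queued.
            if (!_gate.Wait(0))
            {
                return;
            }

            try
            {
                await ProcessFrameAsync(); // hypothetical per-frame work
            }
            finally
            {
                _gate.Release();
            }
        }

Example #53
0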
		/// <summary>
		/// Resolves the screen resolution and display size.
		/// </summary>
		private async void ResolveScreenResolutionAsync()
		{
			// Initialise the values
			ScreenResolution = Resolutions.Unknown;
			ScreenResolutionSize = new Size(0, 0);

			double rawPixelsPerViewPixel = 0;
			double rawDpiX = 0;
			double rawDpiY = 0;
			double logicalDpi = 0;
			double screenResolutionX = 0;
			double screenResolutionY = 0;

			await Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(
				Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
				{
#if WINDOWS_PHONE_APP
					DisplayInformation displayInformation =
						Windows.Graphics.Display.DisplayInformation.GetForCurrentView();
					rawPixelsPerViewPixel = displayInformation.RawPixelsPerViewPixel;
					rawDpiX = displayInformation.RawDpiX;
					rawDpiY = displayInformation.RawDpiY;
					logicalDpi = displayInformation.LogicalDpi;
					screenResolutionX = Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Bounds.Width * rawPixelsPerViewPixel;
					screenResolutionY = Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Bounds.Height * rawPixelsPerViewPixel;
#elif NETFX_CORE
					// Windows (non-phone) builds: resolution lookup not implemented in this sample.
#else
					object objExtendedProperty;

					if (DeviceExtendedProperties.TryGetValue("PhysicalScreenResolution", out objExtendedProperty))
					{
						var physicalScreenResolution = (Size)objExtendedProperty;
						screenResolutionY = (int)physicalScreenResolution.Height;
						screenResolutionX = (int)physicalScreenResolution.Width;
					}
					else
					{
						var scaleFactor = Application.Current.Host.Content.ScaleFactor;
						screenResolutionY = (int)(Application.Current.Host.Content.ActualHeight * scaleFactor);
						screenResolutionX = (int)(Application.Current.Host.Content.ActualWidth * scaleFactor);

					}

					objExtendedProperty = null;

					if (DeviceExtendedProperties.TryGetValue("RawDpiX", out objExtendedProperty))
					{
						rawDpiX = (double)objExtendedProperty;
					}

					if (DeviceExtendedProperties.TryGetValue("RawDpiY", out objExtendedProperty))
					{
						rawDpiY = (double)objExtendedProperty;
					}

					// Get PhysicalScreenResolution
					//if (DeviceExtendedProperties.TryGetValue("PhysicalScreenResolution", out objExtendedProperty))
					//{
					//	var scaleFactor = Application.Current.Host.Content.ScaleFactor;

					//	var screenResolution = (Size)objExtendedProperty;
					//	var width = Application.Current.Host.Content.ActualWidth;
					//	var physicalSize = new Size(screenResolution.Width / rawDpiX, screenResolution.Height / rawDpiY);
					//	var scale = Math.Max(1, physicalSize.Width / DisplayConstants.BaselineWidthInInches);
					//	var idealViewWidth = Math.Min(DisplayConstants.BaselineWidthInViewPixels * scale, screenResolution.Width);
					//	var idealScale = screenResolution.Width / idealViewWidth;
					//	rawPixelsPerViewPixel = idealScale.NudgeToClosestPoint(1); //bucketizedScale
					//	var viewResolution = new Size(screenResolution.Width / rawPixelsPerViewPixel, screenResolution.Height / rawPixelsPerViewPixel);
					//}
#endif
				});
			ScreenResolutionSize = new Size(Math.Round(screenResolutionX), Math.Round(screenResolutionY));

			if (screenResolutionY < 960)
			{
				ScreenResolution = Resolutions.WVGA;
			}
			else if (screenResolutionY < 1280)
			{
				ScreenResolution = Resolutions.qHD;
			}
			else if (screenResolutionY < 1920)
			{
				if (screenResolutionX < 768)
				{
					ScreenResolution = Resolutions.HD720;
				}
				else
				{
					ScreenResolution = Resolutions.WXGA;
				}
			}
			else // screenResolutionY >= 1920
			{
				ScreenResolution = Resolutions.HD1080;
			}

			if (rawDpiX > 0 && rawDpiY > 0)
			{
				// Calculate screen diagonal in inches.
				DisplaySizeInInches =
					Math.Sqrt(Math.Pow(ScreenResolutionSize.Width / rawDpiX, 2) +
							  Math.Pow(ScreenResolutionSize.Height / rawDpiY, 2));
				DisplaySizeInInches = Math.Round(DisplaySizeInInches, 1); // One decimal is enough
			}

			Debug.WriteLine(DebugTag + "ResolveScreenResolutionAsync(): Screen properties:"
				+ "\n - Raw pixels per view pixel: " + rawPixelsPerViewPixel
				+ "\n - Raw DPI: " + rawDpiX + ", " + rawDpiY
				+ "\n . Logical DPI: " + logicalDpi
				+ "\n - Resolution: " + ScreenResolution
				+ "\n - Resolution in pixels: " + ScreenResolutionSize
				+ "\n - Screen size in inches: " + DisplaySizeInInches);

			AsyncOperationComplete();
		}
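The diagonal computation is plain Pythagoras in inches: each axis is converted from pixels to inches via its raw DPI before taking the square root. Restated as a small helper (for instance, 768 x 1280 at 332 DPI on both axes gives about 4.5 inches):

		static double DiagonalInInches(double widthPx, double heightPx, double dpiX, double dpiY)
		{
			// Convert each axis to inches, then take the Euclidean length.
			return Math.Round(Math.Sqrt(Math.Pow(widthPx / dpiX, 2) +
										Math.Pow(heightPx / dpiY, 2)), 1);
		}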
Example #54
0
    /// <summary>
    /// Opens and sets up the camera if not already. Creates a new
    /// CameraStreamSource with an effect and shows it on the screen via
    /// the media element.
    /// </summary>
    private async void Initialize() {
      Debug.WriteLine("MainPage.Initialize()");
      var mediaElementSize = new Size(MediaElementWidth, MediaElementHeight);

      if (_camera == null) {
        // Resolve the capture resolution and open the camera
        var captureResolutions =
          PhotoCaptureDevice.GetAvailableCaptureResolutions(CameraSensorLocation.Back);

        var selectedCaptureResolution =
          captureResolutions.Where(
            resolution => Math.Abs(AspectRatio - resolution.Width/resolution.Height) <= 0.1)
                            .OrderBy(resolution => resolution.Width).Last();

        _camera = await PhotoCaptureDevice.OpenAsync(
          CameraSensorLocation.Back, selectedCaptureResolution);

        // Set the image orientation prior to encoding
        _camera.SetProperty(KnownCameraGeneralProperties.EncodeWithOrientation,
                           _camera.SensorLocation == CameraSensorLocation.Back
                           ? _camera.SensorRotationInDegrees : -_camera.SensorRotationInDegrees);

        // Resolve and set the preview resolution
        var previewResolutions =
          PhotoCaptureDevice.GetAvailablePreviewResolutions(CameraSensorLocation.Back);

        Size selectedPreviewResolution =
          previewResolutions.Where(
            resolution => Math.Abs(AspectRatio - resolution.Width/resolution.Height) <= 0.1)
                            .Where(resolution => (resolution.Height >= mediaElementSize.Height)
                                                 && (resolution.Width >= mediaElementSize.Width))
                            .OrderBy(resolution => resolution.Width).First();

        await _camera.SetPreviewResolutionAsync(selectedPreviewResolution);

        _cameraEffect.CaptureDevice = _camera;
      }

      if (_mediaElement == null) {
        _mediaElement = new MediaElement {
          Stretch = Stretch.UniformToFill, 
          BufferingTime = new TimeSpan(0)
        };
        _mediaElement.Tap += OnMyCameraMediaElementTapped;
        _source = new CameraStreamSource(_cameraEffect, mediaElementSize);
        _mediaElement.SetSource(_source);
        MediaElementContainer.Children.Add(_mediaElement);
        _source.FPSChanged += OnFPSChanged;
      }

      // Show the index and the name of the current effect
      if (_cameraEffect is NokiaImagingSDKEffects) {
        var effects =
          _cameraEffect as NokiaImagingSDKEffects;

        EffectNameTextBlock.Text =
          (effects.EffectIndex + 1) + "/"
          + NokiaImagingSDKEffects.NumberOfEffects
          + ": " + effects.EffectName;
      } else {
        EffectNameTextBlock.Text = _cameraEffect.EffectName;
      }
    }
Example #55
0
        private async Task initCamera(CameraSensorLocation sensorLocation)
        {
            Windows.Foundation.Size res = new Windows.Foundation.Size(MediaElementWidth, MediaElementHeight);
            CameraOff();
            camera = await AudioVideoCaptureDevice.OpenForVideoOnlyAsync(sensorLocation, res);

            await camera.SetPreviewResolutionAsync(res);

            frameBitmap = new WriteableBitmap((int)camera.PreviewResolution.Width,
                   (int)camera.PreviewResolution.Height);

        }
Example #56
0
        public void Initialize(CoreWindow coreWindow, UIElement inputElement, TouchQueue touchQueue)
        {
            _coreWindow = coreWindow;
            _windowEvents = new InputEvents(_coreWindow, inputElement, touchQueue);

			_dinfo = DisplayInformation.GetForCurrentView();
            _appView = ApplicationView.GetForCurrentView();

            // Set a min size that is reasonable knowing someone might try
            // to use some old school resolution like 640x480.
            var minSize = new Windows.Foundation.Size(640 / _dinfo.RawPixelsPerViewPixel, 480 / _dinfo.RawPixelsPerViewPixel);
            _appView.SetPreferredMinSize(minSize);

            _orientation = ToOrientation(_dinfo.CurrentOrientation);
            _dinfo.OrientationChanged += DisplayProperties_OrientationChanged;
            _swapChainPanel = inputElement as SwapChainPanel;

            _swapChainPanel.SizeChanged += SwapChain_SizeChanged;

            _coreWindow.Closed += Window_Closed;
            _coreWindow.Activated += Window_FocusChanged;
			_coreWindow.CharacterReceived += Window_CharacterReceived;

            SetViewBounds(_appView.VisibleBounds.Width, _appView.VisibleBounds.Height);

            SetCursor(false);
        }
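Example #57
0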
        private async void ApplicationBarIconButton_All(object sender, EventArgs e)
        {
            sFace.Position = 0;
            sBackground.Position = 0;

            IBuffer result;
            using (var faceSource = new StreamImageSource(sFace))
            using (var faceReframing = new FilterEffect(faceSource))
            using (var source = new StreamImageSource(sBackground))
            using (var effect = new FilterEffect(source))
            using (var renderer = new JpegRenderer(effect))
            {
                var size = gestureBackground.ImageSize;

                //target scale
                var scale = gestureFace.Scale / gestureBackground.Scale;
                //target angle
                var angle = gestureFace.Angle - gestureBackground.Angle;

                //translation between image center and background position
                var backgroundTranslation = new Point(size.Width / 2 - gestureBackground.Pos.X, size.Height / 2 - gestureBackground.Pos.Y);

                //convert translation to Face referential translation
                CompositeTransform gestureTransform = new CompositeTransform();
                gestureTransform.ScaleX = gestureTransform.ScaleY = scale;
                gestureTransform.Rotation = angle;
                var translation = gestureTransform.Inverse.Transform(backgroundTranslation);

                //target position
                var posX = gestureFace.Pos.X + translation.X;
                var posY = gestureFace.Pos.Y + translation.Y;

                var currentSize = new Windows.Foundation.Size(size.Width / scale, size.Height / scale);
                var corner = new Windows.Foundation.Point(posX - currentSize.Width / 2, posY - currentSize.Height / 2);
                var reframing = new ReframingFilter(new Windows.Foundation.Rect(corner, currentSize), -angle);

                //face reframing => blend input
                faceReframing.Filters = new IFilter[] { reframing };
                effect.Filters = new IFilter[] { new BlendFilter(faceReframing) };

                result = await renderer.RenderAsync();
            }

            using (var media = new MediaLibrary())
                media.SavePictureToCameraRoll("test", result.ToArray());
        }
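The Inverse transform above is what maps a translation measured in background coordinates into the face image's reference frame. A tiny standalone illustration of that step, with made-up numbers:

        // Map a vector from one image's coordinate space into another's by
        // inverting the scale/rotation that relates the two spaces.
        var gestureTransform = new CompositeTransform
        {
            ScaleX = 2.0,    // face rendered at twice the background scale
            ScaleY = 2.0,
            Rotation = 30.0  // and rotated 30 degrees relative to it
        };

        // (100, 0) in background space maps to roughly (43.3, -25) in face
        // space: rotate by -30 degrees, then scale by 0.5.
        Point faceSpace = gestureTransform.Inverse.Transform(new Point(100, 0));

Example #58
0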
        private void InitializeCamera()
        {
            var captureResolutions = PhotoCaptureDevice.GetAvailableCaptureResolutions(CameraSensorLocation.Back).ToArray();
            var previewResolutions = PhotoCaptureDevice.GetAvailablePreviewResolutions(CameraSensorLocation.Back).ToArray();

            Windows.Foundation.Size captureResolution = new Windows.Foundation.Size(640, 480);
            Windows.Foundation.Size previewResolution = new Windows.Foundation.Size(640, 480);

            try
            {
                captureResolution = GetFirstWideResolution(captureResolutions);
                previewResolution = GetFirstWideResolution(previewResolutions);
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine("Unable to get wide resolution for viewfinder, using 640x480: " + ex.Message);
            }

            var task = PhotoCaptureDevice.OpenAsync(CameraSensorLocation.Back, captureResolution).AsTask();

            task.Wait();

            _device = task.Result;
            _device.SetPreviewResolutionAsync(previewResolution).AsTask().Wait();

            var objectResolutionSide = _device.PreviewResolution.Height * (ReaderBorder.Height - 2 * ReaderBorder.Margin.Top) / 480;
            var objectResolution = new Windows.Foundation.Size(objectResolutionSide, objectResolutionSide);
            var focusRegionSize = new Windows.Foundation.Size(objectResolutionSide, objectResolutionSide);
            var objectSize = OpticalReaderLib.OpticalReaderTask.Instance.ObjectSize;

            if (objectSize.Width * objectSize.Height > 0)
            {
                var parameters = OpticalReaderLib.Utilities.GetSuggestedParameters(_device.PreviewResolution, _device.SensorRotationInDegrees, objectSize, objectResolution);

                _zoom = Math.Max(parameters.Zoom, 1.0);
            }
            else
            {
                _zoom = 1.0;
            }

            var centerPoint = new Windows.Foundation.Point(previewResolution.Width / 2, previewResolution.Height / 2);

            _device.FocusRegion = new Windows.Foundation.Rect(
                centerPoint.X - focusRegionSize.Width / 2, centerPoint.Y - focusRegionSize.Height / 2,
                focusRegionSize.Width, focusRegionSize.Height);

            ViewfinderVideoBrush.SetSource(_device);
        }
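The focus region is simply a square Rect centered on the preview midpoint. The same construction as a reusable helper (a sketch, not part of the sample):

        static Windows.Foundation.Rect CenteredRect(Windows.Foundation.Point center, Windows.Foundation.Size size)
        {
            // Offset the top-left corner by half the extent on each axis.
            return new Windows.Foundation.Rect(
                center.X - size.Width / 2, center.Y - size.Height / 2,
                size.Width, size.Height);
        }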
Example #59
0
		public SizeRequest GetDesiredSize(double widthConstraint, double heightConstraint)
		{
			var constraint = new Windows.Foundation.Size(widthConstraint, heightConstraint);
			IVisualElementRenderer childRenderer = Platform.GetRenderer(Element.CurrentPage);
			FrameworkElement child = childRenderer.ContainerElement;

			double oldWidth = child.Width;
			double oldHeight = child.Height;

			child.Height = double.NaN;
			child.Width = double.NaN;

			child.Measure(constraint);
			var result = new Size(Math.Ceiling(child.DesiredSize.Width), Math.Ceiling(child.DesiredSize.Height));

			child.Width = oldWidth;
			child.Height = oldHeight;

			return new SizeRequest(result);
		}