Code example #1
        private async void Button_Click(object sender, RoutedEventArgs e)
        {
            using (var stream = await Root.RenderToRandomAccessStream())
            {
                var device = new CanvasDevice();
                var bitmap = await CanvasBitmap.LoadAsync(device, stream);

                var renderer = new CanvasRenderTarget(device, bitmap.SizeInPixels.Width, bitmap.SizeInPixels.Height, bitmap.Dpi);

                using (var ds = renderer.CreateDrawingSession())
                {
                    var blur = new GaussianBlurEffect();
                    blur.BlurAmount = 5.0f;
                    blur.Source = bitmap;
                    ds.DrawImage(blur);
                }

                stream.Seek(0);
                await renderer.SaveAsync(stream, CanvasBitmapFileFormat.Png);

                BitmapImage image = new BitmapImage();
                image.SetSource(stream);
                Blured.Source = image;
            }
        }
Code example #2
    public async static Task<Uri> ToQrDataUri(this ISdp sdp, int width, int height)
    {
      var qrCodeWriter = new QRCodeWriter();
      var bitMatrix = qrCodeWriter.encode(sdp.ToString(), ZXing.BarcodeFormat.QR_CODE, width, height);

      using (var canvasRenderTarget = new CanvasRenderTarget(CanvasDevice.GetSharedDevice(), 500, 500, 96))
      {
        using (var drawingSession = canvasRenderTarget.CreateDrawingSession())
        {
          for (var y = 0; y < height; y++)
          {
            for (var x = 0; x < width; x++)
            {
              // Draw dark modules as opaque black and light modules as white; FillRectangle paints the whole 1x1 cell.
              drawingSession.FillRectangle(x, y, 1, 1, bitMatrix.get(x, y) ? Color.FromArgb(255, 0, 0, 0) : Color.FromArgb(255, 255, 255, 255));
            }
          }
        }

        using (var inMemoryRandomAccessStream = new InMemoryRandomAccessStream())
        {
          await canvasRenderTarget.SaveAsync(inMemoryRandomAccessStream, CanvasBitmapFileFormat.Png);
          inMemoryRandomAccessStream.Seek(0);
          var buffer = new byte[inMemoryRandomAccessStream.Size];
          await inMemoryRandomAccessStream.ReadAsync(buffer.AsBuffer(), (uint)inMemoryRandomAccessStream.Size, InputStreamOptions.None);
          return new Uri($"data:image/png;base64,{Convert.ToBase64String(buffer)}");
        }
      }
    }
Code example #3
File: Photo.cs Project: shawnhar/stuart
        public async Task Save(StorageFile file)
        {
            var image = GetImage();

            // Measure the extent of the image (which may be cropped).
            Rect imageBounds;

            using (var commandList = new CanvasCommandList(sourceBitmap.Device))
            using (var drawingSession = commandList.CreateDrawingSession())
            {
                imageBounds = image.GetBounds(drawingSession);
            }

            // Rasterize the image into a rendertarget.
            using (var renderTarget = new CanvasRenderTarget(sourceBitmap.Device, (float)imageBounds.Width, (float)imageBounds.Height, 96))
            {
                using (var drawingSession = renderTarget.CreateDrawingSession())
                {
                    drawingSession.Blend = CanvasBlend.Copy;

                    drawingSession.DrawImage(image, -(float)imageBounds.X, -(float)imageBounds.Y);
                }

                // Save it out.
                var format = file.FileType.Equals(".png", StringComparison.OrdinalIgnoreCase) ? CanvasBitmapFileFormat.Png : CanvasBitmapFileFormat.Jpeg;

                using (var stream = await file.OpenAsync(FileAccessMode.ReadWrite))
                {
                    stream.Size = 0;

                    await renderTarget.SaveAsync(stream, format);
                }
            }
        }
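Example #3 measures an arbitrary ICanvasImage by borrowing a drawing session from a CanvasCommandList, then rasterizes it with CanvasBlend.Copy so the target's pixels are overwritten rather than alpha-blended. A minimal sketch of that measure-then-rasterize pattern in isolation (the helper name and the fixed 96 DPI are assumptions, not part of the original project):

using Microsoft.Graphics.Canvas;
using Windows.Foundation;

static CanvasRenderTarget RasterizeToRenderTarget(CanvasDevice device, ICanvasImage image)
{
    // Measure: GetBounds needs a resource creator, so borrow a drawing session from a command list.
    Rect bounds;
    using (var commandList = new CanvasCommandList(device))
    using (var measureSession = commandList.CreateDrawingSession())
    {
        bounds = image.GetBounds(measureSession);
    }

    // Rasterize: shift the image so its top-left corner lands at (0, 0) of the render target.
    var target = new CanvasRenderTarget(device, (float)bounds.Width, (float)bounds.Height, 96);
    using (var ds = target.CreateDrawingSession())
    {
        ds.Blend = CanvasBlend.Copy;
        ds.DrawImage(image, -(float)bounds.X, -(float)bounds.Y);
    }

    return target;
}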
Code example #4
		public byte[] DrawStrokeOnImageBackground(IReadOnlyList<InkStroke> strokes, byte[] backgroundImageBuffer)
		{

			var stmbuffer = new InMemoryRandomAccessStream();
			stmbuffer.AsStreamForWrite().AsOutputStream().WriteAsync(backgroundImageBuffer.AsBuffer()).AsTask().Wait();

			CanvasDevice device = CanvasDevice.GetSharedDevice();
			var canbit = CanvasBitmap.LoadAsync(device, stmbuffer, 96).AsTask().Result;


			CanvasRenderTarget renderTarget = new CanvasRenderTarget(device, canbit.SizeInPixels.Width, canbit.SizeInPixels.Height, 96);

			using (var ds = renderTarget.CreateDrawingSession())
			{
				ds.Clear(Colors.Transparent);

				if (backgroundImageBuffer != null)
				{

					ds.DrawImage(canbit);
				}

				ds.DrawInk(strokes);
			}
			var stm = new InMemoryRandomAccessStream();
			renderTarget.SaveAsync(stm, CanvasBitmapFileFormat.Png).AsTask().Wait();
			var readfrom = stm.GetInputStreamAt(0).AsStreamForRead();
			var ms = new MemoryStream();
			readfrom.CopyTo(ms);
			var outputBuffer = ms.ToArray();
			return outputBuffer;
		}
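Example #4 blocks on asynchronous WinRT calls with .Wait() and .Result, which can deadlock when invoked from a UI thread. A hedged async sketch of the same flow (the method name is hypothetical; the steps mirror the example, just awaited):

using System.Collections.Generic;
using System.Runtime.InteropServices.WindowsRuntime;
using System.Threading.Tasks;
using Microsoft.Graphics.Canvas;
using Windows.Storage.Streams;
using Windows.UI;
using Windows.UI.Input.Inking;

public static async Task<byte[]> DrawStrokesOnImageBackgroundAsync(
    IReadOnlyList<InkStroke> strokes, byte[] backgroundImageBuffer)
{
    var device = CanvasDevice.GetSharedDevice();

    using (var input = new InMemoryRandomAccessStream())
    {
        // Copy the encoded background image into a stream Win2D can load from.
        await input.WriteAsync(backgroundImageBuffer.AsBuffer());
        input.Seek(0);

        using (var background = await CanvasBitmap.LoadAsync(device, input, 96))
        using (var renderTarget = new CanvasRenderTarget(
            device, background.SizeInPixels.Width, background.SizeInPixels.Height, 96))
        {
            using (var ds = renderTarget.CreateDrawingSession())
            {
                ds.Clear(Colors.Transparent);
                ds.DrawImage(background);
                ds.DrawInk(strokes);
            }

            // Re-encode the composed image as PNG and return the raw bytes.
            using (var output = new InMemoryRandomAccessStream())
            {
                await renderTarget.SaveAsync(output, CanvasBitmapFileFormat.Png);
                output.Seek(0);
                var bytes = new byte[output.Size];
                await output.ReadAsync(bytes.AsBuffer(), (uint)output.Size, InputStreamOptions.None);
                return bytes;
            }
        }
    }
}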
Code example #5
File: Utils.cs Project: gtarbell/VideoEffect
 public void Process(CanvasBitmap input, CanvasRenderTarget output, TimeSpan time)
 {
     using (CanvasDrawingSession session = output.CreateDrawingSession())
     {
         session.DrawImage(input);
         session.DrawText("Canvas Effect test", 0f, 0f, Colors.Red);
     }
 }
Code example #6
 public void drawText(CanvasRenderTarget crt, Color color)
 {
     using (CanvasDrawingSession ds = crt.CreateDrawingSession())
     {
         ds.DrawTextLayout(textLayout, (float)location.X, (float)location.Y, color);
     }
        
 }
Code example #7
        /// <summary>
        /// Applies a blur to a UI element
        /// </summary>
        /// <param name="sourceElement">UIElement to blur, generally an Image control, but can be anything</param>
        /// <param name="blurAmount">Level of blur to apply</param>
        /// <returns>Blurred UIElement as BitmapImage</returns>
        public static async Task<BitmapImage> BlurElementAsync(this UIElement sourceElement, float blurAmount = 2.0f)
        {
            if (sourceElement == null)
                return null;

            var rtb = new RenderTargetBitmap();
            await rtb.RenderAsync(sourceElement);

            var buffer = await rtb.GetPixelsAsync();
            var array = buffer.ToArray();

            var displayInformation = DisplayInformation.GetForCurrentView();

            using (var stream = new InMemoryRandomAccessStream())
            {
                var pngEncoder = await BitmapEncoder.CreateAsync(BitmapEncoder.PngEncoderId, stream);

                pngEncoder.SetPixelData(BitmapPixelFormat.Bgra8,
                                     BitmapAlphaMode.Premultiplied,
                                     (uint) rtb.PixelWidth,
                                     (uint) rtb.PixelHeight,
                                     displayInformation.RawDpiX,
                                     displayInformation.RawDpiY,
                                     array);

                await pngEncoder.FlushAsync();
                stream.Seek(0);

                var canvasDevice = new CanvasDevice();
                var bitmap = await CanvasBitmap.LoadAsync(canvasDevice, stream);

                var renderer = new CanvasRenderTarget(canvasDevice,
                                                      bitmap.SizeInPixels.Width,
                                                      bitmap.SizeInPixels.Height,
                                                      bitmap.Dpi);

                using (var ds = renderer.CreateDrawingSession())
                {
                    var blur = new GaussianBlurEffect
                    {
                        BlurAmount = blurAmount,
                        Source = bitmap
                    };
                    ds.DrawImage(blur);
                }

                stream.Seek(0);
                await renderer.SaveAsync(stream, CanvasBitmapFileFormat.Png);

                var image = new BitmapImage();
                await image.SetSourceAsync(stream);

                return image;
            }
        }
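A possible call site for the extension above, assuming a page whose XAML declares some UIElement named SourcePanel and an Image control named BlurredImage (both names are made up for illustration):

private async void BlurButton_Click(object sender, RoutedEventArgs e)
{
    // BlurElementAsync returns null when the source element is null, so guard the assignment.
    BitmapImage blurred = await SourcePanel.BlurElementAsync(blurAmount: 4.0f);
    if (blurred != null)
    {
        BlurredImage.Source = blurred;
    }
}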
Code example #8
 public ImageInfo(CanvasRenderTarget image)
 {
     _crt = image;
     BitmapSize bsize = image.SizeInPixels;
     Size size = image.Size;
     _offset = new Point(size.Width / 2.0f, size.Height / 2.0f);
     using (CanvasDrawingSession ds = image.CreateDrawingSession())
     {
         _symbolBounds = image.GetBounds(ds);
         _imageBounds = ShapeUtilities.clone(_symbolBounds);
     }
 }
Code example #9
File: CanvasEffect.cs Project: gtarbell/VideoEffect
 public void Process(CanvasBitmap input, CanvasRenderTarget output, TimeSpan time)
 {
     using (CanvasDrawingSession session = output.CreateDrawingSession())
     {
         session.DrawImage(input);
         session.FillCircle(
             (float)input.Bounds.Width / 2,
             (float)input.Bounds.Height / 2,
             (float)(Math.Min(input.Bounds.Width, input.Bounds.Height) / 2 * Math.Cos(2 * Math.PI * time.TotalSeconds)),
             Colors.Aqua
             );
     }
 }
Code example #10
        //Creates textures of different sizes
        public void CreateParticleBitmaps(bool blur = false)
        {
            SetColors(baseColor);

            particleBitmaps = new CanvasRenderTarget[sizes];

            int i = -1;
            var nextRadius = 0;
            var nextSize = 0;
            var transparent = Color.FromArgb(0, 0, 0, 0);

            float viewportsize = 100; // Here is the trick: if this value is too small, the displacement and the original image appear

            for (int r = 1; r < sizes + 1; r += 1)
            {
                nextRadius = (r * minRadius);
                nextSize = nextRadius * 2;
                CanvasRenderTarget canvas = new CanvasRenderTarget(device, viewportsize, viewportsize, parent.Dpi);
                var center = new Vector2((viewportsize - nextRadius) / 2);

                //The following works like a 'drawing graph': the output of the first pass is the input of the second one.
                using (CanvasDrawingSession targetSession = canvas.CreateDrawingSession())
                {
                    targetSession.Clear(transparent);
                    targetSession.FillCircle(center, nextRadius, outerColor);
                    targetSession.FillCircle(center, nextRadius - 6, innerColor);
                }

                if (!blur)
                {
                    particleBitmaps[++i] = canvas;
                }
                else //Add blur just one time
                {
                    var blurEffect = new GaussianBlurEffect() { BlurAmount = 2f };
                    CanvasRenderTarget blurredcanvas = new CanvasRenderTarget(device, viewportsize, viewportsize, parent.Dpi);
                    blurEffect.Source = canvas;
                    using (CanvasDrawingSession targetSession = blurredcanvas.CreateDrawingSession())
                    {
                        targetSession.Clear(transparent);
                        targetSession.DrawImage(blurEffect);
                    }
                    particleBitmaps[++i] = blurredcanvas;
                }
            }
        }
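The 'drawing graph' comment above is the key idea: a CanvasRenderTarget that has already been drawn into can itself become the Source of an effect that is drawn into a second render target. A minimal sketch of that two-stage pattern on its own (helper name, size, and colors are illustrative):

using Microsoft.Graphics.Canvas;
using Microsoft.Graphics.Canvas.Effects;
using Windows.UI;

static CanvasRenderTarget CreateBlurredCircle(CanvasDevice device, float size, float radius, float dpi)
{
    // Stage 1: draw the sharp circle into an intermediate render target.
    var sharp = new CanvasRenderTarget(device, size, size, dpi);
    using (var ds = sharp.CreateDrawingSession())
    {
        ds.Clear(Colors.Transparent);
        ds.FillCircle(size / 2, size / 2, radius, Colors.OrangeRed);
    }

    // Stage 2: use the first target as the effect's input and draw the result into a second target.
    var blurred = new CanvasRenderTarget(device, size, size, dpi);
    using (var ds = blurred.CreateDrawingSession())
    {
        ds.Clear(Colors.Transparent);
        ds.DrawImage(new GaussianBlurEffect { Source = sharp, BlurAmount = 2f });
    }

    sharp.Dispose();   // the intermediate target is no longer needed once the blur has been rasterized
    return blurred;
}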
Code example #11
File: DpiTests.cs Project: Himansh1306/Win2D
        public void RenderTargetDpiTest()
        {
            const float defaultDpi = 96;
            const float highDpi = defaultDpi * 2;
            const float size = 100;
            const float fractionalSize = 100.8f;

            var device = new CanvasDevice();

            var renderTargetDefault = new CanvasRenderTarget(device, size, size, defaultDpi);
            var renderTargetHigh = new CanvasRenderTarget(device, size, size, highDpi);
            var renderTargetFractionalSize = new CanvasRenderTarget(device, fractionalSize, fractionalSize, highDpi);

            // Check each rendertarget reports the expected DPI.
            Assert.AreEqual(defaultDpi, renderTargetDefault.Dpi);
            Assert.AreEqual(highDpi, renderTargetHigh.Dpi);
            Assert.AreEqual(highDpi, renderTargetFractionalSize.Dpi);

            // Check each rendertarget is of the expected size.
            Assert.AreEqual(size, renderTargetDefault.Size.Width);
            Assert.AreEqual(size, renderTargetDefault.Size.Height);

            Assert.AreEqual(size, renderTargetHigh.Size.Width);
            Assert.AreEqual(size, renderTargetHigh.Size.Height);

            Assert.AreEqual(Math.Round(fractionalSize), renderTargetFractionalSize.Size.Width);
            Assert.AreEqual(Math.Round(fractionalSize), renderTargetFractionalSize.Size.Height);

            // Check sizes in pixels.
            Assert.AreEqual(size, renderTargetDefault.SizeInPixels.Width);
            Assert.AreEqual(size, renderTargetDefault.SizeInPixels.Height);

            Assert.AreEqual(size * highDpi / defaultDpi, renderTargetHigh.SizeInPixels.Width);
            Assert.AreEqual(size * highDpi / defaultDpi, renderTargetHigh.SizeInPixels.Height);

            Assert.AreEqual(Math.Round(fractionalSize * highDpi / defaultDpi), renderTargetFractionalSize.SizeInPixels.Width);
            Assert.AreEqual(Math.Round(fractionalSize * highDpi / defaultDpi), renderTargetFractionalSize.SizeInPixels.Height);

            // Check that drawing sessions inherit the DPI of the rendertarget being drawn onto.
            var drawingSessionDefault = renderTargetDefault.CreateDrawingSession();
            var drawingSessionHigh = renderTargetHigh.CreateDrawingSession();

            Assert.AreEqual(defaultDpi, drawingSessionDefault.Dpi);
            Assert.AreEqual(highDpi, drawingSessionHigh.Dpi);
        }
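The assertions above encode Win2D's DIP-to-pixel rule: a render target's pixel size is its DIP size scaled by dpi / 96 and rounded, and the DIP size it reports back is derived from that rounded pixel count. A small sketch of the arithmetic for the test's inputs (the rounding mode is assumed to match the test's use of Math.Round):

static uint DipsToPixels(float dips, float dpi) => (uint)Math.Round(dips * dpi / 96.0f);

// For the values used in RenderTargetDpiTest:
//   DipsToPixels(100f,    96f) -> 100   (at the default DPI, DIPs and pixels coincide)
//   DipsToPixels(100f,   192f) -> 200   (double DPI means twice as many pixels per DIP)
//   DipsToPixels(100.8f, 192f) -> 202   (100.8 * 2 = 201.6 rounds to 202, so Size reports back 101 DIPs)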
Code example #12
		public byte[] DrawStrokeOnSolidColorBackground(IReadOnlyList<InkStroke> strokes, int width, int height, Color color )
		{														 
			CanvasDevice device = CanvasDevice.GetSharedDevice();
			CanvasRenderTarget renderTarget = new CanvasRenderTarget(device, width, height, 96);

			using (var ds = renderTarget.CreateDrawingSession())
			{
				ds.Clear(color);
				ds.DrawInk(strokes);
			}
			var stm = new InMemoryRandomAccessStream();
			renderTarget.SaveAsync(stm, CanvasBitmapFileFormat.Png).AsTask().Wait();
			var readfrom = stm.GetInputStreamAt(0).AsStreamForRead();
			var ms = new MemoryStream();
			readfrom.CopyTo(ms);
			var outputBuffer = ms.ToArray();
			return outputBuffer;
		}
Code example #13
        private static async Task<CanvasBitmap> DownloadPng(CanvasDevice device, Uri uri)
        {
            try
            {
                return await CanvasBitmap.LoadAsync(device, await CachedData.GetRandomAccessStreamAsync(uri));
            }
            catch (FileNotFoundException)
            {
                var rt = new CanvasRenderTarget(device, 480, 360, 96);

                using (var ds = rt.CreateDrawingSession())
                {
                    ds.Clear(Colors.Transparent);
                    ds.DrawLine(0, 0, (float)rt.Size.Width, (float)rt.Size.Height, Colors.Black, 1);
                    ds.DrawLine(0, (float)rt.Size.Height, (float)rt.Size.Width, 0, Colors.Black, 1);
                }
                return rt;
            }
        }
Code example #14
        private async void BtnSave_Click(object sender, RoutedEventArgs e)
        {
            StorageFolder storageFolder = KnownFolders.SavedPictures;
            var file = await storageFolder.CreateFileAsync("sample.jpg", CreationCollisionOption.ReplaceExisting);

            CanvasDevice device = CanvasDevice.GetSharedDevice();
            CanvasRenderTarget renderTarget = new CanvasRenderTarget(device, (int)MyInkCanvas.ActualWidth, (int)MyInkCanvas.ActualHeight, 96);

            using (var ds = renderTarget.CreateDrawingSession())
            {
                ds.Clear(Colors.White);
                ds.DrawInk(MyInkCanvas.InkPresenter.StrokeContainer.GetStrokes());
            }

            using (var fileStream = await file.OpenAsync(FileAccessMode.ReadWrite))
            {
                await renderTarget.SaveAsync(fileStream, CanvasBitmapFileFormat.Jpeg, 1f);
            }
        }
Code example #15
		private static void RendererImage(Stream stream, SvgImageRendererSettings settings)
		{
			var svg = settings.Document;
			var viewPort = svg.RootElement.ViewPort;
			if (!viewPort.HasValue) throw new ArgumentException(nameof(settings));

			var width = viewPort.Value.Width;
			var height = viewPort.Value.Height;
			var device = CanvasDevice.GetSharedDevice();
			using (var offScreen = new CanvasRenderTarget(device, width, height, settings.Scaling * 96.0F))
			{
				using (var renderer = new Win2dRenderer(offScreen, svg))
				using (var session = offScreen.CreateDrawingSession())
				{
					session.Clear(Colors.Transparent);
					renderer.Render(width, height, session);
				}
				offScreen.SaveAsync(stream.AsRandomAccessStream(), (CanvasBitmapFileFormat)settings.Format, settings.Quality).AsTask().GetAwaiter().GetResult();
			}
		}
Code example #16
File: InkUtils.cs Project: coderox/Template10
            public async Task<SoftwareBitmap> RenderAsync(IEnumerable<InkStroke> inkStrokes, double width, double height)
            {
                var dpi = DisplayInformation.GetForCurrentView().LogicalDpi;
                try
                {
                    var renderTarget = new CanvasRenderTarget(_canvasDevice, (float)width, (float)height, dpi);
                    using (renderTarget)
                    {
                        using (var drawingSession = renderTarget.CreateDrawingSession())
                        {
                            drawingSession.DrawInk(inkStrokes);
                        }

                        return await SoftwareBitmap.CreateCopyFromSurfaceAsync(renderTarget);
                    }
                }
                catch (Exception e) when (_canvasDevice.IsDeviceLost(e.HResult))
                {
                    _canvasDevice.RaiseDeviceLost();
                }

                return null;
            }
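The catch filter above is Win2D's standard device-lost pattern: CanvasDevice.IsDeviceLost identifies the failure and RaiseDeviceLost notifies listeners, while the method itself returns null instead of crashing. A hedged sketch of a caller that retries once on a fresh device (InkRenderer and the field names are hypothetical wrappers around the RenderAsync method shown above):

private async Task<SoftwareBitmap> RenderWithRetryAsync(IEnumerable<InkStroke> strokes, double width, double height)
{
    SoftwareBitmap bitmap = await _inkRenderer.RenderAsync(strokes, width, height);
    if (bitmap == null)
    {
        // RenderAsync swallowed a device-lost exception and raised DeviceLost,
        // so rebuild the renderer against a new shared device and try once more.
        _inkRenderer = new InkRenderer(CanvasDevice.GetSharedDevice());
        bitmap = await _inkRenderer.RenderAsync(strokes, width, height);
    }
    return bitmap;
}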
Code example #17
        public static ImageInfo getIcon(String symbolID, int size, Color color, int outlineSize)
        {
            ImageInfo returnVal = null;
            if (_tgl == null)
                _tgl = TacticalGraphicLookup.getInstance();

            int mapping = _tgl.getCharCodeFromSymbol(symbolID);

            CanvasRenderTarget coreBMP = null;

            SVGPath svgTG = null;
            //SVGPath svgFrame = null;

            if (mapping > 0)
                svgTG = TGSVGTable.getInstance().getSVGPath(mapping);

            //float scale = 1;
            Matrix3x2 mScale, mTranslate;
            Matrix3x2 mIdentity = Matrix3x2.Identity;
            Rect rectF = new Rect();
            Matrix3x2 m = svgTG.CreateMatrix(size, size, out rectF, out mScale, out mTranslate);
            //svgTG.TransformToFitDimensions(size, size);
            Rect rr = svgTG.computeBounds(m);
            
            CanvasDevice device = CanvasDevice.GetSharedDevice();
            //CanvasRenderTarget offscreen = new CanvasRenderTarget(device, width, height, 96);
            coreBMP = new CanvasRenderTarget(device, (int)(rr.Width + 0.5), (int)(rr.Height + 0.5),96);

            using (CanvasDrawingSession cds = coreBMP.CreateDrawingSession())
            {
                svgTG.Draw(cds, Colors.Transparent, 0, color, m);
                cds.DrawRectangle(coreBMP.GetBounds(device), Colors.Red);
            }
            returnVal = new ImageInfo(coreBMP,new Point(coreBMP.Size.Width/2f,coreBMP.Size.Height/2.0f),new Rect(0,0,coreBMP.Size.Width,coreBMP.Size.Height),coreBMP.GetBounds(device));

            return returnVal;
        }
Code example #18
File: CustomThumbnails.cs Project: fengweijp/Win2D
        public static IRandomAccessStream GenerateFromString(string str, string fontFamily, Color color)
        {
            float size = 512;

            using (var device = new CanvasDevice())
            using (var renderTarget = new CanvasRenderTarget(device, size, size, 96))
            {
                using (var ds = renderTarget.CreateDrawingSession())
                {
                    ds.DrawText(str, size / 2, size / 2, color,
                        new CanvasTextFormat()
                        {
                            FontFamily = fontFamily,
                            FontSize = size / 2,
                            HorizontalAlignment = CanvasHorizontalAlignment.Center,
                            VerticalAlignment = CanvasVerticalAlignment.Center
                        });
                }

                InMemoryRandomAccessStream stream = new InMemoryRandomAccessStream();
                renderTarget.SaveAsync(stream, CanvasBitmapFileFormat.Png).AsTask().Wait();
                return stream;
            }
        }
Code example #19
        public void SetPixelBytesReadHazards()
        {
            var device = new CanvasDevice();
            var bitmap = CanvasBitmap.CreateFromBytes(device, new byte[4], 1, 1, DirectXPixelFormat.B8G8R8A8UIntNormalized);
            var renderTarget = new CanvasRenderTarget(device, 2, 1, 96);

            using (var ds = renderTarget.CreateDrawingSession())
            {
                bitmap.SetPixelBytes(new byte[] { 255, 0, 0, 255 });
                ds.DrawImage(bitmap, 0, 0);

                bitmap.SetPixelBytes(new byte[] { 0, 0, 255, 255 });
                ds.DrawImage(bitmap, 1, 0);
            }

            CollectionAssert.AreEqual(new Color[] { Colors.Blue, Colors.Red }, renderTarget.GetPixelColors());
        }
Code example #20
        static async Task<string> GenerateAndSaveTileImage(CanvasDevice device, float width, float height)
        {
            using (var renderTarget = new CanvasRenderTarget(device, width, height, 96))
            {
                using (var ds = renderTarget.CreateDrawingSession())
                {
                    DrawTile(ds, width, height);
                }

                var filename = string.Format("tile-{0}x{1}-{2}.png",
                                (int)width,
                                (int)height,
                                Guid.NewGuid().ToString());

                await renderTarget.SaveAsync(Path.Combine(ApplicationData.Current.LocalFolder.Path, filename));

                return filename;
            }
        }
Code example #21
            void DoTest(float opacity, CanvasBitmap source, CanvasRenderTarget target)
            {
                using (var ds = target.CreateDrawingSession())
                {
                    ds.FillRectangle(target.Bounds, fillPattern);

                    var leftHalf = source.Bounds;
                    leftHalf.Width /= 2;

                    var rightHalf = source.Bounds;
                    rightHalf.Width /= 2;
                    rightHalf.X += rightHalf.Width;

                    // This version calls D2D DrawBitmap
                    ds.DrawImage(source, 0, 0, leftHalf, opacity, CanvasImageInterpolation.Linear);

                    // This version calls D2D DrawImage with emulated opacity
                    ds.DrawImage(source, (float)rightHalf.X, 0, rightHalf, opacity, CanvasImageInterpolation.Cubic);

                    ds.Antialiasing = CanvasAntialiasing.Aliased;
                    ds.DrawLine((float)rightHalf.X, 0, (float)rightHalf.X, (float)rightHalf.Height, Colors.Black, 1, hairline);
                }
            }
Code example #22
            public OffsetDemo(DrawImageEmulations example, CanvasControl sender)
            {
                fillPattern = example.checkedFillPattern;

                var rt = new CanvasRenderTarget(sender, (float)example.tiger.Size.Width, (float)example.tiger.Size.Height, sender.Dpi / 3);
                using (var ds = rt.CreateDrawingSession())
                {
                    ds.DrawImage(example.tiger, rt.Bounds);
                }
                sourceBitmap = rt;

                sourceEffect = new HueRotationEffect()
                {
                    Source = sourceBitmap,
                    Angle = 1
                };

                showSourceRectRT = new CanvasRenderTarget(sender, (float)rt.Size.Width, (float)rt.Size.Height, rt.Dpi);
            }
Code example #23
        public void ProcessFrame(ProcessVideoFrameContext context)
        {
            try
            {
                // When using SupportedMemoryTypes => MediaMemoryTypes.GpuAndCpu we need to check if we're using GPU or CPU for the frame

                // If we're on GPU, use InputFrame.Direct3DSurface
                if (context.InputFrame.SoftwareBitmap == null)
                {
                    using (var inputBitmap = CanvasBitmap.CreateFromDirect3D11Surface(_canvasDevice, context.InputFrame.Direct3DSurface))
                        using (var renderTarget = CanvasRenderTarget.CreateFromDirect3D11Surface(_canvasDevice, context.OutputFrame.Direct3DSurface))
                            using (var ds = renderTarget.CreateDrawingSession())
                            {
                                ds.DrawImage(inputBitmap);
                                ds.DrawImage(this._overlay, inputBitmap.Bounds);
                            }

                    return;
                }

                // If we're on CPU, use InputFrame.SoftwareBitmap
                if (context.InputFrame.Direct3DSurface == null)
                {
                    // InputFrame's raw pixels
                    byte[] inputFrameBytes = new byte[4 * context.InputFrame.SoftwareBitmap.PixelWidth * context.InputFrame.SoftwareBitmap.PixelHeight];
                    context.InputFrame.SoftwareBitmap.CopyToBuffer(inputFrameBytes.AsBuffer());

                    using (var inputBitmap = CanvasBitmap.CreateFromBytes(
                               _canvasDevice,
                               inputFrameBytes,
                               context.InputFrame.SoftwareBitmap.PixelWidth,
                               context.InputFrame.SoftwareBitmap.PixelHeight,
                               context.InputFrame.SoftwareBitmap.BitmapPixelFormat.ToDirectXPixelFormat()))

                        using (var renderTarget = new CanvasRenderTarget(
                                   _canvasDevice,
                                   context.OutputFrame.SoftwareBitmap.PixelWidth,
                                   context.OutputFrame.SoftwareBitmap.PixelHeight,
                                   (float)context.OutputFrame.SoftwareBitmap.DpiX,
                                   context.OutputFrame.SoftwareBitmap.BitmapPixelFormat.ToDirectXPixelFormat(),
                                   CanvasAlphaMode.Premultiplied))
                        {
                            using (var ds = renderTarget.CreateDrawingSession())
                            {
                                ds.DrawImage(inputBitmap);
                                ds.DrawImage(this._overlay, inputBitmap.Bounds);
                            }
                        }
                }
            }
            catch (Exception)
            {
                if (crashCount < 20)
                {
                    crashCount++;
                    Debug.WriteLine($"ProcessFrame Exception: #{crashCount}");
                }
                else
                {
                    //System.Exception HResult = 0x88990012
                    //Message = Objects used together must be created from the same factory instance. (Exception from HRESULT: 0x88990012)
                    //Source = System.Private.CoreLib
                    //StackTrace:
                    //at System.Runtime.InteropServices.WindowsRuntime.IClosable.Close()
                    //at System.Runtime.InteropServices.WindowsRuntime.IClosableToIDisposableAdapter.Dispose()
                    //at VideoEffects.Win2D.OverlayVideoEffect.ProcessFrame(ProcessVideoFrameContext context) in D:\GitHub\VideoDiary\src\VideoDiary.EffectsLibrary\Win2dEffects\OverlayVideoEffect.cs:line 66

                    throw;
                }
            }
        }
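The GPU/CPU branching above only makes sense alongside the rest of the IBasicVideoEffect contract, which decides where frames arrive. A hedged sketch of the related members (assuming the class implements Windows.Media.Effects.IBasicVideoEffect; _canvasDevice is the field the example already uses):

// Advertise that the effect can run on either memory type, which is why ProcessFrame
// has to check whether SoftwareBitmap or Direct3DSurface is populated.
public MediaMemoryTypes SupportedMemoryTypes => MediaMemoryTypes.GpuAndCpu;

public void SetEncodingProperties(VideoEncodingProperties encodingProperties, IDirect3DDevice device)
{
    // 'device' is non-null only when the pipeline runs the effect on the GPU; wrapping it keeps
    // Win2D resources on the same device as the incoming Direct3D surfaces.
    _canvasDevice = device != null
        ? CanvasDevice.CreateFromDirect3D11Device(device)
        : CanvasDevice.GetSharedDevice();
}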
Code example #24
File: MainPage.xaml.cs Project: freds72/Kinematic
        private void Draw()
        {
            using (var ds = currentSurface.CreateDrawingSession())
            {
                ds.Clear(Colors.Black);
                ds.Transform =
                    Matrix3x2.Multiply(
                        Matrix3x2.CreateScale(new Vector2(1, -1)),
                        Matrix3x2.CreateTranslation(new Vector2(simulationW / 2, simulationH / 2)));

                if (_tempMousePositions != null)
                {
                    for (int i = 1; i < _tempMousePositions.Count; i++)
                    {
                        var p1 = new Vector2(_tempMousePositions[i - 1].X - simulationW / 2, simulationH / 2 - _tempMousePositions[i - 1].Y);
                        var p2 = new Vector2(_tempMousePositions[i].X - simulationW / 2, simulationH / 2 - _tempMousePositions[i].Y);
                        ds.DrawLine(p1, p2, Colors.Yellow);
                    }
                }

                bool syncRobot   = false;
                bool beginFigure = false;
                bool endFigure   = false;
                lock (_jobs)
                {
                    if (_jobs.Count > 0 && DateTime.Now >= _wakeUpTime)
                    {
                        // get oldest list
                        List <Vector2> positions = _jobs[0];
                        if (positions.Count > 0)
                        {
                            if (_jobPosIndex < positions.Count)
                            {
                                if (_jobPosIndex == 0)
                                {
                                    beginFigure = true;
                                }
                                _targetPosition = positions[_jobPosIndex++];
                            }
                            else
                            {
                                endFigure = true;
                                _jobs.RemoveAt(0);
                                _jobPosIndex = 0;
                            }

                            if (_socket != null && _socket.Connected)
                            {
                                syncRobot = true;
                            }
                        }
                        // still something to do?
                        if (_jobs.Count > 0)
                        {
                            _wakeUpTime = DateTime.Now + TimeSpan.FromMilliseconds(125);
                        }
                        else
                        {
                            System.Diagnostics.Debug.WriteLine("no jobs pending");
                        }

                        // draw points
                        foreach (List <Vector2> remainingPaths in _jobs)
                        {
                            for (int i = 1; i < remainingPaths.Count; i++)
                            {
                                var p1 = new Vector2(remainingPaths[i - 1].X - simulationW / 2, simulationH / 2 - remainingPaths[i - 1].Y);
                                var p2 = new Vector2(remainingPaths[i].X - simulationW / 2, simulationH / 2 - remainingPaths[i].Y);
                                ds.DrawLine(p1, p2, Colors.LightSteelBlue);
                            }
                        }
                    }
                }

                var mx = _mousePosition.X - simulationW / 2;
                var my = simulationH / 2 - _mousePosition.Y;
                ds.DrawImage(_bitmap, new Vector2(mx, my));

                // stick to last known position
                var x = _targetPosition.X - simulationW / 2;
                var y = simulationH / 2 - _targetPosition.Y;

                Vector2 pos           = new Vector2(x, y);
                Vector2 normalizedPos = Vector2.Normalize(pos);
                if (pos.LengthSquared() > (l1 + l2) * (l1 + l2))
                {
                    Vector2 clamped = Vector2.Multiply((float)(l1 + l2), normalizedPos);
                    x = clamped.X;
                    y = clamped.Y;
                }

                // http://thingsiamdoing.com/inverse-kinematics/
                double L  = Math.Sqrt(x * x + y * y);
                double a  = Math.Acos((l1 * l1 + L * L - l2 * l2) / (2 * l1 * L));
                double b  = Math.Acos((l1 * l1 + l2 * l2 - L * L) / (2 * l1 * l2));
                double XL = Math.Atan2(normalizedPos.Y, normalizedPos.X) - Math.Atan2(0, 1);
                if (XL < 0)
                {
                    XL += 2 * Math.PI;
                }
                double o1 = XL - a;
                double o2 = Math.PI - b + o1;

                ds.DrawCircle(Vector2.Zero, l1 + l2, Colors.Gray, 2);
                // robot arm
                ds.DrawLine(Vector2.Zero, new Vector2((float)(l1 * Math.Cos(o1)), (float)(l1 * Math.Sin(o1))), Colors.Green, 10);
                ds.DrawLine(
                    new Vector2((float)(l1 * Math.Cos(o1)), (float)(l1 * Math.Sin(o1))),
                    new Vector2((float)(l1 * Math.Cos(o1) + l2 * Math.Cos(o2)), (float)(l1 * Math.Sin(o1) + l2 * Math.Sin(o2))), Colors.Red, 10);

                ds.Transform = Matrix3x2.CreateTranslation(new Vector2(simulationW / 2, simulationH / 2));
                ds.DrawText(string.Format("{0:0.00}:{1:0.00}", 180 * o1 / Math.PI, 180 * (b) / Math.PI), new Vector2(x, -y), Colors.Green);

                if (syncRobot)
                {
                    // send that command to the robot
                    SocketAsyncEventArgs completeArgs = new SocketAsyncEventArgs();
                    string robotCommand = "";
                    if (beginFigure)
                    {
                        robotCommand = "MOV;{0:0.00};{1:0.00}\0DWN;\0";
                    }
                    else if (endFigure)
                    {
                        robotCommand = "UP;\0";
                    }
                    else
                    {
                        robotCommand = "MOV;{0:0.00};{1:0.00}\0";
                    }

                    byte[] buffer = Encoding.ASCII.GetBytes(string.Format(robotCommand, 180 * o1 / Math.PI, (180 * (b) / Math.PI)));
                    completeArgs.SetBuffer(buffer, 0, buffer.Length);
                    completeArgs.UserToken      = _socket;
                    completeArgs.RemoteEndPoint = _socket.RemoteEndPoint;
                    _socket.SendAsync(completeArgs);
                }
            }
        }
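The block after the inverse-kinematics link solves a two-link arm with the law of cosines. A standalone check of those formulas (no Win2D involved; method names are illustrative), worked for l1 = l2 = 1 and a target at (1, 1):

// Same math as Draw(), minus the 0..2*PI wrap of the base angle (that wrap only adds a full turn).
// L is the distance to the target, a and b come from the law of cosines,
// o1 is the shoulder angle and o2 the absolute angle of the forearm.
static (double o1, double o2) SolveTwoLinkIK(double x, double y, double l1, double l2)
{
    double L = Math.Sqrt(x * x + y * y);
    double a = Math.Acos((l1 * l1 + L * L - l2 * l2) / (2 * l1 * L));
    double b = Math.Acos((l1 * l1 + l2 * l2 - L * L) / (2 * l1 * l2));
    double o1 = Math.Atan2(y, x) - a;
    double o2 = Math.PI - b + o1;
    return (o1, o2);
}

// With l1 = l2 = 1 and target (1, 1): L = sqrt(2), a = 45 deg, b = 90 deg, so o1 = 0 and o2 = 90 deg.
// Forward kinematics land back on the target: (cos 0 + cos 90, sin 0 + sin 90) = (1, 1).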
Code example #25
        //private bool correctionFlag = false;

        public void ProcessFrame(ProcessVideoFrameContext context)
        {
            using (CanvasBitmap inputBitmap = CanvasBitmap.CreateFromDirect3D11Surface(_canvasDevice, context.InputFrame.Direct3DSurface))
                using (CanvasRenderTarget renderTarget = CanvasRenderTarget.CreateFromDirect3D11Surface(_canvasDevice, context.OutputFrame.Direct3DSurface))
                    using (CanvasDrawingSession ds = renderTarget.CreateDrawingSession())
                        using (var scaleEffect = new ScaleEffect())
                            using (CanvasSolidColorBrush solidColorBrush = new CanvasSolidColorBrush(_canvasDevice, _backgroundColor))
                            {
                                solidColorBrush.Opacity = _backgroundOpacity;
                                double rel = context.InputFrame.RelativeTime.Value.Ticks / (double)TimeSpan.TicksPerMillisecond;

                                //context.OutputFrame.Duration = new TimeSpan( (long)(frameLength * TimeSpan.TicksPerMillisecond));



                                int frameTimeCounter = (int)Math.Round(rel / _frameLength, 0);

                                int[] pitch = new int[_count];
                                int[] yaw   = new int[_count];
                                int[] fov   = new int[_count];

                                for (int i = 0; i < _count; i++)
                                {
                                    try
                                    {
                                        //pitch[i] = this.pitch[ (frameTimeCounter + (int)Math.Round(offset, 0)) * (count) + i];
                                        //fov[i] = this.fov[ (frameTimeCounter + (int)Math.Round(offset, 0)) * (count) + i];
                                        //yaw[i] = this.yaw[ (frameTimeCounter + (int)Math.Round(offset, 0)) * (count) + i];

                                        pitch[i] = this._pitch[(frameTimeCounter + (int)_offset) * (_count) + i];
                                        fov[i]   = this._fov[(frameTimeCounter + (int)_offset) * (_count) + i];
                                        yaw[i]   = this._yaw[(frameTimeCounter + (int)_offset) * (_count) + i];
                                    }
                                    catch (ArgumentOutOfRangeException ex)
                                    {
                                        Debug.WriteLine(ex.Message);
                                        pitch[i] = 0;
                                        fov[i]   = 0;
                                        yaw[i]   = 0;
                                    }
                                }

                                byte[]       tab = Heatmap.GenerateHeatmap(pitch, yaw, fov);
                                CanvasBitmap cb  = CanvasBitmap.CreateFromBytes(_canvasDevice, tab, 64, 64, Windows.Graphics.DirectX.DirectXPixelFormat.B8G8R8A8UIntNormalized, 96, CanvasAlphaMode.Premultiplied);
                                scaleEffect.Source            = cb;
                                scaleEffect.Scale             = new System.Numerics.Vector2((float)_width / 64, (float)_height / 64);
                                scaleEffect.InterpolationMode = CanvasImageInterpolation.Cubic;
                                scaleEffect.BorderMode        = EffectBorderMode.Hard;


                                if (_graysclaleVideoFlag)
                                {
                                    var grayScaleEffect = new GrayscaleEffect
                                    {
                                        BufferPrecision = CanvasBufferPrecision.Precision8UIntNormalized,
                                        CacheOutput     = false,
                                        Source          = inputBitmap
                                    };
                                    ds.DrawImage(grayScaleEffect);
                                }
                                else
                                {
                                    ds.DrawImage(inputBitmap);
                                }

                                ds.DrawImage(scaleEffect, 0, 0, new Windows.Foundation.Rect {
                                    Height = _height, Width = _width
                                }, _heatmapOpacity);



                                if (_generateDots)
                                {
                                    for (int i = 0; i < _count; i++)
                                    {
                                        ds.FillCircle(yaw[i] * _width / 64, pitch[i] * _height / 64, _dotsRadius, _colors[i % 5]);
                                    }
                                }



                                ds.FillRectangle(new Windows.Foundation.Rect {
                                    Height = _height, Width = _width
                                }, solidColorBrush);

                                ds.Flush();
                            }
        }
Code example #26
        public static async Task <ExportResult> ExportPngAsync(
            ExportStyle style,
            InstalledFont selectedFont,
            FontVariant selectedVariant,
            Character selectedChar,
            CanvasTextLayoutAnalysis analysis,
            CanvasTypography typography,
            AppSettings settings)
        {
            try
            {
                string name = GetFileName(selectedFont, selectedVariant, selectedChar, "png");
                if (await PickFileAsync(name, "PNG Image", new[] { ".png" }) is StorageFile file)
                {
                    CachedFileManager.DeferUpdates(file);

                    if (analysis.GlyphFormats.Contains(GlyphImageFormat.Png))
                    {
                        IBuffer buffer = GetGlyphBuffer(selectedVariant.FontFace, selectedChar.UnicodeIndex, GlyphImageFormat.Png);
                        await FileIO.WriteBufferAsync(file, buffer);
                    }
                    else
                    {
                        var device   = Utils.CanvasDevice;
                        var localDpi = 96; //Windows.Graphics.Display.DisplayInformation.GetForCurrentView().LogicalDpi;

                        var canvasH = (float)settings.PngSize;
                        var canvasW = (float)settings.PngSize;

                        using var renderTarget = new CanvasRenderTarget(device, canvasW, canvasH, localDpi);
                        using (var ds = renderTarget.CreateDrawingSession())
                        {
                            ds.Clear(Colors.Transparent);
                            var d = settings.PngSize;
                            var r = settings.PngSize / 2;

                            var textColor = style == ExportStyle.Black ? Colors.Black : Colors.White;
                            var fontSize  = (float)d;

                            using CanvasTextLayout layout = new CanvasTextLayout(device, $"{selectedChar.Char}", new CanvasTextFormat
                            {
                                FontSize            = fontSize,
                                FontFamily          = selectedVariant.Source,
                                FontStretch         = selectedVariant.FontFace.Stretch,
                                FontWeight          = selectedVariant.FontFace.Weight,
                                FontStyle           = selectedVariant.FontFace.Style,
                                HorizontalAlignment = CanvasHorizontalAlignment.Center,
                                Options             = style == ExportStyle.ColorGlyph ? CanvasDrawTextOptions.EnableColorFont : CanvasDrawTextOptions.Default
                            }, canvasW, canvasH);

                            if (style == ExportStyle.ColorGlyph)
                            {
                                layout.Options = CanvasDrawTextOptions.EnableColorFont;
                            }

                            layout.SetTypography(0, 1, typography);

                            var    db    = layout.DrawBounds;
                            double scale = Math.Min(1, Math.Min(canvasW / db.Width, canvasH / db.Height));
                            var    x     = -db.Left + ((canvasW - (db.Width * scale)) / 2d);
                            var    y     = -db.Top + ((canvasH - (db.Height * scale)) / 2d);

                            ds.Transform =
                                Matrix3x2.CreateTranslation(new Vector2((float)x, (float)y))
                                * Matrix3x2.CreateScale(new Vector2((float)scale));

                            ds.DrawTextLayout(layout, new Vector2(0), textColor);
                        }

                        using var fileStream = await file.OpenAsync(FileAccessMode.ReadWrite);

                        fileStream.Size = 0;
                        await renderTarget.SaveAsync(fileStream, CanvasBitmapFileFormat.Png, 1f);
                    }

                    await CachedFileManager.CompleteUpdatesAsync(file);

                    return(new ExportResult(true, file));
                }
            }
            catch (Exception ex)
            {
                await Ioc.Default.GetService <IDialogService>()
                .ShowMessageAsync(ex.Message, Localization.Get("SaveImageError"));
            }

            return(new ExportResult(false, null));
        }
Code example #27
        public CanvasRenderTarget applyEdgeDetectionEffects(CanvasBitmap workingBitmap)
        {
            //CanvasBitmap workingBitmap = SelectWorkingBitmap(useOriginalBitmap);


            if (workingBitmap != null)
            {
                int ww = (int)workingBitmap.SizeInPixels.Width;
                int hh = (int)workingBitmap.SizeInPixels.Height;

                //GrayscaleEffect grayscaleEffect = new GrayscaleEffect();
                //grayscaleEffect.Source=canvasBitmap;

                ContrastEffect contrastEffect = new ContrastEffect();
                contrastEffect.Contrast = (float)edgeDetectionContrast;
                contrastEffect.Source   = workingBitmap;

                ExposureEffect exposureEffect = new ExposureEffect();
                exposureEffect.Source   = contrastEffect;
                exposureEffect.Exposure = (float)edgeDetectionExposure;


                EdgeDetectionEffect edgeDetectionEffect = new EdgeDetectionEffect();
                edgeDetectionEffect.Source     = exposureEffect;
                edgeDetectionEffect.Amount     = (float)edgeDetectionAmount;
                edgeDetectionEffect.BlurAmount = (float)edgeDetectionBlurAmount;
                //edgeDetectionEffect.OverlayEdges = true;
                //edgeDetectionEffect.Mode = EdgeDetectionEffectMode.Prewitt;

                GrayscaleEffect grayscaleEffect = null;
                if (edgeDetectionGrayscale)
                {
                    grayscaleEffect            = new GrayscaleEffect();
                    grayscaleEffect.Source     = exposureEffect;
                    edgeDetectionEffect.Source = grayscaleEffect;
                }

                InvertEffect invertEdgeEffect = null;
                if (edgeDetectionMaskInvert)
                {
                    invertEdgeEffect        = new InvertEffect();
                    invertEdgeEffect.Source = edgeDetectionEffect;
                }



                BlendEffect blendEffect = null;
                if (edgeDetectionOverlayImage)
                {
                    OpacityEffect opacityEffect = new OpacityEffect();
                    opacityEffect.Opacity = (float)edgeDetectionOverlayOpacity;
                    opacityEffect.Source  = workingBitmap;

                    blendEffect            = new BlendEffect();
                    blendEffect.Foreground = edgeDetectionEffect;
                    blendEffect.Background = opacityEffect;
                    if (edgeDetectionMaskInvert)
                    {
                        //blendEffect.Background = invertEdgeEffect;
                        //blendEffect.Foreground = opacityEffect;

                        InvertEffect invertOrgEffect = new InvertEffect();
                        invertOrgEffect.Source = opacityEffect;
                        blendEffect.Background = invertOrgEffect;
                    }

                    blendEffect.Mode = edgeDetectionBlendEffectMode;
                }


                //if (canvasRenderTarget != null)
                //    canvasRenderTarget.Dispose();
                CanvasRenderTarget canvasRenderTarget = new CanvasRenderTarget(CanvasDevice.GetSharedDevice(), ww, hh, canvasBitmap.Dpi);
                using (var session = canvasRenderTarget.CreateDrawingSession())
                {
                    if (edgeDetectionOverlayImage)
                    {
                        session.DrawImage(blendEffect);
                    }
                    else
                    {
                        if (edgeDetectionMaskInvert)
                        {
                            session.DrawImage(invertEdgeEffect);
                        }
                        else
                        {
                            session.DrawImage(edgeDetectionEffect);
                        }
                    }
                }

                return(canvasRenderTarget);
            }

            return(null);
        }
Code example #28
        public CanvasRenderTarget apply3DLightingEffects(CanvasBitmap workingBitmap)
        {
            //CanvasBitmap workingBitmap = SelectWorkingBitmap(useOriginalBitmap);

            if (workingBitmap != null)
            {
                int ww = (int)workingBitmap.SizeInPixels.Width;
                int hh = (int)workingBitmap.SizeInPixels.Height;

                //LuminanceToAlphaEffect heightField = new LuminanceToAlphaEffect();
                //heightField.Source = workingBitmap;


                LuminanceToAlphaEffect heightMap = new LuminanceToAlphaEffect();
                heightMap.Source = workingBitmap;

                GaussianBlurEffect heightField = new GaussianBlurEffect();
                heightField.BlurAmount = (float)gaussianBlurAmount;
                heightField.Source     = heightMap;
                heightField.BorderMode = EffectBorderMode.Soft;

                DistantDiffuseEffect distantDiffuseEffect = new DistantDiffuseEffect();
                distantDiffuseEffect.Source                     = heightField;
                distantDiffuseEffect.HeightMapScale             = (float)distantDiffuseEffectHeightMapScale;
                distantDiffuseEffect.HeightMapInterpolationMode = CanvasImageInterpolation.HighQualityCubic;
                distantDiffuseEffect.Azimuth                    = (float)distantAzimuth;
                distantDiffuseEffect.Elevation                  = (float)distantElevation;
                distantDiffuseEffect.LightColor                 = distantLightColor;
                distantDiffuseEffect.HeightMapKernelSize        = new System.Numerics.Vector2((float)distantDiffuseKernelWidth, (float)distantDiffuseKernelWidth);
                //distantDiffuseEffect.DiffuseAmount = (float) distantDiffuseAmount;

                DistantSpecularEffect distantSpecularEffect = new DistantSpecularEffect();
                distantSpecularEffect.Source                     = heightField;
                distantSpecularEffect.SpecularExponent           = (float)distantSpecularEffectSpecularExponent;
                distantSpecularEffect.HeightMapInterpolationMode = CanvasImageInterpolation.HighQualityCubic;
                distantSpecularEffect.HeightMapKernelSize        = new System.Numerics.Vector2((float)distantSpecularKernelWidth, (float)distantSpecularKernelWidth);
                distantSpecularEffect.HeightMapScale             = (float)distantSpecularHeightMapScale;
                distantSpecularEffect.Azimuth                    = (float)distantAzimuth;
                distantSpecularEffect.Elevation                  = (float)distantElevation;
                //distantSpecularEffect.SpecularAmount = (float)distantSpecularAmount;
                //distantSpecularEffect.LightColor = distantLightColor;

                ArithmeticCompositeEffect arithmeticCompositeEffect = new ArithmeticCompositeEffect();
                arithmeticCompositeEffect.Source1 = distantDiffuseEffect;
                //arithmeticCompositeEffect.Source1 = blendedDiffuseEffect;
                arithmeticCompositeEffect.Source2        = distantSpecularEffect;
                arithmeticCompositeEffect.Source1Amount  = 1;
                arithmeticCompositeEffect.Source2Amount  = 1;
                arithmeticCompositeEffect.MultiplyAmount = 0;

                SaturationEffect saturationEffect = new SaturationEffect();
                saturationEffect.Source     = workingBitmap;
                saturationEffect.Saturation = (float)distantDiffuseSaturation;

                ArithmeticCompositeEffect blendedDiffuseEffect = new ArithmeticCompositeEffect();
                //blendedDiffuseEffect.Source1 = workingBitmap;
                blendedDiffuseEffect.Source1        = saturationEffect;
                blendedDiffuseEffect.Source2        = arithmeticCompositeEffect;
                blendedDiffuseEffect.Source1Amount  = 0;
                blendedDiffuseEffect.Source2Amount  = 0;
                blendedDiffuseEffect.MultiplyAmount = 1;


                //if (canvasRenderTarget != null)
                //    canvasRenderTarget.Dispose();
                CanvasRenderTarget canvasRenderTarget = new CanvasRenderTarget(CanvasDevice.GetSharedDevice(), ww, hh, canvasBitmap.Dpi);
                using (var session = canvasRenderTarget.CreateDrawingSession())
                {
                    //session.DrawImage(arithmeticCompositeEffect);
                    session.DrawImage(blendedDiffuseEffect);
                }

                return(canvasRenderTarget);
            }

            return(null);
        }
Code example #29
        private void DrawBackGround(CanvasDevice device, RenderingOptions options)
        {
            float useHeight = (float)ChartWin2DCanvas.Size.Height;
            float useWidth = (float)ChartWin2DCanvas.Size.Width;

            //this is always drawn in the size of control, so all texts drawn in it are sharp
            _offscreenBackGround = new CanvasRenderTarget(device, useWidth, useHeight, 96);

            using (CanvasDrawingSession ds = _offscreenBackGround.CreateDrawingSession())
            {
                ds.Clear(BACKGROUND_COLOR);
                //draw lines
                DrawGraphValueLines(ds, useWidth, useHeight);
                //draw value texts
                DrawYAxisTexts(ds, useHeight, options);
            }
        }
Code example #30
        void canvas_Draw(CanvasControl sender, CanvasDrawEventArgs args)
        {
            // We animate the source image by changing which character is highlighted in yellow.
            // Therefore there can be two changed regions: the highlighted character has changed from
            // white to yellow, while the previous highlight has changed from yellow back to white.

            // Look up the bounds of the two changed characters.
            var highlightBounds = GetCharacterBounds(highlightedCharacter);
            var previousBounds  = GetCharacterBounds(previousHighlight);

            // Tell our effects that the highlighted character region has changed.
            blurEffect.InvalidateSourceRectangle(args.DrawingSession, 0, highlightBounds);
            shadowEffect.InvalidateSourceRectangle(args.DrawingSession, 0, highlightBounds);

            // Query what part of the output image will change as a result.
            var highlightInvalidRects = compositeEffect.GetInvalidRectangles(args.DrawingSession);
            var highlightInvalidUnion = GetRectangleUnion(highlightInvalidRects);

            // Also tell the effects about the previously highlighted character.
            blurEffect.InvalidateSourceRectangle(args.DrawingSession, 0, previousBounds);
            shadowEffect.InvalidateSourceRectangle(args.DrawingSession, 0, previousBounds);

            // Query the output region again. This will return a superset of highlightInvalidRects,
            // as it now accounts for the change to previousBounds as well as highlightBounds.
            var totalInvalidRects = compositeEffect.GetInvalidRectangles(args.DrawingSession);
            var totalInvalidUnion = GetRectangleUnion(totalInvalidRects);

            // We can also look up in the opposite direction: given that we are going to redraw only
            // the totalInvalidUnion area, what portion of each source image is needed to do that?
            // When using filter kernels like blur, this will be larger than just highlightBounds+previousBounds.
            var requiredSourceRects = compositeEffect.GetRequiredSourceRectangles(args.DrawingSession,
                                                                                  totalInvalidUnion,
                                                                                  new ICanvasEffect[] { blurEffect, shadowEffect },
                                                                                  new uint[] { 0, 0 },
                                                                                  new Rect[2] {
                sourceRenderTarget.Bounds, sourceRenderTarget.Bounds
            });

            // How about if we were going to redraw only highlightBounds, skipping previousBounds?
            // (we don't actually do this, but do display what source regions it would require).
            var blurSourceRect   = compositeEffect.GetRequiredSourceRectangle(args.DrawingSession, highlightInvalidUnion, blurEffect, 0, sourceRenderTarget.Bounds);
            var shadowSourceRect = compositeEffect.GetRequiredSourceRectangle(args.DrawingSession, highlightInvalidUnion, shadowEffect, 0, sourceRenderTarget.Bounds);

            // Draw text into the source rendertarget.
            using (var drawingSession = sourceRenderTarget.CreateDrawingSession())
            {
                // To make sure the correct requiredSourceRects were reported, we clear the background outside
                // that region to magenta. If everything is working correctly, this should never be picked up by
                // effect drawing, as we only leave magenta in the areas we don't expect the effects to read from.
                drawingSession.Clear(Colors.Magenta);

                // Clear the requiredSourceRects to transparent.
                drawingSession.Blend = CanvasBlend.Copy;

                foreach (var r in requiredSourceRects)
                {
                    drawingSession.FillRectangle(r, Colors.Transparent);
                }

                // Draw the text characters.
                drawingSession.Blend = CanvasBlend.SourceOver;

                for (int i = 0; i < characterLayouts.Count; i++)
                {
                    var color = (i == highlightedCharacter) ? Colors.Yellow : Colors.White;

                    drawingSession.DrawTextLayout(characterLayouts[i], characterPositions[i], color);
                }
            }

            // Draw the effect graph (which reads from sourceRenderTarget) into destRenderTarget.
            using (var drawingSession = destRenderTarget.CreateDrawingSession())
            {
                // Slightly darken down the existing contents of the output rendertarget. This causes everything
                // except totalInvalidUnion to gradually fade out, so we can see which areas are getting redrawn.
                // If this FillRectangle was removed, the result of redrawing only the changed region would be
                // identical to if we redrew the whole thing every time (by removing the CreateLayer call).
                drawingSession.FillRectangle(destRenderTarget.Bounds, Color.FromArgb(16, 0, 0, 0));

                // Clip our drawing to the totalInvalidUnion rectangle,
                // which should be the only part of the output that has changed.
                using (var layer = drawingSession.CreateLayer(1, totalInvalidUnion))
                {
                    drawingSession.Clear(Colors.CornflowerBlue);
                    drawingSession.DrawImage(compositeEffect, totalInvalidUnion, totalInvalidUnion);
                }
            }

            if (!ThumbnailGenerator.IsDrawingThumbnail)
            {
                args.DrawingSession.Transform = Matrix3x2.CreateTranslation(gap, gap);

                // Display sourceRenderTarget.
                args.DrawingSession.DrawImage(sourceRenderTarget);

                // Display highlightBounds, blurSourceRect, and shadowSourceRect.
                args.DrawingSession.DrawRectangle(highlightBounds, Colors.Gray);
                args.DrawingSession.DrawRectangle(blurSourceRect, Colors.Blue);
                args.DrawingSession.DrawRectangle(shadowSourceRect, Colors.Blue);
            }

            args.DrawingSession.Transform = Matrix3x2.CreateTranslation(gap, gap * 2 + height);

            // Display destRenderTarget.
            args.DrawingSession.DrawImage(destRenderTarget);

            // Display highlightInvalidRects.
            foreach (var i in highlightInvalidRects)
            {
                args.DrawingSession.DrawRectangle(i, Colors.DarkBlue);
            }

            previousHighlight = highlightedCharacter;

            // When generating thumbnails, repeat the first draw a bunch of times to reach a more interesting image.
            if (ThumbnailGenerator.IsDrawingThumbnail && highlightedCharacter < characterLayouts.Count * 5 / 6)
            {
                highlightedCharacter++;
                canvas_Draw(sender, args);
            }
        }
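
The invalidate/query pattern above boils down to a few calls. Below is a minimal sketch (not part of the original sample) that assumes a single GaussianBlurEffect whose source index 0 reads from a rendertarget: invalidate the changed source region, ask the effect which output rectangles are now stale, then redraw only those through a clipping layer.

        // Requires: Microsoft.Graphics.Canvas, Microsoft.Graphics.Canvas.Effects, Windows.Foundation.
        static void RedrawChangedRegion(CanvasRenderTarget destination, GaussianBlurEffect blur, Rect changedSourceRect)
        {
            using (var ds = destination.CreateDrawingSession())
            {
                // Tell the effect which part of its source (index 0) has changed...
                blur.InvalidateSourceRectangle(ds, 0, changedSourceRect);

                // ...then ask which parts of the effect output are now stale.
                Rect[] invalidRects = blur.GetInvalidRectangles(ds);

                foreach (var rect in invalidRects)
                {
                    // Clip to each stale rectangle and redraw just that part of the output.
                    using (ds.CreateLayer(1, rect))
                    {
                        ds.DrawImage(blur, rect, rect);
                    }
                }
            }
        }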
Code Example #31
0
        private async void ExportImage()
        {
            CanvasDevice device = CanvasDevice.GetSharedDevice();

            using (CanvasRenderTarget offscreen = new CanvasRenderTarget(
                       device, item.ImageProperties.Width, item.ImageProperties.Height, 96))
            {
                using (IRandomAccessStream stream = await item.ImageFile.OpenReadAsync())
                    using (CanvasBitmap image = await CanvasBitmap.LoadAsync(offscreen, stream, 96))
                    {
                        ImageEffectsBrush.SetSource(image);

                        using (CanvasDrawingSession ds = offscreen.CreateDrawingSession())
                        {
                            ds.Clear(Windows.UI.Colors.Black);

                            var img = ImageEffectsBrush.Image;
                            ds.DrawImage(img);
                        }

                        var fileSavePicker = new FileSavePicker()
                        {
                            SuggestedStartLocation = PickerLocationId.PicturesLibrary,
                            SuggestedSaveFile      = item.ImageFile
                        };

                        fileSavePicker.FileTypeChoices.Add("JPEG files", new List <string>()
                        {
                            ".jpg"
                        });

                        var outputFile = await fileSavePicker.PickSaveFileAsync();

                        if (outputFile != null)
                        {
                            using (IRandomAccessStream outStream = await outputFile.OpenAsync(FileAccessMode.ReadWrite))
                            {
                                await offscreen.SaveAsync(outStream, CanvasBitmapFileFormat.Jpeg);
                            }

                            // Check whether this save is overwriting the original image.
                            // If it is, replace it in the list. Otherwise, insert it as a copy.
                            bool replace = false;
                            if (outputFile.IsEqual(item.ImageFile))
                            {
                                replace = true;
                            }

                            try
                            {
                                await LoadSavedImageAsync(outputFile, replace);
                            }
                            catch (Exception ex)
                            {
                                if (ex.Message.Contains("0x80070323"))
                                {
                                    // The handle with which this oplock was associated has been closed.
                                    // The oplock is now broken. (Exception from HRESULT: 0x80070323)
                                    // This is a temporary condition, so just try again.
                                    await LoadSavedImageAsync(outputFile, replace);
                                }
                            }
                        }
                    }
            }
        }
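
The core of the export path above is: draw an ICanvasImage into an offscreen CanvasRenderTarget, then encode it with SaveAsync. A condensed, hypothetical helper (not from the original project) showing just that flow:

        // Requires: System.Threading.Tasks, Microsoft.Graphics.Canvas, Windows.Storage, Windows.Storage.Streams, Windows.UI.
        static async Task SaveImageAsJpegAsync(ICanvasImage image, float width, float height, StorageFile outputFile)
        {
            CanvasDevice device = CanvasDevice.GetSharedDevice();

            using (var offscreen = new CanvasRenderTarget(device, width, height, 96))
            {
                using (var ds = offscreen.CreateDrawingSession())
                {
                    ds.Clear(Colors.Black);
                    ds.DrawImage(image);
                }

                using (IRandomAccessStream stream = await outputFile.OpenAsync(FileAccessMode.ReadWrite))
                {
                    await offscreen.SaveAsync(stream, CanvasBitmapFileFormat.Jpeg);
                }
            }
        }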
Code Example #32
0
        public static async Task <StorageFile> DrawStrokesAsync(StorageFile file, IReadOnlyList <SmoothPathBuilder> strokes, Rect rectangle, BitmapRotation rotation, BitmapFlip flip)
        {
            var device = CanvasDevice.GetSharedDevice();
            var bitmap = await CanvasBitmap.LoadAsync(device, file.Path);

            var canvas1 = new CanvasRenderTarget(device, (float)bitmap.Size.Width, (float)bitmap.Size.Height, bitmap.Dpi);
            var canvas2 = new CanvasRenderTarget(device, (float)bitmap.Size.Width, (float)bitmap.Size.Height, bitmap.Dpi);

            var size       = canvas1.Size.ToVector2();
            var canvasSize = canvas1.Size.ToVector2();

            var scaleX = 1 / (float)rectangle.Width;
            var scaleY = 1 / (float)rectangle.Height;

            var offsetX = (float)rectangle.X * scaleX;
            var offsetY = (float)rectangle.Y * scaleY;

            if (rotation == BitmapRotation.Clockwise270Degrees ||
                rotation == BitmapRotation.Clockwise90Degrees)
            {
                size = new Vector2(size.Y, size.X);

                scaleX = scaleY;
                scaleY = 1 * 1 / (float)rectangle.Width;
            }

            using (var session = canvas1.CreateDrawingSession())
            {
                switch (rotation)
                {
                case BitmapRotation.Clockwise90Degrees:
                    var transform1 = Matrix3x2.CreateRotation(MathFEx.ToRadians(90));
                    transform1.Translation = new Vector2(size.Y, 0);
                    session.Transform      = transform1;
                    break;

                case BitmapRotation.Clockwise180Degrees:
                    var transform2 = Matrix3x2.CreateRotation(MathFEx.ToRadians(180));
                    transform2.Translation = new Vector2(size.X, size.Y);
                    session.Transform      = transform2;
                    break;

                case BitmapRotation.Clockwise270Degrees:
                    var transform3 = Matrix3x2.CreateRotation(MathFEx.ToRadians(270));
                    transform3.Translation = new Vector2(0, size.X);
                    session.Transform      = transform3;
                    break;
                }

                switch (flip)
                {
                case BitmapFlip.Horizontal:
                    switch (rotation)
                    {
                    case BitmapRotation.Clockwise90Degrees:
                    case BitmapRotation.Clockwise270Degrees:
                        session.Transform = Matrix3x2.Multiply(session.Transform, Matrix3x2.CreateScale(1, -1, canvasSize / 2));
                        break;

                    default:
                        session.Transform = Matrix3x2.Multiply(session.Transform, Matrix3x2.CreateScale(-1, 1, canvasSize / 2));
                        break;
                    }
                    break;

                case BitmapFlip.Vertical:
                    switch (rotation)
                    {
                    case BitmapRotation.None:
                    case BitmapRotation.Clockwise180Degrees:
                        session.Transform = Matrix3x2.Multiply(session.Transform, Matrix3x2.CreateScale(1, -1, canvasSize / 2));
                        break;

                    default:
                        session.Transform = Matrix3x2.Multiply(session.Transform, Matrix3x2.CreateScale(-1, 1, canvasSize / 2));
                        break;
                    }
                    break;
                }

                session.Transform = Matrix3x2.Multiply(session.Transform, Matrix3x2.CreateScale(scaleX, scaleY));
                session.Transform = Matrix3x2.Multiply(session.Transform, Matrix3x2.CreateTranslation(-(offsetX * size.X), -(offsetY * size.Y)));

                foreach (var builder in strokes)
                {
                    PencilCanvas.DrawPath(session, builder, size);
                }
            }

            using (var session = canvas2.CreateDrawingSession())
            {
                session.DrawImage(bitmap);
                session.DrawImage(canvas1);
            }

            bitmap.Dispose();

            using (var stream = await file.OpenAsync(FileAccessMode.ReadWrite))
            {
                await canvas2.SaveAsync(stream, CanvasBitmapFileFormat.Jpeg /*, 0.77f*/);
            }

            canvas2.Dispose();
            canvas1.Dispose();

            return(file);
        }
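
Each rotation case above pairs Matrix3x2.CreateRotation with a translation that brings the rotated content back into view. A minimal sketch of the 90-degree case (using Math.PI / 2 in place of the sample's MathFEx helper):

        // Requires: System, System.Numerics.
        static Matrix3x2 Rotate90IntoView(Vector2 originalSize)
        {
            // In screen coordinates a +90-degree rotation about the origin pushes the image into negative X,
            // so translate by the rotated width (which equals the original height) to bring it back on-canvas.
            var transform = Matrix3x2.CreateRotation((float)(Math.PI / 2));
            transform.Translation = new Vector2(originalSize.Y, 0);
            return transform;
        }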
Code Example #33
0
            static CanvasBitmap MakeThumbnailPretty(CanvasBitmap capturedBitmap, float thumbnailWidth, float thumbnailHeight, Rect targetRect)
            {
                var pixelColors = capturedBitmap.GetPixelColors();

                // Remove any unused space around the edge of the bitmap, so it will fill the thumbnail.
                Rect cropRect = CropCapturedBitmap(capturedBitmap, pixelColors);

                // Choose a (hopefully) aesthetically pleasing background color.
                Color backgroundColor = ChooseBackgroundColor(pixelColors);

                // Apply letterbox scaling to fit the image into the target thumbnail.
                Vector2 outputSize = new Vector2((float)targetRect.Width, (float)targetRect.Height);
                var     sourceSize = new Vector2((float)cropRect.Width, (float)cropRect.Height);
                var     letterbox  = Utils.GetDisplayTransform(outputSize, sourceSize);
                var     translate  = Matrix3x2.CreateTranslation((float)targetRect.X, (float)targetRect.Y);

                // Position the image where we want it.
                var scaledImage = new Transform2DEffect
                {
                    Source = new AtlasEffect
                    {
                        Source          = capturedBitmap,
                        SourceRectangle = cropRect,
                    },
                    InterpolationMode = CanvasImageInterpolation.HighQualityCubic,
                    TransformMatrix   = letterbox * translate,
                };

                // Create the final thumbnail image.
                var finalImage = new CompositeEffect
                {
                    Sources =
                    {
                        // Blurred shadow.
                        new ShadowEffect
                        {
                            Source = new MorphologyEffect
                            {
                                Source = scaledImage,
                                Mode   = MorphologyEffectMode.Dilate,
                                Width  = dilateAmount,
                                Height = dilateAmount,
                            },
                            BlurAmount = blurAmount,
                        },

                        // Overlay the image itself.
                        scaledImage
                    }
                };

                // Rasterize the effect into a rendertarget.
                CanvasRenderTarget output = new CanvasRenderTarget(capturedBitmap.Device, thumbnailWidth, thumbnailHeight, 96);

                using (var ds = output.CreateDrawingSession())
                {
                    ds.Clear(backgroundColor);
                    ds.DrawImage(finalImage);
                }

                return(output);
            }
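
Utils.GetDisplayTransform comes from the sample's shared helper library; a rough stand-in (uniform scale-to-fit plus centering, ignoring DPI) might look like this sketch:

        // Requires: System, System.Numerics.
        static Matrix3x2 GetLetterboxTransform(Vector2 outputSize, Vector2 sourceSize)
        {
            // Scale uniformly so the source fits inside the output, then center the scaled result.
            float scale = Math.Min(outputSize.X / sourceSize.X, outputSize.Y / sourceSize.Y);
            Vector2 offset = (outputSize - sourceSize * scale) / 2;

            return Matrix3x2.CreateScale(scale) * Matrix3x2.CreateTranslation(offset);
        }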
Code Example #34
0
        public void ProcessFrame(ProcessVideoFrameContext context)
        {
            //using (CanvasBitmap inputBitmap = CanvasBitmap.CreateFromDirect3D11Surface(_canvasDevice, context.InputFrame.Direct3DSurface))
            //using (CanvasRenderTarget renderTarget = CanvasRenderTarget.CreateFromDirect3D11Surface(_canvasDevice, context.OutputFrame.Direct3DSurface))
            //using (CanvasDrawingSession ds = renderTarget.CreateDrawingSession())
            //{
            //    var invert = new InvertEffect()
            //    {
            //        Source = inputBitmap
            //    };
            //    ds.DrawImage(invert);

            //}

            // When using SupportedMemoryTypes => MediaMemoryTypes.GpuAndCpu we need to check if we're using GPU or CPU for the frame

            // If we're on GPU, use InputFrame.Direct3DSurface
            if (context.InputFrame.SoftwareBitmap == null)
            {
                using (var inputBitmap = CanvasBitmap.CreateFromDirect3D11Surface(_canvasDevice, context.InputFrame.Direct3DSurface))
                    using (var renderTarget = CanvasRenderTarget.CreateFromDirect3D11Surface(_canvasDevice, context.OutputFrame.Direct3DSurface))
                        using (var ds = renderTarget.CreateDrawingSession())
                        {
                            var invert = new InvertEffect()
                            {
                                Source = inputBitmap
                            };
                            ds.DrawImage(invert);
                        }

                return;
            }

            // If we're on CPU, use InputFrame.SoftwareBitmap
            if (context.InputFrame.Direct3DSurface == null)
            {
                // InputFrame's raw pixels
                byte[] inputFrameBytes = new byte[4 * context.InputFrame.SoftwareBitmap.PixelWidth * context.InputFrame.SoftwareBitmap.PixelHeight];
                context.InputFrame.SoftwareBitmap.CopyToBuffer(inputFrameBytes.AsBuffer());

                using (var inputBitmap = CanvasBitmap.CreateFromBytes(
                           _canvasDevice,
                           inputFrameBytes,
                           context.InputFrame.SoftwareBitmap.PixelWidth,
                           context.InputFrame.SoftwareBitmap.PixelHeight,
                           context.InputFrame.SoftwareBitmap.BitmapPixelFormat.ToDirectXPixelFormat()))

                    using (var renderTarget = new CanvasRenderTarget(
                               _canvasDevice,
                               context.OutputFrame.SoftwareBitmap.PixelWidth,
                               context.OutputFrame.SoftwareBitmap.PixelHeight,
                               (float)context.OutputFrame.SoftwareBitmap.DpiX,
                               context.OutputFrame.SoftwareBitmap.BitmapPixelFormat.ToDirectXPixelFormat(),
                               CanvasAlphaMode.Premultiplied))
                    {
                        using (var ds = renderTarget.CreateDrawingSession())
                        {
                            var invert = new InvertEffect()
                            {
                                Source = inputBitmap
                            };
                            ds.DrawImage(invert);
                        }
                    }
            }
        }
Code Example #35
0
        private ICanvasImage AddTextOverlay(ICanvasImage effect, float x, float y)
        {
            var textOverlay = new CanvasRenderTarget(canvas, 200, 30);

            using (var ds = textOverlay.CreateDrawingSession())
            {
                ds.Clear(Color.FromArgb(0, 0, 0, 0));
                ds.DrawText(effect.GetType().Name.Replace("Effect", ""), 0, 0, Colors.White);
            }

            return new Transform2DEffect
            {
                Source = new BlendEffect
                {
                    Background = effect,
                    Foreground = textOverlay,
                    Mode = BlendEffectMode.Screen
                },
                TransformMatrix = Matrix3x2.CreateTranslation(x, y)
            };
        }
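
Hypothetical usage of the helper above, assuming a loaded CanvasBitmap named bitmap and an active drawing session ds in the same class: label a blur effect and draw the combined result.

        var labeled = AddTextOverlay(new GaussianBlurEffect { Source = bitmap, BlurAmount = 5 }, 10, 10);
        ds.DrawImage(labeled);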
Code Example #36
0
    /// <summary>
    /// Recognize the text from handwriting using Microsoft Azure service.
    /// </summary>
    public async void RecognizeInking(IReadOnlyList <InkStroke> strokeList, double pageWidth, double pageHeight)
    {
        // Current bounding box for the strokes.
        double tlX = double.MaxValue;
        double tlY = double.MaxValue;
        double brX = 0;
        double brY = 0;

        inkingStatus = SERVICE_STATUS.PENDING;
        // Make a copy of this list
        List <InkStroke> newList = new List <InkStroke>();

        foreach (InkStroke ss in strokeList)
        {
            newList.Add(ss);

            tlX = Math.Min(tlX, ss.BoundingRect.Left);
            tlY = Math.Min(tlY, ss.BoundingRect.Top);
            brX = Math.Max(brX, ss.BoundingRect.Right);
            brY = Math.Max(brY, ss.BoundingRect.Bottom);
        }
        double originalCroppedWidth  = brX - tlX;
        double originalCroppedHeight = brY - tlY;

        // Create boundary
        tlX = Math.Max(0, tlX - originalCroppedWidth * 0.2);
        tlY = Math.Max(0, tlY - originalCroppedHeight * 0.4);
        brX = Math.Min(pageWidth, brX + originalCroppedWidth * 0.2);
        brY = Math.Min(pageHeight, brY + originalCroppedHeight * 0.4);
        originalCroppedWidth  = brX - tlX;
        originalCroppedHeight = brY - tlY;

        StorageFolder storageFolder = KnownFolders.CameraRoll;
        var           file          = await storageFolder.CreateFileAsync("sampleInking.jpg", CreationCollisionOption.GenerateUniqueName);

        // Render a whole image (paper size * inking scale)
        CanvasDevice       device       = CanvasDevice.GetSharedDevice();
        CanvasRenderTarget renderTarget = new CanvasRenderTarget(device, (float)pageWidth, (float)pageHeight, 96);

        //await Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(CoreDispatcherPriority.Normal,
        //    () =>
        //    {
        //        using (var ds = renderTarget.CreateDrawingSession())
        //        {
        //            ds.Clear(Colors.White);
        //            ds.DrawInk(strokeList);
        //        }
        //    });
        using (var ds = renderTarget.CreateDrawingSession())
        {
            ds.Clear(Colors.White);
            ds.DrawInk(newList);
        }

        // Crop the image: using same algorithm as in OCR method.
        // croppedBytes: image bytes.
        // byteData: final format, with bmp header.
        byte[] byteData = new byte[1];
        byte[] croppedBytes = new byte[1];
        uint   width = 0, height = 0;

        using (var fileStream = await file.OpenAsync(FileAccessMode.ReadWrite))
        {
            await renderTarget.SaveAsync(fileStream, CanvasBitmapFileFormat.Jpeg, 1f);

            //Debug.Log("Save to:" + file.Name);

            // Crop this image.
            // Create a decoder from the stream. With the decoder, we can get the properties of the image.
            BitmapDecoder decoder = await BitmapDecoder.CreateAsync(fileStream);

            // make sure the image is larger than 50x50 in real pixels.
            double scale = Math.Max(1.0f, Math.Max(AZURE_API_MIN_SIZE / originalCroppedWidth, AZURE_API_MIN_SIZE / originalCroppedHeight));

            uint startPointX = (uint)Math.Floor(tlX * scale);
            uint startPointY = (uint)Math.Floor(tlY * scale);
            width  = (uint)Math.Floor(originalCroppedWidth * scale);
            height = (uint)Math.Floor(originalCroppedHeight * scale);

            // The scaledSize of original image.
            uint scaledWidth  = (uint)Math.Floor(decoder.PixelWidth * scale);
            uint scaledHeight = (uint)Math.Floor(decoder.PixelHeight * scale);

            // Refine the start point and the size.
            if (startPointX + width > scaledWidth)
            {
                startPointX = scaledWidth - width;
            }

            if (startPointY + height > scaledHeight)
            {
                startPointY = scaledHeight - height;
            }

            // Get the cropped pixels.
            BitmapTransform transform = new BitmapTransform();
            BitmapBounds    bounds    = new BitmapBounds();
            bounds.X         = startPointX;
            bounds.Y         = startPointY;
            bounds.Height    = height;
            bounds.Width     = width;
            transform.Bounds = bounds;

            transform.ScaledWidth  = scaledWidth;
            transform.ScaledHeight = scaledHeight;

            // Get the cropped pixels within the bounds of transform.
            PixelDataProvider pix = await decoder.GetPixelDataAsync(
                BitmapPixelFormat.Bgra8,
                BitmapAlphaMode.Straight,
                transform,
                ExifOrientationMode.IgnoreExifOrientation,
                ColorManagementMode.ColorManageToSRgb);

            croppedBytes = pix.DetachPixelData();
            //Debug.Log(string.Format("Crop Handwritten image: start: {0},{1}, width:{2}, height:{3}", bounds.X, bounds.Y, bounds.Width, bounds.Height));
        }
        await file.DeleteAsync();

        // Again, the byte[] has to go through a stream/encoder round trip to become an encoded image.
        // https://code.msdn.microsoft.com/windowsapps/How-to-save-WriteableBitmap-bd23d455
        var tempFile = await storageFolder.CreateFileAsync("temp-sampleInking.jpg", CreationCollisionOption.GenerateUniqueName);

        using (var stream = await tempFile.OpenAsync(FileAccessMode.ReadWrite))
        {
            BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, stream);

            encoder.SetPixelData(BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore, width, height, 96, 96, croppedBytes);
            await encoder.FlushAsync();

            var reader = new DataReader(stream.GetInputStreamAt(0));
            byteData = new byte[stream.Size];
            await reader.LoadAsync((uint)stream.Size);

            reader.ReadBytes(byteData);
        }
        await tempFile.DeleteAsync();

        //ReadHandwrittenText("");
        HttpClient client = new HttpClient();

        // Request headers.
        client.DefaultRequestHeaders.Add("Ocp-Apim-Subscription-Key", azureKey);

        // Request parameter. Set "handwriting" to false for printed text.
        string requestParameters = "handwriting=true";

        // Assemble the URI for the REST API Call.
        string uri = azureUri + "recognizeText?" + requestParameters;

        HttpResponseMessage response = null;

        // This operation requires two REST API calls. One to submit the image for processing,
        // the other to retrieve the text found in the image. This value stores the REST API
        // location to call to retrieve the text.
        string operationLocation = null;

        // Request body. Posts a locally stored JPEG image.
        //byte[] byteData = canvasImg;

        ByteArrayContent content = new ByteArrayContent(byteData);

        // This example uses content type "application/octet-stream".
        // You can also use "application/json" and specify an image URL.
        content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");

        // The first REST call starts the async process to analyze the written text in the image.
        response = await client.PostAsync(uri, content);

        // The response contains the URI to retrieve the result of the process.
        if (response.IsSuccessStatusCode)
        {
            operationLocation = response.Headers.GetValues("Operation-Location").FirstOrDefault();
        }
        else
        {
            // Display the JSON error data.
            string errInfo = "RecognizeInking: PostAsync Response Error." + response.StatusCode.ToString();
            inkingResult = errInfo;
            inkingStatus = SERVICE_STATUS.ERROR;
            Debug.Log(errInfo + "\n");
            //Debug.Log(JsonPrettyPrint(await response.Content.ReadAsStringAsync()));
            return;
        }

        // The second REST call retrieves the text written in the image.
        //
        // Note: The response may not be immediately available. Handwriting recognition is an
        // async operation that can take a variable amount of time depending on the length
        // of the handwritten text. You may need to wait or retry this operation.
        //
        // This example checks once per second for ten seconds.
        string contentString;
        int    i = 0;

        do
        {
            await Task.Delay(1000);

            response = await client.GetAsync(operationLocation);

            contentString = await response.Content.ReadAsStringAsync();

            ++i;
        }while (i < 10 && contentString.IndexOf("\"status\":\"Succeeded\"") == -1);

        if (i == 10 && contentString.IndexOf("\"status\":\"Succeeded\"") == -1)
        {
            string errInfo = "RecognizeInking: Timeout Error.";
            inkingResult = errInfo;
            Debug.Log(errInfo + "\n");
            inkingStatus = SERVICE_STATUS.ERROR;
            return;
        }

        // Display the JSON response.
        //Debug.Log("\nResponse:\n");
        //Debug.Log(JsonPrettyPrint(contentString));
        // Parse to output the result.
        var result = JsonConvert.DeserializeObject <JSONInking.RootObject>(contentString).recognitionResult;
        var texts  = new List <string>();

        foreach (var line in result.lines)
        {
            texts.Add(line.text);
        }

        if (texts.Count > 0)
        {
            inkingResult = string.Join(" ", texts);
            //Debug.Log("Inking Recognition succeeded:" + inkingResult);
            inkingStatus = SERVICE_STATUS.DONE;
        }
        else
        {
            string errInfo = "Inking Recognition succeeded but the result is empty.";
            inkingResult = errInfo;
            Debug.Log(errInfo);
            inkingStatus = SERVICE_STATUS.ERROR;
        }
    }
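
The ink-rasterization step on its own is much shorter. A hypothetical helper (sketch only; the original also crops, rescales, and posts the result to the recognition service):

        // Requires: System.Collections.Generic, System.Threading.Tasks, Microsoft.Graphics.Canvas, Windows.Storage, Windows.Storage.Streams, Windows.UI, Windows.UI.Input.Inking.
        static async Task SaveInkAsJpegAsync(IReadOnlyList<InkStroke> strokes, float pageWidth, float pageHeight, StorageFile file)
        {
            CanvasDevice device = CanvasDevice.GetSharedDevice();

            using (var renderTarget = new CanvasRenderTarget(device, pageWidth, pageHeight, 96))
            {
                using (var ds = renderTarget.CreateDrawingSession())
                {
                    ds.Clear(Colors.White);
                    ds.DrawInk(strokes);
                }

                using (IRandomAccessStream stream = await file.OpenAsync(FileAccessMode.ReadWrite))
                {
                    await renderTarget.SaveAsync(stream, CanvasBitmapFileFormat.Jpeg, 1f);
                }
            }
        }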
Code Example #37
0
        private async void HomeContentDialog_PrimaryButtonClick(ContentDialog sender, ContentDialogButtonClickEventArgs args)
        {
            HomeContentDialog.Hide();
            LoadingControl.IsLoading = true; //Con: loading control
            await Task.Delay(400);

            //1. Create an XDocument object
            XDocument    xDoc = new XDocument();
            XDeclaration XDec = new XDeclaration("1.0", "utf-8", "no");

            // Set the XML document declaration
            xDoc.Declaration = XDec;


            //2. Create the root node
            XElement root = new XElement("Layers");

            xDoc.Add(root);

            XElement Width = new XElement("Width", App.Model.Width);

            root.Add(Width);
            XElement Height = new XElement("Height", App.Model.Height);

            root.Add(Height);
            XElement Tool = new XElement("Tool", App.Model.Tool);

            root.Add(Tool);
            XElement Index = new XElement("Index", App.Model.Index);

            root.Add(Index);


            //3. Create the main image
            ICanvasImage ci = App.Model.NullRenderTarget;

            for (int i = App.Model.Layers.Count - 1; i >= 0; i--)  // render from the bottom layer up
            {
                ci = App.RenderTransform(App.Model.Layers[i], ci); // render
            }
            using (CanvasDrawingSession ds = App.Model.SecondBottomRenderTarget.CreateDrawingSession())
            {
                ds.Clear(Color.FromArgb(0, 0, 0, 0));
                ds.DrawImage(ci);
            }
            var      MainBytes        = App.Model.SecondBottomRenderTarget.GetPixelBytes(); // convert the bitmap to byte[]
            var      MainString       = Convert.ToBase64String(MainBytes);                  // convert the byte[] to a string
            XElement MainCanvasRender = new XElement("MainRenderTarget", MainString);       // write the string into an XML node

            root.Add(MainCanvasRender);


            //4. Create a node for each layer
            foreach (var l in App.Model.Layers)
            {
                XElement Layer = new XElement("Layer");
                root.Add(Layer);

                // Create the layer's elements
                XElement LayerName = new XElement("LayerName", l.Name);
                Layer.Add(LayerName);
                XElement LayerVisual = new XElement("LayerVisual", l.Visual);
                Layer.Add(LayerVisual);
                XElement LayerOpacity = new XElement("LayerOpacity", l.Opacity);
                Layer.Add(LayerOpacity);
                XElement LayerBlendIndex = new XElement("LayerBlendIndex", l.BlendIndex);
                Layer.Add(LayerBlendIndex);

                XElement LayerWidth = new XElement("LayerWidth", l.CanvasRenderTarget.SizeInPixels.Width);
                Layer.Add(LayerWidth);
                XElement LayerHeight = new XElement("LayerHeight", l.CanvasRenderTarget.SizeInPixels.Height);
                Layer.Add(LayerHeight);

                var      Bytes = l.CanvasRenderTarget.GetPixelBytes();                 // convert the bitmap to byte[]
                var      str   = Convert.ToBase64String(Bytes);                        // convert the byte[] to a string
                XElement CanvasRenderTarget = new XElement("CanvasRenderTarget", str); // write the string into an XML node
                Layer.Add(CanvasRenderTarget);
            }


            //5. Save
            string path = ApplicationData.Current.LocalFolder.Path + "/" + App.Model.Name + ".photo"; // path where the XML file will be saved

            xDoc.Save(path);


            //6. Thumbnail (scaled down so the smaller dimension is 256)


            // Thumbnail scale factor
            float scale = App.Model.Width < App.Model.Height ? 256.0f / App.Model.Width : 256.0f / App.Model.Height;

            // Scaled width/height (also determines the left/right/top/bottom offsets)
            float W = scale * App.Model.Width;
            float H = scale * App.Model.Height;

            CanvasRenderTarget crt = new CanvasRenderTarget(App.Model.VirtualControl, W, H);

            using (CanvasDrawingSession ds = crt.CreateDrawingSession())
            {
                // Draw the thumbnail
                ds.DrawImage(new ScaleEffect
                {
                    Source = App.Model.MainRenderTarget,
                    Scale  = new Vector2(scale)
                });
            }
            Library.Image.SavePng(ApplicationData.Current.LocalFolder, crt, App.Model.Name, CreationCollisionOption.ReplaceExisting);

            LoadingControl.IsLoading = false; //Con: loading control

            // Frame.GoBack();
            App.Model.StartVisibility = Visibility.Visible;
        }
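
The save path above stores each layer's pixels as a Base64 string; a matching restore step (sketch only, assuming a 96-DPI target whose pixel size equals the width/height saved in the XML, in the default B8G8R8A8 premultiplied format) could rebuild the rendertarget with SetPixelBytes:

        // Requires: System, Microsoft.Graphics.Canvas.
        static CanvasRenderTarget RestoreLayer(ICanvasResourceCreator device, string base64Pixels, float width, float height)
        {
            byte[] pixels = Convert.FromBase64String(base64Pixels);

            // At 96 DPI the rendertarget's pixel size equals its DIP size, so the saved byte count matches.
            var target = new CanvasRenderTarget(device, width, height, 96);
            target.SetPixelBytes(pixels);
            return target;
        }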
Code Example #38
0
            private CanvasRenderTarget CreateSourceImage(CanvasControl sender, CanvasAlphaMode alphaMode)
            {
                var image = new CanvasRenderTarget(sender, 64, 64, sender.Dpi, DirectXPixelFormat.B8G8R8A8UIntNormalized, alphaMode);
                
                using (var ds = image.CreateDrawingSession())
                {
                    ds.Clear(Colors.Transparent);

                    ds.FillRectangle(0, 0, 64, 16, Colors.Blue);
                    ds.FillRectangle(0, 32 - 8, 64, 16, Color.FromArgb(128, 0, 0, 255));
                    ds.FillRectangle(0, 64 - 16, 64, 16, Colors.Blue);

                    ds.FillCircle(32, 32, 16, Colors.Yellow);
                }
                return image;
            }
Code Example #39
0
        async Task GenerateIcon(AppInfo appInfo, IconInfo iconInfo, StorageFolder folder)
        {
            // Draw the icon image into a command list.
            var commandList = new CanvasCommandList(device);

            using (var ds = commandList.CreateDrawingSession())
            {
                appInfo.DrawIconImage(ds, iconInfo);
            }

            ICanvasImage iconImage = commandList;

            // Rasterize into a rendertarget.
            var renderTarget = new CanvasRenderTarget(device, iconInfo.Width, iconInfo.Height, 96);

            using (var ds = renderTarget.CreateDrawingSession())
            {
                // Initialize with the appropriate background color.
                ds.Clear(iconInfo.TransparentBackground ? Colors.Transparent : appInfo.BackgroundColor);

                // Work out where to position the icon image.
                var imageBounds = iconImage.GetBounds(ds);

                imageBounds.Height *= 1 + iconInfo.BottomPadding;

                float scaleUpTheSmallerIcons = Math.Max(1, 1 + (60f - iconInfo.Width) / 50f);

                float imageScale = appInfo.ImageScale * scaleUpTheSmallerIcons;

                var transform = Matrix3x2.CreateTranslation((float)-imageBounds.X, (float)-imageBounds.Y) *
                                Utils.GetDisplayTransform(renderTarget.Size.ToVector2(), new Vector2((float)imageBounds.Width, (float)imageBounds.Height)) *
                                Matrix3x2.CreateScale(imageScale, renderTarget.Size.ToVector2() / 2);

                if (iconInfo.Monochrome)
                {
                    // Optionally convert to monochrome.
                    iconImage = new DiscreteTransferEffect
                    {
                        Source = new Transform2DEffect
                        {
                            Source = new LuminanceToAlphaEffect {
                                Source = iconImage
                            },
                            TransformMatrix = transform
                        },

                        RedTable   = new float[] { 1 },
                        GreenTable = new float[] { 1 },
                        BlueTable  = new float[] { 1 },
                        AlphaTable = new float[] { 0, 1 }
                    };
                }
                else
                {
                    ds.Transform = transform;

                    // Optional shadow effect.
                    if (appInfo.AddShadow)
                    {
                        var shadow = new ShadowEffect
                        {
                            Source     = iconImage,
                            BlurAmount = 12,
                        };

                        ds.DrawImage(shadow);
                    }
                }

                // draw the main icon image.
                ds.DrawImage(iconImage);
            }

            // Save to a file.
            using (var stream = await folder.OpenStreamForWriteAsync(iconInfo.Filename, CreationCollisionOption.ReplaceExisting))
            {
                await renderTarget.SaveAsync(stream.AsRandomAccessStream(), CanvasBitmapFileFormat.Png);
            }
        }
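
The monochrome branch above can be isolated into a small sketch: LuminanceToAlphaEffect converts brightness to alpha, and DiscreteTransferEffect then forces the color to solid white while snapping alpha to 0 or 1.

        // Requires: Microsoft.Graphics.Canvas, Microsoft.Graphics.Canvas.Effects.
        static ICanvasImage ToMonochrome(ICanvasImage source)
        {
            return new DiscreteTransferEffect
            {
                Source = new LuminanceToAlphaEffect { Source = source },

                // Force RGB to white; quantize alpha to either 0 or 1.
                RedTable   = new float[] { 1 },
                GreenTable = new float[] { 1 },
                BlueTable  = new float[] { 1 },
                AlphaTable = new float[] { 0, 1 }
            };
        }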
Code Example #40
0
        private async void PlayBackImage_ImageOpened(IRandomAccessStream stream)
        {
            if (CurrentSong != null)
            {
                {
                    var device = new CanvasDevice();
                    var bitmap = await CanvasBitmap.LoadAsync(device, stream);

                    var renderer = new CanvasRenderTarget(device,
                                                          bitmap.SizeInPixels.Width,
                                                          bitmap.SizeInPixels.Height, bitmap.Dpi);

                    using (var ds = renderer.CreateDrawingSession())
                    {
                        var blur = new GaussianBlurEffect
                        {
                            Source = bitmap
                        };
                        blur.BlurAmount = 16.0f;
                        blur.BorderMode = EffectBorderMode.Hard;
                        ds.DrawImage(blur);
                    }

                    stream.Seek(0);
                    await renderer.SaveAsync(stream, CanvasBitmapFileFormat.Png);
                    stream.Seek(0);
                    BitmapImage image = new BitmapImage();
                    image.SetSource(stream);
                    BackgroundBlur.Source = image;
                    renderer = null;
                    bitmap = null;
                    device = null;
                    GC.Collect();
                }
            }
        }
Code Example #41
0
        // ** Methods ** //

        // This is run for every video frame passed through the media pipeline (MediaPlayer, MediaCapture, etc.)
        public void ProcessFrame(ProcessVideoFrameContext context)
        {
            evaluatableVideoFrame = VideoFrame.CreateWithDirect3D11Surface(context.InputFrame.Direct3DSurface);

            // ********** Draw Bounding Boxes with Win2D ********** //

            // Use Direct3DSurface if using GPU memory
            if (context.InputFrame.Direct3DSurface != null)
            {
                if (modelBindingComplete && options.PreferredDeviceKind != LearningModelDeviceKindPreview.LearningDeviceGpu)
                {
                    options.PreferredDeviceKind = LearningModelDeviceKindPreview.LearningDeviceGpu;
                }

                using (var inputBitmap = CanvasBitmap.CreateFromDirect3D11Surface(canvasDevice, context.InputFrame.Direct3DSurface))
                    using (var renderTarget = CanvasRenderTarget.CreateFromDirect3D11Surface(canvasDevice, context.OutputFrame.Direct3DSurface))
                        using (var ds = renderTarget.CreateDrawingSession())
                        {
                            ds.DrawImage(inputBitmap);

                            foreach (var box in filteredBoxes)
                            {
                                var x = (uint)Math.Max(box.X, 0);
                                var y = (uint)Math.Max(box.Y, 0);
                                var w = (uint)Math.Min(renderTarget.Bounds.Width - x, box.Width);
                                var h = (uint)Math.Min(renderTarget.Bounds.Height - y, box.Height);

                                // Draw the Text 10px above the top of the bounding box
                                ds.DrawText(box.Label, x, y - 10, Colors.Yellow);
                                ds.DrawRectangle(new Rect(x, y, w, h), new CanvasSolidColorBrush(canvasDevice, Colors.Yellow), 2f);
                            }
                        }

                return;
            }

            // Use SoftwareBitmap if using CPU memory
            if (context.InputFrame.SoftwareBitmap != null)
            {
                if (modelBindingComplete && options.PreferredDeviceKind != LearningModelDeviceKindPreview.LearningDeviceCpu)
                {
                    options.PreferredDeviceKind = LearningModelDeviceKindPreview.LearningDeviceCpu;
                }

                // InputFrame's pixels
                byte[] inputFrameBytes = new byte[4 * context.InputFrame.SoftwareBitmap.PixelWidth * context.InputFrame.SoftwareBitmap.PixelHeight];
                context.InputFrame.SoftwareBitmap.CopyToBuffer(inputFrameBytes.AsBuffer());

                using (var inputBitmap = CanvasBitmap.CreateFromBytes(canvasDevice, inputFrameBytes, context.InputFrame.SoftwareBitmap.PixelWidth, context.InputFrame.SoftwareBitmap.PixelHeight, context.InputFrame.SoftwareBitmap.BitmapPixelFormat.ToDirectXPixelFormat()))
                    using (var renderTarget = new CanvasRenderTarget(canvasDevice, context.OutputFrame.SoftwareBitmap.PixelWidth, context.OutputFrame.SoftwareBitmap.PixelHeight, (float)context.OutputFrame.SoftwareBitmap.DpiX, context.OutputFrame.SoftwareBitmap.BitmapPixelFormat.ToDirectXPixelFormat(), CanvasAlphaMode.Premultiplied))
                        using (var ds = renderTarget.CreateDrawingSession())
                        {
                            ds.DrawImage(inputBitmap);

                            foreach (var box in filteredBoxes)
                            {
                                var x = (uint)Math.Max(box.X, 0);
                                var y = (uint)Math.Max(box.Y, 0);
                                var w = (uint)Math.Min(context.OutputFrame.SoftwareBitmap.PixelWidth - x, box.Width);
                                var h = (uint)Math.Min(context.OutputFrame.SoftwareBitmap.PixelHeight - y, box.Height);

                                // Draw the Text 10px above the top of the bounding box
                                ds.DrawText(box.Label, x, y - 10, Colors.Yellow);
                                ds.DrawRectangle(new Rect(x, y, w, h), new CanvasSolidColorBrush(canvasDevice, Colors.Yellow), 2f);
                            }
                        }
            }
        }
Code Example #42
0
        private async void loadImage_Click(object sender, RoutedEventArgs e)
        {
            FileOpenPicker fileOpenPicker = new FileOpenPicker();

            fileOpenPicker.SuggestedStartLocation = PickerLocationId.PicturesLibrary;
            fileOpenPicker.FileTypeFilter.Add(".jpg");
            fileOpenPicker.ViewMode = PickerViewMode.Thumbnail;

            var inputFile = await fileOpenPicker.PickSingleFileAsync();

            if (inputFile == null)
            {
                // The user cancelled the picking operation
                return;
            }

            SoftwareBitmap softwareBitmap;

            using (IRandomAccessStream stream = await inputFile.OpenAsync(FileAccessMode.ReadWrite))
            {
                // Create the decoder from the stream
                BitmapDecoder decoder = await BitmapDecoder.CreateAsync(stream);

                // Get the SoftwareBitmap representation of the file
                softwareBitmap = await decoder.GetSoftwareBitmapAsync();
            }

            if (softwareBitmap.BitmapPixelFormat != BitmapPixelFormat.Bgra8 || softwareBitmap.BitmapAlphaMode == BitmapAlphaMode.Straight)
            {
                softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
            }

            var source = new SoftwareBitmapSource();
            await source.SetBitmapAsync(softwareBitmap);

            imageControl.Source = source;

            var device = CanvasDevice.GetSharedDevice();
            var image  = default(CanvasBitmap);

            using (var s = await inputFile.OpenReadAsync())
            {
                image = await CanvasBitmap.LoadAsync(device, s);
            }

            var offscreen = new CanvasRenderTarget(device, (float)image.Bounds.Width, (float)image.Bounds.Height, 96);

            using (var ds = offscreen.CreateDrawingSession())
            {
                ds.DrawImage(image, 0, 0);
                ds.DrawText("Hello note", 10, 400, Colors.Aqua);
            }


            var displayInformation = DisplayInformation.GetForCurrentView();
            var savepicker         = new FileSavePicker();

            savepicker.FileTypeChoices.Add("png", new List <string> {
                ".png"
            });
            var destFile = await savepicker.PickSaveFileAsync();

            if (destFile == null)
            {
                // The user cancelled the save operation.
                return;
            }

            using (var s = await destFile.OpenAsync(FileAccessMode.ReadWrite))
            {
                var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.PngEncoderId, s);

                encoder.SetPixelData(
                    BitmapPixelFormat.Bgra8,
                    BitmapAlphaMode.Ignore,
                    (uint)offscreen.Size.Width,
                    (uint)offscreen.Size.Height,
                    displayInformation.LogicalDpi,
                    displayInformation.LogicalDpi,
                    offscreen.GetPixelBytes());
                await encoder.FlushAsync();
            }
        }
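
As an aside, the BitmapEncoder/GetPixelBytes step above could be replaced by letting the rendertarget encode itself; a sketch of the shorter ending (same destFile and offscreen as above):

            using (var s = await destFile.OpenAsync(FileAccessMode.ReadWrite))
            {
                await offscreen.SaveAsync(s, CanvasBitmapFileFormat.Png);
            }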
Code Example #43
0
        public async Task <Point> DrawWeather(CanvasDrawingSession ds, CanvasDevice device, CanvasBitmap canvasBitmap, Point drawPoint, bool isBackground = false)
        {
            #region Init & Get Weather Information

            Size   screenSize = SettingManager.GetWindowsResolution();
            var    width      = screenSize.Width;
            var    height     = screenSize.Height;
            float  space      = (float)width - (float)drawPoint.X;
            Rect   textSize;
            var    newWidth     = width - space;
            double tempWidth    = 0;
            var    oldDrawPoint = drawPoint.Y;

            if (drawPoint.Y >= height)
            {
                drawPoint.Y = drawPoint.Y > height ? drawPoint.Y - height - height * 22 / 100 : height - height * 25 / 100;
            }

            if (!SettingsHelper.GetSetting <bool>(SettingKey.IsDisplayWeather.ToString()))
            {
                return(drawPoint);
            }

            // Get the weather condition for the first time
            if (CurrentWeatherInfo.Temp == null)
            {
                if (CurrentWeather?.Main == null)
                {
                    //If FixedLocation Mode is ON then get weather with FixedLocation
                    if (IsFixedLocation)
                    {
                        await GetWeather(false);
                    }
                    else
                    {
                        await GetWeather();
                    }
                }
            }
            else
            {
                // If the final state is different, get a new WeatherCondition
                if (IsFixedLocation == IsNormalMode)
                {
                    if (IsFixedLocation)
                    {
                        if (!string.IsNullOrEmpty(UserLocation))
                        {
                            await GetWeather(false);
                        }

                        //Update Mode status & IconSaved
                        StartVm.IsIconSaved = false;
                        IsNormalMode        = false;
                    }
                    else
                    {
                        await GetWeather();

                        StartVm.IsIconSaved = false;
                        IsNormalMode        = true;
                    }
                }
            }

            var isFahrenheit = SettingsHelper.GetSetting <bool>(SettingKey.IsFahrenheit.ToString());

            // If CurrentWeather isn't loaded yet, draw with a temporary template rather than returning
            // early before anything is drawn, which would make the user think the weather feature is broken.
            if (CurrentWeather?.Main == null)
            {
                CurrentWeatherInfo.Temp = isFahrenheit ? "? °F" : "? °C";
                if (string.IsNullOrEmpty(CurrentWeatherInfo.Address))
                {
                    CurrentWeatherInfo.Address       = "???";
                    CurrentWeatherInfo.Condition     = "clouds";
                    CurrentWeatherInfo.MainCondition = "??";
                }

                // Update the final state so the WeatherCondition is fetched next time
                IsNormalMode = IsFixedLocation;
            }
            else
            {
                if (isFahrenheit)
                {
                    CurrentWeatherInfo.Temp = Math.Round(CurrentWeather.Main.Temp * 1.8 + 32) + "°F";
                }
                else
                {
                    CurrentWeatherInfo.Temp = Math.Round(CurrentWeather.Main.Temp) + "°C";
                }

                CurrentWeatherInfo.MainCondition = CurrentWeather.Weather[0].Main;
                CurrentWeatherInfo.Condition     = CurrentWeather.Weather[0].Description;
            }

            var address = CurrentWeatherInfo.Address;

            #endregion

            #region Get Address & Calculate area size, position

            if (address != null)
            {
                var strArr = address.Split(',');

                if (strArr.Count() >= 2)
                {
                    CurrentWeatherInfo.Address = string.Format("{0},{1}", strArr[strArr.Length - 2],
                                                               strArr[strArr.Length - 1]);
                }
                else
                {
                    CurrentWeatherInfo.Address = strArr[0];
                }

                var textFormat = new CanvasTextFormat
                {
                    FontFamily   = "Segoe UI Light",
                    FontSize     = (float)(height * 4 / 100),
                    WordWrapping = CanvasWordWrapping.NoWrap
                };

                textSize = BitmapHelper.TextRect(CurrentWeatherInfo.Address, textFormat, ds);

                if (string.IsNullOrEmpty(CurrentWeatherInfo.MainCondition))
                {
                    CurrentWeatherInfo.Condition     = "clouds";
                    CurrentWeatherInfo.MainCondition = "??";
                }

                // Calculate the WeatherCondition text size for drawing the anti-bright overlay & weather
                var conditionSize = BitmapHelper.TextRect(CurrentWeatherInfo.MainCondition, new CanvasTextFormat()
                {
                    FontSize     = (float)(height * 4.5 / 100),
                    FontFamily   = "Segoe UI Light",
                    FontWeight   = FontWeights.Thin,
                    WordWrapping = CanvasWordWrapping.NoWrap
                }, ds);

                var tempSize = BitmapHelper.TextRect(CurrentWeatherInfo.Temp, new CanvasTextFormat()
                {
                    FontSize     = (float)(height * 7.5 / 100),
                    WordWrapping = CanvasWordWrapping.NoWrap,
                    FontFamily   = "Segoe UI Light",
                    FontWeight   = FontWeights.Thin
                }, ds);

                // Check whether centerX + the weather condition (or the temperature, if it is wider) exceeds the address width, and update centerX (used for drawing the weather condition)
                tempWidth = tempSize.Width > conditionSize.Width ? tempSize.Width : conditionSize.Width;

                DrawPoint = new Point(newWidth - textSize.Width, drawPoint.Y);

                if (DrawPoint.X + textSize.Width + height / 100 >= newWidth)
                {
                    newWidth -= height / 100;
                    DrawPoint = new Point(newWidth - textSize.Width, drawPoint.Y);
                }

                if (drawPoint.X == 0)
                {
                    newWidth = textSize.Width > tempWidth + height * 15 / 100 + width / 100 ? textSize.Width + height * 3 / 100 : tempWidth + height * 15 / 100 + width / 100 + height * 3 / 100;

                    DrawPoint = new Point(newWidth - textSize.Width, drawPoint.Y);
                }


                // Check whether the weather icon + temperature width exceeds the address width, and update DrawPoint
                if (DrawPoint.X + tempWidth + height * 15 / 100 + width / 100 + height / 100 >= newWidth)
                {
                    var x = DrawPoint.X - (tempWidth + DrawPoint.X + width / 100 + height * 15 / 100 + height / 100 - newWidth);
                    DrawPoint = new Point(x, drawPoint.Y);
                }

                if (Math.Abs(drawPoint.X - width / 2) < 0.5)
                {
                    var leftToCenter = width / 2 - DrawPoint.X + height * 2 / 100;
                    var haftWidth    = (newWidth - DrawPoint.X + height * 2 / 100) / 2;
                    var tempSpace    = leftToCenter - haftWidth;
                    DrawPoint = new Point(DrawPoint.X + tempSpace, DrawPoint.Y);
                    newWidth += tempSpace;
                }
            }

            #endregion

            #region Draw Methods

            if ((DrawPoint.X == 0) && (DrawPoint.Y == 0))
            {
                return(drawPoint);
            }
            {
                // Check the brightness and draw a translucent black rectangle if necessary
                if (BitmapHelper.IsBrightArea(canvasBitmap,
                                              (int)(DrawPoint.X - height * 2 / 100),
                                              (int)(DrawPoint.Y - height / 100),
                                              (int)(newWidth - DrawPoint.X + height * 2 / 100),
                                              (int)height * 22 / 100))
                {
                    ds.FillRoundedRectangle(
                        new Rect(DrawPoint.X - height * 2 / 100,
                                 (int)DrawPoint.Y - height / 100,
                                 newWidth - DrawPoint.X + height * 2 / 100,
                                 height * 22 / 100), 20, 20,
                        new CanvasSolidColorBrush(device, Colors.Black)
                    {
                        Opacity = 0.4F
                    });
                }



                var strArr = CurrentWeatherInfo.Address?.Split(',');
                if (strArr?.Length > 2)
                {
                    CurrentWeatherInfo.Address = strArr[strArr.Length - 2] + "," + strArr[strArr.Length - 1];
                }

                if (CurrentWeatherInfo.Address != null)
                {
                    ds.DrawText(CurrentWeatherInfo.Address, (float)(newWidth - height * 1.5 / 100), (float)DrawPoint.Y,
                                Colors.White,
                                new CanvasTextFormat
                    {
                        FontSize            = (float)(height * 4 / 100),
                        HorizontalAlignment = CanvasHorizontalAlignment.Right,
                        FontFamily          = "Segoe UI Light",
                        FontWeight          = FontWeights.Thin
                    });
                }

                var centerX = (float)(DrawPoint.X + (newWidth - DrawPoint.X) / 2);

                // Adjust the area position for a better layout
                if (tempWidth + centerX + width * 2 / 100 >= newWidth)
                {
                    centerX -= (float)tempWidth + centerX + (float)(width * 2 / 100) - (float)newWidth;
                }
                else
                {
                    if ((tempWidth + height * 15 / 100 >= textSize.Width) &&
                        (tempWidth + height * 15 / 100 + (newWidth - (float)width * 2 / 100) - textSize.Width) <= newWidth)
                    {
                        centerX = (float)((newWidth - (float)width * 2 / 100) - textSize.Width);
                    }
                }

                if (CurrentWeatherInfo.Temp != null)
                {
                    ds.DrawText(CurrentWeatherInfo.Temp, centerX, (float)(DrawPoint.Y + height * 4.5 / 100), Colors.White,
                                new CanvasTextFormat
                    {
                        FontSize            = (float)(height * 7.5 / 100),
                        HorizontalAlignment = CanvasHorizontalAlignment.Left,
                        FontFamily          = "Segoe UI Light",
                        FontWeight          = FontWeights.Thin
                    });
                }

                if (CurrentWeatherInfo.MainCondition != null)
                {
                    ds.DrawText(CurrentWeatherInfo.MainCondition, centerX, (float)(DrawPoint.Y + height * 13 / 100),
                                Colors.White,
                                new CanvasTextFormat
                    {
                        FontSize            = (float)(height * 4.5 / 100),
                        HorizontalAlignment = CanvasHorizontalAlignment.Left,
                        FontFamily          = "Segoe UI Light",
                        FontWeight          = FontWeights.Thin
                    });
                }

                if (CurrentWeatherInfo.Condition != null)
                {
                    if (isBackground)
                    {
                        var weatherIcon = PickWeatherIcon(CurrentWeatherInfo.Condition);
                        var iconSizeStr = weatherIcon.Substring(0, 3);
                        var iconSize    = int.Parse(iconSizeStr);
                        var iconBitmap  = new CanvasRenderTarget(device, iconSize, iconSize, 500);
                        using (var ds1 = iconBitmap.CreateDrawingSession())
                        {
                            var file =
                                await
                                StorageFile.GetFileFromApplicationUriAsync(
                                    new Uri("ms-appx:///ShareClass/Assets/WeatherIcon/" + weatherIcon + ".svg"));

                            using (var stream = await file.OpenStreamForReadAsync())
                                using (var reader = new StreamReader(stream))
                                {
                                    var xml = new XmlDocument();
                                    xml.LoadXml(reader.ReadToEnd(), new XmlLoadSettings {
                                        ProhibitDtd = false
                                    });

                                    var svgDocument = SvgDocument.Parse(xml);


                                    using (var renderer = new Win2dRenderer(iconBitmap, svgDocument))
                                        renderer.Render(iconSize, iconSize, ds1);

                                    ds.DrawImage(iconBitmap, new Rect(centerX - height * 15 / 100,
                                                                      DrawPoint.Y + height * 5.5 / 100, height * 13 / 100, height * 13 / 100));
                                }
                        }
                    }
                    else
                    {
                        if (!StartVm.IsIconSaved)
                        {
                            var weatherIcon = PickWeatherIcon(CurrentWeatherInfo.Condition);
                            var iconSizeStr = weatherIcon.Substring(0, 3);
                            var iconSize    = int.Parse(iconSizeStr);
                            var iconBitmap  = new CanvasRenderTarget(device, iconSize, iconSize, 500);
                            using (var ds1 = iconBitmap.CreateDrawingSession())
                            {
                                var file =
                                    await
                                    StorageFile.GetFileFromApplicationUriAsync(
                                        new Uri("ms-appx:///ShareClass/Assets/WeatherIcon/" + weatherIcon + ".svg"));

                                using (var stream = await file.OpenStreamForReadAsync())
                                    using (var reader = new StreamReader(stream))
                                    {
                                        var xml = new XmlDocument();
                                        xml.LoadXml(reader.ReadToEnd(), new XmlLoadSettings {
                                            ProhibitDtd = false
                                        });

                                        var svgDocument = SvgDocument.Parse(xml);


                                        using (var renderer = new Win2dRenderer(iconBitmap, svgDocument))
                                            renderer.Render(iconSize, iconSize, ds1);

                                        // Cache the rendered icon so later draws can reuse it without re-rendering the SVG.
                                        StartVm.IconBitmap  = iconBitmap;
                                        StartVm.IsIconSaved = true;

                                        ds.DrawImage(iconBitmap, new Rect(centerX - height * 15 / 100,
                                                                          DrawPoint.Y + height * 5.5 / 100, height * 13 / 100, height * 13 / 100));
                                    }
                            }
                        }
                        else
                        {
                            ds.DrawImage(StartVm.IconBitmap,
                                         new Rect(centerX - height * 15 / 100, DrawPoint.Y + height * 5.5 / 100, height * 13 / 100, height * 13 / 100));
                        }
                    }
                }


                if (oldDrawPoint >= screenSize.Height)
                {
                    var temp = DrawPoint.Y - height * 1.5 / 100;
                    if (temp + screenSize.Height <= screenSize.Height * 2)
                    {
                        return new Point(drawPoint.X, temp + screenSize.Height);
                    }
                    return new Point(-1, -1);
                }
                else
                {
                    var temp = DrawPoint.Y - height * 2 / 100 + height * 22 / 100;
                    if (temp <= screenSize.Height)
                    {
                        return new Point(drawPoint.X, temp);
                    }
                    return new Point(-1, -1);
                }
            }

            #endregion
        }
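A side note on the icon rendering above: on Windows 10 1703 and later, Win2D's built-in SVG support (Microsoft.Graphics.Canvas.Svg) can rasterize the icon without the external SvgDocument/Win2dRenderer pair used here. A minimal sketch, with the method name and the fixed 96 DPI as illustrative assumptions rather than part of the original code:

        // Sketch only: rasterize an SVG icon with CanvasSvgDocument (Windows 10 1703+).
        // Requires Microsoft.Graphics.Canvas.Svg; this is an alternative to the Win2dRenderer path above.
        private static async Task<CanvasRenderTarget> RenderSvgIconAsync(CanvasDevice device, Uri svgUri, float iconSize)
        {
            var file   = await StorageFile.GetFileFromApplicationUriAsync(svgUri);
            var target = new CanvasRenderTarget(device, iconSize, iconSize, 96);

            using (var stream = await file.OpenReadAsync())
            using (var svg = await CanvasSvgDocument.LoadAsync(device, stream))
            using (var ds = target.CreateDrawingSession())
            {
                ds.DrawSvg(svg, new Size(iconSize, iconSize));
            }

            return target;
        }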
Code example #44
        // Verifies that SetPixelColors calls interleaved with DrawImage inside a single
        // drawing session each affect only the draw calls issued after them.
        public void SetPixelColorsReadHazards()
        {
            var device = new CanvasDevice();
            var bitmap = CanvasBitmap.CreateFromColors(device, new Color[1], 1, 1);
            var renderTarget = new CanvasRenderTarget(device, 2, 1, 96);

            using (var ds = renderTarget.CreateDrawingSession())
            {
                bitmap.SetPixelColors(new Color[] { Colors.Blue });
                ds.DrawImage(bitmap, 0, 0);

                bitmap.SetPixelColors(new Color[] { Colors.Red });
                ds.DrawImage(bitmap, 1, 0);
            }

            CollectionAssert.AreEqual(new Color[] { Colors.Blue, Colors.Red }, renderTarget.GetPixelColors());
        }
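For context, the SetPixelColors/GetPixelColors pair used by this test simply round-trips raw pixels through Win2D surfaces; a minimal, illustrative sketch of the same round trip without the read hazard:

        // Sketch only: write two pixels through a drawing session and read them back.
        var device = new CanvasDevice();
        var target = new CanvasRenderTarget(device, 2, 1, 96);

        using (var ds = target.CreateDrawingSession())
        {
            ds.FillRectangle(0, 0, 1, 1, Colors.Blue);
            ds.FillRectangle(1, 0, 1, 1, Colors.Red);
        }

        Color[] pixels = target.GetPixelColors();   // expected: { Blue, Red }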
Code example #45
        private void DrawToCanvas(CanvasRenderTarget crt, List <Dot> dots, float scale, float offsetX, float offsetY, Color color, CanvasStrokeStyle canvasStrokeStyle, float thickness = 1f)
        {
            if (crt == null)
            {
                return;
            }

            if (dots == null || dots.Count == 0)
            {
                return;
            }

            using (CanvasDrawingSession drawSession = crt.CreateDrawingSession())
            {
                if (dots.Count <= 2)
                {
                    float p = (float)dots[dots.Count - 1].Force / 1023 * thickness;

                    if (dots.Count == 1) // Draw a single dot
                    {
                        drawSession.FillCircle(dots[0].X * scale + offsetX, dots[0].Y * scale + offsetY, p, color);
                    }
                    else if (dots.Count == 2) // Draw a single line segment
                    {
                        drawSession.DrawLine(dots[0].X * scale + offsetX, dots[0].Y * scale + offsetY, dots[1].X * scale + offsetX, dots[1].Y * scale + offsetY, color, p, canvasStrokeStyle);
                    }
                }
                else
                {
                    thickness /= 2;

                    float x0, x1, x2, x3, y0, y1, y2, y3, p0, p1, p2, p3;
                    float vx01, vy01, vx21, vy21;
                    float norm;
                    float n_x0, n_y0, n_x2, n_y2;

                    x0 = dots[0].X * scale + offsetX + 0.1f;
                    y0 = dots[0].Y * scale + offsetY;
                    // TODO Change MaxForce
                    p0 = (float)dots[0].Force / 1023 * thickness;

                    x1 = dots[1].X * scale + offsetX + 0.1f;
                    y1 = dots[1].Y * scale + offsetY;
                    p1 = (float)dots[1].Force / 1023 * thickness;

                    vx01 = x1 - x0;
                    vy01 = y1 - y0;
                    // instead of dividing tangent/norm by two, we multiply norm by 2
                    norm = (float)System.Math.Sqrt(vx01 * vx01 + vy01 * vy01 + 0.0001f) * 2f;
                    //vx01 = vx01 / norm * scaled_pen_thickness * p0;
                    //vy01 = vy01 / norm * scaled_pen_thickness * p0;
                    vx01 = vx01 / norm * p0;
                    vy01 = vy01 / norm * p0;
                    n_x0 = vy01;
                    n_y0 = -vx01;

                    CanvasPathBuilder pathBuilder;

                    int count = dots.Count;

                    for (int i = 2; i < count; ++i)
                    {
                        x3 = dots[i].X * scale + offsetX + 0.1f;
                        y3 = dots[i].Y * scale + offsetY;
                        p3 = (float)dots[i].Force / 1023 * thickness;

                        x2   = (x1 + x3) / 2.0f;
                        y2   = (y1 + y3) / 2.0f;
                        p2   = (p1 + p3) / 2.0f;
                        vx21 = x1 - x2;
                        vy21 = y1 - y2;
                        norm = (float)System.Math.Sqrt(vx21 * vx21 + vy21 * vy21 + 0.0001f) * 2.0f;
                        vx21 = vx21 / norm * p2;
                        vy21 = vy21 / norm * p2;
                        n_x2 = -vy21;
                        n_y2 = vx21;

                        pathBuilder = new CanvasPathBuilder(drawableCanvas);
                        pathBuilder.BeginFigure(x0 + n_x0, y0 + n_y0);
                        // The + boundary of the stroke
                        pathBuilder.AddCubicBezier(new Vector2(x1 + n_x0, y1 + n_y0), new Vector2(x1 + n_x2, y1 + n_y2), new Vector2(x2 + n_x2, y2 + n_y2));
                        // round out the cap
                        pathBuilder.AddCubicBezier(new Vector2(x2 + n_x2 - vx21, y2 + n_y2 - vy21), new Vector2(x2 - n_x2 - vx21, y2 - n_y2 - vy21), new Vector2(x2 - n_x2, y2 - n_y2));
                        // The - boundary of the stroke
                        pathBuilder.AddCubicBezier(new Vector2(x1 - n_x2, y1 - n_y2), new Vector2(x1 - n_x0, y1 - n_y0), new Vector2(x0 - n_x0, y0 - n_y0));
                        // round out the other cap
                        pathBuilder.AddCubicBezier(new Vector2(x0 - n_x0 - vx01, y0 - n_y0 - vy01), new Vector2(x0 + n_x0 - vx01, y0 + n_y0 - vy01), new Vector2(x0 + n_x0, y0 + n_y0));
                        pathBuilder.EndFigure(CanvasFigureLoop.Open);
                        drawSession.DrawGeometry(CanvasGeometry.CreatePath(pathBuilder), color, p2);

                        x0   = x2;
                        y0   = y2;
                        p0   = p2;
                        x1   = x3;
                        y1   = y3;
                        p1   = p3;
                        vx01 = -vx21;
                        vy01 = -vy21;
                        n_x0 = n_x2;
                        n_y0 = n_y2;
                    }

                    x2 = dots[count - 1].X * scale + offsetX + 0.1f;
                    y2 = dots[count - 1].Y * scale + offsetY;
                    p2 = (float)dots[count - 1].Force / 1023 * thickness;

                    vx21 = x1 - x2;
                    vy21 = y1 - y2;
                    norm = (float)System.Math.Sqrt(vx21 * vx21 + vy21 * vy21 + 0.0001f) * 2f;
                    //vx21 = vx21 / norm * scaled_pen_thickness * p2;
                    //vy21 = vy21 / norm * scaled_pen_thickness * p2;
                    vx21 = vx21 / norm * p2;
                    vy21 = vy21 / norm * p2;
                    n_x2 = -vy21;
                    n_y2 = vx21;

                    pathBuilder = new CanvasPathBuilder(drawableCanvas);
                    pathBuilder.BeginFigure(x0 + n_x0, y0 + n_y0);
                    pathBuilder.AddCubicBezier(new Vector2(x1 + n_x0, y1 + n_y0), new Vector2(x1 + n_x2, y1 + n_y2), new Vector2(x2 + n_x2, y2 + n_y2));
                    pathBuilder.AddCubicBezier(new Vector2(x2 + n_x2 - vx21, y2 + n_y2 - vy21), new Vector2(x2 - n_x2 - vx21, y2 - n_y2 - vy21), new Vector2(x2 - n_x2, y2 - n_y2));
                    pathBuilder.AddCubicBezier(new Vector2(x1 - n_x2, y1 - n_y2), new Vector2(x1 - n_x0, y1 - n_y0), new Vector2(x0 - n_x0, y0 - n_y0));
                    pathBuilder.AddCubicBezier(new Vector2(x0 - n_x0 - vx01, y0 - n_y0 - vy01), new Vector2(x0 + n_x0 - vx01, y0 + n_y0 - vy01), new Vector2(x0 + n_x0, y0 + n_y0));
                    pathBuilder.EndFigure(CanvasFigureLoop.Open);
                    drawSession.DrawGeometry(CanvasGeometry.CreatePath(pathBuilder), color, p2);
                }
            }
        }
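The path building above approximates a pressure-sensitive stroke with filled Bezier outlines. For comparison, a much simpler (and lower quality) alternative is to vary the line thickness per segment; a sketch under the assumption of the same Dot type with X, Y and Force fields and a maximum force of 1023:

        // Sketch only: per-segment variable thickness instead of a filled Bezier outline.
        private void DrawSimpleStroke(CanvasDrawingSession ds, List<Dot> dots, float scale,
                                      float offsetX, float offsetY, Color color, float thickness)
        {
            for (int i = 1; i < dots.Count; i++)
            {
                // Map pen force (assumed 0..1023) to stroke width.
                float width = (float)dots[i].Force / 1023 * thickness;

                ds.DrawLine(dots[i - 1].X * scale + offsetX, dots[i - 1].Y * scale + offsetY,
                            dots[i].X * scale + offsetX, dots[i].Y * scale + offsetY,
                            color, width);
            }
        }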
Code example #46
        private void DrawCharData(CanvasDevice device, RenderingOptions options, HeartbeatMeasurement[] data)
        {
            //Size restrictions described in: http://microsoft.github.io/Win2D/html/P_Microsoft_Graphics_Canvas_CanvasDevice_MaximumBitmapSizeInPixels.htm
            float useHeight = (float)ChartWin2DCanvas.Size.Height > device.MaximumBitmapSizeInPixels ? device.MaximumBitmapSizeInPixels : (float)ChartWin2DCanvas.Size.Height;
            float useWidth = data.Length > device.MaximumBitmapSizeInPixels ? device.MaximumBitmapSizeInPixels : data.Length;

            //This converts the measurement values into a list of drawing-line points for the graph
            List<DataPoint> dataList = FillOffsetList(data, options, useWidth, useHeight);

            //reset zoom & moving values
            _zoomFactor = 100;
            _graphDrawingPoint = new Point(0, 0);
            _graphDrawingSource = new Size(useWidth, useHeight);
            //create the graph image
            _offscreenChartImage = new CanvasRenderTarget(device, useWidth, useHeight, 96);

            using (CanvasDrawingSession ds = _offscreenChartImage.CreateDrawingSession())
            {
                //This creates drawing geometry from the drawing-line-points
                CanvasGeometry chart = getDrawChartGeometry(device, dataList);
                //and then we simply draw it with defined color
                ds.DrawGeometry(chart, 0, 0, GRAPG_COLOR);
            }
        }
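The getDrawChartGeometry helper is not shown in this excerpt; a plausible sketch of such a helper (an assumption, not the project's actual code) builds an open polyline through the chart points with CanvasPathBuilder:

        // Hypothetical sketch of a chart-geometry helper; DataPoint is assumed to expose X and Y.
        private CanvasGeometry BuildChartGeometry(CanvasDevice device, List<DataPoint> points)
        {
            var pathBuilder = new CanvasPathBuilder(device);

            pathBuilder.BeginFigure((float)points[0].X, (float)points[0].Y);

            for (int i = 1; i < points.Count; i++)
            {
                pathBuilder.AddLine((float)points[i].X, (float)points[i].Y);
            }

            pathBuilder.EndFigure(CanvasFigureLoop.Open);

            return CanvasGeometry.CreatePath(pathBuilder);
        }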
Code example #47
        public override void Init(WriteableBitmap bmp, IGrap9Attr attr = null)
        {
            var newbg = attr.bgurl != bgurl || attr.fgurl != fgurl || attr.hard != hard;

            base.Init(bmp, attr);
            b      = bmp;
            source = new CanvasRenderTarget(
                device, b.PixelWidth, b.PixelHeight, 96,
                Windows.Graphics.DirectX.DirectXPixelFormat.B8G8R8A8UIntNormalized,
                CanvasAlphaMode.Premultiplied
                );
            Canvas = new CanvasRenderTarget(
                device, b.PixelWidth, b.PixelHeight, 96,
                Windows.Graphics.DirectX.DirectXPixelFormat.B8G8R8A8UIntNormalized,
                CanvasAlphaMode.Premultiplied
                );
            Invalidate(true);

            try
            {
                if (newbg)
                {
                    if (fgurl != "")
                    {
                        var img = LayerPaint.Img.Create(fgurl);
                        Shape = CanvasBitmap.CreateFromBytes(
                            device, img.PixelBuffer, img.PixelWidth, img.PixelHeight,
                            Windows.Graphics.DirectX.DirectXPixelFormat.B8G8R8A8UIntNormalized
                            );
                    }
                    else
                    {
                        // No foreground image set: build a soft, round brush stamp from a radial gradient.
                        var px     = 200;
                        var csize  = Canvas.Size;
                        var sample = new CanvasRenderTarget(device, 2 * px, 2 * px, 96,
                                                            Windows.Graphics.DirectX.DirectXPixelFormat.B8G8R8A8UIntNormalized, CanvasAlphaMode.Premultiplied
                                                            );
                        var tb = new CanvasRadialGradientBrush(device, new[]  {
                            new CanvasGradientStop()
                            {
                                Color = Color.FromArgb(255, 255, 255, 255), Position = hard
                            },
                            new CanvasGradientStop()
                            {
                                Color = Color.FromArgb(255, 0, 0, 0), Position = 1f
                            },
                        }, CanvasEdgeBehavior.Clamp, CanvasAlphaMode.Straight);
                        using (var s = sample.CreateDrawingSession())
                        {
                            tb.RadiusX = px; tb.RadiusY = px; tb.Center = new Vector2(px, px);
                            s.FillRectangle(0, 0, px * 2, px * 2, tb);
                        }
                        Shape = sample;
                    }
                    if (bgurl != "")
                    {
                        var img = LayerPaint.Img.Create(bgurl);
                        Texture = CanvasBitmap.CreateFromBytes(
                            device, img.PixelBuffer, img.PixelWidth, img.PixelHeight,
                            Windows.Graphics.DirectX.DirectXPixelFormat.B8G8R8A8UIntNormalized
                            );
                    }
                    else
                    {
                        Texture = null;
                    }
                    //fgurl = bgurl= "";
                }
            }
            catch (Exception e)
            {
                new Windows.UI.Popups.MessageDialog(e.ToString()).ShowMux();
            }

            Loadd();
        }
Code example #48
        private async void SaveInkPicture()
        {
            if (ink.Width == 0)
            {
                return;
            }
             //ink.Width = WB_CapturedImage.PixelWidth;
             //ink.Height = WB_CapturedImage.PixelHeight;
             ((App)App.Current).SyncStrokeEx(strokeMapping, ink.InkPresenter.StrokeContainer, ink.Width, true);
            CanvasDevice device = CanvasDevice.GetSharedDevice();
            //CanvasRenderTarget renderTarget = new CanvasRenderTarget(device, WB_CapturedImage.PixelWidth, WB_CapturedImage.PixelHeight, 96);
            CanvasRenderTarget renderTarget = new CanvasRenderTarget(device, (float)ink.Width, (float)ink.Height, 96);
            using (var ds = renderTarget.CreateDrawingSession())
            {
                ds.Clear(Color.FromArgb(0, 255, 255, 255));
                ds.DrawInk(ink.InkPresenter.StrokeContainer.GetStrokes());
            }
            StorageFolder savedPics = KnownFolders.PicturesLibrary;
            string fileName = DateTime.Now.ToString("yyyyMMddHHmmss") + ".png";
            StorageFile file = await savedPics.CreateFileAsync(fileName, CreationCollisionOption.ReplaceExisting);
            using (var fileStream = await file.OpenAsync(FileAccessMode.ReadWrite))
            {
                await renderTarget.SaveAsync(fileStream, CanvasBitmapFileFormat.Png, 1f);
            }
            using (IRandomAccessStream fileStream = await file.OpenAsync(Windows.Storage.FileAccessMode.ReadWrite))
            {
                //image.SetSource(fileStream);
                WriteableBitmap InkImage = new WriteableBitmap(WB_CapturedImage.PixelWidth, WB_CapturedImage.PixelHeight);
                await InkImage.SetSourceAsync(fileStream);
                imageInk.Source = InkImage;
                imageInk.Visibility = Visibility.Visible;
                ink.InkPresenter.StrokeContainer.Clear();
                ink.Visibility = Visibility.Collapsed;
            }

            RenderTargetBitmap rtb = new RenderTargetBitmap();
            await rtb.RenderAsync(this.imagePanel);
            // Extract the pixel data from the RenderTargetBitmap
            IBuffer buffer = await rtb.GetPixelsAsync();

            // Open the destination file stream
            IRandomAccessStream streamOut = await file.OpenAsync(FileAccessMode.ReadWrite);
            // Create the PNG encoder
            BitmapEncoder pngEncoder = await BitmapEncoder.CreateAsync(BitmapEncoder.PngEncoderId, streamOut);
            // Recreate the WriteableBitmap at the rendered size
            WB_CapturedImage = new WriteableBitmap((int)rtb.PixelWidth, (int)rtb.PixelHeight);
            // Write the pixel data
            byte[] data = buffer.ToArray();
            pngEncoder.SetPixelData(BitmapPixelFormat.Bgra8,
                                    BitmapAlphaMode.Straight,
                                    (uint)rtb.PixelWidth,
                                    (uint)rtb.PixelHeight,
                                    96d, 96d, data);
            await pngEncoder.FlushAsync();
            streamOut.Dispose();

            ink.Visibility = Visibility.Visible;
            imageInk.Visibility = Visibility.Collapsed;

            using (IRandomAccessStream fileStream = await file.OpenAsync(Windows.Storage.FileAccessMode.ReadWrite))
            {
                await WB_CapturedImage.SetSourceAsync(fileStream);
                image.Source = WB_CapturedImage;
            }

            //var bmp = new RenderTargetBitmap();          
            //await bmp.RenderAsync(ink);
            //StorageFolder savedPics = KnownFolders.PicturesLibrary;
            //string fileName = DateTime.Now.ToString("yyyyMMddHHmmss") + ".png";
            //StorageFile file = await savedPics.CreateFileAsync(fileName, CreationCollisionOption.ReplaceExisting);
            //// Saving to file.  
            //using (var stream = await file.OpenStreamForWriteAsync())
            //{
            //    // Initialization.  
            //    var pixelBuffer = await bmp.GetPixelsAsync();
            //    var logicalDpi = DisplayInformation.GetForCurrentView().LogicalDpi;
            //    // convert stream to IRandomAccessStream  
            //    var randomAccessStream = stream.AsRandomAccessStream();
            //    // encoding to PNG  
            //    var encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.PngEncoderId, randomAccessStream);
            //    // Finish saving  
            //    encoder.SetPixelData(BitmapPixelFormat.Bgra8, BitmapAlphaMode.Ignore, (uint)bmp.PixelWidth,
            //               (uint)bmp.PixelHeight, logicalDpi, logicalDpi, pixelBuffer.ToArray());
            //    // Flush encoder.  
            //    await encoder.FlushAsync();
            //    filePath = file.Path;
            //}
        }
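The method above rasterizes the ink with Win2D and then composites it over the captured image by rendering the XAML panel with RenderTargetBitmap and re-encoding it. Where the captured image is already available as a WriteableBitmap, both layers could instead be composited in a single Win2D pass; a sketch under that assumption (the method and parameter names are illustrative):

        // Sketch only: draw the captured bitmap and the ink into one render target, then save as PNG.
        private async Task SaveCompositeAsync(WriteableBitmap captured, InkCanvas inkCanvas, StorageFile file)
        {
            var device = CanvasDevice.GetSharedDevice();

            var background = CanvasBitmap.CreateFromBytes(
                device, captured.PixelBuffer.ToArray(), captured.PixelWidth, captured.PixelHeight,
                Windows.Graphics.DirectX.DirectXPixelFormat.B8G8R8A8UIntNormalized);

            using (var renderTarget = new CanvasRenderTarget(device, captured.PixelWidth, captured.PixelHeight, 96))
            {
                using (var ds = renderTarget.CreateDrawingSession())
                {
                    ds.DrawImage(background);
                    ds.DrawInk(inkCanvas.InkPresenter.StrokeContainer.GetStrokes());
                }

                using (var stream = await file.OpenAsync(FileAccessMode.ReadWrite))
                {
                    await renderTarget.SaveAsync(stream, CanvasBitmapFileFormat.Png);
                }
            }
        }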
Code example #49
        // Save the ink strokes for the front or back of the currently selected card.
        public static async Task saveInk(bool isfront)
        {
            Card selected = CardPage.selected;

            if (selected != null)
            {
                StorageFolder deckfolder = await ApplicationData.Current.LocalFolder.GetFolderAsync(App.viewModel.selectedDeck.Id);

                StorageFolder cardFolder = await deckfolder.GetFolderAsync(selected.Name);

                if (isfront)
                {
                    IReadOnlyList <InkStroke> currentStrokes = FrontInk.InkPresenter.StrokeContainer.GetStrokes();
                    if (currentStrokes.Count > 0)
                    {
                        try
                        {
                            StorageFile data = await cardFolder.CreateFileAsync("Front" + selected.Name + ".gif", CreationCollisionOption.ReplaceExisting);

                            StorageFile imageNew = await cardFolder.CreateFileAsync("Front" + selected.Name + ".jpeg", CreationCollisionOption.ReplaceExisting);

                            CanvasDevice       device       = CanvasDevice.GetSharedDevice();
                            CanvasRenderTarget renderTarget = new CanvasRenderTarget(device, (int)FrontInk.ActualWidth, (int)FrontInk.ActualHeight, 96);
                            using (var ds = renderTarget.CreateDrawingSession())
                            {
                                ds.Clear(selected.Background);
                                ds.DrawInk(currentStrokes);
                            }

                            using (var fileStream = await imageNew.OpenAsync(FileAccessMode.ReadWrite))
                            {
                                await renderTarget.SaveAsync(fileStream, CanvasBitmapFileFormat.Jpeg, 1f);

                                await fileStream.FlushAsync();

                                fileStream.Dispose();
                            }
                            selected.Front = imageNew.Path;
                            App.viewModel.updateCard(selected);
                            IRandomAccessStream stream = await data.OpenAsync(FileAccessMode.ReadWrite);

                            using (IOutputStream outputStream = stream.GetOutputStreamAt(0))
                            {
                                await FrontInk.InkPresenter.StrokeContainer.SaveAsync(outputStream);

                                await outputStream.FlushAsync();
                            }
                            stream.Dispose();
                        }
                        catch (FileLoadException)
                        {
                            await saveInk(true);
                        }
                    }
                    else if (currentStrokes.Count < 1)
                    {
                        selected.Front = "Assets/empty.png";
                        try
                        {
                            StorageFile data = await cardFolder.GetFileAsync("Front" + selected.Name + ".gif");

                            if (data != null)
                            {
                                await data.DeleteAsync();
                            }
                            else if (data == null)
                            {
                                return;
                            }
                        }
                        catch (FileNotFoundException)
                        {
                            return;
                        }
                    }
                }
                else if (!isfront)
                {
                    IReadOnlyList <InkStroke> currentStrokes = BackInk.InkPresenter.StrokeContainer.GetStrokes();
                    if (currentStrokes.Count > 0)
                    {
                        try
                        {
                            StorageFile data = await cardFolder.CreateFileAsync("Back" + selected.Name + ".gif", CreationCollisionOption.ReplaceExisting);

                            StorageFile imageNew = await cardFolder.CreateFileAsync("Back" + selected.Name + ".jpeg", CreationCollisionOption.ReplaceExisting);

                            CanvasDevice       device       = CanvasDevice.GetSharedDevice();
                            CanvasRenderTarget renderTarget = new CanvasRenderTarget(device, (int)BackInk.ActualWidth, (int)BackInk.ActualHeight, 96);
                            using (var ds = renderTarget.CreateDrawingSession())
                            {
                                ds.Clear(selected.Background);
                                ds.DrawInk(currentStrokes);
                            }

                            using (var fileStream = await imageNew.OpenAsync(FileAccessMode.ReadWrite))
                            {
                                await renderTarget.SaveAsync(fileStream, CanvasBitmapFileFormat.Jpeg, 1f);

                                await fileStream.FlushAsync();

                                fileStream.Dispose();
                            }
                            selected.Back = imageNew.Path;
                            App.viewModel.updateCard(selected);
                            IRandomAccessStream stream = await data.OpenAsync(FileAccessMode.ReadWrite);

                            using (IOutputStream outputStream = stream.GetOutputStreamAt(0))
                            {
                                await BackInk.InkPresenter.StrokeContainer.SaveAsync(outputStream);

                                await outputStream.FlushAsync();
                            }
                            stream.Dispose();
                        }
                        catch (FileLoadException)
                        {
                            await saveInk(false);
                        }
                    }
                    else if (currentStrokes.Count < 1)
                    {
                        selected.Back = "Assets/empty.png";
                        try
                        {
                            StorageFile data = await cardFolder.GetFileAsync("Back" + selected.Name + ".gif");

                            if (data != null)
                            {
                                await data.DeleteAsync();
                            }
                            else if (data == null)
                            {
                                return;
                            }
                        }
                        catch (FileNotFoundException)
                        {
                            return;
                        }
                    }
                }
            }
        }
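The ISF stroke data written to the .gif files above can be restored later through the matching load path; a minimal sketch (the method name and parameters are illustrative):

        // Sketch only: reload previously saved strokes into an InkCanvas.
        public static async Task LoadInkAsync(StorageFile data, InkCanvas inkCanvas)
        {
            using (IRandomAccessStream stream = await data.OpenAsync(FileAccessMode.Read))
            using (IInputStream inputStream = stream.GetInputStreamAt(0))
            {
                await inkCanvas.InkPresenter.StrokeContainer.LoadAsync(inputStream);
            }
        }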
Code example #50
        private ICanvasImage CreateShadow()
        {
            var renderTarget = new CanvasRenderTarget(canvas, 360, 150);

            using (var ds = renderTarget.CreateDrawingSession())
            {
                ds.Clear(Color.FromArgb(0, 0, 0, 0));

                ds.DrawText("This text is drawn onto a rendertarget", 10, 10, Colors.White);
                ds.DrawText("with a different color per line,", 10, 40, Colors.Red);
                ds.DrawText("after which a drop shadow is", 10, 70, Colors.Green);
                ds.DrawText("generated using image effects.", 10, 100, Colors.Blue);
            }

            var shadowEffect = new Transform2DEffect
            {
                Source = new ShadowEffect
                {
                    Source = renderTarget,
                    BlurAmount = 2
                },
                TransformMatrix = Matrix3x2.CreateTranslation(3, 3)
            };

            var whiteBackground = new CropEffect
            {
                Source = new ColorSourceEffect { Color = Colors.White },
                SourceRectangle = renderTarget.Bounds
            };

            var compositeEffect = new CompositeEffect
            {
                Sources = { whiteBackground, shadowEffect, renderTarget }
            };

            animationFunction = elapsedTime => { };

            currentEffectSize = renderTarget.Size.ToVector2();

            return compositeEffect;
        }
Code example #51
        private void DrawElevation(CanvasDrawingSession drawingSession, Size size, Rect destinationRelative)
        {
            if (trackpoints == null)
            {
                return;
            }

            var foreground          = new CanvasSolidColorBrush(drawingSession, Colors.Green);
            var absoluteDestination = GetAbsoluteDestinationRect(size, destinationRelative);

            var renderTarget = new CanvasRenderTarget(drawingSession, (float)absoluteDestination.Width, (float)absoluteDestination.Height);

            _elevationTransform = GetTransformationMatrix(size, destinationRelative);

            _elevationPoints = GetPolyLinePointsRelative(tp => tp.Coordinate.Elevation, true);

            using (var ds = renderTarget.CreateDrawingSession())
            {
                var elevationRenderTargetTransform = _elevationTransform * Matrix3x2.CreateTranslation((float)(-destinationRelative.Left * size.Width), (float)(-destinationRelative.Top * size.Height));

                var gradientStops  = new List <CanvasGradientStop>();
                var minInclination = _elevationPoints.Keys.Min(tp => tp.Inclination);
                var maxInclination = _elevationPoints.Keys.Max(tp => tp.Inclination);

                var positions = new List <Vector2>();

                var pathBuilder = new CanvasPathBuilder(drawingSession);

                bool first = true;

                foreach (var point in _elevationPoints)
                {
                    var position = point.Value;

                    if (first)
                    {
                        pathBuilder.BeginFigure(position);
                        first = false;
                    }
                    else
                    {
                        pathBuilder.AddLine(position);
                    }

                    var mu = (point.Key.Inclination - minInclination) / (maxInclination - minInclination);

                    var color = GenerateColor(1, 1, 1, 4, 2, 1, 128, 127, mu * 2);

                    gradientStops.Add(new CanvasGradientStop {
                        Color = color, Position = position.X
                    });
                }

                // Close the profile down to the baseline so the area under the curve can be filled.
                pathBuilder.AddLine(new Vector2(1, 1));
                pathBuilder.AddLine(new Vector2(0, 1));

                pathBuilder.EndFigure(CanvasFigureLoop.Closed);

                var geometry = CanvasGeometry.CreatePath(pathBuilder);
                var brush    = new CanvasLinearGradientBrush(ds, gradientStops.ToArray(), CanvasEdgeBehavior.Clamp, CanvasAlphaMode.Straight)
                {
                    StartPoint = new Vector2((float)0, 0),
                    EndPoint   = new Vector2((float)1, 0),
                };

                ds.Transform = elevationRenderTargetTransform;
                ds.FillGeometry(geometry, brush);
                ds.Transform = Matrix3x2.Identity;
            }


            var sharpen = new ConvolveMatrixEffect()
            {
                Source       = renderTarget,
                KernelMatrix = new float[] { -1, -1, -1, -1, 9, -1, -1, -1, -1 },
            };

            //drawingSession.DrawImage(sharpen, (float)absoluteDestination.X, (float)absoluteDestination.Y);
            drawingSession.DrawImage(renderTarget, (float)absoluteDestination.X, (float)absoluteDestination.Y);
        }