Example #1
 /// <summary>
 /// Converts the stream of image camera views to a different pixel format.
 /// </summary>
 /// <param name="source">The source stream of image camera views.</param>
 /// <param name="pixelFormat">The pixel format to convert to.</param>
 /// <param name="deliveryPolicy">An optional delivery policy.</param>
 /// <param name="sharedImageAllocator">Optional image allocator for creating new shared images.</param>
 /// <param name="name">An optional name for the stream operator.</param>
 /// <returns>The resulting stream.</returns>
 public static IProducer <ImageCameraView> Convert(
     this IProducer <ImageCameraView> source,
     PixelFormat pixelFormat,
     DeliveryPolicy <ImageCameraView> deliveryPolicy = null,
     Func <int, int, PixelFormat, Shared <Image> > sharedImageAllocator = null,
     string name = nameof(Convert))
 {
     sharedImageAllocator ??= (width, height, pixelFormat) => ImagePool.GetOrCreate(width, height, pixelFormat);
     return(source.Process <ImageCameraView, ImageCameraView>(
                (imageCameraView, envelope, emitter) =>
     {
         // if the image is null, post null
         if (imageCameraView == null)
         {
             emitter.Post(null, envelope.OriginatingTime);
         }
         else if (pixelFormat == imageCameraView.ViewedObject.Resource.PixelFormat)
         {
              // otherwise, if the image is already in the requested format, shortcut the conversion
             emitter.Post(imageCameraView, envelope.OriginatingTime);
         }
         else
         {
             using var image = sharedImageAllocator(imageCameraView.ViewedObject.Resource.Width, imageCameraView.ViewedObject.Resource.Height, pixelFormat);
             imageCameraView.ViewedObject.Resource.CopyTo(image.Resource);
             using var outputImageCameraView = new ImageCameraView(image, imageCameraView.CameraIntrinsics, imageCameraView.CameraPose);
             emitter.Post(outputImageCameraView, envelope.OriginatingTime);
         }
     },
                deliveryPolicy,
                name));
 }
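For orientation, here is a minimal sketch of how the operator above might be wired into a Microsoft \psi pipeline fragment. The wrapper class ConvertUsageSketch, the method ToBgr24, and the cameraViews parameter are illustrative names, and the using directives assume the usual \psi packages that define these types.

    using Microsoft.Psi;
    using Microsoft.Psi.Imaging;
    using Microsoft.Psi.Spatial.Euclidean;

    public static class ConvertUsageSketch
    {
        // Converts every camera view on the stream to BGR 24bpp; output images come from
        // the default ImagePool because no sharedImageAllocator is supplied.
        public static IProducer<ImageCameraView> ToBgr24(IProducer<ImageCameraView> cameraViews)
            => cameraViews.Convert(PixelFormat.BGR_24bpp);
    }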
Example #2
        public void SharedImagePoolCollisionTest()
        {
            var bmp57 = new System.Drawing.Bitmap(5, 7);
            var bmp75 = new System.Drawing.Bitmap(7, 5);

            Assert.AreEqual <int>(5, bmp57.Width);
            Assert.AreEqual <int>(7, bmp57.Height);
            Assert.AreEqual <int>(7, bmp75.Width);
            Assert.AreEqual <int>(5, bmp75.Height);

            var shared57 = ImagePool.GetOrCreate(bmp57);

            Assert.AreEqual <int>(5, shared57.Resource.Width);
            Assert.AreEqual <int>(7, shared57.Resource.Height);

            // Ensure that the ImagePool is not recycling images based solely on the product of
            // width*height (i.e. the same number of pixels but different dimensions), as the
            // stride and total size of the recycled image could be incorrect.

            shared57.Dispose();                          // release to be recycled
            var shared75 = ImagePool.GetOrCreate(bmp75); // should *not* get the recycled image

            Assert.AreEqual <int>(7, shared75.Resource.Width);
            Assert.AreEqual <int>(5, shared75.Resource.Height);
        }
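The recycling behavior this test guards can be illustrated directly: disposing a pooled image returns its buffer to the pool, and a later request with the same dimensions and pixel format may be served from that recycled entry. A minimal sketch follows; the class and method names are illustrative, and whether a buffer is actually reused is an implementation detail of the pool rather than a guarantee.

    using Microsoft.Psi.Imaging;

    public static class ImagePoolRecycleSketch
    {
        public static void Demo()
        {
            // Allocate an image, then release it back to the pool.
            var first = ImagePool.GetOrCreate(640, 480, PixelFormat.BGR_24bpp);
            first.Dispose();

            // A request with identical shape and format may reuse the buffer released above.
            // A request with different dimensions (e.g. 480 x 640) must still come back with
            // the requested width, height and stride, which is what the test above verifies.
            using var second = ImagePool.GetOrCreate(640, 480, PixelFormat.BGR_24bpp);
        }
    }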
Example #3
 /// <summary>
 /// Scales the stream of image camera views by the specified scale factors.
 /// </summary>
 /// <param name="source">The source stream of image camera views.</param>
 /// <param name="scaleX">Scale factor for X.</param>
 /// <param name="scaleY">Scale factor for Y.</param>
 /// <param name="samplingMode">Method for sampling pixels when rescaling.</param>
 /// <param name="deliveryPolicy">An optional delivery policy.</param>
 /// <param name="sharedImageAllocator">Optional image allocator for creating new shared images.</param>
 /// <param name="name">An optional name for the stream operator.</param>
 /// <returns>The resulting stream.</returns>
 public static IProducer <ImageCameraView> Scale(
     this IProducer <ImageCameraView> source,
     float scaleX,
     float scaleY,
     SamplingMode samplingMode = SamplingMode.Bilinear,
     DeliveryPolicy <ImageCameraView> deliveryPolicy = null,
     Func <int, int, PixelFormat, Shared <Image> > sharedImageAllocator = null,
     string name = nameof(Scale))
 {
     sharedImageAllocator ??= (width, height, pixelFormat) => ImagePool.GetOrCreate(width, height, pixelFormat);
     return(source.Process <ImageCameraView, ImageCameraView>(
                (imageCameraView, envelope, emitter) =>
     {
         // if the image is null, post null
         if (imageCameraView == null)
         {
             emitter.Post(null, envelope.OriginatingTime);
         }
         else
         {
             int finalWidth = (int)(imageCameraView.ViewedObject.Resource.Width * scaleX);
             int finalHeight = (int)(imageCameraView.ViewedObject.Resource.Height * scaleY);
             using var scaledSharedImage = sharedImageAllocator(finalWidth, finalHeight, imageCameraView.ViewedObject.Resource.PixelFormat);
             imageCameraView.ViewedObject.Resource.Scale(scaledSharedImage.Resource, scaleX, scaleY, samplingMode);
             using var outputImageCameraView = new ImageCameraView(scaledSharedImage, imageCameraView.CameraIntrinsics, imageCameraView.CameraPose);
             emitter.Post(outputImageCameraView, envelope.OriginatingTime);
         }
     },
                deliveryPolicy,
                name));
 }
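Both Convert and Scale accept an optional sharedImageAllocator, which is the hook for drawing output images from something other than the default ImagePool (a dedicated pool, an instrumented allocator, and so on). Below is a hedged sketch of passing one explicitly; the class ScaleUsageSketch and method HalfSize are illustrative names, and this particular allocator simply delegates back to ImagePool.

    using Microsoft.Psi;
    using Microsoft.Psi.Imaging;
    using Microsoft.Psi.Spatial.Euclidean;

    public static class ScaleUsageSketch
    {
        // Halves each camera view; the allocator lambda is where a custom pool could be plugged in.
        public static IProducer<ImageCameraView> HalfSize(IProducer<ImageCameraView> cameraViews)
            => cameraViews.Scale(
                0.5f,
                0.5f,
                SamplingMode.Bilinear,
                sharedImageAllocator: (width, height, format) => ImagePool.GetOrCreate(width, height, format));
    }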
Example #4
        /// <summary>
        /// Constructs the input vector for the ImageNet model for a specified image.
        /// </summary>
        /// <param name="sharedImage">The image to construct the input vector for.</param>
        private void ConstructOnnxInputVector(Shared <Image> sharedImage)
        {
            var inputImage  = sharedImage.Resource;
            var inputWidth  = sharedImage.Resource.Width;
            var inputHeight = sharedImage.Resource.Height;

            // crop a center square
            var squareSize = Math.Min(inputWidth, inputHeight);

            using var squareImage = ImagePool.GetOrCreate(squareSize, squareSize, sharedImage.Resource.PixelFormat);
            if (inputWidth > inputHeight)
            {
                inputImage.Crop(squareImage.Resource, (inputWidth - squareSize) / 2, 0, squareSize, squareSize);
            }
            else
            {
                inputImage.Crop(squareImage.Resource, 0, (inputHeight - squareSize) / 2, squareSize, squareSize);
            }

            // resize the image to 224 x 224
            using var resizedImage = ImagePool.GetOrCreate(224, 224, sharedImage.Resource.PixelFormat);
            squareImage.Resource.Resize(resizedImage.Resource, 224, 224, SamplingMode.Bilinear);

            // if the pixel format does not match, do a conversion before extracting the bytes
            var bytes = default(byte[]);

            if (sharedImage.Resource.PixelFormat != PixelFormat.BGR_24bpp)
            {
                using var reformattedImage = ImagePool.GetOrCreate(224, 224, PixelFormat.BGR_24bpp);
                resizedImage.Resource.CopyTo(reformattedImage.Resource);
                bytes = reformattedImage.Resource.ReadBytes(3 * 224 * 224);
            }
            else
            {
                // get the bytes
                bytes = resizedImage.Resource.ReadBytes(3 * 224 * 224);
            }

            // Now populate the onnxInputVector float array / tensor by normalizing
            // using mean = [0.485, 0.456, 0.406] and std = [0.229, 0.224, 0.225].
            int fi = 0;

            // first the red bytes
            for (int i = 2; i < bytes.Length; i += 3)
            {
                this.onnxInputVector[fi++] = ((bytes[i] / 255.0f) - 0.485f) / 0.229f;
            }

            // then the green bytes
            for (int i = 1; i < bytes.Length; i += 3)
            {
                this.onnxInputVector[fi++] = ((bytes[i] / 255.0f) - 0.456f) / 0.224f;
            }

            // then the blue bytes
            for (int i = 0; i < bytes.Length; i += 3)
            {
                this.onnxInputVector[fi++] = ((bytes[i] / 255.0f) - 0.406f) / 0.225f;
            }
        }
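The normalization above maps each 8-bit channel value b to (b / 255 - mean) / std using the standard ImageNet statistics, and writes the results in planar R, G, B order. A small self-contained sketch of the per-channel transform; the helper class and method are illustrative, not part of the component.

    using System;

    public static class ImageNetNormalization
    {
        private static readonly float[] Mean = { 0.485f, 0.456f, 0.406f }; // R, G, B
        private static readonly float[] Std = { 0.229f, 0.224f, 0.225f };  // R, G, B

        // Maps one 8-bit channel value to its normalized float; e.g. a red byte of 128
        // becomes (128 / 255 - 0.485) / 0.229, which is approximately 0.074.
        public static float Normalize(byte value, int channel)
            => ((value / 255.0f) - Mean[channel]) / Std[channel];
    }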
Example #5
        /// <inheritdoc />
        protected override DateTime GenerateNext(DateTime currentTime)
        {
            GetWindowRect(this.hWnd, out RECT rect);
            var width  = rect.Right - rect.Left;
            var height = rect.Bottom - rect.Top;
            var win    = GetWindowDC(this.hWnd);
            var dest   = CreateCompatibleDC(win);
            var hBmp   = CreateCompatibleBitmap(win, width, height);
            var sel    = SelectObject(dest, hBmp);

            BitBlt(dest, 0, 0, width, height, win, 0, 0, SourceCopy | CaptureBlt);
            var bitmap = Bitmap.FromHbitmap(hBmp);

            using (var sharedImage = ImagePool.GetOrCreate(width, height, PixelFormat.BGRA_32bpp))
            {
                var resource = sharedImage.Resource;
                resource.CopyFrom(bitmap);
                this.Out.Post(sharedImage, currentTime);
            }

            bitmap.Dispose();
            SelectObject(dest, sel);
            DeleteObject(hBmp);
            DeleteDC(dest);
            ReleaseDC(this.hWnd, win);

            return(currentTime + this.interval);
        }
Example #6
        /// <summary>
        /// Decodes a shared encoded image to a shared image.
        /// </summary>
        /// <param name="sharedEncodedImage">The shared encoded image to decode.</param>
        /// <returns>The decoded shared image.</returns>
        internal static Shared <Image> Decode(this Shared <EncodedImage> sharedEncodedImage)
        {
            Shared <Image> sharedImage = null;

            if (sharedEncodedImage.Resource != null)
            {
                // The code below maintains back-compatibility with encoded images which did not store the pixel format
                // on the instance, but only in the stream. If the pixel format is unknown, we call upon the decoder to
                // retrieve the pixel format. This might be less performant, but enables decoding in the right format
                // even from older versions of encoded images.
                var decoder     = new ImageFromStreamDecoder();
                var pixelFormat = sharedEncodedImage.Resource.PixelFormat == PixelFormat.Undefined ?
                                  decoder.GetPixelFormat(sharedEncodedImage.Resource.ToStream()) : sharedEncodedImage.Resource.PixelFormat;

                // If the decoder does not return a valid pixel format, we throw an exception.
                if (pixelFormat == PixelFormat.Undefined)
                {
                    throw new ArgumentException("The encoded image does not contain a supported pixel format.");
                }

                sharedImage = ImagePool.GetOrCreate(sharedEncodedImage.Resource.Width, sharedEncodedImage.Resource.Height, pixelFormat);
                decoder.DecodeFromStream(sharedEncodedImage.Resource.ToStream(), sharedImage.Resource);
            }

            return(sharedImage);
        }
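Since the extension above is internal, it can only be invoked from within the same assembly. Used there, decoding a single message might look like the following sketch; DecodeUsageSketch and Describe are illustrative names, and the extension method's namespace is assumed to be in scope.

    using System;
    using Microsoft.Psi;
    using Microsoft.Psi.Imaging;

    internal static class DecodeUsageSketch
    {
        // Decodes one encoded-image message and prints its shape; the message is assumed non-null.
        internal static void Describe(Shared<EncodedImage> sharedEncodedImage)
        {
            using var decoded = sharedEncodedImage.Decode();
            if (decoded != null)
            {
                Console.WriteLine($"{decoded.Resource.Width} x {decoded.Resource.Height}, {decoded.Resource.PixelFormat}");
            }
        }
    }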
Example #7
 public void Image_CropViaOperator()
 {
     // Test that the pipeline's operator Crop() works on a stream of images and random rectangles
     using (var pipeline = Pipeline.Create("CropViaOperator"))
     {
         var generator = Generators.Sequence(pipeline, 1, x => x + 1, 100);
         var p         = Microsoft.Psi.Operators.Process <int, (Shared <Image>, System.Drawing.Rectangle)>(
             generator,
             (d, e, s) =>
         {
             Random r = new Random();
             System.Drawing.Rectangle rect = default(System.Drawing.Rectangle);
             rect.X      = r.Next() % this.testImage.Width;
             rect.Y      = r.Next() % this.testImage.Height;
             rect.Width  = r.Next() % (this.testImage.Width - rect.X);
             rect.Height = r.Next() % (this.testImage.Height - rect.Y);
             if (rect.Width > 0 && rect.Height > 0)
             {
                 using (var sharedImage = ImagePool.GetOrCreate(this.testImage.Width, this.testImage.Height, this.testImage.PixelFormat))
                 {
                     this.testImage.CopyTo(sharedImage.Resource);
                     s.Post((sharedImage, rect), e.OriginatingTime);
                 }
             }
         }).Crop();
         pipeline.Run();
     }
 }
Example #8
        public void Image_GraySetPixel(PixelFormat pixelFormat)
        {
            using var sharedImage = ImagePool.GetOrCreate(this.testImage_GraySetPixel.Width, this.testImage_GraySetPixel.Height, pixelFormat);
            using var refImage    = this.testImage_GraySetPixel.Convert(pixelFormat);

            // The documentation for SetPixel is as follows:
            /// <summary>
            /// Sets a pixel in the image.
            /// </summary>
            /// <param name="x">Pixel's X coordinate.</param>
            /// <param name="y">Pixel's Y coordinate.</param>
            /// <param name="gray">Gray value to set pixel to.</param>

            int shiftBits = pixelFormat.GetBitsPerChannel() - 8;
            int maxValue  = (1 << pixelFormat.GetBitsPerChannel()) - 1;

            for (int x = 0; x <= 255; x++)
            {
                for (int y = 0; y <= 255; y++)
                {
                    int gray = (x << shiftBits) | x;
                    sharedImage.Resource.SetPixel(x, y, gray);
                }
            }

            this.AssertAreImagesEqual(refImage, sharedImage.Resource);
        }
Example #9
 public void Image_GrayDrawCircle()
 {
     using var sharedImage = ImagePool.GetOrCreate(this.testImage_Gray.Width, this.testImage_Gray.Height, this.testImage_Gray.PixelFormat);
     this.testImage_Gray.CopyTo(sharedImage.Resource);
     sharedImage.Resource.DrawCircle(new System.Drawing.Point(0, 0), 100, System.Drawing.Color.Red, 3);
     this.AssertAreImagesEqual(this.testImage_GrayDrawCircle, sharedImage.Resource);
 }
Example #10
        private void Process(ValueTuple <HeadPoseAndGaze, Shared <Image> > data, Envelope envelope)
        {
            var(datum, frame) = data;
            lock (this) {
                if (frame != null && frame.Resource != null)
                {
                    using (var img = ImagePool.GetOrCreate(frame.Resource.Width, frame.Resource.Height, frame.Resource.PixelFormat)) {
                        frame.Resource.CopyTo(img.Resource);
                        var buffer = new ImageBuffer(img.Resource.Width, img.Resource.Height, img.Resource.ImageData, img.Resource.Stride);
                        foreach (var p in datum.HeadPose.VisiableLandmarks)
                        {
                            Methods.DrawPoint(buffer, new Point(p.X, p.Y), 3);
                        }

                        foreach (var p in datum.Gaze.VisiableLandmarks)
                        {
                            Methods.DrawPoint(buffer, new Point(p.X, p.Y), 1);
                        }

                        foreach (var l in datum.HeadPose.IndicatorLines)
                        {
                            Methods.DrawLine(buffer, new Point(l.Item1.X, l.Item1.Y), new Point(l.Item2.X, l.Item2.Y));
                        }

                        foreach (var l in datum.Gaze.IndicatorLines)
                        {
                            Methods.DrawLine(buffer, new Point(l.Item1.X, l.Item1.Y), new Point(l.Item2.X, l.Item2.Y));
                        }

                        Out.Post(img, envelope.OriginatingTime);
                        display.Update(img);
                    }
                }
            }
        }
Example #11
 public void Image_GrayDrawRect()
 {
     using var sharedImage = ImagePool.GetOrCreate(this.testImage_Gray.Width, this.testImage_Gray.Height, this.testImage_Gray.PixelFormat);
     this.testImage_Gray.CopyTo(sharedImage.Resource);
     sharedImage.Resource.DrawRectangle(new System.Drawing.Rectangle(0, 0, 20, 20), System.Drawing.Color.White, 3);
     this.AssertAreImagesEqual(this.testImage_GrayDrawRect, sharedImage.Resource);
 }
Example #12
        protected override DateTime GenerateNext(DateTime currentTime)
        {
            Shared <DepthImage> imgDepth      = DepthImagePool.GetOrCreate(sicsDepth.width, sicsDepth.height);
            Shared <Image>      imgDepthColor = ImagePool.GetOrCreate(sicsDepth.width, sicsDepth.height, PixelFormat.BGR_24bpp);
            Shared <Image>      imgRBG        = ImagePool.GetOrCreate(sicsRBG.width, sicsRBG.height, PixelFormat.BGR_24bpp);

            try
            {
                if (qDepth.PollForFrame(out Frame frDepth))
                {
                    imgDepth.Resource.CopyFrom(frDepth.Data);
                    imgDepthColor.Resource.CopyFrom(imgDepth.Resource.PseudoColorize((0, 2048)));
                    OutDepthImage.Post(imgDepth, msPipe.GetCurrentTime());
                    OutDepthImageColorized.Post(imgDepthColor, msPipe.GetCurrentTime());
                }

                if (qRBG.PollForFrame(out Frame frRBG))
                {
                    imgRBG.Resource.CopyFrom(frRBG.Data);
                    OutRBGImage.Post(imgRBG, msPipe.GetCurrentTime());
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }

            return(msPipe.GetCurrentTime());
        }
Example #13
 public void Image_GrayDrawText()
 {
     using var sharedImage = ImagePool.GetOrCreate(this.testImage_Gray.Width, this.testImage_Gray.Height, this.testImage_Gray.PixelFormat);
     this.testImage_Gray.CopyTo(sharedImage.Resource);
     sharedImage.Resource.DrawText("Test", new System.Drawing.Point(0, 20), System.Drawing.Color.Red);
     this.AssertAreImagesEqual(this.testImage_GrayDrawText, sharedImage.Resource);
 }
Example #14
        /// <inheritdoc/>
        protected override void ProcessSensorFrame(IResearchModeSensorFrame sensorFrame, ResearchModeSensorResolution resolution, ulong frameTicks, DateTime originatingTime)
        {
            var shouldOutputImage = this.Configuration.OutputImage &&
                                    (originatingTime - this.Image.LastEnvelope.OriginatingTime) > this.Configuration.OutputMinInterval;

            var shouldOutputImageCameraView = this.Configuration.OutputImageCameraView &&
                                              (originatingTime - this.ImageCameraView.LastEnvelope.OriginatingTime) > this.Configuration.OutputMinInterval;

            if (shouldOutputImage || shouldOutputImageCameraView)
            {
                var vlcFrame    = sensorFrame as ResearchModeSensorVlcFrame;
                var imageBuffer = vlcFrame.GetBuffer();
                int imageWidth  = (int)resolution.Width;
                int imageHeight = (int)resolution.Height;

                using var image = ImagePool.GetOrCreate(imageWidth, imageHeight, PixelFormat.Gray_8bpp);
                Debug.Assert(image.Resource.Size == imageBuffer.Length * sizeof(byte), "Image size does not match raw image buffer size!");
                image.Resource.CopyFrom(imageBuffer);

                if (shouldOutputImage)
                {
                    this.Image.Post(image, originatingTime);
                }

                if (shouldOutputImageCameraView)
                {
                    using var imageCameraView = new ImageCameraView(image, this.GetCameraIntrinsics(), this.GetCameraPose());
                    this.ImageCameraView.Post(imageCameraView, originatingTime);
                }
            }
        }
Example #15
        /// <summary>
        /// Pipeline callback for converting depth image to colored image
        /// </summary>
        /// <param name="depthImage">Depth image</param>
        /// <param name="e">Pipeline information about current depthImage sample</param>
        protected override void Receive(Shared <Image> depthImage, Envelope e)
        {
            using (var colorImageDest = ImagePool.GetOrCreate(depthImage.Resource.Width, depthImage.Resource.Height, Imaging.PixelFormat.BGR_24bpp))
            {
                unsafe
                {
                    ushort maxDepth = ushort.MaxValue;
                    ushort minDepth = 0;

                    Parallel.For(0, depthImage.Resource.Height, iy =>
                    {
                        ushort *src = (ushort *)((byte *)depthImage.Resource.ImageData.ToPointer() + (iy * depthImage.Resource.Stride));
                        byte *dst   = (byte *)colorImageDest.Resource.ImageData.ToPointer() + (iy * colorImageDest.Resource.Stride);

                        for (int ix = 0; ix < depthImage.Resource.Width; ix++)
                        {
                            ushort depth = *src;

                            // short adaptation
                            int normalizedDepth = (depth >= minDepth && depth <= maxDepth) ? (depth * 1024 / 8000) : 0;
                            dst[0] = (byte)this.Saturate(384 - (int)Math.Abs(normalizedDepth - 256));
                            dst[1] = (byte)this.Saturate(384 - (int)Math.Abs(normalizedDepth - 512));
                            dst[2] = (byte)this.Saturate(384 - (int)Math.Abs(normalizedDepth - 768));

                            dst += 3;
                            src += 1;
                        }
                    });
                }

                this.Out.Post(colorImageDest, e.OriginatingTime);
            }
        }
Example #16
 public void Image_GrayRotate()
 {
     using var sharedImage = ImagePool.GetOrCreate(this.testImage_Gray.Width, this.testImage_Gray.Height, this.testImage_Gray.PixelFormat);
     this.testImage_Gray.CopyTo(sharedImage.Resource);
     sharedImage.Resource.Rotate(20.0f, SamplingMode.Bilinear);
     this.AssertAreImagesEqual(this.testImage_GrayRotate, sharedImage.Resource);
 }
Example #17
        public void Image_CropViaJoinOperator()
        {
            // Test that the pipeline's operator Crop() works on a stream of images and random rectangles
            using (var pipeline = Pipeline.Create("CropViaOperator"))
            {
                using (var sharedImage = ImagePool.GetOrCreate(this.testImage.Width, this.testImage.Height, this.testImage.PixelFormat))
                {
                    this.testImage.CopyTo(sharedImage.Resource);

                    // Use a non-negligible interval for both Sequences to ensure that the Join processes all
                    // messages from both streams (the default interval of 1 tick is too small to guarantee this).
                    var images = Generators.Sequence(pipeline, sharedImage, x => sharedImage, 100, TimeSpan.FromMilliseconds(1));
                    var rects  = Generators.Sequence(
                        pipeline,
                        new System.Drawing.Rectangle(0, 0, 1, 1),
                        x =>
                    {
                        Random r = new Random();
                        System.Drawing.Rectangle rect = default(System.Drawing.Rectangle);
                        rect.X      = r.Next(0, this.testImage.Width);
                        rect.Y      = r.Next(0, this.testImage.Height);
                        rect.Width  = r.Next(1, this.testImage.Width - rect.X);
                        rect.Height = r.Next(1, this.testImage.Height - rect.Y);

                        return(rect);
                    },
                        100,
                        TimeSpan.FromMilliseconds(1));
                    images.Join(rects, Match.Best <System.Drawing.Rectangle>()).Crop();
                    pipeline.Run();
                }
            }
        }
Example #18
        /// <summary>
        /// Constructs the input vector for the Tiny Yolo V2 model for a specified image.
        /// </summary>
        /// <param name="sharedImage">The shared image to construct the input vector for.</param>
        private void ConstructOnnxInputVector(Shared <Image> sharedImage)
        {
            var inputImage  = sharedImage.Resource;
            var inputWidth  = sharedImage.Resource.Width;
            var inputHeight = sharedImage.Resource.Height;

            // crop a center square
            var squareSize = Math.Min(inputWidth, inputHeight);

            using var squareImage = ImagePool.GetOrCreate(squareSize, squareSize, sharedImage.Resource.PixelFormat);
            if (inputWidth > inputHeight)
            {
                inputImage.Crop(squareImage.Resource, (inputWidth - squareSize) / 2, 0, squareSize, squareSize);
            }
            else
            {
                inputImage.Crop(squareImage.Resource, 0, (inputHeight - squareSize) / 2, squareSize, squareSize);
            }

            // resize the image to 416 x 416
            using var resizedImage = ImagePool.GetOrCreate(416, 416, sharedImage.Resource.PixelFormat);
            squareImage.Resource.Resize(resizedImage.Resource, 416, 416, SamplingMode.Bilinear);

            // if the pixel format does not match, do a conversion before extracting the bytes
            var bytes = default(byte[]);

            if (sharedImage.Resource.PixelFormat != PixelFormat.BGR_24bpp)
            {
                using var reformattedImage = ImagePool.GetOrCreate(416, 416, PixelFormat.BGR_24bpp);
                resizedImage.Resource.CopyTo(reformattedImage.Resource);
                bytes = reformattedImage.Resource.ReadBytes(3 * 416 * 416);
            }
            else
            {
                // get the bytes
                bytes = resizedImage.Resource.ReadBytes(3 * 416 * 416);
            }

            // now populate the onnxInputVector float array / tensor
            int fi = 0;

            // first the blue bytes
            for (int i = 2; i < bytes.Length; i += 3)
            {
                this.onnxInputVector[fi++] = bytes[i];
            }

            // then the green bytes
            for (int i = 1; i < bytes.Length; i += 3)
            {
                this.onnxInputVector[fi++] = bytes[i];
            }

            // then the red bytes
            for (int i = 0; i < bytes.Length; i += 3)
            {
                this.onnxInputVector[fi++] = bytes[i];
            }
        }
Example #19
 public void Image_FlipViaOperator()
 {
     using (var pipeline = Pipeline.Create("FlipViaOperator"))
     {
         using (var sharedImage = ImagePool.GetOrCreate(this.testImage2.Width, this.testImage2.Height, this.testImage2.PixelFormat))
         {
             this.testImage2.CopyTo(sharedImage.Resource);
              Generators.Sequence(pipeline, new[] { sharedImage }, default, null, keepOpen: false).Flip(FlipMode.None).Do((img) =>
              {
                  // FlipMode.None leaves the image unchanged, so the output should match the input
                  this.AssertAreImagesEqual(this.testImage2, img.Resource);
              });
              pipeline.Run();
          }
      }
 }
Example #20
        /// <summary>
        /// Converts an encoded image to an image.
        /// </summary>
        public Shared <Image> EncodedImageToImage(Shared <EncodedImage> encodedImage, Envelope envelope)
        {
            var decoder     = new ImageFromStreamDecoder(); // this is a platform-specific decoder
            var image       = encodedImage.Resource.Decode(decoder);
            var sharedImage = ImagePool.GetOrCreate(encodedImage.Resource.Width, encodedImage.Resource.Height, image.PixelFormat);

            sharedImage.Resource.CopyFrom(image.ImageData);
            return(sharedImage);
        }
Example #21
        private void ShowCurrentImage()
        {
            // Get the current image
            var sharedDepthImage = this.DepthImageVisualizationObject.CurrentValue.GetValueOrDefault().Data;

            if (sharedDepthImage == null || sharedDepthImage.Resource == null)
            {
                this.Image.Visibility = Visibility.Hidden;
            }
            else
            {
                // Update the display image
                using var sharedColorizedImage = ImagePool.GetOrCreate(
                          sharedDepthImage.Resource.Width,
                          sharedDepthImage.Resource.Height,
                          PixelFormat.BGRA_32bpp);

                var minRange = this.DepthImageVisualizationObject.RangeMin;
                var maxRange = this.DepthImageVisualizationObject.RangeMax;
                if (this.DepthImageVisualizationObject.RangeMode == DepthImageRangeMode.Auto)
                {
                    (minRange, maxRange) = sharedDepthImage.Resource.GetPixelRange();
                }

                sharedDepthImage.Resource.PseudoColorize(
                    sharedColorizedImage.Resource,
                    ((ushort)minRange, (ushort)maxRange),
                    (this.DepthImageVisualizationObject.InvalidValue < 0) ? null : (ushort)this.DepthImageVisualizationObject.InvalidValue,
                    this.DepthImageVisualizationObject.InvalidAsTransparent);

                if (this.DepthImageVisualizationObject.HorizontalFlip)
                {
                    // Flip the image before displaying it
                    Bitmap bitmap = sharedColorizedImage.Resource.ToBitmap(true);
                    bitmap.RotateFlip(RotateFlipType.RotateNoneFlipX);
                    using var flippedColorizedImage = Shared.Create(Imaging.Image.FromBitmap(bitmap));
                    this.DisplayImage.UpdateImage(flippedColorizedImage);
                }
                else
                {
                    this.DisplayImage.UpdateImage(sharedColorizedImage);
                }

                if (this.Image.Visibility != Visibility.Visible)
                {
                    this.Image.Visibility = Visibility.Visible;
                }

                // Update the image size if it's changed
                if ((this.Image.Width != this.DisplayImage.Image.PixelWidth) || (this.Image.Height != this.DisplayImage.Image.PixelHeight))
                {
                    this.Image.Width  = this.DisplayImage.Image.PixelWidth;
                    this.Image.Height = this.DisplayImage.Image.PixelHeight;
                }
            }
        }
Example #22
        private static Shared <Image> Adapter(Shared <EncodedImage> encodedImage, Envelope env)
        {
            Shared <Image> sharedImage = null;

            if ((encodedImage != null) && (encodedImage.Resource != null))
            {
                sharedImage = ImagePool.GetOrCreate(encodedImage.Resource.Width, encodedImage.Resource.Height, ImageDecoder.GetPixelFormat(encodedImage.Resource));
                ImageDecoder.DecodeTo(encodedImage.Resource, sharedImage.Resource);
            }

            return(sharedImage);
        }
Example #23
        private static Shared <Image> Adapter(Shared <DepthImage> sharedDepthImage, Envelope envelope)
        {
            Shared <Image> sharedImage = null;

            if ((sharedDepthImage != null) && (sharedDepthImage.Resource != null))
            {
                sharedImage = ImagePool.GetOrCreate(sharedDepthImage.Resource.Width, sharedDepthImage.Resource.Height, PixelFormat.Gray_16bpp);
                sharedImage.Resource.CopyFrom(sharedDepthImage.Resource);
            }

            return(sharedImage);
        }
Example #24
        /// <inheritdoc/>
        public override Shared <Image> GetAdaptedValue(Shared <DepthImage> source, Envelope envelope)
        {
            Shared <Image> sharedImage = null;

            if ((source != null) && (source.Resource != null))
            {
                sharedImage = ImagePool.GetOrCreate(source.Resource.Width, source.Resource.Height, PixelFormat.Gray_16bpp);
                sharedImage.Resource.CopyFrom(source.Resource);
            }

            return(sharedImage);
        }
Example #25
        private static Shared <Image> Adapter(byte[] value, Envelope env)
        {
            var buffer = new byte[424 * 512 * 2];

            using (var compressedStream = new GZipStream(new MemoryStream(value), CompressionMode.Decompress))
            {
                compressedStream.Read(buffer, 0, buffer.Length);
            }

            var psiImage = ImagePool.GetOrCreate(512, 424, PixelFormat.Gray_16bpp);

            psiImage.Resource.CopyFrom(buffer);
            return(psiImage);
        }
Example #26
        public void ReceiveFrame(VideoMediaBuffer videoFrame, string participantId)
        {
            var originatingTime = new DateTime(1900, 1, 1, 0, 0, 0, DateTimeKind.Utc).AddTicks(videoFrame.Timestamp);
            var frames          = new Dictionary <string, Shared <Image> >();

            using (var sharedImage = ImagePool.GetOrCreate(
                       videoFrame.VideoFormat.Width,
                       videoFrame.VideoFormat.Height,
                       PixelFormat.BGR_24bpp))
            {
                var timestamp = (long)videoFrame.Timestamp;
                if (timestamp == 0)
                {
                    this.logger.Warn($"Original sender timestamp is zero: {participantId}");
                    return;
                }
                var length = videoFrame.VideoFormat.Width * videoFrame.VideoFormat.Height * 12 / 8; // This is how to calculate NV12 buffer size
                if (length > videoFrame.Length)
                {
                    return;
                }
                byte[] data = new byte[length];

                try
                {
                    Marshal.Copy(videoFrame.Data, data, 0, length);
                    var bgrImage = NV12toBGR(data, videoFrame.VideoFormat.Width, videoFrame.VideoFormat.Height);
                    sharedImage.Resource.CopyFrom(bgrImage);
                }
                catch (Exception ex)
                {
                    this.logger.Warn($"ON FAILURE: length: {videoFrame.Length}, height: {videoFrame.VideoFormat.Height}, width: {videoFrame.VideoFormat.Width}");
                    this.logger.Error(ex);
                    return;
                }
                lock (this.Video)
                {
                    if (originatingTime > this.Video.LastEnvelope.OriginatingTime)
                    {
                        frames.Add(participantId, sharedImage);
                        this.Video.Post(frames, originatingTime);
                    }
                    else
                    {
                        this.logger.Warn("Out of order frame");
                    }
                }
            }
        }
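For reference, the NV12 length check above follows from the format's layout: a full-resolution 8-bit Y plane plus a half-resolution interleaved UV plane, i.e. 12 bits per pixel. For a 1920 x 1080 frame that works out to 1920 * 1080 * 12 / 8 = 3,110,400 bytes, which is the minimum buffer length the code requires before copying.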
Example #27
        private void ThreadProc()
        {
            Imaging.PixelFormat pixelFormat = PixelFormat.BGR_24bpp;
            switch (this.device.GetColorBpp())
            {
            case 24:
                pixelFormat = PixelFormat.BGR_24bpp;
                break;

            case 32:
                pixelFormat = PixelFormat.BGRX_32bpp;
                break;

            default:
                throw new NotSupportedException("Expected 24bpp or 32bpp image.");
            }

            var  colorImage     = ImagePool.GetOrCreate((int)this.device.GetColorWidth(), (int)this.device.GetColorHeight(), pixelFormat);
            uint colorImageSize = this.device.GetColorHeight() * this.device.GetColorStride();

            switch (this.device.GetDepthBpp())
            {
            case 16:
                pixelFormat = PixelFormat.Gray_16bpp;
                break;

            case 8:
                pixelFormat = PixelFormat.Gray_8bpp;
                break;

            default:
                throw new NotSupportedException("Expected 8bpp or 16bpp image.");
            }

            var depthImage = DepthImagePool.GetOrCreate(
                (int)this.device.GetDepthWidth(),
                (int)this.device.GetDepthHeight(),
                DepthValueSemantics.DistanceToPlane,
                0.001);
            uint depthImageSize = this.device.GetDepthHeight() * this.device.GetDepthStride();

            while (!this.shutdown)
            {
                this.device.ReadFrame(colorImage.Resource.ImageData, colorImageSize, depthImage.Resource.ImageData, depthImageSize);
                DateTime t = DateTime.UtcNow;
                this.ColorImage.Post(colorImage, t);
                this.DepthImage.Post(depthImage, t);
            }
        }
Example #28
        private static Shared <Image> Adapter(Shared <EncodedDepthImage> sharedEncodedDepthImage, Envelope envelope)
        {
            Shared <Image> sharedImage = null;

            if ((sharedEncodedDepthImage != null) && (sharedEncodedDepthImage.Resource != null))
            {
                var sharedDepthImage = DepthImagePool.GetOrCreate(sharedEncodedDepthImage.Resource.Width, sharedEncodedDepthImage.Resource.Height);
                sharedImage = ImagePool.GetOrCreate(sharedEncodedDepthImage.Resource.Width, sharedEncodedDepthImage.Resource.Height, PixelFormat.Gray_16bpp);
                var decoder = new DepthImageFromStreamDecoder();
                decoder.DecodeFromStream(sharedEncodedDepthImage.Resource.ToStream(), sharedDepthImage.Resource);
                sharedDepthImage.Resource.CopyTo(sharedImage.Resource);
            }

            return(sharedImage);
        }
Example #29
        /// <inheritdoc/>
        public override Shared <Image> GetAdaptedValue(Shared <EncodedDepthImage> source, Envelope envelope)
        {
            Shared <Image> sharedImage = null;

            if ((source != null) && (source.Resource != null))
            {
                using var sharedDepthImage = DepthImagePool.GetOrCreate(source.Resource.Width, source.Resource.Height);
                sharedImage = ImagePool.GetOrCreate(source.Resource.Width, source.Resource.Height, PixelFormat.Gray_16bpp);
                var decoder = new DepthImageFromStreamDecoder();
                decoder.DecodeFromStream(source.Resource.ToStream(), sharedDepthImage.Resource);
                sharedDepthImage.Resource.CopyTo(sharedImage.Resource);
            }

            return(sharedImage);
        }
Example #30
        /// <summary>
        /// GenerateNext is called by the Generator base class when the next sample should be read.
        /// </summary>
        /// <param name="previous">Time of previous sample.</param>
        /// <returns>Time for current sample.</returns>
        protected override DateTime GenerateNext(DateTime previous)
        {
            DateTime          originatingTime = default(DateTime);
            int               streamIndex     = 0;
            SourceReaderFlags flags           = SourceReaderFlags.None;
            long              timestamp       = 0;
            Sample            sample          = this.sourceReader.ReadSample(SourceReaderIndex.AnyStream, 0, out streamIndex, out flags, out timestamp);

            if (sample != null)
            {
                originatingTime = this.start + TimeSpan.FromTicks(timestamp);
                MediaBuffer buffer           = sample.ConvertToContiguousBuffer();
                int         currentByteCount = 0;
                int         maxByteCount     = 0;
                IntPtr      data             = buffer.Lock(out maxByteCount, out currentByteCount);

                if (streamIndex == this.imageStreamIndex)
                {
                    using (var sharedImage = ImagePool.GetOrCreate(this.videoWidth, this.videoHeight, Imaging.PixelFormat.BGR_24bpp))
                    {
                        sharedImage.Resource.CopyFrom(data);
                        this.Image.Post(sharedImage, originatingTime);
                    }
                }
                else if (streamIndex == this.audioStreamIndex)
                {
                    AudioBuffer audioBuffer = new AudioBuffer(currentByteCount, this.waveFormat);
                    Marshal.Copy(data, audioBuffer.Data, 0, currentByteCount);
                    this.Audio.Post(audioBuffer, originatingTime);
                }

                buffer.Unlock();
                buffer.Dispose();
                sample.Dispose();
            }

            if (flags == SourceReaderFlags.Endofstream)
            {
                return(DateTime.MaxValue); // Used to indicate that there is no more data
            }

            if (originatingTime <= previous)
            {
                return(previous + TimeSpan.FromTicks(1)); // To enforce strictly increasing times for the generator
            }

            return(originatingTime);
        }