Example #1
 public static void TestPersonGroup(string subscriptionKey, string endpoint, string groupId, string directory)
 {
     using (var pipeline = Pipeline.Create())
     {
         var files = Generators.Sequence(pipeline, Directory.GetFiles(directory), TimeSpan.FromTicks(1));
         files
         .Select(file => ImagePool.GetOrCreate(new Bitmap(File.OpenRead(file))))
         .RecognizeFace(new FaceRecognizerConfiguration(subscriptionKey, endpoint, groupId))
         .Join(files)
         .Do(x =>
         {
             Console.WriteLine($"File: {Path.GetFileName(x.Item2)}");
             foreach (var candidates in x.Item1)
             {
                 foreach (var face in candidates)
                 {
                     Console.WriteLine($"  Face: {face.Name} {face.Confidence}");
                 }
             }
         });
         pipeline.Run();
     }
 }
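
The pattern these examples share is \psi's `ImagePool`: `GetOrCreate` hands out a reference-counted `Shared<Image>` backed by a recycling pool, avoiding a fresh allocation per frame. Below is a minimal sketch of that lifecycle; the `width`, `height`, `rawPixels`, and `originatingTime` names and the `this.Out` emitter wiring are illustrative assumptions, not part of the example above:

    // Sketch: acquire a pooled image, fill it, post it, release the local reference.
    using (var sharedImage = ImagePool.GetOrCreate(width, height, PixelFormat.BGR_24bpp))
    {
        sharedImage.Resource.CopyFrom(rawPixels);     // copy pixel data into the pooled buffer
        this.Out.Post(sharedImage, originatingTime);  // Post takes its own reference
    } // local reference released here; the pool recycles the image once all references drop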
Example #2
        /// <summary>
        /// Called once all the subscriptions are established.
        /// </summary>
        private unsafe void OnPipelineStart()
        {
            this.camera = new MediaCaptureInternal(this.configuration.DeviceId);
            this.camera.Open();
            var isFormatSupported = false;

            foreach (var format in this.camera.SupportedPixelFormats())
            {
                if (format.Pixels == this.configuration.PixelFormat)
                {
                    this.camera.SetVideoFormat(this.configuration.Width, this.configuration.Height, format);
                    isFormatSupported = true;
                }
            }

            if (!isFormatSupported)
            {
                throw new ArgumentException($"Pixel format {this.configuration.PixelFormat} is not supported by the camera");
            }

            var current = this.camera.GetVideoFormat();

            if (current.Width != this.configuration.Width || current.Height != this.configuration.Height)
            {
                throw new ArgumentException($"Width/height {this.configuration.Width}x{this.configuration.Height} is not supported by the camera");
            }

            this.camera.OnFrame += (_, frame) =>
            {
                var originatingTime = this.pipeline.GetCurrentTime();

                if (this.Raw.HasSubscribers)
                {
                    var len = frame.Length;
                    using (Shared<byte[]> shared = RawPool.GetOrCreate(() => new byte[len]))
                    {
                        var buffer = shared.Resource.Length >= len ? shared : new Shared<byte[]>(new byte[len], shared.Recycler);
                        Marshal.Copy(frame.Start, buffer.Resource, 0, len);
                        this.Raw.Post(buffer, originatingTime);
                    }
                }

                if (this.Out.HasSubscribers)
                {
                    using (var sharedImage = ImagePool.GetOrCreate(this.configuration.Width, this.configuration.Height, PixelFormat.BGR_24bpp))
                    {
                        if (this.configuration.PixelFormat == PixelFormatId.BGR24)
                        {
                            sharedImage.Resource.CopyFrom((IntPtr)frame.Start);
                            this.Out.Post(sharedImage, originatingTime);
                        }
                        else if (this.configuration.PixelFormat == PixelFormatId.YUYV)
                        {
                            // convert YUYV -> BGR24 using the fixed-point BT.601 formulas
                            // (coefficients scaled by 256; see https://msdn.microsoft.com/en-us/library/ms893078.aspx)
                            var len = (int)(frame.Length * 1.5);
                            using (Shared<byte[]> shared = RawPool.GetOrCreate(() => new byte[len]))
                            {
                                var buffer = shared.Resource.Length >= len ? shared : new Shared<byte[]>(new byte[len], shared.Recycler);
                                var bytes  = buffer.Resource;

                                // YUYV packs two pixels into four bytes: Y0 U Y1 V
                                var pY = (byte*)frame.Start.ToPointer();
                                var pU = pY + 1;
                                var pV = pY + 3;
                                for (var i = 0; i < len;)
                                {
                                    var u = *pU - 128;
                                    var v = *pV - 128;

                                    // decode both luma samples of the macropixel against the shared chroma
                                    for (var j = 0; j < 2; j++)
                                    {
                                        var y = (*pY - 16) * 298;
                                        var b = (y + (516 * u) + 128) >> 8;
                                        var g = (y - (100 * u) - (208 * v) + 128) >> 8;
                                        var r = (y + (409 * v) + 128) >> 8;
                                        bytes[i++] = (byte)(b < 0 ? 0 : b > 255 ? 255 : b);
                                        bytes[i++] = (byte)(g < 0 ? 0 : g > 255 ? 255 : g);
                                        bytes[i++] = (byte)(r < 0 ? 0 : r > 255 ? 255 : r);
                                        pY        += 2;
                                    }

                                    pU += 4;
                                    pV += 4;
                                }

                                sharedImage.Resource.CopyFrom(bytes);
                                this.Out.Post(sharedImage, originatingTime);
                            }
                        }
                    }
                }

#if TEST_DROPPED_FRAMES
                System.Threading.Thread.Sleep(1000); // for testing dropped frames
#endif // TEST_DROPPED_FRAMES

                frame.Dispose(); // release back to driver!
            };

            this.camera.StreamBuffers();
        }
Example #3
        /// <inheritdoc/>
        public unsafe void Start(Action<DateTime> notifyCompletionTime)
        {
            // notify that this is an infinite source component
            notifyCompletionTime(DateTime.MaxValue);

            this.camera = new MediaCaptureInternal(this.configuration.DeviceId);
            this.camera.Open();
            var isFormatSupported = false;

            foreach (var format in this.camera.SupportedPixelFormats())
            {
                if (format.Pixels == this.configuration.PixelFormat)
                {
                    this.camera.SetVideoFormat(this.configuration.Width, this.configuration.Height, format);
                    isFormatSupported = true;
                }
            }

            if (!isFormatSupported)
            {
                throw new ArgumentException($"Pixel format {this.configuration.PixelFormat} is not supported by the camera");
            }

            var current = this.camera.GetVideoFormat();

            if (current.Width != this.configuration.Width || current.Height != this.configuration.Height)
            {
                throw new ArgumentException($"Width/height {this.configuration.Width}x{this.configuration.Height} is not supported by the camera");
            }

            this.camera.OnFrame += (_, frame) =>
            {
                var originatingTime = this.pipeline.GetCurrentTime();

                if (this.Raw.HasSubscribers)
                {
                    var len = frame.Length;
                    using (Shared<byte[]> shared = SharedArrayPool<byte>.GetOrCreate(len))
                    {
                        Marshal.Copy(frame.Start, shared.Resource, 0, len);
                        this.Raw.Post(shared, originatingTime);
                    }
                }

                if (this.Out.HasSubscribers)
                {
                    using (var sharedImage = ImagePool.GetOrCreate(this.configuration.Width, this.configuration.Height, PixelFormat.BGR_24bpp))
                    {
                        if (this.configuration.PixelFormat == PixelFormatId.BGR24)
                        {
                            sharedImage.Resource.CopyFrom((IntPtr)frame.Start);
                            this.Out.Post(sharedImage, originatingTime);
                        }
                        else if (this.configuration.PixelFormat == PixelFormatId.YUYV)
                        {
                            // convert YUYV -> BGR24 (see https://msdn.microsoft.com/en-us/library/ms893078.aspx)
                            var len = (int)(frame.Length * 1.5);
                            using (Shared<byte[]> shared = SharedArrayPool<byte>.GetOrCreate(len))
                            {
                                var bytes = shared.Resource;
                                var pY    = (byte*)frame.Start.ToPointer();
                                var pU    = pY + 1;
                                var pV    = pY + 3;
                                for (var i = 0; i < len;)
                                {
                                    int y = (*pY - 16) * 298;
                                    int u = *pU - 128;
                                    int v = *pV - 128;
                                    int r = (y + (409 * v) + 128) >> 8;
                                    int g = (y - (100 * u) - (208 * v) + 128) >> 8;
                                    int b = (y + (516 * u) + 128) >> 8;

                                    bytes[i++] = (byte)((r < 0) ? 0 : ((r > 255) ? 255 : r));
                                    bytes[i++] = (byte)((g < 0) ? 0 : ((g > 255) ? 255 : g));
                                    bytes[i++] = (byte)((b < 0) ? 0 : ((b > 255) ? 255 : b));

                                    pY += 2;

                                    y          = (*pY - 16) * 298;
                                    r          = (y + (409 * v) + 128) >> 8;
                                    g          = (y - (100 * u) - (208 * v) + 128) >> 8;
                                    b          = (y + (516 * u) + 128) >> 8;
                                    bytes[i++] = (byte)((r < 0) ? 0 : ((r > 255) ? 255 : r));
                                    bytes[i++] = (byte)((g < 0) ? 0 : ((g > 255) ? 255 : g));
                                    bytes[i++] = (byte)((b < 0) ? 0 : ((b > 255) ? 255 : b));

                                    pY += 2;
                                    pU += 4;
                                    pV += 4;
                                }

                                sharedImage.Resource.CopyFrom(bytes);
                                this.Out.Post(sharedImage, originatingTime);
                            }
                        }
                    }
                }

#if TEST_DROPPED_FRAMES
                System.Threading.Thread.Sleep(1000); // for testing dropped frames
#endif // TEST_DROPPED_FRAMES

                frame.Dispose(); // release back to driver!
            };

            this.camera.StreamBuffers();
        }
Example #4
        /// <summary>
        /// Called once all the subscriptions are established.
        /// </summary>
        private void OnPipelineStart()
        {
            MediaCaptureDevice.Initialize();
            CaptureFormat found = null;

            foreach (var device in MediaCaptureDevice.AllDevices)
            {
                if (!device.Attach(this.configuration.UseInSharedMode))
                {
                    continue;
                }

                Trace.WriteLine($"MediaCapture - Searching for width={this.configuration.Width} height={this.configuration.Height} deviceId={this.configuration.DeviceId}");
                Trace.WriteLine($"MediaCapture - Found: Name: '{device.FriendlyName}' SymLink: {device.SymbolicLink}");
                Trace.WriteLine($"MediaCapture -   Current   - Width: {device.CurrentFormat.nWidth} Height: {device.CurrentFormat.nHeight} Type: {device.CurrentFormat.subType.Name}/{device.CurrentFormat.subType.Guid} Framerate: {device.CurrentFormat.nFrameRateNumerator}/{device.CurrentFormat.nFrameRateDenominator}");

                if (string.IsNullOrEmpty(this.configuration.DeviceId) || device.FriendlyName == this.configuration.DeviceId || device.SymbolicLink == this.configuration.DeviceId)
                {
                    foreach (var format in device.Formats)
                    {
                        Trace.WriteLine($"MediaCapture -   Supported - Width: {format.nWidth} Height: {format.nHeight} Type: {format.subType.Name}/{format.subType.Guid} Framerate: {format.nFrameRateNumerator}/{format.nFrameRateDenominator}");
                        if (this.configuration.Width == format.nWidth && this.configuration.Height == format.nHeight)
                        {
                            // found suitable width/height
                            if (this.configuration.Framerate == format.nFrameRateNumerator / format.nFrameRateDenominator)
                            {
                                // found suitable framerate
                                if (found == null || this.configuration.Framerate == found.nFrameRateNumerator / found.nFrameRateDenominator)
                                {
                                    // found first suitable or closer framerate match
                                    this.camera = device;
                                    found       = format;
                                }
                            }
                        }
                    }
                }

                if (found != null)
                {
                    Trace.WriteLine($"MediaCapture - Using - Width: {found.nWidth} Height: {found.nHeight} Type: {found.subType.Name}/{found.subType.Guid} Framerate: {found.nFrameRateNumerator}/{found.nFrameRateDenominator}");
                    break;
                }
            }

            if (found != null)
            {
                this.camera.CurrentFormat = found;
                this.deviceInfo           = new MediaCaptureInfo(this.camera);
                var width  = this.camera.CurrentFormat.nWidth;
                var height = this.camera.CurrentFormat.nHeight;

                // Get default settings for other properties.
                var currentConfig = this.GetDeviceConfiguration();
                this.configuration.BacklightCompensation = currentConfig.BacklightCompensation;
                this.configuration.Brightness            = currentConfig.Brightness;
                this.configuration.ColorEnable           = currentConfig.ColorEnable;
                this.configuration.Contrast     = currentConfig.Contrast;
                this.configuration.Gain         = currentConfig.Gain;
                this.configuration.Gamma        = currentConfig.Gamma;
                this.configuration.Hue          = currentConfig.Hue;
                this.configuration.Saturation   = currentConfig.Saturation;
                this.configuration.Sharpness    = currentConfig.Sharpness;
                this.configuration.WhiteBalance = currentConfig.WhiteBalance;
                this.configuration.Focus        = currentConfig.Focus;

                this.SetDeviceConfiguration(this.configuration);

                this.camera.CaptureSample((data, length, timestamp) =>
                {
                    var time = DateTime.FromFileTimeUtc(timestamp);
                    using (var sharedImage = ImagePool.GetOrCreate(this.configuration.Width, this.configuration.Height, Microsoft.Psi.Imaging.PixelFormat.BGR_24bpp))
                    {
                        sharedImage.Resource.CopyFrom(data);

                        var originatingTime = this.pipeline.GetCurrentTimeFromElapsedTicks(timestamp);
                        this.Out.Post(sharedImage, originatingTime);
                    }
                });
            }
            else
            {
                throw new ArgumentException("Camera specification not found");
            }
        }
Example #5
 public void Image_GrayFlip()
 {
     using var sharedImage = ImagePool.GetOrCreate(this.testImage_Gray.Width, this.testImage_Gray.Height, this.testImage_Gray.PixelFormat);
     this.testImage_Gray.Flip(sharedImage.Resource, FlipMode.AlongHorizontalAxis);
     this.AssertAreImagesEqual(this.testImage_GrayFlip, sharedImage.Resource);
 }
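
The pooled-destination pattern this test uses for `Flip` also works inside stream operators. Here is a sketch, assuming a `Shared<Image>` video stream named `video` and \psi's generic `Process` operator (both assumptions; the destination-taking `Flip` overload is the one exercised in the test):

    // Sketch: flip each incoming frame into a pooled destination image and post it.
    var flipped = video.Process<Shared<Image>, Shared<Image>>((frame, envelope, emitter) =>
    {
        using var dest = ImagePool.GetOrCreate(
            frame.Resource.Width, frame.Resource.Height, frame.Resource.PixelFormat);
        frame.Resource.Flip(dest.Resource, FlipMode.AlongHorizontalAxis);
        emitter.Post(dest, envelope.OriginatingTime); // Post adds a reference before dest is released
    });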
Example #6
        /// <summary>
        /// GenerateNext is called by the Generator base class when the next sample should be read.
        /// </summary>
        /// <param name="previous">Time of previous sample.</param>
        /// <returns>Time for current sample.</returns>
        protected override DateTime GenerateNext(DateTime previous)
        {
            DateTime        originatingTime = default(DateTime);
            FFMPEGFrameInfo frameInfo       = new FFMPEGFrameInfo();
            bool            eos             = false;
            bool            frameRead       = this.mpegReader.NextFrame(ref frameInfo, out eos);

            if (!frameRead)
            {
                return this.lastAudioTime;
            }

            if (eos)
            {
                return DateTime.MaxValue;
            }

            double timestamp = 0.0;
            int    dataSize  = 0;

            if (frameInfo.FrameType == FFMPEGFrameInfo.FrameTypeVideo)
            {
                using (var image = ImagePool.GetOrCreate(this.mpegReader.Width, this.mpegReader.Height, this.outputFormat))
                {
                    if (this.mpegReader.ReadFrameData(image.Resource.ImageData, ref dataSize, ref timestamp))
                    {
                        originatingTime = this.start + TimeSpan.FromMilliseconds(timestamp);
                        this.Image.Post(image, originatingTime);
                    }
                }
            }
            else if (frameInfo.FrameType == FFMPEGFrameInfo.FrameTypeAudio)
            {
                if (this.audioData == IntPtr.Zero || frameInfo.BufferSize != this.audioDataSize)
                {
                    if (this.audioData != IntPtr.Zero)
                    {
                        Marshal.FreeHGlobal(this.audioData);
                    }

                    this.audioData     = Marshal.AllocHGlobal(frameInfo.BufferSize);
                    this.audioDataSize = frameInfo.BufferSize;
                }

                if (this.mpegReader.ReadFrameData(this.audioData, ref dataSize, ref timestamp))
                {
                    if (dataSize > 0)
                    {
                        if (dataSize != this.audioBufferSize)
                        {
                            this.audioBuffer     = new AudioBuffer(dataSize, this.waveFormat);
                            this.audioBufferSize = dataSize;
                        }

                        originatingTime = this.start + TimeSpan.FromMilliseconds(timestamp);
                        Marshal.Copy(this.audioData, this.audioBuffer.Data, 0, dataSize);
                        this.Audio.Post(this.audioBuffer, originatingTime);
                        this.lastAudioTime = originatingTime;
                    }
                }
            }

            return this.lastAudioTime;
        }
Example #7
        /// <inheritdoc/>
        protected override void ProcessSensorFrame(IResearchModeSensorFrame sensorFrame, ResearchModeSensorResolution resolution, ulong frameTicks, DateTime originatingTime)
        {
            var shouldOutputDepthImage = this.Configuration.OutputDepthImage &&
                                         (originatingTime - this.DepthImage.LastEnvelope.OriginatingTime) > this.Configuration.OutputMinInterval;

            var shouldOutputDepthImageCameraView = this.Configuration.OutputDepthImageCameraView &&
                                                   (originatingTime - this.DepthImageCameraView.LastEnvelope.OriginatingTime) > this.Configuration.OutputMinInterval;

            var shouldOutputInfraredImage = this.Configuration.OutputInfraredImage &&
                                            (originatingTime - this.InfraredImage.LastEnvelope.OriginatingTime) > this.Configuration.OutputMinInterval;

            var shouldOutputInfraredImageCameraView = this.Configuration.OutputInfraredImageCameraView &&
                                                      (originatingTime - this.InfraredImageCameraView.LastEnvelope.OriginatingTime) > this.Configuration.OutputMinInterval;

            if (shouldOutputDepthImage ||
                shouldOutputDepthImageCameraView ||
                shouldOutputInfraredImage ||
                shouldOutputInfraredImageCameraView)
            {
                var depthFrame       = sensorFrame as ResearchModeSensorDepthFrame;
                int depthImageWidth  = (int)resolution.Width;
                int depthImageHeight = (int)resolution.Height;

                // Process and post the depth image if need be
                if (shouldOutputDepthImage || shouldOutputDepthImageCameraView)
                {
                    byte[] sigmaBuffer = null;
                    var    depthBuffer = depthFrame.GetBuffer();

                    if (this.isLongThrow)
                    {
                        sigmaBuffer = depthFrame.GetSigmaBuffer(); // Long-throw only
                        Debug.Assert(depthBuffer.Length == sigmaBuffer.Length, "Depth and sigma buffers should be of equal size!");
                    }

                    using var depthImage = DepthImagePool.GetOrCreate(
                        depthImageWidth,
                        depthImageHeight,
                        DepthValueSemantics.DistanceToPoint,
                        0.001);
                    Debug.Assert(depthImage.Resource.Size == depthBuffer.Length * sizeof(ushort), "DepthImage size does not match raw depth buffer size!");

                    unsafe
                    {
                        ushort* depthData = (ushort*)depthImage.Resource.ImageData.ToPointer();
                        for (int i = 0; i < depthBuffer.Length; ++i)
                        {
                            bool invalid = this.isLongThrow ?
                                           ((sigmaBuffer[i] & InvalidMask) > 0) :
                                           (depthBuffer[i] >= InvalidAhatValue);

                            *depthData++ = invalid ? (ushort)0 : depthBuffer[i];
                        }
                    }

                    if (shouldOutputDepthImage)
                    {
                        this.DepthImage.Post(depthImage, originatingTime);
                    }

                    if (shouldOutputDepthImageCameraView)
                    {
                        using var depthImageCameraView = new DepthImageCameraView(depthImage, this.GetCameraIntrinsics(), this.GetCameraPose());
                        this.DepthImageCameraView.Post(depthImageCameraView, originatingTime);
                    }
                }

                // Process and post the infrared image if need be
                if (shouldOutputInfraredImage || shouldOutputInfraredImageCameraView)
                {
                    var infraredBuffer = depthFrame.GetAbDepthBuffer();
                    using var infraredImage = ImagePool.GetOrCreate(depthImageWidth, depthImageHeight, PixelFormat.Gray_16bpp);
                    Debug.Assert(infraredImage.Resource.Size == infraredBuffer.Length * sizeof(ushort), "InfraredImage size does not match raw infrared buffer size!");

                    unsafe
                    {
                        fixed (ushort* p = infraredBuffer)
                        {
                            infraredImage.Resource.CopyFrom((IntPtr)p);
                        }
                    }

                    if (shouldOutputInfraredImage)
                    {
                        this.InfraredImage.Post(infraredImage, originatingTime);
                    }

                    if (shouldOutputInfraredImageCameraView)
                    {
                        using var infraredImageCameraView = new ImageCameraView(infraredImage, this.GetCameraIntrinsics(), this.GetCameraPose());
                        this.InfraredImageCameraView.Post(infraredImageCameraView, originatingTime);
                    }
                }
            }
        }
Example #8
        /// <inheritdoc/>
        public void Start(Action<DateTime> notifyCompletionTime)
        {
            // notify that this is an infinite source component
            notifyCompletionTime(DateTime.MaxValue);

            MediaCaptureDevice.Initialize();
            CaptureFormat found = null;

            foreach (var device in MediaCaptureDevice.AllDevices)
            {
                if (!device.Attach(this.configuration.UseInSharedMode))
                {
                    continue;
                }

                // Trace.WriteLine($"MediaCapture - Searching for width={this.configuration.Width} height={this.configuration.Height} deviceId={this.configuration.DeviceId}");
                // Trace.WriteLine($"MediaCapture - Found: Name: '{device.FriendlyName}' SymLink: {device.SymbolicLink}");
                // Trace.WriteLine($"MediaCapture -   Current   - Width: {device.CurrentFormat.nWidth} Height: {device.CurrentFormat.nHeight} Type: {device.CurrentFormat.subType.Name}/{device.CurrentFormat.subType.Guid} Framerate: {device.CurrentFormat.nFrameRateNumerator}/{device.CurrentFormat.nFrameRateDenominator}");
                if (string.IsNullOrEmpty(this.configuration.DeviceId) || device.FriendlyName == this.configuration.DeviceId || device.SymbolicLink == this.configuration.DeviceId)
                {
                    foreach (var format in device.Formats)
                    {
                        // Trace.WriteLine($"MediaCapture -   Supported - Width: {format.nWidth} Height: {format.nHeight} Type: {format.subType.Name}/{format.subType.Guid} Framerate: {format.nFrameRateNumerator}/{format.nFrameRateDenominator}");
                        if (this.configuration.Width == format.nWidth && this.configuration.Height == format.nHeight)
                        {
                            // found suitable width/height
                            if (this.configuration.Framerate == format.nFrameRateNumerator / format.nFrameRateDenominator)
                            {
                                // found suitable framerate
                                if (found == null || this.configuration.Framerate == found.nFrameRateNumerator / found.nFrameRateDenominator)
                                {
                                    // found first suitable or closer framerate match
                                    this.camera = device;
                                    found       = format;
                                }
                            }
                        }
                    }
                }

                if (found != null)
                {
                    // Trace.WriteLine($"MediaCapture - Using - Width: {found.nWidth} Height: {found.nHeight} Type: {found.subType.Name}/{found.subType.Guid} Framerate: {found.nFrameRateNumerator}/{found.nFrameRateDenominator}");
                    break;
                }
            }

            if (found != null)
            {
                this.camera.CurrentFormat = found;
                this.deviceInfo           = new MediaCaptureInfo(this.camera);
                var width  = this.camera.CurrentFormat.nWidth;
                var height = this.camera.CurrentFormat.nHeight;

                // Get default settings for other properties.
                var currentConfig = this.GetDeviceConfiguration();
                this.configuration.BacklightCompensation = currentConfig.BacklightCompensation;
                this.configuration.Brightness            = currentConfig.Brightness;
                this.configuration.ColorEnable           = currentConfig.ColorEnable;
                this.configuration.Contrast     = currentConfig.Contrast;
                this.configuration.Gain         = currentConfig.Gain;
                this.configuration.Gamma        = currentConfig.Gamma;
                this.configuration.Hue          = currentConfig.Hue;
                this.configuration.Saturation   = currentConfig.Saturation;
                this.configuration.Sharpness    = currentConfig.Sharpness;
                this.configuration.WhiteBalance = currentConfig.WhiteBalance;
                this.configuration.Focus        = currentConfig.Focus;

                this.SetDeviceConfiguration(this.configuration);

                this.camera.CaptureSample((data, length, timestamp) =>
                {
                    var time = DateTime.FromFileTimeUtc(timestamp);
                    using var sharedImage = ImagePool.GetOrCreate(this.configuration.Width, this.configuration.Height, PixelFormat.BGR_24bpp);
                    sharedImage.Resource.CopyFrom(data);

                    var originatingTime = this.pipeline.GetCurrentTimeFromElapsedTicks(timestamp);
                    this.Out.Post(sharedImage, originatingTime);
                });
            }
            else
            {
                // Requested camera capture format was not found. Construct an exception message with a list of supported formats.
                var exceptionMessageBuilder = new StringBuilder();

                if (string.IsNullOrEmpty(this.configuration.DeviceId))
                {
                    exceptionMessageBuilder.Append($"No cameras were found that support the requested capture format of {this.configuration.Width}x{this.configuration.Height} @ {this.configuration.Framerate} fps. ");
                }
                else
                {
                    exceptionMessageBuilder.Append($"The specified camera {this.configuration.DeviceId} does not support the requested capture format of {this.configuration.Width}x{this.configuration.Height} @ {this.configuration.Framerate} fps. ");
                }

                exceptionMessageBuilder.AppendLine("Use one of the following supported camera capture formats instead:");
                this.AppendSupportedCaptureFormats(exceptionMessageBuilder);

                throw new ArgumentException(exceptionMessageBuilder.ToString());
            }
        }
Example #9
        /// <summary>
        /// GenerateNext is called by the Generator base class when the next sample should be read.
        /// </summary>
        /// <param name="currentTime">The originating time that triggered the current call.</param>
        /// <returns>The originating time at which to capture the next sample.</returns>
        protected override DateTime GenerateNext(DateTime currentTime)
        {
            DateTime originatingTime = default;
            var      sample          = this.sourceReader.ReadSample(SourceReaderIndex.AnyStream, 0, out int streamIndex, out SourceReaderFlags flags, out long timestamp);

            if (sample != null)
            {
                originatingTime = this.start + TimeSpan.FromTicks(timestamp);
                var buffer = sample.ConvertToContiguousBuffer();
                var data   = buffer.Lock(out _, out int currentByteCount);

                if (streamIndex == this.imageStreamIndex)
                {
                    // Detect out of order originating times
                    if (originatingTime > this.lastPostedImageTime)
                    {
                        using var sharedImage = ImagePool.GetOrCreate(this.videoWidth, this.videoHeight, Imaging.PixelFormat.BGR_24bpp);
                        sharedImage.Resource.CopyFrom(data);
                        this.Image.Post(sharedImage, originatingTime);
                        this.lastPostedImageTime = originatingTime;
                    }
                    else if (!this.dropOutOfOrderPackets)
                    {
                        throw new InvalidOperationException(
                                  $"The most recently captured image frame has a timestamp ({originatingTime.TimeOfDay}) which is before " +
                                  $"that of the last posted image frame ({this.lastPostedImageTime.TimeOfDay}), as reported by the video stream. This could " +
                                  $"be due to a timing glitch in the video stream. Set the 'dropOutOfOrderPackets' " +
                                  $"parameter to true to handle this condition by dropping " +
                                  $"packets with out of order timestamps.");
                    }
                }
                else if (streamIndex == this.audioStreamIndex)
                {
                    // Detect out of order originating times
                    if (originatingTime > this.lastPostedAudioTime)
                    {
                        var audioBuffer = new AudioBuffer(currentByteCount, this.waveFormat);
                        Marshal.Copy(data, audioBuffer.Data, 0, currentByteCount);
                        this.Audio.Post(audioBuffer, originatingTime);
                        this.lastPostedAudioTime = originatingTime;
                    }
                    else if (!this.dropOutOfOrderPackets)
                    {
                        throw new InvalidOperationException(
                                  $"The most recently captured audio buffer has a timestamp ({originatingTime.TimeOfDay}) which is before " +
                                  $"that of the last posted audio buffer ({this.lastPostedAudioTime.TimeOfDay}), as reported by the audio stream. This could " +
                                  $"be due to a timing glitch in the audio stream. Set the 'dropOutOfOrderPackets' " +
                                  $"parameter to true to handle this condition by dropping " +
                                  $"packets with out of order timestamps.");
                    }
                }

                buffer.Unlock();
                buffer.Dispose();
                sample.Dispose();
            }

            if (flags == SourceReaderFlags.Endofstream)
            {
                return DateTime.MaxValue; // Used to indicate there is no more data
            }

            return originatingTime;
        }
Example #10
        private void MapColorToDepth(DepthFrame depthFrame, Shared<Image> colorImage)
        {
            const int colorImageWidth  = 1920;
            const int colorImageHeight = 1080;

            if (!this.configuration.OutputColorToCameraMapping && !this.configuration.OutputRGBD)
            {
                return;
            }

            ushort[] depthData = new ushort[depthFrame.FrameDescription.LengthInPixels];
            depthFrame.CopyFrameDataToArray(depthData);

            if (this.configuration.OutputColorToCameraMapping)
            {
                // Writing out a mapping from color space to camera space
                CameraSpacePoint[] colorToCameraMapping = new CameraSpacePoint[colorImageWidth * colorImageHeight];
                this.kinectSensor.CoordinateMapper.MapColorFrameToCameraSpace(depthData, colorToCameraMapping);
                var time = this.pipeline.GetCurrentTimeFromElapsedTicks(depthFrame.RelativeTime.Ticks);
                this.ColorToCameraMapper.Post(colorToCameraMapping, time);
            }

            if (this.configuration.OutputRGBD)
            {
                unsafe
                {
                    DepthSpacePoint[] depthSpacePoints = new DepthSpacePoint[colorImageWidth * colorImageHeight];
                    this.kinectSensor.CoordinateMapper.MapColorFrameToDepthSpace(depthData, depthSpacePoints);
                    using (var rgbd = ImagePool.GetOrCreate(colorImageWidth, colorImageHeight, Imaging.PixelFormat.RGBA_64bpp))
                    {
                        byte* srcRow      = (byte*)colorImage.Resource.ImageData.ToPointer();
                        byte* dstRow      = (byte*)rgbd.Resource.ImageData.ToPointer();
                        int   depthWidth  = depthFrame.FrameDescription.Width;
                        int   depthHeight = depthFrame.FrameDescription.Height;
                        for (int y = 0; y < colorImage.Resource.Height; y++)
                        {
                            byte*   srcCol = srcRow;
                            ushort* dstCol = (ushort*)dstRow;
                            int     offset = y * colorImageWidth;
                            for (int x = 0; x < colorImage.Resource.Width; x++)
                            {
                                // widen the 8-bpc BGR source channels into the 16-bpc RGBA target
                                dstCol[0] = (ushort)(srcCol[2] << 8);
                                dstCol[1] = (ushort)(srcCol[1] << 8);
                                dstCol[2] = (ushort)(srcCol[0] << 8);
                                DepthSpacePoint pt = depthSpacePoints[offset];
                                if (pt.X >= 0 && pt.X < depthWidth && pt.Y >= 0 && pt.Y < depthHeight)
                                {
                                    dstCol[3] = depthData[((int)pt.Y * depthWidth) + (int)pt.X];
                                }
                                else
                                {
                                    dstCol[3] = 0;
                                }

                                dstCol += 4;
                                srcCol += colorImage.Resource.BitsPerPixel / 8;
                                offset++;
                            }

                            srcRow += colorImage.Resource.Stride;
                            dstRow += rgbd.Resource.Stride;
                        }

                        var time = this.pipeline.GetCurrentTimeFromElapsedTicks(depthFrame.RelativeTime.Ticks);
                        this.RGBDImage.Post(rgbd, time);
                    }
                }
            }
        }
Example #11
 public Picture(Rectangle rect, string fileName) : base(rect)
 {
     intrinsicState = ImagePool.getImage(fileName);
     // incrementKey();
 }
Example #12
        protected override Shared<Image> ProcessImage(Bitmap image, Envelope envelope)
        {
            var sharedImage = ImagePool.GetOrCreateFromBitmap(image);

            return sharedImage;
        }
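
`GetOrCreateFromBitmap` copies a `System.Drawing.Bitmap` into a pooled `Shared<Image>`. A short sketch of feeding image files through it, in the spirit of Example #1; the directory name, frame interval, and pipeline wiring are placeholders:

    // Sketch: turn files on disk into a stream of pooled images (paths/timing are placeholders).
    using (var pipeline = Pipeline.Create())
    {
        var frames = Generators
            .Sequence(pipeline, Directory.GetFiles("frames"), TimeSpan.FromMilliseconds(33))
            .Select(file =>
            {
                using var bitmap = new Bitmap(file);  // release the GDI+ handle after the copy
                return ImagePool.GetOrCreateFromBitmap(bitmap);
            });
        pipeline.Run();
    }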
Example #13
        private void Kinect_AllFramesReady(object sender, AllFramesReadyEventArgs allFramesReadyEventArgs)
        {
            ColorImageFrame colorImageFrame = null;
            DepthImageFrame depthImageFrame = null;
            SkeletonFrame   skeletonFrame   = null;

            try
            {
                colorImageFrame = allFramesReadyEventArgs.OpenColorImageFrame();
                depthImageFrame = allFramesReadyEventArgs.OpenDepthImageFrame();
                skeletonFrame   = allFramesReadyEventArgs.OpenSkeletonFrame();

                if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
                {
                    return;
                }

                // Check for image format changes.  The FaceTracker doesn't
                // deal with that so we need to reset.
                if (this.depthImageFormat != depthImageFrame.Format)
                {
                    this.depthImage       = null;
                    this.depthImageFormat = depthImageFrame.Format;
                }

                if (this.colorImageFormat != colorImageFrame.Format)
                {
                    this.colorImage       = null;
                    this.colorImageFormat = colorImageFrame.Format;
                }

                // Create any buffers to store copies of the data we work with
                if (this.depthImage == null)
                {
                    this.depthImage = new short[depthImageFrame.PixelDataLength];
                }

                if (this.colorImage == null)
                {
                    this.colorImage = new byte[colorImageFrame.PixelDataLength];
                }

                // Get the skeleton information
                if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
                {
                    this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
                }

                // TODO look into using the Timestamp on each frame
                var time = pipeline.GetCurrentTime();

                var sharedColorImage = ImagePool.GetOrCreate(colorImageFrame.Width, colorImageFrame.Height, Imaging.PixelFormat.BGRX_32bpp);
                var sharedDepthImage = ImagePool.GetOrCreate(depthImageFrame.Width, depthImageFrame.Height, Imaging.PixelFormat.Gray_16bpp);

                colorImageFrame.CopyPixelDataTo(sharedColorImage.Resource.ImageData, (colorImageFrame.Width * colorImageFrame.Height * 4));
                this.ColorImage.Post(sharedColorImage, time);

                //depthImageFrame.CopyPixelDataTo(sharedDepthImage.Resource.ImageData, (depthImageFrame.Width * depthImageFrame.Height * 2));
                depthImageFrame.CopyPixelDataTo(sharedDepthImage.Resource.ImageData, depthImageFrame.PixelDataLength);
                this.DepthImage.Post(sharedDepthImage, time);

                skeletonFrame.CopySkeletonDataTo(this.skeletonData);
                this.Skeletons.Post(this.skeletonData.ToList(), time);
            }
            catch
            {
                // TODO catch a cold
            }
        }
Example #14
        private void Process(ValueTuple<OpenPoseDatum, Shared<Image>> data, Envelope envelope)
        {
            var (datum, frame) = data;
            lock (this)
            {
                // draw
                if (frame != null && frame.Resource != null)
                {
                    using (var img = ImagePool.GetOrCreate(frame.Resource.Width, frame.Resource.Height, frame.Resource.PixelFormat))
                    {
                        frame.Resource.CopyTo(img.Resource);
                        var buffer = new ImageBuffer(img.Resource.Width, img.Resource.Height, img.Resource.ImageData, img.Resource.Stride);

                        // pose
                        if (datum.poseKeypoints != null)
                        {
                            for (var people = 0; people < datum.poseKeypoints.GetSize(0); people++)
                            {
                                Point GetPoint(int bodyPart)
                                {
                                    var x     = datum.poseKeypoints.Get(people, bodyPart, 0);
                                    var y     = datum.poseKeypoints.Get(people, bodyPart, 1);
                                    var score = datum.poseKeypoints.Get(people, bodyPart, 2);

                                    return new Point(x, y);
                                }

                                void Line(int partIdx1, int partIdx2)
                                {
                                    var p1 = GetPoint(partIdx1);
                                    var p2 = GetPoint(partIdx2);

                                    if ((p1.X != 0 || p1.Y != 0) && (p2.X != 0 || p2.Y != 0))
                                    {
                                        Methods.DrawLine(buffer, p1, p2);
                                    }
                                }

                                // BODY_25
                                for (var i = 0; i < 25; i++)
                                {
                                    Methods.DrawPoint(buffer, GetPoint(i), 3);
                                }
                                Line(0, 1);
                                Line(1, 2);
                                Line(2, 3);
                                Line(3, 4);
                                Line(1, 5);
                                Line(5, 6);
                                Line(6, 7);
                                Line(1, 8);
                                Line(8, 9);
                                Line(9, 10);
                                Line(10, 11);
                                Line(8, 12);
                                Line(12, 13);
                                Line(13, 14);
                                Line(0, 15);
                                Line(0, 16);
                                Line(15, 17);
                                Line(16, 18);
                                Line(19, 20);
                                Line(19, 21);
                                Line(14, 21);
                                Line(22, 23);
                                Line(22, 24);
                                Line(11, 24);
                            }
                        }
                        // face

                        // hand

                        Out.Post(img, envelope.OriginatingTime);
                        display.Update(img);
                    }
                }
            }
        }