/// <summary>
/// Produces the next datum for the OpenPose thread pipeline: reads one image from
/// <c>_ImageFiles</c>, wraps it in a one-element datum vector, and stops this worker
/// when no frames remain or an empty frame is read.
/// </summary>
/// <returns>A shared pointer to a one-element datum vector, or null when stopping.</returns>
        {
            try
            {
                // Close program when all images have been consumed
                if (this._ImageFiles.Length <= this._Counter)
                {
                    OpenPose.Log("Last frame read and added to queue. Closing program after it is processed.", Priority.High);
                    // This function stops this worker, which will eventually stop the whole thread system once all the
                    // frames have been processed
                    this.Stop();
                    return null;
                }
                else
                {
                    // Create new datum: a native vector holding a single (initially empty) shared pointer
                    var vector    = new StdVector<StdSharedPtr<UserDatum>>();
                    var datumsPtr = new StdSharedPtr<StdVector<StdSharedPtr<UserDatum>>>(vector);
                    datumsPtr.Get().EmplaceBack();
                    var datum = datumsPtr.Get().At(0);

                    // C# cannot set pointer object by using assignment operator
                    datum.Reset(new UserDatum());

                    // Fill datum with the next image on disk (counter is advanced here)
                    using (var cvInputData = Cv.ImRead(this._ImageFiles[this._Counter++]))
                        using (var inputData = OpenPose.OP_CV2OPCONSTMAT(cvInputData))
                            datum.Get().CvInputData = inputData;

                    // If empty frame -> stop and return null. Dispose the shared pointer first so
                    // the native vector is released deterministically instead of leaking until the
                    // finalizer runs (the bare `datumsPtr = null` alone only drops the managed ref).
                    if (datum.Get().CvInputData.Empty)
                    {
                        OpenPose.Log($"Empty frame detected on path: {this._ImageFiles[this._Counter - 1]}. Closing program.", Priority.High);
                        this.Stop();
                        datumsPtr.Dispose();
                        datumsPtr = null;
                    }

                    return datumsPtr;
                }
            }
            catch (Exception e)
            {
                OpenPose.Log("Some kind of unexpected error happened.");
                this.Stop();
                OpenPose.Error(e.Message, -1, nameof(this.WorkProducer));
                return null;
            }
        }
        /// <summary>
        /// Reads the next image from <c>_ImageFiles</c> and packages it as a one-element datum
        /// vector. Sets <c>_Closed</c> and returns null once every image has been consumed or
        /// an empty frame is encountered.
        /// </summary>
        /// <returns>A shared pointer to a one-element datum vector, or null when finished.</returns>
        public StdSharedPtr<StdVector<StdSharedPtr<Datum>>> CreateDatum()
        {
            // Close program when already closed or when all images have been consumed
            if (this._Closed || this._ImageFiles.Length <= this._Counter)
            {
                OpenPose.Log("Last frame read and added to queue. Closing program after it is processed.", Priority.High);

                // This function stops this worker, which will eventually stop the whole thread system once all the
                // frames have been processed
                this._Closed = true;
                return null;
            }
            else
            {
                // Create new datum: a native vector holding a single (initially empty) shared pointer
                var vector    = new StdVector<StdSharedPtr<Datum>>();
                var datumsPtr = new StdSharedPtr<StdVector<StdSharedPtr<Datum>>>(vector);
                datumsPtr.Get().EmplaceBack();
                var datum = datumsPtr.Get().At(0);

                // C# cannot set pointer object by using assignment operator
                datum.Reset(new Datum());

                // Fill datum with the next image on disk (counter is advanced here)
                using (var cvInputData = Cv.ImRead(this._ImageFiles[this._Counter++]))
                    using (var inputData = OpenPose.OP_CV2OPCONSTMAT(cvInputData))
                        datum.Get().CvInputData = inputData;

                // If empty frame -> mark closed and return null. Dispose the shared pointer first
                // so the native vector is released deterministically instead of leaking until the
                // finalizer runs (the bare `datumsPtr = null` alone only drops the managed ref).
                if (datum.Get().CvInputData.Empty)
                {
                    OpenPose.Log($"Empty frame detected on path: {this._ImageFiles[this._Counter - 1]}. Closing program.", Priority.High);
                    this._Closed = true;
                    datumsPtr.Dispose();
                    datumsPtr = null;
                }

                return datumsPtr;
            }
        }
        // ----- Example #3 -----
        /// <summary>
        /// Shows the rendered output frame of the first datum in a window and blocks until a
        /// key is pressed. Silently does nothing when the pointer is null or the vector is empty.
        /// </summary>
        /// <param name="datumsPtr">Shared pointer to the processed datum vector; may be null.</param>
        private static void Display(StdSharedPtr <StdVector <StdSharedPtr <Datum> > > datumsPtr)
        {
            try
            {
                // Guard: nothing to display for a null pointer or an empty datum vector.
                if (datumsPtr == null || !datumsPtr.TryGet(out var data) || data.Empty)
                    return;

                // User's displaying/saving/other processing here
                // datum.cvOutputData: rendered frame with pose or heatmaps
                // datum.poseKeypoints: Array<float> with the estimated pose
                var firstDatum = datumsPtr.Get().At(0).Get();

                // Convert the rendered OpenPose matrix back to an OpenCV Mat and show it.
                using (var renderedFrame = OpenPose.OP_OP2CVCONSTMAT(firstDatum.CvOutputData))
                {
                    Cv.ImShow($"{OpenPose.OpenPoseNameAndVersion()} - Tutorial C++ API", renderedFrame);
                    Cv.WaitKey(0);
                }
            }
            catch (Exception e)
            {
                OpenPose.Error(e.Message, -1, nameof(Display));
            }
        }
        // ----- Example #4 -----
        /// <summary>
        /// Blends one pose heat-map channel over the network input image and displays the result.
        /// Note: heat maps are in net_resolution size, which does not necessarily match the final image size.
        /// </summary>
        /// <param name="datumsPtr">Shared pointer to the processed datum vector; may be null.</param>
        /// <param name="desiredChannel">Heat-map channel to show (wrapped modulo the channel count).</param>
        /// <returns>True when the user pressed ESC or an error occurred, i.e. the caller should stop.</returns>
        private static bool Display(StdSharedPtr <StdVector <StdSharedPtr <Datum> > > datumsPtr, int desiredChannel = 0)
        {
            try
            {
                if (datumsPtr != null && datumsPtr.TryGet(out var data) && !data.Empty)
                {
                    var datum = datumsPtr.Get().At(0).Get();

                    // Read heat maps; channel/height/width layout is channel-major, float32 elements.
                    var poseHeatMaps = datum.PoseHeatMaps;
                    var numberChannels = poseHeatMaps.GetSize(0);
                    var height         = poseHeatMaps.GetSize(1);
                    var width          = poseHeatMaps.GetSize(2);
                    var eleSize        = sizeof(float);

                    // View over the requested channel (no copy; wraps the native buffer).
                    using (var desiredChannelHeatMap = new Mat(height, width, MatType.CV_32F, IntPtr.Add(poseHeatMaps.GetPtr(), (desiredChannel % numberChannels) * height * width * eleSize)))
                    {
                        // Image used by the OpenPose body network (same resolution as the heat maps),
                        // stored planar B/G/R, values roughly in [-0.5, 0.5].
                        var inputNetData = datum.InputNetData[0];
                        using (var inputNetDataB = new Mat(height, width, MatType.CV_32F, IntPtr.Add(inputNetData.GetPtr(), 0 * height * width * eleSize)))
                        using (var inputNetDataG = new Mat(height, width, MatType.CV_32F, IntPtr.Add(inputNetData.GetPtr(), 1 * height * width * eleSize)))
                        using (var inputNetDataR = new Mat(height, width, MatType.CV_32F, IntPtr.Add(inputNetData.GetPtr(), 2 * height * width * eleSize)))
                        using (var vector = new StdVector <Mat>(new List <Mat>(new[] { inputNetDataB, inputNetDataG, inputNetDataR })))
                        using (var tmp = new Mat())
                        {
                            Cv.Merge(vector, tmp);

                            // Undo the network normalization: (x + 0.5) * 255 -> displayable range.
                            using (var add = tmp + 0.5)
                            using (var mul = add * 255)
                            using (var netInputImage = (Mat)mul)
                            using (var netInputImageUint8 = new Mat())
                            {
                                netInputImage.ConvertTo(netInputImageUint8, MatType.CV_8UC1);
                                using (var desiredChannelHeatMapUint8 = new Mat())
                                {
                                    desiredChannelHeatMap.ConvertTo(desiredChannelHeatMapUint8, MatType.CV_8UC1);

                                    // Colorize the heat map and blend it 50/50 with the net input image.
                                    using (var imageToRender = new Mat())
                                    {
                                        Cv.ApplyColorMap(desiredChannelHeatMapUint8, desiredChannelHeatMapUint8, ColormapType.COLORMAP_JET);
                                        Cv.AddWeighted(netInputImageUint8, 0.5, desiredChannelHeatMapUint8, 0.5, 0d, imageToRender);

                                        // Display image
                                        Cv.ImShow($"{OpenPose.OpenPoseNameAndVersion()} - Tutorial C++ API", imageToRender);
                                    }
                                }
                            }
                        }
                    }
                }
                else
                {
                    OpenPose.Log("Nullptr or empty datumsPtr found.", Priority.High);
                }

                var key = (char)Cv.WaitKey(1);
                return key == 27;
            }
            catch (Exception e)
            {
                // The source was truncated here (try had no catch). Mirror the other Display
                // overload's error handling and request shutdown on failure.
                OpenPose.Error(e.Message, -1, nameof(Display));
                return true;
            }
        }
        // ----- Example #5 -----
        /// <summary>
        /// Stand-alone pose-estimation demo: loads a single image from <c>ImagePath</c>, runs the
        /// OpenPose Caffe pose extractor on it, renders the keypoints on the GPU and displays the
        /// result, logging the total elapsed time.
        /// </summary>
        /// <returns>0 on success, -1 on error.</returns>
        private static int TutorialDeveloperPose2()
        {
            try
            {
                OpenPose.Log("Starting OpenPose demo...", Priority.High);
                var timeBegin = new Stopwatch();
                timeBegin.Start();

                // ------------------------- INITIALIZATION -------------------------
                // Step 1 - Set logging level
                // - 0 will output all the logging messages
                // - 255 will output nothing
                OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
                ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
                OpenPose.Log("", Priority.Low);
                // Step 2 - Read GFlags (user defined configuration)
                // outputSize ("-1x-1" = keep input resolution)
                var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
                // netInputSize ("-1x368" = auto width for 368px height)
                var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
                // poseModel
                var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
                // Check no contradictory flags enabled
                if (Flags.AlphaPose < 0.0 || Flags.AlphaPose > 1.0)
                {
                    OpenPose.Error("Alpha value for blending must be in the range [0,1].", -1, nameof(TutorialDeveloperPose2));
                }
                if (Flags.ScaleGap <= 0.0 && Flags.ScaleNumber > 1)
                {
                    OpenPose.Error("Incompatible flag configuration: scale_gap must be greater than 0 or scale_number = 1.", -1, nameof(TutorialDeveloperPose2));
                }
                // Step 3 - Initialize all required classes
                // NOTE(review): the nested usings define the disposal order of native resources;
                // do not reorder them.
                using (var scaleAndSizeExtractor = new ScaleAndSizeExtractor(netInputSize, outputSize, Flags.ScaleNumber, Flags.ScaleGap))
                    using (var cvMatToOpInput = new CvMatToOpInput(poseModel))
                        using (var cvMatToOpOutput = new CvMatToOpOutput())
                            using (var poseExtractorPtr = new StdSharedPtr <PoseExtractorCaffe>(new PoseExtractorCaffe(poseModel, Flags.ModelFolder, Flags.NumGpuStart)))
                                using (var poseGpuRenderer = new PoseGpuRenderer(poseModel, poseExtractorPtr, (float)Flags.RenderThreshold, !Flags.DisableBlending, (float)Flags.AlphaPose))
                                {
                                    poseGpuRenderer.SetElementToRender(Flags.PartToShow);

                                    using (var opOutputToCvMat = new OpOutputToCvMat())
                                        using (var frameDisplayer = new FrameDisplayer("OpenPose Tutorial - Example 2", outputSize))
                                        {
                                            // Step 4 - Initialize resources on desired thread (in this case single thread, i.e., we init resources here)
                                            poseExtractorPtr.Get().InitializationOnThread();
                                            poseGpuRenderer.InitializationOnThread();

                                            // ------------------------- POSE ESTIMATION AND RENDERING -------------------------
                                            // Step 1 - Read and load image, error if empty (possibly wrong path)
                                            // Alternative: cv::imread(Flags.image_path, CV_LOAD_IMAGE_COLOR);
                                            using (var inputImage = OpenPose.LoadImage(ImagePath, LoadImageFlag.LoadImageColor))
                                            {
                                                if (inputImage.Empty)
                                                {
                                                    OpenPose.Error("Could not open or find the image: " + ImagePath, -1, nameof(TutorialDeveloperPose2));
                                                }
                                                var imageSize = new Point <int>(inputImage.Cols, inputImage.Rows);
                                                // Step 2 - Get desired scale sizes (input->net scales, net sizes,
                                                // input->output scale, and the final output resolution)
                                                var tuple = scaleAndSizeExtractor.Extract(imageSize);
                                                var scaleInputToNetInputs = tuple.Item1;
                                                var netInputSizes         = tuple.Item2;
                                                var scaleInputToOutput    = tuple.Item3;
                                                var outputResolution      = tuple.Item4;
                                                // Step 3 - Format input image to OpenPose input and output formats
                                                var netInputArray = cvMatToOpInput.CreateArray(inputImage, scaleInputToNetInputs, netInputSizes);
                                                var outputArray   = cvMatToOpOutput.CreateArray(inputImage, scaleInputToOutput, outputResolution);
                                                // Step 4 - Estimate poseKeypoints (forward pass through the Caffe network)
                                                poseExtractorPtr.Get().ForwardPass(netInputArray, imageSize, scaleInputToNetInputs);
                                                var poseKeypoints    = poseExtractorPtr.Get().GetPoseKeyPoints();
                                                var scaleNetToOutput = poseExtractorPtr.Get().GetScaleNetToOutput();
                                                // Step 5 - Render pose on top of the output array (GPU)
                                                poseGpuRenderer.RenderPose(outputArray, poseKeypoints, (float)scaleInputToOutput, scaleNetToOutput);
                                                // Step 6 - OpenPose output format to cv::Mat
                                                using (var outputImage = opOutputToCvMat.FormatToCvMat(outputArray))
                                                {
                                                    // ------------------------- SHOWING RESULT AND CLOSING -------------------------
                                                    // Show results
                                                    frameDisplayer.DisplayFrame(outputImage, 0); // Alternative: cv::imshow(outputImage) + cv::waitKey(0)
                                                                                                 // Measuring total time
                                                    timeBegin.Stop();
                                                    var totalTimeSec = timeBegin.ElapsedMilliseconds / 1000d;
                                                    var message      = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
                                                    OpenPose.Log(message, Priority.High);
                                                    // Return successful message
                                                    return(0);
                                                }
                                            }
                                        }
                                }
            }
            catch (Exception e)
            {
                OpenPose.Error(e.Message, -1, nameof(TutorialDeveloperPose2));
                return(-1);
            }
        }