Example No. 1
0
        /// <summary>
        /// Runs the OpenPose "Tutorial Developer Pose 2" demo: loads a single image,
        /// estimates body-pose key points with the Caffe GPU extractor, renders them,
        /// and displays the result in a window.
        /// </summary>
        /// <returns>0 on success; -1 if any exception is raised.</returns>
        private static int TutorialDeveloperPose2()
        {
            try
            {
                OpenPose.Log("Starting OpenPose demo...", Priority.High);
                var stopwatch = new Stopwatch();
                stopwatch.Start();

                // ------------------------- INITIALIZATION -------------------------
                // Step 1 - Configure the logging level (0 = log everything, 255 = log nothing).
                OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
                ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
                OpenPose.Log("", Priority.Low);

                // Step 2 - Read the user-defined configuration (GFlags).
                var outputSize   = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
                var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
                var poseModel    = OpenPose.FlagsToPoseModel(Flags.ModelPose);

                // Reject contradictory flag combinations before allocating any resources.
                if (Flags.AlphaPose < 0.0 || Flags.AlphaPose > 1.0)
                {
                    OpenPose.Error("Alpha value for blending must be in the range [0,1].", -1, nameof(TutorialDeveloperPose2));
                }
                if (Flags.ScaleGap <= 0.0 && Flags.ScaleNumber > 1)
                {
                    OpenPose.Error("Incompatible flag configuration: scale_gap must be greater than 0 or scale_number = 1.", -1, nameof(TutorialDeveloperPose2));
                }

                // Step 3 - Build the processing pipeline. Stacked using statements keep the
                // same disposal order as nested blocks, without the stair-step indentation.
                using (var scaleAndSizeExtractor = new ScaleAndSizeExtractor(netInputSize, outputSize, Flags.ScaleNumber, Flags.ScaleGap))
                using (var cvMatToOpInput = new CvMatToOpInput(poseModel))
                using (var cvMatToOpOutput = new CvMatToOpOutput())
                using (var poseExtractorPtr = new StdSharedPtr<PoseExtractorCaffe>(new PoseExtractorCaffe(poseModel, Flags.ModelFolder, Flags.NumGpuStart)))
                using (var poseGpuRenderer = new PoseGpuRenderer(poseModel, poseExtractorPtr, (float)Flags.RenderThreshold, !Flags.DisableBlending, (float)Flags.AlphaPose))
                {
                    poseGpuRenderer.SetElementToRender(Flags.PartToShow);

                    using (var opOutputToCvMat = new OpOutputToCvMat())
                    using (var frameDisplayer = new FrameDisplayer("OpenPose Tutorial - Example 2", outputSize))
                    {
                        // Step 4 - Initialize resources on the desired thread (single-threaded
                        // here, so initialization happens inline).
                        poseExtractorPtr.Get().InitializationOnThread();
                        poseGpuRenderer.InitializationOnThread();

                        // ------------------------- POSE ESTIMATION AND RENDERING -------------------------
                        // Step 1 - Load the input image; fail loudly if it is empty (wrong path?).
                        // Alternative: cv::imread(Flags.image_path, CV_LOAD_IMAGE_COLOR);
                        using (var inputImage = OpenPose.LoadImage(ImagePath, LoadImageFlag.LoadImageColor))
                        {
                            if (inputImage.Empty)
                            {
                                OpenPose.Error("Could not open or find the image: " + ImagePath, -1, nameof(TutorialDeveloperPose2));
                            }

                            var imageSize = new Point<int>(inputImage.Cols, inputImage.Rows);

                            // Step 2 - Compute the scales and sizes required by the network and output.
                            var scales                 = scaleAndSizeExtractor.Extract(imageSize);
                            var scaleInputToNetInputs  = scales.Item1;
                            var netInputSizes          = scales.Item2;
                            var scaleInputToOutput     = scales.Item3;
                            var outputResolution       = scales.Item4;

                            // Step 3 - Convert the image to OpenPose input and output formats.
                            var netInputArray = cvMatToOpInput.CreateArray(inputImage, scaleInputToNetInputs, netInputSizes);
                            var outputArray   = cvMatToOpOutput.CreateArray(inputImage, scaleInputToOutput, outputResolution);

                            // Step 4 - Run the forward pass and read back the estimated key points.
                            poseExtractorPtr.Get().ForwardPass(netInputArray, imageSize, scaleInputToNetInputs);
                            var poseKeypoints    = poseExtractorPtr.Get().GetPoseKeyPoints();
                            var scaleNetToOutput = poseExtractorPtr.Get().GetScaleNetToOutput();

                            // Step 5 - Render the pose on top of the output array.
                            poseGpuRenderer.RenderPose(outputArray, poseKeypoints, (float)scaleInputToOutput, scaleNetToOutput);

                            // Step 6 - Convert the OpenPose output format back to cv::Mat.
                            using (var outputImage = opOutputToCvMat.FormatToCvMat(outputArray))
                            {
                                // ------------------------- SHOWING RESULT AND CLOSING -------------------------
                                // Show the result. Alternative: cv::imshow(outputImage) + cv::waitKey(0)
                                frameDisplayer.DisplayFrame(outputImage, 0);

                                // Measure and report the total elapsed time.
                                stopwatch.Stop();
                                var totalTimeSec = stopwatch.ElapsedMilliseconds / 1000d;
                                OpenPose.Log($"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.", Priority.High);

                                return 0;
                            }
                        }
                    }
                }
            }
            catch (Exception e)
            {
                OpenPose.Error(e.Message, -1, nameof(TutorialDeveloperPose2));
                return -1;
            }
        }