/// <summary>
/// Consumer worker: shows the first rendered frame of the batch in an OpenCV window.
/// On any exception it stops the worker and reports the error through OpenPose.
/// </summary>
/// <param name="datumsPtr">Shared pointers to processed datums; may be null or empty, in which case nothing is displayed.</param>
protected override void WorkConsumer(StdSharedPtr <UserDatum>[] datumsPtr)
        {
            try
            {
                // User's displaying/saving/other processing here
                // datum.cvOutputData: rendered frame with pose or heatmaps
                // datum.poseKeypoints: Array<float> with the estimated pose
                if (datumsPtr != null && datumsPtr.Length != 0)
                {
                    // Convert the OpenPose matrix to an OpenCV Mat; the using block releases the native copy
                    using (var cvOutputData = OpenPose.OP_OP2CVCONSTMAT(datumsPtr[0].Get().CvOutputData))
                    {
                        Cv.ImShow($"{OpenPose.OpenPoseNameAndVersion()} - Tutorial Thread API", cvOutputData);
                        // It displays the image and sleeps at least 1 ms (it usually sleeps ~5-10 msec to display the image)
                        Cv.WaitKey(1);
                    }
                }
            }
            catch (Exception e)
            {
                OpenPose.Log("Some kind of unexpected error happened.");
                this.Stop();
                OpenPose.Error(e.Message, -1, nameof(this.WorkConsumer));
            }

            // NOTE(review): unmatched "#endregion" — its opening "#region" is presumably earlier in the file,
            // outside this excerpt; verify it exists, otherwise this is a compile error (CS1028).
            #endregion
        }
Esempio n. 2
0
 /// <summary>
 /// Creates the producer over all *.jpg files found in <paramref name="directoryPath"/>.
 /// Raises an OpenPose error when the directory contains no matching images.
 /// </summary>
 /// <param name="directoryPath">Directory scanned (non-recursively) for JPEG images.</param>
 public WUserInput(string directoryPath)
 {
     var jpegFiles = Directory.GetFiles(directoryPath, "*.jpg");
     this._ImageFiles = jpegFiles;

     if (jpegFiles.Length == 0)
     {
         OpenPose.Error("No images found on: " + directoryPath, -1, nameof(WUserInput));
     }
 }
Esempio n. 3
0
        /// <summary>
        /// Exercises <c>OpenPose.Error</c> by reporting this method's own name as
        /// both the message and the originating function, with an explicit file name.
        /// </summary>
        public void Error()
        {
            const string file = "OpenPoseTest.cs";

            // Message and function are both the method name ("Error"), as in the original call.
            OpenPose.Error($"{nameof(this.Error)}", -1, nameof(this.Error), file);
        }
 /// <summary>
 /// Creates the producer over the images of <paramref name="directoryPath"/>,
 /// using OpenPose's own directory scan for all basic image formats.
 /// Raises an OpenPose error when the directory contains no images.
 /// </summary>
 /// <param name="directoryPath">Directory scanned for image files.</param>
 public WUserInput(string directoryPath)
 {
     // For all basic image formats
     // If we want only e.g., "jpg" + "png" images
     var foundImages = OpenPose.GetFilesOnDirectory(directoryPath, Extensions.Images);
     this._ImageFiles = foundImages;

     if (foundImages.Length == 0)
     {
         OpenPose.Error("No images found on: " + directoryPath, -1, nameof(WUserInput));
     }
 }
        /// <summary>
        /// Producer worker: loads the next image from <c>_ImageFiles</c>, wraps it in a shared
        /// datum vector, and hands it to the thread system. Stops the worker and returns null
        /// once all frames are consumed or an empty frame is read.
        /// </summary>
        /// <returns>Shared pointer to a one-element datum vector, or null when production ends.</returns>
        protected override StdSharedPtr <StdVector <StdSharedPtr <UserDatum> > > WorkProducer()
        {
            try
            {
                // Close program when empty frame
                if (this._ImageFiles.Length <= this._Counter)
                {
                    OpenPose.Log("Last frame read and added to queue. Closing program after it is processed.", Priority.High);
                    // This function stops this worker, which will eventually stop the whole thread system once all the
                    // frames have been processed
                    this.Stop();
                    return null;
                }
                else
                {
                    // Create new datum
                    var vector    = new StdVector <StdSharedPtr <UserDatum> >();
                    var datumsPtr = new StdSharedPtr <StdVector <StdSharedPtr <UserDatum> > >(vector);
                    datumsPtr.Get().EmplaceBack();
                    var datum = datumsPtr.Get().At(0);

                    // C# cannot set pointer object by using assignment operator
                    datum.Reset(new UserDatum());

                    // Fill datum
                    using (var cvInputData = Cv.ImRead(this._ImageFiles[this._Counter++]))
                        using (var inputData = OpenPose.OP_CV2OPCONSTMAT(cvInputData))
                            datum.Get().CvInputData = inputData;

                    // If empty frame -> return nullptr
                    if (datum.Get().CvInputData.Empty)
                    {
                        OpenPose.Log($"Empty frame detected on path: {this._ImageFiles[this._Counter - 1]}. Closing program.", Priority.High);
                        this.Stop();
                        // Release the native shared pointer rather than just dropping the managed
                        // reference: assigning null alone would leak the std::shared_ptr handle.
                        // (StdSharedPtr is IDisposable — it is wrapped in `using` elsewhere in this file.)
                        datumsPtr.Dispose();
                        datumsPtr = null;
                    }

                    return datumsPtr;
                }
            }
            catch (Exception e)
            {
                OpenPose.Log("Some kind of unexpected error happened.");
                this.Stop();
                OpenPose.Error(e.Message, -1, nameof(this.WorkProducer));
                return null;
            }
        }
Esempio n. 6
0
        /// <summary>
        /// Producer worker: loads the next image from <c>_ImageFiles</c> into a new
        /// <c>StdVector&lt;UserDatum&gt;</c> and hands it to the thread system.
        /// Stops the worker and returns null once all frames are consumed or an empty frame is read.
        /// </summary>
        /// <returns>Shared pointer to a one-element datum vector, or null when production ends.</returns>
        protected override StdSharedPtr <StdVector <UserDatum> > WorkProducer()
        {
            try
            {
                // Close program when empty frame
                if (this._ImageFiles.Length <= this._Counter)
                {
                    OpenPose.Log("Last frame read and added to queue. Closing program after it is processed.", Priority.High);
                    // This function stops this worker, which will eventually stop the whole thread system once all the
                    // frames have been processed
                    this.Stop();
                    return(null);
                }
                else
                {
                    // Create new datum
                    var tmp = new StdVector <UserDatum>();
                    tmp.EmplaceBack();
                    var datumsPtr = new StdSharedPtr <StdVector <UserDatum> >(tmp);
                    // NOTE(review): ToArray() presumably yields managed wrappers over the native
                    // elements, so mutating `datum` below mutates the vector's element — TODO confirm
                    // against StdVector's binding; if ToArray copies, the loaded frame is lost.
                    var datum     = tmp.ToArray()[0];

                    // Fill datum
                    // NOTE(review): `mat` is disposed at the end of the using block after being
                    // assigned to CvInputData — assumes the setter copies (or ref-counts) the Mat;
                    // verify to rule out a use-after-dispose.
                    using (var mat = Cv.ImRead(this._ImageFiles[this._Counter++]))
                        datum.CvInputData = mat;

                    // If empty frame -> return nullptr
                    if (datum.CvInputData.Empty)
                    {
                        OpenPose.Log($"Empty frame detected on path: {this._ImageFiles[this._Counter - 1]}. Closing program.", Priority.High);
                        this.Stop();
                        // NOTE(review): the StdSharedPtr is nulled without Dispose(); looks like a
                        // native handle leak — confirm whether the finalizer reclaims it.
                        datumsPtr = null;
                    }

                    return(datumsPtr);
                }
            }
            catch (Exception e)
            {
                OpenPose.Log("Some kind of unexpected error happened.");
                this.Stop();
                OpenPose.Error(e.Message, -1, nameof(this.WorkProducer));
                return(null);
            }
        }
 /// <summary>
 /// Post-processing worker: inverts the colors of every rendered output frame in place.
 /// </summary>
 /// <param name="datums">Processed datums; may be null or empty, in which case nothing happens.</param>
 protected override void Work(UserDatum[] datums)
 {
     try
     {
         // User's post-processing (after OpenPose processing & before OpenPose outputs) here
         // datum.cvOutputData: rendered frame with pose or heatmaps
         // datum.poseKeypoints: Array<float> with the estimated pose
         if (datums != null && datums.Length != 0)
         {
             foreach (var datum in datums)
             {
                 Cv.BitwiseNot(datum.CvOutputData, datum.CvOutputData);
             }
         }
     }
     catch (Exception e)
     {
         // Log before stopping — matches the error handling of every sibling worker in this file
         OpenPose.Log("Some kind of unexpected error happened.");
         this.Stop();
         OpenPose.Error(e.Message, -1, nameof(this.Work));
     }
 }
Esempio n. 8
0
 /// <summary>
 /// Processing worker: writes the bitwise negation of each input frame into its output frame.
 /// </summary>
 /// <param name="datumsPtr">Datums to process; a null array is silently ignored.</param>
 protected override void Work(Datum[] datumsPtr)
 {
     try
     {
         // User's processing here
         // datum.cvInputData: initial cv::Mat obtained from the frames producer (video, webcam, etc.)
         // datum.cvOutputData: final cv::Mat to be displayed
         if (datumsPtr == null)
         {
             return;
         }

         foreach (var datum in datumsPtr)
         {
             Cv.BitwiseNot(datum.CvInputData, datum.CvOutputData);
         }
     }
     catch (Exception e)
     {
         OpenPose.Log("Some kind of unexpected error happened.");
         this.Stop();
         OpenPose.Error(e.Message, -1, nameof(this.Work));
     }
 }
Esempio n. 9
0
 /// <summary>
 /// Pre-processing worker: inverts the colors of each datum's output matrix in place.
 /// </summary>
 /// <param name="datums">Shared pointers to datums; null or empty arrays are silently ignored.</param>
 protected override void Work(StdSharedPtr <Datum>[] datums)
 {
     try
     {
         // User's pre-processing (after OpenPose read the input image & before OpenPose processing) here
         // datumPtr->cvInputData: input frame
         if (datums == null || datums.Length == 0)
         {
             return;
         }

         for (var index = 0; index < datums.Length; index++)
         {
             // The using block releases the temporary OpenCV view of the OpenPose matrix
             using (var cvOutputData = OpenPose.OP_OP2CVMAT(datums[index].Get().CvOutputData))
             {
                 Cv.BitwiseNot(cvOutputData, cvOutputData);
             }
         }
     }
     catch (Exception e)
     {
         OpenPose.Log("Some kind of unexpected error happened.");
         this.Stop();
         OpenPose.Error(e.Message, -1, nameof(this.Work));
     }
 }
Esempio n. 10
0
 /// <summary>
 /// Post-processing worker: inverts the colors of each datum's rendered output matrix in place.
 /// </summary>
 /// <param name="datums">Shared pointers to datums; null or empty arrays are silently ignored.</param>
 protected override void Work(StdSharedPtr <Datum>[] datums)
 {
     try
     {
         // User's post-processing (after OpenPose processing & before OpenPose outputs) here
         // datum.cvOutputData: rendered frame with pose or heatmaps
         // datum.poseKeypoints: Array<float> with the estimated pose
         var hasFrames = datums != null && datums.Length != 0;
         if (hasFrames)
         {
             foreach (var datumPtr in datums)
             {
                 var outputData = datumPtr.Get().CvOutputData;
                 using (var mat = OpenPose.OP_OP2CVMAT(outputData))
                 {
                     Cv.BitwiseNot(mat, mat);
                 }
             }
         }
     }
     catch (Exception e)
     {
         OpenPose.Log("Some kind of unexpected error happened.");
         this.Stop();
         OpenPose.Error(e.Message, -1, nameof(this.Work));
     }
 }
Esempio n. 11
0
        /// <summary>
        /// Shows the first datum's rendered frame in an OpenCV window and blocks until a key is pressed.
        /// </summary>
        /// <param name="datumsPtr">Shared pointer to the datum vector; null or empty input is silently ignored.</param>
        private static void Display(StdSharedPtr <StdVector <StdSharedPtr <Datum> > > datumsPtr)
        {
            try
            {
                // User's displaying/saving/other processing here
                // datum.cvOutputData: rendered frame with pose or heatmaps
                // datum.poseKeypoints: Array<float> with the estimated pose
                if (datumsPtr == null || !datumsPtr.TryGet(out var data) || data.Empty)
                {
                    return;
                }

                var firstDatum = datumsPtr.Get().At(0).Get();

                // Display image; the using block releases the temporary OpenCV view
                using (var cvMat = OpenPose.OP_OP2CVCONSTMAT(firstDatum.CvOutputData))
                {
                    Cv.ImShow($"{OpenPose.OpenPoseNameAndVersion()} - Tutorial C++ API", cvMat);
                    Cv.WaitKey(0);
                }
            }
            catch (Exception e)
            {
                OpenPose.Error(e.Message, -1, nameof(Display));
            }
        }
Esempio n. 12
0
        /// <summary>
        /// Consumer worker: shows the first datum's rendered frame in a GUI window,
        /// waiting at least 1 ms so OpenCV can repaint.
        /// </summary>
        /// <param name="datumsPtr">Processed datums; may be null or empty, in which case nothing is displayed.</param>
        protected override void WorkConsumer(Datum[] datumsPtr)
        {
            try
            {
                // User's displaying/saving/other processing here
                // datum.cvOutputData: rendered frame with pose or heatmaps
                // datum.poseKeypoints: Array<float> with the estimated pose
                if (datumsPtr != null && datumsPtr.Length != 0)
                {
                    Cv.ImShow("User worker GUI", datumsPtr[0].CvOutputData);

                    // It displays the image and sleeps at least 1 ms (it usually sleeps ~5-10 msec to display the image)
                    Cv.WaitKey(1);
                }
            }
            catch (Exception e)
            {
                OpenPose.Log("Some kind of unexpected error happened.");
                this.Stop();
                OpenPose.Error(e.Message, -1, nameof(this.WorkConsumer));
            }

            // NOTE(review): unmatched "#endregion" — its opening "#region" is presumably earlier in the file,
            // outside this excerpt; verify it exists, otherwise this is a compile error (CS1028).
            #endregion
        }
Esempio n. 13
0
        /// <summary>
        /// Runs a three-worker OpenPose pipeline (producer -> post-processor -> GUI consumer)
        /// on the thread manager, blocking until all frames are processed.
        /// </summary>
        /// <returns>0 on success, -1 on error.</returns>
        private static int OpenPoseTutorialThread3()
        {
            try
            {
                OpenPose.Log("Starting OpenPose demo...", Priority.High);
                var timeBegin = new Stopwatch();
                timeBegin.Start();

                // ------------------------- INITIALIZATION -------------------------
                // Step 1 - Set logging level
                // - 0 will output all the logging messages
                // - 255 will output nothing
                OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
                ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
                // Step 2 - Setting thread workers && manage
                using (var threadManager = new ThreadManager <Datum>())
                {
                    // Step 3 - Initializing the worker classes
                    // Frames producer (e.g., video, webcam, ...)
                    using (var wUserInput = new StdSharedPtr <UserWorkerProducer <Datum> >(new WUserInput(Flags.ImageDir)))
                    {
                        // Processing
                        using (var wUserProcessing = new StdSharedPtr <UserWorker <Datum> >(new WUserPostProcessing()))
                        {
                            // GUI (Display)
                            using (var wUserOutput = new StdSharedPtr <UserWorkerConsumer <Datum> >(new WUserOutput()))
                            {
                                // ------------------------- CONFIGURING THREADING -------------------------
                                // In this multi-thread example, we will do the following:
                                // 4 (virtual) queues: 0, 1, 2, 3
                                // 2 real queues: 1 and 2. The first and last queue ids (in this case 0 and 3) are not actual
                                // queues, but the beginning and end of the processing sequence
                                // 3 threads: 0, 1, 2
                                // wUserInput will generate frames (there is no real queue 0) and push them on queue 1
                                // wUserProcessing will pop frames from queue 1, process them, and push them on queue 2
                                // wUserOutput will pop frames from queue 2 and display them (there is no real queue 3)
                                var threadId = 0UL;
                                var queueIn  = 0UL;
                                var queueOut = 1UL;
                                threadManager.Add(threadId++, wUserInput, queueIn++, queueOut++);       // Thread 0, queues 0 -> 1
                                threadManager.Add(threadId++, wUserProcessing, queueIn++, queueOut++);  // Thread 1, queues 1 -> 2
                                threadManager.Add(threadId++, wUserOutput, queueIn++, queueOut++);      // Thread 2, queues 2 -> 3

                                // ------------------------- STARTING AND STOPPING THREADING -------------------------
                                OpenPose.Log("Starting thread(s)...", Priority.High);
                                // Two different ways of running the program on multithread environment
                                // Option a) Using the main thread (this thread) for processing (it saves 1 thread, recommended)
                                threadManager.Exec();
                                // Option b) Giving to the user the control of this thread
                                // // VERY IMPORTANT NOTE: if OpenCV is compiled with Qt support, this option will not work. Qt needs the main
                                // // thread to plot visual results, so the final GUI (which uses OpenCV) would return an exception similar to:
                                // // `QMetaMethod::invoke: Unable to invoke methods with return values in queued connections`
                                // // Start threads
                                // threadManager.start();
                                // // Keep program alive while running threads. Here the user could perform any other desired function
                                // while (threadManager.isRunning())
                                //     std::this_thread::sleep_for(std::chrono::milliseconds{33});
                                // // Stop and join threads
                                // op::log("Stopping thread(s)", op::Priority::High);
                                // threadManager.stop();
                            }
                        }
                    }
                }

                // ------------------------- CLOSING -------------------------
                // Measuring total time
                timeBegin.Stop();
                var totalTimeSec = timeBegin.ElapsedMilliseconds / 1000d;
                var message      = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
                OpenPose.Log(message, Priority.High);

                // Return successful message
                return(0);
            }
            catch (Exception e)
            {
                OpenPose.Error(e.Message, -1, nameof(OpenPoseTutorialThread3));
                return(-1);
            }
        }
Esempio n. 14
0
        /// <summary>
        /// Runs a minimal two-worker OpenPose pipeline: a frame producer (image dir / video /
        /// webcam / IP camera, chosen from GFlags) feeding a GUI display, via the thread manager.
        /// </summary>
        /// <returns>0 on success, -1 on error.</returns>
        private static int TutorialDeveloperThread1()
        {
            try
            {
                OpenPose.Log("Starting OpenPose demo...", Priority.High);
                var timeBegin = new Stopwatch();
                timeBegin.Start();

                // ------------------------- INITIALIZATION -------------------------
                // Step 1 - Set logging level
                // - 0 will output all the logging messages
                // - 255 will output nothing
                OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
                ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
                // Step 2 - Read GFlags (user defined configuration)
                // cameraSize
                var cameraSize = OpenPose.FlagsToPoint(Flags.CameraResolution, "-1x-1");
                // outputSize
                var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
                // producerType
                var tie                    = OpenPose.FlagsToProducer(Flags.ImageDir, Flags.Video, Flags.IpCamera, Flags.Camera, Flags.FlirCamera, Flags.FlirCameraIndex);
                var producerType           = tie.Item1;
                var producerString         = tie.Item2;
                var displayProducerFpsMode = Flags.ProcessRealTime ? ProducerFpsMode.OriginalFps : ProducerFpsMode.RetrievalFps;
                using (var producerSharedPtr = OpenPose.CreateProducer(producerType,
                                                                       cameraSize,
                                                                       producerString,
                                                                       Flags.CameraParameterPath,
                                                                       Flags.FrameUndistort,
                                                                       Flags.Views3D))
                {
                    producerSharedPtr.Get().SetProducerFpsMode(displayProducerFpsMode);
                    OpenPose.Log("", Priority.Low);
                    // Step 3 - Setting producer
                    //var videoSeekSharedPtr = std::make_shared<std::pair<std::atomic<bool>, std::atomic<int>>>();
                    //videoSeekSharedPtr->first = false;
                    //videoSeekSharedPtr->second = 0;
                    // Step 4 - Setting thread workers && manager
                    // Note:
                    // nativeDebugging may occur crash
                    using (var threadManager = new ThreadManager <Datum>())
                    {
                        // Step 5 - Initializing the worker classes
                        // Frames producer (e.g., video, webcam, ...)
                        using (var datumProducer = new StdSharedPtr <DatumProducer <Datum> >(new DatumProducer <Datum>(producerSharedPtr)))
                            using (var wDatumProducer = new StdSharedPtr <WDatumProducer <Datum> >(new WDatumProducer <Datum>(datumProducer)))
                            {
                                // GUI (Display)
                                using (var gui = new StdSharedPtr <Gui>(new Gui(outputSize, Flags.FullScreen, threadManager.GetIsRunningSharedPtr())))
                                    using (var wGui = new StdSharedPtr <WGui <Datum> >(new WGui <Datum>(gui)))
                                    {
                                        // ------------------------- CONFIGURING THREADING -------------------------
                                        // In this simple multi-thread example, we will do the following:
                                        // 3 (virtual) queues: 0, 1, 2
                                        // 1 real queue: 1. The first and last queue ids (in this case 0 and 2) are not actual queues, but the
                                        // beginning and end of the processing sequence
                                        // 2 threads: 0, 1
                                        // wDatumProducer will generate frames (there is no real queue 0) and push them on queue 1
                                        // wGui will pop frames from queue 1 and process them (there is no real queue 2)
                                        var threadId = 0UL;
                                        var queueIn  = 0UL;
                                        var queueOut = 1UL;
                                        threadManager.Add(threadId++, wDatumProducer, queueIn++, queueOut++); // Thread 0, queues 0 -> 1
                                        threadManager.Add(threadId++, wGui, queueIn++, queueOut++);           // Thread 1, queues 1 -> 2

                                        // Equivalent single-thread version (option a)
                                        // const auto threadId = 0ull;
                                        // auto queueIn = 0ull;
                                        // auto queueOut = 1ull;
                                        // threadManager.add(threadId, wDatumProducer, queueIn++, queueOut++);       // Thread 0, queues 0 -> 1
                                        // threadManager.add(threadId, wGui, queueIn++, queueOut++);                 // Thread 0, queues 1 -> 2

                                        // Equivalent single-thread version (option b)
                                        // const auto threadId = 0ull;
                                        // const auto queueIn = 0ull;
                                        // const auto queueOut = 1ull;
                                        // threadManager.add(threadId, {wDatumProducer, wGui}, queueIn, queueOut);     // Thread 0, queues 0 -> 1

                                        // ------------------------- STARTING AND STOPPING THREADING -------------------------
                                        OpenPose.Log("Starting thread(s)...", Priority.High);
                                        // Two different ways of running the program on multithread environment
                                        // Option a) Using the main thread (this thread) for processing (it saves 1 thread, recommended)
                                        threadManager.Exec();
                                        // Option b) Giving to the user the control of this thread
                                        // // VERY IMPORTANT NOTE: if OpenCV is compiled with Qt support, this option will not work. Qt needs the main
                                        // // thread to plot visual results, so the final GUI (which uses OpenCV) would return an exception similar to:
                                        // // `QMetaMethod::invoke: Unable to invoke methods with return values in queued connections`
                                        // // Start threads
                                        // threadManager.start();
                                        // // Keep program alive while running threads. Here the user could perform any other desired function
                                        // while (threadManager.isRunning())
                                        //     std::this_thread::sleep_for(std::chrono::milliseconds{33});
                                        // // Stop and join threads
                                        // op::log("Stopping thread(s)", op::Priority::High);
                                        // threadManager.stop();
                                    }
                            }
                    }
                }

                // ------------------------- CLOSING -------------------------
                // Measuring total time
                timeBegin.Stop();
                var totalTimeSec = timeBegin.ElapsedMilliseconds / 1000d;
                var message      = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
                OpenPose.Log(message, Priority.High);

                // Return successful message
                return(0);
            }
            catch (Exception e)
            {
                OpenPose.Error(e.Message, -1, nameof(TutorialDeveloperThread1));
                return(-1);
            }
        }
Esempio n. 15
0
        /// <summary>
        /// Single-threaded pose-estimation demo: loads one image, runs the Caffe pose
        /// extractor, renders the keypoints on the GPU, and displays the result until a
        /// key is pressed. All OpenPose resources are initialized on the calling thread.
        /// </summary>
        /// <returns>0 on success, -1 on error.</returns>
        private static int TutorialDeveloperPose2()
        {
            try
            {
                OpenPose.Log("Starting OpenPose demo...", Priority.High);
                var timeBegin = new Stopwatch();
                timeBegin.Start();

                // ------------------------- INITIALIZATION -------------------------
                // Step 1 - Set logging level
                // - 0 will output all the logging messages
                // - 255 will output nothing
                OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
                ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
                OpenPose.Log("", Priority.Low);
                // Step 2 - Read GFlags (user defined configuration)
                // outputSize
                var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
                // netInputSize
                var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
                // poseModel
                var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
                // Check no contradictory flags enabled
                if (Flags.AlphaPose < 0.0 || Flags.AlphaPose > 1.0)
                {
                    OpenPose.Error("Alpha value for blending must be in the range [0,1].", -1, nameof(TutorialDeveloperPose2));
                }
                if (Flags.ScaleGap <= 0.0 && Flags.ScaleNumber > 1)
                {
                    OpenPose.Error("Incompatible flag configuration: scale_gap must be greater than 0 or scale_number = 1.", -1, nameof(TutorialDeveloperPose2));
                }
                // Step 3 - Initialize all required classes
                using (var scaleAndSizeExtractor = new ScaleAndSizeExtractor(netInputSize, outputSize, Flags.ScaleNumber, Flags.ScaleGap))
                    using (var cvMatToOpInput = new CvMatToOpInput(poseModel))
                        using (var cvMatToOpOutput = new CvMatToOpOutput())
                            using (var poseExtractorPtr = new StdSharedPtr <PoseExtractorCaffe>(new PoseExtractorCaffe(poseModel, Flags.ModelFolder, Flags.NumGpuStart)))
                                using (var poseGpuRenderer = new PoseGpuRenderer(poseModel, poseExtractorPtr, (float)Flags.RenderThreshold, !Flags.DisableBlending, (float)Flags.AlphaPose))
                                {
                                    poseGpuRenderer.SetElementToRender(Flags.PartToShow);

                                    using (var opOutputToCvMat = new OpOutputToCvMat())
                                        using (var frameDisplayer = new FrameDisplayer("OpenPose Tutorial - Example 2", outputSize))
                                        {
                                            // Step 4 - Initialize resources on desired thread (in this case single thread, i.e., we init resources here)
                                            poseExtractorPtr.Get().InitializationOnThread();
                                            poseGpuRenderer.InitializationOnThread();

                                            // ------------------------- POSE ESTIMATION AND RENDERING -------------------------
                                            // Step 1 - Read and load image, error if empty (possibly wrong path)
                                            // Alternative: cv::imread(Flags.image_path, CV_LOAD_IMAGE_COLOR);
                                            using (var inputImage = OpenPose.LoadImage(ImagePath, LoadImageFlag.LoadImageColor))
                                            {
                                                if (inputImage.Empty)
                                                {
                                                    OpenPose.Error("Could not open or find the image: " + ImagePath, -1, nameof(TutorialDeveloperPose2));
                                                }
                                                var imageSize = new Point <int>(inputImage.Cols, inputImage.Rows);
                                                // Step 2 - Get desired scale sizes
                                                var tuple = scaleAndSizeExtractor.Extract(imageSize);
                                                var scaleInputToNetInputs = tuple.Item1;
                                                var netInputSizes         = tuple.Item2;
                                                var scaleInputToOutput    = tuple.Item3;
                                                var outputResolution      = tuple.Item4;
                                                // Step 3 - Format input image to OpenPose input and output formats
                                                var netInputArray = cvMatToOpInput.CreateArray(inputImage, scaleInputToNetInputs, netInputSizes);
                                                var outputArray   = cvMatToOpOutput.CreateArray(inputImage, scaleInputToOutput, outputResolution);
                                                // Step 4 - Estimate poseKeypoints
                                                poseExtractorPtr.Get().ForwardPass(netInputArray, imageSize, scaleInputToNetInputs);
                                                var poseKeypoints    = poseExtractorPtr.Get().GetPoseKeyPoints();
                                                var scaleNetToOutput = poseExtractorPtr.Get().GetScaleNetToOutput();
                                                // Step 5 - Render pose
                                                poseGpuRenderer.RenderPose(outputArray, poseKeypoints, (float)scaleInputToOutput, scaleNetToOutput);
                                                // Step 6 - OpenPose output format to cv::Mat
                                                using (var outputImage = opOutputToCvMat.FormatToCvMat(outputArray))
                                                {
                                                    // ------------------------- SHOWING RESULT AND CLOSING -------------------------
                                                    // Show results
                                                    frameDisplayer.DisplayFrame(outputImage, 0); // Alternative: cv::imshow(outputImage) + cv::waitKey(0)
                                                                                                 // Measuring total time
                                                    timeBegin.Stop();
                                                    var totalTimeSec = timeBegin.ElapsedMilliseconds / 1000d;
                                                    var message      = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
                                                    OpenPose.Log(message, Priority.High);
                                                    // Return successful message
                                                    return(0);
                                                }
                                            }
                                        }
                                }
            }
            catch (Exception e)
            {
                OpenPose.Error(e.Message, -1, nameof(TutorialDeveloperPose2));
                return(-1);
            }
        }
Esempio n. 16
0
        /// <summary>
        /// Translates the user-defined GFlags (<see cref="Flags"/>) into OpenPose wrapper
        /// configuration structs (pose, face, hand, extra, input, output, GUI), registers the
        /// custom post-processing worker, and applies them all to <paramref name="opWrapper"/>.
        /// Any failure is reported through <see cref="OpenPose.Error"/> rather than propagated.
        /// </summary>
        /// <param name="opWrapper">The OpenPose wrapper instance to configure.</param>
        private static void ConfigureWrapper(Wrapper<CustomDatum> opWrapper)
        {
            try
            {
                // Configuring OpenPose

                // logging_level: 0 logs everything, 255 logs nothing.
                OpenPose.CheckBool(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
                ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
                Profiler.SetDefaultX((ulong)Flags.ProfileSpeed);

                // Applying user defined configuration - GFlags to program variables
                // producerType: resolved from image dir / video / IP camera / webcam / FLIR camera flags
                var tie            = OpenPose.FlagsToProducer(Flags.ImageDir, Flags.Video, Flags.IpCamera, Flags.Camera, Flags.FlirCamera, Flags.FlirCameraIndex);
                var producerType   = tie.Item1;
                var producerString = tie.Item2;
                // cameraSize
                var cameraSize = OpenPose.FlagsToPoint(Flags.CameraResolution, "-1x-1");
                // outputSize
                var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
                // netInputSize
                var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
                // faceNetInputSize
                var faceNetInputSize = OpenPose.FlagsToPoint(Flags.FaceNetResolution, "368x368 (multiples of 16)");
                // handNetInputSize
                var handNetInputSize = OpenPose.FlagsToPoint(Flags.HandNetResolution, "368x368 (multiples of 16)");
                // poseMode
                var poseMode = OpenPose.FlagsToPoseMode(Flags.Body);
                // poseModel
                var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
                // JSON saving
                if (!string.IsNullOrEmpty(Flags.WriteKeyPoint))
                {
                    // BUGFIX: log at Priority.Max so this deprecation warning cannot be filtered
                    // out by logging_level (matches the sibling ConfigureWrapper overload).
                    OpenPose.Log("Flag `write_keypoint` is deprecated and will eventually be removed. Please, use `write_json` instead.", Priority.Max);
                }
                // keypointScale (renamed from keyPointScale for consistency with the sibling overload)
                var keypointScale = OpenPose.FlagsToScaleMode(Flags.KeyPointScale);
                // heatmaps to add
                var heatMapTypes = OpenPose.FlagsToHeatMaps(Flags.HeatmapsAddParts, Flags.HeatmapsAddBackground, Flags.HeatmapsAddPAFs);
                var heatMapScale = OpenPose.FlagsToHeatMapScaleMode(Flags.HeatmapsScale);
                // >1 camera view?
                var multipleView = (Flags.Enable3D || Flags.Views3D > 1 || Flags.FlirCamera);
                // Face and hand detectors
                var faceDetector = OpenPose.FlagsToDetector(Flags.FaceDetector);
                var handDetector = OpenPose.FlagsToDetector(Flags.HandDetector);
                // Enabling Google Logging
                const bool enableGoogleLogging = true;

                // Configuring OpenPose
                OpenPose.Log("Configuring OpenPose...", Priority.High);
                {
                    // Pose configuration (use WrapperStructPose{} for default and recommended configuration)
                    var pose = new WrapperStructPose(poseMode,
                                                     netInputSize,
                                                     outputSize,
                                                     keypointScale,
                                                     Flags.NumGpu,
                                                     Flags.NumGpuStart,
                                                     Flags.ScaleNumber,
                                                     (float)Flags.ScaleGap,
                                                     OpenPose.FlagsToRenderMode(Flags.RenderPose, multipleView),
                                                     poseModel,
                                                     !Flags.DisableBlending,
                                                     (float)Flags.AlphaPose,
                                                     (float)Flags.AlphaHeatmap,
                                                     Flags.PartToShow,
                                                     Flags.ModelFolder,
                                                     heatMapTypes,
                                                     heatMapScale,
                                                     Flags.PartCandidates,
                                                     (float)Flags.RenderThreshold,
                                                     Flags.NumberPeopleMax,
                                                     Flags.MaximizePositives,
                                                     Flags.FpsMax,
                                                     Flags.PrototxtPath,
                                                     Flags.CaffeModelPath,
                                                     (float)Flags.UpsamplingRatio,
                                                     enableGoogleLogging);
                    // Face configuration (use op::WrapperStructFace{} to disable it)
                    var face = new WrapperStructFace(Flags.Face,
                                                     faceDetector,
                                                     faceNetInputSize,
                                                     OpenPose.FlagsToRenderMode(Flags.FaceRender, multipleView, Flags.RenderPose),
                                                     (float)Flags.FaceAlphaPose,
                                                     (float)Flags.FaceAlphaHeatmap,
                                                     (float)Flags.FaceRenderThreshold);
                    // Hand configuration (use op::WrapperStructHand{} to disable it)
                    var hand = new WrapperStructHand(Flags.Hand,
                                                     handDetector,
                                                     handNetInputSize,
                                                     Flags.HandScaleNumber,
                                                     (float)Flags.HandScaleRange,
                                                     OpenPose.FlagsToRenderMode(Flags.HandRender, multipleView, Flags.RenderPose),
                                                     (float)Flags.HandAlphaPose,
                                                     (float)Flags.HandAlphaHeatmap,
                                                     (float)Flags.HandRenderThreshold);
                    // Extra functionality configuration (use op::WrapperStructExtra{} to disable it)
                    var extra = new WrapperStructExtra(Flags.Enable3D,
                                                       Flags.MinViews3D,
                                                       Flags.Identification,
                                                       Flags.Tracking,
                                                       Flags.IkThreads);
                    // Producer (use default to disable any input)
                    var input = new WrapperStructInput(producerType,
                                                       producerString,
                                                       Flags.FrameFirst,
                                                       Flags.FrameStep,
                                                       Flags.FrameLast,
                                                       Flags.ProcessRealTime,
                                                       Flags.FrameFlip,
                                                       Flags.FrameRotate,
                                                       Flags.FramesRepeat,
                                                       cameraSize,
                                                       Flags.CameraParameterPath,
                                                       Flags.FrameUndistort,
                                                       Flags.Views3D);
                    // Output (comment or use default argument to disable any output)
                    var output = new WrapperStructOutput(Flags.CliVerbose,
                                                         Flags.WriteKeyPoint,
                                                         OpenPose.StringToDataFormat(Flags.WriteKeyPointFormat),
                                                         Flags.WriteJson,
                                                         Flags.WriteCocoJson,
                                                         Flags.WriteCocoJsonVariants,
                                                         Flags.WriteCocoJsonVariant,
                                                         Flags.WriteImages,
                                                         Flags.WriteImagesFormat,
                                                         Flags.WriteVideo,
                                                         Flags.WriteVideoFps,
                                                         Flags.WriteVideoWithAudio,
                                                         Flags.WriteHeatmaps,
                                                         Flags.WriteHeatmapsFormat,
                                                         Flags.WriteVideoAdam,
                                                         Flags.WriteBvh,
                                                         Flags.UdpHost,
                                                         Flags.UdpPort);
                    // GUI (comment or use default argument to disable any visual output)
                    var gui = new WrapperStructGui(OpenPose.FlagsToDisplayMode(Flags.Display, Flags.Enable3D),
                                                   !Flags.NoGuiVerbose,
                                                   Flags.FullScreen);
                    opWrapper.Configure(pose);
                    opWrapper.Configure(face);
                    opWrapper.Configure(hand);
                    opWrapper.Configure(extra);
                    opWrapper.Configure(input);
                    opWrapper.Configure(output);
                    opWrapper.Configure(gui);

                    // Custom post-processing worker, attached to the wrapper's worker pipeline.
                    var userPostProcessing  = new UserPostProcessing(/* Your class arguments here */);
                    var wUserPostProcessing = new StdSharedPtr<UserWorker<CustomDatum>>(new WUserPostProcessing(userPostProcessing));

                    // Add custom processing on the wrapper's own thread (no extra thread spawned).
                    const bool workerProcessingOnNewThread = false;
                    opWrapper.SetWorker(WorkerType.PostProcessing, wUserPostProcessing, workerProcessingOnNewThread);

                    // Set to single-thread (for sequential processing and/or debugging and/or reducing latency)
                    if (Flags.DisableMultiThread)
                    {
                        opWrapper.DisableMultiThreading();
                    }
                }
            }
            catch (Exception e)
            {
                OpenPose.Error(e.Message, -1, nameof(ConfigureWrapper));
            }
        }
Esempio n. 17
0
        /// <summary>
        /// Translates the user-defined GFlags (<see cref="Flags"/>) into OpenPose wrapper
        /// configuration structs (pose, face, hand, extra, output — deliberately no producer
        /// and no GUI) and applies them to <paramref name="opWrapper"/>. Any failure is
        /// reported through <see cref="OpenPose.Error"/> rather than propagated.
        /// </summary>
        /// <param name="opWrapper">The OpenPose wrapper instance to configure.</param>
        private static void ConfigureWrapper(Wrapper <Datum> opWrapper)
        {
            try
            {
                // Configuring OpenPose

                // logging_level: 0 logs everything, 255 logs nothing.
                OpenPose.CheckBool(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
                ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
                Profiler.SetDefaultX((ulong)Flags.ProfileSpeed);

                // Applying user defined configuration - GFlags to program variables
                // outputSize
                var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
                // netInputSize
                var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
                // faceNetInputSize
                var faceNetInputSize = OpenPose.FlagsToPoint(Flags.FaceNetResolution, "368x368 (multiples of 16)");
                // handNetInputSize
                var handNetInputSize = OpenPose.FlagsToPoint(Flags.HandNetResolution, "368x368 (multiples of 16)");
                // poseMode
                var poseMode = OpenPose.FlagsToPoseMode(Flags.Body);
                // poseModel
                var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
                // JSON saving: warn (at maximum priority, so it is never filtered out) when
                // the deprecated write_keypoint flag is used.
                if (!string.IsNullOrEmpty(Flags.WriteKeyPoint))
                {
                    OpenPose.Log("Flag `write_keypoint` is deprecated and will eventually be removed. Please, use `write_json` instead.", Priority.Max);
                }
                // keypointScale
                var keypointScale = OpenPose.FlagsToScaleMode(Flags.KeyPointScale);
                // heatmaps to add
                var heatMapTypes = OpenPose.FlagsToHeatMaps(Flags.HeatmapsAddParts, Flags.HeatmapsAddBackground, Flags.HeatmapsAddPAFs);
                var heatMapScale = OpenPose.FlagsToHeatMapScaleMode(Flags.HeatmapsScale);
                // >1 camera view? (no FlirCamera flag here, unlike the CustomDatum overload)
                var multipleView = (Flags.Enable3D || Flags.Views3D > 1);
                // Face and hand detectors
                var faceDetector = OpenPose.FlagsToDetector(Flags.FaceDetector);
                var handDetector = OpenPose.FlagsToDetector(Flags.HandDetector);
                // Enabling Google Logging
                const bool enableGoogleLogging = true;

                // Pose configuration (use WrapperStructPose{} for default and recommended configuration)
                var pose = new WrapperStructPose(poseMode,
                                                 netInputSize,
                                                 outputSize,
                                                 keypointScale,
                                                 Flags.NumGpu,
                                                 Flags.NumGpuStart,
                                                 Flags.ScaleNumber,
                                                 (float)Flags.ScaleGap,
                                                 OpenPose.FlagsToRenderMode(Flags.RenderPose, multipleView),
                                                 poseModel,
                                                 !Flags.DisableBlending,
                                                 (float)Flags.AlphaPose,
                                                 (float)Flags.AlphaHeatmap,
                                                 Flags.PartToShow,
                                                 Flags.ModelFolder,
                                                 heatMapTypes,
                                                 heatMapScale,
                                                 Flags.PartCandidates,
                                                 (float)Flags.RenderThreshold,
                                                 Flags.NumberPeopleMax,
                                                 Flags.MaximizePositives,
                                                 Flags.FpsMax,
                                                 Flags.PrototxtPath,
                                                 Flags.CaffeModelPath,
                                                 (float)Flags.UpsamplingRatio,
                                                 enableGoogleLogging);
                // Face configuration (use op::WrapperStructFace{} to disable it)
                var face = new WrapperStructFace(Flags.Face,
                                                 faceDetector,
                                                 faceNetInputSize,
                                                 OpenPose.FlagsToRenderMode(Flags.FaceRender, multipleView, Flags.RenderPose),
                                                 (float)Flags.FaceAlphaPose,
                                                 (float)Flags.FaceAlphaHeatmap,
                                                 (float)Flags.FaceRenderThreshold);
                // Hand configuration (use op::WrapperStructHand{} to disable it)
                var hand = new WrapperStructHand(Flags.Hand,
                                                 handDetector,
                                                 handNetInputSize,
                                                 Flags.HandScaleNumber,
                                                 (float)Flags.HandScaleRange,
                                                 OpenPose.FlagsToRenderMode(Flags.HandRender, multipleView, Flags.RenderPose),
                                                 (float)Flags.HandAlphaPose,
                                                 (float)Flags.HandAlphaHeatmap,
                                                 (float)Flags.HandRenderThreshold);
                // Extra functionality configuration (use op::WrapperStructExtra{} to disable it)
                var extra = new WrapperStructExtra(Flags.Enable3D,
                                                   Flags.MinViews3D,
                                                   Flags.Identification,
                                                   Flags.Tracking,
                                                   Flags.IkThreads);
                // Output (comment or use default argument to disable any output)
                var output = new WrapperStructOutput(Flags.CliVerbose,
                                                     Flags.WriteKeyPoint,
                                                     OpenPose.StringToDataFormat(Flags.WriteKeyPointFormat),
                                                     Flags.WriteJson,
                                                     Flags.WriteCocoJson,
                                                     Flags.WriteCocoJsonVariants,
                                                     Flags.WriteCocoJsonVariant,
                                                     Flags.WriteImages,
                                                     Flags.WriteImagesFormat,
                                                     Flags.WriteVideo,
                                                     Flags.WriteVideoFps,
                                                     Flags.WriteVideoWithAudio,
                                                     Flags.WriteHeatmaps,
                                                     Flags.WriteHeatmapsFormat,
                                                     Flags.WriteVideoAdam,
                                                     Flags.WriteBvh,
                                                     Flags.UdpHost,
                                                     Flags.UdpPort);

                opWrapper.Configure(pose);
                opWrapper.Configure(face);
                opWrapper.Configure(hand);
                opWrapper.Configure(extra);
                opWrapper.Configure(output);

                // No GUI. Equivalent to: opWrapper.configure(op::WrapperStructGui{});
                // Set to single-thread (for sequential processing and/or debugging and/or reducing latency)
                if (Flags.DisableMultiThread)
                {
                    opWrapper.DisableMultiThreading();
                }

                // NOTE(review): "Stopping OpenPose..." looks like a copy-paste from a demo's
                // shutdown path — it is logged while the wrapper is merely being configured.
                // Confirm intent; "Configuring finished" (or removing the line) seems more accurate.
                OpenPose.Log("Stopping OpenPose...", Priority.High);
            }
            catch (Exception e)
            {
                OpenPose.Error(e.Message, -1, nameof(ConfigureWrapper));
            }
        }
Esempio n. 18
0
        /// <summary>
        /// Consumer worker: prints the body/face/hand keypoints (and heatmap sizes, when
        /// present) of the first datum, then — unless display is disabled — shows the
        /// rendered frame and requests a stop when ESC is pressed.
        /// </summary>
        /// <param name="datumsPtr">Shared pointers to the processed datums; may be null or empty.</param>
        protected override void WorkConsumer(StdSharedPtr<UserDatum>[] datumsPtr)
        {
            try
            {
                // Nothing to consume on an empty batch.
                if (datumsPtr == null || datumsPtr.Length == 0)
                    return;

                var userDatum = datumsPtr[0].Get();

                // Dump every (x, y, score) triplet of the estimated body poses to the log.
                OpenPose.Log("\nKeypoints:");
                var bodyKeyPoints = userDatum.PoseKeyPoints;
                OpenPose.Log("Person pose keypoints:");
                for (var personIndex = 0; personIndex < bodyKeyPoints.GetSize(0); personIndex++)
                {
                    OpenPose.Log($"Person {personIndex} (x, y, score):");
                    for (var partIndex = 0; partIndex < bodyKeyPoints.GetSize(1); partIndex++)
                    {
                        var line = "";
                        for (var channel = 0; channel < bodyKeyPoints.GetSize(2); channel++)
                            line += bodyKeyPoints[new[] { personIndex, partIndex, channel }] + " ";
                        OpenPose.Log(line);
                    }
                }

                OpenPose.Log(" ");
                // Face and hand keypoints via their string representations.
                OpenPose.Log($"Face keypoints: {userDatum.FaceKeyPoints}");
                OpenPose.Log($"Left hand keypoints: {userDatum.HandKeyPoints[0]}");
                OpenPose.Log($"Right hand keypoints: {userDatum.HandKeyPoints[1]}");

                // Heatmap dimensions, only present when heatmap output was requested.
                var bodyHeatMaps = userDatum.PoseHeatMaps;
                if (!bodyHeatMaps.Empty)
                {
                    OpenPose.Log($"Pose heatmaps size: [{bodyHeatMaps.GetSize(0)}, {bodyHeatMaps.GetSize(1)}, {bodyHeatMaps.GetSize(2)}]");
                    var faceMaps = userDatum.FaceHeatMaps;
                    OpenPose.Log($"Face heatmaps size: [{faceMaps.GetSize(0)}, {faceMaps.GetSize(1)}, {faceMaps.GetSize(2)}, {faceMaps.GetSize(3)}]");
                    var handMaps = userDatum.HandHeatMaps;
                    OpenPose.Log($"Left hand heatmaps size: [{handMaps[0].GetSize(0)}, {handMaps[0].GetSize(1)}, {handMaps[0].GetSize(2)}, {handMaps[0].GetSize(3)}]");
                    OpenPose.Log($"Right hand heatmaps size: [{handMaps[1].GetSize(0)}, {handMaps[1].GetSize(1)}, {handMaps[1].GetSize(2)}, {handMaps[1].GetSize(3)}]");
                }

                if (Flags.NoDisplay)
                    return;

                // Render the output frame; WaitKey also gives HighGUI time to paint.
                using (var outputMat = OpenPose.OP_OP2CVCONSTMAT(userDatum.CvOutputData))
                {
                    Cv.ImShow($"{OpenPose.OpenPoseNameAndVersion()} - Tutorial C++ API", outputMat);
                    // ESC (27) requests the worker pipeline to stop.
                    if ((char)Cv.WaitKey(1) == 27)
                        this.Stop();
                }
            }
            catch (Exception e)
            {
                // Shut the worker down first, then report — mirrors the original flow.
                this.Stop();
                OpenPose.Error(e.Message, -1, nameof(this.WorkConsumer));
            }
        }