Code example #1
        public StdSharedPtr <StdVector <UserDatum> > CreateDatum()
        {
            // Close program when empty frame
            if (this._Closed || this._ImageFiles.Length <= this._Counter)
            {
                OpenPose.Log("Last frame read and added to queue. Closing program after it is processed.", Priority.High);

                // This function stops this worker, which will eventually stop the whole thread system once all the
                // frames have been processed
                this._Closed = true;
                return(null);
            }
            else
            {
                // Create new datum
                var tmp = new StdVector <UserDatum>();
                tmp.EmplaceBack();
                var datumsPtr = new StdSharedPtr <StdVector <UserDatum> >(tmp);
                var datum     = tmp.ToArray()[0];

                // Fill datum
                using (var mat = Cv.ImRead(this._ImageFiles[this._Counter++]))
                    datum.CvInputData = mat;

                // If empty frame -> return nullptr
                if (datum.CvInputData.Empty)
                {
                    OpenPose.Log($"Empty frame detected on path: {this._ImageFiles[this._Counter - 1]}. Closing program.", Priority.High);
                    this._Closed = true;
                    datumsPtr    = null;
                }

                return(datumsPtr);
            }
        }
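A minimal usage sketch (illustrative only; `UserInput` is an assumed host class exposing the `CreateDatum()` above): the producer is polled until it returns null, which signals that the last frame has been read and the pipeline should shut down. In the full samples each non-null datum is handed to the OpenPose wrapper for processing rather than consumed directly.

        // Illustrative sketch only: poll CreateDatum() until the producer reports the end of the image list.
        var userInput = new UserInput();   // assumed class containing the CreateDatum() above
        while (true)
        {
            var datumsPtr = userInput.CreateDatum();
            if (datumsPtr == null)
                break;                     // last frame read, producer has closed
            // ... hand datumsPtr to the OpenPose wrapper for processing here ...
        }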
Code example #2
 public bool Display(StdSharedPtr <StdVector <StdSharedPtr <Datum> > > datumsPtr)
 {
     // User's displaying/saving/other processing here
     // datum.cvOutputData: rendered frame with pose or heatmaps
     // datum.poseKeypoints: Array<float> with the estimated pose
     if (datumsPtr != null && datumsPtr.TryGet(out var data) && !data.Empty)
     {
         var temp = data.ToArray()[0].Get();
         using (var cvMat = OpenPose.OP_OP2CVCONSTMAT(temp.CvOutputData))
             Cv.ImShow($"{OpenPose.OpenPoseNameAndVersion()} - Tutorial C++ API", cvMat);
         // Display the image and sleep for at least 1 ms (it usually takes ~5-10 ms to display the image)
     }
Code example #3
 private static void Display(StdSharedPtr <StdVector <Datum> > datumsPtr)
 {
     // User's displaying/saving/other processing here
     // datum.cvOutputData: rendered frame with pose or heatmaps
     // datum.poseKeypoints: Array<float> with the estimated pose
     if (datumsPtr != null && datumsPtr.TryGet(out var data) && !data.Empty)
     {
         // Display image
         var temp = data.ToArray();
         Cv.ImShow("User worker GUI", temp[0].CvOutputData);
         Cv.WaitKey();
     }
Code example #4
        public bool Display(StdSharedPtr <StdVector <UserDatum> > datumsPtr)
        {
            // User's displaying/saving/other processing here
            // datum.cvOutputData: rendered frame with pose or heatmaps
            // datum.poseKeypoints: Array<float> with the estimated pose
            var key = ' ';

            if (datumsPtr != null && datumsPtr.TryGet(out var data) && !data.Empty)
            {
                var array = data.ToArray();
                Cv.ImShow("User worker GUI", array[0].CvOutputData);
                // Display the image and sleep for at least 1 ms (it usually takes ~5-10 ms to display the image)
                key = (char)Cv.WaitKey(1);
            }
Code example #5
        private Datum ProcessBitmap(Bitmap bmp)
        {
            BitmapData bmpData = null;

            try
            {
                var width  = bmp.Width;
                var height = bmp.Height;
                bmpData = bmp.LockBits(new Rectangle(0, 0, width, height), ImageLockMode.ReadOnly, PixelFormat.Format24bppRgb);

                var stride = bmpData.Stride;
                var scan0  = bmpData.Scan0;

                unsafe
                {
                    // convert bitmap to byte array
                    var line  = width * 3;
                    var image = new byte[line * height];
                    var ps    = (byte *)scan0;
                    for (var h = 0; h < height; h++)
                    {
                        Marshal.Copy((IntPtr)ps, image, line * h, line);
                        ps += stride;
                    }

                    // use openpose wrapper to calculate keypoints using byte array object as input image
                    this.datumProcessed = opWrapper.EmplaceAndPop(image, width, height, MatType.CV_8UC3);

                    if (this.datumProcessed != null)                                                                      // if output data exists
                    {
                        if (this.datumProcessed != null && this.datumProcessed.TryGet(out this.data) && !this.data.Empty) // if datumProcessed exists && we can get the data successfully && retrieved data exists
                        {
                            return(this.data.ToArray()[0].Get());                                                         // retrieve datum object which contains the keypoint data
                        }
                        else // bad input
                        {
                            OpenPose.Log("Image could not be processed.", Priority.High);
                        }
                    }
                }
            }
            finally
            {
                if (bmpData != null)
                {
                    bmp.UnlockBits(bmpData);
                }
            }
            return(null);
        }
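A short usage sketch for `ProcessBitmap` (the file name and the surrounding instance are assumptions); only the returned Datum is inspected, the Bitmap itself is left untouched apart from the temporary LockBits.

        // Illustrative sketch only: run one System.Drawing.Bitmap through ProcessBitmap().
        using (var bmp = new Bitmap("frame.jpg"))   // file name is an assumption
        {
            var datum = this.ProcessBitmap(bmp);
            if (datum != null)
                OpenPose.Log($"Body keypoints: {datum.PoseKeyPoints}", Priority.High);
        }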
Code example #6
 public Datum ProcessFrame(Mat image)                                              // gets result keypoints from OpenPoseDotNet.Mat
 {
     datumProcessed = opWrapper.EmplaceAndPop(image);                              // method detects on OpenPoseDotNet.Mat
     if (datumProcessed != null && datumProcessed.TryGet(out data) && !data.Empty) // if datumProcessed exists && we can get the data successfully && retrieved data exists
     {
         Datum result = data.ToArray()[0].Get();                                   // retrieve datum object which contains the keypoint data
         opWrapper.Dispose();                                                      // dispose of wrapper after detection
         return(result);
     }
     else
     {
         OpenPose.Log("Nullptr or empty datumsPtr found.", Priority.High);
         return(null);
     }
 }
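A short usage sketch for `ProcessFrame` (the file name is an assumption). Note that this variant disposes of `opWrapper` after the first successful detection, so each wrapper instance can only process a single frame.

 // Illustrative sketch only: detect keypoints on a single image file.
 using (var mat = Cv.ImRead("frame.jpg"))    // file name is an assumption
 {
     var datum = ProcessFrame(mat);
     if (datum != null)
         OpenPose.Log($"Body keypoints: {datum.PoseKeyPoints}", Priority.High);
 }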
Code example #7
        protected override StdSharedPtr <StdVector <StdSharedPtr <UserDatum> > > WorkProducer()
        {
            try
            {
                // Close program when empty frame
                if (this._ImageFiles.Length <= this._Counter)
                {
                    OpenPose.Log("Last frame read and added to queue. Closing program after it is processed.", Priority.High);
                    // This function stops this worker, which will eventually stop the whole thread system once all the
                    // frames have been processed
                    this.Stop();
                    return(null);
                }
                else
                {
                    // Create new datum
                    var vector    = new StdVector <StdSharedPtr <UserDatum> >();
                    var datumsPtr = new StdSharedPtr <StdVector <StdSharedPtr <UserDatum> > >(vector);
                    datumsPtr.Get().EmplaceBack();
                    var datum = datumsPtr.Get().At(0);

                    // C# cannot set pointer object by using assignment operator
                    datum.Reset(new UserDatum());

                    // Fill datum
                    using (var cvInputData = Cv.ImRead(this._ImageFiles[this._Counter++]))
                        using (var inputData = OpenPose.OP_CV2OPCONSTMAT(cvInputData))
                            datum.Get().CvInputData = inputData;

                    // If empty frame -> return nullptr
                    if (datum.Get().CvInputData.Empty)
                    {
                        OpenPose.Log($"Empty frame detected on path: {this._ImageFiles[this._Counter - 1]}. Closing program.", Priority.High);
                        this.Stop();
                        datumsPtr = null;
                    }

                    return(datumsPtr);
                }
            }
            catch (Exception e)
            {
                OpenPose.Log("Some kind of unexpected error happened.");
                this.Stop();
                OpenPose.Error(e.Message, -1, nameof(this.WorkProducer));
                return(null);
            }
        }
Code example #8
 private static void PrintKeypoints(StdSharedPtr <StdVector <StdSharedPtr <Datum> > > datumsPtr)
 {
     try
     {
         // Example: How to use the pose keypoints
         if (datumsPtr != null && datumsPtr.TryGet(out var data) && !data.Empty)
         {
             var temp = data.ToArray()[0].Get();
             OpenPose.Log($"Body keypoints: {temp.PoseKeyPoints}", Priority.High);
             OpenPose.Log($"Face keypoints: {temp.FaceKeyPoints}", Priority.High);
             OpenPose.Log($"Left hand keypoints: {temp.HandKeyPoints[0]}", Priority.High);
             OpenPose.Log($"Right hand keypoints: {temp.HandKeyPoints[1]}", Priority.High);
         }
         else
         {
             OpenPose.Log("Nullptr or empty datumsPtr found.", Priority.High);
         }
     }
Code example #9
        protected override StdSharedPtr <StdVector <UserDatum> > WorkProducer()
        {
            try
            {
                // Close program when empty frame
                if (this._ImageFiles.Length <= this._Counter)
                {
                    OpenPose.Log("Last frame read and added to queue. Closing program after it is processed.", Priority.High);
                    // This function stops this worker, which will eventually stop the whole thread system once all the
                    // frames have been processed
                    this.Stop();
                    return(null);
                }
                else
                {
                    // Create new datum
                    var tmp = new StdVector <UserDatum>();
                    tmp.EmplaceBack();
                    var datumsPtr = new StdSharedPtr <StdVector <UserDatum> >(tmp);
                    var datum     = tmp.ToArray()[0];

                    // Fill datum
                    using (var mat = Cv.ImRead(this._ImageFiles[this._Counter++]))
                        datum.CvInputData = mat;

                    // If empty frame -> return nullptr
                    if (datum.CvInputData.Empty)
                    {
                        OpenPose.Log($"Empty frame detected on path: {this._ImageFiles[this._Counter - 1]}. Closing program.", Priority.High);
                        this.Stop();
                        datumsPtr = null;
                    }

                    return(datumsPtr);
                }
            }
            catch (Exception e)
            {
                OpenPose.Log("Some kind of unexpected error happened.");
                this.Stop();
                OpenPose.Error(e.Message, -1, nameof(this.WorkProducer));
                return(null);
            }
        }
Code example #10
        private static bool Display(StdSharedPtr <StdVector <StdSharedPtr <Datum> > > datumsPtr)
        {
            try
            {
                // User's displaying/saving/other processing here
                // datum.cvOutputData: rendered frame with pose or heatmaps
                // datum.poseKeypoints: Array<float> with the estimated pose
                if (datumsPtr != null && datumsPtr.TryGet(out var data) && !data.Empty)
                {
                    // Display the image and sleep for at least 1 ms (it usually takes ~5-10 ms to display the image)
                    var temp = data.ToArray()[0].Get();
                    using (var cvMat = OpenPose.OP_OP2CVCONSTMAT(temp.CvOutputData))
                        Cv.ImShow($"{OpenPose.OpenPoseNameAndVersion()} - Tutorial C++ API", cvMat);
                }
                else
                {
                    OpenPose.Log("Nullptr or empty datumsPtr found.", Priority.High);
                }

                var key = Cv.WaitKey(1);
                return(key == 27);
            }
Code example #11
 private static void Display(StdSharedPtr <StdVector <StdSharedPtr <Datum> > > datumsPtr)
 {
     try
     {
         // User's displaying/saving/other processing here
         // datum.cvOutputData: rendered frame with pose or heatmaps
         // datum.poseKeypoints: Array<float> with the estimated pose
         if (datumsPtr != null && datumsPtr.TryGet(out var data) && !data.Empty)
         {
             // Display image
             var temp = data.ToArray();
             using (var cvMat = OpenPose.OP_OP2CVCONSTMAT(temp[0].Get().CvOutputData))
             {
                 Cv.ImShow($"{OpenPose.OpenPoseNameAndVersion()} - Tutorial C++ API", cvMat);
                 Cv.WaitKey();
             }
         }
         else
         {
             OpenPose.Log("Nullptr or empty datumsPtr found.", Priority.High);
         }
     }
Code example #12
        public StdSharedPtr <StdVector <StdSharedPtr <Datum> > > CreateDatum()
        {
            // Close program when empty frame
            if (this._Closed || this._ImageFiles.Length <= this._Counter)
            {
                OpenPose.Log("Last frame read and added to queue. Closing program after it is processed.", Priority.High);

                // This function stops this worker, which will eventually stop the whole thread system once all the
                // frames have been processed
                this._Closed = true;
                return(null);
            }
            else
            {
                // Create new datum
                var vector    = new StdVector <StdSharedPtr <Datum> >();
                var datumsPtr = new StdSharedPtr <StdVector <StdSharedPtr <Datum> > >(vector);
                datumsPtr.Get().EmplaceBack();
                var datum = datumsPtr.Get().At(0);

                // C# cannot set pointer object by using assignment operator
                datum.Reset(new Datum());

                // Fill datum
                using (var cvInputData = Cv.ImRead(this._ImageFiles[this._Counter++]))
                    using (var inputData = OpenPose.OP_CV2OPCONSTMAT(cvInputData))
                        datum.Get().CvInputData = inputData;

                // If empty frame -> return nullptr
                if (datum.Get().CvInputData.Empty)
                {
                    OpenPose.Log($"Empty frame detected on path: {this._ImageFiles[this._Counter - 1]}. Closing program.", Priority.High);
                    this._Closed = true;
                    datumsPtr    = null;
                }

                return(datumsPtr);
            }
        }
Code example #13
        private static void Display(StdSharedPtr <StdVector <StdSharedPtr <Datum> > > datumsPtr)
        {
            try
            {
                // User's displaying/saving/other processing here
                // datum.cvOutputData: rendered frame with pose or heatmaps
                // datum.poseKeypoints: Array<float> with the estimated pose
                if (datumsPtr != null && datumsPtr.TryGet(out var data) && !data.Empty)
                {
                    var datum = datumsPtr.Get().At(0).Get();

                    // Display image
                    using (var cvMat = OpenPose.OP_OP2CVCONSTMAT(datum.CvOutputData))
                    {
                        Cv.ImShow($"{OpenPose.OpenPoseNameAndVersion()} - Tutorial C++ API", cvMat);
                        Cv.WaitKey(0);
                    }
                }
            }
            catch (Exception e)
            {
                OpenPose.Error(e.Message, -1, nameof(Display));
            }
        }
Code example #14
        private static bool Display(StdSharedPtr <StdVector <StdSharedPtr <Datum> > > datumsPtr, int desiredChannel = 0)
        {
            try
            {
                if (datumsPtr != null && datumsPtr.TryGet(out var data) && !data.Empty)
                {
                    var datum = datumsPtr.Get().At(0).Get();

                    // Note: Heatmaps are in net_resolution size, which does not necessarily match the final image size
                    // Read heatmaps
                    var poseHeatMaps = datum.PoseHeatMaps;
                    // Read desired channel
                    var numberChannels = poseHeatMaps.GetSize(0);
                    var height         = poseHeatMaps.GetSize(1);
                    var width          = poseHeatMaps.GetSize(2);
                    var eleSize        = sizeof(float);
                    using (var desiredChannelHeatMap = new Mat(height, width, MatType.CV_32F, IntPtr.Add(poseHeatMaps.GetPtr(), (desiredChannel % numberChannels) * height * width * eleSize)))
                    {
                        // Read image used from OpenPose body network (same resolution as the heatmaps)
                        var inputNetData = datum.InputNetData[0];
                        using (var inputNetDataB = new Mat(height, width, MatType.CV_32F, IntPtr.Add(inputNetData.GetPtr(), 0 * height * width * eleSize)))
                            using (var inputNetDataG = new Mat(height, width, MatType.CV_32F, IntPtr.Add(inputNetData.GetPtr(), 1 * height * width * eleSize)))
                                using (var inputNetDataR = new Mat(height, width, MatType.CV_32F, IntPtr.Add(inputNetData.GetPtr(), 2 * height * width * eleSize)))
                                    using (var vector = new StdVector <Mat>(new List <Mat>(new[] { inputNetDataB, inputNetDataG, inputNetDataR })))
                                        using (var tmp = new Mat())
                                        {
                                            Cv.Merge(vector, tmp);

                                            using (var add = tmp + 0.5)
                                                using (var mul = add * 255)
                                                    using (var netInputImage = (Mat)mul)
                                                    {
                                                        // Turn into uint8 Cv.Mat
                                                        using (var netInputImageUint8 = new Mat())
                                                        {
                                                            netInputImage.ConvertTo(netInputImageUint8, MatType.CV_8UC1);
                                                            using (var desiredChannelHeatMapUint8 = new Mat())
                                                            {
                                                                desiredChannelHeatMap.ConvertTo(desiredChannelHeatMapUint8, MatType.CV_8UC1);

                                                                // Combining both images
                                                                using (var imageToRender = new Mat())
                                                                {
                                                                    Cv.ApplyColorMap(desiredChannelHeatMapUint8, desiredChannelHeatMapUint8, ColormapType.COLORMAP_JET);
                                                                    Cv.AddWeighted(netInputImageUint8, 0.5, desiredChannelHeatMapUint8, 0.5, 0d, imageToRender);

                                                                    // Display image
                                                                    Cv.ImShow($"{OpenPose.OpenPoseNameAndVersion()} - Tutorial C++ API", imageToRender);
                                                                }
                                                            }
                                                        }
                                                    }
                                        }
                    }
                }
                else
                {
                    OpenPose.Log("Nullptr or empty datumsPtr found.", Priority.High);
                }

                var key = (char)Cv.WaitKey(1);
                return(key == 27);
            }
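The pointer offset above assumes the pose heatmaps are stored as one contiguous float buffer with shape [channels, height, width]; a small arithmetic sketch, with an assumed 368x368 net resolution, of the same byte-offset computation:

        // Illustrative arithmetic only (sizes are assumptions): byte offset of a given channel
        // inside a [channels, height, width] float heatmap buffer.
        var height  = 368;
        var width   = 368;
        var channel = 5;
        var offset  = channel * height * width * sizeof(float);   // 5 * 368 * 368 * 4 = 2,708,480 bytes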
Code example #15
        private static void ConfigureWrapper(Wrapper <Datum> opWrapper)
        {
            try
            {
                // Configuring OpenPose

                // logging_level
                OpenPose.CheckBool(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
                ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
                Profiler.SetDefaultX((ulong)Flags.ProfileSpeed);
                // // For debugging
                // // Print all logging messages
                // ConfigureLog.PriorityThreshold = Priority.None;
                // // Print out speed values faster
                // Profiler.setDefaultX(100);

                // Applying user defined configuration - GFlags to program variables
                // producerType
                var tie            = OpenPose.FlagsToProducer(Flags.ImageDir, Flags.Video, Flags.IpCamera, Flags.Camera, Flags.FlirCamera, Flags.FlirCameraIndex);
                var producerType   = tie.Item1;
                var producerString = tie.Item2;
                // cameraSize
                var cameraSize = OpenPose.FlagsToPoint(Flags.CameraResolution, "-1x-1");
                // outputSize
                var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
                // netInputSize
                var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
                // faceNetInputSize
                var faceNetInputSize = OpenPose.FlagsToPoint(Flags.FaceNetResolution, "368x368 (multiples of 16)");
                // handNetInputSize
                var handNetInputSize = OpenPose.FlagsToPoint(Flags.HandNetResolution, "368x368 (multiples of 16)");
                // poseMode
                var poseMode = OpenPose.FlagsToPoseMode(Flags.Body);
                // poseModel
                var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
                // JSON saving
                if (!string.IsNullOrEmpty(Flags.WriteKeyPoint))
                {
                    OpenPose.Log("Flag `write_keypoint` is deprecated and will eventually be removed. Please, use `write_json` instead.", Priority.Max);
                }
                // keypointScale
                var keypointScale = OpenPose.FlagsToScaleMode(Flags.KeyPointScale);
                // heatmaps to add
                var heatMapTypes = OpenPose.FlagsToHeatMaps(Flags.HeatmapsAddParts, Flags.HeatmapsAddBackground, Flags.HeatmapsAddPAFs);
                var heatMapScale = OpenPose.FlagsToHeatMapScaleMode(Flags.HeatmapsScale);
                // >1 camera view?
                var multipleView = (Flags.Enable3D || Flags.Views3D > 1 || Flags.FlirCamera);
                // Face and hand detectors
                var faceDetector = OpenPose.FlagsToDetector(Flags.FaceDetector);
                var handDetector = OpenPose.FlagsToDetector(Flags.HandDetector);
                // Enabling Google Logging
                const bool enableGoogleLogging = true;

                // Initializing the user custom classes
                // GUI (Display)
                var wUserOutput = new StdSharedPtr <UserWorkerConsumer <Datum> >(new WUserOutput());

                // Add custom processing
                const bool workerOutputOnNewThread = true;
                opWrapper.SetWorker(WorkerType.Output, wUserOutput, workerOutputOnNewThread);

                // Pose configuration (use WrapperStructPose{} for default and recommended configuration)
                var pose = new WrapperStructPose(poseMode,
                                                 netInputSize,
                                                 outputSize,
                                                 keypointScale,
                                                 Flags.NumGpu,
                                                 Flags.NumGpuStart,
                                                 Flags.ScaleNumber,
                                                 (float)Flags.ScaleGap,
                                                 OpenPose.FlagsToRenderMode(Flags.RenderPose, multipleView),
                                                 poseModel,
                                                 !Flags.DisableBlending,
                                                 (float)Flags.AlphaPose,
                                                 (float)Flags.AlphaHeatmap,
                                                 Flags.PartToShow,
                                                 Flags.ModelFolder,
                                                 heatMapTypes,
                                                 heatMapScale,
                                                 Flags.PartCandidates,
                                                 (float)Flags.RenderThreshold,
                                                 Flags.NumberPeopleMax,
                                                 Flags.MaximizePositives,
                                                 Flags.FpsMax,
                                                 Flags.PrototxtPath,
                                                 Flags.CaffeModelPath,
                                                 (float)Flags.UpsamplingRatio,
                                                 enableGoogleLogging);

                // Face configuration (use op::WrapperStructFace{} to disable it)
                var face = new WrapperStructFace(Flags.Face,
                                                 faceDetector,
                                                 faceNetInputSize,
                                                 OpenPose.FlagsToRenderMode(Flags.FaceRender, multipleView, Flags.RenderPose),
                                                 (float)Flags.FaceAlphaPose,
                                                 (float)Flags.FaceAlphaHeatmap,
                                                 (float)Flags.FaceRenderThreshold);

                // Hand configuration (use op::WrapperStructHand{} to disable it)
                var hand = new WrapperStructHand(Flags.Hand,
                                                 handDetector,
                                                 handNetInputSize,
                                                 Flags.HandScaleNumber,
                                                 (float)Flags.HandScaleRange,
                                                 OpenPose.FlagsToRenderMode(Flags.HandRender, multipleView, Flags.RenderPose),
                                                 (float)Flags.HandAlphaPose,
                                                 (float)Flags.HandAlphaHeatmap,
                                                 (float)Flags.HandRenderThreshold);

                // Extra functionality configuration (use op::WrapperStructExtra{} to disable it)
                var extra = new WrapperStructExtra(Flags.Enable3D,
                                                   Flags.MinViews3D,
                                                   Flags.Identification,
                                                   Flags.Tracking,
                                                   Flags.IkThreads);

                // Producer (use default to disable any input)
                var input = new WrapperStructInput(producerType,
                                                   producerString,
                                                   Flags.FrameFirst,
                                                   Flags.FrameStep,
                                                   Flags.FrameLast,
                                                   Flags.ProcessRealTime,
                                                   Flags.FrameFlip,
                                                   Flags.FrameRotate,
                                                   Flags.FramesRepeat,
                                                   cameraSize,
                                                   Flags.CameraParameterPath,
                                                   Flags.FrameUndistort,
                                                   Flags.Views3D);

                // Output (comment or use default argument to disable any output)
                var output = new WrapperStructOutput(Flags.CliVerbose,
                                                     Flags.WriteKeyPoint,
                                                     OpenPose.StringToDataFormat(Flags.WriteKeyPointFormat),
                                                     Flags.WriteJson,
                                                     Flags.WriteCocoJson,
                                                     Flags.WriteCocoJsonVariants,
                                                     Flags.WriteCocoJsonVariant,
                                                     Flags.WriteImages,
                                                     Flags.WriteImagesFormat,
                                                     Flags.WriteVideo,
                                                     Flags.WriteVideoFps,
                                                     Flags.WriteVideoWithAudio,
                                                     Flags.WriteHeatmaps,
                                                     Flags.WriteHeatmapsFormat,
                                                     Flags.WriteVideoAdam,
                                                     Flags.WriteBvh,
                                                     Flags.UdpHost,
                                                     Flags.UdpPort);

                // Apply the configuration to the OpenPose wrapper
                opWrapper.Configure(pose);
                opWrapper.Configure(face);
                opWrapper.Configure(hand);
                opWrapper.Configure(extra);
                opWrapper.Configure(input);
                opWrapper.Configure(output);

                // No GUI. Equivalent to: opWrapper.configure(op::WrapperStructGui{});
                // Set to single-thread (for sequential processing and/or debugging and/or reducing latency)
                if (Flags.DisableMultiThread)
                {
                    opWrapper.DisableMultiThreading();
                }
            }
            catch (Exception e)
            {
                OpenPose.Error(e.Message, -1, nameof(ConfigureWrapper));
            }
        }
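A minimal sketch of how a method like `ConfigureWrapper` is typically driven (the call site is an assumption; the parameterless `Wrapper` constructor and `Exec()` are taken from the other examples in this list):

        // Illustrative sketch only: configure the wrapper with the method above, then run it.
        using (var opWrapper = new Wrapper <Datum>())
        {
            ConfigureWrapper(opWrapper);
            OpenPose.Log("Starting thread(s)...", Priority.High);
            opWrapper.Exec();   // blocks until all frames have been processed
        }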
Code example #16
        private static int TutorialApiCpp9()
        {
            try
            {
                OpenPose.Log("Starting OpenPose demo...", Priority.High);
                var timeBegin = new Stopwatch();
                timeBegin.Start();

                // logging_level
                OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
                ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
                Profiler.SetDefaultX((ulong)Flags.ProfileSpeed);

                // Applying user defined configuration - GFlags to program variables
                // outputSize
                var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
                // netInputSize
                var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
                // faceNetInputSize
                var faceNetInputSize = OpenPose.FlagsToPoint(Flags.FaceNetResolution, "368x368 (multiples of 16)");
                // handNetInputSize
                var handNetInputSize = OpenPose.FlagsToPoint(Flags.HandNetResolution, "368x368 (multiples of 16)");
                // poseModel
                var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
                // JSON saving
                if (!string.IsNullOrEmpty(Flags.WriteKeyPoint))
                {
                    OpenPose.Log("Flag `write_keypoint` is deprecated and will eventually be removed. Please, use `write_json` instead.", Priority.Max);
                }
                // keypointScale
                var keypointScale = OpenPose.FlagsToScaleMode(Flags.KeyPointScale);
                // heatmaps to add
                var heatMapTypes = OpenPose.FlagsToHeatMaps(Flags.HeatmapsAddParts, Flags.HeatmapsAddBackground, Flags.HeatmapsAddPAFs);
                var heatMapScale = OpenPose.FlagsToHeatMapScaleMode(Flags.HeatmapsScale);
                // >1 camera view?
                var multipleView = (Flags.Enable3D || Flags.Views3D > 1);
                // Enabling Google Logging
                const bool enableGoogleLogging = true;

                // Initializing the user custom classes
                // Frames producer (e.g., video, webcam, ...)
                using (var wUserInput = new StdSharedPtr <UserWorkerProducer <UserDatum> >(new WUserInput(Flags.ImageDir)))
                    // Processing
                    using (var wUserPostProcessing = new StdSharedPtr <UserWorker <UserDatum> >(new WUserPostProcessing()))
                        // GUI (Display)
                        using (var wUserOutput = new StdSharedPtr <UserWorkerConsumer <UserDatum> >(new WUserOutput()))
                        {
                            // OpenPose wrapper
                            OpenPose.Log("Configuring OpenPose...", Priority.High);
                            using (var opWrapperT = new Wrapper <UserDatum>())
                            {
                                // Add custom input
                                const bool workerInputOnNewThread = false;
                                opWrapperT.SetWorker(WorkerType.Input, wUserInput, workerInputOnNewThread);
                                // Add custom processing
                                const bool workerProcessingOnNewThread = false;
                                opWrapperT.SetWorker(WorkerType.PostProcessing, wUserPostProcessing, workerProcessingOnNewThread);
                                // Add custom output
                                const bool workerOutputOnNewThread = true;
                                opWrapperT.SetWorker(WorkerType.Output, wUserOutput, workerOutputOnNewThread);

                                // Pose configuration (use WrapperStructPose{} for default and recommended configuration)
                                using (var pose = new WrapperStructPose(!Flags.BodyDisabled,
                                                                        netInputSize,
                                                                        outputSize,
                                                                        keypointScale,
                                                                        Flags.NumGpu,
                                                                        Flags.NumGpuStart,
                                                                        Flags.ScaleNumber,
                                                                        (float)Flags.ScaleGap,
                                                                        OpenPose.FlagsToRenderMode(Flags.RenderPose, multipleView),
                                                                        poseModel,
                                                                        !Flags.DisableBlending,
                                                                        (float)Flags.AlphaPose,
                                                                        (float)Flags.AlphaHeatmap,
                                                                        Flags.PartToShow,
                                                                        Flags.ModelFolder,
                                                                        heatMapTypes,
                                                                        heatMapScale,
                                                                        Flags.PartCandidates,
                                                                        (float)Flags.RenderThreshold,
                                                                        Flags.NumberPeopleMax,
                                                                        Flags.MaximizePositives,
                                                                        Flags.FpsMax,
                                                                        enableGoogleLogging))

                                    // Face configuration (use op::WrapperStructFace{} to disable it)
                                    using (var face = new WrapperStructFace(Flags.Face,
                                                                            faceNetInputSize,
                                                                            OpenPose.FlagsToRenderMode(Flags.FaceRender, multipleView, Flags.RenderPose),
                                                                            (float)Flags.FaceAlphaPose,
                                                                            (float)Flags.FaceAlphaHeatmap,
                                                                            (float)Flags.FaceRenderThreshold))

                                        // Hand configuration (use op::WrapperStructHand{} to disable it)
                                        using (var hand = new WrapperStructHand(Flags.Hand,
                                                                                handNetInputSize,
                                                                                Flags.HandScaleNumber,
                                                                                (float)Flags.HandScaleRange, Flags.HandTracking,
                                                                                OpenPose.FlagsToRenderMode(Flags.HandRender, multipleView, Flags.RenderPose),
                                                                                (float)Flags.HandAlphaPose,
                                                                                (float)Flags.HandAlphaHeatmap,
                                                                                (float)Flags.HandRenderThreshold))

                                            // Extra functionality configuration (use op::WrapperStructExtra{} to disable it)
                                            using (var extra = new WrapperStructExtra(Flags.Enable3D,
                                                                                      Flags.MinViews3D,
                                                                                      Flags.Identification,
                                                                                      Flags.Tracking,
                                                                                      Flags.IkThreads))

                                                // Output (comment or use default argument to disable any output)
                                                using (var output = new WrapperStructOutput(Flags.CliVerbose,
                                                                                            Flags.WriteKeyPoint,
                                                                                            OpenPose.StringToDataFormat(Flags.WriteKeyPointFormat),
                                                                                            Flags.WriteJson,
                                                                                            Flags.WriteCocoJson,
                                                                                            Flags.WriteCocoFootJson,
                                                                                            Flags.WriteCocoJsonVariant,
                                                                                            Flags.WriteImages,
                                                                                            Flags.WriteImagesFormat,
                                                                                            Flags.WriteVideo,
                                                                                            Flags.WriteVideoFps,
                                                                                            Flags.WriteHeatmaps,
                                                                                            Flags.WriteHeatmapsFormat,
                                                                                            Flags.WriteVideoAdam,
                                                                                            Flags.WriteBvh,
                                                                                            Flags.UdpHost,
                                                                                            Flags.UdpPort))
                                                {
                                                    opWrapperT.Configure(pose);
                                                    opWrapperT.Configure(face);
                                                    opWrapperT.Configure(hand);
                                                    opWrapperT.Configure(extra);
                                                    opWrapperT.Configure(output);

                                                    // No GUI. Equivalent to: opWrapper.configure(op::WrapperStructGui{});
                                                    // Set to single-thread (for sequential processing and/or debugging and/or reducing latency)
                                                    if (Flags.DisableMultiThread)
                                                    {
                                                        opWrapperT.DisableMultiThreading();
                                                    }

                                                    // Start, run, and stop processing - exec() blocks this thread until OpenPose wrapper has finished
                                                    OpenPose.Log("Starting thread(s)...", Priority.High);
                                                    opWrapperT.Exec();
                                                }
                            }
                        }

                // Measuring total time
                timeBegin.Stop();
                var totalTimeSec = timeBegin.ElapsedMilliseconds / 1000d;
                var message      = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
                OpenPose.Log(message, Priority.High);

                // Return successful message
                return(0);
            }
            catch (Exception)
            {
                return(-1);
            }
        }
Code example #17
        private static int OpenPoseTutorialThread3()
        {
            try
            {
                OpenPose.Log("Starting OpenPose demo...", Priority.High);
                var timeBegin = new Stopwatch();
                timeBegin.Start();

                // ------------------------- INITIALIZATION -------------------------
                // Step 1 - Set logging level
                // - 0 will output all the logging messages
                // - 255 will output nothing
                OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
                ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
                // Step 2 - Setting thread workers && manager
                using (var threadManager = new ThreadManager <Datum>())
                {
                    // Step 3 - Initializing the worker classes
                    // Frames producer (e.g., video, webcam, ...)
                    using (var wUserInput = new StdSharedPtr <UserWorkerProducer <Datum> >(new WUserInput(Flags.ImageDir)))
                    {
                        // Processing
                        using (var wUserProcessing = new StdSharedPtr <UserWorker <Datum> >(new WUserPostProcessing()))
                        {
                            // GUI (Display)
                            using (var wUserOutput = new StdSharedPtr <UserWorkerConsumer <Datum> >(new WUserOutput()))
                            {
                                // ------------------------- CONFIGURING THREADING -------------------------
                                // In this simple multi-thread example, we will do the following:
                                // 4 (virtual) queues: 0, 1, 2, 3
                                // 2 real queues: 1 and 2. The first and last queue ids (in this case 0 and 3) are not actual
                                // queues, but the beginning and end of the processing sequence
                                // 3 threads: 0, 1, 2
                                // wUserInput will generate frames (there is no real queue 0) and push them on queue 1
                                // wUserProcessing will pop frames from queue 1, process them, and push them on queue 2
                                // wUserOutput will pop frames from queue 2 and consume them (there is no real queue 3)
                                var threadId = 0UL;
                                var queueIn  = 0UL;
                                var queueOut = 1UL;
                                threadManager.Add(threadId++, wUserInput, queueIn++, queueOut++);       // Thread 0, queues 0 -> 1
                                threadManager.Add(threadId++, wUserProcessing, queueIn++, queueOut++);  // Thread 1, queues 1 -> 2
                                threadManager.Add(threadId++, wUserOutput, queueIn++, queueOut++);      // Thread 2, queues 2 -> 3

                                // ------------------------- STARTING AND STOPPING THREADING -------------------------
                                OpenPose.Log("Starting thread(s)...", Priority.High);
                                // Two different ways of running the program on multithread environment
                                // Option a) Using the main thread (this thread) for processing (it saves 1 thread, recommended)
                                threadManager.Exec();
                                // Option b) Giving to the user the control of this thread
                                // // VERY IMPORTANT NOTE: if OpenCV is compiled with Qt support, this option will not work. Qt needs the main
                                // // thread to plot visual results, so the final GUI (which uses OpenCV) would return an exception similar to:
                                // // `QMetaMethod::invoke: Unable to invoke methods with return values in queued connections`
                                // // Start threads
                                // threadManager.start();
                                // // Keep program alive while running threads. Here the user could perform any other desired function
                                // while (threadManager.isRunning())
                                //     std::this_thread::sleep_for(std::chrono::milliseconds{33});
                                // // Stop and join threads
                                // op::log("Stopping thread(s)", op::Priority::High);
                                // threadManager.stop();
                            }
                        }
                    }
                }

                // ------------------------- CLOSING -------------------------
                // Measuring total time
                timeBegin.Stop();
                var totalTimeSec = timeBegin.ElapsedMilliseconds / 1000d;
                var message      = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
                OpenPose.Log(message, Priority.High);

                // Return successful message
                return(0);
            }
            catch (Exception e)
            {
                OpenPose.Error(e.Message, -1, nameof(OpenPoseTutorialThread3));
                return(-1);
            }
        }
Code example #18
        private static int TutorialDeveloperThread1()
        {
            try
            {
                OpenPose.Log("Starting OpenPose demo...", Priority.High);
                var timeBegin = new Stopwatch();
                timeBegin.Start();

                // ------------------------- INITIALIZATION -------------------------
                // Step 1 - Set logging level
                // - 0 will output all the logging messages
                // - 255 will output nothing
                OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
                ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
                // Step 2 - Read GFlags (user defined configuration)
                // cameraSize
                var cameraSize = OpenPose.FlagsToPoint(Flags.CameraResolution, "-1x-1");
                // outputSize
                var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
                // producerType
                var tie                    = OpenPose.FlagsToProducer(Flags.ImageDir, Flags.Video, Flags.IpCamera, Flags.Camera, Flags.FlirCamera, Flags.FlirCameraIndex);
                var producerType           = tie.Item1;
                var producerString         = tie.Item2;
                var displayProducerFpsMode = Flags.ProcessRealTime ? ProducerFpsMode.OriginalFps : ProducerFpsMode.RetrievalFps;
                using (var producerSharedPtr = OpenPose.CreateProducer(producerType,
                                                                       cameraSize,
                                                                       producerString,
                                                                       Flags.CameraParameterPath,
                                                                       Flags.FrameUndistort,
                                                                       Flags.Views3D))
                {
                    producerSharedPtr.Get().SetProducerFpsMode(displayProducerFpsMode);
                    OpenPose.Log("", Priority.Low);
                    // Step 3 - Setting producer
                    //var videoSeekSharedPtr = std::make_shared<std::pair<std::atomic<bool>, std::atomic<int>>>();
                    //videoSeekSharedPtr->first = false;
                    //videoSeekSharedPtr->second = 0;
                    // Step 4 - Setting thread workers && manager
                    // Note: native debugging may cause a crash
                    using (var threadManager = new ThreadManager <Datum>())
                    {
                        // Step 5 - Initializing the worker classes
                        // Frames producer (e.g., video, webcam, ...)
                        using (var datumProducer = new StdSharedPtr <DatumProducer <Datum> >(new DatumProducer <Datum>(producerSharedPtr)))
                            using (var wDatumProducer = new StdSharedPtr <WDatumProducer <Datum> >(new WDatumProducer <Datum>(datumProducer)))
                            {
                                // GUI (Display)
                                using (var gui = new StdSharedPtr <Gui>(new Gui(outputSize, Flags.FullScreen, threadManager.GetIsRunningSharedPtr())))
                                    using (var wGui = new StdSharedPtr <WGui <Datum> >(new WGui <Datum>(gui)))
                                    {
                                        // ------------------------- CONFIGURING THREADING -------------------------
                                        // In this simple multi-thread example, we will do the following:
                                        // 3 (virtual) queues: 0, 1, 2
                                        // 1 real queue: 1. The first and last queue ids (in this case 0 and 2) are not actual queues, but the
                                        // beginning and end of the processing sequence
                                        // 2 threads: 0, 1
                                        // wDatumProducer will generate frames (there is no real queue 0) and push them on queue 1
                                        // wGui will pop frames from queue 1 and process them (there is no real queue 2)
                                        var threadId = 0UL;
                                        var queueIn  = 0UL;
                                        var queueOut = 1UL;
                                        threadManager.Add(threadId++, wDatumProducer, queueIn++, queueOut++); // Thread 0, queues 0 -> 1
                                        threadManager.Add(threadId++, wGui, queueIn++, queueOut++);           // Thread 1, queues 1 -> 2

                                        // Equivalent single-thread version (option a)
                                        // const auto threadId = 0ull;
                                        // auto queueIn = 0ull;
                                        // auto queueOut = 1ull;
                                        // threadManager.add(threadId, wDatumProducer, queueIn++, queueOut++);       // Thread 0, queues 0 -> 1
                                        // threadManager.add(threadId, wGui, queueIn++, queueOut++);                 // Thread 0, queues 1 -> 2

                                        // Equivalent single-thread version (option b)
                                        // const auto threadId = 0ull;
                                        // const auto queueIn = 0ull;
                                        // const auto queueOut = 1ull;
                                        // threadManager.add(threadId, {wDatumProducer, wGui}, queueIn, queueOut);     // Thread 0, queues 0 -> 1

                                        // ------------------------- STARTING AND STOPPING THREADING -------------------------
                                        OpenPose.Log("Starting thread(s)...", Priority.High);
                                        // Two different ways of running the program on multithread environment
                                        // Option a) Using the main thread (this thread) for processing (it saves 1 thread, recommended)
                                        threadManager.Exec();
                                        // Option b) Giving to the user the control of this thread
                                        // // VERY IMPORTANT NOTE: if OpenCV is compiled with Qt support, this option will not work. Qt needs the main
                                        // // thread to plot visual results, so the final GUI (which uses OpenCV) would return an exception similar to:
                                        // // `QMetaMethod::invoke: Unable to invoke methods with return values in queued connections`
                                        // // Start threads
                                        // threadManager.start();
                                        // // Keep program alive while running threads. Here the user could perform any other desired function
                                        // while (threadManager.isRunning())
                                        //     std::this_thread::sleep_for(std::chrono::milliseconds{33});
                                        // // Stop and join threads
                                        // op::log("Stopping thread(s)", op::Priority::High);
                                        // threadManager.stop();
                                    }
                            }
                    }
                }

                // ------------------------- CLOSING -------------------------
                // Measuring total time
                timeBegin.Stop();
                var totalTimeSec = timeBegin.ElapsedMilliseconds / 1000d;
                var message      = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
                OpenPose.Log(message, Priority.High);

                // Return successful message
                return(0);
            }
            catch (Exception e)
            {
                OpenPose.Error(e.Message, -1, nameof(TutorialDeveloperThread1));
                return(-1);
            }
        }
Code example #19
        private static int TutorialDeveloperPose2()
        {
            try
            {
                OpenPose.Log("Starting OpenPose demo...", Priority.High);
                var timeBegin = new Stopwatch();
                timeBegin.Start();

                // ------------------------- INITIALIZATION -------------------------
                // Step 1 - Set logging level
                // - 0 will output all the logging messages
                // - 255 will output nothing
                OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
                ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
                OpenPose.Log("", Priority.Low);
                // Step 2 - Read GFlags (user defined configuration)
                // outputSize
                var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
                // netInputSize
                var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
                // poseModel
                var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
                // Check no contradictory flags enabled
                if (Flags.AlphaPose < 0.0 || Flags.AlphaPose > 1.0)
                {
                    OpenPose.Error("Alpha value for blending must be in the range [0,1].", -1, nameof(TutorialDeveloperPose2));
                }
                if (Flags.ScaleGap <= 0.0 && Flags.ScaleNumber > 1)
                {
                    OpenPose.Error("Incompatible flag configuration: scale_gap must be greater than 0 or scale_number = 1.", -1, nameof(TutorialDeveloperPose2));
                }
                // Step 3 - Initialize all required classes
                using (var scaleAndSizeExtractor = new ScaleAndSizeExtractor(netInputSize, outputSize, Flags.ScaleNumber, Flags.ScaleGap))
                    using (var cvMatToOpInput = new CvMatToOpInput(poseModel))
                        using (var cvMatToOpOutput = new CvMatToOpOutput())
                            using (var poseExtractorPtr = new StdSharedPtr <PoseExtractorCaffe>(new PoseExtractorCaffe(poseModel, Flags.ModelFolder, Flags.NumGpuStart)))
                                using (var poseGpuRenderer = new PoseGpuRenderer(poseModel, poseExtractorPtr, (float)Flags.RenderThreshold, !Flags.DisableBlending, (float)Flags.AlphaPose))
                                {
                                    poseGpuRenderer.SetElementToRender(Flags.PartToShow);

                                    using (var opOutputToCvMat = new OpOutputToCvMat())
                                        using (var frameDisplayer = new FrameDisplayer("OpenPose Tutorial - Example 2", outputSize))
                                        {
                                            // Step 4 - Initialize resources on desired thread (in this case single thread, i.e., we init resources here)
                                            poseExtractorPtr.Get().InitializationOnThread();
                                            poseGpuRenderer.InitializationOnThread();

                                            // ------------------------- POSE ESTIMATION AND RENDERING -------------------------
                                            // Step 1 - Read and load image, error if empty (possibly wrong path)
                                            // Alternative: cv::imread(Flags.image_path, CV_LOAD_IMAGE_COLOR);
                                            using (var inputImage = OpenPose.LoadImage(ImagePath, LoadImageFlag.LoadImageColor))
                                            {
                                                if (inputImage.Empty)
                                                {
                                                    OpenPose.Error("Could not open or find the image: " + ImagePath, -1, nameof(TutorialDeveloperPose2));
                                                }
                                                var imageSize = new Point <int>(inputImage.Cols, inputImage.Rows);
                                                // Step 2 - Get desired scale sizes
                                                var tuple = scaleAndSizeExtractor.Extract(imageSize);
                                                var scaleInputToNetInputs = tuple.Item1;
                                                var netInputSizes         = tuple.Item2;
                                                var scaleInputToOutput    = tuple.Item3;
                                                var outputResolution      = tuple.Item4;
                                                // Step 3 - Format input image to OpenPose input and output formats
                                                var netInputArray = cvMatToOpInput.CreateArray(inputImage, scaleInputToNetInputs, netInputSizes);
                                                var outputArray   = cvMatToOpOutput.CreateArray(inputImage, scaleInputToOutput, outputResolution);
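                                                // (Comment added for clarity) netInputArray holds one float blob per
                                                // requested scale, while outputArray is a single canvas at the output
                                                // resolution that the renderer draws the pose onto in Step 5.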
                                                // Step 4 - Estimate poseKeypoints
                                                poseExtractorPtr.Get().ForwardPass(netInputArray, imageSize, scaleInputToNetInputs);
                                                var poseKeypoints    = poseExtractorPtr.Get().GetPoseKeyPoints();
                                                var scaleNetToOutput = poseExtractorPtr.Get().GetScaleNetToOutput();
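                                                // A hedged sketch of how the keypoints could be inspected; the exact
                                                // Array<float> accessors (GetSize, ToString) are assumptions about the
                                                // OpenPoseDotNet binding and should be checked against its API:
                                                // var numberPeople = poseKeypoints.GetSize(0); // people x parts x {x, y, score}
                                                // OpenPose.Log($"Detected {numberPeople} people.", Priority.High);
                                                // OpenPose.Log(poseKeypoints.ToString(), Priority.High);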
                                                // Step 5 - Render pose
                                                poseGpuRenderer.RenderPose(outputArray, poseKeypoints, (float)scaleInputToOutput, scaleNetToOutput);
                                                // Step 6 - OpenPose output format to cv::Mat
                                                using (var outputImage = opOutputToCvMat.FormatToCvMat(outputArray))
                                                {
                                                    // ------------------------- SHOWING RESULT AND CLOSING -------------------------
                                                    // Show results
                                                    frameDisplayer.DisplayFrame(outputImage, 0); // Alternative: cv::imshow(outputImage) + cv::waitKey(0)
                                                                                                 // Measuring total time
                                                    timeBegin.Stop();
                                                    var totalTimeSec = timeBegin.ElapsedMilliseconds / 1000d;
                                                    var message      = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
                                                    OpenPose.Log(message, Priority.High);
                                                    // Return successful message
                                                    return(0);
                                                }
                                            }
                                        }
                                }
            }
            catch (Exception e)
            {
                OpenPose.Error(e.Message, -1, nameof(TutorialDeveloperPose2));
                return(-1);
            }
        }