private static void ConfigureWrapper(Wrapper<Datum> opWrapper)
{
    try
    {
        // Configuring OpenPose

        // logging_level
        OpenPose.CheckBool(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
        ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
        Profiler.SetDefaultX((ulong)Flags.ProfileSpeed);

        // Applying user defined configuration - GFlags to program variables
        // outputSize
        var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
        // netInputSize
        var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
        // faceNetInputSize
        var faceNetInputSize = OpenPose.FlagsToPoint(Flags.FaceNetResolution, "368x368 (multiples of 16)");
        // handNetInputSize
        var handNetInputSize = OpenPose.FlagsToPoint(Flags.HandNetResolution, "368x368 (multiples of 16)");
        // poseMode
        var poseMode = OpenPose.FlagsToPoseMode(Flags.Body);
        // poseModel
        var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
        // JSON saving
        if (!string.IsNullOrEmpty(Flags.WriteKeyPoint))
            OpenPose.Log("Flag `write_keypoint` is deprecated and will eventually be removed. Please, use `write_json` instead.", Priority.Max);
        // keypointScale
        var keypointScale = OpenPose.FlagsToScaleMode(Flags.KeyPointScale);
        // heatmaps to add
        var heatMapTypes = OpenPose.FlagsToHeatMaps(Flags.HeatmapsAddParts, Flags.HeatmapsAddBackground, Flags.HeatmapsAddPAFs);
        var heatMapScale = OpenPose.FlagsToHeatMapScaleMode(Flags.HeatmapsScale);
        // >1 camera view?
        var multipleView = (Flags.Enable3D || Flags.Views3D > 1);
        // Face and hand detectors
        var faceDetector = OpenPose.FlagsToDetector(Flags.FaceDetector);
        var handDetector = OpenPose.FlagsToDetector(Flags.HandDetector);
        // Enabling Google Logging
        const bool enableGoogleLogging = true;

        // Pose configuration (use WrapperStructPose{} for default and recommended configuration)
        var pose = new WrapperStructPose(poseMode, netInputSize, outputSize, keypointScale,
                                         Flags.NumGpu, Flags.NumGpuStart, Flags.ScaleNumber, (float)Flags.ScaleGap,
                                         OpenPose.FlagsToRenderMode(Flags.RenderPose, multipleView), poseModel,
                                         !Flags.DisableBlending, (float)Flags.AlphaPose, (float)Flags.AlphaHeatmap,
                                         Flags.PartToShow, Flags.ModelFolder, heatMapTypes, heatMapScale,
                                         Flags.PartCandidates, (float)Flags.RenderThreshold, Flags.NumberPeopleMax,
                                         Flags.MaximizePositives, Flags.FpsMax, Flags.PrototxtPath, Flags.CaffeModelPath,
                                         (float)Flags.UpsamplingRatio, enableGoogleLogging);
        // Face configuration (use op::WrapperStructFace{} to disable it)
        var face = new WrapperStructFace(Flags.Face, faceDetector, faceNetInputSize,
                                         OpenPose.FlagsToRenderMode(Flags.FaceRender, multipleView, Flags.RenderPose),
                                         (float)Flags.FaceAlphaPose, (float)Flags.FaceAlphaHeatmap, (float)Flags.FaceRenderThreshold);
        // Hand configuration (use op::WrapperStructHand{} to disable it)
        var hand = new WrapperStructHand(Flags.Hand, handDetector, handNetInputSize, Flags.HandScaleNumber, (float)Flags.HandScaleRange,
                                         OpenPose.FlagsToRenderMode(Flags.HandRender, multipleView, Flags.RenderPose),
                                         (float)Flags.HandAlphaPose, (float)Flags.HandAlphaHeatmap, (float)Flags.HandRenderThreshold);
        // Extra functionality configuration (use op::WrapperStructExtra{} to disable it)
        var extra = new WrapperStructExtra(Flags.Enable3D, Flags.MinViews3D, Flags.Identification, Flags.Tracking, Flags.IkThreads);
        // Output (comment or use default argument to disable any output)
        var output = new WrapperStructOutput(Flags.CliVerbose, Flags.WriteKeyPoint, OpenPose.StringToDataFormat(Flags.WriteKeyPointFormat),
                                             Flags.WriteJson, Flags.WriteCocoJson, Flags.WriteCocoJsonVariants, Flags.WriteCocoJsonVariant,
                                             Flags.WriteImages, Flags.WriteImagesFormat, Flags.WriteVideo, Flags.WriteVideoFps,
                                             Flags.WriteVideoWithAudio, Flags.WriteHeatmaps, Flags.WriteHeatmapsFormat,
                                             Flags.WriteVideoAdam, Flags.WriteBvh, Flags.UdpHost, Flags.UdpPort);

        opWrapper.Configure(pose);
        opWrapper.Configure(face);
        opWrapper.Configure(hand);
        opWrapper.Configure(extra);
        opWrapper.Configure(output);
        // No GUI. Equivalent to: opWrapper.configure(op::WrapperStructGui{});

        // Set to single-thread (for sequential processing and/or debugging and/or reducing latency)
        if (Flags.DisableMultiThread)
            opWrapper.DisableMultiThreading();

        OpenPose.Log("Stopping OpenPose...", Priority.High);
    }
    catch (Exception e)
    {
        OpenPose.Error(e.Message, -1, nameof(ConfigureWrapper));
    }
}
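ConfigureWrapper only fills in the wrapper's configuration; it does not start the wrapper or feed it images. The sketch below shows one way it might be used, assuming (as in the OpenPoseDotNet body-from-image samples) that the wrapper can be created with an asynchronous ThreadManagerMode and exposes Start() and an EmplaceAndPop(Mat) overload; RunOnSingleImage and imagePath are hypothetical names introduced here, not part of the samples above.

// Sketch only: assumes ThreadManagerMode.Asynchronous, Start() and
// EmplaceAndPop(Mat) are available on Wrapper<Datum>, as in the library's examples.
private static void RunOnSingleImage(string imagePath)
{
    using (var opWrapper = new Wrapper<Datum>(ThreadManagerMode.Asynchronous))
    {
        ConfigureWrapper(opWrapper);

        // Start the configured wrapper (no producer or GUI was configured above)
        OpenPose.Log("Starting thread(s)...", Priority.High);
        opWrapper.Start();

        // Push one image through the pipeline and pop the processed datum
        using (var imageToProcess = OpenPose.LoadImage(imagePath, LoadImageFlag.LoadImageColor))
        using (var datumProcessed = opWrapper.EmplaceAndPop(imageToProcess))
        {
            if (datumProcessed == null)
                OpenPose.Log("Image could not be processed.", Priority.High);
        }
    }
}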
private static int TutorialApiCpp9()
{
    try
    {
        OpenPose.Log("Starting OpenPose demo...", Priority.High);
        var timeBegin = new Stopwatch();
        timeBegin.Start();

        // logging_level
        OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
        ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
        Profiler.SetDefaultX((ulong)Flags.ProfileSpeed);

        // Applying user defined configuration - GFlags to program variables
        // outputSize
        var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
        // netInputSize
        var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
        // faceNetInputSize
        var faceNetInputSize = OpenPose.FlagsToPoint(Flags.FaceNetResolution, "368x368 (multiples of 16)");
        // handNetInputSize
        var handNetInputSize = OpenPose.FlagsToPoint(Flags.HandNetResolution, "368x368 (multiples of 16)");
        // poseModel
        var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
        // JSON saving
        if (!string.IsNullOrEmpty(Flags.WriteKeyPoint))
            OpenPose.Log("Flag `write_keypoint` is deprecated and will eventually be removed. Please, use `write_json` instead.", Priority.Max);
        // keypointScale
        var keypointScale = OpenPose.FlagsToScaleMode(Flags.KeyPointScale);
        // heatmaps to add
        var heatMapTypes = OpenPose.FlagsToHeatMaps(Flags.HeatmapsAddParts, Flags.HeatmapsAddBackground, Flags.HeatmapsAddPAFs);
        var heatMapScale = OpenPose.FlagsToHeatMapScaleMode(Flags.HeatmapsScale);
        // >1 camera view?
        var multipleView = (Flags.Enable3D || Flags.Views3D > 1);
        // Enabling Google Logging
        const bool enableGoogleLogging = true;

        // Initializing the user custom classes
        // Frames producer (e.g., video, webcam, ...)
        using (var wUserInput = new StdSharedPtr<UserWorkerProducer<UserDatum>>(new WUserInput(Flags.ImageDir)))
        // Processing
        using (var wUserPostProcessing = new StdSharedPtr<UserWorker<UserDatum>>(new WUserPostProcessing()))
        // GUI (Display)
        using (var wUserOutput = new StdSharedPtr<UserWorkerConsumer<UserDatum>>(new WUserOutput()))
        {
            // OpenPose wrapper
            OpenPose.Log("Configuring OpenPose...", Priority.High);
            using (var opWrapperT = new Wrapper<UserDatum>())
            {
                // Add custom input
                const bool workerInputOnNewThread = false;
                opWrapperT.SetWorker(WorkerType.Input, wUserInput, workerInputOnNewThread);
                // Add custom processing
                const bool workerProcessingOnNewThread = false;
                opWrapperT.SetWorker(WorkerType.PostProcessing, wUserPostProcessing, workerProcessingOnNewThread);
                // Add custom output
                const bool workerOutputOnNewThread = true;
                opWrapperT.SetWorker(WorkerType.Output, wUserOutput, workerOutputOnNewThread);

                // Pose configuration (use WrapperStructPose{} for default and recommended configuration)
                using (var pose = new WrapperStructPose(!Flags.BodyDisabled, netInputSize, outputSize, keypointScale,
                                                        Flags.NumGpu, Flags.NumGpuStart, Flags.ScaleNumber, (float)Flags.ScaleGap,
                                                        OpenPose.FlagsToRenderMode(Flags.RenderPose, multipleView), poseModel,
                                                        !Flags.DisableBlending, (float)Flags.AlphaPose, (float)Flags.AlphaHeatmap,
                                                        Flags.PartToShow, Flags.ModelFolder, heatMapTypes, heatMapScale,
                                                        Flags.PartCandidates, (float)Flags.RenderThreshold, Flags.NumberPeopleMax,
                                                        Flags.MaximizePositives, Flags.FpsMax, enableGoogleLogging))
                // Face configuration (use op::WrapperStructFace{} to disable it)
                using (var face = new WrapperStructFace(Flags.Face, faceNetInputSize,
                                                        OpenPose.FlagsToRenderMode(Flags.FaceRender, multipleView, Flags.RenderPose),
                                                        (float)Flags.FaceAlphaPose, (float)Flags.FaceAlphaHeatmap, (float)Flags.FaceRenderThreshold))
                // Hand configuration (use op::WrapperStructHand{} to disable it)
                using (var hand = new WrapperStructHand(Flags.Hand, handNetInputSize, Flags.HandScaleNumber, (float)Flags.HandScaleRange,
                                                        Flags.HandTracking, OpenPose.FlagsToRenderMode(Flags.HandRender, multipleView, Flags.RenderPose),
                                                        (float)Flags.HandAlphaPose, (float)Flags.HandAlphaHeatmap, (float)Flags.HandRenderThreshold))
                // Extra functionality configuration (use op::WrapperStructExtra{} to disable it)
                using (var extra = new WrapperStructExtra(Flags.Enable3D, Flags.MinViews3D, Flags.Identification, Flags.Tracking, Flags.IkThreads))
                // Output (comment or use default argument to disable any output)
                using (var output = new WrapperStructOutput(Flags.CliVerbose, Flags.WriteKeyPoint, OpenPose.StringToDataFormat(Flags.WriteKeyPointFormat),
                                                            Flags.WriteJson, Flags.WriteCocoJson, Flags.WriteCocoFootJson, Flags.WriteCocoJsonVariant,
                                                            Flags.WriteImages, Flags.WriteImagesFormat, Flags.WriteVideo, Flags.WriteVideoFps,
                                                            Flags.WriteHeatmaps, Flags.WriteHeatmapsFormat, Flags.WriteVideoAdam, Flags.WriteBvh,
                                                            Flags.UdpHost, Flags.UdpPort))
                {
                    opWrapperT.Configure(pose);
                    opWrapperT.Configure(face);
                    opWrapperT.Configure(hand);
                    opWrapperT.Configure(extra);
                    opWrapperT.Configure(output);
                    // No GUI. Equivalent to: opWrapper.configure(op::WrapperStructGui{});

                    // Set to single-thread (for sequential processing and/or debugging and/or reducing latency)
                    if (Flags.DisableMultiThread)
                        opWrapperT.DisableMultiThreading();

                    // Start, run, and stop processing - exec() blocks this thread until OpenPose wrapper has finished
                    OpenPose.Log("Starting thread(s)...", Priority.High);
                    opWrapperT.Exec();
                }
            }
        }

        // Measuring total time
        timeBegin.Stop();
        var totalTimeSec = timeBegin.ElapsedMilliseconds / 1000d;
        var message = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
        OpenPose.Log(message, Priority.High);

        // Return successful message
        return 0;
    }
    catch (Exception)
    {
        return -1;
    }
}
private static int OpenPoseDemo()
{
    try
    {
        OpenPose.Log("Starting OpenPose demo...", Priority.High);
        var timer = new Stopwatch();
        timer.Start();

        // logging_level
        OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
        ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
        Profiler.SetDefaultX((ulong)Flags.ProfileSpeed);
        // // For debugging
        // // Print all logging messages
        // op::ConfigureLog::setPriorityThreshold(op::Priority::None);
        // // Print out speed values faster
        // op::Profiler::setDefaultX(100);

        // Applying user defined configuration - GFlags to program variables
        // cameraSize
        var cameraSize = OpenPose.FlagsToPoint(Flags.CameraResolution, "-1x-1");
        // outputSize
        var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
        // netInputSize
        var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
        // faceNetInputSize
        var faceNetInputSize = OpenPose.FlagsToPoint(Flags.FaceNetResolution, "368x368 (multiples of 16)");
        // handNetInputSize
        var handNetInputSize = OpenPose.FlagsToPoint(Flags.HandNetResolution, "368x368 (multiples of 16)");
        // producerType
        var tie = OpenPose.FlagsToProducer(Flags.ImageDir, Flags.Video, Flags.IpCamera, Flags.Camera, Flags.FlirCamera, Flags.FlirCameraIndex);
        var producerType = tie.Item1;
        var producerString = tie.Item2;
        // poseModel
        var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
        // JSON saving
        if (!string.IsNullOrEmpty(Flags.WriteKeyPoint))
            OpenPose.Log("Flag `write_keypoint` is deprecated and will eventually be removed. Please, use `write_json` instead.", Priority.Max);
        // keyPointScale
        var keyPointScale = OpenPose.FlagsToScaleMode(Flags.KeyPointScale);
        // heatmaps to add
        var heatMapTypes = OpenPose.FlagsToHeatMaps(Flags.HeatmapsAddParts, Flags.HeatmapsAddBackground, Flags.HeatmapsAddPAFs);
        var heatMapScale = OpenPose.FlagsToHeatMapScaleMode(Flags.HeatmapsScale);
        // >1 camera view?
        var multipleView = (Flags.Enable3D || Flags.Views3D > 1 || Flags.FlirCamera);
        // Enabling Google Logging
        const bool enableGoogleLogging = true;

        // Configuring OpenPose
        OpenPose.Log("Configuring OpenPose...", Priority.High);
        using (var opWrapper = new Wrapper())
        {
            // Pose configuration (use WrapperStructPose{} for default and recommended configuration)
            using (var pose = new WrapperStructPose(!Flags.BodyDisabled, netInputSize, outputSize, keyPointScale,
                                                    Flags.NumGpu, Flags.NumGpuStart, Flags.ScaleNumber, (float)Flags.ScaleGap,
                                                    OpenPose.FlagsToRenderMode(Flags.RenderPose, multipleView), poseModel,
                                                    !Flags.DisableBlending, (float)Flags.AlphaPose, (float)Flags.AlphaHeatmap,
                                                    Flags.PartToShow, Flags.ModelFolder, heatMapTypes, heatMapScale,
                                                    Flags.PartCandidates, (float)Flags.RenderThreshold, Flags.NumberPeopleMax,
                                                    Flags.MaximizePositives, Flags.FpsMax, enableGoogleLogging))
            // Face configuration (use op::WrapperStructFace{} to disable it)
            using (var face = new WrapperStructFace(Flags.Face, faceNetInputSize,
                                                    OpenPose.FlagsToRenderMode(Flags.FaceRender, multipleView, Flags.RenderPose),
                                                    (float)Flags.FaceAlphaPose, (float)Flags.FaceAlphaHeatmap, (float)Flags.FaceRenderThreshold))
            // Hand configuration (use op::WrapperStructHand{} to disable it)
            using (var hand = new WrapperStructHand(Flags.Hand, handNetInputSize, Flags.HandScaleNumber, (float)Flags.HandScaleRange,
                                                    Flags.HandTracking, OpenPose.FlagsToRenderMode(Flags.HandRender, multipleView, Flags.RenderPose),
                                                    (float)Flags.HandAlphaPose, (float)Flags.HandAlphaHeatmap, (float)Flags.HandRenderThreshold))
            // Extra functionality configuration (use op::WrapperStructExtra{} to disable it)
            using (var extra = new WrapperStructExtra(Flags.Enable3D, Flags.MinViews3D, Flags.Identification, Flags.Tracking, Flags.IkThreads))
            // Producer (use default to disable any input)
            using (var input = new WrapperStructInput(producerType, producerString, Flags.FrameFirst, Flags.FrameStep, Flags.FrameLast,
                                                      Flags.ProcessRealTime, Flags.FrameFlip, Flags.FrameRotate, Flags.FramesRepeat,
                                                      cameraSize, Flags.CameraParameterFolder, !Flags.FrameKeepDistortion, (uint)Flags.Views3D))
            // Output (comment or use default argument to disable any output)
            using (var output = new WrapperStructOutput(Flags.CliVerbose, Flags.WriteKeyPoint, OpenPose.StringToDataFormat(Flags.WriteKeyPointFormat),
                                                        Flags.WriteJson, Flags.WriteCocoJson, Flags.WriteCocoFootJson, Flags.WriteCocoJsonVariant,
                                                        Flags.WriteImages, Flags.WriteImagesFormat, Flags.WriteVideo, Flags.WriteVideoFps,
                                                        Flags.WriteHeatmaps, Flags.WriteHeatmapsFormat, Flags.WriteVideoAdam, Flags.WriteBvh,
                                                        Flags.UdpHost, Flags.UdpPort))
            // GUI (comment or use default argument to disable any visual output)
            using (var gui = new WrapperStructGui(OpenPose.FlagsToDisplayMode(Flags.Display, Flags.Enable3D), !Flags.NoGuiVerbose, Flags.FullScreen))
            {
                opWrapper.Configure(pose);
                opWrapper.Configure(face);
                opWrapper.Configure(hand);
                opWrapper.Configure(extra);
                opWrapper.Configure(input);
                opWrapper.Configure(output);
                opWrapper.Configure(gui);

                // Set to single-thread (for sequential processing and/or debugging and/or reducing latency)
                if (Flags.DisableMultiThread)
                    opWrapper.DisableMultiThreading();

                // Start, run, and stop processing - exec() blocks this thread until OpenPose wrapper has finished
                OpenPose.Log("Starting thread(s)...", Priority.High);
                opWrapper.Exec();

                // Measuring total time (in seconds)
                timer.Stop();
                var totalTimeSec = timer.ElapsedMilliseconds / 1000d;
                var message = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
                OpenPose.Log(message, Priority.High);
            }
        }

        // Return successful message
        OpenPose.Log("Stopping OpenPose...", Priority.High);
        return 0;
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
        return -1;
    }
}
// Configures the OpenPose wrapper from the set flags.
// setInput - set to true if the input is configured here (e.g. video file input, image directory, webcam);
//            set to false if frames are fed in later (e.g. via emplaceAndPop on a Mat object or raw image).
public static void ConfigOnFlags(Wrapper<UserDatum> opWrapper, bool setInput)
{
    // Configuring OpenPose

    // logging_level
    OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
    ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
    Profiler.SetDefaultX((ulong)Flags.ProfileSpeed);

    // Applying user defined configuration - GFlags to program variables
    // producerType
    var tie = OpenPose.FlagsToProducer(Flags.ImageDir, Flags.Video, Flags.IpCamera, Flags.Camera, Flags.FlirCamera, Flags.FlirCameraIndex);
    var producerType = tie.Item1;
    var producerString = tie.Item2;
    // cameraSize
    var cameraSize = OpenPose.FlagsToPoint(Flags.CameraResolution, "-1x-1");
    // outputSize
    var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
    // netInputSize
    var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
    // faceNetInputSize
    var faceNetInputSize = OpenPose.FlagsToPoint(Flags.FaceNetResolution, "368x368 (multiples of 16)");
    // handNetInputSize
    var handNetInputSize = OpenPose.FlagsToPoint(Flags.HandNetResolution, "368x368 (multiples of 16)");
    // poseMode
    var poseMode = OpenPose.FlagsToPoseMode(Flags.Body);
    // poseModel
    var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
    // JSON saving
    if (!string.IsNullOrEmpty(Flags.WriteKeyPoint))
        OpenPose.Log("Flag `write_keypoint` is deprecated and will eventually be removed. Please, use `write_json` instead.", Priority.Max);
    // keyPointScale
    var keyPointScale = OpenPose.FlagsToScaleMode(Flags.KeyPointScale);
    // heatmaps to add
    var heatMapTypes = OpenPose.FlagsToHeatMaps(Flags.HeatmapsAddParts, Flags.HeatmapsAddBackground, Flags.HeatmapsAddPAFs);
    var heatMapScale = OpenPose.FlagsToHeatMapScaleMode(Flags.HeatmapsScale);
    // >1 camera view?
    var multipleView = (Flags.Enable3D || Flags.Views3D > 1 || Flags.FlirCamera);
    // Face and hand detectors
    var faceDetector = OpenPose.FlagsToDetector(Flags.FaceDetector);
    var handDetector = OpenPose.FlagsToDetector(Flags.HandDetector);
    // Enabling Google Logging
    const bool enableGoogleLogging = true;

    // Pose configuration (use WrapperStructPose() for default and recommended configuration)
    var pose = new WrapperStructPose(poseMode, netInputSize, outputSize, keyPointScale,
                                     Flags.NumGpu, Flags.NumGpuStart, Flags.ScaleNumber, (float)Flags.ScaleGap,
                                     OpenPose.FlagsToRenderMode(Flags.RenderPose, multipleView), poseModel,
                                     !Flags.DisableBlending, (float)Flags.AlphaPose, (float)Flags.AlphaHeatmap,
                                     Flags.PartToShow, Flags.ModelFolder, heatMapTypes, heatMapScale,
                                     Flags.PartCandidates, (float)Flags.RenderThreshold, Flags.NumberPeopleMax,
                                     Flags.MaximizePositives, Flags.FpsMax, Flags.PrototxtPath, Flags.CaffeModelPath,
                                     (float)Flags.UpsamplingRatio, enableGoogleLogging);
    // Face configuration (use WrapperStructFace() to disable it)
    var face = new WrapperStructFace(Flags.Face, faceDetector, faceNetInputSize,
                                     OpenPose.FlagsToRenderMode(Flags.FaceRender, multipleView, Flags.RenderPose),
                                     (float)Flags.FaceAlphaPose, (float)Flags.FaceAlphaHeatmap, (float)Flags.FaceRenderThreshold);
    // Hand configuration (use WrapperStructHand() to disable it)
    var hand = new WrapperStructHand(Flags.Hand, handDetector, handNetInputSize, Flags.HandScaleNumber, (float)Flags.HandScaleRange,
                                     OpenPose.FlagsToRenderMode(Flags.HandRender, multipleView, Flags.RenderPose),
                                     (float)Flags.HandAlphaPose, (float)Flags.HandAlphaHeatmap, (float)Flags.HandRenderThreshold);
    // Extra functionality configuration (use WrapperStructExtra() to disable it)
    var extra = new WrapperStructExtra(Flags.Enable3D, Flags.MinViews3D, Flags.Identification, Flags.Tracking, Flags.IkThreads);
    // Output (comment or use default argument to disable any output)
    var output = new WrapperStructOutput(Flags.CliVerbose, Flags.WriteKeyPoint, OpenPose.StringToDataFormat(Flags.WriteKeyPointFormat),
                                         Flags.WriteJson, Flags.WriteCocoJson, Flags.WriteCocoFootJson, Flags.WriteCocoJsonVariant,
                                         Flags.WriteImages, Flags.WriteImagesFormat, Flags.WriteVideo, Flags.WriteVideoWithAudio,
                                         Flags.WriteVideoFps, Flags.WriteHeatmaps, Flags.WriteHeatmapsFormat, Flags.WriteVideoAdam,
                                         Flags.WriteBvh, Flags.UdpHost, Flags.UdpPort);
    // GUI (comment or use default argument to disable any visual output)
    var gui = new WrapperStructGui(OpenPose.FlagsToDisplayMode(Flags.Display, Flags.Enable3D), !Flags.NoGuiVerbose, Flags.FullScreen);

    // Configure wrapper with the set values
    opWrapper.Configure(pose);
    opWrapper.Configure(face);
    opWrapper.Configure(hand);
    opWrapper.Configure(extra);
    if (setInput)
    {
        // Producer (use default to disable any input)
        var input = new WrapperStructInput(producerType, producerString, Flags.FrameFirst, Flags.FrameStep, Flags.FrameLast,
                                           Flags.ProcessRealTime, Flags.FrameFlip, Flags.FrameRotate, Flags.FramesRepeat,
                                           cameraSize, Flags.CameraParameterPath, Flags.FrameUndistort, Flags.Views3D);
        opWrapper.Configure(input);
    }
    opWrapper.Configure(output);

    // Set to single-thread (for sequential processing and/or debugging and/or reducing latency)
    if (Flags.DisableMultiThread)
        opWrapper.DisableMultiThreading();

    // Start the OpenPose wrapper
    opWrapper.Start();
}
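Because ConfigOnFlags ends by calling opWrapper.Start(), the setInput flag is the only difference at the call site: either the wrapper pulls frames from the producer configured via the flags, or frames are pushed in manually afterwards. The snippet below is purely illustrative; the emplace-and-pop call it refers to is the one mentioned in the comment above (see the RunOnSingleImage sketch earlier), and its exact OpenPoseDotNet name and overloads should be verified against the library.

// Hypothetical call sites for ConfigOnFlags (illustrative only).
// 1) Input configured from the flags (video file, image directory, webcam, ...):
ConfigOnFlags(opWrapper, true);   // wrapper pulls frames from the configured producer
// 2) No input producer; frames are pushed manually later, e.g. with an
//    emplace-and-pop style call on a Mat:
ConfigOnFlags(opWrapper, false);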
private static int TutorialDeveloperPose2()
{
    try
    {
        OpenPose.Log("Starting OpenPose demo...", Priority.High);
        var timeBegin = new Stopwatch();
        timeBegin.Start();

        // ------------------------- INITIALIZATION -------------------------
        // Step 1 - Set logging level
        //     - 0 will output all the logging messages
        //     - 255 will output nothing
        OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
        ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
        OpenPose.Log("", Priority.Low);

        // Step 2 - Read GFlags (user defined configuration)
        // outputSize
        var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
        // netInputSize
        var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
        // poseModel
        var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
        // Check no contradictory flags enabled
        if (Flags.AlphaPose < 0.0 || Flags.AlphaPose > 1.0)
            OpenPose.Error("Alpha value for blending must be in the range [0,1].", -1, nameof(TutorialDeveloperPose2));
        if (Flags.ScaleGap <= 0.0 && Flags.ScaleNumber > 1)
            OpenPose.Error("Incompatible flag configuration: scale_gap must be greater than 0 or scale_number = 1.", -1, nameof(TutorialDeveloperPose2));

        // Step 3 - Initialize all required classes
        using (var scaleAndSizeExtractor = new ScaleAndSizeExtractor(netInputSize, outputSize, Flags.ScaleNumber, Flags.ScaleGap))
        using (var cvMatToOpInput = new CvMatToOpInput(poseModel))
        using (var cvMatToOpOutput = new CvMatToOpOutput())
        using (var poseExtractorPtr = new StdSharedPtr<PoseExtractorCaffe>(new PoseExtractorCaffe(poseModel, Flags.ModelFolder, Flags.NumGpuStart)))
        using (var poseGpuRenderer = new PoseGpuRenderer(poseModel, poseExtractorPtr, (float)Flags.RenderThreshold, !Flags.DisableBlending, (float)Flags.AlphaPose))
        {
            poseGpuRenderer.SetElementToRender(Flags.PartToShow);

            using (var opOutputToCvMat = new OpOutputToCvMat())
            using (var frameDisplayer = new FrameDisplayer("OpenPose Tutorial - Example 2", outputSize))
            {
                // Step 4 - Initialize resources on desired thread (in this case single thread, i.e., we init resources here)
                poseExtractorPtr.Get().InitializationOnThread();
                poseGpuRenderer.InitializationOnThread();

                // ------------------------- POSE ESTIMATION AND RENDERING -------------------------
                // Step 1 - Read and load image, error if empty (possibly wrong path)
                // Alternative: cv::imread(Flags.image_path, CV_LOAD_IMAGE_COLOR);
                using (var inputImage = OpenPose.LoadImage(ImagePath, LoadImageFlag.LoadImageColor))
                {
                    if (inputImage.Empty)
                        OpenPose.Error("Could not open or find the image: " + ImagePath, -1, nameof(TutorialDeveloperPose2));
                    var imageSize = new Point<int>(inputImage.Cols, inputImage.Rows);

                    // Step 2 - Get desired scale sizes
                    var tuple = scaleAndSizeExtractor.Extract(imageSize);
                    var scaleInputToNetInputs = tuple.Item1;
                    var netInputSizes = tuple.Item2;
                    var scaleInputToOutput = tuple.Item3;
                    var outputResolution = tuple.Item4;

                    // Step 3 - Format input image to OpenPose input and output formats
                    var netInputArray = cvMatToOpInput.CreateArray(inputImage, scaleInputToNetInputs, netInputSizes);
                    var outputArray = cvMatToOpOutput.CreateArray(inputImage, scaleInputToOutput, outputResolution);

                    // Step 4 - Estimate poseKeypoints
                    poseExtractorPtr.Get().ForwardPass(netInputArray, imageSize, scaleInputToNetInputs);
                    var poseKeypoints = poseExtractorPtr.Get().GetPoseKeyPoints();
                    var scaleNetToOutput = poseExtractorPtr.Get().GetScaleNetToOutput();

                    // Step 5 - Render pose
                    poseGpuRenderer.RenderPose(outputArray, poseKeypoints, (float)scaleInputToOutput, scaleNetToOutput);

                    // Step 6 - OpenPose output format to cv::Mat
                    using (var outputImage = opOutputToCvMat.FormatToCvMat(outputArray))
                    {
                        // ------------------------- SHOWING RESULT AND CLOSING -------------------------
                        // Show results
                        frameDisplayer.DisplayFrame(outputImage, 0); // Alternative: cv::imshow(outputImage) + cv::waitKey(0)

                        // Measuring total time
                        timeBegin.Stop();
                        var totalTimeSec = timeBegin.ElapsedMilliseconds / 1000d;
                        var message = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
                        OpenPose.Log(message, Priority.High);

                        // Return successful message
                        return 0;
                    }
                }
            }
        }
    }
    catch (Exception e)
    {
        OpenPose.Error(e.Message, -1, nameof(TutorialDeveloperPose2));
        return -1;
    }
}