private string filePath; // input image file path

/*
 * initializer for image file input
 * netRes    = net resolution
 * faceRes   = face detector resolution, disable by setting to null
 * handRes   = hand detector resolution, disable by setting to null
 * modelPose = which pose model to use
 * filePath  = image file path
 * outPath   = output path for frames, disable by setting to null
 */
public ImageDetector(string netRes, string faceRes, string handRes, string modelPose, string filePath, string outPath)
{
    this.filePath = filePath; // set image file path

    // set logging level
    OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
    ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
    Profiler.SetDefaultX((ulong)Flags.ProfileSpeed);

    OpenPose.Log("Adding config...", Priority.High);

    // initialize flags with the input values
    InitFlags(netRes, faceRes, handRes, modelPose, outPath);

    // configure the openpose wrapper based on the flags that we set;
    // setInput = false because the image is fed in later (e.g. via EmplaceAndPop)
    opWrapper = new Wrapper<UserDatum>(ThreadManagerMode.Asynchronous);
    ConfigOnFlags(opWrapper, false);
}
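// Hypothetical usage sketch (not in the original source): constructing an ImageDetector.
// The resolutions and paths below are illustrative assumptions, not values from the sample.
var detector = new ImageDetector(netRes: "-1x368",             // pose net resolution
                                 faceRes: null,                // null disables the face detector
                                 handRes: null,                // null disables the hand detector
                                 modelPose: "BODY_25",         // pose model name
                                 filePath: "media/person.jpg", // image to process
                                 outPath: null);               // null disables frame output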
private static int TutorialApiCpp9()
{
    try
    {
        OpenPose.Log("Starting OpenPose demo...", Priority.High);
        var timeBegin = new Stopwatch();
        timeBegin.Start();

        // logging_level
        OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
        ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
        Profiler.SetDefaultX((ulong)Flags.ProfileSpeed);

        // Applying user defined configuration - GFlags to program variables
        // outputSize
        var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
        // netInputSize
        var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
        // faceNetInputSize
        var faceNetInputSize = OpenPose.FlagsToPoint(Flags.FaceNetResolution, "368x368 (multiples of 16)");
        // handNetInputSize
        var handNetInputSize = OpenPose.FlagsToPoint(Flags.HandNetResolution, "368x368 (multiples of 16)");
        // poseModel
        var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
        // JSON saving
        if (!string.IsNullOrEmpty(Flags.WriteKeyPoint))
            OpenPose.Log("Flag `write_keypoint` is deprecated and will eventually be removed. Please, use `write_json` instead.", Priority.Max);
        // keypointScale
        var keypointScale = OpenPose.FlagsToScaleMode(Flags.KeyPointScale);
        // heatmaps to add
        var heatMapTypes = OpenPose.FlagsToHeatMaps(Flags.HeatmapsAddParts, Flags.HeatmapsAddBackground, Flags.HeatmapsAddPAFs);
        var heatMapScale = OpenPose.FlagsToHeatMapScaleMode(Flags.HeatmapsScale);
        // >1 camera view?
        var multipleView = (Flags.Enable3D || Flags.Views3D > 1);
        // Enabling Google Logging
        const bool enableGoogleLogging = true;

        // Initializing the user custom classes
        // Frames producer (e.g., video, webcam, ...)
        using (var wUserInput = new StdSharedPtr<UserWorkerProducer<UserDatum>>(new WUserInput(Flags.ImageDir)))
        // Processing
        using (var wUserPostProcessing = new StdSharedPtr<UserWorker<UserDatum>>(new WUserPostProcessing()))
        // GUI (Display)
        using (var wUserOutput = new StdSharedPtr<UserWorkerConsumer<UserDatum>>(new WUserOutput()))
        {
            // OpenPose wrapper
            OpenPose.Log("Configuring OpenPose...", Priority.High);
            using (var opWrapperT = new Wrapper<UserDatum>())
            {
                // Add custom input
                const bool workerInputOnNewThread = false;
                opWrapperT.SetWorker(WorkerType.Input, wUserInput, workerInputOnNewThread);
                // Add custom processing
                const bool workerProcessingOnNewThread = false;
                opWrapperT.SetWorker(WorkerType.PostProcessing, wUserPostProcessing, workerProcessingOnNewThread);
                // Add custom output
                const bool workerOutputOnNewThread = true;
                opWrapperT.SetWorker(WorkerType.Output, wUserOutput, workerOutputOnNewThread);

                // Pose configuration (use WrapperStructPose{} for default and recommended configuration)
                using (var pose = new WrapperStructPose(!Flags.BodyDisabled, netInputSize, outputSize, keypointScale, Flags.NumGpu,
                                                        Flags.NumGpuStart, Flags.ScaleNumber, (float)Flags.ScaleGap,
                                                        OpenPose.FlagsToRenderMode(Flags.RenderPose, multipleView), poseModel,
                                                        !Flags.DisableBlending, (float)Flags.AlphaPose, (float)Flags.AlphaHeatmap,
                                                        Flags.PartToShow, Flags.ModelFolder, heatMapTypes, heatMapScale,
                                                        Flags.PartCandidates, (float)Flags.RenderThreshold, Flags.NumberPeopleMax,
                                                        Flags.MaximizePositives, Flags.FpsMax, enableGoogleLogging))
                // Face configuration (use op::WrapperStructFace{} to disable it)
                using (var face = new WrapperStructFace(Flags.Face, faceNetInputSize,
                                                        OpenPose.FlagsToRenderMode(Flags.FaceRender, multipleView, Flags.RenderPose),
                                                        (float)Flags.FaceAlphaPose, (float)Flags.FaceAlphaHeatmap, (float)Flags.FaceRenderThreshold))
                // Hand configuration (use op::WrapperStructHand{} to disable it)
                using (var hand = new WrapperStructHand(Flags.Hand, handNetInputSize, Flags.HandScaleNumber, (float)Flags.HandScaleRange,
                                                        Flags.HandTracking, OpenPose.FlagsToRenderMode(Flags.HandRender, multipleView, Flags.RenderPose),
                                                        (float)Flags.HandAlphaPose, (float)Flags.HandAlphaHeatmap, (float)Flags.HandRenderThreshold))
                // Extra functionality configuration (use op::WrapperStructExtra{} to disable it)
                using (var extra = new WrapperStructExtra(Flags.Enable3D, Flags.MinViews3D, Flags.Identification, Flags.Tracking, Flags.IkThreads))
                // Output (comment or use default argument to disable any output)
                using (var output = new WrapperStructOutput(Flags.CliVerbose, Flags.WriteKeyPoint,
                                                            OpenPose.StringToDataFormat(Flags.WriteKeyPointFormat), Flags.WriteJson,
                                                            Flags.WriteCocoJson, Flags.WriteCocoFootJson, Flags.WriteCocoJsonVariant,
                                                            Flags.WriteImages, Flags.WriteImagesFormat, Flags.WriteVideo, Flags.WriteVideoFps,
                                                            Flags.WriteHeatmaps, Flags.WriteHeatmapsFormat, Flags.WriteVideoAdam,
                                                            Flags.WriteBvh, Flags.UdpHost, Flags.UdpPort))
                {
                    opWrapperT.Configure(pose);
                    opWrapperT.Configure(face);
                    opWrapperT.Configure(hand);
                    opWrapperT.Configure(extra);
                    opWrapperT.Configure(output);
                    // No GUI. Equivalent to: opWrapper.configure(op::WrapperStructGui{});

                    // Set to single-thread (for sequential processing and/or debugging and/or reducing latency)
                    if (Flags.DisableMultiThread)
                        opWrapperT.DisableMultiThreading();

                    // Start, run, and stop processing - Exec() blocks this thread until OpenPose wrapper has finished
                    OpenPose.Log("Starting thread(s)...", Priority.High);
                    opWrapperT.Exec();
                }
            }
        }

        // Measuring total time
        timeBegin.Stop();
        var totalTimeSec = timeBegin.ElapsedMilliseconds / 1000d;
        var message = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
        OpenPose.Log(message, Priority.High);

        // Return successful message
        return 0;
    }
    catch (Exception)
    {
        return -1;
    }
}
private static int OpenPoseTutorialThread3()
{
    try
    {
        OpenPose.Log("Starting OpenPose demo...", Priority.High);
        var timeBegin = new Stopwatch();
        timeBegin.Start();

        // ------------------------- INITIALIZATION -------------------------
        // Step 1 - Set logging level
        //   - 0 will output all the logging messages
        //   - 255 will output nothing
        OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
        ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;

        // Step 2 - Setting thread workers && manager
        using (var threadManager = new ThreadManager<Datum>())
        {
            // Step 3 - Initializing the worker classes
            // Frames producer (e.g., video, webcam, ...)
            using (var wUserInput = new StdSharedPtr<UserWorkerProducer<Datum>>(new WUserInput(Flags.ImageDir)))
            {
                // Processing
                using (var wUserProcessing = new StdSharedPtr<UserWorker<Datum>>(new WUserPostProcessing()))
                {
                    // GUI (Display)
                    using (var wUserOutput = new StdSharedPtr<UserWorkerConsumer<Datum>>(new WUserOutput()))
                    {
                        // ------------------------- CONFIGURING THREADING -------------------------
                        // In this simple multi-thread example, we will do the following:
                        // 4 (virtual) queues: 0, 1, 2, 3
                        // 2 real queues: 1 and 2. The first and last queue ids (in this case 0 and 3) are not
                        // actual queues, but the beginning and end of the processing sequence
                        // 3 threads: 0, 1, 2
                        // wUserInput will generate frames (there is no real queue 0) and push them on queue 1
                        // wUserProcessing will pop frames from queue 1, process them, and push them on queue 2
                        // wUserOutput will pop frames from queue 2 and consume them (there is no real queue 3)
                        var threadId = 0UL;
                        var queueIn = 0UL;
                        var queueOut = 1UL;
                        threadManager.Add(threadId++, wUserInput, queueIn++, queueOut++);      // Thread 0, queues 0 -> 1
                        threadManager.Add(threadId++, wUserProcessing, queueIn++, queueOut++); // Thread 1, queues 1 -> 2
                        threadManager.Add(threadId++, wUserOutput, queueIn++, queueOut++);     // Thread 2, queues 2 -> 3

                        // ------------------------- STARTING AND STOPPING THREADING -------------------------
                        OpenPose.Log("Starting thread(s)...", Priority.High);
                        // Two different ways of running the program on a multithread environment
                        // Option a) Using the main thread (this thread) for processing (it saves 1 thread, recommended)
                        threadManager.Exec();
                        // Option b) Giving to the user the control of this thread
                        // // VERY IMPORTANT NOTE: if OpenCV is compiled with Qt support, this option will not work. Qt needs the main
                        // // thread to plot visual results, so the final GUI (which uses OpenCV) would return an exception similar to:
                        // // `QMetaMethod::invoke: Unable to invoke methods with return values in queued connections`
                        // // Start threads
                        // threadManager.start();
                        // // Keep program alive while running threads. Here the user could perform any other desired function
                        // while (threadManager.isRunning())
                        //     std::this_thread::sleep_for(std::chrono::milliseconds{33});
                        // // Stop and join threads
                        // op::log("Stopping thread(s)", op::Priority::High);
                        // threadManager.stop();
                    }
                }
            }
        }

        // ------------------------- CLOSING -------------------------
        // Measuring total time
        timeBegin.Stop();
        var totalTimeSec = timeBegin.ElapsedMilliseconds / 1000d;
        var message = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
        OpenPose.Log(message, Priority.High);

        // Return successful message
        return 0;
    }
    catch (Exception e)
    {
        OpenPose.Error(e.Message, -1, nameof(OpenPoseTutorialThread3));
        return -1;
    }
}
// configures the openpose wrapper from the flags that have been set
// setInput - set to true if setting the input during configuration (e.g. for video file input, image directory, webcam)
//            set to false if providing input later (e.g. for EmplaceAndPop on a Mat object or raw image)
public static void ConfigOnFlags(Wrapper<UserDatum> opWrapper, bool setInput)
{
    // Configuring OpenPose

    // logging_level
    OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
    ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
    Profiler.SetDefaultX((ulong)Flags.ProfileSpeed);

    // Applying user defined configuration - GFlags to program variables
    // producerType
    var tie = OpenPose.FlagsToProducer(Flags.ImageDir, Flags.Video, Flags.IpCamera, Flags.Camera, Flags.FlirCamera, Flags.FlirCameraIndex);
    var producerType = tie.Item1;
    var producerString = tie.Item2;
    // cameraSize
    var cameraSize = OpenPose.FlagsToPoint(Flags.CameraResolution, "-1x-1");
    // outputSize
    var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
    // netInputSize
    var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
    // faceNetInputSize
    var faceNetInputSize = OpenPose.FlagsToPoint(Flags.FaceNetResolution, "368x368 (multiples of 16)");
    // handNetInputSize
    var handNetInputSize = OpenPose.FlagsToPoint(Flags.HandNetResolution, "368x368 (multiples of 16)");
    // poseMode
    var poseMode = OpenPose.FlagsToPoseMode(Flags.Body);
    // poseModel
    var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
    // JSON saving
    if (!string.IsNullOrEmpty(Flags.WriteKeyPoint))
        OpenPose.Log("Flag `write_keypoint` is deprecated and will eventually be removed. Please, use `write_json` instead.", Priority.Max);
    // keyPointScale
    var keyPointScale = OpenPose.FlagsToScaleMode(Flags.KeyPointScale);
    // heatmaps to add
    var heatMapTypes = OpenPose.FlagsToHeatMaps(Flags.HeatmapsAddParts, Flags.HeatmapsAddBackground, Flags.HeatmapsAddPAFs);
    var heatMapScale = OpenPose.FlagsToHeatMapScaleMode(Flags.HeatmapsScale);
    // >1 camera view?
    var multipleView = (Flags.Enable3D || Flags.Views3D > 1 || Flags.FlirCamera);
    // Face and hand detectors
    var faceDetector = OpenPose.FlagsToDetector(Flags.FaceDetector);
    var handDetector = OpenPose.FlagsToDetector(Flags.HandDetector);
    // Enabling Google Logging
    const bool enableGoogleLogging = true;

    // Pose configuration (use WrapperStructPose() for default and recommended configuration)
    var pose = new WrapperStructPose(poseMode, netInputSize, outputSize, keyPointScale, Flags.NumGpu, Flags.NumGpuStart,
                                     Flags.ScaleNumber, (float)Flags.ScaleGap,
                                     OpenPose.FlagsToRenderMode(Flags.RenderPose, multipleView), poseModel,
                                     !Flags.DisableBlending, (float)Flags.AlphaPose, (float)Flags.AlphaHeatmap,
                                     Flags.PartToShow, Flags.ModelFolder, heatMapTypes, heatMapScale, Flags.PartCandidates,
                                     (float)Flags.RenderThreshold, Flags.NumberPeopleMax, Flags.MaximizePositives,
                                     Flags.FpsMax, Flags.PrototxtPath, Flags.CaffeModelPath,
                                     (float)Flags.UpsamplingRatio, enableGoogleLogging);
    // Face configuration (use WrapperStructFace() to disable it)
    var face = new WrapperStructFace(Flags.Face, faceDetector, faceNetInputSize,
                                     OpenPose.FlagsToRenderMode(Flags.FaceRender, multipleView, Flags.RenderPose),
                                     (float)Flags.FaceAlphaPose, (float)Flags.FaceAlphaHeatmap, (float)Flags.FaceRenderThreshold);
    // Hand configuration (use WrapperStructHand() to disable it)
    var hand = new WrapperStructHand(Flags.Hand, handDetector, handNetInputSize, Flags.HandScaleNumber,
                                     (float)Flags.HandScaleRange,
                                     OpenPose.FlagsToRenderMode(Flags.HandRender, multipleView, Flags.RenderPose),
                                     (float)Flags.HandAlphaPose, (float)Flags.HandAlphaHeatmap, (float)Flags.HandRenderThreshold);
    // Extra functionality configuration (use WrapperStructExtra() to disable it)
    var extra = new WrapperStructExtra(Flags.Enable3D, Flags.MinViews3D, Flags.Identification, Flags.Tracking, Flags.IkThreads);
    // Output (comment or use default argument to disable any output)
    var output = new WrapperStructOutput(Flags.CliVerbose, Flags.WriteKeyPoint,
                                         OpenPose.StringToDataFormat(Flags.WriteKeyPointFormat), Flags.WriteJson,
                                         Flags.WriteCocoJson, Flags.WriteCocoFootJson, Flags.WriteCocoJsonVariant,
                                         Flags.WriteImages, Flags.WriteImagesFormat, Flags.WriteVideo,
                                         Flags.WriteVideoWithAudio, Flags.WriteVideoFps, Flags.WriteHeatmaps,
                                         Flags.WriteHeatmapsFormat, Flags.WriteVideoAdam, Flags.WriteBvh,
                                         Flags.UdpHost, Flags.UdpPort);
    // GUI (constructed from the flags; note it is never passed to opWrapper.Configure below,
    // so no visual output is driven by the wrapper itself in this asynchronous setup)
    var gui = new WrapperStructGui(OpenPose.FlagsToDisplayMode(Flags.Display, Flags.Enable3D), !Flags.NoGuiVerbose, Flags.FullScreen);

    // configure the wrapper with the set values
    opWrapper.Configure(pose);
    opWrapper.Configure(face);
    opWrapper.Configure(hand);
    opWrapper.Configure(extra);
    if (setInput)
    {
        // Producer (use default to disable any input)
        var input = new WrapperStructInput(producerType, producerString, Flags.FrameFirst, Flags.FrameStep, Flags.FrameLast,
                                           Flags.ProcessRealTime, Flags.FrameFlip, Flags.FrameRotate, Flags.FramesRepeat,
                                           cameraSize, Flags.CameraParameterPath, Flags.FrameUndistort, Flags.Views3D);
        opWrapper.Configure(input);
    }
    opWrapper.Configure(output);

    // Set to single-thread (for sequential processing and/or debugging and/or reducing latency)
    if (Flags.DisableMultiThread)
        opWrapper.DisableMultiThreading();

    // start the openpose wrapper
    opWrapper.Start();
}
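// Hypothetical usage sketch (assumptions, not from the original source): feeding a wrapper that was
// configured with setInput = false a single image via EmplaceAndPop, following the pattern of
// OpenPoseDotNet's BodyFromImage sample. The exact accessor chain on the returned shared pointer
// (Get()/ToArray()/PoseKeyPoints) may differ between library versions, and the image path is made up.
var opWrapper = new Wrapper<UserDatum>(ThreadManagerMode.Asynchronous);
ConfigOnFlags(opWrapper, false); // configure everything except the input producer

using (var image = OpenPose.LoadImage("media/person.jpg", LoadImageFlag.LoadImageColor))
{
    var processed = opWrapper.EmplaceAndPop(image); // push the frame in, pop the processed datums out
    if (processed != null)
        OpenPose.Log($"Pose keypoints: {processed.Get().ToArray()[0].Get().PoseKeyPoints}", Priority.High);
}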
private static int OpenPoseDemo()
{
    try
    {
        OpenPose.Log("Starting OpenPose demo...", Priority.High);
        var timer = new Stopwatch();
        timer.Start();

        // logging_level
        OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
        ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
        Profiler.SetDefaultX((ulong)Flags.ProfileSpeed);
        // // For debugging
        // // Print all logging messages
        // op::ConfigureLog::setPriorityThreshold(op::Priority::None);
        // // Print out speed values faster
        // op::Profiler::setDefaultX(100);

        // Applying user defined configuration - GFlags to program variables
        // cameraSize
        var cameraSize = OpenPose.FlagsToPoint(Flags.CameraResolution, "-1x-1");
        // outputSize
        var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
        // netInputSize
        var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
        // faceNetInputSize
        var faceNetInputSize = OpenPose.FlagsToPoint(Flags.FaceNetResolution, "368x368 (multiples of 16)");
        // handNetInputSize
        var handNetInputSize = OpenPose.FlagsToPoint(Flags.HandNetResolution, "368x368 (multiples of 16)");
        // producerType
        var tie = OpenPose.FlagsToProducer(Flags.ImageDir, Flags.Video, Flags.IpCamera, Flags.Camera, Flags.FlirCamera, Flags.FlirCameraIndex);
        var producerType = tie.Item1;
        var producerString = tie.Item2;
        // poseModel
        var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
        // JSON saving
        if (!string.IsNullOrEmpty(Flags.WriteKeyPoint))
            OpenPose.Log("Flag `write_keypoint` is deprecated and will eventually be removed. Please, use `write_json` instead.", Priority.Max);
        // keyPointScale
        var keyPointScale = OpenPose.FlagsToScaleMode(Flags.KeyPointScale);
        // heatmaps to add
        var heatMapTypes = OpenPose.FlagsToHeatMaps(Flags.HeatmapsAddParts, Flags.HeatmapsAddBackground, Flags.HeatmapsAddPAFs);
        var heatMapScale = OpenPose.FlagsToHeatMapScaleMode(Flags.HeatmapsScale);
        // >1 camera view?
        var multipleView = (Flags.Enable3D || Flags.Views3D > 1 || Flags.FlirCamera);
        // Enabling Google Logging
        const bool enableGoogleLogging = true;

        // Configuring OpenPose
        OpenPose.Log("Configuring OpenPose...", Priority.High);
        using (var opWrapper = new Wrapper())
        {
            // Pose configuration (use WrapperStructPose{} for default and recommended configuration)
            using (var pose = new WrapperStructPose(!Flags.BodyDisabled, netInputSize, outputSize, keyPointScale, Flags.NumGpu,
                                                    Flags.NumGpuStart, Flags.ScaleNumber, (float)Flags.ScaleGap,
                                                    OpenPose.FlagsToRenderMode(Flags.RenderPose, multipleView), poseModel,
                                                    !Flags.DisableBlending, (float)Flags.AlphaPose, (float)Flags.AlphaHeatmap,
                                                    Flags.PartToShow, Flags.ModelFolder, heatMapTypes, heatMapScale,
                                                    Flags.PartCandidates, (float)Flags.RenderThreshold, Flags.NumberPeopleMax,
                                                    Flags.MaximizePositives, Flags.FpsMax, enableGoogleLogging))
            // Face configuration (use op::WrapperStructFace{} to disable it)
            using (var face = new WrapperStructFace(Flags.Face, faceNetInputSize,
                                                    OpenPose.FlagsToRenderMode(Flags.FaceRender, multipleView, Flags.RenderPose),
                                                    (float)Flags.FaceAlphaPose, (float)Flags.FaceAlphaHeatmap, (float)Flags.FaceRenderThreshold))
            // Hand configuration (use op::WrapperStructHand{} to disable it)
            using (var hand = new WrapperStructHand(Flags.Hand, handNetInputSize, Flags.HandScaleNumber, (float)Flags.HandScaleRange,
                                                    Flags.HandTracking, OpenPose.FlagsToRenderMode(Flags.HandRender, multipleView, Flags.RenderPose),
                                                    (float)Flags.HandAlphaPose, (float)Flags.HandAlphaHeatmap, (float)Flags.HandRenderThreshold))
            // Extra functionality configuration (use op::WrapperStructExtra{} to disable it)
            using (var extra = new WrapperStructExtra(Flags.Enable3D, Flags.MinViews3D, Flags.Identification, Flags.Tracking, Flags.IkThreads))
            // Producer (use default to disable any input)
            using (var input = new WrapperStructInput(producerType, producerString, Flags.FrameFirst, Flags.FrameStep, Flags.FrameLast,
                                                      Flags.ProcessRealTime, Flags.FrameFlip, Flags.FrameRotate, Flags.FramesRepeat,
                                                      cameraSize, Flags.CameraParameterFolder, !Flags.FrameKeepDistortion, (uint)Flags.Views3D))
            // Output (comment or use default argument to disable any output)
            using (var output = new WrapperStructOutput(Flags.CliVerbose, Flags.WriteKeyPoint,
                                                        OpenPose.StringToDataFormat(Flags.WriteKeyPointFormat), Flags.WriteJson,
                                                        Flags.WriteCocoJson, Flags.WriteCocoFootJson, Flags.WriteCocoJsonVariant,
                                                        Flags.WriteImages, Flags.WriteImagesFormat, Flags.WriteVideo, Flags.WriteVideoFps,
                                                        Flags.WriteHeatmaps, Flags.WriteHeatmapsFormat, Flags.WriteVideoAdam,
                                                        Flags.WriteBvh, Flags.UdpHost, Flags.UdpPort))
            // GUI (comment or use default argument to disable any visual output)
            using (var gui = new WrapperStructGui(OpenPose.FlagsToDisplayMode(Flags.Display, Flags.Enable3D), !Flags.NoGuiVerbose, Flags.FullScreen))
            {
                opWrapper.Configure(pose);
                opWrapper.Configure(face);
                opWrapper.Configure(hand);
                opWrapper.Configure(extra);
                opWrapper.Configure(input);
                opWrapper.Configure(output);
                opWrapper.Configure(gui);

                // Set to single-thread (for sequential processing and/or debugging and/or reducing latency)
                if (Flags.DisableMultiThread)
                    opWrapper.DisableMultiThreading();

                // Start, run, and stop processing - Exec() blocks this thread until OpenPose wrapper has finished
                OpenPose.Log("Starting thread(s)...", Priority.High);
                opWrapper.Exec();

                // Measuring total time
                timer.Stop();
                var totalTimeSec = timer.ElapsedMilliseconds / 1000d; // milliseconds to seconds
                var message = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
                OpenPose.Log(message, Priority.High);
            }
        }

        OpenPose.Log("Stopping OpenPose...", Priority.High);
        // Return successful message
        return 0;
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
        return -1;
    }
}
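// Hypothetical helper (not part of OpenPoseDotNet): sketches how a "-1x368" net resolution is
// typically resolved, assuming -1 means "derive this dimension from the input aspect ratio,
// rounded to a multiple of 16", as the "(multiples of 16)" defaults above suggest.
// E.g. a 1280x720 frame with net height 368 yields 368 * 1280 / 720 ~= 654, snapped to 656.
private static (int Width, int Height) ResolveNetInput(int imageWidth, int imageHeight, int netHeight = 368)
{
    // keep the input aspect ratio, then snap the width to a multiple of 16,
    // since the pose network expects dimensions divisible by 16
    var width = (int)Math.Round(netHeight * imageWidth / (double)imageHeight / 16d) * 16;
    return (Math.Max(16, width), netHeight);
}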
private static int TutorialDeveloperThread1()
{
    try
    {
        OpenPose.Log("Starting OpenPose demo...", Priority.High);
        var timeBegin = new Stopwatch();
        timeBegin.Start();

        // ------------------------- INITIALIZATION -------------------------
        // Step 1 - Set logging level
        //   - 0 will output all the logging messages
        //   - 255 will output nothing
        OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
        ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;

        // Step 2 - Read GFlags (user defined configuration)
        // cameraSize
        var cameraSize = OpenPose.FlagsToPoint(Flags.CameraResolution, "-1x-1");
        // outputSize
        var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
        // producerType
        var tie = OpenPose.FlagsToProducer(Flags.ImageDir, Flags.Video, Flags.IpCamera, Flags.Camera, Flags.FlirCamera, Flags.FlirCameraIndex);
        var producerType = tie.Item1;
        var producerString = tie.Item2;
        var displayProducerFpsMode = Flags.ProcessRealTime ? ProducerFpsMode.OriginalFps : ProducerFpsMode.RetrievalFps;
        using (var producerSharedPtr = OpenPose.CreateProducer(producerType, cameraSize, producerString, Flags.CameraParameterPath, Flags.FrameUndistort, Flags.Views3D))
        {
            producerSharedPtr.Get().SetProducerFpsMode(displayProducerFpsMode);
            OpenPose.Log("", Priority.Low);

            // Step 3 - Setting producer
            //var videoSeekSharedPtr = std::make_shared<std::pair<std::atomic<bool>, std::atomic<int>>>();
            //videoSeekSharedPtr->first = false;
            //videoSeekSharedPtr->second = 0;

            // Step 4 - Setting thread workers && manager
            // Note: native debugging may cause a crash here
            using (var threadManager = new ThreadManager<Datum>())
            {
                // Step 5 - Initializing the worker classes
                // Frames producer (e.g., video, webcam, ...)
                using (var datumProducer = new StdSharedPtr<DatumProducer<Datum>>(new DatumProducer<Datum>(producerSharedPtr)))
                using (var wDatumProducer = new StdSharedPtr<WDatumProducer<Datum>>(new WDatumProducer<Datum>(datumProducer)))
                {
                    // GUI (Display)
                    using (var gui = new StdSharedPtr<Gui>(new Gui(outputSize, Flags.FullScreen, threadManager.GetIsRunningSharedPtr())))
                    using (var wGui = new StdSharedPtr<WGui<Datum>>(new WGui<Datum>(gui)))
                    {
                        // ------------------------- CONFIGURING THREADING -------------------------
                        // In this simple multi-thread example, we will do the following:
                        // 3 (virtual) queues: 0, 1, 2
                        // 1 real queue: 1. The first and last queue ids (in this case 0 and 2) are not actual
                        // queues, but the beginning and end of the processing sequence
                        // 2 threads: 0, 1
                        // wDatumProducer will generate frames (there is no real queue 0) and push them on queue 1
                        // wGui will pop frames from queue 1 and process them (there is no real queue 2)
                        var threadId = 0UL;
                        var queueIn = 0UL;
                        var queueOut = 1UL;
                        threadManager.Add(threadId++, wDatumProducer, queueIn++, queueOut++); // Thread 0, queues 0 -> 1
                        threadManager.Add(threadId++, wGui, queueIn++, queueOut++);           // Thread 1, queues 1 -> 2
                        // Equivalent single-thread version (option a)
                        // const auto threadId = 0ull;
                        // auto queueIn = 0ull;
                        // auto queueOut = 1ull;
                        // threadManager.add(threadId, wDatumProducer, queueIn++, queueOut++); // Thread 0, queues 0 -> 1
                        // threadManager.add(threadId, wGui, queueIn++, queueOut++);           // Thread 0, queues 1 -> 2
                        // Equivalent single-thread version (option b)
                        // const auto threadId = 0ull;
                        // const auto queueIn = 0ull;
                        // const auto queueOut = 1ull;
                        // threadManager.add(threadId, {wDatumProducer, wGui}, queueIn, queueOut); // Thread 0, queues 0 -> 1

                        // ------------------------- STARTING AND STOPPING THREADING -------------------------
                        OpenPose.Log("Starting thread(s)...", Priority.High);
                        // Two different ways of running the program on a multithread environment
                        // Option a) Using the main thread (this thread) for processing (it saves 1 thread, recommended)
                        threadManager.Exec();
                        // Option b) Giving to the user the control of this thread
                        // // VERY IMPORTANT NOTE: if OpenCV is compiled with Qt support, this option will not work. Qt needs the main
                        // // thread to plot visual results, so the final GUI (which uses OpenCV) would return an exception similar to:
                        // // `QMetaMethod::invoke: Unable to invoke methods with return values in queued connections`
                        // // Start threads
                        // threadManager.start();
                        // // Keep program alive while running threads. Here the user could perform any other desired function
                        // while (threadManager.isRunning())
                        //     std::this_thread::sleep_for(std::chrono::milliseconds{33});
                        // // Stop and join threads
                        // op::log("Stopping thread(s)", op::Priority::High);
                        // threadManager.stop();
                    }
                }
            }
        }

        // ------------------------- CLOSING -------------------------
        // Measuring total time
        timeBegin.Stop();
        var totalTimeSec = timeBegin.ElapsedMilliseconds / 1000d;
        var message = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
        OpenPose.Log(message, Priority.High);

        // Return successful message
        return 0;
    }
    catch (Exception e)
    {
        OpenPose.Error(e.Message, -1, nameof(TutorialDeveloperThread1));
        return -1;
    }
}
private static int TutorialDeveloperPose2()
{
    try
    {
        OpenPose.Log("Starting OpenPose demo...", Priority.High);
        var timeBegin = new Stopwatch();
        timeBegin.Start();

        // ------------------------- INITIALIZATION -------------------------
        // Step 1 - Set logging level
        //   - 0 will output all the logging messages
        //   - 255 will output nothing
        OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
        ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
        OpenPose.Log("", Priority.Low);

        // Step 2 - Read GFlags (user defined configuration)
        // outputSize
        var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
        // netInputSize
        var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
        // poseModel
        var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
        // Check no contradictory flags enabled
        if (Flags.AlphaPose < 0.0 || Flags.AlphaPose > 1.0)
            OpenPose.Error("Alpha value for blending must be in the range [0,1].", -1, nameof(TutorialDeveloperPose2));
        if (Flags.ScaleGap <= 0.0 && Flags.ScaleNumber > 1)
            OpenPose.Error("Incompatible flag configuration: scale_gap must be greater than 0 or scale_number = 1.", -1, nameof(TutorialDeveloperPose2));

        // Step 3 - Initialize all required classes
        using (var scaleAndSizeExtractor = new ScaleAndSizeExtractor(netInputSize, outputSize, Flags.ScaleNumber, Flags.ScaleGap))
        using (var cvMatToOpInput = new CvMatToOpInput(poseModel))
        using (var cvMatToOpOutput = new CvMatToOpOutput())
        using (var poseExtractorPtr = new StdSharedPtr<PoseExtractorCaffe>(new PoseExtractorCaffe(poseModel, Flags.ModelFolder, Flags.NumGpuStart)))
        using (var poseGpuRenderer = new PoseGpuRenderer(poseModel, poseExtractorPtr, (float)Flags.RenderThreshold, !Flags.DisableBlending, (float)Flags.AlphaPose))
        {
            poseGpuRenderer.SetElementToRender(Flags.PartToShow);

            using (var opOutputToCvMat = new OpOutputToCvMat())
            using (var frameDisplayer = new FrameDisplayer("OpenPose Tutorial - Example 2", outputSize))
            {
                // Step 4 - Initialize resources on desired thread (in this case single thread, i.e., we init resources here)
                poseExtractorPtr.Get().InitializationOnThread();
                poseGpuRenderer.InitializationOnThread();

                // ------------------------- POSE ESTIMATION AND RENDERING -------------------------
                // Step 1 - Read and load image, error if empty (possibly wrong path)
                // Alternative: cv::imread(Flags.image_path, CV_LOAD_IMAGE_COLOR);
                using (var inputImage = OpenPose.LoadImage(ImagePath, LoadImageFlag.LoadImageColor))
                {
                    if (inputImage.Empty)
                        OpenPose.Error("Could not open or find the image: " + ImagePath, -1, nameof(TutorialDeveloperPose2));
                    var imageSize = new Point<int>(inputImage.Cols, inputImage.Rows);

                    // Step 2 - Get desired scale sizes
                    var tuple = scaleAndSizeExtractor.Extract(imageSize);
                    var scaleInputToNetInputs = tuple.Item1;
                    var netInputSizes = tuple.Item2;
                    var scaleInputToOutput = tuple.Item3;
                    var outputResolution = tuple.Item4;

                    // Step 3 - Format input image to OpenPose input and output formats
                    var netInputArray = cvMatToOpInput.CreateArray(inputImage, scaleInputToNetInputs, netInputSizes);
                    var outputArray = cvMatToOpOutput.CreateArray(inputImage, scaleInputToOutput, outputResolution);

                    // Step 4 - Estimate poseKeypoints
                    poseExtractorPtr.Get().ForwardPass(netInputArray, imageSize, scaleInputToNetInputs);
                    var poseKeypoints = poseExtractorPtr.Get().GetPoseKeyPoints();
                    var scaleNetToOutput = poseExtractorPtr.Get().GetScaleNetToOutput();

                    // Step 5 - Render pose
                    poseGpuRenderer.RenderPose(outputArray, poseKeypoints, (float)scaleInputToOutput, scaleNetToOutput);

                    // Step 6 - OpenPose output format to cv::Mat
                    using (var outputImage = opOutputToCvMat.FormatToCvMat(outputArray))
                    {
                        // ------------------------- SHOWING RESULT AND CLOSING -------------------------
                        // Show results
                        frameDisplayer.DisplayFrame(outputImage, 0); // Alternative: cv::imshow(outputImage) + cv::waitKey(0)

                        // Measuring total time
                        timeBegin.Stop();
                        var totalTimeSec = timeBegin.ElapsedMilliseconds / 1000d;
                        var message = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
                        OpenPose.Log(message, Priority.High);

                        // Return successful message
                        return 0;
                    }
                }
            }
        }
    }
    catch (Exception e)
    {
        OpenPose.Error(e.Message, -1, nameof(TutorialDeveloperPose2));
        return -1;
    }
}