protected override void WorkConsumer(StdSharedPtr <UserDatum>[] datumsPtr)
{
    // Consumer stage of the thread pipeline: shows the first processed frame of the batch.
    try
    {
        // User's displaying/saving/other processing here
        //   datum.cvOutputData: rendered frame with pose or heatmaps
        //   datum.poseKeypoints: Array<float> with the estimated pose
        if (datumsPtr != null && datumsPtr.Length != 0)
        {
            // Only the first datum of the batch is displayed; the native op::Matrix is
            // converted to a cv::Mat view for OpenCV's HighGUI.
            using (var cvOutputData = OpenPose.OP_OP2CVCONSTMAT(datumsPtr[0].Get().CvOutputData))
            {
                Cv.ImShow($"{OpenPose.OpenPoseNameAndVersion()} - Tutorial Thread API", cvOutputData);
                // It displays the image and sleeps at least 1 ms (it usually sleeps ~5-10 msec to display the image)
                Cv.WaitKey(1);
            }
        }
    }
    catch (Exception e)
    {
        // Stop the worker so the whole thread system can wind down, then surface the error.
        OpenPose.Log("Some kind of unexpected error happened.");
        this.Stop();
        OpenPose.Error(e.Message, -1, nameof(this.WorkConsumer));
    }
    // NOTE(review): stray #endregion with no matching #region visible in this chunk — confirm against the full file.
    #endregion
}
/// <summary>
/// Runs the synchronous OpenPose demo: configure the wrapper, then Exec() blocks until done.
/// </summary>
/// <returns>0 on success, -1 on failure.</returns>
private static int TutorialApiCpp()
{
    try
    {
        OpenPose.Log("Starting OpenPose demo...", Priority.High);
        using (var opTimer = OpenPose.GetTimerInit())
        {
            // OpenPose wrapper
            OpenPose.Log("Configuring OpenPose...", Priority.High);
            using (var opWrapper = new Wrapper <Datum>())
            {
                ConfigureWrapper(opWrapper);

                // Start, run, and stop processing - Exec() blocks this thread until OpenPose wrapper has finished
                OpenPose.Log("Starting thread(s)...", Priority.High);
                opWrapper.Exec();
            }

            // Measuring total time
            OpenPose.PrintTime(opTimer, "OpenPose demo successfully finished. Total time: ", " seconds.", Priority.High);
        }

        // Return
        return 0;
    }
    catch (Exception e)
    {
        // Previously the exception was swallowed silently; log it (consistent with the
        // catch blocks of the thread tutorials elsewhere in this file) before failing.
        OpenPose.Error(e.Message, -1, nameof(TutorialApiCpp));
        return -1;
    }
}
/// <summary>
/// Runs the custom-datum demo: configure the wrapper with a CustomDatum type and execute it.
/// </summary>
/// <returns>0 on success, -1 on failure.</returns>
private static int TutorialAddModule1()
{
    try
    {
        OpenPose.Log("Starting OpenPose demo...", Priority.High);
        using (var opTimer = OpenPose.GetTimerInit())
        {
            // Configuring OpenPose
            OpenPose.Log("Configuring OpenPose...", Priority.High);
            using (var opWrapperT = new Wrapper <CustomDatum>())
            {
                ConfigureWrapper(opWrapperT);

                OpenPose.Log("Starting thread(s)...", Priority.High);
                // Start, run & stop threads - it blocks this thread until all others have finished
                opWrapperT.Exec();

                // Measuring total time
                OpenPose.PrintTime(opTimer, "OpenPose demo successfully finished. Total time: ", " seconds.", Priority.High);
            }
        }

        // Return successful message
        return 0;
    }
    catch (Exception e)
    {
        // Previously the exception was swallowed silently; log it (consistent with the
        // catch blocks of the thread tutorials elsewhere in this file) before failing.
        OpenPose.Error(e.Message, -1, nameof(TutorialAddModule1));
        return -1;
    }
}
// Starts the detector: pops processed frames from the wrapper and reports the keypoints
// of each frame through `progress` until input is exhausted or an exit is requested.
public override void Run(IProgress <DetectionResults> progress)
{
    // Detection loop: one iteration per processed frame; flipping userWantsToExit ends it.
    while (!userWantsToExit)
    {
        // Retrieve the latest detected frame from the input configured earlier.
        var popped = opWrapper.WaitAndPop(out datumProcessed);
        if (!popped)
        {
            // No further frame could be retrieved, so the detection loop ends here.
            OpenPose.Log("Processed datum could not be emplaced.", Priority.High);
            userWantsToExit = true;
            break;
        }

        // Skip frames whose payload is missing or empty.
        if (datumProcessed == null || !datumProcessed.TryGet(out data) || data.Empty)
            continue;

        // The first element of the vector holds the keypoint data for this frame.
        Datum keypointDatum = data.ToArray()[0].Get();
        progress.Report(new DetectionResults()
        {
            data = keypointDatum, // keypoint data
            isFinished = false    // detection still in progress
        });
    }

    // Final report: no data, isFinished flagged so the consumer knows detection ended.
    progress.Report(new DetectionResults()
    {
        data = null,
        isFinished = true
    });

    // Release the native OpenPose wrapper.
    opWrapper.Dispose();
}
public StdSharedPtr <StdVector <UserDatum> > CreateDatum()
{
    // Produces the next image from disk as a datum vector; returns null when there is
    // nothing left to read (or a frame failed to load), marking this producer closed.
    // Close program when empty frame
    if (this._Closed || this._ImageFiles.Length <= this._Counter)
    {
        OpenPose.Log("Last frame read and added to queue. Closing program after it is processed.", Priority.High);
        // This function stops this worker, which will eventually stop the whole thread system once all the
        // frames have been processed
        this._Closed = true;
        return(null);
    }
    else
    {
        // Create new datum
        var tmp = new StdVector <UserDatum>();
        tmp.EmplaceBack();
        var datumsPtr = new StdSharedPtr <StdVector <UserDatum> >(tmp);
        // NOTE(review): assumes ToArray()[0] yields a wrapper over the native element (not a
        // detached copy), so the assignment below mutates the vector's element — confirm
        // against the StdVector binding.
        var datum = tmp.ToArray()[0];

        // Fill datum with the next image; the counter advances past it.
        using (var mat = Cv.ImRead(this._ImageFiles[this._Counter++]))
            datum.CvInputData = mat;

        // If empty frame -> return nullptr
        if (datum.CvInputData.Empty)
        {
            OpenPose.Log($"Empty frame detected on path: {this._ImageFiles[this._Counter - 1]}. Closing program.", Priority.High);
            this._Closed = true;
            datumsPtr = null;
        }

        return(datumsPtr);
    }
}
/// <summary>
/// Runs the asynchronous-output demo: OpenPose produces frames and the user pops and
/// displays them until the wrapper stops running.
/// </summary>
/// <returns>0 on success, -1 on failure.</returns>
private static int TutorialApiCpp()
{
    try
    {
        OpenPose.Log("Starting OpenPose demo...", Priority.High);
        using (var opTimer = OpenPose.GetTimerInit())
        {
            // Configuring OpenPose
            OpenPose.Log("Configuring OpenPose...", Priority.High);
            using (var opWrapper = new Wrapper <Datum>(ThreadManagerMode.AsynchronousOut))
            {
                ConfigureWrapper(opWrapper);

                // Start the wrapper threads; with AsynchronousOut the user consumes the output
                OpenPose.Log("Starting thread(s)...", Priority.High);
                opWrapper.Start();

                // User processing
                var userOutputClass = new UserOutputClass();
                var userWantsToExit = false;
                while (!userWantsToExit)
                {
                    // Pop frame
                    if (opWrapper.WaitAndPop(out var datumProcessed))
                    {
                        if (!Flags.NoDisplay)
                        {
                            // Display returns true when the user asked to exit (e.g. pressed Esc)
                            userWantsToExit = userOutputClass.Display(datumProcessed);
                        }
                        userOutputClass.PrintKeyPoints(datumProcessed);
                        datumProcessed.Dispose();
                    }
                    // If OpenPose finished reading images
                    else if (!opWrapper.IsRunning)
                    {
                        break;
                    }
                    // Something else happened
                    else
                    {
                        OpenPose.Log("Processed datum could not be emplaced.", Priority.High);
                    }
                }

                OpenPose.Log("Stopping thread(s)", Priority.High);
                opWrapper.Stop();

                // Measuring total time
                OpenPose.PrintTime(opTimer, "OpenPose demo successfully finished. Total time: ", " seconds.", Priority.High);
            }
        }

        // Return
        return 0;
    }
    catch (Exception e)
    {
        // Previously the exception was swallowed silently; log it before failing.
        OpenPose.Error(e.Message, -1, nameof(TutorialApiCpp));
        return -1;
    }
}
/// <summary>Exercises OpenPose.Log once at every defined <see cref="Priority"/> level.</summary>
public void Log()
{
    const string message = nameof(this.Log);
    const string function = nameof(this.Log);
    const string file = "OpenPoseTest.cs";
    foreach (var priority in Enum.GetValues(typeof(Priority)).Cast <Priority>())
    {
        // Pass the constant directly; wrapping it in $"{message}" was a redundant
        // interpolation that allocated a new identical string per call.
        OpenPose.Log(message, priority, -1, function, file);
    }
}
/// <summary>
/// Runs the asynchronous-input demo: the user pushes frames into OpenPose via WaitAndEmplace
/// until the input source reports it is finished.
/// </summary>
/// <returns>0 on success, -1 on failure.</returns>
private static int TutorialApiCpp()
{
    try
    {
        OpenPose.Log("Starting OpenPose demo...", Priority.High);
        using (var opTimer = OpenPose.GetTimerInit())
        {
            // Configuring OpenPose
            OpenPose.Log("Configuring OpenPose...", Priority.High);
            using (var opWrapper = new Wrapper <Datum>(ThreadManagerMode.AsynchronousIn))
            {
                ConfigureWrapper(opWrapper);

                // Start the wrapper threads; with AsynchronousIn the user supplies input frames
                OpenPose.Log("Starting thread(s)...", Priority.High);
                opWrapper.Start();

                // User processing: push one datum per image until the producer runs out
                var userInputClass = new UserInputClass(Flags.ImageDir);
                var userWantsToExit = false;
                while (!userWantsToExit && !userInputClass.IsFinished())
                {
                    // Push frame
                    using (var datumToProcess = userInputClass.CreateDatum())
                    {
                        if (datumToProcess != null)
                        {
                            var successfullyEmplaced = opWrapper.WaitAndEmplace(datumToProcess);
                            if (!successfullyEmplaced)
                            {
                                OpenPose.Log("Processed datum could not be emplaced.", Priority.High);
                            }
                        }
                    }
                }

                OpenPose.Log("Stopping thread(s)", Priority.High);
                opWrapper.Stop();
            }

            // Measuring total time
            OpenPose.PrintTime(opTimer, "OpenPose demo successfully finished. Total time: ", " seconds.", Priority.High);
        }

        // Return
        return 0;
    }
    catch (Exception e)
    {
        // Previously the exception was swallowed silently; log it before failing.
        OpenPose.Error(e.Message, -1, nameof(TutorialApiCpp));
        return -1;
    }
}
/// <summary>
/// Runs OpenPose on a GDI+ bitmap: copies its pixels into a tightly-packed 24bpp byte array
/// and feeds that to the wrapper.
/// </summary>
/// <param name="bmp">Input image; locked read-only as 24bpp RGB for the duration of the call.</param>
/// <returns>The datum holding the detected keypoints, or null when processing failed.</returns>
private Datum ProcessBitmap(Bitmap bmp)
{
    BitmapData bmpData = null;
    try
    {
        var width = bmp.Width;
        var height = bmp.Height;
        bmpData = bmp.LockBits(new Rectangle(0, 0, width, height), ImageLockMode.ReadOnly, PixelFormat.Format24bppRgb);
        var stride = bmpData.Stride;
        var scan0 = bmpData.Scan0;
        unsafe
        {
            // Copy the bitmap row-by-row into a packed byte array: the GDI+ stride may
            // include padding, so rows cannot be copied in one block.
            var line = width * 3;
            var image = new byte[line * height];
            var ps = (byte *)scan0;
            for (var h = 0; h < height; h++)
            {
                Marshal.Copy((IntPtr)ps, image, line * h, line);
                ps += stride;
            }

            // use openpose wrapper to calculate keypoints using byte array object as input image
            this.datumProcessed = opWrapper.EmplaceAndPop(image, width, height, MatType.CV_8UC3);
            if (this.datumProcessed != null) // if output data exists
            {
                // Fixed: the original repeated the `datumProcessed != null` test here even
                // though this branch is only reached when it already holds.
                if (this.datumProcessed.TryGet(out this.data) && !this.data.Empty)
                {
                    return this.data.ToArray()[0].Get(); // datum object containing the keypoint data
                }

                // bad input (retrievable but empty, or TryGet failed)
                OpenPose.Log("Image could not be processed.", Priority.High);
            }
            // NOTE: a null datumProcessed falls through silently, matching the original behavior.
        }
    }
    finally
    {
        // Always unlock the bitmap, even when processing throws.
        if (bmpData != null)
        {
            bmp.UnlockBits(bmpData);
        }
    }

    return null;
}
public Datum ProcessFrame(Mat image) // gets result keypoints from OpenPoseDotNet.Mat
{
    // Runs OpenPose detection on a single Mat and returns the resulting datum,
    // or null when nothing (or an empty result) came back.
    datumProcessed = opWrapper.EmplaceAndPop(image); // method detects on OpenPoseDotNet.Mat
    if (datumProcessed != null && datumProcessed.TryGet(out data) && !data.Empty) // if datumProcessed exists && we can get the data successfully && retrieved data exists
    {
        Datum result = data.ToArray()[0].Get(); // retrieve datum object which contains the keypoint data
        // NOTE(review): the wrapper is disposed after the first successful frame, so a second
        // call to ProcessFrame would run against a disposed wrapper — confirm that single-shot
        // use is intended by the callers.
        opWrapper.Dispose(); // dispose of wrapper after detection
        return(result);
    }
    else
    {
        OpenPose.Log("Nullptr or empty datumsPtr found.", Priority.High);
        return(null);
    }
}
protected override StdSharedPtr <StdVector <StdSharedPtr <UserDatum> > > WorkProducer()
{
    // Producer stage of the thread pipeline: emits one datum per image file, and stops the
    // worker when the file list is exhausted or a frame fails to load.
    try
    {
        // Close program when empty frame
        if (this._ImageFiles.Length <= this._Counter)
        {
            OpenPose.Log("Last frame read and added to queue. Closing program after it is processed.", Priority.High);
            // This function stops this worker, which will eventually stop the whole thread system once all the
            // frames have been processed
            this.Stop();
            return(null);
        }
        else
        {
            // Create new datum: a shared vector holding one shared UserDatum
            var vector = new StdVector <StdSharedPtr <UserDatum> >();
            var datumsPtr = new StdSharedPtr <StdVector <StdSharedPtr <UserDatum> > >(vector);
            datumsPtr.Get().EmplaceBack();
            var datum = datumsPtr.Get().At(0);

            // C# cannot set pointer object by using assignment operator
            datum.Reset(new UserDatum());

            // Fill datum: read the next image from disk and convert cv::Mat -> op::Matrix
            using (var cvInputData = Cv.ImRead(this._ImageFiles[this._Counter++]))
            using (var inputData = OpenPose.OP_CV2OPCONSTMAT(cvInputData))
                datum.Get().CvInputData = inputData;

            // If empty frame -> return nullptr
            if (datum.Get().CvInputData.Empty)
            {
                OpenPose.Log($"Empty frame detected on path: {this._ImageFiles[this._Counter - 1]}. Closing program.", Priority.High);
                this.Stop();
                datumsPtr = null;
            }

            return(datumsPtr);
        }
    }
    catch (Exception e)
    {
        // Stop the worker and surface the error; returning null ends this producer's output.
        OpenPose.Log("Some kind of unexpected error happened.");
        this.Stop();
        OpenPose.Error(e.Message, -1, nameof(this.WorkProducer));
        return(null);
    }
}
private static void PrintKeypoints(StdSharedPtr <StdVector <StdSharedPtr <Datum> > > datumsPtr)
{
    // Logs the body/face/hand keypoint arrays of the first datum in the batch.
    try
    {
        // Example: How to use the pose keypoints
        if (datumsPtr != null && datumsPtr.TryGet(out var data) && !data.Empty)
        {
            // First datum of the batch carries the keypoint arrays for this frame
            var temp = data.ToArray()[0].Get();
            OpenPose.Log($"Body keypoints: {temp.PoseKeyPoints}", Priority.High);
            OpenPose.Log($"Face keypoints: {temp.FaceKeyPoints}", Priority.High);
            OpenPose.Log($"Left hand keypoints: {temp.HandKeyPoints[0]}", Priority.High);
            OpenPose.Log($"Right hand keypoints: {temp.HandKeyPoints[1]}", Priority.High);
        }
        else
        {
            OpenPose.Log("Nullptr or empty datumsPtr found.", Priority.High);
        }
    }
    // NOTE(review): the catch clause and closing brace of this method appear to be missing
    // from this chunk — confirm against the full file before relying on this text.
protected override StdSharedPtr <StdVector <UserDatum> > WorkProducer()
{
    // Producer stage: emits one datum per image file; stops the worker when the list runs
    // out or a frame fails to load.
    try
    {
        // Close program when empty frame
        if (this._ImageFiles.Length <= this._Counter)
        {
            OpenPose.Log("Last frame read and added to queue. Closing program after it is processed.", Priority.High);
            // This function stops this worker, which will eventually stop the whole thread system once all the
            // frames have been processed
            this.Stop();
            return(null);
        }
        else
        {
            // Create new datum
            var tmp = new StdVector <UserDatum>();
            tmp.EmplaceBack();
            var datumsPtr = new StdSharedPtr <StdVector <UserDatum> >(tmp);
            // NOTE(review): assumes ToArray()[0] yields a wrapper over the native element (not
            // a detached copy) so the assignment below mutates the vector's element — confirm
            // against the StdVector binding.
            var datum = tmp.ToArray()[0];

            // Fill datum with the next image; the counter advances past it.
            using (var mat = Cv.ImRead(this._ImageFiles[this._Counter++]))
                datum.CvInputData = mat;

            // If empty frame -> return nullptr
            if (datum.CvInputData.Empty)
            {
                OpenPose.Log($"Empty frame detected on path: {this._ImageFiles[this._Counter - 1]}. Closing program.", Priority.High);
                this.Stop();
                datumsPtr = null;
            }

            return(datumsPtr);
        }
    }
    catch (Exception e)
    {
        // Stop the worker and surface the error; returning null ends this producer's output.
        OpenPose.Log("Some kind of unexpected error happened.");
        this.Stop();
        OpenPose.Error(e.Message, -1, nameof(this.WorkProducer));
        return(null);
    }
}
private string filePath; // input file path

/*
 * Initializer for image-file input (the original comment said "webcam input", but this
 * constructor takes an image file path — confirmed by the filePath parameter below).
 * netRes    = net resolution
 * faceRes   = face detector resolution, disable by setting to null
 * handRes   = hand detector resolution, disable by setting to null
 * modelPose = which pose model to use
 * filePath  = image file path
 * outPath   = output path for frames, disable output by setting to null
 */
public ImageDetector(string netRes, string faceRes, string handRes, string modelPose, string filePath, string outPath)
{
    this.filePath = filePath; // set image file path

    // set logging level (0 = output everything ... 255 = output nothing)
    OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
    ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
    Profiler.SetDefaultX((ulong)Flags.ProfileSpeed);

    OpenPose.Log("Adding config...", Priority.High);
    InitFlags(netRes, faceRes, handRes, modelPose, outPath); // initialize flags with input values

    // Asynchronous mode: frames are emplaced/popped by the caller rather than read by OpenPose itself
    opWrapper = new Wrapper <UserDatum>(ThreadManagerMode.Asynchronous);

    // configure openpose wrapper obj based on the flags that we set
    ConfigOnFlags(opWrapper, false); // setInput = false: input is supplied later, not during config
}
protected override void Work(Datum[] datumsPtr)
{
    // User's post-processing (after OpenPose processing & before OpenPose outputs) here
    //   datum.cvOutputData: rendered frame with pose or heatmaps
    //   datum.poseKeypoints: Array<float> with the estimated pose
    try
    {
        if (datumsPtr == null || datumsPtr.Length == 0)
            return;

        // Invert each frame's colours (input -> output) as a trivial example of post-processing.
        for (var index = 0; index < datumsPtr.Length; index++)
        {
            var item = datumsPtr[index];
            Cv.BitwiseNot(item.CvInputData, item.CvOutputData);
        }
    }
    catch (Exception e)
    {
        // Stop this worker so the thread system winds down, then surface the error.
        OpenPose.Log("Some kind of unexpected error happened.");
        this.Stop();
        OpenPose.Error(e.Message, -1, nameof(this.Work));
    }
}
private static bool Display(StdSharedPtr <StdVector <StdSharedPtr <Datum> > > datumsPtr)
{
    // Shows the first rendered frame of the batch; returns true when the user pressed Esc.
    try
    {
        // User's displaying/saving/other processing here
        //   datum.cvOutputData: rendered frame with pose or heatmaps
        //   datum.poseKeypoints: Array<float> with the estimated pose
        if (datumsPtr != null && datumsPtr.TryGet(out var data) && !data.Empty)
        {
            // Display image and sleeps at least 1 ms (it usually sleeps ~5-10 msec to display the image)
            var temp = data.ToArray()[0].Get();
            using (var cvMat = OpenPose.OP_OP2CVCONSTMAT(temp.CvOutputData))
                Cv.ImShow($"{OpenPose.OpenPoseNameAndVersion()} - Tutorial C++ API", cvMat);
        }
        else
        {
            OpenPose.Log("Nullptr or empty datumsPtr found.", Priority.High);
        }

        // 27 is the Esc key code: true means the user wants to exit
        var key = Cv.WaitKey(1);
        return(key == 27);
    }
    // NOTE(review): the catch clause and closing brace of this method appear to be missing
    // from this chunk — confirm against the full file before relying on this text.
public StdSharedPtr <StdVector <StdSharedPtr <Datum> > > CreateDatum()
{
    // Produces the next image from disk as a shared datum vector; returns null when there
    // is nothing left to read (or a frame failed to load), marking this producer closed.
    // Close program when empty frame
    if (this._Closed || this._ImageFiles.Length <= this._Counter)
    {
        OpenPose.Log("Last frame read and added to queue. Closing program after it is processed.", Priority.High);
        // This function stops this worker, which will eventually stop the whole thread system once all the
        // frames have been processed
        this._Closed = true;
        return(null);
    }
    else
    {
        // Create new datum: a shared vector holding one shared Datum
        var vector = new StdVector <StdSharedPtr <Datum> >();
        var datumsPtr = new StdSharedPtr <StdVector <StdSharedPtr <Datum> > >(vector);
        datumsPtr.Get().EmplaceBack();
        var datum = datumsPtr.Get().At(0);

        // C# cannot set pointer object by using assignment operator
        datum.Reset(new Datum());

        // Fill datum: read the next image from disk and convert cv::Mat -> op::Matrix
        using (var cvInputData = Cv.ImRead(this._ImageFiles[this._Counter++]))
        using (var inputData = OpenPose.OP_CV2OPCONSTMAT(cvInputData))
            datum.Get().CvInputData = inputData;

        // If empty frame -> return nullptr
        if (datum.Get().CvInputData.Empty)
        {
            OpenPose.Log($"Empty frame detected on path: {this._ImageFiles[this._Counter - 1]}. Closing program.", Priority.High);
            this._Closed = true;
            datumsPtr = null;
        }

        return(datumsPtr);
    }
}
protected override void Work(Datum[] datumsPtr)
{
    // User's processing here
    //   datum.cvInputData: initial cv::Mat obtained from the frames producer (video, webcam, etc.)
    //   datum.cvOutputData: final cv::Mat to be displayed
    try
    {
        if (datumsPtr == null)
            return;

        // Invert each frame's colours (input -> output) as a trivial example of processing.
        for (var index = 0; index < datumsPtr.Length; index++)
        {
            var item = datumsPtr[index];
            Cv.BitwiseNot(item.CvInputData, item.CvOutputData);
        }
    }
    catch (Exception e)
    {
        // Stop this worker so the thread system winds down, then surface the error.
        OpenPose.Log("Some kind of unexpected error happened.");
        this.Stop();
        OpenPose.Error(e.Message, -1, nameof(this.Work));
    }
}
protected override void Work(StdSharedPtr <Datum>[] datums)
{
    // Inverts each datum's rendered output (CvOutputData) in place.
    // NOTE(review): the original header comment described this as "pre-processing" on
    // cvInputData, but the code operates on CvOutputData — confirm which pipeline stage
    // this worker is actually registered for.
    try
    {
        if (datums != null && datums.Length != 0)
        {
            foreach (var datum in datums)
            {
                // OP_OP2CVMAT gives a writable cv::Mat view, so BitwiseNot mutates the datum.
                using (var cvOutputData = OpenPose.OP_OP2CVMAT(datum.Get().CvOutputData))
                    Cv.BitwiseNot(cvOutputData, cvOutputData);
            }
        }
    }
    catch (Exception e)
    {
        // Stop this worker so the thread system winds down, then surface the error.
        OpenPose.Log("Some kind of unexpected error happened.");
        this.Stop();
        OpenPose.Error(e.Message, -1, nameof(this.Work));
    }
}
private static void Display(StdSharedPtr <StdVector <StdSharedPtr <Datum> > > datumsPtr)
{
    // Shows the first rendered frame of the batch and blocks until a key is pressed.
    try
    {
        // User's displaying/saving/other processing here
        //   datum.cvOutputData: rendered frame with pose or heatmaps
        //   datum.poseKeypoints: Array<float> with the estimated pose
        if (datumsPtr != null && datumsPtr.TryGet(out var data) && !data.Empty)
        {
            // Display image; WaitKey() with no timeout waits indefinitely for a key press
            var temp = data.ToArray();
            using (var cvMat = OpenPose.OP_OP2CVCONSTMAT(temp[0].Get().CvOutputData))
            {
                Cv.ImShow($"{OpenPose.OpenPoseNameAndVersion()} - Tutorial C++ API", cvMat);
                Cv.WaitKey();
            }
        }
        else
        {
            OpenPose.Log("Nullptr or empty datumsPtr found.", Priority.High);
        }
    }
    // NOTE(review): the catch clause and closing brace of this method appear to be missing
    // from this chunk — confirm against the full file before relying on this text.
protected override void Work(StdSharedPtr <Datum>[] datums)
{
    // User's post-processing (after OpenPose processing & before OpenPose outputs) here
    //   datum.cvOutputData: rendered frame with pose or heatmaps
    //   datum.poseKeypoints: Array<float> with the estimated pose
    try
    {
        if (datums == null || datums.Length == 0)
            return;

        // Invert the rendered output of every datum in place.
        for (var index = 0; index < datums.Length; index++)
        {
            using (var output = OpenPose.OP_OP2CVMAT(datums[index].Get().CvOutputData))
                Cv.BitwiseNot(output, output);
        }
    }
    catch (Exception e)
    {
        // Stop this worker so the thread system winds down, then surface the error.
        OpenPose.Log("Some kind of unexpected error happened.");
        this.Stop();
        OpenPose.Error(e.Message, -1, nameof(this.Work));
    }
}
protected override void WorkConsumer(Datum[] datumsPtr)
{
    // User's displaying/saving/other processing here
    //   datum.cvOutputData: rendered frame with pose or heatmaps
    //   datum.poseKeypoints: Array<float> with the estimated pose
    try
    {
        var hasFrames = datumsPtr != null && datumsPtr.Length != 0;
        if (hasFrames)
        {
            // Show the first frame of the batch; WaitKey(1) sleeps at least 1 ms
            // (usually ~5-10 ms) so the window actually refreshes.
            Cv.ImShow("User worker GUI", datumsPtr[0].CvOutputData);
            Cv.WaitKey(1);
        }
    }
    catch (Exception e)
    {
        // Stop this worker so the thread system winds down, then surface the error.
        OpenPose.Log("Some kind of unexpected error happened.");
        this.Stop();
        OpenPose.Error(e.Message, -1, nameof(this.WorkConsumer));
    }
    #endregion
}
private static int TutorialDeveloperThread1()
{
    // Two-thread developer demo: a frames producer feeds a GUI through one real queue,
    // wired manually via ThreadManager. Returns 0 on success, -1 on failure.
    try
    {
        OpenPose.Log("Starting OpenPose demo...", Priority.High);
        var timeBegin = new Stopwatch();
        timeBegin.Start();

        // ------------------------- INITIALIZATION -------------------------
        // Step 1 - Set logging level
        //   - 0 will output all the logging messages
        //   - 255 will output nothing
        OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
        ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;

        // Step 2 - Read GFlags (user defined configuration)
        // cameraSize
        var cameraSize = OpenPose.FlagsToPoint(Flags.CameraResolution, "-1x-1");
        // outputSize
        var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
        // producerType
        var tie = OpenPose.FlagsToProducer(Flags.ImageDir, Flags.Video, Flags.IpCamera, Flags.Camera, Flags.FlirCamera, Flags.FlirCameraIndex);
        var producerType = tie.Item1;
        var producerString = tie.Item2;
        var displayProducerFpsMode = Flags.ProcessRealTime ? ProducerFpsMode.OriginalFps : ProducerFpsMode.RetrievalFps;
        using (var producerSharedPtr = OpenPose.CreateProducer(producerType, cameraSize, producerString, Flags.CameraParameterPath, Flags.FrameUndistort, Flags.Views3D))
        {
            producerSharedPtr.Get().SetProducerFpsMode(displayProducerFpsMode);
            OpenPose.Log("", Priority.Low);

            // Step 3 - Setting producer (kept from the original C++ sample for reference)
            //var videoSeekSharedPtr = std::make_shared<std::pair<std::atomic<bool>, std::atomic<int>>>();
            //videoSeekSharedPtr->first = false;
            //videoSeekSharedPtr->second = 0;

            // Step 4 - Setting thread workers && manager
            // Note: nativeDebugging may occur crash
            using (var threadManager = new ThreadManager <Datum>())
            {
                // Step 5 - Initializing the worker classes
                // Frames producer (e.g., video, webcam, ...)
                using (var datumProducer = new StdSharedPtr <DatumProducer <Datum> >(new DatumProducer <Datum>(producerSharedPtr)))
                using (var wDatumProducer = new StdSharedPtr <WDatumProducer <Datum> >(new WDatumProducer <Datum>(datumProducer)))
                {
                    // GUI (Display)
                    using (var gui = new StdSharedPtr <Gui>(new Gui(outputSize, Flags.FullScreen, threadManager.GetIsRunningSharedPtr())))
                    using (var wGui = new StdSharedPtr <WGui <Datum> >(new WGui <Datum>(gui)))
                    {
                        // ------------------------- CONFIGURING THREADING -------------------------
                        // In this simple multi-thread example, we will do the following:
                        //   3 (virtual) queues: 0, 1, 2
                        //   1 real queue: 1. The first and last queue ids (0 and 2) are not actual
                        //   queues, but the beginning and end of the processing sequence.
                        //   2 threads: 0, 1
                        //   wDatumProducer generates frames (no real queue 0) and pushes them on queue 1;
                        //   wGui pops frames from queue 1 and processes them (no real queue 2).
                        var threadId = 0UL;
                        var queueIn = 0UL;
                        var queueOut = 1UL;
                        threadManager.Add(threadId++, wDatumProducer, queueIn++, queueOut++); // Thread 0, queues 0 -> 1
                        threadManager.Add(threadId++, wGui, queueIn++, queueOut++);           // Thread 1, queues 1 -> 2

                        // Equivalent single-thread version (option a):
                        //   const auto threadId = 0ull; auto queueIn = 0ull; auto queueOut = 1ull;
                        //   threadManager.add(threadId, wDatumProducer, queueIn++, queueOut++); // Thread 0, queues 0 -> 1
                        //   threadManager.add(threadId, wGui, queueIn++, queueOut++);           // Thread 0, queues 1 -> 2
                        // Equivalent single-thread version (option b):
                        //   threadManager.add(threadId, {wDatumProducer, wGui}, queueIn, queueOut);

                        // ------------------------- STARTING AND STOPPING THREADING -------------------------
                        OpenPose.Log("Starting thread(s)...", Priority.High);
                        // Two different ways of running the program on multithread environment
                        // Option a) Using the main thread (this thread) for processing (it saves 1 thread, recommended)
                        threadManager.Exec();
                        // Option b) Giving to the user the control of this thread
                        //   VERY IMPORTANT NOTE: if OpenCV is compiled with Qt support, this option will not work.
                        //   Qt needs the main thread to plot visual results, so the final GUI (which uses OpenCV)
                        //   would return an exception similar to:
                        //   `QMetaMethod::invoke: Unable to invoke methods with return values in queued connections`
                        //   threadManager.start();
                        //   while (threadManager.isRunning()) std::this_thread::sleep_for(std::chrono::milliseconds{33});
                        //   op::log("Stopping thread(s)", op::Priority::High); threadManager.stop();
                    }
                }
            }
        }

        // ------------------------- CLOSING -------------------------
        // Measuring total time
        timeBegin.Stop();
        var totalTimeSec = timeBegin.ElapsedMilliseconds / 1000d;
        var message = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
        OpenPose.Log(message, Priority.High);

        // Return successful message
        return(0);
    }
    catch (Exception e)
    {
        OpenPose.Error(e.Message, -1, nameof(TutorialDeveloperThread1));
        return(-1);
    }
}
private static int TutorialApiCpp9()
{
    // Demo with fully custom input/processing/output workers plugged into the wrapper.
    // Returns 0 on success, -1 on failure.
    try
    {
        OpenPose.Log("Starting OpenPose demo...", Priority.High);
        var timeBegin = new Stopwatch();
        timeBegin.Start();

        // logging_level
        OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
        ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
        Profiler.SetDefaultX((ulong)Flags.ProfileSpeed);

        // Applying user defined configuration - GFlags to program variables
        // outputSize
        var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
        // netInputSize
        var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
        // faceNetInputSize
        var faceNetInputSize = OpenPose.FlagsToPoint(Flags.FaceNetResolution, "368x368 (multiples of 16)");
        // handNetInputSize
        var handNetInputSize = OpenPose.FlagsToPoint(Flags.HandNetResolution, "368x368 (multiples of 16)");
        // poseModel
        var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
        // JSON saving
        if (!string.IsNullOrEmpty(Flags.WriteKeyPoint))
        {
            OpenPose.Log("Flag `write_keypoint` is deprecated and will eventually be removed. Please, use `write_json` instead.", Priority.Max);
        }
        // keypointScale
        var keypointScale = OpenPose.FlagsToScaleMode(Flags.KeyPointScale);
        // heatmaps to add
        var heatMapTypes = OpenPose.FlagsToHeatMaps(Flags.HeatmapsAddParts, Flags.HeatmapsAddBackground, Flags.HeatmapsAddPAFs);
        var heatMapScale = OpenPose.FlagsToHeatMapScaleMode(Flags.HeatmapsScale);
        // >1 camera view?
        var multipleView = (Flags.Enable3D || Flags.Views3D > 1);
        // Enabling Google Logging
        const bool enableGoogleLogging = true;

        // Initializing the user custom classes
        // Frames producer (e.g., video, webcam, ...)
        using (var wUserInput = new StdSharedPtr <UserWorkerProducer <UserDatum> >(new WUserInput(Flags.ImageDir)))
        // Processing
        using (var wUserPostProcessing = new StdSharedPtr <UserWorker <UserDatum> >(new WUserPostProcessing()))
        // GUI (Display)
        using (var wUserOutput = new StdSharedPtr <UserWorkerConsumer <UserDatum> >(new WUserOutput()))
        {
            // OpenPose wrapper
            OpenPose.Log("Configuring OpenPose...", Priority.High);
            using (var opWrapperT = new Wrapper <UserDatum>())
            {
                // Add custom input (runs on the wrapper's thread)
                const bool workerInputOnNewThread = false;
                opWrapperT.SetWorker(WorkerType.Input, wUserInput, workerInputOnNewThread);
                // Add custom processing (runs on the wrapper's thread)
                const bool workerProcessingOnNewThread = false;
                opWrapperT.SetWorker(WorkerType.PostProcessing, wUserPostProcessing, workerProcessingOnNewThread);
                // Add custom output (runs on its own thread)
                const bool workerOutputOnNewThread = true;
                opWrapperT.SetWorker(WorkerType.Output, wUserOutput, workerOutputOnNewThread);

                // Pose configuration (use WrapperStructPose{} for default and recommended configuration)
                using (var pose = new WrapperStructPose(!Flags.BodyDisabled, netInputSize, outputSize, keypointScale, Flags.NumGpu, Flags.NumGpuStart, Flags.ScaleNumber, (float)Flags.ScaleGap, OpenPose.FlagsToRenderMode(Flags.RenderPose, multipleView), poseModel, !Flags.DisableBlending, (float)Flags.AlphaPose, (float)Flags.AlphaHeatmap, Flags.PartToShow, Flags.ModelFolder, heatMapTypes, heatMapScale, Flags.PartCandidates, (float)Flags.RenderThreshold, Flags.NumberPeopleMax, Flags.MaximizePositives, Flags.FpsMax, enableGoogleLogging))
                // Face configuration (use op::WrapperStructFace{} to disable it)
                using (var face = new WrapperStructFace(Flags.Face, faceNetInputSize, OpenPose.FlagsToRenderMode(Flags.FaceRender, multipleView, Flags.RenderPose), (float)Flags.FaceAlphaPose, (float)Flags.FaceAlphaHeatmap, (float)Flags.FaceRenderThreshold))
                // Hand configuration (use op::WrapperStructHand{} to disable it)
                using (var hand = new WrapperStructHand(Flags.Hand, handNetInputSize, Flags.HandScaleNumber, (float)Flags.HandScaleRange, Flags.HandTracking, OpenPose.FlagsToRenderMode(Flags.HandRender, multipleView, Flags.RenderPose), (float)Flags.HandAlphaPose, (float)Flags.HandAlphaHeatmap, (float)Flags.HandRenderThreshold))
                // Extra functionality configuration (use op::WrapperStructExtra{} to disable it)
                using (var extra = new WrapperStructExtra(Flags.Enable3D, Flags.MinViews3D, Flags.Identification, Flags.Tracking, Flags.IkThreads))
                // Output (comment or use default argument to disable any output)
                using (var output = new WrapperStructOutput(Flags.CliVerbose, Flags.WriteKeyPoint, OpenPose.StringToDataFormat(Flags.WriteKeyPointFormat), Flags.WriteJson, Flags.WriteCocoJson, Flags.WriteCocoFootJson, Flags.WriteCocoJsonVariant, Flags.WriteImages, Flags.WriteImagesFormat, Flags.WriteVideo, Flags.WriteVideoFps, Flags.WriteHeatmaps, Flags.WriteHeatmapsFormat, Flags.WriteVideoAdam, Flags.WriteBvh, Flags.UdpHost, Flags.UdpPort))
                {
                    opWrapperT.Configure(pose);
                    opWrapperT.Configure(face);
                    opWrapperT.Configure(hand);
                    opWrapperT.Configure(extra);
                    opWrapperT.Configure(output);
                    // No GUI. Equivalent to: opWrapper.configure(op::WrapperStructGui{});
                    // Set to single-thread (for sequential processing and/or debugging and/or reducing latency)
                    if (Flags.DisableMultiThread)
                    {
                        opWrapperT.DisableMultiThreading();
                    }

                    // Start, run, and stop processing - Exec() blocks this thread until OpenPose wrapper has finished
                    OpenPose.Log("Starting thread(s)...", Priority.High);
                    opWrapperT.Exec();
                }
            }
        }

        // Measuring total time
        timeBegin.Stop();
        var totalTimeSec = timeBegin.ElapsedMilliseconds / 1000d;
        var message = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
        OpenPose.Log(message, Priority.High);

        // Return successful message
        return(0);
    }
    catch (Exception)
    {
        // NOTE(review): the exception is swallowed here; consider logging via OpenPose.Error
        // as the thread tutorials in this file do.
        return(-1);
    }
}
private static int OpenPoseTutorialThread3()
{
    // Three-thread demo: custom producer -> custom processor -> custom consumer, wired
    // manually via ThreadManager. Returns 0 on success, -1 on failure.
    try
    {
        OpenPose.Log("Starting OpenPose demo...", Priority.High);
        var timeBegin = new Stopwatch();
        timeBegin.Start();

        // ------------------------- INITIALIZATION -------------------------
        // Step 1 - Set logging level
        //   - 0 will output all the logging messages
        //   - 255 will output nothing
        OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
        ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;

        // Step 2 - Setting thread workers && manager
        using (var threadManager = new ThreadManager <Datum>())
        {
            // Step 3 - Initializing the worker classes
            // Frames producer (e.g., video, webcam, ...)
            using (var wUserInput = new StdSharedPtr <UserWorkerProducer <Datum> >(new WUserInput(Flags.ImageDir)))
            {
                // Processing
                using (var wUserProcessing = new StdSharedPtr <UserWorker <Datum> >(new WUserPostProcessing()))
                {
                    // GUI (Display)
                    using (var wUserOutput = new StdSharedPtr <UserWorkerConsumer <Datum> >(new WUserOutput()))
                    {
                        // ------------------------- CONFIGURING THREADING -------------------------
                        // In this simple multi-thread example, we will do the following:
                        //   3 (virtual) queues: 0, 1, 2
                        //   1 real queue: 1. The first and last queue ids (0 and 2) are not actual
                        //   queues, but the beginning and end of the processing sequence.
                        //   2 threads: 0, 1
                        //   wUserInput generates frames (no real queue 0) and pushes them on queue 1;
                        //   wGui pops frames from queue 1 and processes them (no real queue 2).
                        var threadId = 0UL;
                        var queueIn = 0UL;
                        var queueOut = 1UL;
                        threadManager.Add(threadId++, wUserInput, queueIn++, queueOut++);      // Thread 0, queues 0 -> 1
                        threadManager.Add(threadId++, wUserProcessing, queueIn++, queueOut++); // Thread 1, queues 1 -> 2
                        threadManager.Add(threadId++, wUserOutput, queueIn++, queueOut++);     // Thread 2, queues 2 -> 3

                        // ------------------------- STARTING AND STOPPING THREADING -------------------------
                        OpenPose.Log("Starting thread(s)...", Priority.High);
                        // Two different ways of running the program on multithread environment
                        // Option a) Using the main thread (this thread) for processing (it saves 1 thread, recommended)
                        threadManager.Exec();
                        // Option b) Giving to the user the control of this thread
                        //   VERY IMPORTANT NOTE: if OpenCV is compiled with Qt support, this option will not work.
                        //   Qt needs the main thread to plot visual results, so the final GUI (which uses OpenCV)
                        //   would return an exception similar to:
                        //   `QMetaMethod::invoke: Unable to invoke methods with return values in queued connections`
                        //   threadManager.start();
                        //   while (threadManager.isRunning()) std::this_thread::sleep_for(std::chrono::milliseconds{33});
                        //   op::log("Stopping thread(s)", op::Priority::High); threadManager.stop();
                    }
                }
            }
        }

        // ------------------------- CLOSING -------------------------
        // Measuring total time
        timeBegin.Stop();
        var totalTimeSec = timeBegin.ElapsedMilliseconds / 1000d;
        var message = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
        OpenPose.Log(message, Priority.High);

        // Return successful message
        return(0);
    }
    catch (Exception e)
    {
        OpenPose.Error(e.Message, -1, nameof(OpenPoseTutorialThread3));
        return(-1);
    }
}
// Configures an OpenPose wrapper from the command-line flags and starts it (asynchronously).
// setInput - set to true if setting input during configuration (e.g. for video file input, image directory, webcam);
//            set to false if inputting later (e.g. for EmplaceAndPop on a Mat object or raw image).
public static void ConfigOnFlags(Wrapper <UserDatum> opWrapper, Boolean setInput)
{
    // Configuring OpenPose

    // logging_level: 0..255, mapped straight onto the Priority enum.
    OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
    ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
    Profiler.SetDefaultX((ulong)Flags.ProfileSpeed);

    // Applying user defined configuration - GFlags to program variables
    // producerType: picks one producer out of the mutually-exclusive input flags.
    var tie = OpenPose.FlagsToProducer(Flags.ImageDir, Flags.Video, Flags.IpCamera, Flags.Camera,
                                       Flags.FlirCamera, Flags.FlirCameraIndex);
    var producerType = tie.Item1;
    var producerString = tie.Item2;
    // cameraSize ("-1x-1" = auto-detect)
    var cameraSize = OpenPose.FlagsToPoint(Flags.CameraResolution, "-1x-1");
    // outputSize ("-1x-1" = same as input)
    var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
    // netInputSize
    var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
    // faceNetInputSize
    var faceNetInputSize = OpenPose.FlagsToPoint(Flags.FaceNetResolution, "368x368 (multiples of 16)");
    // handNetInputSize
    var handNetInputSize = OpenPose.FlagsToPoint(Flags.HandNetResolution, "368x368 (multiples of 16)");
    // poseMode
    var poseMode = OpenPose.FlagsToPoseMode(Flags.Body);
    // poseModel
    var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
    // JSON saving: warn on the deprecated keypoint flag.
    if (!string.IsNullOrEmpty(Flags.WriteKeyPoint))
    {
        OpenPose.Log("Flag `write_keypoint` is deprecated and will eventually be removed. Please, use `write_json` instead.", Priority.Max);
    }
    // keyPointScale
    var keyPointScale = OpenPose.FlagsToScaleMode(Flags.KeyPointScale);
    // heatmaps to add
    var heatMapTypes = OpenPose.FlagsToHeatMaps(Flags.HeatmapsAddParts, Flags.HeatmapsAddBackground, Flags.HeatmapsAddPAFs);
    var heatMapScale = OpenPose.FlagsToHeatMapScaleMode(Flags.HeatmapsScale);
    // >1 camera view?
    var multipleView = (Flags.Enable3D || Flags.Views3D > 1 || Flags.FlirCamera);
    // Face and hand detectors
    var faceDetector = OpenPose.FlagsToDetector(Flags.FaceDetector);
    var handDetector = OpenPose.FlagsToDetector(Flags.HandDetector);
    // Enabling Google Logging
    const bool enableGoogleLogging = true;

    // Pose configuration (use WrapperStructPose() for default and recommended configuration)
    var pose = new WrapperStructPose(poseMode, netInputSize, outputSize, keyPointScale, Flags.NumGpu, Flags.NumGpuStart,
                                     Flags.ScaleNumber, (float)Flags.ScaleGap,
                                     OpenPose.FlagsToRenderMode(Flags.RenderPose, multipleView), poseModel,
                                     !Flags.DisableBlending, (float)Flags.AlphaPose, (float)Flags.AlphaHeatmap,
                                     Flags.PartToShow, Flags.ModelFolder, heatMapTypes, heatMapScale,
                                     Flags.PartCandidates, (float)Flags.RenderThreshold, Flags.NumberPeopleMax,
                                     Flags.MaximizePositives, Flags.FpsMax, Flags.PrototxtPath, Flags.CaffeModelPath,
                                     (float)Flags.UpsamplingRatio, enableGoogleLogging);
    // Face configuration (use WrapperStructPose() to disable it)
    var face = new WrapperStructFace(Flags.Face, faceDetector, faceNetInputSize,
                                     OpenPose.FlagsToRenderMode(Flags.FaceRender, multipleView, Flags.RenderPose),
                                     (float)Flags.FaceAlphaPose, (float)Flags.FaceAlphaHeatmap,
                                     (float)Flags.FaceRenderThreshold);
    // Hand configuration (use WrapperStructPose() to disable it)
    var hand = new WrapperStructHand(Flags.Hand, handDetector, handNetInputSize, Flags.HandScaleNumber,
                                     (float)Flags.HandScaleRange,
                                     OpenPose.FlagsToRenderMode(Flags.HandRender, multipleView, Flags.RenderPose),
                                     (float)Flags.HandAlphaPose, (float)Flags.HandAlphaHeatmap,
                                     (float)Flags.HandRenderThreshold);
    // Extra functionality configuration (use WrapperStructPose() to disable it)
    var extra = new WrapperStructExtra(Flags.Enable3D, Flags.MinViews3D, Flags.Identification, Flags.Tracking, Flags.IkThreads);
    // Output (comment or use default argument to disable any output)
    // NOTE(review): here WriteVideoWithAudio precedes WriteVideoFps, while ConfigureWrapper elsewhere in this
    // file passes them in the opposite order — verify both against the WrapperStructOutput constructor signature.
    var output = new WrapperStructOutput(Flags.CliVerbose, Flags.WriteKeyPoint,
                                         OpenPose.StringToDataFormat(Flags.WriteKeyPointFormat),
                                         Flags.WriteJson, Flags.WriteCocoJson, Flags.WriteCocoFootJson,
                                         Flags.WriteCocoJsonVariant, Flags.WriteImages, Flags.WriteImagesFormat,
                                         Flags.WriteVideo, Flags.WriteVideoWithAudio, Flags.WriteVideoFps,
                                         Flags.WriteHeatmaps, Flags.WriteHeatmapsFormat, Flags.WriteVideoAdam,
                                         Flags.WriteBvh, Flags.UdpHost, Flags.UdpPort);
    // GUI (comment or use default argument to disable any visual output)
    // NOTE(review): `gui` is built from the display flags but never passed to opWrapper.Configure below —
    // confirm whether an opWrapper.Configure(gui) call was intended (the other demos in this file configure it).
    var gui = new WrapperStructGui(OpenPose.FlagsToDisplayMode(Flags.Display, Flags.Enable3D), !Flags.NoGuiVerbose, Flags.FullScreen);

    // config wrapper on set values
    opWrapper.Configure(pose);
    opWrapper.Configure(face);
    opWrapper.Configure(hand);
    opWrapper.Configure(extra);
    if (setInput)
    {
        // Producer (use default to disable any input)
        var input = new WrapperStructInput(producerType, producerString, Flags.FrameFirst, Flags.FrameStep,
                                           Flags.FrameLast, Flags.ProcessRealTime, Flags.FrameFlip,
                                           Flags.FrameRotate, Flags.FramesRepeat, cameraSize,
                                           Flags.CameraParameterPath, Flags.FrameUndistort, Flags.Views3D);
        opWrapper.Configure(input);
    }
    opWrapper.Configure(output);
    // Set to single-thread (for sequential processing and/or debugging and/or reducing latency)
    if (Flags.DisableMultiThread)
    {
        opWrapper.DisableMultiThreading();
    }
    // start openpose wrapper (Start() returns immediately; processing runs on worker threads)
    opWrapper.Start();
}
/// <summary>
/// Renders one heatmap channel blended over the network-input image for the first person datum.
/// </summary>
/// <param name="datumsPtr">Shared pointer to the processed datum vector; may be null/empty.</param>
/// <param name="desiredChannel">Heatmap channel to show (wrapped modulo the channel count).</param>
/// <returns>True when the user pressed ESC (key 27) or an error occurred, i.e. the caller should stop.</returns>
private static bool Display(StdSharedPtr<StdVector<StdSharedPtr<Datum>>> datumsPtr, int desiredChannel = 0)
{
    try
    {
        if (datumsPtr != null && datumsPtr.TryGet(out var data) && !data.Empty)
        {
            var datum = datumsPtr.Get().At(0).Get();
            // Note: Heatmaps are in net_resolution size, which does not necessarily match the final image size
            // Read heatmaps
            var poseHeatMaps = datum.PoseHeatMaps;
            // Read desired channel: heatmap memory is laid out channel-major, so each channel is
            // a contiguous height*width block of floats.
            var numberChannels = poseHeatMaps.GetSize(0);
            var height = poseHeatMaps.GetSize(1);
            var width = poseHeatMaps.GetSize(2);
            var eleSize = sizeof(float);
            using (var desiredChannelHeatMap = new Mat(height, width, MatType.CV_32F,
                                                       IntPtr.Add(poseHeatMaps.GetPtr(), (desiredChannel % numberChannels) * height * width * eleSize)))
            {
                // Read image used from OpenPose body network (same resolution than heatmaps).
                // The three color planes are stored as consecutive height*width float blocks (B, G, R).
                var inputNetData = datum.InputNetData[0];
                using (var inputNetDataB = new Mat(height, width, MatType.CV_32F, IntPtr.Add(inputNetData.GetPtr(), 0 * height * width * eleSize)))
                using (var inputNetDataG = new Mat(height, width, MatType.CV_32F, IntPtr.Add(inputNetData.GetPtr(), 1 * height * width * eleSize)))
                using (var inputNetDataR = new Mat(height, width, MatType.CV_32F, IntPtr.Add(inputNetData.GetPtr(), 2 * height * width * eleSize)))
                using (var vector = new StdVector<Mat>(new List<Mat>(new[] { inputNetDataB, inputNetDataG, inputNetDataR })))
                using (var tmp = new Mat())
                {
                    Cv.Merge(vector, tmp);
                    // Undo the network normalization: values are roughly in [-0.5, 0.5], map to [0, 255].
                    using (var add = tmp + 0.5)
                    using (var mul = add * 255)
                    using (var netInputImage = (Mat)mul)
                    {
                        // Turn into uint8 Cv.Mat
                        using (var netInputImageUint8 = new Mat())
                        {
                            netInputImage.ConvertTo(netInputImageUint8, MatType.CV_8UC1);
                            using (var desiredChannelHeatMapUint8 = new Mat())
                            {
                                desiredChannelHeatMap.ConvertTo(desiredChannelHeatMapUint8, MatType.CV_8UC1);
                                // Combining both images: colorize the heatmap, then 50/50 blend with the input.
                                using (var imageToRender = new Mat())
                                {
                                    Cv.ApplyColorMap(desiredChannelHeatMapUint8, desiredChannelHeatMapUint8, ColormapType.COLORMAP_JET);
                                    Cv.AddWeighted(netInputImageUint8, 0.5, desiredChannelHeatMapUint8, 0.5, 0d, imageToRender);
                                    // Display image
                                    Cv.ImShow($"{OpenPose.OpenPoseNameAndVersion()} - Tutorial C++ API", imageToRender);
                                }
                            }
                        }
                    }
                }
            }
        }
        else
        {
            OpenPose.Log("Nullptr or empty datumsPtr found.", Priority.High);
        }

        var key = (char)Cv.WaitKey(1);
        return key == 27;
    }
    catch (Exception e)
    {
        // FIX: the original block had a `try` with no catch/finally (it cannot compile as written).
        // Restore the standard tutorial error path: report the error and request the caller to stop.
        OpenPose.Error(e.Message, -1, nameof(Display));
        return true;
    }
}
/// <summary>
/// Consumer worker: prints the pose/face/hand keypoints (and heatmap sizes, when present) of the
/// first datum to the command line, then optionally shows the rendered frame. ESC stops the worker.
/// </summary>
protected override void WorkConsumer(StdSharedPtr<UserDatum>[] datumsPtr)
{
    try
    {
        // Nothing arrived on the queue — nothing to do.
        if (datumsPtr == null || datumsPtr.Length == 0)
            return;

        var frame = datumsPtr[0].Get();

        // Show in command line the resulting pose keypoints for body, face and hands
        OpenPose.Log("\nKeypoints:");

        // Walk the body keypoint array element by element: [person, bodyPart, (x, y, score)].
        var bodyKeypoints = frame.PoseKeyPoints;
        OpenPose.Log("Person pose keypoints:");
        for (var p = 0; p < bodyKeypoints.GetSize(0); p++)
        {
            OpenPose.Log($"Person {p} (x, y, score):");
            for (var part = 0; part < bodyKeypoints.GetSize(1); part++)
            {
                var row = "";
                for (var c = 0; c < bodyKeypoints.GetSize(2); c++)
                    row += bodyKeypoints[new[] { p, part, c }] + " ";
                OpenPose.Log(row);
            }
        }
        OpenPose.Log(" ");

        // Alternative: just getting std::string equivalent
        OpenPose.Log($"Face keypoints: {frame.FaceKeyPoints}");
        OpenPose.Log($"Left hand keypoints: {frame.HandKeyPoints[0]}");
        OpenPose.Log($"Right hand keypoints: {frame.HandKeyPoints[1]}");

        // Heatmaps (only populated when the corresponding flags were enabled).
        var poseMaps = frame.PoseHeatMaps;
        if (!poseMaps.Empty)
        {
            OpenPose.Log($"Pose heatmaps size: [{poseMaps.GetSize(0)}, {poseMaps.GetSize(1)}, {poseMaps.GetSize(2)}]");
            var faceMaps = frame.FaceHeatMaps;
            OpenPose.Log($"Face heatmaps size: [{faceMaps.GetSize(0)}, {faceMaps.GetSize(1)}, {faceMaps.GetSize(2)}, {faceMaps.GetSize(3)}]");
            var handMaps = frame.HandHeatMaps;
            OpenPose.Log($"Left hand heatmaps size: [{handMaps[0].GetSize(0)}, {handMaps[0].GetSize(1)}, {handMaps[0].GetSize(2)}, {handMaps[0].GetSize(3)}]");
            OpenPose.Log($"Right hand heatmaps size: [{handMaps[1].GetSize(0)}, {handMaps[1].GetSize(1)}, {handMaps[1].GetSize(2)}, {handMaps[1].GetSize(3)}]");
        }

        // Display results (if enabled)
        if (Flags.NoDisplay)
            return;
        using (var cvMat = OpenPose.OP_OP2CVCONSTMAT(frame.CvOutputData))
        {
            // Display rendered output image; WaitKey sleeps at least 1 ms (usually ~5-10 ms).
            Cv.ImShow($"{OpenPose.OpenPoseNameAndVersion()} - Tutorial C++ API", cvMat);
            if ((char)Cv.WaitKey(1) == 27)
                this.Stop();
        }
    }
    catch (Exception e)
    {
        this.Stop();
        OpenPose.Error(e.Message, -1, nameof(this.WorkConsumer));
    }
}
// Configures the OpenPose wrapper from the command-line flags. No input producer and no GUI are
// configured here (equivalent to the default WrapperStructGui); the caller feeds/collects data itself.
private static void ConfigureWrapper(Wrapper <Datum> opWrapper)
{
    try
    {
        // Configuring OpenPose

        // logging_level: 0..255, mapped straight onto the Priority enum.
        OpenPose.CheckBool(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
        ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
        Profiler.SetDefaultX((ulong)Flags.ProfileSpeed);

        // Applying user defined configuration - GFlags to program variables
        // outputSize ("-1x-1" = same as input)
        var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
        // netInputSize
        var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
        // faceNetInputSize
        var faceNetInputSize = OpenPose.FlagsToPoint(Flags.FaceNetResolution, "368x368 (multiples of 16)");
        // handNetInputSize
        var handNetInputSize = OpenPose.FlagsToPoint(Flags.HandNetResolution, "368x368 (multiples of 16)");
        // poseMode
        var poseMode = OpenPose.FlagsToPoseMode(Flags.Body);
        // poseModel
        var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
        // JSON saving: warn on the deprecated keypoint flag.
        if (!string.IsNullOrEmpty(Flags.WriteKeyPoint))
        {
            OpenPose.Log("Flag `write_keypoint` is deprecated and will eventually be removed. Please, use `write_json` instead.", Priority.Max);
        }
        // keypointScale
        var keypointScale = OpenPose.FlagsToScaleMode(Flags.KeyPointScale);
        // heatmaps to add
        var heatMapTypes = OpenPose.FlagsToHeatMaps(Flags.HeatmapsAddParts, Flags.HeatmapsAddBackground, Flags.HeatmapsAddPAFs);
        var heatMapScale = OpenPose.FlagsToHeatMapScaleMode(Flags.HeatmapsScale);
        // >1 camera view?
        var multipleView = (Flags.Enable3D || Flags.Views3D > 1);
        // Face and hand detectors
        var faceDetector = OpenPose.FlagsToDetector(Flags.FaceDetector);
        var handDetector = OpenPose.FlagsToDetector(Flags.HandDetector);
        // Enabling Google Logging
        const bool enableGoogleLogging = true;

        // Pose configuration (use WrapperStructPose{} for default and recommended configuration)
        var pose = new WrapperStructPose(poseMode, netInputSize, outputSize, keypointScale, Flags.NumGpu,
                                         Flags.NumGpuStart, Flags.ScaleNumber, (float)Flags.ScaleGap,
                                         OpenPose.FlagsToRenderMode(Flags.RenderPose, multipleView), poseModel,
                                         !Flags.DisableBlending, (float)Flags.AlphaPose, (float)Flags.AlphaHeatmap,
                                         Flags.PartToShow, Flags.ModelFolder, heatMapTypes, heatMapScale,
                                         Flags.PartCandidates, (float)Flags.RenderThreshold, Flags.NumberPeopleMax,
                                         Flags.MaximizePositives, Flags.FpsMax, Flags.PrototxtPath,
                                         Flags.CaffeModelPath, (float)Flags.UpsamplingRatio, enableGoogleLogging);
        // Face configuration (use op::WrapperStructFace{} to disable it)
        var face = new WrapperStructFace(Flags.Face, faceDetector, faceNetInputSize,
                                         OpenPose.FlagsToRenderMode(Flags.FaceRender, multipleView, Flags.RenderPose),
                                         (float)Flags.FaceAlphaPose, (float)Flags.FaceAlphaHeatmap,
                                         (float)Flags.FaceRenderThreshold);
        // Hand configuration (use op::WrapperStructHand{} to disable it)
        var hand = new WrapperStructHand(Flags.Hand, handDetector, handNetInputSize, Flags.HandScaleNumber,
                                         (float)Flags.HandScaleRange,
                                         OpenPose.FlagsToRenderMode(Flags.HandRender, multipleView, Flags.RenderPose),
                                         (float)Flags.HandAlphaPose, (float)Flags.HandAlphaHeatmap,
                                         (float)Flags.HandRenderThreshold);
        // Extra functionality configuration (use op::WrapperStructExtra{} to disable it)
        var extra = new WrapperStructExtra(Flags.Enable3D, Flags.MinViews3D, Flags.Identification, Flags.Tracking, Flags.IkThreads);
        // Output (comment or use default argument to disable any output)
        // NOTE(review): here WriteVideoFps precedes WriteVideoWithAudio, while ConfigOnFlags elsewhere in this
        // file passes them in the opposite order — verify both against the WrapperStructOutput constructor signature.
        var output = new WrapperStructOutput(Flags.CliVerbose, Flags.WriteKeyPoint,
                                             OpenPose.StringToDataFormat(Flags.WriteKeyPointFormat),
                                             Flags.WriteJson, Flags.WriteCocoJson, Flags.WriteCocoJsonVariants,
                                             Flags.WriteCocoJsonVariant, Flags.WriteImages, Flags.WriteImagesFormat,
                                             Flags.WriteVideo, Flags.WriteVideoFps, Flags.WriteVideoWithAudio,
                                             Flags.WriteHeatmaps, Flags.WriteHeatmapsFormat, Flags.WriteVideoAdam,
                                             Flags.WriteBvh, Flags.UdpHost, Flags.UdpPort);
        opWrapper.Configure(pose);
        opWrapper.Configure(face);
        opWrapper.Configure(hand);
        opWrapper.Configure(extra);
        opWrapper.Configure(output);
        // No GUI. Equivalent to: opWrapper.configure(op::WrapperStructGui{});

        // Set to single-thread (for sequential processing and/or debugging and/or reducing latency)
        if (Flags.DisableMultiThread)
        {
            opWrapper.DisableMultiThreading();
        }

        // NOTE(review): this "Stopping OpenPose..." log runs at the end of *configuration*, before any
        // processing starts — presumably a copy/paste leftover from a demo's shutdown path; confirm intent.
        OpenPose.Log("Stopping OpenPose...", Priority.High);
    }
    catch (Exception e)
    {
        OpenPose.Error(e.Message, -1, nameof(ConfigureWrapper));
    }
}
/// <summary>
/// Full OpenPose demo: configures pose/face/hand/extra/input/output/GUI from the command-line
/// flags, runs the wrapper to completion, and reports the total processing time.
/// </summary>
/// <returns>0 on success, -1 on any error.</returns>
private static int OpenPoseDemo()
{
    try
    {
        OpenPose.Log("Starting OpenPose demo...", Priority.High);
        var timer = new Stopwatch();
        timer.Start();

        // logging_level: 0..255, mapped straight onto the Priority enum.
        OpenPose.Check(0 <= Flags.LoggingLevel && Flags.LoggingLevel <= 255, "Wrong logging_level value.");
        ConfigureLog.PriorityThreshold = (Priority)Flags.LoggingLevel;
        Profiler.SetDefaultX((ulong)Flags.ProfileSpeed);
        // // For debugging
        // // Print all logging messages
        // op::ConfigureLog::setPriorityThreshold(op::Priority::None);
        // // Print out speed values faster
        // op::Profiler::setDefaultX(100);

        // Applying user defined configuration - GFlags to program variables
        // cameraSize ("-1x-1" = auto-detect)
        var cameraSize = OpenPose.FlagsToPoint(Flags.CameraResolution, "-1x-1");
        // outputSize ("-1x-1" = same as input)
        var outputSize = OpenPose.FlagsToPoint(Flags.OutputResolution, "-1x-1");
        // netInputSize
        var netInputSize = OpenPose.FlagsToPoint(Flags.NetResolution, "-1x368");
        // faceNetInputSize
        var faceNetInputSize = OpenPose.FlagsToPoint(Flags.FaceNetResolution, "368x368 (multiples of 16)");
        // handNetInputSize
        var handNetInputSize = OpenPose.FlagsToPoint(Flags.HandNetResolution, "368x368 (multiples of 16)");
        // producerType: picks one producer out of the mutually-exclusive input flags.
        var tie = OpenPose.FlagsToProducer(Flags.ImageDir, Flags.Video, Flags.IpCamera, Flags.Camera,
                                           Flags.FlirCamera, Flags.FlirCameraIndex);
        var producerType = tie.Item1;
        var producerString = tie.Item2;
        // poseModel
        var poseModel = OpenPose.FlagsToPoseModel(Flags.ModelPose);
        // JSON saving: warn on the deprecated keypoint flag.
        if (!string.IsNullOrEmpty(Flags.WriteKeyPoint))
        {
            OpenPose.Log("Flag `write_keypoint` is deprecated and will eventually be removed. Please, use `write_json` instead.", Priority.Max);
        }
        // keyPointScale
        var keyPointScale = OpenPose.FlagsToScaleMode(Flags.KeyPointScale);
        // heatmaps to add
        var heatMapTypes = OpenPose.FlagsToHeatMaps(Flags.HeatmapsAddParts, Flags.HeatmapsAddBackground, Flags.HeatmapsAddPAFs);
        var heatMapScale = OpenPose.FlagsToHeatMapScaleMode(Flags.HeatmapsScale);
        // >1 camera view?
        var multipleView = (Flags.Enable3D || Flags.Views3D > 1 || Flags.FlirCamera);
        // Enabling Google Logging
        const bool enableGoogleLogging = true;

        // Configuring OpenPose
        OpenPose.Log("Configuring OpenPose...", Priority.High);
        using (var opWrapper = new Wrapper())
        {
            // Pose configuration (use WrapperStructPose{} for default and recommended configuration)
            using (var pose = new WrapperStructPose(!Flags.BodyDisabled, netInputSize, outputSize, keyPointScale,
                                                    Flags.NumGpu, Flags.NumGpuStart, Flags.ScaleNumber,
                                                    (float)Flags.ScaleGap,
                                                    OpenPose.FlagsToRenderMode(Flags.RenderPose, multipleView),
                                                    poseModel, !Flags.DisableBlending, (float)Flags.AlphaPose,
                                                    (float)Flags.AlphaHeatmap, Flags.PartToShow, Flags.ModelFolder,
                                                    heatMapTypes, heatMapScale, Flags.PartCandidates,
                                                    (float)Flags.RenderThreshold, Flags.NumberPeopleMax,
                                                    Flags.MaximizePositives, Flags.FpsMax, enableGoogleLogging))
            // Face configuration (use op::WrapperStructFace{} to disable it)
            using (var face = new WrapperStructFace(Flags.Face, faceNetInputSize,
                                                    OpenPose.FlagsToRenderMode(Flags.FaceRender, multipleView, Flags.RenderPose),
                                                    (float)Flags.FaceAlphaPose, (float)Flags.FaceAlphaHeatmap,
                                                    (float)Flags.FaceRenderThreshold))
            // Hand configuration (use op::WrapperStructHand{} to disable it)
            using (var hand = new WrapperStructHand(Flags.Hand, handNetInputSize, Flags.HandScaleNumber,
                                                    (float)Flags.HandScaleRange, Flags.HandTracking,
                                                    OpenPose.FlagsToRenderMode(Flags.HandRender, multipleView, Flags.RenderPose),
                                                    (float)Flags.HandAlphaPose, (float)Flags.HandAlphaHeatmap,
                                                    (float)Flags.HandRenderThreshold))
            // Extra functionality configuration (use op::WrapperStructExtra{} to disable it)
            using (var extra = new WrapperStructExtra(Flags.Enable3D, Flags.MinViews3D, Flags.Identification, Flags.Tracking, Flags.IkThreads))
            // Producer (use default to disable any input)
            using (var input = new WrapperStructInput(producerType, producerString, Flags.FrameFirst, Flags.FrameStep,
                                                      Flags.FrameLast, Flags.ProcessRealTime, Flags.FrameFlip,
                                                      Flags.FrameRotate, Flags.FramesRepeat, cameraSize,
                                                      Flags.CameraParameterFolder, !Flags.FrameKeepDistortion,
                                                      (uint)Flags.Views3D))
            // Output (comment or use default argument to disable any output)
            using (var output = new WrapperStructOutput(Flags.CliVerbose, Flags.WriteKeyPoint,
                                                        OpenPose.StringToDataFormat(Flags.WriteKeyPointFormat),
                                                        Flags.WriteJson, Flags.WriteCocoJson, Flags.WriteCocoFootJson,
                                                        Flags.WriteCocoJsonVariant, Flags.WriteImages,
                                                        Flags.WriteImagesFormat, Flags.WriteVideo, Flags.WriteVideoFps,
                                                        Flags.WriteHeatmaps, Flags.WriteHeatmapsFormat,
                                                        Flags.WriteVideoAdam, Flags.WriteBvh, Flags.UdpHost, Flags.UdpPort))
            // GUI (comment or use default argument to disable any visual output)
            using (var gui = new WrapperStructGui(OpenPose.FlagsToDisplayMode(Flags.Display, Flags.Enable3D), !Flags.NoGuiVerbose, Flags.FullScreen))
            {
                opWrapper.Configure(pose);
                opWrapper.Configure(face);
                opWrapper.Configure(hand);
                opWrapper.Configure(extra);
                opWrapper.Configure(input);
                opWrapper.Configure(output);
                opWrapper.Configure(gui);

                // Set to single-thread (for sequential processing and/or debugging and/or reducing latency)
                if (Flags.DisableMultiThread)
                {
                    opWrapper.DisableMultiThreading();
                }

                // Start, run, and stop processing - exec() blocks this thread until OpenPose wrapper has finished
                OpenPose.Log("Starting thread(s)...", Priority.High);
                opWrapper.Exec();

                // Measuring total time.
                // FIX: the original computed `ElapsedMilliseconds * 1000` (microseconds) while the message
                // claims seconds; divide by 1000 instead, as the sibling demo in this file does.
                timer.Stop();
                var totalTimeSec = timer.ElapsedMilliseconds / 1000d;
                var message = $"OpenPose demo successfully finished. Total time: {totalTimeSec} seconds.";
                OpenPose.Log(message, Priority.High);
            }
        }

        // Return successful message
        OpenPose.Log("Stopping OpenPose...", Priority.High);
        return 0;
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
        return -1;
    }
}