// Copy constructor: creates a new wrapper around a native copy of
// <paramref name="other"/>'s underlying C++ PInputVideoStream.
public PInputVideoStream(PInputVideoStream other)
    : this(PapillonPINVOKE.new_PInputVideoStream__SWIG_1(PInputVideoStream.getCPtr(other)), true)
{
    // Surface any exception raised on the native side during construction.
    if (PapillonPINVOKE.SWIGPendingException.Pending)
    {
        throw PapillonPINVOKE.SWIGPendingException.Retrieve();
    }
}
// Opens a video stream identified by a URI string into the supplied
// <paramref name="inputVideoStream"/> wrapper; returns the native PResult.
public static PResult Open(string uri, PInputVideoStream inputVideoStream)
{
    var nativeHandle = PapillonPINVOKE.PInputVideoStream_Open__SWIG_0(
        uri, PInputVideoStream.getCPtr(inputVideoStream));
    PResult result = new PResult(nativeHandle, true);

    // Re-throw any exception pending from the native call.
    if (PapillonPINVOKE.SWIGPendingException.Pending)
    {
        throw PapillonPINVOKE.SWIGPendingException.Retrieve();
    }

    return result;
}
// Overload of Open that routes decoding through an explicit plugin and a
// structured PUri; returns the native PResult wrapped for managed callers.
public static PResult Open(SWIGTYPE_p_papillon__PPlugin plugin, PUri uri, PInputVideoStream inputVideoStream)
{
    var nativeHandle = PapillonPINVOKE.PInputVideoStream_Open__SWIG_1(
        SWIGTYPE_p_papillon__PPlugin.getCPtr(plugin),
        PUri.getCPtr(uri),
        PInputVideoStream.getCPtr(inputVideoStream));
    PResult result = new PResult(nativeHandle, true);

    // Re-throw any exception pending from the native call.
    if (PapillonPINVOKE.SWIGPendingException.Pending)
    {
        throw PapillonPINVOKE.SWIGPendingException.Retrieve();
    }

    return result;
}
// Enrolls a subject from a video stream: builds a PDescription for the given
// subject id/name, bounded by maxFrames frames and maxExamples examples.
// Returns the native PResult describing success or failure.
public PResult EnrollFromVideoStream(PInputVideoStream ivs, PDescription description, int maxFrames, int maxExamples, PGuid subjectId, string subjectName)
{
    var nativeHandle = PapillonPINVOKE.PEnrollment_EnrollFromVideoStream__SWIG_1(
        swigCPtr,
        PInputVideoStream.getCPtr(ivs),
        PDescription.getCPtr(description),
        maxFrames,
        maxExamples,
        PGuid.getCPtr(subjectId),
        subjectName);
    PResult result = new PResult(nativeHandle, true);

    // Re-throw any exception pending from the native call.
    if (PapillonPINVOKE.SWIGPendingException.Pending)
    {
        throw PapillonPINVOKE.SWIGPendingException.Retrieve();
    }

    return result;
}
/// <summary>
/// Face-detection example: opens a sample video file, creates and configures
/// a face detector, then displays detections for every frame until the
/// stream ends or ESC is pressed.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
static void Main(string[] args)
{
    PLog.OpenConsoleLogger();
    PapillonSDK.Initialise();

    string SAMPLE_DIR = PPath.Join(PUtils.GetEnv("PAPILLON_INSTALL_DIR"), "Data", "Samples");
    string TITLE = "Papillon SDK - Face Detection Example";
    bool ENABLE_FACE_LOCALISER = true;

    // ************************************************************************
    // 1. Open Video Stream
    //    Alternative sources (replace the Open() call below):
    //      - YouTube via VLC plugin: "https://youtu.be/Wi7k6IPYNj4?decode_with=vlc"
    //      - local webcam:           "device:0"
    // ************************************************************************
    PInputVideoStream ivs = new PInputVideoStream();
    PInputVideoStream.Open(PPath.Join(SAMPLE_DIR, "face_log.avi"), ivs).OrDie();

    // ************************************************************************
    // 2. Create and configure Face Detector
    // ************************************************************************
    PDetector faceDetector = new PDetector();
    PProperties parameters = new PProperties();
    PDetector.Create("FaceDetector2", parameters, faceDetector).OrDie();
    faceDetector.EnableLocaliser(ENABLE_FACE_LOCALISER);
    faceDetector.SetMinDetectionSize(80); // ignore faces smaller than 80 px

    // ************************************************************************
    // 3. Apply Face Detector on each frame of the video stream
    //    (stops at end-of-stream or when ESC is pressed)
    // ************************************************************************
    PFrame frame = new PFrame();
    while (ivs.GetFrame(frame).Ok() && !PUtils.IsEscPressed())
    {
        PDetectionList listFaces = new PDetectionList();
        faceDetector.Detect(frame, listFaces);
        PUtils.DisplayDetectionList(frame, listFaces, TITLE);
    }
}
/// <summary>
/// SDK demo entry point: initialises the Papillon SDK, demonstrates loading
/// an image from disk and building one from a raw pixel buffer, then streams
/// the local webcam while (1) displaying it, (2) recording it to an AVI file,
/// (3) optionally stabilising it, and (4) running face detection per frame.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
static void Main(string[] args)
{
    // **********************
    // initialise Papillon SDK
    // **********************
    PLog.OpenConsoleLogger();
    PapillonSDK.Initialise("PapillonCSharpDemo");
    string SAMPLE_DIR = PPath.Join(PUtils.GetEnv("PAPILLON_INSTALL_DIR"), "Data", "Samples");

    // *********************************************************************
    // load an image from disk (can be JPEG, TIF, BMP or PNG) and display it
    // *********************************************************************
    PImage image = new PImage();
    image.Load(PPath.Join(SAMPLE_DIR, "sample.jpg")).OrDie();
    image.Display("MySampleImage");

    // **********************************************************
    // set an image from a pixel buffer (a byte[]) and display it
    // **********************************************************
    int width = 600;
    int height = 400;
    int depth = 3; // bytes per pixel: B, G, R
    var buf = new byte[width * height * depth];
    for (int i = 0; i < buf.Length; i += depth)
    {
        buf[i] = 127;     // blue
        buf[i + 1] = 0;   // green
        buf[i + 2] = 255; // red
    }
    PImage image2 = new PImage();
    image2.Set(buf, width, height, PImage.EPixelFormat.E_BGR8U);
    image2.Display("Pink");

    // **********************************************************
    // open a video stream from local webcam, then
    //   1. display the video stream
    //   2. write it on the disk (AVI file)
    //   3. stabilise the video stream (if the "Stabilisation" plugin is available)
    //   4. perform face detection on the video stream
    // **********************************************************
    PInputVideoStream inputStream = new PInputVideoStream();
    PInputVideoStream.Open("device:0", inputStream).OrDie();

    POutputVideoStream outputStream = new POutputVideoStream();
    POutputVideoStream.Open("webcam.avi?fps=20.0", outputStream).OrDie();

    // Stabilisation is best-effort: only enabled when the plugin loads.
    bool isStabilisationEnabled = false;
    PImage stabilisedImage = new PImage();
    PImageFilterOptions stabilisationOptions = new PImageFilterOptions();
    PImageFilter stabilisationFilter = new PImageFilter();
    if (PImageFilter.Create("Stabilisation", "", stabilisationFilter).Ok())
    {
        stabilisationOptions.SetDoubleParameter("motion", 0.5);
        stabilisationOptions.SetIntParameter("panning", 0);
        isStabilisationEnabled = true;
    }

    PFrame frame = new PFrame();
    PDetector faceDetector = new PDetector();
    PDetector.Create("FaceDetector", "", faceDetector).OrDie();
    PDetectorOptions detectorOptions = new PDetectorOptions();
    detectorOptions.SetIntParameter("LOCALISER", 0); // no localiser
    detectorOptions.SetMinDetectionSize(60);         // ignore faces smaller than 60 px
    PDetectionList detectionList = new PDetectionList();

    // Main loop: runs until the webcam stream stops delivering frames.
    while (inputStream.GetFrame(frame).Ok())
    {
        frame.Display("Web-cam image", 50);
        outputStream.PutImage(frame.GetImage());
        if (isStabilisationEnabled)
        {
            stabilisationFilter.Apply(frame.GetImage(), stabilisationOptions, stabilisedImage);
            stabilisedImage.Display("Stabilised stream");
        }
        faceDetector.Detect(frame, detectorOptions, detectionList);
        PUtils.DisplayDetectionList(frame, detectionList, "Face Detector");
    }
}
// Returns the native handle wrapped by <paramref name="obj"/>, or a
// null HandleRef when obj itself is null (standard SWIG marshalling helper).
internal static global::System.Runtime.InteropServices.HandleRef getCPtr(PInputVideoStream obj)
{
    if (obj == null)
    {
        return new global::System.Runtime.InteropServices.HandleRef(null, global::System.IntPtr.Zero);
    }
    return obj.swigCPtr;
}