Example #1
 public PDetectionList(PDetectionList other) : this(PapillonPINVOKE.new_PDetectionList__SWIG_2(PDetectionList.getCPtr(other)), true)
 {
     if (PapillonPINVOKE.SWIGPendingException.Pending)
     {
         throw PapillonPINVOKE.SWIGPendingException.Retrieve();
     }
 }
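For reference, a minimal sketch of how this copy constructor might be used; the variable names are illustrative only, and the list would normally be populated by a detector first:

 PDetectionList detections = new PDetectionList();
 // ... fill detections, e.g. via PDetector.Detect (see Example 3) ...
 PDetectionList snapshot = new PDetectionList(detections); // copies the list through the native SWIG constructor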
Example #2
 public void Add(PDetectionList d)
 {
     PapillonPINVOKE.PExampleSet_Add__SWIG_1(swigCPtr, PDetectionList.getCPtr(d));
     if (PapillonPINVOKE.SWIGPendingException.Pending)
     {
         throw PapillonPINVOKE.SWIGPendingException.Retrieve();
     }
 }
Example #3
    public PResult Detect(PFrame frame, PDetectionList detectionList)
    {
        PResult ret = new PResult(PapillonPINVOKE.PDetector_Detect(swigCPtr, PFrame.getCPtr(frame), PDetectionList.getCPtr(detectionList)), true);

        if (PapillonPINVOKE.SWIGPendingException.Pending)
        {
            throw PapillonPINVOKE.SWIGPendingException.Retrieve();
        }
        return(ret);
    }
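For context, a hedged sketch of driving Detect from a video stream and checking its PResult. It uses only calls that appear elsewhere on this page (chiefly Example 13); the input file name "myvideo.avi" is a placeholder.

    PDetector faceDetector = new PDetector();
    PDetector.Create("FaceDetector2", new PProperties(), faceDetector).OrDie();
    faceDetector.SetMinDetectionSize(80);

    PInputVideoStream ivs = new PInputVideoStream();
    PInputVideoStream.Open("myvideo.avi", ivs).OrDie();

    PFrame frame = new PFrame();
    while (ivs.GetFrame(frame).Ok() && !PUtils.IsEscPressed())
    {
        PDetectionList detections = new PDetectionList();
        PResult result = faceDetector.Detect(frame, detections);
        if (!result.Ok())
        {
            break; // stop on the first detection failure
        }
        PUtils.DisplayDetectionList(frame, detections, "Detections");
    }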
Example #4
    public PResult DescribeBatch(PDetectionList batchDetections, SWIGTYPE_p_papillon__PDescriptionList batchDescription)
    {
        PResult ret = new PResult(PapillonPINVOKE.PDescriber_DescribeBatch(swigCPtr, PDetectionList.getCPtr(batchDetections), SWIGTYPE_p_papillon__PDescriptionList.getCPtr(batchDescription)), true);

        if (PapillonPINVOKE.SWIGPendingException.Pending)
        {
            throw PapillonPINVOKE.SWIGPendingException.Retrieve();
        }
        return(ret);
    }
Example #5
    public static PResult DisplayFirstFaceDetection(PFrame frame, PDetectionList detectionList, string windowTitle)
    {
        PResult ret = new PResult(PapillonPINVOKE.PUtils_DisplayFirstFaceDetection__SWIG_3(PFrame.getCPtr(frame), PDetectionList.getCPtr(detectionList), windowTitle), true);

        if (PapillonPINVOKE.SWIGPendingException.Pending)
        {
            throw PapillonPINVOKE.SWIGPendingException.Retrieve();
        }
        return(ret);
    }
Example #6
    public virtual PResult ApplyBatch(PDetectionList detectionList, SWIGTYPE_p_papillon__PDescriptionList outputDescription)
    {
        PResult ret = new PResult(PapillonPINVOKE.PDescriberInterface_ApplyBatch(swigCPtr, PDetectionList.getCPtr(detectionList), SWIGTYPE_p_papillon__PDescriptionList.getCPtr(outputDescription)), true);

        if (PapillonPINVOKE.SWIGPendingException.Pending)
        {
            throw PapillonPINVOKE.SWIGPendingException.Retrieve();
        }
        return(ret);
    }
Example #7
    public static PResult BlurDetectionList(PImage image, PDetectionList detectionList, PColour3i color, int lineWidth)
    {
        PResult ret = new PResult(PapillonPINVOKE.PUtils_BlurDetectionList(PImage.getCPtr(image), PDetectionList.getCPtr(detectionList), PColour3i.getCPtr(color), lineWidth), true);

        if (PapillonPINVOKE.SWIGPendingException.Pending)
        {
            throw PapillonPINVOKE.SWIGPendingException.Retrieve();
        }
        return(ret);
    }
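A possible privacy-masking loop built around BlurDetectionList, assembled only from calls shown in Examples 13 and 14; the output file name and the PColour3i(0, 0, 0) constructor are assumptions, since no colour construction appears on this page.

    PInputVideoStream ivs = new PInputVideoStream();
    PInputVideoStream.Open("device:0", ivs).OrDie(); // webcam

    POutputVideoStream ovs = new POutputVideoStream();
    POutputVideoStream.Open("blurred.avi?fps=20.0", ovs).OrDie();

    PDetector faceDetector = new PDetector();
    PDetector.Create("FaceDetector", "", faceDetector).OrDie();

    PFrame frame = new PFrame();
    PDetectionList detections = new PDetectionList();

    while (ivs.GetFrame(frame).Ok() && !PUtils.IsEscPressed())
    {
        faceDetector.Detect(frame, detections);
        // blur every detected region in the frame's image before writing it out
        PUtils.BlurDetectionList(frame.GetImage(), detections, new PColour3i(0, 0, 0), 2);
        ovs.PutImage(frame.GetImage());
        frame.GetImage().Display("Blurred faces");
    }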
Example #8
    public static PResult DisplayFirstFaceDetection(PFrame frame, PDetectionList detectionList, string windowTitle, PColour3i colour, bool boundingBoxOnly, int lineWidth)
    {
        PResult ret = new PResult(PapillonPINVOKE.PUtils_DisplayFirstFaceDetection__SWIG_0(PFrame.getCPtr(frame), PDetectionList.getCPtr(detectionList), windowTitle, PColour3i.getCPtr(colour), boundingBoxOnly, lineWidth), true);

        if (PapillonPINVOKE.SWIGPendingException.Pending)
        {
            throw PapillonPINVOKE.SWIGPendingException.Retrieve();
        }
        return(ret);
    }
Example #9
    public static PResult DisplayDetectionList(PFrame frame, PDetectionList detectionList, string windowTitle, float scaleFactor)
    {
        PResult ret = new PResult(PapillonPINVOKE.PUtils_DisplayDetectionList__SWIG_5(PFrame.getCPtr(frame), PDetectionList.getCPtr(detectionList), windowTitle, scaleFactor), true);

        if (PapillonPINVOKE.SWIGPendingException.Pending)
        {
            throw PapillonPINVOKE.SWIGPendingException.Retrieve();
        }
        return(ret);
    }
Example #10
    public static PResult DisplayDetectionList(PFrame frame, PDetectionList detectionList, string windowTitle, float scaleFactor, PColour3i colour, bool drawId, bool boundingBoxOnly)
    {
        PResult ret = new PResult(PapillonPINVOKE.PUtils_DisplayDetectionList__SWIG_2(PFrame.getCPtr(frame), PDetectionList.getCPtr(detectionList), windowTitle, scaleFactor, PColour3i.getCPtr(colour), drawId, boundingBoxOnly), true);

        if (PapillonPINVOKE.SWIGPendingException.Pending)
        {
            throw PapillonPINVOKE.SWIGPendingException.Retrieve();
        }
        return(ret);
    }
Example #11
    public static PResult DrawDetectionList(PImage image, PDetectionList detectionList)
    {
        PResult ret = new PResult(PapillonPINVOKE.PUtils_DrawDetectionList__SWIG_4(PImage.getCPtr(image), PDetectionList.getCPtr(detectionList)), true);

        if (PapillonPINVOKE.SWIGPendingException.Pending)
        {
            throw PapillonPINVOKE.SWIGPendingException.Retrieve();
        }
        return(ret);
    }
Example #12
    public static PResult DrawDetectionList(PImage image, PDetectionList detectionList, PColour3i colour, bool drawId)
    {
        PResult ret = new PResult(PapillonPINVOKE.PUtils_DrawDetectionList__SWIG_2(PImage.getCPtr(image), PDetectionList.getCPtr(detectionList), PColour3i.getCPtr(colour), drawId), true);

        if (PapillonPINVOKE.SWIGPendingException.Pending)
        {
            throw PapillonPINVOKE.SWIGPendingException.Retrieve();
        }
        return(ret);
    }
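In the same spirit, a hedged sketch of drawing detection overlays onto each frame's image and recording the annotated video; "annotated.avi" and PColour3i(0, 255, 0) are illustrative values only, while the remaining calls mirror Examples 12 to 14.

    PInputVideoStream ivs = new PInputVideoStream();
    PInputVideoStream.Open("device:0", ivs).OrDie();

    POutputVideoStream ovs = new POutputVideoStream();
    POutputVideoStream.Open("annotated.avi?fps=20.0", ovs).OrDie();

    PDetector faceDetector = new PDetector();
    PDetector.Create("FaceDetector", "", faceDetector).OrDie();

    PFrame frame = new PFrame();
    while (ivs.GetFrame(frame).Ok() && !PUtils.IsEscPressed())
    {
        PDetectionList detections = new PDetectionList();
        faceDetector.Detect(frame, detections);
        // draw bounding boxes and detection ids in the chosen colour
        PUtils.DrawDetectionList(frame.GetImage(), detections, new PColour3i(0, 255, 0), true);
        ovs.PutImage(frame.GetImage());
    }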
Example #13
        static void Main(string[] args)
        {
            PLog.OpenConsoleLogger();
            PapillonSDK.Initialise();
            string SAMPLE_DIR            = PPath.Join(PUtils.GetEnv("PAPILLON_INSTALL_DIR"), "Data", "Samples");
            string TITLE                 = "Papillon SDK - Face Detection Example";
            bool   ENABLE_FACE_LOCALISER = true;


            // ************************************************************************
            // 1. Open Video Stream
            // ************************************************************************
            PInputVideoStream ivs = new PInputVideoStream();

            PInputVideoStream.Open(PPath.Join(SAMPLE_DIR, "face_log.avi"), ivs).OrDie();           // video file
            //PInputVideoStream.Open("https://youtu.be/Wi7k6IPYNj4?decode_with=vlc", ivs).OrDie(); // youtube stream (VLC plugin required)
            //PInputVideoStream.Open("device:0", ivs).OrDie();                                      // webcam

            // ************************************************************************
            // 2. Create and configure Face Detector
            // ************************************************************************
            PDetector   faceDetector = new PDetector();
            PProperties parameters   = new PProperties();

            PDetector.Create("FaceDetector2", parameters, faceDetector).OrDie();
            faceDetector.EnableLocaliser(ENABLE_FACE_LOCALISER);
            faceDetector.SetMinDetectionSize(80);

            // ************************************************************************
            // 3. Apply Face Detector on each frame of the video stream
            // ************************************************************************
            PFrame frame = new PFrame();

            while (ivs.GetFrame(frame).Ok() && !PUtils.IsEscPressed())
            {
                PDetectionList listFaces = new PDetectionList();
                faceDetector.Detect(frame, listFaces);
                PUtils.DisplayDetectionList(frame, listFaces, TITLE);
            }
        }
Example #14
        static void Main(string[] args) {

           // **********************
           // initialise Papillon SDK
           // **********************
           PLog.OpenConsoleLogger();
           PapillonSDK.Initialise("PapillonCSharpDemo");
           string SAMPLE_DIR = PPath.Join(PUtils.GetEnv("PAPILLON_INSTALL_DIR"), "Data", "Samples");

           // *********************************************************************
           // load an image from disk (can be JPEG, TIF, BMP or PNG) and display it
           // *********************************************************************
           PImage image = new PImage();
           image.Load(PPath.Join(SAMPLE_DIR, "sample.jpg")).OrDie();
           image.Display("MySampleImage");

           // **********************************************************
           // set an image from a pixel buffer (a byte[]) and display it
           //***********************************************************
           int width = 600;
           int height = 400;
           int depth = 3;

           var buf = new byte[width * height * depth];

           for (int i = 0; i < buf.Length; i+=depth) {
               buf[i] = 127;
               buf[i + 1] = 0;
               buf[i + 2] = 255;
           }

           PImage image2 = new PImage();
           image2.Set(buf, width, height, PImage.EPixelFormat.E_BGR8U);
           image2.Display("Pink");

           // **********************************************************
           // open a video stream from local webcam, then
           // 1. display the video stream
           // 2. write it on the disk (AVI file)
           // 3. stabilise the video stream (if the "stabilisation" plugin is available)
           // 4. perform face detection on the video stream
           //***********************************************************
           PInputVideoStream inputStream = new PInputVideoStream();
           PInputVideoStream.Open("device:0", inputStream).OrDie();

           POutputVideoStream outputStream = new POutputVideoStream();
           POutputVideoStream.Open("webcam.avi?fps=20.0", outputStream).OrDie();

           bool isStabilisationEnabled = false;
           PImage stabilisedImage = new PImage();
           PImageFilterOptions stabilisationOptions = new PImageFilterOptions();
           PImageFilter stabilisationFilter = new PImageFilter();

           if (PImageFilter.Create("Stabilisation", "", stabilisationFilter).Ok()) {
               stabilisationOptions.SetDoubleParameter("motion", 0.5);
               stabilisationOptions.SetIntParameter("panning", 0);
               isStabilisationEnabled = true;
           }

           PFrame frame = new PFrame();

           PDetector faceDetector = new PDetector();
           PDetector.Create("FaceDetector", "", faceDetector).OrDie();

           PDetectorOptions detectorOptions = new PDetectorOptions();
           detectorOptions.SetIntParameter("LOCALISER", 0); // no localiser
           detectorOptions.SetMinDetectionSize(60);

           PDetectionList detectionList = new PDetectionList();

           while (inputStream.GetFrame(frame).Ok()) {
               frame.Display("Web-cam image", 50);
               outputStream.PutImage(frame.GetImage());

               if (isStabilisationEnabled) {
                   stabilisationFilter.Apply(frame.GetImage(), stabilisationOptions, stabilisedImage);
                   stabilisedImage.Display("Stabilised stream");
               }

               faceDetector.Detect(frame, detectorOptions, detectionList);
               PUtils.DisplayDetectionList(frame, detectionList, "Face Detector");
           }
        }
Example #15
 internal static global::System.Runtime.InteropServices.HandleRef getCPtr(PDetectionList obj)
 {
     return((obj == null) ? new global::System.Runtime.InteropServices.HandleRef(null, global::System.IntPtr.Zero) : obj.swigCPtr);
 }