/// <summary>
/// Constructs a PDetection from an object type, detector id, frame, feature map
/// and reliability score (SWIG-generated wrapper over the native overload 1).
/// Throws any exception raised on the native side.
/// </summary>
public PDetection(PObjectType objectType, PGuid detectorId, PFrame frame, PFeatureMap featureMap, float reliability)
    : this(PapillonPINVOKE.new_PDetection__SWIG_1(PObjectType.getCPtr(objectType), PGuid.getCPtr(detectorId), PFrame.getCPtr(frame), PFeatureMap.getCPtr(featureMap), reliability), true)
{
    // Propagate any pending native exception captured by the SWIG layer.
    if (PapillonPINVOKE.SWIGPendingException.Pending)
    {
        throw PapillonPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>
/// Constructs a PDetection from an object id, detector id, frame, object type,
/// track, feature map and confidence (SWIG-generated wrapper over the native
/// overload 2). Throws any exception raised on the native side.
/// </summary>
public PDetection(PGuid objectId, PGuid detectorId, PFrame frame, PObjectType objectType, PTrack track, PFeatureMap featureMap, float confidence)
    : this(PapillonPINVOKE.new_PDetection__SWIG_2(PGuid.getCPtr(objectId), PGuid.getCPtr(detectorId), PFrame.getCPtr(frame), PObjectType.getCPtr(objectType), PTrack.getCPtr(track), PFeatureMap.getCPtr(featureMap), confidence), true)
{
    // Propagate any pending native exception captured by the SWIG layer.
    if (PapillonPINVOKE.SWIGPendingException.Pending)
    {
        throw PapillonPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>
/// Copy constructor: creates a new PFrame from an existing one
/// (SWIG-generated wrapper). Throws any exception raised natively.
/// </summary>
public PFrame(PFrame other)
    : this(PapillonPINVOKE.new_PFrame__SWIG_4(PFrame.getCPtr(other)), true)
{
    // Propagate any pending native exception captured by the SWIG layer.
    if (PapillonPINVOKE.SWIGPendingException.Pending)
    {
        throw PapillonPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>
/// Associates the given frame with this detection (SWIG-generated wrapper).
/// Throws any exception raised on the native side.
/// </summary>
public void SetFrame(PFrame frame)
{
    PapillonPINVOKE.PDetection_SetFrame(swigCPtr, PFrame.getCPtr(frame));
    if (PapillonPINVOKE.SWIGPendingException.Pending)
    {
        throw PapillonPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>
/// Runs the analytics engine on a frame, appending any generated events to
/// <paramref name="events"/> (SWIG-generated wrapper).
/// </summary>
/// <returns>A PResult describing success or failure.</returns>
public PResult Apply(PFrame frame, PList events)
{
    PResult result = new PResult(PapillonPINVOKE.PAnalytics_Apply(swigCPtr, PFrame.getCPtr(frame), PList.getCPtr(events)), true);
    if (PapillonPINVOKE.SWIGPendingException.Pending)
    {
        throw PapillonPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>
/// Displays a feature map over a frame in a window with the given title,
/// using default rendering options (SWIG-generated wrapper).
/// </summary>
/// <returns>A PResult describing success or failure.</returns>
public static PResult DisplayFeatureMap(PFrame frame, PFeatureMap featureMap, string windowTitle)
{
    PResult result = new PResult(PapillonPINVOKE.PUtils_DisplayFeatureMap__SWIG_4(PFrame.getCPtr(frame), PFeatureMap.getCPtr(featureMap), windowTitle), true);
    if (PapillonPINVOKE.SWIGPendingException.Pending)
    {
        throw PapillonPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>
/// Displays a feature map over a frame with explicit scale, colour and
/// bounding-box-only rendering options (SWIG-generated wrapper).
/// </summary>
/// <returns>A PResult describing success or failure.</returns>
public static PResult DisplayFeatureMap(PFrame frame, PFeatureMap featureMap, string windowTitle, float scaleFactor, PColour3i colour, bool boundingBoxOnly)
{
    PResult result = new PResult(PapillonPINVOKE.PUtils_DisplayFeatureMap__SWIG_1(PFrame.getCPtr(frame), PFeatureMap.getCPtr(featureMap), windowTitle, scaleFactor, PColour3i.getCPtr(colour), boundingBoxOnly), true);
    if (PapillonPINVOKE.SWIGPendingException.Pending)
    {
        throw PapillonPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>
/// Displays the first face detection from a detection list over a frame,
/// using default rendering options (SWIG-generated wrapper).
/// </summary>
/// <returns>A PResult describing success or failure.</returns>
public static PResult DisplayFirstFaceDetection(PFrame frame, PDetectionList detectionList, string windowTitle)
{
    PResult result = new PResult(PapillonPINVOKE.PUtils_DisplayFirstFaceDetection__SWIG_3(PFrame.getCPtr(frame), PDetectionList.getCPtr(detectionList), windowTitle), true);
    if (PapillonPINVOKE.SWIGPendingException.Pending)
    {
        throw PapillonPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>
/// Displays the first face detection from a detection list over a frame with
/// explicit colour, bounding-box-only and line-width options
/// (SWIG-generated wrapper).
/// </summary>
/// <returns>A PResult describing success or failure.</returns>
public static PResult DisplayFirstFaceDetection(PFrame frame, PDetectionList detectionList, string windowTitle, PColour3i colour, bool boundingBoxOnly, int lineWidth)
{
    PResult result = new PResult(PapillonPINVOKE.PUtils_DisplayFirstFaceDetection__SWIG_0(PFrame.getCPtr(frame), PDetectionList.getCPtr(detectionList), windowTitle, PColour3i.getCPtr(colour), boundingBoxOnly, lineWidth), true);
    if (PapillonPINVOKE.SWIGPendingException.Pending)
    {
        throw PapillonPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>
/// Writes a frame to this output video stream (SWIG-generated wrapper).
/// </summary>
/// <returns>A PResult describing success or failure.</returns>
public PResult PutFrame(PFrame frame)
{
    PResult result = new PResult(PapillonPINVOKE.POutputVideoStream_PutFrame__SWIG_1(swigCPtr, PFrame.getCPtr(frame)), true);
    if (PapillonPINVOKE.SWIGPendingException.Pending)
    {
        throw PapillonPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>
/// Displays a full detection list over a frame with explicit scale, colour,
/// id-drawing and bounding-box-only options (SWIG-generated wrapper).
/// </summary>
/// <returns>A PResult describing success or failure.</returns>
public static PResult DisplayDetectionList(PFrame frame, PDetectionList detectionList, string windowTitle, float scaleFactor, PColour3i colour, bool drawId, bool boundingBoxOnly)
{
    PResult result = new PResult(PapillonPINVOKE.PUtils_DisplayDetectionList__SWIG_2(PFrame.getCPtr(frame), PDetectionList.getCPtr(detectionList), windowTitle, scaleFactor, PColour3i.getCPtr(colour), drawId, boundingBoxOnly), true);
    if (PapillonPINVOKE.SWIGPendingException.Pending)
    {
        throw PapillonPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>
/// Applies this image filter to a frame in place (SWIG-generated wrapper).
/// </summary>
/// <returns>A PResult describing success or failure.</returns>
public PResult Apply(PFrame frame)
{
    PResult result = new PResult(PapillonPINVOKE.PImageFilter_Apply__SWIG_1(swigCPtr, PFrame.getCPtr(frame)), true);
    if (PapillonPINVOKE.SWIGPendingException.Pending)
    {
        throw PapillonPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>
/// Builds the main form: fixes the window size to 920x680 and docks a single
/// PFrame control filling the client area.
/// </summary>
public Form1()
{
    InitializeComponent();
    Width = 920;
    Height = 680;

    // The PFrame control is the only child and fills the whole form.
    PFrame pFrame = new PFrame();
    pFrame.Dock = DockStyle.Fill;
    Controls.Add(pFrame);
}
/// <summary>
/// Displays a full detection list over a frame at the given scale, using
/// default colour/id options (SWIG-generated wrapper).
/// </summary>
/// <returns>A PResult describing success or failure.</returns>
public static PResult DisplayDetectionList(PFrame frame, PDetectionList detectionList, string windowTitle, float scaleFactor)
{
    PResult result = new PResult(PapillonPINVOKE.PUtils_DisplayDetectionList__SWIG_5(PFrame.getCPtr(frame), PDetectionList.getCPtr(detectionList), windowTitle, scaleFactor), true);
    if (PapillonPINVOKE.SWIGPendingException.Pending)
    {
        throw PapillonPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>
/// Runs this detector on a frame, writing results into
/// <paramref name="detectionList"/> (SWIG-generated wrapper).
/// </summary>
/// <returns>A PResult describing success or failure.</returns>
public PResult Detect(PFrame frame, PDetectionList detectionList)
{
    PResult result = new PResult(PapillonPINVOKE.PDetector_Detect(swigCPtr, PFrame.getCPtr(frame), PDetectionList.getCPtr(detectionList)), true);
    if (PapillonPINVOKE.SWIGPendingException.Pending)
    {
        throw PapillonPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>
/// Reads the next frame from this input video stream into
/// <paramref name="frame"/>, waiting up to <paramref name="timeOutMs"/>
/// milliseconds (SWIG-generated wrapper).
/// </summary>
/// <returns>A PResult describing success or failure.</returns>
public PResult GetFrame(PFrame frame, int timeOutMs)
{
    PResult result = new PResult(PapillonPINVOKE.PInputVideoStream_GetFrame__SWIG_0(swigCPtr, PFrame.getCPtr(frame), timeOutMs), true);
    if (PapillonPINVOKE.SWIGPendingException.Pending)
    {
        throw PapillonPINVOKE.SWIGPendingException.Retrieve();
    }
    return result;
}
/// <summary>
/// Face-detection sample: opens the bundled sample video, runs a face
/// detector on every frame and displays the detections until the stream
/// ends or the Esc key is pressed.
/// </summary>
static void Main(string[] args)
{
    PLog.OpenConsoleLogger();
    PapillonSDK.Initialise();

    string SAMPLE_DIR = PPath.Join(PUtils.GetEnv("PAPILLON_INSTALL_DIR"), "Data", "Samples");
    string TITLE = "Papillon SDK - Face Detection Example";
    bool ENABLE_FACE_LOCALISER = true;

    // 1. Open the video stream. Alternative sources kept for reference:
    //    - youtube (VLC plugin required): "https://youtu.be/Wi7k6IPYNj4?decode_with=vlc"
    //    - webcam: "device:0"
    PInputVideoStream ivs = new PInputVideoStream();
    PInputVideoStream.Open(PPath.Join(SAMPLE_DIR, "face_log.avi"), ivs).OrDie(); // video file

    // 2. Create and configure the face detector.
    PDetector faceDetector = new PDetector();
    PProperties parameters = new PProperties();
    PDetector.Create("FaceDetector2", parameters, faceDetector).OrDie();
    faceDetector.EnableLocaliser(ENABLE_FACE_LOCALISER);
    faceDetector.SetMinDetectionSize(80);

    // 3. Apply the detector to each frame until the stream ends or Esc is hit.
    PFrame frame = new PFrame();
    while (ivs.GetFrame(frame).Ok() && !PUtils.IsEscPressed())
    {
        PDetectionList listFaces = new PDetectionList();
        faceDetector.Detect(frame, listFaces);
        PUtils.DisplayDetectionList(frame, listFaces, TITLE);
    }
}
/// <summary>
/// SDK walkthrough demo:
///  1. load an image from disk and display it,
///  2. build an image from a raw pixel buffer and display it,
///  3. stream from the local webcam: display each frame, record it to an AVI
///     file, optionally stabilise it (when the "Stabilisation" plugin is
///     available), and run face detection on it.
/// </summary>
static void Main(string[] args)
{
    // Initialise the Papillon SDK with a console logger.
    PLog.OpenConsoleLogger();
    PapillonSDK.Initialise("PapillonCSharpDemo");
    string SAMPLE_DIR = PPath.Join(PUtils.GetEnv("PAPILLON_INSTALL_DIR"), "Data", "Samples");

    // Load an image from disk (can be JPEG, TIF, BMP or PNG) and display it.
    PImage image = new PImage();
    image.Load(PPath.Join(SAMPLE_DIR, "sample.jpg")).OrDie();
    image.Display("MySampleImage");

    // Build an image from a raw 3-bytes-per-pixel buffer and display it.
    int width = 600;
    int height = 400;
    int depth = 3;
    var buf = new byte[width * height * depth];
    for (int i = 0; i < buf.Length; i += depth)
    {
        // Constant pink in BGR order (format below is E_BGR8U).
        buf[i] = 127;
        buf[i + 1] = 0;
        buf[i + 2] = 255;
    }
    PImage image2 = new PImage();
    image2.Set(buf, width, height, PImage.EPixelFormat.E_BGR8U);
    image2.Display("Pink");

    // Open the webcam for input and an AVI file for output.
    PInputVideoStream inputStream = new PInputVideoStream();
    PInputVideoStream.Open("device:0", inputStream).OrDie();
    POutputVideoStream outputStream = new POutputVideoStream();
    POutputVideoStream.Open("webcam.avi?fps=20.0", outputStream).OrDie();

    // Stabilisation is best-effort: only enabled when the plugin is available.
    bool isStabilisationEnabled = false;
    PImage stabilisedImage = new PImage();
    PImageFilterOptions stabilisationOptions = new PImageFilterOptions();
    PImageFilter stabilisationFilter = new PImageFilter();
    if (PImageFilter.Create("Stabilisation", "", stabilisationFilter).Ok())
    {
        stabilisationOptions.SetDoubleParameter("motion", 0.5);
        stabilisationOptions.SetIntParameter("panning", 0);
        isStabilisationEnabled = true;
    }

    // Face detector configured without a localiser and a 60px minimum size.
    PFrame frame = new PFrame();
    PDetector faceDetector = new PDetector();
    PDetector.Create("FaceDetector", "", faceDetector).OrDie();
    PDetectorOptions detectorOptions = new PDetectorOptions();
    detectorOptions.SetIntParameter("LOCALISER", 0); // no localiser
    detectorOptions.SetMinDetectionSize(60);
    PDetectionList detectionList = new PDetectionList();

    // Main loop: display, record, optionally stabilise, then detect faces.
    while (inputStream.GetFrame(frame).Ok())
    {
        frame.Display("Web-cam image", 50);
        outputStream.PutImage(frame.GetImage());
        if (isStabilisationEnabled)
        {
            stabilisationFilter.Apply(frame.GetImage(), stabilisationOptions, stabilisedImage);
            stabilisedImage.Display("Stabilised stream");
        }
        faceDetector.Detect(frame, detectorOptions, detectionList);
        PUtils.DisplayDetectionList(frame, detectionList, "Face Detector");
    }
}
/// <summary>
/// SWIG helper: returns the native handle wrapped by <paramref name="obj"/>,
/// or a HandleRef around IntPtr.Zero when <paramref name="obj"/> is null.
/// </summary>
internal static global::System.Runtime.InteropServices.HandleRef getCPtr(PFrame obj)
{
    if (obj == null)
    {
        return new global::System.Runtime.InteropServices.HandleRef(null, global::System.IntPtr.Zero);
    }
    return obj.swigCPtr;
}
/// <summary>
/// Loads a saved mpeg byte array back into frame form.
/// Payload layout (after run-length decompression): [width:int][height:int]
/// followed by frame data — one full I-frame (three raw Y/Cr/Cb planes) every
/// Compression.I_FRAME_RANGE frames, and P-frames (three motion-vector planes
/// then three diff-block planes) in between.
/// </summary>
/// <param name="savedArray">saved mpeg byte array</param>
/// <returns>MPEGInfo of the saved mpeg images</returns>
public static MPEGInfo loadByteArrayMPEG(byte[] savedArray)
{
    savedArray = RLCompression.ModifiedRunLengthDecompress(savedArray);

    // Header: original width and height as two serialized ints.
    byte[] widthByteArray = new byte[Compression.intToByteSize];
    byte[] heightByteArray = new byte[Compression.intToByteSize];
    int offset = 0;
    System.Buffer.BlockCopy(savedArray, 0, widthByteArray, 0, Compression.intToByteSize);
    offset += Compression.intToByteSize;
    System.Buffer.BlockCopy(savedArray, offset, heightByteArray, 0, Compression.intToByteSize);
    offset += Compression.intToByteSize;
    int originalWidth = BitConverter.ToInt32(widthByteArray, 0);
    int originalHeight = BitConverter.ToInt32(heightByteArray, 0);

    // Recompute padded plane dimensions. I-frame planes are padded up to the
    // DCT block size; P-frame planes are padded up to the macroblock sizes.
    // Chroma (Cr/Cb) planes are half-resolution in each direction.
    int iFrameYWidth = padToMultiple(originalWidth, Compression.DCT_BLOCK_SIZE);
    int iFrameYHeight = padToMultiple(originalHeight, Compression.DCT_BLOCK_SIZE);
    int iFrameCrCbWidth = padToMultiple((int)Math.Ceiling(originalWidth / 2.0), Compression.DCT_BLOCK_SIZE);
    int iFrameCrCbHeight = padToMultiple((int)Math.Ceiling(originalHeight / 2.0), Compression.DCT_BLOCK_SIZE);
    int pFrameYWidth = padToMultiple(originalWidth, Compression.macroSizeY);
    int pFrameYHeight = padToMultiple(originalHeight, Compression.macroSizeY);
    int pFrameCrCbWidth = padToMultiple((int)Math.Ceiling(originalWidth / 2.0), Compression.macroSizeCrCb);
    int pFrameCrCbHeight = padToMultiple((int)Math.Ceiling(originalHeight / 2.0), Compression.macroSizeCrCb);

    // Per-frame byte budgets used to count how many frames the payload holds.
    int motionVectorsSize = pFrameYWidth / Compression.macroSizeY * pFrameYHeight / Compression.macroSizeY;
    int iYCrCbSize = iFrameYWidth * iFrameYHeight + iFrameCrCbHeight * iFrameCrCbWidth * 2;
    int pYCrCbSize = pFrameYWidth * pFrameYHeight + pFrameCrCbHeight * pFrameCrCbWidth * 2;
    int pMVSize = motionVectorsSize * 6; // 3 planes x 2 bytes per vector entry

    // Count I- and P-frames in the remaining payload.
    // BUG FIX: this loop used a hard-coded "% 10" while the decode loop below
    // uses Compression.I_FRAME_RANGE — the two moduli must agree or the frame
    // counts (and therefore array sizes) are wrong whenever I_FRAME_RANGE != 10.
    int numOfFrames = 0;
    int numOfIFrames = 0;
    int numOfPFrames = 0;
    int currentRemaining = savedArray.Length - Compression.intToByteSize * 2;
    while (currentRemaining > 0)
    {
        if (numOfFrames % Compression.I_FRAME_RANGE == 0)
        {
            currentRemaining -= iYCrCbSize;
            numOfIFrames++;
        }
        else
        {
            currentRemaining -= (pYCrCbSize + pMVSize);
            numOfPFrames++;
        }
        numOfFrames++;
    }

    YCrCb[] iFrames = new YCrCb[numOfIFrames];
    PFrame[] pFrames = new PFrame[numOfPFrames];
    int pFrameIndex = 0;
    for (int i = 0; i < numOfFrames; i++)
    {
        if (i % Compression.I_FRAME_RANGE == 0)
        {
            // I-frame: three raw planes copied straight out of the payload.
            byte[] iFrameY = new byte[iFrameYHeight * iFrameYWidth];
            byte[] iFrameCr = new byte[iFrameCrCbHeight * iFrameCrCbWidth];
            byte[] iFrameCb = new byte[iFrameCr.Length];
            System.Buffer.BlockCopy(savedArray, offset, iFrameY, 0, iFrameY.Length);
            offset += iFrameY.Length;
            System.Buffer.BlockCopy(savedArray, offset, iFrameCr, 0, iFrameCr.Length);
            offset += iFrameCr.Length;
            System.Buffer.BlockCopy(savedArray, offset, iFrameCb, 0, iFrameCb.Length);
            offset += iFrameCb.Length;
            iFrames[i / Compression.I_FRAME_RANGE] = new YCrCb(iFrameY, iFrameCr, iFrameCb, iFrameYHeight, iFrameYWidth, iFrameCrCbHeight, iFrameCrCbWidth);
        }
        else
        {
            // P-frame: three motion-vector planes followed by three diff-block planes.
            byte[] motionVectorsYByte = new byte[motionVectorsSize * 2];
            byte[] motionVectorsCrByte = new byte[motionVectorsSize * 2];
            byte[] motionVectorsCbByte = new byte[motionVectorsSize * 2];
            byte[] pFrameY = new byte[pFrameYHeight * pFrameYWidth];
            byte[] pFrameCr = new byte[pFrameCrCbHeight * pFrameCrCbWidth];
            byte[] pFrameCb = new byte[pFrameCr.Length];
            System.Buffer.BlockCopy(savedArray, offset, motionVectorsYByte, 0, motionVectorsSize * 2);
            offset += motionVectorsSize * 2;
            System.Buffer.BlockCopy(savedArray, offset, motionVectorsCrByte, 0, motionVectorsSize * 2);
            offset += motionVectorsSize * 2;
            System.Buffer.BlockCopy(savedArray, offset, motionVectorsCbByte, 0, motionVectorsSize * 2);
            offset += motionVectorsSize * 2;
            System.Buffer.BlockCopy(savedArray, offset, pFrameY, 0, pFrameY.Length);
            offset += pFrameY.Length;
            System.Buffer.BlockCopy(savedArray, offset, pFrameCr, 0, pFrameCr.Length);
            offset += pFrameCr.Length;
            System.Buffer.BlockCopy(savedArray, offset, pFrameCb, 0, pFrameCb.Length);
            offset += pFrameCb.Length;
            YCrCb diffBlock = new YCrCb(pFrameY, pFrameCr, pFrameCb, pFrameYHeight, pFrameYWidth, pFrameCrCbHeight, pFrameCrCbWidth);
            pFrames[pFrameIndex++] = new PFrame(diffBlock,
                ArrayTransform.convertToVectorFromByte(motionVectorsYByte),
                ArrayTransform.convertToVectorFromByte(motionVectorsCrByte),
                ArrayTransform.convertToVectorFromByte(motionVectorsCbByte));
        }
    }
    return new MPEGInfo(originalWidth, originalHeight, iFrames, pFrames);
}

/// <summary>
/// Rounds <paramref name="value"/> up to the next multiple of
/// <paramref name="blockSize"/> (returns it unchanged when already aligned).
/// </summary>
private static int padToMultiple(int value, int blockSize)
{
    int remainder = value % blockSize;
    return remainder == 0 ? value : value + blockSize - remainder;
}
/// <summary>
/// Prepares and returns the save byte array for custom mpeg compressed images.
/// Layout: [width:int][height:int] then, per frame, either the three raw
/// I-frame planes (every Compression.I_FRAME_RANGE frames) or the three
/// motion-vector planes followed by the three diff-block planes; the whole
/// buffer is then run-length compressed.
/// </summary>
/// <param name="mpegSaveInfo">mpeg save info</param>
/// <returns>saved byte array</returns>
public static byte[] saveIntoByteArray(MPEGInfo mpegSaveInfo)
{
    byte[] widthByteArray = BitConverter.GetBytes(mpegSaveInfo.originalWidth);
    byte[] heightByteArray = BitConverter.GetBytes(mpegSaveInfo.originalHeight);
    YCrCb[] iFrames = mpegSaveInfo.iFrames;
    PFrame[] pFrames = mpegSaveInfo.pFrames;

    // BUG FIX: guard the [0] size probes — a clip short enough to contain no
    // P-frames (or no I-frames) previously threw IndexOutOfRangeException here.
    // NOTE(review): sizing assumes Cr and Cb planes have equal length (Cr * 2)
    // and each motion-vector entry serializes to 2 bytes — matches the copy
    // loop below; confirm against ArrayTransform.convertToByteFromVector.
    int iFramesSpace = iFrames.Length == 0
        ? 0
        : iFrames.Length * (iFrames[0].Y.Length + iFrames[0].Cr.Length * 2);
    int pFramesSpace = pFrames.Length == 0
        ? 0
        : pFrames.Length * (pFrames[0].MotionVectorsY.Length * 6 + pFrames[0].DiffBlock.Y.Length + pFrames[0].DiffBlock.Cr.Length * 2);
    byte[] compressedByteArray = new byte[widthByteArray.Length + heightByteArray.Length + iFramesSpace + pFramesSpace];

    // Header: width then height.
    int offset = 0;
    int numOfFrame = iFrames.Length + pFrames.Length;
    System.Buffer.BlockCopy(widthByteArray, 0, compressedByteArray, 0, widthByteArray.Length);
    offset += widthByteArray.Length;
    System.Buffer.BlockCopy(heightByteArray, 0, compressedByteArray, offset, heightByteArray.Length);
    offset += heightByteArray.Length;

    int pFrameIndex = 0;
    for (int i = 0; i < numOfFrame; i++)
    {
        if (i % Compression.I_FRAME_RANGE == 0)
        {
            // I-frame: write the three raw planes back to back.
            YCrCb frame = iFrames[i / Compression.I_FRAME_RANGE];
            System.Buffer.BlockCopy(frame.Y, 0, compressedByteArray, offset, frame.Y.Length);
            offset += frame.Y.Length;
            System.Buffer.BlockCopy(frame.Cr, 0, compressedByteArray, offset, frame.Cr.Length);
            offset += frame.Cr.Length;
            System.Buffer.BlockCopy(frame.Cb, 0, compressedByteArray, offset, frame.Cb.Length);
            offset += frame.Cb.Length;
        }
        else
        {
            // P-frame: motion vectors (Y, Cr, Cb) then diff-block planes.
            PFrame frame = pFrames[pFrameIndex];
            System.Buffer.BlockCopy(ArrayTransform.convertToByteFromVector(frame.MotionVectorsY), 0, compressedByteArray, offset, frame.MotionVectorsY.Length * 2);
            offset += frame.MotionVectorsY.Length * 2;
            System.Buffer.BlockCopy(ArrayTransform.convertToByteFromVector(frame.MotionVectorsCr), 0, compressedByteArray, offset, frame.MotionVectorsCr.Length * 2);
            offset += frame.MotionVectorsCr.Length * 2;
            System.Buffer.BlockCopy(ArrayTransform.convertToByteFromVector(frame.MotionVectorsCb), 0, compressedByteArray, offset, frame.MotionVectorsCb.Length * 2);
            offset += frame.MotionVectorsCb.Length * 2;
            System.Buffer.BlockCopy(frame.DiffBlock.Y, 0, compressedByteArray, offset, frame.DiffBlock.Y.Length);
            offset += frame.DiffBlock.Y.Length;
            System.Buffer.BlockCopy(frame.DiffBlock.Cr, 0, compressedByteArray, offset, frame.DiffBlock.Cr.Length);
            offset += frame.DiffBlock.Cr.Length;
            System.Buffer.BlockCopy(frame.DiffBlock.Cb, 0, compressedByteArray, offset, frame.DiffBlock.Cb.Length);
            offset += frame.DiffBlock.Cb.Length;
            pFrameIndex++;
        }
    }

    compressedByteArray = RLCompression.ModifiedRunLengthCompression(compressedByteArray);
    return compressedByteArray;
}
/// <summary>
/// Returns the frame associated with this detection (SWIG-generated wrapper;
/// the 'false' ownership flag follows the SWIG convention that the C# side
/// does not own the returned native object).
/// </summary>
public PFrame GetFrame()
{
    PFrame result = new PFrame(PapillonPINVOKE.PDetection_GetFrame(swigCPtr), false);
    return result;
}