public override void ProcessFrame(byte[] rgba, int w, int h, Frame.Orientation orientation, float timestamp)
 {
     IntPtr addr = IntPtr.Zero;
     try
     {
         // Copy the managed RGBA buffer into unmanaged memory for the native call.
         addr = Marshal.AllocHGlobal(rgba.Length);
         Marshal.Copy(rgba, 0, addr, rgba.Length);
         affdexProcessFrame(nativeHandle, addr, w, h, (int)orientation, timestamp);
     }
     catch (Exception e)
     {
         Debug.LogError(e.Message + " " + e.StackTrace);
     }
     finally
     {
         // Free the unmanaged buffer even if the native call throws.
         if (addr != IntPtr.Zero)
         {
             Marshal.FreeHGlobal(addr);
         }
     }
 }
Example #2
 // Sample an individual frame from the webcam and send to detector for processing.
 public void ProcessFrame()
 {
     if (cameraTexture != null)
     {
         if (detector.IsRunning)
         {
             if (cameraTexture.isPlaying)
             {
                 Frame.Orientation orientation = Frame.Orientation.Upright;
                 Frame             frame       = new Frame(cameraTexture.GetPixels32(), cameraTexture.width, cameraTexture.height, orientation, Time.realtimeSinceStartup);
                 detector.ProcessFrame(frame);
             }
         }
     }
 }
        public override void ProcessFrame(byte[] rgba, int w, int h, Frame.Orientation orientation, float timestamp)
        {
            IntPtr addr = IntPtr.Zero;
            try
            {
                // Copy the managed RGBA buffer into unmanaged memory for the native call.
                addr = Marshal.AllocHGlobal(rgba.Length);
                Marshal.Copy(rgba, 0, addr, rgba.Length);

                int result = affdexProcessFrame(nativeHandle, addr, w, h, (int)orientation, timestamp);
                if (result != 1)
                {
                    // A return value other than 1 indicates the native call failed; log its error message.
                    Debug.Log(affdexGetLastErrorMessage());
                }
            }
            catch (Exception e)
            {
                Debug.LogError(e.Message + " " + e.StackTrace);
            }
            finally
            {
                // Free the unmanaged buffer even if the native call throws.
                if (addr != IntPtr.Zero)
                {
                    Marshal.FreeHGlobal(addr);
                }
            }
        }
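Both native-call overrides rely on extern bindings (affdexProcessFrame, and in the second case affdexGetLastErrorMessage) that the samples do not show. A minimal sketch of what such a P/Invoke declaration might look like follows; the library name "affdex-native" and the NativeMethods wrapper class are assumptions inferred from the call sites, not the SDK's actual declarations.

using System;
using System.Runtime.InteropServices;

// Sketch of the native binding implied by the call sites above.
// "affdex-native" is an assumed plugin name; take the real name and the
// declaration of affdexGetLastErrorMessage from the SDK source.
internal static class NativeMethods
{
    [DllImport("affdex-native")]
    internal static extern int affdexProcessFrame(IntPtr handle, IntPtr rgba,
                                                  int width, int height,
                                                  int orientation, float timestamp);
}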
        /// <summary>
        /// Sample an individual frame from the webcam and send to detector for processing.
        /// </summary>
        public void ProcessFrame()
        {
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX || UNITY_XBOXONE || UNITY_IOS || UNITY_ANDROID
            if (cameraTexture != null)
            {
                if (detector.IsRunning)
                {
    #if UNITY_XBOXONE
                    cameraTexture = CreateKinectImageTexture();
    #else
                    if (cameraTexture.isPlaying)
                    {
    #endif
                    Frame.Orientation orientation = Frame.Orientation.Upright;

    #if UNITY_IOS || UNITY_ANDROID
                    // account for camera rotation on mobile devices
                    switch (cameraTexture.videoRotationAngle)
                    {
                    case 90:
                        orientation = Frame.Orientation.CW_90;
                        break;

                    case 180:
                        orientation = Frame.Orientation.CW_180;
                        break;

                    case 270:
                        orientation = Frame.Orientation.CW_270;
                        break;
                    }
    #endif

                    Frame frame = new Frame(cameraTexture.GetPixels32(), cameraTexture.width, cameraTexture.height, orientation, Time.realtimeSinceStartup);
                    detector.ProcessFrame(frame);
    #if !UNITY_XBOXONE
                }
    #endif
                }
            }
#endif
        }
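The videoRotationAngle switch in the mobile branch above also appears in Example #5 below. If the duplication is unwanted, a small helper along these lines could factor it out; OrientationFromAngle is a hypothetical name, not part of the SDK samples.

// Hypothetical helper: map WebCamTexture.videoRotationAngle (0, 90, 180, 270)
// to the SDK's Frame.Orientation values used in the samples.
private static Frame.Orientation OrientationFromAngle(int videoRotationAngle)
{
    switch (videoRotationAngle)
    {
    case 90:
        return Frame.Orientation.CW_90;

    case 180:
        return Frame.Orientation.CW_180;

    case 270:
        return Frame.Orientation.CW_270;

    default:
        return Frame.Orientation.Upright;
    }
}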
Example #5
    /// <summary>
    /// Sample an individual frame from the webcam and send to detector for processing.
    /// </summary>
    public void ProcessFrame()
    {
        if (camTexture == null)
        {
            return;
        }

        if (!detector.IsRunning)
        {
            return;
        }

        if (!camTexture.isPlaying)
        {
            return;
        }

        Frame.Orientation orientation = Frame.Orientation.Upright;
        // account for camera rotation on mobile devices
        switch (camTexture.videoRotationAngle)
        {
        case 90:
            orientation = Frame.Orientation.CW_90;
            break;

        case 180:
            orientation = Frame.Orientation.CW_180;
            break;

        case 270:
            orientation = Frame.Orientation.CW_270;
            break;
        }

        Frame frame = new Frame(camTexture.GetPixels32(), camTexture.width, camTexture.height, orientation,
                                Time.realtimeSinceStartup);

        detector.ProcessFrame(frame);
    }
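The webcam samplers above assume that camTexture (or cameraTexture) is an already-playing WebCamTexture and that ProcessFrame() is called regularly, for example from Update(). A minimal sketch of that wiring, under those assumptions, follows; the FrameSampler class, the sampleInterval field, and the Affdex namespace are illustrative guesses rather than SDK code.

using UnityEngine;
using Affdex; // assumed namespace for Detector and Frame; adjust to the actual SDK namespace

// Hypothetical driver: starts the webcam and samples frames at a fixed interval.
public class FrameSampler : MonoBehaviour
{
    public Detector detector;           // assumed type; assign in the Inspector
    public float sampleInterval = 0.1f; // seconds between sampled frames (illustrative value)

    private WebCamTexture camTexture;
    private float lastSampleTime;

    void Start()
    {
        camTexture = new WebCamTexture();
        camTexture.Play();
    }

    void Update()
    {
        if (detector == null || !detector.IsRunning || camTexture == null || !camTexture.isPlaying)
        {
            return;
        }

        if (Time.realtimeSinceStartup - lastSampleTime < sampleInterval)
        {
            return;
        }
        lastSampleTime = Time.realtimeSinceStartup;

        // Same pattern as the samples above: wrap the pixels in a Frame and hand it to the detector.
        Frame frame = new Frame(camTexture.GetPixels32(), camTexture.width, camTexture.height,
                                Frame.Orientation.Upright, Time.realtimeSinceStartup);
        detector.ProcessFrame(frame);
    }
}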
 /// <summary>
 /// Process a single frame of data
 /// </summary>
 /// <param name="rgba">Representation of RGBA colors in 32 bit format.</param>
 /// <param name="width">Width of the frame. Value has to be greater than zero</param>
 /// <param name="height">Height of the frame. Value has to be greater than zero</param>
 /// <param name="angle">Rotation angle of the frame, expressed as positive clockwise angle. Value must be one of {0, 90, 180, 270} </param>
 /// <param name="timestamp">The timestamp of the frame (in seconds). Can be used as an identifier of the frame.  If you use Time.timeScale to pause and use the same time units then you will not be able to process frames while paused.</param>
 public abstract void ProcessFrame(byte[] rgba, int width, int height, Frame.Orientation orientation, float timestamp);
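The overrides above receive pixel data already flattened into a byte[], while the webcam samples work with the Color32[] returned by GetPixels32(). If a caller has to produce that byte[] itself, the conversion might look like the sketch below; ToRgbaBytes is a hypothetical helper, and the exact channel order the native library expects should be confirmed against its documentation.

 // Hypothetical helper: flatten Unity's Color32[] into the 32-bit-per-pixel RGBA
 // byte layout described by the rgba parameter above.
 private static byte[] ToRgbaBytes(Color32[] pixels)
 {
     byte[] rgba = new byte[pixels.Length * 4];
     for (int i = 0; i < pixels.Length; i++)
     {
         rgba[i * 4]     = pixels[i].r;
         rgba[i * 4 + 1] = pixels[i].g;
         rgba[i * 4 + 2] = pixels[i].b;
         rgba[i * 4 + 3] = pixels[i].a;
     }
     return rgba;
 }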