Example 1
    /// <summary>
    /// Texture Reader callback
    /// </summary>
    /// <param name="format"></param>
    /// <param name="width"></param>
    /// <param name="height"></param>
    /// <param name="pixelBuffer"></param>
    /// <param name="bufferSize"></param>
    public void OnImageAvailable(TextureReaderApi.ImageFormatType format, int width, int height, IntPtr pixelBuffer,
                                 int bufferSize)
    {
        if (texture == null || _image == null || _imageWidth != width || _imageHeight != height)
        {
            texture      = new Texture2D(width, height, TextureFormat.RGBA32, false, false);
            _image       = new byte[width * height * 4];
            _imageWidth  = width;
            _imageHeight = height;
        }

        System.Runtime.InteropServices.Marshal.Copy(pixelBuffer, _image, 0, bufferSize);

        // Update the rendering texture with the sampled image.
        texture.LoadRawTextureData(_image);
        texture.Apply();

        //Image without CV Processing and Shader to fit image
        imageWOProc.texture = texture;

        if (m_CachedOrientation != Screen.orientation ||
            m_CachedScreenDimensions.x != Screen.width ||
            m_CachedScreenDimensions.y != Screen.height)
        {
            m_CameraImageToDisplayUvTransformation = Frame.CameraImage.ImageDisplayUvs;
            m_CachedOrientation      = Screen.orientation;
            m_CachedScreenDimensions = new Vector2(Screen.width, Screen.height);
        }

        ProcessingImage();
    }
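A note on the copy step above: Marshal.Copy throws if bufferSize exceeds the length of the destination array, which can happen when the native buffer carries per-row padding. A minimal defensive variant (a sketch, not part of the original sample; it reuses the _image field shown above):

    private void CopyPixelDataSafely(IntPtr pixelBuffer, int bufferSize)
    {
        // Never copy more bytes than the managed buffer can hold; any remainder is
        // assumed here to be row padding added by the camera pipeline.
        int bytesToCopy = System.Math.Min(bufferSize, _image.Length);
        System.Runtime.InteropServices.Marshal.Copy(pixelBuffer, _image, 0, bytesToCopy);
    }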
Example 2
        /// <summary>
        /// TextureReader callback handler.
        /// </summary>
        /// <param name="format">The format of the image.</param>
        /// <param name="width">Width of the image, in pixels.</param>
        /// <param name="height">Height of the image, in pixels.</param>
        /// <param name="pixelBuffer">Pointer to raw image buffer.</param>
        /// <param name="bufferSize">The size of the image buffer, in bytes.</param>
        public void OnImageAvailable(TextureReaderApi.ImageFormatType format, int width, int height, IntPtr pixelBuffer, int bufferSize)
        {
            if (format != TextureReaderApi.ImageFormatType.ImageFormatGrayscale)
            {
                Debug.Log("No edge detected due to incorrect image format.");
                return;
            }

            if (m_TextureToRender == null || m_EdgeImage == null || m_ImageWidth != width || m_ImageHeight != height)
            {
                m_TextureToRender = new Texture2D(width, height, TextureFormat.R8, false, false);
                m_EdgeImage       = new byte[width * height];
                m_ImageWidth      = width;
                m_ImageHeight     = height;
            }

            // Detect edges within the image.
            if (EdgeDetector.Detect(m_EdgeImage, pixelBuffer, width, height))
            {
                // Update the rendering texture with the edge image.
                m_TextureToRender.LoadRawTextureData(m_EdgeImage);
                m_TextureToRender.Apply();
                BackgroundRenderer.BackgroundMaterial.SetTexture("_ImageTex", m_TextureToRender);
            }
        }
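EdgeDetector.Detect here comes from the ARCore computer-vision sample and fills m_EdgeImage with a single-channel edge map that the R8 texture loads directly. Purely as an illustration (not the sample's actual implementation), a stand-in with the same calling shape could be a thresholded gradient:

        private static bool DetectEdgesSketch(byte[] outputImage, IntPtr pixelBuffer, int width, int height)
        {
            if (outputImage == null || outputImage.Length < width * height)
            {
                return false;
            }

            // Copy the grayscale input into managed memory.
            byte[] input = new byte[width * height];
            System.Runtime.InteropServices.Marshal.Copy(pixelBuffer, input, 0, input.Length);

            // Thresholded horizontal gradient as a crude edge response.
            for (int y = 0; y < height; y++)
            {
                for (int x = 1; x < width - 1; x++)
                {
                    int gradient = System.Math.Abs(input[y * width + x + 1] - input[y * width + x - 1]);
                    outputImage[y * width + x] = (byte)(gradient > 32 ? 255 : 0);
                }
            }

            return true;
        }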
Example 3
    public void OnImageAvailable(TextureReaderApi.ImageFormatType format, int width, int height, IntPtr pixelBuffer, int bufferSize)
    {
        try
        {
            System.DateTime current      = System.DateTime.Now;
            long            elapsedTicks = current.Ticks - begin_.Ticks;
            System.TimeSpan elapsedSpan  = new System.TimeSpan(elapsedTicks);
            timestamp_ = elapsedSpan.TotalSeconds;

            Debug.LogFormat("pushing frame {0}x{1} to NDN-RTC...", width, height);

            FrameInfo finfo            = NdnRtc.videoStream.processIncomingFrame(format, width, height, pixelBuffer, bufferSize);
            int       publishedFrameNo = finfo.playbackNo_;

            if (publishedFrameNo >= 0) // frame was not dropped by the encoder and was published
            {
                Debug.LogFormat("create frame object #{0}, ts {1}, pos {2}, rot {3}, cam {4}",
                                publishedFrameNo, timestamp_, Frame.Pose.position, Frame.Pose.rotation, camForCalcThread_.ToString());

                frameMgr_.CreateFrameObject(publishedFrameNo, timestamp_, Frame.Pose.position, Frame.Pose.rotation, camForCalcThread_);
                frameBuffer_.Enqueue(frameMgr_.frameObjects);

                // spawn fetching task for annotations of this frame
                foreach (var fetcher in annotationFetchers_)
                {
                    spawnAnnotationFetchingTask(publishedFrameNo, fetcher, 0.6f, performSemanticDbQuery);
                }
            }
        }
        catch (System.Exception e)
        {
            Debug.LogExceptionFormat(e, "in OnImageAvailable call");
        }
    }
Example 4
 private void OnImageAvailableCallbackFunc(TextureReaderApi.ImageFormatType format, int width, int height, IntPtr pixelBuffer, int bufferSize)
 {
     this.format      = format;
     this.width       = width;
     this.height      = height;
     this.pixelBuffer = pixelBuffer;
     this.bufferSize  = bufferSize;
 }
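This callback only caches the arguments; Unity's Texture2D API must be used on the main thread, so a typical consumer reads the cached fields later, for example in Update(). A hedged sketch of such a consumer (the cachedTexture field is illustrative, the format is assumed to be RGBA, and the native buffer is assumed to still be valid when Update runs):

 private Texture2D cachedTexture;

 private void Update()
 {
     // Nothing cached yet.
     if (pixelBuffer == IntPtr.Zero)
     {
         return;
     }

     if (cachedTexture == null || cachedTexture.width != width || cachedTexture.height != height)
     {
         cachedTexture = new Texture2D(width, height, TextureFormat.RGBA32, false, false);
     }

     // Copy the cached native buffer into managed memory and upload it to the texture.
     byte[] pixels = new byte[bufferSize];
     System.Runtime.InteropServices.Marshal.Copy(pixelBuffer, pixels, 0, bufferSize);
     cachedTexture.LoadRawTextureData(pixels);
     cachedTexture.Apply();
 }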
Example 5
    void OnImageAvailable(TextureReaderApi.ImageFormatType format, int width, int height, IntPtr pixelBuffer, int bufferSize)
    {
        if (format != TextureReaderApi.ImageFormatType.ImageFormatColor)
        {
            return;
        }

        // Adjust buffer size if necessary.
        if (bufferSize != m_PixelBufferSize || m_PixelByteBuffer.Length == 0)
        {
            m_PixelBufferSize = bufferSize;
            m_PixelByteBuffer = new byte[bufferSize];
            m_PixelColors     = new Color[width * height];
        }

        // Move raw data into managed buffer.
        System.Runtime.InteropServices.Marshal.Copy(pixelBuffer, m_PixelByteBuffer, 0, bufferSize);

        // Interpret the pixel buffer differently depending on the device's orientation.
        // We need the pixel colors in a friendly format: an array laid out row by row
        // from bottom to top, and left to right within each row.
        var bufferIndex = 0;

        for (var y = 0; y < height; ++y)
        {
            for (var x = 0; x < width; ++x)
            {
                int r     = m_PixelByteBuffer[bufferIndex++];
                int g     = m_PixelByteBuffer[bufferIndex++];
                int b     = m_PixelByteBuffer[bufferIndex++];
                int a     = m_PixelByteBuffer[bufferIndex++];
                var color = new Color(r / 255f, g / 255f, b / 255f, a / 255f);
                int pixelIndex;
                switch (Screen.orientation)
                {
                case ScreenOrientation.LandscapeRight:
                    pixelIndex = y * width + width - 1 - x;
                    break;

                case ScreenOrientation.Portrait:
                    pixelIndex = (width - 1 - x) * height + height - 1 - y;
                    break;

                case ScreenOrientation.LandscapeLeft:
                    pixelIndex = (height - 1 - y) * width + x;
                    break;

                default:
                    pixelIndex = x * height + y;
                    break;
                }
                m_PixelColors[pixelIndex] = color;
            }
        }

        FeaturePointCubes();
    }
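The switch above maps each camera-buffer coordinate (x, y) into an index of the screen-oriented m_PixelColors array. Pulled out into a helper (illustrative only; the mapping is identical to the loop above), the relationship is easier to read and test in isolation:

    private static int OrientedPixelIndex(int x, int y, int width, int height, ScreenOrientation orientation)
    {
        switch (orientation)
        {
        case ScreenOrientation.LandscapeRight:
            // Mirror each row horizontally.
            return y * width + (width - 1 - x);

        case ScreenOrientation.Portrait:
            // Rotate: output rows correspond to input columns.
            return (width - 1 - x) * height + (height - 1 - y);

        case ScreenOrientation.LandscapeLeft:
            // Flip vertically.
            return (height - 1 - y) * width + x;

        default:
            // PortraitUpsideDown and any other orientation.
            return x * height + y;
        }
    }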
Example 6
        /// <summary>
        /// Handles a new CPU image.
        /// </summary>
        /// <param name="format">The format of the image.</param>
        /// <param name="width">Width of the image, in pixels.</param>
        /// <param name="height">Height of the image, in pixels.</param>
        /// <param name="pixelBuffer">Pointer to raw image buffer.</param>
        /// <param name="bufferSize">The size of the image buffer, in bytes.</param>
        private void _OnImageAvailable(TextureReaderApi.ImageFormatType format, int width, int height, IntPtr pixelBuffer, int bufferSize)
        {
            if (!EdgeDetectionBackgroundImage.enabled)
            {
                return;
            }

            if (format != TextureReaderApi.ImageFormatType.ImageFormatGrayscale)
            {
                Debug.Log("No edge detected due to incorrect image format.");
                return;
            }

            if (m_EdgeDetectionBackgroundTexture == null || m_EdgeDetectionResultImage == null ||
                m_EdgeDetectionBackgroundTexture.width != width || m_EdgeDetectionBackgroundTexture.height != height)
            {
                m_EdgeDetectionBackgroundTexture = new Texture2D(width, height, TextureFormat.R8, false, false);
                m_EdgeDetectionResultImage       = new byte[width * height];
                _UpdateCameraImageToDisplayUVs();
            }

            if (m_CachedOrientation != Screen.orientation || m_CachedScreenDimensions.x != Screen.width ||
                m_CachedScreenDimensions.y != Screen.height)
            {
                _UpdateCameraImageToDisplayUVs();
                m_CachedOrientation      = Screen.orientation;
                m_CachedScreenDimensions = new Vector2(Screen.width, Screen.height);
            }

            // Detect edges within the image.
            if (EdgeDetector.Detect(m_EdgeDetectionResultImage, pixelBuffer, width, height))
            {
                // Update the rendering texture with the edge image.
                m_EdgeDetectionBackgroundTexture.LoadRawTextureData(m_EdgeDetectionResultImage);
                m_EdgeDetectionBackgroundTexture.Apply();
                EdgeDetectionBackgroundImage.material.SetTexture("_ImageTex", m_EdgeDetectionBackgroundTexture);

                const string TOP_LEFT_RIGHT    = "_UvTopLeftRight";
                const string BOTTOM_LEFT_RIGHT = "_UvBottomLeftRight";
                EdgeDetectionBackgroundImage.material.SetVector(TOP_LEFT_RIGHT, new Vector4(
                                                                    m_CameraImageToDisplayUvTransformation.TopLeft.x,
                                                                    m_CameraImageToDisplayUvTransformation.TopLeft.y,
                                                                    m_CameraImageToDisplayUvTransformation.TopRight.x,
                                                                    m_CameraImageToDisplayUvTransformation.TopRight.y));
                EdgeDetectionBackgroundImage.material.SetVector(BOTTOM_LEFT_RIGHT, new Vector4(
                                                                    m_CameraImageToDisplayUvTransformation.BottomLeft.x,
                                                                    m_CameraImageToDisplayUvTransformation.BottomLeft.y,
                                                                    m_CameraImageToDisplayUvTransformation.BottomRight.x,
                                                                    m_CameraImageToDisplayUvTransformation.BottomRight.y));
            }
        }
Example 7
    public int processIncomingFrame(TextureReaderApi.ImageFormatType format, int width, int height, IntPtr pixelBuffer, int bufferSize)
    {
        Debug.Log("[ndnrtc::videostream] incoming image format " + format + " size " + width + "x" + height);

        unsafe {
            byte *ptr    = (byte *)pixelBuffer.ToPointer();
            int   offset = 0;

            for (int i = 0; i < height; i++)
            {
                for (int j = 0; j < width; j++)
                {
                    float r = (float)ptr [offset + 0];
                    float g = (float)ptr [offset + 1];
                    float b = (float)ptr [offset + 2];
                    float a = (float)ptr [offset + 3];
                    ptr [offset + 0] = (byte)a;
                    ptr [offset + 1] = (byte)r;
                    ptr [offset + 2] = (byte)g;
                    ptr [offset + 3] = (byte)b;
                    offset          += 4;
                }
            }
        }

//		uint offset = imageData.stride;
//		uint yPlaneSize = imageData.stride * imageData.height;
//		uint vPlaneSize = (imageData.stride / 2) * (imageData.height / 2);
//		uint uvPLaneSize = yPlaneSize / 2;

        //GCHandle pinnedBuffer = GCHandle.Alloc (switchAB, GCHandleType.Pinned);

//		IntPtr yPlane = new IntPtr (pinnedBuffer.AddrOfPinnedObject ().ToInt64 () + offset);
//		offset += yPlaneSize;
//		IntPtr uvPlane = new IntPtr (pinnedBuffer.AddrOfPinnedObject ().ToInt64 () + offset);

        //IntPtr buffer = new IntPtr (pinnedBuffer.AddrOfPinnedObject ().ToInt64 ());

//		public static extern int ndnrtc_LocalVideoStream_incomingARGBFrame (IntPtr stream,
//			uint width, uint height, IntPtr argbFrameData, uint frameSize);
        int frameNo = NdnRtcWrapper.ndnrtc_LocalVideoStream_incomingArgbFrame(ndnrtcHandle_, (uint)width, (uint)height, pixelBuffer, (uint)bufferSize);

        //Debug.Log ("frameNo = " + frameNo);
        //pinnedBuffer.Free ();

        return(frameNo);
    }
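The unsafe loop rewrites the buffer in place from RGBA byte order to the ARGB order that ndnrtc_LocalVideoStream_incomingArgbFrame expects, avoiding an extra allocation. The same swizzle expressed on a managed copy, purely for clarity (not a drop-in replacement, since the native call still reads pixelBuffer):

    private static void SwizzleRgbaToArgb(byte[] pixels)
    {
        for (int offset = 0; offset + 3 < pixels.Length; offset += 4)
        {
            byte r = pixels[offset + 0];
            byte g = pixels[offset + 1];
            byte b = pixels[offset + 2];
            byte a = pixels[offset + 3];

            pixels[offset + 0] = a;
            pixels[offset + 1] = r;
            pixels[offset + 2] = g;
            pixels[offset + 3] = b;
        }
    }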
Example 8
    /**<summary> Called when TextureReader returns image buffer </summary>*/
    private async void OnImageCaptured(TextureReaderApi.ImageFormatType format, int width, int height, IntPtr pixelBuffer, int bufferSize)
    {
        // Use threads so image handling does not block the render thread
        //TODO ? Do jpg encoding without Unity Texture2D, which cannot be created in another thread
        byte[] result     = null;
        byte[] encodedJpg = null;

        await Task.Run(() =>
        {
            // 4 bytes per pixel, RGBA
            byte[] imageBytes = new byte[width * height * 4];
            Marshal.Copy(pixelBuffer, imageBytes, 0, imageBytes.Length);

            // Get pixels and apply transforms
            // TODO: check if default orientation is device specific
            Color32[] pixels = ImageUtils.GetPixels(imageBytes);
            pixels           = ImageUtils.Flip(pixels, width, height, true);
            pixels           = ImageUtils.Rotate(pixels, width, height, false);
            result           = ImageUtils.Color32ArrayToByteArray(pixels);
        });

        // Create texture with the image data
        Texture2D texture = new Texture2D(height, width, TextureFormat.RGBA32, false);

        texture.LoadRawTextureData(result);

        await Task.Run(() =>
        {
            // Get jpg encoded bytedata
            encodedJpg = texture.EncodeToJPG(80);
            //File.WriteAllBytes(Application.persistentDataPath.Combine(DateTime.Now.ToString("yyyyMMdd-HHmmss") + ".jpg"), encodedJpg);

            // For delta offset
            SnapshotPose();
        });

        lastLocationTime = Time.time;
        if (App.config.debug)
        {
            Debug.Log("Starting POST request for /location/fine");
        }

        // Send image to LocationAPI
        await ServerAPI.LocationFine(locationController.buildingID, encodedJpg);

        // -> OnLocationResponse
    }
Example 9
        /// <summary>
        /// Handles a new CPU image.
        /// </summary>
        /// <param name="format">The format of the image.</param>
        /// <param name="width">Width of the image, in pixels.</param>
        /// <param name="height">Height of the image, in pixels.</param>
        /// <param name="pixelBuffer">Pointer to raw image buffer.</param>
        /// <param name="bufferSize">The size of the image buffer, in bytes.</param>
        private void OnImageAvailable(TextureReaderApi.ImageFormatType format, int width, int height, IntPtr pixelBuffer, int bufferSize)
        {
            // Initialize texture, output image (bytes array) and output width/height
            if (m_TextureToRender == null || m_OutputImage == null || m_ImageWidth != width || m_ImageHeight != height)
            {
                m_TextureToRender = new Texture2D(width, height, TextureFormat.RGBA32, false, false);
                m_OutputImage     = new byte[width * height * 4];
                m_ImageWidth      = width;
                m_ImageHeight     = height;
            }

            // Copy the pixel buffer provided by the TextureReader API into m_OutputImage: a 1920*1080*4 byte array
            System.Runtime.InteropServices.Marshal.Copy(pixelBuffer, m_OutputImage, 0, bufferSize);

            // Update the rendering texture
            m_TextureToRender.LoadRawTextureData(m_OutputImage);
            m_TextureToRender.Apply();
        }
Example 10
 public void OnImageAvailable(TextureReaderApi.ImageFormatType format, int width, int height, IntPtr pixelBuffer, int bufferSize)
 {
     if (UpdatePointCloud() && (mode != Mode.DUMMY))
     {
         if (mode == Mode.RELOCALISATION)
         {
             Relocalise(width, height, pixelBuffer, bufferSize);
         }
         if (mode == Mode.LEARNING)
         {
             ProcessJPG(width, height, pixelBuffer, bufferSize);
             ProcessTXT();
             ProcessDSC();
             size = pose;
             pose++;
         }
     }
 }
Example 11
    public FrameInfo processIncomingFrame(TextureReaderApi.ImageFormatType format, int width, int height, IntPtr pixelBuffer, int bufferSize)
    {
        // Debug.Log ("[ndnrtc::videostream] incoming image format " + format + " size " + width + "x" + height);

        flipFrame(width, height, pixelBuffer, true, true, true);

        // publish frame using NDN-RTC
        // return: res < 0 -- frame was skipped due to encoder decision (or library was busy publishing frame)
        //         res >= 0 -- playback number of published frame
        int res = NdnRtcWrapper.ndnrtc_LocalVideoStream_incomingArgbFrame(ndnrtcHandle_, (uint)width, (uint)height, pixelBuffer, (uint)bufferSize);

        // query additional latest published frame information
        FrameInfo finfo = NdnRtcWrapper.ndnrtc_LocalVideoStream_getLastPublishedInfo(ndnrtcHandle_);

        Debug.LogFormat(this, "res: {0} frameNo: {1} timestamp {2} ndn name {3}", res, finfo.playbackNo_, finfo.timestamp_, finfo.ndnName_);

        if (res < 0)
        {
            finfo.playbackNo_ = -1;
        }
        // return res > 0 ? finfo.playbackNo_ : res;
        return(finfo);
    }
    public void OnImageAvailable(TextureReaderApi.ImageFormatType format, int width, int height, IntPtr pixelBuffer, int bufferSize)
    {
        try{
            System.DateTime current      = System.DateTime.Now;
            long            elapsedTicks = current.Ticks - begin.Ticks;
            System.TimeSpan elapsedSpan  = new System.TimeSpan(elapsedTicks);
            timestamp = elapsedSpan.TotalSeconds;
            //Debug.Log("before call to ndnrtc");
            int publishedFrameNo = NdnRtc.videoStream.processIncomingFrame(format, width, height, pixelBuffer, bufferSize);
            Debug.Log("Published frame number: " + publishedFrameNo);

            if (publishedFrameNo >= 0)
            {
                Debug.Log("create frame object frame number: " + publishedFrameNo);
                Debug.Log("create frame object timestamp: " + timestamp);
                Debug.Log("create frame object position: " + Frame.Pose.position);
                Debug.Log("create frame object rotation: " + Frame.Pose.rotation);
                Debug.Log("create frame object camera: " + camForCalcThread.ToString());
                frameMgr.CreateFrameObject(publishedFrameNo, timestamp, Frame.Pose.position, Frame.Pose.rotation, camForCalcThread);
                //frameMgr.CreateFrameObject (imgBuffer, publishedFrameNo, timestamp, Vector3.zero, Quaternion.identity, offset.m_uOffset, offset.m_vOffset, camForCalcThread);

                //frameObjectBuffer.Enqueue (frameMgr.frameObjects [publishedFrameNo]);
                frameBuffer.Enqueue(frameMgr.frameObjects);
                Debug.Log("frame buffer enqueue: " + publishedFrameNo);
                // spawn fetching task for annotations of this frame
                // once successfully received, delegate callback will be called
                aFetcher_.fetchAnnotation(publishedFrameNo, delegate(string jsonArrayString) {
                    int frameNumber = publishedFrameNo;             // storing frame number locally
                    string debuglog = jsonArrayString.Replace(System.Environment.NewLine, " ");
                    Debug.Log("Received annotations JSON (frame " + frameNumber + "): " + debuglog);
                    //Debug.Log("annotations string length: " + jsonArrayString.Length);
                    string[] testDebug = jsonArrayString.Split(']');
                    string formatDebug = testDebug[0] + "]";
                    try{
                        Dictionary <int, FrameObjectData> frameObjects = frameBuffer.Dequeue();
                        FrameObjectData temp;
                        if (frameObjects.TryGetValue(frameNumber, out temp))
                        {
                            //AnnotationData[] data = JsonHelper.FromJson<AnnotationData>(jsonArrayString);
                            //try to print out how many characters the jsonArrayString has
                            string str          = "{ \"annotationData\": " + formatDebug + "}";
                            AnnotationData data = JsonUtility.FromJson <AnnotationData>(str);
                            for (int i = 0; i < data.annotationData.Length; i++)
                            {
                                if (data.annotationData[i].prob >= 0.5f)
                                {
                                    Debug.Log("test: " + data.annotationData.Length);
                                    Debug.Log("test label: " + data.annotationData[i].label + " test xleft: " + data.annotationData[i].xleft
                                              + " test xright: " + data.annotationData[i].xright + " test ytop: " + (1 - data.annotationData[i].ytop) + " test ybottom: " + (1 - data.annotationData[i].ybottom));
                                    //						Debug.Log("test xleft: " + data.annotationData[i].xleft);
                                    //						Debug.Log("test xright: " + data.annotationData[i].xright);
                                    //						Debug.Log("test ytop: " + data.annotationData[i].ytop);
                                    //						Debug.Log("test ybottom: " + data.annotationData[i].ybottom);

                                    // example how to fetch model from content-publisher
                                    // Therese, please check this is the right place in code where models should be requested
                                    // (prob. model doesn't need to be fetched every frame for same object)
                                    //fetchModel(data.annotationData[i].label);
                                }
                            }
                            //				FrameObjectData temp;
                            //				bool success = frameObjectBuffer.TryDequeue(out temp);
                            //				if(success)
                            //				//FrameObjectData temp = frameBuffer.Dequeue();
                            //				{
                            //					Debug.Log("Frame info: " + frameNumber);
                            //					Debug.Log ("Frame info camera position: " + temp.camPos);
                            //					Debug.Log ("Frame info camera rotation: " + temp.camRot);
                            //					Debug.Log ("Frame info points number: " + temp.numPoints);
                            //					Debug.Log ("Frame info points: " + temp.points.ToString());
                            //				}


                            Debug.Log("Frame number annotations: " + frameNumber);
                            Debug.Log("Frame info camera position: " + temp.camPos);
                            Debug.Log("Frame info camera rotation: " + temp.camRot);
                            //Debug.Log ("Frame info points number: " + temp.numPoints);
                            Debug.Log("Frame info points: " + temp.points.ToString());
                            Debug.Log("test time difference: " + (Mathf.Abs((float)(temp.timestamp - timestamp))) + " frame number: " + publishedFrameNo);

                            //int boxCount = Mathf.Min(data.annotationData.Length, 2);
                            int boxCount = data.annotationData.Length;

                            BoxData annoData = new BoxData();
                            Debug.Log("box created boxdata");
                            annoData.frameNumber = frameNumber;
                            annoData.count       = boxCount;
                            annoData.points      = temp.points;
                            annoData.numPoints   = temp.numPoints;
                            annoData.cam         = temp.cam;
                            annoData.camPos      = temp.camPos;
                            annoData.camRot      = temp.camRot;
                            annoData.timestamp   = temp.timestamp;
                            annoData.label       = new string[boxCount];
                            annoData.xleft       = new float[boxCount];
                            annoData.xright      = new float[boxCount];
                            annoData.ytop        = new float[boxCount];
                            annoData.ybottom     = new float[boxCount];
                            annoData.prob        = new float[boxCount];

                            for (int i = 0; i < boxCount; i++)
                            {
                                annoData.label[i]   = data.annotationData[i].label;
                                annoData.xleft[i]   = 1 - data.annotationData[i].xright;
                                annoData.xright[i]  = 1 - data.annotationData[i].xleft;
                                annoData.ytop[i]    = data.annotationData[i].ybottom;
                                annoData.ybottom[i] = data.annotationData[i].ytop;
                                annoData.prob[i]    = data.annotationData[i].prob;
                            }

                            Debug.Log("Received annotations box enqueue");
                            //boxBufferToCalc.Enqueue(annoData);
                            boundingBoxBufferToCalc.Enqueue(annoData);
                        }
                        else
                        {
                            //frame object was not in the pool, lifetime expired
                            Debug.Log("Received annotations but frame expired");
                        }
                    }
                    catch (System.Exception e)
                    {
                        Debug.Log("exception caught annotations: " + e);
                        string debug = jsonArrayString.Replace(System.Environment.NewLine, " ");
                        Debug.Log("exception caught string: " + debug);
                        string str = "{ \"annotationData\": " + debug + "}";
                        Debug.Log("exception caught string with format: " + str);
                    }
                });

                openFaceFetcher_.fetchAnnotation(publishedFrameNo, delegate(string jsonArrayString) {
                    int frameNumber = publishedFrameNo;             // storing frame number locally
                    string debuglog = jsonArrayString.Replace(System.Environment.NewLine, " ");
                    Debug.Log("Received OpenFace annotations JSON (frame " + frameNumber + "): " + debuglog);
                    string[] testDebug = jsonArrayString.Split(']');
                    string formatDebug = testDebug[0] + "]";
                    try{
                        Dictionary <int, FrameObjectData> frameObjects = frameBuffer.Dequeue();
                        FrameObjectData temp;
                        if (frameObjects.TryGetValue(frameNumber, out temp))
                        {
                            string str          = "{ \"annotationData\": " + formatDebug + "}";
                            AnnotationData data = JsonUtility.FromJson <AnnotationData>(str);
                            for (int i = 0; i < data.annotationData.Length; i++)
                            {
                                //if(data.annotationData[i].prob >= 0.7f)
                                {
                                    Debug.Log("openface test: " + data.annotationData.Length);
                                    Debug.Log("openface test label: " + data.annotationData[i].label + " test xleft: " + data.annotationData[i].xleft
                                              + " test xright: " + data.annotationData[i].xright + " test ytop: " + (data.annotationData[i].ytop) + " test ybottom: " + (data.annotationData[i].ybottom));
                                    //						Debug.Log("test xleft: " + data.annotationData[i].xleft);
                                    //						Debug.Log("test xright: " + data.annotationData[i].xright);
                                    //						Debug.Log("test ytop: " + data.annotationData[i].ytop);
                                    //						Debug.Log("test ybottom: " + data.annotationData[i].ybottom);
                                }
                            }
                            //int boxCount = Mathf.Min(data.annotationData.Length, 2);
                            int boxCount = data.annotationData.Length;

                            BoxData annoData = new BoxData();
                            Debug.Log("box created boxdata");
                            annoData.frameNumber = frameNumber;
                            annoData.count       = boxCount;
                            annoData.points      = temp.points;
                            annoData.numPoints   = temp.numPoints;
                            annoData.cam         = temp.cam;
                            annoData.camPos      = temp.camPos;
                            annoData.camRot      = temp.camRot;
                            annoData.timestamp   = temp.timestamp;
                            annoData.label       = new string[boxCount];
                            annoData.xleft       = new float[boxCount];
                            annoData.xright      = new float[boxCount];
                            annoData.ytop        = new float[boxCount];
                            annoData.ybottom     = new float[boxCount];
                            annoData.prob        = new float[boxCount];

                            for (int i = 0; i < boxCount; i++)
                            {
                                if (data.annotationData[i].ytop > 1)
                                {
                                    data.annotationData[i].ytop = 1;
                                }
                                if (data.annotationData[i].ybottom < 0)
                                {
                                    data.annotationData[i].ybottom = 0;
                                }
                                annoData.label[i]   = data.annotationData[i].label;
                                annoData.xleft[i]   = data.annotationData[i].xleft;
                                annoData.xright[i]  = data.annotationData[i].xright;
                                annoData.ytop[i]    = data.annotationData[i].ytop;
                                annoData.ybottom[i] = data.annotationData[i].ybottom;
                                annoData.prob[i]    = 1;
                            }

                            Debug.Log("Received openface annotations box enqueue");
                            //boxBufferToCalc.Enqueue(annoData);
                            boundingBoxBufferToCalc.Enqueue(annoData);
                        }
                        else
                        {
                            //frame object was not in the pool, lifetime expired
                            Debug.Log("Received openface annotations but frame expired");
                        }
                    }
                    catch (System.Exception e)
                    {
                        Debug.Log("exception caught openface annotations: " + e);
                        string debug = jsonArrayString.Replace(System.Environment.NewLine, " ");
                        Debug.Log("exception caught openface string: " + debug);
                        string str = "{ \"annotationData\": " + debug + "}";
                        Debug.Log("exception caught openface string with format: " + str);
                    }
                });
            }
            else
            {
                // frame was dropped by the encoder and was not published
            }
        }
        catch (System.Exception e)
        {
            Debug.Log("exception caught video" + e.ToString());
        }
    }
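The JsonUtility.FromJson<AnnotationData>(str) calls above require a serializable wrapper type whose field names match the JSON keys. A hedged sketch of the container types those calls imply, based only on the fields used in this callback (the element type name AnnotationEntry is illustrative; the original project's classes may be laid out differently):

    [System.Serializable]
    public class AnnotationEntry
    {
        public string label;
        public float  xleft;
        public float  xright;
        public float  ytop;
        public float  ybottom;
        public float  prob;
    }

    [System.Serializable]
    public class AnnotationData
    {
        // Matches the "annotationData" key that the callback wraps around the fetched JSON array.
        public AnnotationEntry[] annotationData;
    }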