    // Called by HoloLensCameraStream whenever a new video frame is available.
    // Frames that arrive while the previous frame is still being processed are dropped.
    async void OnFrameSampleAcquired(VideoCaptureSample sample)
    {
        if (frameProcessed == false)
        {
            // The previous frame is still in flight; drop this one.
            cnt_out += 1;
            return;
        }
        cnt_in += 1;
        Debug.Log("cnt: in = " + cnt_in + ", out = " + cnt_out);
        frameProcessed = false;
        Debug.Log("Frame sample acquired");
        bool mappable = true;

        float[] cameraToWorldMatrixAsFloat;
        float[] projectionMatrixAsFloat;
        mappable &= sample.TryGetCameraToWorldMatrix(out cameraToWorldMatrixAsFloat);
        mappable &= sample.TryGetProjectionMatrix(out projectionMatrixAsFloat);

        // When copying the bytes out of the buffer, you must supply a byte[] that is appropriately sized.
        // You can reuse this byte[] until you need to resize it (for whatever reason).
        byte[] latestImageBytes = null;

        System.Diagnostics.Stopwatch st = new System.Diagnostics.Stopwatch();
        st.Start();
        using (var ms = new InMemoryRandomAccessStream())
        {
            BitmapEncoder encoder = await BitmapEncoder.CreateAsync(BitmapEncoder.JpegEncoderId, ms);

            encoder.SetSoftwareBitmap(sample.Bitmap);
            try
            {
                await encoder.FlushAsync();
            }
            catch (Exception err)
            {
                Debug.LogError(err.Message);
                frameProcessed = true; // reset the gate, otherwise every later frame would be dropped
                return;
            }
            latestImageBytes = new byte[ms.Size];
            ms.Seek(0); // rewind before reading; encoding leaves the stream position at the end
            await ms.ReadAsync(latestImageBytes.AsBuffer(), (uint)ms.Size, InputStreamOptions.None);
        }
        st.Stop();
        Debug.Log("encoding time " + st.ElapsedMilliseconds.ToString());

        // Right now we pass the matrices across the pipe as float arrays, then convert them back
        // into UnityEngine.Matrix4x4 with a utility method (a sketch of it follows this method).
        if (mappable)
        {
            st.Restart();
            cameraToWorld = CameraStreamHelper.ConvertFloatArrayToMatrix4x4(cameraToWorldMatrixAsFloat);
            projection    = CameraStreamHelper.ConvertFloatArrayToMatrix4x4(projectionMatrixAsFloat);
            await SocketManager.Instance.SendPhoto(latestImageBytes);

            st.Stop();
            Debug.Log("network time " + st.ElapsedMilliseconds.ToString());
            BoundingBox[] boxes = await SocketManager.Instance.RecvDetections();

            SceneUnderstanding.Instance.RecvDetections(cameraToWorld, projection, boxes, mappable);
        }
        frameProcessed = true;
    }
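
The float-array-to-matrix helper used above is not shown in the sample. Here is a minimal sketch of what it plausibly does, assuming the incoming float[16] uses the same column-major layout as Unity's Matrix4x4 linear indexer; this is an illustration, not necessarily CameraStreamHelper's actual implementation:

    // Hypothetical sketch of CameraStreamHelper.ConvertFloatArrayToMatrix4x4.
    // Assumes the 16 floats are already in the column-major order that
    // Unity's Matrix4x4 linear indexer expects (index = row + column * 4).
    public static Matrix4x4 ConvertFloatArrayToMatrix4x4(float[] matrixAsArray)
    {
        Matrix4x4 matrix = new Matrix4x4();
        for (int i = 0; i < 16; i++)
        {
            matrix[i] = matrixAsArray[i];
        }
        return matrix;
    }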
    private void OnDestroy()
    {
        if (instance == this)
        {
            instance = null;
        }
    }
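
The snippets reference several members that are declared elsewhere in the class. A sketch of those declarations, with types inferred from how the code uses them; the names come from the snippets, but the initial values are assumptions:

    private static CamStreamManager instance; // singleton reference set in Awake
    private bool frameProcessed = true;       // true while no frame is in flight
    private int cnt_in = 0;                   // frames accepted for processing
    private int cnt_out = 0;                  // frames dropped while busy
    private Matrix4x4 cameraToWorld;
    private Matrix4x4 projection;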
Example #3
    private void Awake()
    {
        if (instance != null)
        {
            Debug.LogError("Cannot create two instances of CamStreamManager.");
            return;
        }

        instance = this;
    }
Example #4

    private void Awake()
    {
        if (instance != null)
        {
            Debug.LogError("Cannot create two instances of CamStreamManager.");
            return;
        }

        instance = this;
        HoloLensCameraStream.VideoCapture.CreateAync(OnVideoCaptureInstanceCreated);
    }
Example #5
    private void Awake()
    {
        if (instance != null)
        {
            LoggingManager.LogError("Cannot create two instances of CamStreamManager.");
            return;
        }

        instance = this;
        VideoCapture.CreateAync(OnVideoCaptureInstanceCreated);
    }
Example #6
    private void Awake()
    {
        if (instance != null)
        {
            Debug.LogError("Cannot create two instances of CamStreamManager.");
            return;
        }

        instance = this;
        VideoCapture.CreateAync(/* 26n: if this is mine, I left it half-finished, false */ /* warning: won't show holograms */ OnVideoCaptureInstanceCreated);
    }
Example #7

    private void Awake()
    {
        if (instance != null)
        {
            Debug.LogError("Cannot create two instances of CamStreamManager.");
            return;
        }

        instance = this;
        VideoCapture.CreateAsync(OnVideoCaptureInstanceCreated, new SourceKind[] { SourceKind.COLOR });
    }
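
None of the examples show the OnVideoCaptureInstanceCreated callback they all pass in. A hedged sketch of what it typically looks like, following the setup pattern from the HoloLensCameraStream README; the resolution, frame rate, hologramOpacity value, and the helper name OnVideoModeStarted are assumptions:

    private void OnVideoCaptureInstanceCreated(VideoCapture videoCapture)
    {
        if (videoCapture == null)
        {
            Debug.LogError("Could not create a VideoCapture instance.");
            return;
        }

        // Assumed capture settings; pick values your device actually supports.
        CameraParameters cameraParams = new CameraParameters();
        cameraParams.cameraResolutionWidth = 896;
        cameraParams.cameraResolutionHeight = 504;
        cameraParams.frameRate = 30;
        cameraParams.pixelFormat = CapturePixelFormat.BGRA32;
        cameraParams.hologramOpacity = 0f; // 0 keeps rendered holograms out of the captured frames

        // Subscribe the frame handler shown above, then start streaming.
        videoCapture.FrameSampleAcquired += OnFrameSampleAcquired;
        videoCapture.StartVideoModeAsync(cameraParams, OnVideoModeStarted);
    }

    private void OnVideoModeStarted(VideoCaptureResult result)
    {
        if (result.success == false)
        {
            Debug.LogError("Could not start the video mode.");
        }
    }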