/// <summary>
/// Decodes the raw payload of a <see cref="DataMessage"/> into a typed
/// protobuf message and wraps it in a <see cref="Sample"/>.
/// </summary>
/// <param name="dmsg">Incoming message carrying a data-type tag and serialized bytes.</param>
/// <returns>A populated <see cref="Sample"/>, or null when the data type is unrecognized.</returns>
public static Sample DecodeData(DataMessage dmsg)
        {
            // The Sample is tagged with the incoming type; Data is filled in below.
            var decoded = new Sample(dmsg.DataType, null);

            switch (dmsg.DataType)
            {
                case DataMessage.Types.DataType.Image:
                {
                    var image = new ImageSamples();
                    image.MergeFrom(dmsg.Data);
                    decoded.Data = image;
                    return decoded;
                }

                case DataMessage.Types.DataType.Audio:
                {
                    var audio = new AudioSamples();
                    audio.MergeFrom(dmsg.Data);
                    decoded.Data = audio;
                    return decoded;
                }

                case DataMessage.Types.DataType.String:
                {
                    var text = new StringSample();
                    text.MergeFrom(dmsg.Data);
                    decoded.Data = text;
                    return decoded;
                }

                case DataMessage.Types.DataType.Float32Arr:
                {
                    var floats = new Float32Array();
                    floats.MergeFrom(dmsg.Data);
                    decoded.Data = floats;
                    return decoded;
                }

                case DataMessage.Types.DataType.Int64Arr:
                {
                    var longs = new Int64Array();
                    longs.MergeFrom(dmsg.Data);
                    decoded.Data = longs;
                    return decoded;
                }

                default:
                    // Unknown tag: log and signal failure with null, as callers expect.
                    Debug.Log("Unknown message");
                    return null;
            }
        }
    /// <summary>
    /// Displays the latest received camera frame on the left view plane.
    /// Converts the first sample's YUV (I420) payload to BGR via OpenCV and
    /// uploads it to a Unity texture. Compiled as a no-op (except guard checks)
    /// unless ANIMUS_USE_OPENCV is defined.
    /// </summary>
    /// <param name="currSamples">Image samples received via animus; only Samples[0] is used.</param>
    /// <returns>
    /// False when vision is disabled or input is null; true otherwise,
    /// including early-outs for not-ready/invalid frames and swallowed exceptions.
    /// </returns>
    public bool vision_set(ImageSamples currSamples)
    {
        try
        {
            // Body not ready yet: report success so the caller keeps streaming.
            if (!bodyTransitionReady)
            {
                return(true);
            }

            if (!visionEnabled)
            {
                Debug.Log("Vision modality not enabled. Cannot set");
                return(false);
            }

            if (currSamples == null)
            {
                return(false);
            }

            // Only the first sample of the batch is rendered.
            var currSample = currSamples.Samples[0];


            // NOTE(review): indexing below suggests DataShape[0] = width and
            // DataShape[1] = height (Mat rows = height * 1.5) — confirm against sender.
            var currShape = currSample.DataShape;
            // Debug.Log($"{currShape[0]}, {currShape[1]}");
#if ANIMUS_USE_OPENCV
            // Lazily allocate the reusable conversion Mats on the first frame.
            // The 1.5x row count matches the I420 layout (Y plane + half-res U/V).
            if (!initMats)
            {
                yuv      = new Mat((int)(currShape[1] * 1.5), (int)currShape[0], CvType.CV_8UC1);
                rgb      = new Mat();
                initMats = true;
            }

            // Skip frames whose byte count does not match the advertised I420 size.
            if (currSample.Data.Length != currShape[0] * currShape[1] * 1.5)
            {
                return(true);
            }

            // Skip implausibly small frames (<=100 px in either dimension).
            if (currShape[0] <= 100 || currShape[1] <= 100)
            {
                return(true);
            }
            // Debug.Log("cvt Color ops");

            yuv.put(0, 0, currSample.Data.ToByteArray());

            Imgproc.cvtColor(yuv, rgb, Imgproc.COLOR_YUV2BGR_I420);

            // Resolution changed (or first frame): rebuild the texture and
            // rescale the display plane to the new aspect ratio.
            if (_imageDims.Count == 0 || currShape[0] != _imageDims[0] || currShape[1] != _imageDims[1] || currShape[2] != _imageDims[2])
            {
                _imageDims = currShape;
                var scaleX = (float)_imageDims[0] / (float)_imageDims[1];

                Debug.Log("Resize triggered. Setting texture resolution to " + currShape[0] + "x" + currShape[1]);
                Debug.Log("Setting horizontal scale to " + scaleX + " " + (float)_imageDims[0] + " " + (float)_imageDims[1]);

                UnityEngine.Vector3 currentScale = _leftPlane.transform.localScale;
                currentScale.x = scaleX;

                _leftPlane.transform.localScale = currentScale;
                _leftTexture = new Texture2D(rgb.width(), rgb.height(), TextureFormat.ARGB32, false)
                {
                    wrapMode = TextureWrapMode.Clamp
                };

                // _rightPlane.transform.localScale = currentScale;
                // _rightTexture = new Texture2D(rgb.width(), rgb.height(), TextureFormat.ARGB32, false)
                // {
                //  wrapMode = TextureWrapMode.Clamp
                // };
//              return true;
            }
            // Debug.Log("matToTexture2D");

            //TODO apply stereo images
            Utils.matToTexture2D(rgb, _leftTexture);
            _leftRenderer.material.mainTexture = _leftTexture;
#endif
        }
        // NOTE(review): broad catch deliberately swallows per-frame errors so a
        // single bad frame cannot kill the stream; consider logging via LogError.
        catch (Exception e)
        {
            Debug.Log(e);
        }
//      Debug.Log("done vision set");
        return(true);
    }
// Example #3
    /// <summary>
    /// Display the received image(s) from animus. Supports mono and stacked
    /// stereo frames: when the frame height is at least twice its width, the
    /// upper half is rendered to the left eye and the lower half to the right.
    /// Requires ANIMUS_USE_OPENCV for the actual rendering path.
    /// </summary>
    /// <param name="currSamples">The image samples received via animus; only Samples[0] is used.</param>
    /// <returns>Success of this method.</returns>
    public bool vision_set(ImageSamples currSamples)
    {
        //return true;
        try
        {
            // Body not ready yet: report success so the caller keeps streaming.
            if (!bodyTransitionReady)
            {
                return(true);
            }
            // Static images are currently on screen; don't overwrite them.
            if (StereoPlaneMover.Instance.showingImages)
            {
                return(true);
            }

            if (!visionEnabled)
            {
                Debug.Log("Vision modality not enabled. Cannot set");
                return(false);
            }

            if (currSamples == null)
            {
                return(false);
            }
            // only set vision when it's visible
            if (StateManager.Instance.currentState != StateManager.States.HUD)
            {
                return(false);
            }

            // NOTE(review): indexing below suggests DataShape[0] = width and
            // DataShape[1] = height (Mat rows = height * 1.5) — confirm against sender.
            var currSample = currSamples.Samples[0];
            var currShape  = currSample.DataShape;
            //Debug.Log($"currshape: {currShape}");
            // A frame at least twice as tall as it is wide is treated as two
            // vertically stacked eye images (stereo); otherwise mono.
            if (currShape[1] / currShape[0] >= 2)
            {
                stereovision = true;
            }
            else
            {
                stereovision = false;
                //_rightPlane.SetActive(true);// stereovision && inHUD && animusManager.openModalitiesSuccess);
            }


            var all_bytes = currSample.Data.ToByteArray();
#if ANIMUS_USE_OPENCV
            // Lazily allocate the reusable YUV Mat on the first frame. The 1.5x
            // row count matches the I420 layout (Y plane + half-res U/V planes).
            if (!initMats)
            {
                yuv = new Mat((int)(currShape[1] * 1.5), (int)(currShape[0]), CvType.CV_8UC1);

                rgb      = new Mat();
                initMats = true;
            }

            // Skip frames whose byte count does not match the advertised I420 size.
            if (all_bytes.Length != currShape[0] * currShape[1] * 1.5)
            {
                return(true);
            }

            // Skip implausibly small frames (<=100 px in either dimension).
            if (currShape[0] <= 100 || currShape[1] <= 100) // TODO delete the / 5
            {
                return(true);
            }

            yuv.put(0, 0, all_bytes);

            // resize triggered
            // Resolution changed (or first frame): rebuild per-eye textures and,
            // when enabled, reinitialize the undistortion maps for the new size.
            if (_imageDims.Count == 0 || currShape[0] != _imageDims[0] || currShape[1] != _imageDims[1] || currShape[2] != _imageDims[2])
            {
                _imageDims = currShape;
                Debug.Log($"Resize triggered. Setting texture resolution to {currShape[0]} x {currShape[1] / 2}");
                Debug.Log($"Setting horizontal scale to {(float)_imageDims[0]} {(float)_imageDims[1] / 2}");


                if (stereovision)
                {
                    if (undistortion)
                    {
                        InitUndistortion((int)_imageDims[0], (int)_imageDims[1] / 2);
                    }

                    // only half of the vertical scale corresponds to the image for one eye
                    float scaleFactor = ((float)_imageDims[1] / 2) / (float)_imageDims[0];
                    //_leftPlane.transform.localScale = new Vector3(_leftPlane.transform.localScale.x,
                    //                                              _leftPlane.transform.localScale.y,
                    //                                              scaleFactor * _leftPlane.transform.localScale.x);
                    //_rightPlane.transform.localScale = new Vector3(_rightPlane.transform.localScale.x,
                    //                                              _rightPlane.transform.localScale.y,
                    //                                              scaleFactor * _rightPlane.transform.localScale.x);

                    // the left texture is the upper half of the received image
                    _leftTexture = new Texture2D((int)_imageDims[0], (int)_imageDims[1] / 2, TextureFormat.RGB24, false)
                    {
                        wrapMode = TextureWrapMode.Clamp
                    };

                    // the right texture is the lower half of the received image
                    _rightTexture = new Texture2D((int)_imageDims[0], (int)_imageDims[1] / 2, TextureFormat.RGB24, false)
                    {
                        wrapMode = TextureWrapMode.Clamp
                    };
                }
                else
                {
                    if (undistortion)
                    {
                        InitUndistortion((int)_imageDims[0], (int)_imageDims[1]);
                    }

                    float scaleFactor = (float)_imageDims[1] / (float)_imageDims[0];
                    //_leftPlane.transform.localScale = new Vector3(_leftPlane.transform.localScale.x,
                    //                                              _leftPlane.transform.localScale.y,
                    //                                              scaleFactor * _leftPlane.transform.localScale.x);

                    _leftTexture = new Texture2D((int)_imageDims[0], (int)_imageDims[1], TextureFormat.RGB24, false)
                    {
                        wrapMode = TextureWrapMode.Clamp
                    };
                }
            }

            if (stereovision)
            {
                // Split the stacked frame by rows: top half -> left eye, bottom half -> right eye.
                yuv_left  = yuv.rowRange(0, yuv.rows() / 2);
                yuv_right = yuv.rowRange(yuv.rows() / 2, yuv.rows());
                render_plane(yuv_left, _leftTexture, _leftRenderer, true);
                render_plane(yuv_right, _rightTexture, _rightRenderer, false);
            }
            else
            {
                render_plane(yuv, _leftTexture, _leftRenderer);
            }
#endif
        }
        // NOTE(review): broad catch deliberately swallows per-frame errors so a
        // single bad frame cannot kill the stream; consider logging via LogError.
        catch (Exception e)
        {
            Debug.Log(e);
        }

        return(true);
    }