Example #1
0
        /// <summary>
        /// Render the camera background image with the latest tracked frame.
        /// Skips work when rendering is disabled, when the frame has not
        /// advanced since the last call, or when the frame is degenerate.
        /// </summary>
        /// <param name="state">Current tracking state supplying the camera frame</param>
        public void UpdateCameraBackgroundImage(TrackingState state)
        {
            if (!keepRendering)
            {
                return;
            }

            TrackedImage image = state.GetImage(!rg16SupportGpuAndUnityVersionCheck);

            // Same frame index as last time — nothing new to upload.
            if (prevImageIndex == image.GetIndex())
            {
                return;
            }

            prevImageIndex = image.GetIndex();

            // Reject degenerate frames: a zero width OR height would create an
            // unusable texture below (the original only rejected the 0x0 case).
            if (image.GetWidth() == 0 || image.GetHeight() == 0)
            {
                return;
            }

            // Lazily create the texture once, sized to the first valid frame.
            if (!textureCreateDone)
            {
                CreateCameraTexture(image.GetWidth(), image.GetHeight(), image.GetFormat());
                textureCreateDone = true;
            }

            UpdateInternal(image);
        }
        /// <summary>
        /// Render the camera background image with the latest tracked frame,
        /// then reposition the background plane to match it.
        /// </summary>
        /// <param name="state">Current tracking state supplying the camera frame</param>
        public void UpdateCameraBackgroundImage(TrackingState state)
        {
            if (!keepRendering)
            {
                return;
            }

            TrackedImage image = state.GetImage(rg16TextureSupported);

            // Same frame index as last time — nothing new to upload.
            if (prevImageIndex == image.GetIndex())
            {
                return;
            }

            prevImageIndex = image.GetIndex();

            // Reject degenerate frames: a zero width OR height would create an
            // unusable texture below (the original only rejected the 0x0 case).
            if (image.GetWidth() == 0 || image.GetHeight() == 0)
            {
                return;
            }

            // Lazily create the texture once, sized to the first valid frame.
            if (!textureCreateDone)
            {
                CreateCameraTexture(image.GetWidth(), image.GetHeight(), image.GetFormat());
                textureCreateDone = true;
            }

            UpdateCameraTexture(image);
            TransformBackgroundPlane();
        }
Example #3
0
        /// <summary>
        /// Starts a background thread that inspects the current tracking state
        /// and, when nothing is being tracked and feature collection is ready,
        /// requests cloud recognition for the current camera frame.
        /// </summary>
        public void FindImageOfCloudRecognition()
        {
            if (autoState)
            {
                return;
            }

            var worker = new Thread(() =>
            {
                TrackingState state = TrackerManager.GetInstance().GetTrackingState();
                TrackingResult result = state.GetTrackingResult();
                TrackedImage image = state.GetImage();

                int count = result.GetCount();
                if (count != 0)
                {
                    // A target is actively tracked.
                    cloudState = CloudState.CLOUDSTATE_TRACKING;
                }
                else if (cloudState == CloudState.CLOUDSTATE_TRACKING || cloudState == CloudState.CLOUDSTATE_STOP)
                {
                    // Tracking was lost (or stopped) — become ready to collect features again.
                    cloudState = CloudState.CLOUDSTATE_FEATURE_COLLECT_READY;
                }

                // Only proceed when we are ready to collect features.
                if (cloudState != CloudState.CLOUDSTATE_FEATURE_COLLECT_READY)
                {
                    return;
                }

                GetCloudRecognition(image, (bool ok, string feature) =>
                {
                    if (ok)
                    {
                        this.featureBase64 = feature;
                        this.cloudState = CloudState.CLOUDSTATE_CONNECT;
                    }
                    else
                    {
                        this.cloudState = CloudState.CLOUDSTATE_FEATURE_COLLECT_READY;
                    }

                    // Honor a restart request raised while recognition was in flight.
                    if (this.restart)
                    {
                        this.loopState = true;
                        this.restart = false;
                    }
                });
            });

            worker.Start();
        }
Example #4
0
        /// <summary>
        /// Extracts feature data from the tracked image and reports it to the
        /// caller as a Base64 string for cloud recognition.
        /// </summary>
        /// <param name="trackedImage">Frame to extract features from</param>
        /// <param name="complete">Callback receiving (success, Base64 feature data or null)</param>
        private void GetCloudRecognition(TrackedImage trackedImage, System.Action <bool, string> complete)
        {
            // Single guard clause replaces the original's duplicated
            // null-check / length-check error handling. Short-circuit order
            // preserves the original: nulls are rejected before Length is read.
            if (secretId == null || secretKey == null || secretId.Length < 10 || secretKey.Length < 10)
            {
                complete(false, null);
                Debug.LogError("invalid SecretId, SecretKey");
                return;
            }

            int[] resultLength      = { 0, 0 };
            bool  recognitionResult = false;

            // Collect features only when tracker data is loaded or we are in
            // the feature-collect-ready state.
            if (TrackerManager.GetInstance().IsTrackerDataLoadCompleted() || cloudState == CloudState.CLOUDSTATE_FEATURE_COLLECT_READY)
            {
                isGetFeatureState = true;
                recognitionResult = GetFeatureClient(trackedImage, this.cloudFeatureData, resultLength);
                isGetFeatureState = false;
            }

            if (isDestroy)
            {
                DestroySingleTon();
            }

            if (this.restart)
            {
                // A restart was requested mid-collection; abandon this result.
                complete(false, null);
            }
            else if (recognitionResult && resultLength[0] > 0)
            {
                // resultLength[0] is the number of valid feature bytes.
                string sendString = Convert.ToBase64String(this.cloudFeatureData, 0, resultLength[0]);
                complete(true, sendString);
            }
            else
            {
                complete(false, null);
            }
        }
 /// <summary>
 /// Queries the native layer to determine whether the camera has moved.
 /// </summary>
 /// <param name="image">Tracked frame to inspect</param>
 /// <returns>True when the native tracker reports camera movement</returns>
 // NOTE(review): getImageCptr casing differs from GetImageCptr used by
 // GetFeatureClient elsewhere in this file — confirm against TrackedImage API.
 public bool CheckCameraMove(TrackedImage image) => NativeAPI.CameraDevice_checkCameraMove(image.getImageCptr());
        /// <summary>
        /// Uploads the tracked frame's raw pixel data into the background
        /// textures, choosing the upload path by the frame's color format.
        /// Each branch only uploads when its target textures were created.
        /// </summary>
        /// <param name="image">Tracked frame whose raw data is uploaded</param>
        private void UpdateCameraTexture(TrackedImage image)
        {
            IntPtr camDataPtr = image.GetDataPtr();

            switch (image.GetFormat())
            {
            case ColorFormat.RGBA8888:
                if (rgbaTexture != null)
                {
                    // 4 bytes per pixel.
                    rgbaTexture.LoadRawTextureData(camDataPtr, image.GetWidth() * image.GetHeight() * 4);
                    rgbaTexture.Apply();
                }
                break;

            case ColorFormat.RGB888:
                if (rgbTexture != null)
                {
                    // 3 bytes per pixel.
                    rgbTexture.LoadRawTextureData(camDataPtr, image.GetWidth() * image.GetHeight() * 3);
                    rgbTexture.Apply();
                }
                break;

            case ColorFormat.YUV420sp:
                if (uvTexture != null)                         // It means that RG16 texture format is supported
                {
                    IntPtr yPtr  = IntPtr.Zero;
                    IntPtr uvPtr = IntPtr.Zero;
                    image.GetYuv420spYUVPtr(out yPtr, out uvPtr);
                    yTexture.LoadRawTextureData(yPtr, image.GetWidth() * image.GetHeight());
                    yTexture.Apply();
                    uvTexture.LoadRawTextureData(uvPtr, image.GetWidth() * image.GetHeight() / 2);
                    uvTexture.Apply();
                }
                else if (yTexture != null && uTexture != null && vTexture != null)
                {
                    IntPtr yPtr = IntPtr.Zero;
                    IntPtr uPtr = IntPtr.Zero;
                    IntPtr vPtr = IntPtr.Zero;
                    image.GetYuv420spYUVPtr(out yPtr, out uPtr, out vPtr);
                    int uvSize = image.GetWidth() * image.GetHeight() / 2;
                    // BUG FIX: load the Y plane from the pointer returned by
                    // GetYuv420spYUVPtr (yPtr), not the raw frame pointer
                    // (camDataPtr) — matching the other YUV branches.
                    yTexture.LoadRawTextureData(yPtr, image.GetWidth() * image.GetHeight());
                    yTexture.Apply();

                    uTexture.LoadRawTextureData(uPtr, uvSize / 2);
                    uTexture.Apply();

                    vTexture.LoadRawTextureData(vPtr, uvSize / 2);
                    vTexture.Apply();
                }
                break;

            case ColorFormat.YUV420_888:
                if (u16bitTexture != null)                          // It means that RG16 texture format is supported
                {
                    IntPtr yPtr = IntPtr.Zero;
                    IntPtr uPtr = IntPtr.Zero;
                    IntPtr vPtr = IntPtr.Zero;
                    image.GetYuv420_888YUVPtr(out yPtr, out uPtr, out vPtr, true);
                    yTexture.LoadRawTextureData(yPtr, image.GetWidth() * image.GetHeight());
                    yTexture.Apply();

                    u16bitTexture.LoadRawTextureData(uPtr, image.GetWidth() * image.GetHeight() / 2);
                    u16bitTexture.Apply();

                    v16bitTexture.LoadRawTextureData(vPtr, image.GetWidth() * image.GetHeight() / 2);
                    v16bitTexture.Apply();
                }
                else if (yTexture != null && uTexture != null && vTexture != null)
                {
                    IntPtr yPtr = IntPtr.Zero;
                    IntPtr uPtr = IntPtr.Zero;
                    IntPtr vPtr = IntPtr.Zero;
                    image.GetYuv420_888YUVPtr(out yPtr, out uPtr, out vPtr, false);

                    int uSize = image.GetWidth() * image.GetHeight() / 2;

                    yTexture.LoadRawTextureData(yPtr, image.GetWidth() * image.GetHeight());
                    yTexture.Apply();

                    uTexture.LoadRawTextureData(uPtr, uSize / 2);
                    uTexture.Apply();

                    vTexture.LoadRawTextureData(vPtr, uSize / 2);
                    vTexture.Apply();
                }
                break;
            }
        }
Example #7
0
 /// <summary>
 /// Asks the native cloud manager to extract recognition features from the
 /// tracked frame into the supplied buffer.
 /// </summary>
 /// <param name="trackedImage">Frame to extract features from</param>
 /// <param name="descriptData">Destination buffer for feature bytes</param>
 /// <param name="resultLength">Receives the number of valid bytes written</param>
 /// <returns>True when the native call reports success</returns>
 private bool GetFeatureClient(TrackedImage trackedImage, byte[] descriptData, int[] resultLength) =>
     NativeAPI.maxst_CloudManager_GetFeatureClient(trackedImage.GetImageCptr(), descriptData, resultLength);
Example #8
0
        /// <summary>
        /// Blocking cloud-recognition loop: polls the tracking state every
        /// 100 ms and, when nothing is tracked and the camera has moved,
        /// submits the frame's features for cloud recognition. Runs until
        /// cloudState becomes CLOUDSTATE_STOP. Intended to be run on its own
        /// thread (it sleeps and blocks on a semaphore).
        /// </summary>
        internal void StartCloud()
        {
            loopState       = true;
            this.cloudState = CloudState.CLOUDSTATE_FEATURE_COLLECT_READY;
            while (this.loopState)
            {
                // Throttle polling to roughly 10 Hz.
                Thread.Sleep(100);

                TrackingState  trackingState  = TrackerManager.GetInstance().UpdateTrackingState();
                TrackingResult trackingResult = trackingState.GetTrackingResult();
                TrackedImage   trackedImage   = trackingState.GetImage();

                // Derive the cloud state from whether any target is tracked.
                int trackingCount = trackingResult.GetCount();
                if (trackingCount == 0)
                {
                    if (cloudState == CloudState.CLOUDSTATE_TRACKING)
                    {
                        cloudState = CloudState.CLOUDSTATE_FEATURE_COLLECT_READY;
                    }
                }
                else
                {
                    cloudState = CloudState.CLOUDSTATE_TRACKING;
                }

                // Only attempt recognition while nothing is tracked.
                // NOTE(review): after the branch above, cloudState cannot be
                // CLOUDSTATE_TRACKING when trackingCount == 0, so that half of
                // this condition appears redundant — confirm before simplifying.
                if (trackingCount == 0 && (cloudState == CloudState.CLOUDSTATE_TRACKING || cloudState == CloudState.CLOUDSTATE_FEATURE_COLLECT_READY))
                {
                    if (!TrackerManager.GetInstance().IsTrackerDataLoadCompleted() || cloudState == CloudState.CLOUDSTATE_STOP)
                    {
                        continue;
                    }

                    // Flag the native feature query so other code can tell a
                    // frame read is in progress.
                    isGetFeatureState = true;
                    bool isCameraMove = CameraDevice.GetInstance().CheckCameraMove(trackedImage);
                    isGetFeatureState = false;
                    if (isCameraMove)
                    {
                        GetCloudRecognition(trackedImage, (bool cloudResult, string featureBase64) =>
                        {
                            if (cloudResult)
                            {
                                this.featureBase64 = featureBase64;
                                this.cloudState    = CloudState.CLOUDSTATE_CONNECT;
                            }
                            else
                            {
                                this.cloudState = CloudState.CLOUDSTATE_FEATURE_COLLECT_READY;
                            }

                            // Honor a restart request raised while recognition
                            // was in flight.
                            if (this.restart)
                            {
                                this.loopState = true;
                                this.restart   = false;
                            }
                            cloudSemaphore.Release();
                        });
                        // Block this loop until the recognition callback fires.
                        cloudSemaphore.WaitOne();
                    }
                }

                // A stop request ends the loop on the next iteration check.
                if (this.cloudState == CloudState.CLOUDSTATE_STOP)
                {
                    this.loopState = false;
                }
            }
        }
Example #9
0
        /// <summary>
        /// Uploads the tracked frame's raw pixel data into the background
        /// textures, choosing the upload path by the frame's color format.
        /// Unlike the GetYuv*Ptr-based variant, this method derives the plane
        /// pointers itself by offsetting into the single contiguous frame
        /// buffer returned by GetDataPtr().
        /// </summary>
        void UpdateInternal(TrackedImage image)
        {
            IntPtr cameraFramePtr = image.GetDataPtr();

            switch (image.GetFormat())
            {
            case ColorFormat.RGB888:
                if (rgbTexture != null)
                {
                    // 3 bytes per pixel.
                    rgbTexture.LoadRawTextureData(cameraFramePtr, image.GetWidth() * image.GetHeight() * 3);
                    rgbTexture.Apply();
                }
                break;

            case ColorFormat.YUV420sp:
                if (uvTexture != null)                         // It means that RG16 texture format is supported
                {
                    IntPtr yPtr;
                    IntPtr uvPtr;
                    unsafe
                    {
                        // Semi-planar layout: Y plane (w*h bytes) followed by
                        // the interleaved UV plane.
                        byte *pointer = (byte *)cameraFramePtr.ToPointer();
                        yPtr     = (IntPtr)pointer;
                        pointer += image.GetWidth() * image.GetHeight();
                        uvPtr    = (IntPtr)pointer;
                    }

                    yTexture.LoadRawTextureData(yPtr, image.GetWidth() * image.GetHeight());
                    yTexture.Apply();
                    uvTexture.LoadRawTextureData(uvPtr, image.GetWidth() * image.GetHeight() / 2);
                    uvTexture.Apply();
                }
                else if (yTexture != null && uTexture != null && vTexture != null)
                {
                    IntPtr yPtr;
                    IntPtr uPtr;
                    IntPtr vPtr;

                    // Combined size of the U and V data (half the Y plane).
                    int uvSize = image.GetWidth() * image.GetHeight() / 2;

                    unsafe
                    {
                        // Treat the buffer as planar: Y plane, then U plane
                        // (uvSize/2 bytes), then V plane.
                        byte *pointer = (byte *)cameraFramePtr.ToPointer();
                        yPtr     = (IntPtr)pointer;
                        pointer += image.GetWidth() * image.GetHeight();
                        uPtr     = (IntPtr)pointer;
                        pointer += uvSize / 2;
                        vPtr     = (IntPtr)pointer;
                    }

                    yTexture.LoadRawTextureData(yPtr, image.GetWidth() * image.GetHeight());
                    yTexture.Apply();

                    uTexture.LoadRawTextureData(uPtr, uvSize / 2);
                    uTexture.Apply();

                    vTexture.LoadRawTextureData(vPtr, uvSize / 2);
                    vTexture.Apply();
                }
                break;

            case ColorFormat.YUV420_888:
                if (u16bitTexture != null)                          // It means that RG16 texture format is supported
                {
                    IntPtr yPtr;
                    IntPtr uPtr;
                    IntPtr vPtr;
                    unsafe
                    {
                        // NOTE(review): here the U plane is assumed to span
                        // w*h/2 bytes (16-bit samples with pixel stride),
                        // unlike the planar branch below — confirm against the
                        // native frame layout.
                        byte *pointer = (byte *)cameraFramePtr.ToPointer();
                        yPtr     = (IntPtr)pointer;
                        pointer += image.GetWidth() * image.GetHeight();
                        uPtr     = (IntPtr)pointer;
                        pointer += image.GetWidth() * image.GetHeight() / 2;
                        vPtr     = (IntPtr)pointer;
                    }

                    yTexture.LoadRawTextureData(yPtr, image.GetWidth() * image.GetHeight());
                    yTexture.Apply();

                    u16bitTexture.LoadRawTextureData(uPtr, image.GetWidth() * image.GetHeight() / 2);
                    u16bitTexture.Apply();

                    v16bitTexture.LoadRawTextureData(vPtr, image.GetWidth() * image.GetHeight() / 2);
                    v16bitTexture.Apply();
                }
                else if (yTexture != null && uTexture != null && vTexture != null)
                {
                    IntPtr yPtr;
                    IntPtr uPtr;
                    IntPtr vPtr;

                    // Combined size of the U and V data (half the Y plane).
                    int uSize = image.GetWidth() * image.GetHeight() / 2;

                    unsafe
                    {
                        // NOTE(review): the V offset advances by uSize here but
                        // by uvSize/2 in the YUV420sp branch, while both load
                        // only uSize/2 bytes per chroma texture — confirm the
                        // intended plane stride.
                        byte *pointer = (byte *)cameraFramePtr.ToPointer();
                        yPtr     = (IntPtr)pointer;
                        pointer += image.GetWidth() * image.GetHeight();
                        uPtr     = (IntPtr)pointer;
                        pointer += uSize;
                        vPtr     = (IntPtr)pointer;
                    }

                    yTexture.LoadRawTextureData(yPtr, image.GetWidth() * image.GetHeight());
                    yTexture.Apply();

                    uTexture.LoadRawTextureData(uPtr, uSize / 2);
                    uTexture.Apply();

                    vTexture.LoadRawTextureData(vPtr, uSize / 2);
                    vTexture.Apply();
                }
                break;
            }
        }