    /// <summary>
    /// Call the Computer Vision Service to submit the image.
    /// </summary>
    public IEnumerator AnalyseLastImageCaptured(string imagePath)
    {
        Debug.Log("Analyzing...");

        ObjectRecognitionManager.Instance.PrimaryText = "Analyzing . . .";
        ObjectRecognitionManager.Instance.Update_DebugDisplay();


        WWWForm webForm = new WWWForm();

        using (UnityWebRequest unityWebRequest = UnityWebRequest.Post(predictionEndpoint, webForm))
        {
            // Gets a byte array out of the saved image
            imageBytes = GetImageAsByteArray(imagePath);

            unityWebRequest.SetRequestHeader("Content-Type", "application/octet-stream");
            unityWebRequest.SetRequestHeader("Prediction-Key", predictionKey);

            // The upload handler will help uploading the byte array with the request
            unityWebRequest.uploadHandler             = new UploadHandlerRaw(imageBytes);
            unityWebRequest.uploadHandler.contentType = "application/octet-stream";

            // The download handler will help receiving the analysis from Azure
            unityWebRequest.downloadHandler = new DownloadHandlerBuffer();

            // Send the request
            yield return unityWebRequest.SendWebRequest();

            string jsonResponse = unityWebRequest.downloadHandler.text;

            Debug.Log("response: " + jsonResponse);

            ObjectRecognitionManager.Instance.PrimaryText = "Receive response";
            ObjectRecognitionManager.Instance.Update_DebugDisplay();

            // The response will be in JSON format, therefore it needs to be deserialized
            AnalysisRootObject analysisRootObject = JsonUtility.FromJson<AnalysisRootObject>(jsonResponse);

            ObjectRecognitionManager.Instance.PrimaryText = "predictions id: " + analysisRootObject.id;
            ObjectRecognitionManager.Instance.Update_DebugDisplay();

            Debug.Log("predictions id: " + analysisRootObject.id);

            ObjectRecognitionManager.Instance.ObjectAnalysisResult(analysisRootObject);
        }
    }
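
The AnalysisRootObject, Prediction, and BoundingBox types these examples deserialize into are never shown. JsonUtility.FromJson only populates public fields of [Serializable] types whose names match the JSON keys, so the data contract presumably looks something like the sketch below (field names follow the Custom Vision object-detection prediction JSON; treat the exact shape as an assumption, not a confirmed definition):

    // Requires: using System; using System.Collections.Generic;
    // Sketch of the assumed data contract; field names must match the JSON keys exactly.
    [Serializable]
    public class BoundingBox
    {
        public double left;
        public double top;
        public double width;
        public double height;
    }

    [Serializable]
    public class Prediction
    {
        public double probability;
        public string tagName;
        public BoundingBox boundingBox;
    }

    [Serializable]
    public class AnalysisRootObject
    {
        public string id;
        public string project;
        public string iteration;
        public List<Prediction> predictions;
    }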
Example #2
    public void PlaceLabels(AnalysisRootObject analysisObject)
    {
        if (analysisObject.predictions != null)
        {
            foreach (var prediction in analysisObject.predictions)
            {
                if (prediction.probability > probabilityThreshold)
                {
                    // LABEL FOR TAG
                    Transform newLabel = Instantiate(label.transform, cursor.transform.position, transform.rotation);
                    newLabel.transform.localScale = new Vector3(0.005f, 0.005f, 0.005f);

                    // LABEL FOR PROBABILITY
                    Transform newProbaLabel = Instantiate(probaLabel.transform, cursor.transform.position, transform.rotation);
                    newProbaLabel.transform.localScale = new Vector3(0.005f, 0.005f, 0.005f);

                    quadRenderer = quad.GetComponent<Renderer>();
                    Bounds quadBounds = quadRenderer.bounds;

                    // Position the label as close as possible to the Bounding Box of the prediction
                    // At this point it will not consider depth
                    newLabel.transform.parent             = quad.transform;
                    newProbaLabel.transform.parent        = quad.transform;
                    newLabel.transform.localPosition      = CalculateBoundingBoxPosition(quadBounds, prediction.boundingBox);
                    newProbaLabel.transform.localPosition = CalculateBoundingBoxPosition(quadBounds, prediction.boundingBox);

                    // Cast a ray from the user's head towards the placed label; it should hit the
                    // object detected by the Service. The label is then repositioned to where the
                    // ray meets the spatial mesh (HoloLens spatial mapping).
                    Vector3    headPosition = Camera.main.transform.position;
                    RaycastHit objHitInfo;
                    // Direction from the head to the label (the bare label position only
                    // approximates this while the camera is at the world origin)
                    Vector3    objDirection = newLabel.position - headPosition;
                    if (Physics.Raycast(headPosition, objDirection, out objHitInfo, 30.0f, SpatialMapping.PhysicsRaycastMask))
                    {
                        newLabel.position = objHitInfo.point;
                    }

                    newProbaLabel.position = new Vector3(newLabel.position.x, newLabel.position.y - 0.05f, newLabel.position.z);


                    // Set the tag text
                    newLabel.GetComponent <TextMesh>().text = prediction.tagName;
                    // Set the probability text (formatted to two decimal places;
                    // Substring(0, 4) would throw on short strings such as "1")
                    newProbaLabel.GetComponent<TextMesh>().text = prediction.probability.ToString("0.00");
                }
            }
        }

        // Reset the color of the cursor
        cursor.GetComponent <Renderer>().material.color = Color.green;

        // Stop the analysis process
        ImageCapture.Instance.ResetImageCapture();
    }
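
The PlaceLabels and FinaliseLabel methods all call a CalculateBoundingBoxPosition helper that is not included in these excerpts. Below is a sketch close to the helper in the Azure Mixed Reality tutorials, which maps the centre of a normalised (0..1) bounding box onto the quad's local space; note that Example #13 instead expects a variant returning a rectangle, so treat this as illustrative rather than the exact helper each repo ships:

    /// <summary>
    /// Map the centre of a normalised (0..1) bounding box onto the quad's local space.
    /// Illustrative sketch modelled on the Azure Mixed Reality tutorial helper.
    /// </summary>
    public static Vector3 CalculateBoundingBoxPosition(Bounds b, BoundingBox boundingBox)
    {
        double centerFromLeft = boundingBox.left + (boundingBox.width / 2);
        double centerFromTop  = boundingBox.top + (boundingBox.height / 2);

        double quadWidth  = b.size.normalized.x;
        double quadHeight = b.size.normalized.y;

        // Convert from image coordinates (origin top-left, y down)
        // to quad-local coordinates (origin centre, y up)
        double normalisedPos_X = (quadWidth * centerFromLeft) - (quadWidth / 2);
        double normalisedPos_Y = (quadHeight * (1 - centerFromTop)) - (quadHeight / 2);

        return new Vector3((float)normalisedPos_X, (float)normalisedPos_Y, 0);
    }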
Example #3
    /// <summary>
    /// Set the Tags as Text of the last label created.
    /// </summary>
    public void FinaliseLabel(AnalysisRootObject analysisObject)
    {
        if (analysisObject.predictions != null)
        {
            lastLabelPlacedText = lastLabelPlaced.GetComponent<TextMesh>();
            // Sort the predictions and take the one with the highest probability
            List<Prediction> sortedPredictions = analysisObject.predictions.OrderBy(p => p.probability).ToList();
            Prediction bestPrediction = sortedPredictions[sortedPredictions.Count - 1];

            if (bestPrediction.probability > probabilityThreshold)
            {
                quadRenderer = quad.GetComponent<Renderer>();
                Bounds quadBounds = quadRenderer.bounds;

                // Position the label as close as possible to the Bounding Box of the prediction
                // At this point it will not consider depth
                lastLabelPlaced.transform.parent        = quad.transform;
                lastLabelPlaced.transform.localPosition = CalculateBoundingBoxPosition(quadBounds, bestPrediction.boundingBox);

                // Create the bounding rectangle for the prediction
                rect.transform.localPosition = CalculateBoundingBoxPosition(quadBounds, bestPrediction.boundingBox);
                rect.transform.localScale    = new Vector3((float)rectHeight * 0.8f, (float)rectWidth * 0.8f, 0.8f);
                Instantiate(rect, cursor.transform.position, transform.rotation);


                // Set the tag text
                lastLabelPlacedText.text = bestPrediction.tagName;

                // Cast a ray from the user's head towards the placed label; it should hit the
                // object detected by the Service. The label is then repositioned to where the
                // ray meets the spatial mesh (HoloLens spatial mapping).
                Debug.Log("Repositioning Label");
                Vector3    headPosition = Camera.main.transform.position;
                RaycastHit objHitInfo;
                // Direction from the head to the label
                Vector3    objDirection = lastLabelPlaced.position - headPosition;
                if (Physics.Raycast(headPosition, objDirection, out objHitInfo, 30.0f, SpatialMapping.PhysicsRaycastMask))
                {
                    lastLabelPlaced.position = objHitInfo.point;
                }
            }
        }
        // Reset the color of the cursor
        cursor.GetComponent <Renderer>().material.color = Color.green;

        // Stop the analysis process
        ImageCapture.Instance.ResetImageCapture();
    }
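
GetImageAsByteArray is likewise referenced everywhere but never defined in these excerpts. A minimal sketch, assuming the captured file exists and is small enough to buffer in memory:

    // Requires: using System.IO;
    // Minimal sketch: read the captured image back from disk as raw bytes.
    static byte[] GetImageAsByteArray(string imageFilePath)
    {
        using (FileStream fileStream = new FileStream(imageFilePath, FileMode.Open, FileAccess.Read))
        using (BinaryReader binaryReader = new BinaryReader(fileStream))
        {
            return binaryReader.ReadBytes((int)fileStream.Length);
        }
    }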
Example #4
    public void ObjectAnalysisResult(AnalysisRootObject analysisObject)
    {
        Debug.Log("Showing result . . .");
        PrimaryText = "Showing result . . .";
        Update_DebugDisplay();

        if (analysisObject.predictions != null)
        {
            Debug.Log("Prediction is");
            PrimaryText = "Prediction is . . .";
            Update_DebugDisplay();

            // Sort the predictions and take the one with the highest probability
            List<Prediction> sortedPredictions = analysisObject.predictions.OrderBy(p => p.probability).ToList();
            Prediction bestPrediction = sortedPredictions[sortedPredictions.Count - 1];

            if (bestPrediction.probability > probabilityThreshold)
            {
                //PrimaryText = bestPrediction.tagName + " : " + bestPrediction.probability.ToString();
                PrimaryText = "Take a picture to be analyzed";

                Update_DebugDisplay();

                GameObject resultPanel = Instantiate(panelReference, CameraCache.Main.transform.position, CameraCache.Main.transform.rotation);
                Vector3    toPosition  = CameraCache.Main.transform.position + CameraCache.Main.transform.forward;
                resultPanel.transform.position = toPosition;

                PredictionResultData resultData = new PredictionResultData();
                resultData.objectName      = bestPrediction.tagName;
                resultData.confidentResult = bestPrediction.probability.ToString();

                StateManagement.Instance.lastestPredictionResult = resultData;

                resultPanels.Add(resultPanel);
            }
        }
        else
        {
            Debug.Log("Object is unknown");
            PrimaryText = "Object is unknown";
            Update_DebugDisplay();
        }

        // Stop the analysis process
        ImageCapture.Instance.ResetImageCapture();
    }
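
PredictionResultData is not defined in any of these excerpts; judging purely from the usage above, it is a simple carrier type, roughly:

    // Hypothetical shape, inferred from the usage in ObjectAnalysisResult
    [Serializable]
    public class PredictionResultData
    {
        public string objectName;
        public string confidentResult;
    }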
Example #5
    /// <summary>
    /// Call the Computer Vision Service to submit the image.
    /// </summary>
    public IEnumerator AnalyseLastImageCaptured(string imagePath)
    {
        Debug.Log("Analyzing...");

        WWWForm webForm = new WWWForm();

        using (UnityWebRequest unityWebRequest = UnityWebRequest.Post(predictionEndpoint, webForm))
        {
            // Gets a byte array out of the saved image
            imageBytes = GetImageAsByteArray(imagePath);

            unityWebRequest.SetRequestHeader("Content-Type", "application/octet-stream");
            unityWebRequest.SetRequestHeader("Prediction-Key", predictionKey);

            // The upload handler will help uploading the byte array with the request
            unityWebRequest.uploadHandler             = new UploadHandlerRaw(imageBytes);
            unityWebRequest.uploadHandler.contentType = "application/octet-stream";

            // The download handler will help receiving the analysis from Azure
            unityWebRequest.downloadHandler = new DownloadHandlerBuffer();

            // Send the request
            yield return unityWebRequest.SendWebRequest();

            string jsonResponse = unityWebRequest.downloadHandler.text;

            Debug.Log("response: " + jsonResponse);

            // Create a texture. Texture size does not matter, since
            // LoadImage will replace with the incoming image size.
            Texture2D tex = new Texture2D(1, 1);
            tex.LoadImage(imageBytes);
            SceneOrganiser.Instance.quadRenderer.material.SetTexture("_MainTex", tex);

            // The response will be in JSON format, therefore it needs to be deserialized
            AnalysisRootObject analysisRootObject = JsonConvert.DeserializeObject<AnalysisRootObject>(jsonResponse);

            SceneOrganiser.Instance.FinaliseLabel(analysisRootObject);
            SceneOrganiser.Instance.PingOnSuccessful();
        }
    }
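
None of these coroutines check whether the request actually succeeded before reading downloadHandler.text, so a network failure gets deserialized as if it were a prediction. A guard you could place right after the yield (UnityWebRequest.Result exists from Unity 2020.1; the ResetImageCapture call mirrors the cleanup used elsewhere in these examples):

    yield return unityWebRequest.SendWebRequest();

    // Unity 2020.1+; on older versions test isNetworkError / isHttpError instead
    if (unityWebRequest.result != UnityWebRequest.Result.Success)
    {
        Debug.LogError("Prediction request failed: " + unityWebRequest.error);
        ImageCapture.Instance.ResetImageCapture();
        yield break;
    }

    string jsonResponse = unityWebRequest.downloadHandler.text;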
    public IEnumerator AnalyseLastImageCaptured(string imagePath)
    {
        Debug.Log("Analyzing...");

        // WWWForm is a helper class for building form data to send to the server
        WWWForm webForm = new WWWForm();

        // The POST call takes two parts: the URI and the WWWForm body data
        using (UnityWebRequest unityWebRequest = UnityWebRequest.Post(predictionEndpoint, webForm))
        {
            imageBytes = GetImageAsByteArray(imagePath);

            // Set the request headers
            unityWebRequest.SetRequestHeader("Content-Type", "application/octet-stream");
            unityWebRequest.SetRequestHeader("Prediction-Key", predictionKey);

            // Upload the body to the server; the upload handler's contentType must be set
            unityWebRequest.uploadHandler             = new UploadHandlerRaw(imageBytes);
            unityWebRequest.uploadHandler.contentType = "application/octet-stream";

            // The download handler receives the analysis returned by the server
            unityWebRequest.downloadHandler = new DownloadHandlerBuffer();

            // yield is only valid inside an IEnumerator coroutine
            yield return unityWebRequest.SendWebRequest();

            string jsonResponse = unityWebRequest.downloadHandler.text;
            Debug.Log("response: " + jsonResponse);


            Texture2D tex = new Texture2D(1, 1);
            tex.LoadImage(imageBytes);
            SceneOrganiser.Instance.quadRenderer.material.SetTexture("_MainTex", tex);

            AnalysisRootObject analysisRootObject = JsonConvert.DeserializeObject<AnalysisRootObject>(jsonResponse);

            // Once Azure has finished analyzing the latest photo, call FinaliseLabel() to place the label text correctly
            SceneOrganiser.Instance.FinaliseLabel(analysisRootObject);
        }
    }
    /// <summary>
    /// Set the Tags as Text of the last label created.
    /// </summary>

    public void FinaliseLabel(AnalysisRootObject analysisObject)
    {
        if (analysisObject.predictions != null && analysisObject.predictions.Any())
        {
            lastLabelPlacedText = lastLabelPlaced.GetComponent <TextMesh>();

            quadRenderer = quad.GetComponent<Renderer>();
            Bounds quadBounds = quadRenderer.bounds;

            // Position the label as close as possible to the Bounding Box of the prediction
            // At this point it will not consider depth
            QRObject qRObject = analysisObject.predictions.First();

            lastLabelPlaced.transform.parent        = quad.transform;
            lastLabelPlaced.transform.localPosition = CalculateBoundingBoxPosition(quadBounds, qRObject.boundingBox);

            // Set the tag text
            lastLabelPlacedText.text = qRObject.tagName;

            // Cast a ray from the user's head towards the placed label; it should hit the
            // object detected by the Service. The label is then repositioned to where the
            // ray meets the spatial mesh (HoloLens spatial mapping).
            Debug.Log("Repositioning Label");
            Vector3    headPosition = Camera.main.transform.position;
            RaycastHit objHitInfo;
            // Direction from the head to the label
            Vector3    objDirection = lastLabelPlaced.position - headPosition;
            if (Physics.Raycast(headPosition, objDirection, out objHitInfo, 30.0f, SpatialMapping.PhysicsRaycastMask))
            {
                lastLabelPlaced.position = objHitInfo.point;
            }
        }

        // Reset the color of the cursor
        cursor.GetComponent <Renderer>().material.color = Color.green;

        // Stop the analysis process
        ImageCapture.Instance.ResetImageCapture();
    }
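
QRObject is another type defined outside these excerpts; from the two QR examples it needs at least a tag name and a bounding box, so a plausible (hypothetical) shape is:

    // Hypothetical shape, inferred from the QR examples only
    [Serializable]
    public class QRObject
    {
        public string tagName;
        public BoundingBox boundingBox;
    }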
Example #8
    /// <summary>
    /// Set the Tags as Text of the last label created.
    /// </summary>
    public void ShowTag(AnalysisRootObject analysisObject)
    {
        if (analysisObject.predictions != null)
        {
            // Sort the predictions to locate the highest one. Note that for demo purposes, only that tag is shown.
            List<Prediction> sortedPredictions = analysisObject.predictions.OrderBy(p => p.probability).ToList();
            Prediction bestPrediction = sortedPredictions[sortedPredictions.Count - 1];

            if (bestPrediction.probability > probabilityThreshold)
            {
                quadRenderer = quad.GetComponent<Renderer>();
                Bounds quadBounds = quadRenderer.bounds;

                // Position the label as close as possible to the Bounding Box of the prediction
                // At this point it will not consider depth
                this.tagFound.SetActive(true);
                this.tagFound.transform.parent          = quad.transform;
                this.tagFound.transform.localPosition   = CalculateBoundingBoxPosition(quadBounds, bestPrediction.boundingBox);
                this.tagName.GetComponent <Text>().text = bestPrediction.tagName;

                this.ShowObjectIdentified();
            }
            else
            {
                this.ShowNoObjectsIdentified();
            }
        }
        else
        {
            this.ShowNoObjectsIdentified();
        }

        // Stop the analysis process
        ImageCapture.Instance.ResetImageCapture();
    }
Example #9
    //1. Set the label text to the prediction with the highest probability
    //2. Call CalculateBoundingBoxPosition to place the label in the scene
    //3. Raycast from the camera towards the bounding box against the spatial-mapping physics
    //   layer to obtain depth information, and adjust the label's depth accordingly
    //4. Reset the capture process so the user can take a new photo
    public void FinaliseLabel(AnalysisRootObject analysisRootObject)
    {
        if (analysisRootObject.predictions != null)
        {
            lastLabelPlacedText = lastLabelPlaced.GetComponent<TextMesh>();
            // OrderBy sorts ascending by default, so the last element has the highest probability
            List<Prediction> sortedPredictions = analysisRootObject.predictions.OrderBy(p => p.probability).ToList();
            Prediction bestPrediction = sortedPredictions[sortedPredictions.Count - 1];

            if (bestPrediction.probability > probabilityThreshold)
            {
                quadRenderer = quad.GetComponent<Renderer>();
                // Bounding volume of the quad in world space
                Bounds quadBounds = quadRenderer.bounds;

                lastLabelPlaced.transform.parent = quad.transform;
                // localPosition is relative to the parent (the quad): it is the offset between
                // the centers of parent and child, so at this point the label text sits on the quad
                lastLabelPlaced.transform.localPosition = CalculateBoundingBoxPosition(quadBounds, bestPrediction.boundingBox);
                lastLabelPlacedText.text = bestPrediction.tagName;



                /*
                 * // Semi-transparent cube experiment
                 * Debug.Log("Placing The Box.");
                 * box = GameObject.CreatePrimitive(PrimitiveType.Cube);
                 * float BoxTransparency = 0.5f;
                 * box.GetComponent<Renderer>().material = new Material(Shader.Find("Legacy Shaders/Transparent/Diffuse"));
                 * box.GetComponent<Renderer>().material.color = new Color(1, 1, 1, BoxTransparency);
                 * box.transform.parent = quad.transform;
                 * box.transform.localPosition = CalculateBoundingBoxPosition(quadBounds, bestPrediction.boundingBox);
                 *
                 * // Note: localScale is a (0,1) fraction of the parent's scale
                 * // Apparently-correct box size
                 * box.transform.localScale = new Vector3((float)(bestPrediction.boundingBox.width/(2*quad.transform.localScale.x)),(float)(bestPrediction.boundingBox.height/(2*quad.transform.localScale.y)), 0.05f);
                 * Debug.LogFormat(@"The Local Scale X of Quad:{0}  The Local Scale Y of Quad:{1} ", quad.transform.localScale.x, quad.transform.localScale.y);
                 * Debug.Log("Firstly The Label Position: " + lastLabelPlaced.transform.position);
                 * Debug.Log("Firstly The Box Position: " + box.transform.position);
                 */


                Debug.Log("Repositioning Label...");


                Vector3    headPosition = Camera.main.transform.position;
                RaycastHit objHitInfo;
                Vector3    objDirection = lastLabelPlaced.position;

                if (Physics.Raycast(headPosition, objDirection, out objHitInfo, 30.0f, SpatialMapping.PhysicsRaycastMask))
                {
                    //设定世界坐标位置
                    lastLabelPlaced.position = objHitInfo.point;

                    /*
                     * box.transform.position = objHitInfo.point;
                     * Debug.Log("After Raycast The Label Position: " + lastLabelPlaced.position);
                     * Debug.Log("After Raycast The Box Position: " + box.transform.position);
                     */
                }
            }
        }

        cursor.GetComponent <Renderer>().material.color = Color.green;

        ImageCapture.Instance.ResetImageCapture();
    }
Example #10
    /// <summary>
    /// Call the Computer Vision Service to submit the image.
    /// </summary>
    public IEnumerator AnalyseLastImageCaptured()
    {
        Debug.Log("Analyzing...");
        WWWForm webForm = new WWWForm();

        using (UnityWebRequest unityWebRequest = UnityWebRequest.Post(visionAnalysisEndpoint, webForm))
        {
            // gets a byte array out of the saved image
            imageBytes = GetImageAsByteArray(imagePath);
            Debug.Log("Image size in bytes: " + imageBytes.Length);
            unityWebRequest.SetRequestHeader("Content-Type", "application/octet-stream");
            unityWebRequest.SetRequestHeader(ocpApimSubscriptionKeyHeader, authorizationKey);

            // the download handler will help receiving the analysis from Azure
            unityWebRequest.downloadHandler = new DownloadHandlerBuffer();

            // the upload handler will help uploading the byte array with the request
            unityWebRequest.uploadHandler             = new UploadHandlerRaw(imageBytes);
            unityWebRequest.uploadHandler.contentType = "application/octet-stream";

            yield return unityWebRequest.SendWebRequest();

            Debug.Log("Response code: " + unityWebRequest.responseCode);

            try
            {
                string jsonResponse = unityWebRequest.downloadHandler.text;

                // The response will be in JSON format, therefore it needs to be
                // deserialized into the classes AnalysisRootObject and TagData
                AnalysisRootObject analysedObject = JsonUtility.FromJson<AnalysisRootObject>(jsonResponse);

                if (analysedObject.tags == null)
                {
                    Debug.Log("analysedObject.tagData is null");
                }
                else
                {
                    Dictionary<string, float> tagsDictionary = new Dictionary<string, float>();

                    foreach (TagData tag in analysedObject.tags)
                    {
                        tagsDictionary.Add(tag.name, tag.confidence);
                    }
                    Texture2D tex = new Texture2D(1, 1);
                    tex.LoadImage(imageBytes);
                    ResultsLabel.instance.quadRenderer.material.SetTexture("_MainTex", tex);
                    ResultsLabel.instance.SetTagsToLastLabel(tagsDictionary);
                }
            }
            catch (Exception exception)
            {
                Debug.Log("Json exception.Message: " + exception.Message);
            }

            yield return null;
        }
    }
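
Example #10 calls the general Computer Vision analyze endpoint rather than Custom Vision, so its response carries tags instead of predictions. The TagData shape that code assumes (names follow the Computer Vision analyze JSON; a sketch, not a confirmed definition) would be:

    // Sketch; AnalysisRootObject would expose a matching field: public List<TagData> tags;
    [Serializable]
    public class TagData
    {
        public string name;
        public float confidence;
    }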
Example #11
    /// <summary>
    /// Call the Computer Vision Service to submit the image.
    /// </summary>
    public IEnumerator AnalyseLastImageCaptured(string imagePath)
    {
        Debug.Log("Analyzing...");

        WWWForm webForm = new WWWForm();

        using (UnityWebRequest unityWebRequest = UnityWebRequest.Post(predictionEndpoint, webForm))
        {
            System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();
            sw.Start();

            // Gets a byte array out of the saved image
            imageBytes = GetImageAsByteArray(imagePath);

            unityWebRequest.SetRequestHeader("Content-Type", "application/octet-stream");
            unityWebRequest.SetRequestHeader("Prediction-Key", predictionKey);

            // The upload handler will help uploading the byte array with the request
            unityWebRequest.uploadHandler             = new UploadHandlerRaw(imageBytes);
            unityWebRequest.uploadHandler.contentType = "application/octet-stream";

            // The download handler will help receiving the analysis from Azure
            unityWebRequest.downloadHandler = new DownloadHandlerBuffer();

            Debug.Log("Time 0 : " + sw.Elapsed);

            // Send the request
            yield return unityWebRequest.SendWebRequest();

            Debug.Log("Time 1 : " + sw.Elapsed);

            string jsonResponse = unityWebRequest.downloadHandler.text;

            Debug.Log("response: " + jsonResponse);

            // Create a texture. Texture size does not matter, since
            // LoadImage will replace with the incoming image size.
            Debug.Log("Time 2 : " + sw.Elapsed);
            Texture2D tex = new Texture2D(1, 1);
            tex.LoadImage(imageBytes);
            Debug.Log("Time 3 : " + sw.Elapsed);
            SceneOrganiser.Instance.quadRenderer.material.SetTexture("_MainTex", tex);
            Debug.Log("Time 4 : " + sw.Elapsed);

            // The response will be in JSON format, therefore it needs to be deserialized
            AnalysisRootObject analysisRootObject = JsonConvert.DeserializeObject<AnalysisRootObject>(jsonResponse);

            Debug.Log("Time 5 : " + sw.Elapsed);

            SceneOrganiser.Instance.FinaliseLabel(analysisRootObject);
            for (int i = 0; i < analysisRootObject.predictions.Count; i++)
            {
                Debug.Log("Prediction " + (i + 1) + " in this frame: " + analysisRootObject.predictions[i].probability
                          + " for object " + analysisRootObject.predictions[i].tagName + " in the bounding box of data "
                          + " left: " + analysisRootObject.predictions[i].boundingBox.left
                          + " top: " + analysisRootObject.predictions[i].boundingBox.top
                          + " width: " + analysisRootObject.predictions[i].boundingBox.width
                          + " height: " + analysisRootObject.predictions[i].boundingBox.height);
            }

            sw.Stop();
        }
    }
Example #12
    /// <summary>
    /// Set the Tags as Text of the last label created.
    /// </summary>
    public void FinaliseLabel(AnalysisRootObject analysisObject)
    {
        if (analysisObject != null && analysisObject.predictions != null)
        {
            lastLabelPlacedText = lastLabelPlaced.GetComponent <TextMesh>();
            // Sort the predictions to locate the highest one
            List<Prediction> sortedPredictions = analysisObject.predictions.OrderBy(p => p.probability).ToList();

            Prediction bestPrediction = new Prediction();

            if (sortedPredictions.Count > 0)
            {
                bestPrediction = sortedPredictions[sortedPredictions.Count - 1];
            }
            else
            {
                bestPrediction.probability = 0;
            }

            if (bestPrediction.probability > probabilityThreshold)
            {
                quadRenderer = quad.GetComponent<Renderer>();
                Bounds quadBounds = quadRenderer.bounds;

                // Position the label as close as possible to the Bounding Box of the prediction
                // At this point it will not consider depth
                lastLabelPlaced.transform.parent = quad.transform;
                Vector3 labelPosition = CalculateBoundingBoxPosition(quadBounds, bestPrediction.boundingBox);
                lastLabelPlaced.transform.localPosition = labelPosition;

                // Set the tag text
                lastLabelPlacedText.text = "";//bestPrediction.tagName;


                // Cast a ray from the head position recorded at capture time towards the placed
                // label; it should hit the object detected by the Service. The label is then
                // repositioned to where the ray meets the spatial mesh.
                Debug.Log("Repositioning Label");
                RaycastHit objHitInfo;
                // Direction from the capture-time head position to the label
                Vector3    objDirection = lastLabelPlaced.position - captureTimeHeadPosition;

                if (Physics.Raycast(captureTimeHeadPosition, objDirection, out objHitInfo, 30.0f, captureTimeRaycastMask))
                {
                    lastLabelPlaced.position = objHitInfo.point;
                    latestDataPanelPosition  = objHitInfo.point;
                }
                Debug.Log("Success");

                ObjectData data = envData.getObjectData(bestPrediction.tagName);
                data.probability = bestPrediction.probability;
                StartCoroutine(MakeDataPanel(data, latestDataPanelPosition, latestDataPanelRotation));
            }
            else
            {
                LoadingRotation loadingRotation = lastLoadingIcon.GetComponent <LoadingRotation>();
                loadingRotation.Failed();
            }

            IEnumerator coroutine = ResetLoadingUI(lastLabelPlacedText, lastLoadingIcon, 5.0f);
            StartCoroutine(coroutine);
        }

        // Reset the color of the cursor
        cursor.GetComponent <Renderer>().material.color = Color.yellow;

        // Stop the analysis process
        ImageCapture.Instance.ResetImageCapture();
    }
Example #13
    /// <summary>
    /// Set the Tags as Text of the last label created.
    /// </summary>
    public void FinaliseLabel(AnalysisRootObject analysisObject)
    {
        if (analysisObject.predictions != null)
        {
            // Sort the predictions from highest to lowest probability
            List<Prediction> sortedPredictions = analysisObject.predictions.OrderByDescending(p => p.probability).ToList();
            foreach (Prediction prediction in sortedPredictions)
            {
                if (prediction.probability < probabilityThreshold)
                {
                    Debug.Log("Already at the threshold. Stopping");
                    break;
                }

                if (prediction.tagName != suitcaseLabel)
                {
                    Debug.Log("Skipping non-suitcase: " + prediction.tagName);
                    continue;
                }
                GameObject label = CreateLabel();

                // Instantiate the new label at the cursor position
                Transform labelObj = Instantiate(label.transform, cursor.transform.position, transform.rotation);

                // Text component of the new label
                TextMesh labelObjText = labelObj.GetComponent<TextMesh>();

                // Initialize new label
                PlaceAnalysisLabel(label, labelObj, labelObjText);

                quadRenderer = quad.GetComponent<Renderer>();
                Bounds quadBounds = quadRenderer.bounds;

                // Position the label as close as possible to the Bounding Box of the prediction
                // At this point it will not consider depth
                labelObj.transform.parent = quad.transform;
                MyRectangle positionRect = CalculateBoundingBoxPosition(quadBounds, prediction.boundingBox);
                labelObj.transform.localPosition = positionRect.GetCenter();

                // Cast a ray from the user's head towards the placed label; it should hit the
                // object detected by the Service. The label is then repositioned to where the
                // ray meets the spatial mesh (HoloLens spatial mapping).
                Debug.Log("Repositioning Label");
                Vector3    headPosition = Camera.main.transform.position;
                RaycastHit objHitInfo;
                // Direction from the head to the label
                Vector3    objDirection     = labelObj.position - headPosition;
                string     dimensionsString = "";
                if (Physics.Raycast(headPosition, objDirection, out objHitInfo, 30.0f, SpatialMapping.PhysicsRaycastMask))
                {
                    float widthInMeters  = 0;
                    float heightInMeters = 0;
                    labelObj.position = objHitInfo.point;
                    widthInMeters     = Vector3.Distance(new Vector3(positionRect.topLeft.x, positionRect.topLeft.y, objHitInfo.distance), new Vector3(positionRect.bottomRight.x, positionRect.topLeft.y, objHitInfo.distance));
                    heightInMeters    = Vector3.Distance(new Vector3(positionRect.topLeft.x, positionRect.topLeft.y, objHitInfo.distance), new Vector3(positionRect.topLeft.x, positionRect.bottomRight.y, objHitInfo.distance));
                    dimensionsString  = " - " + widthInMeters + "x" + heightInMeters;
                    // Draw rectangle around
                    DrawRect(positionRect, objHitInfo.distance);
                }

                // Set the tag text
                labelObjText.text = prediction.tagName + " (" + (prediction.probability * 100) + "%)" + dimensionsString;
            }
        }
        // Reset the color of the cursor
        cursor.GetComponent <Renderer>().material.color = Color.green;

        // Stop the analysis process
        ImageCapture.Instance.ResetImageCapture();
    }
    /// <summary>
    /// Call the Computer Vision Service to submit the image.
    /// </summary>
    public IEnumerator AnalyseLastImageCaptured(string imagePath, string operation = "label")
    {
        Debug.Log("Analyzing...");

        WWWForm webForm = new WWWForm();

        using (UnityWebRequest unityWebRequest = UnityWebRequest.Post(predictionEndpoint, webForm))
        {
            // Gets a byte array out of the saved image
            imageBytes = GetImageAsByteArray(imagePath);

            unityWebRequest.SetRequestHeader("Content-Type", "application/octet-stream");

            // The upload handler will help uploading the byte array with the request
            unityWebRequest.uploadHandler             = new UploadHandlerRaw(imageBytes);
            unityWebRequest.uploadHandler.contentType = "application/octet-stream";

            // The download handler will help receiving the analysis from Azure
            unityWebRequest.downloadHandler = new DownloadHandlerBuffer();

            // Send the request
            yield return unityWebRequest.SendWebRequest();

            string jsonResponse = unityWebRequest.downloadHandler.text;

            Debug.Log("response: " + jsonResponse);

            // Create a texture. Texture size does not matter, since
            // LoadImage will replace with the incoming image size.
            Texture2D tex = new Texture2D(1, 1);
            tex.LoadImage(imageBytes);
            SceneOrganiser.Instance.quadRenderer.material.SetTexture("_MainTex", tex);

            // The response will be in JSON format, therefore it needs to be deserialized
            AnalysisRootObject analysisRootObject = JsonUtility.FromJson<AnalysisRootObject>(jsonResponse);
            QRObject qRObject = analysisRootObject.predictions.First();
            // Strip the fixed 7-character prefix from the QR tag name
            string data = qRObject.tagName.Substring(7);

            switch (operation)
            {
            case "label":
                SceneOrganiser.Instance.FinaliseLabel(analysisRootObject);
                break;

            case "get_location":
                // TODO Validate data, i.e error handling
                var parameters   = data.Split(',');
                var buid         = parameters[0];
                var floor_number = int.Parse(parameters[1]);
                StartCoroutine(navManager.LoadPois(buid, floor_number));
                break;

            case "get_worker":
                firebaseManager.loadWorker(data);
                break;

            default:
                break;
            }
        }
    }
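
The QR examples parse qRObject.tagName with a bare Substring(7) and Split(','), either of which throws on a malformed payload. A defensive sketch follows; the seven-character prefix and the "buid,floor" layout are assumptions inferred from this code, not a documented format:

    // Hypothetical guard around the QR payload parsing above
    const int PayloadPrefixLength = 7; // assumed fixed prefix, per the Substring(7) call

    static bool TryParseLocationPayload(string tagName, out string buid, out int floorNumber)
    {
        buid        = null;
        floorNumber = 0;

        if (string.IsNullOrEmpty(tagName) || tagName.Length <= PayloadPrefixLength)
        {
            return false;
        }

        string[] parts = tagName.Substring(PayloadPrefixLength).Split(',');
        if (parts.Length != 2 || !int.TryParse(parts[1], out floorNumber))
        {
            return false;
        }

        buid = parts[0];
        return true;
    }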