Ejemplo n.º 1
0
    /// <summary>
    /// Call the Computer Vision Service to submit the image.
    /// </summary>
    /// <param name="imagePath">Path of the captured image on disk.</param>
    public IEnumerator AnalyseLastImageCaptured(string imagePath)
    {
        // UnityWebRequest.Post requires a form; the actual body is replaced
        // below by a raw upload handler carrying the image bytes.
        WWWForm webForm = new WWWForm();

        using (UnityWebRequest unityWebRequest = UnityWebRequest.Post(predictionEndpoint, webForm))
        {
            // Gets a byte array out of the saved image
            imageBytes = GetImageAsByteArray(imagePath);

            unityWebRequest.SetRequestHeader("Content-Type", "application/octet-stream");
            unityWebRequest.SetRequestHeader("Prediction-Key", predictionKey);

            // The upload handler will help uploading the byte array with the request
            unityWebRequest.uploadHandler             = new UploadHandlerRaw(imageBytes);
            unityWebRequest.uploadHandler.contentType = "application/octet-stream";

            // The download handler will help receiving the analysis from Azure
            unityWebRequest.downloadHandler = new DownloadHandlerBuffer();

            // Send the request and wait for the response
            yield return unityWebRequest.SendWebRequest();

            // Don't try to deserialize an error body (error is null on success).
            if (!string.IsNullOrEmpty(unityWebRequest.error))
            {
                Debug.LogError("Image analysis request failed: " + unityWebRequest.error);
                yield break;
            }

            string jsonResponse = unityWebRequest.downloadHandler.text;

            // The response is JSON; deserialize directly instead of allocating
            // a throwaway AnalysisObject that is immediately overwritten.
            AnalysisObject analysisObject = JsonConvert.DeserializeObject<AnalysisObject>(jsonResponse);

            // DeserializeObject returns null for an empty/invalid body.
            if (analysisObject != null)
            {
                SceneOrganiser.Instance.SetTagsToLastLabel(analysisObject);
            }
        }
    }
Ejemplo n.º 2
0
    /// <summary>
    /// Set the Tags as Text of the last label created.
    /// </summary>
    public void SetTagsToLastLabel(AnalysisObject analysisObject)
    {
        lastLabelPlacedText = lastLabelPlaced.GetComponent<TextMesh>();

        if (analysisObject.Predictions == null)
        {
            return;
        }

        foreach (Prediction prediction in analysisObject.Predictions)
        {
            // Only report highly confident predictions.
            if (prediction.Probability <= 0.99)
            {
                continue;
            }

            string detected = $"Detected: {prediction.TagName} {prediction.Probability.ToString("0.00 \n")}";
            lastLabelPlacedText.text += detected;
            Debug.Log(detected);

            nowBloom = prediction.TagName;
            Debug.Log(nowBloom + " " + prevBloom);

            // A "0" -> "1" bloom transition shuts the application down.
            if (nowBloom == "1" && prevBloom == "0")
            {
#if UNITY_EDITOR
                EditorApplication.isPlaying = false;
#elif WINDOWS_UWP
                Application.Quit();
                Windows.ApplicationModel.Core.CoreApplication.Exit();
#endif
            }
            prevBloom = nowBloom;
        }
    }
Ejemplo n.º 3
0
    /// <summary>
    /// Call the Computer Vision Service to submit the image.
    /// </summary>
    public IEnumerator AnalyseCustomtImageCaptured()
    {
        // UnityWebRequest.Post requires a form; the body is replaced below by
        // the raw upload handler.
        WWWForm webForm = new WWWForm();

        using (UnityWebRequest unityWebRequest = UnityWebRequest.Post(customVisionAnalysisEndpoint, webForm))
        {
            // Gets a byte array out of the saved image
            imageBytes = GetImageAsByteArray(imagePath);

            unityWebRequest.SetRequestHeader("Content-Type", "application/octet-stream");
            unityWebRequest.SetRequestHeader("Prediction-Key", customVisionAuthorizationKey);

            // The upload handler will help uploading the byte array with the request
            unityWebRequest.uploadHandler             = new UploadHandlerRaw(imageBytes);
            unityWebRequest.uploadHandler.contentType = "application/octet-stream";

            // The download handler will help receiving the analysis from Azure
            unityWebRequest.downloadHandler = new DownloadHandlerBuffer();

            // Send the request and wait for the response
            yield return unityWebRequest.SendWebRequest();

            string jsonResponse = unityWebRequest.downloadHandler.text;

            // The response is JSON; deserialize directly instead of allocating
            // a throwaway AnalysisObject that is immediately overwritten.
            AnalysisObject analysisObject = JsonConvert.DeserializeObject<AnalysisObject>(jsonResponse);

            // Guard Predictions too: a valid-but-empty response would otherwise
            // throw a NullReferenceException in the loop below.
            if (analysisObject != null && analysisObject.Predictions != null)
            {
                // Keep only the highest probability seen for each tag name.
                Dictionary<string, double> tagsDictionary = new Dictionary<string, double>();

                foreach (Prediction prediction in analysisObject.Predictions)
                {
                    // TryGetValue avoids the ContainsKey + indexer double lookup.
                    if (!tagsDictionary.TryGetValue(prediction.TagName, out double existing) ||
                        existing < prediction.Probability)
                    {
                        tagsDictionary[prediction.TagName] = prediction.Probability;
                    }
                }

                ResultsLabel.instance.SetTagsToCustomLabel(tagsDictionary);
            }
        }
    }
Ejemplo n.º 4
0
    /// <summary>
    /// Submits the image bytes to the prediction endpoint, then spawns a
    /// bounding box and loads character info for each confident prediction.
    /// </summary>
    /// <param name="data">Raw image bytes to upload.</param>
    IEnumerator Upload(byte[] data)
    {
        WWWForm form = new WWWForm();

        using (UnityWebRequest www = UnityWebRequest.Post("", form)) // endpoint URL goes here
        {
            www.SetRequestHeader("Prediction-Key", "");              // prediction key goes here
            www.SetRequestHeader("Content-Type", "application/octet-stream");
            www.uploadHandler             = new UploadHandlerRaw(data);
            www.uploadHandler.contentType = "application/octet-stream";

            www.downloadHandler = new DownloadHandlerBuffer();

            yield return www.SendWebRequest();

            if (www.isNetworkError || www.isHttpError)
            {
                Debug.Log(www.error);
            }
            else
            {
                string jsonResponse = www.downloadHandler.text;

                // Deserialize directly instead of allocating a throwaway
                // AnalysisObject that is immediately overwritten.
                AnalysisObject analysisObject = JsonConvert.DeserializeObject<AnalysisObject>(jsonResponse);

                // Guard against an empty/invalid response body deserializing
                // to null (the original would throw NullReferenceException).
                if (analysisObject != null && analysisObject.Predictions != null)
                {
                    foreach (Prediction predict in analysisObject.Predictions)
                    {
                        if (predict.Probability > probabiltyThreshold)
                        {
                            boxFactory.GetComponent<scrBoxFactory>().CreateBox(predict.boundingBox, predict.TagName, texture.width, texture.height);

                            // Map the recognized tag to its character-info index.
                            switch (predict.TagName)
                            {
                            case "Harry Potter":
                                infoCharacter.getCharacter(0);
                                break;

                            case "Hermione Granger":
                                infoCharacter.getCharacter(1);
                                break;

                            case "Ron Weasley":
                                infoCharacter.getCharacter(2);
                                break;

                            case "Voldemort":
                                infoCharacter.getCharacter(16);
                                break;
                            }
                        }
                    }

                    Debug.Log(analysisObject);
                }
            }
        }
    }
    /// <summary>
    /// Call the Computer Vision Service to submit the image.
    /// </summary>
    /// <param name="imageBytes">Raw image bytes to analyse.</param>
    /// <param name="onDone">Callback invoked with the most probable tag and its
    /// probability; receives (AzureCVTag.none, 0) on failure or low confidence.</param>
    IEnumerator AnalyseLastImageCaptured(byte[] imageBytes, Action <AzureCVTag, double> onDone)
    {
        WWWForm webForm = new WWWForm();

        using (UnityWebRequest unityWebRequest = UnityWebRequest.Post(predictionEndpoint, webForm))
        {
            unityWebRequest.SetRequestHeader("Content-Type", "application/octet-stream");
            unityWebRequest.SetRequestHeader("Prediction-Key", predictionKey);

            // The upload handler will help uploading the byte array with the request
            unityWebRequest.uploadHandler             = new UploadHandlerRaw(imageBytes);
            unityWebRequest.uploadHandler.contentType = "application/octet-stream";

            // The download handler will help receiving the analysis from Azure
            unityWebRequest.downloadHandler = new DownloadHandlerBuffer();

            // Send the request and wait for the response
            yield return unityWebRequest.SendWebRequest();

            string jsonResponse = unityWebRequest.downloadHandler.text;

            try
            {
                // Deserialize directly instead of allocating a throwaway
                // AnalysisObject that is immediately overwritten.
                AnalysisObject analysisObject = JsonConvert.DeserializeObject<AnalysisObject>(jsonResponse);

                // Track the most probable prediction.
                double     maxProbability = 0.0;
                AzureCVTag resultTag      = AzureCVTag.none;
                foreach (var prediction in analysisObject.Predictions)
                {
                    if (prediction.Probability > maxProbability)
                    {
                        maxProbability = prediction.Probability;
                        // If the tag name doesn't parse, 'tag' stays
                        // default(AzureCVTag) — same as the original behavior.
                        Enum.TryParse(prediction.TagName, out AzureCVTag tag);
                        resultTag = tag;
                    }
                }

                // If the max probability of a tag is lesser than 65%, better keep it to none
                if (maxProbability < 0.65)
                {
                    resultTag      = AzureCVTag.none;
                    maxProbability = 0;
                }

                onDone(resultTag, maxProbability);
            }
            catch (Exception ex)
            {
                // Covers malformed JSON and a null analysisObject alike; the
                // caller still gets its callback.
                Debug.LogError(ex.Message);
                onDone(AzureCVTag.none, 0);
            }
        }
    }
    /// <summary>
    /// Call the Computer Vision Service to submit the image.
    /// </summary>
    /// <param name="imagePath">Path of the captured image on disk.</param>
    /// <remarks>
    /// WARNING(review): this synchronous variant polls the async operation with
    /// Thread.Sleep. UnityWebRequest operations complete on the Unity main loop,
    /// so if this is ever called from the main thread the loop below never
    /// terminates. Confirm all call sites run off the main thread, or convert
    /// this to a coroutine like the other overloads in this file.
    /// </remarks>
    public static void AnalyseLastImageCaptured(string imagePath)
    {
        WWWForm webForm = new WWWForm();

        using (UnityWebRequest unityWebRequest = UnityWebRequest.Post(predictionEndpoint, webForm))
        {
            // Gets a byte array out of the saved image
            imageBytes = GetImageAsByteArray(imagePath);

            unityWebRequest.SetRequestHeader("Content-Type", "application/octet-stream");
            unityWebRequest.SetRequestHeader("Prediction-Key", predictionKey);

            // The upload handler will help uploading the byte array with the request
            unityWebRequest.uploadHandler             = new UploadHandlerRaw(imageBytes);
            unityWebRequest.uploadHandler.contentType = "application/octet-stream";

            // The download handler will help receiving the analysis from Azure
            unityWebRequest.downloadHandler = new DownloadHandlerBuffer();

            // Send the request
            var res = unityWebRequest.SendWebRequest();
            // NOTE(review): busy-wait on the async operation — see the remarks
            // above for the main-thread deadlock risk.
            while (!res.isDone)
            {
                System.Threading.Thread.Sleep(100);
            }

            string jsonResponse = unityWebRequest.downloadHandler.text;

            // The response will be in JSON format, therefore it needs to be deserialized

            // NOTE(review): the AnalysisObject instance below is immediately
            // overwritten by the deserialized result — the initial allocation
            // is redundant.

            AnalysisObject analysisObject = new AnalysisObject();
            analysisObject = JsonConvert.DeserializeObject <AnalysisObject>(jsonResponse);
            var builder = new StringBuilder();
            if (analysisObject.Predictions != null)
            {
                // Collect every tag detected above 50% confidence and show the
                // summary in the hint box.
                foreach (var item in analysisObject.Predictions)
                {
                    if (item.Probability > 0.5)
                    {
                        builder.AppendLine(item.TagName + " found");
                    }
                }
                HintBox.Instance.ShowText(builder.ToString());
            }
        }
    }
    /// <summary>
    /// Call the Computer Vision Service to submit the image.
    /// </summary>
    /// <param name="imagePath">Path of the captured image on disk.</param>
    public IEnumerator AnalyseLastImageCaptured(string imagePath)
    {
        // Update camera status to analysis.
        SceneOrganiser.Instance.SetCameraStatus("Analysis");

        // Makes call to the API to analyse the picture and find a tag.
        // When it is done, SceneOrganiser.Instance.FinaliseLabel is called.
        WWWForm webForm = new WWWForm();

        using (UnityWebRequest unityWebRequest = UnityWebRequest.Post(predictionEndpoint, webForm))
        {
            // Gets a byte array out of the saved image
            imageBytes = GetImageAsByteArray(imagePath);

            unityWebRequest.SetRequestHeader("Content-Type", "application/octet-stream");
            unityWebRequest.SetRequestHeader("Prediction-Key", predictionKey);

            // The upload handler will help uploading the byte array with the request
            unityWebRequest.uploadHandler             = new UploadHandlerRaw(imageBytes);
            unityWebRequest.uploadHandler.contentType = "application/octet-stream";

            // The download handler will help receiving the analysis from Azure
            unityWebRequest.downloadHandler = new DownloadHandlerBuffer();

            // Send the request and wait for the response
            yield return unityWebRequest.SendWebRequest();

            // BUG FIX: UnityWebRequest.error is null (not "") when the request
            // succeeds, so the original check (error != "") popped the
            // "Internet Error" dialog on every successful request.
            if (!string.IsNullOrEmpty(unityWebRequest.error))
            {
                DialogManager.Instance.LaunchBasicDialog(1, "Internet Error", "Please verify your internet connection");
            }

            string jsonResponse = unityWebRequest.downloadHandler.text;

            Debug.Log("response: " + jsonResponse);

            // Create a texture. Texture size does not matter, since
            // LoadImage will replace with the incoming image size.
            Texture2D tex = new Texture2D(1, 1);
            tex.LoadImage(imageBytes);
            SceneOrganiser.Instance.quadRenderer.material.SetTexture("_MainTex", tex);

            // The response is JSON; deserialize directly instead of allocating
            // a throwaway AnalysisObject that is immediately overwritten.
            AnalysisObject analysisObject = JsonConvert.DeserializeObject<AnalysisObject>(jsonResponse);

            SceneOrganiser.Instance.FinaliseLabel(analysisObject);
        }
    }
    /// <summary>
    /// Set the Tags as Text of the last label created.
    /// </summary>
    public void SetTagsToLastLabel(AnalysisObject analysisObject)
    {
        lastLabelPlacedText = lastLabelPlaced.GetComponent<TextMesh>();

        if (analysisObject.Predictions == null)
        {
            return;
        }

        foreach (Prediction prediction in analysisObject.Predictions)
        {
            // Very permissive threshold: report anything above 2%.
            if (prediction.Probability > 0.02)
            {
                string detected = $"Detected: {prediction.TagName} {prediction.Probability.ToString("0.00 \n")}";
                lastLabelPlacedText.text += detected;
                Debug.Log(detected);
            }
        }
    }
Ejemplo n.º 9
0
        /// <summary>
        /// Initializes the settings view.
        /// </summary>
        /// <param name="vis">The visualization that will be configured by this settings view.</param>
        /// <param name="showStaticObjects">Whether static study objects should be shown in the settings view.</param>
        /// <param name="showSpeedSettings">Whether speed settings should be shown in the settings view.</param>
        public override void Init(IConfigurableVisualization vis, bool showStaticObjects = true, bool showSpeedSettings = false)
        {
            if (vis == null)
            {
                return;
            }

            VisProperties settings = vis.Settings;

            visId = vis.Settings.VisId;
            settingsViewObjects = new List<SettingsViewObject>();

            // Without a prefab or a data manager there is nothing to build.
            if (!SettingsPrefab || Services.DataManager() == null)
            {
                return;
            }

            // Create one settings prefab instance per data set, stacked vertically.
            int row = 0;
            foreach (var dataSetElem in Services.DataManager().DataSets.Values)
            {
                AnalysisObject dataSet = (AnalysisObject)dataSetElem;
                var viewObject = GameObject.Instantiate<SettingsViewObject>(SettingsPrefab, this.transform);
                viewObject.transform.localPosition = new Vector3(StartPositionX, StartPositionY - (row * OffsetY), -0.009f);
                viewObject.DataSet = dataSet;
                viewObject.Init();

                // Pre-select objects already referenced by the visualization's settings.
                for (int idx = 0; idx < settings.ObjectIds.Count; idx++)
                {
                    if (settings.ObjectIds[idx] != dataSetElem.Id)
                    {
                        continue;
                    }

                    viewObject.IsObjectSelected = true;

                    // Restore the per-object speed toggle when the stored list
                    // matches the object list one-to-one.
                    if (settings.TryGet("useSpeed", out List<bool> useSpeedList) &&
                        useSpeedList != null &&
                        useSpeedList.Count == settings.ObjectIds.Count)
                    {
                        viewObject.IsUseSpeedSelected = useSpeedList[idx];
                    }
                }

                settingsViewObjects.Add(viewObject);
                row++;
            }
        }
Ejemplo n.º 10
0
        /// <summary>
        /// Draws one trajectory tube for every session/condition combination
        /// of the given data set.
        /// </summary>
        /// <param name="dataSet">Data set whose samples are visualized.</param>
        /// <param name="useSpeed">Whether point colors encode speed.</param>
        private void DrawTrajectory(AnalysisObject dataSet, bool useSpeed)
        {
            // for all sessions that we want to visualize...
            for (int sessionIdx = 0; sessionIdx < Settings.Sessions.Count; sessionIdx++)
            {
                // for all conditions that we want to visualize...
                for (int conditionIdx = 0; conditionIdx < Settings.Conditions.Count; conditionIdx++)
                {
                    var lineObject = Instantiate(LinePrefab, Anchor);
                    var tube       = lineObject.GetComponent<CustomTubeRenderer>();
                    tube.startWidth = 0.0f;
                    tube.endWidth   = 0.006f;

                    // Spread the color saturation evenly over all combinations
                    // so each trail is visually distinct.
                    float saturationOffset = ((Settings.Conditions.Count * sessionIdx) + conditionIdx) / (float)(Settings.Conditions.Count * Settings.Sessions.Count);

                    UpdateSegments(dataSet, Settings.Sessions[sessionIdx], Settings.Conditions[conditionIdx], useSpeed, saturationOffset, out List<List<Vector3>> segments, out List<List<Color>> colorSegments);

                    tube.SetPositions(segments, colorSegments);
                    primitives.Add(tube);
                }
            }
        }
Ejemplo n.º 11
0
        /// <summary>
        /// Builds the polyline segments (and parallel per-point color lists) for one
        /// session/condition trail, covering roughly the last <c>trailTime</c> seconds
        /// up to the study manager's current timestamp.
        /// </summary>
        /// <param name="dataSet">Data set whose samples form the trail.</param>
        /// <param name="session">Session to read samples from.</param>
        /// <param name="condition">Condition to read samples from.</param>
        /// <param name="useSpeed">If true, points are colored by movement speed; otherwise by the (saturation-shifted) object color.</param>
        /// <param name="colorSaturationOffset">Saturation offset distinguishing session/condition combinations.</param>
        /// <param name="segments">Output: lists of points, one list per contiguous segment.</param>
        /// <param name="colorSegments">Output: per-point colors, parallel to <paramref name="segments"/>.</param>
        private void UpdateSegments(AnalysisObject dataSet, int session, int condition, bool useSpeed, float colorSaturationOffset, out List <List <Vector3> > segments, out List <List <Color> > colorSegments)
        {
            // Derive this trail's color from the object color, shifting saturation
            // so multiple trails remain distinguishable.
            Color.RGBToHSV(dataSet.ObjectColor, out float colorH, out float colorS, out float colorV);
            if (Settings.Conditions.Count * Settings.Sessions.Count > 3)
            {
                // Ensure enough saturation headroom when many trails share one hue.
                colorS = Math.Max(0.9f, colorS);
            }

            Color objectColor = Color.HSVToRGB(colorH, colorS - colorSaturationOffset, colorV);

            var infoObjects = dataSet.GetInfoObjects(session, condition); // get data for current session/condition

            segments = new List <List <Vector3> >();
            List <Vector3> points = new List <Vector3>();

            colorSegments = new List <List <Color> >();
            List <Color> colors = new List <Color>();

            Vector3 previousPosition  = new Vector3(0, 0, 0);
            long    previousTimestamp = 0;
            Vector3 currentPosition;
            // Index window: from trailTime seconds ago up to "now"; alpha starts
            // ramping down after 75% of the trail length.
            long    currentTime       = Services.StudyManager().CurrentTimestamp;
            int     currentIndex      = dataSet.GetIndexFromTimestamp(currentTime, session, condition);
            int     firstIndex        = dataSet.GetIndexFromTimestamp(currentTime - (long)(trailTime * TimeSpan.TicksPerSecond), session, condition);
            int     alphaDropOffIndex = dataSet.GetIndexFromTimestamp(currentTime - (long)(0.75 * trailTime * TimeSpan.TicksPerSecond), session, condition);

            for (int i = firstIndex; i < currentIndex; i++)
            {
                Sample o = infoObjects[i];

                if (float.IsNaN(o.Position.x) || float.IsNaN(o.Position.y) || float.IsNaN(o.Position.z))
                {
                    throw new ArgumentException("float.NaN is not a valid position.");
                }

                currentPosition = o.Position;

                // check data to decide if we want to add this measurement
                if (!CheckReductionFilter(o.Timestamp, currentPosition, ref previousTimestamp, ref previousPosition, 15, 0.03f))
                {
                    continue;
                }

                // checks our time filter
                if (o.Timestamp < currentTimeFilterMin || o.Timestamp > currentTimeFilterMax)
                {
                    continue;
                }

                // NOTE(review): "true ||" short-circuits this condition, so the
                // outlier check is effectively disabled and the else branch below
                // is unreachable dead code. Presumably disabled on purpose during
                // debugging — confirm before re-enabling or deleting.
                if (true || !CheckOutlier(o.Timestamp, currentPosition, ref previousTimestamp, ref previousPosition, 10.0f))
                {
                    points.Add(currentPosition); // add current point

                    // compute color based on speed
                    float diffSeconds  = (float)(o.Timestamp - previousTimestamp) / TimeSpan.TicksPerSecond;
                    float diffDistance = (currentPosition - previousPosition).magnitude;
                    float currentSpeed = diffDistance / diffSeconds;
                    Color color;
                    if (useSpeed)
                    {
                        color = MapSpeedToColor(currentSpeed);
                    }
                    else
                    {
                        color = objectColor;
                    }

                    // set alpha: fade the oldest quarter of the trail linearly in.
                    if (alphaDropOffIndex > firstIndex)
                    {
                        color.a = (i - firstIndex) / ((alphaDropOffIndex - firstIndex) * 1.0f);
                    }

                    colors.Add(color);
                }
                else
                {
                    // Unreachable while the outlier check above stays disabled:
                    // would flush the current segment and start a new one at an
                    // outlier boundary.
                    if (points.Count > 3)
                    {
                        segments.Add(points);
                        colors[0] = colors[1]; // first speed value cannot be correct, correct color
                        colorSegments.Add(colors);
                    }

                    points = new List <Vector3>();
                    points.Add(currentPosition);

                    colors = new List <Color>();

                    // compute color based on speed
                    float diffSeconds  = (float)(o.Timestamp - previousTimestamp) / TimeSpan.TicksPerSecond;
                    float diffDistance = (currentPosition - previousPosition).magnitude;
                    float currentSpeed = diffDistance / diffSeconds;
                    Color color;
                    if (useSpeed)
                    {
                        color = MapSpeedToColor(currentSpeed);
                    }
                    else
                    {
                        color = objectColor;
                    }

                    // set alpha
                    if (alphaDropOffIndex > firstIndex)
                    {
                        color.a = (i - firstIndex) / ((alphaDropOffIndex - firstIndex) * 1.0f);
                    }

                    colors.Add(color);
                }

                previousPosition  = currentPosition;
                previousTimestamp = o.Timestamp;
            }

            // Flush the final segment; segments of <= 3 points are discarded.
            if (points.Count > 3)
            {
                segments.Add(points);
                colors[0] = colors[1]; // first speed value cannot be correct, correct color
                colorSegments.Add(colors);
            }
        }
Ejemplo n.º 12
0
        /// <summary>
        /// Draws a full trajectory tube for every session/condition combination of the
        /// given data set, coloring each point by the sample's recorded speed.
        /// </summary>
        /// <param name="dataSet">Data set whose samples are visualized.</param>
        private void DrawTrajectoryWithSpeed(AnalysisObject dataSet)
        {
            // for all sessions that we want to visualize...
            for (int s = 0; s < Settings.Sessions.Count; s++)
            {
                // for all conditions that we want to visualize...
                for (int c = 0; c < Settings.Conditions.Count; c++)
                {
                    var line          = Instantiate(LinePrefab, Anchor);
                    var lineComponent = line.GetComponent <CustomTubeRenderer>();
                    lineComponent.startWidth = 0.003f;
                    lineComponent.endWidth   = 0.003f;

                    var infoObjects = dataSet.GetInfoObjects(Settings.Sessions[s], Settings.Conditions[c]); // get data for current session/condition

                    // prepare lists for points and colors
                    List <List <Vector3> > segments      = new List <List <Vector3> >();
                    List <Vector3>         points        = new List <Vector3>();
                    List <List <Color> >   colorSegments = new List <List <Color> >();
                    List <Color>           colors        = new List <Color>();

                    Vector3 previousPosition  = new Vector3(0, 0, 0);
                    long    previousTimestamp = long.MinValue;
                    Vector3 currentPosition;

                    for (int i = 0; i < infoObjects.Count; i++)
                    {
                        Sample o = infoObjects[i];

                        if (float.IsNaN(o.Position.x) || float.IsNaN(o.Position.y) || float.IsNaN(o.Position.z))
                        {
                            throw new ArgumentException("float.NaN is not a valid position.");
                        }

                        currentPosition = o.Position;

                        // check data to decide if we want to add this measurement
                        if (!CheckReductionFilter(o.Timestamp, currentPosition, ref previousTimestamp, ref previousPosition, 15, 0.03f))
                        {
                            continue;
                        }

                        // checks our time filter
                        if (o.Timestamp < currentTimeFilterMin || o.Timestamp > currentTimeFilterMax)
                        {
                            continue;
                        }

                        // NOTE(review): "true ||" disables the outlier check, so the
                        // else branch below is unreachable dead code — presumably
                        // disabled on purpose; confirm before changing.
                        if (true || !CheckOutlier(o.Timestamp, currentPosition, ref previousTimestamp, ref previousPosition, 10.0f))
                        {
                            points.Add(currentPosition); // add current point

                            // Color comes from the sample's stored speed; the
                            // commented lines show the older derived-speed variant.
                            ////float diffSeconds = (float)(o.Timestamp - previousTimestamp) / TimeSpan.TicksPerSecond;
                            ////float diffDistance = (currentPosition - previousPosition).magnitude;
                            ////float currentSpeed = diffDistance / diffSeconds;
                            ////Color color = MapSpeedToColor(currentSpeed);
                            Color color = MapSpeedToColor(o.Speed);
                            colors.Add(color);
                        }
                        else
                        {
                            // Unreachable while the outlier check stays disabled:
                            // would flush the current segment at an outlier boundary.
                            if (points.Count > 3)
                            {
                                segments.Add(points);
                                colors[0] = colors[1]; // first speed value cannot be correct, correct color
                                colorSegments.Add(colors);
                            }

                            points = new List <Vector3>();
                            points.Add(currentPosition);

                            colors = new List <Color>();

                            // compute color based on speed
                            float diffSeconds  = (float)(o.Timestamp - previousTimestamp) / TimeSpan.TicksPerSecond;
                            float diffDistance = (currentPosition - previousPosition).magnitude;
                            float currentSpeed = diffDistance / diffSeconds;
                            Color color        = MapSpeedToColor(currentSpeed);
                            colors.Add(color);
                        }

                        previousPosition  = currentPosition;
                        previousTimestamp = o.Timestamp;
                    }

                    // Flush the final segment; segments of <= 3 points are discarded.
                    if (points.Count > 3)
                    {
                        segments.Add(points);
                        colors[0] = colors[1]; // first speed value cannot be correct, correct color
                        colorSegments.Add(colors);
                    }

                    lineComponent.SetPositions(segments, colorSegments);

                    primitives.Add(lineComponent);
                }
            }
        }
Ejemplo n.º 13
0
        /// <summary>
        /// Draws a full trajectory tube for every session/condition combination of the
        /// given data set, using a single (saturation-shifted) object color per trail.
        /// </summary>
        /// <param name="dataSet">Data set whose samples are visualized.</param>
        private void DrawTrajectoryPlain(AnalysisObject dataSet)
        {
            // for all sessions that we want to visualize...
            for (int s = 0; s < Settings.Sessions.Count; s++)
            {
                // for all conditions that we want to visualize
                for (int c = 0; c < Settings.Conditions.Count; c++)
                {
                    var line          = Instantiate(LinePrefab, Anchor);
                    var lineComponent = line.GetComponent <CustomTubeRenderer>();
                    lineComponent.startWidth = 0.003f;
                    lineComponent.endWidth   = 0.003f;

                    // Shift saturation per combination so trails stay distinguishable.
                    float colorSaturationOffset = ((Settings.Conditions.Count * s) + c) / (float)(Settings.Conditions.Count * Settings.Sessions.Count);
                    Color.RGBToHSV(dataSet.ObjectColor, out float colorH, out float colorS, out float colorV);
                    if (Settings.Conditions.Count * Settings.Sessions.Count > 3)
                    {
                        // Ensure enough saturation headroom when many trails share one hue.
                        colorS = Math.Max(0.9f, colorS);
                    }

                    Color objectColor = Color.HSVToRGB(colorH, colorS - colorSaturationOffset, colorV);
                    lineComponent.Color = objectColor;

                    var infoObjects = dataSet.GetInfoObjects(Settings.Sessions[s], Settings.Conditions[c]); // get data for current session/condition
                    List <List <Vector3> > segments = new List <List <Vector3> >();
                    List <Vector3>         points   = new List <Vector3>();

                    Vector3 previousPosition  = new Vector3(0, 0, 0);
                    long    previousTimestamp = long.MinValue;
                    Vector3 currentPosition;
                    for (int i = 0; i < infoObjects.Count; i++)
                    {
                        Sample o = infoObjects[i];

                        if (float.IsNaN(o.Position.x) || float.IsNaN(o.Position.y) || float.IsNaN(o.Position.z))
                        {
                            throw new ArgumentException("float.NaN is not a valid position.");
                        }

                        currentPosition = o.Position;

                        // check data to decide if we want to add this measurement
                        if (!CheckReductionFilter(o.Timestamp, currentPosition, ref previousTimestamp, ref previousPosition, 15, 0.03f))
                        {
                            continue;
                        }

                        // checks our time filter
                        if (o.Timestamp < currentTimeFilterMin || o.Timestamp > currentTimeFilterMax)
                        {
                            continue;
                        }

                        // NOTE(review): "true ||" disables the outlier check, so the
                        // else branch below is unreachable dead code — presumably
                        // disabled on purpose; confirm before changing.
                        if (true || !CheckOutlier(o.Timestamp, currentPosition, ref previousTimestamp, ref previousPosition, 10.0f))
                        {
                            points.Add(currentPosition);
                        }
                        else
                        {
                            // Unreachable while the outlier check stays disabled:
                            // would flush the current segment at an outlier boundary.
                            if (points.Count > 3)
                            {
                                segments.Add(points);
                            }

                            points = new List <Vector3>();
                            points.Add(currentPosition);
                        }

                        previousPosition  = currentPosition;
                        previousTimestamp = o.Timestamp;
                    }

                    // Flush the final segment; segments of <= 3 points are discarded.
                    if (points.Count > 3)
                    {
                        segments.Add(points);
                    }

                    lineComponent.SetPositions(segments);

                    primitives.Add(lineComponent);
                }
            }
        }
Ejemplo n.º 14
0
    /// <summary>
    /// Processes the deserialized Custom Vision predictions: if the most probable
    /// prediction exceeds <c>probabilityThreshold</c>, draws an interactive bounding
    /// box around the recognised object and attaches the last placed label to it;
    /// otherwise marks the object as unrecognised. Finally plays a notification
    /// sound and resets the image capture cycle.
    /// </summary>
    /// <param name="analysisObject">Prediction results returned by the service.</param>
    public void FinaliseLabel(AnalysisObject analysisObject)
    {
        // Default to "nothing recognised"; overwritten only on a confident match.
        RecognizedObject = null;

        // Guard against a null OR empty prediction list — the previous code indexed
        // the last element and threw on an empty list.
        if (analysisObject.Predictions != null && analysisObject.Predictions.Any())
        {
            // Locate the prediction with the highest probability.
            Prediction bestPrediction = analysisObject.Predictions
                .OrderByDescending(p => p.Probability)
                .First();

            if (bestPrediction.Probability > probabilityThreshold)
            {
                // The prediction is considered good enough.
                quadRenderer = quad.GetComponent <Renderer>();
                Bounds quadBounds = quadRenderer.bounds;

                // Draw a cube acting like a visible boundingBox for the recognized object.
                GameObject objBoundingBox = DrawInSpace.Instance.DrawCube((float)bestPrediction.BoundingBox.Width, (float)bestPrediction.BoundingBox.Height);
                objBoundingBox.transform.parent        = quad.transform;
                objBoundingBox.transform.localPosition = CalculateBoundingBoxPosition(quadBounds, bestPrediction.BoundingBox);
                DrawInSpace.Instance.ChooseMaterial(objBoundingBox, "BoundingBoxTransparentFlashy"); //optional

                // Set the position and scale of the quad depending on user position.
                objBoundingBox.transform.SetParent(transform);        //break the link with quad (picture)
                BoundingBoxManager.Instance.MakeBoundingBoxInteractible(objBoundingBox);
                objBoundingBox.AddComponent <EventTriggerDelegate>(); //to block picture functions while moving bounding box

                // Move the label upward in world space just above the boundingBox.
                LastLabelPlaced.transform.position = objBoundingBox.transform.position;
                LastLabelPlaced.transform.Translate(Vector3.up * (float)(bestPrediction.BoundingBox.Height / 2 + 0.1f), Space.World); //Vector3 = World Space and Transform = Local Space
                LastLabelPlaced.transform.parent = objBoundingBox.transform;                                                          //Link the Text label to the object bounding box

                // Set the label text and make it face the user.
                SetLastLabel(bestPrediction.TagName);
                LastLabelPlaced.transform.rotation = Quaternion.LookRotation(Camera.main.transform.forward);

                RecognizedObject = bestPrediction.TagName;
            }
        }

        if (ImageCapture.Instance.AppMode == ImageCapture.AppModes.Analysis)
        {
            // In Analysis mode an unrecognised object still receives a placeholder label.
            if (RecognizedObject == null)
            {
                SetLastLabel("Unknown");
            }
        }
        else if (ImageCapture.Instance.AppMode == ImageCapture.AppModes.Smart)
        {
            // In Smart mode the captured photo is uploaded under the recognised tag
            // (RecognizedObject may be null here — presumably handled by UploadPhoto; verify).
            ImageCapture.Instance.UploadPhoto(RecognizedObject);
        }

        //Make a sound to notify the user of the new label
        AudioPlay.Instance.PlayWithVolume("Bell", 80);

        ImageCapture.Instance.ResetImageCapture();
    }