/// <inheritdoc/>
public override object GetNormalisedValuePrecise(object originalValue, int identifier)
{
    DimensionData.Metadata meta = this[identifier].MetaData;

    // Determine which textualDimensionsList to use
    Dictionary<string, Dictionary<string, int>> textualDimensionsListReverse;
    if (nodeDimensionData.Select(x => x.Identifier).FirstOrDefault() != null)
    {
        textualDimensionsListReverse = nodeTextualDimensionsListReverse;
    }
    else
    {
        textualDimensionsListReverse = edgeTextualDimensionsListReverse;
    }

    if (meta.Type == IATKDataType.String)
    {
        int stringIdx = textualDimensionsListReverse[this[identifier].Identifier][originalValue.ToString()];
        return(UtilMath.NormaliseValue(stringIdx, meta.Min, meta.Max, 0f, 1f));
    }
    else
    {
        return(UtilMath.NormaliseValue((float)originalValue, meta.Min, meta.Max, 0f, 1f));
    }
}
/// <inheritdoc/>
public override object GetValuePrecise(float normalisedValue, string identifier)
{
    DimensionData.Metadata meta = this[identifier].MetaData;

    // Determine which textualDimensionsList to use
    Dictionary<string, Dictionary<int, string>> textualDimensionsList;
    if (nodeDimensionData.FirstOrDefault(x => x.Identifier == identifier) != null)
    {
        textualDimensionsList = nodeTextualDimensionsList;
    }
    else
    {
        textualDimensionsList = edgeTextualDimensionsList;
    }

    float normValue = UtilMath.NormaliseValue(normalisedValue, 0f, 1f, meta.Min, meta.Max);

    // Dimensions of type String should return a string from the textual dimensions list
    if (meta.Type == IATKDataType.String)
    {
        return(textualDimensionsList[identifier][(int)normValue]);
    }
    // Otherwise we can return the de-normalised value
    else
    {
        return(normValue);
    }
}
/// <inheritdoc/>
public override object GetValueApproximate(float normalisedValue, string identifier)
{
    DimensionData.Metadata meta = this[identifier].MetaData;

    // Determine which textualDimensionsList to use
    Dictionary<string, Dictionary<int, string>> textualDimensionsList;
    if (nodeDimensionData.FirstOrDefault(x => x.Identifier == identifier) != null)
    {
        textualDimensionsList = nodeTextualDimensionsList;
    }
    else
    {
        textualDimensionsList = edgeTextualDimensionsList;
    }

    // Dimensions of type String should return a string from the textual dimensions list
    if (meta.Type == IATKDataType.String)
    {
        // Since this function allows for approximate input values, we need to find the value closest to the given one
        float normValue = UtilMath.NormaliseValue(ValueClosestTo(this[identifier].Data, normalisedValue), 0f, 1f, meta.Min, meta.Max);
        return(textualDimensionsList[identifier][(int)normValue]);
    }
    // Otherwise we can return a de-normalised value
    else
    {
        return(UtilMath.NormaliseValue(normalisedValue, 0f, 1f, meta.Min, meta.Max));
    }
}
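// Hedged usage sketch (not part of the original source): a round trip through the three graph
// getters above, assuming they are exposed on the DataSource base type as the editor snippet
// later in this section suggests. The dimension name "Name", the column index 0, and the value
// "Alice" are hypothetical placeholders.
private static void ExampleGraphRoundTrip(DataSource graphDataSource)
{
    // A known string value maps to its normalised 0..1 position within the dimension.
    float norm = (float)graphDataSource.GetNormalisedValuePrecise("Alice", 0);

    // Feeding that exact normalised value back recovers the original label.
    object label = graphDataSource.GetValuePrecise(norm, "Name");

    // The approximate getter tolerates any 0..1 input and snaps to the closest stored value first.
    object nearest = graphDataSource.GetValueApproximate(norm + 0.01f, "Name");
}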
/// <inheritdoc/>
public override object GetNormalisedValuePrecise(object originalValue, int identifier)
{
    DimensionData.Metadata meta = this[identifier].MetaData;

    if (meta.Type == IATKDataType.String)
    {
        int stringIdx = textualDimensionsListReverse[this[identifier].Identifier][originalValue.ToString()];
        return(UtilMath.NormaliseValue(stringIdx, meta.Min, meta.Max, 0f, 1f));
    }
    else
    {
        return(UtilMath.NormaliseValue((float)originalValue, meta.Min, meta.Max, 0f, 1f));
    }
}
/// <inheritdoc/>
public override object GetValuePrecise(float normalisedValue, string identifier)
{
    DimensionData.Metadata meta = this[identifier].MetaData;
    float normValue = UtilMath.NormaliseValue(normalisedValue, 0f, 1f, meta.Min, meta.Max);

    // Dimensions of type String should return a string from the textual dimensions list
    if (meta.Type == IATKDataType.String)
    {
        return(textualDimensionsList[this[identifier].Identifier][(int)normValue]);
    }
    // Otherwise we can return the de-normalised value
    else
    {
        return(normValue);
    }
}
/// <inheritdoc/>
public override object GetValueApproximate(float normalisedValue, string identifier)
{
    DimensionData.Metadata meta = this[identifier].MetaData;

    // Dimensions of type String should return a string from the textual dimensions list
    if (meta.Type == IATKDataType.String)
    {
        // Since this function allows for approximate input values, we need to find the value closest to the given one
        float normValue = UtilMath.NormaliseValue(ValueClosestTo(this[identifier].Data, normalisedValue), 0f, 1f, meta.Min, meta.Max);
        return(textualDimensionsList[identifier][(int)normValue]);
    }
    // Otherwise we can return a de-normalised value
    else
    {
        return(UtilMath.NormaliseValue(normalisedValue, 0f, 1f, meta.Min, meta.Max));
    }
}
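// Hedged illustration (not part of the original source): the practical difference between the
// precise and approximate getters for a String dimension. GetValueApproximate snaps an arbitrary
// 0..1 input to the nearest stored value before mapping it to a label, whereas GetValuePrecise
// de-normalises the raw input and truncates it to an index, so it expects an exact normalised
// position. "dataSource" and "Category" are hypothetical placeholders.
private static void ExamplePreciseVsApproximate(DataSource dataSource)
{
    object snapped = dataSource.GetValueApproximate(0.4f, "Category"); // label of the nearest category
    object exact   = dataSource.GetValuePrecise(0.5f, "Category");     // label at exactly 0.5
}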
/// <summary>
/// Normalises a given column from a 2D array of float values within the range 0..1. This function also sets some metadata values.
/// </summary>
/// <param name="dataArray">A 2D float array of data.</param>
/// <param name="col">An integer index of the column to normalise.</param>
/// <param name="dimensionData">The dimension data list whose metadata entry for this column is populated.</param>
/// <returns>A normalised float array in the range 0..1.</returns>
private float[] NormaliseColumn(float[,] dataArray, int col, ref List<DimensionData> dimensionData)
{
    float[] result = GetColumn(dataArray, col);

    float minValue = result.Min();
    float maxValue = result.Max();

    if (minValue == maxValue)
    {
        // where there are no distinct values, need the dimension to be distinct
        // otherwise lots of maths breaks with division by zero, etc.
        // this is the most elegant hack I could think of, but should be fixed properly in future
        minValue -= 1.0f;
        maxValue += 1.0f;
    }

    // Populate metadata values
    DimensionData.Metadata metadata = dimensionData[col].MetaData;
    metadata.Min = minValue;
    metadata.Max = maxValue;
    metadata.Categories = result.Distinct().Select(x => UtilMath.NormaliseValue(x, minValue, maxValue, 0.0f, 1.0f)).ToArray();
    metadata.CategoryCount = metadata.Categories.Count();
    metadata.BinCount = (int)(maxValue - minValue + 1);
    dimensionData[col].SetMetadata(metadata);

    for (int j = 0; j < result.Length; j++)
    {
        if (minValue < maxValue)
        {
            result[j] = UtilMath.NormaliseValue(result[j], minValue, maxValue, 0f, 1f);
        }
        else
        {
            // Avoid NaNs or nonsensical normalisation
            result[j] = 0;
        }
    }

    return(result);
}
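// Hedged reference sketch (not part of the original source): the linear remapping that
// UtilMath.NormaliseValue is assumed to perform throughout this section, written out so the
// metadata handling above is easier to follow. For example, remapping 7.5 from [5, 10] to
// [0, 1] yields 0.5.
private static float RemapExample(float value, float fromMin, float fromMax, float toMin, float toMax)
{
    // Plain linear interpolation between the two ranges; NormaliseColumn widens degenerate
    // ranges (min == max) before normalising, which avoids the division by zero here.
    return toMin + (value - fromMin) / (fromMax - fromMin) * (toMax - toMin);
}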
protected virtual void DrawMinMaxSlider(Rect rect, SerializedProperty minFilterProp, SerializedProperty maxFilterProp, string attributeid, DataSource dataSource)
{
    bool isUndefined = dataSource == null || attributeid == "Undefined";
    // Guard the index lookup so a missing data source cannot throw
    int idx = isUndefined ? -1 : Array.IndexOf(dataSource.Select(m => m.Identifier).ToArray(), attributeid);

    // get the dimension's real value range from the metadata
    float minValue = !isUndefined ? dataSource[attributeid].MetaData.Min : 0.0f;
    float maxValue = !isUndefined ? dataSource[attributeid].MetaData.Max : 1.0f;

    // calculate the real values of the normalised filter bounds
    float min = UtilMath.NormaliseValue(minFilterProp.floatValue, 0, 1, minValue, maxValue);
    float max = UtilMath.NormaliseValue(maxFilterProp.floatValue, 0, 1, minValue, maxValue);

    // get the string representation
    string minLogical = isUndefined ? "" : dataSource.GetValueApproximate(minFilterProp.floatValue, idx).ToString();
    string maxLogical = isUndefined ? "" : dataSource.GetValueApproximate(maxFilterProp.floatValue, idx).ToString();

    EditorGUI.TextField(new Rect(rect.x, rect.y, 75, rect.height), minLogical);
    EditorGUI.MinMaxSlider(new Rect(rect.x + 75, rect.y, rect.width - 150, rect.height), GUIContent.none, ref min, ref max, minValue, maxValue);
    EditorGUI.TextField(new Rect(rect.x + rect.width - 78, rect.y, 75, rect.height), maxLogical);

    // re-normalise the edited slider values back into 0..1 before storing them on the properties
    minFilterProp.floatValue = UtilMath.NormaliseValue(min, minValue, maxValue, 0, 1);
    maxFilterProp.floatValue = UtilMath.NormaliseValue(max, minValue, maxValue, 0, 1);
}
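// Hedged worked example (not part of the original source): the slider round trip above in
// isolation. A filter stored as the normalised value 0.25 over a dimension whose metadata range
// is [10, 50] is shown to the user as 20, and re-normalises back to 0.25 after editing. The
// numbers are hypothetical; only UtilMath.NormaliseValue is taken from the code above.
private static void ExampleSliderRoundTrip()
{
    float metaMin = 10f, metaMax = 50f;                                        // MetaData.Min / MetaData.Max
    float stored  = 0.25f;                                                     // normalised filter value on the property
    float shown   = UtilMath.NormaliseValue(stored, 0f, 1f, metaMin, metaMax); // 20, the value the slider displays
    float back    = UtilMath.NormaliseValue(shown, metaMin, metaMax, 0f, 1f);  // 0.25, written back to the property
}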
/// <summary>
/// Creates an array of positions that are aggregated based on the given aggregation type.
/// This MUST be called AFTER each time the other dimensions change.
/// </summary>
/// <param name="yData">The y dimension values to aggregate, or null when only counting.</param>
/// <param name="aggregation">The aggregation type to apply to each (x, z) bin.</param>
/// <returns>An array of aggregated y positions, one per data point.</returns>
public float[] SetAggregatedDimension(float[] yData, IATKBarAggregation aggregation)
{
    // Extract independent arrays of the position values for the x and z dimensions
    Vector3[] vertices = View.GetVertices();
    float[] xData = new float[vertices.Length];
    float[] zData = new float[vertices.Length];
    for (int i = 0; i < DataSource.DataCount; i++)
    {
        xData[i] = vertices[i].x;
        zData[i] = vertices[i].z;
    }

    // Get the unique "categories" of the x and z dimensions (these are technically floats)
    var xCategories = xData.Distinct();
    var zCategories = zData.Distinct();

    // LAZY HACK: Set a value in the mesh's normal.y value to designate whether to show or hide the point to prevent z-fighting and mass lag
    float[] masterBars = new float[DataSource.DataCount];

    // Create a dictionary that will store the values associated with each (x, z) pair of aggregating values (x bins * z bins = n lists)
    Dictionary<float, Dictionary<float, List<float>>> aggGroups = new Dictionary<float, Dictionary<float, List<float>>>();

    // Iterate through each position and assign the data values to the respective (x, z) pair
    for (int i = 0; i < DataSource.DataCount; i++)
    {
        Dictionary<float, List<float>> innerDict;
        if (!aggGroups.TryGetValue(xData[i], out innerDict))
        {
            innerDict = new Dictionary<float, List<float>>();
            aggGroups[xData[i]] = innerDict;
        }

        List<float> innerList;
        if (!innerDict.TryGetValue(zData[i], out innerList))
        {
            innerList = new List<float>();
            innerDict[zData[i]] = innerList;
            masterBars[i] = 1;
        }

        // If the aggregation type is count, we don't need to use the y axis values
        if (aggregation == IATKBarAggregation.Count || yData == null)
        {
            innerList.Add(0);
        }
        else
        {
            innerList.Add(yData[i]);
        }
    }

    // LAZY HACK: Send the master values to the mesh now
    View.SetUVs(masterBars, IATKDimension.Y);

    // Create another dictionary that will store the aggregated value for each (x, z) pair group
    float max = 0;
    Dictionary<float, Dictionary<float, float>> aggregatedValues = new Dictionary<float, Dictionary<float, float>>();
    foreach (float xCategory in xCategories)
    {
        foreach (float zCategory in zCategories)
        {
            // Calculate final aggregated value
            if (!aggGroups[xCategory].ContainsKey(zCategory))
                continue;

            List<float> values = aggGroups[xCategory][zCategory];
            float aggregated = 0;
            switch (aggregation)
            {
                case IATKBarAggregation.Count:
                    aggregated = values.Count;
                    break;
                case IATKBarAggregation.Average:
                    aggregated = values.Average();
                    break;
                case IATKBarAggregation.Sum:
                    aggregated = values.Sum();
                    break;
                case IATKBarAggregation.Median:
                    values.Sort();
                    float mid = (values.Count - 1) / 2f;
                    aggregated = (values[(int)(mid)] + values[(int)(mid + 0.5f)]) / 2;
                    break;
                case IATKBarAggregation.Min:
                    aggregated = values.Min();
                    break;
                case IATKBarAggregation.Max:
                    aggregated = values.Max();
                    break;
            }

            // Set value
            Dictionary<float, float> innerDict;
            if (!aggregatedValues.TryGetValue(xCategory, out innerDict))
            {
                innerDict = new Dictionary<float, float>();
                aggregatedValues[xCategory] = innerDict;
            }
            innerDict[zCategory] = aggregated;

            // We need to normalise back into 0..1 for these specific aggregations, so we collect the max value
            if (aggregation == IATKBarAggregation.Count || aggregation == IATKBarAggregation.Sum)
            {
                if (max < aggregated)
                    max = aggregated;
            }
        }
    }

    // Set y position based on newly aggregated values
    float[] positions = new float[DataSource.DataCount];
    for (int i = 0; i < DataSource.DataCount; i++)
    {
        // For specific aggregations, normalise
        if (aggregation == IATKBarAggregation.Count || aggregation == IATKBarAggregation.Sum)
        {
            positions[i] = UtilMath.NormaliseValue(aggregatedValues[xData[i]][zData[i]], 0, max, 0, 1);
        }
        else
        {
            positions[i] = aggregatedValues[xData[i]][zData[i]];
        }
    }

    return(positions);
}
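// Hedged usage sketch (not part of the original source): computing summed bar heights for the
// current y dimension from inside the same class. "Sales" is a hypothetical dimension name, and
// how the returned positions are pushed back to the mesh is left out because that call is not
// shown in this section.
private void ExampleAggregateBars()
{
    float[] yData = DataSource["Sales"].Data;   // normalised 0..1 column used as the y dimension
    float[] heights = SetAggregatedDimension(yData, IATKBarAggregation.Sum);
    // heights[i] is the normalised bar height for data point i; every point in the same (x, z)
    // bin receives the same value, and only the first point per bin is flagged visible via masterBars.
}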