/// <summary>
/// Enumerates the indices of the rows or columns of <paramref name="source"/>
/// for which the predicate <paramref name="f"/> returns true.
/// </summary>
/// <param name="source">Matrix to scan.</param>
/// <param name="f">Predicate applied to each row/column vector.</param>
/// <param name="t">Whether to scan rows or columns.</param>
/// <returns>Lazily yielded sequence of matching indices.</returns>
public static IEnumerable<int> Indices(this Matrix source, Func<Vector, bool> f, VectorType t)
{
    int count = (t == VectorType.Row) ? source.Rows : source.Cols;
    for (int index = 0; index < count; index++)
    {
        if (f(source[index, t]))
        {
            yield return index;
        }
    }
}
/// <summary>
/// Creates an array expression from a vector type and its wrapped elements.
/// </summary>
/// <param name="vectorType">Kind of vector the array expression represents.</param>
/// <param name="elements">Wrapped expression elements of the array.</param>
internal ArrayExpression(VectorType vectorType, EnumerableArrayWrapper<ExpressionBase, IMetadataExpression> elements)
{
    // Assignments are independent; order is irrelevant.
    this.Elements = elements;
    this.VectorType = vectorType;
}
/// <summary>
/// Computes the variance of each row or column of the matrix — i.e. the
/// diagonal of the covariance matrix — and returns it as a vector.
/// </summary>
/// <param name="source">Matrix whose row/column variances are computed.</param>
/// <param name="t">(Optional) Whether vectors are taken by row or by column.</param>
/// <returns>Vector of per-row or per-column variances.</returns>
public static Vector CovarianceDiag(Matrix source, VectorType t = VectorType.Col)
{
    int n = t == VectorType.Row ? source.Rows : source.Cols;
    var diagonal = new Vector(n);
    for (int idx = 0; idx < n; idx++)
    {
        diagonal[idx] = source[idx, t].Variance();
    }
    return diagonal;
}
/// <summary>
/// Parses the given vector string into a matrix representation.
/// </summary>
/// <param name="vector">Textual representation of the vector/matrix to parse.</param>
/// <param name="type">Kind of vector being parsed.</param>
/// <param name="columns">Number of columns.</param>
/// <param name="rows">Number of rows.</param>
/// <param name="isLinearMatrix">Whether the input is a linear (single-line) matrix.</param>
public MatrixHelper(string vector, VectorType type, int columns, int rows, bool isLinearMatrix)
{
    this.type = type;
    vectorString = vector;
    this.columns = columns;
    this.rows = rows;
    // BUG FIX: assign the flag BEFORE invoking the Split* instance methods.
    // The original set this.isLinearMatrix afterwards, so SplitVector() /
    // SplitLinerMatrix() would observe the field's default value (false)
    // if they read it during parsing.
    this.isLinearMatrix = isLinearMatrix;
    matrix = !isLinearMatrix ? SplitVector() : SplitLinerMatrix();
}
/// <summary>
/// Computes the pairwise correlation matrix of the rows or columns of
/// <paramref name="source"/>.
/// </summary>
/// <param name="source">Matrix whose row/column correlations are computed.</param>
/// <param name="t">(Optional) Whether vectors are taken by row or by column.</param>
/// <returns>A symmetric correlation Matrix.</returns>
public static Matrix Correlation(Matrix source, VectorType t = VectorType.Col)
{
    int n = t == VectorType.Row ? source.Rows : source.Cols;
    var result = new Matrix(n);
    // Correlation is symmetric: compute the upper triangle (b >= a) once
    // and mirror each value into the lower triangle.
    for (int a = 0; a < n; a++)
    {
        for (int b = a; b < n; b++)
        {
            double r = source[a, t].Correlation(source[b, t]);
            result[a, b] = r;
            result[b, a] = r;
        }
    }
    return result;
}
/// <summary>
/// Computes a mean vector over the matrix. <see cref="VectorType.Row"/> averages
/// down the rows (one mean per column); <see cref="VectorType.Column"/> averages
/// across the columns (one mean per row).
/// </summary>
/// <param name="source">Matrix to average.</param>
/// <param name="t">Direction to summarize.</param>
/// <returns>Vector of means.</returns>
public static Vector Mean(this Matrix source, VectorType t)
{
    // Direction flip: summarizing "by row" walks each column vector, and vice versa.
    VectorType walk = t == VectorType.Row ? VectorType.Column : VectorType.Row;
    int n = t == VectorType.Row ? source.Cols : source.Rows;
    var means = new Vector(n);
    for (int idx = 0; idx < n; idx++)
    {
        means[idx] = source[idx, walk].Mean();
    }
    return means;
}
/// <summary>
/// Computes the covariance matrix of the rows or columns of <paramref name="source"/>
/// in parallel. Covariance is symmetric, so only pairs with j >= i are computed
/// and each value is mirrored into both triangles.
/// </summary>
/// <param name="source">Matrix whose row/column covariances are computed.</param>
/// <param name="t">(Optional) Whether vectors are taken by row or by column.</param>
/// <returns>A symmetric covariance Matrix.</returns>
public static Matrix Covariance(Matrix source, VectorType t = VectorType.Col)
{
    int length = t == VectorType.Row ? source.Rows : source.Cols;
    Matrix m = new Matrix(length);
    // Both loop levels are parallelized; each (i, j) pair is computed exactly once
    // (inner loop starts at i) and written to m[i, j] and m[j, i].
    Parallel.For(0, length, i =>
        Parallel.For(i, length, j =>
            m[i, j] = m[j, i] = source[i, t].Covariance(source[j, t])));
    return m;
}
/// <summary>
/// Builds a <see cref="Summary"/> (mean, standard deviation, minimum, maximum,
/// median) of the given matrix along the requested direction.
/// </summary>
/// <param name="matrix">Matrix to summarize.</param>
/// <param name="byVector">Indicates which direction to summarize, default is <see cref="VectorType.Row"/> indicating top-down.</param>
/// <returns>Summary statistics for the matrix.</returns>
public static Summary Summarize(Matrix matrix, VectorType byVector = VectorType.Row)
{
    var summary = new Summary();
    summary.Average = matrix.Mean(byVector);
    summary.StandardDeviation = matrix.StdDev(byVector);
    summary.Minimum = matrix.Min(byVector);
    summary.Maximum = matrix.Max(byVector);
    summary.Median = matrix.Median(byVector);
    return summary;
}
/// <summary>
/// Computes the covariance matrix of the rows or columns of <paramref name="source"/>.
/// </summary>
/// <param name="source">Matrix whose row/column covariances are computed.</param>
/// <param name="t">(Optional) Row or Column vectors.</param>
/// <returns>A symmetric covariance Matrix.</returns>
public static Matrix Covariance(Matrix source, VectorType t = VectorType.Col)
{
    int n = t == VectorType.Row ? source.Rows : source.Cols;
    Matrix result = new Matrix(n);
    // Covariance is symmetric: compute the upper triangle once, mirror it.
    for (int a = 0; a < n; a++)
    {
        for (int b = a; b < n; b++)
        {
            double cov = source[a, t].Covariance(source[b, t]);
            result[a, b] = cov;
            result[b, a] = cov;
        }
    }
    return result;
}
/// <summary>
/// Initializes a new instance of the Vector class that contains elements
/// copied from the specified array.
/// </summary>
/// <param name="type">The vector type.</param>
/// <param name="data">The array whose elements are copied to the vector.</param>
/// <exception cref="System.ArgumentNullException">Thrown when <paramref name="data"/> is null.</exception>
/// <exception cref="System.ArgumentException">Thrown when <paramref name="data"/> is empty.</exception>
public Vector(VectorType type, double[] data)
{
    // BUG FIX: guard against null before touching data.Length; the original
    // would have thrown a bare NullReferenceException here.
    if (data == null)
        throw new System.ArgumentNullException(nameof(data));
    if (data.Length < 1)
        throw new System.ArgumentException("data.Length < 1");

    this._Type = type;
    // Defensive copy: later mutations of the caller's array must not affect the vector.
    this._Data = new double[data.Length];
    data.CopyTo(this._Data, 0);
}
// Create special vectors.
/// <summary>
/// Creates an n-element column vector of the given special kind. Only
/// <see cref="VectorType.ONES"/> (all components 1.0) is handled; any other
/// kind leaves the base-initialized component values untouched.
/// </summary>
/// <param name="_n">Number of components.</param>
/// <param name="type">Kind of special vector to create.</param>
public Vector(int _n, VectorType type) : base(_n, 1)
{
    if (type == VectorType.ONES)
    {
        for (int row = 0; row < base.NoRows; row++)
        {
            this[row] = 1.0;
        }
    }
}
/// <summary>
/// Estimates the distribution parameters <c>Mu</c> (sample mean) and
/// <c>Sigma</c> (sample covariance) from the samples in X.
/// </summary>
/// <param name="X">The Matrix of samples to process.</param>
/// <param name="type">(Optional) whether samples are laid out as rows or columns.</param>
public void Estimate(Matrix X, VectorType type = VectorType.Row)
{
    int sampleCount = type == VectorType.Row ? X.Rows : X.Cols;
    int dimension = type == VectorType.Row ? X.Cols : X.Rows;
    Mu = X.Sum(type) / sampleCount;
    // Accumulate outer products of mean-centered samples,
    // then apply Bessel's correction (divide by n - 1).
    Sigma = Matrix.Zeros(dimension);
    for (int sample = 0; sample < sampleCount; sample++)
    {
        var centered = X[sample, type] - Mu;
        Sigma += centered.Outer(centered);
    }
    Sigma *= (1d / (sampleCount - 1d));
}
/// <summary>
/// Estimates the mean vector (<c>Mu</c>) and covariance matrix (<c>Sigma</c>)
/// from the samples in X.
/// </summary>
/// <param name="X">The Matrix of samples to process.</param>
/// <param name="type">(Optional) whether samples are laid out as rows or columns.</param>
public void Estimate(Matrix X, VectorType type = VectorType.Row)
{
    int count = type == VectorType.Row ? X.Rows : X.Cols;
    int dim = type == VectorType.Row ? X.Cols : X.Rows;
    this.Mu = X.Sum(type) / count;
    this.Sigma = Matrix.Zeros(dim);
    // Sum of outer products of the mean-centered samples.
    for (int k = 0; k < count; k++)
    {
        var centered = X[k, type] - this.Mu;
        this.Sigma += centered.Outer(centered);
    }
    // Unbiased estimator: divide by (n - 1).
    this.Sigma *= 1d / (count - 1d);
}
/// <summary>
/// Builds the matrix-editing form for the given algebra object.
/// </summary>
/// <param name="title">Window title.</param>
/// <param name="columns">Number of columns in the grid.</param>
/// <param name="rows">Number of rows in the grid.</param>
/// <param name="algebraObject">The object being displayed/edited; may be a Matrix&lt;double&gt; (checked below) — presumably a vector type otherwise; confirm against callers.</param>
public MatrixForm(string title, int columns, int rows, object algebraObject)
{
    InitializeComponent();
    isMatrix = algebraObject is Matrix<double>;
    matrixTypes = new List<RadioButton> { ordinarMatrix, lineMatrix };
    readingTypes = new List<RadioButton> { radioButton1, radioButton2 };
    // User preferences persisted in application config.
    readingType = Int32.Parse(ConfigurationManager.AppSettings["vectorReadingType"]);
    matrixType = Int32.Parse(ConfigurationManager.AppSettings["matrixType"]);
    InitRadioButtins();
    Text = title;
    this.columns = columns;
    this.rows = rows;
    // Size the window before creating and populating the grid.
    SetWndsize(columns, rows);
    InitGrid();
    vectorType = SetGrid(algebraObject, columns, rows);
    this._algebraObject = algebraObject;
}
/// <summary>
/// Computes the sum of either the rows or columns of a matrix and returns a vector.
/// </summary>
/// <param name="m">Input Matrix.</param>
/// <param name="t">Row or Column sum.</param>
/// <returns>Vector of sums: one entry per column for <see cref="VectorType.Row"/>, one per row otherwise.</returns>
public static Vector Sum(Matrix m, VectorType t)
{
    bool byRow = t == VectorType.Row;
    int size = byRow ? m.Cols : m.Rows;
    int span = byRow ? m.Rows : m.Cols;
    var sums = new Vector(size);
    for (int outer = 0; outer < size; outer++)
    {
        for (int inner = 0; inner < span; inner++)
        {
            sums[outer] += byRow ? m[inner, outer] : m[outer, inner];
        }
    }
    return sums;
}
// ---------------- structural
/// <summary>Stacks a set of vectors into a matrix.</summary>
/// <exception cref="InvalidOperationException">Thrown when no vectors are supplied or their lengths differ.</exception>
/// <param name="type">Whether each vector becomes a row or a column of the result.</param>
/// <param name="vectors">Vectors to stack; all must share the same length.</param>
/// <returns>A Matrix built from the stacked vectors.</returns>
internal static Matrix Stack(VectorType type, params Vector[] vectors)
{
    if (vectors.Length == 0)
    {
        throw new InvalidOperationException("Cannot construct Matrix from empty vector set!");
    }
    int expected = vectors[0].Length;
    if (vectors.Any(v => v.Length != expected))
    {
        throw new InvalidOperationException("Vectors must all be of the same length!");
    }
    bool asRows = type == VectorType.Row;
    var result = Zeros(asRows ? vectors.Length : expected, asRows ? expected : vectors.Length);
    for (int idx = 0; idx < vectors.Length; idx++)
    {
        result[idx, type] = vectors[idx];
    }
    return result;
}
/// <summary>
/// Sorts the given Matrix by the specified row or column selector and returns the new Matrix.
/// </summary>
/// <param name="source">The Matrix.</param>
/// <param name="keySelector">Property selector to sort by.</param>
/// <param name="t">Specifies whether to sort horizontally or vertically.</param>
/// <param name="ascending">Determines whether to sort ascending or descending (Default: True).</param>
/// <returns>The sorted Matrix; the companion Vector of original indices is discarded.</returns>
public static Matrix Sort(Matrix source, Func<Vector, double> keySelector, VectorType t, bool ascending = true)
{
    // Delegate to the overload that also reports original indices; drop them here.
    Vector discardedIndices;
    return Sort(source, keySelector, t, ascending, out discardedIndices);
}
/// <summary>
/// Attempts to construct the destination item type (and extended column info) for
/// converting <paramref name="info"/>'s source type to <paramref name="kind"/>.
/// Falls back to hand-built ValueMappers for a few key/I8 conversions that the
/// standard conversion table does not cover. Returns false (with
/// <paramref name="ex"/> left null) when no legal conversion exists.
/// </summary>
private static bool TryCreateEx(IExceptionContext ectx, ColInfo info, DataKind kind, KeyRange range, out PrimitiveType itemType, out ColInfoEx ex)
{
    ectx.AssertValue(info);
    ectx.Assert(Enum.IsDefined(typeof(DataKind), kind));

    ex = null;

    var typeSrc = info.TypeSrc;
    if (range != null)
    {
        // Explicit key range requested: source must be key, text, same raw kind,
        // or I8 convertible to U8/U4.
        itemType = TypeParsingUtils.ConstructKeyType(kind, range);
        if (!typeSrc.ItemType().IsKey() && !typeSrc.ItemType().IsText() && typeSrc.ItemType().RawKind() != kind &&
            !(typeSrc.ItemType().RawKind() == DataKind.I8 && (kind == DataKind.U8 || kind == DataKind.U4)))
        {
            return (false);
        }
    }
    else if (!typeSrc.ItemType().IsKey())
    {
        itemType = ColumnTypeHelper.PrimitiveFromKind(kind);
    }
    else if (!ColumnTypeHelper.IsValidDataKind(kind))
    {
        itemType = ColumnTypeHelper.PrimitiveFromKind(kind);
        return (false);
    }
    else
    {
        var key = typeSrc.ItemType().AsKey();
        ectx.Assert(ColumnTypeHelper.IsValidDataKind(key.RawKind()));
        int count = key.Count;
        // Technically, it's an error for the counts not to match, but we'll let the Conversions
        // code return false below. There's a possibility we'll change the standard conversions to
        // map out of bounds values to zero, in which case, this is the right thing to do.
        ulong max = kind.ToMaxInt();
        if ((ulong)count > max)
        {
            count = (int)max;
        }
        itemType = new KeyType(kind.ToType(), key.Min, count, key.Contiguous);
    }

    // Ensure that the conversion is legal. We don't actually cache the delegate here. It will get
    // re-fetched by the utils code when needed.
    bool identity;
    Delegate del;
    if (!Conversions.Instance.TryGetStandardConversion(typeSrc.ItemType(), itemType, out del, out identity))
    {
        if (typeSrc.ItemType().RawKind() == itemType.RawKind())
        {
            switch (typeSrc.ItemType().RawKind())
            {
                case DataKind.U4:
                    // Key starts at 1.
                    uint plus = (itemType.IsKey() ? (uint)1 : (uint)0) - (typeSrc.IsKey() ? (uint)1 : (uint)0);
                    identity = false;
                    ValueMapper<uint, uint> map_ = (in uint src, ref uint dst) => { dst = src + plus; };
                    del = (Delegate)map_;
                    if (del == null)
                    {
                        throw Contracts.ExceptNotSupp("Issue with casting");
                    }
                    break;
                default:
                    // NOTE(review): "suppoted" is a typo in this runtime error
                    // message ("supported") — left as-is here; fix separately.
                    throw Contracts.Except("Not suppoted type {0}", typeSrc.ItemType().RawKind());
            }
        }
        else if (typeSrc.ItemType().RawKind() == DataKind.I8 && kind == DataKind.U8)
        {
            // I8 -> U8 with key-offset adjustment and range check.
            ulong plus = (itemType.IsKey() ? (ulong)1 : (ulong)0) - (typeSrc.IsKey() ? (ulong)1 : (ulong)0);
            identity = false;
            ValueMapper<long, ulong> map_ = (in long src, ref ulong dst) => { CheckRange(src, dst, ectx); dst = (ulong)src + plus; };
            del = (Delegate)map_;
            if (del == null)
            {
                throw Contracts.ExceptNotSupp("Issue with casting");
            }
        }
        else if (typeSrc.ItemType().RawKind() == DataKind.I8 && kind == DataKind.U4)
        {
            // I8 -> U4 with key-offset adjustment and range check.
            uint plus = (itemType.IsKey() ? (uint)1 : (uint)0) - (typeSrc.IsKey() ? (uint)1 : (uint)0);
            identity = false;
            ValueMapper<long, uint> map_ = (in long src, ref uint dst) => { CheckRange(src, dst, ectx); dst = (uint)src + plus; };
            del = (Delegate)map_;
            if (del == null)
            {
                throw Contracts.ExceptNotSupp("Issue with casting");
            }
        }
        else
        {
            return (false);
        }
    }

    ColumnType typeDst = itemType;
    if (typeSrc.IsVector())
    {
        typeDst = new VectorType(itemType, typeSrc.AsVector().Dimensions.ToArray());
    }

    // An output column is transposable iff the input column was transposable.
    VectorType slotType = null;
    if (info.SlotTypeSrc != null)
    {
        slotType = new VectorType(itemType, info.SlotTypeSrc.Dimensions.ToArray());
    }

    ex = new ColInfoEx(kind, range != null, typeDst, slotType);
    return (true);
}
/// <summary>
/// Builds a vector field descriptor: component values, per-component labels,
/// visibility of the z/a components, and a display type name such as "vector3F".
/// </summary>
public VectorData(string name, uint offset, uint address, VectorType type, float x, float y, float z, float a, string labels, bool degrees, uint pluginLine)
    : base(name, offset, address, pluginLine)
{
    // Vector Components
    _x = x;
    _y = y;
    _z = z;
    _a = a;

    // Visibility for last 2 Components
    _zVis = _aVis = false;

    _labels = labels;
    _degrees = degrees;

    // Optional custom label letters for components; fall back to x/y/z/a when
    // the label string is shorter than the component count.
    // NOTE(review): `(int)type` is compared against the label length, which
    // assumes the enum's numeric values equal the component counts — confirm.
    if (_labels.Length < (int)type)
    {
        _xLabel = "x";
        _yLabel = "y";
        _zLabel = "z";
        _aLabel = "a";
    }
    else
    {
        // Fall-through chain: Vector4 assigns a, then z, then y, then x.
        switch (type)
        {
            case VectorType.Vector4:
                _aLabel = _labels[3].ToString();
                goto case VectorType.Vector3;
            case VectorType.Vector3:
                _zLabel = _labels[2].ToString();
                goto case VectorType.Vector2;
            case VectorType.Vector2:
                _yLabel = _labels[1].ToString();
                goto default;
            default:
                _xLabel = _labels[0].ToString();
                break;
        }
    }

    // Make last 2 Components visible if we need either
    switch (type)
    {
        case VectorType.Vector4:
            _aVis = true;
            goto case VectorType.Vector3;
        case VectorType.Vector3:
            _zVis = true;
            break;
    }

    // Create our Vector type name
    _typeLabel = "vector";
    switch (type)
    {
        case VectorType.Vector4:
            _typeLabel += "4";
            break;
        case VectorType.Vector3:
            _typeLabel += "3";
            break;
        case VectorType.Vector2:
            _typeLabel += "2";
            break;
    }

    // Suffix: "D" when values are in degrees, "F" otherwise.
    if (_degrees)
        _typeLabel += "D";
    else
        _typeLabel += "F";
}
/// <summary>
/// Returns the column/row vector at index <paramref name="i"/>.
/// </summary>
/// <param name="i">Col/Row index.</param>
/// <param name="t">Row or Column.</param>
/// <returns>Vector.</returns>
public Vector this[int i, VectorType t]
{
    get
    {
        // switch it up if using a transposed version
        if (_asTransposeRef)
        {
            t = t == VectorType.Row ? VectorType.Column : VectorType.Row;
        }

        if (t == VectorType.Row)
        {
            if (i >= Rows)
            {
                throw new IndexOutOfRangeException();
            }
            return (new Vector(_matrix[i]));
        }
        else
        {
            if (i >= Cols)
            {
                throw new IndexOutOfRangeException();
            }
            return (new Vector(_matrix, i));
        }
    }
    set
    {
        if (_asTransposeRef)
        {
            throw new InvalidOperationException("Cannot modify matrix in read-only transpose mode!");
        }

        if (t == VectorType.Row)
        {
            if (i >= Rows)
            {
                throw new IndexOutOfRangeException();
            }
            if (value.Length > Cols)
            {
                throw new InvalidOperationException(string.Format("Vector has length larger than {0}", Cols));
            }
            for (int k = 0; k < Cols; k++)
            {
                _matrix[i][k] = value[k];
            }
        }
        else
        {
            if (i >= Cols)
            {
                throw new IndexOutOfRangeException();
            }
            // BUG FIX: the original message reported Cols here even though the
            // bound actually checked for a column vector is Rows.
            if (value.Length > Rows)
            {
                throw new InvalidOperationException(string.Format("Vector has length larger than {0}", Rows));
            }
            for (int k = 0; k < Rows; k++)
            {
                _matrix[k][i] = value[k];
            }
        }
    }
}
/// <summary>Sums the elements of the i-th row or column of this matrix.</summary>
/// <param name="i">Row/column index.</param>
/// <param name="t">Whether to sum a row or a column.</param>
/// <returns>The sum of the selected vector's elements.</returns>
public double Sum(int i, VectorType t) => this[i, t].Sum();
/// <summary>
/// Attempts to construct the destination item type (and extended column info)
/// for converting <paramref name="info"/>'s source type to the requested
/// <paramref name="kind"/>, using only standard conversions. Returns false
/// (with <paramref name="ex"/> left null) when no legal conversion exists.
/// </summary>
private static bool TryCreateEx(IExceptionContext ectx, ColInfo info, DataKind kind, KeyRange range, out PrimitiveType itemType, out ColInfoEx ex)
{
    ectx.AssertValue(info);
    ectx.Assert(Enum.IsDefined(typeof(DataKind), kind));

    ex = null;

    var typeSrc = info.TypeSrc;
    if (range != null)
    {
        // An explicit key range was requested: build a key type; the source
        // must itself be a key or text type.
        itemType = TypeParsingUtils.ConstructKeyType(kind, range);
        if (!typeSrc.ItemType.IsKey && !typeSrc.ItemType.IsText)
        {
            return (false);
        }
    }
    else if (!typeSrc.ItemType.IsKey)
    {
        itemType = PrimitiveType.FromKind(kind);
    }
    else if (!KeyType.IsValidDataKind(kind))
    {
        itemType = PrimitiveType.FromKind(kind);
        return (false);
    }
    else
    {
        var key = typeSrc.ItemType.AsKey;
        ectx.Assert(KeyType.IsValidDataKind(key.RawKind));
        int count = key.Count;
        // Technically, it's an error for the counts not to match, but we'll let the Conversions
        // code return false below. There's a possibility we'll change the standard conversions to
        // map out of bounds values to zero, in which case, this is the right thing to do.
        ulong max = kind.ToMaxInt();
        if ((ulong)count > max)
        {
            count = (int)max;
        }
        itemType = new KeyType(kind, key.Min, count, key.Contiguous);
    }

    // Ensure that the conversion is legal. We don't actually cache the delegate here. It will get
    // re-fetched by the utils code when needed.
    bool identity;
    Delegate del;
    if (!Runtime.Data.Conversion.Conversions.Instance.TryGetStandardConversion(typeSrc.ItemType, itemType, out del, out identity))
    {
        return (false);
    }

    ColumnType typeDst = itemType;
    if (typeSrc.IsVector)
    {
        typeDst = new VectorType(itemType, typeSrc.AsVector);
    }

    // An output column is transposable iff the input column was transposable.
    VectorType slotType = null;
    if (info.SlotTypeSrc != null)
    {
        slotType = new VectorType(itemType, info.SlotTypeSrc);
    }

    ex = new ColInfoEx(kind, range != null, typeDst, slotType);
    return (true);
}
/// <summary>
/// Adds a game object's transform to the collection.
/// </summary>
/// <param name="obj">Game object whose transform is added.</param>
/// <param name="type">(Optional) which vector of the transform to track; defaults to position.</param>
public void Add(GameObject obj, VectorType type = VectorType.position) => Add(obj.transform, type);
/// <summary>
/// Visitor hook for vector types; this visitor does not support them.
/// </summary>
/// <param name="vectorType">The vector type being visited.</param>
/// <param name="quals">Type qualifiers attached to the type.</param>
/// <exception cref="NotImplementedException">Always thrown; vector types are not handled here.</exception>
public bool VisitVectorType(VectorType vectorType, TypeQualifiers quals)
{
    throw new NotImplementedException();
}
/// <summary>
/// Reshapes the supplied Vector into a Matrix form.
/// </summary>
/// <param name="v">Source vector to act on.</param>
/// <param name="dimension">Length of the specified dimension.</param>
/// <param name="dimensionType">Dimension type to use for creating a <paramref name="dimension"/> by n matrix.</param>
/// <param name="byVector">Direction to process, i.e. Row = Fill Down then Right, or Col = Fill Right then Down.</param>
/// <returns>The reshaped Matrix.</returns>
public static Matrix Reshape(Vector v, int dimension, VectorType dimensionType = VectorType.Col, VectorType byVector = VectorType.Row)
{
    // The other dimension is implied by the vector length.
    int implied = v.Length / dimension;
    int rows = dimensionType == VectorType.Row ? dimension : implied;
    int cols = dimensionType == VectorType.Col ? dimension : implied;
    return Reshape(v, rows, cols, byVector);
}
/// <summary>
/// Unshapes (unrolls) the given Matrix into a Vector along the <paramref name="dimensionType"/> axis.
/// <para>Combines the matrix's column vectors when <paramref name="dimensionType"/> equals 'Col', otherwise its row vectors.</para>
/// </summary>
/// <param name="m">The Matrix to act on.</param>
/// <param name="dimensionType">Type of the dimension to use when unrolling the Matrix.</param>
/// <returns>The unrolled Vector.</returns>
public static Vector Unshape(Matrix m, VectorType dimensionType = VectorType.Col)
{
    var slices = dimensionType == VectorType.Col ? m.GetCols() : m.GetRows();
    return Vector.Combine(slices.ToArray());
}
/// <summary>
/// Verifies Matrix.Insert: inserting a row/column vector at <paramref name="index"/>
/// grows the corresponding dimension by one, leaves the source matrix unchanged,
/// places the vector at the expected slot, and shifts subsequent rows/columns.
/// </summary>
/// <param name="index">Position at which to insert.</param>
/// <param name="insertAfter">Whether to insert after (rather than at) the index.</param>
/// <param name="vectorType">Insert as a row or as a column.</param>
/// <param name="isTransposed">Whether to run the scenario on the transposed matrix.</param>
public void Matrix_Insert_Test(int index, bool insertAfter, VectorType vectorType, bool isTransposed)
{
    // XOR: transposing swaps which vector length (4 vs 3) the insert requires.
    Vector v = (vectorType == VectorType.Row) ^ isTransposed ?
        new double[] { 1, 3, 2, 0 } :
        new double[] { 2, 1, 0 };
    Matrix A = new[,]
    {
        { 4, 1, 3, 2 },
        { 1, 2, 3, 4 },
        { 7, 9, 8, 6 }
    };

    if (isTransposed)
        A = A.T;

    var rows = A.Rows;
    var columns = A.Cols;

    var B = A.Insert(v, index, vectorType, insertAfter);

    // Insert must not mutate the source matrix.
    Assert.Equal(A.Rows, rows);
    Assert.Equal(A.Cols, columns);

    // Exactly one dimension of the result grows by one.
    if (vectorType == VectorType.Row)
    {
        Assert.Equal(B.Rows, rows + 1);
        Assert.Equal(B.Cols, columns);
    }
    else
    {
        Assert.Equal(B.Rows, rows);
        Assert.Equal(B.Cols, columns + 1);
    }

    var dimension = vectorType == VectorType.Row ? rows : columns;
    for (var i = 0; i < dimension + 1; i++)
    {
        // Inserted vector lands at `index` (or at the end for insertAfter on the
        // last slot); earlier slots are unchanged, later slots shift down by one.
        if (index == dimension - 1 && insertAfter)
            Assert.Equal(v, B[dimension, vectorType]);
        else if (i == index)
            Assert.Equal(v, B[i, vectorType]);
        else if (i < index)
            Assert.Equal(A[i, vectorType], B[i, vectorType]);
        else
            Assert.Equal(A[i - 1, vectorType], B[i, vectorType]);
    }
}
/// <summary>Computes row or column sums of a matrix (static facade over the instance method).</summary>
/// <param name="m">Input Matrix.</param>
/// <param name="t">Row or Column sum.</param>
/// <returns>Vector of sums.</returns>
public static Vector Sum(Matrix m, VectorType t) => m.Sum(t);
/// <summary>
/// Allocates unmanaged memory and copy-constructs a native VectorType value into it.
/// </summary>
/// <param name="native">Native struct to copy.</param>
/// <returns>Pointer to the newly allocated copy; the caller owns the memory.</returns>
private static void* __CopyValue(VectorType.__Internal native)
{
    // 20 bytes — presumably sizeof the native VectorType struct as emitted by
    // the CppSharp generator; do not change by hand (generated code).
    var ret = Marshal.AllocHGlobal(20);
    // Invoke the native copy constructor on the freshly allocated storage.
    global::CppSharp.Parser.AST.VectorType.__Internal.cctor_2(ret, new global::System.IntPtr(&native));
    return ret.ToPointer();
}
/// <summary>
/// Returns a vector of the median values for each row or column.
/// (<see cref="VectorType.Row"/> takes medians down the rows, one per column.)
/// </summary>
/// <param name="source">Matrix.</param>
/// <param name="t">VectorType.</param>
/// <returns>Vector of medians.</returns>
public static Vector Median(Matrix source, VectorType t = VectorType.Col)
{
    var slices = t == VectorType.Row ? source.GetCols() : source.GetRows();
    var medians = slices.Select(s => s.Median());
    return medians.ToVector();
}
/// <summary>Computes the sum of the i-th row or column of the matrix.</summary>
/// <param name="m">Input Matrix.</param>
/// <param name="i">Zero-based index of the row/column.</param>
/// <param name="t">Row or Column sum.</param>
/// <returns>The sum of the selected vector's elements.</returns>
public static double Sum(Matrix m, int i, VectorType t) => m[i, t].Sum();
/// <summary>
/// Matrix constructor with an array of vectors,
/// can be columns or rows based on type.
/// </summary>
/// <param name="vectors">Vectors forming the matrix.</param>
/// <param name="type">Determines whether the vectors are columns or rows</param>
public RealMatrix(RealVector[] vectors, VectorType type) : base(vectors, type)
{
}
/// <summary>
/// Prints a vector type by delegating to its element type.
/// </summary>
/// <param name="vectorType">The vector type to print.</param>
/// <param name="quals">Type qualifiers (not consulted here).</param>
/// <returns>The printed element type.</returns>
public override TypePrinterResult VisitVectorType(VectorType vectorType, TypeQualifiers quals) =>
    vectorType.ElementType.Visit(this);
/// <summary>
/// Matrix constructor with an array of vectors,
/// can be columns or rows based on type.
/// </summary>
/// <param name="vectors">Vectors forming the matrix.</param>
/// <param name="type">Determines whether the vectors are columns or rows.</param>
public RealMatrix(Vector<Real>[] vectors, VectorType type) : base(vectors, type)
{
}
/// <summary>
/// Checks that equality/hashing of ColumnType implementations is consistent:
/// every type constructed below is used as a dictionary key; if a lookup hits
/// an existing entry whose ToString differs, two genuinely different types
/// compared equal (a duplicate), and the test fails.
/// </summary>
public void TestEqualAndGetHashCode()
{
    var dict = new Dictionary<ColumnType, string>();
    // add PrimitiveTypes, KeyType & corresponding VectorTypes
    VectorType tmp1, tmp2;
    var types = new PrimitiveType[]
    {
        NumberType.I1, NumberType.I2, NumberType.I4, NumberType.I8,
        NumberType.U1, NumberType.U2, NumberType.U4, NumberType.U8, NumberType.UG,
        TextType.Instance, BoolType.Instance,
        DateTimeType.Instance, DateTimeOffsetType.Instance, TimeSpanType.Instance
    };

    foreach (var type in types)
    {
        var tmp = type;
        if (dict.ContainsKey(tmp) && dict[tmp] != tmp.ToString())
        {
            Assert.True(false, dict[tmp] + " and " + tmp.ToString() + " are duplicates.");
        }
        dict[tmp] = tmp.ToString();
        // 1-D and 2-D vector types over the primitive, sizes 0..4.
        for (int size = 0; size < 5; size++)
        {
            tmp1 = new VectorType(tmp, size);
            if (dict.ContainsKey(tmp1) && dict[tmp1] != tmp1.ToString())
            {
                Assert.True(false, dict[tmp1] + " and " + tmp1.ToString() + " are duplicates.");
            }
            dict[tmp1] = tmp1.ToString();
            for (int size1 = 0; size1 < 5; size1++)
            {
                tmp2 = new VectorType(tmp, size, size1);
                if (dict.ContainsKey(tmp2) && dict[tmp2] != tmp2.ToString())
                {
                    Assert.True(false, dict[tmp2] + " and " + tmp2.ToString() + " are duplicates.");
                }
                dict[tmp2] = tmp2.ToString();
            }
        }

        // KeyType & Vector
        var rawType = tmp.RawType;
        if (!KeyType.IsValidDataType(rawType))
        {
            continue;
        }
        for (ulong min = 0; min < 5; min++)
        {
            for (var count = 0; count < 5; count++)
            {
                tmp = new KeyType(rawType, min, count);
                if (dict.ContainsKey(tmp) && dict[tmp] != tmp.ToString())
                {
                    Assert.True(false, dict[tmp] + " and " + tmp.ToString() + " are duplicates.");
                }
                dict[tmp] = tmp.ToString();
                for (int size = 0; size < 5; size++)
                {
                    tmp1 = new VectorType(tmp, size);
                    if (dict.ContainsKey(tmp1) && dict[tmp1] != tmp1.ToString())
                    {
                        Assert.True(false, dict[tmp1] + " and " + tmp1.ToString() + " are duplicates.");
                    }
                    dict[tmp1] = tmp1.ToString();
                    for (int size1 = 0; size1 < 5; size1++)
                    {
                        tmp2 = new VectorType(tmp, size, size1);
                        if (dict.ContainsKey(tmp2) && dict[tmp2] != tmp2.ToString())
                        {
                            Assert.True(false, dict[tmp2] + " and " + tmp2.ToString() + " are duplicates.");
                        }
                        dict[tmp2] = tmp2.ToString();
                    }
                }
            }
            // Non-contiguous key with zero count, plus its vector forms.
            tmp = new KeyType(rawType, min, 0, false);
            if (dict.ContainsKey(tmp) && dict[tmp] != tmp.ToString())
            {
                Assert.True(false, dict[tmp] + " and " + tmp.ToString() + " are duplicates.");
            }
            dict[tmp] = tmp.ToString();
            for (int size = 0; size < 5; size++)
            {
                tmp1 = new VectorType(tmp, size);
                if (dict.ContainsKey(tmp1) && dict[tmp1] != tmp1.ToString())
                {
                    Assert.True(false, dict[tmp1] + " and " + tmp1.ToString() + " are duplicates.");
                }
                dict[tmp1] = tmp1.ToString();
                for (int size1 = 0; size1 < 5; size1++)
                {
                    tmp2 = new VectorType(tmp, size, size1);
                    if (dict.ContainsKey(tmp2) && dict[tmp2] != tmp2.ToString())
                    {
                        Assert.True(false, dict[tmp2] + " and " + tmp2.ToString() + " are duplicates.");
                    }
                    dict[tmp2] = tmp2.ToString();
                }
            }
        }
    }

    // add ImageTypes
    for (int height = 1; height < 5; height++)
    {
        for (int width = 1; width < 5; width++)
        {
            var tmp4 = new ImageType(height, width);
            if (dict.ContainsKey(tmp4))
            {
                Assert.True(false, dict[tmp4] + " and " + tmp4.ToString() + " are duplicates.");
            }
            dict[tmp4] = tmp4.ToString();
        }
    }
}
/// <summary>Enumerates indices in this collection.</summary>
/// <param name="source">Matrix whose row/column vectors are tested.</param>
/// <param name="f">Predicate applied to each row/column vector.</param>
/// <param name="t">Row or Column direction.</param>
/// <returns>
/// An enumerator that allows foreach to be used to process indices in this collection.
/// </returns>
public static IEnumerable<int> Indices(Matrix source, Func<Vector, bool> f, VectorType t)
{
    int limit = t == VectorType.Row ? source.Rows : source.Cols;
    int pos = 0;
    while (pos < limit)
    {
        if (f(source[pos, t]))
        {
            yield return pos;
        }
        pos++;
    }
}
/// <summary>
/// Base visitor hook for vector types; derived printers must override this
/// to support them.
/// </summary>
/// <param name="vectorType">The vector type being visited.</param>
/// <param name="quals">Type qualifiers attached to the type.</param>
/// <exception cref="NotImplementedException">Always thrown by this base implementation.</exception>
public virtual TypePrinterResult VisitVectorType(VectorType vectorType, TypeQualifiers quals)
{
    throw new NotImplementedException();
}
/// <summary>
/// Builds an ML.NET Schema describing the outputs of a TensorFlow graph,
/// optionally filtered to operators whose type equals <paramref name="opType"/>.
/// Each supported operator becomes one vector-typed column carrying metadata
/// for the operator's type and (if any) its upstream operator names.
/// </summary>
internal static Schema GetModelSchema(IExceptionContext ectx, TFGraph graph, string opType = null)
{
    var schemaBuilder = new SchemaBuilder();
    foreach (var op in graph)
    {
        if (opType != null && opType != op.OpType)
        {
            continue;
        }

        var tfType = op[0].OutputType;
        // Determine element type in Tensorflow tensor. For example, a vector of floats may get NumberType.R4 here.
        var mlType = Tf2MlNetTypeOrNull(tfType);

        // If the type is not supported in ML.NET then we cannot represent it as a column in an Schema.
        // We also cannot output it with a TensorFlowTransform, so we skip it.
        // Furthermore, operators which have NumOutputs <= 0 needs to be filtered.
        // The 'GetTensorShape' method crashes TensorFlow runtime
        // (https://github.com/dotnet/machinelearning/issues/2156) when the operator has no outputs.
        if (mlType == null || op.NumOutputs <= 0)
        {
            continue;
        }

        // Construct the final ML.NET type of a Tensorflow variable.
        var tensorShape = graph.GetTensorShape(op[0]).ToIntArray();
        var columnType = new VectorType(mlType);
        if (!(Utils.Size(tensorShape) == 1 && tensorShape[0] <= 0) &&
            (Utils.Size(tensorShape) > 0 && tensorShape.Skip(1).All(x => x > 0)))
        {
            // Known shape: a non-positive leading (batch) dimension is dropped.
            columnType = new VectorType(mlType, tensorShape[0] > 0 ? tensorShape : tensorShape.Skip(1).ToArray());
        }

        // There can be at most two metadata fields.
        // 1. The first field always presents. Its value is this operator's type. For example,
        //    if an output is produced by an "Softmax" operator, the value of this field should be "Softmax".
        // 2. The second field stores operators whose outputs are consumed by this operator. In other words,
        //    these values are names of some upstream operators which should be evaluated before executing
        //    the current operator. It's possible that one operator doesn't need any input, so this field
        //    can be missing.
        var metadataBuilder = new MetadataBuilder();
        // Create the first metadata field.
        metadataBuilder.Add(TensorflowOperatorTypeKind, TextType.Instance,
            (ref ReadOnlyMemory<char> value) => value = op.OpType.AsMemory());

        if (op.NumInputs > 0)
        {
            // Put upstream operators' names to an array (type: VBuffer) of string (type: ReadOnlyMemory<char>).
            VBuffer<ReadOnlyMemory<char>> upstreamOperatorNames = default;
            var bufferEditor = VBufferEditor.Create(ref upstreamOperatorNames, op.NumInputs);
            for (int i = 0; i < op.NumInputs; ++i)
            {
                bufferEditor.Values[i] = op.GetInput(i).Operation.Name.AsMemory();
            }
            upstreamOperatorNames = bufferEditor.Commit(); // Used in metadata's getter.

            // Create the second metadata field.
            metadataBuilder.Add(TensorflowUpstreamOperatorsKind, new VectorType(TextType.Instance, op.NumInputs),
                (ref VBuffer<ReadOnlyMemory<char>> value) => { upstreamOperatorNames.CopyTo(ref value); });
        }

        schemaBuilder.AddColumn(op.Name, columnType, metadataBuilder.GetMetadata());
    }
    return (schemaBuilder.GetSchema());
}
/// <summary>
/// Turn this Matrix into an array of vectors (the type parameter determines
/// whether the columns or rows will be returned).
/// </summary>
/// <param name="type">Whether to extract (get) or assign (set) columns or rows.</param>
/// <returns>The matrix's columns or rows as an array of vectors.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="type"/> is not a recognized vector type.</exception>
public new RealVector[] this[VectorType type]
{
    get
    {
        RealVector[] result;
        switch (type)
        {
            case VectorType.Column:
                // One vector per column, each with Height components.
                result = new RealVector[Width];
                for (int j = 0; j < result.Length; j++)
                {
                    result[j] = new RealVector(Height);
                }
                for (int i = 0; i < Height; i++)
                {
                    for (int j = 0; j < Width; j++)
                    {
                        result[j][i] = Indices[i, j];
                    }
                }
                return result;
            case VectorType.Row:
                // One vector per row, each with Width components.
                result = new RealVector[Height];
                for (int i = 0; i < result.Length; i++)
                {
                    result[i] = new RealVector(Width);
                }
                for (int i = 0; i < Height; i++)
                {
                    for (int j = 0; j < Width; j++)
                    {
                        result[i][j] = Indices[i, j];
                    }
                }
                return result;
            default:
                throw new ArgumentException("Given argument was not a vectortype");
        }
    }
    set
    {
        Real[,] indices;
        switch (type)
        {
            case VectorType.Column:
                indices = new Real[value[0].Dimension, value.Length];
                for (int j = 0; j < value.Length; j++)
                {
                    for (int i = 0; i < value[0].Dimension; i++)
                    {
                        indices[i, j] = value[j][i];
                    }
                }
                break;
            case VectorType.Row:
                indices = new Real[value.Length, value[0].Dimension];
                for (int i = 0; i < value.Length; i++)
                {
                    for (int j = 0; j < value[0].Dimension; j++)
                    {
                        indices[i, j] = value[i][j];
                    }
                }
                break;
            default:
                // BUG FIX: the original silently assigned Indices = null for an
                // unrecognized VectorType; throw instead, mirroring the getter.
                throw new ArgumentException("Given argument was not a vectortype");
        }
        Indices = indices;
    }
}
/// <summary>
/// Wires a TensorFlow session into the transform: validates that every requested
/// input/output column exists in the graph and has a supported type, caches the
/// TF types/shapes for inputs, and builds the ML.NET output column types.
/// </summary>
private TensorFlowTransform(IHostEnvironment env, TFSession session, string[] inputs, string[] outputs)
{
    Contracts.CheckValue(env, nameof(env));
    _host = env.Register(nameof(RegistrationName));
    _host.CheckValue(session, nameof(session));
    _host.CheckNonEmpty(inputs, nameof(inputs));
    _host.CheckNonEmpty(outputs, nameof(outputs));
    Session = session;

    // Validate inputs: each must exist in the graph and have a supported TF type.
    foreach (var input in inputs)
    {
        _host.CheckNonWhiteSpace(input, nameof(inputs));
        if (Session.Graph[input] == null)
        {
            throw _host.ExceptParam(nameof(inputs), $"Input column '{input}' does not exist in the model");
        }
        var tfInput = new TFOutput(Session.Graph[input]);
        if (!TensorFlowUtils.IsTypeSupported(tfInput.OutputType))
        {
            throw _host.ExceptParam(nameof(session), $"Input type '{tfInput.OutputType}' of input column '{input}' is not supported in TensorFlow");
        }
    }

    // Validate outputs: each must be unique and exist in the graph.
    var newNames = new HashSet<string>();
    foreach (var output in outputs)
    {
        _host.CheckNonWhiteSpace(output, nameof(outputs));
        if (!newNames.Add(output))
        {
            throw _host.ExceptParam(nameof(outputs), $"Output column '{output}' specified multiple times");
        }
        if (Session.Graph[output] == null)
        {
            throw _host.ExceptParam(nameof(outputs), $"Output column '{output}' does not exist in the model");
        }
    }

    Inputs = inputs;
    TFInputTypes = new TFDataType[Inputs.Length];
    TFInputShapes = new TFShape[Inputs.Length];
    for (int i = 0; i < Inputs.Length; i++)
    {
        var tfInput = new TFOutput(Graph[Inputs[i]]);
        TFInputTypes[i] = tfInput.OutputType;
        TFInputShapes[i] = Graph.GetTensorShape(tfInput);
        if (TFInputShapes[i].NumDimensions != -1)
        {
            // Replace an unknown (-1) leading dimension with BatchSize.
            var newShape = new long[TFInputShapes[i].NumDimensions];
            newShape[0] = TFInputShapes[i][0] == -1 ? BatchSize : TFInputShapes[i][0];
            for (int j = 1; j < TFInputShapes[i].NumDimensions; j++)
            {
                newShape[j] = TFInputShapes[i][j];
            }
            TFInputShapes[i] = new TFShape(newShape);
        }
    }

    Outputs = outputs;
    OutputTypes = new ColumnType[Outputs.Length];
    TFOutputTypes = new TFDataType[Outputs.Length];
    for (int i = 0; i < Outputs.Length; i++)
    {
        var tfOutput = new TFOutput(Graph[Outputs[i]]);
        var shape = Graph.GetTensorShape(tfOutput);
        // Skips BatchSize leading entries when the first dim is unknown (-1);
        // NOTE(review): presumably BatchSize == 1 so this skips one entry — confirm.
        int[] dims = shape.NumDimensions > 0 ? shape.ToIntArray().Skip(shape[0] == -1 ? BatchSize : 0).ToArray() : new[] { 0 };
        var type = TensorFlowUtils.Tf2MlNetType(tfOutput.OutputType);
        OutputTypes[i] = new VectorType(type, dims);
        TFOutputTypes[i] = tfOutput.OutputType;
    }
}
/// <summary>
/// Collects the IValueMapper from each model in the ensemble, determines the
/// common input vector type (all fixed-size inputs must agree) and the widest
/// output vector type across the models.
/// </summary>
/// <param name="mappers">One value mapper per model, in model order.</param>
/// <param name="inputType">Common input type; falls back to a sizeless Float vector when no model fixes one.</param>
/// <param name="outputType">Output type with the largest size among the models.</param>
private void InitializeMappers(out IValueMapper[] mappers, out VectorType inputType, out VectorType outputType)
{
    Host.AssertNonEmpty(Models);

    mappers = new IValueMapper[Models.Length];
    inputType = null;
    outputType = null;
    for (int i = 0; i < Models.Length; i++)
    {
        var vm = Models[i].Predictor as IValueMapper;
        if (!IsValid(vm, out VectorType vmInputType, out VectorType vmOutputType))
        {
            throw Host.Except("Predictor does not implement expected interface");
        }
        if (vmInputType.Size > 0)
        {
            // First fixed-size input wins; later models must match it exactly.
            if (inputType == null)
            {
                inputType = vmInputType;
            }
            else if (vmInputType.Size != inputType.Size)
            {
                throw Host.Except("Predictor input type mismatch");
            }
        }
        // Track the widest output type seen so far.
        if (outputType == null || vmOutputType.Size > outputType.Size)
        {
            outputType = vmOutputType;
        }
        mappers[i] = vm;
    }
    Host.AssertValue(outputType);

    if (inputType == null)
    {
        inputType = new VectorType(NumberType.Float);
    }
}
// Resolves the source columns for concatenated column iinfo and derives the output
// column's item type, total slot count, and metadata flags.
// NOTE(review): this method is truncated in this view — the body of the final
// TryGetCategoricalFeatureIndices check (and anything after it) is not visible here.
private BoundColumn MakeColumn(DataViewSchema inputSchema, int iinfo)
{
    Contracts.AssertValue(inputSchema);
    Contracts.Assert(0 <= iinfo && iinfo < _parent._columns.Length);
    DataViewType itemType = null;
    int[] sources = new int[_parent._columns[iinfo].Sources.Count];
    // Go through the columns, and establish the following:
    // - indices of input columns in the input schema. Throw if they are not there.
    // - output type. Throw if the types of inputs are not the same.
    // - how many slots are there in the output vector (or variable). Denoted by totalSize.
    // - total size of CategoricalSlotRanges metadata, if present. Denoted by catCount.
    // - whether the column is normalized.
    // It is true when ALL inputs are normalized (and of numeric type).
    // - whether the column has slot names.
    // It is true if ANY input is a scalar, or has slot names.
    // - whether the column has categorical slot ranges.
    // It is true if ANY input has this metadata.
    int totalSize = 0;
    int catCount = 0;
    bool isNormalized = true;
    bool hasSlotNames = false;
    bool hasCategoricals = false;
    for (int i = 0; i < _parent._columns[iinfo].Sources.Count; i++)
    {
        var(srcName, srcAlias) = _parent._columns[iinfo].Sources[i];
        if (!inputSchema.TryGetColumnIndex(srcName, out int srcCol))
        {
            throw Host.ExceptSchemaMismatch(nameof(inputSchema), "input", srcName);
        }
        sources[i] = srcCol;
        var curType = inputSchema[srcCol].Type;
        VectorType curVectorType = curType as VectorType;
        // A scalar input counts as a single slot of the scalar's own type.
        DataViewType currentItemType = curVectorType?.ItemType ?? curType;
        int currentValueCount = curVectorType?.Size ?? 1;
        if (itemType == null)
        {
            itemType = currentItemType;
            totalSize = currentValueCount;
        }
        else if (currentItemType.Equals(itemType))
        {
            // If any one input is variable length, then the output is variable length.
            if (totalSize == 0 || currentValueCount == 0)
            {
                totalSize = 0;
            }
            else
            {
                totalSize += currentValueCount;
            }
        }
        else
        {
            throw Host.ExceptSchemaMismatch(nameof(inputSchema), "input", srcName, itemType.ToString(), curType.ToString());
        }
        if (isNormalized && !inputSchema[srcCol].IsNormalized())
        {
            isNormalized = false;
        }
        if (MetadataUtils.TryGetCategoricalFeatureIndices(inputSchema, srcCol, out int[] typeCat))
/// <summary>Computes the sum of every element of the matrix.</summary> /// <param name="m">Input Matrix.</param> /// <param name="i">Zero-based index of the.</param> /// <param name="t">Row or Column sum.</param> /// <returns>sum.</returns> public static double Sum(Matrix m, int i, VectorType t) { return m[i, t].Sum(); }
private static CountAggregator GetVecAggregator <T>(Row row, VectorType colType, int colSrc) { return(new CountAggregator <T>(colType, row.GetGetter <VBuffer <T> >(colSrc))); }
/// <summary>Correlations.</summary> /// <param name="source">Source for the.</param> /// <param name="t">(Optional) Row or Column sum.</param> /// <returns>A Matrix.</returns> public static Matrix Correlation(Matrix source, VectorType t = VectorType.Col) { var length = t == VectorType.Row ? source.Rows : source.Cols; var m = new Matrix(length); for (var i = 0; i < length; i++) { for (var j = i; j < length; j++) { // symmetric matrix m[i, j] = m[j, i] = source[i, t].Correlation(source[j, t]); } } return m; }
public Matrix this[Func<Vector, bool> f, VectorType t] { get { int count = 0; if (t == VectorType.Row) { for (int i = 0; i < Rows; i++) if (f(this[i, t])) count++; Matrix m = new Matrix(count, Cols); int j = -1; for (int i = 0; i < Rows; i++) if (f(this[i, t])) m[++j, t] = this[i, t]; return m; } else { for (int i = 0; i < Cols; i++) if (f(this[i, t])) count++; Matrix m = new Matrix(Rows, count); int j = -1; for (int i = 0; i < Cols; i++) if (f(this[i, t])) m[++j, t] = this[i, t]; return m; } } }
public Vector GetVector(int index, int from, int to, VectorType type) { double[] v = (double[])Array.CreateInstance(typeof(double), to - from + 1); for (int i = from, j = 0; i < to + 1; i++, j++) v[j] = this[index, type][i]; return new Vector(v); }
public Matrix Remove(int index, VectorType t) { int max = t == VectorType.Row ? Rows : Cols; int row = t == VectorType.Row ? Rows - 1 : Rows; int col = t == VectorType.Col ? Cols - 1 : Cols; Matrix m = new Matrix(row, col); int j = -1; for (int i = 0; i < max; i++) { if (i == index) continue; m[++j, t] = this[i, t]; } return m; }
public static VectorType __CreateInstance(VectorType.__Internal native, bool skipVTables = false) { return new VectorType(native, skipVTables); }
// Checks that all the label columns of the model have the same key type as their label column - including the same // cardinality and the same key values, and returns the cardinality of the label column key. private static int CheckKeyLabelColumnCore <T>(IHostEnvironment env, PredictorModel[] models, KeyType labelType, DataViewSchema schema, int labelIndex, VectorType keyValuesType) where T : IEquatable <T> { env.Assert(keyValuesType.ItemType.RawType == typeof(T)); env.AssertNonEmpty(models); var labelNames = default(VBuffer <T>); schema[labelIndex].GetKeyValues(ref labelNames); var classCount = labelNames.Length; var curLabelNames = default(VBuffer <T>); for (int i = 1; i < models.Length; i++) { var model = models[i]; var edv = new EmptyDataView(env, model.TransformModel.InputSchema); model.PrepareData(env, edv, out RoleMappedData rmd, out IPredictor pred); var labelInfo = rmd.Schema.Label.HasValue; if (!rmd.Schema.Label.HasValue) { throw env.Except("Training schema for model {0} does not have a label column", i); } var labelCol = rmd.Schema.Label.Value; var curLabelType = labelCol.Type as KeyType; if (!labelType.Equals(curLabelType)) { throw env.Except("Label column of model {0} has different type than model 0", i); } var mdType = labelCol.Annotations.Schema.GetColumnOrNull(AnnotationUtils.Kinds.KeyValues)?.Type; if (!mdType.Equals(keyValuesType)) { throw env.Except("Label column of model {0} has different key value type than model 0", i); } labelCol.GetKeyValues(ref curLabelNames); if (!AreEqual(in labelNames, in curLabelNames)) { throw env.Except("Label of model {0} has different values than model 0", i); } } return(classCount); }
private VectorType(VectorType.__Internal native, bool skipVTables = false) : this(__CopyValue(native), skipVTables) { __ownsNativeInstance = true; NativeToManagedMap[__Instance] = this; }
// Loads all relevant data for whitening training into memory.
// For each requested column, decides how many rows to use (capped per-column by its
// MaxRow setting and so that cslot * rows fits in int.MaxValue), then densifies that
// many rows of the column into a flat float array in a single cursor pass.
private static float[][] LoadDataAsDense(IHostEnvironment env, IChannel ch, IDataView inputData, out int[] actualRowCounts, DataViewType[] srcTypes, int[] cols, params VectorWhiteningEstimator.ColumnInfo[] columns)
{
    long crowData = GetRowCount(inputData, columns);
    var columnData = new float[columns.Length][];
    actualRowCounts = new int[columns.Length];
    int maxActualRowCount = 0;
    for (int i = 0; i < columns.Length; i++)
    {
        VectorType vectorType = srcTypes[i] as VectorType;
        ch.Assert(vectorType != null && vectorType.IsKnownSize);
        // Use not more than MaxRow number of rows.
        var ex = columns[i];
        if (crowData <= ex.MaxRow)
        {
            actualRowCounts[i] = (int)crowData;
        }
        else
        {
            ch.Info(MessageSensitivity.Schema, "Only {0:N0} rows of column '{1}' will be used for whitening transform.", ex.MaxRow, columns[i].Name);
            actualRowCounts[i] = ex.MaxRow;
        }
        int cslot = vectorType.Size;
        // Check that total number of values in matrix does not exceed int.MaxValue and adjust row count if necessary.
        if ((long)cslot * actualRowCounts[i] > int.MaxValue)
        {
            actualRowCounts[i] = int.MaxValue / cslot;
            ch.Info(MessageSensitivity.Schema, "Only {0:N0} rows of column '{1}' will be used for whitening transform.", actualRowCounts[i], columns[i].Name);
        }
        columnData[i] = new float[cslot * actualRowCounts[i]];
        if (actualRowCounts[i] > maxActualRowCount)
        {
            maxActualRowCount = actualRowCounts[i];
        }
    }
    var idxDst = new int[columns.Length];
    // Single cursor pass over just the needed columns; each column stops filling once
    // its own row budget is reached.
    using (var cursor = inputData.GetRowCursor(inputData.Schema.Where(c => cols.Any(col => c.Index == col))))
    {
        var getters = new ValueGetter <VBuffer <float> > [columns.Length];
        for (int i = 0; i < columns.Length; i++)
        {
            getters[i] = cursor.GetGetter <VBuffer <float> >(cols[i]);
        }
        var val = default(VBuffer <float>);
        int irow = 0;
        while (irow < maxActualRowCount && cursor.MoveNext())
        {
            for (int i = 0; i < columns.Length; i++)
            {
                if (irow >= actualRowCounts[i] || columnData[i].Length == 0)
                {
                    continue;
                }
                getters[i](ref val);
                val.CopyTo(columnData[i], idxDst[i]);
                idxDst[i] += srcTypes[i].GetValueCount();
            }
            irow++;
        }
#if DEBUG
        for (int i = 0; i < columns.Length; i++)
        {
            ch.Assert(idxDst[i] == columnData[i].Length);
        }
#endif
    }
    return(columnData);
}
/// <summary> /// returns col/row vector at index j /// </summary> /// <param name="i">Col/Row</param> /// <param name="t">Row or Column</param> /// <returns>Vector</returns> public virtual Vector this[int i, VectorType t] { get { // switch it up if using a transposed version if (_asTransposeRef) t = t == VectorType.Row ? VectorType.Col : VectorType.Row; if (t == VectorType.Row) { if (i >= Rows) throw new IndexOutOfRangeException(); return new Vector(_matrix, i, true); } else { if (i >= Cols) throw new IndexOutOfRangeException(); return new Vector(_matrix, i); } } set { if (_asTransposeRef) throw new InvalidOperationException("Cannot modify matrix in read-only transpose mode!"); if (t == VectorType.Row) { if (i >= Rows) throw new IndexOutOfRangeException(); if (value.Length > Cols) throw new InvalidOperationException(string.Format("Vector has lenght larger then {0}", Cols)); for (int k = 0; k < Cols; k++) _matrix[i][k] = value[k]; } else { if (i >= Cols) throw new IndexOutOfRangeException(); if (value.Length > Rows) throw new InvalidOperationException(string.Format("Vector has lenght larger then {0}", Cols)); for (int k = 0; k < Rows; k++) _matrix[k][i] = value[k]; } } }
// Writes the dataset out in the transposed (slot-wise) binary format: a zeroed header
// placeholder, one sub-IDV for schema/metadata (optionally with row-wise data), one
// sub-IDV per column's slot view, an offset/length table, and finally the real header
// marshalled back to the start of the stream.
private void SaveTransposedData(IChannel ch, Stream stream, ITransposeDataView data, int[] cols)
{
    _host.AssertValue(ch);
    ch.AssertValue(stream);
    ch.AssertValue(data);
    ch.AssertNonEmpty(cols);
    ch.Assert(stream.CanSeek);
    // Initialize what we can in the header, though we will not be writing out things in the
    // header until we have confidence that things were written out correctly.
    TransposeLoader.Header header = default(TransposeLoader.Header);
    header.Signature = TransposeLoader.Header.SignatureValue;
    header.Version = TransposeLoader.Header.WriterVersion;
    header.CompatibleVersion = TransposeLoader.Header.WriterVersion;
    VectorType slotType = data.TransposeSchema.GetSlotType(cols[0]);
    ch.AssertValue(slotType);
    header.RowCount = slotType.ValueCount;
    header.ColumnCount = cols.Length;
    // We keep track of the offsets of the start of each sub-IDV, for use in writing out the
    // offsets/length table later.
    List <long> offsets = new List <long>();
    // First write a bunch of zeros at the head, as a placeholder for the header that
    // will go there assuming we can successfully load it. We'll keep this array around
    // for the real marshalling and writing of the header bytes structure.
    byte[] headerBytes = new byte[TransposeLoader.Header.HeaderSize];
    stream.Write(headerBytes, 0, headerBytes.Length);
    offsets.Add(stream.Position);
    // This is a convenient delegate to write out an IDV substream, then save the offsets
    // where writing stopped to the offsets list.
    Action <string, IDataView> viewAction = (name, view) =>
    {
        using (var substream = new SubsetStream(stream))
        {
            _internalSaver.SaveData(substream, view, Utils.GetIdentityPermutation(view.Schema.ColumnCount));
            substream.Seek(0, SeekOrigin.End);
            ch.Info("Wrote {0} data view in {1} bytes", name, substream.Length);
        }
        offsets.Add(stream.Position);
    };
    // First write out the no-row data, limited to these columns.
    IDataView subdata = new ChooseColumnsByIndexTransform(_host, new ChooseColumnsByIndexTransform.Arguments() { Index = cols }, data);
    // If we want the "dual mode" row-wise and slot-wise file, don't filter out anything.
    if (!_writeRowData)
    {
        subdata = SkipTakeFilter.Create(_host, new SkipTakeFilter.TakeArguments() { Count = 0 }, subdata);
    }
    string msg = _writeRowData ? "row-wise data, schema, and metadata" : "schema and metadata";
    viewAction(msg, subdata);
    foreach (var col in cols)
    {
        viewAction(data.Schema.GetColumnName(col), new TransposerUtils.SlotDataView(_host, data, col));
    }
    // Wrote out the dataview. Write out the table offset.
    using (var writer = new BinaryWriter(stream, Encoding.UTF8, leaveOpen: true))
    {
        // Format of the table is offset, length, both as 8-byte integers.
        // As it happens we wrote things out as adjacent sub-IDVs, so the
        // length can be derived from the offsets. The first will be the
        // start of the first sub-IDV, and all subsequent entries will be
        // the start/end of the current/next sub-IDV, respectively, so a total
        // of cols.Length + 2 entries.
        ch.Assert(offsets.Count == cols.Length + 2);
        ch.Assert(offsets[offsets.Count - 1] == stream.Position);
        header.SubIdvTableOffset = stream.Position;
        for (int c = 1; c < offsets.Count; ++c)
        {
            // 8-byte int for offsets, 8-byte int for length.
            writer.Write(offsets[c - 1]);
            writer.Write(offsets[c] - offsets[c - 1]);
        }
        header.TailOffset = stream.Position;
        writer.Write(TransposeLoader.Header.TailSignatureValue);
        // Now we are confident that things will work, so write it out.
        unsafe
        {
            Marshal.Copy(new IntPtr(&header), headerBytes, 0, Marshal.SizeOf(typeof(Header)));
        }
        writer.Seek(0, SeekOrigin.Begin);
        writer.Write(headerBytes);
    }
}
/// <summary> /// In place centering. /// WARNING: WILL UPDATE MATRIX! /// </summary> /// <param name="t"></param> public Matrix Center(VectorType t) { int max = t == VectorType.Row ? Rows : Cols; for (int i = 0; i < max; i++) this[i, t] -= this[i, t].Mean(); return this; }
// Decodes this method's IL byte stream into CilInstruction objects: reads each
// opcode, resolves any inline operand (parameter, local, field, method, type,
// string, branch target, switch table, ...) against the metadata tables, and
// records the instruction together with its byte offset. Unknown opcodes are
// reported through the error container rather than aborting the walk.
private bool PopulateCilInstructions()
{
    MethodBodyDocument document = new MethodBodyDocument(this.MethodDefinition);
    MemoryReader memReader = new MemoryReader(_methodIL.EncodedILMemoryBlock);
    List <CilInstruction> instrList = new List <CilInstruction>();
    while (memReader.NotEndOfBytes)
    {
        object /*?*/ value = null;
        uint offset = (uint)memReader.Offset;
        OperationCode cilOpCode = memReader.ReadOpcode();
        switch (cilOpCode)
        {
            case OperationCode.Nop:
            case OperationCode.Break:
                break;
            // Shorthand ldarg.N/ldloc.N/stloc.N opcodes encode the index in the opcode itself.
            case OperationCode.Ldarg_0:
            case OperationCode.Ldarg_1:
            case OperationCode.Ldarg_2:
            case OperationCode.Ldarg_3:
                value = this.GetParameter((uint)(cilOpCode - OperationCode.Ldarg_0));
                break;
            case OperationCode.Ldloc_0:
            case OperationCode.Ldloc_1:
            case OperationCode.Ldloc_2:
            case OperationCode.Ldloc_3:
                value = this.GetLocal((uint)(cilOpCode - OperationCode.Ldloc_0));
                break;
            case OperationCode.Stloc_0:
            case OperationCode.Stloc_1:
            case OperationCode.Stloc_2:
            case OperationCode.Stloc_3:
                value = this.GetLocal((uint)(cilOpCode - OperationCode.Stloc_0));
                break;
            case OperationCode.Ldarg_S:
            case OperationCode.Ldarga_S:
            case OperationCode.Starg_S:
                value = this.GetParameter(memReader.ReadByte());
                break;
            case OperationCode.Ldloc_S:
            case OperationCode.Ldloca_S:
            case OperationCode.Stloc_S:
                value = this.GetLocal(memReader.ReadByte());
                break;
            case OperationCode.Ldnull:
            case OperationCode.Ldc_I4_M1:
            case OperationCode.Ldc_I4_0:
            case OperationCode.Ldc_I4_1:
            case OperationCode.Ldc_I4_2:
            case OperationCode.Ldc_I4_3:
            case OperationCode.Ldc_I4_4:
            case OperationCode.Ldc_I4_5:
            case OperationCode.Ldc_I4_6:
            case OperationCode.Ldc_I4_7:
            case OperationCode.Ldc_I4_8:
                break;
            case OperationCode.Ldc_I4_S:
                value = (int)memReader.ReadSByte();
                break;
            case OperationCode.Ldc_I4:
                value = memReader.ReadInt32();
                break;
            case OperationCode.Ldc_I8:
                value = memReader.ReadInt64();
                break;
            case OperationCode.Ldc_R4:
                value = memReader.ReadSingle();
                break;
            case OperationCode.Ldc_R8:
                value = memReader.ReadDouble();
                break;
            case OperationCode.Dup:
            case OperationCode.Pop:
                break;
            case OperationCode.Jmp:
                value = this.GetMethod(memReader.ReadUInt32());
                break;
            case OperationCode.Call:
            {
                IMethodReference methodReference = this.GetMethod(memReader.ReadUInt32());
                IArrayTypeReference /*?*/ arrayType = methodReference.ContainingType as IArrayTypeReference;
                // Calls on array pseudo-methods are rewritten as special array opcodes.
                if (arrayType != null)
                {
                    if (methodReference.Name.UniqueKey == this.PEFileToObjectModel.NameTable.GetNameFor("Set").UniqueKey)
                    {
                        cilOpCode = OperationCode.Array_Set;
                    }
                    else if (methodReference.Name.UniqueKey == this.PEFileToObjectModel.NameTable.Get.UniqueKey)
                    {
                        cilOpCode = OperationCode.Array_Get;
                    }
                    else if (methodReference.Name.UniqueKey == this.PEFileToObjectModel.NameTable.GetNameFor("Address").UniqueKey)
                    {
                        cilOpCode = OperationCode.Array_Addr;
                    }
                    value = arrayType;
                }
                else
                {
                    value = methodReference;
                }
            }
            break;
            case OperationCode.Calli:
                value = this.GetFunctionPointerType(memReader.ReadUInt32());
                break;
            case OperationCode.Ret:
                break;
            case OperationCode.Br_S:
            case OperationCode.Brfalse_S:
            case OperationCode.Brtrue_S:
            case OperationCode.Beq_S:
            case OperationCode.Bge_S:
            case OperationCode.Bgt_S:
            case OperationCode.Ble_S:
            case OperationCode.Blt_S:
            case OperationCode.Bne_Un_S:
            case OperationCode.Bge_Un_S:
            case OperationCode.Bgt_Un_S:
            case OperationCode.Ble_Un_S:
            case OperationCode.Blt_Un_S:
            {
                // Short-form branch: 1-byte signed displacement relative to the next instruction.
                uint jumpOffset = (uint)(memReader.Offset + 1 + memReader.ReadSByte());
                if (jumpOffset >= _endOfMethodOffset)
                {
                    // Error...
                }
                value = jumpOffset;
            }
            break;
            case OperationCode.Br:
            case OperationCode.Brfalse:
            case OperationCode.Brtrue:
            case OperationCode.Beq:
            case OperationCode.Bge:
            case OperationCode.Bgt:
            case OperationCode.Ble:
            case OperationCode.Blt:
            case OperationCode.Bne_Un:
            case OperationCode.Bge_Un:
            case OperationCode.Bgt_Un:
            case OperationCode.Ble_Un:
            case OperationCode.Blt_Un:
            {
                // Long-form branch: 4-byte signed displacement relative to the next instruction.
                uint jumpOffset = (uint)(memReader.Offset + 4 + memReader.ReadInt32());
                if (jumpOffset >= _endOfMethodOffset)
                {
                    // Error...
                }
                value = jumpOffset;
            }
            break;
            case OperationCode.Switch:
            {
                uint numTargets = memReader.ReadUInt32();
                uint[] result = new uint[numTargets];
                // Targets are relative to the first byte after the whole switch table.
                uint asOffset = memReader.Offset + numTargets * 4;
                for (int i = 0; i < numTargets; i++)
                {
                    uint targetAddress = memReader.ReadUInt32() + asOffset;
                    if (targetAddress >= _endOfMethodOffset)
                    {
                        // Error...
                    }
                    result[i] = targetAddress;
                }
                value = result;
            }
            break;
            case OperationCode.Ldind_I1:
            case OperationCode.Ldind_U1:
            case OperationCode.Ldind_I2:
            case OperationCode.Ldind_U2:
            case OperationCode.Ldind_I4:
            case OperationCode.Ldind_U4:
            case OperationCode.Ldind_I8:
            case OperationCode.Ldind_I:
            case OperationCode.Ldind_R4:
            case OperationCode.Ldind_R8:
            case OperationCode.Ldind_Ref:
            case OperationCode.Stind_Ref:
            case OperationCode.Stind_I1:
            case OperationCode.Stind_I2:
            case OperationCode.Stind_I4:
            case OperationCode.Stind_I8:
            case OperationCode.Stind_R4:
            case OperationCode.Stind_R8:
            case OperationCode.Add:
            case OperationCode.Sub:
            case OperationCode.Mul:
            case OperationCode.Div:
            case OperationCode.Div_Un:
            case OperationCode.Rem:
            case OperationCode.Rem_Un:
            case OperationCode.And:
            case OperationCode.Or:
            case OperationCode.Xor:
            case OperationCode.Shl:
            case OperationCode.Shr:
            case OperationCode.Shr_Un:
            case OperationCode.Neg:
            case OperationCode.Not:
            case OperationCode.Conv_I1:
            case OperationCode.Conv_I2:
            case OperationCode.Conv_I4:
            case OperationCode.Conv_I8:
            case OperationCode.Conv_R4:
            case OperationCode.Conv_R8:
            case OperationCode.Conv_U4:
            case OperationCode.Conv_U8:
                break;
            case OperationCode.Callvirt:
                value = this.GetMethod(memReader.ReadUInt32());
                break;
            case OperationCode.Cpobj:
            case OperationCode.Ldobj:
                value = this.GetType(memReader.ReadUInt32());
                break;
            case OperationCode.Ldstr:
                value = this.GetUserStringForToken(memReader.ReadUInt32());
                break;
            case OperationCode.Newobj:
            {
                IMethodReference methodReference = this.GetMethod(memReader.ReadUInt32());
                IArrayTypeReference /*?*/ arrayType = methodReference.ContainingType as IArrayTypeReference;
                // Multidimensional-array constructors become special array-create opcodes.
                if (arrayType != null && !arrayType.IsVector)
                {
                    uint numParam = IteratorHelper.EnumerableCount(methodReference.Parameters);
                    if (numParam != arrayType.Rank)
                    {
                        cilOpCode = OperationCode.Array_Create_WithLowerBound;
                    }
                    else
                    {
                        cilOpCode = OperationCode.Array_Create;
                    }
                    value = arrayType;
                }
                else
                {
                    value = methodReference;
                }
            }
            break;
            case OperationCode.Castclass:
            case OperationCode.Isinst:
                value = this.GetType(memReader.ReadUInt32());
                break;
            case OperationCode.Conv_R_Un:
                break;
            case OperationCode.Unbox:
                value = this.GetType(memReader.ReadUInt32());
                break;
            case OperationCode.Throw:
                break;
            case OperationCode.Ldfld:
            case OperationCode.Ldflda:
            case OperationCode.Stfld:
            case OperationCode.Ldsfld:
            case OperationCode.Ldsflda:
            case OperationCode.Stsfld:
                value = this.GetField(memReader.ReadUInt32());
                break;
            case OperationCode.Stobj:
                value = this.GetType(memReader.ReadUInt32());
                break;
            case OperationCode.Conv_Ovf_I1_Un:
            case OperationCode.Conv_Ovf_I2_Un:
            case OperationCode.Conv_Ovf_I4_Un:
            case OperationCode.Conv_Ovf_I8_Un:
            case OperationCode.Conv_Ovf_U1_Un:
            case OperationCode.Conv_Ovf_U2_Un:
            case OperationCode.Conv_Ovf_U4_Un:
            case OperationCode.Conv_Ovf_U8_Un:
            case OperationCode.Conv_Ovf_I_Un:
            case OperationCode.Conv_Ovf_U_Un:
                break;
            case OperationCode.Box:
                value = this.GetType(memReader.ReadUInt32());
                break;
            case OperationCode.Newarr:
            {
                ITypeReference elementType = this.GetType(memReader.ReadUInt32());
                IModuleTypeReference /*?*/ moduleTypeReference = elementType as IModuleTypeReference;
                if (moduleTypeReference != null)
                {
                    value = new VectorType(this.PEFileToObjectModel, 0xFFFFFFFF, moduleTypeReference);
                }
                else
                {
                    value = Dummy.ArrayType;
                }
            }
            break;
            case OperationCode.Ldlen:
                break;
            case OperationCode.Ldelema:
                value = this.GetType(memReader.ReadUInt32());
                break;
            case OperationCode.Ldelem_I1:
            case OperationCode.Ldelem_U1:
            case OperationCode.Ldelem_I2:
            case OperationCode.Ldelem_U2:
            case OperationCode.Ldelem_I4:
            case OperationCode.Ldelem_U4:
            case OperationCode.Ldelem_I8:
            case OperationCode.Ldelem_I:
            case OperationCode.Ldelem_R4:
            case OperationCode.Ldelem_R8:
            case OperationCode.Ldelem_Ref:
            case OperationCode.Stelem_I:
            case OperationCode.Stelem_I1:
            case OperationCode.Stelem_I2:
            case OperationCode.Stelem_I4:
            case OperationCode.Stelem_I8:
            case OperationCode.Stelem_R4:
            case OperationCode.Stelem_R8:
            case OperationCode.Stelem_Ref:
                break;
            case OperationCode.Ldelem:
                value = this.GetType(memReader.ReadUInt32());
                break;
            case OperationCode.Stelem:
                value = this.GetType(memReader.ReadUInt32());
                break;
            case OperationCode.Unbox_Any:
                value = this.GetType(memReader.ReadUInt32());
                break;
            case OperationCode.Conv_Ovf_I1:
            case OperationCode.Conv_Ovf_U1:
            case OperationCode.Conv_Ovf_I2:
            case OperationCode.Conv_Ovf_U2:
            case OperationCode.Conv_Ovf_I4:
            case OperationCode.Conv_Ovf_U4:
            case OperationCode.Conv_Ovf_I8:
            case OperationCode.Conv_Ovf_U8:
                break;
            case OperationCode.Refanyval:
                value = this.GetType(memReader.ReadUInt32());
                break;
            case OperationCode.Ckfinite:
                break;
            case OperationCode.Mkrefany:
                value = this.GetType(memReader.ReadUInt32());
                break;
            case OperationCode.Ldtoken:
                value = this.GetRuntimeHandleFromToken(memReader.ReadUInt32());
                break;
            case OperationCode.Conv_U2:
            case OperationCode.Conv_U1:
            case OperationCode.Conv_I:
            case OperationCode.Conv_Ovf_I:
            case OperationCode.Conv_Ovf_U:
            case OperationCode.Add_Ovf:
            case OperationCode.Add_Ovf_Un:
            case OperationCode.Mul_Ovf:
            case OperationCode.Mul_Ovf_Un:
            case OperationCode.Sub_Ovf:
            case OperationCode.Sub_Ovf_Un:
            case OperationCode.Endfinally:
                break;
            case OperationCode.Leave:
            {
                uint leaveOffset = (uint)(memReader.Offset + 4 + memReader.ReadInt32());
                if (leaveOffset >= _endOfMethodOffset)
                {
                    // Error...
                }
                value = leaveOffset;
            }
            break;
            case OperationCode.Leave_S:
            {
                uint leaveOffset = (uint)(memReader.Offset + 1 + memReader.ReadSByte());
                if (leaveOffset >= _endOfMethodOffset)
                {
                    // Error...
                }
                value = leaveOffset;
            }
            break;
            case OperationCode.Stind_I:
            case OperationCode.Conv_U:
            case OperationCode.Arglist:
            case OperationCode.Ceq:
            case OperationCode.Cgt:
            case OperationCode.Cgt_Un:
            case OperationCode.Clt:
            case OperationCode.Clt_Un:
                break;
            case OperationCode.Ldftn:
            case OperationCode.Ldvirtftn:
                value = this.GetMethod(memReader.ReadUInt32());
                break;
            case OperationCode.Ldarg:
            case OperationCode.Ldarga:
            case OperationCode.Starg:
                value = this.GetParameter(memReader.ReadUInt16());
                break;
            case OperationCode.Ldloc:
            case OperationCode.Ldloca:
            case OperationCode.Stloc:
                value = this.GetLocal(memReader.ReadUInt16());
                break;
            case OperationCode.Localloc:
                value = new PointerType(this.PEFileToObjectModel, 0xFFFFFFFF, this.PEFileToObjectModel.SystemVoid);
                break;
            case OperationCode.Endfilter:
                break;
            case OperationCode.Unaligned_:
                value = memReader.ReadByte();
                break;
            case OperationCode.Volatile_:
            case OperationCode.Tail_:
                break;
            case OperationCode.Initobj:
                value = this.GetType(memReader.ReadUInt32());
                break;
            case OperationCode.Constrained_:
                value = this.GetType(memReader.ReadUInt32());
                break;
            case OperationCode.Cpblk:
            case OperationCode.Initblk:
                break;
            case OperationCode.No_:
                value = (OperationCheckFlags)memReader.ReadByte();
                break;
            case OperationCode.Rethrow:
                break;
            case OperationCode.Sizeof:
                value = this.GetType(memReader.ReadUInt32());
                break;
            case OperationCode.Refanytype:
            case OperationCode.Readonly_:
                break;
            default:
                this.PEFileToObjectModel.PEFileReader.ErrorContainer.AddILError(this.MethodDefinition, offset, MetadataReaderErrorKind.UnknownILInstruction);
                break;
        }
        MethodBodyLocation location = new MethodBodyLocation(document, offset);
        instrList.Add(new CilInstruction(cilOpCode, location, value));
    }
    this.MethodBody.SetCilInstructions(new EnumerableArrayWrapper <CilInstruction, IOperation>(instrList.ToArray(), Dummy.Operation));
    return(true);
}
//-------------- destructive ops /// <summary> /// In place normalization. /// WARNING: WILL UPDATE MATRIX! /// </summary> /// <param name="t"></param> public void Normalize(VectorType t) { int max = t == VectorType.Row ? Rows : Cols; for (int i = 0; i < max; i++) this[i, t] /= this[i, t].Norm(); }
// Deserialization constructor: reads the model from the binary stream in the format
// documented below, supporting both dense and CSR-sparse weight matrices, then
// optionally loads label names and model statistics from separate sub-streams.
private MulticlassLogisticRegressionPredictor(IHostEnvironment env, ModelLoadContext ctx) : base(env, RegistrationName, ctx)
{
    // *** Binary format ***
    // int: number of features
    // int: number of classes = number of biases
    // float[]: biases
    // (weight matrix, in CSR if sparse)
    // (see https://netlib.org/linalg/html_templates/node91.html#SECTION00931100000000000000)
    // int: number of row start indices (_numClasses + 1 if sparse, 0 if dense)
    // int[]: row start indices
    // int: total number of column indices (0 if dense)
    // int[]: column index of each non-zero weight
    // int: total number of non-zero weights (same as number of column indices if sparse, num of classes * num of features if dense)
    // float[]: non-zero weights
    // int[]: Id of label names (optional, in a separate stream)
    // LinearModelStatistics: model statistics (optional, in a separate stream)
    _numFeatures = ctx.Reader.ReadInt32();
    Host.CheckDecode(_numFeatures >= 1);
    _numClasses = ctx.Reader.ReadInt32();
    Host.CheckDecode(_numClasses >= 1);
    _biases = ctx.Reader.ReadFloatArray(_numClasses);
    int numStarts = ctx.Reader.ReadInt32();
    if (numStarts == 0)
    {
        // The weights are entirely dense.
        int numIndices = ctx.Reader.ReadInt32();
        Host.CheckDecode(numIndices == 0);
        int numWeights = ctx.Reader.ReadInt32();
        Host.CheckDecode(numWeights == _numClasses * _numFeatures);
        _weights = new VBuffer <float> [_numClasses];
        for (int i = 0; i < _weights.Length; i++)
        {
            var w = ctx.Reader.ReadFloatArray(_numFeatures);
            _weights[i] = new VBuffer <float>(_numFeatures, w);
        }
        _weightsDense = _weights;
    }
    else
    {
        // Read weight matrix as CSR.
        Host.CheckDecode(numStarts == _numClasses + 1);
        int[] starts = ctx.Reader.ReadIntArray(numStarts);
        Host.CheckDecode(starts[0] == 0);
        Host.CheckDecode(Utils.IsSorted(starts));
        int numIndices = ctx.Reader.ReadInt32();
        Host.CheckDecode(numIndices == starts[starts.Length - 1]);
        var indices = new int[_numClasses][];
        for (int i = 0; i < indices.Length; i++)
        {
            indices[i] = ctx.Reader.ReadIntArray(starts[i + 1] - starts[i]);
            Host.CheckDecode(Utils.IsIncreasing(0, indices[i], _numFeatures));
        }
        int numValues = ctx.Reader.ReadInt32();
        Host.CheckDecode(numValues == numIndices);
        _weights = new VBuffer <float> [_numClasses];
        for (int i = 0; i < _weights.Length; i++)
        {
            float[] values = ctx.Reader.ReadFloatArray(starts[i + 1] - starts[i]);
            _weights[i] = new VBuffer <float>(_numFeatures, Utils.Size(values), values, indices[i]);
        }
    }
    WarnOnOldNormalizer(ctx, GetType(), Host);
    InputType = new VectorType(NumberType.R4, _numFeatures);
    OutputType = new VectorType(NumberType.R4, _numClasses);
    // REVIEW: Should not save the label names duplicately with the predictor again.
    // Get it from the label column schema metadata instead.
    string[] labelNames = null;
    if (ctx.TryLoadBinaryStream(LabelNamesSubModelFilename, r => labelNames = LoadLabelNames(ctx, r)))
    {
        _labelNames = labelNames;
    }
    string statsDir = Path.Combine(ctx.Directory ?? "", ModelStatsSubModelFilename);
    using (var statsEntry = ctx.Repository.OpenEntryOrNull(statsDir, ModelLoadContext.ModelStreamName))
    {
        if (statsEntry == null)
        {
            _stats = null;
        }
        else
        {
            using (var statsCtx = new ModelLoadContext(ctx.Repository, statsEntry, statsDir))
                _stats = LinearModelStatistics.Create(Host, statsCtx);
        }
    }
}
public void Swap(int from, int to, VectorType t) { var temp = this[from, t].Copy(); this[from, t] = this[to, t]; this[to, t] = temp; }
public override TypePrinterResult VisitVectorType(VectorType vectorType, TypeQualifiers quals) { // an incomplete implementation but we'd hardly need anything better return("__attribute__()"); }