public static void FitMnistSimple()
{
    var model = new Sequential();
    model.Add(new Dense(512, activation: "relu", inputShape: new int[] { 784 }));
    model.Add(new Dropout(0.2));
    model.Add(new Dense(512, activation: "relu"));
    model.Add(new Dropout(0.2));
    model.Add(new Dense(10, activation: "softmax"));

    var optimizer = new SGD(lr: 0.01);
    model.Compile("categorical_crossentropy", optimizer, new string[] { "accuracy" });

    var xtrain = TensorUtils.Deserialize(new FileStream(GetDataPath("datasets/nda_mnist/mnist_xtrain.nda"), FileMode.Open));
    var ytrain = TensorUtils.Deserialize(new FileStream(GetDataPath("datasets/nda_mnist/mnist_ytrain.nda"), FileMode.Open));

    // Cast pixels to float32 and scale them into [0, 1]
    xtrain = xtrain.Cast(DType.Float32);
    xtrain = Ops.Div(null, xtrain, 255f);
    ytrain = ytrain.Cast(DType.Float32);

    model.Fit(xtrain, ytrain, batchSize: 128, epochs: 20);

    // Truncate any existing file before saving the model
    var stream = new FileStream("c:/ttt/mnist-simple.model", FileMode.OpenOrCreate, FileAccess.Write);
    stream.SetLength(0);
    model.Save(stream);
}
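// The only preprocessing in FitMnistSimple above is a byte -> float32 cast followed
// by division by 255. The same transform on a plain array, as a minimal standalone
// sketch (independent of the tensor library used above):
using System;
using System.Linq;

static class NormalizeSketch
{
    static void Main()
    {
        var pixels = new byte[] { 0, 36, 255 };
        // byte -> float32, then divide by 255 so values land in [0, 1]
        var scaled = pixels.Select(p => p / 255f).ToArray();
        Console.WriteLine(string.Join(", ", scaled.Select(v => v.ToString("F3"))));
        // Prints: 0.000, 0.141, 1.000
    }
}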
static void Main(string[] args)
{
    var modelPath = "models/resnet50.model";
    var width = 224;

    var tensor = JpegToTensor(GetDataPath("datasets/towers/et1.jpg"), width, width);
    var model = Sequential.Load(GetDataPath(modelPath));
    var prediction1 = model.Predict(tensor).Squeeze();

    tensor = JpegToTensor(GetDataPath("datasets/towers/et2.jpg"), width, width);
    var prediction2 = model.Predict(tensor).Squeeze();
    Console.WriteLine($"    Distance to picture of the same object: {TensorUtils.EuclideanDistance(prediction1, prediction2).ToString("F2")}");

    tensor = JpegToTensor(GetDataPath("datasets/towers/et3.jpg"), width, width);
    prediction2 = model.Predict(tensor).Squeeze();
    Console.WriteLine($"    Distance to picture of the same object: {TensorUtils.EuclideanDistance(prediction1, prediction2).ToString("F2")}");

    tensor = JpegToTensor(GetDataPath("datasets/towers/pt1.jpg"), width, width);
    prediction2 = model.Predict(tensor).Squeeze();
    Console.WriteLine($"    Distance to picture of a different object: {TensorUtils.EuclideanDistance(prediction1, prediction2).ToString("F2")}");
}
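// For reference, the distance printed above is the ordinary Euclidean (L2) distance
// between the two prediction vectors. A self-contained sketch over plain float
// arrays (the library's TensorUtils.EuclideanDistance operates on tensors instead):
using System;

static class DistanceSketch
{
    // sqrt(sum_i (a[i] - b[i])^2)
    static float EuclideanDistance(float[] a, float[] b)
    {
        if (a.Length != b.Length)
            throw new ArgumentException("Vectors must have the same length.");
        double sum = 0.0;
        for (var i = 0; i < a.Length; i++)
        {
            var d = a[i] - b[i];
            sum += d * d;
        }
        return (float)Math.Sqrt(sum);
    }

    static void Main()
    {
        // Embeddings of pictures of the same object should be closer
        // than embeddings of pictures of different objects.
        var p1 = new float[] { 0.1f, 0.9f, 0.3f };
        var p2 = new float[] { 0.2f, 0.8f, 0.25f };
        Console.WriteLine(EuclideanDistance(p1, p2).ToString("F2"));
    }
}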
// Update is called once per frame
void Update()
{
    var input_vector_obs = new Tensor(1, 1, 1, 8, new float[]
    {
        Target.position.x, Target.position.y, Target.position.z,
        this.transform.position.x, this.transform.position.y, this.transform.position.z,
        rBody.velocity.x, rBody.velocity.z
    });
    var input_eps = new Tensor(1, 1, 1, 1, new float[] { 0.2f });

    var dict = new Dictionary<string, Tensor>();
    dict["vector_observation"] = input_vector_obs;
    dict["epsilon"] = input_eps;

    worker.Execute(dict);

    //worker.Fetch("action_probs").PrintDataPart(24, "action_probs");
    //worker.Fetch("action").PrintDataPart(24, "action:");
    //Debug.Log(worker.Fetch("action").GetType());

    // Convert the Barracuda "action" output into a plain float array
    var out_ = TensorUtils.BarracudaToFloatArray(worker.Fetch("action"));
    int i = 0;
    foreach (var fc in out_)
    {
        FxFz[i] = (float)fc;
        Debug.Log((float)fc);
        i++;
    }

    AgentAction(FxFz);
    Debug.Log("valueEstimate");
    //UpdateVectorAction(FxFz);
    Debug.Log(GetValueEstimate());
}
public SymbolsGenerator(string name, params Tensor[] forbiddenTensors)
{
    CheckName(name);
    Name = name;

    var set = new HashSet<string>();
    foreach (Tensor f in forbiddenTensors)
    {
        var iterator = new FromChildToParentIterator(f);
        Tensor c;
        while ((c = iterator.Next()) != null)
        {
            if (TensorUtils.IsSymbol(c))
            {
                set.Add(c.ToString());
            }
        }
    }

    UsedNames = set.OrderBy(n => n).ToArray();
}
public static void FitMnist()
{
    var model = new Sequential();
    model.Add(new Conv2D(32, kernelSize: new int[] { 3, 3 }, inputShape: new int[] { 28, 28, 1 }, activation: "relu"));
    model.Add(new Conv2D(64, kernelSize: new int[] { 3, 3 }, activation: "relu"));
    // model.Add(new MaxPooling1D(poolSize: 2));
    model.Add(new MaxPooling2D(poolSize: new int[] { 2, 2 }));
    model.Add(new Dropout(0.25));
    model.Add(new Flatten());
    model.Add(new Dense(128, activation: "relu"));
    model.Add(new Dropout(0.5));
    model.Add(new Dense(10, activation: "softmax"));

    var optimizer = new SGD(lr: 0.01);
    model.Compile("categorical_crossentropy", optimizer, new string[] { "accuracy" });

    var xtrain = TensorUtils.Deserialize(new FileStream(GetDataPath("datasets/nda_mnist/mnist_xtrain.nda"), FileMode.Open));
    var ytrain = TensorUtils.Deserialize(new FileStream(GetDataPath("datasets/nda_mnist/mnist_ytrain.nda"), FileMode.Open));
    xtrain = xtrain.Cast(DType.Float32);
    xtrain = Ops.Div(null, xtrain, 255f);
    ytrain = ytrain.Cast(DType.Float32);

    model.Fit(xtrain, ytrain, batchSize: 128, epochs: 12);

    var stream = new FileStream("c:/ttt/mnist.model", FileMode.OpenOrCreate, FileAccess.Write);
    stream.SetLength(0);
    model.Save(stream);
}
public void RandomNormalTestTensorInt()
{
    var rn = new RandomNormal(1982);
    var t = new TensorProxy
    {
        valueType = TensorProxy.TensorType.Integer
    };
    Assert.Throws<NotImplementedException>(
        () => TensorUtils.FillTensorWithRandomNormal(t, rn));
}
public void RandomNormalTestDataNull()
{
    var rn = new RandomNormal(1982);
    var t = new TensorProxy
    {
        valueType = TensorProxy.TensorType.FloatingPoint
    };
    Assert.Throws<ArgumentNullException>(
        () => TensorUtils.FillTensorWithRandomNormal(t, rn));
}
protected List<TensorProxy> FetchBarracudaOutputs(string[] names)
{
    var outputs = new List<TensorProxy>();
    foreach (var name in names)
    {
        var output = _engine.Peek(name);
        outputs.Add(TensorUtils.TensorProxyFromBarracuda(output, name));
    }
    return outputs;
}
/// <summary>
/// Encode source sentences and output the encoded weights.
/// </summary>
/// <param name="g">The compute graph used to build the encoding operations.</param>
/// <param name="seqs">Source token id sequences, one list per sentence in the batch.</param>
/// <param name="encoder">The encoder to run.</param>
/// <param name="modelMetaData">Model metadata (source vocabulary, encoder type, options).</param>
/// <param name="embeddings">Token embedding weights.</param>
/// <param name="selfMask">Self-attention mask.</param>
/// <param name="posEmbeddings">Positional embedding weights.</param>
/// <param name="segmentEmbeddings">Segment embedding weights.</param>
/// <returns>The encoded representation of the batch.</returns>
private static IWeightTensor RunEncoder(IComputeGraph g, List<List<int>> seqs, IEncoder encoder, IModel modelMetaData,
    IWeightTensor embeddings, IWeightTensor selfMask, IWeightTensor posEmbeddings, IWeightTensor segmentEmbeddings)
{
    int batchSize = seqs.Count;
    var inputEmbs = TensorUtils.CreateTokensEmbeddings(seqs, g, embeddings, segmentEmbeddings, modelMetaData.SrcVocab,
        (float)Math.Sqrt(embeddings.Columns), enableTagEmbedding: modelMetaData.EnableTagEmbeddings);

    if (modelMetaData.EncoderType == EncoderTypeEnums.Transformer)
    {
        inputEmbs = PositionEmbedding.AddPositionEmbedding(g, posEmbeddings, batchSize, inputEmbs, 0.0f);
    }

    return encoder.Encode(inputEmbs, batchSize, g, selfMask);
}
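// RunEncoder scales token embeddings by sqrt(embeddings.Columns) and, for Transformer
// encoders, adds position embeddings — the standard recipe from "Attention Is All You
// Need". As context, a standalone sketch of the textbook sinusoidal position encoding
// (this is the classic formula, not necessarily the exact variant that
// PositionEmbedding.AddPositionEmbedding implements):
using System;

static class PositionalEncodingSketch
{
    // PE[pos, 2i]   = sin(pos / 10000^(2i / dModel))
    // PE[pos, 2i+1] = cos(pos / 10000^(2i / dModel))
    static float[,] Build(int maxLen, int dModel)
    {
        var pe = new float[maxLen, dModel];
        for (var pos = 0; pos < maxLen; pos++)
        {
            for (var i = 0; i < dModel; i += 2)
            {
                var angle = pos / Math.Pow(10000, (double)i / dModel);
                pe[pos, i] = (float)Math.Sin(angle);
                if (i + 1 < dModel)
                    pe[pos, i + 1] = (float)Math.Cos(angle);
            }
        }
        return pe;
    }

    static void Main()
    {
        var pe = Build(maxLen: 4, dModel: 8);
        Console.WriteLine(pe[2, 0].ToString("F4")); // sin(2) ≈ 0.9093
    }
}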
public void TestResizeTensor(int dimension)
{
    var alloc = new TensorCachingAllocator();
    var height = 64;
    var width = 84;
    var channels = 3;

    // Set shape to {1, ..., height, width, channels}
    // For 8D, the ... are all 1's
    var shape = new long[dimension];
    for (var i = 0; i < dimension; i++)
    {
        shape[i] = 1;
    }
    shape[dimension - 3] = height;
    shape[dimension - 2] = width;
    shape[dimension - 1] = channels;

    var intShape = new int[dimension];
    for (var i = 0; i < dimension; i++)
    {
        intShape[i] = (int)shape[i];
    }

    var tensorProxy = new TensorProxy
    {
        valueType = TensorProxy.TensorType.Integer,
        data = new Tensor(intShape),
        shape = shape,
    };

    // These should be invariant after the resize.
    Assert.AreEqual(height, tensorProxy.data.shape.height);
    Assert.AreEqual(width, tensorProxy.data.shape.width);
    Assert.AreEqual(channels, tensorProxy.data.shape.channels);

    TensorUtils.ResizeTensor(tensorProxy, 42, alloc);

    Assert.AreEqual(height, tensorProxy.shape[dimension - 3]);
    Assert.AreEqual(width, tensorProxy.shape[dimension - 2]);
    Assert.AreEqual(channels, tensorProxy.shape[dimension - 1]);
    Assert.AreEqual(height, tensorProxy.data.shape.height);
    Assert.AreEqual(width, tensorProxy.data.shape.width);
    Assert.AreEqual(channels, tensorProxy.data.shape.channels);

    alloc.Dispose();
}
public void TestPermute()
{
    // Matrix transpose
    var shape = new long[] { 45, 23 };
    var totalSize = shape.Aggregate((a, l) => a * l);
    var data = Enumerable.Range(1, (int)totalSize).Select(i => (float)i).ToArray();

    var t1 = TensorUtils.Create(shape, data);
    var t2 = TensorUtils.Create(shape, data);
    t2 = t2.Permute(1, 0);

    for (var i = 0; i < shape[0]; ++i)
    {
        for (var j = 0; j < shape[1]; ++j)
        {
            Assert.AreEqual(t1.GetElementAsFloat(i, j), t2.GetElementAsFloat(j, i));
        }
    }

    Assert.AreEqual(t1.Sizes[0], shape[0]);
    Assert.AreEqual(t1.Sizes[1], shape[1]);
    Assert.AreEqual(t2.Sizes[0], shape[1]);
    Assert.AreEqual(t2.Sizes[1], shape[0]);

    // ND transpose
    shape = new long[] { 600, 28, 37, 3 };
    totalSize = shape.Aggregate((a, l) => a * l);
    data = Enumerable.Range(1, (int)totalSize).Select(i => (float)i).ToArray();

    t1 = TensorUtils.Create(shape, data);
    t2 = t1.View(new long[] { shape[0], 1, shape[1], shape[2], shape[3] });
    t2 = t2.Permute(new int[] { 4, 3, 2, 1, 0 });

    for (var i = 0; i < shape[0]; ++i)
    {
        for (var j = 0; j < shape[1]; ++j)
        {
            for (var k = 0; k < shape[2]; ++k)
            {
                for (var l = 0; l < shape[3]; ++l)
                {
                    Assert.AreEqual(t1.GetElementAsFloat(i, j, k, l), t2.GetElementAsFloat(l, k, j, 0, i));
                }
            }
        }
    }
}
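// Permute implementations typically swap size/stride metadata instead of copying
// data. A minimal standalone sketch of that idea for a 2-D transpose, assuming a
// row-major buffer (the library's internals may differ):
using System;

static class PermuteSketch
{
    static void Main()
    {
        // A 2x3 row-major matrix stored as a flat buffer.
        long[] sizes = { 2, 3 };
        long[] strides = { 3, 1 };
        var data = new float[] { 1, 2, 3, 4, 5, 6 };

        // Permute(1, 0): swap sizes and strides; the buffer itself is untouched.
        long[] tSizes = { sizes[1], sizes[0] };
        long[] tStrides = { strides[1], strides[0] };

        // Element (j, i) of the transpose is element (i, j) of the original.
        for (var j = 0; j < tSizes[0]; j++)
            for (var i = 0; i < tSizes[1]; i++)
                Console.Write(data[j * tStrides[0] + i * tStrides[1]] + " ");
        Console.WriteLine(); // Prints: 1 4 2 5 3 6
    }
}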
public void RandomNormalTestTensor()
{
    var rn = new RandomNormal(1982);
    var t = new TensorProxy
    {
        valueType = TensorProxy.TensorType.FloatingPoint,
        data = new Tensor(1, 3, 4, 2)
    };

    TensorUtils.FillTensorWithRandomNormal(t, rn);

    var reference = new[]
    {
        -0.4315872f, -1.11074f, 0.3414804f, -1.130287f, 0.1413168f, -0.5105762f,
        -0.3027347f, -0.2645015f, 1.225356f, -0.02921959f, 0.3716498f, -1.092338f,
        0.9561074f, -0.5018106f, 1.167787f, -0.7763879f, -0.07491868f, 0.5396146f,
        -0.1377991f, 0.3331701f, 0.06144788f, 0.9520947f, 1.088157f, -1.177194f,
    };

    for (var i = 0; i < t.data.length; i++)
    {
        Assert.AreEqual(t.data[i], reference[i], 0.0001);
    }
}
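// The reference values above come from RandomNormal seeded with 1982, so the test is
// deterministic. A common way to derive Gaussian samples from a uniform PRNG is the
// Box-Muller transform; a minimal sketch of that technique (not necessarily the exact
// algorithm RandomNormal uses):
using System;

static class BoxMullerSketch
{
    // Turn two uniform samples u1, u2 in (0, 1] into one standard-normal sample:
    // sqrt(-2 ln u1) * cos(2 pi u2)
    static double NextGaussian(Random rng)
    {
        var u1 = 1.0 - rng.NextDouble(); // keep u1 > 0 to avoid log(0)
        var u2 = rng.NextDouble();
        return Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Cos(2.0 * Math.PI * u2);
    }

    static void Main()
    {
        var rng = new Random(1982); // fixed seed => reproducible sequence
        for (var i = 0; i < 4; i++)
            Console.WriteLine(NextGaussian(rng).ToString("F4"));
    }
}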
static void PredictMnist(string modelPath, string xtestPath, string ytestPath = null)
{
    var xtest = TensorUtils.Deserialize(File.OpenRead(xtestPath));
    xtest = xtest.Cast(DType.Float32);
    xtest = Ops.Div(null, xtest, 255f);
    // xtest = xtest.Narrow(0, 0, 101);

    var model = Sequential.Load(modelPath);
    var result = model.Predict(xtest, batchSize: 32);

    if (ytestPath == null)
    {
        return;
    }

    var ytest = TensorUtils.Deserialize(File.OpenRead(ytestPath));
    ytest = ytest.Cast(DType.Float32);
    // ytest = ytest.Narrow(0, 0, 101);
    ytest = Ops.Argmax(null, ytest, 1).Squeeze();

    var t = result.Narrow(0, 0, 11);
    // Console.WriteLine(t.Format());
    result = Ops.Argmax(null, result, 1).Squeeze();
    t = result.Narrow(0, 0, 11);
    // Console.WriteLine(t.Format());

    double sum = 0.0;
    for (var i = 0; i < ytest.Sizes[0]; ++i)
    {
        sum += (int)ytest.GetElementAsFloat(i) == (int)result.GetElementAsFloat(i) ? 1.0 : 0.0;
    }
    Console.WriteLine($"Accuracy: {sum / ytest.Sizes[0] * 100}%");
}
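// The accuracy loop in PredictMnist compares argmax indices of the predictions with
// the argmax-decoded labels. The same computation on plain arrays, as a standalone
// sketch (the sample values here are made up):
using System;

static class AccuracySketch
{
    // Index of the largest value in a row, i.e. the predicted class.
    static int ArgMax(float[] row)
    {
        var best = 0;
        for (var i = 1; i < row.Length; i++)
            if (row[i] > row[best]) best = i;
        return best;
    }

    static void Main()
    {
        var predictions = new[]
        {
            new float[] { 0.1f, 0.7f, 0.2f }, // argmax = 1
            new float[] { 0.8f, 0.1f, 0.1f }, // argmax = 0
        };
        var labels = new[] { 1, 2 };

        var correct = 0;
        for (var i = 0; i < labels.Length; i++)
            if (ArgMax(predictions[i]) == labels[i]) correct++;

        Console.WriteLine($"Accuracy: {100.0 * correct / labels.Length}%"); // 50%
    }
}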
public void TestLoadMnist()
{
    string path = "../../../datasets/nda_mnist/mnist_xtrain.nda";
    var nda = TensorUtils.Deserialize(File.OpenRead(path));
    CollectionAssert.AreEqual(nda.Sizes, new long[] { 60000, 28, 28 });
    Assert.AreEqual(nda.GetElementAsFloat(1, 0, 0), 0f);
    Assert.AreEqual(nda.GetElementAsFloat(0, 6, 9), 36f);

    // Cast the array to float
    nda = nda.Cast(DType.Float32);
    CollectionAssert.AreEqual(nda.Sizes, new long[] { 60000, 28, 28 });
    Assert.AreEqual(nda.GetElementAsFloat(1, 0, 0), 0f);
    Assert.AreEqual(nda.GetElementAsFloat(0, 6, 9), 36f);

    // Divide by 255
    nda = Ops.Div(null, nda, 255f);
    Assert.AreEqual(nda.GetElementAsFloat(1, 0, 0), 0f);
    Assert.AreEqual(nda.GetElementAsFloat(0, 6, 9), 36f / 255f, float.Epsilon);
    Assert.AreEqual(nda.GetElementAsFloat(0, 6, 9), 36.0 / 255, 0.000001);
}
/// <summary>
/// Predict gender/age for all faces which were detected.
/// </summary>
/// <param name="faceLocs">
/// Locations of all detected faces.
/// </param>
/// <param name="faces">
/// Data of all detected faces.
/// </param>
/// <returns>
/// A list of Result objects, each one holding the position and
/// predicted result of a face.
/// </returns>
protected List<Result> Fit(List<Location> faceLocs, List<byte[]> faces)
{
    int[] shape =
    {
        1, // We will perform tensor stacking inside the CreateTensor function
        Height,
        Width,
        Depth
    };

    // Convert the faces' data into one tensor
    var inputs = TensorUtils.CreateTensor(faces, shape);

    // Normalize the input tensor before passing it to the TensorFlow model.
    // This can substantially improve the model's accuracy.
    if (Preprocessors != null)
    {
        foreach (var preprocessor in Preprocessors)
        {
            inputs = preprocessor.Process(inputs);
        }
    }

    return Fit(faceLocs, inputs);
}
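// The Preprocessors loop above is a simple pipeline: each stage consumes the previous
// stage's output. A self-contained sketch of the same pattern on plain arrays (the
// IPreprocessor shape mirrors the snippet; the mean-subtraction stage is a
// hypothetical example):
using System;
using System.Collections.Generic;
using System.Linq;

interface IPreprocessor
{
    float[] Process(float[] input);
}

// Hypothetical stage: center the input at zero by subtracting its mean.
class MeanSubtraction : IPreprocessor
{
    public float[] Process(float[] input)
    {
        var mean = input.Average();
        return input.Select(v => v - mean).ToArray();
    }
}

static class PipelineSketch
{
    static void Main()
    {
        var preprocessors = new List<IPreprocessor> { new MeanSubtraction() };
        var inputs = new float[] { 10f, 20f, 30f };
        foreach (var p in preprocessors)
            inputs = p.Process(inputs); // each stage feeds the next
        Console.WriteLine(string.Join(", ", inputs)); // -10, 0, 10
    }
}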
public override string ToString()
{
    return TensorUtils.GetString(this);
}
public void TestResizeTensor(int dimension)
{
    if (dimension == 8)
    {
        // Barracuda 1.0.x doesn't support 8D tensors.
        // Barracuda 1.1.x does, but it initially broke ML-Agents support.
        // Unfortunately, the PackageInfo methods don't exist in earlier versions of the editor,
        // so just skip that variant of the test then.
        // It's unlikely, but possible, that we'll upgrade to a newer dependency of Barracuda,
        // in which case we should make sure this test is run then.
#if UNITY_2019_3_OR_NEWER
        var packageInfo = UnityEditor.PackageManager.PackageInfo.FindForAssembly(typeof(Tensor).Assembly);
        Assert.AreEqual("com.unity.barracuda", packageInfo.name);
        var barracuda8DSupport = new Version(1, 1, 0);
        var strippedBarracudaVersion = packageInfo.version.Replace("-preview", "");
        var version = new Version(strippedBarracudaVersion);
        if (version <= barracuda8DSupport)
        {
            return;
        }
#else
        return;
#endif
    }

    var alloc = new TensorCachingAllocator();
    var height = 64;
    var width = 84;
    var channels = 3;

    // Set shape to {1, ..., height, width, channels}
    // For 8D, the ... are all 1's
    var shape = new long[dimension];
    for (var i = 0; i < dimension; i++)
    {
        shape[i] = 1;
    }
    shape[dimension - 3] = height;
    shape[dimension - 2] = width;
    shape[dimension - 1] = channels;

    var intShape = new int[dimension];
    for (var i = 0; i < dimension; i++)
    {
        intShape[i] = (int)shape[i];
    }

    var tensorProxy = new TensorProxy
    {
        valueType = TensorProxy.TensorType.Integer,
        data = new Tensor(intShape),
        shape = shape,
    };

    // These should be invariant after the resize.
    Assert.AreEqual(height, tensorProxy.data.shape.height);
    Assert.AreEqual(width, tensorProxy.data.shape.width);
    Assert.AreEqual(channels, tensorProxy.data.shape.channels);

    TensorUtils.ResizeTensor(tensorProxy, 42, alloc);

    Assert.AreEqual(height, tensorProxy.shape[dimension - 3]);
    Assert.AreEqual(width, tensorProxy.shape[dimension - 2]);
    Assert.AreEqual(channels, tensorProxy.shape[dimension - 1]);
    Assert.AreEqual(height, tensorProxy.data.shape.height);
    Assert.AreEqual(width, tensorProxy.data.shape.width);
    Assert.AreEqual(channels, tensorProxy.data.shape.channels);

    alloc.Dispose();
}