// Finishes wiring the network: builds the input layer first, then the hidden
// layers, and finally the output layer from the pre-allocated layer array.
public void Initialise()
{
    layers[0] = LayerBuilder.CreateInputLayer(layers[0], layers[1]);
    layers = LayerBuilder.CreateHidenLayer(layers);

    int last = layers.Length - 1;
    layers[last] = LayerBuilder.CreateOutputLayer(layers[last], layers[last - 1]);
}
// Builds a convolutional MNIST classifier: two conv/pool/ReLU stages, then
// dropout -> linear -> ReLU -> dropout -> linear, finished with a log-softmax
// output paired with a class-NLL criterion.
public static void BuildCnn(IAllocator allocator, SeedSource seedSource, int batchSize, bool useCudnn, out Sequential model, out ICriterion criterion, out bool outputIsClassIndices)
{
    var imgW = MnistParser.ImageSize;
    var imgH = MnistParser.ImageSize;
    var dtype = DType.Float32;
    var imageDims = new long[] { batchSize, 1, imgH, imgW };

    model = new Sequential();
    model.Add(new ViewLayer(imageDims));

    // Two convolutional stages; each returns its output sizes for chaining.
    var stageSizes = AddCnnLayer(allocator, seedSource, dtype, model, imageDims, 20, useCudnn);
    stageSizes = AddCnnLayer(allocator, seedSource, dtype, model, stageSizes, 40, useCudnn);

    // Flatten everything but the batch dimension for the fully-connected stack.
    var flatSize = stageSizes[1] * stageSizes[2] * stageSizes[3];
    model.Add(new ViewLayer(batchSize, flatSize));

    const int hiddenSize = 1000;
    const int outputSize = 10;

    model.Add(new DropoutLayer(allocator, seedSource, dtype, 0.5f, batchSize, flatSize));
    model.Add(new LinearLayer(allocator, seedSource, dtype, (int)flatSize, hiddenSize, batchSize));
    model.Add(new ReLULayer(allocator, dtype, batchSize, hiddenSize));
    model.Add(new DropoutLayer(allocator, seedSource, dtype, 0.5f, batchSize, hiddenSize));
    model.Add(new LinearLayer(allocator, seedSource, dtype, hiddenSize, outputSize, batchSize));
    model.Add(LayerBuilder.BuildLogSoftMax(allocator, dtype, batchSize, outputSize, useCudnn));

    criterion = new ClassNLLCriterion(allocator, batchSize, outputSize);
    outputIsClassIndices = true; // output of criterion is class indices
}
// Adds the server that hosts the given layer to the model and to the home view,
// returning the resulting server node.
// Throws ApplicationException when the layer type has no known server mapping.
internal static ServerModelNode AddServerToHomeView(DappleModel oModel, LayerBuilder oLayer)
{
    const bool Enabled = true;
    const bool DontAddToHomeViewYet = false;
    const bool DontSubmitToDappleSearch = false;

    ServerModelNode result;

    // --- Add the server to the model ---
    // Pattern matching replaces the original redundant "is" test + "as" cast pair.
    if (oLayer is ArcIMSQuadLayerBuilder arcLayer)
    {
        result = oModel.AddArcIMSServer(new ArcIMSServerUri(arcLayer.ServerURL), Enabled, DontAddToHomeViewYet, DontSubmitToDappleSearch);
    }
    else if (oLayer is DAPQuadLayerBuilder dapLayer)
    {
        result = oModel.AddDAPServer(new DapServerUri(dapLayer.ServerURL), Enabled, DontAddToHomeViewYet, DontSubmitToDappleSearch);
    }
    else if (oLayer is WMSQuadLayerBuilder wmsLayer)
    {
        result = oModel.AddWMSServer(new WMSServerUri(wmsLayer.ServerURL), Enabled, DontAddToHomeViewYet, DontSubmitToDappleSearch);
    }
    else
    {
        throw new ApplicationException("Don't know how to get the server of type " + oLayer.GetType().ToString());
    }

    result.AddToHomeView();
    return result;
}
public void LayerAssembler_AssembleSimpleLayerWithRoomOffset_LayerCorrect()
{
    // Arrange: a 5x5 layer, room offset (2,2), floor at local (1,1).
    var script = CreateLines("Layer:Size=(5,5)", "Room:(2,2)", "Floor:(1,1)");
    var instructions = LayerInstruction.FromStrings(script);

    // Act
    var layer = LayerBuilder.Assemble(instructions);

    // Assert: (2,2) + (1,1) = (3,3).
    Assert.AreEqual("Floor", layer[3, 3].Ground.Type);
}
public void Init()
{
    _routeProposal = new PatrolRouteProposal(_map, new XYZ(0, 0, 0), path => { });

    // A fully-floored 3x3 layer registered with the map under test.
    var layerBuilder = new LayerBuilder(3, 3);
    layerBuilder.PutFloor(new XY(0, 0), new XY(2, 2));
    _layer = layerBuilder.Build();
    _map.Add(_layer);
}
public void LayerBuilder_CreateEmptyLayer_LayerCreated()
{
    // Building without any Put calls still yields a layer of the requested size.
    var layer = new LayerBuilder(10, 10).Build();

    Assert.AreEqual(new XY(10, 10), layer.Size);
}
public void LayerBuilder_PutSingleFloorSpace_FloorCorrectlyAdded()
{
    // Arrange
    var sut = new LayerBuilder(3, 3);
    sut.PutFloor(1, 1);

    // Act
    var layer = sut.Build();

    // Assert
    AssertContains(1, 1, layer, FacilityObjectNames.Floor);
}
public void LayerBuilder_PutFloor_FloorWithCorrectObjectLayer()
{
    // Arrange
    var sut = new LayerBuilder(3, 3);
    sut.PutFloor(1, 1);

    // Act
    var layer = sut.Build();

    // Assert: the floor tile sits on the ground object layer.
    Assert.IsTrue(layer[1, 1].Ground.ObjectLayer == ObjectLayer.Ground);
}
public void LayerBuilder_PutFloorRectangle_FloorCorrectlyAdded()
{
    // Arrange: floor the whole 3x3 area as one rectangle.
    var sut = new LayerBuilder(3, 3);
    sut.PutFloor(new XY(0, 0), new XY(2, 2));

    // Act
    var layer = sut.Build();

    // Assert
    AssertContains(new XY(0, 0), new XY(2, 2), layer, FacilityObjectNames.Floor);
}
public void LayerAssembler_AssembleSimpleLayerWithObjectLink_LayerCorrect()
{
    // Arrange: two tables with an explicit link instruction between them.
    var script = CreateLines("Layer:Size=(3,3)", "Room: (0,0)", "Table:(1,1)", "Table:(1,2)", "Link:(Table,1,1)-(Table,1,2)");
    var instructions = LayerInstruction.FromStrings(script);

    // Act
    var layer = LayerBuilder.Assemble(instructions);

    // Assert: each table carries exactly one link.
    Assert.AreEqual(1, layer[1, 1].LowerObject.LinkedObjs.Count);
    Assert.AreEqual(1, layer[1, 2].LowerObject.LinkedObjs.Count);
}
public void Init()
{
    _thief = new Thief(this, _map);
    _thief.Subscribe(this);

    // A fully-floored 3x3 layer with portals added, registered with the map.
    var layerBuilder = new LayerBuilder(3, 3);
    layerBuilder.PutFloor(new XY(0, 0), new XY(2, 2));
    AddPortals(layerBuilder);
    _layer = layerBuilder.Build();
    _map.Add(_layer);

    _upFacingContainer.Put(_upFacingValuable);
    _rightFacingContainer.Put(_valuable2);
}
public void LayerAssembler_PutObjectFromInstruction_LayerCorrect()
{
    // Arrange: the instruction encodes type, position (1,1) and orientation (R).
    var parsed = ObjectInstruction.FromString("Floor:(1,1,R)");
    var sut = new LayerBuilder(3, 3);
    sut.Put(parsed[0]);

    // Act
    var layer = sut.Build();

    // Assert
    Assert.AreEqual("Floor", layer[1, 1].Ground.Type);
    Assert.AreEqual(Orientation.Right, layer[1, 1].Ground.Orientation);
}
public void LayerBuilder_OnBuild_WallsCorrectlyAdded()
{
    // Arrange
    var sut = new LayerBuilder(3, 3);
    sut.PutFloor(1, 1);

    // Act
    var layer = sut.Build();

    // Assert: Build surrounds the floored cell with walls along all four edges.
    AssertContains(new XY(0, 0), new XY(0, 2), layer, "Wall"); // x == 0 edge
    AssertContains(new XY(0, 0), new XY(2, 0), layer, "Wall"); // y == 0 edge
    AssertContains(new XY(2, 0), new XY(2, 2), layer, "Wall"); // x == 2 edge
    AssertContains(new XY(0, 2), new XY(2, 2), layer, "Wall"); // y == 2 edge
}
// Gives every cell of the 3x3 grid an off-map portal on the lower object layer.
private void AddPortals(LayerBuilder builder)
{
    // Single flattened loop over the 9 cells, visiting them in the same
    // row-by-row order as the original nested loops.
    for (var cell = 0; cell < 9; cell++)
    {
        var column = cell % 3;
        var row = cell / 3;

        builder.Put(column, row, new FacilityPortal
        {
            ObjectLayer = ObjectLayer.LowerObject,
            Endpoint1 = SpecialLocation.OffOfMap,
            Endpoint2 = new XYZ(column, row, 0)
        });
    }
}
/// <summary>
/// View the legend for the selected layer.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Event data (unused).</param>
private void viewLegendToolStripMenuItem_Click(object sender, EventArgs e)
{
    // Guard: the menu item can fire with no row selected; without this the
    // SelectedIndices[0] access below throws.
    if (c_lvLayers.SelectedIndices.Count == 0)
    {
        return;
    }

    LayerBuilder oBuilder = m_oCurrServerLayers[c_lvLayers.SelectedIndices[0]];
    string[] aLegends = oBuilder.GetLegendURLs();

    // A layer may expose several legend URLs; open each non-empty one.
    foreach (string szLegend in aLegends)
    {
        if (!String.IsNullOrEmpty(szLegend))
        {
            MainForm.BrowseTo(szLegend);
        }
    }
}
public void LayerBuilder_PutUpperObject_ObjectPutCorrectly()
{
    // Arrange
    var sut = new LayerBuilder(3, 3);
    var painting = new FacilityObject
    {
        Type = "Painting",
        Orientation = Orientation.Right,
        ObjectLayer = ObjectLayer.UpperObject
    };
    sut.PutFloor(1, 1);
    sut.Put(0, 1, painting);

    // Act
    var layer = sut.Build();

    // Assert: the object is present and occupies the upper slot.
    Assert.IsTrue(layer[0, 1].Contains(painting));
    Assert.AreEqual(painting, layer[0, 1].UpperObject);
}
public void LayerBuilder_PutLowerObject_ObjectPutCorrectly()
{
    // Arrange
    var sut = new LayerBuilder(3, 3);
    var cash = new FacilityObject
    {
        Type = "Cash",
        Orientation = Orientation.Up,
        ObjectLayer = ObjectLayer.LowerObject
    };
    sut.PutFloor(1, 1);
    sut.Put(1, 1, cash);

    // Act
    var layer = sut.Build();

    // Assert: the object is present and occupies the lower slot.
    Assert.IsTrue(layer[1, 1].Contains(cash));
    Assert.AreEqual(cash, layer[1, 1].LowerObject);
}
// Appends one conv -> max-pool -> ReLU stage to the model and returns the stage's
// output sizes so the next stage (or the flattening view) can chain off them.
// inputSizes is ordered [batch, planes, height, width] (matches BuildCnn's inputDims);
// the indices are reordered below because BuildConvLayer appears to take
// (batch, width, height, planes) — TODO confirm against the BuildConvLayer signature.
private static long[] AddCnnLayer(IAllocator allocator, SeedSource seedSource, DType elementType, Sequential model, long[] inputSizes, int nOutputPlane, bool useCudnn)
{
    // Convolution descriptor (5, 5, 1, 1, 0, 0) — presumably 5x5 kernel, stride 1,
    // no padding; verify against ConvolutionDesc2d's parameter order.
    var conv = LayerBuilder.BuildConvLayer(allocator, seedSource, elementType, (int)inputSizes[0], (int)inputSizes[3], (int)inputSizes[2], (int)inputSizes[1], nOutputPlane, new ConvolutionDesc2d(5, 5, 1, 1, 0, 0), useCudnn);
    model.Add(conv);

    // Pooling descriptor (2, 2, 1, 1, 0, 0) — presumably a 2x2 window with stride 1.
    var cdPool = new ConvolutionDesc2d(2, 2, 1, 1, 0, 0);
    var poolLayer = LayerBuilder.BuildPoolLayer(allocator, elementType, conv.OutputSizes, cdPool, useCudnn);
    model.Add(poolLayer);

    model.Add(new ReLULayer(allocator, elementType, poolLayer.OutputSizes));

    return(poolLayer.OutputSizes);
}
// Builds a two-layer MLP for MNIST: linear -> sigmoid -> linear -> log-softmax,
// paired with a class-NLL criterion that consumes class indices.
public static void BuildMLPSoftmax(IAllocator allocator, SeedSource seedSource, int batchSize, bool useCudnn, out Sequential model, out ICriterion criterion, out bool outputIsClassIndices)
{
    var dtype = DType.Float32;
    int pixelCount = MnistParser.ImageSize * MnistParser.ImageSize;
    const int hiddenSize = 100;
    int outputSize = MnistParser.LabelCount;

    model = new Sequential();
    model.Add(new ViewLayer(batchSize, pixelCount));
    model.Add(new LinearLayer(allocator, seedSource, dtype, pixelCount, hiddenSize, batchSize));
    model.Add(new SigmoidLayer(allocator, dtype, batchSize, hiddenSize));
    model.Add(new LinearLayer(allocator, seedSource, dtype, hiddenSize, outputSize, batchSize));
    model.Add(LayerBuilder.BuildLogSoftMax(allocator, dtype, batchSize, outputSize, useCudnn));

    criterion = new ClassNLLCriterion(allocator, batchSize, outputSize);
    outputIsClassIndices = true; // output of criterion is class indices
}
/// <summary>
/// Turns a list of LayerUris into a list of LayerBuilders.
/// </summary>
/// <param name="oUris">The uris to convert.</param>
/// <returns>A builder for every uri that converted successfully; failures are
/// reported to the user and skipped.</returns>
private List<LayerBuilder> CreateLayerBuilders(List<LayerUri> oUris)
{
    var result = new List<LayerBuilder>();

    foreach (LayerUri oUri in oUris)
    {
        try
        {
            LayerBuilder oLayerToAdd = oUri.getBuilder(m_oModel);
            if (oLayerToAdd != null)
            {
                result.Add(oLayerToAdd);
            }
        }
        catch (Exception ex)
        {
            // One bad uri must not abort the whole batch; tell the user and move on.
            Program.ShowMessageBox(ex.Message, "Dataset Could Not Be Added", MessageBoxButtons.OK, MessageBoxDefaultButton.Button1, MessageBoxIcon.Error);
        }
    }

    return result;
}
// Delegates layer (re)population to the builder, refreshing _layers in place.
public void Rebuild() => LayerBuilder.PopulateLayers(ref _layers);
// End-to-end MNIST demo: builds a 5-layer dense network ending in softmax, trains it
// with SGD + squared error over 35 epochs (first 41000 samples train, the remaining
// 1000 validate), then loops forever classifying whatever LoadCurrentImage() returns.
public unsafe static void MNISTExample()
{
    //Hyperparameters
    Hyperparameters.LearningRate = 0.001f;
    Hyperparameters.Optimizer = new SGD();

    //Model Creation
    var x = new Input(784); // 28x28 pixels flattened
    //var dropout = new Dropout(x, 0.1f);
    //var model = LayerBuilder.Dense(500, x, "relu");
    var model = LayerBuilder.Dense(100, x, "relu");
    model = LayerBuilder.Dense(400, model, "relu");
    model = LayerBuilder.Dense(200, model, "relu");
    model = LayerBuilder.Dense(100, model, "relu");
    model = LayerBuilder.Dense(10, model, "softmax"); // one output per digit

    //Loss Function Creation
    var y = new Input(10);
    var loss = LayerBuilder.SquaredError(model, y);

    //Data preparation
    (float[,] traindata, float[,] labels) = LoadMNISTDataSet();
    int mnistsize = 42000;
    Tensor x_train = Tensor.LoadArrayToDisposedTensor(traindata, new Shape(mnistsize, 784), DeviceConfig.Host_Float);
    Tensor y_train = Tensor.LoadArrayToDisposedTensor(labels, new Shape(mnistsize, 10), DeviceConfig.Host_Float);

    //Training
    int batchsize = 100;
    int trainl = 41000; // train/validation split point
    Stopwatch s = new Stopwatch();
    for (int epoch = 0; epoch < 35; epoch++)
    {
        float l = 0;   // accumulated training loss for this epoch
        float val = 0; // correct predictions on the validation slice
        s.Restart();
        Console.WriteLine("Epoch " + epoch + " başladı.");

        // Training pass over the first trainl samples, one mini-batch at a time.
        for (int batch = 0; batch < trainl / batchsize; batch++)
        {
            Tensor batchx = Tensor.Cut(x_train, batch * (batchsize * 784), new Shape(1, batchsize, 784));
            Tensor batchy = Tensor.Cut(y_train, batch * (batchsize * 10), new Shape(1, batchsize, 10));
            x.SetInput(batchx);
            y.SetInput(batchy);
            loss.Minimize();

            // Sum the per-sample losses via a raw pointer over the result tensor.
            Index zero = new Index(loss.OuterShape);
            zero.SetZero();
            Tensor res = loss.GetTerm(zero).GetResult();
            float *pp = (float *)res.Array;
            for (int i = 0; i < res.Shape.TotalSize; i++)
            {
                l += pp[i];
            }
        }

        // Validation pass over the remaining batches: forward only, count argmax hits.
        for (int batch = trainl / batchsize; batch < mnistsize / batchsize; batch++)
        {
            Tensor batchx = Tensor.Cut(x_train, batch * (batchsize * 784), new Shape(1, batchsize, 784));
            Tensor batchy = Tensor.Cut(y_train, batch * (batchsize * 10), new Shape(1, batchsize, 10));
            model.DeleteTerms();
            x.SetInput(batchx);
            y.SetInput(batchy);
            Index zero = new Index(model.OuterShape);
            zero.SetZero();
            model.PreCheck();
            Tensor res = model.GetTerm(zero).GetResult();
            for (int i = 0; i < batchsize; i++)
            {
                // Compare predicted argmax against the one-hot label's argmax.
                int myans = MaxId((float *)res.Array + i * 10);
                int correctres = MaxId((float *)batchy.Array + i * 10);
                val += (myans == correctres ? 1 : 0);
            }
        }
        s.Stop();
        Console.WriteLine("Epoch " + epoch + " biti.");
        Console.WriteLine("Loss: " + l / trainl);
        Console.WriteLine("Validation: " + val / (mnistsize - trainl));
        Console.WriteLine("Time: " + s.ElapsedMilliseconds + "ms");
    }
    PrintPools();

    // Interactive loop: re-classify the current image twice a second, forever.
    while (true)
    {
        try
        {
            float[] data = LoadCurrentImage();
            Tensor x_test = Tensor.LoadArrayToDisposedTensor(data, new Shape(1, 1, 784), DeviceConfig.Host_Float);
            model.DeleteTerms();
            x.SetInput(x_test);
            Index zero = new Index(model.OuterShape);
            zero.SetZero();
            model.PreCheck();
            Tensor res = model.GetTerm(zero).GetResult();
            Console.WriteLine("Result: " + res);
            Console.WriteLine("Digit Prediction: " + MaxId((float *)res.Array));
            Console.WriteLine("-----------");
        }
        // Best-effort: the image source may be unavailable; swallow and retry next tick.
        catch (Exception) { }
        Thread.Sleep(500);
    }
}
// Builds the map from an instruction set: assembles every layer first, then
// creates each portal and places it at its instructed location.
public FacilityMap(IWorld world, MapInstruction inst)
{
    _world = world;

    foreach (var layerInstruction in inst.Layers)
    {
        Add(LayerBuilder.Assemble(layerInstruction));
    }

    foreach (var portalInstruction in inst.Portals)
    {
        this[portalInstruction.Location].Put(PortalFactory.Create(portalInstruction));
    }
}
// Trains a 3-layer network (two leaky-ReLU hidden layers, sigmoid output) with
// SGD + momentum on a small hand-written dataset, printing the running error forever.
static void Main(string[] args)
{
    var rd = new Random();
    var values = new[] { .1f, .002f }; // [0] = learning rate, [1] = momentum

    var watch = System.Diagnostics.Stopwatch.StartNew();
    ProcessingDevice.Device = Device.CPU;
    watch.Stop();
    Console.WriteLine($"Device Time: {watch.ElapsedMilliseconds}ms");

    var hiddens = new LayerBuilder(2, 4, values[0])
        .Supervised()
        .WithLeakRelu()
        .Hidden()
        .WithSGD()
        .WithMomentum(values[1])
        .FullSynapse()
        .Build();
    var hiddens2 = new LayerBuilder(2, 2, values[0])
        .Supervised()
        .WithLeakRelu()
        .Hidden()
        .WithSGD()
        .WithMomentum(values[1])
        .FullSynapse()
        .Build();
    var outputs = new LayerBuilder(2, 2, values[0])
        .Supervised()
        .WithSigmoid()
        .Output()
        .WithSGD()
        .WithMomentum(values[1])
        .FullSynapse()
        .Build();

    // NOTE(review): currently unused — the error below is computed by hand.
    var loss = new SquareLossFunction();

    watch = System.Diagnostics.Stopwatch.StartNew();
    watch.Stop();
    Console.WriteLine($"Sinapse Time: {watch.ElapsedMilliseconds}ms");

    var trainingValues = new[] { new[] { 0f, 0f, 0f, 0f }, new[] { 1f, 0f, 0f, 0f }, new[] { 0f, 1f, 0f, 0f }, new[] { 0f, 0f, 1f, 0f }, new[] { 0f, 0f, 0f, 1f }, new[] { 1f, 0f, 0f, 0f }, new[] { 1f, 1f, 0f, 0f }, new[] { 0f, 1f, 1f, 0f }, new[] { 0f, 0f, 1f, 1f }, new[] { 0f, 0f, 0f, 1f }, new[] { 0f, 1f, 0f, 0f }, new[] { 1f, 0f, 1f, 0f }, new[] { 0f, 1f, 0f, 1f }, new[] { 1f, 0f, 1f, 0f }, new[] { 0f, 1f, 0f, 1f }, new[] { 0f, 0f, 1f, 0f }, new[] { 1f, 0f, 0f, 1f }, new[] { 1f, 1f, 0f, 0f }, new[] { 0f, 1f, 1f, 0f }, new[] { 0f, 0f, 1f, 1f }, new[] { 1f, 1f, 1f, 0f }, new[] { 1f, 1f, 1f, 1f } };
    var desiredValues = new[] { new[] { 1f, 0f }, new[] { 1f, 0f }, new[] { 1f, 0f }, new[] { 1f, 0f }, new[] { 0f, 1f }, new[] { 1f, 0f }, new[] { 1f, 0f }, new[] { 1f, 0f }, new[] { 0f, 1f }, new[] { 0f, 1f }, new[] { 1f, 0f }, new[] { 1f, 0f }, new[] { 0f, 1f }, new[] { 1f, 0f }, new[] { 0f, 1f }, new[] { 1f, 0f }, new[] { 0f, 1f }, new[] { 1f, 0f }, new[] { 1f, 0f }, new[] { 0f, 1f }, new[] { 1f, 0f }, new[] { 0f, 1f } };

    int cont = 0;
    int sizeTrain = 10; // samples per reported iteration
    var e = double.MaxValue;
    while (true)
    {
        watch = System.Diagnostics.Stopwatch.StartNew();
        e = 0;
        for (int i = 0; i < sizeTrain; i++)
        {
            var index = rd.Next(0, trainingValues.Length);
            var inputs = trainingValues[index];
            var desireds = desiredValues[index];

            // Feed Forward
            var _h = hiddens.Output(inputs);
            var _h2 = hiddens2.Output(_h);
            var _o = outputs.Output(_h2);

            // Backward: propagate the error layer by layer from output to input.
            var _oe = ((ISupervisedLearning)outputs).Learn(_h2, desireds);
            var _he2 = ((ISupervisedLearning)hiddens2).Learn(_h, _oe);
            ((ISupervisedLearning)hiddens).Learn(inputs, _he2);

            // Error: Euclidean distance between output and target.
            // FIX: the second term was "e1 * e0" — an obvious typo; the squared
            // error needs e1 * e1 (and the sum of squares is never negative,
            // so the Math.Abs wrapper is no longer needed).
            var e0 = Math.Abs(_o[0] - desireds[0]);
            var e1 = Math.Abs(_o[1] - desireds[1]);
            var error = Math.Sqrt(e0 * e0 + e1 * e1);
            e += error / 2.0;
        }
        e /= sizeTrain;
        cont++;
        watch.Stop();
        var time = watch.ElapsedMilliseconds;
        Console.WriteLine($"Interactions: {cont}\nError: {e}");
        //Console.WriteLine($"Interactions: {cont}\nError: {e}\nTime: {time / (double)sizeTrain}ms");
        Console.Title = $"TSPS (Training Sample per Second): {Math.Ceiling(1000d / ((double) time / (double)sizeTrain))}";
    }
}
// Trains an autoencoder on filtered trading events, progressively prunes its tanh
// nodes, then repurposes the network as a single-output supervised predictor and
// serializes the result to out3.json.
static void Main(string[] args)
{
    var events = ReadEventFile("trainingEvents.csv");

    // Keep only events where the open-to-next-low drop exceeds 10%.
    // NOTE(review): this is a deferred LINQ query — ins.Count() below and the
    // later Select re-enumerate it; consider materializing with .ToList().
    var ins = events
        .Where(e => (e.Open - e.NextLow) / e.Open > .1f)
        .Select(evts => evts.GetInputArray());
    Console.WriteLine($"Qualified Events: {ins.Count()}");

    // Autoencoder tests map each input to itself; supervised tests map input -> output.
    var unsupervisedTests = ins.Select(i => Tuple.Create(i, i));
    var supervisedTests = events.Select(evt => Tuple.Create(evt.GetInputArray(), evt.GetOutputArray()));

    // 5 inputs -> five hidden layers of 10 (tanh bottleneck in the middle) -> 5 outputs.
    var builder = new LayerBuilder();
    var description = builder.BuildDescription(5, new[] { new LayerBuilder.LayerSpec(10, "sum", "softplus"), new LayerBuilder.LayerSpec(10, "sum", "softplus"), new LayerBuilder.LayerSpec(10, "sum", "tanh"), new LayerBuilder.LayerSpec(10, "sum", "softplus"), new LayerBuilder.LayerSpec(10, "sum", "softplus"), new LayerBuilder.LayerSpec(5, "sum", null) });
    //var description = builder.BuildDescription(5, new[]
    //{
    //    new LayerBuilder.LayerSpec(5, "sum", "softplus"),
    //    new LayerBuilder.LayerSpec(6, "sum", "tanh"),
    //    new LayerBuilder.LayerSpec(4, "sum", "softplus"),
    //    new LayerBuilder.LayerSpec(5, "sum", null)
    //});
    var net = Net.FromDescription(description);

    // Autoencoder pre-training with a decaying learning-rate schedule.
    var trainer = new SimpleTrainer();
    trainer.Train(net: net, tests: unsupervisedTests, desiredError: 5e-6f, maxEpochs: 200, learningRate: 0.75f);
    trainer.Train(net: net, tests: unsupervisedTests, desiredError: 5e-6f, maxEpochs: 9500, learningRate: 0.5f);
    trainer.Train(net: net, tests: unsupervisedTests, desiredError: 1e-8f, maxEpochs: 200, learningRate: .25f);
    trainer.Train(net: net, tests: unsupervisedTests, desiredError: 1e-8f, maxEpochs: 200, learningRate: .125f);
    trainer.Train(net: net, tests: unsupervisedTests, desiredError: 1e-8f, maxEpochs: 200, learningRate: .0625f);

    // Prune one tanh node at a time (5 total), retraining after each removal.
    for (var i = 0; i < 5; i++)
    {
        var nextDescription = net.Description;
        var firstSigmoidId = nextDescription.Nodes.First(n => n.Processor == "tanh").NodeId;
        nextDescription.Nodes = nextDescription.Nodes.Where(n => n.NodeId != firstSigmoidId).ToArray();

        // Drop every edge that fed from the removed node.
        foreach (var node in nextDescription.Nodes)
        {
            node.Inputs = node.Inputs
                .Where(inp => inp.FromInputVector || inp.InputId != firstSigmoidId)
                .ToArray();
        }
        net = Net.FromDescription(nextDescription);
        Console.WriteLine($"Removed {i + 1} sigmoids");
        trainer.Train(net: net, tests: unsupervisedTests, desiredError: 1e-6f, maxEpochs: 400, learningRate: 0.5f);
    }
    trainer.Train(net: net, tests: unsupervisedTests, desiredError: 1e-6f, maxEpochs: 500, learningRate: 0.5f);
    trainer.Train(net: net, tests: unsupervisedTests, desiredError: 1e-6f, maxEpochs: 400, learningRate: 0.25f);

    // Keep only the first output and discard the nodes behind the other four,
    // turning the autoencoder into a single-output predictor.
    var finalDescription = net.Description;
    var outsRemoved = new[] { finalDescription.Outputs[1], finalDescription.Outputs[2], finalDescription.Outputs[3], finalDescription.Outputs[4] };
    finalDescription.Nodes = finalDescription.Nodes
        .Where(n => !outsRemoved.Contains(n.NodeId))
        .ToArray();
    finalDescription.Outputs = new[] { finalDescription.Outputs[0] };
    var nextNet = Net.FromDescription(finalDescription);

    // Supervised fine-tuning of the single-output network.
    trainer.Train(net: nextNet, tests: supervisedTests, desiredError: 1e-8f, maxEpochs: 20000, learningRate: .25f);
    trainer.Train(net: nextNet, tests: supervisedTests, desiredError: 1e-8f, maxEpochs: 20000, learningRate: .125f);
    trainer.Train(net: nextNet, tests: supervisedTests, desiredError: 1e-8f, maxEpochs: 2000, learningRate: .0625f);
    trainer.Train(net: nextNet, tests: supervisedTests, desiredError: 1e-8f, maxEpochs: 2000, learningRate: .03125f);

    // NOTE(review): this serializes net (the pruned autoencoder), not nextNet
    // (the supervised network just trained above) — confirm which was intended.
    var final = net.Description;
    var finalText = JsonConvert.SerializeObject(final);
    using (var writer = File.CreateText("out3.json"))
    {
        writer.Write(finalText);
    }
    Console.WriteLine();
    Console.ReadLine();
}
// Trains a tiny 2-2-1 tanh network on XOR-style data, then verifies that
// ReadWeights followed by FillWeights round-trips the weights exactly and
// leaves the training loss unchanged.
public void TestReadAndFillDoesntChangeError()
{
    // Each test is { inputs, expected } with targets in {-1, 1} (tanh range).
    var tests = new[] { new[] { new[] { 1f, 1f }, new[] { -1f } }, new[] { new[] { 1f, -1f }, new[] { 1f } }, new[] { new[] { -1f, 1f }, new[] { 1f } }, new[] { new[] { -1f, -1f }, new[] { -1f } } };

    var builder = new LayerBuilder();
    var desc = builder.BuildDescription(2, new[] { new LayerBuilder.LayerSpec(2, "sum", "tanh"), new LayerBuilder.LayerSpec(1, "sum", "tanh") });
    var net = Net.FromDescription(desc);
    var train = net.GetTrainingFunction();

    var weights = new float[net.NumberOfWeights];
    var deltas = new float[net.NumberOfWeights];
    net.FillWeights(weights);

    // Train for 15001 epochs of plain gradient descent (step = delta / 2).
    var loss = 0f;
    for (var i = 0; i < 15001; i++)
    {
        loss = 0;
        foreach (var test in tests)
        {
            loss += train(test[0], test[1], weights, deltas);
            for (var j = 0; j < weights.Length; j++)
            {
                weights[j] -= deltas[j] / 2f;
            }
        }
        if (i % 300 == 0)
        {
            Console.WriteLine($"{i}, {loss}");
        }
    }

    // Final loss with the trained weights (no further updates applied to `weights`
    // are used below — note `train` still writes into `deltas`).
    loss = 0;
    foreach (var test in tests)
    {
        loss += train(test[0], test[1], weights, deltas);
    }
    Console.WriteLine(loss);
    Console.WriteLine(string.Join(", ", weights));

    // Round-trip: push the weights into the net, pull them back out, and
    // require bit-for-bit equality.
    net.ReadWeights(weights);
    var newWeights = new float[net.NumberOfWeights];
    net.FillWeights(newWeights);
    Console.WriteLine(string.Join(", ", newWeights));
    for (var i = 0; i < weights.Length; i++)
    {
        Assert.AreEqual(weights[i], newWeights[i]);
    }

    // The loss computed from the round-tripped weights must match exactly.
    var newLoss = 0f;
    foreach (var test in tests)
    {
        newLoss += train(test[0], test[1], newWeights, new float[net.NumberOfWeights]);
    }
    Console.WriteLine(newLoss);
    Assert.AreEqual(loss, newLoss);
}
// XOR demo: trains a 2-16-1 sigmoid network with SGD + squared error on the four
// XOR patterns, prints the learned outputs, then shows that a clone built from
// the same layer objects (l2[x2]) reproduces the results.
public unsafe static void XORExample()
{
    //Hyperparameters
    Hyperparameters.LearningRate = 0.1f;
    Hyperparameters.Optimizer = new SGD();

    //Model Creation
    var l1 = LayerBuilder.Dense(16, "sigmoid");
    var l2 = LayerBuilder.Dense(1, "sigmoid")[l1]; // l2 stacked on l1 via the indexer
    var x = new Input(2);
    Layer model = l2[x]; // bind the stack to the input

    //Loss Function Creation
    var y = new Input(1);
    var loss = LayerBuilder.SquaredError(model, y);

    //Data preparation: all four XOR rows in one batch of 4.
    Tensor x_train = new Tensor((1, 4, 2), DeviceConfig.Host_Float);
    Tensor y_train = new Tensor((1, 4, 1), DeviceConfig.Host_Float);
    float *xt = (float *)x_train.Array;
    float *yt = (float *)y_train.Array;
    // 1,1 = 0
    // 1,0 = 1
    // 0,1 = 1
    // 0,0 = 0
    xt[0] = 1; xt[1] = 1;
    xt[2] = 1; xt[3] = 0;
    xt[4] = 0; xt[5] = 1;
    xt[6] = 0; xt[7] = 0;
    yt[0] = 0;
    yt[1] = 1;
    yt[2] = 1;
    yt[3] = 0;

    //Give data to the model
    x.SetInput(x_train);
    y.SetInput(y_train);

    Stopwatch s = new Stopwatch();
    s.Start();

    //Minimizing
    loss.PreCheck();
    Index a = new Index(model.OuterShape);
    a.SetZero();
    for (int epoch = 0; epoch < 100000; epoch++)
    {
        loss.Minimize();
        // Every 5000 epochs, print the total loss over the four patterns.
        if (epoch % 5000 == 0)
        {
            float res = ((float *)loss.GetTerm(a).GetResult().Array)[0];
            res += ((float *)loss.GetTerm(a).GetResult().Array)[1];
            res += ((float *)loss.GetTerm(a).GetResult().Array)[2];
            res += ((float *)loss.GetTerm(a).GetResult().Array)[3];
            Console.WriteLine(res);
        }
    }
    s.Stop();
    Console.WriteLine("Time Elapsed: " + s.ElapsedMilliseconds);

    //Print Pools
    PrintPools();

    //Print the results
    var result = model.GetTerm(a).GetResult();
    Console.WriteLine("Results: " + result);

    //Print the results of clone model — rebinding l2 to a fresh input should
    //share the trained weights and therefore produce the same predictions.
    Input x2 = new Input(2);
    x2.SetInput(x_train);
    var clonemodel = l2[x2];
    clonemodel.PreCheck();
    var result2 = clonemodel.GetTerm(a).GetResult();
    Console.WriteLine("Results: " + result2);
    clonemodel.DeleteTerms();
    model.DeleteTerms();
}
/// <summary>
/// Partially constructs the network by allocating its layers; call Initialise()
/// afterwards to finish setting the network up.
/// </summary>
/// <param name="hidenLayersNeuronCount">Neuron count for each hidden layer.</param>
/// <param name="inputCount">Number of input neurons.</param>
/// <param name="outputCount">Number of output neurons.</param>
public NeuralNetwork(int[] hidenLayersNeuronCount, int inputCount, int outputCount)
    => layers = LayerBuilder.CreateNewNeuralNetwork(hidenLayersNeuronCount, inputCount, outputCount);