/// <summary>
/// Trains a two layer auto encoder whose second (decoder) layer is created tied to the first (encoder) layer.
/// </summary>
public void TiedAutoEncoder()
{
    const int DATA_SIZE = 1000, REDUCED_SIZE = 200;

    // create some random data: each example maps a random vector onto itself
    var rand = new Random();
    var examples = Enumerable.Range(0, 100)
        .Select(index => _lap.Create(DATA_SIZE, v => Convert.ToSingle(rand.NextDouble())))
        .Select(vector => new TrainingExample(vector.Data.Data, vector.Data.Data))
        .ToList();
    var trainingData = _lap.NN.CreateTrainingDataProvider(examples);

    var layerTemplate = new LayerDescriptor(0f)
    {
        Activation = ActivationType.Relu,
        WeightUpdate = WeightUpdateType.RMSprop
    };

    // encoder reduces DATA_SIZE down to REDUCED_SIZE; the decoder is tied to the encoder
    var encoder = _lap.NN.CreateLayer(DATA_SIZE, REDUCED_SIZE, layerTemplate);
    var decoder = _lap.NN.CreateTiedLayer(encoder, layerTemplate);
    var layers = new[]
    {
        _lap.NN.CreateTrainer(encoder, layerTemplate),
        _lap.NN.CreateTrainer(decoder, layerTemplate)
    };

    var errorMetric = ErrorMetricType.RMSE.Create();
    using (var trainer = _lap.NN.CreateBatchTrainer(layers))
    {
        // training rate 0.03, batch size 32, two epochs
        var trainingContext = _lap.NN.CreateTrainingContext(errorMetric, 0.03f, 32);
        trainer.Train(trainingData, 2, trainingContext);
    }
}
/// <summary>
/// Convenience function to train a vanilla feed forward neural network
/// </summary>
/// <param name="trainingContext">The training context to use</param>
/// <param name="lap">Linear algebra provider</param>
/// <param name="trainingData">Training data provider</param>
/// <param name="testData">Test data provider</param>
/// <param name="layerDescriptor">The layer descriptor</param>
/// <param name="hiddenLayerSize">The size of the single hidden layer</param>
/// <param name="numEpochs">Number of epochs to train for</param>
/// <returns>A trained feed forward model</returns>
public static FeedForwardNetwork TrainNeuralNetwork(
    this ITrainingContext trainingContext,
    ILinearAlgebraProvider lap,
    ITrainingDataProvider trainingData,
    ITrainingDataProvider testData,
    LayerDescriptor layerDescriptor,
    int hiddenLayerSize,
    int numEpochs
)
{
    Console.WriteLine($"Training a {trainingData.InputSize}x{hiddenLayerSize}x{trainingData.OutputSize} neural network...");

    FeedForwardNetwork bestModel = null;
    using (var trainer = lap.NN.CreateBatchTrainer(layerDescriptor, trainingData.InputSize, hiddenLayerSize, trainingData.OutputSize))
    {
        var bestScore = 0f;
        trainingContext.EpochComplete += context =>
        {
            // evaluate the current network against the test set after every epoch
            var score = trainer
                .Execute(testData)
                .Select(d => trainingContext.ErrorMetric.Compute(d.Output, d.ExpectedOutput))
                .Average();

            // NOTE(review): a larger value is treated as better here, so the metric is
            // assumed to produce a score rather than an error — confirm for error-style metrics
            var improved = score > bestScore;
            if (improved)
            {
                bestScore = score;
                bestModel = trainer.NetworkInfo;
            }
            trainingContext.WriteScore(score, trainingContext.ErrorMetric.DisplayAsPercentage, improved);
        };
        trainer.Train(trainingData, numEpochs, trainingContext);
    }
    return bestModel;
}
/// <summary>
/// Trains a two layer convolutional network on a 1000-sample subset of the MNIST digit data.
/// </summary>
/// <param name="dataFilesPath">Path to a directory containing the four extracted MNIST data files</param>
public static void MNISTConvolutional(string dataFilesPath)
{
    Console.Write("Loading training data...");
    var trainingData = Mnist.Load(dataFilesPath + "train-labels.idx1-ubyte", dataFilesPath + "train-images.idx3-ubyte");
    var testData = Mnist.Load(dataFilesPath + "t10k-labels.idx1-ubyte", dataFilesPath + "t10k-images.idx3-ubyte");
    Console.WriteLine($"done - found {trainingData.Count} training samples and {testData.Count} test samples");

    // 3x3 filters, stride 1, padding 1, four filters per layer
    var convolutionDescriptor = new ConvolutionDescriptor(0.1f)
    {
        Stride = 1,
        Padding = 1,
        FilterDepth = 4,
        FilterHeight = 3,
        FilterWidth = 3,
        WeightInitialisation = WeightInitialisationType.Xavier,
        WeightUpdate = WeightUpdateType.RMSprop,
        Activation = ActivationType.LeakyRelu
    };

    // training hyper parameters
    const int BATCH_SIZE = 128, NUM_EPOCHS = 2;
    const float TRAINING_RATE = 0.03f;
    var errorMetric = ErrorMetricType.OneHot.Create();
    var layerTemplate = new LayerDescriptor(0.1f)
    {
        WeightUpdate = WeightUpdateType.RMSprop,
        Activation = ActivationType.LeakyRelu
    };

    using (var lap = Provider.CreateLinearAlgebra(false))
    {
        // only a subset of the data is used (1000 training / 100 test samples)
        var trainingSamples = trainingData.Shuffle(0).Take(1000).Select(d => d.AsVolume).Select(d => Tuple.Create(d.AsTensor(lap), d.ExpectedOutput)).ToList();
        var testSamples = testData.Shuffle(0).Take(100).Select(d => d.AsVolume).Select(d => Tuple.Create(d.AsTensor(lap), d.ExpectedOutput)).ToList();

        // create a network with two convolutional layers
        // the first takes the input image of depth 1
        // and the second expects a tensor with depth equal to the output of the first layer
        var layer = new IConvolutionalLayer[] {
            lap.NN.CreateConvolutionalLayer(convolutionDescriptor, 1, 28),
            lap.NN.CreateConvolutionalLayer(convolutionDescriptor, convolutionDescriptor.FilterDepth, 28)
        };
        var trainingDataProvider = lap.NN.CreateConvolutionalTrainingProvider(convolutionDescriptor, trainingSamples, layer, true);
        var testDataProvider = lap.NN.CreateConvolutionalTrainingProvider(convolutionDescriptor, testSamples, layer, false);

        using (var trainer = lap.NN.CreateBatchTrainer(layerTemplate, trainingDataProvider.InputSize, 128, trainingDataProvider.OutputSize))
        {
            var trainingContext = lap.NN.CreateTrainingContext(errorMetric, TRAINING_RATE, BATCH_SIZE);
            // report the score against the test set after each epoch
            trainingContext.EpochComplete += c =>
            {
                var output = trainer.Execute(testDataProvider.TrainingDataProvider);
                var testError = output.Select(d => errorMetric.Compute(d.Output, d.ExpectedOutput)).Average();
                trainingContext.WriteScore(testError, errorMetric.DisplayAsPercentage);
            };
            // NOTE(review): these rate changes are scheduled at epochs 50/100/150 but
            // NUM_EPOCHS is 2, so they never fire — confirm intent
            trainingContext.ScheduleTrainingRateChange(50, TRAINING_RATE / 3f);
            trainingContext.ScheduleTrainingRateChange(100, TRAINING_RATE / 9f);
            trainingContext.ScheduleTrainingRateChange(150, TRAINING_RATE / 27f);
            trainer.Train(trainingDataProvider.TrainingDataProvider, NUM_EPOCHS, trainingContext);
        }

        // the convolutional layers are not owned by the batch trainer, so dispose them explicitly
        foreach (var item in layer)
        {
            item.Dispose();
        }
    }
}
/// <summary>
/// Trains a neural net on the MNIST database (digit recognition)
/// The data files can be downloaded from http://yann.lecun.com/exdb/mnist/
/// </summary>
/// <param name="dataFilesPath">The path to a directory with the four extracted data files</param>
/// <param name="outputModelPath">File path passed to the feed forward manager — presumably where the model is written; verify</param>
public static void MNIST(string dataFilesPath, string outputModelPath)
{
    // neural network hyper parameters
    const int HIDDEN_SIZE = 1024, BATCH_SIZE = 128, NUM_EPOCHS = 40;
    const float TRAINING_RATE = 0.03f;
    var errorMetric = ErrorMetricType.OneHot.Create();
    var layerTemplate = new LayerDescriptor(0f)
    {
        WeightUpdate = WeightUpdateType.RMSprop,
        Activation = ActivationType.LeakyRelu
    };

    Console.Write("Loading training data...");
    var allTrainingImages = Mnist.Load(dataFilesPath + "train-labels.idx1-ubyte", dataFilesPath + "train-images.idx3-ubyte");
    var allTestImages = Mnist.Load(dataFilesPath + "t10k-labels.idx1-ubyte", dataFilesPath + "t10k-images.idx3-ubyte");
    Console.WriteLine("done");

    Console.WriteLine("Starting training...");
    using (var lap = GPUProvider.CreateLinearAlgebra())
    {
        // wrap the raw samples in vector based training data providers
        var trainingProvider = lap.NN.CreateTrainingDataProvider(allTrainingImages.Select(d => d.Sample).ToList());
        var testProvider = lap.NN.CreateTrainingDataProvider(allTestImages.Select(d => d.Sample).ToList());

        using (var trainer = lap.NN.CreateBatchTrainer(layerTemplate, Mnist.INPUT_SIZE, HIDDEN_SIZE, Mnist.OUTPUT_SIZE))
        {
            var manager = lap.NN.CreateFeedForwardManager(trainer, outputModelPath, testProvider);
            var context = lap.NN.CreateTrainingContext(errorMetric, TRAINING_RATE, BATCH_SIZE);
            // drop the training rate to a third half way through training
            context.ScheduleTrainingRateChange(NUM_EPOCHS / 2, TRAINING_RATE / 3);
            manager.Train(trainingProvider, NUM_EPOCHS, context);
        }
    }
}
/// <summary>
/// Creates a layer whose tile grid matches the dimensions of the supplied descriptor.
/// </summary>
/// <param name="descriptor">Descriptor whose Tiles grid defines this layer's size</param>
public Layer(LayerDescriptor descriptor)
{
    _layerDescriptor = descriptor;
    // BUGFIX: the second dimension previously reused GetLength(0), producing a square
    // grid regardless of the descriptor's actual height; use GetLength(1) for dimension 1
    _tiles = new Tile[descriptor.Tiles.GetLength(0), descriptor.Tiles.GetLength(1)];
    _playerCollidingTiles = new Dictionary<Player, List<Tile>>(); // removed stray empty statement (";;")
    _collisionDescriptors = new Dictionary<Vector, CollisionDescriptor>();
    _mapObjects = new List<MapObject>();
}
/// <summary>
/// A standard feed forward layer: a weight matrix plus a bias vector,
/// both filled from the supplied weight initialisation strategy.
/// </summary>
public Standard(ILinearAlgebraProvider lap, int inputSize, int outputSize, LayerDescriptor init, IActivationFunction activation, IWeightInitialisation weightInit)
{
    _descriptor = init;
    _activation = activation;

    // bias: one entry per output neuron
    _bias = lap.Create(outputSize, index => weightInit.GetBias());
    // weights: an inputSize x outputSize matrix
    _weight = lap.Create(inputSize, outputSize, (row, column) => weightInit.GetWeight(inputSize, outputSize, row, column));
}
/// <summary>
/// Creates a layer bound to a map and immediately loads its tile data.
/// </summary>
/// <param name="map">The map this layer belongs to</param>
/// <param name="descriptor">Descriptor used by the chained constructor to size the layer</param>
public Layer(Map map, LayerDescriptor<TileDescriptor<SpriteInfo>> descriptor)
    : this(descriptor)
{
    this.Map = map;
    _playerCollidingTiles = new Dictionary<Player, List<Tile>>(); // removed stray empty statement (";;")
    _collisionDescriptors = new Dictionary<Vector, CollisionBody>();
    _mapObjects = new List<MapObject>();
    this.LoadData();
}
/// <summary>
/// Generates a layer of the given material whose per-cell thickness varies
/// between minThickness and maxThickness following Perlin noise.
/// </summary>
/// <param name="material">Material name stored on the layer</param>
/// <param name="minThickness">Minimum thickness of any cell</param>
/// <param name="maxThickness">Upper bound for cell thickness</param>
private LayerDescriptor generateLayer(string material, int minThickness, int maxThickness)
{
    LayerDescriptor layer = new LayerDescriptor(material, config.areaSize);
    layer.material = material;

    // random offsets so each generated layer samples a different region of the noise field
    xOffset = UnityEngine.Random.value * 10000;
    yOffset = UnityEngine.Random.value * 10000;

    int noiseMod = maxThickness - minThickness;
    // BUGFIX: the cast previously bound to PerlinNoise(...) alone, truncating the
    // [0, 1) noise value to 0 before multiplying by noiseMod, so every cell collapsed
    // to minThickness; the noise must be scaled first, then truncated
    bounds.iterate((x, y) => layer.layer[x, y] = minThickness + (int)(Mathf.PerlinNoise(xOffset + x * 0.05f, yOffset + y * 0.05f) * noiseMod));
    return layer;
}
/// <summary>
/// Trains a bidirectional recurrent network on binary integer addition, then
/// replays the training sequences through the network recreated from its
/// serialised parameters.
/// </summary>
public void BidirectionalAddition()
{
    // ten addition problems, each a sequence of input/output bit pairs
    var trainingSet = BinaryIntegers.Addition(10, false).Select(l => l.ToArray()).ToList();
    const int HIDDEN_SIZE = 16, NUM_EPOCHS = 100, BATCH_SIZE = 32;
    var errorMetric = ErrorMetricType.BinaryClassification.Create();
    var layerTemplate = new LayerDescriptor(0.1f)
    {
        Activation = ActivationType.LeakyRelu,
        WeightInitialisation = WeightInitialisationType.Gaussian,
        DecayRate = 0.99f
    };
    // NOTE(review): the clone already carries gaussian initialisation from the template,
    // so the reassignment below is redundant
    var recurrentTemplate = layerTemplate.Clone();
    recurrentTemplate.WeightInitialisation = WeightInitialisationType.Gaussian;

    var trainingDataProvider = _lap.NN.CreateSequentialTrainingDataProvider(trainingSet);

    // one bidirectional recurrent layer (forward + backward direction), then a
    // feed forward output layer sized for a 2 x HIDDEN_SIZE input
    var layers = new INeuralNetworkBidirectionalLayer[] {
        _lap.NN.CreateBidirectionalLayer(
            _lap.NN.CreateSimpleRecurrentLayer(trainingDataProvider.InputSize, HIDDEN_SIZE, recurrentTemplate),
            _lap.NN.CreateSimpleRecurrentLayer(trainingDataProvider.InputSize, HIDDEN_SIZE, recurrentTemplate)
        ),
        _lap.NN.CreateBidirectionalLayer(_lap.NN.CreateFeedForwardRecurrentLayer(HIDDEN_SIZE * 2, trainingDataProvider.OutputSize, layerTemplate))
    };

    BidirectionalNetwork networkData = null;
    using (var trainer = _lap.NN.CreateBidirectionalBatchTrainer(layers))
    {
        // zero initialised starting memory for both directions
        var forwardMemory = Enumerable.Range(0, HIDDEN_SIZE).Select(i => 0f).ToArray();
        var backwardMemory = Enumerable.Range(0, HIDDEN_SIZE).Select(i => 0f).ToArray();
        var trainingContext = _lap.NN.CreateTrainingContext(errorMetric, 0.1f, BATCH_SIZE);
        trainingContext.RecurrentEpochComplete += (tc, rtc) =>
        {
            Debug.WriteLine(tc.LastTrainingError);
        };
        trainer.Train(trainingDataProvider, forwardMemory, backwardMemory, NUM_EPOCHS, _lap.NN.CreateRecurrentTrainingContext(trainingContext));

        // capture the trained parameters together with the memory arrays
        // (which the trainer may have updated in place — TODO confirm)
        networkData = trainer.NetworkInfo;
        networkData.ForwardMemory = new FloatArray
        {
            Data = forwardMemory
        };
        networkData.BackwardMemory = new FloatArray
        {
            Data = backwardMemory
        };
    }

    // recreate the network from the serialised data and run each training sequence through it
    // NOTE(review): the execution results are never inspected or asserted — confirm intent
    var network = _lap.NN.CreateBidirectional(networkData);
    foreach (var sequence in trainingSet)
    {
        var result = network.Execute(sequence.Select(d => d.Input).ToList());
    }
}
/// <summary>
/// Trains a small feed forward network to learn the XOR function, then checks
/// both the trainer's own output and a network recreated from the serialised
/// parameters against the expected truth table.
/// </summary>
public void XOR()
{
    // no regularisation, sigmoid activations throughout
    var layerTemplate = new LayerDescriptor(0f)
    {
        Activation = ActivationType.Sigmoid
    };

    // the XOR truth table:
    // 0 0 => 0
    // 1 0 => 1
    // 0 1 => 1
    // 1 1 => 0
    var testDataProvider = _lap.NN.CreateTrainingDataProvider(XorData.Get());

    // batch trainer with a single hidden layer of size 4
    using (var trainer = _lap.NN.CreateBatchTrainer(layerTemplate, testDataProvider.InputSize, 4, testDataProvider.OutputSize))
    {
        // training rate 0.03 and batch size 2
        var trainingContext = _lap.NN.CreateTrainingContext(ErrorMetricType.OneHot, 0.03f, 2);

        // train the network!
        trainer.Train(testDataProvider, 1000, trainingContext);

        // every rounded prediction from the trainer should match the expected output
        foreach (var result in trainer.Execute(testDataProvider))
        {
            var predictedResult = Convert.ToSingle(Math.Round(result.Output[0]));
            FloatingPointHelper.AssertEqual(predictedResult, result.ExpectedOutput[0]);
        }

        // serialise the network parameters, rebuild a fresh network from them,
        // and confirm the rebuilt network still reproduces XOR
        var networkData = trainer.NetworkInfo;
        var network = _lap.NN.CreateFeedForward(networkData);
        foreach (var sample in XorData.Get())
        {
            var output = network.Execute(sample.Input).AsIndexable();
            var predictedResult = Convert.ToSingle(Math.Round(output[0]));
            FloatingPointHelper.AssertEqual(predictedResult, sample.Output[0]);
        }
    }
}
/// <summary>
/// LSTM recurrent layer. Creates eight weight layers: four fed from the input
/// (_w*) and four fed from the previous hidden state (_u*); by conventional LSTM
/// naming the c/i/f/o suffixes correspond to the cell, input, forget and output
/// gates — TODO confirm against the forward pass.
/// </summary>
public Lstm(int inputSize, int hiddenSize, INeuralNetworkFactory factory, LayerDescriptor template)
{
    _lap = factory.LinearAlgebraProvider;
    _activation = factory.GetActivation(template.Activation);

    // input -> hidden weights
    _wc = CreateLayer(inputSize, hiddenSize, factory, template);
    _wi = CreateLayer(inputSize, hiddenSize, factory, template);
    _wf = CreateLayer(inputSize, hiddenSize, factory, template);
    _wo = CreateLayer(inputSize, hiddenSize, factory, template);

    // hidden -> hidden (recurrent) weights
    _uc = CreateLayer(hiddenSize, hiddenSize, factory, template);
    _ui = CreateLayer(hiddenSize, hiddenSize, factory, template);
    _uf = CreateLayer(hiddenSize, hiddenSize, factory, template);
    _uo = CreateLayer(hiddenSize, hiddenSize, factory, template);
}
// adds layers blocks beneath currentHeight values:
// for every column with remaining height, writes the layer's material downward
// from currentHeight[x, y], lowering the column cursor after each block
private void fillLayer(LayerDescriptor layer)
{
    bounds.iterate((x, y) =>
    {
        if (currentHeight[x, y] > 0)
        {
            // NOTE(review): "<=" writes layer.layer[x, y] + 1 blocks per column —
            // confirm whether the thickness is intended to be inclusive
            for (int z = 0; z <= layer.layer[x, y]; z++)
            {
                // guard: the column cursor can reach 0 mid-loop; stop placing below it
                if (currentHeight[x, y] >= 0)
                {
                    container.localMap.blockType.setRaw(x, y, currentHeight[x, y], BlockTypeEnum.WALL.CODE, layer.material);
                    currentHeight[x, y]--;
                }
            }
        }
    });
}
/// <summary>
/// Mounts a plugin-provided settings file as a user-injected settings layer.
/// </summary>
private void MountSettingsFile(Lifetime lifetime, string pluginId, FileSystemPath path, UserFriendlySettingsLayer.Identity hostId, IThreading threading, IFileSystemTracker filetracker, FileSettingsStorageBehavior behavior, UserInjectedSettingsLayers userInjectedSettingsLayers)
{
    // the layer id is derived from the plugin and file name, keeping it stable across sessions
    var layerId = string.Format("extension::{0}-{1}", pluginId, path.Name);
    var storagePath = new Property<FileSystemPath>(lifetime, "InjectedFileStoragePath", path);
    var serialization = CreateXmlFileSettingsStorage(lifetime, threading, filetracker, behavior, layerId, storagePath);
    var descriptor = new LayerDescriptor(lifetime, hostId, new LayerId(layerId), serialization.Storage, MountPath.Default, () => { });

    // metadata displayed in the settings layers UI; the layer itself is marked read-only
    descriptor.InitialMetadata.Set(UserFriendlySettingsLayers.DisplayName, string.Format("{0} » {1}", pluginId, path.NameWithoutExtension));
    descriptor.InitialMetadata.Set(UserFriendlySettingsLayers.Origin, string.Format("Published by plugin: {0}", pluginId));
    descriptor.InitialMetadata.Set(UserFriendlySettingsLayers.DiskFilePath, path);
    descriptor.InitialMetadata.Set(IsNonUserEditable, true);

    userInjectedSettingsLayers.RegisterUserInjectedLayer(lifetime, descriptor);
}
/// <summary>
/// Builds the runtime tile grid from a layer descriptor; cells without a
/// descriptor tile receive a default tile positioned at the cell's pixel coordinates.
/// </summary>
private Layer(LayerDescriptor<TileDescriptor<SpriteInfo>> baseLayer)
{
    this.Name = baseLayer.Name;
    this.LayerIndex = baseLayer.LayerIndex;

    int width = baseLayer.Tiles.GetLength(0);
    int height = baseLayer.Tiles.GetLength(1);
    this.Tiles = new Tile[width, height];

    for (int col = 0; col < width; col++)
    {
        for (int row = 0; row < height; row++)
        {
            var descriptorTile = baseLayer.Tiles[col, row];
            this.Tiles[col, row] = descriptorTile != null
                ? new Tile(this, descriptorTile)
                : new Tile(new Vector(col * EngineConstants.TILE_SIZE, row * EngineConstants.TILE_SIZE));
        }
    }
}
/// <summary>
/// Registers a plugin-supplied settings file as a user-injected settings layer.
/// </summary>
private void MountSettingsFile(Lifetime lifetime, string pluginId, FileSystemPath path, UserFriendlySettingsLayer.Identity hostId, IThreading threading, IFileSystemTracker filetracker, FileSettingsStorageBehavior behavior, UserInjectedSettingsLayers userInjectedSettingsLayers)
{
    // stable identifier derived from the plugin and the file name
    var id = string.Format("extension::{0}-{1}", pluginId, path.Name);
    var persistentId = new LayerId(id);

    var pathAsProperty = new Property<FileSystemPath>(lifetime, "InjectedFileStoragePath", path);
    var serialization = CreateXmlFileSettingsStorage(lifetime, threading, filetracker, behavior, id, pathAsProperty);
    var descriptor = new LayerDescriptor(lifetime, hostId, persistentId, serialization.Storage, MountPath.Default, () => { });

    var metadata = descriptor.InitialMetadata;
    metadata.Set(UserFriendlySettingsLayers.DisplayName, string.Format("{0} » {1}", pluginId, path.NameWithoutExtension));
    metadata.Set(UserFriendlySettingsLayers.Origin, string.Format("Published by plugin: {0}", pluginId));
    metadata.Set(UserFriendlySettingsLayers.DiskFilePath, path);
    // plugin-published layers should not be edited by the user directly
    metadata.Set(IsNonUserEditable, true);

    userInjectedSettingsLayers.RegisterUserInjectedLayer(lifetime, descriptor);
}
/// <summary>
/// Trains a convolutional network to distinguish the digits 0 and 1 using a
/// reduced subset of the MNIST data, then evaluates the serialised network.
/// </summary>
/// <param name="dataFilesPath">Path to a directory containing the four extracted MNIST data files</param>
public static void ReducedMNIST(string dataFilesPath)
{
    Console.Write("Loading training data...");
    var trainingData = Mnist.Load(dataFilesPath + "train-labels.idx1-ubyte", dataFilesPath + "train-images.idx3-ubyte");
    var testData = Mnist.Load(dataFilesPath + "t10k-labels.idx1-ubyte", dataFilesPath + "t10k-images.idx3-ubyte");
    Console.WriteLine("done");

    // restrict the problem to binary classification (labels 0 and 1 only)
    var onesAndZeroesTraining = trainingData.Where(s => s.Label == 0 || s.Label == 1).Shuffle(0).Take(1000).ToList();
    var onesAndZeroesTest = testData.Where(s => s.Label == 0 || s.Label == 1).Shuffle(0).Take(100).ToList();

    using (var lap = GPUProvider.CreateLinearAlgebra(false))
    {
        // 3x3 filters, stride 1, padding 1, four filters
        var convolutionDescriptor = new ConvolutionDescriptor(0.1f)
        {
            Stride = 1,
            Padding = 1,
            FilterDepth = 4,
            FilterHeight = 3,
            FilterWidth = 3,
            WeightInitialisation = WeightInitialisationType.Xavier,
            WeightUpdate = WeightUpdateType.RMSprop,
            Activation = ActivationType.LeakyRelu
        };
        const int BATCH_SIZE = 128, NUM_EPOCHS = 2, IMAGE_WIDTH = 28;
        const float TRAINING_RATE = 0.03f;
        var errorMetric = ErrorMetricType.OneHot.Create();
        var layerTemplate = new LayerDescriptor(0.1f)
        {
            WeightUpdate = WeightUpdateType.RMSprop,
            Activation = ActivationType.LeakyRelu
        };

        var trainingSamples = onesAndZeroesTraining.Select(d => d.AsVolume).Select(d => Tuple.Create(d.AsTensor(lap), d.ExpectedOutput)).ToList();
        var testSamples = onesAndZeroesTest.Select(d => d.AsVolume).Select(d => Tuple.Create(d.AsTensor(lap), d.ExpectedOutput)).ToList();

        // create a network with a single convolutional layer followed by a max pooling layer
        var convolutionalLayer = new IConvolutionalLayer[] {
            lap.NN.CreateConvolutionalLayer(convolutionDescriptor, 1, IMAGE_WIDTH, false),
            lap.NN.CreateMaxPoolingLayer(2, 2, 2)
        };
        var trainingDataProvider = lap.NN.CreateConvolutionalTrainingProvider(convolutionDescriptor, trainingSamples, convolutionalLayer, true);
        var testDataProvider = lap.NN.CreateConvolutionalTrainingProvider(convolutionDescriptor, testSamples, convolutionalLayer, false);

        ConvolutionalNetwork network;
        // NOTE(review): the input size is hard coded to 784 instead of using
        // trainingDataProvider.InputSize as the sibling samples do — confirm this
        // matches the convolution + pooling output size
        using (var trainer = lap.NN.CreateBatchTrainer(layerTemplate, 784, trainingDataProvider.OutputSize))
        {
            var trainingContext = lap.NN.CreateTrainingContext(errorMetric, TRAINING_RATE, BATCH_SIZE);
            // report the score against the test set after each epoch
            trainingContext.EpochComplete += c =>
            {
                var output = trainer.Execute(testDataProvider.TrainingDataProvider);
                var testError = output.Select(d => errorMetric.Compute(d.Output, d.ExpectedOutput)).Average();
                trainingContext.WriteScore(testError, errorMetric.DisplayAsPercentage);
            };
            trainer.Train(trainingDataProvider.TrainingDataProvider, NUM_EPOCHS, trainingContext);
            network = trainingDataProvider.GetCurrentNetwork(trainer);
        }

        // the layers and sample tensors are not owned by the trainer, so release them explicitly
        foreach (var layer in convolutionalLayer)
        {
            layer.Dispose();
        }
        foreach (var item in trainingSamples)
        {
            item.Item1.Dispose();
        }
        foreach (var item in testSamples)
        {
            item.Item1.Dispose();
        }

        // run the serialised network over the test set and count correct predictions
        int correct = 0, total = 0;
        using (var execution = lap.NN.CreateConvolutional(network))
        {
            foreach (var item in onesAndZeroesTest)
            {
                using (var tensor = item.AsVolume.AsTensor(lap))
                {
                    using (var output = execution.Execute(tensor))
                    {
                        var maxIndex = output.MaximumIndex();
                        if (maxIndex == item.Label)
                        {
                            ++correct;
                        }
                        ++total;
                    }
                }
            }
        }
        Console.WriteLine($"Execution results: {(double)correct / total:P0} correct");
    }
}
/// <summary>
/// Simple recurrent layer: one weight layer applied to the current input
/// (_input) and one applied to the previous hidden state (_memory).
/// </summary>
public SimpleRecurrent(int inputSize, int hiddenSize, INeuralNetworkFactory factory, LayerDescriptor template)
{
    _activation = factory.GetActivation(template.Activation);
    // input -> hidden and hidden -> hidden (recurrent) weights
    _input = CreateLayer(inputSize, hiddenSize, factory, template);
    _memory = CreateLayer(hiddenSize, hiddenSize, factory, template);
}
/// <summary>
/// Trains a simple recurrent network to add pairs of integers presented as bit
/// sequences, then prints its predictions on freshly generated additions.
/// </summary>
public static void IntegerAddition()
{
    // generate 1000 random integer additions
    var dataSet = BinaryIntegers.Addition(1000, false)
        .Select(l => l.ToArray())
        .ToList();

    // split the numbers into training and test sets (80/20)
    int split = Convert.ToInt32(dataSet.Count * 0.8);
    var trainingData = dataSet.Take(split).ToList();
    var testData = dataSet.Skip(split).ToList();

    // neural network hyper parameters
    const int HIDDEN_SIZE = 32, NUM_EPOCHS = 25, BATCH_SIZE = 16;
    const float TRAINING_RATE = 0.001f;
    var errorMetric = ErrorMetricType.BinaryClassification.Create();
    var layerTemplate = new LayerDescriptor(0.3f)
    {
        Activation = ActivationType.Relu,
        WeightInitialisation = WeightInitialisationType.Xavier,
        WeightUpdate = WeightUpdateType.RMSprop
    };
    // the recurrent layer uses gaussian rather than xavier weight initialisation
    var recurrentTemplate = layerTemplate.Clone();
    recurrentTemplate.WeightInitialisation = WeightInitialisationType.Gaussian;

    using (var lap = Provider.CreateLinearAlgebra())
    {
        // create training data providers
        var trainingDataProvider = lap.NN.CreateSequentialTrainingDataProvider(trainingData);
        var testDataProvider = lap.NN.CreateSequentialTrainingDataProvider(testData);

        // one simple recurrent layer feeding a feed forward output layer
        var layers = new INeuralNetworkRecurrentLayer[] {
            lap.NN.CreateSimpleRecurrentLayer(trainingDataProvider.InputSize, HIDDEN_SIZE, recurrentTemplate),
            lap.NN.CreateFeedForwardRecurrentLayer(HIDDEN_SIZE, trainingDataProvider.OutputSize, layerTemplate)
        };

        // train the network
        RecurrentNetwork networkData = null;
        using (var trainer = lap.NN.CreateRecurrentBatchTrainer(layers))
        {
            // zero initialised starting hidden state
            var memory = Enumerable.Range(0, HIDDEN_SIZE).Select(i => 0f).ToArray();
            var trainingContext = lap.NN.CreateTrainingContext(errorMetric, TRAINING_RATE, BATCH_SIZE);
            trainingContext.RecurrentEpochComplete += (tc, rtc) =>
            {
                // score the network against the held out test sequences after each epoch
                var testError = trainer.Execute(testDataProvider, memory, rtc).SelectMany(s => s.Select(d => errorMetric.Compute(d.Output, d.ExpectedOutput))).Average();
                Console.WriteLine($"Epoch {tc.CurrentEpoch} - score: {testError:P}");
            };
            trainer.Train(trainingDataProvider, memory, NUM_EPOCHS, lap.NN.CreateRecurrentTrainingContext(trainingContext));
            // capture the trained parameters along with the memory array
            networkData = trainer.NetworkInfo;
            networkData.Memory = new FloatArray
            {
                Data = memory
            };
        }

        // evaluate the network on some freshly generated data
        var network = lap.NN.CreateRecurrent(networkData);
        foreach (var sequence in BinaryIntegers.Addition(8, true))
        {
            var result = network.Execute(sequence.Select(d => d.Input).ToList());
            Console.Write("First: ");
            foreach (var item in sequence)
            {
                _WriteBinary(item.Input[0]);
            }
            Console.WriteLine();
            Console.Write("Second: ");
            foreach (var item in sequence)
            {
                _WriteBinary(item.Input[1]);
            }
            Console.WriteLine();
            Console.WriteLine(" --------------------------------");
            Console.Write("Expected: ");
            foreach (var item in sequence)
            {
                _WriteBinary(item.Output[0]);
            }
            Console.WriteLine();
            Console.Write("Predicted: ");
            foreach (var item in result)
            {
                _WriteBinary(item.Output[0]);
            }
            Console.WriteLine();
            Console.WriteLine();
        }
    }
}
/// <summary>
/// Loads a map descriptor (tile layers plus per-layer map object records) from
/// its binary map file.
/// </summary>
/// <param name="arguments">Expected to be MapDataLoaderArguments supplying the map name</param>
/// <returns>The deserialised map descriptor</returns>
public MapDescriptor Load(IDataManagerArguments arguments)
{
    MapDescriptor map = null;
    string path = Core.EngineConstants.FILEPATH_MAPS + (arguments as MapDataLoaderArguments)?.Name + EngineConstants.MAP_FILE_EXT;
    using (var fileStream = new FileStream(path, FileMode.Open))
    {
        using (var bR = new BinaryReader(fileStream))
        {
            // Load the tileset information
            int tilesetCount = bR.ReadInt32();
            for (int i = 0; i < tilesetCount; i++)
            {
                // We can throw this information away as it is used only in the editor suite.
                string tilesetPath = bR.ReadString();
            }

            string name = bR.ReadString();
            var dimensions = new Vector(bR.ReadInt32(), bR.ReadInt32());
            map = new MapDescriptor(dimensions, name)
            {
                Dark = bR.ReadBoolean()
            };
            map.Bounds = new Rect(0, 0, (int)map.Dimensions.X, (int)map.Dimensions.Y);

            // each layer is stored as its name, z index, then a tile grid
            int layerCount = bR.ReadInt32();
            for (int i = 0; i < layerCount; i++)
            {
                string layerName = bR.ReadString();
                int lIndex = bR.ReadInt32();
                var layer = new LayerDescriptor(map.Dimensions, layerName, lIndex);
                for (int x = 0; x < layer.Tiles.GetLength(0); x++)
                {
                    for (int y = 0; y < layer.Tiles.GetLength(1); y++)
                    {
                        // a leading flag marks whether the cell contains a tile at all
                        if (bR.ReadBoolean())
                        {
                            layer.Tiles[x, y] = new TileDescriptor(new Vector(x * EngineConstants.TILE_WIDTH, y * EngineConstants.TILE_HEIGHT))
                            {
                                Attribute = (TileAttributes)bR.ReadByte()
                            };
                            int attributeDataLength = bR.ReadInt32();
                            byte[] attributeData = bR.ReadBytes(attributeDataLength);
                            layer.Tiles[x, y].AttributeData = AttributeData.Deserialize(attributeData);
                            // a second flag marks whether the tile carries sprite information
                            if (bR.ReadBoolean())
                            {
                                layer.Tiles[x, y].Animated = bR.ReadBoolean();
                                layer.Tiles[x, y].LightSource = bR.ReadBoolean();
                                string spriteName = bR.ReadString();
                                float zIndex = bR.ReadSingle(); // We can throw this away
                                layer.Tiles[x, y].SpriteInfo = new SpriteInfo(spriteName)
                                {
                                    Transform =
                                    {
                                        Position = new Vector(x * EngineConstants.TILE_WIDTH, y * EngineConstants.TILE_HEIGHT),
                                        Color = new Color(bR.ReadByte(), bR.ReadByte(), bR.ReadByte(), bR.ReadByte()),
                                        Rect = new Rect(bR.ReadInt32(), bR.ReadInt32(), bR.ReadInt32(), bR.ReadInt32())
                                    }
                                };
                                layer.Tiles[x, y].FrameCount = bR.ReadInt32();
                            }
                        }
                    }
                }

                // map objects are stored per layer
                // NOTE(review): each map object is fully deserialised here but never attached
                // to the map or the layer — confirm whether discarding them is intentional
                int mapObjectCount = bR.ReadInt32();
                for (int mI = 0; mI < mapObjectCount; mI++)
                {
                    var mapObject = new MapObjectDescriptor()
                    {
                        Position = new Vector(bR.ReadSingle(), bR.ReadSingle())
                    };
                    if (bR.ReadBoolean())
                    {
                        string texturePath = bR.ReadString();
                        mapObject.Sprite = new SpriteInfo(texturePath)
                        {
                            Transform =
                            {
                                Rect = new Rect(bR.ReadInt32(), bR.ReadInt32(), bR.ReadInt32(), bR.ReadInt32())
                            }
                        };
                    }
                    mapObject.Animated = bR.ReadBoolean();
                    mapObject.FrameTime = bR.ReadInt32();
                    string scriptPath = bR.ReadString();
                    var lightSource = bR.ReadBoolean();
                    var lightRadius = bR.ReadSingle();
                    var lightColor = new Color(bR.ReadByte(), bR.ReadByte(), bR.ReadByte(), bR.ReadByte());
                    if (lightSource)
                    {
                        mapObject.LightInformation = new LightInformation()
                        {
                            Radius = lightRadius,
                            Color = lightColor
                        };
                    }
                }
                map.Layers.Add(layerName, layer);
            }
        }
    }
    return (map);
}
/// <summary>
/// Loads a generic map descriptor (tile layers only; map object parsing is
/// currently disabled) from its binary map file.
/// </summary>
/// <param name="arguments">Expected to be ContentFileDataLoaderArguments supplying the file name</param>
/// <returns>The deserialised map descriptor</returns>
public override MapDescriptor<LayerDescriptor<TileDescriptor<SpriteInfo>>> Load(IDataManagerArguments arguments)
{
    MapDescriptor<LayerDescriptor<TileDescriptor<SpriteInfo>>> map = null;
    var mapArguments = (arguments as ContentFileDataLoaderArguments);
    using (var fileStream = new FileStream(this.RootPath + mapArguments.FileName + EngineConstants.MAP_FILE_EXT, FileMode.Open))
    {
        using (var bR = new BinaryReader(fileStream))
        {
            // Load the tileset information (retained so it can be copied onto the map below)
            int tilesetCount = bR.ReadInt32();
            List<string> tilesetPaths = new List<string>();
            for (int i = 0; i < tilesetCount; i++)
            {
                string tilesetPath = bR.ReadString();
                tilesetPaths.Add(tilesetPath);
            }

            string name = bR.ReadString();
            var dimensions = new Vector(bR.ReadInt32(), bR.ReadInt32());
            map = new MapDescriptor<LayerDescriptor<TileDescriptor<SpriteInfo>>>(dimensions, name)
            {
                Dark = bR.ReadBoolean()
            };
            map.TilesetPaths.AddRange(tilesetPaths);
            map.Bounds = new Rect(0, 0, (int)map.Dimensions.X, (int)map.Dimensions.Y);

            // each layer is stored as its name, z index, then a tile grid
            int layerCount = bR.ReadInt32();
            for (int i = 0; i < layerCount; i++)
            {
                string layerName = bR.ReadString();
                int lIndex = bR.ReadInt32();
                var layer = new LayerDescriptor<TileDescriptor<SpriteInfo>>(map.Dimensions, layerName, lIndex);
                for (int x = 0; x < layer.Tiles.GetLength(0); x++)
                {
                    for (int y = 0; y < layer.Tiles.GetLength(1); y++)
                    {
                        // a leading flag marks whether the cell contains a tile at all
                        if (bR.ReadBoolean())
                        {
                            layer.Tiles[x, y] = new TileDescriptor<SpriteInfo>(new Vector(x * EngineConstants.TILE_SIZE, y * EngineConstants.TILE_SIZE));
                            if (bR.ReadBoolean()) // Is there a valid attribute saved for this tile?
                            {
                                int attributeDataLength = bR.ReadInt32();
                                byte[] attributeData = bR.ReadBytes(attributeDataLength);
                                layer.Tiles[x, y].Attribute = TileAttribute.Deserialize(attributeData);
                            }
                            // a further flag marks whether the tile carries sprite information
                            if (bR.ReadBoolean())
                            {
                                layer.Tiles[x, y].Animated = bR.ReadBoolean();
                                layer.Tiles[x, y].LightSource = bR.ReadBoolean();
                                string spriteName = bR.ReadString();
                                float zIndex = bR.ReadSingle(); // used below as the sprite's layer depth
                                layer.Tiles[x, y].Sprite = new SpriteInfo(spriteName)
                                {
                                    Transform =
                                    {
                                        Position = new Vector(x * EngineConstants.TILE_SIZE, y * EngineConstants.TILE_SIZE),
                                        Color = new Color(bR.ReadByte(), bR.ReadByte(), bR.ReadByte(), bR.ReadByte()),
                                        Rect = new Rect(bR.ReadInt32(), bR.ReadInt32(), bR.ReadInt32(), bR.ReadInt32()),
                                        LayerDepth = zIndex
                                    }
                                };
                                layer.Tiles[x, y].FrameCount = bR.ReadInt32();
                            }
                        }
                    }
                }
                // NOTE(review): map object deserialisation is disabled; the commented code is
                // retained because re-enabling it changes the bytes this loader expects to read
                //int mapObjectCount = bR.ReadInt32();
                //for (int mI = 0; mI < mapObjectCount; mI++)
                //{
                //    var mapObject = new MapObjectDescriptor()
                //    {
                //        Position = new Vector(bR.ReadSingle(), bR.ReadSingle())
                //    };
                //    if (bR.ReadBoolean())
                //    {
                //        string texturePath = bR.ReadString();
                //        mapObject.Sprite = new SpriteInfo(texturePath)
                //        {
                //            Transform =
                //            {
                //                Rect = new Rect(bR.ReadInt32(), bR.ReadInt32(), bR.ReadInt32(), bR.ReadInt32())
                //            }
                //        };
                //    }
                //    mapObject.Animated = bR.ReadBoolean();
                //    mapObject.FrameTime = bR.ReadInt32();
                //    string scriptPath = bR.ReadString();
                //    var lightSource = bR.ReadBoolean();
                //    var lightRadius = bR.ReadSingle();
                //    var lightColor = new Color(bR.ReadByte(), bR.ReadByte(), bR.ReadByte(), bR.ReadByte());
                //    if (lightSource)
                //    {
                //        mapObject.LightInformation = new LightInformation()
                //        {
                //            Radius = lightRadius,
                //            Color = lightColor
                //        };
                //    }
                //}
                map.AddLayer(layerName, layer);
            }
        }
    }
    return (map);
}
/// <summary>
/// Creates a layer with its activation stripped (ActivationType.None) and wraps
/// it in an updater built from the unmodified template.
/// </summary>
protected internal INeuralNetworkLayerUpdater CreateLayer(int inputSize, int outputSize, INeuralNetworkFactory factory, LayerDescriptor template)
{
    // clone so the caller's template keeps its original activation setting
    var withoutActivation = template.Clone();
    withoutActivation.Activation = ActivationType.None;
    return factory.CreateUpdater(factory.CreateLayer(inputSize, outputSize, withoutActivation), template);
}
/// <summary>
/// Trains various classifiers on the Iris data set
///
/// Tutorial available at http://www.jackdermody.net/brightwire/article/Introduction_to_Bright_Wire
/// </summary>
public static void IrisClassification()
{
    // download the iris data set
    // NOTE(review): WebClient is deprecated in modern .NET — consider HttpClient
    byte[] data;
    using (var client = new WebClient())
    {
        data = client.DownloadData("https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data");
    }

    // parse the iris CSV into a data table
    var dataTable = new StreamReader(new MemoryStream(data)).ParseCSV(',');

    // the last column is the classification target ("Iris-setosa", "Iris-versicolor", or "Iris-virginica")
    var targetColumnIndex = dataTable.TargetColumnIndex = dataTable.ColumnCount - 1;

    // split the data table into training and test tables
    var split = dataTable.Split(0);

    // train and evaluate a naive bayes classifier
    var naiveBayes = split.Training.TrainNaiveBayes();
    Console.WriteLine("Naive bayes accuracy: {0:P}", split.Test
        .Classify(naiveBayes.CreateClassifier())
        .Average(d => d.Row.GetField<string>(targetColumnIndex) == d.Classification ? 1.0 : 0.0)
    );

    // train and evaluate a decision tree classifier
    var decisionTree = split.Training.TrainDecisionTree();
    Console.WriteLine("Decision tree accuracy: {0:P}", split.Test
        .Classify(decisionTree.CreateClassifier())
        .Average(d => d.Row.GetField<string>(targetColumnIndex) == d.Classification ? 1.0 : 0.0)
    );

    // train and evaluate a random forest classifier
    var randomForest = split.Training.TrainRandomForest(500);
    Console.WriteLine("Random forest accuracy: {0:P}", split.Test
        .Classify(randomForest.CreateClassifier())
        .Average(d => d.Row.GetField<string>(targetColumnIndex) == d.Classification ? 1.0 : 0.0)
    );

    // fire up some linear algebra on the CPU
    using (var lap = Provider.CreateLinearAlgebra(false))
    {
        // train and evaluate k nearest neighbours
        var knn = split.Training.TrainKNearestNeighbours();
        Console.WriteLine("K nearest neighbours accuracy: {0:P}", split.Test
            .Classify(knn.CreateClassifier(lap, 10))
            .Average(d => d.Row.GetField<string>(targetColumnIndex) == d.Classification ? 1.0 : 0.0)
        );

        // train and evaluate a mulitinomial logistic regression classifier
        var logisticRegression = split.Training.TrainMultinomialLogisticRegression(lap, 500, 0.1f);
        Console.WriteLine("Multinomial logistic regression accuracy: {0:P}", split.Test
            .Classify(logisticRegression.CreateClassifier(lap))
            .Average(d => d.Row.GetField<string>(targetColumnIndex) == d.Classification ? 1.0 : 0.0)
        );

        // convert the data tables into vector based training data providers
        var trainingData = lap.NN.CreateTrainingDataProvider(split.Training);
        var testData = lap.NN.CreateTrainingDataProvider(split.Test);

        // create a feed forward network with 8 hidden neurons
        const int BATCH_SIZE = 8, NUM_EPOCHS = 300;
        const float LEARNING_RATE = 0.03f;
        var layerTemplate = new LayerDescriptor(0.1f) // add some L2 regularisation
        {
            Activation = ActivationType.Sigmoid, // sigmoid activation function
            WeightUpdate = WeightUpdateType.RMSprop, // use rmsprop gradient descent optimisation
            WeightInitialisation = WeightInitialisationType.Xavier, // xavier weight initialisation
            LayerTrainer = LayerTrainerType.DropConnect // throw in some drop connect regularisation for fun
        };

        // the default data table -> vector conversion uses one hot encoding of the classification labels, so create a corresponding cost function
        var errorMetric = ErrorMetricType.OneHot.Create();

        // create a network trainer and evaluate against the test set after every 50 epochs
        Console.WriteLine("Training a 4x8x3 neural network...");
        using (var trainer = lap.NN.CreateBatchTrainer(layerTemplate, trainingData.InputSize, 8, trainingData.OutputSize))
        {
            var trainingContext = lap.NN.CreateTrainingContext(errorMetric, LEARNING_RATE, BATCH_SIZE);
            trainingContext.EpochComplete += c =>
            {
                if (c.CurrentEpoch % 50 == 0)
                {
                    var testError = trainer.Execute(testData).Select(d => errorMetric.Compute(d.Output, d.ExpectedOutput)).Average();
                    trainingContext.WriteScore(testError, errorMetric.DisplayAsPercentage);
                }
            };
            trainer.Train(trainingData, NUM_EPOCHS, trainingContext);
        }
        Console.WriteLine();

        // let's unload some deep learning on these flowers...
        Console.WriteLine("Training a 4x8x16x32x16x8x3 neural network...");
        using (var deepTrainer = lap.NN.CreateBatchTrainer(layerTemplate, trainingData.InputSize, 8, 16, 32, 16, 8, trainingData.OutputSize))
        {
            var trainingContext = lap.NN.CreateTrainingContext(errorMetric, LEARNING_RATE, BATCH_SIZE);
            trainingContext.EpochComplete += c =>
            {
                if (c.CurrentEpoch % 50 == 0)
                {
                    var testError = deepTrainer.Execute(testData).Select(d => errorMetric.Compute(d.Output, d.ExpectedOutput)).Average();
                    trainingContext.WriteScore(testError, errorMetric.DisplayAsPercentage);
                }
            };
            deepTrainer.Train(trainingData, NUM_EPOCHS, trainingContext);
        }
        Console.WriteLine();
    }
}