/// <summary>
/// Applies the strategy-derived parameter deltas accumulated since the last
/// update, then clears both gradient accumulators for the next batch.
/// </summary>
/// <param name="strategy">Adaptation strategy that converts accumulated gradients into deltas.</param>
public override void Update(AdaptationStrategy strategy)
{
    // Derive the deltas from the accumulated gradients, then apply them.
    var biasDelta = strategy.BiasesUpdate(_biasesGradientAccumulator);
    var weightDelta = strategy.WeightsUpdate(_weightsGradientAccumulator);
    Biases.Add(biasDelta);
    Weights.Add(weightDelta);

    // Start the next accumulation cycle from zero.
    _biasesGradientAccumulator.Zero();
    _weightsGradientAccumulator.Zero();
}
/// <summary>
/// Value equality for layer models. Scalar members use exact comparison;
/// the Weights and Biases arrays are compared element-wise with an absolute
/// tolerance of 0.001f.
/// </summary>
/// <param name="other">The model to compare against; null is never equal.</param>
public bool Equals(NeuralNetworkLayerModel? other)
{
    if (other is null)
    {
        return false;
    }

    if (ReferenceEquals(this, other))
    {
        return true;
    }

    return (Id == null && other.Id == null || Id?.Equals(other.Id) == true)
        && Index == other.Index
        && Inputs == other.Inputs
        && Outputs == other.Outputs
        && ApproximatelyEqual(Weights, other.Weights)
        && ApproximatelyEqual(Biases, other.Biases)
        && Activation == other.Activation
        && LastUpdateTime.Equals(other.LastUpdateTime);
}

/// <summary>
/// Element-wise float-array comparison with an absolute tolerance.
/// Both arrays null counts as equal; exactly one null, or differing
/// lengths, does not. (Shared by the Weights and Biases checks, which
/// previously duplicated this logic inline.)
/// </summary>
private static bool ApproximatelyEqual(float[]? a, float[]? b)
{
    // Absolute tolerance used for every element comparison.
    const float Tolerance = 0.001f;

    if (a == null || b == null)
    {
        return a == null && b == null;
    }

    return a.Length == b.Length
        && a.Zip(b).All(t => MathF.Abs(t.First - t.Second) < Tolerance);
}
/// <summary>
/// Returns the per-direction probabilities for the next offset pick.
/// With no previous offset the raw biases are returned; otherwise each base
/// bias is scaled by the run weight at the index rotated back by the last
/// direction taken.
/// </summary>
/// <param name="lastOffset">The offset chosen on the previous step, or null on the first step.</param>
/// <returns>One probability per direction, parallel to Biases.</returns>
private IList<double> GetCurrentOffsetProbabilities(Point lastOffset = null)
{
    if (lastOffset == null)
    {
        return Biases.ToList();
    }

    // TODO: Possibly optimize this search.
    var lastOffsetInfo = GetOffsetInfo(lastOffset);
    var length = Biases.Length;
    var probabilities = new List<double>(length);

    for (var i = 0; i < length; i++)
    {
        // Wrap (i - Direction) into [0, length) in O(1). The original used
        // repeated +=/-= loops, which is O(|Direction| / length); the
        // double-mod form handles negative values, unlike a single '%'.
        var runIndex = ((i - lastOffsetInfo.Direction) % length + length) % length;
        probabilities.Add(Biases[i] * Runs[runIndex]);
    }

    return probabilities;
}
/// <summary>
/// Runs a forward pass for one training example, recording every weighted
/// input (z) and activation, and computes the output-layer error delta.
/// </summary>
/// <remarks>
/// NOTE(review): the gradient accumulators (nablaB / nablaW) are allocated
/// but never filled or returned — the backward pass through earlier layers
/// is unfinished. They are kept here as scaffolding for that work.
/// </remarks>
/// <param name="train">Training example supplying the input and expected output.</param>
public void BackPropagation(TrainTuple train)
{
    // Zero-initialized gradient accumulators shaped like Biases/Weights.
    var nablaB = Biases.Select(it => Vector<double>.Build.Dense(it.Count, 0)).ToList();
    var nablaW = Weights.Select(it => Matrix<double>.Build.Dense(it.RowCount, it.ColumnCount, 0)).ToList();

    // Forward pass.
    var activation = Vector<double>.Build.DenseOfEnumerable(train.Input.Select(it => (double)it));
    var activations = new List<Vector<double>> { activation };
    var zs = new List<Vector<double>>();

    var weightsWithBiases = Biases.Zip(Weights, (vector, matrix) => (vector, matrix));
    foreach (var (bias, weights) in weightsWithBiases)
    {
        var z = weights.TransposeThisAndMultiply(activation) + bias;
        zs.Add(z);
        // Fix: feed each layer the previous layer's output. The original never
        // reassigned 'activation', so every layer was multiplied against the
        // raw network input (compare the Feedforward implementation).
        activation = z.Map(Sigmoid);
        activations.Add(activation);
    }

    // Output-layer error: dC/da (element-wise) * sigma'(z).
    var expected = Vector<double>.Build.DenseOfEnumerable(train.Output.Select(it => (double)it));
    var delta = CostDerivative(activations.Last(), expected) * zs.Last().Map(SigmoidPrime);

    // TODO: propagate delta backwards into nablaB/nablaW. The last layer is
    // nablaB[^1] — the previously commented-out nablaB[^0] is out of range.
}
/// <summary>
/// Writes this layer as an XML element named after the concrete type,
/// containing the weight matrix (as column arrays) followed by the biases.
/// </summary>
/// <param name="writer">Destination XML writer; the element is left closed.</param>
public override void Serialize(XmlWriter writer)
{
    var elementName = GetType().Name;
    writer.WriteStartElement(elementName);

    // Order matters for the matching deserializer: weights first, then biases.
    var weightColumns = Weights.ToColumnArrays();
    var biasValues = Biases.ToArray();
    writer.XmlSerialize(weightColumns);
    writer.XmlSerialize(biasValues);

    writer.WriteEndElement();
}
/// <summary>
/// Steps every per-parameter optimizer and writes the produced values back
/// into the weight (and, when enabled, bias) containers in place.
/// </summary>
public void Update()
{
    if (EnableBiases)
    {
        // Each bias element is replaced by its optimizer's next value;
        // the current element value (second lambda argument) is ignored.
        Biases.MapIndexedInplace((index, _) => _biasOptimizers[index].Update());
    }

    // Same for the weight matrix, with one optimizer per (row, col) entry.
    Weights.MapIndexedInplace((row, col, _) => _weightOptimizers[row, col].Update());
}
/// <summary>
/// Allocates the weight and bias matrices for a fully connected network with
/// the topology: input -> (hiddenLayers x hidden) -> output.
/// </summary>
/// <param name="inputSize">Neurons in the input layer.</param>
/// <param name="hiddenSize">Neurons in each hidden layer.</param>
/// <param name="hiddenLayers">Number of hidden layers.</param>
/// <param name="outputSize">Neurons in the output layer.</param>
private void CreateMatrix(int inputSize, int hiddenSize, int hiddenLayers, int outputSize)
{
    // One weight/bias pair per layer transition: hiddenLayers + 1 in total.
    for (int layer = 0; layer <= hiddenLayers; ++layer)
    {
        bool isFirst = layer == 0;
        bool isLast = layer == hiddenLayers;
        int inSize = isFirst ? inputSize : hiddenSize;
        int outSize = isLast ? outputSize : hiddenSize;

        Weights.Add(new Matrix(inSize, outSize));
        Biases.Add(new Matrix(1, outSize));
    }
}
/// <summary>
/// Collects the bias neurons feeding the neurons one and two synapse hops
/// downstream of the input neurons.
/// </summary>
/// <remarks>
/// NOTE(review): the same bias neuron can be reached through several
/// synapses; if Biases is a plain list it will contain duplicates — confirm
/// Biases de-duplicates (e.g. is a set) or that duplicates are acceptable.
/// </remarks>
private void FindBiases()
{
    foreach (var input in Inputs)
    {
        foreach (var outputSynapse in input.Outgoing)
        {
            // Bias of the neuron this synapse feeds directly.
            Biases.Add(outputSynapse.To.BiasSynapse.From);

            // Biases of the neurons one layer further downstream.
            foreach (var targetOutput in outputSynapse.To.Outgoing)
            {
                Biases.Add(targetOutput.To.BiasSynapse.From);
            }
        }
    }
}
/// <summary>
/// Dumps the layer to the console: the weight matrix with the bias column
/// appended, or a short description when this is the input layer.
/// </summary>
public void Print()
{
    // The input layer has no incoming weights to show.
    if (PreviousLayer == null)
    {
        Console.WriteLine($"Input layer of {NeuronCount} neurons");
        return;
    }

    Console.WriteLine(Weights.Append(Biases.ToColumnMatrix()));
}
/// <summary>
/// Runs the layer on a single input vector: Biases + Weights * input.
/// Also records the input and clears any stale batch input.
/// </summary>
/// <param name="inputvalues">Input vector; its dimension must equal NumberOfInputs.</param>
/// <exception cref="ArgumentException">Thrown on an input dimension mismatch.</exception>
protected override NetworkVector _run(NetworkVector inputvalues)
{
    if (inputvalues.Dimension != NumberOfInputs)
    {
        throw new ArgumentException("The dimension of the input does not match this WeightedCombinger.");
    }

    VectorInput = inputvalues;
    BatchInput = null;

    var weighted = Weights.LeftMultiply(inputvalues);
    return Biases.SumWith(weighted);
}
/// <summary>
/// Writes this layer as an XML element named after the concrete type: every
/// kernel's column arrays, in row-major order over the jagged Weights array,
/// followed by the bias values.
/// </summary>
/// <param name="writer">Destination XML writer; the element is left closed.</param>
public override void Serialize(XmlWriter writer)
{
    writer.WriteStartElement(GetType().Name);

    // foreach preserves the original x (outer) then y (inner) index order.
    foreach (var kernelRow in Weights)
    {
        foreach (var kernel in kernelRow)
        {
            writer.XmlSerialize(kernel.ToColumnArrays());
        }
    }

    writer.XmlSerialize(Biases.ToArray());
    writer.WriteEndElement();
}
/// <summary>
/// Feeds a byte vector forward through the network, applying
/// Sigmoid(W * a + b) at each layer, and returns the final activation.
/// </summary>
/// <param name="input">Raw input bytes, widened to doubles.</param>
public Vector<double> Feedforward(IEnumerable<byte> input)
{
    var initial = Vector<double>.Build.DenseOfEnumerable(input.Select(it => (double)it));

    // Fold the activation through each (bias, weight) layer pair in order.
    return Biases.Zip(Weights, (vector, matrix) => (vector, matrix))
        .Aggregate(initial, (activation, layer) => Sigmoid(layer.matrix.Multiply(activation) + layer.vector));
}
/// <summary>
/// Feeds a vector of doubles forward through the network. Each layer computes
/// Sigmoid element-wise over (W^T * a + b); the final activation is returned.
/// </summary>
/// <param name="input">Input values, one per input neuron.</param>
public Vector<double> Feedforward(IEnumerable<double> input)
{
    var activation = Vector<double>.Build.DenseOfEnumerable(input);

    foreach (var (bias, weights) in Biases.Zip(Weights, (vector, matrix) => (vector, matrix)))
    {
        var weightedSum = weights.TransposeThisAndMultiply(activation) + bias;
        activation = weightedSum.Map(Sigmoid);
    }

    return activation;
}
public void Init(List <Data> trainData, List <Data> testData, int[] sizes) { TrainData = trainData; TestData = testData; Num_layers = sizes.Length; Sizes = sizes; for (int counter = 1; counter < Num_layers; counter++) { Biases.Add(np.random.randn(new int[] { Sizes[counter], 1 })); } foreach (var item in Sizes[..^ 1].Zip(Sizes[1..]))
/// <summary>
/// Creates a copy of this dense layer. Input layers carry no weights or
/// biases, so those are only copied for non-input layers.
/// </summary>
/// <remarks>
/// NOTE(review): ToArray is a shallow copy — if the element type is a
/// reference type, the copied layer shares those objects with the original.
/// Confirm this is the intended semantics.
/// </remarks>
public override Layer Copy()
{
    var copy = new Dense
    {
        Activations = Activations.ToArray(),
        ActivationFunc = ActivationFunc,
        IsInputLayer = IsInputLayer
    };

    if (!IsInputLayer)
    {
        copy.Weights = Weights.ToArray();
        copy.Biases = Biases.ToArray();
    }

    return copy;
}
/// <summary>
/// Applies the accumulated momentum to every weight and bias, then halves
/// the momentum for the next step.
/// </summary>
public void Update()
{
    // Momentum decay factor applied after each update.
    const double decay = 0.5;

    // Parallelize only the outer (row) dimension. The original nested a
    // Parallel.For per row, whose scheduling overhead dwarfs the per-element
    // work; a sequential inner loop computes the same result faster.
    var cols = Weights.GetLength(1);
    Parallel.For(0, Weights.GetLength(0), j =>
    {
        for (var i = 0; i < cols; i++)
        {
            Weights[j, i] += WMomentum[j, i];
            WMomentum[j, i] *= decay;
        }
    });

    Parallel.For(0, Biases.Count(), j =>
    {
        Biases[j] += BMomentum[j];
        BMomentum[j] *= decay;
    });
}
public Vector <double> BackPropagation(TrainTuple train, double speed) { var activations = new List <Vector <double> > { Vector <double> .Build.DenseOfEnumerable(train.Input.Select(it => (double)it)) }; var weightedSums = new List <Vector <double> >(); var biasesWithWeights = Biases.Zip(Weights, (vector, matrix) => (vector, matrix)); foreach (var(biases, weights) in biasesWithWeights) { var weightedSum = weights * activations[^ 0] + biases;
/// <summary>
/// Copy constructor: duplicates the topology and deep-copies every weight
/// and bias matrix from <paramref name="other"/>.
/// </summary>
/// <param name="other">The brain to clone.</param>
public NNBrain(NNBrain other)
{
    InputSize = other.InputSize;
    OutputSize = other.OutputSize;
    HiddenLayers = other.HiddenLayers;
    HiddenSize = other.HiddenSize;

    // Matrix.Copy gives independent parameter storage per brain.
    for (int layer = 0; layer < other.Weights.Count; ++layer)
    {
        Weights.Add(other.Weights[layer].Copy());
        Biases.Add(other.Biases[layer].Copy());
    }
}
/// <summary>
/// Initializes the network: validates the configuration, builds the hidden
/// layers, wires the last hidden layer to the output layer, and randomizes
/// the weight values.
/// </summary>
/// <param name="inputsAmount">Number of input neurons.</param>
/// <param name="neuronsInHiddenLayersAmount">Neuron count per hidden layer.</param>
/// <param name="outputsAmount">Number of output neurons.</param>
/// <param name="activationsFunctions">Activation function per layer.</param>
public void Init(int inputsAmount, int[] neuronsInHiddenLayersAmount, int outputsAmount, Func<double, double>[] activationsFunctions)
{
    Validate(neuronsInHiddenLayersAmount, activationsFunctions.Length, outputsAmount);
    ActivationsFunctions = activationsFunctions;

    InitHiddenLayers(inputsAmount, neuronsInHiddenLayersAmount);

    // Connect the last hidden layer to the output layer.
    var lastHiddenNeurons = neuronsInHiddenLayersAmount[neuronsInHiddenLayersAmount.Length - 1];
    Weights.Add(new Matrix(lastHiddenNeurons, outputsAmount));

    // One random bias (in [-1, 1]) for the output layer.
    Biases.Add(Random.Range(-1f, 1f));

    GenerateWeightsValues();
}
/// <summary>
/// Saves the network's weights, biases and layer sizes to a JSON file.
/// </summary>
/// <param name="file">Path of the file to write.</param>
public void Save(string file)
{
    // Snapshot the raw matrices into the serializable memory record.
    var memory = new NetworkMemory
    {
        Biases = Biases.Select(b => b.mat).ToArray(),
        Weights = Weights.Select(b => b.mat).ToArray(),
        Sizes = Sizes
    };

    var serializer = new JsonSerializer();

    // using declarations: writer is disposed before the stream, as with the
    // original nested using statements.
    using var sw = new StreamWriter(file);
    using var writer = new JsonTextWriter(sw);
    serializer.Serialize(writer, memory);
}
/// <summary>
/// Runs the layer on a single input vector by splitting it into segments,
/// applying Biases + Weights * segment to each, and concatenating the
/// per-segment results.
/// </summary>
/// <param name="inputvalues">Input vector; its dimension must equal NumberOfInputs.</param>
/// <exception cref="ArgumentException">Thrown on an input dimension mismatch.</exception>
protected override NetworkVector _run(NetworkVector inputvalues)
{
    if (inputvalues.Dimension != NumberOfInputs)
    {
        throw new ArgumentException("Input dimension does not match this Layer.");
    }

    VectorInput = inputvalues;
    BatchInput = null;

    var outputParts = _segment(inputvalues)
        .Select(part => Biases.SumWith(Weights.LeftMultiply(part)))
        .ToList();

    return NetworkVector.Concatenate(outputParts);
}
/// <summary>
/// Builds a network with the given layer sizes: one random bias vector per
/// non-input layer, then one random weight matrix per layer transition.
/// </summary>
/// <param name="source">Data source for the network.</param>
/// <param name="sizes">Neuron count per layer, input layer first.</param>
public NeuralNetwork(ISource source, params int[] sizes)
{
    _source = source;
    Sizes = sizes.ToList();
    NumLayers = sizes.Length;

    // Two separate loops, biases first, so the sequence of random draws
    // matches the original construction order exactly.
    for (var layer = 1; layer < sizes.Length; layer++)
    {
        Biases.Add(Vector<double>.Build.Random(sizes[layer]));
    }

    for (var layer = 1; layer < sizes.Length; layer++)
    {
        Weights.Add(Matrix<double>.Build.Random(sizes[layer - 1], sizes[layer]));
    }
}
/// <summary>
/// Creates every hidden layer: its activation matrix, a random bias in
/// [-1, 1], and the weight matrix connecting it to the previous layer
/// (or to the inputs for the first hidden layer).
/// </summary>
/// <param name="inputsAmount">Number of input neurons.</param>
/// <param name="neuronsInHiddenLayersAmount">Neuron count per hidden layer.</param>
private void InitHiddenLayers(int inputsAmount, int[] neuronsInHiddenLayersAmount)
{
    for (var layerIndex = 0; layerIndex < neuronsInHiddenLayersAmount.Length; layerIndex++)
    {
        var neurons = neuronsInHiddenLayersAmount[layerIndex];

        HiddenLayers.Add(new Matrix(1, neurons));
        Biases.Add(Random.Range(-1f, 1f));

        // The first hidden layer connects to the inputs; every later one
        // connects to the preceding hidden layer.
        var previousSize = layerIndex == 0
            ? inputsAmount
            : neuronsInHiddenLayersAmount[layerIndex - 1];
        Weights.Add(new Matrix(previousSize, neurons));
    }
}
/// <summary>
/// Applies one gradient-descent step using the backpropagation gradients
/// summed over a mini-batch.
/// </summary>
/// <param name="miniBatch">Small batch of training samples.</param>
/// <param name="eta">Learning rate.</param>
private void UpdateMiniBatch(List<Data> miniBatch, float eta)
{
    // Zero-initialized gradient accumulators shaped like Biases/Weights.
    Matrix[] nablaBiases = Biases
        .Select(b => new Matrix(new float[b.mat.GetLength(0), b.mat.GetLength(1)]))
        .ToArray();
    Matrix[] nablaWeights = Weights
        .Select(w => new Matrix(new float[w.mat.GetLength(0), w.mat.GetLength(1)]))
        .ToArray();

    // Accumulate the gradients of every sample in the batch.
    foreach (var sample in miniBatch)
    {
        DeltaNabla deltaNabla = BackPropagation(sample);
        for (int layer = 0; layer < NumberOfLayer - 1; layer++)
        {
            nablaBiases[layer] += deltaNabla.Biases[layer];
            nablaWeights[layer] += deltaNabla.Weights[layer];
        }
    }

    // Step by the average gradient scaled by the learning rate.
    float K = eta / miniBatch.Count;
    for (int layer = 0; layer < NumberOfLayer - 1; layer++)
    {
        Biases[layer] -= K * nablaBiases[layer];
        Weights[layer] -= K * nablaWeights[layer];
    }
}
/// <summary>
/// Top-level "think" goal: draws a random bias for each desire and builds
/// the evaluator set that scores competing goals against those biases.
/// </summary>
/// <param name="agent">The agent this goal belongs to.</param>
public Think(Agent agent) : base(agent, GoalTypes.Think)
{
    // Randomized personality: one bias per desirability evaluator.
    // (Object initializers evaluate in order, so the sequence of
    // CreateRandomValue() calls is unchanged.)
    var desireBiases = new Biases
    {
        HealthBias = CreateRandomValue(),
        ExploreBias = CreateRandomValue(),
        AttackBias = CreateRandomValue(),
        EvadeBias = CreateRandomValue(),
        ShotgunBias = CreateRandomValue(),
        RailgunBias = CreateRandomValue(),
        RocketLauncherBias = CreateRandomValue()
    };

    // One evaluator per possible high-level behavior.
    evaluators.Add(new EvaluatorGetHealth(desireBiases.HealthBias));
    evaluators.Add(new EvaluatorExplore(desireBiases.ExploreBias));
    evaluators.Add(new EvaluatorAttackTarget(desireBiases.AttackBias));
    /* TODO add in evaluate to do allow agent to evade */
    evaluators.Add(new EvaluatorEvadeBot(desireBiases.EvadeBias));
    evaluators.Add(new EvaluatorGetWeapon(desireBiases.ShotgunBias, WeaponTypes.Shotgun));
    evaluators.Add(new EvaluatorGetWeapon(desireBiases.RailgunBias, WeaponTypes.Railgun));
    evaluators.Add(new EvaluatorGetWeapon(desireBiases.RocketLauncherBias, WeaponTypes.RocketLauncher));
}
/// <summary>
/// Convolves each input channel with its kernels, accumulates the
/// per-kernel results across channels, then adds the biases and applies
/// the activation function in place.
/// </summary>
/// <param name="input">One matrix per input channel.</param>
/// <returns>The layer's feature maps (reuses the internal _values buffers).</returns>
public override Matrix<float>[] Compute(Matrix<float>[] input)
{
    // First channel writes straight into _values: previous contents are
    // overwritten, so no scratch buffer is needed.
    for (int kernel = 0; kernel < Weights[0].Length; kernel++)
    {
        Convolution(input[0], Weights[0][kernel], _values[kernel]);
    }

    // Remaining channels convolve into the scratch buffer, then accumulate
    // into the corresponding feature map.
    for (int channel = 1; channel < Weights.Length; channel++)
    {
        for (int kernel = 0; kernel < Weights[channel].Length; kernel++)
        {
            Convolution(input[channel], Weights[channel][kernel], _buffer);
            _values[kernel].Add(_buffer, _values[kernel]);
        }
    }

    // Bias and activation, applied in place on each feature map.
    for (int map = 0; map < _values.Length; map++)
    {
        _values[map].Add(Biases.At(map), _values[map]);
        Activation.Apply(_values[map], _values[map]);
    }

    return _values;
}
/// <summary>
/// Runs one iteration of maze generation: picks a running tree (round-robin),
/// chooses a cell on its path, and tries to carve a run of Sparseness cells
/// in a biased random direction. Returns the cell changes of this iteration.
/// </summary>
public override MazeGenerationResults Generate()
{
    var results = new MazeGenerationResults();

    // One-time map setup on the very first call.
    if (CurrentIteration == 0)
    {
        InitializeMap();
    }

    // Round-robin over the running trees.
    var treeIndex = CurrentIteration % RunningTrees.Count;
    var tree = RunningTrees[treeIndex];
    var path = tree.Path;
    CurrentIteration++;

    // A tree with an empty path has not been seeded yet.
    if (path.Count == 0)
    {
        return (InitializeTree(path, results, tree));
    }

    // Roll this iteration's behavior switches against their probabilities.
    var doBreadth = RNG.NextDouble() < Breadth;
    var doFirstChanceLooping = RNG.NextDouble() < FirstChanceLooping;
    var doLastChanceLooping = RNG.NextDouble() < LastChanceLooping;
    var dontGoBackAfterLooping = RNG.NextDouble() < DontGoBackAfterLooping;
    var doBlocking = RNG.NextDouble() < Blocking;

    // Either branch from a random Sparseness-aligned point on the path
    // (breadth) or continue from the path's tip.
    var currentCoordinateIndex = doBreadth && path.Count > 1 ? RNG.Next(1, path.Count / Sparseness + 1) * Sparseness : path.Count - 1;
    var currentPoint = path[currentCoordinateIndex];
    var offsets = currentPoint.GetAxisOffsets();
    var biases = GetCurrentOffsetProbabilities(tree.LastOffset);
    var lastChanceLooping = false;

    // Try biased directions until one can be carved or none remain.
    while (offsets.Count > 0)
    {
        var offsetIndex = PickNextDirection(biases, offsets);
        var offset = offsets[offsetIndex];

        // The run of cells that would be carved in this direction.
        var points = new List<Point>(Sparseness);
        {
            for (var i = 1; i <= Sparseness; i++)
            {
                var point = currentPoint + (offset * i);
                points.Add(point);
            }
        }
        var firstPoint = points[0];
        var lastPoint = points[points.Count - 1];
        var lastCellExists = Map.CellExists(lastPoint);
        // In last-chance mode only the adjacent cell is examined.
        var testCell = lastCellExists ? Map.GetCell(lastChanceLooping ? firstPoint : lastPoint) : null;

        // Reject this direction: off-map, blocked this iteration, or the
        // target cell is not solid while first-chance looping is disabled.
        if (testCell == null || doBlocking || (!doFirstChanceLooping && testCell.State != CellState.Filled))
        {
            offsets.RemoveAt(offsetIndex);
            biases.RemoveAt(offsetIndex);
            // Out of options: retry every direction once in last-chance mode
            // with the raw bias table.
            if (!lastChanceLooping && offsets.Count == 0)
            {
                offsets = currentPoint.GetAxisOffsets();
                biases = Biases.ToList();
                lastChanceLooping = true;
            }
            continue;
        }

        var cells = points.Select(x => Map.GetCell(x)).ToList();
        var lastCell = cells[cells.Count - 1];
        // Carving into an already-empty cell would create a loop.
        var wouldLoop = lastCell.State == CellState.Empty;
        Tree otherTree;
        // If the target cell belongs to another tree, try joining the trees.
        var treeJoinForceConnect = CellsTreeDict.TryGetValue(lastPoint, out otherTree) && TreeTree.Connect(tree, otherTree);
        if (wouldLoop)
        {
            // Loops are only allowed via last-chance looping or a tree join.
            if (!doLastChanceLooping && !treeJoinForceConnect)
            {
                break;
            }
        }
        tree.LastOffset = offset;
        if (!dontGoBackAfterLooping && LastLooped && wouldLoop)
        {
            // TODO: Fix going back with first chance looping.
            break;
        }
        if (!treeJoinForceConnect && !wouldLoop)
        {
            CellsTreeDict.Add(lastPoint, tree);
        }
        LastLooped = wouldLoop;

        // Carve the run and record each cell change in the results.
        for (var i = 0; i < points.Count; i++)
        {
            var point = points[i];
            var cell = cells[i];
            cell.State = CellState.Empty;
            if (wouldLoop)
            {
                // Loop closures are final path; nothing is pushed to return to.
                cell.DisplayState = CellDisplayState.Path;
            }
            else
            {
                cell.DisplayState = CellDisplayState.PathWillReturn;
                path.Push(point);
            }
            var result = new MazeGenerationResult(point, cell.State, cell.DisplayState);
            results.Add(result);
        }
        return (results);
    }

    // No direction worked; a path of one cell (or less) means the tree is done.
    if (path.Count <= 1)
    {
        return (CompleteTree(treeIndex, results));
    }

    // Backtrack: remove one Sparseness-run from the path, marking those
    // cells as final path.
    if (currentCoordinateIndex != 0)
    {
        for (var i = 0; i < Sparseness; i++)
        {
            var coord = path[currentCoordinateIndex - i];
            path.RemoveAt(currentCoordinateIndex - i);
            var lastCell = Map.GetCell(coord);
            lastCell.DisplayState = CellDisplayState.Path;
            var lastResult = new MazeGenerationResult(coord, lastCell.State, lastCell.DisplayState);
            results.Results.Add(lastResult);
        }
    }
    return (results);
}
// Applies the base update, then pushes each current bias value into its
// per-index optimizer so the optimizer state matches the new biases.
// NOTE(review): this hides the base method ('new', not 'override') — callers
// holding a base-typed reference skip the optimizer sync. Confirm intended.
public new void SetBiases(double[] array)
{
    base.SetBiases(array);
    // Assumes the ForEach extension supplies (value, index) pairs — TODO confirm.
    Biases.ToArray().ForEach((q, i) => _biasOptimizers[i].SetValue(q));
}
/// <summary>
/// Returns a snapshot of the current bias values as a new array.
/// </summary>
public double[] GetBiases() => Biases.ToArray();