/// <summary>
/// Convolution forward pass (UNFINISHED — always throws <see cref="NotImplementedException"/>;
/// the commented-out return at the end shows the intended result).
/// Correlates a 28x28 input (flattened row-major into row 0 of <paramref name="input"/>)
/// with each 5x5 kernel in <c>filter</c>, writing one feature map per kernel into <c>result</c>.
/// </summary>
/// <param name="input">Input matrix; only row 0 is read, indexed as i*28+j. Presumably 1x784 — TODO confirm.</param>
/// <returns>Never returns; throws. Intended: <c>result</c> (per the trailing comment).</returns>
public override LenfNum CalculateOutput(LenfNum input) {
    // Valid-convolution output edge: 28 - 5 + 1 = 24. NOTE(review): input width (28)
    // and kernel size (5) are hard-coded here, unlike test() which uses Width/FilterLenght.
    short lenght = 28 - 5 + 1;
    for (short count = 0; count < filter.Length; count++) {
        // NOTE(review): allocates (lenght+1) x (lenght+1) but the loops below only fill
        // lenght x lenght cells — the extra row/column stays at the fill value 0. Looks
        // like an off-by-one; confirm intended output size before wiring this up.
        result[count] = new LenfNum((short)(lenght + 1), (short)(lenght + 1), 0);
        for (short i = 0; i < lenght; i++) {
            for (short j = 0; j < lenght; j++) {
                double r = 0;
                // 5x5 cross-correlation window anchored at (i, j).
                for (short k = 0; k < 5; k++) {
                    for (short l = 0; l < 5; l++) {
                        r += input[0, (short)((i + k) * 28 + (j + l))] * filter[count][k, l];
                    }
                }
                result[count][i, j] = r;
            }
        }
    }
    // Left unimplemented by the author: result is LenfNum[] but the override must return
    // a single LenfNum, so the return shape is still unresolved.
    throw new NotImplementedException(); //return result;
}
/// <summary>
/// Dense-layer forward pass: affine transform (input x Weight + Bias) followed by
/// the layer's activation function. Caches Input, BeforeActive and AfterActive
/// for the subsequent back-propagation step.
/// </summary>
/// <param name="input">Activations from the previous layer.</param>
/// <returns>The activated output of this layer.</returns>
public override LenfNum CalculateOutput(LenfNum input) {
    // Remember the raw input; GradientDescent() reads it later.
    Input = input;
    // Pre-activation: input * Weight + Bias.
    var weighted = input.Multiply(Weight);
    BeforeActive = weighted + Bias;
    // ActivityFunction[0] is the forward activation; [1] is its derivative.
    AfterActive = ActivityFunction[0](this);
    return AfterActive;
}
/// <summary>
/// Standard matrix product of this matrix with <paramref name="Multier"/>.
/// </summary>
/// <param name="Multier">Right-hand matrix; a null value acts as identity (this is returned unchanged).</param>
/// <returns>A new (row x Multier.column) matrix containing the product.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when this matrix's column count does not match <paramref name="Multier"/>'s row count.
/// </exception>
public LenfNum Multiply(LenfNum Multier) {
    // Preserved original behavior: null multiplier means "multiply by nothing" -> return self.
    if (Multier == null) {
        return this;
    }
    if (column != Multier.row) {
        // Was: throw new Exception("Can't multiply AAAAA") — a bare Exception with a
        // placeholder message. Use the specific type and report the mismatched dimensions.
        throw new InvalidOperationException(
            $"Can't multiply: left matrix has {column} columns but right matrix has {Multier.row} rows.");
    }
    var c = row;
    var d = Multier.column;
    var e = column;
    var f = new LenfNum(c, d, 0d);
    // Classic triple loop; k-accumulation order kept identical to the original so
    // floating-point results are bit-for-bit unchanged.
    for (short i = 0; i < c; i++) {
        for (short j = 0; j < d; j++) {
            for (short k = 0; k < e; k++) {
                f[i, j] += this[i, k] * Multier[k, j];
            }
        }
    }
    return f;
}
//public void AddLayer(LenfLayer layer, short? firstInputCount = null) {
//    layer.Input = Layers.Count == 0 ? new LenfNum(1, firstInputCount ?? throw new Exception("First Layer should have input size")) : new LenfNum(Layers.Last().give,layer.get);
//    Layers.Add(layer);
//}
//public void AddRangeLayers(List<LenfLayer> layers, short? firstInputCount = null) {
//    foreach(var layer in layers) {
//        AddLayer(layer, firstInputCount);
//    }
//}
#endregion
/// <summary>
/// Inference-only pass: runs <paramref name="CheckData"/> through every layer and
/// returns the network's final output. Unlike <c>Train</c>, no cost is computed
/// and nothing is back-propagated.
/// </summary>
/// <param name="CheckData">Input sample to evaluate.</param>
/// <returns>The final layer's output.</returns>
public LenfNum Calculate(LenfNum CheckData) {
    // The original also evaluated CostFunction[0] into an unused local (dead work
    // left over from copying Train()); that computation has been removed.
    return GetFinalOutput(CheckData);
}
/// <summary>
/// One training step: forward pass, total-cost evaluation, then back-propagation
/// of the cost gradient through all layers.
/// </summary>
/// <param name="TrainData">Training input sample.</param>
/// <param name="CheckData">Expected output (label) for the sample.</param>
/// <returns>The (sign-flipped) summed cost for this sample.</returns>
public double Train(LenfNum TrainData, LenfNum CheckData) {
    // Forward pass, negated — CostFunction[1] below expects the negated output.
    // NOTE(review): this sign convention pairs with the '+=' in GradientDescent; confirm.
    var negatedOutput = GetFinalOutput(TrainData) * -1;
    // Un-negate for the cost itself (negatedOutput * -1 == original output),
    // then flip the summed cost's sign, exactly as the original did.
    var totalCost = -CostFunction[0](negatedOutput * -1, CheckData).Sum();
    // CostFunction[1] is the cost derivative; seed back-propagation with it.
    BackPropagation(CostFunction[1](negatedOutput, CheckData));
    return totalCost;
}
/// <summary>
/// Forward pass: feeds <paramref name="TrainData"/> through each layer in order,
/// each layer's output becoming the next layer's input.
/// </summary>
/// <param name="TrainData">Network input.</param>
/// <returns>The last layer's output.</returns>
public LenfNum GetFinalOutput(LenfNum TrainData) {
    var signal = TrainData;
    for (int i = 0; i < Layers.Count; i++) {
        signal = Layers[i].CalculateOutput(signal);
    }
    return signal;
}
/// <summary>
/// Back-propagates the cost gradient from the last layer to the first, then lets
/// every layer apply its accumulated gradient.
/// </summary>
/// <param name="CostData">Gradient of the cost with respect to the network output.</param>
public void BackPropagation(LenfNum CostData) {
    // Walk the layers last-to-first. The original did Layers.ToArray().Reverse(),
    // which copies the whole list; an index loop gives the same order with no copy.
    for (int i = Layers.Count - 1; i >= 0; i--) {
        CostData = Layers[i].BackPropagation(CostData);
    }
    // Second phase: apply gradients only after every layer has its Partial cached.
    foreach (var layer in Layers) {
        layer.GradientDescent();
    }
}
/// <summary>
/// Returns a new matrix that is the transpose of this one (rows and columns swapped).
/// This matrix is not modified.
/// </summary>
/// <returns>A (column x row) matrix with element [c, r] = this[r, c].</returns>
public LenfNum Transposed() {
    var transposed = new LenfNum(column, row, 0d);
    for (short r = 0; r < row; r++) {
        for (short c = 0; c < column; c++) {
            transposed[c, r] = this[r, c];
        }
    }
    return transposed;
}
/// <summary>
/// Applies <paramref name="func"/> element-wise, returning a new matrix of the
/// results. This matrix is left unchanged.
/// </summary>
/// <param name="func">Function applied to every element.</param>
/// <returns>A new (row x column) matrix where [r, c] = func(this[r, c]).</returns>
public LenfNum FuncToAll(Func <double, double> func) {
    var mapped = new LenfNum(row, column, 0d);
    for (short r = 0; r < row; r++) {
        for (short c = 0; c < column; c++) {
            mapped[r, c] = func(this[r, c]);
        }
    }
    return mapped;
}
/// <summary>
/// Element-wise division of a matrix by a scalar.
/// </summary>
/// <param name="dividend">Matrix whose elements are divided. (Originally misnamed
/// "augend"/"addend" — addition terms — presumably copy-pasted from operator +;
/// operator parameters cannot be passed by name, so the rename is caller-safe.)</param>
/// <param name="divisor">Scalar divisor. A zero divisor follows IEEE-754 double
/// semantics (Infinity/NaN) rather than throwing, as in the original.</param>
/// <returns>A new matrix with every element divided by <paramref name="divisor"/>.</returns>
public static LenfNum operator /(LenfNum dividend, double divisor) {
    short rows = dividend.row;
    short columns = dividend.column;
    var quotient = new LenfNum(rows, columns);
    for (short i = 0; i < rows; i++) {
        for (short j = 0; j < columns; j++) {
            quotient[i, j] = dividend[i, j] / divisor;
        }
    }
    return quotient;
}
/// <summary>
/// Scratch/experimental convolution loop (note the name "test"). Iterates the full
/// cross-correlation of <paramref name="input"/> with each filter but DISCARDS every
/// computed value — <c>r</c> is never stored anywhere — and returns the <c>result</c>
/// field untouched. Presumably an abandoned draft of CalculateOutput; confirm before use.
/// </summary>
/// <param name="input">Input matrix; only row 0 is read, indexed as i*Width+j.</param>
/// <returns>The <c>result</c> field, completely unmodified by this method.</returns>
public LenfNum[] test(LenfNum input) {
    // Valid-convolution output dimensions.
    short height = (short)(Height - FilterLenght + 1);
    short width = (short)(Width - FilterLenght + 1);
    for (short count = 0; count < filter.Length; count++) {
        for (short i = 0; i < height; i++) {
            for (short j = 0; j < width; j++) {
                double r = 0;
                // NOTE(review): the window bounds are hard-coded to 5 here even though
                // the output size uses FilterLenght — inconsistent if FilterLenght != 5.
                for (short k = 0; k < 5; k++) {
                    for (short l = 0; l < 5; l++) {
                        r += input[0, (short)((i + k) * Width + (j + l))] * filter[count][k, l];
                    }
                }
                // NOTE(review): 'r' is dropped here — the original presumably intended
                // something like result[count][i, j] = r.
            }
        }
    }
    return(result);
}
/// <summary>
/// Applies this layer's cached gradient (Partial) to its Weight and Bias.
/// </summary>
public override void GradientDescent() {
    // NOTE(review): gradients are ADDED rather than subtracted; this appears to rely
    // on the negated outputs produced in Train() — confirm the sign convention.
    var weightStep = Input.Transposed().Multiply(Partial) * LearningRate;
    Weight += weightStep;
    // Bias learns at one tenth of the weight learning rate.
    Bias += Partial * LearningRate / 10;
}
/// <summary>
/// Forward pass for this layer: transforms <paramref name="input"/> into the
/// layer's output, which becomes the next layer's input.
/// </summary>
/// <param name="input">Output of the previous layer (or the network input).</param>
/// <returns>This layer's output.</returns>
public abstract LenfNum CalculateOutput(LenfNum input);
/// <summary>
/// Backward pass for this layer: receives the gradient flowing back from the
/// following layer and returns the gradient to pass to the preceding layer.
/// Implementations cache what GradientDescent() later needs (e.g. Partial).
/// </summary>
/// <param name="lastLayer">Gradient propagated back from the next layer.</param>
/// <returns>Gradient with respect to this layer's input.</returns>
public abstract LenfNum BackPropagation(LenfNum lastLayer);
/// <summary>
/// Forward pass — not yet implemented for this layer type.
/// </summary>
/// <param name="input">Unused.</param>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public override LenfNum CalculateOutput(LenfNum input) { throw new NotImplementedException(); }
/// <summary>
/// Creates a fully-connected layer with <paramref name="get"/> inputs and
/// <paramref name="give"/> outputs.
/// </summary>
/// <param name="get">Number of inputs (fan-in).</param>
/// <param name="give">Number of outputs (fan-out).</param>
/// <param name="learningRate">Step size for gradient descent.</param>
/// <param name="activityFunction">Optional [forward, derivative] activation pair;
/// when null the base class's default is kept.</param>
public LenfDenseLayer(short get, short give, double learningRate = 0.1, Func <LenfLayer, LenfNum>[] activityFunction = null) : base(get, give, learningRate) {
    // Only override the inherited default activation when the caller supplied one.
    if (activityFunction != null) {
        ActivityFunction = activityFunction;
    }
    // NOTE(review): 2-arg ctor — presumably random weight init; confirm against LenfNum.
    Weight = new LenfNum(get, give);
    // Bias is a 1 x give row vector of zeros.
    Bias = new LenfNum(1, give, 0);
}
/// <summary>
/// Backward pass — not yet implemented for this layer type.
/// </summary>
/// <param name="lastLayer">Unused.</param>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public override LenfNum BackPropagation(LenfNum lastLayer) { throw new NotImplementedException(); }
/// <summary>
/// Backward pass for the normalize layer: caches the incoming gradient and returns
/// the gradient with respect to this layer's input.
/// </summary>
/// <param name="lastLayer">Gradient propagated back from the next layer.</param>
/// <returns>Gradient to hand to the preceding layer.</returns>
public override LenfNum BackPropagation(LenfNum lastLayer) {
    // Cache for GradientDescent().
    Partial = lastLayer;
    var normalizeDerivative = NormalizeFunction[1](this);
    // NOTE(review): element-wise '*' with Weight here, unlike the dense layer's
    // Multiply(Weight.Transposed()) — presumably Weight is applied per-element; confirm.
    return normalizeDerivative * Weight;
}
/// <summary>
/// Normalize-layer forward pass: normalizes the input, then applies an element-wise
/// scale (Weight) and shift (Bias).
/// </summary>
/// <param name="input">Activations from the previous layer.</param>
/// <returns>The normalized, scaled and shifted output.</returns>
public override LenfNum CalculateOutput(LenfNum input) {
    // Cache the raw input; NormalizeFunction[0] reads it through this layer.
    BeforeNormal = input;
    AfterNormal = NormalizeFunction[0](this);
    var scaled = AfterNormal * Weight;
    return scaled + Bias;
}
/// <summary>
/// Dense-layer backward pass: combines the incoming gradient with the activation
/// derivative (chain rule), caches it as Partial, and propagates the gradient to
/// the previous layer through the transposed weights.
/// </summary>
/// <param name="lastLayer">Gradient propagated back from the next layer.</param>
/// <returns>Gradient with respect to this layer's input.</returns>
public override LenfNum BackPropagation(LenfNum lastLayer) {
    // ActivityFunction[1] is the derivative of the forward activation.
    var activationGradient = ActivityFunction[1](this);
    Partial = lastLayer * activationGradient;
    return Partial.Multiply(Weight.Transposed());
}