/// <summary>
/// Builds a widget attribute, looks up its weight for the given widget type,
/// and feeds the weighted value into the shared weight store.
/// </summary>
/// <param name="name">Attribute name; used as the key into the weight table.</param>
/// <param name="value">Raw attribute value.</param>
/// <param name="type">Widget type selecting the weight table row.</param>
/// <param name="widgetId">Identifier forwarded to the weight store.</param>
public WidgetAttribute(string name, double value, WidgetType type, int widgetId)
{
    Name = name;
    Value = value;

    // NOTE(review): assumes Calculator.Weights[type] contains an entry for this
    // name — a missing key would throw here; confirm upstream guarantees it.
    double weight = Calculator.Weights[type][name];
    WeightStore.AttWeightedAvgAvailable(Value * weight, widgetId);
}
/// <summary>
/// Compares the network's decoded output against the expected private key for
/// the validation sample at <paramref name="index"/> and increments the
/// per-position hit counters in <paramref name="ws"/>.
/// </summary>
/// <param name="outputlayer">Raw binary-step outputs of the final layer.</param>
/// <param name="index">Index of the validation sample in valdataSet.</param>
/// <param name="ws">Accumulator; Statistics[i] counts matches at position i.</param>
private void ValidateTest(double[] outputlayer, int index, ref WeightStore ws)
{
    // Decode into a local instead of reassigning the parameter — the original
    // overwrote `outputlayer`, which reads as if the caller's array were being
    // replaced (it is not; the reference change is local to this method).
    double[] decoded = ConvertFromBinaryToDouble(outputlayer);

    var expected = valdataSet[index].PrivateKey;

    // Bound the loop by every array involved so a size mismatch between the
    // decoded output, the key, and the statistics buffer shows up as "no extra
    // hits" instead of an IndexOutOfRangeException.
    int length = Math.Min(expected.Length, Math.Min(decoded.Length, ws.Statistics.Length));
    for (int i = 0; i < length; i++)
    {
        if (expected[i] == (int)decoded[i])
        {
            ws.Statistics[i]++;
        }
    }
}
/// <summary>
/// Runs every validation sample through the 4-layer network (LeakyReLU on the
/// three hidden layers, BinaryStep on the output layer), tallies per-position
/// hits via ValidateTest, and reports / optionally persists the weights when
/// any position matches on more than the save threshold of samples.
/// </summary>
private void Validate()
{
    // A position counts as "worth saving" once it matches on more than this
    // many validation samples (original magic number, unchanged).
    const double SaveThreshold = 50;

    bool shouldSave = false;
    WeightStore ws = new WeightStore() { Statistics = new double[32] };

    for (int i = 0; i < valdataSet.Count; i++)
    {
        // Forward pass; layer output sizes 32 -> 64 -> 128 -> 256.
        // The four layers were copy-pasted in the original — folded into RunLayer.
        double[] layer0 = RunLayer(0, valdataSet[i].PublicAddressDouble, 32, activationFunctions.LeakyReLU);
        double[] layer1 = RunLayer(1, layer0, 64, activationFunctions.LeakyReLU);
        double[] layer2 = RunLayer(2, layer1, 128, activationFunctions.LeakyReLU);
        double[] output = RunLayer(3, layer2, 256, activationFunctions.BinaryStep);

        ValidateTest(output, i, ref ws);
    }

    for (int i = 0; i < ws.Statistics.Length; i++)
    {
        if (ws.Statistics[i] > SaveThreshold)
        {
            shouldSave = true;
            if (ws.Statistics[i] > maxStat)
            {
                maxStat = ws.Statistics[i];
                Console.WriteLine($"Current highest stat is: {maxStat}, Probability: {maxStat / valdataSet.Count}");
            }
        }
    }

    if (shouldSave)
    {
        //Console.WriteLine(Environment.NewLine);
        //SerialiseWeightsAndSaveToDB(ws.Statistics); //Save to DB using entity framework - uncomment to enable - remember to set connection string in WeightsDBContext
    }
}

/// <summary>
/// Evaluates one fully-connected layer: a weighted sum per neuron followed by
/// the activation applied to every slot of the fixed-size output buffer.
/// </summary>
/// <param name="layerNumber">LayerNumber tag selecting this layer's neurons from neuralNetwork.</param>
/// <param name="input">Activations of the previous layer (or the sample's input vector).</param>
/// <param name="size">Length of this layer's output buffer.</param>
/// <param name="activation">Activation function applied element-wise.</param>
/// <returns>The activated output buffer of length <paramref name="size"/>.</returns>
private double[] RunLayer(int layerNumber, double[] input, int size, Func<double, double> activation)
{
    List<NeuralNetwork> neurons = neuralNetwork.FindAll(x => x.LayerNumber == layerNumber);
    double[] sums = new double[size];

    for (int j = 0; j < neurons.Count; j++)
    {
        sums[j] = perceptron.Execute(neurons[j].Weights, input, neurons[j].Bias);
    }

    // The original applied the activation to the whole buffer, including slots
    // not written by any neuron (i.e. activation(0) for untouched entries) —
    // preserved here so behavior is identical even if neurons.Count < size.
    for (int k = 0; k < sums.Length; k++)
    {
        sums[k] = activation(sums[k]);
    }

    return sums;
}