        private void SigmoidActivation(DataSample sample)
        {
            if (sample.data.Length != inputSize)
            {
                Console.WriteLine("NOT the same size - " + sample.data.Length + " / " + inputSize);
                return;
            }

            inputLayerValue = sample.data;

            // First hidden layer activation (input -> hidden layer 0)
            for (int i = 0; i < hiddenSize; i++)
            {
                float sum = 0;

                for (int j = 0; j < inputSize; j++)
                {
                    sum += (float)(inputLayerValue[j] * inputLayerWeights[j, i]);
                }

                hiddenLayerValue[0, i] = ActivationFunctions.Sigmoid(sum);
            }

            // Remaining hidden layer activations
            for (int i = 1; i < hiddenDimension; i++)
            {
                for (int j = 0; j < hiddenSize; j++)
                {
                    float sum = 0;

                    for (int k = 0; k < hiddenSize; k++)
                    {
                        sum += (hiddenLayerValue[i - 1, k] * hiddenLayerWeights[i - 1, j, k]);
                    }

                    hiddenLayerValue[i, j] = ActivationFunctions.Sigmoid(sum);
                }
            }

            // Output Layer Activation
            for (int i = 0; i < outputSize; i++)
            {
                float sum = 0;

                for (int j = 0; j < hiddenSize; j++)
                {
                    sum += (hiddenLayerValue[hiddenDimension - 1, j] * outputLayerWeights[i, j]);
                }
                outputLayerValue[i] = ActivationFunctions.Sigmoid(sum);
            }
        }
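
        // ActivationFunctions.Sigmoid is called above but its source is not part of this
        // snippet. Below is a minimal sketch of the standard logistic function it presumably
        // wraps; the body is an assumption, not the original implementation:
        public static class ActivationFunctions
        {
            public static float Sigmoid(float x)
            {
                // 1 / (1 + e^-x) maps any real input into the open interval (0, 1).
                return 1f / (1f + (float)System.Math.Exp(-x));
            }
        }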
        public void DataClassification(DataSample sample)
        {
            SigmoidActivation(sample);

            // Only report a classification when the cost against the ideal vector is low.
            if (ReturnCost(CreateIdealVector(sample.label)) < .5)
            {
                Console.Write("Classified: ");
                float maxVal = outputLayerValue[0];
                int   index  = 0;

                for (int i = 1; i < outputSize; i++)
                {
                    if (outputLayerValue[i] > maxVal)
                    {
                        maxVal = outputLayerValue[i];
                        index  = i;
                    }
                }

                switch (index)
                {
                case 0:
                    Console.WriteLine("Blank");
                    break;

                case 1:
                    Console.WriteLine("Noise");
                    break;

                case 2:
                    Console.WriteLine("Hej");
                    break;

                case 3:
                    Console.WriteLine("Siri");
                    break;
                }
            }
        }
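
        // CreateIdealVector and ReturnCost are used above but not shown. The sketches below
        // are assumptions inferred from the call sites (a one-hot target vector over the
        // output classes, and a squared-error cost against the current output layer); the
        // "Sketch" suffix marks them as hypothetical stand-ins, not the original code:
        private float[] CreateIdealVectorSketch(int label)
        {
            // One-hot encoding: 1 at the labelled class index, 0 everywhere else.
            float[] ideal = new float[outputSize];
            ideal[label] = 1f;
            return ideal;
        }

        private float ReturnCostSketch(float[] idealVector)
        {
            // Sum of squared differences between the network's outputs and the ideal vector.
            float cost = 0;
            for (int i = 0; i < outputSize; i++)
            {
                float diff = idealVector[i] - outputLayerValue[i];
                cost += diff * diff;
            }
            return cost;
        }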
Example #3
        static void NetworkTraining(int inputSize, int width, int height, int iterations)
        {
            int outputSize = DataSample.labelSize;

            string databasePath1 = @"C:/Users/Mikkel/Documents/MTA18434_SemesterProject/ML_Sound_Samples/Assets/Resources/SampleDatabase/Database.json";

            string networkPath1 = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location) + "/NetworkSave01.json";


            if (File.Exists(networkPath1))
            {
                network = NeuralNetwork.LoadNetwork(networkPath1);

                if (network.inputSize != inputSize)
                {
                    throw new Exception("Loaded network input size (" + network.inputSize + ") does not match the requested size (" + inputSize + ").");
                }

                if (network.hiddenDimension != width)
                {
                    throw new Exception("Loaded network hidden dimension (" + network.hiddenDimension + ") does not match the requested width (" + width + ").");
                }

                if (network.hiddenSize != height)
                {
                    throw new Exception("Loaded network hidden size (" + network.hiddenSize + ") does not match the requested height (" + height + ").");
                }
            }
            else
            {
                Console.WriteLine("Initializing NN");
                network = new NeuralNetwork(inputSize, width, height, outputSize);
                Console.WriteLine("NN Initialized");
            }

            if (File.Exists(databasePath1))
            {
                Console.WriteLine(File.Exists(databasePath1) ? "Database exists." : "File does not exist.");

                SampleDatabase temp = null;

                using (StreamReader r = new StreamReader(databasePath1))
                {
                    using (JsonReader reader = new JsonTextReader(r))
                    {
                        JsonSerializer serializer = new JsonSerializer();
                        Console.WriteLine("Deserializing");
                        temp = serializer.Deserialize<SampleDatabase>(reader);
                    }
                }

                // Sanity check: print the length of the first sample's data array.
                Console.WriteLine(temp.database[0].data.Length);

                DataSample[] trainingSamples = new DataSample[10];
                Random       rand            = new Random();

                for (int i = 0; i < iterations; i++)
                {
                    Console.WriteLine("Progress: " + i + " / " + iterations);
                    // Pick a random mini-batch of 10 samples.
                    for (int j = 0; j < 10; j++)
                    {
                        int num = rand.Next(0, temp.database.Length);
                        trainingSamples[j] = new DataSample(temp.database[num].data, temp.database[num].label);

                        //Console.WriteLine("Database sample " + temp.database[num].data[0] + " " + temp.database[num].label);
                    }

                    network.TrainNetwork(trainingSamples);
                }

                Console.WriteLine("Saving network");
                network.SaveNetwork(networkPath1);
                Console.WriteLine("Network saved");
            }
            else
            {
                Console.WriteLine(File.Exists(databasePath1) ? "File exists." : "File does not exist.");
                throw new Exception();
            }
        }
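
        // The DataSample and SampleDatabase types deserialized above are not included in
        // this snippet. Minimal sketches consistent with how they are used here
        // (temp.database[num].data, temp.database[num].label, DataSample.labelSize, and the
        // (data, label) constructor); the field types and the "Sketch" suffix are assumptions:
        public class DataSampleSketch
        {
            public const int labelSize = 4; // assumed: Blank, Noise, Hej, Siri

            public double[] data;
            public int label;

            public DataSampleSketch(double[] data, int label)
            {
                this.data  = data;
                this.label = label;
            }
        }

        public class SampleDatabaseSketch
        {
            public DataSampleSketch[] database;
        }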
        private GradientWeightVector GradientBackProp(DataSample sample)
        {
            // Container for the per-weight gradients, sized like the network's weight arrays.
            GradientWeightVector weightVector = new GradientWeightVector(this);

            float[] idealVector      = CreateIdealVector(sample.label);
            float[] outputLayerDelta = new float[outputSize];
            // One delta per hidden neuron, indexed by hidden layer (0 .. hiddenDimension - 1).
            float[,] hiddenLayerDelta = new float[hiddenDimension, hiddenSize];

            SigmoidActivation(sample);
            Console.WriteLine("Label: " + sample.label);
            Console.WriteLine();
            PrintOutput();
            Console.WriteLine();
            Console.WriteLine("Cost after Activation is: " + ReturnCost(CreateIdealVector(sample.label)));
            PrintAverageCost();

            // Output layer: delta_i = -(ideal_i - out_i) * sigmoid'(net_i), where
            // sigmoid'(net) = out * (1 - out) since out is already the sigmoid value.
            for (int i = 0; i < outputSize; i++)
            {
                outputLayerDelta[i] = -(idealVector[i] - outputLayerValue[i]) * outputLayerValue[i] *
                                      (1 - outputLayerValue[i]);

                // Gradient for the weight from hidden neuron j (last hidden layer) to output neuron i.
                for (int j = 0; j < hiddenSize; j++)
                {
                    weightVector.outputLayerWeights[i, j] = learningRate * outputLayerDelta[i] * hiddenLayerValue[hiddenDimension - 1, j];
                }
            }

            // Last hidden layer: delta_i = (sum_j outputDelta_j * outputWeights[j, i]) * sigmoid'(h_i).
            for (int i = 0; i < hiddenSize; i++)
            {
                float sum = 0;

                for (int j = 0; j < outputSize; j++)
                {
                    sum += outputLayerDelta[j] * outputLayerWeights[j, i];
                }

                hiddenLayerDelta[hiddenDimension - 1, i] = sum * hiddenLayerValue[hiddenDimension - 1, i] *
                                                           (1 - hiddenLayerValue[hiddenDimension - 1, i]);

                // Gradient for the weight from neuron j (layer hiddenDimension - 2) to neuron i (last hidden layer).
                for (int j = 0; j < hiddenSize; j++)
                {
                    weightVector.hiddenLayerWeights[hiddenDimension - 2, i, j] = learningRate * hiddenLayerDelta[hiddenDimension - 1, i] *
                                                                                 hiddenLayerValue[hiddenDimension - 2, j];
                }
            }

            // Remaining hidden layers, back to front. hiddenLayerWeights[l] connects hidden
            // layer l to hidden layer l + 1, so layer l's delta is the weighted sum of the
            // deltas one layer closer to the output, times the local sigmoid derivative.
            for (int l = hiddenDimension - 2; l >= 0; l--)
            {
                for (int j = 0; j < hiddenSize; j++)
                {
                    float sum = 0;

                    for (int k = 0; k < hiddenSize; k++)
                    {
                        sum += hiddenLayerDelta[l + 1, k] * hiddenLayerWeights[l, k, j];
                    }

                    hiddenLayerDelta[l, j] = sum * hiddenLayerValue[l, j] * (1 - hiddenLayerValue[l, j]);

                    if (l > 0)
                    {
                        // Gradient for the weight from neuron k (layer l - 1) to neuron j (layer l).
                        for (int k = 0; k < hiddenSize; k++)
                        {
                            weightVector.hiddenLayerWeights[l - 1, j, k] = learningRate * hiddenLayerDelta[l, j] * hiddenLayerValue[l - 1, k];
                        }
                    }
                }
            }

            // Input layer weights: the gradient pairs the first hidden layer's delta with the raw inputs.
            for (int i = 0; i < inputSize; i++)
            {
                for (int j = 0; j < hiddenSize; j++)
                {
                    weightVector.inputLayerWeights[i, j] = learningRate * hiddenLayerDelta[0, j] * (float)inputLayerValue[i];
                }
            }

            return weightVector;
        }
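
        // GradientWeightVector mirrors the network's three weight arrays so TrainNetwork can
        // accumulate per-sample gradients and apply them to the weights. This sketch shows
        // the shape implied by GradientBackProp; the constructor argument and array
        // dimensions are inferred, and the "Sketch" suffix marks it as hypothetical:
        public class GradientWeightVectorSketch
        {
            public float[,] inputLayerWeights;
            public float[,,] hiddenLayerWeights;
            public float[,] outputLayerWeights;

            public GradientWeightVectorSketch(NeuralNetwork net)
            {
                // Same dimensions as the corresponding weight arrays in NeuralNetwork.
                inputLayerWeights  = new float[net.inputSize, net.hiddenSize];
                hiddenLayerWeights = new float[net.hiddenDimension - 1, net.hiddenSize, net.hiddenSize];
                outputLayerWeights = new float[net.outputSize, net.hiddenSize];
            }
        }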