Example #1
0
    /// <summary>
    /// Serializes the network held by <paramref name="E"/> into a string of
    /// float-encoded bytes. Layout, in write order:
    /// hidden-layer count; hidden-layer size (0 if none); per-perceptron weight
    /// counts for every hidden perceptron; output-layer size; output weight
    /// count (0 if none); then bias followed by weights for each hidden,
    /// output, and finally input perceptron. Progress is published through the
    /// static SavingThread.m_Saving_* fields as values are written.
    /// </summary>
    /// <param name="E">Network snapshot to serialize.</param>
    /// <returns>The serialized network as a string of float bytes.</returns>
    public static string SaveFile(Epoch E)
    {
        StringMaker str = new StringMaker();

        // All things that need to be saved to a file
        List<Perceptron>       input_layer   = E.GetInputLayer();
        List<List<Perceptron>> hidden_layers = E.GetHiddenLayers();
        List<Perceptron>       output_layers = E.GetOutputLayers();

        // Header: total hidden layer count.
        str.Add(FloatByteConverter.FloatToStringOfBytes((float)hidden_layers.Count));

        // Header: hidden layer size (0 when there are no hidden layers).
        if (hidden_layers.Count == 0)
        {
            str.Add(FloatByteConverter.FloatToStringOfBytes(0f));
        }
        else
        {
            str.Add(FloatByteConverter.FloatToStringOfBytes((float)hidden_layers[0].Count));
        }

        // Header: weight count of every hidden perceptron.
        for (int i = 0; i < hidden_layers.Count; i++)
        {
            for (int j = 0; j < hidden_layers[i].Count; j++)
            {
                float weight_count = hidden_layers[i][j].GetWeights().Length;
                str.Add(FloatByteConverter.FloatToStringOfBytes(weight_count));
            }
        }

        // Header: output layer size.
        str.Add(FloatByteConverter.FloatToStringOfBytes((float)output_layers.Count));

        // Header: output weight count. Guarded like the hidden layer above —
        // the previous code indexed output_layers[0] unconditionally and threw
        // ArgumentOutOfRangeException on an empty output layer.
        if (output_layers.Count == 0)
        {
            str.Add(FloatByteConverter.FloatToStringOfBytes(0f));
        }
        else
        {
            float output_weight_count = output_layers[0].GetWeights().Length;
            str.Add(FloatByteConverter.FloatToStringOfBytes(output_weight_count));
        }

        // Payload: bias and weights of each hidden-layer perceptron.
        // (No Count > 0 guard needed — an empty list simply skips the loop.)
        for (int i = 0; i < hidden_layers.Count; i++)
        {
            for (int j = 0; j < hidden_layers[i].Count; j++)
            {
                float[] weights = hidden_layers[i][j].GetWeights();
                float   bias    = hidden_layers[i][j].GetBias();

                //BIAS
                str.Add(FloatByteConverter.FloatToStringOfBytes(bias));
                //WEIGHTS
                for (int c = 0; c < weights.Length; c++)
                {
                    str.Add(FloatByteConverter.FloatToStringOfBytes(weights[c]));

                    // Progress counters count DOWN: remaining layers
                    // (+1 accounts for the input layer written last),
                    // remaining perceptrons in this layer, remaining weights.
                    SavingThread.m_Saving_Layer      = (hidden_layers.Count + 1) - i;
                    SavingThread.m_Saving_Perceptron = hidden_layers[i].Count - j;
                    SavingThread.m_Saving_Weight     = weights.Length - c;
                }
            }
        }

        // Payload: bias and weights of each output-layer perceptron.
        for (int i = 0; i < output_layers.Count; i++)
        {
            float[] weights = output_layers[i].GetWeights();
            float   bias    = output_layers[i].GetBias();

            //BIAS
            str.Add(FloatByteConverter.FloatToStringOfBytes(bias));
            //WEIGHTS
            for (int c = 0; c < weights.Length; c++)
            {
                str.Add(FloatByteConverter.FloatToStringOfBytes(weights[c]));

                SavingThread.m_Saving_Layer      = 1;
                SavingThread.m_Saving_Perceptron = output_layers.Count - i;
                SavingThread.m_Saving_Weight     = weights.Length - c;
            }
        }

        // Payload: bias and single weight of each input-layer perceptron.
        for (int i = 0; i < input_layer.Count; i++)
        {
            float weight = input_layer[i].GetWeight(0);
            float bias   = input_layer[i].GetBias();

            //BIAS
            str.Add(FloatByteConverter.FloatToStringOfBytes(bias));
            //WEIGHT
            str.Add(FloatByteConverter.FloatToStringOfBytes(weight));

            SavingThread.m_Saving_Layer      = 0;
            SavingThread.m_Saving_Perceptron = input_layer.Count - i;
            // NOTE(review): each input perceptron writes exactly one weight,
            // so this looks like it should be 1 rather than the perceptron
            // counter — kept as-is to preserve observed progress behavior;
            // confirm against the progress UI.
            SavingThread.m_Saving_Weight     = input_layer.Count - i;
        }

        return str.GetString();
    }