Example #1
    public NetParamData(NetParameters _netParameters, float[] _performanceHistory)
    {
        w_ij_i0 = _netParameters.weights1[0];
        w_ij_i1 = _netParameters.weights1[1];
        w_ij_i2 = _netParameters.weights1[2];
        w_ij_i3 = _netParameters.weights1[3];
        w_ij_i4 = _netParameters.weights1[4];
        w_ij_i5 = _netParameters.weights1[5];
        w_ij_i6 = _netParameters.weights1[6];
        w_ij_i7 = _netParameters.weights1[7];

        w_out_j0 = _netParameters.weights2[0];
        w_out_j1 = _netParameters.weights2[1];
        w_out_j2 = _netParameters.weights2[2];
        w_out_j3 = _netParameters.weights2[3];
        w_out_j4 = _netParameters.weights2[4];
        w_out_j5 = _netParameters.weights2[5];
        w_out_j6 = _netParameters.weights2[6];
        w_out_j7 = _netParameters.weights2[7];

        b_j   = _netParameters.biases1;
        b_out = _netParameters.biases2;

        generation = _netParameters.generation;
        note       = _netParameters.note;

        performanceHistory = _performanceHistory;
    }
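
    The constructor above flattens the jagged weights1/weights2 arrays into one named field per row, presumably because Unity's JsonUtility does not serialize jagged arrays such as float[][]. A minimal sketch of the fields this constructor implies (the real NetParamData class is not shown, so the names are inferred from the assignments and the type of note is a guess):

    [System.Serializable]
    public class NetParamData
    {
        public float[] w_ij_i0, w_ij_i1, w_ij_i2, w_ij_i3, w_ij_i4, w_ij_i5, w_ij_i6, w_ij_i7;
        public float[] w_out_j0, w_out_j1, w_out_j2, w_out_j3, w_out_j4, w_out_j5, w_out_j6, w_out_j7;
        public float[] b_j;
        public float[] b_out;
        public int     generation;
        public string  note;
        public float[] performanceHistory;
    }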
Example #2
    public static NetParameters SetUniformValues(int _id, int _generation, float _weight, float _bias)
    {
        float[][] w1 = new float[8][]; //weights, input to hidden
        float[][] w2 = new float[8][]; //weights, hidden to output
        float[]   b1 = new float[8];   //biases for hidden layer 1
        float[]   b2 = new float[4];   //biases for output

        for (int i = 0; i < w1.Length; i++)
        {
            w1[i] = new float[8];
            w2[i] = new float[4];

            for (int j = 0; j < 8; j++)
            {
                w1[i][j] = _weight;
                if (j < 4)
                {
                    w2[i][j] = _weight;
                }
            }

            b1[i] = _bias;
            if (i < 4)
            {
                b2[i] = _bias;
            }
        }


        NetParameters uniformParams = new NetParameters(_id, _generation, w1, w2, b1, b2);

        return(uniformParams);
    }
Example #3
 //build from existing instance of the class
 public NetParameters(NetParameters _netParameters)
 {
     this.weights1    = _netParameters.weights1;
     this.weights2    = _netParameters.weights2;
     this.biases1     = _netParameters.biases1;
     this.biases2     = _netParameters.biases2;
     this.performance = _netParameters.performance;
 }
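
 One thing to note: this copy constructor copies array references, so the copy and the original share the same weight and bias arrays. If a fully independent copy were needed, a minimal deep-copy sketch could look like the following (it reuses the (id, generation, w1, w2, b1, b2) constructor seen in the other examples; DeepCopy itself is not part of the original code):

 public NetParameters DeepCopy()
 {
     float[][] w1 = new float[weights1.Length][];
     float[][] w2 = new float[weights2.Length][];

     for (int i = 0; i < weights1.Length; i++)
     {
         w1[i] = (float[])weights1[i].Clone();
         w2[i] = (float[])weights2[i].Clone();
     }

     return new NetParameters(id, generation, w1, w2,
                              (float[])biases1.Clone(), (float[])biases2.Clone());
 }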
Example #4
    public static void SaveData(string _filename, NetParameters _netParameters, float[] _performanceHistory)
    {
        NetParamData data = new NetParamData(_netParameters, _performanceHistory);

        string dataAsJson = JsonUtility.ToJson(data, true);

        string filePath = Path.Combine(Application.streamingAssetsPath, _filename);

        File.WriteAllText(filePath, dataAsJson);
    }
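
    The matching LoadData used in Example #6 is not shown here. A minimal sketch of the read half, assuming the same file location and serialization as SaveData (rebuilding a NetParameters from the NetParamData fields is omitted, since that mapping is not part of the code shown):

    public static NetParamData LoadRawData(string _filename)
    {
        string filePath   = Path.Combine(Application.streamingAssetsPath, _filename);
        string dataAsJson = File.ReadAllText(filePath);

        return JsonUtility.FromJson<NetParamData>(dataAsJson);
    }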
Example #5
    //set up the neural network (called from EvolveNetParameters)
    public void SetWeightsAndBiases(NetParameters netParams)
    {
        w1 = netParams.weights1;
        w2 = netParams.weights2;

        b1 = netParams.biases1;
        b2 = netParams.biases2;

        id         = netParams.id;
        generation = netParams.generation;

        paramsSet = true;
    }
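
    From the array shapes (w1 is 8x8, w2 is 8x4, b1 has 8 entries, b2 has 4), the network is an 8-input, 8-hidden, 4-output net. The project's own feed-forward code is not shown, so the sketch below only illustrates how these fields would typically be used; the indexing convention (w1[i][j] = input i to hidden j, w2[i][j] = hidden i to output j) and the tanh activation are assumptions.

    float[] FeedForwardSketch(float[] inputs)
    {
        float[] hidden  = new float[8];
        float[] outputs = new float[4];

        //input layer -> hidden layer
        for (int j = 0; j < 8; j++)
        {
            float sum = b1[j];
            for (int i = 0; i < 8; i++)
            {
                sum += inputs[i] * w1[i][j];
            }
            hidden[j] = (float)System.Math.Tanh(sum);
        }

        //hidden layer -> output layer
        for (int j = 0; j < 4; j++)
        {
            float sum = b2[j];
            for (int i = 0; i < 8; i++)
            {
                sum += hidden[i] * w2[i][j];
            }
            outputs[j] = (float)System.Math.Tanh(sum);
        }

        return outputs;
    }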
Example #6
    void Start()
    {
        GameObject obj = GameObject.FindGameObjectWithTag("ohd");

        if (obj != null)
        {
            displayInfo = obj.GetComponent<Text>();
        }

        //create 0th generation of parameter sets

        currentGenParameters = new NetParameters[population];

        //generate random weights and biases for 0th generation, set id and generation id

        //testParams = NetParameters.SetUniformValues(0, 0, 2f, 2f);

        for (int i = 0; i < currentGenParameters.Length; i++)
        {
            //currentGenParameters[i] = new NetParameters(NetParameters.SetRandomValues(i, generation, startVariation, 0));
            //currentGenParameters[i] = new NetParameters(testParams);


            if (loadData)
            {
                currentGenParameters[i] = SaveAndLoadParams.LoadData(fileToLoad);
            }
            else
            {
                currentGenParameters[i] = new NetParameters(NetParameters.SetRandomValues(i, generation, startVariation, 0));
            }
        }


        generation = currentGenParameters[0].generation;

        if (loadData)
        {
            performanceList = SaveAndLoadParams.LoadDataPerformanceHistory(fileToLoad).ToList();
        }



        //spawn simulation with generation 0 parameters

        currentGeneration = SpaunNewGeneration(currentGenParameters);
    }
Example #7
    // New parameter generators (mutate, crossbreed, random, uniform, etc.)



    public static NetParameters SetRandomValues(int _id, int _generation, float _variation, float _initialBias)
    {
        float[][] w1 = new float[8][]; //weights, input to hidden
        float[][] w2 = new float[8][]; //weights, hidden to output
        float[]   b1 = new float[8];   //biases for hidden layer 1
        float[]   b2 = new float[4];   //biases for output


        float xavierN1 = _variation * Mathf.Sqrt(6f / (8f + 8f));
        float xavierN2 = _variation * Mathf.Sqrt(6f / (8f + 4f));



        for (int i = 0; i < w1.Length; i++)
        {
            w1[i] = new float[8];
            w2[i] = new float[4];

            for (int j = 0; j < 8; j++)
            {
                w1[i][j] = Random.Range(-xavierN1, xavierN1);
                if (j < 4)
                {
                    w2[i][j] = Random.Range(-xavierN2, xavierN2);
                }
            }

            b1[i] = _initialBias;
            if (i < 4)
            {
                b2[i] = _initialBias;
            }
        }



        NetParameters randomNetParameters = new NetParameters(_id, _generation, w1, w2, b1, b2);

        return(randomNetParameters);
    }
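
    The xavierN1/xavierN2 factors are the Xavier (Glorot) uniform initialisation bound sqrt(6 / (fan_in + fan_out)), scaled by _variation: sqrt(6 / (8 + 8)) ≈ 0.61 for the input-to-hidden weights and sqrt(6 / (8 + 4)) ≈ 0.71 for the hidden-to-output weights, so each weight is drawn uniformly from that symmetric range.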
Example #8
    void Update()
    {
        if (!FinishedStop)
        {
            //check if all simulations have ended

            int finishedSims = 0;

            for (int i = 0; i < currentGeneration.Length; i++)
            {
                if (currentGeneration[i].SimulationEnded)
                {
                    finishedSims++;
                }
            }

            //array for storing performance values for sorting based on performance later

            float[] performances = new float[currentGeneration.Length];
            int[]   ids          = new int[currentGeneration.Length];


            //if all have finished then store performance values

            if (finishedSims == currentGeneration.Length)
            {
                for (int i = 0; i < currentGeneration.Length; i++)
                {
                    performances[i] = currentGeneration[i].performanceMetric;

                    currentGenParameters[i].performance = currentGeneration[i].performanceMetric;


                    ids[i] = i;
                }



                //sort generation into order of best performance

                //bubble sort, keeping track of id movements
                for (int i = 0; i < currentGeneration.Length; i++)
                {
                    for (int j = 0; j < currentGeneration.Length - 1; j++)
                    {
                        if (performances[j + 1] > performances[j])
                        {
                            float temp = performances[j];
                            performances[j]     = performances[j + 1];
                            performances[j + 1] = temp;

                            int tempInt = ids[j];
                            ids[j]     = ids[j + 1];
                            ids[j + 1] = tempInt;
                        }
                    }
                }


                //print the best performance so far

                bestPerformanceSoFar = (performances[0]);
                performanceList.Add(bestPerformanceSoFar);

                float worst = (performances[currentGeneration.Length - 1]);
                print("Generation " + generation + " best " + bestPerformanceSoFar + " worst " + worst + " mss");


                //take the best 5 NetworkParameters based on performance

                NetParameters[] bestOfGeneration = new NetParameters[5];

                for (int i = 0; i < 5; i++)
                {
                    bestOfGeneration[i] = new NetParameters(currentGenParameters[ids[i]]); //duplicates the parameters from current generation winners
                    //print("best performances going to next round " + bestOfGeneration[i].performance);
                }



                //increments generation counter for the next generation

                generation++;



                if (generation < numberOfGenerations)
                {
                    if (displayInfo != null)
                    {
                        displayInfo.text = "generation\n" + generation.ToString() + "\nscore\n" + (bestPerformanceSoFar * 100f).ToString("F3");
                    }

                    //creates a new generation

                    //spawn a new set, keeping the 2 best from the last generation
                    for (int i = 0; i < 2; i++)
                    {
                        currentGenParameters[i]            = new NetParameters(bestOfGeneration[i]);
                        currentGenParameters[i].generation = generation;
                        currentGenParameters[i].id         = i;
                    }


                    /*
                     * for (int i = 2; i < 32; i++)
                     * {
                     *  //mutate
                     *  currentGenParameters[i] = new NetParameters(MutateParameters(i, generation, bestOfGeneration[0]));
                     *
                     * }
                     *
                     * for (int i = 32; i < 62; i++)
                     * {
                     *  //cross breed
                     *  currentGenParameters[i] = new NetParameters(CrossBreed(i, generation, bestOfGeneration[0], bestOfGeneration[1], false));
                     *
                     * }
                     *
                     * for (int i = 62; i < 100; i++)
                     * {
                     *  //cross breed with mutation
                     *  currentGenParameters[i] = new NetParameters(CrossBreed(i, generation, bestOfGeneration[0], bestOfGeneration[1], true));
                     *
                     * }
                     *
                     * for (int i = 100; i < currentGenParameters.Length; i++)
                     * {
                     *  //cross breed with mutation
                     *  currentGenParameters[i] = new NetParameters(SetRandomValues(i, generation));
                     *
                     * }
                     *
                     * for (int i = currentGenParameters.Length - 10; i < currentGenParameters.Length; i++)
                     * {
                     *  //currentGenParameters[i] = new NetParameters(SetRandomValues(i, generation));
                     *  //currentGenParameters[i] = new NetParameters(SetRandomValues(i, generation));
                     *
                     * }
                     */

                    for (int i = 2; i < 76; i++)
                    {
                        //mutate
                        //currentGenParameters[i] = new NetParameters(MutateParameters(i, generation, bestOfGeneration[0]));
                    }
                    for (int i = 76; i < currentGenParameters.Length; i++)
                    {
                        //mutate
                        //currentGenParameters[i] = new NetParameters(MutateParameters(i, generation, bestOfGeneration[1]));
                    }

                    for (int i = 2; i < currentGenParameters.Length; i++)
                    {
                        //currentGenParameters[i] = new NetParameters(bestOfGeneration[0]);
                        //currentGenParameters[i] = new NetParameters(bestOfGeneration[i%5]);
                        //currentGenParameters[i] = new NetParameters(MutateParameters(i, generation, currentGenParameters[i]));
                        //currentGenParameters[i] = new NetParameters(SetRandomValues(i, generation));
                        //currentGenParameters[i] = new NetParameters(SetUniformValues(0, generation));
                        //currentGenParameters[i] = new NetParameters(testParams);
                        currentGenParameters[i] = new NetParameters(NetParameters.CrossBreed(i, generation, bestOfGeneration[0], bestOfGeneration[1], mutationAmount, mutationFrequency));


                        //currentGenParameters[i] = new NetParameters(MutateParameters(i, generation, bestOfGeneration[0]));
                        // currentGenParameters[i] = new NetParameters(CrossBreed(i, generation, bestOfGeneration[0], bestOfGeneration[1], true));
                    }

                    if (bestPerformanceSoFar < 0.030f)
                    {
                        for (int i = currentGenParameters.Length - 350; i < currentGenParameters.Length; i++)
                        {
                            //currentGenParameters[i] = new NetParameters(NetParameters.CrossBreed(i, generation, bestOfGeneration[0], bestOfGeneration[1], 2f, 2));
                            //currentGenParameters[i] = new NetParameters(NetParameters.SetRandomValues(i, generation, 2f, 2));
                        }
                    }

                    simulationType = "cross breed";
                    hiddenlayers   = 1;

                    //destroy current generation :(

                    foreach (NeuralNet NN in currentGeneration)
                    {
                        Destroy(NN.gameObject);
                    }

                    //spawn new generation
                    currentGeneration = SpaunNewGeneration(currentGenParameters);


                    //if the performance reaches a threshold then add challenge to the task by altering the positions of the balls
                    if (bestPerformanceSoFar > 0.07f)
                    {
                        triggerPositionRandomisation = true;
                        // randomiseBallPosition = true;

                        mutationFrequency = 30;
                        mutationAmount    = 1f;
                    }

                    if (triggerPositionRandomisation || randomiseBallPosition)
                    {
                        for (int i = 0; i < currentGeneration.Length; i++)
                        {
                            currentGeneration[i].RandomlyPositionBalls = randomiseBallPosition;
                            //currentGeneration[i].positionRange = bestPerformanceSoFar * 5f < 0.4f ? bestPerformanceSoFar * 5f : 0.4f;
                        }
                    }
                }
                else
                {
                    EndRun(ids);
                }
            }

            if (Input.GetKeyDown(KeyCode.Escape))
            {
                EndRun(ids);
            }
        }
    }
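
    The bubble sort above keeps the ids array in lockstep with performances so the winning parameter sets can be looked up afterwards. An equivalent, shorter approach (a sketch, not the original code) is to let Array.Sort use performances as the key array and then reverse both arrays to get the best-first ordering the rest of Update expects:

    //sort ids by performance, best first
    System.Array.Sort(performances, ids); //ascending by performance, ids reordered to match
    System.Array.Reverse(performances);
    System.Array.Reverse(ids);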
Example #9
    public static NetParameters CrossBreed(int _id, int _generation, NetParameters _originalParameters, NetParameters _originalParameters2, float _mutationAmplitude, int _mutationFrequency)
    {
        float[][] w1 = new float[8][]; //weights, input to hidden
        float[][] w2 = new float[8][]; //weights, hidden to output
        float[]   b1 = new float[8];   //biases for hidden layer 1
        float[]   b2 = new float[4];   //biases for output

        //reference input NetParameters
        float[][] w1_a = _originalParameters.weights1;
        float[][] w2_a = _originalParameters.weights2;

        float[] b1_a = _originalParameters.biases1;
        float[] b2_a = _originalParameters.biases2;

        //reference input NetParameters2
        float[][] w1_b = _originalParameters2.weights1;
        float[][] w2_b = _originalParameters2.weights2;

        float[] b1_b = _originalParameters2.biases1;
        float[] b2_b = _originalParameters2.biases2;

        //parameters for mutation: Random.Range(0, _mutationFrequency) == 0 gives a 1 in _mutationFrequency chance of a perturbation
        float xavierN1 = _mutationAmplitude * Mathf.Sqrt(6f / (8f + 8f));
        float xavierN2 = _mutationAmplitude * Mathf.Sqrt(6f / (8f + 4f));

        for (int i = 0; i < w1.Length; i++)
        {
            w1[i] = new float[8]; //initialise
            w2[i] = new float[4];

            //copy values from input NetParameters

            for (int j = 0; j < 8; j++)
            {
                //weights crossbreed

                //cross
                w1[i][j] = Random.Range(0, 2) == 0 ? w1_a[i][j] : w1_b[i][j];

                //mutate
                w1[i][j] += Random.Range(0, _mutationFrequency) % _mutationFrequency == 0 ? Random.Range(-xavierN1, xavierN1) : 0;

                if (j < 4)
                {
                    w2[i][j]  = Random.Range(0, 2) == 0 ? w2_a[i][j] : w2_b[i][j];
                    w2[i][j] += Random.Range(0, _mutationFrequency) % _mutationFrequency == 0 ? Random.Range(-xavierN2, xavierN2) : 0;
                }
            }

            //biases

            //cross
            b1[i] = Random.Range(0, 2) == 0 ? b1_a[i] : b1_b[i];
            //mutate
            b1[i] += Random.Range(0, _mutationFrequency) % _mutationFrequency == 0 ? Random.Range(-xavierN1, xavierN1) : 0;

            if (i < 4)
            {
                //cross
                b2[i] = Random.Range(0, 2) == 0 ? b2_a[i] : b2_b[i];
                //mutate
                b2[i] += Random.Range(0, _mutationFrequency) % _mutationFrequency == 0 ? Random.Range(-xavierN1, xavierN1) : 0;
            }
        }


        NetParameters crossedParams = new NetParameters(_id, _generation, w1, w2, b1, b2);

        return(crossedParams);
    }
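
    Since Unity's Random.Range(int, int) excludes the upper bound, Random.Range(0, _mutationFrequency) already returns a value in 0.._mutationFrequency - 1, so the % _mutationFrequency in the mutation tests is redundant and the condition is simply a 1 in _mutationFrequency chance of being true.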
Example #10
    public static NetParameters MutateParameters(int _id, int _generation, NetParameters _originalParameters, float _mutationAmplitude, int _mutationFrequency)
    {
        float[][] w1 = new float[8][]; //weights, input to hidden
        float[][] w2 = new float[8][]; //weights, hidden to output
        float[]   b1 = new float[8];   //biases for hidden layer 1
        float[]   b2 = new float[4];   //biases for output

        //reference input NetParameters
        float[][] w1_ = _originalParameters.weights1;
        float[][] w2_ = _originalParameters.weights2;

        float[] b1_ = _originalParameters.biases1;
        float[] b2_ = _originalParameters.biases2;

        for (int i = 0; i < w1.Length; i++)
        {
            w1[i] = new float[8]; //initialise
            w2[i] = new float[4];

            //copy values from input NetParameters

            for (int j = 0; j < 8; j++)
            {
                w1[i][j] = w1_[i][j];
                if (j < 4)
                {
                    w2[i][j] = w2_[i][j];
                }
            }

            //copy biases from the input NetParameters
            b1[i] = b1_[i];
            if (i < 4)
            {
                b2[i] = b2_[i];
            }
        }

        //Random.Range(0, _mutationFrequency) == 0 gives a 1 in _mutationFrequency chance of occurrence
        float xavierN1 = _mutationAmplitude * Mathf.Sqrt(6f / (8f + 8f));
        float xavierN2 = _mutationAmplitude * Mathf.Sqrt(6f / (8f + 4f));

        for (int i = 0; i < w1.Length; i++)
        {
            for (int j = 0; j < 8; j++)
            {
                if (Random.Range(0, _mutationFrequency) % _mutationFrequency == 0)
                {
                    w1[i][j] += Random.Range(-xavierN1, xavierN1);
                }

                if (j < 4)
                {
                    if (Random.Range(0, _mutationFrequency) % _mutationFrequency == 0)
                    {
                        w2[i][j] += Random.Range(-xavierN2, xavierN2);
                    }
                }
            }
            if (Random.Range(0, _mutationFrequency) % _mutationFrequency == 0)
            {
                b1[i] += Random.Range(-xavierN1, xavierN1);
            }

            if (i < 4)
            {
                if (Random.Range(0, _mutationFrequency) % _mutationFrequency == 0)
                {
                    b2[i] += Random.Range(-xavierN1, xavierN1);
                }
            }
        }



        NetParameters mutatedNetParams = new NetParameters(_id, _generation, w1, w2, b1, b2);

        return(mutatedNetParams);
    }