Example #1
    // Dispatches the chosen decision to the matching actor component on this GameObject.
    private void processDecision(TreeOfDecisions.Type type)
    {
        switch (type)
        {
        case TreeOfDecisions.Type.Wait:
        {
            GetComponent<Waiter>().act();
            break;
        }

        case TreeOfDecisions.Type.Drop:
        {
            GetComponent<Dropper>().act();
            break;
        }

        case TreeOfDecisions.Type.Throw:
        {
            GetComponent<Thrower>().act();
            break;
        }

        case TreeOfDecisions.Type.Punch:
        {
            // Note: Puncher exposes Punch() rather than act() like the other actors.
            GetComponent<Puncher>().Punch();
            break;
        }
        }
    }
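The four actor components are fetched with separate GetComponent calls, and Puncher exposes Punch() while the others expose act(). Below is a minimal sketch of one way to unify the dispatch behind a shared interface; the IAction interface, the Act() method name, and the ActionDispatcher class are assumptions, not part of the original project, and the lookup table only compiles if Waiter, Dropper, Thrower and Puncher were changed to implement IAction.

    using System.Collections.Generic;
    using UnityEngine;

    // Hypothetical shared contract (assumption: the original actors expose
    // act()/Punch() and do not implement this interface).
    public interface IAction
    {
        void Act();
    }

    public class ActionDispatcher : MonoBehaviour
    {
        private Dictionary<TreeOfDecisions.Type, IAction> actions;

        void Awake()
        {
            // One lookup table replaces the per-type switch in processDecision.
            actions = new Dictionary<TreeOfDecisions.Type, IAction>
            {
                { TreeOfDecisions.Type.Wait,  GetComponent<Waiter>()  },
                { TreeOfDecisions.Type.Drop,  GetComponent<Dropper>() },
                { TreeOfDecisions.Type.Throw, GetComponent<Thrower>() },
                { TreeOfDecisions.Type.Punch, GetComponent<Puncher>() },
            };
        }

        public void ProcessDecision(TreeOfDecisions.Type type)
        {
            if (actions.TryGetValue(type, out IAction action))
            {
                action.Act();
            }
        }
    }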
Example #2
    // Use this for initialization
    void Start()
    {
        rb   = GetComponent<Rigidbody>();
        nnai = GetComponent<NNAI>();

        // Build the decision tree, take the first decision from the network,
        // and walk the tree with it.
        tree     = new TreeOfDecisions(this);
        decision = nnai.GetDecision();
        tree.WalkWith(decision);

        // Initial movement direction and dead-end watchdog counter.
        Direction           = nnai.GetDirection();
        deadEndAgentCounter = DeadEndAgentCounterValue;
    }
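Start() assumes that a Rigidbody and an NNAI component sit on the same GameObject. A small sketch follows, assuming the script is a MonoBehaviour (the class name Agent below is made up, since the original class name is not shown), of using Unity's RequireComponent attribute so the editor enforces those dependencies and the GetComponent calls cannot return null.

    using UnityEngine;

    // RequireComponent makes Unity add the listed components whenever this
    // script is attached, so GetComponent in Start() always finds them.
    [RequireComponent(typeof(Rigidbody))]
    [RequireComponent(typeof(NNAI))]
    public class Agent : MonoBehaviour   // "Agent" is an assumed class name
    {
        private Rigidbody rb;
        private NNAI      nnai;

        void Start()
        {
            rb   = GetComponent<Rigidbody>();
            nnai = GetComponent<NNAI>();
        }
    }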
Example #3
    // FixedUpdate is called once per physics step
    void FixedUpdate()
    {
        // A missing tree/network or an exhausted dead-end counter means the
        // agent is in an inconsistent state: log it, remove it, and stop.
        if (tree == null || nnai.neuralNetwork == null || deadEndAgentCounter <= 0)
        {
            Debug.Log("Memory bug. " + ObserverInstance.SimulationNumber + " simulation is inconsistent.");

            //TODO add DeadEnd label into agent stored data

            ObserverInstance.RemoveAgent(gameObject);
            return;
        }

        nnai.InputReset();

        if (positiveChange > 0 || negativeChange > 0 || DTTResourceChange > 0)
        {
            // Feed the resource changes back into the tree and the network.
            tree.ChangeOutcomes(positiveChange, (short)(negativeChange + DTTResourceChange));
            nnai.DecisionBackPropagation(tree.RebalanceValue, decision);
            nnai.DirectionBackPropagation(tree.RebalanceValue, Direction);
            tree.RebalanceValue = 0;

            // A positive or negative outcome restarts the walk from the root.
            if (negativeChange > 0 || positiveChange > 0)
            {
                tree.moveTo(tree.Root);
                deadEndAgentCounter = DeadEndAgentCounterValue;
            }
            resetResourceChange();

            // Take the next decision; a changed decision clears the dead-end state.
            decision = nnai.GetDecision();
            if (decision != oldDecision)
            {
                DeadEndReset();
                oldDecision = decision;
            }
            processDecision(decision);
            tree.WalkWith(decision);
        }

        // Steering: a changed direction also clears the dead-end state.
        Direction = nnai.GetDirection();
        if (Direction != oldDirection)
        {
            DeadEndReset();
            oldDirection = Direction;
        }
        rb.AddForce(Direction * Step);
    }
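FixedUpdate() relies on several members that do not appear in these excerpts: the resource-change counters, resetResourceChange(), and DeadEndReset(). The member-level sketch below shows how they could look, inferred purely from their call sites above; the field types and method bodies are assumptions.

    // Hypothetical members, inferred from how FixedUpdate() uses them.

    // Per-step resource deltas (types assumed).
    private short positiveChange;
    private short negativeChange;
    private short DTTResourceChange;

    // Clears the deltas once they have been fed back into the tree and network.
    private void resetResourceChange()
    {
        positiveChange    = 0;
        negativeChange    = 0;
        DTTResourceChange = 0;
    }

    // Restores the dead-end watchdog to its starting value.
    private void DeadEndReset()
    {
        deadEndAgentCounter = DeadEndAgentCounterValue;
    }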
Example #4
    public void RebalanceDecisionBranch(float value, TreeOfDecisions.Type decision)
    {
        // Bail out before touching the layers if the network is not fully built.
        if (DecisionOutputLayer == null || DecisionHiddenLayer == null || InputLayer == null)
        {
            return;
        }
        ResetVisit(DecisionOutputLayer);
        ResetVisit(DecisionHiddenLayer);
        ResetVisit(InputLayer);

        // Each decision type maps to one output neuron:
        // 0 = Drop, 1 = Punch, 2 = Throw, 3 = Wait.
        switch (decision)
        {
        case TreeOfDecisions.Type.Drop:
        {
            DecisionOutputLayer[0].RebalanceWeights(value, DecisionLearningRate);
            break;
        }

        case TreeOfDecisions.Type.Punch:
        {
            DecisionOutputLayer[1].RebalanceWeights(value, DecisionLearningRate);
            break;
        }

        case TreeOfDecisions.Type.Throw:
        {
            DecisionOutputLayer[2].RebalanceWeights(value, DecisionLearningRate);
            break;
        }

        case TreeOfDecisions.Type.Wait:
        {
            DecisionOutputLayer[3].RebalanceWeights(value, DecisionLearningRate);
            break;
        }
        }
    }
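The switch hard-codes the output-neuron index of each decision (0 = Drop, 1 = Punch, 2 = Throw, 3 = Wait). Below is a minimal sketch of keeping that mapping in one place; the OutputIndexOf helper is an assumption, not part of the original network class.

    // Hypothetical helper: one authoritative decision-to-index mapping.
    private static int OutputIndexOf(TreeOfDecisions.Type decision)
    {
        switch (decision)
        {
        case TreeOfDecisions.Type.Drop:  return 0;
        case TreeOfDecisions.Type.Punch: return 1;
        case TreeOfDecisions.Type.Throw: return 2;
        case TreeOfDecisions.Type.Wait:  return 3;
        default:                         return -1;   // unknown decision type
        }
    }

    // RebalanceDecisionBranch then reduces to a single lookup:
    //     int index = OutputIndexOf(decision);
    //     if (index >= 0)
    //         DecisionOutputLayer[index].RebalanceWeights(value, DecisionLearningRate);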
Example #5
    // Forwards the decision-branch rebalance to the underlying neural network.
    public void DecisionBackPropagation(float value, TreeOfDecisions.Type decision)
    {
        neuralNetwork.RebalanceDecisionBranch(value, decision);
    }
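FixedUpdate() also calls nnai.DirectionBackPropagation, which is not shown in these excerpts. A sketch of how it could mirror the decision wrapper above, assuming the network exposes a RebalanceDirectionBranch counterpart (that method name is an assumption):

    // Hypothetical mirror of DecisionBackPropagation for the direction branch;
    // RebalanceDirectionBranch is assumed, not shown in the excerpts.
    public void DirectionBackPropagation(float value, Vector3 direction)
    {
        neuralNetwork.RebalanceDirectionBranch(value, direction);
    }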