Example #1
        public AlexNet(int classes = 1000, string prefix = "", ParameterDict @params = null) : base(prefix, @params)
        {
            Features = new HybridSequential(prefix);
            Features.Add(new Conv2D(64, (11, 11), (4, 4), (2, 2), activation: ActivationType.Relu));
            Features.Add(new MaxPool2D((3, 3), (2, 2)));

            Features.Add(new Conv2D(192, (5, 5), padding: (2, 2), activation: ActivationType.Relu));
            Features.Add(new MaxPool2D((3, 3), (2, 2)));

            Features.Add(new Conv2D(384, (3, 3), padding: (1, 1), activation: ActivationType.Relu));
            Features.Add(new Conv2D(256, (3, 3), padding: (1, 1), activation: ActivationType.Relu));
            Features.Add(new Conv2D(256, (3, 3), padding: (1, 1), activation: ActivationType.Relu));
            Features.Add(new MaxPool2D((3, 3), (2, 2)));
            Features.Add(new Flatten());
            Features.Add(new Dense(4096, ActivationType.Relu));
            Features.Add(new Dropout(0.5f));
            Features.Add(new Dense(4096, ActivationType.Relu));
            Features.Add(new Dropout(0.5f));

            Output = new Dense(classes);

            RegisterChild(Features);
            RegisterChild(Output);
        }
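A hypothetical usage sketch (the constructor above follows MXNet's Gluon API; Initialize, nd.Zeros, and Call below are assumed Gluon-style entry points and may be named differently in this C# binding):

            // Hypothetical usage; Initialize, nd.Zeros and Call are assumed
            // Gluon-style APIs, not confirmed from this binding.
            var net = new AlexNet(classes: 10);
            net.Initialize();                             // allocate parameters (assumed API)
            var x = nd.Zeros(new Shape(1, 3, 224, 224));  // dummy RGB batch; AlexNet expects 224x224 inputs
            var logits = net.Call(x);                     // forward pass -> class scores of shape (1, 10)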
Example #2
        public void SpeedTest()
        {
            int inchannels = 1024, outchannels = 512;

            OverflowCheckedTensor x_tensor = new OverflowCheckedTensor(Shape.Map0D(inchannels));
            OverflowCheckedTensor w_tensor = new OverflowCheckedTensor(Shape.Kernel0D(inchannels, outchannels));

            OverflowCheckedTensor y_tensor = new OverflowCheckedTensor(Shape.Map0D(outchannels));

            Dense ope = new Dense(inchannels, outchannels);

            Stopwatch sw = new Stopwatch();

            sw.Start();

            ope.Execute(x_tensor, w_tensor, y_tensor);
            ope.Execute(x_tensor, w_tensor, y_tensor);
            ope.Execute(x_tensor, w_tensor, y_tensor);
            ope.Execute(x_tensor, w_tensor, y_tensor);

            sw.Stop();

            Console.WriteLine($"{sw.ElapsedMilliseconds / 4} msec");
        }
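The printed average uses integer division and includes first-call overhead in the measurement. A slightly more careful variant, reusing ope and the tensors from the test above (a sketch, not part of the original test):

            // Warm up once so one-time setup cost is excluded, then time a
            // fixed number of iterations and average in double precision.
            const int iterations = 4;
            ope.Execute(x_tensor, w_tensor, y_tensor);      // warm-up run (not timed)

            Stopwatch sw2 = Stopwatch.StartNew();
            for (int i = 0; i < iterations; i++)
            {
                ope.Execute(x_tensor, w_tensor, y_tensor);
            }
            sw2.Stop();

            Console.WriteLine($"{sw2.Elapsed.TotalMilliseconds / iterations:F2} msec");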
Example #3
    void InitConnections()
    {
        float weight_init_range = 0.00001f;

        foreach (KeyValuePair <string, Dictionary <string, string> > structureInfo in networkConnectionInfoDict)
        {
            Dictionary <string, Connection> thisLayerConnectionDict = new Dictionary <string, Connection>();
            // create the layer's empty connection dictionary

            if (structureInfo.Value.Count > 0)
            {
                // create a connection object
                // add an entry to the layer's connection dictionary, with the sender name as the key, and the connection object as its value
                foreach (KeyValuePair <string, string> connectionInfo in structureInfo.Value)
                {
                    if (connectionInfo.Value == "dense")
                    {
                        Connection dense = new Dense {
                            ConnectionType = connectionInfo.Value, Sender = connectionInfo.Key, Recipient = structureInfo.Key
                        };
                        // this should be in the dense constructor
                        dense.ConnectionWeight = Matrix <float> .Build.Random(networkLayerDict[dense.Recipient].NumUnits, networkLayerDict[dense.Sender].NumUnits, new Normal (0, weight_init_range));

                        networkLayerDict[structureInfo.Key].InputConnectionDict.Add(connectionInfo.Key, dense);
                    }
                    else if (connectionInfo.Value == "tanh")
                    {
                        Connection tanh = new TanhActivation {
                            ConnectionType = connectionInfo.Value, Sender = connectionInfo.Key, Recipient = structureInfo.Key
                        };
                        networkLayerDict[structureInfo.Key].InputConnectionDict.Add(connectionInfo.Key, tanh);
                    }
                }
            }
        }
    }
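The initialization above leans on MathNet.Numerics; note that Normal(mean, stddev) is parameterized by standard deviation, so weight_init_range acts as a stddev rather than a range or variance. In isolation, the weight-matrix construction looks like this (a self-contained sketch with illustrative sizes):

    using MathNet.Numerics.Distributions;
    using MathNet.Numerics.LinearAlgebra;

    // Build a recipientUnits x senderUnits matrix with entries drawn from
    // N(0, stddev^2), matching the dense.ConnectionWeight line above.
    int senderUnits = 4, recipientUnits = 3;
    float stddev = 0.00001f;
    Matrix<float> weights =
        Matrix<float>.Build.Random(recipientUnits, senderUnits, new Normal(0, stddev));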
Example #4
    public override void BuildNetwork(Tensor inVectorstate, List <Tensor> inVisualState, Tensor inMemery, Tensor inPrevAction, int outActionSize, SpaceType actionSpace,
                                      out Tensor outAction, out Tensor outValue, out Tensor outVariance)
    {
        Debug.Assert(inMemery == null, "Currently recurrent input is not supported by RLNetworkSimpleAC");
        Debug.Assert(inPrevAction == null, "Currently previous action input is not supported by RLNetworkSimpleAC");
        Debug.Assert(!(inVectorstate == null && inVisualState == null), "Network needs at least one vector observation or visual observation");
        //Debug.Assert(actionSpace == SpaceType.continuous, "Only continuous action space is supported by RLNetworkSimpleAC");
        criticWeights = new List <Tensor>();
        actorWeights  = new List <Tensor>();

        //visual encoders
        Tensor encodedVisualActor  = null;
        Tensor encodedVisualCritic = null;

        if (inVisualState != null)
        {
            List <Tensor> visualEncodedActor  = new List <Tensor>();
            List <Tensor> visualEncodedCritic = new List <Tensor>();
            foreach (var v in inVisualState)
            {
                var ha = CreateVisualEncoder(v, actorHiddenLayers, "ActorVisualEncoder");
                var hc = CreateVisualEncoder(v, criticHiddenLayers, "CriticVisualEncoder");

                actorWeights.AddRange(ha.Item2);
                visualEncodedActor.Add(ha.Item1);

                criticWeights.AddRange(hc.Item2);
                visualEncodedCritic.Add(hc.Item1);
            }
            if (inVisualState.Count > 1)
            {
                //Debug.LogError("Tensorflow does not have gradient for concat operation in C yet. Please only use one observation.");
                encodedVisualActor  = Current.K.stack(visualEncodedActor, 1);
                encodedVisualActor  = Current.K.batch_flatten(encodedVisualActor);
                encodedVisualCritic = Current.K.stack(visualEncodedCritic, 1);
                encodedVisualCritic = Current.K.batch_flatten(encodedVisualCritic);
            }
            else
            {
                encodedVisualActor  = visualEncodedActor[0];
                encodedVisualCritic = visualEncodedCritic[0];
            }
        }



        //vector states encode
        Tensor encodedVectorStateActor  = null;
        Tensor encodedVectorStateCritic = null;

        if (inVectorstate != null)
        {
            var output = BuildSequentialLayers(actorHiddenLayers, inVectorstate, "ActorStateEncoder");
            encodedVectorStateActor = output.Item1;
            actorWeights.AddRange(output.Item2);
            output = BuildSequentialLayers(criticHiddenLayers, inVectorstate, "CriticStateEncoder");
            encodedVectorStateCritic = output.Item1;
            criticWeights.AddRange(output.Item2);
        }

        //concat all inputs
        Tensor encodedAllActor  = null;
        Tensor encodedAllCritic = null;

        if (inVisualState == null && inVectorstate != null)
        {
            encodedAllActor  = encodedVectorStateActor;
            encodedAllCritic = encodedVectorStateCritic;
        }
        else if (inVisualState != null && inVectorstate == null)
        {
            encodedAllActor  = encodedVisualActor;
            encodedAllCritic = encodedVisualCritic;
        }
        else if (inVisualState != null && inVectorstate != null)
        {
            //Debug.LogWarning("Tensorflow does not have gradient for concat operation in C yet. Please only use one type of observation if you need training.");
            encodedAllActor = Current.K.concat(new List <Tensor>()
            {
                encodedVectorStateActor, encodedVisualActor
            }, 1);
            encodedAllCritic = Current.K.concat(new List <Tensor>()
            {
                encodedVectorStateCritic, encodedVisualCritic
            }, 1);
        }


        //outputs
        var actorOutput = new Dense(units: outActionSize, activation: null, use_bias: actorOutputLayerBias, kernel_initializer: new VarianceScaling(scale: actorOutputLayerInitialScale));

        outAction = actorOutput.Call(encodedAllActor)[0];
        if (actionSpace == SpaceType.discrete)
        {
            outAction = Current.K.softmax(outAction);
        }
        actorWeights.AddRange(actorOutput.weights);

        var criticOutput = new Dense(units: 1, activation: null, use_bias: criticOutputLayerBias, kernel_initializer: new GlorotUniform(scale: criticOutputLayerInitialScale));

        outValue = criticOutput.Call(encodedAllCritic)[0];
        criticWeights.AddRange(criticOutput.weights);

        //output variance. Currently not depending on the inputs for this simple network implementation
        if (actionSpace == SpaceType.continuous)
        {
            var logSigmaSq = Current.K.variable((new Constant(0)).Call(new int[] { outActionSize }, DataType.Float), name: "PPO.log_sigma_square");
            outVariance = Current.K.exp(logSigmaSq);
            actorWeights.Add(logSigmaSq);
        }
        else
        {
            outVariance = null;
        }
    }
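For the continuous case the trainable parameter is log sigma^2, so outVariance = exp(logSigmaSq) is positive by construction while the parameter itself stays unconstrained. Sampling from the resulting Gaussian policy is then mean + sigma * eps with eps ~ N(0, 1); a plain C# illustration of that step (the real sampling happens in the tensor graph, not on the CPU like this):

    // Draw one action component a ~ N(mean, variance) using a
    // Box-Muller standard normal sample. Illustrative only.
    static float SampleAction(float mean, float variance, Random rng)
    {
        double u1 = 1.0 - rng.NextDouble();   // in (0, 1], avoids Log(0)
        double u2 = rng.NextDouble();
        double eps = Math.Sqrt(-2.0 * Math.Log(u1)) * Math.Cos(2.0 * Math.PI * u2);
        return mean + (float)(Math.Sqrt(variance) * eps);
    }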
Example #5
 public static void ConvertDenseWeightsDataFormat(Dense dense, Shape previous_feature_map_shape, string target_data_format = "channels_first")
 {
     throw new NotImplementedException();
 }
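The Keras utility this stub mirrors (keras.utils.convert_dense_weights_data_format) re-orders the rows of a Dense kernel that follows a Flatten, so that weights trained with channels_last flattening still line up after the convolutional part switches to channels_first. A sketch of the required row permutation, assuming the kernel is a plain (h*w*c) x units array; the helper name and layout here are hypothetical:

     // Hypothetical sketch: move row index (hi, wi, ci) from channels_last
     // flatten order, (hi*w + wi)*c + ci, to channels_first order,
     // (ci*h + hi)*w + wi. kernel has shape (h*w*c, units).
     static float[,] ConvertToChannelsFirst(float[,] kernel, int h, int w, int c)
     {
         int units = kernel.GetLength(1);
         var converted = new float[h * w * c, units];
         for (int u = 0; u < units; u++)
             for (int hi = 0; hi < h; hi++)
                 for (int wi = 0; wi < w; wi++)
                     for (int ci = 0; ci < c; ci++)
                     {
                         int lastIdx  = (hi * w + wi) * c + ci;
                         int firstIdx = (ci * h + hi) * w + wi;
                         converted[firstIdx, u] = kernel[lastIdx, u];
                     }
         return converted;
     }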
Example #6
        static void Main(string[] args)
        {
            Tensor.SetOpMode(Tensor.OpMode.MultiCPU);

            var input1 = new Dense(2, 2, Activation.Sigmoid)
            {
                Name = "input1"
            };
            var upperStream1 = new Dense(input1, 2, Activation.Linear)
            {
                Name = "upperStream1"
            };
            var lowerStream1 = new Dense(input1, 2, Activation.Linear)
            {
                Name = "lowerStream1"
            };

            var net = new NeuralNetwork("test");

            net.Model = new Flow(new[] { input1 }, new[] { upperStream1, lowerStream1 });

            net.Optimize(new SGD(0.05f), Loss.MeanSquareError);

            var input        = new Tensor(new float[] { 0, 1 }, new Shape(1, 2));
            var outputs      = new [] { new Tensor(new float[] { 0, 1 }, new Shape(1, 2)), new Tensor(new float[] { 1, 2 }, new Shape(1, 2)) };
            var trainingData = new List <Data> {
                new Data(new[] { input }, outputs)
            };

            var netClone = net.Clone();

            netClone.Fit(trainingData, 1, 60, null, 2, Track.Nothing, false);


            //var input1 = new Dense(2, 2, Activation.Sigmoid);
            //var upperStream1 = new Dense(input1, 2, Activation.Sigmoid);
            //var upperStream2 = new Dense(upperStream1, 2, Activation.Sigmoid) { Name = "upperStream2" };
            //var lowerStream1 = new Dense(input1, 2, Activation.Sigmoid) { Name = "lowerStream1" };
            //var merge = new Merge(new[] {upperStream2, lowerStream1}, Merge.Mode.Sum) { Name = "merge1" };

            //var net = new NeuralNetwork("test");
            //net.Model = new Flow(new[] { input1 }, new[] { merge });
            //net.Optimize(new SGD(), new Dictionary<string, LossFunc>{ {"upperStream2", Loss.MeanSquareError}, { "lowerStream1", Loss.Huber1 } });


            /*var inputs = new Tensor(new float[] { 1,1,2,2,3,3,4,4,5,5,6,6,2,3,4,5,6,7,8,9,0,1 }, new Shape(1, 2, 1, 11));
             * var outputs = new Tensor(new float[] { 2,2,3,3,4,4,5,5,6,6,7,7,3,4,5,6,7,8,9,10,1,2 }, new Shape(1, 2, 1, 11));
             *
             * var net = new NeuralNetwork("test");
             * net.AddLayer(new Dense(2, 5, Activation.Sigmoid));
             * net.AddLayer(new Dense(net.LastLayer, 4, Activation.Sigmoid));
             * net.AddLayer(new Dense(net.LastLayer, 2, Activation.Linear));
             *
             * var l0 = net.Layer(0) as Dense;
             * l0.Weights = new Tensor(new[] {-0.5790837f ,  0.79525125f, -0.6933877f , -0.3692013f ,  0.1810553f,
             *                              0.03039712f,  0.91264546f,  0.11529088f,  0.33134186f, -0.46221718f }, new Shape(l0.Weights.Height, l0.Weights.Width)).Transposed();
             *
             * var l1 = net.Layer(1) as Dense;
             * l1.Weights = new Tensor(new[] { 0.08085728f, -0.10262775f,  0.38443696f, -0.23273587f,
             *                              0.33498216f, -0.7566199f , -0.814561f  , -0.08565235f,
             *                             -0.55490625f,  0.6140275f ,  0.34785295f, -0.3431782f,
             *                              0.47427893f, -0.41688982f,  0.59143007f,  0.00616223f,
             *                              0.60304165f,  0.6548513f , -0.78456855f,  0.4640578f }, new Shape(l1.Weights.Height, l1.Weights.Width)).Transposed();
             *
             * var l2 = net.Layer(2) as Dense;
             * l2.Weights = new Tensor(new[] { 0.32492328f,  0.6930735f,
             *                             -0.7263415f ,  0.4574399f,
             *                              0.5422747f ,  0.19008946f,
             *                              0.911242f  , -0.24971604f }, new Shape(l2.Weights.Height, l2.Weights.Width)).Transposed();
             *
             * Trace.WriteLine(net.Predict(inputs.GetBatch(0)));
             *
             * //net.Optimize(new SGD(0.01f), Loss.MeanSquareError);
             * net.Optimize(new Adam(0.01f), Loss.MeanSquareError);
             *
             * net.Fit(inputs, outputs, 1, 100, 2, Track.Nothing, false);*/

            /*var inShape = new Shape(20);
             * var outShape = new Shape(20);
             *
             * List<Data> trainingData = new List<Data>();
             *
             * for (int i = 0; i < 32; ++i)
             * {
             *  var input = new Tensor(inShape);
             *  input.FillWithRand(3 * i);
             *  var output = new Tensor(outShape);
             *  output.FillWithRand(3 * i);
             *  trainingData.Add(new Data(input, output));
             * }
             *
             * var model = new Sequential();
             * model.AddLayer(new Flatten(inShape));
             * model.AddLayer(new Dense(model.LastLayer, 128, Activation.ReLU));
             * model.AddLayer(new Dense(model.LastLayer, 64, Activation.ReLU));
             * model.AddLayer(new Dense(model.LastLayer, outShape.Length, Activation.Linear));
             *
             * var net = new NeuralNetwork("simple_net_perf_test");
             * net.Model = model;
             * net.Optimize(new Adam(), Loss.MeanSquareError);*/

            var timer = new Stopwatch();

            timer.Start();

            //net.Fit(trainingData, -1, 500, null, 0, Track.Nothing);

            timer.Stop();
            Trace.WriteLine($"{Math.Round(timer.ElapsedMilliseconds / 1000.0, 2)} seconds");

            return;
        }
Example #7
            public static void Create()
            {
                float headr = HeadR / W;

                Standard.Add(new Orbit(.1f, .2f, 1f / 13));
                Standard.Add(new Orbit(.367f, .2f, 1f / 13));
                Standard.Add(new Orbit(.633f, .2f, 1f / 13));
                Standard.Add(new Orbit(.9f, .2f, 1f / 13));
                Standard.Add(new Orbit(2f / 9, .5f, 1f / 13));
                Standard.Add(new Orbit(.5f, .5f, 1f / 13));
                Standard.Add(new Orbit(7f / 9, .5f, 1f / 13));
                Standard.Add(new Orbit(.1f, .8f, 1f / 13));
                Standard.Add(new Orbit(.367f, .8f, 1f / 13));
                Standard.Add(new Orbit(.633f, .8f, 1f / 13f));
                Standard.Add(new Orbit(.9f, .8f, 1f / 13));

                First.Add(new Orbit(1 / 12f, 1 / 6f, 1 / 21f));
                First.Add(new Orbit(11 / 12f, 1 / 6f, 1 / 21f));
                First.Add(new Orbit(1 / 12f, 5 / 6f, 1 / 21f));
                First.Add(new Orbit(11 / 12f, 5 / 6f, 1 / 21f));
                First.Add(new Orbit(.5f, .5f, 3 / 16f));
                First.Add(new Orbit(1 / 6f, .5f, 3 / 32f));
                First.Add(new Orbit(5 / 6f, .5f, 3 / 32f));
                First.Add(new Orbit(1 / 4f, 1 / 5f, 1 / 28f));
                First.Add(new Orbit(3 / 4f, 1 / 5f, 1 / 28f));
                First.Add(new Orbit(1 / 4f, 4 / 5f, 1 / 28f));
                First.Add(new Orbit(3 / 4f, 4 / 5f, 1 / 28f));

                Dense.Add(new Orbit(1 / 12f, 1 / 8f, 1 / 25f));
                Dense.Add(new Orbit(1 / 12f, 3 / 8f, 1 / 25f));
                Dense.Add(new Orbit(1 / 12f, 5 / 8f, 1 / 25f));
                Dense.Add(new Orbit(1 / 12f, 7 / 8f, 1 / 25f));
                Dense.Add(new Orbit(1 / 6f, 1 / 4f, 1 / 25f));
                Dense.Add(new Orbit(1 / 6f, 1 / 2f, 1 / 25f));
                Dense.Add(new Orbit(1 / 6f, 3 / 4f, 1 / 25f));
                Dense.Add(new Orbit(1 / 4f, 1 / 8f, 1 / 25f));
                Dense.Add(new Orbit(1 / 4f, 3 / 8f, 1 / 25f));
                Dense.Add(new Orbit(1 / 4f, 5 / 8f, 1 / 25f));
                Dense.Add(new Orbit(1 / 4f, 7 / 8f, 1 / 25f));
                Dense.Add(new Orbit(1 / 3f, 1 / 4f, 1 / 25f));
                Dense.Add(new Orbit(1 / 3f, 1 / 2f, 1 / 25f));
                Dense.Add(new Orbit(1 / 3f, 3 / 4f, 1 / 25f));
                Dense.Add(new Orbit(5 / 12f, 1 / 8f, 1 / 25f));
                Dense.Add(new Orbit(5 / 12f, 3 / 8f, 1 / 25f));
                Dense.Add(new Orbit(5 / 12f, 5 / 8f, 1 / 25f));
                Dense.Add(new Orbit(5 / 12f, 7 / 8f, 1 / 25f));
                Dense.Add(new Orbit(1 / 2f, 1 / 4f, 1 / 25f));
                Dense.Add(new Orbit(1 / 2f, 1 / 2f, 1 / 25f));
                Dense.Add(new Orbit(1 / 2f, 3 / 4f, 1 / 25f));
                Dense.Add(new Orbit(7 / 12f, 1 / 8f, 1 / 25f));
                Dense.Add(new Orbit(7 / 12f, 3 / 8f, 1 / 25f));
                Dense.Add(new Orbit(7 / 12f, 5 / 8f, 1 / 25f));
                Dense.Add(new Orbit(7 / 12f, 7 / 8f, 1 / 25f));
                Dense.Add(new Orbit(2 / 3f, 1 / 4f, 1 / 25f));
                Dense.Add(new Orbit(2 / 3f, 1 / 2f, 1 / 25f));
                Dense.Add(new Orbit(2 / 3f, 3 / 4f, 1 / 25f));
                Dense.Add(new Orbit(3 / 4f, 1 / 8f, 1 / 25f));
                Dense.Add(new Orbit(3 / 4f, 3 / 8f, 1 / 25f));
                Dense.Add(new Orbit(3 / 4f, 5 / 8f, 1 / 25f));
                Dense.Add(new Orbit(3 / 4f, 7 / 8f, 1 / 25f));
                Dense.Add(new Orbit(5 / 6f, 1 / 4f, 1 / 25f));
                Dense.Add(new Orbit(5 / 6f, 1 / 2f, 1 / 25f));
                Dense.Add(new Orbit(5 / 6f, 3 / 4f, 1 / 25f));
                Dense.Add(new Orbit(11 / 12f, 1 / 8f, 1 / 25f));
                Dense.Add(new Orbit(11 / 12f, 3 / 8f, 1 / 25f));
                Dense.Add(new Orbit(11 / 12f, 5 / 8f, 1 / 25f));
                Dense.Add(new Orbit(11 / 12f, 7 / 8f, 1 / 25f));

                Simple.Add(new Orbit(1 / 4f, .5f, .2f));
                Simple.Add(new Orbit(3 / 4f, .5f, .2f));
                Simple.Add(new Orbit(.5f, 1 / 6f, .05f));
                Simple.Add(new Orbit(.5f, 5 / 6f, .05f));

                Steps.Add(new Orbit(.12f + headr, .24f + headr * 2, .12f));
                Steps.Add(new Orbit(1 / 3f, .6f, .1f));
                Steps.Add(new Orbit(.5f, 14 / 16f - headr * 2, 1 / 16f));
                Steps.Add(new Orbit(2 / 3f, .6f, .1f));
                Steps.Add(new Orbit(.88f - headr, .24f + headr * 2, .12f));

                Isles.Add(new Orbit(1 / 12f + headr, 3 / 5f, 1 / 12f));
                Isles.Add(new Orbit(3 / 12f, 1 / 6f + headr * 2, 1 / 12f));
                Isles.Add(new Orbit(5 / 12f, 5 / 6f - headr * 2, 1 / 12f));
                Isles.Add(new Orbit(7 / 12f, 1 / 6f + headr * 2, 1 / 12f));
                Isles.Add(new Orbit(9 / 12f, 5 / 6f - headr * 2, 1 / 12f));
                Isles.Add(new Orbit(11 / 12f - headr, 2 / 5f, 1 / 12f));

                Moon.Add(new Orbit(1 / 4f, .3f + headr * 2, .15f));
                Moon.Add(new Orbit(.42f, .87f, 1 / 30f));
                Moon.Add(new Orbit(.3f, .5f, 1 / 30f));
                Moon.Add(new Orbit(.72381f, .18924f, 1 / 32f));
                Moon.Add(new Orbit(.62348f, .54328f, 1 / 18f));
                Moon.Add(new Orbit(.76483f, .43617f, 1 / 32f));
                Moon.Add(new Orbit(.32732f, .32711f, 1 / 26f));
                Moon.Add(new Orbit(.17231f, .79801f, 1 / 24f));
                Moon.Add(new Orbit(.79782f, .79701f, 1 / 20f));
                Moon.Add(new Orbit(.87982f, .33241f, 1 / 22f));
                Moon.Add(new Orbit(.54311f, .12411f, 1 / 28f));
                Moon.Add(new Orbit(.06251f, .56781f, 1 / 30f));
                Moon.Add(new Orbit(.52879f, .78232f, 1 / 34f));

                Ring.Add(new Orbit(3 / 13f, 1 / 9f + headr * 2, 1 / 18f));
                Ring.Add(new Orbit(5 / 13f, 1 / 9f + headr * 2, 1 / 18f));
                Ring.Add(new Orbit(7 / 13f, 1 / 9f + headr * 2, 1 / 18f));
                Ring.Add(new Orbit(9 / 13f, 1 / 9f + headr * 2, 1 / 18f));
                Ring.Add(new Orbit(4 / 13f, 8 / 9f - headr * 2, 1 / 18f));
                Ring.Add(new Orbit(6 / 13f, 8 / 9f - headr * 2, 1 / 18f));
                Ring.Add(new Orbit(8 / 13f, 8 / 9f - headr * 2, 1 / 18f));
                Ring.Add(new Orbit(10 / 13f, 8 / 9f - headr * 2, 1 / 18f));
                Ring.Add(new Orbit(1 / 15f + headr, 1 / 3f, 1 / 15f));
                Ring.Add(new Orbit(14 / 15f - headr, 2 / 3f, 1 / 15f));
                //Ring.Add(new Orbit(1/18f+headr, 1/8f, 1/18f));
                //Ring.Add(new Orbit(17/18f-headr, 7/8f, 1/18f));
                Ring.Add(new Orbit(.1f + headr, .8f - headr * 2, .1f));
                Ring.Add(new Orbit(.9f - headr, .2f + headr * 2, .1f));

                Wave.Add(new Orbit(1 / 6f, 2 / 9f + headr * 2, 1 / 9f));
                Wave.Add(new Orbit(2 / 6f, 7 / 9f - headr * 2, 1 / 9f));
                Wave.Add(new Orbit(3 / 6f, 2 / 9f + headr * 2, 1 / 9f));
                Wave.Add(new Orbit(4 / 6f, 7 / 9f - headr * 2, 1 / 9f));
                Wave.Add(new Orbit(5 / 6f, 2 / 9f + headr * 2, 1 / 9f));

                /*
                 * Me.Add(new Orbit(1/4f, 1/4f, .12f));
                 * Me.Add(new Orbit(1/4f, 3/4f, .12f));
                 * Me.Add(new Orbit(.52f, .155f, .06f));
                 * Me.Add(new Orbit(.67f, .155f, .06f));
                 * Me.Add(new Orbit(.82f, .155f, .06f));
                 * Me.Add(new Orbit(.595f, .5f, .12f));
                 * Me.Add(new Orbit(.52f, .85f, .06f));
                 * Me.Add(new Orbit(.875f, .85f, .05f));
                 */

                Me.Add(new Orbit(1 / 11f + headr, 9 / 11f - 2 * headr, 1 / 11f));
                Me.Add(new Orbit(13 / 44f + headr / 2, 29 / 44f - headr, 1 / 11f));
                Me.Add(new Orbit(.5f, .5f, 1 / 11f));
                Me.Add(new Orbit(31 / 44f - headr / 2, 15 / 44f + headr, 1 / 11f));
                Me.Add(new Orbit(10 / 11f - headr, 2 / 11f + 2 * headr, 1 / 11f));
            }
Example #8
        public void Create()
        {
            var dense = new Dense <double>(500);

            this.DisposeAndCheckDisposedState(dense);
        }
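The test drives Dispose explicitly; in application code the same cleanup is usually left to a using block (a sketch, assuming Dense<double> implements IDisposable as the test implies):

            // Dispose runs when the block exits, even on exception.
            using (var dense = new Dense<double>(500))
            {
                // ... use the layer ...
            }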
Example #9
    public override ValueTuple <Tensor, Tensor> BuildNetwork(Tensor inVectorstate, List <Tensor> inVisualState, Tensor inMemery, int outActionSize, SpaceType actionSpace)
    {
        Debug.Assert(inMemery == null, "Currently recurrent input is not supported by SupervisedLearningNetworkSimple");
        Debug.Assert(!(inVectorstate == null && inVisualState == null), "Network needs at least one vector observation or visual observation");


        weights = new List <Tensor>();

        //visual encoders
        Tensor encodedVisualActor = null;

        if (inVisualState != null)
        {
            List <Tensor> visualEncodedActor = new List <Tensor>();
            foreach (var v in inVisualState)
            {
                var ha = CreateVisualEncoder(v, hiddenLayers, "ActorVisualEncoder");
                visualEncodedActor.Add(ha);
            }
            if (inVisualState.Count > 1)
            {
                //Debug.LogError("Tensorflow does not have gradient for concat operation in C yet. Please only use one observation.");
                encodedVisualActor = Current.K.stack(visualEncodedActor, 1);
                encodedVisualActor = Current.K.batch_flatten(encodedVisualActor);
            }
            else
            {
                encodedVisualActor = visualEncodedActor[0];
            }
        }

        //vector states encode
        Tensor encodedVectorStateActor = null;

        if (inVectorstate != null)
        {
            var hiddens = BuildSequentialLayers(hiddenLayers, inVectorstate, "ActorStateEncoder");
            encodedVectorStateActor = hiddens.Item1;
            weights.AddRange(hiddens.Item2);
        }

        //concat all inputs
        Tensor encodedAllActor = null;

        if (inVisualState == null && inVectorstate != null)
        {
            encodedAllActor = encodedVectorStateActor;
        }
        else if (inVisualState != null && inVectorstate == null)
        {
            encodedAllActor = encodedVisualActor;
        }
        else if (inVisualState != null && inVectorstate != null)
        {
            //Debug.LogError("Tensorflow does not have gradient for concat operation in C yet. Please only use one observation.");
            encodedAllActor = Current.K.concat(new List <Tensor>()
            {
                encodedVectorStateActor, encodedVisualActor
            }, 1);
        }


        //outputs
        var actorOutput = new Dense(units: outActionSize, activation: null, use_bias: outputLayerBias, kernel_initializer: new GlorotUniform(scale: outputLayerInitialScale));
        var outAction   = actorOutput.Call(encodedAllActor)[0];

        if (actionSpace == SpaceType.discrete)
        {
            outAction = Current.K.softmax(outAction);
        }

        weights.AddRange(actorOutput.weights);

        Tensor outVar = null;

        if (useVarianceForContinuousAction && actionSpace == SpaceType.continuous)
        {
            var logSigmaSq = new Dense(units: 1, activation: null, use_bias: outputLayerBias, kernel_initializer: new GlorotUniform(scale: outputLayerInitialScale));
            outVar = Current.K.exp(logSigmaSq.Call(encodedAllActor)[0]) + minStd * minStd;
            weights.AddRange(logSigmaSq.weights);
        }

        return(ValueTuple.Create(outAction, outVar));
    }
Example #10
        public void NegativeTest()
        {
            var exception = AssertThrows <ArgumentException>(() => Dense.Dec(Zero));

            Assert.AreEqual("Can't go negative\r\nParameter name: ds", exception.Message);
        }
Example #11
 public FCPredictor(int num_output, ActivationType activation = ActivationType.Relu, bool use_bias = true, string prefix = null, ParameterDict @params = null) : base(prefix, @params)
 {
     this.predictor = new Dense(num_output, activation: activation, use_bias: use_bias);
 }
Example #12
        public void DecTest()
        {
            var four = Dense.Dec(Five);

            Assert.AreEqual("100", DumpNat(four));
        }
Example #13
        public void SixteenTest()
        {
            var sixteen = Dense.Inc(Fifteen);

            Assert.AreEqual("10000", DumpNat(sixteen));
        }
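Examples #12 and #13 exercise a dense binary representation of naturals: DumpNat prints base-2 digits, so Dec(Five) yields "100" (four) and Inc(Fifteen) yields "10000" (sixteen), the carry rippling through four set bits. A minimal sketch of such an increment, with digits stored least-significant first (the library's actual Dense layout is assumed, not shown here):

     // Increment a dense binary natural stored least-significant bit first:
     // flip trailing 1s to 0 (carry), then set the first 0 bit or append.
     static List<int> Inc(List<int> digits)
     {
         var result = new List<int>(digits);
         int i = 0;
         while (i < result.Count && result[i] == 1)
         {
             result[i] = 0;   // 1 + carry -> 0, carry continues
             i++;
         }
         if (i == result.Count) result.Add(1);
         else result[i] = 1;
         return result;
     }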
Example #14
 public void DenseTest()
 {
     Dense dense = new Dense(10, activation: "relu");
     var   obj   = dense.ToPython();
 }
Example #15
 public void DenseTest()
 {
     Dense dense = new Dense(10, "relu");
     var   obj   = dense.GetPythonObject();
 }
Example #16
    public void BuildNetwork(Tensor inVectorstateLowlevel, Tensor inVectorstateHighlevel, int outActionSize, SpaceType actionSpace,
                             out Tensor outAction, out Tensor outValue, out Tensor outVariance)
    {
        weightsLowlevel  = new List <Tensor>();
        weightsHighLevel = new List <Tensor>();


        //lowlevel encoder
        var    lowlevelEncoder = BuildSequentialLayers(inLowlevelLayers, inVectorstateLowlevel, "LowlevelEncoder");
        Tensor encodedLowlevel = lowlevelEncoder.Item1;

        weightsLowlevel.AddRange(lowlevelEncoder.Item2);



        //highlevel
        Tensor concatedStates = null;

        if (inVectorstateHighlevel != null)
        {
            concatedStates = Current.K.concat(new List <Tensor>()
            {
                encodedLowlevel, inVectorstateHighlevel
            }, 1);
        }
        else
        {
            concatedStates = encodedLowlevel;
        }

        var    highlevelEncoder = BuildSequentialLayers(actorHighlevelLayers, concatedStates, "ActorHighevelEncoder");
        Tensor outputHighlevel  = highlevelEncoder.Item1;

        weightsHighLevel.AddRange(highlevelEncoder.Item2);

        //lowlevel actor output
        var    actorFinal      = BuildSequentialLayers(actorLowlevelLayers, outputHighlevel, "ActorLowlevelOut");
        Tensor encodedAllActor = actorFinal.Item1;

        weightsLowlevel.AddRange(actorFinal.Item2);

        //highlevel value output
        var    valueFinal       = BuildSequentialLayers(valueHighlevelLayers, concatedStates, "ValueHighlevelOut");
        Tensor encodedAllCritic = valueFinal.Item1;

        weightsHighLevel.AddRange(valueFinal.Item2);

        //outputs
        using (Current.K.name_scope("ActorOutput"))
        {
            var actorOutput = new Dense(units: outActionSize, activation: null, use_bias: actorOutputLayerBias, kernel_initializer: new VarianceScaling(scale: actorOutputLayerInitialScale));
            outAction = actorOutput.Call(encodedAllActor)[0];
            if (actionSpace == SpaceType.discrete)
            {
                outAction = Current.K.softmax(outAction);
            }

            weightsLowlevel.AddRange(actorOutput.weights);
        }

        using (Current.K.name_scope("CriticOutput"))
        {
            var criticOutput = new Dense(units: 1, activation: null, use_bias: criticOutputLayerBias, kernel_initializer: new GlorotUniform(scale: criticOutputLayerInitialScale));
            outValue = criticOutput.Call(encodedAllCritic)[0];
            weightsHighLevel.AddRange(criticOutput.weights);
        }
        //variance
        //actor network output variance
        if (actionSpace == SpaceType.continuous)
        {
            using (Current.K.name_scope("ActorVarianceOutput"))
            {
                logSigmaSq  = Current.K.variable((new Constant(0)).Call(new int[] { outActionSize }, DataType.Float), name: "PPO.log_sigma_square");
                outVariance = Current.K.exp(logSigmaSq);
                weightsHighLevel.Add(logSigmaSq);
            }
        }
        else
        {
            outVariance = null;
        }
    }