Code example #1
        /// <summary>
        /// Builds the TensorFlow graph for a Bayesian neural network:
        /// constructs one sampled forward pass per Monte Carlo weight sample,
        /// sums the per-sample likelihood terms, adds the KL divergence of
        /// each layer's variational posterior against a zero-mean prior with
        /// scale 1/sqrt(l2Penalty), forms the cost
        /// <c>mean(decay * KL - likelihood)</c>, wires up an Adam optimizer,
        /// and runs all variable initializers once.
        /// </summary>
        /// <param name="numNeurons">Layer widths; <c>numNeurons[0]</c> is the
        /// input dimension and the last entry the output dimension. One
        /// <c>BayesLayer</c> is created per consecutive pair.</param>
        /// <param name="parameters">Unused here; kept for signature
        /// compatibility with callers.</param>
        public NNBayes(int[] numNeurons, params object[] parameters)
        {
            float l2Penalty = 0.1F;

            // Scale of the zero-mean prior used in both KL terms below.
            // Hoisted so the W and b terms cannot drift apart.
            float priorSigma = (float)(1.0 / Math.Sqrt(l2Penalty));

            // NOTE(review): numSamples is a field declared elsewhere in this
            // class — assumed initialized before the constructor body runs;
            // verify against the field declaration.
            _preds = new TFOutput[numSamples];

            _session = new TFSession();

            _graph = _session.Graph;

            _graph.Seed = Global.Random.Next();

            _layers = new BayesLayer[numNeurons.Length - 1];

            // -1 in the shape means a variable batch dimension.
            _input = _graph.Placeholder(TFDataType.Float, new TFShape(-1, numNeurons[0]));

            _output = _graph.Placeholder(TFDataType.Float, new TFShape(-1));

            _decay = _graph.Placeholder(TFDataType.Float, new TFShape(1));

            for (int i = 0; i < numNeurons.Length - 1; i++)
            {
                _layers[i] = new BayesLayer(_graph, numNeurons[i], numNeurons[i + 1]);
            }

            TFOutput likelihood = _graph.Const(0F);

            // One sampled forward pass per Monte Carlo weight sample; the
            // likelihood terms are summed here and averaged below.
            for (int i = 0; i < numSamples; i++)
            {
                TFOutput act = _input;

                // Index-based loop so the output layer is detected by position
                // instead of calling _layers.Last() on every inner iteration.
                for (int j = 0; j < _layers.Length; j++)
                {
                    TFOutput z = _layers[j].Samplez(_graph, act);

                    if (j == _layers.Length - 1)
                    {
                        // Output layer: flatten to a vector, record the
                        // prediction, and accumulate the log-likelihood
                        // (scale fixed at Const(1F) — presumably unit
                        // observation noise; confirm in NNOperations).
                        TFOutput pred = _graph.Reshape(z, _graph.Const(new TFShape(-1)));
                        _preds[i] = pred;
                        TFOutput sample_likelihood = NNOperations.LogOfNormal(_graph, pred, _output, _graph.Const(1F));
                        likelihood = _graph.Add(likelihood, sample_likelihood);
                    }
                    else
                    {
                        // Hidden layers use a ReLU nonlinearity.
                        act = _graph.Relu(z);
                    }
                }
            }

            // KL divergence of every weight/bias posterior against the
            // N(0, priorSigma) prior, summed over all layers.
            TFOutput kl_W = _graph.Const(0F);
            TFOutput kl_b = _graph.Const(0F);

            foreach (BayesLayer layer in _layers)
            {
                kl_W = _graph.Add(kl_W, _graph.ReduceSum(NNOperations.KLUnivariateNormal(_graph, layer.Mu_W, NNOperations.LogTrans(_graph, layer.Phi_W), _graph.Const(0F), _graph.Const(priorSigma))));
                kl_b = _graph.Add(kl_b, _graph.ReduceSum(NNOperations.KLUnivariateNormal(_graph, layer.Mu_b, NNOperations.LogTrans(_graph, layer.Phi_b), _graph.Const(0F), _graph.Const(priorSigma))));
            }

            TFOutput kl = _graph.Add(kl_W, kl_b);

            // Average the summed likelihood over the Monte Carlo samples.
            likelihood = _graph.Div(likelihood, _graph.Const((float)numSamples));

            // Cost = mean(decay * KL - likelihood); _decay lets callers anneal
            // the KL weight via its placeholder.
            _cost = _graph.ReduceMean(_graph.Sub(_graph.Mul(_decay, kl), likelihood));

            optimizer = new AdamOptimizer();

            optimizer.AddBayesLayer(_graph, _layers, _cost);

            optimizer.Apply(_graph);

            // Run all variable initializers once so training can begin.
            var runner = _session.GetRunner();

            foreach (BayesLayer layer in _layers)
            {
                layer.Init(runner);
            }

            optimizer.Init(runner);

            runner.Run();
        }