Example #1
        public void RandomNormalTestWithMean()
        {
            var rn = new RandomNormal(2018, 5.0f);

            Assert.AreEqual(k_FirstValue + 5.0, rn.NextDouble(), k_Epsilon);
            Assert.AreEqual(k_SecondValue + 5.0, rn.NextDouble(), k_Epsilon);
        }
Example #2
        public void RandomNormalTestWithStddev()
        {
            var rn = new RandomNormal(2018, 0.0f, 4.2f);

            Assert.AreEqual(k_FirstValue * 4.2, rn.NextDouble(), k_Epsilon);
            Assert.AreEqual(k_SecondValue * 4.2, rn.NextDouble(), k_Epsilon);
        }
Example #3
        public void RandomNormalTestTwoDouble()
        {
            var rn = new RandomNormal(2018);

            Assert.AreEqual(k_FirstValue, rn.NextDouble(), k_Epsilon);
            Assert.AreEqual(k_SecondValue, rn.NextDouble(), k_Epsilon);
        }
Example #4
        public void RandomNormalTestTwoDouble()
        {
            RandomNormal rn = new RandomNormal(2018);

            Assert.AreEqual(firstValue, rn.NextDouble(), epsilon);
            Assert.AreEqual(secondValue, rn.NextDouble(), epsilon);
        }
Example #5
        public void RandomNormalTestWithStddev()
        {
            RandomNormal rn = new RandomNormal(2018, 0.0f, 4.2f);

            Assert.AreEqual(firstValue * 4.2, rn.NextDouble(), epsilon);
            Assert.AreEqual(secondValue * 4.2, rn.NextDouble(), epsilon);
        }
Example #6
        public void RandomNormalTestWithMean()
        {
            RandomNormal rn = new RandomNormal(2018, 5.0f);

            Assert.AreEqual(firstValue + 5.0, rn.NextDouble(), epsilon);
            Assert.AreEqual(secondValue + 5.0, rn.NextDouble(), epsilon);
        }
Example #7
        public void RandomNormalTestWithStddev()
        {
            RandomNormal rn = new RandomNormal(2018, 1.0f, 4.2f);

            Assert.AreEqual(-0.9599, rn.NextDouble(), 0.0001);
            Assert.AreEqual(-0.5955, rn.NextDouble(), 0.0001);
        }
Example #8
        public void RandomNormalTestWithMeanStddev()
        {
            RandomNormal rn = new RandomNormal(2018, -3.2f, 2.2f);

            Assert.AreEqual(-4.2266, rn.NextDouble(), 0.0001);
            Assert.AreEqual(-4.0357, rn.NextDouble(), 0.0001);
        }
Example #9
        public void RandomNormalTestWithMean()
        {
            RandomNormal rn = new RandomNormal(2018, 5.0f);

            Assert.AreEqual(4.53333, rn.NextDouble(), 0.0001);
            Assert.AreEqual(4.6201, rn.NextDouble(), 0.0001);
        }
Example #10
        public void RandomNormalTestTwoDouble()
        {
            RandomNormal rn = new RandomNormal(2018);

            Assert.AreEqual(-0.46666, rn.NextDouble(), 0.0001);
            Assert.AreEqual(-0.37989, rn.NextDouble(), 0.0001);
        }
Example #11
    public static void generateHeightMap(float[,] ary, float k, float roughness, float minHeight, float maxHeight)
    {
        m_resolution = ary.GetLength(0);

        int r = m_resolution;
        float cRoughness = roughness;
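        // Diamond-square passes: the step size s halves every iteration, while cRoughness
        // (multiplied by roughness each pass) shrinks the noise amplitude used by the distribution.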

        for (int s = r / 2; s >= 1; s /= 2)
        {
            distribution = new RandomNormal(0, 2 * cRoughness * k);
            for (int y = s; y < m_resolution; y += s * 2)
            {
                for (int x = s; x < m_resolution; x += s * 2)
                {
                    // ii, iv, …
                    ary[x, y] = diamond(ary, x, y, s, cRoughness, minHeight, maxHeight);
                }
            }
            for (int y = 0; y < m_resolution; y += s)
            {
                for (int x = s - y % (2 * s); x < m_resolution; x += s * 2)
                {
                    // iii, v, …
                    ary[x, y] = square(ary, x, y, s, cRoughness, minHeight, maxHeight);
                }
            }
            cRoughness *= roughness;
        }
    }
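The diamond and square helpers are not included in this example. Below is a hypothetical sketch of the diamond step, assuming the class-level distribution field is sampled via NextDouble() and results are clamped to the allowed height range; the real implementation may differ.
    // Hypothetical sketch only; the helper's body and the RandomNormal sampling call
    // are assumptions, not part of the original example.
    private static float diamond(float[,] ary, int x, int y, int s, float roughness, float minHeight, float maxHeight)
    {
        // Average the four diagonal neighbours of (x, y) at distance s ...
        float avg = (ary[x - s, y - s] + ary[x + s, y - s] +
                     ary[x - s, y + s] + ary[x + s, y + s]) * 0.25f;

        // ... then perturb with normally distributed noise (the roughness parameter may
        // additionally scale the noise in the real code) and clamp to the valid range.
        float value = avg + (float)distribution.NextDouble();
        return Math.Max(minHeight, Math.Min(maxHeight, value));
    }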
Example #12
        public void RandomNormalTestWithMeanStddev()
        {
            float        mean   = -3.2f;
            float        stddev = 2.2f;
            RandomNormal rn     = new RandomNormal(2018, mean, stddev);

            Assert.AreEqual(firstValue * stddev + mean, rn.NextDouble(), epsilon);
            Assert.AreEqual(secondValue * stddev + mean, rn.NextDouble(), epsilon);
        }
Example #13
        public void RandomNormalTestWithMeanStddev()
        {
            const float mean   = -3.2f;
            const float stddev = 2.2f;
            var         rn     = new RandomNormal(2018, mean, stddev);

            Assert.AreEqual(k_FirstValue * stddev + mean, rn.NextDouble(), k_Epsilon);
            Assert.AreEqual(k_SecondValue * stddev + mean, rn.NextDouble(), k_Epsilon);
        }
Example #14
        public void RandomNormalTestDataNull()
        {
            RandomNormal rn = new RandomNormal(1982);
            TensorProxy  t  = new TensorProxy
            {
                ValueType = TensorProxy.TensorType.FloatingPoint
            };

            Assert.Throws <ArgumentNullException>(() => rn.FillTensor(t));
        }
Example #15
        public void RandomNormalTestTensorInt()
        {
            RandomNormal rn = new RandomNormal(1982);
            TensorProxy  t  = new TensorProxy
            {
                ValueType = TensorProxy.TensorType.Integer
            };

            Assert.Throws <NotImplementedException>(() => rn.FillTensor(t));
        }
Example #16
        public void GeneratesOutput()
        {
            using (var session = new TFSession())
            {
                var initializer = new RandomNormal().Compile(session.Graph, new TFShape(5, 5));
                var output      = session.GetRunner().Run(initializer);

                output.Shape[0].Should().Be(5);
                output.Shape[1].Should().Be(5);
            }
        }
Example #17
        public void RandomNormalTestDataNull()
        {
            var rn = new RandomNormal(1982);
            var t  = new TensorProxy
            {
                valueType = TensorProxy.TensorType.FloatingPoint
            };

            Assert.Throws <ArgumentNullException>(
                () => TensorUtils.FillTensorWithRandomNormal(t, rn));
        }
Example #18
        public void RandomNormalTestTensorInt()
        {
            var rn = new RandomNormal(1982);
            var t  = new TensorProxy
            {
                valueType = TensorProxy.TensorType.Integer
            };

            Assert.Throws <NotImplementedException>(
                () => TensorUtils.FillTensorWithRandomNormal(t, rn));
        }
Example #19
        public void RandomNormalTestTensor()
        {
            RandomNormal rn = new RandomNormal(1982);
            Tensor       t  = new Tensor
            {
                ValueType = Tensor.TensorType.FloatingPoint,
                Data      = Array.CreateInstance(typeof(float), new long[3] {
                    3, 4, 2
                })
            };

            rn.FillTensor(t);

            float[] reference = new float[]
            {
                -0.2139822f,
                0.5051259f,
                -0.5640336f,
                -0.3357787f,
                -0.2055894f,
                -0.09432302f,
                -0.01419199f,
                0.53621f,
                -0.5507085f,
                -0.2651141f,
                0.09315512f,
                -0.04918706f,
                -0.179625f,
                0.2280539f,
                0.1883962f,
                0.4047216f,
                0.1704049f,
                0.5050544f,
                -0.3365685f,
                0.3542781f,
                0.5951571f,
                0.03460682f,
                -0.5537263f,
                -0.4378373f,
            };

            int i = 0;

            foreach (float f in t.Data)
            {
                Assert.AreEqual(f, reference[i], 0.0001);
                ++i;
            }
        }
Example #20
        public static void RandomTest2(bool isActive)
        {
            if (!isActive)
            {
                return;
            }
            RandomNormal ran = new RandomNormal();
            List <int>   l   = ran.IntegerTruncatedNormalRandomList(50, 200, 0, 80, 2);

            if (l == null)
            {
                Console.WriteLine("参数异常!");
                return;
            }
            l.ForEach(_ => Console.WriteLine(_));
        }
Example #21
        /// <summary>
        /// Fill a pre-allocated Tensor with random numbers
        /// </summary>
        /// <param name="tensorProxy">The pre-allocated Tensor to fill</param>
        /// <param name="randomNormal">RandomNormal object used to populate tensor</param>
        /// <exception cref="NotImplementedException">
        /// Throws when trying to fill a Tensor of type other than float
        /// </exception>
        /// <exception cref="ArgumentNullException">
        /// Throws when the Tensor is not allocated
        /// </exception>
        public static void FillTensorWithRandomNormal(
            TensorProxy tensorProxy, RandomNormal randomNormal)
        {
            if (tensorProxy.DataType != typeof(float))
            {
                throw new NotImplementedException("Only float data types are currently supported");
            }

            if (tensorProxy.data == null)
            {
                throw new ArgumentNullException();
            }

            for (var i = 0; i < tensorProxy.data.length; i++)
            {
                tensorProxy.data[i] = (float)randomNormal.NextDouble();
            }
        }
Example #22
        public void RandomNormalTestTensor()
        {
            var rn = new RandomNormal(1982);
            var t  = new TensorProxy
            {
                valueType = TensorProxy.TensorType.FloatingPoint,
                data      = new Tensor(1, 3, 4, 2)
            };

            TensorUtils.FillTensorWithRandomNormal(t, rn);

            var reference = new[]
            {
                -0.4315872f,
                -1.11074f,
                0.3414804f,
                -1.130287f,
                0.1413168f,
                -0.5105762f,
                -0.3027347f,
                -0.2645015f,
                1.225356f,
                -0.02921959f,
                0.3716498f,
                -1.092338f,
                0.9561074f,
                -0.5018106f,
                1.167787f,
                -0.7763879f,
                -0.07491868f,
                0.5396146f,
                -0.1377991f,
                0.3331701f,
                0.06144788f,
                0.9520947f,
                1.088157f,
                -1.177194f,
            };

            for (var i = 0; i < t.data.length; i++)
            {
                Assert.AreEqual(t.data[i], reference[i], 0.0001);
            }
        }
Example #23
        private static void FinalizeFrame(Tomogram tom)
        {
            int numberOfBackgroundClasses = tom.DataClasses.Where(n => n != 0).Count();

            float[] classKey = new float[numberOfBackgroundClasses];
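            // One density value per background class, drawn from a normal distribution
            // (mean 85, stddev 15) and scaled by the tomogram's MRC scaling factor.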
            for (int c = 0; c < classKey.Length; c++)
            {
                float v = (float)RandomNormal.Next(tom.Random, 85, 15);
                classKey[c] = v * tom.MRCScaler;
            }

            for (int y = 0, i = 0; y < tom.Height; y++)
            {
                for (int x = 0; x < tom.Width; x++, i++)
                {
                    int classNumber = tom.DataClasses[i];
                    if (classNumber > 0 && classNumber <= tom.BackgroundDensity)
                    {
                        tom.Data[i] = classKey[classNumber];
                    }
                }
            }

            GaussianBlur blur = GaussianBlur.BuildBlur(2.0f, 4);

            tom.Data = blur.BlurData(tom.Data, tom.Width, tom.Height);

            for (int y = 0, i = 0; y < tom.Height; y++)
            {
                for (int x = 0; x < tom.Width; x++, i++)
                {
                    int classNumber = tom.DataClasses[i];
                    if (classNumber == -1)
                    {
                        float v = tom.Random.Next(50, 60);
                        tom.Data[i] = v * tom.MRCScaler;
                    }
                }
            }

            tom.Data = blur.BlurData(tom.Data, tom.Width, tom.Height);
        }
Example #24
        public void RandomNormalTestDistribution()
        {
            float        mean   = -3.2f;
            float        stddev = 2.2f;
            RandomNormal rn     = new RandomNormal(2018, mean, stddev);

            int numSamples = 100000;
            // Adapted from https://www.johndcook.com/blog/standard_deviation/
            // Computes stddev and mean without losing precision
            double oldM = 0.0, newM = 0.0, oldS = 0.0, newS = 0.0;

            for (int i = 0; i < numSamples; i++)
            {
                double x = rn.NextDouble();
                if (i == 0)
                {
                    oldM = newM = x;
                    oldS = 0.0;
                }
                else
                {
                    newM = oldM + (x - oldM) / (i + 1); // i is zero-based, so i + 1 samples have been seen
                    newS = oldS + (x - oldM) * (x - newM);

                    // set up for next iteration
                    oldM = newM;
                    oldS = newS;
                }
            }

            double sampleMean     = newM;
            double sampleVariance = newS / (numSamples - 1);
            double sampleStddev   = Math.Sqrt(sampleVariance);

            // Note a larger epsilon here. We could get closer to the true values with more samples.
            Assert.AreEqual(mean, sampleMean, 0.01);
            Assert.AreEqual(stddev, sampleStddev, 0.01);
        }
Example #25
 public RandomNormalInputGenerator(int seed, ITensorAllocator allocator)
 {
     m_RandomNormal = new RandomNormal(seed);
     m_Allocator    = allocator;
 }
Example #26
        public CubeNet(string modelDir, int deviceID = 0, int nThreads = 1, int batchSize = 1, int nClasses = 2, bool forTraining = false)
        {
            lock (TFHelper.DeviceSync[deviceID])
            {
                DeviceID    = deviceID;
                ForTraining = forTraining;
                ModelDir    = modelDir;
                MaxThreads  = nThreads;
                BatchSize   = batchSize;
                NClasses    = nClasses;

                TFSessionOptions SessionOptions = TFHelper.CreateOptions();
                TFSession        Dummy          = new TFSession(new TFGraph(), SessionOptions);

                Session = TFHelper.FromSavedModel(SessionOptions, null, ModelDir, new[] { forTraining ? "train" : "serve" }, new TFGraph(), $"/device:GPU:{deviceID}");
                Graph   = Session.Graph;

                if (forTraining)
                {
                    NodeInputMicTile = Graph["images"][0];
                    NodeInputLabels  = Graph["image_classes"][0];
                    NodeInputWeights = Graph["image_weights"][0];
                    NodeLearningRate = Graph["training_learning_rate"][0];
                    NodeOpTrain      = Graph["train_momentum"][0];

                    NodeOutputLoss = Graph["cross_entropy"][0];
                }
                else
                {
                    NodeInputMicTilePredict = Graph["images_predict"][0];
                }

                NodeOutputArgMax  = Graph["argmax_tensor"][0];
                NodeOutputSoftMax = Graph["softmax_tensor"][0];

                if (forTraining)
                {
                    TensorMicTile = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, BoxDimensionsTrain.X, BoxDimensionsTrain.Y, BoxDimensionsTrain.Z, 1),
                                                                                    new float[BatchSize * BoxDimensionsTrain.Elements()],
                                                                                    0,
                                                                                    BatchSize * (int)BoxDimensionsTrain.Elements()),
                                                           nThreads);

                    TensorTrainingLabels = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, BoxDimensionsTrain.X, BoxDimensionsTrain.Y, BoxDimensionsTrain.Z, NClasses),
                                                                                           new float[BatchSize * BoxDimensionsTrain.Elements() * NClasses],
                                                                                           0,
                                                                                           BatchSize * (int)BoxDimensionsTrain.Elements() * NClasses),
                                                                  nThreads);

                    TensorTrainingWeights = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, BoxDimensionsTrain.X, BoxDimensionsTrain.Y, BoxDimensionsTrain.Z, 1),
                                                                                            new float[BatchSize * BoxDimensionsTrain.Elements()],
                                                                                            0,
                                                                                            BatchSize * (int)BoxDimensionsTrain.Elements()),
                                                                   nThreads);

                    TensorLearningRate = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(1),
                                                                                         new float[1],
                                                                                         0,
                                                                                         1),
                                                                nThreads);
                }
                else
                {
                    TensorMicTilePredict = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, BoxDimensionsPredict.X, BoxDimensionsPredict.Y, BoxDimensionsPredict.Z, 1),
                                                                                           new float[BatchSize * BoxDimensionsPredict.Elements()],
                                                                                           0,
                                                                                           BatchSize * (int)BoxDimensionsPredict.Elements()),
                                                                  nThreads);
                }

                if (forTraining)
                {
                    ResultArgMax  = Helper.ArrayOfFunction(i => new long[BatchSize * (int)BoxDimensionsTrain.Elements()], nThreads);
                    ResultSoftMax = Helper.ArrayOfFunction(i => new float[BatchSize * (int)BoxDimensionsTrain.Elements() * NClasses], nThreads);
                    ResultLoss    = Helper.ArrayOfFunction(i => new float[BatchSize], nThreads);
                }
                else
                {
                    ResultArgMax  = Helper.ArrayOfFunction(i => new long[BatchSize * (int)BoxDimensionsPredict.Elements()], nThreads);
                    ResultSoftMax = Helper.ArrayOfFunction(i => new float[BatchSize * (int)BoxDimensionsPredict.Elements() * NClasses], nThreads);
                }

                if (!ForTraining)
                {
                    RunnerPrediction = Helper.ArrayOfFunction(i => Session.GetRunner().
                                                              AddInput(NodeInputMicTilePredict, TensorMicTilePredict[i]).
                                                              Fetch(NodeOutputArgMax, NodeOutputSoftMax),
                                                              nThreads);
                }
                if (ForTraining)
                {
                    RunnerTraining = Helper.ArrayOfFunction(i => Session.GetRunner().
                                                            AddInput(NodeInputMicTile, TensorMicTile[i]).
                                                            AddInput(NodeInputLabels, TensorTrainingLabels[i]).
                                                            AddInput(NodeInputWeights, TensorTrainingWeights[i]).
                                                            AddInput(NodeLearningRate, TensorLearningRate[i]).
                                                            Fetch(NodeOpTrain, NodeOutputArgMax, NodeOutputSoftMax, NodeOutputLoss),
                                                            nThreads);
                }
            }

            // Run prediction or training for one batch to claim all the memory needed
            long[]  InitArgMax;
            float[] InitProb;
            if (!ForTraining)
            {
                Predict(new float[BoxDimensionsPredict.Elements() * BatchSize],
                        0,
                        out InitArgMax,
                        out InitProb);
            }
            if (ForTraining)
            {
                RandomNormal RandN = new RandomNormal();
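                // NextSingle(0, 1) presumably draws standard-normal samples (mean 0, stddev 1),
                // supplying dummy input for the memory-claiming warm-up training step.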
                Train(Helper.ArrayOfFunction(i => RandN.NextSingle(0, 1), BatchSize * (int)BoxDimensionsTrain.Elements()),
                      Helper.ArrayOfConstant(0.0f, BatchSize * (int)BoxDimensionsTrain.Elements() * NClasses),
                      Helper.ArrayOfConstant(0.0f, BatchSize * (int)BoxDimensionsTrain.Elements()),
                      1e-6f,
                      0,
                      out InitArgMax,
                      out InitProb);
            }
        }
Example #27
        public NoiseNet3D(string modelDir, int3 boxDimensions, int nThreads = 1, int batchSize = 8, bool forTraining = true, int deviceID = 0)
        {
            lock (TFHelper.DeviceSync[deviceID])
            {
                DeviceID      = deviceID;
                BoxDimensions = boxDimensions;
                ForTraining   = forTraining;
                ModelDir      = modelDir;
                MaxThreads    = nThreads;
                BatchSize     = batchSize;

                TFSessionOptions SessionOptions = TFHelper.CreateOptions();
                TFSession        Dummy          = new TFSession(new TFGraph(), SessionOptions);

                Session = TFHelper.FromSavedModel(SessionOptions, null, ModelDir, new[] { forTraining ? "train" : "serve" }, new TFGraph(), $"/device:GPU:{deviceID}");
                Graph   = Session.Graph;

                NodeInputSource = Graph["volume_source"][0];
                if (forTraining)
                {
                    NodeInputTarget  = Graph["volume_target"][0];
                    NodeLearningRate = Graph["training_learning_rate"][0];
                    NodeOpTrain      = Graph["train_momentum"][0];
                    NodeOutputLoss   = Graph["l2_loss"][0];
                }

                NodeOutputPredicted = Graph["volume_predict"][0];

                TensorSource = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, BoxDimensions.X, BoxDimensions.Y, boxDimensions.Z, 1),
                                                                               new float[BatchSize * BoxDimensions.Elements()],
                                                                               0,
                                                                               BatchSize * (int)BoxDimensions.Elements()),
                                                      nThreads);

                if (ForTraining)
                {
                    TensorTarget = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, BoxDimensions.X, BoxDimensions.Y, boxDimensions.Z, 1),
                                                                                   new float[BatchSize * BoxDimensions.Elements()],
                                                                                   0,
                                                                                   BatchSize * (int)BoxDimensions.Elements()),
                                                          nThreads);

                    TensorLearningRate = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(1),
                                                                                         new float[1],
                                                                                         0,
                                                                                         1),
                                                                nThreads);
                }

                ResultPredicted = Helper.ArrayOfFunction(i => new float[BatchSize * BoxDimensions.Elements()], nThreads);
                ResultLoss      = Helper.ArrayOfFunction(i => new float[1], nThreads);

                //if (!ForTraining)
                RunnerPrediction = Helper.ArrayOfFunction(i => Session.GetRunner().
                                                          AddInput(NodeInputSource, TensorSource[i]).
                                                          Fetch(NodeOutputPredicted),
                                                          nThreads);
                if (ForTraining)
                {
                    RunnerTraining = Helper.ArrayOfFunction(i => Session.GetRunner().
                                                            AddInput(NodeInputSource, TensorSource[i]).
                                                            AddInput(NodeInputTarget, TensorTarget[i]).
                                                            AddInput(NodeLearningRate, TensorLearningRate[i]).
                                                            Fetch(NodeOutputPredicted, NodeOutputLoss, NodeOpTrain),
                                                            nThreads);
                }
            }

            // Run prediction or training for one batch to claim all the memory needed
            float[] InitDecoded;
            float[] InitLoss;
            //if (!ForTraining)
            {
                Predict(new float[BoxDimensions.Elements() * BatchSize],
                        0,
                        out InitDecoded);
            }
            if (ForTraining)
            {
                RandomNormal RandN = new RandomNormal();
                Train(Helper.ArrayOfFunction(i => RandN.NextSingle(0, 1), BatchSize * (int)BoxDimensions.Elements()),
                      Helper.ArrayOfFunction(i => RandN.NextSingle(0, 1), BatchSize * (int)BoxDimensions.Elements()),
                      1e-10f,
                      0,
                      out InitDecoded,
                      out InitLoss);
            }
        }
Example #28
        public FlexNet3D(string modelDir, int3 boxDimensions, int gpuID = 0, int nThreads = 1, bool forTraining = true, int batchSize = 128, int bottleneckWidth = 2, int layerWidth = 64, int nlayers = 4)
        {
            BoxDimensions   = boxDimensions;
            ForTraining     = forTraining;
            BatchSize       = batchSize;
            BottleneckWidth = bottleneckWidth;
            NWeights0       = layerWidth;
            NLayers         = nlayers;
            ModelDir        = modelDir;
            MaxThreads      = nThreads;

            TFSessionOptions SessionOptions = TFHelper.CreateOptions();
            TFSession        Dummy          = new TFSession(new TFGraph(), SessionOptions);

            Session = TFHelper.FromSavedModel(SessionOptions, null, ModelDir, new[] { forTraining ? "train" : "serve" }, new TFGraph(), $"/device:GPU:{gpuID}");
            Graph   = Session.Graph;

            NodeInputSource       = Graph["volume_source"][0];
            NodeInputTarget       = Graph["volume_target"][0];
            NodeInputWeightSource = Graph["volume_weight_source"][0];
            NodeInputWeightTarget = Graph["volume_weight_target"][0];
            NodeDropoutRate       = Graph["training_dropout_rate"][0];
            if (forTraining)
            {
                NodeLearningRate      = Graph["training_learning_rate"][0];
                NodeOrthogonalityRate = Graph["training_orthogonality"][0];
                NodeOpTrain           = Graph["train_momentum"][0];
                NodeOutputLoss        = Graph["l2_loss"][0];
                NodeOutputLossKL      = Graph["kl_loss"][0];
                NodeBottleneck        = Graph["bottleneck"][0];
            }

            NodeCode = Graph["volume_code"][0];

            NodeOutputPredicted = Graph["volume_predict"][0];

            NodeWeights0 = Graph["encoder_0/weights_0"][0];
            NodeWeights1 = Graph[$"decoder_{nlayers - 1}/weights_{nlayers - 1}"][0];
            if (forTraining)
            {
                NodeWeights0Assign = Graph["encoder_0/assign_layer0"][0];
                NodeWeights0Input  = Graph["encoder_0/assign_layer0_values"][0];

                NodeWeights1Assign = Graph[$"decoder_{nlayers - 1}/assign_layer0"][0];
                NodeWeights1Input  = Graph[$"decoder_{nlayers - 1}/assign_layer0_values"][0];
            }

            TensorSource = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, (BoxDimensions.X / 2 + 1), BoxDimensions.Y, BoxDimensions.Z, 2),
                                                                           new float[BatchSize * BoxDimensions.ElementsFFT() * 2],
                                                                           0,
                                                                           BatchSize * (int)BoxDimensions.ElementsFFT() * 2),
                                                  nThreads);

            TensorTarget = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, (BoxDimensions.X / 2 + 1), BoxDimensions.Y, BoxDimensions.Z, 2),
                                                                           new float[BatchSize * BoxDimensions.ElementsFFT() * 2],
                                                                           0,
                                                                           BatchSize * (int)BoxDimensions.ElementsFFT() * 2),
                                                  nThreads);

            TensorWeightSource = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, (BoxDimensions.X / 2 + 1), BoxDimensions.Y, BoxDimensions.Z, 1),
                                                                                 new float[BatchSize * BoxDimensions.ElementsFFT()],
                                                                                 0,
                                                                                 BatchSize * (int)BoxDimensions.ElementsFFT()),
                                                        nThreads);

            TensorWeightTarget = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, (BoxDimensions.X / 2 + 1), BoxDimensions.Y, BoxDimensions.Z, 1),
                                                                                 new float[BatchSize * BoxDimensions.ElementsFFT()],
                                                                                 0,
                                                                                 BatchSize * (int)BoxDimensions.ElementsFFT()),
                                                        nThreads);

            TensorCode = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, BottleneckWidth),
                                                                         new float[BatchSize * BottleneckWidth],
                                                                         0,
                                                                         BatchSize * BottleneckWidth),
                                                nThreads);

            TensorLearningRate = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(1),
                                                                                 new float[1],
                                                                                 0,
                                                                                 1),
                                                        nThreads);

            TensorDropoutRate = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(1),
                                                                                new float[1],
                                                                                0,
                                                                                1),
                                                       nThreads);

            TensorOrthogonalityRate = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(1),
                                                                                      new float[1],
                                                                                      0,
                                                                                      1),
                                                             nThreads);

            ResultPredicted  = Helper.ArrayOfFunction(i => new float[BatchSize * BoxDimensions.ElementsFFT() * 2], nThreads);
            ResultBottleneck = Helper.ArrayOfFunction(i => new float[BatchSize * BottleneckWidth], nThreads);
            ResultLoss       = Helper.ArrayOfFunction(i => new float[1], nThreads);
            ResultLossKL     = Helper.ArrayOfFunction(i => new float[1], nThreads);

            RetrievedWeights = new float[boxDimensions.ElementsFFT() * 2 * NWeights0];

            //if (!ForTraining)
            RunnerPrediction = Helper.ArrayOfFunction(i => Session.GetRunner().
                                                      AddInput(NodeCode, TensorCode[i]).
                                                      AddInput(NodeDropoutRate, TensorDropoutRate[i]).
                                                      Fetch(NodeOutputPredicted),
                                                      nThreads);
            //else
            RunnerTraining = Helper.ArrayOfFunction(i => Session.GetRunner().
                                                    AddInput(NodeInputSource, TensorSource[i]).
                                                    AddInput(NodeInputTarget, TensorTarget[i]).
                                                    AddInput(NodeInputWeightSource, TensorWeightSource[i]).
                                                    AddInput(NodeInputWeightTarget, TensorWeightTarget[i]).
                                                    AddInput(NodeDropoutRate, TensorDropoutRate[i]).
                                                    AddInput(NodeLearningRate, TensorLearningRate[i]).
                                                    AddInput(NodeOrthogonalityRate, TensorOrthogonalityRate[i]).
                                                    Fetch(NodeOutputPredicted, NodeOutputLoss, NodeOutputLossKL, NodeBottleneck, NodeOpTrain),
                                                    nThreads);

            RunnerEncode = Helper.ArrayOfFunction(i => Session.GetRunner().
                                                  AddInput(NodeInputSource, TensorSource[i]).
                                                  AddInput(NodeInputWeightSource, TensorWeightSource[i]).
                                                  AddInput(NodeDropoutRate, TensorDropoutRate[i]).
                                                  Fetch(NodeBottleneck),
                                                  nThreads);

            RunnerRetrieveWeights0 = Session.GetRunner().Fetch(NodeWeights0);
            RunnerRetrieveWeights1 = Session.GetRunner().Fetch(NodeWeights1);

            if (ForTraining)
            {
                TensorWeights0 = TFTensor.FromBuffer(new TFShape(NWeights0, BoxDimensions.ElementsFFT() * 2),
                                                     new float[BoxDimensions.ElementsFFT() * 2 * NWeights0],
                                                     0,
                                                     (int)BoxDimensions.ElementsFFT() * 2 * NWeights0);

                RunnerAssignWeights0 = Session.GetRunner().AddInput(NodeWeights0Input, TensorWeights0).
                                       Fetch(NodeWeights0Assign);
                RunnerAssignWeights1 = Session.GetRunner().AddInput(NodeWeights1Input, TensorWeights0).
                                       Fetch(NodeWeights1Assign);
            }

            // Run prediction or training for one batch to claim all the memory needed
            float[] InitDecoded;
            float[] InitBottleneck;
            float[] InitLoss, InitLossKL;
            if (!ForTraining)
            {
                RandomNormal RandN = new RandomNormal(123);
                Predict(Helper.ArrayOfFunction(i => RandN.NextSingle(0, 1), BottleneckWidth * BatchSize),
                        0,
                        out InitDecoded);
            }
            else
            {
                RandomNormal RandN = new RandomNormal();

                Encode(Helper.ArrayOfFunction(i => RandN.NextSingle(0, 1), BatchSize * (int)BoxDimensions.ElementsFFT() * 2),
                       Helper.ArrayOfFunction(i => 1f, BatchSize * (int)BoxDimensions.ElementsFFT()),
                       0,
                       out InitBottleneck);

                Train(Helper.ArrayOfFunction(i => RandN.NextSingle(0, 1), BatchSize * (int)BoxDimensions.ElementsFFT() * 2),
                      Helper.ArrayOfFunction(i => RandN.NextSingle(0, 1), BatchSize * (int)BoxDimensions.ElementsFFT() * 2),
                      Helper.ArrayOfFunction(i => 1f, BatchSize * (int)BoxDimensions.ElementsFFT()),
                      Helper.ArrayOfFunction(i => 1f, BatchSize * (int)BoxDimensions.ElementsFFT()),
                      0.5f,
                      1e-10f,
                      1e-5f,
                      0,
                      out InitDecoded,
                      out InitBottleneck,
                      out InitLoss,
                      out InitLossKL);
            }
        }
Example #29
 public RandomNormalInputGenerator(int seed)
 {
     _randomNormal = new RandomNormal(seed);
 }
Example #30
        public static Func <float> CreateGaussianSampler(float mean, float stddev, int seed)
        {
            RandomNormal distr = new RandomNormal(seed, mean, stddev);

            return(() => (float)distr.NextDouble());
        }
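A minimal usage sketch (hypothetical calling code, not part of the original example): the factory captures a seeded RandomNormal, so repeated calls to the returned delegate draw successive values from the same deterministic stream.
        // Hypothetical usage from within the declaring class; nothing below comes from the source.
        Func<float> sample = CreateGaussianSampler(0.0f, 1.0f, 2018);
        float first  = sample();   // first draw from N(0, 1)
        float second = sample();   // next draw from the same seeded sequence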
Example #31
        public BoxNet(string modelDir, int gpuID = 0, int nThreads = 1, int batchSize = 128, bool forTraining = false)
        {
            ForTraining = forTraining;
            BatchSize   = batchSize;
            ModelDir    = modelDir;
            MaxThreads  = nThreads;

            TFSessionOptions SessionOptions = TFHelper.CreateOptions();
            TFSession        Dummy          = new TFSession(new TFGraph(), SessionOptions);

            Session = TFHelper.FromSavedModel(SessionOptions, null, ModelDir, new[] { forTraining ? "train" : "serve" }, new TFGraph(), $"/device:GPU:{gpuID}");
            Graph   = Session.Graph;

            NodeInputMicTile = Graph["mic_tiles"][0];
            if (forTraining)
            {
                NodeInputLabels  = Graph["training_labels"][0];
                NodeLearningRate = Graph["training_learning_rate"][0];
                NodeOpTrain      = Graph["train_momentum"][0];
            }

            NodeOutputArgMax  = Graph["ArgMax"][0];
            NodeOutputSoftMax = Graph["softmax_tensor"][0];

            TensorMicTile = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, 1, BoxDimensions.Y, BoxDimensions.X),
                                                                            new float[BatchSize * BoxDimensions.Elements()],
                                                                            0,
                                                                            BatchSize * (int)BoxDimensions.Elements()),
                                                   nThreads);

            TensorTrainingLabels = Helper.ArrayOfFunction(i => TFTensor.FromBuffer(new TFShape(BatchSize, 2),
                                                                                   new float[BatchSize * 2],
                                                                                   0,
                                                                                   BatchSize * 2),
                                                          nThreads);

            TensorLearningRate = Helper.ArrayOfFunction(i => new TFTensor(0.0f),
                                                        nThreads);

            ResultArgMax  = Helper.ArrayOfFunction(i => new long[BatchSize], nThreads);
            ResultSoftMax = Helper.ArrayOfFunction(i => new float[BatchSize * 2], nThreads);

            if (!ForTraining)
            {
                RunnerPrediction = Helper.ArrayOfFunction(i => Session.GetRunner().
                                                          AddInput(NodeInputMicTile, TensorMicTile[i]).
                                                          Fetch(NodeOutputArgMax, NodeOutputSoftMax),
                                                          nThreads);
            }
            else
            {
                RunnerTraining = Helper.ArrayOfFunction(i => Session.GetRunner().
                                                        AddInput(NodeInputMicTile, TensorMicTile[i]).
                                                        AddInput(NodeInputLabels, TensorTrainingLabels[i]).
                                                        AddInput(NodeLearningRate, TensorLearningRate[i]).
                                                        Fetch(NodeOutputArgMax, NodeOutputSoftMax, NodeOpTrain),
                                                        nThreads);
            }

            // Run prediction or training for one batch to claim all the memory needed
            long[]  InitArgMax;
            float[] InitProb;
            if (!ForTraining)
            {
                Predict(new float[BoxDimensions.Elements() * BatchSize],
                        0,
                        out InitArgMax,
                        out InitProb);
            }
            else
            {
                RandomNormal RandN = new RandomNormal();
                Train(Helper.ArrayOfFunction(i => RandN.NextSingle(0, 1), BatchSize * (int)BoxDimensions.Elements()),
                      Helper.Combine(Helper.ArrayOfFunction(i => new[] { 1.0f, 0.0f }, 128)),
                      1e-6f,
                      0,
                      out InitArgMax,
                      out InitProb);
            }
        }