Example #1
        public void NoTrainingContextLeaks()
        {
            int       numInputs    = 200;
            var       layerSpecs   = MakeLayers();
            var       weightCount  = RNNInterop.GetWeightCount(layerSpecs, numInputs);
            const int numSamples   = 10;
            var       trainingData = Lists.Repeat(numSamples, _ => MakeVector(numInputs)).ColumnsToMatrix();
            var       outputData   = MakeVector(numSamples);
            var       weights      = MakeVector(weightCount);

            var mem = new MemoryAnalyzer();

            using (mem)
            {
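                // Create, evaluate, and dispose 2,500 training contexts in a row;
                // any per-context native leak would show up as steady memory growth.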
                Lists.Repeat(2500, i => {
                    var context = RNNInterop.CreateTrainingContext(layerSpecs, trainingData, outputData);
                    RNNInterop.EvaluateWeights(context, weights);
                    context.Dispose();
                });
            }

            Trace.WriteLine("Mem usage before: " + mem.StartMB);
            Trace.WriteLine("Mem usage after: " + mem.StopMB);
            Assert.IsTrue(mem.StopMB - mem.StartMB < 2);
        }
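All three examples lean on a MemoryAnalyzer helper that is not shown on this page. Below is a minimal sketch of what such a helper might look like; only the using-block pattern and the StartMB/StopMB properties are confirmed by the examples above, while the choice to sample the process' private bytes (so that native allocations show up in the delta) is an assumption.

using System;
using System.Diagnostics;

// Hypothetical sketch of the MemoryAnalyzer used in the tests above.
// It samples the process' private memory (in MB) when constructed and
// again when disposed, so unmanaged allocations are included in the delta.
public sealed class MemoryAnalyzer : IDisposable
{
    public long StartMB { get; private set; }
    public long StopMB  { get; private set; }

    public MemoryAnalyzer()
    {
        StartMB = CurrentMB();
    }

    public void Dispose()
    {
        StopMB = CurrentMB();
    }

    private static long CurrentMB()
    {
        using (var process = Process.GetCurrentProcess())
        {
            return process.PrivateMemorySize64 / (1024 * 1024);
        }
    }
}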
Example #2
        public void NoGetWeightsLeaks()
        {
            var layerSpecs = MakeLayers();

            var mem = new MemoryAnalyzer();

            using (mem)
            {
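                // Call GetWeightCount 30,000 times; nothing disposable is returned,
                // so memory usage should stay essentially flat.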
                Lists.Repeat(30000, i => {
                    RNNInterop.GetWeightCount(layerSpecs, 200);
                });
            }

            Trace.WriteLine("Mem usage before: " + mem.StartMB);
            Trace.WriteLine("Mem usage after: " + mem.StopMB);
            Assert.IsTrue(mem.StopMB - mem.StartMB < 15);
            // 15 MB: it is unclear why this appears to leak, but the growth seems to approach a limit logarithmically
            // with respect to the number of calls to GetWeightCount(). 15 MB over 30,000 calls is acceptable, and it is
            // probably a GC artifact anyway.
        }
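If the residual 15 MB really is a GC artifact rather than a native leak, forcing a blocking full collection before the analyzer takes its final sample should flatten the delta. A hedged variation of the loop above, assuming the analyzer samples memory when it is disposed (only the explicit collection calls are new; everything else is unchanged from Example #2):

            using (mem)
            {
                Lists.Repeat(30000, i => {
                    RNNInterop.GetWeightCount(layerSpecs, 200);
                });

                // If the growth is merely uncollected managed garbage, a blocking
                // full collection here should pull the "after" sample back down;
                // a genuine native leak would survive it.
                GC.Collect();
                GC.WaitForPendingFinalizers();
                GC.Collect();
            }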
Example #3
        public void NoPropagationContextLeaks()
        {
            int numInputs   = 200;
            var layerSpecs  = MakeLayers();
            var weightCount = RNNInterop.GetWeightCount(layerSpecs, numInputs);

            var mem = new MemoryAnalyzer();

            using (mem)
            {
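                // Create, propagate through, and dispose 10,000 propagation contexts;
                // memory should stay flat as long as Dispose releases the native context.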
                Lists.Repeat(10000, _ => {
                    var rnnSpec = new RNNSpec(numInputs, layerSpecs, new DenseVector(weightCount));
                    var context = RNNInterop.CreatePropagationContext(rnnSpec);
                    var input   = MakeVector(numInputs);
                    RNNInterop.PropagateInput(context, input.ToArray(), 1);
                    context.Dispose();
                });
            }

            Trace.WriteLine("Mem usage before: " + mem.StartMB);
            Trace.WriteLine("Mem usage after: " + mem.StopMB);
            Assert.IsTrue(mem.StopMB - mem.StartMB < 2);
        }
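The contexts disposed in these loops wrap native resources, which is why a missed Dispose() shows up as unbounded memory growth rather than as extra GC pressure. For illustration only, here is a hypothetical shape such a context handle could take; the DLL name, entry point, and SafeHandle design are assumptions and are not taken from RNNInterop.

using System;
using System.Runtime.InteropServices;

// Hypothetical wrapper for a native propagation-context handle. The DLL name
// and entry point are placeholders; only the Dispose() call pattern is taken
// from the examples above.
internal sealed class PropagationContextHandle : SafeHandle
{
    // Placeholder native call that releases the unmanaged context.
    [DllImport("RNN.Native.dll", EntryPoint = "DestroyPropagationContext")]
    private static extern void DestroyPropagationContext(IntPtr context);

    public PropagationContextHandle() : base(IntPtr.Zero, ownsHandle: true) { }

    public override bool IsInvalid => handle == IntPtr.Zero;

    protected override bool ReleaseHandle()
    {
        // Runs on Dispose() and, as a safety net, during finalization, so a
        // forgotten Dispose() is a delayed release rather than a permanent leak.
        DestroyPropagationContext(handle);
        return true;
    }
}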