Example #1
        public override void Execute() //Task execution
        {
            // pointer to previous layer
            MyAbstractLayer previousLayer = Owner.PreviousLayer;

            if (previousLayer != null)
            {
                // reset delta
                previousLayer.Delta.Fill(0);

                // determine input to previous layer
                CUdeviceptr prevInputPtr;
                if (previousLayer is MyAbstractWeightLayer)
                {
                    prevInputPtr = (previousLayer as MyAbstractWeightLayer).NeuronInput.GetDevicePtr(previousLayer.GPU);
                }
                else
                {
                    prevInputPtr = previousLayer.Input.GetDevicePtr(previousLayer.GPU);
                }

                m_deltaKernel.SetupExecution(previousLayer.Neurons);
                m_deltaKernel.Run(
                    (int)previousLayer.ActivationFunction,
                    prevInputPtr,
                    previousLayer.Delta,
                    Owner.Delta,
                    Owner.Weights,
                    Owner.ParentNetwork.Dropout,
                    previousLayer.Neurons,
                    Owner.Neurons
                    );
            }
        }
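
A point of reference for the kernel call above: a minimal CPU sketch of the standard fully connected backprop step it presumably implements. The weight layout and the omission of dropout handling are assumptions, and BackpropDelta is a hypothetical name, not the project's kernel.

        // Minimal CPU sketch (assumption, not the project's kernel):
        // prevDelta[j] = f'(prevInput[j]) * sum_i delta[i] * W[i, j],
        // assuming weights[i + j * neurons] connects previous-layer neuron j
        // to this layer's neuron i.
        static void BackpropDelta(float[] prevInput, float[] prevDelta, float[] delta,
                                  float[] weights, int prevNeurons, int neurons,
                                  Func<float, float> activationDerivative)
        {
            for (int j = 0; j < prevNeurons; j++)
            {
                float sum = 0.0f;
                for (int i = 0; i < neurons; i++)
                    sum += delta[i] * weights[i + j * neurons];
                prevDelta[j] = activationDerivative(prevInput[j]) * sum;
            }
        }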
Example #2
        public override void Execute() //Task execution
        {
            MyLog.DEBUG.WriteLine("Pooling backward.");

            // node that owns our input (the candidate previous layer)
            MyNode node = Owner.Input.Owner;

            if (node is MyAbstractLayer)
            {
                MyAbstractLayer previousLayer = node as MyAbstractLayer;

                // reset delta
                previousLayer.Delta.Fill(0);

                // determine input to previous layer
                CUdeviceptr prevInputPtr = MyAbstractLayer.DetermineInput(previousLayer);

                m_kernel.SetupExecution(Owner.Neurons);
                m_kernel.Run(
                    (int)previousLayer.ActivationFunction,
                    Owner.Delta,
                    previousLayer.Delta,
                    prevInputPtr,
                    Owner.ActivatedNeurons,
                    Owner.Neurons
                    );
            }
        }
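
The pooling kernel's body is not shown. Under the usual max-pooling backprop rule, each output delta flows back only to the input that won the max, whose index ActivatedNeurons presumably stores; a hedged CPU sketch of that rule follows (the real kernel also applies the previous layer's activation derivative via prevInputPtr):

        // Hedged CPU sketch of max-pooling backprop. The layout is an assumption:
        // activatedNeurons[i] is taken to hold the index of the input neuron
        // that produced output i during the forward pass.
        static void PoolingBackward(float[] delta, float[] prevDelta, int[] activatedNeurons, int neurons)
        {
            for (int i = 0; i < neurons; i++)
                prevDelta[activatedNeurons[i]] += delta[i];
        }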
Example #3
        public override void Execute()
        {
            // Input.Count == Output.Count (it is validated)
            // Sum kernels + Divide kernel = Average
            // First, sum all inputs elementwise into the output
            m_forwardResetKernel.Run(
                Owner.Output,
                Owner.Output.Count
                );

            foreach (MyConnection connection in Owner.InputConnections)
            {
                if (connection.From is MyAbstractLayer)
                {
                    MyAbstractLayer prevLayer = connection.From as MyAbstractLayer;

                    m_forwardSumKernel.Run(
                        prevLayer.Output,
                        Owner.Output,
                        Owner.Output.Count
                        );
                }
            }

            // Then divide output by input size
            m_forwardDivideKernel.Run(
                Owner.Output,
                Owner.Output.Count,
                Owner.InputBranches
                );
        }
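
The three kernels map onto reset, elementwise accumulation, and division; the whole task is equivalent to this CPU sketch (AverageForward is a hypothetical name, equal-length arrays assumed):

        // CPU sketch of the reset + sum + divide sequence above.
        static void AverageForward(float[][] inputs, float[] output, int branches)
        {
            Array.Clear(output, 0, output.Length);      // m_forwardResetKernel
            foreach (float[] input in inputs)           // m_forwardSumKernel per branch
                for (int i = 0; i < output.Length; i++)
                    output[i] += input[i];
            for (int i = 0; i < output.Length; i++)     // m_forwardDivideKernel
                output[i] /= branches;
        }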
Example #4
        public override void Execute()
        {
            Owner.Delta.Fill(0.0f);
            // the ensemble has the same number of neurons as each of its inputs
            m_deltaKernel.SetConstantVariable<float>("Lambda", Lambda);

            int inputLayerCount = Owner.InputConnections.Count(x => x.From is MyAbstractWeightLayer);

            foreach (MyConnection connection in Owner.InputConnections)
            {
                if (connection.From is MyAbstractLayer)
                {
                    MyAbstractLayer prevLayer = connection.From as MyAbstractLayer;

                    if (prevLayer is MyAbstractWeightLayer)
                    {
                        MyAbstractWeightLayer prevWeightLayer = prevLayer as MyAbstractWeightLayer;

                        m_deltaKernel.Run(
                            (int)prevLayer.ActivationFunction,
                            prevWeightLayer.NeuronInput,
                            prevLayer.Output,
                            Owner.Output,
                            Owner.Neurons,
                            prevLayer.Delta,
                            Owner.Delta,
                            inputLayerCount
                            );
                    }
                    prevLayer.Delta.SafeCopyToHost();
                    Owner.Delta.SafeCopyToHost();
                }
            }
            Owner.Delta.SafeCopyToHost();
        }
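
The delta kernel's body is not visible here; a speculative reading is that the ensemble's delta is shared across its weight-layer inputs and passed through each input's activation derivative, roughly as below. The 1/inputLayerCount scaling and Lambda's exact contribution are guesses, not the project's code.

        // Speculative CPU sketch; the exact kernel formula, the 1/inputLayerCount
        // scaling, and Lambda's role are assumptions.
        static void EnsembleDelta(float[] ownerDelta, float[] prevNeuronInput, float[] prevDelta,
                                  int inputLayerCount, Func<float, float> activationDerivative)
        {
            for (int n = 0; n < ownerDelta.Length; n++)
                prevDelta[n] = activationDerivative(prevNeuronInput[n]) * ownerDelta[n] / inputLayerCount;
        }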
Example #5
        public void FeedForward()
        {
            MyAbstractLayer layer = FirstLayer;

            while (layer != null)
            {
                layer.ForwardTask.Execute();
                layer = layer.NextLayer;
            }
        }
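
The backward pass would walk the same chain in the opposite direction; a sketch under the assumption that each layer exposes a backward task analogous to ForwardTask (BackwardTask is a hypothetical name; LastLayer and PreviousLayer appear elsewhere in this codebase):

        // Mirrored sketch of the backward sweep (BackwardTask is hypothetical).
        public void BackPropagate()
        {
            MyAbstractLayer layer = LastLayer;

            while (layer != null)
            {
                layer.BackwardTask.Execute();
                layer = layer.PreviousLayer;
            }
        }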
Example #6
        public override void Execute() //Task execution
        {
            MyNode node = Owner.Input.Owner;

            if (node is MyAbstractLayer)
            {
                MyAbstractLayer previousLayer = node as MyAbstractLayer;

                // HACK: reset delta only if the next layer is not a Gaussian layer
                // (the Gaussian layer has already reset the delta and filled it with regularization deltas)
                previousLayer.Delta.Fill(0);

                // determine input to previous layer
                CUdeviceptr prevInputPtr = MyAbstractLayer.DetermineInput(previousLayer);

                if (Owner.ParentNetwork.BatchSize == 1)
                {
                    // cuBLAS tends to be slower when BatchSize is 1, use the kernel instead
                    m_deltaKernel.SetupExecution(previousLayer.Neurons);
                    m_deltaKernel.Run(
                        (int)previousLayer.ActivationFunction,
                        prevInputPtr,
                        previousLayer.Delta,
                        Owner.Delta,
                        Owner.Weights,
                        Owner.ParentNetwork.Dropout,
                        previousLayer.Neurons,
                        Owner.Neurons
                        );
                }
                else
                {
                    // previousLayer.Delta = Transpose(Weights) x Delta
                    MyCublasFactory.Instance.Gemm(Operation.Transpose, Operation.NonTranspose,
                                                  previousLayer.Neurons, Owner.ParentNetwork.BatchSize, Owner.Neurons, 1.0f,
                                                  Owner.Weights.GetDevice(Owner), Owner.Neurons,
                                                  Owner.Delta.GetDevice(Owner), Owner.Neurons,
                                                  0.0f, previousLayer.Delta.GetDevice(Owner), previousLayer.Neurons
                                                  );

                    // multiply previousLayer.Delta by activation derivatives of previous layer
                    m_deltaBatchKernel.SetupExecution(previousLayer.Neurons * Owner.ParentNetwork.BatchSize);
                    m_deltaBatchKernel.Run(
                        (int)previousLayer.ActivationFunction,
                        prevInputPtr,
                        previousLayer.Delta,
                        Owner.ParentNetwork.Dropout,
                        previousLayer.Neurons,
                        Owner.ParentNetwork.BatchSize
                        );
                }
            }
        }
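
The Gemm call computes previousLayer.Delta = Transpose(Weights) x Delta over the whole batch. Reading the leading dimensions, the CPU sketch below performs the same product; the column-major layout is inferred from the call, and the activation derivatives are applied afterwards in a separate pass, as m_deltaBatchKernel does.

        // CPU sketch of the batched prevDelta = W^T x Delta above. Layout inferred
        // from the Gemm leading dimensions: weights[i + j * neurons] connects
        // previous-layer neuron j to this layer's neuron i.
        static void DeltaBatch(float[] weights, float[] delta, float[] prevDelta,
                               int neurons, int prevNeurons, int batchSize)
        {
            for (int b = 0; b < batchSize; b++)
                for (int j = 0; j < prevNeurons; j++)
                {
                    float sum = 0.0f;
                    for (int i = 0; i < neurons; i++)
                        sum += weights[i + j * neurons] * delta[i + b * neurons];
                    prevDelta[j + b * prevNeurons] = sum;
                }
        }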
Example #7
        //Task execution
        public override void Execute()
        {
            // disable GradientCheck by default - TODO: fix this somehow
            Owner.GradientCheck.Enabled = false;

            // sort children in topological order
            Owner.SortedChildren = Owner.Children.OrderBy(o => o.TopologicalOrder).ToList();

            // set next and previous layer
            MyAbstractLayer layer;
            MyAbstractLayer lastLayer = null;

            foreach (MyNode child in Owner.SortedChildren)
            {
                if (child is MyAbstractLayer)
                {
                    layer = child as MyAbstractLayer;

                    if (lastLayer != null)
                    {
                        lastLayer.NextLayer = layer;
                    }

                    layer.NextLayer     = null;
                    layer.PreviousLayer = lastLayer;
                    lastLayer           = layer;
                }
            }

            // set first and last layer
            layer            = lastLayer;
            Owner.FirstLayer = layer;
            Owner.LastLayer  = layer;
            while (layer != null)
            {
                Owner.FirstLayer = layer;
                layer            = layer.PreviousLayer;
            }

            // count total number of weights
            Owner.TotalWeights = 0;
            layer = Owner.FirstLayer;
            while (layer != null)
            {
                if (layer is MyAbstractWeightLayer)
                {
                    Owner.TotalWeights += (layer as MyAbstractWeightLayer).Weights.Count;
                }
                layer = layer.NextLayer;
            }
        }
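
Reduced to its essentials, the pass builds a doubly-linked chain and rewinds to its head; a self-contained sketch with a hypothetical minimal Layer type:

        // Self-contained sketch of the chain building above (hypothetical Layer type).
        class Layer { public Layer Next, Previous; }

        static Layer LinkLayers(List<Layer> sorted)
        {
            Layer last = null;
            foreach (Layer layer in sorted)
            {
                if (last != null) last.Next = layer;
                layer.Next = null;
                layer.Previous = last;
                last = layer;
            }
            Layer first = last;                 // rewind to the head of the chain
            while (first != null && first.Previous != null)
                first = first.Previous;
            return first;
        }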
Example #8
        public override void Execute()
        {
            MyLog.DEBUG.WriteLine("Convolution backward.");

            // node that owns our input (the candidate previous layer)
            MyNode node = Owner.Input.Owner;

            if (node is MyAbstractLayer)
            {
                MyAbstractLayer previousLayer = node as MyAbstractLayer;

                // reset delta
                previousLayer.Delta.Fill(0);

                // determine input to previous layer
                CUdeviceptr prevInputPtr = MyAbstractLayer.DetermineInput(previousLayer);

                m_kernel.SetupExecution(previousLayer.Neurons);
                m_kernel.Run(
                    (int)previousLayer.ActivationFunction,
                    Owner.Weights,
                    Owner.Delta,
                    previousLayer.Delta,
                    prevInputPtr,
                    Owner.FilterCount,
                    Owner.InputWidth * Owner.InputHeight,                                                                                     // input slice size without padding
                    (Owner.InputWidth + Owner.ZeroPadding + Owner.ZeroPadding) * (Owner.InputHeight + Owner.ZeroPadding + Owner.ZeroPadding), // input slice size
                    Owner.ZeroPadding,
                    Owner.InputWidth, Owner.InputHeight,
                    Owner.FilterWidth, Owner.FilterHeight,
                    Owner.FilterWidth * Owner.FilterHeight,
                    Owner.FilterWidth * Owner.FilterHeight * Owner.InputDepth,
                    Owner.OutputWidth, Owner.OutputHeight, Owner.OutputWidth * Owner.OutputHeight,
                    Owner.HorizontalStride, Owner.VerticalStride,
                    previousLayer.Neurons
                    );
            }
        }
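
The long argument list mostly encodes geometry; the derived sizes follow this arithmetic (a hypothetical helper restating the expressions passed to the kernel):

        // Hypothetical helper restating the size arguments above.
        static void ConvBackwardSizes(int inputWidth, int inputHeight, int inputDepth,
                                      int filterWidth, int filterHeight, int zeroPadding,
                                      out int sliceSize, out int paddedSliceSize,
                                      out int filterSize, out int filterVolume)
        {
            sliceSize       = inputWidth * inputHeight;                 // input slice without padding
            paddedSliceSize = (inputWidth + 2 * zeroPadding)
                            * (inputHeight + 2 * zeroPadding);          // padded input slice
            filterSize      = filterWidth * filterHeight;               // one filter channel
            filterVolume    = filterSize * inputDepth;                  // filter across all input channels
        }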
Example #9
        public override void Init(int nGPU) { } //Kernel initialization

        public override void Execute()
        {
            float maxRelDiff = 0.0f;
            float maxAbsDiff = 0.0f;
            int maxDiffLayer = 0;
            int maxDiffWeight = 0;
            float maxDiffWeightValue = 0.0f;
            float maxDiffStepSize = 0.0f;
            float maxDiffAnalyticalGrad = 0.0f;
            float maxDiffNumericalGrad = 0.0f;

            float sampleProbability = 1.0f / Owner.TotalWeights;
            for (int s = 0; s < SamplesPerTimestep; s++)
            {
                // dice roll
                float diceRoll = (float)Rand.NextDouble();

                // convert dice roll to the index of the weight to sample
                int w = (int)System.Math.Floor(diceRoll / sampleProbability);
                if (w >= Owner.TotalWeights)
                {
                    if (w > Owner.TotalWeights)
                        MyLog.ERROR.Write("w > Owner.TotalWeights"); // just for testing, this should never hit
                    w = Owner.TotalWeights - 1; // this is just to make it safe, but it should never hit
                }

                // loop through the layers
                MyAbstractLayer layer = Owner.FirstTopologicalLayer;
                while (layer != null)
                {
                    // check for weights
                    if (layer is MyAbstractWeightLayer)
                    {
                        MyAbstractWeightLayer weightLayer = (layer as MyAbstractWeightLayer);
                        if (weightLayer.Weights.Count <= w)
                            w -= weightLayer.Weights.Count;
                        else
                        {
                            weightLayer.Weights.SafeCopyToHost(w, 1); // copy this weight to host
                            float originalWeight = weightLayer.Weights.Host[w]; // save weight
                            float stepSize = System.Math.Abs(originalWeight) * RelativeStepSize; // set stepSize

                            // get errorPlus
                            weightLayer.Weights.Host[w] = originalWeight + stepSize; // increase weight
                            weightLayer.Weights.SafeCopyToDevice(w, 1); // back to device
                            Owner.FeedForward(); // forward the network
                            float errorPlus = Owner.GetError();

                            // get errorMinus
                            weightLayer.Weights.Host[w] = originalWeight - stepSize; // decrease weight
                            weightLayer.Weights.SafeCopyToDevice(w, 1); // back to device
                            Owner.FeedForward(); // forward the network
                            float errorMinus = Owner.GetError();

                            // reset to original
                            weightLayer.Weights.Host[w] = originalWeight; // back to where we started
                            weightLayer.Weights.SafeCopyToDevice(w, 1); // back to device
                            Owner.FeedForward(); // forward the network
                            Owner.GetError(); // this sets the original error

                            // numerical gradient
                            float numericalGradient = (errorPlus - errorMinus) / (2 * stepSize);

                            if (numericalGradient == 0)
                            {
                                MyLog.DEBUG.WriteLine("t: " + SimulationStep + " id: " + weightLayer.Id + " w" + w + ": " + weightLayer.Weights.Host[w] + " step: " + stepSize + " numerical gradient is 0.");
                                break; // continue to next sample
                            }

                            // analytical gradient
                            int n = w % weightLayer.Neurons;
                            int i = (w - n) / weightLayer.Neurons;
                            weightLayer.Delta.SafeCopyToHost(n, 1); // copy delta to host
                            weightLayer.Input.SafeCopyToHost(i, 1); // copy input to host
                            weightLayer.DropoutMask.SafeCopyToHost(n, 1); // copy dropoutmask to host
                            //weightLayer.Weights.SafeCopyToHost(w, 1); // already present at host due to resetting to original
                            if (weightLayer.DropoutMask.Host[n] > 0)
                                break;
                            float analyticalGradient = weightLayer.Delta.Host[n] * weightLayer.Input.Host[i] + Owner.L1 * (weightLayer.Weights.Host[w] < 0.0f ? -1.0f : 1.0f) + Owner.L2 * weightLayer.Weights.Host[w];
                            float relativeDiff = 0.0f;
                            float absoluteDiff = 0.0f;
                            if (analyticalGradient == 0)
                            {
                                MyLog.DEBUG.WriteLine("t: " + SimulationStep + " id: " + weightLayer.Id + " w" + w + ": " + weightLayer.Weights.Host[w] + " step: " + stepSize + " analytical gradient is 0.");
                                break; // continue to next sample
                            }
                            absoluteDiff = System.Math.Abs(numericalGradient - analyticalGradient);
                            relativeDiff = absoluteDiff / (System.Math.Abs(numericalGradient) + System.Math.Abs(analyticalGradient));
                            if (relativeDiff > maxRelDiff && absoluteDiff > ThresholdAbsolute)
                            {
                                maxAbsDiff = absoluteDiff;
                                maxRelDiff = relativeDiff;
                                maxDiffLayer = weightLayer.Id;
                                maxDiffWeight = w;
                                maxDiffWeightValue = weightLayer.Weights.Host[w];
                                maxDiffStepSize = stepSize;
                                maxDiffAnalyticalGrad = analyticalGradient;
                                maxDiffNumericalGrad = numericalGradient;
                            }
                            MyLog.DEBUG.WriteLine("t: " + SimulationStep + " id: " + weightLayer.Id + " w" + w + ": " + weightLayer.Weights.Host[w] + " step: " + stepSize + " AG: " + analyticalGradient + " NG: " + numericalGradient + " diff: " + relativeDiff);
                            break; // continue to next sample
                        }
                    }
                    layer = layer.NextTopologicalLayer;

                    // catch unmatched dice-rolls
                    if (layer == null)
                        MyLog.ERROR.Write("GradientCheck task: Weight w " + w + " not found within " + Owner.TotalWeights + " total weights"); // just for testing, this should never hit
                }
            }
            // handle the largest relativeDiff we just found
            if (maxRelDiff > ThresholdRelative && maxAbsDiff > ThresholdAbsolute)
            {
                MyLog.INFO.WriteLine("Gradient threshold exceeded on SimulationStep: " + SimulationStep);
                MyLog.INFO.WriteLine("Max analytical vs numerical relative gradient difference found in layer id " + maxDiffLayer + " for weight " + maxDiffWeight + ": " + maxDiffWeightValue + " with Step size: " + maxDiffStepSize);
                MyLog.INFO.WriteLine("Analytical gradient: " + maxDiffAnalyticalGrad + " Numerical gradient: " + maxDiffNumericalGrad + " Relative difference: " + maxRelDiff);
                MyLog.INFO.WriteLine();
            }
        }
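
At its core the task compares a central-difference estimate against the analytical gradient using a symmetric relative error; a minimal sketch of that comparison (RelativeGradientDiff is a hypothetical name):

        // Minimal sketch of the check: numerical = (E(w+h) - E(w-h)) / (2h),
        // compared via |numerical - analytical| / (|numerical| + |analytical|).
        static float RelativeGradientDiff(Func<float, float> error, float w, float h, float analytical)
        {
            float numerical = (error(w + h) - error(w - h)) / (2 * h);
            return Math.Abs(numerical - analytical)
                 / (Math.Abs(numerical) + Math.Abs(analytical));
        }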
Example #10
        // Task execution
        public override void Execute()
        {
            // TimeStep starts at -1 because it is incremented at the beginning of each new timestep
            Owner.TimeStep = -1;

            // disable GradientCheck by default - TODO: fix this somehow
            Owner.GradientCheck.Enabled = false;

            // sort children in topological order
            Owner.SortedChildren = Owner.Children.OrderBy(o => o.TopologicalOrder).ToList();

            // set next and previous layer
            MyAbstractLayer layer;
            MyAbstractLayer lastTopologicalLayer = null;
            foreach (MyNode child in Owner.SortedChildren)
            {
                if (child is MyAbstractLayer)
                {
                    layer = child as MyAbstractLayer;

                    if (lastTopologicalLayer != null)
                    {
                        lastTopologicalLayer.NextTopologicalLayer = layer;
                    }

                    layer.NextTopologicalLayer = null;
                    layer.PreviousTopologicalLayer = lastTopologicalLayer;
                    lastTopologicalLayer = layer;

                    // collect all next and all previous connected layers
                    layer.PreviousConnectedLayers = new List<MyAbstractLayer>();
                    layer.NextConnectedLayers = new List<MyAbstractLayer>();
                    foreach (MyConnection inputConnection in child.InputConnections)
                    {
                        if (inputConnection != null && inputConnection.From is MyAbstractLayer)
                        {
                            MyAbstractLayer lastConnectedLayer = inputConnection.From as MyAbstractLayer;
                            layer.PreviousConnectedLayers.Add(lastConnectedLayer);
                            lastConnectedLayer.NextConnectedLayers.Add(layer);
                        }
                    }
                }
            }

            // set first and last layer
            layer = lastTopologicalLayer;
            Owner.FirstTopologicalLayer = layer;
            Owner.LastTopologicalLayer = layer;
            while (layer != null)
            {
                Owner.FirstTopologicalLayer = layer;
                layer = layer.PreviousTopologicalLayer;
            }

            // count total number of weights
            Owner.TotalWeights = 0;
            layer = Owner.FirstTopologicalLayer;
            while (layer != null)
            {
                if (layer is MyAbstractWeightLayer)
                    Owner.TotalWeights += (layer as MyAbstractWeightLayer).Weights.Count;
                layer = layer.NextTopologicalLayer;
            }
        }
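
Besides the topological chain, each layer here also records its actual graph neighbours; the bookkeeping reduces to this sketch (hypothetical minimal GraphLayer type):

        // Sketch of the fan-in/fan-out bookkeeping above (hypothetical type).
        class GraphLayer
        {
            public List<GraphLayer> PreviousConnectedLayers = new List<GraphLayer>();
            public List<GraphLayer> NextConnectedLayers = new List<GraphLayer>();
        }

        static void Connect(GraphLayer from, GraphLayer to)
        {
            to.PreviousConnectedLayers.Add(from);   // 'to' sees 'from' among its inputs
            from.NextConnectedLayers.Add(to);       // 'from' sees 'to' among its outputs
        }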
Example #11
 public virtual void Execute(MyAbstractLayer layer)
 {
     MyLog.ERROR.WriteLine("No method provided to backpropagate MyAbstractLayer " + layer + " in " + Owner);
 }
Example #12
        public override void Execute()
        {
            MyNode node = Owner.Input.Owner;

            if (node is MyAbstractLayer)
            {
                MyAbstractLayer previousLayer = node as MyAbstractLayer;

                // Reset delta
                previousLayer.Delta.Fill(0);

                // Disable backprop when in generative mode
                if (!Owner.Generate.IsIncomingRised())
                {
                    // Set locations for mean deltas
                    CUdeviceptr meanDeltas = previousLayer.Delta.GetDevicePtr(Owner, 0);
                    // Set locations for sigma deltas
                    CUdeviceptr sigmaDeltas = previousLayer.Delta.GetDevicePtr(Owner, previousLayer.Delta.Count / 2);
                    // Determine input to previous layer
                    CUdeviceptr prevInputPtr = MyAbstractLayer.DetermineInput(previousLayer);
                    // Set locations for sigmas (previous layer or constant)
                    CUdeviceptr sigmas;
                    if (Owner.UseSigmaConstant)
                    {
                        sigmas = Owner.SigmaConstants.GetDevicePtr(Owner);
                    }
                    else
                    {
                        sigmas = Owner.Input.GetDevicePtr(Owner, Owner.Input.Count / 2);
                    }

                    m_samplingDeltaKernel.Run(
                        Convert.ToInt32(Owner.UseSigmaConstant),
                        (int)previousLayer.ActivationFunction,
                        prevInputPtr,
                        sigmas,
                        meanDeltas,
                        sigmaDeltas,
                        Owner.Delta,
                        Owner.RandomNormal,
                        Owner.Neurons
                        );

                    // Regularization needs weights to compute gradients
                    if (Regularize && previousLayer != null && previousLayer is MyAbstractWeightLayer)
                    {
                        MyAbstractWeightLayer previousWeightLayer = previousLayer as MyAbstractWeightLayer;

                        // Regularization loss: mean^2 + sigma^2 - log(sigma^2),
                        // i.e. regularize means toward 0 and sigmas toward 1
                        int weightCount = previousWeightLayer.Weights.Count;
                        m_regularizationDeltaKernel.SetConstantVariable<float>("RegularizationCoefficient", RegularizationCoefficient);
                        m_regularizationDeltaKernel.SetupExecution(weightCount);
                        m_regularizationDeltaKernel.Run(
                            Convert.ToInt32(Owner.UseSigmaConstant),
                            (int)previousLayer.ActivationFunction,
                            prevInputPtr,
                            previousLayer.Input,
                            previousWeightLayer.Weights,
                            previousLayer.Output.Count,
                            meanDeltas,
                            sigmaDeltas
                            );
                    }
                }
            }
        }
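
If the layer implements the usual reparameterization trick (output = mean + sigma * eps, with eps drawn from RandomNormal), the sampling deltas decompose as in this speculative sketch. The real kernel additionally folds in the previous layer's activation derivative and, in the regularization branch, the gradients of mean^2 + sigma^2 - log(sigma^2).

        // Speculative CPU sketch assuming output = mean + sigma * eps:
        // d/d(mean) = delta, d/d(sigma) = delta * eps.
        static void SamplingDeltas(float[] delta, float[] randomNormal,
                                   float[] meanDeltas, float[] sigmaDeltas, int neurons)
        {
            for (int i = 0; i < neurons; i++)
            {
                meanDeltas[i]  = delta[i];                    // gradient w.r.t. the mean
                sigmaDeltas[i] = delta[i] * randomNormal[i];  // gradient w.r.t. sigma
            }
        }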
Example #13
        public override void Execute()
        {
            if (SimulationStep == 0)
            {
                return;
            }

            switch (Owner.LearningTasks)
            {
            case MyLSTMLayer.LearningTasksType.RTRL:
            {
                // propagate delta to output gates
                m_deltaKernel.Run(
                    Owner.CellStateErrors,
                    Owner.OutputGateDeltas,
                    Owner.CellStates,
                    Owner.OutputGateActivations,
                    Owner.OutputGateActivationDerivatives,
                    Owner.Delta,

                    Owner.CellStates.Count,
                    Owner.CellsPerBlock
                    );
                break;
            }

            case MyLSTMLayer.LearningTasksType.BPTT:
            {
                // propagate delta to output gates
                m_deltaKernel.Run(
                    Owner.Delta,
                    Owner.CellStates,
                    Owner.CellStates.GetTimeShiftedBlock(-1),
                    Owner.CellStateErrors,
                    Owner.CellStateErrors.GetTimeShiftedBlock(+1),

                    Owner.OutputGateDeltas,
                    Owner.ForgetGateDeltas,
                    Owner.ForgetGateDeltas.GetTimeShiftedBlock(+1),
                    Owner.InputGateDeltas,
                    Owner.InputGateDeltas.GetTimeShiftedBlock(+1),
                    Owner.CellInputDeltas,

                    Owner.CellInputActivations,
                    Owner.CellStateActivations,
                    Owner.OutputGateActivations,
                    Owner.ForgetGateActivations.GetTimeShiftedBlock(+1),
                    Owner.InputGateActivations,

                    Owner.CellInputActivationDerivatives,
                    Owner.CellStateActivationDerivatives,
                    Owner.OutputGateActivationDerivatives,
                    Owner.ForgetGateActivationDerivatives,
                    Owner.InputGateActivationDerivatives,

                    Owner.CellInputWeights,
                    Owner.OutputGateWeights,
                    Owner.ForgetGateWeights,
                    Owner.InputGateWeights,

                    Owner.Input.Count,
                    Owner.CellStates.Count,
                    Owner.CellsPerBlock
                    );

                m_gateGradientKernel.Run(
                    Owner.Input,
                    Owner.Output.GetTimeShiftedBlock(-1),
                    Owner.CellStates,

                    Owner.InputGateDeltas,
                    Owner.ForgetGateDeltas,
                    Owner.OutputGateDeltas,

                    Owner.OutputGateWeightGradient,
                    Owner.InputGateWeightGradient,
                    Owner.ForgetGateWeightGradient,

                    Owner.Input.Count,
                    Owner.CellStates.Count,
                    Owner.CellsPerBlock
                    );

                m_cellInputGradientKernel.Run(
                    Owner.Input,
                    Owner.Output.GetTimeShiftedBlock(-1),

                    Owner.CellInputDeltas,
                    Owner.CellInputWeightGradient,

                    Owner.Input.Count,
                    Owner.CellStates.Count,
                    Owner.CellsPerBlock
                    );
                break;
            }
            }

            MyNode node = Owner.GetInput(0).Owner;

            if (node is MyAbstractLayer)
            {
                MyAbstractLayer previousLayer = node as MyAbstractLayer;

                // reset delta
                if (Owner.ParentNetwork.TimeStep == 0)
                {
                    previousLayer.Delta.Fill(0);
                }

                // determine input to previous layer
                CUdeviceptr prevInputPtr = MyAbstractLayer.DetermineInput(previousLayer);

                switch (Owner.LearningTasks)
                {
                case MyLSTMLayer.LearningTasksType.RTRL:
                {
                    // propagate delta to previous layer
                    m_deltaBackKernel.SetupExecution(previousLayer.Neurons);
                    m_deltaBackKernel.Run(
                        (int)previousLayer.ActivationFunction,
                        prevInputPtr,
                        previousLayer.Delta,
                        Owner.CellStateErrors,
                        Owner.PreviousCellStates,
                        Owner.InputGateActivations,
                        Owner.CellInputActivationDerivatives,
                        Owner.InputGateActivationDerivatives,
                        Owner.ForgetGateActivationDerivatives,
                        Owner.CellInputWeights,
                        Owner.InputGateWeights,
                        Owner.ForgetGateWeights,
                        Owner.OutputGateWeights,
                        Owner.OutputGateDeltas,

                        previousLayer.Neurons,
                        Owner.CellStates.Count,
                        Owner.CellsPerBlock
                        );
                    break;
                }

                case MyLSTMLayer.LearningTasksType.BPTT:
                {
                    // propagate delta to previous layer
                    m_deltaBackKernel.SetupExecution(previousLayer.Neurons);
                    m_deltaBackKernel.Run(
                        (int)previousLayer.ActivationFunction,
                        prevInputPtr,
                        previousLayer.Delta,

                        Owner.CellInputDeltas,
                        Owner.OutputGateDeltas,
                        Owner.ForgetGateDeltas,
                        Owner.InputGateDeltas,

                        Owner.CellInputWeights,
                        Owner.InputGateWeights,
                        Owner.ForgetGateWeights,
                        Owner.OutputGateWeights,

                        previousLayer.Neurons,
                        Owner.CellStates.Count,
                        Owner.CellsPerBlock
                        );
                    break;
                }
                }
            }
        }
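
As a reference for the first kernel in the RTRL branch: in textbook LSTM backprop, the output-gate delta of a block is the gate's activation derivative times the sum over its cells of delta times the activated cell state. A hedged CPU sketch follows; the memory layout and the use of tanh as the cell-state nonlinearity are assumptions, not the project's kernel.

        // Hedged sketch of the output-gate delta, following the textbook LSTM
        // equations rather than the project's kernel; one gate per block.
        static void OutputGateDeltas(float[] delta, float[] cellStates,
                                     float[] outputGateActivationDerivatives,
                                     float[] outputGateDeltas, int cellCount, int cellsPerBlock)
        {
            for (int b = 0; b < cellCount / cellsPerBlock; b++)
            {
                float sum = 0.0f;
                for (int c = 0; c < cellsPerBlock; c++)
                {
                    int i = b * cellsPerBlock + c;
                    sum += delta[i] * (float)Math.Tanh(cellStates[i]);
                }
                outputGateDeltas[b] = sum * outputGateActivationDerivatives[b];
            }
        }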
Example #14
        MyNeuralNetworkGroupObserver ThisObserverObject;                                                        // pointer to MyNeuralNetworkGroupObserver

        public MyArrayOfPointsForNNGroupHelperShape(MyAbstractLayer firstLayer, MyNeuralNetworkGroupObserver O)
        {
            ThisObserverObject = O;
            m_firstLayer       = firstLayer;
            //--- size of net
            int             m_dataLen = 0;
            MyAbstractLayer tmpLayer  = firstLayer;
            int             Nlayers   = 0;

            while (tmpLayer != null)
            {
                m_dataLen += tmpLayer.Neurons;
                tmpLayer   = tmpLayer.NextTopologicalLayer;
                ++Nlayers;
            }
            m_data = new float[m_dataLen * 3];

            //--- init neuron positions
            int layerSpacePosition = -Nlayers / 2;
            int neuId = 0;

            tmpLayer = firstLayer;
            while (tmpLayer != null)
            {
                Vector3 layerCenter;
                layerCenter.X = 0.0f;                      //(float)rnd.Next(1,100); // needed for parallel connections :)
                layerCenter.Y = 0.0f;                      //(float)rnd.Next(1,100);
                layerCenter.Z = (float)layerSpacePosition;
                if (tmpLayer.NextTopologicalLayer != null) // normal layer has random positions
                {
                    for (int i = 0; i < tmpLayer.Neurons; i++)
                    {
                        m_data[neuId++] = layerCenter.X + ((float)rnd.Next(0, 100)) / 100.0f;
                        m_data[neuId++] = layerCenter.Y + ((float)rnd.Next(0, 100)) / 100.0f;
                        m_data[neuId++] = layerCenter.Z + ((float)rnd.Next(0, 100)) / 500.0f;
                    }
                }
                else // output is grid
                {
                    int  width = (int)Math.Sqrt(tmpLayer.Neurons);
                    bool run   = true;
                    for (int i = 0; run; i++)
                    {
                        for (int j = 0; j < width; j++)
                        {
                            if ((i * width + j) >= tmpLayer.Neurons)
                            {
                                run = false;
                                break;
                            }
                            m_data[neuId++] = layerCenter.X + ((float)j) / ((float)width);
                            m_data[neuId++] = layerCenter.Y + ((float)i) / ((float)width);
                            m_data[neuId++] = layerCenter.Z;
                        }
                        if (!run)
                        {
                            break;
                        }
                    }
                }
                layerSpacePosition++;
                tmpLayer = tmpLayer.NextTopologicalLayer;
            }
        }
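
The output-layer branch lays neurons on a near-square grid of width floor(sqrt(N)), normalized to the unit square; the nested loop above is equivalent to this compact sketch (GridPositions is a hypothetical name):

        // Equivalent compact form of the output-layer grid layout above.
        static void GridPositions(int neurons, float[] data, ref int neuId, float z)
        {
            int width = (int)Math.Sqrt(neurons);
            for (int n = 0; n < neurons; n++)
            {
                int i = n / width, j = n % width;   // row, column
                data[neuId++] = (float)j / width;   // X
                data[neuId++] = (float)i / width;   // Y
                data[neuId++] = z;                  // Z: layer depth
            }
        }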
Example #15
        public override void Render()
        {
            GL.ClearColor(.15f, .15f, .15f, 0.0f);
            GL.Enable(EnableCap.AlphaTest);
            GL.Disable(EnableCap.DepthTest);
            //Gl.AlphaFunc(GL_NOTEQUAL, 0);
            GL.Enable(EnableCap.Blend);
            GL.BlendFunc(BlendingFactorSrc.SrcAlpha, BlendingFactorDest.One);
            GL.Enable(EnableCap.PointSmooth);

            //////////////////////////////////////////////////////
            //   P O I N T S
            GL.PointSize(4.0f);
            GL.Begin(PrimitiveType.Points);
            MyAbstractLayer       currentLayer     = m_firstLayer;
            MyMemoryBlock<float>  memBlockData2Vis = currentLayer.Output;
            bool normalizeData2Vis = false;
            int  a = 0;

            while (currentLayer != null)
            {
                //--- which data to display
                switch (ThisObserverObject.PointVisMode)
                {
                case MyNeuralNetworkGroupObserver.MyPointVisMode.Output:
                    memBlockData2Vis = currentLayer.Output;
                    break;

                case MyNeuralNetworkGroupObserver.MyPointVisMode.Delta:
                    memBlockData2Vis  = currentLayer.Delta;
                    normalizeData2Vis = true;
                    break;

                default:
                    memBlockData2Vis = currentLayer.Output;
                    break;
                }
                memBlockData2Vis.SafeCopyToHost();
                //--- go through neurons and plot each
                for (int j = 0; j < currentLayer.Neurons; j++) // this is super inefficient :(
                {
                    int   id  = a * m_dataDim;
                    float val = memBlockData2Vis.Host[j];
                    val = Math.Abs(val);   // value has to be >0
                    if (normalizeData2Vis) // normalize deltas :)
                    {
                        val = val * 3f;
                    }
                    System.Drawing.Color col = MyObserverHelpers.ColorFromHSV(120f, 0.5f, Math.Min(val, 1.0f)); // value has to be 0-1
                    GL.Color3(col.R / 256f, col.G / 256f, col.B / 256f);
                    GL.Vertex3(m_data[id], m_data[id + 1], m_data[id + 2]);
                    ++a;
                }
                currentLayer = currentLayer.NextTopologicalLayer;
            }
            GL.End();

            //////////////////////////////////////////////////////
            //    L I N E S
            if (ThisObserverObject.EdgeVisMode != MyNeuralNetworkGroupObserver.MyEdgeVisMode.None)
            {
                GL.LineWidth(0.1f);
                GL.Color4(.2f, .2f, .5f, 0.06f);
                GL.Begin(PrimitiveType.Lines);
                currentLayer = m_firstLayer;
                int curIdxStart = 0; // index of the first neuron in the current layer
                while (currentLayer != null)
                {
                    int             nextIdxStart = curIdxStart + currentLayer.Neurons;
                    MyAbstractLayer nextLayer    = currentLayer.NextTopologicalLayer;
                    if (nextLayer != null)
                    {
                        //--- what to show
                        switch (ThisObserverObject.EdgeVisMode)
                        {
                        case MyNeuralNetworkGroupObserver.MyEdgeVisMode.Ones:
                            break;

                        case MyNeuralNetworkGroupObserver.MyEdgeVisMode.Output:
                            currentLayer.Output.SafeCopyToHost();
                            break;

                        case MyNeuralNetworkGroupObserver.MyEdgeVisMode.Weights:
                            (nextLayer as MyHiddenLayer).Weights.SafeCopyToHost();
                            break;

                        case MyNeuralNetworkGroupObserver.MyEdgeVisMode.WeightsXOut:
                            (nextLayer as MyHiddenLayer).Weights.SafeCopyToHost();
                            currentLayer.Output.SafeCopyToHost();
                            break;

                        default:
                            break;
                        }

                        for (int nc = 0; nc < currentLayer.Neurons; nc++)
                        {
                            for (int nn = 0; nn < nextLayer.Neurons; nn++)
                            {
                                float edgeWeight = .007f;
                                switch (ThisObserverObject.EdgeVisMode)
                                {
                                case MyNeuralNetworkGroupObserver.MyEdgeVisMode.Output:
                                    edgeWeight = currentLayer.Output.Host[nc] / 50f;
                                    break;

                                case MyNeuralNetworkGroupObserver.MyEdgeVisMode.Weights:
                                    edgeWeight = (nextLayer as MyHiddenLayer).Weights.Host[nn * currentLayer.Neurons + nc];
                                    break;

                                case MyNeuralNetworkGroupObserver.MyEdgeVisMode.WeightsXOut:
                                    edgeWeight  = currentLayer.Output.Host[nc];
                                    edgeWeight *= (nextLayer as MyHiddenLayer).Weights.Host[nn * currentLayer.Neurons + nc];
                                    break;

                                default:
                                    break;
                                }
                                int i_c = (nc + curIdxStart) * m_dataDim;  // index current
                                int i_n = (nn + nextIdxStart) * m_dataDim; // index next
                                GL.Color4(.5f, .5f, .5f, edgeWeight * ThisObserverObject.EdgeVisMultiplier);
                                GL.Vertex3(m_data[i_c], m_data[i_c + 1], m_data[i_c + 2]);
                                GL.Vertex3(m_data[i_n], m_data[i_n + 1], m_data[i_n + 2]);
                            }
                        }
                    }
                    currentLayer = nextLayer;
                    curIdxStart  = nextIdxStart;
                }
                GL.End();
            }
        }
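
Both loops address the same flat XYZ buffer: neuron k of a layer whose first neuron sits at slot s occupies m_data[(s + k) * m_dataDim .. + 2]. A hypothetical helper making that indexing explicit:

        // Hypothetical helper for the flat XYZ buffer indexing (m_dataDim == 3).
        static void NeuronPosition(float[] data, int layerStart, int neuron, int dataDim,
                                   out float x, out float y, out float z)
        {
            int i = (layerStart + neuron) * dataDim;
            x = data[i];
            y = data[i + 1];
            z = data[i + 2];
        }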