Example #1
        public void GetInitialGrads(double initialCost, double[] prevActivations, double bias, LSTMWeigths cellhiddenStates, out double cellStateGrad, out double outputWeigthGrad)
        {
            cellhiddenStates.hiddenState += bias;
            double linearFunction;

            for (int i = 0; i < prevActivations.Length; i++)
            {
                cellhiddenStates.hiddenState += prevActivations[i] * Weigths[i];
            }
            linearFunction = cellhiddenStates.hiddenState;

            double hiddenStateSigmoid = SigmoidActivation(cellhiddenStates.hiddenState);

            //forget gate
            cellhiddenStates.cellState *= hiddenStateSigmoid * recurrent.forgetWeigth;
            //store gate
            cellhiddenStates.cellState += SigmoidActivation(cellhiddenStates.hiddenState) * recurrent.storeWeigth * TanhActivation(cellhiddenStates.hiddenState);
            //output gate
            cellhiddenStates.hiddenState = hiddenStateSigmoid * recurrent.outputWeigth * TanhActivation(cellhiddenStates.cellState);

            //derivative of the output weight
            double outputWeigthDerivative = Derivatives.MultiplicationDerivative(cellhiddenStates.hiddenState, Derivatives.SigmoidDerivative(linearFunction), recurrent.outputWeigth, 0);

            //output gate derivative
            initialCost     *= Derivatives.MultiplicationDerivative(hiddenStateSigmoid * recurrent.outputWeigth, outputWeigthDerivative, TanhActivation(cellhiddenStates.cellState), Derivatives.TanhDerivative(cellhiddenStates.cellState));
            outputWeigthGrad = initialCost * outputWeigthDerivative;
            initialCost     *= Derivatives.TanhDerivative(cellhiddenStates.cellState);
            cellStateGrad    = initialCost;
        }
Example #2
        protected void Check(double expected, Derivatives <Func <double> > parsed)
        {
            Assert.AreEqual(1, parsed.Count);
            var actual = parsed[0].Invoke();
            var tol    = Math.Max(Math.Abs(expected), Math.Abs(actual)) * RelativeTolerance + AbsoluteTolerance;

            Assert.AreEqual(expected, actual, tol);
        }
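The tolerance combines a relative term, scaled by the larger of the two magnitudes, with an absolute floor, so the assertion stays meaningful both for large values and for values near zero. A minimal standalone sketch of the same check; the constant values here are illustrative assumptions, not the test suite's actual settings:

        const double RelativeTolerance = 1e-9;  //assumed value, for illustration only
        const double AbsoluteTolerance = 1e-12; //assumed value, for illustration only

        static bool ApproximatelyEqual(double expected, double actual)
        {
            //same formula as Check: max(|expected|, |actual|) * rel + abs
            var tol = Math.Max(Math.Abs(expected), Math.Abs(actual)) * RelativeTolerance + AbsoluteTolerance;
            return Math.Abs(expected - actual) <= tol;
        }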
Example #3
        protected void Check(double expected, Derivatives <Expression> actual)
        {
            Assert.AreEqual(1, actual.Count);
            var method = Expression.Lambda <Func <double> >(actual[0]).Compile();
            var result = method();
            var tol    = Math.Max(Math.Abs(expected), Math.Abs(result)) * RelativeTolerance + AbsoluteTolerance;

            Assert.AreEqual(expected, result, tol);
        }
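Expression.Lambda<Func<double>>(...).Compile() turns an expression tree into a callable delegate, which is what lets the test evaluate the derivative expression numerically. A minimal standalone illustration of that .NET API:

        using System;
        using System.Linq.Expressions;

        //build a constant expression and compile it into a delegate,
        //mirroring what Check does with actual[0]
        Expression body = Expression.Constant(Math.E);
        Func <double> f = Expression.Lambda <Func <double> >(body).Compile();
        Console.WriteLine(f()); //prints 2.718281828459045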
Example #4
        public void TanhTest()
        {
            var x        = 0;
            var d        = Derivatives.Tanh(x);
            var delta    = 0.0001;
            var expected = (Functions.Tanh(x + delta) - Functions.Tanh(x)) / delta;

            MyAssert.ApproximatelyEqual(expected, d);
        }
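The forward difference above approximates tanh′(0), whose exact value is 1 − tanh²(0) = 1, with an error of order delta. A central difference shrinks that error to order delta² at the cost of one extra function evaluation; a small sketch, using Math.Tanh as a stand-in for Functions.Tanh:

        //central difference: error is O(delta^2) instead of O(delta)
        static double NumericDerivative(Func <double, double> f, double x, double delta = 0.0001)
        {
            return((f(x + delta) - f(x - delta)) / (2 * delta));
        }

        //NumericDerivative(Math.Tanh, 0.0) is approximately 1.0, matching 1 - Math.Tanh(0) * Math.Tanh(0)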
Example #5
        private float EuclidianDistance(Point p1, Point p2, Derivatives d1, Derivatives d2)
        {
            float sum = 0;

            sum += EuclidianDistancePoints(p1, p2, d1, d2) * EuclidianDistancePoints(p1, p2, d1, d2);
            sum += EuclidianDistanceDerivatives(p1, p2, d1, d2) * EuclidianDistanceDerivatives(p1, p2, d1, d2);

            return((float)Math.Sqrt(sum));
        }
Example #6
        //TODO: Change out arrays to lists
        public void GetGrads(double[] prevActivations, double bias, LSTMWeigths cellhiddenStates, double cellStateGrad, double prevCellStateGrad, double prevHiddenGrad, double outputWeigthGrad
                             , out List <double> prevActivationsGrads, out List <double> weigthsGrads, out double biasGrad, out LSTMWeigths LSTMWeigthsGrads)
        {
            LSTMWeigthsGrads = new LSTMWeigths(0, 0, outputWeigthGrad);
            LSTMWeigths initialState = cellhiddenStates;

            cellhiddenStates.hiddenState += bias;
            for (int i = 0; i < prevActivations.Length; i++)
            {
                cellhiddenStates.hiddenState += prevActivations[i] * Weigths[i];
            }
            double linearFunc = cellhiddenStates.hiddenState;

            double hiddenStateSigmoid = SigmoidActivation(cellhiddenStates.hiddenState);
            //forget gate
            double forgetMultiplication = hiddenStateSigmoid * recurrent.forgetWeigth;

            cellhiddenStates.cellState *= forgetMultiplication;
            //store gate
            cellhiddenStates.cellState += SigmoidActivation(cellhiddenStates.hiddenState) * recurrent.storeWeigth * TanhActivation(cellhiddenStates.hiddenState);
            //output gate
            cellhiddenStates.hiddenState = hiddenStateSigmoid * recurrent.outputWeigth * TanhActivation(cellhiddenStates.cellState);

            double currentGrad = cellStateGrad;
            double storeWeigthMultiplicationDerivative = hiddenStateSigmoid * Derivatives.SigmoidDerivative(linearFunc);
            double storeGateMultiplicationDerivative   = Derivatives.MultiplicationDerivative(hiddenStateSigmoid * recurrent.storeWeigth, storeWeigthMultiplicationDerivative
                                                                                              , TanhActivation(linearFunc), Derivatives.TanhDerivative(linearFunc));


            double sigmoidDerivative = Derivatives.SigmoidDerivative(linearFunc);
            double forgetWeigthMultiplicationDerivative = sigmoidDerivative * hiddenStateSigmoid;
            double forgetGateMultiplicationDerivative   = Derivatives.MultiplicationDerivative(initialState.cellState, prevCellStateGrad, forgetMultiplication, forgetWeigthMultiplicationDerivative);

            //propagate through the store-gate addition
            currentGrad *= forgetGateMultiplicationDerivative + storeGateMultiplicationDerivative;

            double gradToStoreWeigth = currentGrad;

            gradToStoreWeigth           *= storeGateMultiplicationDerivative;
            gradToStoreWeigth           *= storeWeigthMultiplicationDerivative;
            LSTMWeigthsGrads.storeWeigth = gradToStoreWeigth;

            //propagate through the forget-gate multiplication
            currentGrad *= forgetGateMultiplicationDerivative;
            LSTMWeigthsGrads.forgetWeigth = currentGrad * forgetWeigthMultiplicationDerivative;

            biasGrad = prevHiddenGrad * bias;

            //use two separate lists: assigning one list to both out parameters would alias them
            prevActivationsGrads = new List <double>();
            weigthsGrads         = new List <double>();
            for (int i = 0; i < prevActivations.Length; i++)
            {
                prevActivationsGrads.Add(prevHiddenGrad * Weigths[i]);
                weigthsGrads.Add(prevHiddenGrad * prevActivations[i]);
            }
        }
Example #7
        public void When_DeriveNode_Expect_Reference(Node function, Dictionary <VariableNode, Node> expected)
        {
            var nf = new NodeFinder();
            var d  = new Derivatives()
            {
                Variables = new HashSet <VariableNode>(nf.Build(function))
            };
            var actual = d.Derive(function);

            CompareDictionary(expected, actual);
        }
Example #8
        List <Tuple <OutputData, SyntaxBlock, List <Tuple <ArgumentValue, SyntaxBlock> > > > OutputFormulas = new List <Tuple <OutputData, SyntaxBlock, List <Tuple <ArgumentValue, SyntaxBlock> > > >(); //<output-node data, formula for result of output node, [<node-connection, partial derivative>]>

        /// <summary>
        /// Builds the equations for the network. It first calculates the formula for the result of each output node and saves it with the corresponding output-node ArgumentValue (the output node's value).
        /// Then it calculates all the partial derivatives of that formula and saves them per derived variable.
        /// </summary>
        private void BuildEquations()
        {
            Parallel.ForEach(Output, outputneuron =>
            {
                var resultformula = outputneuron.BuildEquation();
                resultformula     = resultformula.Simplify();

                var partial_deritatives = Derivatives.CalculatePartialDerivatives(resultformula);

                //List<T>.Add is not thread-safe; serialize access from the parallel loop
                lock (OutputFormulas)
                {
                    OutputFormulas.Add(new Tuple <OutputData, SyntaxBlock, List <Tuple <ArgumentValue, SyntaxBlock> > >(outputneuron.Value, resultformula, partial_deritatives));
                }
            });
        }
Example #9
 /// <summary>
 /// Applies one of the default functions to a found function call.
 /// </summary>
 /// <param name="sender">The sender.</param>
 /// <param name="e">The event arguments.</param>
 private static void DefaultFunctionFound(object sender, FunctionFoundEventArgs <Derivatives <Expression> > e)
 {
     if (DefaultFunctions.TryGetValue(e.Name, out var function))
     {
         var arguments = new Derivatives <Expression> [e.ArgumentCount];
         for (var i = 0; i < e.ArgumentCount; i++)
         {
             arguments[i] = e[i];
         }
         e.Result = function?.Invoke(arguments);
     }
 }
Example #10
        public void MeanSquareErrorTest()
        {
            var y        = new TensorOld(new double[] { 1, 3, 2, 4, 5, 6 });
            var yHat     = new TensorOld(new double[] { 1.5, 2.6, 2.1, 3.9, 5.3, 6.7 });
            var loss     = Functions.MeanSquareError(y, yHat);
            var gradient = Derivatives.MeanSquareError(y, yHat);
            var delta    = 0.00001;

            yHat[0] += delta;
            var expected = (Functions.MeanSquareError(y, yHat) - loss) / delta;

            MyAssert.ApproximatelyEqual(expected, gradient[0]);
        }
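The numerical check approximates the partial derivative of the loss with respect to yHat[0]. Assuming the usual convention MSE(y, yHat) = (1/n) * sum_i (yHat[i] - y[i])^2, the closed form is 2 * (yHat[i] - y[i]) / n; if the library scales differently (for example by 1/(2n)), the constant changes accordingly. A sketch over plain arrays instead of TensorOld:

        //sketch assuming MSE = (1/n) * sum_i (yHat[i] - y[i])^2
        static double[] MseGradient(double[] y, double[] yHat)
        {
            var grad = new double[y.Length];
            for (int i = 0; i < y.Length; i++)
            {
                grad[i] = 2.0 * (yHat[i] - y[i]) / y.Length;
            }
            return(grad);
        }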
Example #11
        private float EuclidianDistancePoints(Point p1, Point p2, Derivatives d1, Derivatives d2)
        {
            var sum = (p1.X - p2.X) * (p1.X - p2.X);

            sum += (p1.Y - p2.Y) * (p1.Y - p2.Y);

            sum += (p1.Pressure - p2.Pressure) * (p1.Pressure - p2.Pressure);

            sum += (p1.tiltX - p2.tiltX) * (p1.tiltX - p2.tiltX);
            sum += (p1.tiltY - p2.tiltY) * (p1.tiltY - p2.tiltY);

            return((float)Math.Sqrt(sum));
        }
Example #12
        public void Dispose()
        {
            _buffer.Dispose();
            _dxDz.Dispose();
            _dyDxz.Dispose();
            _dyxDyz.Dispose();
            _dxxDzz.Dispose();

            InitialSpectrum.Dispose();
            PrecomputedData.Dispose();
            Displacement.Dispose();
            Derivatives.Dispose();
            Turbulence.Dispose();
        }
Example #13
        public void SoftmaxTest()
        {
            var output   = new double[] { 0.05, 0.15, 0.7, 0.1 };
            var expected = new double[, ]
            {
                { 0.0475, -0.0075, -0.035, -0.005 },
                { -0.0075, 0.1275, -0.105, -0.015 },
                { -0.035, -0.105, 0.21, -0.07 },
                { -0.005, -0.015, -0.07, 0.09 }
            };
            var actual = Derivatives.SoftmaxFromOutput(output);

            MyAssert.ApproximatelyEqual(expected, actual);
        }
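The expected matrix is the softmax Jacobian J[i, j] = s[i] * (delta(i, j) - s[j]) evaluated at the given output s: diagonal entries are s[i] * (1 - s[i]) (for example 0.05 * 0.95 = 0.0475) and off-diagonal entries are -s[i] * s[j] (for example -0.05 * 0.15 = -0.0075). A minimal sketch that computes it directly from the output, without assuming anything about Derivatives.SoftmaxFromOutput's internals:

        //softmax Jacobian from the softmax output s: J[i, j] = s[i] * ((i == j ? 1 : 0) - s[j])
        static double[, ] SoftmaxJacobian(double[] s)
        {
            var j = new double[s.Length, s.Length];
            for (int r = 0; r < s.Length; r++)
            {
                for (int c = 0; c < s.Length; c++)
                {
                    j[r, c] = s[r] * ((r == c ? 1.0 : 0.0) - s[c]);
                }
            }
            return(j);
        }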
Example #14
        public double[] GetCosts(double[] output, double reward)
        {
            double[] costs = new double[n.OutputLength];
            if (output.Length != costs.Length)
            {
                throw new ArgumentException();
            }

            for (int i = 0; i < output.Length; i++)
            {
                costs[i] = Derivatives.LogLikelyhoodTermDerivative(output[i], reward);
            }

            return(costs);
        }
Example #15
        public void SoftmaxCrossEntropyTest()
        {
            var y    = new double[] { 0, 1, 0, 0 };
            var yHat = new double[] { 0.1, 0.7, 0.15, 0.05 };
            var der  = Derivatives.CrossEntropy(y, yHat);

            var delta = 0.00001;
            var ce0   = Functions.CrossEntropy(y, yHat);

            yHat[0] += delta;
            var ce1      = Functions.CrossEntropy(y, yHat);
            var expected = (ce1 - ce0) / delta;

            MyAssert.ApproximatelyEqual(expected, der[0], 0.0001);
        }
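Assuming the usual cross-entropy CE(y, yHat) = -sum_i y[i] * ln(yHat[i]), the per-component derivative is -y[i] / yHat[i]; here der[0] should be approximately 0 because y[0] = 0, which matches the finite difference (perturbing yHat[0] barely changes the loss). A sketch under that assumption; note that some libraries instead expose the combined softmax-plus-cross-entropy gradient yHat - y, which is a different quantity:

        //sketch assuming CE(y, yHat) = -sum_i y[i] * ln(yHat[i])
        static double[] CrossEntropyGradient(double[] y, double[] yHat)
        {
            var grad = new double[y.Length];
            for (int i = 0; i < y.Length; i++)
            {
                grad[i] = -y[i] / yHat[i];
            }
            return(grad);
        }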
Example #16
        public void When_Function1D_Expect_Reference(string func, double arg, double darg, double expected, double dexpected)
        {
            var base_builder = new DoubleBuilder();
            var builder      = new DerivativeBuilder <double>(base_builder);
            var x            = new Variable("x");
            var derivative   = new Derivatives <double> {
                Value = arg
            };

            derivative[x] = darg;

            var result = ((IBuilder <Derivatives <double> >)builder).CreateFunction(func, new[] { derivative });

            Assert.AreEqual(expected, result.Value, 1e-12);
            Assert.AreEqual(dexpected, result[x], 1e-12);
        }
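Derivatives<double> here acts like a forward-mode dual number: a value plus one derivative per variable, with the chain rule applied as each function is built. A minimal single-variable sketch of the idea; the type and member names are illustrative, not SpiceSharp's API:

        //illustrative dual number: a value and its derivative with respect to one variable
        readonly struct Dual
        {
            public readonly double Value;
            public readonly double Deriv;
            public Dual(double value, double deriv) { Value = value; Deriv = deriv; }

            //chain rule for an elementary function f with derivative df
            public static Dual Apply(Dual x, Func <double, double> f, Func <double, double> df)
            {
                return(new Dual(f(x.Value), df(x.Value) * x.Deriv));
            }
        }

        //usage: sin at x = 2 with dx/dx = 1 gives Value = Math.Sin(2) and Deriv = Math.Cos(2)
        //var r = Dual.Apply(new Dual(2, 1), Math.Sin, Math.Cos);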
Example #17
        public void GetGradients(double costGradient, double[] previousLayerActivations, ActivationFunctions activation, double bias, out List <double> weigthsGradients, out List <double> previousLayerActivationGradients, out double biasGradient)
        {
            //use two separate lists: assigning one list to both out parameters would alias them
            weigthsGradients = new List <double>();
            previousLayerActivationGradients = new List <double>();
            double neuronLinearFunction   = LinearFunction(previousLayerActivations, Weigths, bias);
            double ActivationFuncGradient = Derivatives.DerivativeOf(neuronLinearFunction, activation) * costGradient;


            for (int weigthIndex = 0; weigthIndex < Weigths.Count; weigthIndex++)
            {
                weigthsGradients.Add(ActivationFuncGradient * previousLayerActivations[weigthIndex]);

                previousLayerActivationGradients.Add(ActivationFuncGradient * Weigths[weigthIndex]);
            }

            biasGradient = ActivationFuncGradient; //ActivationFuncGradient already includes costGradient, and dz/dbias = 1
        }
Example #18
        private float EuclidianDistanceDerivatives(Point p1, Point p2, Derivatives d1, Derivatives d2)
        {
            float sum = 0;

            sum += (d1.Velocity - d2.Velocity) * (d1.Velocity - d2.Velocity);
            sum += (d1.VelocityX - d2.VelocityX) * (d1.VelocityX - d2.VelocityX);
            sum += (d1.VelocityY - d2.VelocityY) * (d1.VelocityY - d2.VelocityY);

            sum += (d1.Acc - d2.Acc) * (d1.Acc - d2.Acc);
            sum += (d1.AccX - d2.AccX) * (d1.AccX - d2.AccX);
            sum += (d1.AccY - d2.AccY) * (d1.AccY - d2.AccY);

            sum += (d1.PressureChange - d2.PressureChange) * (d1.PressureChange - d2.PressureChange);

            // while the tilts don't work, I'm not adding them.
            return((float)Math.Sqrt(sum));
        }
Example #19
            public static List <double> GetCostGradients(double[] output, double[] expected, CostFunctions costFunction, out double cost)
            {
                cost = GetCostOf(output, expected, costFunction);

                List <double> Gradients = new List <double>();

                for (int i = 0; i < Math.Min(output.Length, expected.Length); i++)
                {
                    if (double.IsNaN(expected[i]))
                    {
                        Gradients.Add(0);
                    }
                    else
                    {
                        Gradients.Add(Derivatives.DerivativeOf(output[i], expected[i], costFunction));
                    }
                }
                return(Gradients);
            }
Example #20
        /// <summary>
        /// Creates the derivatives for the specified function.
        /// </summary>
        /// <param name="function">The function.</param>
        /// <returns>The derivatives with respect to all variables.</returns>
        public Dictionary <VariableNode, Node> CreateDerivatives(Node function)
        {
            var state    = GetState <IBiasingSimulationState>();
            var bp       = GetParameterSet <Parameters>();
            var comparer = new VariableNodeComparer(state.Comparer, Simulation.EntityBehaviors.Comparer, bp.VariableComparer);

            // Derive the function
            var derivatives = new Derivatives()
            {
                Variables = new HashSet <VariableNode>(comparer)
            };
            var nf = new NodeFinder();

            foreach (var variable in nf.Build(function).Where(v => v.NodeType == NodeTypes.Voltage || v.NodeType == NodeTypes.Current))
            {
                if (derivatives.Variables.Contains(variable))
                {
                    continue;
                }
                derivatives.Variables.Add(variable);
            }
            return(derivatives.Derive(function) ?? new Dictionary <VariableNode, Node>(comparer));
        }
Example #21
        /// <summary>
        /// Calculates the gradients of a recurrent cell for a single time step.
        /// </summary>
        /// <param name="costGradient">Gradient of the cost with respect to this cell's output.</param>
        /// <param name="bias">The cell's bias.</param>
        /// <param name="hiddenState">The previous hidden state.</param>
        /// <param name="prevHiddenGrad">Gradient of the previous hidden state (the previous initial cost gradient).</param>
        /// <param name="prevActivations">Activations of the previous layer.</param>
        /// <param name="activationFunction">The activation function to differentiate.</param>
        /// <param name="prevActivationsGrads">Gradients with respect to the previous layer's activations.</param>
        /// <param name="weigthGrads">Gradients with respect to the input weights.</param>
        /// <param name="recWeigthGrad">Gradient with respect to the recurrent weight.</param>
        /// <param name="biasGrad">Gradient with respect to the bias.</param>
        public void GetGradients(double costGradient, double bias, double hiddenState, double prevHiddenGrad, double[] prevActivations, ActivationFunctions activationFunction
                                 , out List <double> prevActivationsGrads, out List <double> weigthGrads, out double recWeigthGrad, out double biasGrad)
        {
            //execution
            double linearFunc          = LinearFunction(prevActivations, Weigths, bias);
            double linearWithRecurrent = linearFunc + recurrentWeigth * hiddenState;
            //end execution

            double currentGrad = costGradient;

            currentGrad *= Derivatives.DerivativeOf(linearWithRecurrent, activationFunction);
            double recurrentMultiplicationDerivative = Derivatives.MultiplicationDerivative(hiddenState, prevHiddenGrad, recurrentWeigth, 0);

            double linearFuncDerivative = 0;

            for (int i = 0; i < prevActivations.Length; i++)
            {
                linearFuncDerivative += prevActivations[i]; //shortcut for the multiplication derivative; the bias is omitted because it is an added constant and the derivative of a constant is 0
            }
            currentGrad *= linearFuncDerivative + recurrentMultiplicationDerivative;

            biasGrad      = currentGrad;
            recWeigthGrad = currentGrad * hiddenState;

            weigthGrads = new List <double>();
            foreach (var input in prevActivations)
            {
                weigthGrads.Add(input * currentGrad);
            }

            prevActivationsGrads = new List <double>();
            foreach (var weigth in Weigths)
            {
                prevActivationsGrads.Add(weigth * currentGrad);
            }
        }
Example #22
        /// <summary>
        /// If an entry of expectedOutput is NaN, that output has no expected value.
        /// </summary>
        public void SupervisedBackProp(List <double[]> input, List <double[]> expectedOutput, CostFunctions costFunction, List <List <NeuronValues> > startingStates, out double cost, out List <List <List <NeuronValues> > > grads, out List <double[]> biasGrads)
        {
            cost = 0;
            if (input.Count != expectedOutput.Count)
            {
                throw new ArgumentOutOfRangeException();
            }
            var copy = new TemporalNetwork(ActivationFunction, TemporalLayers);

            copy.SetTemporalStates(startingStates);
            List <double[]> costs = new List <double[]>();

            for (int t = 0; t < input.Count; t++)
            {
                double[] outputT = copy.ExecuteNetwork(input[t]);
                costs.Add(new double[expectedOutput[t].Length]);
                cost += Cost.GetCostOf(outputT, expectedOutput[t], costFunction);//REPAIR
                for (int i = 0; i < expectedOutput[t].Length; i++)
                {
                    costs[t][i] = Derivatives.DerivativeOf(outputT[i], expectedOutput[t][i], costFunction);
                }
            }
            GetGradients(costs, input, startingStates, out grads, out biasGrads);
        }
Example #23
 /// <summary>
 /// Initializes a new instance of the <see cref="BehavioralFunction"/> class.
 /// </summary>
 /// <param name="map">Maps each derivative to an unknown</param>
 /// <param name="derivatives">The derivatives.</param>
 public BehavioralFunction(Dictionary <int, int> map, Derivatives <Func <double> > derivatives)
 {
     _map         = map.ThrowIfNull(nameof(map));
     _derivatives = derivatives.ThrowIfNull(nameof(derivatives));
 }
Example #24
        static void Main(string[] args)
        {
            /*
             * Currently implemented:
             *  Sums
             *  Products
             *  Quotients
             *  Numeric constants
             *  Variable constants (constants that can be changed)
             *  Variables
             *
             * Functionality:
             *  (Partial) derivatives
             *  Calculations
             *  Printing of formulas
             *
             * Notes:
             *  Simplification does not yet do anything beyond removing the following:
             *      A * 0 -> 0
             *      A * 1 -> A
             *      A + 0 -> A
             *      A / 1 -> A
             *      0 / A -> 0
             *      Number / Number -> Number
             *      Number + Number -> Number
             *      Number * Number -> Number
             *
             */

            var A = new VariableArgumentValue("A"); //create arguments with values, i.e. variables. The values of the variables can be changed using "ConstantArgumentValue" and "VariableArgumentValue".

            var B = new ConstantArgumentValue("B"); //A variable constant. Can only be used to create constants in the syntax.

            var C = new VariableArgumentValue("C"); //A true variable. Can only be used to create variables in the syntax.
            var D = new VariableArgumentValue("D");

            A.Value = 10;//change their value, their value type is a double.
            B.Value = 13;
            C.Value = 14.67;
            D.Value = -3;

            SyntaxBlock SomeSum = new Sum(new Variable(A), new Sum(new VariableConstant(B), new NumericConstant(420))); //create a formula.

            Console.WriteLine(SomeSum.print());                                                                         //print the formula
            Console.WriteLine(SomeSum.Calculate());                                                                     //Calculate the results of a formula

            Console.WriteLine("\nCalculating partial derivatives");


            SyntaxBlock functionA, functionB;

            functionA = new Product(new NumericConstant(44), new Product(new Variable(A), new Variable(C)));
            functionB = new Product(new VariableConstant(B), new Product(new Variable(A), new Variable(D)));

            SyntaxBlock ComplexSum               = new Sum(functionA, functionB);                                                               //functions of functions
            SyntaxBlock SuperComplexProduct      = new Product(ComplexSum, ComplexSum);                                                         //functions of functions of functions
            SyntaxBlock SuperSuperComplexProduct = new Product(SuperComplexProduct, SuperComplexProduct);                                       //functions^4

            List <Tuple <VariableArgumentValue, SyntaxBlock> > derivatives = Derivatives.CalculatePartialDerivatives(SuperSuperComplexProduct); //Calculate all the partial derivatives of a given function.

            //A Function to calculate the derivative for a simple (single argument) function is also present, but will throw an exception if the formula given has more than one variable.
            //swap out "SuperSuperComplexProduct" for any other syntaxblock to see the result.
            foreach (var i in derivatives)
            {
                Console.WriteLine(i.Item1.Name + " : " + i.Item2.print()); //print the resulting derivatives
                Console.WriteLine();
            }

            double CalculationResult = derivatives[0].Item2.Calculate(); //calculate the result of a derivative function.

            Console.WriteLine("Calculation result: " + CalculationResult);



            Console.WriteLine("Press any key to continue...");
            Console.ReadKey();
        }
Example #25
 // Use this for initialization
 void Start()
 {
     derivatives = new Derivatives(mass);
     potential   = new Potential();
     pointsMath();
 }