Example #1
        public void InitialiseParameters(double initialPrediction = 0, int n = 2, double estimateNoise = 3, double processNoise = 1, double alpha = 0.3)
        {
            EstimateNoise      = estimateNoise;
            ProcessNoise       = processNoise;
            _initialPrediction = initialPrediction;

            this.n = n;

            //int m = 1;
            filter = new Unscented(n, 1);
            //var adaptivefilter = new KalmanFilter.Adaptive1(alpha);


            R = Matrix.Build.Diagonal(n, n, estimateNoise * estimateNoise); //covariance of measurement
            //Q = Matrix.Build.Diagonal(n, n, processNoise * processNoise); //covariance of process
            q = new WhiteNoise(processNoise, 2);

            f = new FEquation();                                    //nonlinear state equations
            h = new HEquation();                                    //measurement equation

            x = Vector <double> .Build.Dense(n, initialPrediction); //s + q * Matrix.Build.Random(1, 1); //initial state with noise

            P = Matrix.Build.Diagonal(n, n, 1);                     //initial state covariance
            //ng = ng ?? new NoiseGenerator(processNoise, 2);
        }
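A minimal usage sketch of the filter setup above (the enclosing class name UkfWrapper is hypothetical, not shown in this example):

        // Configure a 2-state unscented filter with measurement noise sd = 3
        // and process noise sd = 1; f and h are the equations wired up above.
        var ukf = new UkfWrapper();
        ukf.InitialiseParameters(initialPrediction: 0, n: 2,
                                 estimateNoise: 3, processNoise: 1);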
Example #2
        public void TestWNReadWrite()
        {
            double     log_nse_sd = System.Math.Log(3.456);
            WhiteNoise wnkf       = new WhiteNoise(log_nse_sd);

            TestReadWrite(wnkf);
        }
Example #3
        public void TestKernelSummation()
        {
            double          log_length = System.Math.Log(1.234);
            double          log_sig_sd = System.Math.Log(2.345);
            double          log_nse_sd = System.Math.Log(3.456);
            SummationKernel kf         = new SummationKernel(new SquaredExponential(log_length, log_sig_sd));

            kf += new WhiteNoise(log_nse_sd);

            Assert.Equal(log_length, kf[0]);
            Assert.Equal(log_sig_sd, kf[1]);
            Assert.Equal(log_nse_sd, kf[2]);

            // Try resetting directly on the summation, using the name indexer
            log_length = 4.321;
            log_sig_sd = 5.432;
            log_nse_sd = 6.543;

            kf["Length"]   = log_length;
            kf["SignalSD"] = log_sig_sd;
            kf["NoiseSD"]  = log_nse_sd;

            Assert.Equal(log_length, kf[0]);
            Assert.Equal(log_sig_sd, kf[1]);
            Assert.Equal(log_nse_sd, kf[2]);
        }
Example #4
        public void TestSummationReadWrite()
        {
            double log_length = System.Math.Log(1.234);
            double log_sig_sd = System.Math.Log(2.345);
            double log_nse_sd = System.Math.Log(3.456);

            double[]        log_var = { System.Math.Log(0.987), System.Math.Log(0.876), System.Math.Log(0.765) };
            SummationKernel kf      = new SummationKernel(new SquaredExponential(log_length, log_sig_sd));

            kf += new LinearKernel(log_var);
            kf += new WhiteNoise(log_nse_sd);
            TestReadWrite(kf);
        }
Example #5
        public void TestWNKernelDerivs()
        {
            double     log_nse_sd = System.Math.Log(3.456);
            WhiteNoise wnkf       = new WhiteNoise(log_nse_sd);

            double[] x1    = { 0.1, 0.2, 0.3 };
            double[] x2    = { 0.9, 0.7, 0.5 };
            Vector   x1Vec = Vector.FromArray(x1);
            Vector   x2Vec = Vector.FromArray(x2);

            TestDerivatives(wnkf, x1Vec, x1Vec);
            TestDerivatives(wnkf, x1Vec, x2Vec);
        }
Example #6
    // Start is called before the first frame update
    void Start()
    {
        audioSources = GetComponents <AudioSource>();

        WhiteNoise.clip      = WhiteNoiseClip;
        BackgroundMusic.clip = BackgroundMusicClip;
        ShepardTone.clip     = ShepardToneClip;

        backgroundMusicMaxVolume = BackgroundMusic.volume;
        shepardToneMaxVolume     = ShepardTone.volume;

        ShepardTone.volume     = 0;
        BackgroundMusic.volume = 0;

        WhiteNoise.Play();
        BackgroundMusic.Play();
        ShepardTone.Play();
    }
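The zeroed volumes and stored maxima suggest these sources are faded in later; a minimal sketch of such a fade, assuming a hypothetical fadeSeconds field and Unity's per-frame Update():

    void Update()
    {
        // Ramp each source toward its stored maximum volume.
        float step = Time.deltaTime / fadeSeconds; // fadeSeconds is assumed
        BackgroundMusic.volume = Mathf.MoveTowards(BackgroundMusic.volume, backgroundMusicMaxVolume, step);
        ShepardTone.volume     = Mathf.MoveTowards(ShepardTone.volume, shepardToneMaxVolume, step);
    }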
Example #7
        public void TestSumKDerivs()
        {
            double[] log_length = { System.Math.Log(0.543), System.Math.Log(0.432), System.Math.Log(0.321) };
            double   log_sig_sd = System.Math.Log(2.345);

            double[]        log_var    = { System.Math.Log(0.987), System.Math.Log(0.876), System.Math.Log(0.765) };
            double          log_nse_sd = System.Math.Log(3.456);
            SummationKernel kf         = new SummationKernel(new ARD(log_length, log_sig_sd));

            kf += new LinearKernel(log_var);
            kf += new WhiteNoise(log_nse_sd);
            double[] x1    = { 0.1, 0.2, 0.3 };
            double[] x2    = { 0.9, 0.7, 0.5 };
            Vector   x1Vec = Vector.FromArray(x1);
            Vector   x2Vec = Vector.FromArray(x2);

            TestDerivatives(kf, x1Vec, x1Vec);
            TestDerivatives(kf, x1Vec, x2Vec);
        }
Example #8
        public void BasicGPC()
        {
            Vector[] inputs = new Vector[]
            {
                Vector.FromArray(new double[2] {
                    0, 0
                }),
                Vector.FromArray(new double[2] {
                    0, 1
                }),
                Vector.FromArray(new double[2] {
                    1, 0
                }),
                Vector.FromArray(new double[2] {
                    0, 0.5
                }),
                Vector.FromArray(new double[2] {
                    1.5, 0
                }),
                Vector.FromArray(new double[2] {
                    0.5, 1.0
                })
            };
            bool[] outputs = { true, true, false, true, false, false };

            var kf = new SummationKernel(new SquaredExponential(0));

            kf += new WhiteNoise(System.Math.Log(0.1));
            var K = GramMatrix(kf, inputs);

            var n = new Range(inputs.Length);
            var x = Variable.VectorGaussianFromMeanAndVariance(Vector.Zero(inputs.Length), K);
            var g = Variable.ArrayFromVector(x, n);
            var p = Variable.Array <bool>(n);

            p[n]            = Variable.IsPositive(g[n]);
            p.ObservedValue = outputs;
            var ie = new InferenceEngine();

            Console.WriteLine(ie.Infer(x));
        }
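The GramMatrix helper is not shown in this example; a minimal sketch of what it plausibly computes, assuming Infer.NET's IKernelFunction with EvaluateX1X2:

        // Build the kernel (Gram) matrix K[i, j] = k(x_i, x_j) over the inputs.
        static PositiveDefiniteMatrix GramMatrix(IKernelFunction kf, Vector[] xs)
        {
            var K = new PositiveDefiniteMatrix(xs.Length, xs.Length);
            for (int i = 0; i < xs.Length; i++)
                for (int j = 0; j < xs.Length; j++)
                    K[i, j] = kf.EvaluateX1X2(xs[i], xs[j]);
            return K;
        }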
Example #9
    // Generate perlin noise
    public static float[,] GeneratePerlinNoise(int seed, int octaveCount, float persistence, int noiseWidth, int noiseHeight)
    {
        List <float[, ]> smoothNoiseOctaves = new List <float[, ]>();

        // Get smooth noise octaves
        for (int i = 0; i < octaveCount; i++)
        {
            smoothNoiseOctaves.Add(SmoothNoise.GenerateSmoothNoise(WhiteNoise.GenerateWhiteNoise(seed, noiseWidth, noiseHeight), i, noiseWidth, noiseHeight));
        }

        float[,] noise = new float[noiseWidth, noiseHeight];
        float amplitude      = 1.0f;
        float totalAmplitude = 0.0f;

        // Blend noise together
        for (int octave = octaveCount - 1; octave >= 0; octave--)
        {
            amplitude      *= persistence;
            totalAmplitude += amplitude;

            for (int i = 0; i < noiseWidth; i++)
            {
                for (int j = 0; j < noiseHeight; j++)
                {
                    noise[i, j] += smoothNoiseOctaves[octave][i, j] * amplitude;
                }
            }
        }

        // Normalise
        for (int i = 0; i < noiseWidth; i++)
        {
            for (int j = 0; j < noiseHeight; j++)
            {
                noise[i, j] /= totalAmplitude;
            }
        }

        return noise;
    }
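An illustrative call (all values hypothetical): six octaves with persistence 0.5 produce a map normalised to roughly [0, 1]:

    // Generate a 128x128 blended, normalised noise map (e.g. terrain heights).
    float[,] heightMap = GeneratePerlinNoise(seed: 42, octaveCount: 6, persistence: 0.5f,
                                             noiseWidth: 128, noiseHeight: 128);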
Example #10
        static Bitmap GenerateWhiteNoise()
        {
            var whiteNoise = new WhiteNoise(256, 1);

            return whiteNoise.Create($"{Path}\\whitenoise.jpg");
        }
Example #11
        /// <summary>
        /// Primary definition of the GPRN model as an Infer.NET model.
        /// </summary>
        /// <param name="inputs">Covariates X</param>
        /// <param name="data">Outputs Y</param>
        /// <param name="Q">Number of latent functions</param>
        /// <param name="missing">Which elements of Y are missing</param>
        /// <param name="nodeFunctionNoise">Whether to include node noise</param>
        /// <param name="constrainWpositive">Whether to constrain W to be positive [experimental]</param>
        /// <param name="isotropicNoise">Whether to use isotropic observation noise</param>
        /// <param name="meanFunctions">Whether to include a per output mean function</param>
        /// <param name="initLoglengthscales">Initial values for the length scales of the kernels</param>
        /// <param name="sw">An output file for logging</param>
        public void GPRN_InferNET_model(Vector[] inputs,
                                        double[,] data,
                                        int Q,
                                        bool grid                    = false,
                                        bool[,] missing              = null,
                                        bool nodeFunctionNoise       = false,
                                        bool constrainWpositive      = false,
                                        bool isotropicNoise          = true,
                                        bool meanFunctions           = false,
                                        double[] initLoglengthscales = null,
                                        StreamWriter sw              = null)
        {
            var             toInfer = new List <IVariable>();
            SummationKernel kf_node = new SummationKernel(new SquaredExponential(0)) + new WhiteNoise(-3);
            var             K_node  = Utils.GramMatrix(kf_node, inputs);

            SummationKernel kf_weights = new SummationKernel(new SquaredExponential(1)) + new WhiteNoise(-3);
            var             K_weights  = Utils.GramMatrix(kf_weights, inputs);

            var D    = Variable.Observed <int>(data.GetLength(0)).Named("D");
            var d    = new Range(D).Named("d");
            var Qvar = Variable.Observed <int>(Q).Named("Q");
            var q    = new Range(Qvar).Named("q");
            var N    = Variable.Observed <int>(data.GetLength(1)).Named("N");
            var n    = new Range(N).Named("n");

            if (missing == null)
            {
                missing = new bool[D.ObservedValue, N.ObservedValue]; // check this is all false
            }
            var ev         = Variable.Bernoulli(.5).Named("ev");
            var modelBlock = Variable.If(ev);

            var nodeSignalPrecisions = Variable.Array <double>(q).Named("nodeSignalPrecisions");
            // set this to 1 if not learning signal variance
            var nodeSignalPrecisionsPrior = Variable.Observed(Enumerable.Range(0, Q).Select(_ => Gamma.FromShapeAndRate(.1, .1)).ToArray(), q).Named("nodeSignalPrecisionsPrior");

            nodeSignalPrecisions[q] = Variable.Random <double, Gamma>(nodeSignalPrecisionsPrior[q]);

            var nodeFunctions  = Variable.Array <Vector>(q).Named("nodeFunctions");
            var K_node_inverse = Variable.Observed(K_node.Inverse()).Named("K_node_inverse");

            nodeFunctions[q] = Variable <Vector> .Factor(MyFactors.VectorGaussianScaled, nodeSignalPrecisions[q], K_node_inverse);

            nodeFunctions.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
            var nodeFunctionValues           = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValues");
            var nodeFunctionValuesPredictive = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesPredictive");

            VariableArray <double> nodeNoisePrecisions = null;

            if (nodeFunctionNoise)
            {
                var nodeFunctionValuesClean = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesClean");
                nodeFunctionValuesClean[q] = Variable.ArrayFromVector(nodeFunctions[q], n);
                nodeNoisePrecisions        = Variable.Array <double>(q).Named("nodeNoisePrecisions");
                var nodeNoisePrecisionPrior = Variable.Observed(Enumerable.Range(0, Q).Select(_ => Gamma.FromShapeAndRate(.1, .01)).ToArray(), q).Named("nodeNoisePrecisionPrior");
                nodeNoisePrecisions[q] = Variable.Random <double, Gamma>(nodeNoisePrecisionPrior[q]);
                toInfer.Add(nodeNoisePrecisions);
                nodeFunctionValues[q][n] = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);

                nodeFunctionValuesPredictive[q][n] = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);
            }
            else
            {
                nodeFunctionValues[q]           = Variable.ArrayFromVector(nodeFunctions[q], n);
                nodeFunctionValuesPredictive[q] = Variable.ArrayFromVector(nodeFunctions[q], n);
            }

            var weightFunctions   = Variable.Array <Vector>(d, q).Named("weightFunctions");
            var K_weights_inverse = Variable.Observed(K_weights.Inverse()).Named("K_weights_inverse");

            weightFunctions[d, q] = Variable <Vector> .Factor(MyFactors.VectorGaussianScaled, Variable.Constant <double>(1), K_weights_inverse).ForEach(d, q);

            weightFunctions.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
            var weightFunctionValues  = Variable.Array(Variable.Array <double>(n), d, q).Named("weightFunctionValues");
            var weightFunctionValues2 = Variable.Array(Variable.Array <double>(n), d, q).Named("weightFunctionValuesPredictive");

            weightFunctionValues[d, q] = Variable.ArrayFromVector(weightFunctions[d, q], n);
            if (constrainWpositive)
            {
                var weightFunctionValuesCopy = Variable.Array(Variable.Array <double>(n), d, q).Named("weightFunctionValuesCopy");
                weightFunctionValuesCopy[d, q][n] = Variable.GaussianFromMeanAndPrecision(weightFunctionValues[d, q][n], 100);
                Variable.ConstrainPositive(weightFunctionValuesCopy[d, q][n]);
            }
            weightFunctionValues2[d, q] = Variable.ArrayFromVector(weightFunctions[d, q], n);
            var observedData        = Variable.Array <double>(d, n).Named("observedData");
            var noisePrecisionPrior = Variable.Observed(Gamma.FromShapeAndRate(1, .1)).Named("noisePrecisionPrior");
            Variable <double>      noisePrecision      = null;
            VariableArray <double> noisePrecisionArray = null;

            if (isotropicNoise)
            {
                noisePrecision = Variable.Random <double, Gamma>(noisePrecisionPrior).Named("noisePrecision");
                toInfer.Add(noisePrecision);
            }
            else
            {
                noisePrecisionArray    = Variable.Array <double>(d).Named("noisePrecision");
                noisePrecisionArray[d] = Variable.Random <double, Gamma>(noisePrecisionPrior).ForEach(d);
                toInfer.Add(noisePrecisionArray);
            }

            var isMissing = Variable.Array <bool>(d, n).Named("isMissing");

            isMissing.ObservedValue = missing;

            var noiseLessY = Variable.Array <double>(d, n).Named("noiseLessY");

            VariableArray <VariableArray <double>, double[][]> meanFunctionValues = null;

            if (meanFunctions)
            {
                GPFactor.settings = new Settings
                {
                    solverMethod = Settings.SolverMethod.GradientDescent,
                };

                VariableArray <KernelFunction> kf = Variable.Array <KernelFunction>(d);
                kf.ObservedValue = Enumerable.Range(0, D.ObservedValue).Select(
                    o => new SummationKernel(new SquaredExponential()) + new WhiteNoise(-3)).ToArray();

                var mf = Variable.Array <Vector>(d).Named("meanFunctions");
                mf[d] = Variable <Vector> .Factor <double, Vector[], int[], KernelFunction>(MyFactors.GP, 1.0 /*Variable.GammaFromShapeAndRate(1,1)*/, inputs, new int[] { 0 },
                                                                                            kf[d]);

                mf.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
                meanFunctionValues    = Variable.Array(Variable.Array <double>(n), d).Named("meanFunctionValues");
                meanFunctionValues[d] = Variable.ArrayFromVector(mf[d], n);
                toInfer.Add(meanFunctionValues);
            }

            using (Variable.ForEach(n))
                using (Variable.ForEach(d))
                {
                    var temp = Variable.Array <double>(q).Named("temp");
                    temp[q] = weightFunctionValues[d, q][n] * nodeFunctionValues[q][n];
                    if (meanFunctions)
                    {
                        noiseLessY[d, n] = Variable.Sum(temp) + meanFunctionValues[d][n];
                    }
                    else
                    {
                        noiseLessY[d, n] = Variable.Sum(temp);
                    }
                    using (Variable.IfNot(isMissing[d, n]))
                        if (isotropicNoise)
                        {
                            observedData[d, n] = Variable.GaussianFromMeanAndPrecision(noiseLessY[d, n], noisePrecision);
                        }
                        else
                        {
                            observedData[d, n] = Variable.GaussianFromMeanAndPrecision(noiseLessY[d, n], noisePrecisionArray[d]);
                        }
                    using (Variable.If(isMissing[d, n]))
                        observedData[d, n] = Variable.GaussianFromMeanAndPrecision(0, 1);
                }
            observedData.ObservedValue = data;
            var nodeFunctionsInit = Enumerable.Range(0, Q).Select(i =>
                                                                  VectorGaussian.FromMeanAndVariance(
                                                                      VectorGaussian.Sample(Vector.Zero(N.ObservedValue), PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100)),
                                                                      PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100))).ToArray(); // should put this manually in generated code

            var distArray = Distribution <Vector> .Array(nodeFunctionsInit);

            var nodeFunctionsInitVar = Variable.Observed(distArray).Named("nodeFunctionsInitVar");

            nodeFunctions.InitialiseTo(nodeFunctionsInitVar);

            modelBlock.CloseBlock();

            toInfer.AddRange(new List <IVariable>()
            {
                ev, noiseLessY, nodeFunctionValues, nodeSignalPrecisions, nodeFunctionValuesPredictive, weightFunctionValues, weightFunctionValues2
            });

            var infer = new InferenceEngine(new VariationalMessagePassing());

            infer.ModelName = "MeanFunction";
            var ca = infer.GetCompiledInferenceAlgorithm(toInfer.ToArray());

            // initLoglengthscales defaults to null but is indexed below, so guard it.
            initLoglengthscales = initLoglengthscales ?? new[] { 0.0, 0.0 };
            var kernel = new SummationKernel(new SquaredExponential(initLoglengthscales[0]));

            kernel += new WhiteNoise(-3);
            ca.SetObservedValue(K_node_inverse.NameInGeneratedCode, Utils.GramMatrix(kernel, inputs).Inverse());

            kernel  = new SummationKernel(new SquaredExponential(initLoglengthscales[1]));
            kernel += new WhiteNoise(-3);
            ca.SetObservedValue(K_weights_inverse.NameInGeneratedCode, Utils.GramMatrix(kernel, inputs).Inverse());

            ca.Reset();
            double oldML = double.NegativeInfinity;
            double ml    = 0;
            int    it    = 0;

            for (; it < 100; it++)
            {
                ca.Update(1);
                ml = ca.Marginal <Bernoulli>(ev.NameInGeneratedCode).LogOdds;
                Console.WriteLine(ml);
                if (Math.Abs(oldML - ml) < .1)
                {
                    break;
                }
                oldML = ml;
            }
            Console.WriteLine("Finished after " + it);
        }
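A hypothetical invocation (the instance name, data arrays, and log length-scales are illustrative, not part of the example):

            // Two latent functions, per-node noise kept on, isotropic observation
            // noise, and log length-scales of 0 for the node and weight kernels.
            model.GPRN_InferNET_model(X, Y, Q: 2,
                                      nodeFunctionNoise: true,
                                      initLoglengthscales: new[] { 0.0, 0.0 });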
Example #12
        /// <summary>
        /// Run image generation.
        /// </summary>
        /// <returns>Pointer to the image buffer which was set in the constructor.</returns>
        public override TextureBuffer process() {
            NoiseBase noiseGen;
            switch (mType) {
                case NOISE_TYPE.NOISE_PERLIN:
                    noiseGen = new PerlinNoise();
                    break;

                default:
                // Stacked `default:`/`case` labels with no statements between them
                // are valid C#, so white noise also serves as the fallback.
                case NOISE_TYPE.NOISE_WHITE:
                    noiseGen = new WhiteNoise(mSeed);
                    break;
            }

            byte[] field = noiseGen.field2D(mBuffer.getWidth(), mBuffer.getHeight());
            for (int y = 0; y < mBuffer.getHeight(); ++y) {
                for (int x = 0; x < mBuffer.getWidth(); ++x) {
                    float noiseVal = (float)field[y * mBuffer.getWidth() + x];
                    mBuffer.setRed(x, y, (byte)(noiseVal * mColour.r));
                    mBuffer.setGreen(x, y, (byte)(noiseVal * mColour.g));
                    mBuffer.setBlue(x, y, (byte)(noiseVal * mColour.b));
                    mBuffer.setAlpha(x, y, (byte)(mColour.a * 255.0f));
                }
            }

            field = null;
            noiseGen.Dispose();
            Utils.log("Create noise texture: " + mType);
            return mBuffer;
        }