Code example #1
File: Hist2DVmD.cs  Project: tp-nscan/HopAlong
        public static IEnumerable<Z2<float>> TestData()
        {
            var g = new MathNet.Numerics.Distributions.Normal(0.0, 1.0);
            var randy = new MathNet.Numerics.Random.MersenneTwister();

            g.RandomSource = randy;
            var dblsX = new double[100000];
            var dblsY = new double[100000];
            g.Samples(dblsX);
            g.Samples(dblsY);
            return dblsX.Select((d,i) =>
                new Z2<float>((float)d, (float)dblsY[i]));
        }
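
TestData draws two independent arrays of standard-normal values and zips them into Z2<float> points. For reference, a minimal stand-alone sketch of the same MathNet.Numerics sampling pattern (the seed, array length, and class name below are illustrative, not taken from the original project):

using System;
using MathNet.Numerics.Distributions;
using MathNet.Numerics.Random;

class NormalSamplingSketch
{
    static void Main()
    {
        // Normal(mean, stddev) with an explicit MersenneTwister so runs are reproducible.
        var normal = new Normal(0.0, 1.0) { RandomSource = new MersenneTwister(42) };

        // Samples(double[]) fills the arrays in place, avoiding per-sample allocations.
        var xs = new double[1000];
        var ys = new double[1000];
        normal.Samples(xs);
        normal.Samples(ys);

        Console.WriteLine($"first point: ({xs[0]:F3}, {ys[0]:F3})");
    }
}
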
        public double TakeSamples()
        {
            var dateTimeElapsed = 0.0;
            var dateTime        = DateTime.Now;

            if (this.DistributionName == "Binomial")
            {
                fullName = $"{DistributionName}-Samples:{SamplesNumber}-Trials:{TrialsNumber}";

                var binomial         = new MathNet.Numerics.Distributions.Binomial(0.5, this.TrialsNumber);
                var generatedsamples = binomial.Samples().Take(SamplesNumber).ToArray();
            }
            else if (this.DistributionName == "Geometric")
            {
                fullName = $"{DistributionName}-Samples:{SamplesNumber}";

                var geometric        = new MathNet.Numerics.Distributions.Geometric(0.5);
                var generatedsamples = geometric.Samples().Take(SamplesNumber).ToArray();
            }
            else if (this.DistributionName == "Poisson")
            {
                fullName = $"{DistributionName}-Samples:{SamplesNumber}";

                var poisson          = new MathNet.Numerics.Distributions.Poisson(0.5);
                var generatedsamples = poisson.Samples().Take(SamplesNumber).ToArray();
            }
            else if (this.DistributionName == "Normal")
            {
                fullName = $"{DistributionName}-Samples:{SamplesNumber}";

                var normal           = new MathNet.Numerics.Distributions.Normal(0.5, 2);
                var generatedsamples = normal.Samples().Take(SamplesNumber).ToArray();
            }

            dateTimeElapsed = (DateTime.Now - dateTime).TotalMilliseconds;
            return dateTimeElapsed;
        }
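
TakeSamples above measures elapsed time with DateTime.Now, whose resolution is coarse (roughly 10-15 ms on Windows). A minimal sketch of the same measurement using System.Diagnostics.Stopwatch, which has much finer resolution; the helper name and parameters are hypothetical, not part of the original class:

using System.Diagnostics;
using System.Linq;
using MathNet.Numerics.Distributions;

static class SamplingTimerSketch
{
    // Hypothetical helper: times how long it takes to materialize
    // 'samplesNumber' Poisson draws, using Stopwatch instead of DateTime.Now.
    public static double TimePoissonSampling(double lambda, int samplesNumber)
    {
        var poisson = new Poisson(lambda);

        var sw = Stopwatch.StartNew();
        var samples = poisson.Samples().Take(samplesNumber).ToArray();
        sw.Stop();

        // 'samples' holds the generated values; only the timing is returned here.
        return sw.Elapsed.TotalMilliseconds;
    }
}
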
Code example #3
        /// <summary>
        /// Gets a new Decision Vector, based on the PCX logic.
        /// </summary>
        /// <param name="parents">A list of parent <see cref="DecisionVector"/>s.</param>
        /// <returns>A new <see cref="DecisionVector"/>.</returns>
        /// <exception cref="ArgumentOutOfRangeException">
        /// Thrown if:
        /// - there are fewer than two parents; or
        /// - the parents have different-length or zero-length decision vectors; or
        /// - any of the parents have non-continuous Decision Vector elements.
        /// </exception>
        public DecisionVector Operate(params DecisionVector[] parents)
        {
            if (parents.Length < 2)
            {
                throw new ArgumentOutOfRangeException(nameof(parents),
                                                      "There must be at least two parents.");
            }

            // TODO: These calls to .Any() are slow - can we remove the error checking?
            if (parents.Any(p => p.GetContinuousElements().Count == 0))
            {
                throw new ArgumentOutOfRangeException(nameof(parents),
                                                      "Parents must have non-zero length decision vectors.");
            }

            if (parents.Any(p => p.GetContinuousElements().Count != parents.First().Count))
            {
                throw new ArgumentOutOfRangeException(nameof(parents),
                                                      "Parents must have the same length and fully continuous decision vectors.");
            }

            // 1: Pre-process
            var parentDVs = Matrix<double>.Build.DenseOfColumns(parents.Select(dv => dv.Select(d => (double)d)));

            var motherDV = Vector<double>.Build.DenseOfArray(parents.ElementAt(0).Select(d => (double)d).ToArray());

            // 1a: centroid of all parents
            var centroid = parentDVs.RowSums().Divide(parents.Count());

            // 1b: vector distance from centroid to mother (following Deb's C code, not paper)
            var motherCentroidVectorDistance   = centroid - motherDV;
            var motherCentroidAbsoluteDistance = motherCentroidVectorDistance.L2Norm();

            if (motherCentroidAbsoluteDistance < 1e-20)
            {
                return DecisionVector.CreateForEmpty();
            }

            // 1c: vector distance from other parents to mother
            var otherParentDVs = parentDVs.RemoveColumn(0);
            var parentMotherVectorDistances = otherParentDVs.EnumerateColumns()
                                              .Select(v => v - motherDV).ToArray();
            var parentMotherAbsoluteDistances = parentMotherVectorDistances.Select(v => v.L2Norm()).ToArray();

            if (parentMotherAbsoluteDistances.Any(d => d < 1e-20))
            {
                return DecisionVector.CreateForEmpty();
            }

            // 1d: perpendicular distances from other parents to centroid-mother vector
            var orthogonalDistances = parentMotherVectorDistances
                                      .Select((v, i) => parentMotherAbsoluteDistances.ElementAt(i) *
                                              Math.Sqrt(1.0 - Math.Pow(
                                                            v.DotProduct(motherCentroidVectorDistance) /
                                                            (parentMotherAbsoluteDistances.ElementAt(i) * motherCentroidAbsoluteDistance),
                                                            2.0)));
            var meanOrthogonalDistance = orthogonalDistances.Mean();

            // 2: Now create a new individual
            var normRnd    = new MathNet.Numerics.Distributions.Normal(rngManager.Rng);
            var samplesEta = new double[motherDV.Count];

            normRnd.Samples(samplesEta);

            var newRandomDv = Vector<double>.Build.DenseOfArray(samplesEta)
                              .Multiply(sigmaEta * meanOrthogonalDistance);

            // Remove the component of randomness in the direction of the centroid-mother vector
            var offset1 = motherCentroidVectorDistance
                          .Multiply(newRandomDv.DotProduct(motherCentroidVectorDistance))
                          .Divide(Math.Pow(motherCentroidAbsoluteDistance, 2.0));

            newRandomDv -= offset1;

            var offset2 = motherCentroidVectorDistance
                          .Multiply(sigmaZeta * normRnd.Sample());

            newRandomDv += offset2;

            // Modification of Deb2002 which should maintain stability.
            var finalDv = motherDV +
                          newRandomDv.Divide(Math.Sqrt(motherDV.Count));

            return DecisionVector.CreateFromArray(parents.First().GetDecisionSpace(), finalDv.ToArray());
        }
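
For orientation, and as a reading of the code rather than text from the original source: Operate follows the parent-centric crossover (PCX) of Deb et al. (2002), which generates an offspring from a chosen parent $\vec{x}^{(p)}$ as

$$\vec{y} = \vec{x}^{(p)} + w_\zeta\,\vec{d}^{(p)} + \sum_{i \neq p} w_\eta\,\bar{D}\,\hat{e}^{(i)},$$

where $\vec{d}^{(p)}$ is the vector between the parent and the centroid of all parents, $\bar{D}$ is the mean perpendicular distance of the other parents to the line along $\vec{d}^{(p)}$, the $\hat{e}^{(i)}$ are orthonormal directions perpendicular to $\vec{d}^{(p)}$, and $w_\zeta \sim N(0, \sigma_\zeta^2)$, $w_\eta \sim N(0, \sigma_\eta^2)$. The code realizes the perpendicular term by drawing an isotropic normal vector scaled by sigmaEta * meanOrthogonalDistance and projecting out its component along motherCentroidVectorDistance; the final division by Math.Sqrt(motherDV.Count) is the stability modification flagged in the comment above.
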
        public double TakeSamples()
        {
            var dateTimeElapsed = 0.0;
            var dateTime        = DateTime.Now;

            //IEnumerable<int> generatedSamplesEnumerable = Enumerable.Empty<int>();
            //IEnumerable<double> generatedSamplesDoubleEnumerable = Enumerable.Empty<double>();

            if (this.DistributionName == "Binomial")
            {
                fullName = $"{DistributionName}-Samples:{SamplesNumber}-Trials:{TrialsNumber}";

                var binomial         = new MathNet.Numerics.Distributions.Binomial(0.5, this.TrialsNumber);
                var generatedsamples = binomial.Samples().Take(SamplesNumber).ToArray();

                //generatedSamplesEnumerable = binomial.Samples().Take(SamplesNumber);
                //foreach (var item in generatedSamplesEnumerable)
                //{
                //    var test = item;
                //}
            }
            else if (this.DistributionName == "Geometric")
            {
                fullName = $"{DistributionName}-Samples:{SamplesNumber}";

                var geometric        = new MathNet.Numerics.Distributions.Geometric(0.5);
                var generatedsamples = geometric.Samples().Take(SamplesNumber).ToArray();

                //generatedSamplesEnumerable = geometric.Samples().Take(SamplesNumber);
                //foreach (var item in generatedSamplesEnumerable)
                //{
                //    var test = item;
                //}
            }
            else if (this.DistributionName == "Poisson")
            {
                fullName = $"{DistributionName}-Samples:{SamplesNumber}";

                var poisson          = new MathNet.Numerics.Distributions.Poisson(0.5);
                var generatedsamples = poisson.Samples().Take(SamplesNumber).ToArray();

                //generatedSamplesEnumerable = poisson.Samples().Take(SamplesNumber);
                //foreach (var item in generatedSamplesEnumerable)
                //{
                //    var test = item;
                //}
            }
            else if (this.DistributionName == "Normal")
            {
                fullName = $"{DistributionName}-Samples:{SamplesNumber}";

                var normal           = new MathNet.Numerics.Distributions.Normal(0.5, 2);
                var generatedsamples = normal.Samples().Take(SamplesNumber).ToArray();

                //generatedSamplesDoubleEnumerable = normal.Samples().Take(SamplesNumber);
                //foreach(var item in generatedSamplesDoubleEnumerable)
                //{
                //    var test = item;
                //}
            }

            dateTimeElapsed = (DateTime.Now - dateTime).TotalMilliseconds;
            return dateTimeElapsed;
        }
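
Note that Samples() in MathNet.Numerics returns a lazy, unbounded IEnumerable: combined with Take(SamplesNumber), nothing is drawn until the sequence is enumerated, and it is ToArray() that forces every sample to be generated and stored. The commented-out foreach variants consume the same sequence without materializing an array, which is what a streaming consumer looks like. A minimal sketch, with an arbitrary success probability and trial count:

using System;
using System.Linq;
using MathNet.Numerics.Distributions;

// Streaming consumption: each sample is produced on demand and discarded,
// so memory use stays constant no matter how many samples are requested.
var binomial = new Binomial(0.5, 20);
long sum = 0;
foreach (var s in binomial.Samples().Take(100000))
{
    sum += s;
}
Console.WriteLine($"mean of 100000 draws: {sum / 100000.0}");
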
Code example #5
File: CurvasForm.cs  Project: crisguycabs/origin
        /// <summary>
        /// Estimates the static properties as a function of the core depth
        /// </summary>
        public void Estimar()
        {
            // take the segmentation vs. areas-of-interest information and estimate the static petrophysical properties

            // prepare the random CT generators for each phantom
            var phantom1High = new MathNet.Numerics.Distributions.Normal(padre.actual.phantom1.mediaHigh, padre.actual.phantom1.desvHigh);
            var phantom1Low = new MathNet.Numerics.Distributions.Normal(padre.actual.phantom1.mediaLow, padre.actual.phantom1.desvLow);

            var phantom2High = new MathNet.Numerics.Distributions.Normal(padre.actual.phantom2.mediaHigh, padre.actual.phantom2.desvHigh);
            var phantom2Low = new MathNet.Numerics.Distributions.Normal(padre.actual.phantom2.mediaLow, padre.actual.phantom2.desvLow);

            var phantom3High = new MathNet.Numerics.Distributions.Normal(padre.actual.phantom3.mediaHigh, padre.actual.phantom3.desvHigh);
            var phantom3Low = new MathNet.Numerics.Distributions.Normal(padre.actual.phantom3.mediaLow, padre.actual.phantom3.desvLow);

            // prepare a vector of high and low CT values for each phantom
            // this vector represents one slide; only the slide's average value is kept
            double[] temp = new double[padre.actual.datacuboHigh.dataCube[0].segCore.Count];
            MathNet.Numerics.Statistics.DescriptiveStatistics stats;

            // prepare the vectors for density and zeff
            this.Dfm = new double[padre.actual.datacuboHigh.dataCube.Count];
            this.Zfme = new double[padre.actual.datacuboHigh.dataCube.Count];
            this.Pefm = new double[padre.actual.datacuboHigh.dataCube.Count];

            double ctP1High, ctP2High, ctP3High, ctP1Low, ctP2Low, ctP3Low;
            double A, B, C, D, E, F;

            List<double> Df, Zf, Zeff, Pef;

            int iarea;

            // iterate over every slide that falls inside the areas of interest
            // for each slide, generate a collection of random data simulating each phantom and estimate each phantom's mean on that slide
            bool slide = false;
            for (int i = 0; i < padre.actual.datacuboHigh.dataCube.Count; i++)
            {
                slide = false;
                iarea = -1;

                for (int j = 0; j < padre.actual.areasCore.Count; j++)
                {
                    // check whether this slide lies inside at least one area of interest
                    if ((i >= padre.actual.areasCore[j].ini) & (i <= padre.actual.areasCore[j].fin))
                    {
                        slide = true;
                        iarea = j;
                    }
                }

                if (slide)
                {
                    // the slide belongs to at least one area of interest, so compute the density and zeff for this slide

                    // generate the CT values for each phantom and take their mean
                    phantom1High.Samples(temp);
                    stats = new MathNet.Numerics.Statistics.DescriptiveStatistics(temp);
                    ctP1High = stats.Mean;

                    phantom2High.Samples(temp);
                    stats = new MathNet.Numerics.Statistics.DescriptiveStatistics(temp);
                    ctP2High = stats.Mean;

                    phantom3High.Samples(temp);
                    stats = new MathNet.Numerics.Statistics.DescriptiveStatistics(temp);
                    ctP3High = stats.Mean;

                    phantom1Low.Samples(temp);
                    stats = new MathNet.Numerics.Statistics.DescriptiveStatistics(temp);
                    ctP1Low = stats.Mean;

                    phantom2Low.Samples(temp);
                    stats = new MathNet.Numerics.Statistics.DescriptiveStatistics(temp);
                    ctP2Low = stats.Mean;

                    phantom3Low.Samples(temp);
                    stats = new MathNet.Numerics.Statistics.DescriptiveStatistics(temp);
                    ctP3Low = stats.Mean;

                    // solve the linear system to obtain the constants A,B,C,D,E,F
                    var matriz = MathNet.Numerics.LinearAlgebra.Matrix<double>.Build.DenseOfArray(new double[,] { { ctP1Low, ctP1High, 1 }, { ctP2Low, ctP2High, 1 }, { ctP3Low, ctP3High, 1 } });
                    var sol = MathNet.Numerics.LinearAlgebra.Vector<double>.Build.Dense(new double[] { padre.actual.phantom1.densidad, padre.actual.phantom2.densidad, padre.actual.phantom3.densidad });
                    var x = matriz.Solve(sol);

                    A = x[0];
                    B = x[1];
                    C = x[2];

                    sol = MathNet.Numerics.LinearAlgebra.Vector<double>.Build.Dense(new double[] { padre.actual.phantom1.zeff, padre.actual.phantom2.zeff, padre.actual.phantom3.zeff });
                    x = matriz.Solve(sol);

                    D = x[0];
                    E = x[1];
                    F = x[2];

                    // iterate over every voxel in the segmentation of the current i-th slide, checking that it lies inside the area of interest

                    Df = new List<double>();
                    Zf = new List<double>();
                    Zeff = new List<double>();
                    Pef = new List<double>();

                    int jkindex = 0;
                    double dx;
                    double dy;

                    double tDf, tZf, tZeff, tPef;

                    // since the data is traversed row by row, index j corresponds to the Y axis and index k to the X axis
                    for (int j = 0; j < padre.actual.datacuboHigh.widthSeg; j++)
                    {
                        for (int k = 0; k < padre.actual.datacuboHigh.widthSeg; k++)
                        {
                            // compute the distance from position (j,k) to the center of the area of interest
                            // if the distance is less than the radius the voxel is included in the calculation, otherwise it is skipped

                            dx = k - padre.actual.areasCore[iarea].x;
                            dx = dx * dx;
                            dy = j - padre.actual.areasCore[iarea].y;
                            dy = dy * dy;
                            if (Math.Sqrt(dx + dy) <= padre.actual.datacuboHigh.widthSeg)
                            {
                                // coordinate (j,k) is inside the area of interest
                                // compute the static properties

                                tDf = A * padre.actual.datacuboLow.dataCube[i].segCore[jkindex] + B * padre.actual.datacuboHigh.dataCube[i].segCore[jkindex] + C;
                                Df.Add(tDf);

                                tZf = D * padre.actual.datacuboLow.dataCube[i].segCore[jkindex] + E * padre.actual.datacuboHigh.dataCube[i].segCore[jkindex] + F;
                                Zf.Add(tZf);

                                //tZeff = Math.Pow(Math.Pow((tZf / (0.9342 * tDf + 0.1759)), 10), 1 / 36);
                                tZeff = Math.Pow((tZf / (0.9342 * tDf + 0.1759)), 1/3.6);
                                Zeff.Add(tZeff);

                                tPef = Math.Pow(Math.Pow((tZeff / 10), 36), 0.1);
                                Pef.Add(tPef);
                            }

                            jkindex++;
                        }
                    }

                    stats = new MathNet.Numerics.Statistics.DescriptiveStatistics(Df);
                    Dfm[i] = stats.Mean;

                    stats = new MathNet.Numerics.Statistics.DescriptiveStatistics(Zeff);
                    Zfme[i] = stats.Mean;

                    stats = new MathNet.Numerics.Statistics.DescriptiveStatistics(Pef);
                    Pefm[i] = stats.Mean;
                }
                else
                {
                    // fill the density and zeff vectors with -1... a value of -1 means the slide is not plotted
                    this.Dfm[i] = -1;
                    this.Zfme[i] = -1;
                    this.Pefm[i] = -1;
                }
            }

            DateTime fin = DateTime.Now;
        }
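
Reading the arithmetic back out of the voxel loop (a summary of the code, with symbols named after the local variables): the constants A..F come from solving the 3x3 system built from the three phantoms' mean low/high CT values against their known density and zeff, and each voxel inside an area of interest is then mapped as

$$\begin{aligned}
D_f &= A\,CT_{low} + B\,CT_{high} + C\\
Z_f &= D\,CT_{low} + E\,CT_{high} + F\\
Z_{eff} &= \left(\frac{Z_f}{0.9342\,D_f + 0.1759}\right)^{1/3.6}\\
P_{ef} &= \left(\frac{Z_{eff}}{10}\right)^{3.6}
\end{aligned}$$

The Dfm, Zfme, and Pefm curves store the per-slide means of these voxel values, with -1 marking slides that fall outside every area of interest.
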