Example 1
        /// <summary>
        /// Applies this decider's feature to all of the points, storing the value in the <see cref="T:ImageDataPoint.FeatureValue"/> property.
        /// </summary>
        /// <param name="points">Points to use when applying the feature</param>
        public void ApplyFeature(List <T> points)
        {
            int count = points.Count;

            for (int i = 0; i < count; i++)
            {
                points[i].FeatureValue = _feature.Compute(points[i]);
            }
        }
Example 2
 public void ApplyFeature(List <T> points)
 {
     foreach (T point in points)
     {
         point.FeatureValue = _feature.Compute(point);
     }
 }
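Both variants above do the same work: they walk the point list and cache the decider's feature response on each point. A minimal calling sketch follows, assuming a hypothetical ImageDataPoint type exposing the FeatureValue property and a decider object exposing the ApplyFeature method shown above; LoadPoints and BuildDecider are placeholder helpers, not names from the original project.

 // Hypothetical usage; ImageDataPoint, LoadPoints and BuildDecider are illustrative names only.
 List<ImageDataPoint> points = LoadPoints();
 var decider = BuildDecider();            // some object exposing ApplyFeature(List<ImageDataPoint>)
 decider.ApplyFeature(points);            // fills FeatureValue on every point
 var firstResponse = points[0].FeatureValue;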
Example 3
        /// <summary>
        ///   Computes the factor potential function for the given parameters.
        /// </summary>
        ///
        /// <param name="previousState">The previous state in a given sequence of states.</param>
        /// <param name="currentState">The current state in a given sequence of states.</param>
        /// <param name="observations">The observation vector.</param>
        /// <param name="index">The index of the observation in the current state of the sequence.</param>
        /// <param name="outputClass">The output class label for the sequence.</param>
        /// <returns>The value of the factor potential function evaluated for the given parameters.</returns>
        ///
        public virtual double Compute(int previousState, int currentState,
                                      T[] observations, int index, int outputClass = 0)
        {
            if (outputClass != this.Index)
            {
                return(Double.NegativeInfinity);
            }

            int start = FactorParameters.Offset;
            int end   = FactorParameters.Offset + FactorParameters.Count;

            double sum = 0;

            for (int k = start; k < end; k++)
            {
                double weight = Owner.Weights[k];

                if (Double.IsNaN(weight))
                {
                    Owner.Weights[k] = weight = 0;
                }

                if (weight != 0)
                {
                    IFeature <T> feature = Owner.Features[k];

                    double value = feature.Compute(previousState, currentState,
                                                   observations, index, outputClass);

                    if (value != 0)
                    {
                        sum += weight * value;
                    }
                }

                if (Double.IsNaN(sum))
                {
                    return(0);
                }

                if (Double.IsPositiveInfinity(sum))
                {
                    return(Double.MaxValue);
                }

                if (Double.IsNegativeInfinity(sum))
                {
                    return(Double.NegativeInfinity);
                }
            }

            return(sum);
        }
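The factor potential above is log-linear: apart from the NaN and overflow guards, the return value is the dot product of the factor's weight slice with the feature functions evaluated at (previousState, currentState, index). A stripped-down sketch of that core accumulation, assuming the weights and feature values have already been gathered into plain arrays (names are illustrative):

        // Sketch only: weights[] and featureValues[] stand in for Owner.Weights and
        // Owner.Features[k].Compute(...) over the factor's parameter range.
        static double Potential(double[] weights, double[] featureValues)
        {
            double sum = 0;
            for (int k = 0; k < weights.Length; k++)
            {
                sum += weights[k] * featureValues[k];
            }
            // Mirror the guard in the example: a NaN sum is treated as zero.
            return double.IsNaN(sum) ? 0 : sum;
        }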
Example 4
 private static void fillFeatureValues(IFeatureFactory <T, float[]> factory, int numFeatures, List <T> data)
 {
     _numFeatures   = numFeatures;
     _buildFeatures = new IFeature <T, float[]> [numFeatures];
     _featureValues = new float[numFeatures][];
     for (int i = 0; i < numFeatures; i++)
     {
         IFeature <T, float[]> feature = factory.Create();
         _featureValues[i] = data.Select(o => feature.Compute(o)).ToArray();
         _buildFeatures[i] = feature;
     }
 }
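This example trades memory for speed: every candidate feature is evaluated once over the whole data set, so later passes can index _featureValues[featureIndex][pointIndex] instead of calling Compute again. A self-contained sketch of the same precompute-then-index idea, with a Func<T, float> standing in for IFeature<T, float[]> (assuming the usual System, System.Collections.Generic and System.Linq usings; all names are illustrative):

 // Sketch of the caching idea; Func<T, float> stands in for IFeature<T, float[]>.
 static float[][] PrecomputeFeatureValues<T>(IReadOnlyList<Func<T, float>> features, IReadOnlyList<T> data)
 {
     var values = new float[features.Count][];
     for (int i = 0; i < features.Count; i++)
     {
         var feature = features[i];
         values[i] = data.Select(point => feature(point)).ToArray();   // one pass per feature
     }
     return values;
 }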
Example 5
        /// <summary>
        /// Test method. Two input files are required, InputFile and OutputFile. One contains the
        /// input data for the feature, the other the reference data (the output from Matlab).
        /// The input data and the Matlab output are saved using the following syntax:
        /// save('ime_datoteke.txt','varijabla','-ascii','-double','-tabs');
        /// </summary>
        /// <param name="InputFile">File with the input data</param>
        /// <param name="OutputFile">File with the reference output data</param>
        /// <param name="DataProvider">DataProvider for the IFeature</param>
        /// <param name="Feature">The feature under test</param>
        /// <param name="SubWindowLength">Window length in samples</param>
        /// <param name="SubWindowShift">Window shift in samples</param>
        /// <returns>A TestResult holding the accumulated absolute difference and a success flag</returns>
        public static TestResult ExecuteTest(string InputFile, string OutputFile, IDataProvider DataProvider, IFeature Feature, int SubWindowLength, int SubWindowShift)
        {
            List <double> InputData  = ReadFromFile(InputFile);
            List <double> OutputData = ReadFromFile(OutputFile);

            FastQueue <double> Data   = new FastQueue <double>(100000);
            List <double>      Result = new List <double>();

            Data.Enqueue(InputData.ToArray());

            while (Data.Count > SubWindowLength)
            {
                List <double> CurrentData;

                CurrentData = Data.Peek(SubWindowLength);
                Data.Delete(SubWindowShift);
                DataProvider.Data = CurrentData;

                Feature.Compute();
                Result.Add(Feature.Feature);
            }

            double absdiff = 0;

            // Compare only the overlapping range so unequal list lengths cannot index out of bounds.
            for (int i = 0; i < Result.Count && i < OutputData.Count; i++)
            {
                absdiff += Math.Abs(Result[i] - OutputData[i]);
            }

            TestResult tr = new TestResult();

            tr.AbsoluteDifference = absdiff;
            if (absdiff < 100)
            {
                tr.Success = true;
            }
            else
            {
                tr.Success = false;
            }


            return(tr);
        }
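A hypothetical driver for the test method above; the file names, the MyDataProvider and MyFeature types and the window sizes below are placeholders, not names from the original project:

            // Illustrative call; every name here is assumed.
            IDataProvider provider = new MyDataProvider();
            IFeature feature = new MyFeature(provider);
            TestResult result = ExecuteTest("input.txt", "reference.txt", provider, feature, 512, 256);
            Console.WriteLine("abs diff = {0}, success = {1}", result.AbsoluteDifference, result.Success);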
Example 6
        private static Node compute(List <T> data, int NCount, IFeatureFactory <T, float[]> factory, int numFeatures, int numThresholds, float min_rd, int min_y, int currentDepth, int maxDepth)
        {
            if (currentDepth == maxDepth - 1)
            {
                return new Node { NodeType = NodeType.Leaf };
            }

            int YCount = data.Count;

            if (NCount < YCount)
            {
                NCount = YCount;
            }

            Split best = new Split {
                Score = float.MinValue
            };

            for (int i = 0; i < numFeatures; i++)
            {
                IFeature <T, float[]> feature = factory.Create();
                var featureValues             = from point in data
                                                select feature.Compute(point);

                Split split = findBestSplit(featureValues.OrderBy(o => o).ToArray(), min_rd, min_y, NCount, numThresholds);
                if (split.Score > best.Score)
                {
                    best         = split;
                    best.Feature = feature;
                }
            }
            if (best.Feature == null)
            {
                return new Node { NodeType = NodeType.Leaf };
            }

            Node node = new Node {
                NodeType = NodeType.Branch, Feature = best.Feature, Threshold = best.Threshold
            };
            List <T> left  = new List <T>();
            List <T> right = new List <T>();

            foreach (T point in data)
            {
                if (best.Feature.Compute(point) < best.Threshold)
                {
                    left.Add(point);
                }
                else
                {
                    right.Add(point);
                }
            }

            if (left.Count == 0 || right.Count == 0)
            {
                return new Node { NodeType = NodeType.Leaf };
            }

            UpdateManager.WriteLine("{0}:{1} {2}|{3} {4}", currentDepth, best.Score, left.Count, right.Count, best.Feature);

            node.Left  = compute(left, best.NLeft, factory, numFeatures, numThresholds, min_rd, min_y, currentDepth + 1, maxDepth);
            node.Right = compute(right, NCount - best.NLeft, factory, numFeatures, numThresholds, min_rd, min_y, currentDepth + 1, maxDepth);
            return(node);
        }
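The recursive builder above grows a binary decision tree: at each node it samples numFeatures candidate features, scores thresholds with findBestSplit, partitions the data on the winning feature/threshold pair, and recurses until the depth limit or a degenerate split forces a leaf. A compact sketch of the partitioning step alone, with a Func<T, float> standing in for IFeature<T, float[]> (names are illustrative):

        // Sketch of the split step: points whose feature response falls below the threshold
        // go left, the rest go right, exactly as in the branch construction above.
        static void Partition<T>(IEnumerable<T> data, Func<T, float> feature, float threshold,
                                 List<T> left, List<T> right)
        {
            foreach (T point in data)
            {
                if (feature(point) < threshold)
                    left.Add(point);
                else
                    right.Add(point);
            }
        }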
Example 7
        private double[] gradient(T[][] observations, int[][] labels, double[] g)
        {
            var model    = Model;
            var function = model.Function;
            int states   = model.States;
            int n        = observations.Length;
            int d        = Model.Function.Weights.Length;
            int Tmax     = observations.Max(x => x.Length);
            int progress = 0;

            g.Clear();


            // Compute sequence probabilities
            Parallel.For(0, observations.Length, ParallelOptions,

                         () =>
            {
                // Create thread-local storage
                var work = new double[states + 1, states][];
                for (int j = 0; j < states + 1; j++)
                {
                    for (int k = 0; k < states; k++)
                    {
                        work[j, k] = new double[Tmax];
                    }
                }

                return(new
                {
                    bwd = new double[Tmax, states],
                    fwd = new double[Tmax, states],
                    sum1 = new double[d],
                    sum2 = new double[d],
                    work = work,
                    count = new int[] { 0 }
                });
            },

                         (i, state, local) =>
            {
                T[] x    = observations[i];
                var fwd  = local.fwd;
                var bwd  = local.bwd;
                var sum1 = local.sum1;
                var sum2 = local.sum2;
                var work = local.work;
                ForwardBackwardAlgorithm.Forward(function.Factors[0], x, fwd);
                ForwardBackwardAlgorithm.Backward(function.Factors[0], x, bwd);
                double z = partition(fwd, x);

                for (int prev = -1; prev < states; prev++)
                {
                    for (int next = 0; next < states; next++)
                    {
                        double[] Pis = work[prev + 1, next];
                        for (int t = 0; t < x.Length; t++)
                        {
                            Pis[t] = p(prev, next, x, t, fwd, bwd, function) / z;
                        }
                    }
                }

                // Compute the gradient w.r.t. each feature
                //  function in the model's potential function.

                int[] y = labels[i];

                Parallel.For(0, g.Length, ParallelOptions, k =>
                {
                    IFeature <T> feature = function.Features[k];

                    // Compute first term of the partial derivative
                    sum1[k] += feature.Compute(-1, y[0], x, 0);
                    for (int t = 1; t < x.Length; t++)
                    {
                        sum1[k] += feature.Compute(y[t - 1], y[t], x, t);
                    }

                    // Compute second term of the partial derivative
                    for (int prev = -1; prev < states; prev++)
                    {
                        for (int next = 0; next < states; next++)
                        {
                            double[] Pis = work[prev + 1, next];
                            for (int t = 0; t < Pis.Length; t++)
                            {
                                sum2[k] += feature.Compute(prev, next, x, t) * Pis[t];
                            }
                        }
                    }
                });

                local.count[0]++;
                return(local);
            },

                         (local) =>
            {
                lock (g)
                {
                    for (int k = 0; k < g.Length; k++)
                    {
                        g[k] -= (local.sum1[k] - local.sum2[k]);
                    }
                    progress += local.count[0];
                }
            }
                         );

            return(g);
        }
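The gradient routine above parallelizes over training sequences with the thread-local overload of Parallel.For: the first delegate allocates per-thread forward/backward tables and partial sums, the second accumulates one sequence's contribution into that thread-local state, and the third merges each thread's sums into g under a lock. A minimal sketch of the same three-delegate pattern, reduced to summing an array (System.Threading.Tasks; values, gate and total are illustrative names):

            // Minimal Parallel.For with thread-local state; the three delegates play the same
            // roles as the localInit / body / localFinally lambdas in the example above.
            double[] values = Enumerable.Range(0, 1000).Select(i => (double)i).ToArray();
            double total = 0;
            object gate = new object();

            Parallel.For(0, values.Length,
                () => 0.0,                                // localInit: fresh accumulator per thread
                (i, state, local) => local + values[i],   // body: add this element to the thread's sum
                local => { lock (gate) total += local; }  // localFinally: merge under a lock
            );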
Example 8
        private double[] gradient(int[][] observations, int[][] labels)
        {
            int N = observations.Length;

            var function = model.Function;
            var states   = model.States;

            double[] g = new double[function.Weights.Length];


            // Compute sequence probabilities
            var P = new double[N][,][];

            for (int i = 0; i < N; i++)
            {
                var Pi = P[i] = new double[states + 1, states][];

                int[]  x   = observations[i];
                var    fwd = ForwardBackwardAlgorithm.Forward(function, x);
                var    bwd = ForwardBackwardAlgorithm.Backward(function, x);
                double z   = partition(fwd, x);

                for (int prev = -1; prev < states; prev++)
                {
                    for (int next = 0; next < states; next++)
                    {
                        double[] Pis = new double[x.Length];
                        for (int t = 0; t < x.Length; t++)
                        {
                            Pis[t] = p(prev, next, x, t, fwd, bwd, function) / z;
                        }

                        Pi[prev + 1, next] = Pis;
                    }
                }
            }

            // Compute the gradient w.r.t. each feature
            //  function in the model's potential function.
            for (int k = 0; k < g.Length; k++)
            {
                IFeature feature = function.Features[k];

                double sum1 = 0.0, sum2 = 0.0;
                for (int i = 0; i < N; i++)
                {
                    int[] x  = observations[i];
                    int[] y  = labels[i];
                    var   Pi = P[i];

                    // Compute first term of the partial derivative
                    sum1 += feature.Compute(-1, y[0], x, 0);
                    for (int t = 1; t < x.Length; t++)
                    {
                        sum1 += feature.Compute(y[t - 1], y[t], x, t);
                    }

                    // Compute second term of the partial derivative
                    for (int prev = -1; prev < states; prev++)
                    {
                        for (int next = 0; next < states; next++)
                        {
                            double[] Pis = Pi[prev + 1, next];

                            for (int t = 0; t < Pis.Length; t++)
                            {
                                sum2 += feature.Compute(prev, next, x, t) * Pis[t];
                            }
                        }
                    }
                }

                g[k] = -(sum1 - sum2);
            }

            return(g);
        }
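This is the sequential counterpart of the previous example. Reading the loops as sums, each weight's gradient component is the observed feature count minus its expectation under the model, negated because the optimizer minimizes the negative log-likelihood; in the notation of the code above:

            g[k] = -( Σ_i Σ_t f_k(y[t-1], y[t], x_i, t)  -  Σ_i Σ_prev Σ_next Σ_t f_k(prev, next, x_i, t) * Pis[t] )

The first term is sum1, the feature counted along the labeled path, and the second is sum2, its expectation under the marginals Pis obtained from the forward-backward pass.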