// Initializes the online-linear training state. The starting weights and bias come,
// in priority order, from: an existing predictor (warm start), the comma-separated
// 'InitialWeights' argument, a random draw scaled by 'InitWtsDiameter', or zeros
// (dense for small feature counts, empty/sparse otherwise).
protected TrainStateBase(IChannel ch, int numFeatures, LinearPredictor predictor, OnlineLinearTrainer <TTransformer, TModel> parent)
            {
                Contracts.CheckValue(ch, nameof(ch));
                ch.Check(numFeatures > 0, "Cannot train with zero features!");
                ch.AssertValueOrNull(predictor);
                ch.AssertValue(parent);
                // A freshly constructed state must not have been advanced yet.
                ch.Assert(Iteration == 0);
                ch.Assert(Bias == 0);

                ParentHost = parent.Host;

                ch.Trace("{0} Initializing {1} on {2} features", DateTime.UtcNow, parent.Name, numFeatures);

                // We want a dense vector, to prevent memory creation during training
                // unless we have a lot of features.
                if (predictor != null)
                {
                    // Warm start: copy the existing model's weights and bias.
                    predictor.GetFeatureWeights(ref Weights);
                    VBufferUtils.Densify(ref Weights);
                    Bias = predictor.Bias;
                }
                else if (!string.IsNullOrWhiteSpace(parent.Args.InitialWeights))
                {
                    ch.Info("Initializing weights and bias to " + parent.Args.InitialWeights);
                    string[] weightStr = parent.Args.InitialWeights.Split(',');
                    // Expect one value per feature plus a trailing intercept.
                    if (weightStr.Length != numFeatures + 1)
                    {
                        throw ch.Except(
                                  "Could not initialize weights from 'initialWeights': expecting {0} values to initialize {1} weights and the intercept",
                                  numFeatures + 1, numFeatures);
                    }

                    // Parse with the invariant culture so the format is machine-stable.
                    var weightValues = new float[numFeatures];
                    for (int i = 0; i < numFeatures; i++)
                    {
                        weightValues[i] = Float.Parse(weightStr[i], CultureInfo.InvariantCulture);
                    }
                    Weights = new VBuffer <float>(numFeatures, weightValues);
                    Bias    = Float.Parse(weightStr[numFeatures], CultureInfo.InvariantCulture);
                }
                else if (parent.Args.InitWtsDiameter > 0)
                {
                    // Random initialization centered at zero, scaled by the requested
                    // diameter; the bias is drawn after all weights, so the order of
                    // Rand.NextSingle() calls matters for reproducibility.
                    var weightValues = new float[numFeatures];
                    for (int i = 0; i < numFeatures; i++)
                    {
                        weightValues[i] = parent.Args.InitWtsDiameter * (parent.Host.Rand.NextSingle() - (Float)0.5);
                    }
                    Weights = new VBuffer <float>(numFeatures, weightValues);
                    Bias    = parent.Args.InitWtsDiameter * (parent.Host.Rand.NextSingle() - (Float)0.5);
                }
                else if (numFeatures <= 1000)
                {
                    // Small feature space: zero-initialized dense vector.
                    Weights = VBufferUtils.CreateDense <Float>(numFeatures);
                }
                else
                {
                    // Large feature space: start empty/sparse to save memory.
                    Weights = VBufferUtils.CreateEmpty <Float>(numFeatures);
                }
                WeightsScale = 1;
            }
Esempio n. 2
0
            // Extends the base training state with averaging support: caches the parent's
            // configuration and, when averaging is enabled, seeds TotalWeights with the
            // initial weight vector.
            private protected AveragedTrainStateBase(IChannel ch, int numFeatures, LinearPredictor predictor, AveragedLinearTrainer <TTransformer, TModel> parent)
                : base(ch, numFeatures, predictor, parent)
            {
                // Snapshot configuration first; these do not depend on the weight layout.
                _args = parent.Args;
                _loss = parent.LossFunction;
                _resetWeightsAfterXExamples = parent.Args.ResetWeightsAfterXExamples ?? 0;

                // Mirror what the setters would do if the user enabled averaging directly.
                Averaged = parent.Args.Averaged;
                if (!Averaged)
                {
                    // Without averaging the weights are never accumulated into another
                    // vector, so a dense representation is strictly advantageous.
                    VBufferUtils.Densify(ref Weights);
                }
                else
                {
                    // A tolerance-based averaging schedule reads the weights densely.
                    if (parent.Args.AveragedTolerance > 0)
                        VBufferUtils.Densify(ref Weights);
                    // Seed the running total with the initial weights.
                    Weights.CopyTo(ref TotalWeights);
                }

                Gain = 1;
            }
Esempio n. 3
0
        // Averaged-trainer initialization: validates the argument combination and
        // replicates the effect of the 'Averaged' setter at init time.
        protected override void InitCore(IChannel ch, int numFeatures, LinearPredictor predictor)
        {
            base.InitCore(ch, numFeatures, predictor);

            // Lazy updates defer weight changes and are incompatible with any form of
            // recency gain (parentheses added for clarity; precedence is unchanged).
            Contracts.Check(!Args.DoLazyUpdates || (!Args.RecencyGainMulti && Args.RecencyGain == 0),
                            "Cannot have both recency gain and lazy updates.");

            // Do what the 'Averaged' setter would have done if set by the user.
            if (!Args.Averaged)
            {
                // Dense weights are advantageous when they are never added into
                // another vector on each update.
                VBufferUtils.Densify(ref Weights);
            }
            else
            {
                if (Args.AveragedTolerance > 0)
                    VBufferUtils.Densify(ref Weights);
                // Seed the running total with the initial weights.
                Weights.CopyTo(ref TotalWeights);
            }
            Gain = 1;
        }
Esempio n. 4
0
        // Shared initialization for online linear trainers: validates state, records
        // the feature count, and seeds Weights/Bias from (in priority order) a
        // warm-start predictor, the 'InitialWeights' string, a random diameter,
        // or zeros.
        protected virtual void InitCore(IChannel ch, int numFeatures, LinearPredictor predictor)
        {
            Contracts.Check(numFeatures > 0, "Can't train with zero features!");
            Contracts.Check(NumFeatures == 0, "Can't re-use trainer!");
            Contracts.Assert(Iteration == 0);
            Contracts.Assert(Bias == 0);

            ch.Trace("{0} Initializing {1} on {2} features", DateTime.UtcNow, Name, numFeatures);
            NumFeatures = numFeatures;

            // Keep the vector dense to avoid allocations during training, unless the
            // feature space is very large.
            // REVIEW: make a setting
            if (predictor != null)
            {
                // Warm start: take weights and bias from the existing model.
                predictor.GetFeatureWeights(ref Weights);
                VBufferUtils.Densify(ref Weights);
                Bias = predictor.Bias;
            }
            else if (!string.IsNullOrWhiteSpace(Args.InitialWeights))
            {
                ch.Info("Initializing weights and bias to " + Args.InitialWeights);
                string[] tokens = Args.InitialWeights.Split(',');
                // One value per feature plus a trailing intercept is required.
                if (tokens.Length != NumFeatures + 1)
                {
                    throw Contracts.Except(
                              "Could not initialize weights from 'initialWeights': expecting {0} values to initialize {1} weights and the intercept",
                              NumFeatures + 1, NumFeatures);
                }

                Weights = VBufferUtils.CreateDense <Float>(NumFeatures);
                for (int index = 0; index < NumFeatures; index++)
                    Weights.Values[index] = Float.Parse(tokens[index], CultureInfo.InvariantCulture);
                Bias = Float.Parse(tokens[NumFeatures], CultureInfo.InvariantCulture);
            }
            else if (Args.InitWtsDiameter > 0)
            {
                // Random initialization centered at zero, scaled by the requested
                // diameter; bias is drawn after all the weights.
                Weights = VBufferUtils.CreateDense <Float>(NumFeatures);
                for (int index = 0; index < NumFeatures; index++)
                    Weights.Values[index] = Args.InitWtsDiameter * (Host.Rand.NextSingle() - (Float)0.5);
                Bias = Args.InitWtsDiameter * (Host.Rand.NextSingle() - (Float)0.5);
            }
            else if (NumFeatures <= 1000)
            {
                // Small feature space: zero-initialized dense vector.
                Weights = VBufferUtils.CreateDense <Float>(NumFeatures);
            }
            else
            {
                // Large feature space: start empty/sparse to save memory.
                Weights = VBufferUtils.CreateEmpty <Float>(NumFeatures);
            }
            WeightsScale = 1;
        }
Esempio n. 5
0
        // Runs the linear predictor over the test image and verifies the predicted
        // image keeps the source dimensions and the 256-color (8-bit) mode.
        public void testPredict()
        {
            float[] coefficients = { 0.5f, 0.5f };
            var     outputDir    = "..\\..\\..\\TestImages\\";
            var     outputName   = "lenna_gray_predicated";

            var      predictor = new LinearPredictor(image, outputDir, outputName, coefficients);
            BmpImage predicted = predictor.predicate();

            // Prediction must not change the geometry or the color mode.
            Assert.Equal(predicted.Data.Width, image.Data.Width);
            Assert.Equal(predicted.Data.Height, image.Data.Height);
            Assert.Equal(BitmapColorMode.TwoFiftySixColors, predicted.Data.ColorMode);
        }
Esempio n. 6
0
        // SVM-specific initialization on top of the base: optional bias removal,
        // densification when not warm-starting, and the batch-update scratch buffer.
        protected override void InitCore(IChannel ch, int numFeatures, LinearPredictor predictor)
        {
            base.InitCore(ch, numFeatures, predictor);

            // A bias-free model forces the intercept back to zero after base init.
            if (Args.NoBias)
                Bias = 0;

            // When there is no warm-start predictor, dense weights update faster.
            if (predictor == null)
                VBufferUtils.Densify(ref Weights);

            // Empty scratch buffer reused to accumulate each batch's weight update.
            _weightsUpdate = VBufferUtils.CreateEmpty <Float>(numFeatures);
        }
Esempio n. 7
0
        // End-to-end check with one indicator: the target is the indicator shifted by
        // 10 days, so a correlation analysis should find a perfect pair and the trained
        // linear predictor should reproduce the indicator's increments exactly
        // (within 'tolerance').
        public void LinearPredictor1Indicator()
        {
            TimeSeries indicator = TimeSeries.CreateDailySinusoidalTimeSeries(1, 2, 0, new DateTime(2000, 1, 1), new DateTime(2000, 12, 31));

            indicator.Name = "INDICATOR";
            // Target is the same series offset 10 days into the future.
            TimeSeries target = indicator.OffsetBy(new TimeSpan(10, 0, 0, 0));

            target.Name = "TARGET";
            // Scan daily spans from 10 days in the past to 10 days in the future.
            CorrelationAnalysisParameters parameters = new CorrelationAnalysisParameters()
            {
                Span = new TimeSpan(1, 0, 0, 0),
                NumberOfSpansInthePast   = 10,
                NumberOfSpansIntheFuture = -10,
            };
            IEnumerable <TemporalGapTuple> analysis = TimeSeriesCorrelation.DoCorrelationAnalysis(indicator, target, parameters);
            // Keep only the (near-)perfectly correlated indicator/target pairs.
            TemporalGapGroup g = new TemporalGapGroup()
            {
                Pairs = analysis
                        .Where(a => Math.Abs(a.Correlation - 1) < tolerance)
                        .ToList(),
            };
            TimeSpan        predictionSpan  = new TimeSpan(5, 0, 0, 0);
            DataTable       table           = g.GetPredictionTable(target.Name, predictionSpan);
            LinearPredictor linearPredictor = new LinearPredictor();

            linearPredictor.Learn(
                table: table,
                trainingLabel: indicator.Name,
                targetLabel: target.Name);

            // Known construction lag between indicator and target.
            TimeSpan correlationSpan = new TimeSpan(10, 0, 0, 0);

            // For every day with all required values present, the model's prediction
            // for the indicator increment must match the actual increment.
            Assert.IsTrue(target.Dates
                          .Where(day => target.ContainsValueAt(day.Add(predictionSpan)) &&
                                 indicator.ContainsValueAt(day.Add(-correlationSpan)) &&
                                 indicator.ContainsValueAt(day.Add(-correlationSpan).Add(predictionSpan)))
                          .All(day =>
            {
                double incrementIndicator = indicator[day.Add(-correlationSpan).Add(predictionSpan)] - indicator[day.Add(-correlationSpan)];
                DataRow row         = table.NewRow();
                row[indicator.Name] = incrementIndicator;
                return(Math.Abs((double)linearPredictor.Predict(row) - incrementIndicator) < tolerance);
            }));
        }
Esempio n. 8
0
            // Linear SVM training state: snapshots the SVM hyper-parameters and prepares
            // the per-batch update buffer on top of the shared base initialization.
            public TrainState(IChannel ch, int numFeatures, LinearPredictor predictor, LinearSvm parent)
                : base(ch, numFeatures, predictor, parent)
            {
                // Copy hyper-parameters out of the parent's argument object.
                _lambda            = parent.Args.Lambda;
                _batchSize         = parent.Args.BatchSize;
                _performProjection = parent.Args.PerformProjection;
                _noBias            = parent.Args.NoBias;

                // A bias-free model resets whatever intercept the base produced.
                if (_noBias)
                    Bias = 0;

                // Densify only when starting from scratch (no warm-start predictor).
                if (predictor == null)
                    VBufferUtils.Densify(ref Weights);

                // Empty scratch buffer reused to accumulate each batch's update.
                _weightsUpdate = VBufferUtils.CreateEmpty <Float>(numFeatures);
            }
Esempio n. 9
0
 // Creates this trainer's concrete training state, bound to this trainer instance.
 private protected override TrainStateBase MakeState(IChannel ch, int numFeatures, LinearPredictor predictor)
     => new TrainState(ch, numFeatures, predictor, this);
Esempio n. 10
0
 // Factory hook: each concrete online trainer supplies its own TrainStateBase subclass.
 private protected abstract TrainStateBase MakeState(IChannel ch, int numFeatures, LinearPredictor predictor);
        // Trains a linear model with the native SymSGD learner.
        // Weights are warm-started from 'predictor' (densified) when available,
        // otherwise zero-initialized. The managed State object is pinned so the native
        // code can read/write it; training either runs all iterations at once when the
        // data fits in memory, or streams batches with multiple passes per batch.
        // Fix: removed the unused local 'positiveInstanceWeight' — the same property is
        // already read into 'piw' below.
        private TPredictor TrainCore(IChannel ch, RoleMappedData data, LinearPredictor predictor, int weightSetCount)
        {
            int numFeatures   = data.Schema.Feature.Type.VectorSize;
            var cursorFactory = new FloatLabelCursor.Factory(data, CursOpt.Label | CursOpt.Features | CursOpt.Weight);
            // NOTE(review): numThreads is hard-coded to 1, so the check below can never
            // fail; presumably multi-threading is intended to be wired in later.
            int numThreads    = 1;

            ch.CheckUserArg(numThreads > 0, nameof(_args.NumberOfThreads),
                            "The number of threads must be either null or a positive integer.");

            VBuffer <float> weights = default;
            float           bias    = 0.0f;

            // Warm start from an existing predictor when available; otherwise zeros.
            if (predictor != null)
            {
                predictor.GetFeatureWeights(ref weights);
                VBufferUtils.Densify(ref weights);
                bias = predictor.Bias;
            }
            else
            {
                weights = VBufferUtils.CreateDense <float>(numFeatures);
            }

            // Reference: Parasail. SymSGD.
            // When a parameter is unset, the native learner tunes it itself.
            bool tuneLR = _args.LearningRate == null;
            var  lr     = _args.LearningRate ?? 1.0f;

            bool tuneNumLocIter = (_args.UpdateFrequency == null);
            var  numLocIter     = _args.UpdateFrequency ?? 1;

            var l2Const = _args.L2Regularization;
            var piw     = _args.PositiveInstanceWeight;

            // This is state of the learner that is shared with the native code.
            State    state         = new State();
            GCHandle stateGCHandle = default;

            try
            {
                // Pin the state so the native side can access it without the GC moving it.
                stateGCHandle = GCHandle.Alloc(state, GCHandleType.Pinned);

                state.TotalInstancesProcessed = 0;
                using (InputDataManager inputDataManager = new InputDataManager(this, cursorFactory, ch))
                {
                    bool shouldInitialize = true;
                    using (var pch = Host.StartProgressChannel("Preprocessing"))
                        inputDataManager.LoadAsMuchAsPossible();

                    int iter = 0;
                    if (inputDataManager.IsFullyLoaded)
                    {
                        ch.Info("Data fully loaded into memory.");
                    }
                    using (var pch = Host.StartProgressChannel("Training"))
                    {
                        if (inputDataManager.IsFullyLoaded)
                        {
                            pch.SetHeader(new ProgressHeader(new[] { "iterations" }),
                                          entry => entry.SetProgress(0, state.PassIteration, _args.NumberOfIterations));
                            // If fully loaded, call the SymSGDNative and do not come back until learned for all iterations.
                            Native.LearnAll(inputDataManager, tuneLR, ref lr, l2Const, piw, weights.Values, ref bias, numFeatures,
                                            _args.NumberOfIterations, numThreads, tuneNumLocIter, ref numLocIter, _args.Tolerance, _args.Shuffle, shouldInitialize, stateGCHandle);
                            shouldInitialize = false;
                        }
                        else
                        {
                            pch.SetHeader(new ProgressHeader(new[] { "iterations" }),
                                          entry => entry.SetProgress(0, iter, _args.NumberOfIterations));

                            // Since we loaded data in batch sizes, multiple passes over the loaded data is feasible.
                            int numPassesForABatch = inputDataManager.Count / 10000;
                            while (iter < _args.NumberOfIterations)
                            {
                                // We want to train on the final passes thoroughly (without learning on the same batch multiple times)
                                // This is for fine tuning the AUC. Experimentally, we found that 1 or 2 passes is enough
                                int numFinalPassesToTrainThoroughly = 2;
                                // We also do not want to learn for more passes than what the user asked
                                int numPassesForThisBatch = Math.Min(numPassesForABatch, _args.NumberOfIterations - iter - numFinalPassesToTrainThoroughly);
                                // If all of this leaves us with 0 passes, then set numPassesForThisBatch to 1
                                numPassesForThisBatch = Math.Max(1, numPassesForThisBatch);
                                state.PassIteration   = iter;
                                Native.LearnAll(inputDataManager, tuneLR, ref lr, l2Const, piw, weights.Values, ref bias, numFeatures,
                                                numPassesForThisBatch, numThreads, tuneNumLocIter, ref numLocIter, _args.Tolerance, _args.Shuffle, shouldInitialize, stateGCHandle);
                                shouldInitialize = false;

                                // Check if we are done with going through the data
                                if (inputDataManager.FinishedTheLoad)
                                {
                                    iter += numPassesForThisBatch;
                                    // Check if more passes are left
                                    if (iter < _args.NumberOfIterations)
                                    {
                                        inputDataManager.RestartLoading(_args.Shuffle, Host);
                                    }
                                }

                                // If more passes are left, load as much as possible
                                if (iter < _args.NumberOfIterations)
                                {
                                    inputDataManager.LoadAsMuchAsPossible();
                                }
                            }
                        }

                        // Maps back the dense features that are mislocated
                        if (numThreads > 1)
                        {
                            Native.MapBackWeightVector(weights.Values, stateGCHandle);
                        }
                        Native.DeallocateSequentially(stateGCHandle);
                    }
                }
            }
            finally
            {
                // Always unpin the shared state, even if training throws.
                if (stateGCHandle.IsAllocated)
                {
                    stateGCHandle.Free();
                }
            }
            return(CreatePredictor(weights, bias));
        }
        // Feeds a 12-sample, roughly linear position series into the predictor and
        // dumps the estimator's internal state after every update.
        // Fixes over the original: the twelve copy-pasted stanzas are collapsed into a
        // loop; the inconsistent "ArryPointer" label in the first stanza is unified to
        // "ArrayPointer"; and the array dumps no longer pass "[{0}]" plus a string to
        // Debug.WriteLine — that bound to the (message, category) overload and never
        // formatted the placeholder.
        public void TestLinearPrediction()
        {
            double[] positions =
            {
                320.00, 327.00, 334.00, 341.00, 349.00, 356.00,
                364.00, 371.00, 379.00, 388.00, 394.00, 402.00,
            };

            LinearPredictor linearPredictor = new LinearPredictor();

            foreach (double position in positions)
            {
                linearPredictor.UpdateEstimate(position);
                DumpPredictorState(linearPredictor);
            }
        }

        // Writes the predictor's current estimate and internals to the debug listeners.
        private static void DumpPredictorState(LinearPredictor linearPredictor)
        {
            Debug.WriteLine("EstimatedPos. " + linearPredictor.EstimatedPosition.ToString());
            Debug.WriteLine("Error " + linearPredictor._error.ToString());
            Debug.WriteLine("newCoef : " + linearPredictor._newCoeff.ToString());
            Debug.WriteLine("ArrayPointer: " + linearPredictor._arrayPointer.ToString());
            Debug.WriteLine("PredCoeff: [" + string.Join(", ", linearPredictor._a) + "]");
            Debug.WriteLine("PrevRead : [" + string.Join(", ", linearPredictor._x) + "]");
            Debug.WriteLine("");
        }
 // Averaged perceptron training state; all behavior comes from the base class.
 public TrainState(IChannel ch, int numFeatures, LinearPredictor predictor, AveragedPerceptronTrainer parent)
     : base(ch, numFeatures, predictor, parent)
 {
 }
 // Online gradient descent training state; all behavior comes from the base class.
 public TrainState(IChannel ch, int numFeatures, LinearPredictor predictor, OnlineGradientDescentTrainer parent)
     : base(ch, numFeatures, predictor, parent)
 {
 }
Esempio n. 15
0
        // End-to-end check with two indicators: the target is built as ind1 + 2*ind2
        // shifted by 'correlationSpan', so an intercept-free linear model should recover
        // the target increments from the indicator increments (within 'tolerance').
        public void LinearPredictor2Indicators()
        {
            DateTime   firstDate = new DateTime(2000, 1, 1);
            DateTime   lastDate  = new DateTime(2000, 12, 31);
            TimeSeries ind1      = TimeSeries.CreateDailyLinearTimeSeries(0, 100, firstDate, lastDate);

            ind1.Name = "ind1";
            TimeSeries ind2 = TimeSeries.CreateDailySinusoidalTimeSeries(1, 2 * Math.PI / 365, 0, firstDate, lastDate);

            ind2.Name = "ind2";
            List <TimeSeries> indicators = new List <TimeSeries>()
            {
                ind1, ind2
            };
            TimeSpan   predictionSpan  = new TimeSpan(5, 0, 0, 0);
            TimeSpan   correlationSpan = new TimeSpan(10, 0, 0, 0);
            // Target is a known linear combination of the indicators, lagged by the
            // correlation span.
            TimeSeries target          = ind1
                                         .Sum(ind2.MultiplyBy(2))
                                         .OffsetBy(correlationSpan);

            target.Name = "target";
            // Pair each indicator with the target at the known temporal gap.
            TemporalGapGroup g = new TemporalGapGroup()
            {
                Pairs = new List <TemporalGapTuple>()
                {
                    new TemporalGapTuple()
                    {
                        Indicator   = ind1,
                        Target      = target,
                        TemporalGap = correlationSpan,
                    },
                    new TemporalGapTuple()
                    {
                        Indicator   = ind2,
                        Target      = target,
                        TemporalGap = correlationSpan,
                    },
                }
            };
            DataTable       table           = g.GetPredictionTable(target.Name, predictionSpan);
            // No intercept: the target was constructed without a constant offset.
            LinearPredictor linearPredictor = new LinearPredictor()
            {
                UseIntercept = false
            };

            linearPredictor.Learn(
                table: table,
                trainingLabels: new List <string>()
            {
                ind1.Name, ind2.Name
            },
                targetLabel: target.Name);

            // For every day where all required values exist, the predicted target
            // increment must match the realized increment within 'tolerance'.
            Assert.IsTrue(target.Dates
                          .Where(day => target.ContainsValueAt(day.Add(predictionSpan)) &&
                                 indicators.All(ind => ind.ContainsValueAt(day.Add(-correlationSpan)) &&
                                                ind.ContainsValueAt(day.Add(-correlationSpan).Add(predictionSpan))))
                          .All(day =>
            {
                DataRow row = table.NewRow();
                indicators
                .ForEach(ind => row[ind.Name]   = ind[day.Add(-correlationSpan).Add(predictionSpan)] - ind[day.Add(-correlationSpan)]);
                double predictedTargetIncrement = (double)linearPredictor.Predict(row);
                double realTargetIncrement      = target[day.Add(predictionSpan)] - target[day];
                return(Math.Abs(predictedTargetIncrement - realTargetIncrement) < tolerance);
            }));
        }
Esempio n. 16
0
        // Interactive console menu for the image-compression module. Only 8-bit
        // grayscale images are supported; loops until a valid menu choice is handled.
        // (Comments translated to English; all user-facing console strings are
        // intentionally left unchanged.)
        private static void ModuleThree(BmpImage image)
        {
            /*
             * Image compression module:
             *  1. Lossless predictive coding + symbol coding
             *  2. Uniform quantization
             *  3. DCT transform and inverse DCT transform
             */

            int photoFormat = CheckPhoto(image);

            // Format 1 == 8-bit grayscale; other formats are rejected below.
            if (photoFormat == 1)
            {
                System.Console.WriteLine("\n检测图片为8bit灰度图像");
                System.Console.WriteLine("可选择的压缩模式:\n1.无损预测编码\n2.均匀量化\n3.DCT变换及DCT反变换\n0.返回");
                bool inputNum = false;
                do
                {
                    try
                    {
                        int num = Convert.ToInt32(System.Console.ReadLine());
                        // Accept only 0..3 (0 = return to caller without doing anything).
                        if (num < 4 && num > -1)
                        {
                            inputNum = true;
                            // 1. Lossless predictive coding
                            if (num == 1)
                            {
                                System.Console.WriteLine("\n输如文件保存的路径:");
                                string          CompressPath  = System.Console.ReadLine();
                                string          savedFilePath = Path.GetDirectoryName(CompressPath);
                                string          fileName      = Path.GetFileNameWithoutExtension(CompressPath);
                                LinearPredictor lp            = new LinearPredictor(image, savedFilePath, fileName);
                                lp.predicate();
                                //system.console.writeline("{0},{1}", savedfilepath, filename);
                                System.Console.WriteLine("***************保存中***************");
                                System.Console.WriteLine("压缩文件已保存至{0}\n", CompressPath);
                                System.Console.WriteLine("完成无损预测编码!");
                            }
                            // 2. Uniform quantization (ratio 0 selects IGS quantization)
                            else if (num == 2)
                            {
                                System.Console.WriteLine("\n压缩比(输如0为IGS量化):");
                                double CompressRatio = Convert.ToDouble(System.Console.ReadLine());
                                if (CompressRatio == 0)
                                {
                                    UniformQuantizing igs = new UniformQuantizing(image);
                                    image = igs.InverseUniformQuantizaing();
                                }
                                else
                                {
                                    UniformQuantizing uq = new UniformQuantizing(image, CompressRatio);
                                    image = uq.InverseUniformQuantizaing();
                                }
                                System.Console.WriteLine("完成均匀量化!");
                            }
                            // 3. DCT transform and inverse DCT transform
                            else if (num == 3)
                            {
                                System.Console.WriteLine("\n设置分块大小:");
                                int blockSize = Convert.ToInt32(System.Console.ReadLine());
                                var dct       = new DCTCompression(image, blockSize);
                                image = dct.GetDCTImage();
                                // Inverse DCT with all coefficients kept.
                                BmpImage reverseImage = dct.GetDCTReverseImage(1.0);

                                // Inverse DCT with 50% of the high-frequency coefficients
                                // replaced by zero.
                                BmpImage reverseHalfImage = dct.GetDCTReverseImage(2.0);

                                System.Console.WriteLine("\n把DCT反变换的结果存储至:");
                                string reverseImagePath = System.Console.ReadLine();
                                ImageFile.SaveBmpImage(reverseImage, @reverseImagePath);
                                System.Console.WriteLine("显示图片?(1 or 2)");
                                int display1 = Convert.ToInt32(System.Console.ReadLine());
                                if (display1 == 1)
                                {
                                    showPhoto(reverseImagePath);
                                }

                                System.Console.WriteLine("把DCT反变换(50%)的结果存储至:");
                                string reverseHalfImagePath = System.Console.ReadLine();
                                ImageFile.SaveBmpImage(reverseHalfImage, @reverseHalfImagePath);
                                System.Console.WriteLine("显示图片?(1 or 2)");
                                int display2 = Convert.ToInt32(System.Console.ReadLine());
                                if (display2 == 1)
                                {
                                    showPhoto(reverseHalfImagePath);
                                }
                                System.Console.WriteLine("完成DCT变换及反变换!");
                            }
                            else
                            {
                                // num == 0: return without doing anything.
                                ;
                            }
                        }
                        else
                        {
                            System.Console.Write("输入错误请重新输入:");
                        }
                    }
                    catch (OverflowException)
                    {
                        System.Console.WriteLine("err:转化的不是一个int型数据");
                    }
                    catch (FormatException)
                    {
                        System.Console.WriteLine("err:格式错误");
                    }
                    catch (ArgumentNullException)
                    {
                        System.Console.WriteLine("err:null");
                    }
                } while (inputNum == false);
            }
            else
            {
                System.Console.Write("\n图片形式不支持图像压缩哟~\n");
            }
        }