/// <summary>
/// Runs the weighted filter bank over a batch of timestamped measurements,
/// yielding the evaluated prediction for each measurement time. The per-filter
/// weights are re-normalised after every update so that the next prediction is
/// blended according to how well each filter matched the last measurement.
/// </summary>
/// <param name="measurements">Timestamped scalar measurements, assumed ordered by time.</param>
/// <returns>For each measurement time, the evaluated (value, variance-like) pairs per filter.</returns>
public IEnumerable <KeyValuePair <DateTime, Tuple <double, double>[]> > BatchRun(IEnumerable <KeyValuePair <DateTime, double> > measurements) {
    //kfs = KalmanFilterWrapperFactory.BuildMany((1, 300), (1, 2)).ToArray();
    //dfs = kfs.Select(_ => DiscreteFactory.Build(_.Size)).ToArray();
    //List<Tuple<DateTime, Tuple<double[], double[]>>> u = new List<Tuple<DateTime, Tuple<double[],double[]>>>();
    // NOTE(review): `measurements` is enumerated twice (First() here, ToMatrices() below) —
    // safe for a list, but a lazy/one-shot sequence would misbehave; TODO confirm callers.
    DateTime dt = measurements.First().Key;
    // Seed every filter with a tiny, equal weight so no filter starts dominant.
    List <Matrix <double> > nweights = Enumerable.Range(0, Filters.Count()).Select(_ => Matrix <double> .Build.DenseOfRowArrays(new double[] { 0.00000000001d })).ToList();
    foreach (var meas in measurements.ToMatrices()) {
        // Time since the previous measurement drives the prediction step.
        TimeSpan ts = meas.Key - dt;
        dt = meas.Key;
        // Predict first (using last round's normalised weights), then update with
        // the new measurement — the yielded evaluation is therefore a-priori.
        var prd = PredictWeighted(ts, dt, nweights);
        var eval = Evaluate(prd).ToArray();
        Update(ts, meas.Value);
        // Re-weight the filters by how closely each tracked this measurement,
        // then normalise so the weights sum to a valid blend for the next step.
        var weights = GetDifferences(meas.Value);
        nweights = Normaliser.Normalise(weights.ToList()).ToList();
        yield return(new KeyValuePair <DateTime, Tuple <double, double>[]>(dt, eval));
    }
}
/// <summary>
/// Creates a classifier that applies a trained linear SVM to fixed-size image
/// samples. All arguments are validated eagerly so a misconfigured classifier
/// fails at construction rather than at classification time.
/// </summary>
/// <param name="svm">The trained support vector machine; may not be null.</param>
/// <param name="sampleWidth">Width in pixels of each sample; must be positive.</param>
/// <param name="sampleHeight">Height in pixels of each sample; must be positive.</param>
/// <param name="blockSize">HOG block size used when extracting features; must be positive.</param>
/// <param name="normaliser">Normaliser applied to extracted features; may not be null.</param>
public SvmClassifier(SupportVectorMachine <Linear> svm, int sampleWidth, int sampleHeight, int blockSize, Normaliser normaliser)
{
    // Null checks and range checks are ordered to match the parameter list so
    // the first invalid argument is the one reported.
    if (svm == null)
    {
        throw new ArgumentNullException(nameof(svm));
    }
    if (sampleWidth <= 0)
    {
        throw new ArgumentOutOfRangeException(nameof(sampleWidth));
    }
    if (sampleHeight <= 0)
    {
        throw new ArgumentOutOfRangeException(nameof(sampleHeight));
    }
    if (blockSize <= 0)
    {
        throw new ArgumentOutOfRangeException(nameof(blockSize));
    }
    if (normaliser == null)
    {
        throw new ArgumentNullException(nameof(normaliser));
    }

    _svm = svm;
    _sampleWidth = sampleWidth;
    _sampleHeight = sampleHeight;
    _blockSize = blockSize;
    _normaliser = normaliser;
}
/// <summary>
/// Runs the weighted filter bank over a live stream of timestamped measurements,
/// emitting the evaluated prediction for each tick. A null measurement value
/// means "no observation this tick": a prediction is still emitted, but the
/// filters are not updated and the blend weights are left unchanged.
/// </summary>
/// <param name="measurements">Timestamped optional scalar measurements.</param>
/// <returns>For each tick, the evaluated (value, variance-like) pairs per filter.</returns>
public IObservable <KeyValuePair <DateTime, Tuple <double, double>[]> > Run(IObservable <KeyValuePair <DateTime, double?> > measurements) {
    //kfs = KalmanFilterWrapperFactory.BuildMany((1, 300), (1, 2)).ToArray();
    //dfs = kfs.Select(_ => DiscreteFactory.Build(_.Size)).ToArray();
    //List<Tuple<DateTime, Tuple<double[], double[]>>> u = new List<Tuple<DateTime, Tuple<double[],double[]>>>();
    //DateTime dt = measurements.First().Item1;
    // Seed every filter with a tiny, equal weight so no filter starts dominant.
    // NOTE: nweights is captured and mutated by the Select closure — this operator
    // is stateful and assumes a single, serialised subscription.
    List <Matrix <double> > nweights = Enumerable.Range(0, Filters.Count()).Select(_ => Matrix <double> .Build.DenseOfRowArrays(new double[] { 0.00000000001d })).ToList();
    return(measurements.IncrementalTimeOffsets().Select(meas => {
        // meas.Key.Item1 = absolute time, meas.Key.Item2 = elapsed since previous tick.
        var prd = PredictWeighted(meas.Key.Item2, meas.Key.Item1, nweights);
        var eval = Evaluate(prd).ToArray();
        if (meas.Value != null) {
            // Fix: the original re-checked meas.Value for null here and built a null
            // matrix in that (unreachable) branch — the guard above already ensures a value.
            var mtrx = Matrix <double> .Build.DenseOfColumnArrays(new double[] { meas.Value.Value });
            Update(meas.Key.Item2, mtrx);
            // Re-weight the filters by tracking error and normalise for the next blend.
            var weights = GetDifferences(mtrx);
            nweights = Normaliser.Normalise(weights.ToList()).ToList();
        }
        return new KeyValuePair <DateTime, Tuple <double, double>[]>(meas.Key.Item1, eval);
    }));
}
/// <summary>
/// Asserts that the normaliser leaves a unit value at its configured magnitude:
/// normalising 1 must yield exactly 10 raised to the normaliser's exponent.
/// Returns true so it can be used inline as a predicate.
/// </summary>
private static Boolean IsNoOp(Normaliser normaliser)
{
    Double result = normaliser.Normalise <Linear>(1d);

    Assert.Equal(Math.Pow(10, normaliser.Exponent), result);
    return(true);
}
/// <summary>
/// Asserts that normalising a value scales it into the expected magnitude band:
/// between 10^(exponent + 1) and 10^(exponent + 4) inclusive.
/// Returns true so it can be used inline as a predicate.
/// </summary>
private static Boolean IsMultiply(Normaliser normaliser)
{
    const Double input = 2d;

    Double result = normaliser.Normalise <Linear>(input);
    Double lowerBound = Math.Pow(10, normaliser.Exponent + 1);
    Double upperBound = Math.Pow(10, normaliser.Exponent + 4);

    Assert.InRange(result, lowerBound, upperBound);
    return(true);
}
//public void Update(double[] likelihood, double[] prior)
//{
//    var posterior = prior * likelihood;
//    return Normaliser.Normalise(posterior);
//}
/// <summary>
/// Re-weights each particle by the inverse of its distance to the robot and
/// replaces the particle list with one whose weights are normalised to sum to
/// a valid distribution.
/// </summary>
/// <param name="particles">Particle population; replaced with the re-weighted list.</param>
/// <param name="robot">The robot's observed position.</param>
public void Update(ref List <Particle> particles, Point robot) {
    // Fix: a particle exactly at the robot's position has distance 0, and 1/0
    // produces +Infinity, which turns every normalised weight into NaN. A tiny
    // epsilon bounds the weight while leaving all other weights effectively unchanged.
    const double epsilon = 1e-12;
    foreach (var p in particles) {
        var dX = p.X - robot.X;
        var dY = p.Y - robot.Y;
        // Closer particles get proportionally larger (unnormalised) weights.
        p.Weight = 1 / (Math.Sqrt(dX * dX + dY * dY) + epsilon);
    }
    // Normalise the raw weights and write them back onto the same particle objects.
    particles = particles.Zip(Normaliser.Normalise(particles.Select(_ => _.Weight).ToList()), (a, b) => { a.Weight = b; return(a); }).ToList();
    //particles = particles.Zip(Normaliser.Normalise(weights), (a, b) => { a.Weight = b; return a; }).ToList();
}
/// <summary>
/// Determines whether the supplied version string matches any of the accepted
/// versions, using the shared normalised string comparison.
/// </summary>
/// <param name="version">The version string to test.</param>
/// <returns>True when any entry in Versions matches; otherwise false.</returns>
public bool IsVersion(string version)
{
    // Return on the first match — equivalent to the original flag-and-loop form.
    foreach (var acceptedVersion in Versions)
    {
        if (Normaliser.StringCompare(acceptedVersion, version))
        {
            return(true);
        }
    }
    return(false);
}
/// <summary>
/// Determines whether the supplied product string matches any of the accepted
/// products, using the shared normalised string comparison.
/// </summary>
/// <param name="product">The product string to test.</param>
/// <returns>True when any entry in Products matches; otherwise false.</returns>
public bool IsProduct(string product)
{
    // Any() short-circuits on the first match, exactly like the original
    // flag-controlled counter loop.
    return(Products.Any(candidate => Normaliser.StringCompare(candidate, product)));
}
/// <summary>
/// Determines whether the given string matches any entry in the array. An entry
/// matches when it is the wildcard AllHosts value, equals the lower-cased input,
/// or equals it once prefixed with "www.".
/// </summary>
/// <param name="stringToTest">The candidate string (typically a host name).</param>
/// <param name="arrayToTest">The configured values to match against.</param>
/// <returns>True on the first match; otherwise false.</returns>
private static bool IsStringInArray(string stringToTest, string[] arrayToTest)
{
    // Lower-case once up front using the application's culture so every
    // comparison below sees the same normalised form.
    string normalised = stringToTest.ToLower(Globalisation.GetCultureInfo());

    foreach (var candidate in arrayToTest)
    {
        bool matches = Normaliser.StringCompare(candidate, AllHosts)
            || Normaliser.StringCompare(candidate, normalised)
            || Normaliser.StringCompare("www." + candidate, normalised);
        if (matches)
        {
            return(true);
        }
    }
    return(false);
}
/// <summary>
/// Trains a linear-SVM face classifier from the Caltech web faces data set.
/// Streams balanced positive/negative sample sets per source image, extracts
/// HOG features, trains via SMO, compresses the resulting machine and sanity-
/// checks it against its own training data before wrapping it in a classifier.
/// </summary>
/// <param name="caltechWebFacesSourceImageFolder">Folder containing the source images; may not be null.</param>
/// <param name="groundTruthTextFile">The ground-truth annotations file; may not be null.</param>
/// <param name="sampleWidth">Width of each training sample in pixels; must be positive.</param>
/// <param name="sampleHeight">Height of each training sample in pixels; must be positive.</param>
/// <param name="blockSize">HOG block size; must be positive.</param>
/// <param name="minimumNumberOfImagesToTrainWith">Minimum sample count required before training; must be positive.</param>
/// <param name="normaliser">Normaliser applied during feature extraction; may not be null.</param>
/// <param name="logger">Progress/timing sink; may not be null.</param>
/// <returns>A trained classifier for potential face regions.</returns>
/// <exception cref="Exception">Thrown when the data set yields fewer samples than requested.</exception>
public static IClassifyPotentialFaces TrainFromCaltechData(
    DirectoryInfo caltechWebFacesSourceImageFolder,
    FileInfo groundTruthTextFile,
    int sampleWidth,
    int sampleHeight,
    int blockSize,
    int minimumNumberOfImagesToTrainWith,
    Normaliser normaliser,
    Action <string> logger) {
    if (caltechWebFacesSourceImageFolder == null) { throw new ArgumentNullException(nameof(caltechWebFacesSourceImageFolder)); }
    if (groundTruthTextFile == null) { throw new ArgumentNullException(nameof(groundTruthTextFile)); }
    if (sampleWidth <= 0) { throw new ArgumentOutOfRangeException(nameof(sampleWidth)); }
    if (sampleHeight <= 0) { throw new ArgumentOutOfRangeException(nameof(sampleHeight)); }
    if (blockSize <= 0) { throw new ArgumentOutOfRangeException(nameof(blockSize)); }
    if (minimumNumberOfImagesToTrainWith <= 0) { throw new ArgumentOutOfRangeException(nameof(minimumNumberOfImagesToTrainWith)); }
    if (normaliser == null) { throw new ArgumentNullException(nameof(normaliser)); }
    if (logger == null) { throw new ArgumentNullException(nameof(logger)); }

    var timer = Stopwatch.StartNew();
    var trainingDataOfHogsAndIsFace = new List <Tuple <double[], bool> >();
    var numberOfImagesThatLastProgressMessageWasShownAt = 0;
    // Progress is reported in multiples of this count rather than per image.
    const int numberOfImagesToProcessBeforeShowingUpdateMessage = 20;
    foreach (var imagesFromSingleReferenceImage in ExtractPositiveAndNegativeTrainingDataFromCaltechWebFaces(sampleWidth, sampleHeight, groundTruthTextFile, caltechWebFacesSourceImageFolder)) {
        // We want to train using the same number of positive images as negative images. It's possible that we were unable to extract as many non-face regions from the source
        // image as we did face regions. In this case, discount the image and move on to the next one.
        var numberOfPositiveImagesExtracted = imagesFromSingleReferenceImage.Count(imageAndIsFaceDecision => imageAndIsFaceDecision.Item2);
        var numberOfNegativeImagesExtracted = imagesFromSingleReferenceImage.Count(imageAndIsFaceDecision => !imageAndIsFaceDecision.Item2);
        if (numberOfPositiveImagesExtracted != numberOfNegativeImagesExtracted) {
            // Dispose every bitmap from the discarded set to avoid leaking GDI handles.
            foreach (var image in imagesFromSingleReferenceImage.Select(imageAndIsFaceDecision => imageAndIsFaceDecision.Item1)) {
                image.Dispose();
            }
            continue;
        }
        // Balanced set: extract HOG features for each sample and record its label,
        // disposing each bitmap as soon as its features have been captured.
        foreach (var imageAndIsFaceDecision in imagesFromSingleReferenceImage) {
            var image = imageAndIsFaceDecision.Item1;
            var isFace = imageAndIsFaceDecision.Item2;
            trainingDataOfHogsAndIsFace.Add(Tuple.Create(
                FeatureExtractor.GetFor(image, blockSize, optionalHogPreviewImagePath: null, normaliser: normaliser).ToArray(),
                isFace
            ));
            image.Dispose();
        }
        // Round the running count down to the nearest reporting multiple so each
        // progress message is emitted at most once.
        var approximateNumberOfImagesProcessed = (int)Math.Floor((double)trainingDataOfHogsAndIsFace.Count / numberOfImagesToProcessBeforeShowingUpdateMessage) * numberOfImagesToProcessBeforeShowingUpdateMessage;
        if (approximateNumberOfImagesProcessed > numberOfImagesThatLastProgressMessageWasShownAt) {
            logger("Processed " + approximateNumberOfImagesProcessed + " images");
            numberOfImagesThatLastProgressMessageWasShownAt = approximateNumberOfImagesProcessed;
        }
        // Stop loading as soon as we have enough samples to train with.
        if (trainingDataOfHogsAndIsFace.Count >= minimumNumberOfImagesToTrainWith) {
            break;
        }
    }
    if (trainingDataOfHogsAndIsFace.Count < minimumNumberOfImagesToTrainWith) {
        throw new Exception($"After loaded all data, there are only {trainingDataOfHogsAndIsFace.Count} training images but {minimumNumberOfImagesToTrainWith} were requested");
    }
    logger("Time to load image data: " + timer.Elapsed.TotalSeconds.ToString("0.00") + "s");
    timer.Restart();

    // Train a linear SVM on the extracted HOG vectors using sequential minimal optimisation.
    var smo = new SequentialMinimalOptimization <Linear>();
    var inputs = trainingDataOfHogsAndIsFace.Select(dataAndResult => dataAndResult.Item1).ToArray();
    var outputs = trainingDataOfHogsAndIsFace.Select(dataAndResult => dataAndResult.Item2).ToArray();
    var svm =
        smo.Learn(inputs, outputs);
    logger("Time to teach SVM: " + timer.Elapsed.TotalSeconds.ToString("0.00") + "s");
    timer.Restart();

    // The SVM kernel contains lots of information from the training process which can be reduced down (from the Compress method's summary documentation: "If this machine has
    // a linear kernel, compresses all support vectors into a single parameter vector)". This additional data is of no use to use so we can safely get rid of it - this will
    // be beneficial if we decide to persist the trained SVM since they will be less data to serialise.
    svm.Compress();
    logger("Time to compress SVM: " + timer.Elapsed.TotalSeconds.ToString("0.00") + "s");
    timer.Restart();

    // Sanity-check: a non-zero loss against the training data itself usually
    // indicates a training problem, so surface it (but do not fail).
    var predicted = svm.Decide(inputs);
    var error = new ZeroOneLoss(outputs).Loss(predicted);
    if (error > 0) {
        logger("*** Generated SVM has non-zero error against training data: " + error);
    }
    logger("Time to test SVM against training data: " + timer.Elapsed.TotalSeconds.ToString("0.00") + "s");
    timer.Restart();
    return(new SvmClassifier(svm, sampleWidth, sampleHeight, blockSize, normaliser));
}
/// <summary>
/// Extracts the normalised histogram-of-gradients feature vector for an image:
/// the image is converted to grey scale, HOGs are generated per block, the
/// supplied normaliser delegate is applied, and the nine angle-bin values of
/// each block are flattened into a single lazy sequence of doubles.
/// </summary>
/// <param name="image">The source image; may not be null.</param>
/// <param name="blockSize">HOG block size in pixels; must be positive.</param>
/// <param name="optionalHogPreviewImagePath">When non-blank, a preview image of the HOGs is written here.</param>
/// <param name="normaliser">Normaliser delegate applied to the generated HOGs; may not be null and must not return null.</param>
/// <returns>The flattened feature values, nine per block.</returns>
public static IEnumerable <double> GetFor(Bitmap image, int blockSize, string optionalHogPreviewImagePath, Normaliser normaliser)
{
    if (image == null)
    {
        throw new ArgumentNullException(nameof(image));
    }
    if (blockSize <= 0)
    {
        throw new ArgumentOutOfRangeException(nameof(blockSize));
    }
    if (normaliser == null)
    {
        throw new ArgumentNullException(nameof(normaliser));
    }

    // Grey-scale the image, generate per-block HOGs and run them through the
    // caller-supplied normaliser delegate.
    var greyScalePixels = image.GetRGB().Transform(colour => colour.ToGreyScale());
    var normalisedHogs = normaliser(HistogramOfGradientGenerator.Get(greyScalePixels, blockSize));
    if (normalisedHogs == null)
    {
        throw new ArgumentException("Normaliser returned null - invalid");
    }

    // Optionally render the HOGs to disk for visual inspection.
    if (!string.IsNullOrWhiteSpace(optionalHogPreviewImagePath))
    {
        using (var previewBitmap = normalisedHogs.GeneratePreviewImage())
        {
            previewBitmap.Save(optionalHogPreviewImagePath);
        }
    }

    // Flatten each block's nine angle bins (10° through 170°) into one sequence.
    return(normalisedHogs.Enumerate()
        .SelectMany(pointAndValue =>
        {
            var histogram = pointAndValue.Item2;
            return new[]
            {
                histogram.Degrees10, histogram.Degrees30, histogram.Degrees50,
                histogram.Degrees70, histogram.Degrees90, histogram.Degrees110,
                histogram.Degrees130, histogram.Degrees150, histogram.Degrees170
            };
        }));
}