Exemple #1
0
            /// <summary>
            /// Queries the MediaWiki API for the images embedded in the given page,
            /// following "imcontinue" tokens until every result batch has been fetched.
            /// </summary>
            /// <param name="pageid">Numeric page id, inserted verbatim into the query URL.</param>
            /// <returns>Accumulated pages keyed by page id.</returns>
            /// <exception cref="ApiException">Thrown when a response carries an error payload.</exception>
            internal static IDictionary <int, Page> Images(string pageid)
            {
                string url = $"{Api}?action=query&prop=images&pageids={pageid}&format=json";

                var result     = new Dictionary <int, Page>();
                var serializer = new JsonSerializer();

                // NOTE(review): .Result blocks synchronously on an async call — possible
                // deadlock under a synchronization context; consider async/await.
                var response = url.Get <Json.Imagesearch.Response>(serializer).Result;

                if (response.Error != null)
                {
                    throw new ApiException(response.Error.Code, response.Error.Info);
                }

                // Presumably a project-local Zip extension that merges Pages into result;
                // LINQ's Enumerable.Zip would be a discarded no-op here — TODO confirm.
                result.Zip(response.Query.Pages);

                // The API pages large result sets; chase the continue token per batch.
                while (response.Continue != null)
                {
                    string imurl = $"{url}&imcontinue={response.Continue.Imcontinue}";
                    response = imurl.Get <Json.Imagesearch.Response>(serializer).Result;
                    if (response.Error != null)
                    {
                        throw new ApiException(response.Error.Code, response.Error.Info);
                    }

                    result.Zip(response.Query.Pages);
                }
                return(result);
            }
        /// <summary>
        /// Compares this prediction with <paramref name="other"/>: the predicted values
        /// must match and the probability dictionaries must contain pairwise-equal
        /// keys and values (paired by enumeration order).
        /// </summary>
        /// <param name="other">The prediction to compare against.</param>
        /// <returns>True when prediction and probabilities are equal; otherwise false.</returns>
        public bool Equals(ProbabilityPrediction other)
        {
            if (!Equal(Prediction, other.Prediction) ||
                Probabilities.Count != other.Probabilities.Count)
            {
                return false;
            }

            foreach (var (mine, theirs) in Probabilities.Zip(other.Probabilities, (t, o) => (t, o)))
            {
                if (mine.Key != theirs.Key || !Equal(mine.Value, theirs.Value))
                {
                    return false;
                }
            }

            return true;
        }
        /// <summary>
        /// Verifies the variable importances reported by a classification ensemble of
        /// four decision trees trained on the aptitude data set.
        /// </summary>
        public void ClassificationEnsembleModel_GetVariableImportance()
        {
            var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet();

            var featureNameToIndex = new Dictionary <string, int> {
                { "AptitudeTestScore", 0 },
                { "PreviousExperience_month", 1 }
            };

            var baseLearners = new IIndexedLearner <ProbabilityPrediction>[]
            {
                new ClassificationDecisionTreeLearner(2),
                new ClassificationDecisionTreeLearner(5),
                new ClassificationDecisionTreeLearner(7),
                new ClassificationDecisionTreeLearner(9)
            };

            var ensembleLearner = new ClassificationEnsembleLearner(baseLearners, new MeanProbabilityClassificationEnsembleStrategy());
            var sut             = ensembleLearner.Learn(observations, targets);

            var actual   = sut.GetVariableImportance(featureNameToIndex);
            var expected = new Dictionary <string, double> {
                { "PreviousExperience_month", 100.0 },
                { "AptitudeTestScore", 15.6771501925546 }
            };

            Assert.AreEqual(expected.Count, actual.Count);

            // Pair the importance entries by enumeration order and compare pairwise.
            foreach (var (e, a) in expected.Zip(actual, (e, a) => (e, a)))
            {
                Assert.AreEqual(e.Key, a.Key);
                Assert.AreEqual(e.Value, a.Value, 0.000001);
            }
        }
Exemple #4
0
        /// <summary>
        /// Verifies the variable importances reported by an AdaBoost classification
        /// model trained on the aptitude data set (parsed from the CSV resource).
        /// </summary>
        public void ClassificationAdaBoostModel_GetVariableImportance()
        {
            var parser       = new CsvParser(() => new StringReader(Resources.AptitudeData));
            var observations = parser.EnumerateRows(v => v != "Pass").ToF64Matrix();
            var targets      = parser.EnumerateRows("Pass").ToF64Vector();

            var featureNameToIndex = new Dictionary <string, int> {
                { "AptitudeTestScore", 0 },
                { "PreviousExperience_month", 1 }
            };

            var sut = new ClassificationAdaBoostLearner(10, 1, 3).Learn(observations, targets);

            var actual   = sut.GetVariableImportance(featureNameToIndex);
            var expected = new Dictionary <string, double> {
                { "PreviousExperience_month", 100.0 },
                { "AptitudeTestScore", 24.0268096428771 }
            };

            Assert.AreEqual(expected.Count, actual.Count);

            // Pair the importance entries by enumeration order and compare pairwise.
            foreach (var (e, a) in expected.Zip(actual, (e, a) => (e, a)))
            {
                Assert.AreEqual(e.Key, a.Key);
                Assert.AreEqual(e.Value, a.Value, 0.000001);
            }
        }
        /// <summary>
        /// Verifies the variable importances reported by an AdaBoost classification
        /// model trained on the aptitude data set.
        /// </summary>
        public void ClassificationAdaBoostModel_GetVariableImportance()
        {
            var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet();

            var featureNameToIndex = new Dictionary <string, int> {
                { "AptitudeTestScore", 0 },
                { "PreviousExperience_month", 1 }
            };

            var adaBoost = new ClassificationAdaBoostLearner(10, 1, 3);
            var sut      = adaBoost.Learn(observations, targets);

            var actual   = sut.GetVariableImportance(featureNameToIndex);
            var expected = new Dictionary <string, double> {
                { "PreviousExperience_month", 100.0 },
                { "AptitudeTestScore", 24.0268096428771 }
            };

            Assert.AreEqual(expected.Count, actual.Count);

            var pairs = expected.Zip(actual, (e, a) => (Expected: e, Actual: a));

            foreach (var pair in pairs)
            {
                Assert.AreEqual(pair.Expected.Key, pair.Actual.Key);
                Assert.AreEqual(pair.Expected.Value, pair.Actual.Value, 0.000001);
            }
        }
        /// <summary>
        /// Verifies the variable importances reported by a regression random forest
        /// trained on the aptitude data set (parsed from the CSV resource).
        /// </summary>
        public void RegressionForestModel_GetVariableImportance()
        {
            var parser       = new CsvParser(() => new StringReader(Resources.AptitudeData));
            var observations = parser.EnumerateRows(v => v != "Pass").ToF64Matrix();
            var targets      = parser.EnumerateRows("Pass").ToF64Vector();

            var featureNameToIndex = new Dictionary <string, int> {
                { "AptitudeTestScore", 0 },
                { "PreviousExperience_month", 1 }
            };

            var sut = new RegressionRandomForestLearner(100, 5, 100, 1, 0.0001, 1.0, 42, false)
                      .Learn(observations, targets);

            var actual   = sut.GetVariableImportance(featureNameToIndex);
            var expected = new Dictionary <string, double> {
                { "PreviousExperience_month", 100 },
                { "AptitudeTestScore", 42.3879919692465 }
            };

            Assert.AreEqual(expected.Count, actual.Count);

            // Pair the importance entries by enumeration order and compare pairwise.
            foreach (var (e, a) in expected.Zip(actual, (e, a) => (e, a)))
            {
                Assert.AreEqual(e.Key, a.Key);
                Assert.AreEqual(e.Value, a.Value, m_delta);
            }
        }
        /// <summary>
        /// Averages the 0° and 90° azimuth effective cross-extinction curves for every
        /// (radius, distance) combination and writes the averaged curve to a per-test
        /// output file, followed by a parameter summary file.
        /// </summary>
        public void RadiusDistanceOutput_AzimuthSum_EffectiveCrossExt()
        {
            // Arrange
            const int    DistanceMax  = 3;
            const double DistanceStep = 0.1;
            var          radiuses     = new List <double> {
                4, 10, 20, 40, 70, 100, 200
            };
            List <double> distances    = getDistances(DistanceStep, DistanceMax);
            string        dirAzimuth0  = "RadiusDistanceOutput_Azimuth0_EffectiveCrossExt";
            string        dirAzimuth90 = "RadiusDistanceOutput_Azimuth90_EffectiveCrossExt";

            // Calculate
            foreach (double radius in radiuses)
            {
                foreach (double distance in distances)
                {
                    // Read the two precomputed azimuth sweeps for this (radius, distance).
                    Dictionary <double, double> azim0 = SimpleFormatter.Read(
                        this.getFileFormat(dirAzimuth0, distance, radius));

                    Dictionary <double, double> azim90 = SimpleFormatter.Read(
                        this.getFileFormat(dirAzimuth90, distance, radius));

                    // Average the two curves point-by-point.
                    // NOTE(review): Zip pairs entries by enumeration order; this assumes
                    // both files yield the same key order from SimpleFormatter.Read —
                    // TODO confirm that invariant.
                    Dictionary <double, double> azimSum = azim0.Zip(
                        azim90,
                        (x, y) =>
                        new KeyValuePair <double, double>(x.Key, (x.Value + y.Value) / 2))
                                                          .ToDictionary(key => key.Key, value => value.Value);

                    // Output file is named after the currently running test.
                    string filename = this.getFileFormat(this.TestContext.TestName, distance, radius);
                    SimpleFormatter.Write(filename, azimSum);
                }
            }
            this.writeParameters(radiuses, distances);
        }
        /// <summary>
        /// Verifies the variable importances reported by a regression random forest
        /// trained on the aptitude data set.
        /// </summary>
        public void RegressionForestModel_GetVariableImportance()
        {
            var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet();

            var featureNameToIndex = new Dictionary <string, int> {
                { "AptitudeTestScore", 0 },
                { "PreviousExperience_month", 1 }
            };

            var forestLearner = new RegressionRandomForestLearner(100, 5, 100, 1, 0.0001, 1.0, 42, false);
            var sut           = forestLearner.Learn(observations, targets);

            var actual   = sut.GetVariableImportance(featureNameToIndex);
            var expected = new Dictionary <string, double> {
                { "PreviousExperience_month", 100 },
                { "AptitudeTestScore", 42.3879919692465 }
            };

            Assert.AreEqual(expected.Count, actual.Count);

            var pairs = expected.Zip(actual, (e, a) => (Expected: e, Actual: a));

            foreach (var pair in pairs)
            {
                Assert.AreEqual(pair.Expected.Key, pair.Actual.Key);
                Assert.AreEqual(pair.Expected.Value, pair.Actual.Value, m_delta);
            }
        }
Exemple #9
0
 /// <summary>
 /// Sum of squared differences between the reference category frequencies in
 /// <paramref name="a"/> and the relative aggregate frequencies computed from
 /// <paramref name="n"/> (entries paired by enumeration order).
 /// </summary>
 private static double CalculateHeuristicValue(Dictionary <CharacterFrequencyAnalyzer.Category, double> a, List <double> n)
 {
     var observed = CharacterFrequencyAnalyzer.CalculateRelativeAggregateFrequency(n);

     return a.Zip(observed, (reference, actual) => Math.Pow(reference.Value - actual.Value, 2)).Sum();
 }
        /// <summary>
        /// Verifies the variable importances reported by a gradient-boosted regression
        /// model trained on the aptitude data set.
        /// </summary>
        public void RegressionGradientBoostModel_GetVariableImportance()
        {
            var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet();

            var featureNameToIndex = new Dictionary <string, int> {
                { "AptitudeTestScore", 0 },
                { "PreviousExperience_month", 1 }
            };

            var boostLearner = new RegressionGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1.0, 0, new GradientBoostSquaredLoss(), false);
            var sut          = boostLearner.Learn(observations, targets);

            var actual   = sut.GetVariableImportance(featureNameToIndex);
            var expected = new Dictionary <string, double> {
                { "PreviousExperience_month", 100.0 },
                { "AptitudeTestScore", 72.1682473281495 }
            };

            Assert.AreEqual(expected.Count, actual.Count);

            // Pair the importance entries by enumeration order and compare pairwise.
            foreach (var (e, a) in expected.Zip(actual, (e, a) => (e, a)))
            {
                Assert.AreEqual(e.Key, a.Key);
                Assert.AreEqual(e.Value, a.Value, 0.000001);
            }
        }
Exemple #11
0
 /// <summary>
 /// Enumerates the bidirectional map as <see cref="KeyPair{TKey1, TKey2}"/> entries.
 /// </summary>
 /// <returns>An enumerator yielding one TKey1/TKey2 pair per entry.</returns>
 public IEnumerator <KeyPair <TKey1, TKey2> > GetEnumerator()
 {
     // NOTE(review): Zip pairs the two inner dictionaries by enumeration order;
     // the yielded pairs only correspond if _K1K2 and _K2K1 were populated in
     // the same insertion order — TODO confirm that invariant holds.
     return(_K1K2.Zip(_K2K1, (d1, d2) => new KeyPair <TKey1, TKey2>
     {
         Key1 = d1.Key,
         Key2 = d2.Key
     }).GetEnumerator());
 }
Exemple #12
0
        /// <summary>
        /// Builds the WHERE-clause body "column op 'value'" joined with AND, pairing
        /// each column positionally with its comparison operator. Returns null when
        /// there are no columns (preserving the original contract); the result carries
        /// a trailing space.
        /// </summary>
        /// <param name="stupci">Column name to value map.</param>
        /// <param name="_operatorUsporedbe">Comparison operator per column, in order.</param>
        /// <returns>The clause text with a trailing space, or null for an empty map.</returns>
        private string MakeReadyForQuery(Dictionary <string, object> stupci, List <string> _operatorUsporedbe)
        {
            // SECURITY(review): values are concatenated directly into SQL text, which is
            // injectable if they originate from user input. Prefer parameterized queries;
            // flagged rather than changed because callers consume the raw string.
            var conditions = stupci.Zip(
                _operatorUsporedbe,
                (stupac, operatorUsporedbe) => stupac.Key + " " + operatorUsporedbe + " '" + stupac.Value + "'")
                             .ToList();

            if (conditions.Count == 0)
            {
                return null;
            }

            // Join with AND (replaces the original O(n^2) Last()-per-iteration check)
            // and keep the trailing space after the final condition.
            return string.Join(" AND ", conditions) + " ";
        }
        /// <summary>
        /// Reads "product price quantity" lines until "stocked", keeping the latest
        /// price and the accumulated quantity per product, then prints each product's
        /// line total and the grand total.
        /// </summary>
        static void Main(string[] args)
        {
            IDictionary <string, decimal> priceDict    = new Dictionary <string, decimal>();
            IDictionary <string, int>     quantityDict = new Dictionary <string, int>();

            for (string[] input = Console.ReadLine().Split(' ').ToArray();
                 !input[0].Equals("stocked");
                 input = Console.ReadLine().Split(' ').ToArray())
            {
                string  product  = input[0];
                decimal price    = Decimal.Parse(input[1]);
                int     quantity = int.Parse(input[2]);

                if (!priceDict.ContainsKey(product))
                {
                    priceDict.Add(product, 0);
                    quantityDict.Add(product, 0);
                }

                // The most recent price wins; quantities accumulate.
                priceDict[product]     = price;
                quantityDict[product] += quantity;
            }

            decimal grandTotal = 0;

            // Zip pairs each price entry with its quantity entry; both dictionaries
            // share the same insertion order, so the pairs line up per product.
            foreach (var product in priceDict.Zip(quantityDict, Tuple.Create))
            {
                decimal productPriceTotal = product.Item1.Value * product.Item2.Value;
                grandTotal += productPriceTotal;

                Console.WriteLine("{0}: ${1:F2} * {2} = ${3:F2}", product.Item1.Key, product.Item1.Value,
                                  product.Item2.Value, productPriceTotal);
            }
            Console.WriteLine("------------------------------");
            Console.WriteLine("Grand Total: ${0:F2}", grandTotal);
        }
Exemple #14
0
        /// <summary>
        /// Scores a trade solution: each minion's chosen task index selects a target;
        /// when that target would die to the minion's attack, the target's attack
        /// damage counts as removed. Returns the total removed attack.
        /// </summary>
        /// <param name="bestSolution">Chosen task index per minion, aligned with the
        ///                            enumeration order of <paramref name="minionToTasks"/>.</param>
        /// <param name="minionToTasks">Candidate attack tasks per minion.</param>
        /// <returns>Sum of attack damage of targets that would be killed.</returns>
        private static int SolutionScore(int[] bestSolution, Dictionary <Minion, List <PlayerTask> > minionToTasks)
        {
            int removedAttack = 0;

            foreach (var((minion, tasks), index) in minionToTasks.Zip(bestSolution, (a, b) => Tuple.Create(a, b)))
            {
                // NOTE(review): assumes every selected task targets a Minion — the cast
                // throws otherwise; TODO confirm callers guarantee this.
                Minion target = (Minion)tasks[index].Target;
                if (target.Health <= minion.AttackDamage)
                {
                    removedAttack += target.AttackDamage;
                }
            }
            return(removedAttack);
        }
Exemple #15
0
        /// <summary>
        /// Calculates the simple moving average series over the price values and
        /// returns the average paired with the entry whose key equals the item count
        /// (presumably the latest period — assumes keys run 1..Count; TODO confirm).
        /// </summary>
        /// <param name="lstPrcItems">Price items keyed by period index.</param>
        /// <returns>The moving average at the last period, or default(decimal) when no entry matches.</returns>
        public decimal GetAverage(Dictionary <int, decimal> lstPrcItems)
        {
            List <decimal> averages = new List <decimal>();

            //Measuring time to perform average calculation -- currently is commented out as it was only for informational purpose during development
            //FinanacialFunctionsExtensions<decimal>.MeasureTime(" Time to calculate Average"
            //,() =>
            //	{
            averages = FinanacialFunctionsExtensions <decimal> .SimpleMovingAverage(lstPrcItems.Select(t => t.Value), SampleLength(lstPrcItems)).ToList();

            //	}
            //);
            // Pair each price entry (by enumeration order) with its computed average.
            var results = lstPrcItems.Zip(averages, (v, a) => new { index = v.Key, Value = v, Average = a });

            return(results.Where(rs => rs.index == lstPrcItems.Count).Select(r => r.Average).FirstOrDefault());
        }
Exemple #16
0
        /// <summary>
        /// Asserts that two feature-variable maps have the same count and that their
        /// entries, paired by enumeration order, are pairwise equivalent.
        /// </summary>
        public static void AreEquivalent(Dictionary <string, FeatureVariable> expected, Dictionary <string, FeatureVariable> actual)
        {
            Assert.AreEqual(expected.Count, actual.Count);

            foreach (var (e, a) in expected.Zip(actual, (e, a) => (e, a)))
            {
                AreEquivalent(e, a);
            }
        }
    /// <summary>
    /// Reads "product price quantity" lines until "stocked", tracking the latest
    /// price and accumulated quantity per product, then prints per-product totals
    /// and the grand total.
    /// </summary>
    static void Main()
    {
        var prodPrice    = new Dictionary <string, double>();
        var prodQuantity = new Dictionary <string, long>();

        while (true)
        {
            var tokens = Console.ReadLine().Split();

            if (tokens[0] == "stocked")
            {
                break;
            }

            var product  = tokens[0];
            var price    = double.Parse(tokens[1]);
            var quantity = long.Parse(tokens[2]);

            if (!prodQuantity.ContainsKey(product))
            {
                prodQuantity[product] = 0L;
            }

            // The latest price always wins; quantities accumulate.
            prodQuantity[product] += quantity;
            prodPrice[product]     = price;
        }

        foreach (var entry in prodQuantity)
        {
            var name     = entry.Key;
            var quantity = entry.Value;
            var price    = prodPrice[name];

            Console.WriteLine($"{name}: ${price:F2} * {quantity} = ${(price * quantity):F2}");
        }

        // Both dictionaries share insertion order, so Zip pairs price with quantity.
        var totalSum = prodPrice.Zip(prodQuantity, (x, y) => x.Value * y.Value).ToList().Sum();

        Console.WriteLine("------------------------------");
        Console.WriteLine($"Grand Total: ${totalSum:F2}");
    }
        /// <summary>
        /// Validates any imported chapter information against the currently detected chapter information in the
        /// source media. If validation fails then an error message is returned via the out parameter <see cref="validationErrorMessage"/>
        /// </summary>
        /// <param name="importedChapters">The list of imported chapter information</param>
        /// <param name="validationErrorMessage">In case of a validation error this variable will hold
        ///                                      a detailed message that can be presented to the user</param>
        /// <param name="hasTimestamps">Whether the imported chapters carry duration timestamps to compare.</param>
        /// <returns>True if there are no errors with imported chapters, false otherwise</returns>
        private bool ValidateImportedChapters(Dictionary <int, Tuple <string, TimeSpan> > importedChapters, out string validationErrorMessage, bool hasTimestamps)
        {
            // NOTE(review): validationErrorMessage is never assigned a message in this
            // method despite the documented contract — user feedback goes through
            // message boxes instead; confirm callers do not rely on it.
            validationErrorMessage = null;

            // If the number of chapters don't match, prompt for confirmation
            if (importedChapters.Count != this.Chapters.Count)
            {
                if (this.errorService.ShowMessageBox(
                        string.Format(Resources.ChaptersViewModel_ValidateImportedChapters_ChapterCountMismatchMsg, this.Chapters.Count, importedChapters.Count),
                        Resources.ChaptersViewModel_ValidateImportedChapters_ChapterCountMismatchWarning,
                        MessageBoxButton.YesNo,
                        MessageBoxImage.Question) !=
                    MessageBoxResult.Yes)
                {
                    return(false);
                }
            }

            // Prompt when the timing discrepancy between imported and detected chapters
            // is more than 15 sec for more than 2 chapters
            // (15 sec chosen from empirical evidence, comparing a few DVDs against
            //  downloaded chapter-marker files; first/last chapters often differ anyway
            //  due to language/region differences such as longer title sequences and
            //  different distributor credits)
            if (hasTimestamps)
            {
                // Zip pairs imported and detected chapters by position.
                var diffs = importedChapters.Zip(this.Chapters, (import, source) => source.Duration - import.Value.Item2);
                if (diffs.Count(diff => Math.Abs(diff.TotalSeconds) > 15) > 2)
                {
                    if (this.errorService.ShowMessageBox(
                            Resources.ChaptersViewModel_ValidateImportedChapters_ChapterDurationMismatchMsg,
                            Resources.ChaptersViewModel_ValidateImportedChapters_ChapterDurationMismatchWarning,
                            MessageBoxButton.YesNo,
                            MessageBoxImage.Question) != MessageBoxResult.Yes)
                    {
                        return(false);
                    }
                }
            }

            // All is well, we should import chapters
            return(true);
        }
Exemple #19
0
        /// <summary>
        /// Asserts that two feature-variable-usage maps are equivalent: both null, or
        /// same count with pairwise equivalent entries (paired by enumeration order).
        /// </summary>
        public static void AreEquivalent(Dictionary <string, FeatureVariableUsage> expected, Dictionary <string, FeatureVariableUsage> actual)
        {
            if (expected == null && actual == null)
            {
                return;
            }

            // Fail with a clear assertion when exactly one side is null, instead of
            // throwing a NullReferenceException from the Count access below.
            Assert.IsNotNull(expected);
            Assert.IsNotNull(actual);

            // Count property instead of the LINQ Count() extension (no enumeration).
            Assert.AreEqual(expected.Count, actual.Count);

            foreach (var pair in expected.Zip(actual, (e, a) => new { Expected = e, Actual = a }))
            {
                AreEquivalent(pair.Expected, pair.Actual);
            }
        }
Exemple #20
0
        /// <summary>
        /// Verifies that the XGBoost regression model reports the expected variable
        /// importances (features f0..f8) on the Glass data set.
        /// </summary>
        public void RegressionXGBoostModel_GetVariableImportance()
        {
            var parser       = new CsvParser(() => new StringReader(Resources.Glass));
            var observations = parser.EnumerateRows(v => v != "Target").ToF64Matrix();
            var targets      = parser.EnumerateRows("Target").ToF64Vector();

            // Map feature names f0..f8 to column indices 0..8 directly from the range
            // element. The original captured a mutable counter and incremented it in
            // the element selector, relying on keySelector-before-elementSelector
            // evaluation order inside ToDictionary — same result, fragile construction.
            var featureNameToIndex = Enumerable.Range(0, 9)
                                     .ToDictionary(i => "f" + i.ToString(), i => i);

            var learner = CreateLearner();

            using (var sut = learner.Learn(observations, targets))
            {
                var actual   = sut.GetVariableImportance(featureNameToIndex);
                var expected = new Dictionary <string, double>
                {
                    { "f2", 100 },
                    { "f7", 21.1439170859871 },
                    { "f6", 17.5087210061721 },
                    { "f3", 12.6909395202158 },
                    { "f0", 12.3235851417467 },
                    { "f1", 9.00304680229703 },
                    { "f5", 7.10296482157573 },
                    { "f4", 6.43327754840246 },
                    { "f8", 4.61553313147666 },
                };

                Assert.AreEqual(expected.Count, actual.Count);
                var zip = expected.Zip(actual, (e, a) => new { Expected = e, Actual = a });

                foreach (var item in zip)
                {
                    Assert.AreEqual(item.Expected.Key, item.Actual.Key);
                    Assert.AreEqual(item.Expected.Value, item.Actual.Value, m_delta);
                }
            }
        }
        /// <summary>
        /// Verifies the variable importances reported by a classification stacking
        /// ensemble of four decision trees on the aptitude data set.
        /// </summary>
        public void ClassificationStackingEnsembleModel_GetVariableImportance()
        {
            var parser       = new CsvParser(() => new StringReader(Resources.AptitudeData));
            var observations = parser.EnumerateRows(v => v != "Pass").ToF64Matrix();
            var targets      = parser.EnumerateRows("Pass").ToF64Vector();

            var featureNameToIndex = new Dictionary <string, int> {
                { "AptitudeTestScore", 0 },
                { "PreviousExperience_month", 1 }
            };

            var baseLearners = new IIndexedLearner <ProbabilityPrediction>[]
            {
                new ClassificationDecisionTreeLearner(2),
                new ClassificationDecisionTreeLearner(5),
                new ClassificationDecisionTreeLearner(7),
                new ClassificationDecisionTreeLearner(9)
            };

            var learner = new ClassificationStackingEnsembleLearner(baseLearners, new ClassificationDecisionTreeLearner(9),
                                                                    new RandomCrossValidation <ProbabilityPrediction>(5, 23), false);

            var sut    = learner.Learn(observations, targets);
            var actual = sut.GetVariableImportance(featureNameToIndex);

            WriteImportances(actual);

            var expected = new Dictionary <string, double> {
                { "ClassificationDecisionTreeModel_1_Class_Probability_0", 100 },
                { "ClassificationDecisionTreeModel_2_Class_Probability_0", 92.2443379072288 },
                { "ClassificationDecisionTreeModel_0_Class_Probability_0", 76.9658783620323 },
                { "ClassificationDecisionTreeModel_1_Class_Probability_1", 21.1944454897829 },
                { "ClassificationDecisionTreeModel_0_Class_Probability_1", 0 },
                { "ClassificationDecisionTreeModel_2_Class_Probability_1", 0 },
                { "ClassificationDecisionTreeModel_3_Class_Probability_0", 0 },
                { "ClassificationDecisionTreeModel_3_Class_Probability_1", 0 }
            };

            Assert.AreEqual(expected.Count, actual.Count);

            // Pair the importance entries by enumeration order and compare pairwise.
            foreach (var (e, a) in expected.Zip(actual, (e, a) => (e, a)))
            {
                Assert.AreEqual(e.Key, a.Key);
                Assert.AreEqual(e.Value, a.Value, 0.000001);
            }
        }
Exemple #22
0
        /// <summary>
        /// Reads "product price quantity" lines until "stocked", keeping the latest
        /// price and accumulated quantity per product, then prints each product's
        /// line total and the grand total.
        /// </summary>
        static void Main(string[] args)
        {
            var productsPrices     = new Dictionary <string, double>();
            var productsQuantities = new Dictionary <string, int>();

            for (var command = Console.ReadLine(); command != "stocked"; command = Console.ReadLine())
            {
                var tokens   = command.Split(' ').ToList();
                var product  = tokens[0];
                var price    = double.Parse(tokens[1]);
                var quantity = int.Parse(tokens[2]);

                if (!productsQuantities.ContainsKey(product))
                {
                    productsQuantities[product] = 0;
                    productsPrices[product]     = 0;
                }

                // The latest price always wins; quantities accumulate.
                productsQuantities[product] += quantity;
                productsPrices[product]      = price;
            }

            var grandTotal = 0.0;

            // Both dictionaries share insertion order, so Zip pairs price with quantity.
            foreach (var product in productsPrices.Zip(productsQuantities, Tuple.Create))
            {
                var currentProductTotalPrice = product.Item1.Value * product.Item2.Value;
                grandTotal += currentProductTotalPrice;
                Console.WriteLine(
                    $"{product.Item1.Key}: ${product.Item1.Value:f2} * {product.Item2.Value} = ${currentProductTotalPrice:f2}"
                    );
            }
            Console.WriteLine(new string('-', 30));
            Console.WriteLine($"Grand Total: ${grandTotal:F2}");
        }
        /// <summary>
        /// Verifies the variable importances reported by a regression stacking
        /// ensemble of four decision trees on the aptitude data set.
        /// </summary>
        public void RegressionStackingEnsembleModel_GetVariableImportance()
        {
            var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet();

            var featureNameToIndex = new Dictionary <string, int> {
                { "AptitudeTestScore", 0 },
                { "PreviousExperience_month", 1 }
            };

            var baseLearners = new IIndexedLearner <double>[]
            {
                new RegressionDecisionTreeLearner(2),
                new RegressionDecisionTreeLearner(5),
                new RegressionDecisionTreeLearner(7),
                new RegressionDecisionTreeLearner(9)
            };

            var learner = new RegressionStackingEnsembleLearner(baseLearners,
                                                                new RegressionDecisionTreeLearner(9),
                                                                new RandomCrossValidation <double>(5, 23), false);

            var sut = learner.Learn(observations, targets);

            var actual   = sut.GetVariableImportance(featureNameToIndex);
            var expected = new Dictionary <string, double> {
                { "RegressionDecisionTreeModel_2", 100 },
                { "RegressionDecisionTreeModel_1", 69.7214491857349 },
                { "RegressionDecisionTreeModel_0", 33.8678328474247 },
                { "RegressionDecisionTreeModel_3", 1.70068027210884 }
            };

            Assert.AreEqual(expected.Count, actual.Count);

            // Pair the importance entries by enumeration order and compare pairwise.
            foreach (var (e, a) in expected.Zip(actual, (e, a) => (e, a)))
            {
                Assert.AreEqual(e.Key, a.Key);
                Assert.AreEqual(e.Value, a.Value, 0.000001);
            }
        }
        /// <summary>
        /// Verifies the variable importances reported by a mean regression ensemble
        /// of four decision trees on the aptitude data set (parsed from the CSV resource).
        /// </summary>
        public void RegressionEnsembleModel_GetVariableImportance()
        {
            var parser       = new CsvParser(() => new StringReader(Resources.AptitudeData));
            var observations = parser.EnumerateRows(v => v != "Pass").ToF64Matrix();
            var targets      = parser.EnumerateRows("Pass").ToF64Vector();

            var featureNameToIndex = new Dictionary <string, int> {
                { "AptitudeTestScore", 0 },
                { "PreviousExperience_month", 1 }
            };

            var baseLearners = new IIndexedLearner <double>[]
            {
                new RegressionDecisionTreeLearner(2),
                new RegressionDecisionTreeLearner(5),
                new RegressionDecisionTreeLearner(7),
                new RegressionDecisionTreeLearner(9)
            };

            var ensembleLearner = new RegressionEnsembleLearner(baseLearners, new MeanRegressionEnsembleStrategy());
            var sut             = ensembleLearner.Learn(observations, targets);

            var actual   = sut.GetVariableImportance(featureNameToIndex);
            var expected = new Dictionary <string, double> {
                { "PreviousExperience_month", 100.0 },
                { "AptitudeTestScore", 3.46067371526717 }
            };

            Assert.AreEqual(expected.Count, actual.Count);

            var pairs = expected.Zip(actual, (e, a) => (Expected: e, Actual: a));

            foreach (var pair in pairs)
            {
                Assert.AreEqual(pair.Expected.Key, pair.Actual.Key);
                Assert.AreEqual(pair.Expected.Value, pair.Actual.Value, 0.000001);
            }
        }
Exemple #25
0
        /// <summary>
        /// Emits the expression tree for this token. Two modes:
        /// lambda mode (<c>Lambda == true</c>) compiles the body into a delegate whose parameters
        /// are routed through <see cref="DataContainer"/> slots; otherwise the token declares
        /// local variables, evaluates the body, and un-declares them again.
        /// </summary>
        /// <param name="parameters">Outer parameter expressions; unresolved names from the lambda body are appended here.</param>
        /// <param name="locals">Outer local variables, each a ConstantExpression wrapping a DataContainer.</param>
        /// <param name="dataContainers">Flat list of every DataContainer allocated during the build; new containers are appended.</param>
        /// <param name="dynamicContext">Context type handed to the C# runtime binder.</param>
        /// <param name="label">Return label target, passed through to nested expression builds.</param>
        internal override Expression GetExpression(List <ParameterExpression> parameters, Dictionary <string, ConstantExpression> locals, List <DataContainer> dataContainers, Type dynamicContext, LabelTarget label)
        {
            if (Lambda)
            {
                // Collect the lambda's declared parameters as (name, type) pairs.
                // A TypeCastToken carries an explicit type; a bare ParameterToken defaults to object.
                List <Tuple <string, Type> > pars = new List <Tuple <string, Type> >();
                foreach (TokenBase token in Arguments.Arguments)
                {
                    if (token is TypeCastToken)
                    {
                        pars.Add(new Tuple <string, Type>(((token as TypeCastToken).Target as ParameterToken).Name, (token as TypeCastToken).TargetType));
                    }
                    else
                    {
                        pars.Add(new Tuple <string, Type>((token as ParameterToken).Name, typeof(object)));
                    }
                }
                // Each lambda parameter gets a DataContainer slot; the body reads parameter
                // values through these containers rather than through ParameterExpressions.
                Dictionary <string, ConstantExpression> subLocals = new Dictionary <string, ConstantExpression>();
                foreach (var tuple in pars)
                {
                    var container = new DataContainer();
                    subLocals.Add(tuple.Item1, Expression.Constant(container));
                    dataContainers.Add(container);
                }

                // First pass: build the body. Any names the body could not resolve against
                // subLocals come back in parExps as free ParameterExpressions.
                List <ParameterExpression> parExps = new List <ParameterExpression>();
                Expression exp = Value.GetExpression(parExps, subLocals, dataContainers, dynamicContext, label);

                if (parExps.Count != 0)
                {
                    // Free names are captured from the enclosing scope: promote each to an
                    // outer parameter (unless the outer scope already has it) and give it a
                    // container slot, then rebuild the body so it now resolves everything.
                    foreach (ParameterExpression par in parExps)
                    {
                        if (!(parameters.Any(p => p.Name == par.Name) || locals.Any(l => l.Key == par.Name)))
                        {
                            parameters.Add(par);
                        }
                        var container = new DataContainer();
                        subLocals.Add(par.Name, Expression.Constant(container));
                        dataContainers.Add(container);
                    }
                    parExps.Clear();
                    exp = Value.GetExpression(parExps, subLocals, dataContainers, dynamicContext, label);
                }

                // parExps is empty again here (second pass resolved everything); reuse it for
                // the typed ParameterExpressions that form the compiled delegate's signature.
                foreach (var tuple in pars)
                {
                    parExps.Add(Expression.Parameter(tuple.Item2, tuple.Item1));
                }

                // Runtime binder that converts the dynamically-evaluated body to the declared return type.
                CallSiteBinder binder = Binder.Convert(CSharpBinderFlags.None, Value.ReturnType, dynamicContext);

                // Delegate body: copy each incoming argument into its container's Value,
                // then evaluate the lambda body dynamically.
                // NOTE(review): subLocals.Zip(parExps, ...) pairs containers with parameters by
                // Dictionary insertion order — the lambda's own parameters were inserted first,
                // so the first pars.Count entries line up with parExps. Confirm this invariant
                // holds if the dictionary type is ever changed.
                Expression block = Expression.Block(subLocals.Zip(parExps, (l, p) => Expression.Assign(Expression.Property(l.Value, "Value"), Expression.Convert(p, typeof(object)))).Concat(new Expression[] { Expression.Dynamic(binder, Value.ReturnType, exp) }));

                // Pick the Func<...> arity matching the parameter count, then locate the generic
                // Expression.Lambda<TDelegate>(Expression, ParameterExpression[]) overload by
                // reflection and compile the delegate via dynamic dispatch.
                Type       type   = funcTypes[pars.Count].MakeGenericType(pars.Select(t => t.Item2).Concat(new[] { Value.ReturnType }).ToArray());
                MethodInfo method = typeof(Expression).GetMethods().FirstOrDefault(m => m.Name == "Lambda" && m.IsGenericMethod && m.GetParameters().Length == 2).MakeGenericMethod(type);
                object     func   = ((dynamic)method.Invoke(null, new object[] { block, parExps.ToArray() })).Compile();

                // Result expression: at call time, snapshot each captured outer value into its
                // container (skipping the containers that belong to the lambda's own parameters),
                // then yield the pre-compiled delegate as a constant.
                Expression ret = Expression.Block(subLocals.Skip(parExps.Count).Select(kvp => Expression.Assign(Expression.Property(kvp.Value, "Value"), parameters.Select(p => new Tuple <string, Expression>(p.Name, p)).Concat(locals.Select(k => new Tuple <string, Expression>(k.Key, Expression.Property(k.Value, "Value")))).First(p => p.Item1 == kvp.Key).Item2)).Concat(new [] { Expression.Constant(func) as Expression }));

                return(ret);
            }
            else
            {
                // Local-variable mode: every argument is an AssignmentToken declaring a new local.
                List <ConstantExpression> newLocals = new List <ConstantExpression>();
                foreach (var arg in Arguments.Arguments.Cast <AssignmentToken>())
                {
                    // Shadowing an existing local is an error, not a new scope.
                    if (locals.Any(name => name.Key == arg.Name))
                    {
                        throw new Exception("Duplicate local variable name \"" + arg.Name + "\" found.");
                    }
                    var container = new DataContainer();
                    var value     = Expression.Constant(container);
                    dataContainers.Add(container);
                    newLocals.Add(value);
                    locals.Add(arg.Name, value);
                }
                // Evaluate each initializer into its container, then evaluate the body;
                // the body's value is the block's result.
                IEnumerable <BinaryExpression> assignments = Arguments.Arguments.Cast <AssignmentToken>().Zip(newLocals, (t, l) => Expression.Assign(Expression.Property(l, "Value"), t.Value.GetExpression(parameters, locals, dataContainers, dynamicContext, label)));
                Expression ret = Expression.Block(assignments.Cast <Expression>().Concat(new Expression[] { Value.GetExpression(parameters, locals, dataContainers, dynamicContext, label) }));
                // Remove the locals again so they do not leak into sibling expressions.
                foreach (var arg in Arguments.Arguments.Cast <AssignmentToken>())
                {
                    locals.Remove(arg.Name);
                }
                return(ret);
            }
        }
	/// <summary>
	/// Creates pak files using streaming install chunk manifests.
	/// Each chunk manifest lists the files belonging to that chunk; staged files not
	/// claimed by any manifest fall back to the base chunk (index 0).
	/// </summary>
	/// <param name="Params">Project/build parameters (chunk install settings, pak signing, etc.).</param>
	/// <param name="SC">Deployment context for the platform being staged.</param>
	private static void CreatePaksUsingChunkManifests(ProjectParams Params, DeploymentContext SC)
	{
		Log("Creating pak using streaming install manifests.");
		DumpManifest(SC, CombinePaths(CmdEnv.LogFolder, "PrePak" + (SC.DedicatedServer ? "_Server" : "")));

		// Load the list of chunk manifests, then each manifest's file set.
		var TmpPackagingPath = GetTmpPackagingPath(Params, SC);
		var ChunkListFilename = GetChunkPakManifestListFilename(Params, SC);
		var ChunkList = ReadAllLines(ChunkListFilename);
		var ChunkResponseFiles = new HashSet<string>[ChunkList.Length];
		for (int Index = 0; Index < ChunkList.Length; ++Index)
		{
			var ChunkManifestFilename = CombinePaths(TmpPackagingPath, ChunkList[Index]);
			ChunkResponseFiles[Index] = ReadPakChunkManifest(ChunkManifestFilename);
		}
		// We still want to have a list of all files to stage. We will use the chunk manifests
		// to put the files from the staging manifest into the right chunk
		var StagingManifestResponseFile = CreatePakResponseFileFromStagingManifest(SC);
		// DefaultChunkIndex assumes 0 is the 'base' chunk
		const int DefaultChunkIndex = 0;
		var PakResponseFiles = new Dictionary<string, string>[ChunkList.Length];
		for (int Index = 0; Index < PakResponseFiles.Length; ++Index)
		{
			// Case-insensitive keys: manifest paths and staged paths may differ in casing.
			PakResponseFiles[Index] = new Dictionary<string, string>(StringComparer.InvariantCultureIgnoreCase);
		}
		foreach (var StagingFile in StagingManifestResponseFile)
		{
			// A file goes into the FIRST chunk whose manifest claims it (in any of the
			// path spellings tried below); remaining chunks are not considered.
			bool bAddedToChunk = false;
			for (int ChunkIndex = 0; !bAddedToChunk && ChunkIndex < ChunkResponseFiles.Length; ++ChunkIndex)
			{
                		string OriginalFilename = StagingFile.Key;
                		string NoExtension = CombinePaths(Path.GetDirectoryName(OriginalFilename), Path.GetFileNameWithoutExtension(OriginalFilename));
                		string OriginalReplaceSlashes = OriginalFilename.Replace('/', '\\');
                		string NoExtensionReplaceSlashes = NoExtension.Replace('/', '\\');

				// Try both slash conventions and extension-less forms, since manifests and
				// staging manifests are not guaranteed to agree on either.
				if (ChunkResponseFiles[ChunkIndex].Contains(OriginalFilename) || 
                		    ChunkResponseFiles[ChunkIndex].Contains(OriginalReplaceSlashes) ||
		                    ChunkResponseFiles[ChunkIndex].Contains(NoExtension) ||
		                    ChunkResponseFiles[ChunkIndex].Contains(NoExtensionReplaceSlashes))
				{
					PakResponseFiles[ChunkIndex].Add(StagingFile.Key, StagingFile.Value);
					bAddedToChunk = true;
				}
			}
			if (!bAddedToChunk)
			{
				//Log("No chunk assigned found for {0}. Using default chunk.", StagingFile.Key);
				PakResponseFiles[DefaultChunkIndex].Add(StagingFile.Key, StagingFile.Value);
			}
		}

		// Chunk-install builds: clear out stale manifests and any previous output for
		// this version before generating new paks.
		if (Params.CreateChunkInstall)
		{
			string ManifestDir = CombinePaths(Params.ChunkInstallDirectory, SC.FinalCookPlatform, "ManifestDir");
			if (InternalUtils.SafeDirectoryExists(ManifestDir))
			{
				foreach (string ManifestFile in Directory.GetFiles(ManifestDir, "*.manifest"))
				{
					InternalUtils.SafeDeleteFile(ManifestFile, true);
				}
			}
			string DestDir = CombinePaths(Params.ChunkInstallDirectory, SC.FinalCookPlatform, Params.ChunkInstallVersionString);
			if (InternalUtils.SafeDirectoryExists(DestDir))
			{
				InternalUtils.SafeDeleteDirectory(DestDir); 
			}
		}

		// Pair each per-chunk response file with its manifest name so they can be
		// processed together below.
		IEnumerable<Tuple<Dictionary<string,string>, string>> PakPairs = PakResponseFiles.Zip(ChunkList, (a, b) => Tuple.Create(a, b));

        if (Params.CreateChunkInstall)
        {
            string ChunkInstallBasePath = CombinePaths(Params.ChunkInstallDirectory, SC.FinalCookPlatform);
            string CloudDir = MakePathSafeToUseWithCommandLine(CombinePaths(ChunkInstallBasePath, "CloudDir"));
            InternalUtils.SafeDeleteDirectory(CloudDir, true);
        }

        // Pak creation for each chunk is independent, so it runs in parallel.
        System.Threading.Tasks.Parallel.ForEach(PakPairs, (PakPair) =>
		{
			var ChunkName = Path.GetFileNameWithoutExtension(PakPair.Item2);
			CreatePak(Params, SC, PakPair.Item1, ChunkName, Params.SignPak);
		});

		// Publish the chunk/layer mapping alongside the project so downstream tooling can find it.
		String ChunkLayerFilename = CombinePaths(GetTmpPackagingPath(Params, SC), GetChunkPakLayerListName());
		String OutputChunkLayerFilename = Path.Combine(SC.ProjectRoot, "Build", SC.FinalCookPlatform, "ChunkLayerInfo", GetChunkPakLayerListName());
		Directory.CreateDirectory(Path.GetDirectoryName(OutputChunkLayerFilename));
		File.Copy(ChunkLayerFilename, OutputChunkLayerFilename, true);
	}
Exemple #27
0
        /// <summary>
        /// Checks imported chapter information against the chapters currently detected in the
        /// source media, asking the user to confirm whenever the two disagree (count mismatch,
        /// or more than two chapters whose durations differ by over 15 seconds).
        /// </summary>
        /// <param name="importedChapters">Imported chapter number mapped to (name, duration).</param>
        /// <param name="validationErrorMessage">Always set to null by the current implementation;
        ///                                      reserved for a detailed message presented to the user.</param>
        /// <returns>True if the import should proceed, false if the user declined a confirmation prompt.</returns>
        private bool ValidateImportedChapters(Dictionary<int, Tuple<string, TimeSpan>> importedChapters, out string validationErrorMessage)
        {
            validationErrorMessage = null;

            // Shared Yes/No prompt: true only when the user explicitly answers Yes.
            Func<string, string, bool> userConfirmed = (message, caption) =>
                MessageBoxResult.Yes == this.errorService.ShowMessageBox(message, caption, MessageBoxButton.YesNo, MessageBoxImage.Question);

            // Chapter counts differ between the import and the detected source — confirm before continuing.
            if (importedChapters.Count != this.Task.ChapterNames.Count)
            {
                string countMismatchMessage = string.Format(
                    Resources.ChaptersViewModel_ValidateImportedChapters_ChapterCountMismatchMsg,
                    this.Task.ChapterNames.Count,
                    importedChapters.Count);

                if (!userConfirmed(countMismatchMessage, Resources.ChaptersViewModel_ValidateImportedChapters_ChapterCountMismatchWarning))
                {
                    return false;
                }
            }

            // Pair each imported duration with the detected one and count chapters that
            // disagree by more than 15 seconds (threshold chosen empirically by the original
            // author from comparing a few DVDs against downloaded chapter-marker files).
            int largeDiscrepancies = importedChapters
                .Zip(this.Task.ChapterNames, (import, source) => source.Duration - import.Value.Item2)
                .Count(difference => Math.Abs(difference.TotalSeconds) > 15);

            if (largeDiscrepancies > 2)
            {
                bool proceed = userConfirmed(
                    Resources.ChaptersViewModel_ValidateImportedChapters_ChapterDurationMismatchMsg,
                    Resources.ChaptersViewModel_ValidateImportedChapters_ChapterDurationMismatchWarning);
                if (!proceed)
                {
                    return false;
                }
            }

            // All is well, we should import chapters
            return true;
        }
Exemple #28
0
        /// <summary>
        /// Serializes the archive to <paramref name="stream"/>: header, folder records,
        /// file-record blocks, the file-name table, then the file data itself. Offsets that
        /// are only known after later sections are written are back-patched at the end via
        /// the _folderRecordOffsets*/_fileRecordOffsets* maps.
        /// </summary>
        /// <param name="stream">Destination stream; disposed when writing completes (via the BinaryWriter).</param>
        /// <param name="recreate">True to rebuild header flags from Settings instead of
        /// reusing the flags read from the original archive.</param>
        private void SaveTo(Stream stream, bool recreate)
        {
            // Flatten all folders into one file list; names are needed for the name table and counts.
            var allFiles     = this.SelectMany(fold => fold).ToList();
            var allFileNames = allFiles.Select(file => file.Name).ToList();

            // A-maps record WHERE in the output a placeholder offset was written;
            // B-maps record the VALUE to patch in. They are zipped together at the end.
            // NOTE(review): presumably populated by WriteFolderRecord/WriteFileRecordBlock/
            // WriteFileBlock below — confirm against those methods.
            _folderRecordOffsetsA = new Dictionary <BsaFolder, uint>(Count);
            _folderRecordOffsetsB = new Dictionary <BsaFolder, uint>(Count);

            _fileRecordOffsetsA = new Dictionary <BsaFile, uint>(allFiles.Count);
            _fileRecordOffsetsB = new Dictionary <BsaFile, uint>(allFiles.Count);

            var header = new BsaHeader();

            // Reuse the header of the archive we originally read, unless asked to rebuild.
            if (!recreate && _bsaReader != null)
            {
                header = _bsaReader.Header;
            }
            if (header.Equals(default(BsaHeader)))
            {
                //this needs to be set, otherwise we won't write archive information
                recreate = true;
            }

            header.Field   = BsaGreet;
            header.Version = FalloutVersion;
            header.Offset  = BsaHeader.Size;
            if (recreate)
            {
                // Fresh archive: derive flags from settings rather than the source archive.
                header.ArchiveFlags = ArchiveFlags.NamedDirectories | ArchiveFlags.NamedFiles;
                if (Settings.DefaultCompressed)
                {
                    header.ArchiveFlags |= ArchiveFlags.Compressed;
                }
                if (Settings.BStringPrefixed)
                {
                    header.ArchiveFlags |= ArchiveFlags.BStringPrefixed;
                }
            }
            // Name-length totals include one extra byte per name (null terminator).
            header.FolderCount           = (uint)Count;
            header.FileCount             = (uint)allFileNames.Count;
            header.TotalFolderNameLength = (uint)this.Sum(folder => folder.Path.Length + 1);
            header.TotalFileNameLength   = (uint)allFileNames.Sum(file => file.Length + 1);
            if (recreate)
            {
                header.FileFlags = CreateFileFlags(allFileNames);
            }

            using (var writer = new BinaryWriter(stream))
            {
                // Section order matters: header, folder records, file-record blocks,
                // file-name table, file data, then offset back-patching.
                header.Write(writer);

                foreach (var folder in this)
                {
                    WriteFolderRecord(writer, folder);
                }

#if PARALLEL
                //parallel pump the files, as checking RecordSize may
                //trigger a decompress/recompress, depending on settings
                allFiles.AsParallel().ForAll(file => file.Cache());
#endif
                foreach (var folder in this)
                {
                    WriteFileRecordBlock(writer, folder, header.TotalFileNameLength);
                }

                allFileNames.ForEach(writer.WriteCString);

                allFiles.ForEach(file => WriteFileBlock(writer, file));

                // Pair each placeholder position (A) with its final value (B).
                // NOTE(review): Zip pairs by Dictionary enumeration order — this relies on the
                // A and B maps having been populated in the same order; confirm in the Write* helpers.
                var folderRecordOffsets = _folderRecordOffsetsA.Zip(_folderRecordOffsetsB, (kvpA, kvpB) => new KeyValuePair <uint, uint>(kvpA.Value, kvpB.Value));
                var fileRecordOffsets   = _fileRecordOffsetsA.Zip(_fileRecordOffsetsB, (kvpA, kvpB) => new KeyValuePair <uint, uint>(kvpA.Value, kvpB.Value));
                var completeOffsets     = folderRecordOffsets.Concat(fileRecordOffsets).ToList();

                // Back-patch: seek to each placeholder position and overwrite it with the real offset.
                completeOffsets.ForEach(kvp =>
                {
                    writer.BaseStream.Seek(kvp.Key, SeekOrigin.Begin);
                    writer.Write(kvp.Value);
                });
            }
        }