public static TestSummary Merge(TestSummary[] tests)
        {
            //merges an array of TestSummaries back into one, e.g. for combining separate asset sets back into a single summary for analysis
            if (tests.Length == 0)
            {
                return(null);
            }

            //initial error checking: the end train dates must all be the same to make sense of performance results in the train and test periods
            if (tests.Select(x => x.EndTrainDate).Distinct().Count() != 1)
            {
                throw new Exception("Can only merge test summaries that have the same end train date!");
            }

            TestSummary merged = new TestSummary();

            foreach (TestSummary ts in tests)
            {
                merged.testSets.AddRange(ts.TestSets);
            }

            //copy across the dates to the new testsummary
            merged.StartDate    = tests.Min(x => x.StartDate);
            merged.EndDate      = tests.Max(x => x.EndDate);
            merged.EndTrainDate = tests.FirstOrDefault().EndTrainDate;


            return(merged);
        }
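        //A minimal usage sketch for Merge, assuming per-asset summaries were produced with the
        //TestSummary(TestSummary original, string assetName) constructor shown further below.
        //The asset names here are hypothetical and for illustration only.
        public static TestSummary MergeExample(TestSummary full)
        {
            TestSummary[] perAsset = new TestSummary[]
            {
                new TestSummary(full, "EURUSD"),
                new TestSummary(full, "GBPUSD")
            };

            //recombine the per-asset summaries into a single summary for analysis
            return Merge(perAsset);
        }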
 //copies the configuration and date fields (but not the test sets) from one TestSummary to another
 private void Copy(TestSummary original, TestSummary newObject)
 {
     newObject.StartDate              = original.StartDate;
     newObject.EndDate                = original.EndDate;
     newObject.EndTrainDate           = original.EndTrainDate;
     newObject.ReduceCorrelatedParams = original.ReduceCorrelatedParams;
     newObject.ReduceByRankParams     = original.ReduceByRankParams;
     newObject.TradeDataLabels        = original.TradeDataLabels;
 }
        public void OnCompleteTest(TestSet testSet)
        {
            //this callback is invoked from multiple worker threads, so guard the shared summary and counter
            int completed;
            lock (TestSummary)
            {
                TestSummary.Add(testSet);
                completed = TestSummary.TestSets.Length;
            }

            MessageDelegate?.Invoke("Completed " + completed + " of " + tasksRequired, MessageType.Update);

            //All optimisations have been done
            if (completed == tasksRequired)
            {
                MessageDelegate?.Invoke("\nComplete backtest took: " + (DateTime.Now - start).TotalMinutes.ToString("0.00") + " minutes");
                OnComplete?.Invoke(TestSummary);
            }
        }
        public static TestSummary Load(string filename)
        {
            TestSummary ts = null;

            if (File.Exists(filename))
            {
                //NOTE: BinaryFormatter is obsolete and insecure on modern .NET; it is kept here only for compatibility with previously serialised summaries
                using (Stream openFileStream = File.OpenRead(filename))
                {
                    System.Runtime.Serialization.Formatters.Binary.BinaryFormatter deserializer = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
                    if (openFileStream.Length > 0)
                    {
                        ts = (TestSummary)deserializer.Deserialize(openFileStream);
                    }
                }
            }

            return(ts);
        }
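        //A minimal sketch of how Load might be used, assuming a previously serialised summary
        //file exists on disk; the path and console output are illustrative only.
        public static void PrintSummaryInfo(string filename)
        {
            TestSummary summary = Load(filename);

            if (summary != null)
            {
                Console.WriteLine("Loaded " + summary.TestSets.Length + " test sets covering " +
                                  summary.StartDate + " to " + summary.EndDate);
            }
        }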
        public void Run(string strategyName, string strategiesDLL, bool optimise = false)
        {
            start = DateTime.Now;

            TestSummary = new TestSummary();

            Optimise = optimise;

            //Get the strategy
            Strategy strategy = Strategy.Load(strategyName, strategiesDLL);

            //Copy across the strategy back test setup
            if (strategy.StartDate != null)
            {
                TestSummary.StartDate = strategy.StartDate;
            }
            if (strategy.EndDate != null)
            {
                TestSummary.EndDate = strategy.EndDate;
            }
            TestSummary.ReduceCorrelatedParams = strategy.ReduceCorrelatedParams;
            TestSummary.ReduceByRankParams     = strategy.ReduceByRankParams;
            TestSummary.TradeDataLabels        = strategy.TradeDataLabels;

            //Get all the possible variants from this strategy's parameters
            StrategyVariant[] variants;
            if (optimise)
            {
                variants = StrategyVariant.BruteForceGeneration(strategy.OptimiseParameters.ToArray());
            }
            else
            {
                variants = new StrategyVariant[] { StrategyVariant.Default(strategy.Parameters.ToArray()) };
            }

            threadTasks = new Queue <BackTestTask> [strategy.Cpus];

            //Load the asset details file and preload price data for each asset selected in TradeAssetList
            Dictionary <string, Asset> assetDetails = Asset.LoadAssetFile(strategy.AssetDetailsFile);

            foreach (string assetName in strategy.TradeAssetList)
            {
                Asset asset = assetDetails[assetName];

                MessageDelegate?.Invoke("Loading in data " + assetName);

                //load in the data before we start
                foreach (ExternalFeatureData externalFeature in strategy.ExternalFeatures)
                {
                    if (!asset.Data.ContainsKey(externalFeature.Timeframe))
                    {
                        DataBuilder.LoadExternalFeatureBinary(asset, externalFeature, MessageDelegate);
                    }
                }

                //Read all the bytes from the data path for the 1 minute timeframe if the data is not already loaded.
                //This is very fast: about 0.9 seconds for 10 years of minute data.
                if (asset.Dataset == null)
                {
                    asset.Dataset = DataBuilder.LoadBinary(asset.DataPath);
                }
            }

            tasksRequired = 0;

            int threadIndex = 0;

            foreach (string assetName in strategy.TradeAssetList)
            {
                foreach (StrategyVariant currentVariant in variants)
                {
                    Strategy localStrategy = Strategy.Load(strategyName, strategiesDLL);
                    Asset    asset         = assetDetails[assetName];
                    localStrategy.Assets.Add(assetName, asset);

                    //set the strategy parameters
                    for (int pIndex = 0; pIndex < localStrategy.Parameters.Count; pIndex++)
                    {
                        localStrategy.SetParameter(localStrategy.Parameters[pIndex].Name, currentVariant.Parameters[pIndex]);
                    }

                    localStrategy.Description = asset.Name + " " + currentVariant.ToString();

                    if (threadIndex >= threadTasks.Length)
                    {
                        threadIndex = 0;
                    }

                    BackTestTask btt = new BackTestTask(asset, localStrategy);

                    if (threadTasks[threadIndex] == null)
                    {
                        threadTasks[threadIndex] = new Queue <BackTestTask>();
                    }
                    threadTasks[threadIndex].Enqueue(btt);

                    tasksRequired++;

                    threadIndex++;
                }
            }

            MessageDelegate?.Invoke("Starting " + tasksRequired + " tests ...");

            foreach (Queue <BackTestTask> threadQueue in threadTasks)
            {
                if (threadQueue != null)
                {
                    if (threadQueue.Count > 0)
                    {
                        Thread thread = new Thread(() => Test(threadQueue, OnCompleteTest));
                        thread.Start();
                    }
                }
            }
        }
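        //A minimal sketch of driving a backtest end to end. It assumes MessageDelegate accepts a
        //message string plus a MessageType (as the calls above suggest) and that OnComplete
        //receives the finished TestSummary; the strategy name and DLL path are placeholders.
        public static void RunExample()
        {
            BackTest backTest = new BackTest();

            backTest.MessageDelegate = (message, type) => Console.WriteLine(message);
            backTest.OnComplete      = summary => Console.WriteLine("Finished with " + summary.TestSets.Length + " test sets");

            backTest.Run("MyStrategy", "Strategies.dll", optimise: true);
        }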
 public BackTest()
 {
     TestSummary = new TestSummary();
 }
 //makes a copy of the test summary containing only the test sets that relate to the passed assetName
 public TestSummary(TestSummary original, string assetName)
 {
     Copy(original, this);
     Description = assetName;
     testSets    = new List <TestSet>(original.TestSets.Where(x => x.Asset == assetName).ToArray());
 }
 public TestSummary(TestSummary original)
 {
     Copy(original, this);
     testSets = new List <TestSet>(original.TestSets);
 }
        public static TestSummary ReduceCorrelated(TestSummary testSummary)
        {
            //Work out the end of the train period from the train/test split percentage
            DateTime startDate = (DateTime)testSummary.StartDate;
            DateTime endDate   = (DateTime)testSummary.EndDate;
            double   trainDays = (endDate - startDate).TotalDays * ((double)testSummary.ReduceCorrelatedParams.TrainTestSplit / 100);
            DateTime endTrain  = startDate.AddDays(trainDays);

            testSummary.EndTrainDate = endTrain;

            //Create a copy of the testSummary so we don't modify the original testSummary when we reduce the testsets
            TestSummary filteredTestSummary = new TestSummary(testSummary);

            //remove any non-performing sets
            List <TestSet> nonPerformers   = new List <TestSet>();
            int            totalProfitable = 0;

            foreach (TestSet t in testSummary.TestSets)
            {
                double p = PerformanceResult.CalculateProfitFactor(t.Trades.Where(x => x.CloseTime <= filteredTestSummary.EndTrainDate).ToArray());
                if (p > 1)
                {
                    totalProfitable++;
                }
                if (p < testSummary.ReduceCorrelatedParams.MinMetric)
                {
                    filteredTestSummary.Remove(t);
                }
            }

            //remove all the test sets if fewer than half are profitable in the train period
            double pProfitable = (double)totalProfitable / (double)testSummary.TestSets.Length;

            if (pProfitable < 0.5)
            {
                foreach (TestSet t in testSummary.TestSets)
                {
                    filteredTestSummary.Remove(t);
                }
            }

            //Calculate the profit for every week so we can do a correlation based on weekly profits
            Dictionary <string, KeyValuePair <int, double>[]> WeeklyProfits = new Dictionary <string, KeyValuePair <int, double>[]>();

            foreach (TestSet ts in testSummary.TestSets)
            {
                //get all the trades in the train period and calculate weekly profit
                var result =
                    //group trades in the train period into approximate week buckets: (years since start) * 52 + (day of year / 7)
                    from s in ts.Trades.Where(x => x.CloseTime <= endTrain)
                    group s by new { week = (s.OpenTime.Year - startDate.Year) * 52 + (s.OpenTime.DayOfYear / 7) } into g
                    select new KeyValuePair <int, double>(g.Key.week, g.Sum(x => x.Profit));

                WeeklyProfits.Add(ts.Description, result.ToArray());
            }

            //Create a grid of r2 values by comparing each testset with each other test set
            Dictionary <Pair, double> r2Values = new Dictionary <Pair, double>();

            foreach (KeyValuePair <string, KeyValuePair <int, double>[]> wpRow in WeeklyProfits)
            {
                foreach (KeyValuePair <string, KeyValuePair <int, double>[]> wpColumn in WeeklyProfits)
                {
                    //skip identical result sets
                    if (wpColumn.Key == wpRow.Key)
                    {
                        continue;
                    }

                    //calculate the r2 value for this pair of test sets:
                    //line up the weeks to get an x and y series for the current pair
                    Dictionary <int, Point> list = new Dictionary <int, Point>();
                    foreach (KeyValuePair <int, double> res in wpRow.Value)
                    {
                        list.Add(res.Key, new Point(res.Value, 0, wpRow.Key, null));
                    }
                    foreach (KeyValuePair <int, double> res in wpColumn.Value)
                    {
                        if (!list.ContainsKey(res.Key))
                        {
                            list.Add(res.Key, new Point(0, res.Value, null, wpColumn.Key));
                        }
                        else
                        {
                            list[res.Key].Y      = res.Value;
                            list[res.Key].YLabel = wpColumn.Key;
                        }
                    }
                    double[] x = list.Select(v => v.Value.X).ToArray();
                    double[] y = list.Select(v => v.Value.Y).ToArray();


                    //calculate the r2 and store in dictionary with the testset description pair as the Key
                    r2Values.Add(new Pair(wpRow.Key, wpColumn.Key), Stat.R2(x, y));
                }
            }



            foreach (KeyValuePair <Pair, double> res in r2Values)
            {
                //if the pair is too correlated, remove the worst performer
                if (res.Value > testSummary.ReduceCorrelatedParams.R2Cutoff)
                {
                    //get the train set of trades only
                    Trade[] xTrades = filteredTestSummary.GetTradeSet(Convert.ToString(res.Key.P1), TestSummary.TradeSet.Train);
                    Trade[] yTrades = filteredTestSummary.GetTradeSet(Convert.ToString(res.Key.P2), TestSummary.TradeSet.Train);

                    //if both exist in our filtered test sets remove worst performer - it may have already been removed from previous pair r2 comparisons
                    if (xTrades != null && yTrades != null)
                    {
                        double xMetric = PerformanceResult.CalculateProfitFactor(xTrades);
                        double yMetric = PerformanceResult.CalculateProfitFactor(yTrades);

                        if (xMetric > yMetric)
                        {
                            filteredTestSummary.Remove(Convert.ToString(res.Key.P2));
                        }
                        else
                        {
                            filteredTestSummary.Remove(Convert.ToString(res.Key.P1));
                        }
                    }
                }
            }


            return(filteredTestSummary);
        }
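        //ReduceCorrelated scores the similarity of two weekly profit series with Stat.R2, whose
        //implementation is not shown in this excerpt. A common choice is the squared Pearson
        //correlation coefficient, sketched below purely as an assumption about what Stat.R2 returns.
        private static double R2Sketch(double[] x, double[] y)
        {
            double meanX = x.Average();
            double meanY = y.Average();

            double covariance = 0, varianceX = 0, varianceY = 0;

            for (int i = 0; i < x.Length; i++)
            {
                covariance += (x[i] - meanX) * (y[i] - meanY);
                varianceX  += (x[i] - meanX) * (x[i] - meanX);
                varianceY  += (y[i] - meanY) * (y[i] - meanY);
            }

            //if either series has no variance the correlation is undefined; treat as uncorrelated
            if (varianceX == 0 || varianceY == 0)
            {
                return 0;
            }

            double r = covariance / Math.Sqrt(varianceX * varianceY);

            return r * r;
        }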
        public static TestSummary ReduceByRank(TestSummary testSummary)
        {
            //don't filter any further if there are fewer than 2 test sets
            if (testSummary.TestSets.Count() < 2)
            {
                return(testSummary);
            }

            //copy the original test summary so we don't modify it directly
            TestSummary filteredTestSummary = new TestSummary(testSummary);

            //cycle through this process of deleting the worst average ranking test set until the remaining test sets show no real rank differentiation.
            //These test sets should already have been reduced to remove correlated test sets, so we should end up with a number of uncorrelated test sets that all perform about the same.
            double rankDiff = 0;

            do
            {
                //setup the dates for starting the train and test periods
                DateTime startDate     = (DateTime)testSummary.StartDate;
                DateTime endDate       = (DateTime)testSummary.EndTrainDate; // this is set in the remove correlated method
                DateTime testStartDate = startDate;
                DateTime testEndDate   = testStartDate.AddDays(testSummary.ReduceByRankParams.PeriodDays);

                //keep a list of rankings of each test set compared to the other parameter sets for each cycle
                Dictionary <string, OptimisePerformanceRank> ranks = new Dictionary <string, OptimisePerformanceRank>();

                //cycle through the dates until the end of the train set
                while (testStartDate < endDate)
                {
                    //compile a dictionary of the results for this cycle; the key is the parameter set description
                    Dictionary <string, double> testResults = new Dictionary <string, double>();

                    //Calculate a profit factor for each of the test sets between the current cycle dates
                    foreach (TestSet ts in filteredTestSummary.TestSets)
                    {
                        Trade[] trades = ts.Trades.Where(x => x.OpenTime > testStartDate && x.CloseTime <= testEndDate).ToArray();
                        testResults.Add(ts.Description, PerformanceResult.CalculateProfitFactor(trades));
                    }

                    //rank the test sets
                    var ordered = testResults.OrderByDescending(x => x.Value);
                    int rank    = 1;
                    foreach (KeyValuePair <string, double> result in ordered)
                    {
                        if (!ranks.ContainsKey(result.Key))
                        {
                            ranks.Add(result.Key, new OptimisePerformanceRank());
                        }

                        ranks[result.Key].Add(rank);

                        rank++;
                    }

                    //move the dates along
                    testStartDate = testStartDate.AddDays(testSummary.ReduceByRankParams.PeriodDays);
                    testEndDate   = testEndDate.AddDays(testSummary.ReduceByRankParams.PeriodDays);
                    if (testEndDate > endDate)
                    {
                        testEndDate = endDate;
                    }
                }

                //Order the parameter sets by their average rank across all cycles
                var orderedRanks = ranks.OrderBy(x => x.Value.Average).ToDictionary(t => t.Key, t => t.Value);

                //calculate the maximum difference in average rank between the test sets - we exit the loop if this difference is small enough
                rankDiff = orderedRanks.Max(x => x.Value.Average) - orderedRanks.Min(x => x.Value.Average);

                //copy the current filtered summary before removing from it so we don't modify the version we just ranked
                filteredTestSummary = new TestSummary(filteredTestSummary);

                //remove the lowest ranking test set
                filteredTestSummary.Remove(orderedRanks.LastOrDefault().Key);
            } while (filteredTestSummary.TestSets.Length > 1 && testSummary.ReduceByRankParams.MaxRankDifference > 0 && rankDiff > testSummary.ReduceByRankParams.MaxRankDifference);

            return(filteredTestSummary);
        }
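        //ReduceByRank accumulates per-cycle rankings in OptimisePerformanceRank objects and compares
        //their Average values. That class is not part of this excerpt; a minimal sketch of the two
        //members the method relies on (Add and Average) might look like this.
        public class OptimisePerformanceRankSketch
        {
            private readonly List <int> ranks = new List <int>();

            //record the rank this parameter set achieved in one test cycle
            public void Add(int rank)
            {
                ranks.Add(rank);
            }

            //average rank across all cycles seen so far
            public double Average
            {
                get { return ranks.Count == 0 ? 0 : ranks.Average(); }
            }
        }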