private void DrawHistogram(StatisticsResults sr)
{
    HistogramData histogram = new HistogramData();
    histogram.Calculate(sr.Cycle);
    ICollection<HistogramData.IntervalData> intervals = histogram.Rows;
    const double targetProbabilityValue = (double)1 / 20;

    PlotModel tempPlotModel = new PlotModel("Histogram")
    {
        LegendPlacement = LegendPlacement.Outside,
        LegendPosition = LegendPosition.RightTop,
        LegendOrientation = LegendOrientation.Vertical
    };

    ColumnSeries columnSeries = (ColumnSeries)HistogramPlot(intervals);

    tempPlotModel.Axes.Add(new LinearAxis(AxisPosition.Left, 0.0));
    tempPlotModel.Axes.Add(new CategoryAxis
    {
        LabelField = "Value",
        IntervalLength = targetProbabilityValue,
        ItemsSource = columnSeries.ItemsSource,
        GapWidth = 0.0
    });

    tempPlotModel.Series.Add(columnSeries);
    tempPlotModel.Series.Add(TargetProbabilityLine(targetProbabilityValue));

    SamplePlot.Model = tempPlotModel;
}
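The helpers HistogramPlot and TargetProbabilityLine are not shown in this snippet. Below is a minimal sketch of what TargetProbabilityLine might look like, assuming it only draws a horizontal reference line at the expected per-bin probability (1/20 for 20 equal-width bins) across the category axis; the series title, colour, and the equal-bin assumption are mine, not taken from the original code.

// Hypothetical sketch (not the original implementation): a flat reference
// line at the expected per-bin probability of a uniform distribution.
private LineSeries TargetProbabilityLine(double targetProbability)
{
    // With equal-width bins, 1 / targetProbability recovers the bin count (assumption).
    int binCount = (int)Math.Round(1.0 / targetProbability);

    var line = new LineSeries
    {
        Title = "Uniform expectation",
        Color = OxyColors.Red
    };

    // On a CategoryAxis the x coordinate is the category index, so span
    // from just before the first bin to just after the last one.
    line.Points.Add(new DataPoint(-0.5, targetProbability));
    line.Points.Add(new DataPoint(binCount - 0.5, targetProbability));
    return line;
}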
public void ShouldCollectFxCopStatistics()
{
    results = builder.ProcessBuildResults(successfulBuildLog);
    AssertHasStatistic("FxCop Warnings", 9, results);
    AssertHasStatistic("FxCop Errors", 202, results);
}
private void StartButtonPressed(object sender, RoutedEventArgs args)
{
    try
    {
        Int32 aCoeff = ReadInt32(ACoeffInput);
        Int32 mCoeff = ReadInt32(MCoeffInput);

        // The modulus must be positive, as the error message states.
        if (mCoeff <= 0)
        {
            throw new Exception("M coefficient must be a positive value!");
        }

        Int32 startingNumber = ReadInt32(StartingValueInput);

        LemerGenerator lg = new LemerGenerator(aCoeff, mCoeff, startingNumber);
        lg.GenerateRealization();
        IList<Double> lemerRealization = lg.Realization;

        StatisticsResults sr = new StatisticsResults();
        sr.Calculate(lemerRealization);

        OutStatisticsResults(sr);
        DrawHistogram(sr);
    }
    catch (Exception e)
    {
        MessageWindow mw = new MessageWindow(this, e.Message);
        mw.ShowDialog();
    }
}
static void OutStatisticsResults(StatisticsResults sr)
{
    System.Console.WriteLine("Period {0}", sr.Period);
    System.Console.WriteLine("Aperiodic {0}", sr.Aperiodic);
    System.Console.WriteLine("Expected Value {0}", sr.ExpectedValue);
    System.Console.WriteLine("Deviation {0}", sr.Deviation);
    System.Console.WriteLine("Variance {0}", sr.Variance);
    System.Console.WriteLine("PI {0}", sr.PI);
}
static void Main(string[] args)
{
    System.Console.WriteLine("aCoeff");
    Int32 aCoeff = ReadInt32();
    System.Console.WriteLine("mCoeff");
    Int32 mCoeff = ReadInt32();
    System.Console.WriteLine("startingNumber");
    Int32 startingNumber = ReadInt32();

    LemerGenerator lg = new LemerGenerator(aCoeff, mCoeff, startingNumber);
    lg.GenerateRealization();
    IList<Double> lemerRealization = lg.Realization;

    StatisticsResults sr = new StatisticsResults();
    sr.Calculate(lemerRealization);

    OutStatisticsResults(sr);
}
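LemerGenerator itself is not included among these snippets. The sketch below shows one way it could be implemented, assuming the standard Lehmer recurrence x(n+1) = (a * x(n)) mod m with values normalised to (0, 1) and generation stopping once the raw state repeats; only the constructor signature and the GenerateRealization/Realization members are taken from the calling code above, everything else is an assumption.

using System;
using System.Collections.Generic;

// Hypothetical sketch of the generator assumed by the snippets above.
// Lehmer recurrence: x(n+1) = (a * x(n)) mod m, normalised by m.
public class LemerGenerator
{
    private readonly long _a;
    private readonly long _m;
    private readonly long _seed;

    public LemerGenerator(int aCoeff, int mCoeff, int startingNumber)
    {
        _a = aCoeff;
        _m = mCoeff;
        _seed = startingNumber;
    }

    public IList<double> Realization { get; private set; }

    public void GenerateRealization()
    {
        var values = new List<double>();
        var seen = new HashSet<long>();
        long x = _seed;

        // Stop once the raw state repeats (assumption): the sequence is
        // periodic, so further values add no new information.
        while (seen.Add(x))
        {
            x = (_a * x) % _m;
            values.Add((double)x / _m);
        }

        Realization = values;
    }
}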
public void ShouldPopulateNUnitSummaryFromLog()
{
    string xml = @"<task>
                       <test-results total=""6"" failures=""1"" not-run=""2"" date=""2005-04-29"" time=""9:02 PM"">
                           <test-suite />
                       </test-results>
                       <test-results total=""1"" failures=""1"" not-run=""1"" date=""2005-04-29"" time=""9:02 PM"">
                           <test-suite />
                       </test-results>
                   </task>";

    result.AddTaskResult(xml);
    results = builder.ProcessBuildResults(result);

    AssertHasStatistic("TestCount", 7, results);
    AssertHasStatistic("TestFailures", 2, results);
    AssertHasStatistic("TestIgnored", 3, results);
}
public void AssertHasStatistic(string name, object value, StatisticsResults results)
{
    Assert.AreEqual(value,
                    results.Find(delegate(StatisticResult obj) { return obj.StatName.Equals(name); }).Value,
                    "Wrong statistic for {0}", name);
}
public void ShouldPopulateTimingsFromIntegrationResult()
{
    result.StartTime = new DateTime(2005, 03, 12, 01, 13, 00);
    result.EndTime = new DateTime(2005, 03, 12, 01, 45, 00);
    result.ProjectName = "Foo";

    results = builder.ProcessBuildResults(result);

    AssertHasStatistic("StartTime", DateUtil.FormatDate(result.StartTime), results);
    AssertHasStatistic("Duration", new TimeSpan(0, 32, 0).ToString(), results);
    //AssertHasStatistic("ProjectName", "Foo", results);
}
public void ShowOutputOfStatisticsInConsole()
{
    StringBuilder buffer = new StringBuilder();
    System.IO.StreamReader reader = System.IO.File.OpenText(@"resources\UnitTestResults2.xml");
    var buildlog = reader.ReadToEnd();
    reader.Close();

    StatisticsBuilder builder = new StatisticsBuilder();

    Statistic info = new Statistic();
    info.Name = "Statistic TestsTotalCount no ns";
    info.Xpath = @"//TestRun/ResultSummary/Counters/@total";
    builder.Add(info);

    FirstMatch info2 = new FirstMatch();
    info2.Name = "FirstMatch TestsTotalCount no ns";
    info2.Xpath = @"//TestRun/ResultSummary/Counters/@total";
    builder.Add(info2);

    FirstMatch info1 = new FirstMatch();
    info1.Name = "FirstMatch TestsTotalCount with ns";
    info1.Xpath = @"//mstest:TestRun/mstest:ResultSummary/mstest:Counters/@total";
    info1.NameSpaces = new StatisticsNamespaceMapping[1];
    info1.NameSpaces[0] = new StatisticsNamespaceMapping("mstest", @"http://microsoft.com/schemas/VisualStudio/TeamTest/2010");
    builder.Add(info1);

    Statistic info3 = new Statistic();
    info3.Name = "Statistic sum(TestsTotalCount) no ns";
    info3.Xpath = @"sum(//TestRun/ResultSummary/Counters/@total)";
    builder.Add(info3);

    Statistic info4 = new Statistic();
    info4.Name = "Statistic sum(TestsTotalCount) with ns";
    info4.Xpath = @"sum(//mstest:TestRun/mstest:ResultSummary/mstest:Counters/@total)";
    info4.NameSpaces = new StatisticsNamespaceMapping[1];
    info4.NameSpaces[0] = new StatisticsNamespaceMapping("mstest", @"http://microsoft.com/schemas/VisualStudio/TeamTest/2010");
    builder.Add(info4);

    // seems impossible in xsl 1.0
    Statistic imp01 = new Statistic();
    imp01.Name = "impossible : Statistic TestsExecutedCount xmlns";
    imp01.Xpath = @"/xmlns:cruisecontrol/mstest:TestRun/mstest:ResultSummary/mstest:Counters/@executed";
    imp01.NameSpaces = new StatisticsNamespaceMapping[1];
    imp01.NameSpaces[0] = new StatisticsNamespaceMapping("mstest", @"http://microsoft.com/schemas/VisualStudio/TeamTest/2010");
    builder.Add(imp01);

    Statistic imp02 = new Statistic();
    imp02.Name = "impossible : Statistic TestsExecutedCount2 not namespace";
    imp02.Xpath = @"/cruisecontrol/mstest:TestRun/mstest:ResultSummary/mstest:Counters/@executed";
    imp02.NameSpaces = new StatisticsNamespaceMapping[1];
    imp02.NameSpaces[0] = new StatisticsNamespaceMapping("mstest", @"http://microsoft.com/schemas/VisualStudio/TeamTest/2010");
    builder.Add(imp02);

    StatisticsResults results = builder.ProcessBuildResults(buildlog);

    foreach (var x in results)
    {
        Console.WriteLine("Result {0} : {1}", x.StatName, x.Value);
    }
}
/// <summary>
/// Send a final analysis result back to the IDE.
/// </summary>
/// <param name="job">Lean AlgorithmJob task</param>
/// <param name="orders">Collection of orders from the algorithm</param>
/// <param name="profitLoss">Collection of time-profit values for the algorithm</param>
/// <param name="holdings">Current holdings state for the algorithm</param>
/// <param name="statisticsResults">Statistics information for the algorithm (empty if not finished)</param>
/// <param name="banner">Runtime statistics banner information</param>
public void SendFinalResult(AlgorithmNodePacket job, Dictionary<int, Order> orders, Dictionary<DateTime, decimal> profitLoss, Dictionary<string, Holding> holdings, StatisticsResults statisticsResults, Dictionary<string, string> banner)
{
    try
    {
        FinalStatistics = statisticsResults.Summary;

        //Convert local dictionary:
        var charts = new Dictionary<string, Chart>(Charts);
        _processingFinalPacket = true;

        // clear the trades collection before placing inside the backtest result
        foreach (var ap in statisticsResults.RollingPerformances.Values)
        {
            ap.ClosedTrades.Clear();
        }

        //Create a result packet to send to the browser.
        var result = new BacktestResultPacket((BacktestNodePacket)job,
            new BacktestResult(charts, orders, profitLoss, statisticsResults.Summary, banner, statisticsResults.RollingPerformances, statisticsResults.TotalPerformance), 1m)
        {
            ProcessingTime = (DateTime.Now - _startTime).TotalSeconds,
            DateFinished = DateTime.Now,
            Progress = 1
        };

        //Place result into storage.
        StoreResult(result);

        //Second, send the truncated packet:
        _messagingHandler.Send(result);

        Log.Trace("BacktestingResultHandler.SendAnalysisResult(): Processed final packet");
    }
    catch (Exception err)
    {
        Log.Error(err);
    }
}
public void SendFinalResult(AlgorithmNodePacket job, Dictionary<int, Order> orders, Dictionary<DateTime, decimal> profitLoss, Dictionary<string, Holding> holdings, StatisticsResults statisticsResults, Dictionary<string, string> banner)
{
    // No-op: this handler does not process final results.
}
public void SendFinalResult(AlgorithmNodePacket job, Dictionary<int, Order> orders, Dictionary<DateTime, decimal> profitLoss, Dictionary<string, Holding> holdings, StatisticsResults statisticsResults, Dictionary<string, string> banner)
{
    _shadow.SendFinalResult(job, orders, profitLoss, holdings, statisticsResults, banner);
    FullResults = StatisticsAdapter.Transform(statisticsResults.TotalPerformance);
}
/// <summary>
/// Send a final analysis result back to the IDE.
/// </summary>
/// <param name="job">Lean AlgorithmJob task</param>
/// <param name="orders">Collection of orders from the algorithm</param>
/// <param name="profitLoss">Collection of time-profit values for the algorithm</param>
/// <param name="holdings">Current holdings state for the algorithm</param>
/// <param name="statisticsResults">Statistics information for the algorithm (empty if not finished)</param>
/// <param name="banner">Runtime statistics banner information</param>
public void SendFinalResult(AlgorithmNodePacket job, Dictionary<int, Order> orders, Dictionary<DateTime, decimal> profitLoss, Dictionary<string, Holding> holdings, StatisticsResults statisticsResults, Dictionary<string, string> banner)
{
    try
    {
        //Convert local dictionary:
        var charts = new Dictionary<string, Chart>(Charts);
        _processingFinalPacket = true;

        //Create a result packet to send to the browser.
        BacktestResultPacket result = new BacktestResultPacket((BacktestNodePacket)job,
            new BacktestResult(charts, orders, profitLoss, statisticsResults.Summary, statisticsResults.RollingPerformances), 1m)
        {
            ProcessingTime = (DateTime.Now - _startTime).TotalSeconds,
            DateFinished = DateTime.Now,
            Progress = 1
        };

        //Place result into storage.
        StoreResult(result);

        //Truncate packet to fit within 32kb of messaging limits.
        result.Results = new BacktestResult();

        //Second, send the truncated packet:
        _messagingHandler.BacktestResult(result, finalPacket: true);

        Log.Trace("BacktestingResultHandler.SendAnalysisResult(): Processed final packet");
    }
    catch (Exception err)
    {
        Log.Error("Algorithm.Worker.SendResult(): " + err.Message);
    }
}
/// <summary>
/// Algorithm final analysis results dumped to the console.
/// </summary>
/// <param name="job">Lean AlgorithmJob task</param>
/// <param name="orders">Collection of orders from the algorithm</param>
/// <param name="profitLoss">Collection of time-profit values for the algorithm</param>
/// <param name="holdings">Current holdings state for the algorithm</param>
/// <param name="statisticsResults">Statistics information for the algorithm (empty if not finished)</param>
/// <param name="banner">Runtime statistics banner information</param>
public void SendFinalResult(AlgorithmNodePacket job, Dictionary<int, Order> orders, Dictionary<DateTime, decimal> profitLoss, Dictionary<string, Holding> holdings, StatisticsResults statisticsResults, Dictionary<string, string> banner)
{
    // uncomment these code traces to help write regression tests
    //Console.WriteLine("var statistics = new Dictionary<string, string>();");

    // Bleh. Nicely format statistical analysis on your algorithm results. Save to file etc.
    foreach (var pair in statisticsResults.Summary)
    {
        Log.Trace("STATISTICS:: " + pair.Key + " " + pair.Value);
        //Console.WriteLine(string.Format("statistics.Add(\"{0}\",\"{1}\");", pair.Key, pair.Value));
    }

    //foreach (var pair in statisticsResults.RollingPerformances)
    //{
    //    Log.Trace("ROLLINGSTATS:: " + pair.Key + " SharpeRatio: " + Math.Round(pair.Value.PortfolioStatistics.SharpeRatio, 3));
    //}

    FinalStatistics = statisticsResults.Summary;
}
/// <summary>
/// Extract all the statistics from the specified XML build results document.
/// </summary>
/// <param name="doc">The build results.</param>
/// <returns>The set of statistics.</returns>
private StatisticsResults ProcessLog(IXPathNavigable doc)
{
    XPathNavigator nav = doc.CreateNavigator();
    StatisticsResults statisticResults = new StatisticsResults();
    foreach (StatisticBase s in logStatistics)
    {
        statisticResults.Add(s.Apply(nav));
    }
    return statisticResults;
}
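The per-statistic Apply call is what actually evaluates the configured XPath. Below is a minimal sketch of that step, assuming StatisticResult exposes a (name, value) constructor and that plain location paths should yield the first matching node's value; this is an illustration, not the library's implementation, and it ignores the namespace mappings exercised in the earlier test.

// Hypothetical sketch (uses System.Xml.XPath): evaluate one configured XPath
// against the build log navigator and wrap the outcome in a StatisticResult.
private static StatisticResult ApplyXpath(string name, string xpath, XPathNavigator nav)
{
    // Evaluate returns a number/string/bool for expressions such as sum(...),
    // or a node iterator for plain location paths.
    object evaluated = nav.Evaluate(xpath);

    var iterator = evaluated as XPathNodeIterator;
    if (iterator != null)
    {
        evaluated = iterator.MoveNext() ? iterator.Current.Value : null;
    }

    return new StatisticResult(name, evaluated);
}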
/// <summary>
/// Algorithm final analysis results dumped to the console.
/// </summary>
/// <param name="job">Lean AlgorithmJob task</param>
/// <param name="orders">Collection of orders from the algorithm</param>
/// <param name="profitLoss">Collection of time-profit values for the algorithm</param>
/// <param name="holdings">Current holdings state for the algorithm</param>
/// <param name="statisticsResults">Statistics information for the algorithm (empty if not finished)</param>
/// <param name="banner">Runtime statistics banner information</param>
public void SendFinalResult(AlgorithmNodePacket job, Dictionary<int, Order> orders, Dictionary<DateTime, decimal> profitLoss, Dictionary<string, Holding> holdings, StatisticsResults statisticsResults, Dictionary<string, string> banner)
{
    // uncomment these code traces to help write regression tests
    //Log.Trace("var statistics = new Dictionary<string, string>();");

    // Bleh. Nicely format statistical analysis on your algorithm results. Save to file etc.
    foreach (var pair in statisticsResults.Summary)
    {
        DebugMessage("STATISTICS:: " + pair.Key + " " + pair.Value);
    }

    FinalStatistics = statisticsResults.Summary;
}
/// <summary>
/// Runs a single backtest/live job from the job queue
/// </summary>
/// <param name="job">The algorithm job to be processed</param>
/// <param name="manager"></param>
/// <param name="assemblyPath">The path to the algorithm's assembly</param>
public void Run(AlgorithmNodePacket job, AlgorithmManager manager, string assemblyPath)
{
    var algorithm = default(IAlgorithm);
    var algorithmManager = manager;

    try
    {
        //Reset thread holders.
        var initializeComplete = false;
        Thread threadFeed = null;
        Thread threadTransactions = null;
        Thread threadResults = null;
        Thread threadRealTime = null;
        Thread threadAlphas = null;

        //-> Initialize messaging system
        _systemHandlers.Notify.SetAuthentication(job);

        //-> Set the result handler type for this algorithm job, and launch the associated result thread.
        _algorithmHandlers.Results.Initialize(job, _systemHandlers.Notify, _systemHandlers.Api, _algorithmHandlers.DataFeed, _algorithmHandlers.Setup, _algorithmHandlers.Transactions);

        threadResults = new Thread(_algorithmHandlers.Results.Run, 0) { IsBackground = true, Name = "Result Thread" };
        threadResults.Start();

        IBrokerage brokerage = null;
        try
        {
            // Save algorithm to cache, load algorithm instance:
            algorithm = _algorithmHandlers.Setup.CreateAlgorithmInstance(job, assemblyPath);

            // Set algorithm in ILeanManager
            _systemHandlers.LeanManager.SetAlgorithm(algorithm);

            // initialize the alphas handler with the algorithm instance
            _algorithmHandlers.Alphas.Initialize(job, algorithm, _systemHandlers.Notify, _systemHandlers.Api);

            // Initialize the brokerage
            IBrokerageFactory factory;
            brokerage = _algorithmHandlers.Setup.CreateBrokerage(job, algorithm, out factory);

            // Initialize the data feed before we initialize so he can intercept added securities/universes via events
            _algorithmHandlers.DataFeed.Initialize(algorithm, job, _algorithmHandlers.Results, _algorithmHandlers.MapFileProvider, _algorithmHandlers.FactorFileProvider, _algorithmHandlers.DataProvider);

            // set the order processor on the transaction manager (needs to be done before initializing BrokerageHistoryProvider)
            algorithm.Transactions.SetOrderProcessor(_algorithmHandlers.Transactions);

            // set the history provider before setting up the algorithm
            var historyProvider = GetHistoryProvider(job.HistoryProvider);
            if (historyProvider is BrokerageHistoryProvider)
            {
                (historyProvider as BrokerageHistoryProvider).SetBrokerage(brokerage);
            }

            var historyDataCacheProvider = new ZipDataCacheProvider(_algorithmHandlers.DataProvider);
            historyProvider.Initialize(job, _algorithmHandlers.DataProvider, historyDataCacheProvider, _algorithmHandlers.MapFileProvider, _algorithmHandlers.FactorFileProvider, progress =>
            {
                // send progress updates to the result handler only during initialization
                if (!algorithm.GetLocked() || algorithm.IsWarmingUp)
                {
                    _algorithmHandlers.Results.SendStatusUpdate(AlgorithmStatus.History, string.Format("Processing history {0}%...", progress));
                }
            });

            algorithm.HistoryProvider = historyProvider;

            // initialize the default brokerage message handler
            algorithm.BrokerageMessageHandler = factory.CreateBrokerageMessageHandler(algorithm, job, _systemHandlers.Api);

            //Initialize the internal state of algorithm and job: executes the algorithm.Initialize() method.
            initializeComplete = _algorithmHandlers.Setup.Setup(algorithm, brokerage, job, _algorithmHandlers.Results, _algorithmHandlers.Transactions, _algorithmHandlers.RealTime);

            // set this again now that we've actually added securities
            _algorithmHandlers.Results.SetAlgorithm(algorithm);

            // alpha handler needs start/end dates to determine sample step sizes
            _algorithmHandlers.Alphas.OnAfterAlgorithmInitialized(algorithm);

            //If there are any reasons it failed, pass these back to the IDE.
            if (!initializeComplete || algorithm.ErrorMessages.Count > 0 || _algorithmHandlers.Setup.Errors.Count > 0)
            {
                initializeComplete = false;

                //Get all the error messages: internal in algorithm and external in setup handler.
                var errorMessage = String.Join(",", algorithm.ErrorMessages);
                errorMessage += String.Join(",", _algorithmHandlers.Setup.Errors.Select(e => e.Message));
                Log.Error("Engine.Run(): " + errorMessage);
                _algorithmHandlers.Results.RuntimeError(errorMessage);
                _systemHandlers.Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, errorMessage);
            }
        }
        catch (Exception err)
        {
            Log.Error(err);
            var runtimeMessage = "Algorithm.Initialize() Error: " + err.Message + " Stack Trace: " + err.StackTrace;
            _algorithmHandlers.Results.RuntimeError(runtimeMessage, err.StackTrace);
            _systemHandlers.Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, runtimeMessage);
        }

        // log the job endpoints
        Log.Trace("JOB HANDLERS: ");
        Log.Trace(" DataFeed: " + _algorithmHandlers.DataFeed.GetType().FullName);
        Log.Trace(" Setup: " + _algorithmHandlers.Setup.GetType().FullName);
        Log.Trace(" RealTime: " + _algorithmHandlers.RealTime.GetType().FullName);
        Log.Trace(" Results: " + _algorithmHandlers.Results.GetType().FullName);
        Log.Trace(" Transactions: " + _algorithmHandlers.Transactions.GetType().FullName);
        Log.Trace(" Alpha: " + _algorithmHandlers.Alphas.GetType().FullName);
        if (algorithm != null && algorithm.HistoryProvider != null)
        {
            Log.Trace(" History Provider: " + algorithm.HistoryProvider.GetType().FullName);
        }
        if (job is LiveNodePacket)
        {
            Log.Trace(" Brokerage: " + brokerage.GetType().FullName);
        }

        //-> Using the job + initialization: load the designated handlers:
        if (initializeComplete)
        {
            //-> Reset the backtest stopwatch; we're now running the algorithm.
            var startTime = DateTime.Now;

            //Set algorithm as locked; set it to live mode if we're trading live, and set it to locked for no further updates.
            algorithm.SetAlgorithmId(job.AlgorithmId);
            algorithm.SetLocked();

            //Load the associated handlers for transaction and realtime events:
            _algorithmHandlers.Transactions.Initialize(algorithm, brokerage, _algorithmHandlers.Results);
            _algorithmHandlers.RealTime.Setup(algorithm, job, _algorithmHandlers.Results, _systemHandlers.Api);

            // wire up the brokerage message handler
            brokerage.Message += (sender, message) =>
            {
                algorithm.BrokerageMessageHandler.Handle(message);

                // fire brokerage message events
                algorithm.OnBrokerageMessage(message);
                switch (message.Type)
                {
                    case BrokerageMessageType.Disconnect:
                        algorithm.OnBrokerageDisconnect();
                        break;
                    case BrokerageMessageType.Reconnect:
                        algorithm.OnBrokerageReconnect();
                        break;
                }
            };

            //Send status to user the algorithm is now executing.
            _algorithmHandlers.Results.SendStatusUpdate(AlgorithmStatus.Running);

            //Launch the data, transaction and realtime handlers into dedicated threads
            threadFeed = new Thread(_algorithmHandlers.DataFeed.Run) { IsBackground = true, Name = "DataFeed Thread" };
            threadTransactions = new Thread(_algorithmHandlers.Transactions.Run) { IsBackground = true, Name = "Transaction Thread" };
            threadRealTime = new Thread(_algorithmHandlers.RealTime.Run) { IsBackground = true, Name = "RealTime Thread" };
            threadAlphas = new Thread(() => _algorithmHandlers.Alphas.Run()) { IsBackground = true, Name = "Alpha Thread" };

            //Launch the data feed, result sending, and transaction models/handlers in separate threads.
            threadFeed.Start();         // Data feed pushing data packets into thread bridge;
            threadTransactions.Start(); // Transaction modeller scanning new order requests
            threadRealTime.Start();     // RealTime scan time for time based events:
            threadAlphas.Start();       // Alpha thread for processing algorithm alphas

            // Result manager scanning message queue: (started earlier)
            _algorithmHandlers.Results.DebugMessage(string.Format("Launching analysis for {0} with LEAN Engine v{1}", job.AlgorithmId, Globals.Version));

            try
            {
                //Create a new engine isolator class
                var isolator = new Isolator();

                // Execute the Algorithm Code:
                var complete = isolator.ExecuteWithTimeLimit(_algorithmHandlers.Setup.MaximumRuntime, algorithmManager.TimeLoopWithinLimits, () =>
                {
                    try
                    {
                        //Run Algorithm Job:
                        // -> Using this Data Feed,
                        // -> Send Orders to this TransactionHandler,
                        // -> Send Results to ResultHandler.
                        algorithmManager.Run(job, algorithm, _algorithmHandlers.DataFeed, _algorithmHandlers.Transactions, _algorithmHandlers.Results, _algorithmHandlers.RealTime, _systemHandlers.LeanManager, _algorithmHandlers.Alphas, isolator.CancellationToken);
                    }
                    catch (Exception err)
                    {
                        //Debugging at this level is difficult, stack trace needed.
                        Log.Error(err);
                        algorithm.RunTimeError = err;
                        algorithmManager.SetStatus(AlgorithmStatus.RuntimeError);
                        return;
                    }

                    Log.Trace("Engine.Run(): Exiting Algorithm Manager");
                }, job.Controls.RamAllocation);

                if (!complete)
                {
                    Log.Error("Engine.Main(): Failed to complete in time: " + _algorithmHandlers.Setup.MaximumRuntime.ToString("F"));
                    throw new Exception("Failed to complete algorithm within " + _algorithmHandlers.Setup.MaximumRuntime.ToString("F") + " seconds. Please make it run faster.");
                }

                // Algorithm runtime error:
                if (algorithm.RunTimeError != null)
                {
                    HandleAlgorithmError(job, algorithm.RunTimeError);
                }
            }
            catch (Exception err)
            {
                //Error running the user algorithm: purge datafeed, send error messages, set algorithm status to failed.
                HandleAlgorithmError(job, err);
            }

            try
            {
                var trades = algorithm.TradeBuilder.ClosedTrades;
                var charts = new Dictionary<string, Chart>(_algorithmHandlers.Results.Charts);
                var orders = new Dictionary<int, Order>(_algorithmHandlers.Transactions.Orders);
                var holdings = new Dictionary<string, Holding>();
                var banner = new Dictionary<string, string>();
                var statisticsResults = new StatisticsResults();

                var csvTransactionsFileName = Config.Get("transaction-log");
                if (!string.IsNullOrEmpty(csvTransactionsFileName))
                {
                    SaveListOfTrades(_algorithmHandlers.Transactions, csvTransactionsFileName);
                }

                try
                {
                    //Generates error when things don't exist (no charting logged, runtime errors in main algo execution)
                    const string strategyEquityKey = "Strategy Equity";
                    const string equityKey = "Equity";
                    const string dailyPerformanceKey = "Daily Performance";
                    const string benchmarkKey = "Benchmark";

                    // make sure we've taken samples for these series before just blindly requesting them
                    if (charts.ContainsKey(strategyEquityKey) &&
                        charts[strategyEquityKey].Series.ContainsKey(equityKey) &&
                        charts[strategyEquityKey].Series.ContainsKey(dailyPerformanceKey) &&
                        charts.ContainsKey(benchmarkKey) &&
                        charts[benchmarkKey].Series.ContainsKey(benchmarkKey))
                    {
                        var equity = charts[strategyEquityKey].Series[equityKey].Values;
                        var performance = charts[strategyEquityKey].Series[dailyPerformanceKey].Values;
                        var profitLoss = new SortedDictionary<DateTime, decimal>(algorithm.Transactions.TransactionRecord);
                        var totalTransactions = algorithm.Transactions.GetOrders(x => x.Status.IsFill()).Count();
                        var benchmark = charts[benchmarkKey].Series[benchmarkKey].Values;

                        statisticsResults = StatisticsBuilder.Generate(trades, profitLoss, equity, performance, benchmark, _algorithmHandlers.Setup.StartingPortfolioValue, algorithm.Portfolio.TotalFees, totalTransactions);

                        //Some users have $0 in their brokerage account / starting cash of $0. Prevent divide by zero errors
                        var netReturn = _algorithmHandlers.Setup.StartingPortfolioValue > 0
                            ? (algorithm.Portfolio.TotalPortfolioValue - _algorithmHandlers.Setup.StartingPortfolioValue) / _algorithmHandlers.Setup.StartingPortfolioValue
                            : 0;

                        //Add other fixed parameters.
                        banner.Add("Unrealized", "$" + algorithm.Portfolio.TotalUnrealizedProfit.ToString("N2"));
                        banner.Add("Fees", "-$" + algorithm.Portfolio.TotalFees.ToString("N2"));
                        banner.Add("Net Profit", "$" + algorithm.Portfolio.TotalProfit.ToString("N2"));
                        banner.Add("Return", netReturn.ToString("P"));
                        banner.Add("Equity", "$" + algorithm.Portfolio.TotalPortfolioValue.ToString("N2"));
                    }
                }
                catch (Exception err)
                {
                    Log.Error(err, "Error generating statistics packet");
                }

                //Diagnostics Completed, Send Result Packet:
                var totalSeconds = (DateTime.Now - startTime).TotalSeconds;
                var dataPoints = algorithmManager.DataPoints + algorithm.HistoryProvider.DataPointCount;
                _algorithmHandlers.Results.DebugMessage(string.Format("Algorithm Id:({0}) completed in {1} seconds at {2}k data points per second. Processing total of {3} data points.", job.AlgorithmId, totalSeconds.ToString("F2"), ((dataPoints / (double)1000) / totalSeconds).ToString("F0"), dataPoints.ToString("N0")));

                _algorithmHandlers.Results.SendFinalResult(job, orders, algorithm.Transactions.TransactionRecord, holdings, algorithm.Portfolio.CashBook, statisticsResults, banner);
            }
            catch (Exception err)
            {
                Log.Error(err, "Error sending analysis results");
            }

            //Before we return, send terminate commands to close up the threads
            _algorithmHandlers.Transactions.Exit();
            _algorithmHandlers.DataFeed.Exit();
            _algorithmHandlers.RealTime.Exit();
            _algorithmHandlers.Alphas.Exit();
        }

        //Close result handler:
        _algorithmHandlers.Results.Exit();

        //Wait for the threads to complete:
        var ts = Stopwatch.StartNew();
        while ((_algorithmHandlers.Results.IsActive
                || (_algorithmHandlers.Transactions != null && _algorithmHandlers.Transactions.IsActive)
                || (_algorithmHandlers.DataFeed != null && _algorithmHandlers.DataFeed.IsActive)
                || (_algorithmHandlers.RealTime != null && _algorithmHandlers.RealTime.IsActive)
                || (_algorithmHandlers.Alphas != null && _algorithmHandlers.Alphas.IsActive))
               && ts.ElapsedMilliseconds < 30 * 1000)
        {
            Thread.Sleep(100);
            Log.Trace("Waiting for threads to exit...");
        }

        //Terminate threads still in active state.
        if (threadFeed != null && threadFeed.IsAlive)
        {
            threadFeed.Abort();
        }
        if (threadTransactions != null && threadTransactions.IsAlive)
        {
            threadTransactions.Abort();
        }
        if (threadResults != null && threadResults.IsAlive)
        {
            threadResults.Abort();
        }
        if (threadAlphas != null && threadAlphas.IsAlive)
        {
            threadAlphas.Abort();
        }

        if (brokerage != null)
        {
            Log.Trace("Engine.Run(): Disconnecting from brokerage...");
            brokerage.Disconnect();
            brokerage.Dispose();
        }
        if (_algorithmHandlers.Setup != null)
        {
            Log.Trace("Engine.Run(): Disposing of setup handler...");
            _algorithmHandlers.Setup.Dispose();
        }

        Log.Trace("Engine.Main(): Analysis Completed and Results Posted.");
    }
    catch (Exception err)
    {
        Log.Error(err, "Error running algorithm");
    }
    finally
    {
        //No matter what for live mode; make sure we've set algorithm status in the API for "not running" conditions:
        if (_liveMode && algorithmManager.State != AlgorithmStatus.Running && algorithmManager.State != AlgorithmStatus.RuntimeError)
        {
            _systemHandlers.Api.SetAlgorithmStatus(job.AlgorithmId, algorithmManager.State);
        }

        _algorithmHandlers.Results.Exit();
        _algorithmHandlers.DataFeed.Exit();
        _algorithmHandlers.Transactions.Exit();
        _algorithmHandlers.RealTime.Exit();
    }
}
/// <summary>
/// Write the specified collection of statistics to the CSV
/// statistics file, creating it if it does not already exist.
/// </summary>
/// <param name="statisticsResults"></param>
/// <param name="statistics"></param>
/// <param name="integrationResult">The build for which the
/// statistics were collected.</param>
/// <remarks>
/// Note: The <see cref="StatisticsResults.AppendCsv"/> method does not
/// reconcile the specified statistics against the existing content of
/// the file. If statistics are added or removed over time, the headings
/// and values may not match up correctly.
/// </remarks>
private static void UpdateCsvFile(StatisticsResults statisticsResults, List<StatisticBase> statistics, IIntegrationResult integrationResult)
{
    string csvFile = CsvStatisticsFile(integrationResult);
    statisticsResults.AppendCsv(csvFile, statistics);
}
private void OutStatisticsResults(StatisticsResults sr)
{
    PeriodOutput.Text = sr.Period.ToString(CultureInfo.InvariantCulture);
    AperiodicOutput.Text = sr.Aperiodic.ToString(CultureInfo.InvariantCulture);
    ExpectedValueOutput.Text = sr.ExpectedValue.ToString(CultureInfo.InvariantCulture);
    DeviationValue.Text = sr.Deviation.ToString(CultureInfo.InvariantCulture);
    SigmaOutput.Text = sr.Variance.ToString(CultureInfo.InvariantCulture);
    PiValue.Text = sr.PI.ToString(CultureInfo.InvariantCulture);
    PiValueReal.Text = (Math.PI / 4).ToString(CultureInfo.InvariantCulture);
}
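PiValueReal is set to Math.PI / 4, which suggests sr.PI is a Monte Carlo estimate of π/4 computed from the generated realization. Below is a sketch of one way such an estimate could be produced, assuming non-overlapping pairs of consecutive values are tested against the unit quarter-circle; the actual StatisticsResults.Calculate implementation is not shown here, so this estimator is an assumption.

// Hypothetical sketch: estimate PI/4 as the fraction of (x, y) pairs
// drawn from the realization that fall inside the unit quarter-circle.
private static double EstimateQuarterPi(IList<double> realization)
{
    int pairs = 0;
    int inside = 0;

    // Use non-overlapping pairs (x0, x1), (x2, x3), ... (assumption).
    for (int i = 0; i + 1 < realization.Count; i += 2)
    {
        double x = realization[i];
        double y = realization[i + 1];
        pairs++;
        if (x * x + y * y < 1.0)
        {
            inside++;
        }
    }

    return pairs > 0 ? (double)inside / pairs : 0.0;
}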