public void Initialize(AlgorithmNodePacket job,
     IMessagingHandler messagingHandler,
     IApi api,
     IDataFeed dataFeed,
     ISetupHandler setupHandler,
     ITransactionHandler transactionHandler)
 {
     _job = job;
 }
 /// <summary>
 /// Initializes a new instance of the <see cref="LeanEngineAlgorithmHandlers"/> class from the specified handlers
 /// </summary>
 /// <param name="results">The result handler for communicating results from the algorithm</param>
 /// <param name="setup">The setup handler used to initialize algorithm state</param>
 /// <param name="dataFeed">The data feed handler used to pump data to the algorithm</param>
 /// <param name="transactions">The transaction handler used to process orders from the algorithm</param>
 /// <param name="realTime">The real time handler used to process real time events</param>
 /// <param name="historyProvider">The history provider used to process historical data requests</param>
 /// <param name="commandQueue">The command queue handler used to receive external commands for the algorithm</param>
 /// <param name="mapFileProvider">The map file provider used to retrieve map files for the data feed</param>
 public LeanEngineAlgorithmHandlers(IResultHandler results,
     ISetupHandler setup,
     IDataFeed dataFeed,
     ITransactionHandler transactions,
     IRealTimeHandler realTime,
     IHistoryProvider historyProvider,
     ICommandQueueHandler commandQueue,
     IMapFileProvider mapFileProvider
     )
 {
     if (results == null)
     {
         throw new ArgumentNullException("results");
     }
     if (setup == null)
     {
         throw new ArgumentNullException("setup");
     }
     if (dataFeed == null)
     {
         throw new ArgumentNullException("dataFeed");
     }
     if (transactions == null)
     {
         throw new ArgumentNullException("transactions");
     }
     if (realTime == null)
     {
         throw new ArgumentNullException("realTime");
     }
     if (historyProvider == null)
     {
         throw new ArgumentNullException("realTime");
     }
     if (commandQueue == null)
     {
         throw new ArgumentNullException("commandQueue");
     }
     if (mapFileProvider == null)
     {
         throw new ArgumentNullException("mapFileProvider");
     }
     _results = results;
     _setup = setup;
     _dataFeed = dataFeed;
     _transactions = transactions;
     _realTime = realTime;
     _historyProvider = historyProvider;
     _commandQueue = commandQueue;
     _mapFileProvider = mapFileProvider;
 }
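For reference, a small hypothetical snippet showing the null-guard contract documented above: every handler is required, and the constructor names the first missing one via ArgumentNullException.ParamName. The try/catch harness and console output here are illustrative only and not part of LEAN.

 // Hypothetical usage sketch: deliberately pass nulls to watch the guard fire.
 // Assumes the QuantConnect.Lean.Engine namespaces are referenced.
 try
 {
     var handlers = new LeanEngineAlgorithmHandlers(null, null, null, null, null, null, null, null);
 }
 catch (ArgumentNullException ex)
 {
     Console.WriteLine("Missing handler: " + ex.ParamName); // prints "results"
 }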
Example 3
        /********************************************************
        * CLASS METHODS
        *********************************************************/
        /// <summary>
        /// Launch the algorithm manager to run this strategy
        /// </summary>
        /// <param name="job">Algorithm job</param>
        /// <param name="algorithm">Algorithm instance</param>
        /// <param name="feed">Datafeed object</param>
        /// <param name="transactions">Transaction manager object</param>
        /// <param name="results">Result handler object</param>
        /// <param name="setup">Setup handler object</param>
        /// <param name="realtime">Realtime processing object</param>
        /// <remarks>Modify with caution</remarks>
        public static void Run(AlgorithmNodePacket job, IAlgorithm algorithm, IDataFeed feed, ITransactionHandler transactions, IResultHandler results, ISetupHandler setup, IRealTimeHandler realtime)
        {
            //Initialize:
            var backwardsCompatibilityMode = false;
            var tradebarsType = typeof (TradeBars);
            var ticksType = typeof(Ticks);
            var startingPerformance = setup.StartingCapital;
            var backtestMode = (job.Type == PacketType.BacktestNode);
            var methodInvokers = new Dictionary<Type, MethodInvoker>();

            //Initialize Properties:
            _frontier = setup.StartingDate;
            _runtimeError = null;
            _algorithmId = job.AlgorithmId;
            _algorithmState = AlgorithmStatus.Running;
            _previousTime = setup.StartingDate.Date;

            //Create the method accessors to push generic types into algorithm: Find all OnData events:

            //Algorithm 1.0 Data Accessors.
            //If the user defined these methods, add them manually. This keeps backwards compatibility with algorithm 1.0.
            var oldTradeBarsMethodInfo = (algorithm.GetType()).GetMethod("OnTradeBar",   new[] { typeof(Dictionary<string, TradeBar>) });
            var oldTicksMethodInfo = (algorithm.GetType()).GetMethod("OnTick", new[] { typeof(Dictionary<string, List<Tick>>) });

            //Algorithm 2.0 Data Generics Accessors.
            //New hidden access to tradebars with custom type.
            var newTradeBarsMethodInfo = (algorithm.GetType()).GetMethod("OnData", new[] { tradebarsType });
            var newTicksMethodInfo = (algorithm.GetType()).GetMethod("OnData", new[] { ticksType });

            if (newTradeBarsMethodInfo == null && newTicksMethodInfo == null)
            {
                backwardsCompatibilityMode = true;
                if (oldTradeBarsMethodInfo != null) methodInvokers.Add(tradebarsType, oldTradeBarsMethodInfo.DelegateForCallMethod());
                if (oldTicksMethodInfo != null) methodInvokers.Add(ticksType, oldTicksMethodInfo.DelegateForCallMethod());
            }
            else
            {
                backwardsCompatibilityMode = false;
                if (newTradeBarsMethodInfo != null) methodInvokers.Add(tradebarsType, newTradeBarsMethodInfo.DelegateForCallMethod());
                if (newTicksMethodInfo != null) methodInvokers.Add(ticksType, newTicksMethodInfo.DelegateForCallMethod());
            }

            //Go through the subscription types and create invokers to trigger the event handlers for each custom type:
            foreach (var config in feed.Subscriptions)
            {
                //If the type is not a TradeBar or Tick, wire up a generic OnData invoker for the custom type:
                if (config.Type.Name != "TradeBar" && config.Type.Name != "Tick")
                {
                    //Get the matching method for this event handler - e.g. public void OnData(Quandl data) { .. }
                    var genericMethod = (algorithm.GetType()).GetMethod("OnData", new[] { config.Type });

                    //If we already have a handler for this type, don't add it to the invokers again.
                    if (methodInvokers.ContainsKey(config.Type)) continue;

                    //If we couldn't find the event handler, let the user know we can't fire that event.
                    if (genericMethod == null)
                    {
                        _runtimeError = new Exception("Data event handler not found, please create a function matching this template: public void OnData(" + config.Type.Name + " data) {  }");
                        _algorithmState = AlgorithmStatus.RuntimeError;
                        return;
                    }
                    methodInvokers.Add(config.Type, genericMethod.DelegateForCallMethod());
                }
            }

            //Loop over the queues: get a data collection, then pass it into the relevant methods in the algorithm.
            Log.Debug("AlgorithmManager.Run(): Algorithm initialized, launching time loop.");
            foreach (var newData in DataStream.GetData(feed, setup.StartingDate))
            {
                //Check this backtest is still running:
                if (_algorithmState != AlgorithmStatus.Running) break;

                //Go over each time stamp we've collected, pass it into the algorithm in order:
                foreach (var time in newData.Keys)
                {
                    //Set the time frontier:
                    _frontier = time;

                    //Execute with TimeLimit Monitor:
                    if (Isolator.IsCancellationRequested) return;

                    //Refresh the realtime event monitor:
                    realtime.SetTime(time);

                    //Fire end-of-day events if the time we just processed crossed into a new date:
                    if (backtestMode && _previousTime.Date != time.Date)
                    {
                        //Sample the portfolio value over time for chart.
                        results.SampleEquity(_previousTime, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));

                        if (startingPerformance == 0)
                        {
                            results.SamplePerformance(_previousTime.Date, 0);
                        }
                        else
                        {
                            results.SamplePerformance(_previousTime.Date, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPerformance) * 100 / startingPerformance, 10));
                        }

                        startingPerformance = algorithm.Portfolio.TotalPortfolioValue;
                    }

                    //Check if the user has signalled Quit: loop over data until the day changes.
                    if (algorithm.GetQuit())
                    {
                        _algorithmState = AlgorithmStatus.Quit;
                        break;
                    }

                    //Pass in the new time first:
                    algorithm.SetDateTime(time);

                    //Trigger the data events: Invoke the types we have data for:
                    var oldBars = new Dictionary<string, TradeBar>();
                    var oldTicks = new Dictionary<string, List<Tick>>();
                    var newBars = new TradeBars(time);
                    var newTicks = new Ticks(time);

                    //Invoke all non-tradebars, non-ticks methods:
                    // --> i == Subscription Configuration Index, so we don't need to compare types.
                    foreach (var i in newData[time].Keys)
                    {
                        //Data point and config of this point:
                        var dataPoints = newData[time][i];
                        var config = feed.Subscriptions[i];

                        //Create TradeBars Unified Data --> OR --> invoke generic data event. One loop.
                        foreach (var dataPoint in dataPoints)
                        {
                            //Update the security's properties first, before calling user code, to avoid issues with stale data
                            algorithm.Securities.Update(time, dataPoint);

                            //Update registered consolidators for this symbol index
                            for (var j = 0; j < config.Consolidators.Count; j++)
                            {
                                config.Consolidators[j].Update(dataPoint);
                            }

                            switch (config.Type.Name)
                            {
                                case "TradeBar":
                                    var bar = dataPoint as TradeBar;
                                    try
                                    {
                                        if (bar != null)
                                        {
                                            if (backwardsCompatibilityMode)
                                            {
                                                if (!oldBars.ContainsKey(bar.Symbol)) oldBars.Add(bar.Symbol, bar);
                                            }
                                            else
                                            {
                                                if (!newBars.ContainsKey(bar.Symbol)) newBars.Add(bar.Symbol, bar);
                                            }
                                        }
                                    }
                                    catch (Exception err)
                                    {
                                        Log.Error(time.ToLongTimeString() + " >> " + bar.Time.ToLongTimeString() + " >> " + bar.Symbol + " >> " + bar.Value.ToString("C"));
                                        Log.Error("AlgorithmManager.Run(): Failed to add TradeBar (" + bar.Symbol + ") Time: (" + time.ToLongTimeString() + ") Count:(" + newBars.Count + ") " + err.Message);
                                    }
                                    break;

                                case "Tick":
                                    var tick = dataPoint as Tick;
                                    if (tick != null)
                                    {
                                        if (backwardsCompatibilityMode)
                                        {
                                            if (!oldTicks.ContainsKey(tick.Symbol)) oldTicks.Add(tick.Symbol, new List<Tick>());
                                            oldTicks[tick.Symbol].Add(tick);
                                        }
                                        else
                                        {
                                            if (!newTicks.ContainsKey(tick.Symbol)) newTicks.Add(tick.Symbol, new List<Tick>());
                                            newTicks[tick.Symbol].Add(tick);
                                        }
                                    }
                                    break;

                                default:
                                    //Send data into the generic algorithm event handlers
                                    try
                                    {
                                        methodInvokers[config.Type](algorithm, dataPoint);
                                    }
                                    catch (Exception err)
                                    {
                                        _runtimeError = err;
                                        _algorithmState = AlgorithmStatus.RuntimeError;
                                        Log.Debug("AlgorithmManager.Run(): RuntimeError: Custom Data: " + err.Message + " STACK >>> " + err.StackTrace);
                                        return;
                                    }
                                    break;
                            }
                        }
                    }

                    //After we've fired all other events in this second, fire the pricing events:
                    if (backwardsCompatibilityMode)
                    {
                        //Log.Debug("AlgorithmManager.Run(): Invoking v1.0 Event Handlers...");
                        try
                        {
                            if (oldTradeBarsMethodInfo != null && oldBars.Count > 0) methodInvokers[tradebarsType](algorithm, oldBars);
                            if (oldTicksMethodInfo != null && oldTicks.Count > 0) methodInvokers[ticksType](algorithm, oldTicks);
                        }
                        catch (Exception err)
                        {
                            _runtimeError = err;
                            _algorithmState = AlgorithmStatus.RuntimeError;
                            Log.Debug("AlgorithmManager.Run(): RuntimeError: Backwards Compatibility Mode: " + err.Message + " STACK >>> " + err.StackTrace);
                            return;
                        }
                    }
                    else
                    {
                        //Log.Debug("AlgorithmManager.Run(): Invoking v2.0 Event Handlers...");
                        try
                        {
                            if (newTradeBarsMethodInfo != null && newBars.Count > 0) methodInvokers[tradebarsType](algorithm, newBars);
                            if (newTicksMethodInfo != null && newTicks.Count > 0) methodInvokers[ticksType](algorithm, newTicks);
                        }
                        catch (Exception err)
                        {
                            _runtimeError = err;
                            _algorithmState = AlgorithmStatus.RuntimeError;
                            Log.Debug("AlgorithmManager.Run(): RuntimeError: New Style Mode: " + err.Message + " STACK >>> " + err.StackTrace);
                            return;
                        }
                    }

                    //If it's the historical/paper trading model, wait until market orders have been "filled".
                    // Manually trigger the event handler to prevent thread switch.
                    transactions.ProcessSynchronousEvents();

                    //Save the previous time for the sample calculations
                    _previousTime = time;

                } // End of Time Loop

                // Process any required events of the results handler such as sampling assets, equity, or stock prices.
                results.ProcessSynchronousEvents();
            } // End of ForEach DataStream

            //Stream over: send the final packet and fire the final events:
            Log.Trace("AlgorithmManager.Run(): Firing On End Of Algorithm...");
            try
            {
                algorithm.OnEndOfAlgorithm();
            }
            catch (Exception err)
            {
                _algorithmState = AlgorithmStatus.RuntimeError;
                _runtimeError = new Exception("Error running OnEndOfAlgorithm(): " + err.Message, err.InnerException);
                Log.Debug("AlgorithmManager.OnEndOfAlgorithm(): " + err.Message + " STACK >>> " + err.StackTrace);
                return;
            }

            // Process any required events of the results handler such as sampling assets, equity, or stock prices.
            results.ProcessSynchronousEvents(forceProcess: true);

            //Liquidate Holdings for Calculations:
            if (_algorithmState == AlgorithmStatus.Liquidated || !Engine.LiveMode)
            {
                Log.Trace("AlgorithmManager.Run(): Liquidating algorithm holdings...");
                algorithm.Liquidate();
                results.LogMessage("Algorithm Liquidated");
                results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Liquidated);
            }

            //Manually stopped the algorithm
            if (_algorithmState == AlgorithmStatus.Stopped)
            {
                Log.Trace("AlgorithmManager.Run(): Stopping algorithm...");
                results.LogMessage("Algorithm Stopped");
                results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Stopped);
            }

            //Backtest deleted.
            if (_algorithmState == AlgorithmStatus.Deleted)
            {
                Log.Trace("AlgorithmManager.Run(): Deleting algorithm...");
                results.DebugMessage("Algorithm Id:(" + job.AlgorithmId + ") Deleted by request.");
                results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Deleted);
            }

            //Algorithm finished, send regardless of commands:
            results.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Completed);

            //Take final samples:
            results.SampleRange(algorithm.GetChartUpdates());
            results.SampleEquity(_frontier, Math.Round(algorithm.Portfolio.TotalPortfolioValue, 4));
            results.SamplePerformance(_frontier, Math.Round((algorithm.Portfolio.TotalPortfolioValue - startingPerformance) * 100 / startingPerformance, 10));
        }
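The Run method above discovers strongly typed OnData overloads by reflection and then dispatches incoming data through delegates cached per type. The standalone sketch below reproduces that discover-then-dispatch pattern using plain System.Reflection; TradeBars and MyAlgorithm are stand-in types, and MethodInfo.Invoke stands in for Fasterflect's faster DelegateForCallMethod delegates, so read it as an illustration of the pattern rather than LEAN's own implementation.

using System;
using System.Collections.Generic;
using System.Reflection;

// Stand-in types so the sketch compiles on its own; in LEAN these would be
// QuantConnect's TradeBars type and the user's algorithm class.
class TradeBars { }

class MyAlgorithm
{
    public void OnData(TradeBars data)
    {
        Console.WriteLine("OnData(TradeBars) fired");
    }
}

class ReflectionDispatchSketch
{
    static void Main()
    {
        var algorithm = new MyAlgorithm();
        var handlers = new Dictionary<Type, MethodInfo>();

        // Discovery step: look for a strongly typed OnData overload, mirroring
        // the GetMethod("OnData", new[] { type }) calls at the top of Run.
        var method = algorithm.GetType().GetMethod("OnData", new[] { typeof(TradeBars) });
        if (method != null)
        {
            handlers[typeof(TradeBars)] = method;
        }

        // Dispatch step: when a data point of a registered type arrives,
        // invoke the cached handler for that type.
        object packet = new TradeBars();
        MethodInfo invoker;
        if (handlers.TryGetValue(packet.GetType(), out invoker))
        {
            invoker.Invoke(algorithm, new[] { packet });
        }
    }
}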
Example 4
        /// <summary>
        /// Initialize the result handler with this result packet.
        /// </summary>
        /// <param name="packet">Algorithm job packet for this result handler</param>
        /// <param name="messagingHandler"></param>
        /// <param name="api"></param>
        /// <param name="dataFeed"></param>
        /// <param name="setupHandler"></param>
        /// <param name="transactionHandler"></param>
        public void Initialize(AlgorithmNodePacket packet, IMessagingHandler messagingHandler, IApi api, IDataFeed dataFeed, ISetupHandler setupHandler, ITransactionHandler transactionHandler)
        {
            // we expect one of two types here, the backtest node packet or the live node packet
            var job = packet as BacktestNodePacket;
            if (job != null)
            {
                _algorithmNode = new BacktestConsoleStatusHandler(job);
            }
            else
            {
                var live = packet as LiveNodePacket;
                if (live == null)
                {
                    throw new ArgumentException("Unexpected AlgorithmNodeType: " + packet.GetType().Name);
                }
                _algorithmNode = new LiveConsoleStatusHandler(live);
            }
            _resamplePeriod = _algorithmNode.ComputeSampleEquityPeriod();

            var time = DateTime.Now.ToString("yyyy-MM-dd-HH-mm");
            _chartDirectory = Path.Combine("../../../Charts/", packet.AlgorithmId, time);
            if (Directory.Exists(_chartDirectory))
            {
                foreach (var file in Directory.EnumerateFiles(_chartDirectory, "*.csv", SearchOption.AllDirectories))
                {
                    File.Delete(file);
                }
                Directory.Delete(_chartDirectory, true);
            }
            Directory.CreateDirectory(_chartDirectory);
            _messagingHandler = messagingHandler; 

        }
Example 5
        /********************************************************
        * CLASS METHODS
        *********************************************************/
        /// <summary>
        /// Primary Analysis Thread:
        /// </summary>
        public static void Main(string[] args)
        {
            //Initialize:
            var algorithmPath = "";
            string mode = "RELEASE";
            AlgorithmNodePacket job = null;
            var algorithm = default(IAlgorithm);
            var startTime = DateTime.Now;
            Log.LogHandler = Composer.Instance.GetExportedValueByTypeName<ILogHandler>(Config.Get("log-handler", "CompositeLogHandler"));

            #if DEBUG
                mode = "DEBUG";
            #endif

            //Name thread for the profiler:
            Thread.CurrentThread.Name = "Algorithm Analysis Thread";
            Log.Trace("Engine.Main(): LEAN ALGORITHMIC TRADING ENGINE v" + Constants.Version + " Mode: " + mode);
            Log.Trace("Engine.Main(): Started " + DateTime.Now.ToShortTimeString());
            Log.Trace("Engine.Main(): Memory " + OS.ApplicationMemoryUsed + "Mb-App  " + +OS.TotalPhysicalMemoryUsed + "Mb-Used  " + OS.TotalPhysicalMemory + "Mb-Total");

            //Import external libraries specific to physical server location (cloud/local)
            try
            {
                // grab the right export based on configuration
                Api = Composer.Instance.GetExportedValueByTypeName<IApi>(Config.Get("api-handler"));
                Notify = Composer.Instance.GetExportedValueByTypeName<IMessagingHandler>(Config.Get("messaging-handler"));
                JobQueue = Composer.Instance.GetExportedValueByTypeName<IJobQueueHandler>(Config.Get("job-queue-handler"));
            }
            catch (CompositionException compositionException)
            {
                Log.Error("Engine.Main(): Failed to load library: " + compositionException);
            }

            //Setup packeting, queue and controls system: These don't do much locally.
            Api.Initialize();
            Notify.Initialize();
            JobQueue.Initialize();

            //Start monitoring the backtest active status:
            var statusPingThread = new Thread(StateCheck.Ping.Run);
            statusPingThread.Start();

            try
            {
                //Reset algo manager internal variables preparing for a new algorithm.
                AlgorithmManager.ResetManager();

                //Reset thread holders.
                var initializeComplete = false;
                Thread threadFeed = null;
                Thread threadTransactions = null;
                Thread threadResults = null;
                Thread threadRealTime = null;

                do
                {
                    //-> Pull job from QuantConnect job queue, or, pull local build:
                    job = JobQueue.NextJob(out algorithmPath); // Blocking.

                    // if the job version doesn't match this instance version then we can't process it
                    // we also don't want to reprocess redelivered live jobs
                    if (job.Version != Constants.Version || (LiveMode && job.Redelivered))
                    {
                        Log.Error("Engine.Run(): Job Version: " + job.Version + "  Deployed Version: " + Constants.Version);

                        //Tiny chance there was an uncontrolled collapse of a server, resulting in an old user task circulating.
                        //In this event kill the old algorithm and leave a message so the user can later review.
                        JobQueue.AcknowledgeJob(job);
                        Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, _collapseMessage);
                        Notify.SetChannel(job.Channel);
                        Notify.RuntimeError(job.AlgorithmId, _collapseMessage);
                        job = null;
                    }
                } while (job == null);

                //-> Initialize messaging system
                Notify.SetChannel(job.Channel);

                //-> Create SetupHandler to configure internal algorithm state:
                SetupHandler = GetSetupHandler(job.SetupEndpoint);

                //-> Set the result handler type for this algorithm job, and launch the associated result thread.
                ResultHandler = GetResultHandler(job);
                threadResults = new Thread(ResultHandler.Run, 0) {Name = "Result Thread"};
                threadResults.Start();

                try
                {
                    // Save algorithm to cache, load algorithm instance:
                    algorithm = SetupHandler.CreateAlgorithmInstance(algorithmPath);

                    //Initialize the internal state of algorithm and job: executes the algorithm.Initialize() method.
                    initializeComplete = SetupHandler.Setup(algorithm, out _brokerage, job);

                    //If there are any reasons it failed, pass these back to the IDE.
                    if (!initializeComplete || algorithm.ErrorMessages.Count > 0 || SetupHandler.Errors.Count > 0)
                    {
                        initializeComplete = false;
                        //Get all the error messages: internal in algorithm and external in setup handler.
                        var errorMessage = String.Join(",", algorithm.ErrorMessages);
                        errorMessage += String.Join(",", SetupHandler.Errors);
                        ResultHandler.RuntimeError(errorMessage);
                        Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError);
                    }
                }
                catch (Exception err)
                {
                    var runtimeMessage = "Algorithm.Initialize() Error: " + err.Message + " Stack Trace: " + err.StackTrace;
                    ResultHandler.RuntimeError(runtimeMessage, err.StackTrace);
                    Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, runtimeMessage);
                }

                //-> Using the job + initialization: load the designated handlers:
                if (initializeComplete)
                {
                    //-> Reset the backtest stopwatch; we're now running the algorithm.
                    startTime = DateTime.Now;

                    //Set the algorithm id, enable live mode if we're trading live, and lock the algorithm against further configuration changes.
                    algorithm.SetAlgorithmId(job.AlgorithmId);
                    algorithm.SetLiveMode(LiveMode);
                    algorithm.SetLocked();

                    //Load the associated handlers for data, transaction and realtime events:
                    ResultHandler.SetAlgorithm(algorithm);
                    DataFeed            = GetDataFeedHandler(algorithm, job);
                    TransactionHandler  = GetTransactionHandler(algorithm, _brokerage, ResultHandler, job);
                    RealTimeHandler     = GetRealTimeHandler(algorithm, _brokerage, DataFeed, ResultHandler, job);

                    //Set the error handlers for the brokerage asynchronous errors.
                    SetupHandler.SetupErrorHandler(ResultHandler, _brokerage);

                    //Send status to user the algorithm is now executing.
                    ResultHandler.SendStatusUpdate(job.AlgorithmId, AlgorithmStatus.Running);

                    //Launch the data, transaction and realtime handlers into dedicated threads
                    threadFeed = new Thread(DataFeed.Run) {Name = "DataFeed Thread"};
                    threadTransactions = new Thread(TransactionHandler.Run) {Name = "Transaction Thread"};
                    threadRealTime = new Thread(RealTimeHandler.Run) {Name = "RealTime Thread"};

                    //Launch the data feed, result sending, and transaction models/handlers in separate threads.
                    threadFeed.Start(); // Data feed pushing data packets into thread bridge;
                    threadTransactions.Start(); // Transaction modeller scanning new order requests
                    threadRealTime.Start(); // RealTime scan time for time based events:

                    // Result manager scanning message queue: (started earlier)
                    ResultHandler.DebugMessage(string.Format("Launching analysis for {0} with LEAN Engine v{1}", job.AlgorithmId, Constants.Version));

                    try
                    {
                        // Execute the Algorithm Code:
                        var complete = Isolator.ExecuteWithTimeLimit(SetupHandler.MaximumRuntime, AlgorithmManager.TimeLoopWithinLimits, () =>
                        {
                            try
                            {
                                //Run Algorithm Job:
                                // -> Using this Data Feed,
                                // -> Send Orders to this TransactionHandler,
                                // -> Send Results to ResultHandler.
                                AlgorithmManager.Run(job, algorithm, DataFeed, TransactionHandler, ResultHandler, SetupHandler, RealTimeHandler);
                            }
                            catch (Exception err)
                            {
                                //Debugging at this level is difficult, stack trace needed.
                                Log.Error("Engine.Run", err);
                            }

                            Log.Trace("Engine.Run(): Exiting Algorithm Manager");

                        }, job.UserPlan == UserPlan.Free ? 1024 : MaximumRamAllocation);

                        if (!complete)
                        {
                            Log.Error("Engine.Main(): Failed to complete in time: " + SetupHandler.MaximumRuntime.ToString("F"));
                            throw new Exception("Failed to complete algorithm within " + SetupHandler.MaximumRuntime.ToString("F") + " seconds. Please make it run faster.");
                        }

                        // Algorithm runtime error:
                        if (algorithm.RunTimeError != null)
                        {
                            throw algorithm.RunTimeError;
                        }
                    }
                    catch (Exception err)
                    {
                        //Error running the user algorithm: purge datafeed, send error messages, set algorithm status to failed.
                        Log.Error("Engine.Run(): Breaking out of parent try-catch: " + err.Message + " " + err.StackTrace);
                        if (DataFeed != null) DataFeed.Exit();
                        if (ResultHandler != null)
                        {
                            var message = "Runtime Error: " + err.Message;
                            Log.Trace("Engine.Run(): Sending runtime error to user...");
                            ResultHandler.LogMessage(message);
                            ResultHandler.RuntimeError(message, err.StackTrace);
                            Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmStatus.RuntimeError, message + " Stack Trace: " + err.StackTrace);
                        }
                    }

                    //Send result data back: this entire code block could be rewritten.
                    // todo: - Split up statistics class, its enormous.
                    // todo: - Make a dedicated Statistics.Benchmark class.
                    // todo: - Move all creation and transmission of statistics out of primary engine loop.
                    // todo: - Statistics.Generate(algorithm, resulthandler, transactionhandler);

                    try
                    {
                        var charts = new Dictionary<string, Chart>(ResultHandler.Charts);
                        var orders = new Dictionary<int, Order>(algorithm.Transactions.Orders);
                        var holdings = new Dictionary<string, Holding>();
                        var statistics = new Dictionary<string, string>();
                        var banner = new Dictionary<string, string>();

                        try
                        {
                            //These calls can throw when expected data doesn't exist (e.g. no charting was logged, or runtime errors occurred in the main algorithm execution)
                            const string strategyEquityKey = "Strategy Equity";
                            const string equityKey = "Equity";
                            const string dailyPerformanceKey = "Daily Performance";

                            // make sure we've taken samples for these series before just blindly requesting them
                            if (charts.ContainsKey(strategyEquityKey) &&
                                charts[strategyEquityKey].Series.ContainsKey(equityKey) &&
                                charts[strategyEquityKey].Series.ContainsKey(dailyPerformanceKey))
                            {
                                var equity = charts[strategyEquityKey].Series[equityKey].Values;
                                var performance = charts[strategyEquityKey].Series[dailyPerformanceKey].Values;
                                var profitLoss =
                                    new SortedDictionary<DateTime, decimal>(algorithm.Transactions.TransactionRecord);
                                statistics = Statistics.Statistics.Generate(equity, profitLoss, performance,
                                    SetupHandler.StartingPortfolioValue, algorithm.Portfolio.TotalFees, 252);
                            }
                        }
                        catch (Exception err)
                        {
                            Log.Error("Algorithm.Node.Engine(): Error generating statistics packet: " + err.Message);
                        }

                        //Diagnostics Completed, Send Result Packet:
                        var totalSeconds = (DateTime.Now - startTime).TotalSeconds;
                        ResultHandler.DebugMessage(string.Format("Algorithm Id:({0}) completed in {1} seconds at {2}k data points per second. Processing total of {3} data points.",
                            job.AlgorithmId, totalSeconds.ToString("F2"), ((AlgorithmManager.DataPoints / (double)1000) / totalSeconds).ToString("F0"), AlgorithmManager.DataPoints.ToString("N0")));

                        ResultHandler.SendFinalResult(job, orders, algorithm.Transactions.TransactionRecord, holdings, statistics, banner);
                    }
                    catch (Exception err)
                    {
                        Log.Error("Engine.Main(): Error sending analysis result: " + err.Message + "  ST >> " + err.StackTrace);
                    }

                    //Before we return, send terminate commands to close up the threads
                    TransactionHandler.Exit();
                    DataFeed.Exit();
                    RealTimeHandler.Exit();
                }

                //Close result handler:
                ResultHandler.Exit();
                StateCheck.Ping.Exit();

                //Wait for the threads to complete:
                var ts = Stopwatch.StartNew();
                while ((ResultHandler.IsActive || (TransactionHandler != null && TransactionHandler.IsActive) || (DataFeed != null && DataFeed.IsActive)) && ts.ElapsedMilliseconds < 30 * 1000)
                {
                    Thread.Sleep(100); Log.Trace("Waiting for threads to exit...");
                }

                //Terminate threads still in active state.
                if (threadFeed != null && threadFeed.IsAlive) threadFeed.Abort();
                if (threadTransactions != null && threadTransactions.IsAlive) threadTransactions.Abort();
                if (threadResults != null && threadResults.IsAlive) threadResults.Abort();
                if (statusPingThread != null && statusPingThread.IsAlive) statusPingThread.Abort();

                if (_brokerage != null)
                {
                    _brokerage.Disconnect();
                }
                if (SetupHandler != null)
                {
                    SetupHandler.Dispose();
                }
                Log.Trace("Engine.Main(): Analysis Completed and Results Posted.");
            }
            catch (Exception err)
            {
                Log.Error("Engine.Main(): Error running algorithm: " + err.Message + " >> " + err.StackTrace);
            }
            finally
            {
                //No matter what for live mode; make sure we've set algorithm status in the API for "not running" conditions:
                if (LiveMode && AlgorithmManager.State != AlgorithmStatus.Running && AlgorithmManager.State != AlgorithmStatus.RuntimeError)
                    Api.SetAlgorithmStatus(job.AlgorithmId, AlgorithmManager.State);

                //Delete the message from the job queue:
                JobQueue.AcknowledgeJob(job);
                Log.Trace("Engine.Main(): Packet removed from queue: " + job.AlgorithmId);

                //Attempt to clean up ram usage:
                GC.Collect();
            }

            //Final disposals.
            Api.Dispose();

            // Make the console window pause so we can read log output before exiting and killing the application completely
            if (IsLocal)
            {
                Log.Trace("Engine.Main(): Analysis Complete. Press any key to continue.");
                Console.Read();
            }
            Log.LogHandler.Dispose();
        }
Example 6
 /// <summary>
 /// Initialize the result handler with this result packet.
 /// </summary>
 /// <param name="job">Algorithm job packet for this result handler</param>
 /// <param name="messagingHandler"></param>
 /// <param name="api"></param>
 /// <param name="dataFeed"></param>
 /// <param name="setupHandler"></param>
 /// <param name="transactionHandler"></param>
 public void Initialize(AlgorithmNodePacket job, IMessagingHandler messagingHandler, IApi api, IDataFeed dataFeed, ISetupHandler setupHandler, ITransactionHandler transactionHandler)
 {
     //Redirect the log messages here:
     _job = job;
     var desktopLogging = new FunctionalLogHandler(DebugMessage, DebugMessage, ErrorMessage);
     Log.LogHandler = new CompositeLogHandler(new[] { desktopLogging, Log.LogHandler });
 }
 /// <summary>
 /// Initialize the result handler with this result packet.
 /// </summary>
 /// <param name="job">Algorithm job packet for this result handler</param>
 /// <param name="messagingHandler"></param>
 /// <param name="api"></param>
 /// <param name="dataFeed"></param>
 /// <param name="setupHandler"></param>
 /// <param name="transactionHandler"></param>
 public void Initialize(AlgorithmNodePacket job, IMessagingHandler messagingHandler, IApi api, IDataFeed dataFeed, ISetupHandler setupHandler, ITransactionHandler transactionHandler)
 {
     _api = api;
     _dataFeed = dataFeed;
     _messagingHandler = messagingHandler;
     _setupHandler = setupHandler;
     _transactionHandler = transactionHandler;
     _job = job as LiveNodePacket;
     if (_job == null) throw new Exception("LiveResultHandler.Initialize(): Submitted job packet type is invalid; expected LiveNodePacket.");
     _deployId = _job.DeployId;
     _compileId = _job.CompileId;
 }
Example 8
        /// <summary>
        /// Initialize the result handler with this result packet.
        /// </summary>
        /// <param name="job">Algorithm job packet for this result handler</param>
        /// <param name="messagingHandler">The handler responsible for communicating messages to listeners</param>
        /// <param name="api">The api instance used for handling logs</param>
        /// <param name="dataFeed"></param>
        /// <param name="setupHandler"></param>
        /// <param name="transactionHandler"></param>
        public void Initialize(AlgorithmNodePacket job, IMessagingHandler messagingHandler, IApi api, IDataFeed dataFeed, ISetupHandler setupHandler, ITransactionHandler transactionHandler)
        {
            _api = api;
            _messagingHandler = messagingHandler;
            _transactionHandler = transactionHandler;
            _job = job as BacktestNodePacket;
            if (_job == null) throw new Exception("BacktestingResultHandler.Initialize(): Submitted job packet type is invalid; expected BacktestNodePacket.");
            _compileId = _job.CompileId;
            _backtestId = _job.BacktestId;

            //Get the resample period:
            var totalMinutes = (_job.PeriodFinish - _job.PeriodStart).TotalMinutes;
            var resampleMinutes = (totalMinutes < (_minimumSamplePeriod * _samples)) ? _minimumSamplePeriod : (totalMinutes / _samples); // Space the samples out evenly across the backtest period, subject to the minimum sample period
            _resamplePeriod = TimeSpan.FromMinutes(resampleMinutes);
            Log.Trace("BacktestingResultHandler(): Sample Period Set: " + resampleMinutes.ToString("00.00"));
        }
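To make the resample arithmetic above concrete, here is a worked fragment under assumed values of _samples = 4000 and _minimumSamplePeriod = 4 minutes (illustrative assumptions; the actual field values may differ in your version):

            // Hypothetical one-year backtest: 365 days * 24 * 60 = 525,600 minutes.
            // That exceeds the 4 * 4000 = 16,000 minute floor, so the spacing is
            // 525,600 / 4000 = 131.4 minutes, i.e. one equity sample roughly every 2h11m.
            var exampleMinutes = (new DateTime(2015, 1, 1) - new DateTime(2014, 1, 1)).TotalMinutes; // 525,600
            var exampleResample = (exampleMinutes < (4 * 4000)) ? 4 : (exampleMinutes / 4000);       // 131.4
            var examplePeriod = TimeSpan.FromMinutes(exampleResample);                               // 02:11:24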