public async Task Should_Work_For_Multiple_Types()
		{
			/* Setup */
			using (var requester = TestClientFactory.CreateNormal())
			using (var responder = TestClientFactory.CreateNormal())
			{
				const int numberOfCalls = 10000;
				var firstResponseTasks = new Task[numberOfCalls];
				var secondResponseTasks = new Task[numberOfCalls];
				responder.RespondAsync<FirstRequest, FirstResponse>((request, context) =>
					Task.FromResult(new FirstResponse { Infered = Guid.NewGuid() }));
				responder.RespondAsync<SecondRequest, SecondResponse>((request, context) =>
					Task.FromResult(new SecondResponse { Source = Guid.NewGuid() }));

				/* Test */
				for (var i = 0; i < numberOfCalls; i++)
				{
					var firstResponse = requester.RequestAsync<FirstRequest, FirstResponse>();
					var secondResponse = requester.RequestAsync<SecondRequest, SecondResponse>();
					firstResponseTasks[i] = firstResponse;
					secondResponseTasks[i] = secondResponse;
				}
				await Task.WhenAll(firstResponseTasks.Concat(secondResponseTasks));
				var firstIds = firstResponseTasks
					.OfType<Task<FirstResponse>>()
					.Select(b => b.Result.Infered)
					.Where(id => id != Guid.Empty)
					.Distinct()
					.ToList();
				var secondIds = secondResponseTasks
					.OfType<Task<SecondResponse>>()
					.Select(b => b.Result.Source)
					.Where(id => id != Guid.Empty)
					.Distinct()
					.ToList();
				/* Assert */
				Assert.Equal(expected: numberOfCalls, actual: firstIds.Count);
				Assert.Equal(expected: numberOfCalls, actual: secondIds.Count);
			}
		}
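
For reference, a minimal sketch of the message contracts this test appears to rely on. The property names Infered and Source are taken from the test body above; the empty request types and Guid-typed properties are assumptions.

		public class FirstRequest { }
		public class FirstResponse
		{
			// spelling kept exactly as used in the test above
			public Guid Infered { get; set; }
		}

		public class SecondRequest { }
		public class SecondResponse
		{
			public Guid Source { get; set; }
		}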
        //TODO
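        // Awaits the given tasks together with the dataflow blocks' Completion tasks;
        // if any of them faults, every block is faulted so the whole pipeline shuts down.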
        public static async Task Create(Task[] tasks, IDataflowBlock[] dataFlowBlocks)
        {
            var pipelineTasks = tasks.Concat(dataFlowBlocks.Select(x => x.Completion)).ToArray();

            var catchTasks = new Task[pipelineTasks.Length];
            for (var i = 0; i < pipelineTasks.Length; i++)
            {
                var task = pipelineTasks[i];

                catchTasks[i] =
                    task.ContinueWith(_ =>
                    {
                        if (task.IsFaulted)
                        {
                            // fault all dataflow blocks as soon as any fault occurs
                            foreach (var dataFlowBlock in dataFlowBlocks)
                                dataFlowBlock.Fault(task.Exception);
                        }
                    });
            }

            try
            {
                var allTasks = pipelineTasks.Concat(catchTasks).ToArray();
                await Task.WhenAll(allTasks);

                // we should only be here if all tasks completed successfully
                Debug.Assert(allTasks.All(x => x.Status == TaskStatus.RanToCompletion));
            }
            catch (Exception ex)
            {
                // ensure all dataflow blocks are faulted
                foreach (var dataFlowBlock in dataFlowBlocks)
                    dataFlowBlock.Fault(ex);

                throw;
            }
        }
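
A hedged usage sketch for the Create helper above: the TransformBlock/ActionBlock wiring and the producer task are illustrative assumptions (and require System.Threading.Tasks.Dataflow); the point is that a fault in any task or block is propagated to every block.

        public static async Task CreateUsageSketch()
        {
            var transform = new TransformBlock<int, int>(x => x * 2);
            var sink = new ActionBlock<int>(x => Console.WriteLine(x));
            transform.LinkTo(sink, new DataflowLinkOptions { PropagateCompletion = true });

            // hypothetical producer feeding the pipeline
            var producer = Task.Run(async () =>
            {
                for (var i = 0; i < 100; i++)
                    await transform.SendAsync(i);
                transform.Complete();
            });

            // if the producer or either block faults, Create faults the remaining blocks and rethrows
            await Create(new[] { producer }, new IDataflowBlock[] { transform, sink });
        }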
Example #3
      public void Parallel_PutGetDelete_Random()
      {
        const int PUTTER_CNT  = 2, PUTTER_OP_CNT  = 2 * 10000;
        const int GETTER_CNT  = 6, GETTER_OP_CNT  = 2 * 30000;
        const int DELETER_CNT = 2, DELETER_OP_CNT = 2 * 10000;

        var data = new ConcurrentDictionary<PilePointer, string>();

        var getAccessViolations = new ConcurrentDictionary<int, int>();
        var deleteAccessViolations = new ConcurrentDictionary<int, int>();

        using (var pile = new DefaultPile())
        {
          pile.Start();

          var ipile = pile as IPile;

          // putter tasks
          var putters = new Task[PUTTER_CNT];
          for (int it = 0; it < PUTTER_CNT; it++)
          {
            var task = new Task(() => {

              for (int i = 0; i < PUTTER_OP_CNT; i++)
              {
                var str = NFX.Parsing.NaturalTextGenerator.Generate();
                var pp = ipile.Put(str);
                data.TryAdd(pp, str);
              }

            });

            putters[it] = task;
          }

          // getter tasks
          var getters = new Task[GETTER_CNT];
          for (int it = 0; it < GETTER_CNT; it++)
          {
            var task = new Task(() => {

              for (int i = 0; i < GETTER_OP_CNT; i++)
              {
                if (data.Count == 0) {
                  System.Threading.Thread.Yield();
                  continue;
                }
                var idx = ExternalRandomGenerator.Instance.NextScaledRandomInteger(0, data.Count-1);
                var kvp = data.ElementAt(idx);
                try
                {
                  
                  var str = ipile.Get(kvp.Key);
                  Assert.AreEqual(kvp.Value, str);
                }
                catch (PileAccessViolationException)
                {
                  getAccessViolations.AddOrUpdate(System.Threading.Thread.CurrentThread.ManagedThreadId, 1, (mid, val) => val + 1);
                }
              }
            });
            getters[it] = task;
          }

          // deleter tasks
          var deleters = new Task[DELETER_CNT];
          for (int it = 0; it < DELETER_CNT; it++)
          {
            var task = new Task(() => {

              for (int i = 0; i < DELETER_OP_CNT; i++)
              {
                if (data.Count == 0) {
                  System.Threading.Thread.Yield();
                  continue;
                }
                var idx = ExternalRandomGenerator.Instance.NextScaledRandomInteger(0, data.Count-1);
                var kvp = data.ElementAt(idx);
                try
                {
                  ipile.Delete(kvp.Key);
                }
                catch (PileAccessViolationException)
                {
                  deleteAccessViolations.AddOrUpdate(System.Threading.Thread.CurrentThread.ManagedThreadId, 1, (mid, val) => val + 1);
                }
              }
            });
            deleters[it] = task;
          }


          foreach (var task in putters) task.Start();
          foreach (var task in getters) task.Start();
          foreach (var task in deleters) task.Start();


          Task.WaitAll(putters.Concat(getters).Concat(deleters).ToArray());

          foreach (var kvp in getAccessViolations)
            Console.WriteLine("Get thread '{0}' {1:n0} times accessed deleted pointer", kvp.Key, kvp.Value);

          foreach (var kvp in deleteAccessViolations)
            Console.WriteLine("Del thread '{0}' {1:n0} times accessed deleted pointer", kvp.Key, kvp.Value);
        }
      }
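
The test above creates cold tasks with new Task(...) and calls Start() later so all workers begin at roughly the same moment. A minimal sketch of the same fan-out/join shape using Task.Run instead; the worker counts, iteration counts, and the shared dictionary are illustrative assumptions.

      public void Parallel_FanOut_Sketch()
      {
        var data = new ConcurrentDictionary<int, string>();

        // Task.Run schedules the work immediately, unlike new Task(...) + Start()
        var writers = Enumerable.Range(0, 2)
          .Select(w => Task.Run(() =>
          {
            for (var i = 0; i < 10000; i++)
              data.TryAdd(w * 10000 + i, "value " + i);
          }))
          .ToArray();

        var readers = Enumerable.Range(0, 6)
          .Select(r => Task.Run(() =>
          {
            for (var i = 0; i < 30000; i++)
            {
              if (data.Count == 0) { System.Threading.Thread.Yield(); continue; }
              var kvp = data.ElementAt(i % data.Count);
              // a real test would assert on kvp.Value here
            }
          }))
          .ToArray();

        // same join as above: concatenate both groups and wait for everything
        Task.WaitAll(writers.Concat(readers).ToArray());
      }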
        public IEnumerable<BondSpreadMergedResult> ComputeBondSpread(string ccy, string oisCurveName, string liborCurveName)
        {
            try
            {
                var nowTime = DateTime.Now;
                Log.Info("Prepare for bond spread computation...");


                if (futureBuilder.Today.Date < DateTime.UtcNow.AddHours(-5).Date)
                {
                    Log.InfoFormat("previous today date set to {0}, now today date is set to {1}", futureBuilder.Today.Date, DateTime.UtcNow.AddHours(-5).Date);
                    // when we move forward to the next day, we need to roll dates
                    futureBuilder.ComputeRelevantDatesForForwards();
                    futureBuilder.ComputeRollDates();     
               
                    // recompute closes
                    histPriceCache.Clear();
                    futureEnricher.ClearCloseCache();
                }

                // 1. need to get all bonds statics
                //var bondLists = GetBondStatics();
                //var allbonds = bondSpreadServiceModel.GetAllSecurities("T", "Term", DateTime.Today.AddDays(30));
                //var bondLists = bondPriceCache.Keys.ToArray();
                var bondLists = bondStaticCache.Keys.ToArray();

                // 2. build curve                
                curves.SetupCurves(ccy, oisCurveName, liborCurveName);
                Log.InfoFormat("Finished curve bulding in {0}secs", (DateTime.Now - nowTime).TotalSeconds);

                // 3. compute bond spread
                string country = "US";
                BondAnalytics.Country eCountry;
                BondAnalytics.CountryMappings.TryGetValue(country.ToUpper().Trim(), out eCountry);
                
                long Freq = 6; // hard-coded to semi-annual?

                // all the bond prices            
                double optStartDate = 0;
                double optFirstCpnDate = 0;

                Log.Info("Computing bond spread...");
                BondSpreadResult[] liveBondSpreads = null;
                BondSpreadResult[] closeBondSpreads = null;
                BondSpreadResult[] futureSpreadsLive = null;
                BondSpreadResult[] futureSpreadsClose = null;

                IEnumerable<BondSpreadMergedResult> MergedBondSpreads = null;
                IEnumerable<BondSpreadMergedResult> MergedFuturesSpreads = null;

                DateTime futureStart = DateTime.Now;
                // future enrichment
                lock(lockobj)
                { 
                    foreach (var future in FutureCache)
                    {
                        try
                        {
                            futureEnricher.EnrichFuture(future, curves.GetLiveOISCurve(), curves.GetLiveLiborCurve(),  curves.GetCloseOISCurve(), curves.GetCloseLiborCurve(), this); // oiscurve, liborcurve    
                        }
                        catch (Exception e)
                        {                        
                            Log.Error(string.Format("Failed to enrich future {0}", future.Contract), e);
                            future.CanComputeSpread = false;
                        }
                    
                    }
                }
                Log.InfoFormat("Finished future enrichment in {0}secs", (DateTime.Now - futureStart).TotalSeconds);

                try
                {
                    // compute bond prices in parallel
                    var starttime = DateTime.Now;
                    var computingbondlist = bondLists.ToArray();
                    var closecomputeTasks = new Task<BondSpreadResult>[computingbondlist.Length];
                    var livecomputeTasks = new Task<BondSpreadResult>[computingbondlist.Length];

                    
                    //var liveBondSpreads = new List<BondSpreadResult>();                    
                               
                    for (int i = 0; i<computingbondlist.Length; i++)
                    {
                        var i1 = i;
                        livecomputeTasks[i] = Task<BondSpreadResult>.Factory.StartNew(
                                () =>
                                    ComputeLiveBondSpreadResult(computingbondlist[i1], eCountry, optStartDate, optFirstCpnDate, Freq, curves.GetLiveLiborCurve(), curves.GetLiveOISCurve()))
                                    .ContinueWith(r => r.Result);
                    }
                    for (int i = 0; i < computingbondlist.Length; i++)
                    {
                        var i1 = i;
                        closecomputeTasks[i] = Task<BondSpreadResult>.Factory.StartNew(
                                () =>
                                     ComputeCloseBondSpreadResult(computingbondlist[i1], eCountry, optStartDate, optFirstCpnDate, Freq, curves.GetCloseLiborCurve(), curves.GetCloseOISCurve()))
                                    .ContinueWith(r => r.Result);
                    }

                    Task.WaitAll(closecomputeTasks.Concat(livecomputeTasks).Cast<Task>().ToArray());                    
                    liveBondSpreads = livecomputeTasks.Select(l => l.Result).Where(r=> r != null).ToArray();
                    closeBondSpreads = closecomputeTasks.Select(l => l.Result).Where(r => r != null).ToArray();
                    Log.InfoFormat("Finished parallel bond spread calculation in {0}secs", (DateTime.Now - starttime).TotalSeconds);
                    MergedBondSpreads = MergeLiveAndCloseResults(liveBondSpreads, closeBondSpreads);

                    // future spread result
                    starttime = DateTime.Now;
                    var computefuturelist = FutureCache.Where(f => f.CanComputeSpread).ToArray();
                    var closecomputeFutureTasks = new Task<BondSpreadResult>[computefuturelist.Length];
                    var livecomputeFutureTasks = new Task<BondSpreadResult>[computefuturelist.Length];
                    for (int i = 0; i < computefuturelist.Length; i++)
                    {
                        var i1 = i;
                        livecomputeFutureTasks[i] = Task<BondSpreadResult>.Factory.StartNew(
                                () =>
                                    ComputeLiveFutureBondSpread(eCountry, computefuturelist[i1], Freq, curves.GetLiveLiborCurve(), curves.GetLiveOISCurve()))
                                    .ContinueWith(r => r.Result);
                    }
                    for (int i = 0; i < computefuturelist.Length; i++)
                    {
                        var i1 = i;
                        closecomputeFutureTasks[i] = Task<BondSpreadResult>.Factory.StartNew(
                                () =>
                                     ComputeCloseFutureBondSpread(eCountry, computefuturelist[i1], Freq, curves.GetCloseLiborCurve(), curves.GetCloseOISCurve()))
                                    .ContinueWith(r => r.Result);
                    }
                    Task.WaitAll(closecomputeFutureTasks.Concat(livecomputeFutureTasks).Cast<Task>().ToArray());                    
                    futureSpreadsLive = livecomputeFutureTasks.Select(l => l.Result).Where(r => r != null).ToArray();
                    futureSpreadsClose = closecomputeFutureTasks.Select(l => l.Result).Where(r => r != null).ToArray();
                    Log.InfoFormat("Finished parallel future spread calculation in {0}secs", (DateTime.Now - starttime).TotalSeconds);

                    MergedFuturesSpreads = MergeLiveAndCloseResults(futureSpreadsLive, futureSpreadsClose);

                }
                catch (Exception e)
                {
                    Log.Error("Exception occurred in parallel spread calculation", e);
                    return null;
                }

                Log.InfoFormat("Finished bond spread computation cycle in {0}secs", (DateTime.Now - nowTime).TotalSeconds);
                //return liveBondSpreads.Concat(closeBondSpreads).Concat(futureSpreadsLive).Concat(futureSpreadsClose).ToArray();
                return MergedBondSpreads.Concat(MergedFuturesSpreads);
            }
            catch (Exception e)
            {
                Log.Error("Exception occurs in bond spread calculation", e);
                return null;
            }

        }
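
The scheduling pattern in the method above reduces to: fan the items out into two Task<T> arrays, join both groups with a single Task.WaitAll over their concatenation, then harvest the non-null results. A minimal generic sketch, where the item type and the two compute delegates stand in for the bond/future specific calls:

        private static (TResult[] live, TResult[] close) ComputeLiveAndClose<TItem, TResult>(
            TItem[] items, Func<TItem, TResult> computeLive, Func<TItem, TResult> computeClose)
            where TResult : class
        {
            var liveTasks = items.Select(x => Task.Run(() => computeLive(x))).ToArray();
            var closeTasks = items.Select(x => Task.Run(() => computeClose(x))).ToArray();

            // one blocking join over both groups, as in the method above
            Task.WaitAll(liveTasks.Concat(closeTasks).Cast<Task>().ToArray());

            return (liveTasks.Select(t => t.Result).Where(r => r != null).ToArray(),
                    closeTasks.Select(t => t.Result).Where(r => r != null).ToArray());
        }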
Example #5
        private static void ConnectToNodes(TextWriter log, string tieBreakerKey, int syncTimeout, int keepAlive, bool allowAdmin, string clientName, string[] arr, List<RedisConnection> connections, out Task<string>[] infos, out Task<string>[] aux, AuxMode mode)
        {
            TraceWriteTime("Infos");
            infos = new Task<string>[arr.Length];
            aux = new Task<string>[arr.Length];
            var opens = new Task[arr.Length];
            for (int i = 0; i < arr.Length; i++)
            {
                var option = arr[i];
                if (string.IsNullOrWhiteSpace(option)) continue;

                RedisConnection conn = null;
                try
                {

                    var parts = option.Split(':');
                    if (parts.Length == 0) continue;

                    string host = parts[0].Trim();
                    int port = 6379, tmp;
                    if (parts.Length > 1 && int.TryParse(parts[1].Trim(), out tmp)) port = tmp;
                    conn = new RedisConnection(host, port, syncTimeout: syncTimeout, allowAdmin: allowAdmin);
                    conn.Name = clientName;
                    log.WriteLine("Opening connection to {0}:{1}...", host, port);
                    if (keepAlive >= 0) conn.SetKeepAlive(keepAlive);
                    opens[i] = conn.Open();
                    var info = conn.GetInfoImpl(null, false, false);
                    connections.Add(conn);
                    infos[i] = info;
                    switch (mode)
                    {
                        case AuxMode.TieBreakers:
                            if (tieBreakerKey != null)
                            {
                                aux[i] = conn.Strings.GetString(0, tieBreakerKey);
                            }
                            break;
                        case AuxMode.ClusterNodes:
                            aux[i] = conn.Cluster.GetNodes();
                            break;
                    }

                }
                catch (Exception ex)
                {
                    if (conn == null)
                    {
                        log.WriteLine("Error parsing option \"{0}\": {1}", option, ex.Message);
                    }
                    else
                    {
                        log.WriteLine("Error connecting: {0}", ex.Message);
                    }
                }
            }

            TraceWriteTime("Wait for infos");
            RedisConnectionBase.Trace("select-create", "wait...");
            var watch = Stopwatch.StartNew(); // start the shared timeout budget
            foreach (Task task in infos.Concat(aux).Concat(opens))
            {
                if (task != null)
                {
                    try
                    {
                        int remaining = unchecked((int)(syncTimeout - watch.ElapsedMilliseconds));
                        if (remaining > 0) task.Wait(remaining);
                    }
                    catch { }
                }
            }
            watch.Stop();
            RedisConnectionBase.Trace("select-create", "complete");
        }
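
The wait loop above spreads a single syncTimeout budget across all of the info/aux/open tasks via one stopwatch. The same deadline pattern in isolation, as a sketch (the method name and parameters are hypothetical):

        private static void WaitAllWithinBudget(IEnumerable<Task> tasks, int timeoutMilliseconds)
        {
            var watch = Stopwatch.StartNew();
            foreach (var task in tasks)
            {
                if (task == null) continue;
                var remaining = (int)(timeoutMilliseconds - watch.ElapsedMilliseconds);
                if (remaining <= 0) break;      // overall budget exhausted; stop waiting
                try { task.Wait(remaining); }   // per-task faults are ignored here; callers inspect task state
                catch { }
            }
            watch.Stop();
        }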