Example No. 1
        public void ChangesToSourceAreIgnoredInWhileIteratingOverResultsAfterFirstElementRetrieved()
        {
            var source = new List<string> { "a", "b", "c", "def" };

            var groups = source.GroupBy(x => x.Length);
            using (var iterator = groups.GetEnumerator())
            {
                Assert.IsTrue(iterator.MoveNext());
                iterator.Current.AssertSequenceEqual("a", "b", "c");

                // If GroupBy still needed to iterate over the source, this would cause an
                // InvalidOperationException when we next fetched an element from groups.
                source.Add("ghi");

                Assert.IsTrue(iterator.MoveNext());
                // ghi isn't in any group - the source was already buffered when we started iterating
                iterator.Current.AssertSequenceEqual("def");

                Assert.IsFalse(iterator.MoveNext());
            }

            // If we iterate again now - without calling GroupBy again - we'll see the difference:
            using (var iterator = groups.GetEnumerator())
            {
                Assert.IsTrue(iterator.MoveNext());
                iterator.Current.AssertSequenceEqual("a", "b", "c");

                Assert.IsTrue(iterator.MoveNext());
                iterator.Current.AssertSequenceEqual("def", "ghi");
            }
        }
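        // AssertSequenceEqual is used above but not defined in this snippet; it is presumably a
        // small test-helper extension method. A minimal sketch under that assumption (not the
        // original helper; requires System.Collections.Generic, System.Linq and NUnit.Framework):
        public static class SequenceAssertExtensions
        {
            // Compares the actual sequence against the expected elements, in order.
            public static void AssertSequenceEqual<T>(this IEnumerable<T> actual, params T[] expected)
            {
                var actualList = actual.ToList();
                Assert.IsTrue(expected.SequenceEqual(actualList),
                    "Expected [{0}] but was [{1}]", string.Join(", ", expected), string.Join(", ", actualList));
            }
        }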
Example No. 2
 public void CompareSolves()
 {
     var groupBy = GetSolves().ToArray();
     var bests = new List<Tuple<string, int>>();
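     // Tuple layout assumed from the usage below (not documented in the original):
     // Item1 = solver name, Item3 = seed, Item4 = score; result.Key is the problem id.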
     foreach (var result in groupBy)
     {
         var groupByName = result.GroupBy(r => r.Item1);
         Console.WriteLine("Problem " + result.Key);
         var solves = groupByName.OrderByDescending(r => r.Sum(k => k.Item4));
         var bsolves = solves.Where(s => s.Sum(r => r.Item4) == solves.First().Sum(r => r.Item4));
         foreach (var bsolve in bsolves)
             bests.Add(Tuple.Create(bsolve.Key, int.Parse(result.Key)));
         foreach (var res in solves)
         {
             Console.WriteLine("{0}: {1}", res.Key, res.Sum(K => K.Item4) / res.Count());
         }
         foreach (var byS in result.GroupBy(r => r.Item3))
         {
             Console.WriteLine("Seed {0}", byS.Key);
             foreach (var oneItem in byS.OrderByDescending(b => b.Item4))
             {
                 Console.WriteLine("{0}: {1}", oneItem.Item1, oneItem.Item4);
             }
         }
         Console.WriteLine();
     }
     Console.WriteLine();
     Console.WriteLine("Who's BEST?");
     foreach (var taskresult in bests.GroupBy(b => b.Item1).OrderByDescending(b => b.Count()))
     {
         Console.WriteLine("{0}: {1}% ({2})", taskresult.Key, taskresult.Count() * 100 / groupBy.Count(), string.Join(".", taskresult.Take(15).Select(b => b.Item2)));
     }
 }
Example No. 3
        public void should_generate_unique_ids()
        {
            var messageIds = new List<MessageId>(200000);
            for (var i = 0; i < messageIds.Capacity; ++i)
            {
                messageIds.Add(MessageId.NextId());
            }

            var duplicatedMessageIds = messageIds.GroupBy(x => x.Value).Where(x => x.Count() != 1).ToList();
            duplicatedMessageIds.ShouldBeEmpty();
        }
        public void MeasureUpdatePerformance()
        {
            var subscriptions = new List<Subscription>();
            for (var typeIdIndex = 0; typeIdIndex < 20; ++typeIdIndex)
            {
                var typeId = new MessageTypeId("Abc.Foo.Events.FakeEvent" + typeIdIndex);
                for (var routingIndex = 0; routingIndex < 500; ++routingIndex)
                {
                    subscriptions.Add(new Subscription(typeId, new BindingKey(routingIndex.ToString())));
                }
            }

            var subscriptionsByTypeId = subscriptions.GroupBy(x => x.MessageTypeId).ToDictionary(x => x.Key, x => x.Select(s => s.BindingKey).ToArray());

            _directory = new PeerDirectoryClient(_configurationMock.Object);
            _directory.Handle(new PeerStarted(_otherPeer.ToPeerDescriptor(false)));

            Console.WriteLine("Snapshot updates (add)");
            using (Measure.Throughput(subscriptions.Count))
            {
                for (var subscriptionCount = 1; subscriptionCount <= subscriptions.Count; ++subscriptionCount)
                {
                    _directory.Handle(new PeerSubscriptionsUpdated(_otherPeer.ToPeerDescriptor(false, subscriptions.Take(subscriptionCount))));
                }
            }
            Console.WriteLine("Snapshot updates (remove)");
            using (Measure.Throughput(subscriptions.Count))
            {
                for (var subscriptionCount = subscriptions.Count; subscriptionCount >= 1; --subscriptionCount)
                {
                    _directory.Handle(new PeerSubscriptionsUpdated(_otherPeer.ToPeerDescriptor(false, subscriptions.Take(subscriptionCount))));
                }
            }

            _directory = new PeerDirectoryClient(_configurationMock.Object);
            _directory.Handle(new PeerStarted(_otherPeer.ToPeerDescriptor(false)));

            Console.WriteLine("Snapshot updates per message type id (add)");
            using (Measure.Throughput(subscriptions.Count))
            {
                foreach (var subscriptionGroup in subscriptionsByTypeId)
                {
                    _directory.Handle(new PeerSubscriptionsForTypesUpdated(_otherPeer.Id, DateTime.UtcNow, subscriptionGroup.Key, subscriptionGroup.Value));
                }
            }
            Console.WriteLine("Snapshot updates per message type id (remove)");
            using (Measure.Throughput(subscriptions.Count))
            {
                foreach (var subscriptionGroup in subscriptionsByTypeId)
                {
                    _directory.Handle(new PeerSubscriptionsForTypesUpdated(_otherPeer.Id, DateTime.UtcNow, subscriptionGroup.Key));
                }
            }
        }
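        // The GroupBy(...).ToDictionary(...) near the top of MeasureUpdatePerformance builds a
        // per-message-type lookup; ToLookup expresses the same thing in one step. A small,
        // self-contained sketch of the two equivalent forms, using plain strings instead of the
        // test's Subscription/MessageTypeId types (illustrative only, not part of the original test):
        public static void GroupByVersusToLookupSketch()
        {
            var pairs = new List<Tuple<string, string>>
            {
                Tuple.Create("Abc.Foo.Events.FakeEvent0", "0"),
                Tuple.Create("Abc.Foo.Events.FakeEvent0", "1"),
                Tuple.Create("Abc.Foo.Events.FakeEvent1", "0")
            };

            // GroupBy + ToDictionary, as in the test above:
            Dictionary<string, string[]> byType = pairs
                .GroupBy(p => p.Item1)
                .ToDictionary(g => g.Key, g => g.Select(p => p.Item2).ToArray());

            // Equivalent lookup built directly:
            ILookup<string, string> lookup = pairs.ToLookup(p => p.Item1, p => p.Item2);

            Console.WriteLine(string.Join(", ", byType["Abc.Foo.Events.FakeEvent0"])); // 0, 1
            Console.WriteLine(string.Join(", ", lookup["Abc.Foo.Events.FakeEvent1"])); // 0
        }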
Example No. 5
        public void GenerateMultipleIteratively()
        {
            var gen = new Generator();
            var stringList = new List<string>();

            for (int i = 0; i < 100000; i++)
            {
                stringList.Add(gen.Single(9));
            }

            Assert.LessOrEqual(1, stringList.GroupBy(x => x).Count());
        }
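        // Generator and its Single(int) method are not shown here; presumably Single(9) returns a
        // randomly generated string of the requested length. A minimal sketch under that
        // assumption (not the original class):
        public class Generator
        {
            private const string Alphabet = "abcdefghijklmnopqrstuvwxyz0123456789";
            private readonly Random _random = new Random();

            // Returns a random string of the requested length.
            public string Single(int length)
            {
                var chars = new char[length];
                for (var i = 0; i < length; i++)
                {
                    chars[i] = Alphabet[_random.Next(Alphabet.Length)];
                }
                return new string(chars);
            }
        }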
Example No. 6
        public void Method_Scenario_Result()
        {
            var list = new List<string> {"hello", "world"};
            var s = list[0];
            Console.WriteLine(s);

            Interlocked.Increment(ref _x2);

            var dictionary = new Dictionary<int, string> {{0, "Hello"}, {2, "World"}};
            var s1 = dictionary[0];

            var dateTime = new DateTime(1988,10,10);
            var dateTimes = new List<DateTime> { new DateTime(1988, 10, 10), new DateTime(1988, 10, 10), new DateTime(1987, 12, 29) };

            var enumerable = from time in dateTimes group time by time.Date into grouping select new {Date = grouping, Count = grouping.Count()};
            var enumerable2 = dateTimes.GroupBy(x => x.Date).Select(y => new {Date = y, Count = y.Count()}).ToList();

            Console.WriteLine(s1);
        }
Example No. 7
        public void GroupingElection()
        {
            var election2015 = new List<Election>
            {
                new Election { Name = "Clinton", Votes = 8 },
                new Election { Name = "Gore", Votes = 4 },
                new Election { Name = "Bush", Votes = 1 },
                new Election { Name = "Obama", Votes = 4 }
            };

            var groups = election2015.GroupBy(x => x.Votes, x => x.Name);
            groups.ToList().ForEach(voteGroup =>
            {
                Console.WriteLine(voteGroup.Key);
                foreach (var candidate in voteGroup)
                {
                    Console.WriteLine(candidate);
                }
            });
        }
Example No. 8
        public void GroupByList_v1()
        {
            List<Pet> pets =
                    new List<Pet>{ new Pet { Name="Barley", Age=8 },
                                   new Pet { Name="Boots", Age=4 },
                                   new Pet { Name="Whiskers", Age=1 },
                                   new Pet { Name="Daisy", Age=4 } };

            IEnumerable<IGrouping<int, string>> query =
                    pets.GroupBy(pet => pet.Age, pet => pet.Name);

            foreach (IGrouping<int, string> petGroup in query)
            {
                // Print the key value of the IGrouping.
                Console.WriteLine(petGroup.Key);
                // Iterate over each value in the 
                // IGrouping and print the value.
                foreach (string name in petGroup)
                    Console.WriteLine("  {0}", name);
            }
        }
Example No. 9
        public void FinalTest()
        {
            var buffer1 = new IdHashBuffer(4);
            var buffer2 = new IdHashBuffer(4);

            buffer1.TryWrite(G1, 1);
            buffer1.TryWrite(G2, 2);
            buffer1.TryWrite(G3, 3);
            buffer1.TryWrite(G4, 3);

            buffer1.Seal();

            buffer2.TryWrite(G5, 5);
            buffer2.TryWrite(G6, 2);
            buffer2.TryWrite(G7, 3);
            buffer2.TryWrite(G8, 5);

            buffer2.Seal();

            var collisions = new List<Tuple<ulong, Guid[]>>();
            Action<ulong, ArraySegment<Guid>> onCollision = (hash, ids) =>
            {
                collisions.Add(new Tuple<ulong, Guid[]>(hash, ids.ToArray()));
            };

            buffer1.FindHashCollisions(new[] { buffer1, buffer2 }, onCollision);
            buffer2.FindHashCollisions(new[] { buffer2 }, onCollision);

            var collisionDictionary = collisions
                .GroupBy(t => t.Item1)
                .Select(g => Tuple.Create(g.Key, g.SelectMany(x => x.Item2).Distinct().ToArray()))
                .ToDictionary(t => t.Item1, t => t.Item2);

            Assert.AreEqual(3, collisionDictionary.Count);
            CollectionAssert.AreEquivalent(new[] { G2, G6 }, collisionDictionary[2]);
            CollectionAssert.AreEquivalent(new[] { G3, G4, G7 }, collisionDictionary[3]);
            CollectionAssert.AreEquivalent(new[] { G5, G8 }, collisionDictionary[5]);
        }
        public void Should_detect_collided_news()
        {
            var time = DateTime.Now;

            var evtList = new List<EconomicEvent>
            {
                new EconomicEvent { Currency = "USD", DateTime = time },
                new EconomicEvent { Currency = "USD", DateTime = time }
            };

            var groups = evtList.GroupBy(x => x.DateTime);

            Assert.AreEqual(1, groups.Count());
            groups.First();

            foreach (var grp in groups)
            {
                Console.WriteLine(grp.Key);
                foreach (var item in grp)
                {
                    Console.WriteLine(item.Currency);
                }
            }
        }
        public void ToDictionaryTestCase()
        {
            var list = new List<Tuple<Int32, String>>
            {
                new Tuple<Int32, String>( 1, "test1.1" ),
                new Tuple<Int32, String>( 1, "test1.2" ),
                new Tuple<Int32, String>( 1, "test1.3" ),
                new Tuple<Int32, String>( 2, "test2.1" ),
                new Tuple<Int32, String>( 2, "test2.2" ),
                new Tuple<Int32, String>( 2, "test2.3" ),
                new Tuple<Int32, String>( 3, "test3.1" ),
                new Tuple<Int32, String>( 3, "test3.2" ),
                new Tuple<Int32, String>( 3, "test3.3" )
            };

            var groups = list.GroupBy( x => x.Item1 );
            var actual = groups.ToDictionary();

            Assert.AreEqual( 3, actual.Count );

            Assert.AreEqual( 3, actual[1].Count );
            Assert.AreEqual( 3, actual[2].Count );
            Assert.AreEqual( 3, actual[3].Count );
        }
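        // The parameterless ToDictionary() called on the groups above is not a standard LINQ
        // operator; it is presumably a custom extension that materializes each IGrouping into a
        // keyed list, which is what the Count assertions rely on. A minimal sketch under that
        // assumption (not the library's actual implementation):
        public static class GroupingExtensions
        {
            public static Dictionary<TKey, List<TElement>> ToDictionary<TKey, TElement>(
                this IEnumerable<IGrouping<TKey, TElement>> groups)
            {
                // Delegates to the standard two-selector ToDictionary overload.
                return groups.ToDictionary(g => g.Key, g => g.ToList());
            }
        }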
Example No. 12
        public void Test()
        {
            List<DataItem> list = new List<DataItem>
            {
                new DataItem(1, 10.0),
                new DataItem(1, 14.0),
                new DataItem(2, 20.0),
                new DataItem(4, 40.0),
                new DataItem(4, 41.0)
            };
            Dictionary<int, List<double>> expectedDictionary = new Dictionary<int, List<double>>
            {
                {1, new List<double> {10.0, 14.0}},
                {2, new List<double> {20.0}},
                {4, new List<double> {40.0, 41.0}}
            };

            Dictionary<int, List<double>> actualDictionary = list
                .GroupBy(dataItem => dataItem.Key, dataItem => dataItem.Value)
                .ToDictionary(item => item.Key, item => item.ToList());

            Assert.That(actualDictionary.Keys, Is.EquivalentTo(expectedDictionary.Keys));
            Assert.That(actualDictionary.Values, Is.EquivalentTo(expectedDictionary.Values));
        }
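        // DataItem is not defined in this snippet; from its usage it is presumably a simple
        // key/value pair. A minimal sketch under that assumption:
        public class DataItem
        {
            public DataItem(int key, double value)
            {
                Key = key;
                Value = value;
            }

            public int Key { get; private set; }
            public double Value { get; private set; }
        }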
Example No. 13
        public void MessageIdsAreDifferent()
        {
            var messageIds = new List<string>();
            var counter = new SharedCounter(1);

            _activator1.Handle<string>(async (bus, ctx, str) =>
            {
                messageIds.Add(ctx.Headers[Headers.MessageId]);
                await bus.Advanced.Routing.Send("bus2", "hej!");
            });
            _activator2.Handle<string>(async (bus, ctx, str) =>
            {
                messageIds.Add(ctx.Headers[Headers.MessageId]);
                counter.Decrement();
            });

            _activator1.Bus.SendLocal("heeeej!").Wait();

            counter.WaitForResetEvent();

            Assert.That(messageIds.GroupBy(i => i).Count(), Is.EqualTo(2));
        }
Example No. 14
        public void When_duplicates_found_Should_raise_an_event_for_every_duplicate_set_found(int testSize)
        {
            const int NumberOfCollisionsForEach = 2;
            var modulo = testSize / NumberOfCollisionsForEach;

            var toCreate = Enumerable.Range(0, testSize).Select(i => Tuple.Create(i, Guid.NewGuid())).ToArray();

            const int concurrency = 100;
            var semaphore = new SemaphoreSlim(concurrency);

            foreach (var t in toCreate)
            {
                semaphore.Wait();
                cloudTable.ExecuteAsync(TableOperation.Insert(CreateSagaState(t, modulo)))
                    .ContinueWith(task =>
                    {
                        if (task.Exception != null)
                        {
                            Console.WriteLine($"Exception occured {task.Exception}");
                        }
                        semaphore.Release();
                    });
            }

            for (var i = 0; i < concurrency; i++)
            {
                semaphore.Wait();
            }

            var comparer = EqualityComparers.GetValueComparer(EdmType.Int64);
            var indexer = new SagaIndexer(cloudTable, "CorrelatingId", o => (ulong)(long)o, comparer);
            var results = new List<Tuple<Guid, Guid[]>>();

            indexer.SearchForDuplicates((o, guids) => results.Add(Tuple.Create(o, guids.ToArray())));

            var dict = results
                .GroupBy(t => t.Item1, t => t.Item2, comparer)
                .ToDictionary(g => g.Key, g => g.SelectMany(ids => ids).Distinct().ToArray(), comparer);

            Assert.AreEqual(modulo, dict.Count);
            foreach (var kvp in dict)
            {
                Assert.AreEqual(2, kvp.Value.Length);
            }
        }
Example No. 15
        public async void KeyedMessagesPreserveOrder()
        {
            kafka4net.Tracing.EtwTrace.Marker("KeyedMessagesPreserveOrder");
            // create a topic with 3 partitions
            var topicName = "part33." + _rnd.Next();
            VagrantBrokerUtil.CreateTopic(topicName, 3, 3);
            
            // create listener in a separate connection/broker
            var receivedMsgs = new List<ReceivedMessage>();
            var consumer = new Consumer(new ConsumerConfiguration(_seed2Addresses, topicName, new StartPositionTopicEnd()));
            var consumerSubscription = consumer.OnMessageArrived.Synchronize().Subscribe(msg =>
            {
                lock (receivedMsgs)
                {
                    receivedMsgs.Add(msg);
                }
            });
            await consumer.IsConnected;

            // sender is configured with 50ms batch period
            var producer = new Producer(_seed2Addresses, new ProducerConfiguration(topicName, TimeSpan.FromMilliseconds(50)));
            await producer.ConnectAsync();

            //
            // generate messages at ~100 msg/s (here a single sender with a 10ms interval)
            //
            var sentMsgs = new List<Message>();
            _log.Info("Start sending");
            var senders = Enumerable.Range(1, 1).
                Select(thread => Observable.
                    Interval(TimeSpan.FromMilliseconds(10)).
                    Synchronize(). // protect adding to sentMsgs
                    Select(i =>
                    {
                        var str = "msg " + i + " thread " + thread + " " + Guid.NewGuid();
                        var bin = Encoding.UTF8.GetBytes(str);
                        var msg = new Message
                        {
                            Key = BitConverter.GetBytes((int)(i + thread) % 10),
                            Value = bin
                        };
                        return Tuple.Create(msg, i, str);
                    }).
                    Subscribe(msg =>
                    {
                        lock (sentMsgs)
                        {
                            producer.Send(msg.Item1);
                            sentMsgs.Add(msg.Item1);
                            Assert.AreEqual(msg.Item2, sentMsgs.Count-1);
                        }
                    })
                ).
                ToArray();

            // wait for around 10K messages (10K/(10*10) = 100sec) and close producer
            _log.Info("Waiting for producer to produce enough...");
            await Task.Delay(100*1000);
            _log.Info("Closing senders intervals");
            senders.ForEach(s => s.Dispose());
            _log.Info("Closing producer");
            await producer.CloseAsync(TimeSpan.FromSeconds(5));

            _log.Info("Waiting for additional 10sec");
            await Task.Delay(10*1000);

            _log.Info("Disposing consumer");
            consumerSubscription.Dispose();
            _log.Info("Closing consumer");
            consumer.Dispose();
            _log.Info("Done with networking");

            // compare sent and received messages
            // TODO: for some reason performance is not what I'd expect it to be and only ~6K messages are generated.
            Assert.GreaterOrEqual(sentMsgs.Count, 4000, "Expected around 10K messages to be sent");

            if (sentMsgs.Count != receivedMsgs.Count)
            {
                var sentStr = sentMsgs.Select(m => Encoding.UTF8.GetString(m.Value)).ToArray();
                var receivedStr = receivedMsgs.Select(m => Encoding.UTF8.GetString(m.Value)).ToArray();
                sentStr.Except(receivedStr).
                    ForEach(m => _log.Error("Not received: '{0}'", m));
                receivedStr.Except(sentStr).
                    ForEach(m => _log.Error("Not sent but received: '{0}'", m));
            }
            Assert.AreEqual(sentMsgs.Count, receivedMsgs.Count, "Sent and received messages count differs");
            
            //
            // group messages by key and compare lists in each key to be the same (order should be preserved within key)
            //
            var keysSent = sentMsgs.GroupBy(m => BitConverter.ToInt32(m.Key, 0), m => Encoding.UTF8.GetString(m.Value), (i, mm) => new { Key = i, Msgs = mm.ToArray() }).ToArray();
            var keysReceived = receivedMsgs.GroupBy(m => BitConverter.ToInt32(m.Key, 0), m => Encoding.UTF8.GetString(m.Value), (i, mm) => new { Key = i, Msgs = mm.ToArray() }).ToArray();
            Assert.AreEqual(10, keysSent.Count(), "Expected 10 unique keys 0-9");
            Assert.AreEqual(keysSent.Count(), keysReceived.Count(), "Keys count does not match");
            // compare order within each key
            var notInOrder = keysSent
                .OrderBy(k => k.Key)
                .Zip(keysReceived.OrderBy(k => k.Key), (s, r) => new { s, r, ok = s.Msgs.SequenceEqual(r.Msgs) }).Where(_ => !_.ok).ToArray();

            if (notInOrder.Any())
            {
                _log.Error("{0} keys are out of order", notInOrder.Count());
                notInOrder.ForEach(_ => _log.Error("Failed order in:\n{0}", 
                    string.Join(" \n", DumpOutOfOrder(_.s.Msgs, _.r.Msgs))));
            }
            Assert.IsTrue(!notInOrder.Any(), "Detected out of order messages");

            kafka4net.Tracing.EtwTrace.Marker("/KeyedMessagesPreserveOrder");
        }
Example No. 16
        public static void WaitForSchemaAgreement(ICluster cluster)
        {
            const int maxRetries = 20;
            var hostsLength = cluster.AllHosts().Count;
            if (hostsLength == 1)
            {
                return;
            }
            var cc = cluster.Metadata.ControlConnection;
            var counter = 0;
            var nodesDown = cluster.AllHosts().Count(h => !h.IsConsiderablyUp);
            while (counter++ < maxRetries)
            {
                Trace.TraceInformation("Waiting for test schema agreement");
                Thread.Sleep(500);
                var hosts = new List<Guid>();
                //peers
                hosts.AddRange(cc.Query("SELECT peer, schema_version FROM system.peers").Select(r => r.GetValue<Guid>("schema_version")));
                //local
                hosts.Add(cc.Query("SELECT schema_version FROM system.local").Select(r => r.GetValue<Guid>("schema_version")).First());

                var differentSchemas = hosts.GroupBy(v => v).Count();
                if (differentSchemas <= 1 + nodesDown)
                {
                    // There is a single schema version, or at most 1 + the number of nodes that are considered down
                    break;
                }
            }
        }
        public void TestCalibration()
        {
            string uimfFileLocation = Cheetah;

            LcImsPeptideSearchParameters parameters = new LcImsPeptideSearchParameters
            {
                ChargeStateMax = 5,
                NetTolerance = 0.5,
                IsotopicFitScoreThreshold = 0.15,
                MassToleranceInPpm = 30,
                NumPointForSmoothing = 9
            };

            List<PeptideTarget> targetList = MassTagImporter.ImportMassTags("elmer", "MT_Human_Sarcopenia_P789", 1e-10, true);
            Console.WriteLine("Using " + targetList.Count + " targets for calibration.");

            List<Tuple<double, double>> netAlignmentInput = new List<Tuple<double, double>>();
            List<Tuple<double, double>> massAlignmentInput = new List<Tuple<double, double>>();

            LcImsPeptideSearchWorkfow lcImsPeptideSearchWorkfow = new LcImsPeptideSearchWorkfow(uimfFileLocation, parameters);

            foreach (var imsTarget in targetList.OrderBy(x => x.NormalizedElutionTime))
            {
                ChargeStateCorrelationResult correlationResult = lcImsPeptideSearchWorkfow.RunInformedWorkflow(imsTarget);

                if (correlationResult == null || !correlationResult.CorrelatedResults.Any()) continue;

                LcImsTargetResult result = correlationResult.CorrelatedResults.OrderByDescending(x => x.Intensity).First();
                //LcImsTargetResult result = correlationResult.CorrelatedResults.OrderByDescending(X => X.Intensity * (1 - Math.Abs(X.NormalizedElutionTime - imsTarget.NormalizedElutionTime))).First();
                //LcImsTargetResult result = correlationResult.CorrelatedResults.OrderBy(X => X.NormalizedElutionTime).First();

                //if (netAlignmentInput.Count == 0 || Math.Abs(netAlignmentInput.Last().Item1 - imsTarget.NormalizedElutionTime) > 0.0001)
                //{
                //    netAlignmentInput.Add(new Tuple<double, double>(imsTarget.NormalizedElutionTime, result.NormalizedElutionTime));
                //    massAlignmentInput.Add(new Tuple<double, double>(imsTarget.NormalizedElutionTime, result.PpmError));
                //}

                netAlignmentInput.Add(new Tuple<double, double>(result.NormalizedElutionTime, imsTarget.NormalizedElutionTime));
                massAlignmentInput.Add(new Tuple<double, double>(result.IsotopicProfile.MonoPeakMZ, result.PpmError));
            }

            var netAlignmentInputGroup = netAlignmentInput.GroupBy(x => x.Item1).OrderBy(x => x.Key);
            var massAlignmentInputGroup = massAlignmentInput.GroupBy(x => x.Item1).OrderBy(x => x.Key);

            netAlignmentInput = netAlignmentInput.OrderBy(x => x.Item1).ToList();
            massAlignmentInput = massAlignmentInput.OrderBy(x => x.Item1).ToList();

            var groupedNetTuple = netAlignmentInputGroup.Select(x => x.OrderBy(y => Math.Abs(y.Item1 - y.Item2)).First()).ToArray();
            //var groupedNetTuple = netAlignmentInputGroup.Select(X => X.Average(Y => Y.Item2)).ToArray();
            var groupedMassTuple = massAlignmentInputGroup.Select(x => x.First()).ToArray();

            var loessInterpolatorForNetAlignment = new LoessInterpolator(0.1, 4);
            var loessInterpolatorForMassAlignment = new LoessInterpolator(0.2, 1);

            //double[] newNetValues = loessInterpolatorForNetAlignment.Smooth(netAlignmentInputGroup.Select(X => X.Key).ToArray(), netAlignmentInputGroup.Select(X => X.Average(Y => Y.Item2)).ToArray());
            double[] newNetValues = loessInterpolatorForNetAlignment.Smooth(groupedNetTuple.Select(x => x.Item1).ToArray(), groupedNetTuple.Select(x => x.Item2).ToArray());
            double[] newMassValues = loessInterpolatorForMassAlignment.Smooth(groupedMassTuple.Select(x => x.Item1).ToArray(), groupedMassTuple.Select(x => x.Item2).ToArray());

            using(StreamWriter writer = new StreamWriter("oldNetValues.csv"))
            {
                foreach (var netTuple in groupedNetTuple)
                {
                    writer.WriteLine(netTuple.Item1 + "," + netTuple.Item2);
                }
            }

            using (StreamWriter writer = new StreamWriter("oldMassValues.csv"))
            {
                foreach (var massTuple in groupedMassTuple)
                {
                    writer.WriteLine(massTuple.Item1 + "," + massTuple.Item2);
                }
            }

            using (StreamWriter writer = new StreamWriter("smoothedNetValues.csv"))
            {
                for (int i = 0; i < groupedNetTuple.Length; i++)
                {
                    writer.WriteLine(groupedNetTuple[i].Item1 + "," + newNetValues[i]);
                }
            }

            using (StreamWriter writer = new StreamWriter("smoothedMassValues.csv"))
            {
                for (int i = 0; i < groupedMassTuple.Length; i++)
                {
                    writer.WriteLine(groupedMassTuple[i].Item1 + "," + newMassValues[i]);
                }
            }
        }
Example No. 18
        public void GetNonce()
        {
            List<string> nonces = new List<string>();

            for (int i = 0; i < 1000; i++)
            {
                nonces.Add(helpers.BuildNonce());
            }

            var g = nonces.GroupBy(n => n);
            g.ToList().ForEach(n => Assert.AreEqual(1, n.Count()));
        }
Example No. 19
        public void TestPeptideLevelStats()
        {
            var methodName = MethodBase.GetCurrentMethod().Name;
            TestUtils.ShowStarting(methodName);

            const int topK = 10;
            const string resultDir = @"D:\Research\Data\UW\QExactive\Ic_NTT1_Rescoring";
            if (!Directory.Exists(resultDir))
            {
                Assert.Ignore(@"Skipping test {0} since folder not found: {1}", methodName, resultDir);
            }

            var concatenated = new List<string>();
            const string mzRange = "650to775";
            foreach (var specFilePath in Directory.GetFiles(resultDir, "*DIA*" + mzRange + "*IcTarget.tsv"))
            {
                concatenated.AddRange(File.ReadAllLines(specFilePath).Skip(1));
            }

            foreach (var specFilePath in Directory.GetFiles(resultDir, "*DIA*" + mzRange + "*IcDecoy.tsv"))
            {
                concatenated.AddRange(File.ReadAllLines(specFilePath).Skip(1));
            }

            const string headerStr =
                "Scan\tPre\tSequence\tPost\tModifications\t" +
                "Composition\tProteinName\tProteinDesc\tProteinLength\t" +
                "Start\tEnd\tCharge\tMostAbundantIsotopeMz\t" +
                "Mass\t#MatchedFragments\tIcScore";
            var header = headerStr.Split('\t').ToList();
            if (concatenated.Count <= 1) return;

            var scoreIndex = header.IndexOf("IcScore");
            var sequenceIndex = header.IndexOf("Sequence");
            var preIndex = header.IndexOf("Pre");
            var postIndex = header.IndexOf("Post");
            var proteinIndex = header.IndexOf("ProteinName");
            var scanIndex = header.IndexOf("Scan");
            if (scoreIndex < 0 || sequenceIndex < 0 || preIndex < 0 || postIndex < 0 || proteinIndex < 0) return;

            //var distinctSorted = concatenated.OrderByDescending(r => Convert.ToDouble(r.Split('\t')[scoreIndex]))
            //    .GroupBy(r => Convert.ToDouble(r.Split('\t')[scanIndex]))
            //    .Select(grp => grp.First())
            //    .ToArray();

            var distinctSorted = concatenated
                .GroupBy(r => Convert.ToInt32(r.Split('\t')[scanIndex]))
                .Select(g => new
                {
                    FirstTwo = g.OrderByDescending(r => Convert.ToDouble(r.Split('\t')[scoreIndex])).Take(topK)
                })
                .SelectMany(g => g.FirstTwo)
                .OrderByDescending(r => Convert.ToDouble(r.Split('\t')[scoreIndex]))
                .GroupBy(r => r.Split('\t')[preIndex] + r.Split('\t')[sequenceIndex] + r.Split('\t')[postIndex])
                .Select(grp => grp.First())
                .ToArray();

            // Calculate q values
            var numDecoy = 0;
            var numTarget = 0;
            var fdr = new double[distinctSorted.Length];
            for (var i = 0; i < distinctSorted.Length; i++)
            {
                var row = distinctSorted[i];
                var columns = row.Split('\t');
                var protein = columns[proteinIndex];
                if (protein.StartsWith(FastaDatabase.DecoyProteinPrefix)) numDecoy++;
                else numTarget++;
                fdr[i] = numDecoy / (double)numTarget;
            }

            var numPeptides = 0;
            var qValue = new double[fdr.Length];
            qValue[fdr.Length - 1] = fdr[fdr.Length - 1];
            for (var i = fdr.Length - 2; i >= 0; i--)
            {
                qValue[i] = Math.Min(qValue[i + 1], fdr[i]);
                if (qValue[i] < 0.01) numPeptides++;
            }

            Console.WriteLine("NumPeptides: {0}", numPeptides);
        }
Example No. 20
        public void TestGetCreditStatic()
        {
            var indicesStatics = mCarbonClient.GetStaticDataAsync(new string[] { }, "static-markitindices").Result;

            var dict = new Dictionary<string, CreditIndicesStatic>();

            var calendar = mCarbonClient.GetCalendarAsync("GBLO").Result;

            var holidays = new HashSet<DateTime>();
            foreach (var localDate in calendar.Dates)
            {
                var d = localDate.ToDateTime();
                if (!holidays.Contains(d))
                {
                    holidays.Add(d);
                }
            }

            var seriesConfigs = new Dictionary<string, CreditTrSeriesConfig>
            {
                {"CDX.NA.IG.3Y", new CreditTrSeriesConfig
                {
                    LookupCode = "CDX-NAIGS",
                    Indice = "CDX.NA.IG",
                    Tenor = "3Y",
                    Curve = "USD.CDS.ISDA"
                }},
                {"CDX.NA.IG.5Y", new CreditTrSeriesConfig
                {
                    LookupCode = "CDX-NAIGS",
                    Indice = "CDX.NA.IG",
                    Tenor = "5Y",
                    Curve = "USD.CDS.ISDA"
                }},
                {"CDX.NA.IG.7Y", new CreditTrSeriesConfig
                {
                    LookupCode = "CDX-NAIGS",
                    Indice = "CDX.NA.IG",
                    Tenor = "7Y",
                    Curve = "USD.CDS.ISDA"
                }},
                {"CDX.NA.IG.10Y", new CreditTrSeriesConfig
                {
                    LookupCode = "CDX-NAIGS",
                    Indice = "CDX.NA.IG",
                    Tenor = "10Y",
                    Curve = "USD.CDS.ISDA"
                }},
                {"Itraxx.Main.3Y", new CreditTrSeriesConfig
                {
                    LookupCode = "ITRAXX-EUROPES",
                    Indice = "Itraxx.Main",
                    Tenor = "3Y",
                    Curve = "EUR.CDS.ISDA"
                }},
                {"Itraxx.Main.5Y", new CreditTrSeriesConfig
                {
                    LookupCode = "ITRAXX-EUROPES",
                    Indice = "Itraxx.Main",
                    Tenor = "5Y",
                    Curve = "EUR.CDS.ISDA"
                }},
                {"Itraxx.Main.7Y", new CreditTrSeriesConfig
                {
                    LookupCode = "ITRAXX-EUROPES",
                    Indice = "Itraxx.Main",
                    Tenor = "7Y",
                    Curve = "EUR.CDS.ISDA"
                }},
                {"Itraxx.Main.10Y", new CreditTrSeriesConfig
                {
                    LookupCode = "ITRAXX-EUROPES",
                    Indice = "Itraxx.Main",
                    Tenor = "10Y",
                    Curve = "EUR.CDS.ISDA"
                }},
                {"Itraxx.CrossOver.5Y", new CreditTrSeriesConfig
                {
                    LookupCode = "ITRAXX-XOVERS",
                    Indice = "Itraxx.CrossOver",
                    Tenor = "5Y",
                    Curve = "EUR.CDS.ISDA"
                }},
                {"Itraxx.SnrFin.5Y", new CreditTrSeriesConfig
                {
                    LookupCode = "ITRAXX-FINSENS",
                    Indice = "Itraxx.SnrFin",
                    Tenor = "5Y",
                    Curve = "EUR.CDS.ISDA"
                }},
                {"CDX.NA.HY.5Y", new CreditTrSeriesConfig
                {
                    LookupCode = "CDX-NAHYS",
                    Indice = "CDX.NA.HY",
                    Tenor = "5Y",
                    Curve = "USD.CDS.ISDA"
                }}
            };

            foreach (var data in indicesStatics)
            {
                if (data.Properties != null)
                {
                    string redId = null, currency = null, payFreq = null;
                    DateTime? effectiveDt = null, 
                        announceDt = null, 
                        firstPayDt = null, 
                        rollDate = null;
                    int? series = null, version = null;
                    double? factor = null, evalRecoveryRate = null;

                    if (data.Properties.ContainsKey("redIdxCode"))
                    {
                        redId = data.Properties["redIdxCode"].AsString();
                    }

                    if (data.Properties.ContainsKey("series"))
                    {
                        var val = data.Properties["series"].AsString();
                        int value;
                        if (int.TryParse(val, out value))
                        {
                            series = value;
                        }
                    }

                    if (data.Properties.ContainsKey("idxVersion"))
                    {
                        version = data.Properties["idxVersion"].AsInt32();
                    }

                    if (data.Properties.ContainsKey("ccy"))
                    {
                        currency = data.Properties["ccy"].AsString();
                    }

                    if (data.Properties.ContainsKey("payFreq"))
                    {
                        payFreq = data.Properties["payFreq"].AsString();
                    }

                    if (data.Properties.ContainsKey("effectiveDt"))
                    {
                        var val = data.Properties["effectiveDt"].AsString();
                        DateTime value;
                        if (DateTime.TryParseExact(val, "yyyyMMdd", CultureInfo.InvariantCulture, DateTimeStyles.None,
                            out value))
                        {
                            effectiveDt = value;
                        }
                    }

                    if (data.Properties.ContainsKey("announceDt"))
                    {
                        var val = data.Properties["announceDt"].AsString();
                        DateTime value;
                        if (DateTime.TryParseExact(val, "yyyyMMdd", CultureInfo.InvariantCulture, DateTimeStyles.None,
                            out value))
                        {
                            announceDt = value;
                            rollDate = mCarbonClient.RollDateAsync(value.ToLocalDate(),
                                -1, DateUnit.Bd, BusinessDayConvention.None, mCalendarCode).Result.ToDateTime();
                        }
                    }

                    if (data.Properties.ContainsKey("firstPayDate"))
                    {
                        var val = data.Properties["firstPayDate"].AsString();
                        DateTime value;
                        if (DateTime.TryParseExact(val, "yyyyMMdd", CultureInfo.InvariantCulture, DateTimeStyles.None,
                            out value))
                        {
                            firstPayDt = value;
                        }
                    }

                    if (data.Properties.ContainsKey("factor"))
                    {
                        factor = data.Properties["factor"].AsDouble();
                    }

                    if (data.Properties.ContainsKey("evalRecoveryRate"))
                    {
                        evalRecoveryRate = data.Properties["evalRecoveryRate"].AsDouble();
                    }

                    if (string.IsNullOrEmpty(redId)
                        || string.IsNullOrEmpty(currency)
                        || string.IsNullOrEmpty(payFreq)
                        || !effectiveDt.HasValue
                        || !announceDt.HasValue
                        || !firstPayDt.HasValue
                        || !factor.HasValue
                        || !evalRecoveryRate.HasValue
                        || !series.HasValue
                        || !version.HasValue
                        || !rollDate.HasValue)
                    {
                        Console.WriteLine("Skip {0} due to missing data", data.Identifier);
                        continue;
                    }

                    if (data.Properties.ContainsKey("idxTerms"))
                    {
                        var list = data.Properties["idxTerms"].AsDictionary();
                        if (list.ContainsKey("term"))
                        {
                            var terms = list["term"].IsDictionary 
                                ? new List<MessagePackObject> {list["term"]} 
                                : list["term"].AsList();

                            foreach (var term in terms)
                            {
                                var termDict = term.AsDictionary();
                                string tenor = null, id = null;
                                double? coupon = null;
                                DateTime? maturity = null;
                                int? tenorInYear = null;

                                if (termDict.ContainsKey("tenor"))
                                {
                                    tenor = termDict["tenor"].AsString();

                                    int value;
                                    if (int.TryParse(tenor.Replace("Y", null), out value))
                                    {
                                        tenorInYear = value;
                                    }
                                }

                                if (termDict.ContainsKey("tradeid"))
                                {
                                    id = termDict["tradeid"].AsString();
                                }

                                if (termDict.ContainsKey("coupon"))
                                {
                                    var val = termDict["coupon"].AsString();
                                    double value;
                                    if (double.TryParse(val, out value))
                                    {
                                        coupon = value;
                                    }
                                }

                                if (termDict.ContainsKey("maturity"))
                                {
                                    var val = termDict["maturity"].AsString();
                                    DateTime value;
                                    if (DateTime.TryParseExact(val, "yyyy-MM-dd", CultureInfo.InvariantCulture, DateTimeStyles.None,
                                        out value))
                                    {
                                        maturity = value;
                                    }
                                }

                                if (string.IsNullOrEmpty(tenor)
                                    || string.IsNullOrEmpty(id)
                                    || !coupon.HasValue
                                    || !maturity.HasValue
                                    || !tenorInYear.HasValue)
                                {
                                    Console.WriteLine("Skip term due to missing data");
                                    continue;
                                }

                                dict.Add(id, new CreditIndicesStatic
                                {
                                    AnnouncementDate = announceDt.Value,
                                    Coupon = coupon.Value,
                                    Currency = currency,
                                    EffectiveDate = effectiveDt.Value,
                                    EvalRecoveryRate = evalRecoveryRate.Value,
                                    Factor = factor.Value,
                                    FirstPayDate = firstPayDt.Value,
                                    Id = id,
                                    Maturity = maturity.Value,
                                    PayFrequency = payFreq,
                                    RedId = redId,
                                    Series = series.Value,
                                    Version = version.Value,
                                    Tenor = tenor,
                                    TenorInYear = tenorInYear.Value,
                                    RollDate = rollDate.Value
                                });
                            }
                        }
                    }
                }
            }

            var configsAndStatics = new Dictionary<string, 
                Tuple<CreditTrSeriesConfig, List<CreditIndicesStatic>>>();

            foreach (var indice in seriesConfigs)
            {
                var list = new List<CreditIndicesStatic>();
                foreach (var kvp in dict)
                {
                    if (kvp.Key.StartsWith(indice.Value.LookupCode)
                        && kvp.Value.Tenor == indice.Value.Tenor)
                    {
                        list.Add(kvp.Value);
                    }
                }

                var groups = list.GroupBy(s => s.TenorInYear);

                foreach (var group in groups )
                {
                    var values = group.OrderByDescending(r => r.Series)
                        .ThenByDescending(r => r.Version)
                        .ToList();

                    configsAndStatics.Add(indice.Key, 
                        new Tuple<CreditTrSeriesConfig, List<CreditIndicesStatic>>(indice.Value, values));

                    foreach (var creditIndicesStatic in values)
                    {
                        Console.WriteLine("{0}\t{1}\t{2}\t{3}\t{4}\t{5:yyyyMMdd}\t{6:yyyyMMdd}\t{7:yyyyMMdd}",
                            indice.Key,
                            creditIndicesStatic.Id,
                            creditIndicesStatic.Series,
                            creditIndicesStatic.Version,
                            creditIndicesStatic.Tenor,
                            creditIndicesStatic.AnnouncementDate,
                            creditIndicesStatic.EffectiveDate,
                            creditIndicesStatic.RollDate);
                    }
                }
            }

            var dates = new List<DateTime>();

            var currentDate = DateTime.Today;
            var minDate = new DateTime(2015, 1, 1);

            while (minDate < currentDate)
            {
                if (currentDate.DayOfWeek != DayOfWeek.Saturday
                    && currentDate.DayOfWeek != DayOfWeek.Sunday
                    && !holidays.Contains(currentDate))
                {
                    dates.Add(currentDate);
                }

                currentDate = currentDate.AddDays(-1);
            }

            using (var sw = new StreamWriter(@"D:\temp\credit_on_the_run.csv"))
            {
                foreach (var kvp in configsAndStatics)
                {
                    var config = kvp.Key;

                    var list = new List<Tuple<string, DateTime>>();

                    var rollDate = DateTime.MaxValue;

                    foreach (var t in kvp.Value.Item2)
                    {
                        if (rollDate > t.RollDate)
                        {
                            rollDate = t.RollDate;
                            list.Add(new Tuple<string, DateTime>(t.Id, rollDate));
                        }
                    }

                    //foreach (var tuple in list)
                    //{
                    //    Console.WriteLine("{0}\t{1:yyyyMMdd}", tuple.Item1, tuple.Item2);
                    //}

                    var currentIdx = 0;
                    foreach (var date in dates)
                    {
                        var rDate = list[currentIdx].Item2;
                        if (date < rDate)
                        {
                            if (currentIdx != list.Count - 1)
                            {
                                currentIdx++;
                            }
                        }
                        sw.WriteLine("{0},{1:yyyyMMdd},{2}", config, date, list[currentIdx].Item1);
                    }
                }
            }
        }
        public void test_match10()
        {
            var testDirectory = AppDomain.CurrentDomain.BaseDirectory;
            var excelFilesDirectory = Path.Combine(testDirectory, "ExcelFiles");
            var excelFileName = Path.Combine(excelFilesDirectory, "match10.csv");
            var stream = new StreamReader(excelFileName);
            var outputFileName = Path.Combine(excelFilesDirectory, "output.csv");
            var count = 0;
            var records = new List<Record>();
            while (true)
            {
                var line = stream.ReadLine();
                if (line == null) break;
                var fields = line.Split(',');

                if (fields.Length > 3)
                {
                    records.Add(new Record
                    {
                        UserId = fields[0].Replace("'", ""),
                        Url = fields[1].Replace("'", ""),
                        Times = fields[fields.Length - 1].Replace("'", "").ConvertToInt(0)
                    });
                }
                else if (fields.Length == 3)
                {
                    records.Add(new Record
                    {
                        UserId = fields[0].Replace("'", ""),
                        Url = fields[1].Replace("'", ""),
                        Times = fields[2].Replace("'", "").ConvertToInt(0)
                    });
                }
                count++;
            } 
            Assert.AreEqual(count, 3000000);
            Assert.AreEqual(records[0].UserId, "00008d13724");
            Assert.AreEqual(records[0].Url, "http://i.thsi.cn/Mobileicon/gPhone/gupiaokaihu20160601g.png");
            Assert.AreEqual(records[0].Times, 103925);
            var payingRecords = records
                .Where(x => x.Url.StartsWith("http://m.360buyimg.com/") || x.Url.StartsWith("http://alipay"))
                .GroupBy(x => x.UserId)
                .Select(u => new
            {
                UserId = u.Key,
                TotalTimes = u.Sum(g => g.Times),
                JingdongTimes = u.Where(g => g.Url.StartsWith("http://m.360buyimg.com/")).Sum(g => g.Times),
                AliTimes = u.Where(g => g.Url.StartsWith("http://alipay")).Sum(g => g.Times)
            });
            var totalRecords = records.GroupBy(x => x.UserId).Select(u => new
            {
                UserId = u.Key,
                TotalTimes = u.Sum(g => g.Times)
            });
            var results = from lr in payingRecords join ttri in totalRecords on lr.UserId equals ttri.UserId select new 
            {
                UserId = lr.UserId,
                Factor = lr.TotalTimes/10 + ttri.TotalTimes,
                lr.JingdongTimes,
                lr.AliTimes
            };
            //Assert.AreEqual(results.Count(), 14363);
            using (var writer = new StreamWriter(outputFileName))
            {
                foreach (var result in results)
                {
                    writer.WriteLine(result.UserId + "," + result.Factor + "," + result.JingdongTimes + "," + result.AliTimes);
                }
            }
            //Assert.AreEqual(line, "'00008d13724','http://i.thsi.cn/Mobileicon/gPhone/gupiaokaihu20160601g.png','103925'");
            stream.Close();
        }
Example No. 22
        public void CorrelationIdFlows()
        {
            var correlationIds = new List<string>();
            var counter = new SharedCounter(1);

            _activator1.Handle<string>(async (bus, ctx, str) =>
            {
                correlationIds.Add(ctx.Headers[Headers.CorrelationId]);
                await bus.Advanced.Routing.Send("bus2", "hej!");
            });
            _activator2.Handle<string>(async (bus, ctx, str) =>
            {
                correlationIds.Add(ctx.Headers[Headers.CorrelationId]);
                await bus.Advanced.Routing.Send("bus3", "hej!");
            });
            _activator3.Handle<string>(async (bus, ctx, str) =>
            {
                correlationIds.Add(ctx.Headers[Headers.CorrelationId]);
                counter.Decrement();
            });

            _activator1.Bus.SendLocal("heeeej!").Wait();

            counter.WaitForResetEvent();

            Assert.That(correlationIds.GroupBy(c => c).Count(), Is.EqualTo(1));
        }
Example No. 23
        private void SeedStackOverflowData(IDbConnectionFactory dbConnectionFactory)
        {
            var client = new JsonServiceClient();
            int numberOfPages = 80;
            int pageSize = 100;
            var dbQuestions = new List<Question>();
            var dbAnswers = new List<Answer>();
            try
            {
                for (int i = 1; i < numberOfPages + 1; i++)
                {
                    //Throttle queries
                    Thread.Sleep(100);
                    var questionsResponse =
                        client.Get("https://api.stackexchange.com/2.2/questions?page={0}&pagesize={1}&site={2}&tagged=servicestack"
                                .Fmt(i, pageSize, "stackoverflow"));

                    QuestionsResponse qResponse;
                    using (new ConfigScope())
                    {
                        var json = questionsResponse.ReadToEnd();
                        qResponse = json.FromJson<QuestionsResponse>();
                        dbQuestions.AddRange(qResponse.Items.Select(q => q.ConvertTo<Question>()));
                    }

                    var acceptedAnswers =
                        qResponse.Items
                        .Where(x => x.AcceptedAnswerId != null)
                        .Select(x => x.AcceptedAnswerId).ToList();

                    var answersResponse = client.Get("https://api.stackexchange.com/2.2/answers/{0}?sort=activity&site=stackoverflow"
                        .Fmt(acceptedAnswers.Join(";")));

                    using (new ConfigScope())
                    {
                        var json = answersResponse.ReadToEnd();
                        var aResponse = JsonSerializer.DeserializeFromString<AnswersResponse>(json);
                        dbAnswers.AddRange(aResponse.Items.Select(a => a.ConvertTo<Answer>()));
                    }
                }
            }
            catch (Exception)
            {
                //ignore
            }

            //Filter duplicates
            dbQuestions = dbQuestions.GroupBy(q => q.QuestionId).Select(q => q.First()).ToList();
            dbAnswers = dbAnswers.GroupBy(a => a.AnswerId).Select(a => a.First()).ToList();
            var questionTags = dbQuestions.SelectMany(q =>
                q.Tags.Select(t => new QuestionTag { QuestionId = q.QuestionId, Tag = t }));

            using (var db = dbConnectionFactory.OpenDbConnection())
            {
                db.InsertAll(dbQuestions);
                db.InsertAll(dbAnswers);
                db.InsertAll(questionTags);
            }
        }
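        // The GroupBy(q => q.QuestionId).Select(q => q.First()) calls above are a common
        // "distinct by key" idiom. A small self-contained helper expressing the same pattern
        // (illustrative only; named DistinctByKey to avoid clashing with .NET 6's DistinctBy):
        public static class DistinctByExtensions
        {
            // Keeps the first element seen for each key, mirroring GroupBy(...).Select(g => g.First()).
            public static IEnumerable<TSource> DistinctByKey<TSource, TKey>(
                this IEnumerable<TSource> source, Func<TSource, TKey> keySelector)
            {
                return source.GroupBy(keySelector).Select(g => g.First());
            }
        }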
Example No. 24
        // This is how the data comes from the database.
        public IList<NetworkTranslation> GetTranslations()
        {
            IList<Translation> transList = new List<Translation>
            {
                new Translation { TargetID = 1, TransFieldID = 1, RuleType = "IN", FieldDescription="Network", RuleValue="ANN"},
                new Translation { TargetID = 1, TransFieldID = 2, RuleType = "IN", FieldDescription="Role", RuleValue="*"},
                new Translation { TargetID = 1, TransFieldID = 3, RuleType = "EX", FieldDescription="Written Agreement", RuleValue="MGA1"},
                new Translation { TargetID = 1, TransFieldID = 3, RuleType = "EX", FieldDescription="Written Agreement", RuleValue="S"},
                new Translation { TargetID = 1, TransFieldID = 3, RuleType = "IN", FieldDescription="Written Agreement", RuleValue="*"},
                new Translation { TargetID = 1, TransFieldID = 5, RuleType = "IN", FieldDescription="Provider Type", RuleValue="*"},
                new Translation { TargetID = 1, TransFieldID = 6, RuleType = "IN", FieldDescription="Degree", RuleValue="*"},
                new Translation { TargetID = 1, TransFieldID = 7, RuleType = "Ex", FieldDescription="Specialty", RuleValue="*"},
                new Translation { TargetID = 1, TransFieldID = 7, RuleType = "IN", FieldDescription="Specialty", RuleValue="*"},
                new Translation { TargetID = 1, TransFieldID = 8, RuleType = "IN", FieldDescription="County", RuleValue="*"},
                new Translation { TargetID = 1, TransFieldID = 9, RuleType = "IN", FieldDescription="State", RuleValue="*"},
                new Translation { TargetID = 1, TransFieldID = 11, RuleType = "IN", FieldDescription="Tax ID", RuleValue="*"},

                new Translation { TargetID = 2, TransFieldID = 1, RuleType = "IN", FieldDescription="Network", RuleValue="ANN"},
                new Translation { TargetID = 2, TransFieldID = 2, RuleType = "IN", FieldDescription="Role", RuleValue="*"},
                new Translation { TargetID = 2, TransFieldID = 3, RuleType = "EX", FieldDescription="Written Agreement", RuleValue="MGA1"},
                new Translation { TargetID = 2, TransFieldID = 3, RuleType = "EX", FieldDescription="Written Agreement", RuleValue="S"},
                new Translation { TargetID = 2, TransFieldID = 3, RuleType = "IN", FieldDescription="Written Agreement", RuleValue="*"},
                new Translation { TargetID = 2, TransFieldID = 5, RuleType = "IN", FieldDescription="Provider Type", RuleValue="*"},
                new Translation { TargetID = 2, TransFieldID = 6, RuleType = "IN", FieldDescription="Degree", RuleValue="*"},
                new Translation { TargetID = 2, TransFieldID = 7, RuleType = "Ex", FieldDescription="Specialty", RuleValue="*"},
                new Translation { TargetID = 2, TransFieldID = 7, RuleType = "IN", FieldDescription="Specialty", RuleValue="*"},
                new Translation { TargetID = 2, TransFieldID = 8, RuleType = "IN", FieldDescription="County", RuleValue="*"},
                new Translation { TargetID = 2, TransFieldID = 9, RuleType = "IN", FieldDescription="State", RuleValue="*"},
                new Translation { TargetID = 2, TransFieldID = 11, RuleType = "IN", FieldDescription="Tax ID", RuleValue="*"},

                new Translation { TargetID = 3, TransFieldID = 1, RuleType = "IN", FieldDescription="Network", RuleValue="ANN"},
                new Translation { TargetID = 3, TransFieldID = 2, RuleType = "IN", FieldDescription="Role", RuleValue="*"},
                new Translation { TargetID = 3, TransFieldID = 3, RuleType = "EX", FieldDescription="Written Agreement", RuleValue="MGA1"},
                new Translation { TargetID = 3, TransFieldID = 3, RuleType = "EX", FieldDescription="Written Agreement", RuleValue="S"},
                new Translation { TargetID = 3, TransFieldID = 3, RuleType = "IN", FieldDescription="Written Agreement", RuleValue="*"},
                new Translation { TargetID = 3, TransFieldID = 5, RuleType = "IN", FieldDescription="Provider Type", RuleValue="*"},
                new Translation { TargetID = 3, TransFieldID = 6, RuleType = "IN", FieldDescription="Degree", RuleValue="*"},
                new Translation { TargetID = 3, TransFieldID = 7, RuleType = "Ex", FieldDescription="Specialty", RuleValue="*"},
                new Translation { TargetID = 3, TransFieldID = 7, RuleType = "IN", FieldDescription="Specialty", RuleValue="*"},
                new Translation { TargetID = 3, TransFieldID = 8, RuleType = "IN", FieldDescription="County", RuleValue="*"},
                new Translation { TargetID = 3, TransFieldID = 9, RuleType = "IN", FieldDescription="State", RuleValue="*"},
                new Translation { TargetID = 3, TransFieldID = 11, RuleType = "IN", FieldDescription="Tax ID", RuleValue="*"}
            };

            return transList
                .GroupBy(t => t.TargetID)
                .Select(g => new NetworkTranslation
                             {
                                 TargetId = g.Key,
                                 PimsNetwork = GetValueByFieldId(g, FieldIds.Network),
                                 PimsRole = GetValueByFieldId(g, FieldIds.Role),
                                 WrittenAgreement = GetValuesByFieldId(g, FieldIds.WrittenAgreement),
                                 ProviderType = GetValueByFieldId(g, FieldIds.ProviderType),
                                 Degree = GetValueByFieldId(g, FieldIds.Degree),
                                 Specialty = GetValuesByFieldId(g, FieldIds.Specialty),
                                 County = GetValueByFieldId(g, FieldIds.County),
                                 State = GetValueByFieldId(g, FieldIds.State),
                                 TaxID = GetValueByFieldId(g, FieldIds.TaxID)
                             })
                .ToList();
        }
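
GetValueByFieldId, GetValuesByFieldId and the FieldIds enum are not shown in this snippet. A minimal sketch of what they could look like, assuming FieldIds simply maps names to the TransFieldID values used above and that single-valued properties take the first matching rule (an illustration, not the original implementation):

        // Hypothetical helpers consistent with the projection above.
        private static string GetValueByFieldId(IEnumerable<Translation> rules, FieldIds fieldId)
        {
            return rules
                .Where(t => t.TransFieldID == (int)fieldId)
                .Select(t => t.RuleValue)
                .FirstOrDefault();
        }

        private static List<string> GetValuesByFieldId(IEnumerable<Translation> rules, FieldIds fieldId)
        {
            return rules
                .Where(t => t.TransFieldID == (int)fieldId)
                .Select(t => t.RuleValue)
                .Distinct()
                .ToList();
        }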
Example No. 25
        public void WriteEndOnPropertyState2()
        {
            JsonSerializerSettings settings = new JsonSerializerSettings();
            settings.Error += (obj, args) =>
            {
                args.ErrorContext.Handled = true;
            };

            var data = new List<ErrorPerson2>
            {
                new ErrorPerson2 { FirstName = "Scott", LastName = "Hanselman" },
                new ErrorPerson2 { FirstName = "Scott", LastName = "Hunter" },
                new ErrorPerson2 { FirstName = "Scott", LastName = "Guthrie" },
                new ErrorPerson2 { FirstName = "James", LastName = "Newton-King" },
            };

            Dictionary<string, IEnumerable<IErrorPerson2>> dictionary = data
                .GroupBy(person => person.FirstName)
                .ToDictionary(group => @group.Key, group => @group.Cast<IErrorPerson2>());
            string output = JsonConvert.SerializeObject(dictionary, Formatting.None, settings);

            // Enumerating each Cast<IErrorPerson2>() sequence throws (ErrorPerson2 does not implement
            // the interface); the Error handler marks the failure handled and the serializer closes the
            // array, so both groups come out empty.
            Assert.AreEqual(@"{""Scott"":[],""James"":[]}", output);
        }
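
The ErrorPerson2 and IErrorPerson2 types are not included in the snippet. Declarations consistent with the asserted output would look roughly like this; the class deliberately not implementing the interface is what makes each Cast<IErrorPerson2>() enumeration throw (a sketch, not the original definitions):

        public interface IErrorPerson2
        {
        }

        public class ErrorPerson2
        {
            public string FirstName { get; set; }
            public string LastName { get; set; }
        }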
Example No. 26
        private TemplateLookup GetSuppliedMetricTemplates()
        {
            var templates =
                new List<MetricTemplateMetadata>
                       {
                           new MetricTemplateMetadata
                               {
                                   FullPath=templateNameNoTemplate,
                                   Tokens = new Dictionary<string, string>()
                               },
                           new MetricTemplateMetadata
                               {
                                   FullPath = templateName1,
                                   Tokens = new Dictionary<string, string>
                                       {
                                           { "MetricInstanceSetType", "LocalEducationAgency" },
                                           { "RenderingMode", "Overview" },
                                           { "MetricType", "ContainerMetric" },
                                           { "SchoolId", "555" },
                                           { "MetricId", "10" },
                                           { "Open", "true"}
                                       }
                               },
                           new MetricTemplateMetadata
                               {
                                   FullPath = templateName2,
                                   Tokens = new Dictionary<string, string>
                                       {
                                           { "MetricInstanceSetType", "School" },
                                           { "RenderingMode", "Overview" },
                                           { "MetricType", "GranularMetric" },
                                           { "SchoolId", "555" },
                                           { "MetricId", "9" },
                                           { "Enabled", "True" },
                                           { "OtherKey", "111" },
                                           { "Depth", "Level2" },
                                           { "ParentMetricId", "333" },
                                           { "LocalEducationAgencyId", "888" },
                                           { "StudentUSI", "444" },
                                           { "Open", "true"}
                                       }
                               },
                           new MetricTemplateMetadata
                               {
                                   FullPath = templateName2 + "Close",
                                   Tokens = new Dictionary<string, string>
                                       {
                                           { "MetricInstanceSetType", "School" },
                                           { "RenderingMode", "Overview" },
                                           { "MetricType", "GranularMetric" },
                                           { "SchoolId", "555" },
                                           { "MetricId", "9" },
                                           { "Enabled", "True" },
                                           { "OtherKey", "111" },
                                           { "Depth", "Level2" },
                                           { "ParentMetricId", "333" },
                                           { "LocalEducationAgencyId", "888" },
                                           { "StudentUSI", "444" },
                                           { "Open", "false"}
                                       }
                               },
                           new MetricTemplateMetadata
                               {
                                   FullPath = templateName3,
                                   Tokens = new Dictionary<string, string>
                                       {
                                           { "MetricInstanceSetType", "LocalEducationAgency" },
                                           { "RenderingMode", "Overview" },
                                           { "MetricType", "AggregateMetric" },
                                           { "LocalEducationAgencyId", "888" },
                                           { "ParentMetricId", "9" },
                                           { "Open", "true"}
                                       }
                               },
                           new MetricTemplateMetadata
                               {
                                   FullPath = templateName4,
                                   Tokens = new Dictionary<string, string>
                                       {
                                           { "MetricInstanceSetType", "Student" },
                                           { "RenderingMode", "Overview" },
                                           { "MetricType", "GranularMetric" },
                                           { "SchoolId", "1234" },
                                           { "MetricId", "19" },
                                           { "Open", "false"}
                                       }
                               },
                           new MetricTemplateMetadata
                               {
                                   FullPath = templateName4 + "Open",
                                   Tokens = new Dictionary<string, string>
                                       {
                                           { "MetricInstanceSetType", "Student" },
                                           { "RenderingMode", "Overview" },
                                           { "MetricType", "GranularMetric" },
                                           { "SchoolId", "1234" },
                                           { "MetricId", "19" },
                                           { "Open", "true"}
                                       }
                               },
                           new MetricTemplateMetadata
                               {
                                   FullPath = templateName5,
                                   Tokens = new Dictionary<string, string>
                                       {
                                           { "MetricInstanceSetType", "Student" },
                                           { "RenderingMode", "Overview" },
                                           { "MetricType", "GranularMetric" },
                                           { "Depth", "Level0" },
                                           { "Open", "true"}
                                       }
                               },
                           new MetricTemplateMetadata
                               {
                                   FullPath = templateName6,
                                   Tokens = new Dictionary<string, string>
                                       {
                                           { "MetricInstanceSetType", "NullTest" },
                                           { "RenderingMode", "Overview" },
                                           { "MetricType", "GranularMetric" },
                                           { "Depth", "Level1" },
                                           { "NullValue", "true" },
                                           { "Open", "true"}
                                       }
                               },
                           new MetricTemplateMetadata
                               {
                                   FullPath = templateName7,
                                   Tokens = new Dictionary<string, string>
                                       {
                                           { "MetricInstanceSetType", "NullTest" },
                                           { "RenderingMode", "Overview" },
                                           { "MetricType", "GranularMetric" },
                                           { "Depth", "Level1" },
                                           { "Open", "true"}
                                       }
                               }
                       };

            // Fill in the metric template grouping Ids
            templates.ForEach(m => m.TemplateGroupingId = MetricTemplateMetadata.GetTemplateGroupingId(m.Tokens));

            var lookupData = templates
                .GroupBy(x => x.TemplateGroupingId)
                .ToDictionary(g => g.Key, g => g.ToArray());

            return new TemplateLookup(lookupData);
        }
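
MetricTemplateMetadata.GetTemplateGroupingId is not shown here. Judging from the data above, where the "Open" and "Close" variants of a template share every token except "Open", one plausible (purely illustrative) implementation builds the grouping key from all tokens other than "Open", so both variants land in the same TemplateLookup bucket:

        // Purely illustrative sketch; the real method may hash the tokens or use a different key shape.
        public static string GetTemplateGroupingId(IDictionary<string, string> tokens)
        {
            return string.Join("|", tokens
                .Where(pair => pair.Key != "Open")
                .OrderBy(pair => pair.Key, StringComparer.Ordinal)
                .Select(pair => pair.Key + "=" + pair.Value));
        }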
Example No. 27
        public void TestRunAllTargetsSqlOutput()
        {
            // Setup sqlite output file by deleting any current file and copying a blank schema over
            string sqliteSchemaLocation = @"..\..\..\testFiles\informedSchema.db3";
            string sqliteOutputLocation = "Sarc_Many_Datasets.db3";
            if(File.Exists(sqliteOutputLocation)) File.Delete(sqliteOutputLocation);
            File.Copy(sqliteSchemaLocation, sqliteOutputLocation);

            //string uimfFileLocation = @"..\..\..\testFiles\Sarc_MS2_90_6Apr11_Cheetah_11-02-19.uimf";
            //string uimfFileLocation = @"..\..\..\testFiles\Sarc_P23_C07_2143_23Feb12_Cheetah_11-05-40.uimf";

            //IInterpolation interpolation = AlignmentImporter.ReadFile(netAlignmentFileLocation);

            // Setup calibration workflow and targets
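            // (The calibration pass below uses a wide NET tolerance because it runs before any alignment exists;
            // the main search configured further down uses the much tighter 0.03 once the alignment is applied.)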
            LcImsPeptideSearchParameters calibrationParameters = new LcImsPeptideSearchParameters
            {
                ChargeStateMax = 5,
                NetTolerance = 0.5,
                IsotopicFitScoreThreshold = 0.15,
                MassToleranceInPpm = 30,
                NumPointForSmoothing = 9
            };

            List<PeptideTarget> calibrationTargetList = MassTagImporter.ImportMassTags("elmer", "MT_Human_Sarcopenia_P789", 1e-10, true);
            Console.WriteLine("Using " + calibrationTargetList.Count + " targets for calibration.");

            LcImsPeptideSearchParameters parameters = new LcImsPeptideSearchParameters
            {
                ChargeStateMax = 5,
                NetTolerance = 0.03,
                IsotopicFitScoreThreshold = 0.2,
                MassToleranceInPpm = 30,
                NumPointForSmoothing = 9
            };

            List<PeptideTarget> targetList = MassTagImporter.ImportMassTags("elmer", "MT_Human_Sarcopenia_P789");
            Console.WriteLine(DateTime.Now + ": Reading ViperCompatibleMass Tags from MTDB");

            Console.WriteLine(DateTime.Now + ": Using " + targetList.Count + " targets.");

            List<string> uimfFileList = new List<string>();
            uimfFileList.Add(@"..\..\..\testFiles\Sarc_MS2_13_1Apr11_Cheetah_11-02-24.uimf");
            //uimfFileList.Add(@"..\..\..\testFiles\Sarc_MS2_27_2Apr11_Cheetah_11-01-11.uimf");
            //uimfFileList.Add(@"..\..\..\testFiles\Sarc_MS2_54_6Apr11_Cheetah_11-02-18.uimf");
            //uimfFileList.Add(@"..\..\..\testFiles\Sarc_MS2_55_3Apr11_Cheetah_11-02-15.uimf");
            //uimfFileList.Add(@"..\..\..\testFiles\Sarc_MS2_73_5Apr11_Cheetah_11-02-24.uimf");
            //uimfFileList.Add(@"..\..\..\testFiles\Sarc_MS2_90_6Apr11_Cheetah_11-02-19.uimf");
            //uimfFileList.Add(@"..\..\..\testFiles\Sarc_MS2_102_7Apr11_Cheetah_11-02-19.uimf");
            //uimfFileList.Add(@"..\..\..\testFiles\Sarc_MS2_119_8Apr11_Cheetah_11-02-18.uimf");
            //uimfFileList.Add(@"..\..\..\testFiles\Sarc_MS2_125_8Apr11_Cheetah_11-02-15.uimf");
            //uimfFileList.Add(@"..\..\..\testFiles\Sarc_MS2_146_9Apr11_Cheetah_11-02-24.uimf");

            using (var connection = new SQLiteConnection(@"Data Source=" + sqliteOutputLocation + ";New=False;"))
            {
                connection.Open();

                using (var command = new SQLiteCommand(connection))
                {
                    using (var transaction = connection.BeginTransaction())
                    {
                        Console.WriteLine(DateTime.Now + ": Writing ViperCompatibleMass Tags to database");

                        //foreach (var Target in targetList)
                        //{
                        //    string insertMassTagsQuery = Target.CreateSqlMassTagQueries();
                        //    command.CommandText = insertMassTagsQuery;
                        //    command.ExecuteNonQuery();
                        //}

                        //transaction.Commit();
                    }

                    for (int i = 0; i < uimfFileList.Count; i++)
                    {
                        string uimfFileLocation = uimfFileList[i];
                        FileInfo uimfFileInfo = new FileInfo(uimfFileLocation);
                        Console.WriteLine(DateTime.Now + ": Processing " + uimfFileInfo.Name);

                        // NET Alignment
                        string netAlignmentFileName = uimfFileInfo.Name.Replace(".uimf", "_NetAlign.csv");
                        string netAlignmentLocation = Path.Combine(uimfFileInfo.DirectoryName, netAlignmentFileName);
                        FileInfo netAlignmentFileInfo = new FileInfo(netAlignmentLocation);
                        if (!File.Exists(netAlignmentFileInfo.FullName))
                        {
                            Console.WriteLine(DateTime.Now + ": Creating alignment file using " + calibrationTargetList.Count + " possible targets.");
                            LcImsPeptideSearchWorkfow calibrationWorkflow = new LcImsPeptideSearchWorkfow(uimfFileLocation, calibrationParameters);
                            List<Tuple<double, double>> netAlignmentInput = new List<Tuple<double, double>>();

                            int index = 0;
                            // Run calibration workflow on each of the calibration targets
                            foreach (var imsTarget in calibrationTargetList.OrderBy(x => x.NormalizedElutionTime))
                            {
                                //Console.WriteLine(DateTime.Now + ": Processing Target " + index);
                                ChargeStateCorrelationResult correlationResult = calibrationWorkflow.RunInformedWorkflow(imsTarget);

                                if (correlationResult != null && correlationResult.CorrelatedResults.Any())
                                {
                                    var elutionTimeFilteredResults = correlationResult.CorrelatedResults.Where(x => x.NormalizedElutionTime >= 0.1);
                                    if (elutionTimeFilteredResults.Any())
                                    {
                                        LcImsTargetResult result = elutionTimeFilteredResults.OrderByDescending(x => x.Intensity).First();
                                        netAlignmentInput.Add(new Tuple<double, double>(result.NormalizedElutionTime, imsTarget.NormalizedElutionTime));
                                    }
                                }

                                //Console.WriteLine(DateTime.Now + ": Done Processing Target " + index);
                                imsTarget.RemoveResults();
                                //Console.WriteLine(DateTime.Now + ": Removed results from Target " + index);

                                index++;
                            }

                            // Place data points at beginning and end to finish off the alignment
                            netAlignmentInput.Add(new Tuple<double, double>(0, 0));
                            netAlignmentInput.Add(new Tuple<double, double>(1, 1));

                            // Do LOESS to get NET alignment
                            Console.WriteLine(DateTime.Now + ": Found " + netAlignmentInput.Count + " targets to use for alignment.");
                            var netAlignmentInputGroup = netAlignmentInput.GroupBy(x => x.Item1).OrderBy(x => x.Key);
                            var groupedNetTuple = netAlignmentInputGroup.Select(x => x.OrderBy(y => Math.Abs(y.Item1 - y.Item2)).First()).ToArray();
                            var loessInterpolatorForNetAlignment = new LoessInterpolator(0.1, 4);
                            double[] xArray = groupedNetTuple.Select(x => x.Item1).ToArray();
                            double[] yArray = groupedNetTuple.Select(x => x.Item2).ToArray();
                            double[] newNetValues = loessInterpolatorForNetAlignment.Smooth(xArray, yArray);

                            // Creates a file for the NET Alignment to be stored
                            using (StreamWriter writer = new StreamWriter(netAlignmentFileInfo.FullName))
                            {
                                for (int j = 0; j < groupedNetTuple.Length; j++)
                                {
                                    writer.WriteLine(groupedNetTuple[j].Item1 + "," + newNetValues[j]);
                                }
                            }
                        }
                        else
                        {
                            Console.WriteLine(DateTime.Now + ": Using existing alignment file");
                        }

                        // Grab the net alignment
                        IInterpolation interpolation = AlignmentImporter.ReadFile(netAlignmentFileInfo.FullName);

                        LcImsPeptideSearchWorkfow lcImsPeptideSearchWorkfow = new LcImsPeptideSearchWorkfow(uimfFileInfo.FullName, parameters, interpolation);

                        //using (var transaction = connection.BeginTransaction())
                        //{
                        //    string insertDatasetQuery = "INSERT INTO T_Dataset (Dataset_Id, File_Name) VALUES(" + i + ",'" + uimfFileInfo.Name + "');";
                        //    command.CommandText = insertDatasetQuery;
                        //    command.ExecuteNonQuery();
                        //    transaction.Commit();
                        //}

                        List<ChargeStateCorrelationResult> resultList = new List<ChargeStateCorrelationResult>();

                        foreach (var imsTarget in targetList)
                        {
                            using (var transaction = connection.BeginTransaction())
                            {
                                ChargeStateCorrelationResult correlationResult = lcImsPeptideSearchWorkfow.RunInformedWorkflow(imsTarget);

                                if (correlationResult != null)
                                {
                                    resultList.Add(correlationResult);
                                }

                                //string queries = imsTarget.CreateSqlResultQueries(i);
                                //command.CommandText = queries;
                                //command.ExecuteNonQuery();

                                //if (correlationResult != null && correlationResult.CorrelationMap.Count > 1)
                                //{
                                //    string correlationQueries = correlationResult.CreateSqlUpdateQueries();
                                //    if (correlationQueries != "")
                                //    {
                                //        command.CommandText = correlationQueries;
                                //        command.ExecuteNonQuery();
                                //    }
                                //}

                                //transaction.Commit();
                            }

                            // Reset the Target so it can be used again by another dataset
                            imsTarget.RemoveResults();
                        }

                        foreach (var chargeStateCorrelationResult in resultList)
                        {
                            // No per-result processing here; the SQL output code for results is commented out above.
                        }
                    }
                }

                connection.Close();
            }
        }