Example #1
        public async Task MultiThreadTestAsync()
        {
            var threadNum = 10;
            var idNum     = 10000;

            var bags  = new ConcurrentBag <long>();
            var tasks = new List <Task>();

            for (var i = 0; i < threadNum; i++)
            {
                tasks.Add(Task.Run(() =>
                {
                    Parallel.For(0, idNum, _ =>
                    {
                        bags.Add(idworker.Generate());
                    });
                }));
            }
            Task.WaitAll(tasks.ToArray());

            Assert.Equal(threadNum * idNum, bags.Count);
            Assert.Equal(threadNum * idNum, bags.Distinct().Count());

            await Task.CompletedTask;
        }
Example #2
        public void DataListFactory_Instance_Singleton()
        {
            var dlfs    = new ConcurrentBag <IDataListFactory>();
            var threads = new List <Thread>();

            for (var i = 0; i < 100; i++)
            {
                var t = new Thread(() => {
                    var instance = DataListFactory.Instance;
                    dlfs.Add(instance);
                });
                threads.Add(t);
            }
            foreach (var t in threads)
            {
                t.Start();
            }
            foreach (var t in threads)
            {
                t.Join();
            }

            Assert.AreEqual(1, dlfs.Distinct().Count());
            Assert.AreEqual(DataListFactory.Instance, dlfs.Distinct().First());
        }
        public void StaticInstance_InParallel_CanBeAccessedAndConsumed()
        {
            var sut          = GetStaticInstance();
            var iterations   = 100;
            var bufferLength = 200; // large enough that accidental collisions are unlikely
            var byteList     = new ConcurrentBag <byte[]>();
            var actions      = new Action[iterations];

            for (var i = 0; i < iterations; i++)
            {
                actions[i] = () =>
                {
                    var tempBytes = sut.GetBytes(bufferLength);
                    Assert.That(tempBytes, Is.Not.Null);
                    Assert.That(tempBytes, Is.Not.Empty);
                    Assert.That(tempBytes, Has.Length.EqualTo(bufferLength));
                    byteList.Add(tempBytes);
                    var sutInThread = FastRandom.StaticInstance;
                    tempBytes = sutInThread.GetBytes(bufferLength);
                    Assert.That(tempBytes, Is.Not.Null);
                    Assert.That(tempBytes, Is.Not.Empty);
                    Assert.That(tempBytes, Has.Length.EqualTo(bufferLength));
                };
            }
            Parallel.Invoke(actions);
            Assert.That(byteList.Distinct().Count(), Is.EqualTo(iterations));
        }
        public void StaticInstance_InMultipleThreads_CanBeAccessedAndConsumed()
        {
            var sut          = GetStaticInstance();
            var iterations   = 100;
            var bufferLength = 200; // large enough that accidental collisions are unlikely
            var byteList     = new ConcurrentBag <byte[]>();
            var threads      = new Thread[iterations];

            for (var i = 0; i < iterations; i++)
            {
                threads[i] = new Thread(() =>
                {
                    var tempBytes = sut.GetBytes(bufferLength);
                    Assert.That(tempBytes, Is.Not.Null);
                    Assert.That(tempBytes, Is.Not.Empty);
                    Assert.That(tempBytes, Has.Length.EqualTo(bufferLength));
                    byteList.Add(tempBytes);
                    var sutInThread = FastRandom.StaticInstance;
                    tempBytes       = sutInThread.GetBytes(bufferLength);
                    Assert.That(tempBytes, Is.Not.Null);
                    Assert.That(tempBytes, Is.Not.Empty);
                    Assert.That(tempBytes, Has.Length.EqualTo(bufferLength));
                });
            }
            for (int i = 0; i < iterations; i++)
            {
                threads[i].Start();
            }
            for (int i = 0; i < iterations; i++)
            {
                threads[i].Join();
            }
            Assert.That(byteList.Distinct().Count(), Is.EqualTo(iterations));
        }
Example #5
        /// <summary>
        ///     Creates a playlist with the selected test cases
        /// </summary>
        /// <param name="selectedTcId">IDs of the test cases to include</param>
        /// <param name="fileLocation">Path of the playlist file to write</param>
        public static void CreatePlaylist(List <int> selectedTcId, string fileLocation)
        {
            Stp.Restart();
            var playlistFileContent = "<Playlist Version=\"1.0\">";
            var automatedTestList   = new ConcurrentBag <string>();

            Parallel.ForEach(selectedTcId, testcaseId =>
            {
                var pt = _tfsStore.GetWorkItem(testcaseId);
                // Keep the test name local to the lambda so parallel iterations do not race on a shared variable.
                var automatedTestName = pt.Fields["Automated Test Name"].Value.ToString();

                // ConcurrentBag<T>.Add is already thread-safe, so no extra locking is needed.
                automatedTestList.Add(automatedTestName);
            });
            var dedup = automatedTestList.Distinct().ToList();

            Stp.Stop();
            AutomationMethodTime = (float)Stp.ElapsedMilliseconds / 1000;
            Stp.Restart();
            playlistFileContent = dedup.Aggregate(playlistFileContent, (current, testName) => current + "<Add Test=\"" + testName + "\" />");

            playlistFileContent += "</Playlist>";
            // Disposing the StreamWriter also disposes the underlying FileStream.
            using (var writer = new StreamWriter(new FileStream(fileLocation, FileMode.Create)))
            {
                writer.WriteLine(playlistFileContent);
            }
            Stp.Stop();
            AutomationPlaylistAddition = (float)Stp.ElapsedMilliseconds / 1000;
        }
        public void World_should_not_end_if_exception_thrown_in_user_callback()
        {
            var    numThreads  = 3;
            var    threadIds   = new ConcurrentBag <int>();
            Action badCallback = () =>
            {
                threadIds.Add(Thread.CurrentThread.ManagedThreadId);
                throw new Exception("DEATH TO THIS THREAD I SAY!");
            };
            Action goodCallback = () =>
            {
                Thread.Sleep(20);
                threadIds.Add(Thread.CurrentThread.ManagedThreadId);
            };

            using (var threadPool = new DedicatedThreadPool(new DedicatedThreadPoolSettings(numThreads)))
            {
                for (var i = 0; i < numThreads; i++)
                {
                    threadPool.EnqueueWorkItem(badCallback);
                }
                //wait half a second for all work to be completed
                Task.Delay(TimeSpan.FromSeconds(0.5)).Wait();

                //run the job again. Should get 3 more managed thread IDs
                for (var i = 0; i < numThreads; i++)
                {
                    threadPool.EnqueueWorkItem(goodCallback);
                }
                Task.Delay(TimeSpan.FromSeconds(0.5)).Wait();
            }

            // half of thread IDs should belong to failed threads, other half to successful ones
            Assert.AreEqual(numThreads * 2, threadIds.Distinct().Count());
        }
        public void Should_release_threads_when_idle()
        {
            var    numThreads = 3;
            var    threadIds  = new ConcurrentBag <int>();
            Action callback   = () =>
            {
                Thread.Sleep(15); //sleep, so another thread is forced to take the work
                threadIds.Add(Thread.CurrentThread.ManagedThreadId);
            };

            using (var threadPool = new DedicatedThreadPool(new DedicatedThreadPoolSettings(numThreads)))
            {
                for (var i = 0; i < numThreads; i++)
                {
                    threadPool.EnqueueWorkItem(callback);
                }
                //wait half a second for all work to be completed
                Task.Delay(TimeSpan.FromSeconds(0.5)).Wait();

                //run the job again. Should get 3 more managed thread IDs
                for (var i = 0; i < numThreads; i++)
                {
                    threadPool.EnqueueWorkItem(callback);
                }
                Task.Delay(TimeSpan.FromSeconds(0.5)).Wait();
            }

            Assert.AreEqual(numThreads * 2, threadIds.Distinct().Count());
        }
        protected virtual async Task <IEnumerable <ConfigurationSetupResult> > ExecuteConfigurationsDependingOn(
            IReadOnlyCollection <ConfigurationSetupResult> configurations, string dependsOn,
            Func <ConfigurationSetupResult, Task> executionAction)
        {
            var configurationsToExecute = configurations.Where(x => x.DependsOn == dependsOn).ToList();

            var results = new ConcurrentBag <ConfigurationSetupResult>();

            var actions = new ConcurrentBag <Task>();

            Parallel.ForEach(configurationsToExecute, configuration =>
            {
                actions.Add(executionAction(configuration));
                results.Add(configuration);
            });

            await Task.WhenAll(actions).ConfigureAwait(false);

            var executed = results.ToList();

            foreach (var item in results.Distinct())
            {
                executed.AddRange(
                    await
                    ExecuteConfigurationsDependingOn(configurations, item.ConfigurationName, executionAction)
                    .ConfigureAwait(false));
            }

            return(executed);
        }
Example #9
        public void Should_Be_Able_To_Enqueue_Actions_Without_Configuring_Taskpool_And_Name()
        {
            var threadIds  = new ConcurrentBag <int>();
            var waitHandle = new CountdownEvent(10);

            Action runTask = () => {
                Thread.Sleep(1000);
                threadIds.Add(Thread.CurrentThread.ManagedThreadId);
                waitHandle.Signal();
            };

            AsyncTasks.Run(runTask);
            AsyncTasks.Run(runTask);
            AsyncTasks.Run(runTask);
            AsyncTasks.Run(runTask);
            AsyncTasks.Run(runTask);
            AsyncTasks.Run(runTask);
            AsyncTasks.Run(runTask);
            AsyncTasks.Run(runTask);
            AsyncTasks.Run(runTask);
            AsyncTasks.Run(runTask);
            waitHandle.Wait();
            AsyncTasks.RemoveAll();

            Assert.AreEqual(Environment.ProcessorCount, threadIds.Distinct().Count());
        }
        public async Task Subscribe_MessagesPublishedFromMultipleThreads_MessagesReceivedInMultipleThreads()
        {
            int count   = 0;
            var threads = new ConcurrentBag <int>();

            _messageObservable.ObserveOn(NewThreadScheduler.Default).Subscribe(_ => count++);
            _messageObservable.ObserveOn(NewThreadScheduler.Default).Subscribe(_ => count++);

            Parallel.Invoke(
                () =>
            {
                _publisher.Publish(new TestEventOne());
                threads.Add(Thread.CurrentThread.ManagedThreadId);
            },
                () =>
            {
                _publisher.Publish(new TestCommandOne());
                threads.Add(Thread.CurrentThread.ManagedThreadId);
            },
                () =>
            {
                _publisher.Publish(new TestEventOne());
                threads.Add(Thread.CurrentThread.ManagedThreadId);
            });

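            // Give the NewThreadScheduler subscribers time to process the published messages before asserting.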
            await Task.Delay(100);

            count.Should().Be(4);
            threads.Distinct().Count().Should().BeGreaterThan(1);
        }
        public void verify_multithread_mapping()
        {
            for (int outerIteration = 0; outerIteration < 10; outerIteration++)
            {
                SetUp();
                Int32 iteration = 0;
                var   sequence  = Enumerable.Range(0, 100);
                ConcurrentBag <String> generated = new ConcurrentBag <string>();
                try
                {
                    Parallel.ForEach(sequence, i =>
                    {
                        Interlocked.Increment(ref iteration);
                        generated.Add(sut.Map("TEST" + i));
                    });
                    Assert.That(generated.Count, Is.EqualTo(100), "Error in iteration " + outerIteration);
                }
                catch (Exception ex)
                {
                    Assert.Fail("Exception at iteration " + iteration + ": " + ex.ToString());
                }

                var allRecords = mapperCollection.Find(Builders <BsonDocument> .Filter.Empty).ToList();
                Assert.That(allRecords, Has.Count.EqualTo(100));
                for (int i = 1; i <= 100; i++)
                {
                    if (!generated.Contains("MapperTests_" + i))
                    {
                        Assert.Fail("Id " + i + " is missing");
                    }
                }
                Assert.That(generated.Distinct().Count(), Is.EqualTo(100));
            }
        }
Example #12
        public void Should_Be_Able_To_Enqueue_Actions_From_Multiple_Threads()
        {
            var threadIds  = new ConcurrentBag <int>();
            var waitHandle = new CountdownEvent(4);

            Action runTask = () => {
                Thread.Sleep(1000);
                threadIds.Add(Thread.CurrentThread.ManagedThreadId);
                waitHandle.Signal();
            };


            AsyncTasks.AddPool("testPool7", 2);
            Parallel.Invoke(
                () => {
                AsyncTasks.Run(runTask, "testPool7");
            },
                () => {
                AsyncTasks.AddPool("testPool8", 1);
                AsyncTasks.Run(runTask, "testPool8");
            },
                () => {
                AsyncTasks.Run(runTask, "testPool7");
            },
                () => {
                AsyncTasks.AddPool("testPool9", 1);
                AsyncTasks.Run(runTask, "testPool9");
            }
                );

            waitHandle.Wait();
            AsyncTasks.RemoveAll();

            Assert.AreEqual(4, threadIds.Distinct().Count());
        }
Example #13
        public void GetValue_Multithreads()
        {
            var azureManager = Substitute.For <IAzureManager>();

            azureManager.FileExists(Arg.Any <string>()).Returns(x => x.Arg <string>() == "/some/key");

            var cache   = new AzureBlobCache <string>(azureManager, true);
            var results = new ConcurrentBag <string>();

            void Action(int number) => results.Add(cache.GetValue("/some/key", x => $"value-{number}"));

            var threads = new Thread[10];

            for (var i = 0; i < threads.Length; i++)
            {
                int number = i;
                threads[i] = new Thread(() => Action(number));
            }

            foreach (Thread thread in threads)
            {
                thread.Start();
            }

            foreach (Thread thread in threads)
            {
                thread.Join();
            }

            Assert.Equal(10, results.Count);
            Assert.Equal(1, results.Distinct().Count());
        }
Example #14
        //TODO Fix Me
        private static async Task <IEnumerable <DownloadResult> > GetDownloadables(string url, string extension, int levels)
        {
            var downloadResults = new ConcurrentBag <DownloadResult>();
            var results         = (await GetDownloadables(url, extension)).ToList();

            downloadResults.AddRange(results);
            var tempResults = new ConcurrentBag <DownloadResult>();

            tempResults.AddRange(results);
            for (int i = 0; i < levels; i++)
            {
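                // NOTE: Parallel.ForEach does not await async lambdas, so the downloads below are
                // fire-and-forget and tempResults may still be incomplete when the level ends (see TODO above).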
                Parallel.ForEach(results.Where(r => r.IsDirectory),
                                 //new ParallelOptions() { MaxDegreeOfParallelism = 1 },
                                 async directory =>
                {
                    tempResults.AddRange(await GetDownloadables(directory.Url, extension));
                });
                Console.WriteLine("Level {0}, {1} urls added", i, tempResults.Count);
                downloadResults.AddRange(tempResults);
                Console.WriteLine("Level {0}, {1} urls total", i, downloadResults.Count);
                results     = tempResults.Clone();
                tempResults = new ConcurrentBag <DownloadResult>();
            }
            return(downloadResults.Distinct().OrderBy(x => x.Url));
        }
        public void should_generate_distinct_handles()
        {
            var list = new ConcurrentBag<string>();
            Parallel.For(0, 10000, i => list.Add(HandleGenerator.Generate(100)));

            Assert.Equal(10000, list.Distinct().Count());
        }
Example #16
        public void Should_Be_Able_To_Configure_Default_Taskpool_With_Specific_Size()
        {
            var threadIds  = new ConcurrentBag <int>();
            var waitHandle = new CountdownEvent(4);

            Action runTask = () => {
                Thread.Sleep(1000);
                threadIds.Add(Thread.CurrentThread.ManagedThreadId);
                waitHandle.Signal();
            };

            int poolSize = 2;

            lock (_lock)
            {
                AsyncTasks.UseDefaultPool();
                AsyncTasks.AddPool("testPool1", poolSize);
            }
            AsyncTasks.Run(runTask, "testPool1");
            AsyncTasks.Run(runTask, "testPool1");
            AsyncTasks.Run(runTask, "testPool1");
            AsyncTasks.Run(runTask, "testPool1");
            waitHandle.Wait();
            AsyncTasks.RemoveAll();

            Assert.AreEqual(poolSize, threadIds.Distinct().Count());
        }
        public async Task AsyncPool_TakeParallelCheckDistinct()
        {
            const int count = 100;
            var       pack  = new ConcurrentBag <object>();

            Parallel.For(0, count / 10, (_) => pack.Add(new object()));
            var pool = new ObjectPool <object>(pack, () => new object(), count);

            var retrieved = new ConcurrentBag <object>();
            var tasks     = new List <Task>();

            const int tasksCount = count / 20;

            for (int i = 0; i < tasksCount; i++)
            {
                var task = Task.Run(async () =>
                {
                    for (int j = 0; j < count / tasksCount; j++)
                    {
                        var item = await pool.TakeAsync();
                        retrieved.Add(item.Object);
                    }
                });
                tasks.Add(task);
            }
            await Task.WhenAll(tasks);

            Assert.AreEqual(count, retrieved.Distinct().Count());
        }
Example #18
        public void Should_Be_Able_To_Configure_Default_Task_Pool_With_Default_Size()
        {
            var threadIds  = new ConcurrentBag <int>();
            var waitHandle = new CountdownEvent(10);

            Action runTask = () => {
                Thread.Sleep(1000);
                threadIds.Add(Thread.CurrentThread.ManagedThreadId);
                waitHandle.Signal();
            };

            lock (_lock)
            {
                AsyncTasks.UseDefaultPool();
                AsyncTasks.AddPool("testPool");
            }
            AsyncTasks.Run(runTask, "testPool");
            AsyncTasks.Run(runTask, "testPool");
            AsyncTasks.Run(runTask, "testPool");
            AsyncTasks.Run(runTask, "testPool");
            AsyncTasks.Run(runTask, "testPool");
            AsyncTasks.Run(runTask, "testPool");
            AsyncTasks.Run(runTask, "testPool");
            AsyncTasks.Run(runTask, "testPool");
            AsyncTasks.Run(runTask, "testPool");
            AsyncTasks.Run(runTask, "testPool");
            waitHandle.Wait();
            AsyncTasks.RemoveAll();

            Assert.AreEqual(Environment.ProcessorCount, threadIds.Distinct().Count());
        }
        public void Subscribe_MessagesPublishedFromMultipleThreads_MessagesReceived()
        {
            int count   = 0;
            var threads = new ConcurrentBag <int>();

            _messageObservable.Subscribe(_ => count++);

            Parallel.Invoke(
                () =>
            {
                _publisher.Publish(new TestEventOne());
                threads.Add(Thread.CurrentThread.ManagedThreadId);
            },
                () =>
            {
                _publisher.Publish(new TestCommandOne());
                threads.Add(Thread.CurrentThread.ManagedThreadId);
            },
                () =>
            {
                _publisher.Publish(new TestEventOne());
                threads.Add(Thread.CurrentThread.ManagedThreadId);
            });

            count.Should().Be(2);
            threads.Distinct().Count().Should().BeGreaterThan(1);
        }
Example #20
        public void GetAllUsersFromSBK_02()
        {
            var config = new Config()
            {
                Name             = "internsbk",
                ApiUrl           = "https://testservice.MINKOMMUN.se/api/v2/services",
                ApiUserName      = "******",
                ApiKey           = "c0ff3c0f-fec0-ff3c-0ffe-c0ff3c0ff3c0f",
                ServiceShortName = "SBK_02",
                Actor            = "SBK"
            };

            var api = new AbouRestApi(config);


            DateTime FromDate = new DateTime(2016, 02, 27);
            DateTime ToDate   = new DateTime(2019, 12, 11);

            string[] States = new string[] { "Godkänd", "Inkommet" };
            bool     ExcludeCasesWithDiaryNumber = false;

            var users = new ConcurrentBag <string>();

            var res = api.GetByDateAndState(FromDate, ToDate, States, ExcludeCasesWithDiaryNumber);

            Parallel.ForEach(res, (u) =>
            {
                var r    = api.GetDetailed(u);
                var user = r.Signatures.First().UserIdentity;
                users.Add(user);
            });

            var f = string.Join(",", users.Distinct());
        }
        private static List <Coordinate> doGeneration(List <Coordinate> board)
        {
            var result = new ConcurrentBag <Coordinate>();

            Parallel.ForEach(board, (cell) =>
            {
                var cellNeighbors = FindNeighborCount(cell, board);

                if (cellNeighbors == 2 || cellNeighbors == 3)
                {
                    //cell lives.
                    result.Add(cell);
                }

                // dead neighbor cells with exactly three live neighbors become alive
                foreach (var neighbor in cell.Neighbors)
                {
                    var neighborIsCurrentlyDead = !board.Any(c => c == neighbor);
                    if (neighborIsCurrentlyDead && FindNeighborCount(neighbor, board) == 3)
                    {
                        result.Add(neighbor);
                    }
                }
            });
            var distinct = result.Distinct();

            return(distinct.ToList());
        }
        public void OneThreadManyTasksTest(int taskCount)
        {
            var pool  = new MyThreadPool(1);
            var tasks = new List <IMyTask <int> >();
            var idBag = new ConcurrentBag <int>();

            for (var i = 0; i < taskCount; ++i)
            {
                var localIndex = i;

                tasks.Add(pool.QueueTask(() =>
                {
                    idBag.Add(Thread.CurrentThread.ManagedThreadId);
                    Thread.Sleep(1000);
                    return(localIndex);
                }));
            }

            for (var i = 0; i < taskCount; ++i)
            {
                Assert.AreEqual(i, tasks[i].Result);
            }

            Assert.AreEqual(1, idBag.Distinct().Count());
        }
        private async Task <Server[]> GetServersAsync(CancellationToken cancellationToken)
        {
            _logger?.LogDebug(LoggingExtensions.CurrentFunction());

            return(await Task.Run(async () =>
            {
                var servers = new ConcurrentBag <Server>();

                try
                {
                    var populationTasks = new List <Task>();

                    var registryInstances = LocalServers.LookupRegistryInstances();

                    foreach (var (hive, instances) in registryInstances)
                    {
                        foreach (var instance in instances)
                        {
                            populationTasks.Add(PopulateLocalServerAsync(servers, hive, instance, cancellationToken));
                        }
                    }

                    await Task.WhenAll(populationTasks).ConfigureAwait(false);
                }
                catch (TaskCanceledException)
                {
                    _logger?.LogDebug($"[{LoggingExtensions.CurrentFunction()}] Cancelled");
                }

                return servers.Distinct().ToArray();
            }, cancellationToken).ConfigureAwait(false));
        }
Example #24
        private void CleanOwnFiles()
        {
            lock (s_CleanLock)
            {
                _flagFile.Dispose();
                _flagFile = null;

                // Delete our own temp folder
                try
                {
                    FileHelper.DeleteDirectoryRecursive(@"\\?\" + TempFolder);
                }
                catch (Exception)
                {
                }

                // Clean any loose files we're tracking
                foreach (string file in _filesToClean.Distinct(StringComparer.OrdinalIgnoreCase))
                {
                    if (string.IsNullOrWhiteSpace(file))
                    {
                        continue;
                    }

                    try
                    {
                        FileMethods.DeleteFile(@"\\?\" + file);
                    }
                    catch (Exception)
                    {
                    }
                }
            }
        }
Example #25
        public async Task Should_create_multiple_concurrent_consumers()
        {
            var address = Guid.NewGuid().ToString();
            var queue   = Guid.NewGuid().ToString();

            var consumers = new ConcurrentBag <IConsumer>();

            async Task MessageHandler(Message message, IConsumer consumer, IServiceProvider provider, CancellationToken token)
            {
                consumers.Add(consumer);
                await consumer.AcceptAsync(message);
            }

            await using var testFixture = await TestFixture.CreateAsync(_testOutputHelper, builder =>
            {
                builder.AddConsumer(address, RoutingType.Multicast, queue, new ConsumerOptions { ConcurrentConsumers = 3 }, MessageHandler)
                .EnableAddressDeclaration()
                .EnableQueueDeclaration();
            });

            await using var producer = await testFixture.Connection.CreateProducerAsync(address, RoutingType.Multicast, testFixture.CancellationToken);

            for (int i = 0; i < 100; i++)
            {
                await producer.SendAsync(new Message("foo" + i), testFixture.CancellationToken);
            }

            Assert.Equal(3, consumers.Distinct().Count());
        }
        public void Instance_ThreadSafe()
        {

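            // The barrier gates the worker threads so batches of them read AnyConstructorFinder.Instance
            // at roughly the same moment, maximising contention on the singleton initialisation.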
            using (var gate = new Barrier(5))
            {
                var result = new ConcurrentBag<AnyConstructorFinder>();

                Action test = () =>
                {
                    gate.SignalAndWait(20);

                    var instance = AnyConstructorFinder.Instance;

                    Thread.MemoryBarrier();

                    result.Add(instance);
                };

                var cycleState = Parallel.For(0, 200,
                    new ParallelOptions { MaxDegreeOfParallelism = 15 },
                    x => { test(); });

                while (!cycleState.IsCompleted) 
                {
                    Thread.Sleep(100);
                }

                Assert.IsTrue(result.All(x => x != null));
                Assert.IsTrue(result.Distinct().Count() == 1);
            }
        }
Example #27
        public Receipt Store <T>(WarehouseKey key, IEnumerable <T> data, IEnumerable <LoadingDockPolicy> loadingDockPolicies)
        {
            ThrowIfNotInitialized();

            var uuid = Guid.NewGuid();

            ConcurrentBag <LoadingDockPolicy> enforcedPolicies = new ConcurrentBag <LoadingDockPolicy>();

            // resolve the appropriate store, based on the policy
            Parallel.ForEach(ResolveShelves <T>(loadingDockPolicies), (shelf) =>
            {
                shelf.Store(key, data, enforcedPolicies);
            });

            // the receipt is largely what was passed in when it was stored
            var receipt = new Receipt(enforcedPolicies.Any())
            {
                UUID  = uuid,
                Key   = key.Id,
                Scope = key.Scope,
                // add the policies that were upheld during the store, this is necessary,
                // because this warehouse might not be able to satisfy all of the policies
                Policies       = enforcedPolicies.Distinct().ToList(),
                SHA256Checksum = CalculateChecksum <T>(data)
            };

            SessionReceipts.Add(receipt);

            return(receipt);
        }
Example #28
        public async Task ItHasBeenFixed()
        {
            var activator = new BuiltinHandlerActivator();

            var receivedMessageIds = new ConcurrentBag <string>();

            activator.Handle <string>(async (_, context, message) =>
            {
                receivedMessageIds.Add(context.TransportMessage.Headers[Headers.MessageId]);
            });

            Using(activator);

            var bus = Configure.With(activator)
                      .Transport(t => t.UseInMemoryTransport(new InMemNetwork(), "buggerino"))
                      .Start();

            var customHeaders = new Dictionary <string, string>
            {
                { "custom-header", "woohoo" }
            };


            const string repeatedMessage = "hej med dig";

            await bus.SendLocal(repeatedMessage, customHeaders);

            await bus.SendLocal("hej igen med", customHeaders);

            await bus.SendLocal(repeatedMessage, customHeaders);

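            // Give the in-memory transport a moment to dispatch all three messages before asserting.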
            await Task.Delay(TimeSpan.FromSeconds(1));

            Assert.That(receivedMessageIds.Distinct().Count(), Is.EqualTo(3), "Expected three unique message IDs - got: {0}", string.Join(", ", receivedMessageIds));
        }
Example #29
        public int SortAndWriteToFile()
        {
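            // Distinct().OrderBy() is a deferred query; it is evaluated by WriteAllLines below and again by Count().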
            var sortedCollection = _collection.Distinct().OrderBy(x => x.Key);

            File.WriteAllLines(Common.Common.OutputFileName, sortedCollection.Select(s => s.Value.ToString()).ToList());
            return(sortedCollection.Count());
        }
Example #30
        public string RunStep(RuntimeSettings settings, ILog log)
        {
            var links   = new ConcurrentBag <string>();
            var results = new StringBuilder();

            foreach (string? chapter in settings.TocContents.Chapters)
            {
                log.Info("Processing chapter: {0}", chapter);
                results.AppendFormat("## {0}\r\n\r\n", chapter);
                links.Clear();

                Parallel.ForEach(settings.TocContents.GetLinksForChapter(chapter), link =>
                {
                    var input = settings.SourceDirectory.Combine(link.Url);

                    var contents = input.ReadFile(log);

                    foreach (Match? match in _link.Matches(contents))
                    {
                        if (match != null)
                        {
                            links.Add(match.Value);
                        }
                    }
                });

                foreach (string link in links.Distinct().OrderBy(s => s))
                {
                    results.AppendLine(link);
                }
                results.AppendLine();
            }

            return(results.ToString());
        }
        public async Task ItHasBeenFixed()
        {
            var activator = new BuiltinHandlerActivator();

            Using(activator);

            var receivedMessageIds = new ConcurrentBag<string>();

            activator.Handle<string>(async (_, context, message) =>
            {
                receivedMessageIds.Add(context.TransportMessage.Headers[Headers.MessageId]);
            });

            var bus = Configure.With(activator)
                .Transport(t => t.UseInMemoryTransport(new InMemNetwork(), "buggerino"))
                .Start();

            var customHeaders = new Dictionary<string, string>
            {
                {"custom-header", "woohoo"}
            };

            const string repeatedMessage = "hej med dig";

            await bus.SendLocal(repeatedMessage, customHeaders);
            await bus.SendLocal("hej igen med", customHeaders);
            await bus.SendLocal(repeatedMessage, customHeaders);

            await Task.Delay(TimeSpan.FromSeconds(1));

            Assert.That(receivedMessageIds.Distinct().Count(), Is.EqualTo(3), "Expected three unique message IDs - got: {0}", string.Join(", ", receivedMessageIds));
        }
Example #32
        private void mergeCloseNodes()
        {
            var  mergedNodes = new ConcurrentBag <Tuple <Simulation.Traffic.Node, Simulation.Traffic.Node> >();
            bool any         = false;
            var  allNodes    = manager.Nodes.ToArray();

            //Parallel.For(0, allNodes.Length, x =>
            for (int x = 0; x < allNodes.Length; x++)
            {
                var a = allNodes[x];

                var closeNodes = manager.QueryNodes(a.Position, 1).ToArray();


                foreach (var b in closeNodes)
                {
                    if (a != b)
                    {
                        if ((a.Position - b.Position).sqrMagnitude < 2)
                        {
                            mergedNodes.Add(new Tuple <Simulation.Traffic.Node, Simulation.Traffic.Node>(a, b));
                            //manager.MergeNodes(a, b);
                        }
                    }
                }
            }//);


            var mergedNodesDistinct = mergedNodes.Distinct(new NodePairComparer()).ToArray();

            foreach (var merge in mergedNodesDistinct)
            {
                manager.MergeNodes(merge.Item1, merge.Item2);
            }
        }
Example #33
        public async Task <List <int> > GetAllTypeAsync(bool tq = false)
        {
            ESIClient           esi    = tq ? (ESIClient)_tqesi : _esi;
            ConcurrentBag <int> result = new ConcurrentBag <int>();
            var pageheaders            = await esi.GetAllTypesPages();
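            // Read the total page count from the ESI "x-pages" response header, defaulting to a single page.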


            if (!pageheaders.Headers.TryGetValues("x-pages", out var xPages) ||
                !int.TryParse(xPages.FirstOrDefault(), out var pages))
            {
                pages = 1;
            }


            await Dasync.Collections.ParallelForEachExtensions.ParallelForEachAsync(Enumerable.Range(1, pages), async page =>
            {
                var thispage = await esi.Get_universe_typesAsync(page);


                foreach (var i in thispage)
                {
                    result.Add(i);
                }
            }, MAX_THREAD);

            return(result.Distinct().ToList());
        }
        public void Should_use_all_threads_for_many_tasks()
        {
            var threadIds = new ConcurrentBag<int>();
            var atomicCounter = new AtomicCounter(0);
            Action callback = () =>
            {
                atomicCounter.GetAndIncrement();
                threadIds.Add(Thread.CurrentThread.ManagedThreadId);
            };

            for (var i = 0; i < 1000; i++)
            {
                Factory.StartNew(callback);
            }
            //spin until work is completed
            SpinWait.SpinUntil(() => atomicCounter.Current == 1000, TimeSpan.FromSeconds(1));

            Assert.AreEqual(Pool.Settings.NumThreads, threadIds.Distinct().Count());
        }
        public int Execute()
        {
            var fileItems = File.ReadAllLines(_file).Select(Int64.Parse).ToArray();
            var hashSet = new HashSet<Int64>(fileItems);
            var result = new ConcurrentBag<Int64>();

            Parallel.ForEach(fileItems, x =>
            {
                var y1 = IntervalStart - x;
                var y2 = IntervalEnd - x;

                for (var y = y1; y <= y2; y++)
                {
                    if (hashSet.Contains(y))
                        result.Add(x + y);
                }
            });

            return result.Distinct().Count();
        }
Example #36
        public void WhenTwoThreadsInitialiseASharedInstanceSimultaneouslyViaChildLifetime_OnlyOneInstanceIsActivated()
        {
            int activationCount = 0;
            var results = new ConcurrentBag<object>();
            var exceptions = new ConcurrentBag<Exception>();

            var builder = new ContainerBuilder();
            builder.Register(c =>
                {
                    Interlocked.Increment(ref activationCount);
                    Thread.Sleep(500);
                    return new object();
                })
                .SingleInstance();

            var container = builder.Build();

            ThreadStart work = () => {
                 try
                 {
                     var o = container.BeginLifetimeScope().Resolve<object>();
                     results.Add(o);
                 }
                 catch (Exception ex)
                 {
                     exceptions.Add(ex);
                 }
            };

            var t1 = new Thread(work);
            var t2 = new Thread(work);
            t1.Start();
            t2.Start();
            t1.Join();
            t2.Join();

            Assert.Equal(1, activationCount);
            Assert.Empty(exceptions);
            Assert.Equal(1, results.Distinct().Count());
        }
        public void Should_process_workload_across_exactly_DedicatedThreadPoolSettings_NumThreads()
        {
            var numThreads = Environment.ProcessorCount;
            var threadIds = new ConcurrentBag<int>();
            var atomicCounter = new AtomicCounter(0);
            Action callback = () =>
            {
                atomicCounter.GetAndIncrement();
                threadIds.Add(Thread.CurrentThread.ManagedThreadId);
            };
            using (var threadPool = new DedicatedThreadPool(new DedicatedThreadPoolSettings(numThreads)))
            {
                for (var i = 0; i < 1000; i++)
                {
                    threadPool.QueueUserWorkItem(callback);
                }
                //spin until work is completed
                SpinWait.SpinUntil(() => atomicCounter.Current == 1000, TimeSpan.FromSeconds(1));
            }

            Assert.AreEqual(numThreads, threadIds.Distinct().Count());
        }
Example #38
        public Property ObjectPool_should_not_leak_when_used_properly(Tuple<int, int>[] values)
        {
            var tasks = new List<Task<bool>>();
            var pooledObjects = new ConcurrentBag<MyPooledObject>();
            Func<Tuple<int, int>, MyPooledObject> setPool = tuple =>
            {
                var obj = _pool.Take();
                obj.Num = tuple.Item1;
                obj.Num2 = tuple.Item2;
                return obj;
            };

            Func<MyPooledObject, Tuple<int, int>, bool> freePoolAndAssertReferentialIntegrity = (o, tuple) =>
            {
                var propsEqual = o.Num == tuple.Item1 && o.Num2 == tuple.Item2;
                pooledObjects.Add(o); //add a reference to O
                o.Recycle();
                return propsEqual;
            };

            foreach (var value in values)
            {
                var v = value;
                var task =
                    Task.Run(() => setPool(v)).ContinueWith(t => freePoolAndAssertReferentialIntegrity(t.Result, v));
                tasks.Add(task);
            }
            var results = Task.WhenAll(tasks);
            if (!results.Wait(200))
                return false.Label($"Should not have taken 200ms to process {values.Length} items");

            if (!results.Result.All(x => x))
                return false.Label("None of the objects in the pool should ever be concurrently modified while in use");

            var count = pooledObjects.Distinct().Count();
            return
                (count <= ObjectCount).Label(
                    $"Should not have produced more than {ObjectCount}, but was instead {count}");
        }
 public void TestSaveUpdatesAutoIncrementingField()
 {
     //---------------Set up test pack-------------------
     ClassDef.ClassDefs.Clear();
     TestAutoInc.LoadClassDefWithAutoIncrementingID();
     var newIds = new ConcurrentBag<int?>();
     //---------------Execute Test ----------------------
     Parallel.For(0, 1000, i => {
                                    //---------------Set up test pack-------------------
                                    var bo = new TestAutoInc();
                                    bo.SetPropertyValue("testfield", "testing 123");
                                    //---------------Assert Precondition----------------
                                    Assert.IsFalse(bo.TestAutoIncID.HasValue);
                                    //---------------Execute Test ----------------------
                                    bo.Save();
                                    //---------------Test Result -----------------------
                                    newIds.Add(bo.TestAutoIncID);
     });
     //---------------Test Result -----------------------
     Assert.IsTrue(newIds.All(i => i.HasValue));
     Assert.IsTrue(newIds.All(i => i > 0));
     Assert.That(newIds.Distinct().Count(), Is.EqualTo(1000), "Every generated ID must be unique");
 }
        public void World_should_not_end_if_exception_thrown_in_user_callback()
        {
            var numThreads = 3;
            var threadIds = new ConcurrentBag<int>();
            Action badCallback = () =>
            {
                threadIds.Add(Thread.CurrentThread.ManagedThreadId);
                throw new Exception("DEATH TO THIS THREAD I SAY!");
            };
            Action goodCallback = () =>
            {
                threadIds.Add(Thread.CurrentThread.ManagedThreadId);
            };

            using (var threadPool = new DedicatedThreadPool(new DedicatedThreadPoolSettings(numThreads, TimeSpan.FromSeconds(1))))
            {
                for (var i = 0; i < numThreads; i++)
                {
                    threadPool.QueueUserWorkItem(badCallback);
                    Thread.Sleep(20);
                }

                //sanity check
                Assert.AreEqual(numThreads, threadIds.Distinct().Count());

                //run the job again. Should get 3 more managed thread IDs
                for (var i = 0; i < numThreads*10; i++)
                {
                    threadPool.QueueUserWorkItem(goodCallback);
                    Thread.Sleep(20);
                }
            }

            // half of thread IDs should belong to failed threads, other half to successful ones
            Assert.AreEqual(numThreads * 2, threadIds.Distinct().Count());
        }
Example #41
        private ConcurrentBag<ISequentialIntServiceNode> GetAllNodesFromAllServices()
        {
            var allNodes = new ConcurrentBag<ISequentialIntServiceNode>();

            foreach (var node in this.nodes)
            {
                allNodes.Add(node);
            }

            foreach (var service in this.services)
            {
                foreach (var node in service.GetServiceNodes())
                {
                    allNodes.Add(node);
                }
            }

            return new ConcurrentBag<ISequentialIntServiceNode>(allNodes.Distinct());
        }
Example #42
        /// <summary>
        /// Fetches the list of followed UIDs in parallel
        /// </summary>
        /// <param name="uid"></param>
        /// <returns></returns>
        public static List<long> AttentionSpiderParallelExecute(string uid)
        {
            ConcurrentBag<long> concurrentBag = new ConcurrentBag<long>();

            var weiboUser = AnalyseCnPage.AnalysisUserHome(GetWeiboUser(uid));
            var totalPage = weiboUser.FriendsCount / 10 + 1;

            Action<int> action = page =>
            {
                //retry up to three times on network errors
                for (int i = 0; i < 3; i++)
                {
                    var pageStr = GetFollowers(weiboUser, page);
                    var pageList = AnalyseCnPage.AnalysisFollowers(pageStr);
                    if (pageList != null)
                    {
                        foreach (long id in pageList)
                        {
                            concurrentBag.Add(id);
                        }
                        break;
                    }
                    CNHttpWorkLogger.Info("Failed to fetch page {1} of followed users for user {0}", uid, page);
                }
            };

            ParallelOptions po = new ParallelOptions { MaxDegreeOfParallelism = 32 };
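            // Parallel.For's upper bound is exclusive, so this processes pages 1 through totalPage - 1.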
            Parallel.For(1, totalPage, po, action);
            return concurrentBag.Distinct().ToList();
        }
Example #43
        static void Main(string[] args)
        {
            var types = new ConcurrentBag<string>();
            var jsonPath = @"E:\tmp\ibm\ibmresultjson";
            var files = Directory.EnumerateFiles(jsonPath).ToArray();
            var outputDir = @"E:\tmp\ibm\ibmcorpus";
            Parallel.For(0, files.Length,
                (int i) =>
                {
                    ConvertJsonToCorpus(files[i], Path.Combine(outputDir, Path.GetFileName(files[i])), types);
                });

            Console.WriteLine("Types {0}", types.Count);
            foreach (var type in types.Distinct())
            {
                Console.WriteLine(type);
            }
            Console.ReadLine();
        }
Example #44
        private void button1_Click(object sender, EventArgs e)
        {
            var cnt = tbCount.Text.AsInt();

            var bag = new ConcurrentBag<FID>();

            if (chkParallel.Checked)
                Parallel.For(0, cnt, (i) =>
                {
                    bag.Add(FID.Generate());
                });
            else
                for (var i = 0; i < cnt; i++)
                    bag.Add(FID.Generate());

            var sb = new StringBuilder();
            var c = 0;
            foreach (var id in bag)
            {
                sb.AppendLine("{0}:    {1}  ->  {2}".Args(c, id.ID, id));
                c++;
                if (c > 10000)
                {
                    sb.AppendLine("......more......");
                    break;
                }
            }

            //Uncomment to cause duplicates
            //var v = bag.FirstOrDefault();
            //bag.Add(v);
            //bag.Add(v);//duplicate

            if (bag.Count == bag.Distinct().Count())
                sb.Insert(0, "No Duplicates in the set of {0:n2}\r\n".Args(bag.Count));
            else
                sb.Insert(0, "DUPLICATES!!!!!!!!!!!!! in the set of {0:n2}\r\n\r\n\r\n".Args(bag.Count));

            tbDump.Text = sb.ToString();
        }
Example #45
        async static Task<string[]> GetPeopleTitles(string listTitle, List<string> previousLists = null, int level = 0)
        {
            if ((previousLists != null && previousLists.Contains(listTitle)) || level >= 2)
                return new string[0];
            Console.WriteLine("Getting {0}.", listTitle);
            try
            {
                string peoplePage = await Utilities.GetPage(listTitle);
                if (previousLists == null)
                    previousLists = new List<string>();
                previousLists.Add(listTitle);

                var peopleTitles = new List<string>();
                Regex personRegex = new Regex(@"(?<=\*[^\[]*\[\[)[^\[\]\|]+");

                var matches = personRegex.Matches(peoplePage);
                foreach (Match match in matches)
                {
                    if (!match.Value.Contains("Category") && !match.Value.Contains("List") && !match.Value.Contains(" people") && !match.Value.Contains(':'))
                    {
                        peopleTitles.Add(match.Value.Replace(' ', '_'));
                    }
                }

                // check for other people lists in this list
                Regex listRegex = new Regex(@"(?<=\[\[)List of[^\]]+");
                var listMatches = listRegex.Matches(peoplePage);
                if (listMatches.Count > 0)
                {
                    var listNames = new List<string>();
                    foreach (Match match in listMatches)
                    {
                        listNames.Add(match.Value);
                    }
                    var titles = new ConcurrentBag<string>();
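                    // NOTE: Parallel.ForEach does not await async lambdas, so some nested titles may not
                    // have been collected yet when Distinct() is taken below.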
                    Parallel.ForEach(listNames, async name =>
                    {
                        if (!previousLists.Contains(name))
                        {
                            var nestedTitles = await GetPeopleTitles(name, previousLists, level + 1);
                            foreach (var title in nestedTitles)
                            {
                                titles.Add(title);
                            }
                        }
                    });
                    peopleTitles.AddRange(titles.Distinct());
                }

                return peopleTitles.ToArray();
            }
            catch
            {
                return new string[0];
            }

        }
Example #46
        private static void ProcessPics(Task t, ConcurrentBag<Uri> picsToGet)
        {
            if (t.Exception != null)
            {
                throw t.Exception.Flatten();
            }
            string[] existingFiles = Directory.GetFiles(Settings.Default.FacebookUsersSettings.PhotoDirectory);
            IEnumerable<string> filesToDelete =
                existingFiles.Except(
                    picsToGet.Distinct().Select(
                        x =>
                        Path.Combine(Settings.Default.FacebookUsersSettings.PhotoDirectory,
                                     Path.GetFileName(x.AbsoluteUri))));

            try
            {
                foreach (string file in filesToDelete)
                {
                    File.Delete(file);
                }
            }
            catch
            {
            }

            //download pictures from the list that are not already on disk
            IEnumerable<Uri> filesToDownload =
                picsToGet.Distinct().Where(
                    x =>
                    !existingFiles.Contains(Path.Combine(Settings.Default.FacebookUsersSettings.PhotoDirectory,
                                                         Path.GetFileName(x.AbsoluteUri))));

            Parallel.ForEach(filesToDownload, DownloadFiles);
        }
Example #47
        public void UseSameThreadForAllRequests()
        {
            var bag = new ConcurrentBag<int>();

            using (var server = new HttpServer("http://*:1234/"))
            {
                server.RAW("")
                      .Subscribe(ctx =>
                                     {
                                         bag.Add(Thread.CurrentThread.ManagedThreadId);
                                         ctx.Respond(200);
                                     });
                Parallel.For(1, 1000, i => Browser.ExecuteGet("http://localhost:1234/"));

                bag.Distinct().Count()
                   .Should("The default scheduler should be Event Loop, and all subscriber run in same thread")
                   .Be.EqualTo(1);

            }
        }
Example #48
        public static void Run(string configurationFileName)
        {
            TextWriter gDisaggregatedCsvFile = null;

            TextWriter lAggregateMarginalDamage = null;

            TextWriter lGlobalInputCsv = null;
            TextWriter lRegionInputCsv = null;
            TextWriter lYearInputCsv = null;
            TextWriter lRegionYearInputCsv = null;

            SimulationManager lSimulationManager;

            var lDefaultConfigurationFile = Path.Combine(Path.Combine(Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location), "Data"), "DefaultSimulation.xml");
            lSimulationManager = new SimulationManager(configurationFileName == null ? lDefaultConfigurationFile : configurationFileName);
            lSimulationManager.Load();

            if (!Directory.Exists(ConsoleApp.OutputPath))
                Directory.CreateDirectory(ConsoleApp.OutputPath);

            var lRandom = GetNewRandom(lSimulationManager);

            lAggregateMarginalDamage = TextWriter.Synchronized(new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Aggregate marginal damage.csv")));
            var TempOutputFile = new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Temp.csv"));

            if (lSimulationManager.OutputVerbal)
            {
                lAggregateMarginalDamage.Write("Scenario");
                lAggregateMarginalDamage.Write(";");
                lAggregateMarginalDamage.Write("Gas");
                lAggregateMarginalDamage.Write(";");
                lAggregateMarginalDamage.Write("Emissionyear");
                lAggregateMarginalDamage.Write(";");
                lAggregateMarginalDamage.Write("Run");
                lAggregateMarginalDamage.Write(";");
                lAggregateMarginalDamage.Write("Weightingscheme");
                lAggregateMarginalDamage.Write(";");
                lAggregateMarginalDamage.Write("Marginal damage");
                lAggregateMarginalDamage.WriteLine();
            }

            TextWriter lSummaryDamage = TextWriter.Synchronized(new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Summary damage.csv")));

            if (lSimulationManager.OutputVerbal)
            {
                lSummaryDamage.WriteLine("Scenario;Gas;Emissionyear;Weightingscheme;Bestguess;Mean;TrimMean0.1%;TrimMean1%;TrimMean5%;Median;Std;Var;Skew;Kurt;Min;Max;SE");
            }

            if (lSimulationManager.Runs.Exists((Run r) => r.OutputDisaggregatedData))
            {
                gDisaggregatedCsvFile = TextWriter.Synchronized(new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Fact YearRegionSectorWeightingscheme.csv")));

                if (lSimulationManager.OutputVerbal)
                {
                    gDisaggregatedCsvFile.Write("Scenario");
                    gDisaggregatedCsvFile.Write(";");
                    gDisaggregatedCsvFile.Write("Run");
                    gDisaggregatedCsvFile.Write(";");
                    gDisaggregatedCsvFile.Write("Marginal Gas");
                    gDisaggregatedCsvFile.Write(";");
                    gDisaggregatedCsvFile.Write("Marginal Emission Year");
                    gDisaggregatedCsvFile.Write(";");
                    gDisaggregatedCsvFile.Write("Year");
                    gDisaggregatedCsvFile.Write(";");
                    gDisaggregatedCsvFile.Write("Region");
                    gDisaggregatedCsvFile.Write(";");
                    gDisaggregatedCsvFile.Write("Sector");
                    gDisaggregatedCsvFile.Write(";");
                    gDisaggregatedCsvFile.Write("Weightingscheme");
                    gDisaggregatedCsvFile.Write(";");
                    gDisaggregatedCsvFile.Write("Damage");
                    gDisaggregatedCsvFile.WriteLine();
                }
            }

            using (var lDimGasCsv = new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Dim Gas.csv")))
            {
                lDimGasCsv.WriteLine("0;C");
                lDimGasCsv.WriteLine("1;CH4");
                lDimGasCsv.WriteLine("2;N2O");
                lDimGasCsv.WriteLine("3;SF6");
            }

            using (var lDimSectorCsv = new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Dim Sector.csv")))
            {
                lDimSectorCsv.WriteLine("0;eloss;Water");
                lDimSectorCsv.WriteLine("1;eloss;Forests");
                lDimSectorCsv.WriteLine("2;eloss;Heating");
                lDimSectorCsv.WriteLine("3;eloss;Cooling");
                lDimSectorCsv.WriteLine("4;eloss;Agriculture");
                lDimSectorCsv.WriteLine("5;eloss;Dryland");
                lDimSectorCsv.WriteLine("6;eloss;SeaProtection");
                lDimSectorCsv.WriteLine("7;eloss;Imigration");
                lDimSectorCsv.WriteLine("8;sloss;Species");
                lDimSectorCsv.WriteLine("9;sloss;Death");
                lDimSectorCsv.WriteLine("10;sloss;Morbidity");
                lDimSectorCsv.WriteLine("11;sloss;Wetland");
                lDimSectorCsv.WriteLine("12;sloss;Emigration");
                lDimSectorCsv.WriteLine("13;eloss;Hurricane");
                lDimSectorCsv.WriteLine("14;eloss;ExtratropicalStorms");
            }

            using (var lDimScenarioCsv = new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Dim Scenario.csv")))
            {
                foreach (Scenario lScenario in lSimulationManager.Scenarios)
                {
                    // Write Scenario dimension file
                    lDimScenarioCsv.Write(lScenario.Id);
                    lDimScenarioCsv.Write(";");
                    lDimScenarioCsv.Write(lScenario.Name);
                    lDimScenarioCsv.WriteLine();
                }
            }

            using (var lDimYearCsv = new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Dim Year.csv")))
            {
                for (int i = 1950; i <= 2300; i++)
                {
                    string lYearStr = i.ToString();
                    lDimYearCsv.Write(lYearStr);
                    lDimYearCsv.Write(";");
                    lDimYearCsv.Write(lYearStr.Substring(0, 2));
                    lDimYearCsv.Write("xx;");
                    lDimYearCsv.Write(lYearStr.Substring(0, 3));
                    lDimYearCsv.Write("x;");
                    lDimYearCsv.Write(lYearStr.Substring(0, 4));
                    lDimYearCsv.WriteLine();
                }
            }

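            // Thread-safe collection of every emission year handled by the runs; duplicates are
            // removed with Distinct() when the dimension file is written further down.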
            var lDimEmissionYear = new ConcurrentBag<Timestep>();

            if (lSimulationManager.OutputInputParameters)
            {
                lGlobalInputCsv = TextWriter.Synchronized(new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Fact Parameter.csv")));
                lRegionInputCsv = TextWriter.Synchronized(new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Fact ParameterRegion.csv")));
                lYearInputCsv = TextWriter.Synchronized(new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Fact ParameterYear.csv")));
                lRegionYearInputCsv = TextWriter.Synchronized(new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Fact ParameterYearRegion.csv")));
            }

            if (lSimulationManager.RunParallel && !lSimulationManager.SameRandomStreamPerRun)
            {
                throw new ArgumentException("Cannot run in parallel without a separate random stream per run");
            }

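            // MaxDegreeOfParallelism of -1 places no limit on concurrency; 1 forces the runs to execute sequentially.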
            var parallelOptions = new System.Threading.Tasks.ParallelOptions()
            {
                MaxDegreeOfParallelism = lSimulationManager.RunParallel ? -1 : 1
            };

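            // A single best-guess run before entering the parallel loop - presumably to perform any
            // one-time initialization up front rather than racing on it inside Parallel.ForEach.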
            if (lSimulationManager.RunParallel)
            {
                var parameterDefinition = new Parameters();
                parameterDefinition.ReadExcelFile(@"Data\Parameter - base.xlsm");

                // Create a new model that inits itself from the parameters just loaded
                var model = new Esmf.Model.ModelTyped<FundWorkflow>();
                model.Run(parameterDefinition.GetBestGuess());
            }

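            // One iteration per Run; the thread-local init delegate pins the invariant culture (so number
            // formatting in the shared CSVs does not depend on the machine's locale) and lowers thread priority.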
            System.Threading.Tasks.Parallel.ForEach<Run, object>(
                lSimulationManager.Runs,
                parallelOptions,
                () =>
                {
                    Thread.CurrentThread.CurrentCulture = CultureInfo.InvariantCulture;
                    Thread.CurrentThread.Priority = ThreadPriority.BelowNormal;
                    return null;
                },
                (lRun, loopState, dummy) =>
                {
                    // Per-run random stream (required when running in parallel), passed to the
                    // marginal runs below so parallel runs do not share one generator.
                    var tlRandom = lSimulationManager.SameRandomStreamPerRun ? GetNewRandom(lSimulationManager) : lRandom;

                    var lParam = new Parameters();

                    foreach (string filename in lRun.Scenario.ExcelFiles)
                        lParam.ReadExcelFile(filename);

                    Console.WriteLine(lRun.Scenario.Name);

                    if (lRun.Mode == RunMode.MarginalRun)
                    {
                        var lMarginalRun = new TMarginalRun(lRun, lRun.MarginalGas, lRun.EmissionYear, ConsoleApp.OutputPath, lParam, tlRandom);

                        lDimEmissionYear.Add(lRun.EmissionYear);

                        lMarginalRun.AggregateDamageCsv = lAggregateMarginalDamage;
                        lMarginalRun.SummaryCsv = lSummaryDamage;

                        if (lRun.OutputDisaggregatedData)
                        {
                            lMarginalRun.YearRegionSectorWeightingSchemeCsv = gDisaggregatedCsvFile;
                        }

                        if (lSimulationManager.OutputInputParameters)
                        {
                            lMarginalRun.GlobalInputCsv = lGlobalInputCsv;
                            lMarginalRun.RegionInputCsv = lRegionInputCsv;
                            lMarginalRun.YearInputCsv = lYearInputCsv;
                            lMarginalRun.RegionYearInputCsv = lRegionYearInputCsv;
                        }

                        var watch = new System.Diagnostics.Stopwatch();
                        watch.Start();
                        lMarginalRun.Run();
                        watch.Stop();
                        //Console.WriteLine("Elapsed time: {0}", watch.Elapsed);
                    }
                    else if (lRun.Mode == RunMode.FullMarginalRun)
                    {
                        MarginalGas[] gases = { MarginalGas.C };
                        foreach (MarginalGas gas in gases)
                        {
                            for (int emissionyear = 2010; emissionyear <= 2100; emissionyear += 5)
                            {
                                lDimEmissionYear.Add(Timestep.FromYear(emissionyear));

                                Console.WriteLine("Now doing year {0} and gas {1}", emissionyear, gas);
                                // DA: Use MargMain for marginal cost, use Main for total cost and optimisation
                                // modes
                                var lMarginalRun = new TMarginalRun(lRun, gas, Timestep.FromYear(emissionyear), ConsoleApp.OutputPath, lParam, tlRandom);

                                lMarginalRun.AggregateDamageCsv = lAggregateMarginalDamage;

                                if (lRun.OutputDisaggregatedData)
                                {
                                    lMarginalRun.YearRegionSectorWeightingSchemeCsv = gDisaggregatedCsvFile;
                                }

                                if (lSimulationManager.OutputInputParameters)
                                {
                                    lMarginalRun.GlobalInputCsv = lGlobalInputCsv;
                                    lMarginalRun.RegionInputCsv = lRegionInputCsv;
                                    lMarginalRun.YearInputCsv = lYearInputCsv;
                                    lMarginalRun.RegionYearInputCsv = lRegionYearInputCsv;
                                }

                                var watch = new System.Diagnostics.Stopwatch();
                                watch.Start();
                                lMarginalRun.Run();
                                watch.Stop();
                                //Console.WriteLine("Elapsed time: {0}", watch.Elapsed);
                            }
                        }
                    }
                    else if (lRun.Mode == RunMode.TotalRun)
                    {
                        // DA: Use MargMain for marginal cost, use Main for total cost and optimisation
                        // modes
                        var lTotalDamageRun = new TotalDamage(lRun, ConsoleApp.OutputPath, lParam, lRun.EmissionYear);

                        lTotalDamageRun.AggregateDamageCsv = lAggregateMarginalDamage;

                        if (lRun.OutputDisaggregatedData)
                        {
                            lTotalDamageRun.YearRegionSectorWeightingSchemeCsv = gDisaggregatedCsvFile;
                        }

                        if (lSimulationManager.OutputInputParameters)
                        {
                            lTotalDamageRun.GlobalInputCsv = lGlobalInputCsv;
                            lTotalDamageRun.RegionInputCsv = lRegionInputCsv;
                            lTotalDamageRun.YearInputCsv = lYearInputCsv;
                            lTotalDamageRun.RegionYearInputCsv = lRegionYearInputCsv;
                        }

                        lTotalDamageRun.Run();

                    }
                    return null;
                },
                (dummy) => { return; });

            lSummaryDamage.Close();
            lAggregateMarginalDamage.Close();
            TempOutputFile.Close();

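            // Collapse duplicates and sort the emission years collected across all runs before writing the dimension file.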
            using (var lDimEmissionYearCsv = new StreamWriter(Path.Combine(ConsoleApp.OutputPath, "Output - Dim Emissionyear.csv")))
            {
                foreach (Timestep emissionyear in lDimEmissionYear.Distinct().OrderBy(i => i.Value))
                    lDimEmissionYearCsv.WriteLine("{0};{1}", emissionyear, emissionyear);
            }

            if (lSimulationManager.Runs.Exists((Run run) => run.OutputDisaggregatedData))
            {
                gDisaggregatedCsvFile.Close();
            }

            if (lSimulationManager.OutputInputParameters)
            {
                lGlobalInputCsv.Close();
                lRegionInputCsv.Close();
                lYearInputCsv.Close();
                lRegionYearInputCsv.Close();
            }
        }
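The driver above combines three thread-safety building blocks: TextWriter.Synchronized for shared CSV output, a thread-local init delegate in Parallel.ForEach, and a ConcurrentBag whose contents are de-duplicated with Distinct(). The following is a minimal, self-contained sketch of that combination; the file names, run count, and record layout are illustrative assumptions, not part of the code above.

using System;
using System.Collections.Concurrent;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

class SynchronizedCsvSketch
{
    static void Main()
    {
        // Thread-safe wrapper: each Write/WriteLine call is serialized, so parallel
        // iterations can append to the same CSV without corrupting it.
        TextWriter results = TextWriter.Synchronized(new StreamWriter("results.csv"));
        results.WriteLine("Run;Value");

        // One entry per iteration; duplicates are dropped later with Distinct().
        var emissionYears = new ConcurrentBag<int>();

        Parallel.ForEach<int, object>(
            Enumerable.Range(0, 20),                             // stand-in for the list of runs
            new ParallelOptions { MaxDegreeOfParallelism = -1 }, // -1 = no limit
            () =>
            {
                // Thread-local init, as in the driver: invariant culture keeps the
                // number formatting in the shared CSV locale-independent.
                Thread.CurrentThread.CurrentCulture = CultureInfo.InvariantCulture;
                return (object)null;
            },
            (run, loopState, local) =>
            {
                emissionYears.Add(2010 + (run % 5) * 5);
                results.WriteLine("{0};{1}", run, run * 1.5);
                return local;
            },
            local => { });

        results.Close();

        // De-duplicate and order the collected values, mirroring how the emission
        // year dimension file is produced above.
        using (var dim = new StreamWriter("Dim Emissionyear.csv"))
        {
            foreach (var year in emissionYears.Distinct().OrderBy(y => y))
                dim.WriteLine("{0};{1}", year, year);
        }
    }
}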
        public void GetEnumerator_IfMultipleThreadsPullingFiles_ThenEachFileReturnedOnce()
        {
            int numFiles = 1000;
            var receivedFiles = new ConcurrentBag<string>();

            // Create all the files.
            for (var fileNum = 0; fileNum < numFiles; ++fileNum)
            {
                File.Create(Path.Combine(_directory, fileNum.ToString())).Dispose();
            }

            using (CancellationTokenSource cts = new CancellationTokenSource(20 * 1000/*a fail-safe*/))
            {
                DateTime startTime = DateTime.UtcNow;
                // Get the results on several parallel threads.
                Parallel.ForEach<string>(new CreatedFileCollection(cts.Token, _directory), new ParallelOptions() { MaxDegreeOfParallelism = 10 }, file =>
                {
                    receivedFiles.Add(file);
                    // If we're getting near the end, assume we can finish in the time we've taken so far.
                    if (receivedFiles.Count == numFiles * 3 / 4)
                        cts.CancelAfter(DateTime.UtcNow - startTime);
                });
            }

            Assert.AreEqual(numFiles, receivedFiles.Count);
            var distinctFiles = receivedFiles.Distinct().Count();
            Assert.AreEqual(numFiles, distinctFiles);
        }
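The test relies on CreatedFileCollection handing each file to exactly one of the competing enumerators. That type is not shown here, so the sketch below only illustrates one way such exactly-once semantics can be implemented, using a hypothetical OneShotCollection backed by a ConcurrentQueue; the ConcurrentBag/Distinct checks at the end mirror the test's assertions.

using System;
using System.Collections;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

// Hypothetical stand-in for CreatedFileCollection: every queued item is handed to
// exactly one consumer, no matter how many threads enumerate concurrently.
class OneShotCollection<T> : IEnumerable<T>
{
    private readonly ConcurrentQueue<T> _items;

    public OneShotCollection(IEnumerable<T> items)
    {
        _items = new ConcurrentQueue<T>(items);
    }

    public IEnumerator<T> GetEnumerator()
    {
        // TryDequeue is atomic, so two enumerators can never yield the same item.
        while (_items.TryDequeue(out T item))
            yield return item;
    }

    IEnumerator IEnumerable.GetEnumerator()
    {
        return GetEnumerator();
    }
}

class OneShotDemo
{
    static void Main()
    {
        const int numItems = 1000;
        var source = new OneShotCollection<int>(Enumerable.Range(0, numItems));
        var received = new ConcurrentBag<int>();

        Parallel.ForEach(source, new ParallelOptions { MaxDegreeOfParallelism = 10 },
            item => received.Add(item));

        // Same shape as the assertions in the test: every item seen, none seen twice.
        Console.WriteLine(received.Count == numItems);               // True
        Console.WriteLine(received.Distinct().Count() == numItems);  // True
    }
}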