private static void Execute(FlexibleOptions options) { logger.Info("Start"); // Setting up Threading Environment int threadsCount = ProgramOptions.Get <int> ("Threads", Environment.ProcessorCount * 2); // Executing Multithreaded dequeueing ManualResetEvent[] doneEvents = new ManualResetEvent[threadsCount]; for (int threadIndex = 0; threadIndex < threadsCount; threadIndex++) { // Done = False (Thread isn't done doing it's work) doneEvents[threadIndex] = new ManualResetEvent(false); // Setting up SQS Worker SQSWorker worker = new SQSWorker(doneEvents[threadIndex], options); // Setting up Thread into ThreadPool ThreadPool.QueueUserWorkItem(worker.ThreadPoolCallback, threadIndex); } // Wait for all threads in pool to finish processing SQS WaitHandle.WaitAll(doneEvents); logger.Info("All Threads Halted"); logger.Info("All Messages Moved"); }
/// <summary> /// Main program entry point. /// </summary> static void Main(string[] args) { // set error exit code System.Environment.ExitCode = -50; try { // load configurations ProgramOptions = ConsoleUtils.Initialize(args, true); // start execution Execute(ProgramOptions); // check before ending for waitForKeyBeforeExit option if (ProgramOptions.Get("waitForKeyBeforeExit", false)) { ConsoleUtils.WaitForAnyKey(); } } catch (Exception ex) { LogManager.GetCurrentClassLogger().Fatal(ex); // check before ending for waitForKeyBeforeExit option if (ProgramOptions.Get("waitForKeyBeforeExit", false)) { ConsoleUtils.WaitForAnyKey(); } ConsoleUtils.CloseApplication(-60, true); } // set success exit code ConsoleUtils.CloseApplication(0, false); }
public static FlexibleOptions Initialize (string[] args, bool thrownOnError, InitializationOptions options = null)
{
    InitOptions = options;
    DefaultProgramInitialization ();

    ProgramOptions = CheckCommandLineParams (args, thrownOnError);

    if (ProgramOptions.Get<bool> ("help", false) || ProgramOptions.Get<bool> ("h", false))
    {
        show_help ("");
        CloseApplication (0, true);
    }

    // display program initialization header
    if (!Console.IsOutputRedirected)
    {
        ConsoleUtils.DisplayHeader (
            typeof (ConsoleUtils).Namespace.Replace (".SimpleHelpers", ""),
            "options: " + (ProgramOptions == null ? "none" :
                "\n# " + String.Join ("\n# ", ProgramOptions.Options.Select (i => i.Key + "=" + i.Value))));
    }
    else
    {
        var logger = GetLogger ();
        if (logger.IsDebugEnabled)
        {
            logger.Debug ("options: " + (ProgramOptions == null ? "none" :
                "\n# " + String.Join ("\n# ", ProgramOptions.Options.Select (i => i.Key + "=" + i.Value))));
        }
    }

    return ProgramOptions;
}
/// <summary> /// Class Constructor /// </summary> public SQSWorker(ManualResetEvent doneEvent, FlexibleOptions options) { _doneEvent = doneEvent; _programOptions = options; // Logger _logger = LogManager.GetCurrentClassLogger(); }
public void Get_CaseInsensitive_ShouldWork ()
{
    var expected = "v1";
    var opt1 = new FlexibleOptions ();
    opt1.Set ("key1", expected);
    Assert.Equal (expected, opt1.Get ("KEY1"));
}
public void SetAlias_AllowCaseInsensitiveGet_ShouldWork ()
{
    var expected = "v3";
    var opt1 = new FlexibleOptions ();
    opt1.Set ("k3", expected);
    opt1.SetAlias ("k3", "3", "key3");
    Assert.Equal (expected, opt1.Get ("K3"));
    Assert.Equal (expected, opt1.Get ("KEY3"));
}
private static List <string> ParseArgumentAsList(FlexibleOptions options, string key) { // first check if we have a json array var list = options.Get <string[]> (key, null); // fallback to a csv string if (list == null) { list = (options.Get(key, "") ?? "").Split(',', ';'); } return(list.Select(i => i.Trim()).Where(i => !String.IsNullOrEmpty(i)).ToList()); }
public void Merge_WithComplexPriority_ShouldWork()
{
    var opt1 = new FlexibleOptions();
    opt1.Set("k1", "v1");
    opt1.Set("k2", "v2");
    opt1.Set("k3", "v3");
    opt1.SetAlias("k3", "3", "key3");

    var opt2 = new FlexibleOptions();
    opt2.Set("k2", "v2.2");

    var opt3 = new FlexibleOptions();
    opt3.Set("k3", "v3.3");

    // opt1 has lower priority
    var merge = FlexibleOptions.Merge(opt1, opt2, opt3);
    Assert.NotNull(merge);
    Assert.Equal(opt1.Get("k1"), merge.Get("k1"));
    Assert.Equal(opt2.Get("k2"), merge.Get("k2"));
    Assert.Equal(opt3.Get("k3"), merge.Get("k3"));
    // alias
    Assert.Equal(opt3.Get("k3"), merge.Get("key3"));
    Assert.Equal(opt3.Get("k3"), merge.Get("3"));

    // opt1 has higher priority
    merge = FlexibleOptions.Merge(opt2, opt3, opt1);
    Assert.NotNull(merge);
    Assert.Equal(opt1.Get("k1"), merge.Get("k1"));
    Assert.NotEqual(opt2.Get("k2"), merge.Get("k2"));
    Assert.NotEqual(opt3.Get("k3"), merge.Get("k3"));
    Assert.Equal(opt1.Get("k2"), merge.Get("k2"));
    Assert.Equal(opt1.Get("k3"), merge.Get("k3"));
    // alias
    Assert.Equal(opt1.Get("k3"), merge.Get("key3"));
    Assert.Equal(opt1.Get("k3"), merge.Get("3"));

    // opt1 has higher priority, now with null options in the list
    merge = FlexibleOptions.Merge(null, opt2, null, opt3, opt1, null);
    Assert.NotNull(merge);
    Assert.Equal(opt1.Get("k1"), merge.Get("k1"));
    Assert.NotEqual(opt2.Get("k2"), merge.Get("k2"));
    Assert.NotEqual(opt3.Get("k3"), merge.Get("k3"));
    Assert.Equal(opt1.Get("k2"), merge.Get("k2"));
    Assert.Equal(opt1.Get("k3"), merge.Get("k3"));
    // alias
    Assert.Equal(opt1.Get("k3"), merge.Get("key3"));
    Assert.Equal(opt1.Get("k3"), merge.Get("3"));
}
/// <summary> /// Checks the minimum parameters of this application /// </summary> /// <returns></returns> private static bool ValidateConfig(FlexibleOptions options) { if (String.IsNullOrEmpty(options["sourceServer"]) || String.IsNullOrEmpty(options["sourceUsername"]) || String.IsNullOrEmpty(options["sourcePassword"]) || String.IsNullOrEmpty(options["sourceDatabaseName"])) { return(false); } return(true); }
private static async Task ExecuteAsync (FlexibleOptions options, CancellationToken token)
{
    logger.Info ("Start");

    // TODO: implement execution
    // ...
    // if (token.IsCancellationRequested) return;
    // ...

    // publisher task: 10 rounds of 250 test messages
    var publish = Task.Run (() =>
    {
        using (var queue = CreateQueue ())
        {
            for (var j = 0; j < 10; j++)
            {
                for (var i = 0; i < 250; i++)
                    queue.Publish ("teste khalid " + i);
                logger.Debug ("publish progress " + (j + 1) * 250);
            }
        }
    });

    // give the publisher a head start before consuming
    Task.Delay (30000).Wait ();

    // consumer task: process messages in parallel, alternating Nack and Requeue
    var consume = Task.Run (() =>
    {
        int count = 0;
        using (var queue = CreateQueue ())
        {
            //foreach (var i in queue.Get (TimeSpan.FromMinutes (30)))
            ParallelTasks<RabbitWorkMessage>.Process (queue.Get (TimeSpan.FromSeconds (1800)), 30, i =>
            {
                // ...
                //i.Ack ();
                if (count % 2 == 0)
                    i.Nack ();
                else
                    i.Requeue ();
                //Task.Delay (50).Wait ();
                if (count++ % 250 == 0)
                    logger.Debug ("ack progress " + count);
            });
        }
    });

    await publish;
    consume.Wait ();

    logger.Info ("End");
}
public void SetAlias_AllowGet_ShouldWork ()
{
    var expected = "v3";
    var opt1 = new FlexibleOptions ();
    opt1.Set ("k3", expected);
    opt1.SetAlias ("k3", "3", "key3");
    Assert.Equal (expected, opt1.Get ("k3"));
    Assert.Equal (expected, opt1.Get ("key3"));
    Assert.Equal (expected, opt1.Get ("3"));
    Assert.True (String.IsNullOrEmpty (opt1.Get ("KEY")));
}
public void Get_AsDateTime_ShouldWork()
{
    // datetime without milliseconds
    var expected = new DateTime(2015, 01, 02, 03, 04, 05);
    var opt1 = new FlexibleOptions();

    opt1.Set("dtAsDateTime", expected);
    Assert.Equal(expected, opt1.Get<DateTime>("dtAsDateTime", DateTime.MinValue));

    opt1.Set("dtAsString", expected.ToString(System.Globalization.CultureInfo.InvariantCulture));
    Assert.Equal(expected, opt1.Get<DateTime>("dtAsString", DateTime.MinValue));

    opt1.Set("dtAsISOString", expected.ToString("o"));
    Assert.Equal(expected, opt1.Get<DateTime>("dtAsISOString", DateTime.MinValue));

    opt1.Set("dtAsDate", expected.ToString("yyyyMMdd"));
    Assert.Equal(expected.Date, opt1.Get<DateTime>("dtAsDate", DateTime.MinValue));
}
/// <summary> /// Main program entry point. /// </summary> static void Main(string[] args) { // set error exit code System.Environment.ExitCode = -50; try { // load configurations ProgramOptions = ConsoleUtils.Initialize(args, true); CancellationTokenSource cts = new CancellationTokenSource(); System.Console.CancelKeyPress += (s, e) => { logger.Debug("User requested exit (Ctrl + C), exiting..."); e.Cancel = true; cts.Cancel(); }; // start execution ExecuteAsync(ProgramOptions, cts.Token).Wait(); // check before ending for waitForKeyBeforeExit option if (ProgramOptions.Get("waitForKeyBeforeExit", false)) { ConsoleUtils.WaitForAnyKey(); } } catch (Exception ex) { LogManager.GetCurrentClassLogger().Fatal(ex); // check before ending for waitForKeyBeforeExit option if (ProgramOptions != null && ProgramOptions.Get("waitForKeyBeforeExit", false)) { ConsoleUtils.WaitForAnyKey(); } ConsoleUtils.CloseApplication(-60, true); } // set success exit code ConsoleUtils.CloseApplication(0, false); }
public FlexibleOptions ParseAppSettings ()
{
    // parse the local configuration file
    // and load the options listed in it
    var localOptions = new FlexibleOptions ();
    try
    {
        var appSettings = System.Configuration.ConfigurationManager.AppSettings;
        foreach (var k in appSettings.AllKeys)
        {
            localOptions.Set (k, appSettings[k]);
        }
    }
    catch (Exception appSettingsEx)
    {
        if (ThrownOnError)
            throw;
        RaiseErrorEvent (appSettingsEx);
    }
    return localOptions;
}
/// <summary> /// Main program entry point. /// </summary> static void Main (string[] args) { // set error exit code System.Environment.ExitCode = -50; try { // load configurations ProgramOptions = ConsoleUtils.Initialize (args, true); CancellationTokenSource cts = new CancellationTokenSource (); System.Console.CancelKeyPress += (s, e) => { logger.Debug ("User requested exit (Ctrl + C), exiting..."); e.Cancel = true; cts.Cancel (); }; // start execution ExecuteAsync (ProgramOptions, cts.Token).Wait (); // check before ending for waitForKeyBeforeExit option if (ProgramOptions.Get ("waitForKeyBeforeExit", false)) ConsoleUtils.WaitForAnyKey (); } catch (Exception ex) { LogManager.GetCurrentClassLogger ().Fatal (ex); // check before ending for waitForKeyBeforeExit option if (ProgramOptions != null && ProgramOptions.Get ("waitForKeyBeforeExit", false)) ConsoleUtils.WaitForAnyKey (); ConsoleUtils.CloseApplication (-60, true); } // set success exit code ConsoleUtils.CloseApplication (0, false); }
private static void Execute(FlexibleOptions options) { logger.Debug("Start"); // Parsing Arguments - Sanity Check ParseArguments(options); logger.Debug("Opening connections..."); CheckConnections(options); // Reaching Databases MongoServer sourceDatabase = MongoDbContext.GetServer(options.Get("source")); MongoServer targetDatabase = MongoDbContext.GetServer(options.Get("target")); // process list logger.Debug("Start migrating data..."); Migrator.DatabaseCopy(sourceDatabase, targetDatabase, _sourceDatabases, _targetDatabases, _collections, _insertBatchSize, _copyIndexes, _dropCollections, _threads, options); System.Threading.Thread.Sleep(1000); logger.Debug("Done migrating data!"); }
private static void Execute(FlexibleOptions options) { logger.Info ("Start"); // Sanity Checks string inputPath = options.Get("inputFile"); string outputPath = options.Get("outputFile"); if (String.IsNullOrEmpty(inputPath)) { logger.Fatal ("Parameter {--inputfile} is mandatory"); System.Environment.Exit (-101); } if (String.IsNullOrEmpty (inputPath)) { logger.Fatal ("Parameter {--outputFile} is mandatory"); System.Environment.Exit (-102); } if (!File.Exists(inputPath)) { logger.Fatal ("Parameter {--inputfile} should point to an existing file"); System.Environment.Exit (-103); } // Control Variables int processed = 0, errors = 0; // Processing Urls using(StreamReader fReader = new StreamReader (inputPath)) { using (StreamWriter fWriter = new StreamWriter (outputPath)) { fWriter.AutoFlush = true; string fLine; using (WebRequests client = new WebRequests ()) { // Configuring HTTP Client client.Referer = "http://sortfolio.com/"; client.Host = "sortfolio.com"; client.Encoding = "gzip, deflate, sdch"; client.Headers.Add ("Upgrade-Insecure-Requests", "1"); while ((fLine = fReader.ReadLine ()) != null) { // Processing Only "Listings" if (fLine.IndexOf ("/listings/") >= 0) { try { // Reaching Listing Page string url = fLine.Split (';')[0]; string htmlReponse = client.Get (url); // Control Variable Inc. processed++; // Sanity Check if (String.IsNullOrEmpty (htmlReponse)) { logger.Error ("Failed to read response for {0}", url); errors++; continue; } // Parsing Data HtmlDocument map = new HtmlDocument (); map.LoadHtml (htmlReponse); var tmpNode = map.DocumentNode.SelectSingleNode ("//script[contains(@id,'mail_to')]"); if (tmpNode == null) { continue; } string email = tmpNode.InnerText; // Extracting actual email data out of decoded string email = email.Replace ("eval(decodeURIComponent('", String.Empty).Replace ("'", String.Empty).Replace (")", String.Empty); email = System.Web.HttpUtility.UrlDecode (email); Regex emailRegex = new Regex (@"Node\('(.+?)'\)\);"); email = emailRegex.Match (email).Groups[1].Value; tmpNode = map.DocumentNode.SelectSingleNode ("//*[@id='content']/div/div[2]/p/strong"); string budget = tmpNode == null ? String.Empty : tmpNode.InnerText; tmpNode = map.DocumentNode.SelectSingleNode ("//span[@itemprop='addressLocality']"); string headQuarter = tmpNode == null ? String.Empty : tmpNode.InnerText; tmpNode = map.DocumentNode.SelectSingleNode ("//a[@class='website']"); string website = tmpNode == null ? String.Empty : tmpNode.InnerText; // Writing to output file fWriter.WriteLine ("{0};{1};{2};{3};{4}", url, headQuarter, website, email, budget); logger.Info ("Processed: {0}", url); if (processed % 100 == 0) { logger.Info ("\tProgress:{0} - {1}", processed, errors); } } catch(Exception ex) { logger.Error (ex); errors++; } } } } } } logger.Info ("End"); Console.ReadLine (); }
private static void CreateIndexes(MongoCollection<BsonDocument> sourceCollection, MongoCollection<BsonDocument> targetCollection, FlexibleOptions options)
{
    if (options == null)
    {
        options = new FlexibleOptions();
    }
    var logger = NLog.LogManager.GetLogger("CreateIndexes");
    logger.Debug("{2} - {0}.{1} - Start index creation", sourceCollection.Database.Name, sourceCollection.Name, Thread.CurrentThread.ManagedThreadId);

    var command = new CommandDocument();
    command.Add("createIndexes", targetCollection.Name);
    var indexList = new BsonArray();
    command.Add("indexes", indexList);

    // Copying indexes - if any
    foreach (IndexInfo idx in sourceCollection.GetIndexes().ToList())
    {
        // Skipping the default "_id_" index - since every mongodb collection has it
        if (idx.Name == "_id_")
        {
            continue;
        }

        // Recreating index options based on the current index options
        var opts = IndexOptions.SetBackground(idx.IsBackground || options.Get("indexes-background", false))
            .SetSparse(idx.IsSparse || options.Get("indexes-sparse", false))
            .SetUnique(idx.IsUnique).SetName(idx.Name).SetDropDups(idx.DroppedDups);

        if (idx.TimeToLive < TimeSpan.MaxValue)
        {
            opts.SetTimeToLive(idx.TimeToLive);
        }

        // Adding index
        try
        {
            // the createIndexes command was introduced in MongoDB 2.6;
            // fall back to individual index creation on older servers
            var serverVersion = targetCollection.Database.Server.BuildInfo.Version;
            if (serverVersion.Major < 2 || (serverVersion.Major == 2 && serverVersion.Minor < 6))
            {
                logger.Debug("{3} - {0}.{1} - Creating index: {2}", sourceCollection.Database.Name, sourceCollection.Name, idx.Name, Thread.CurrentThread.ManagedThreadId);
                targetCollection.CreateIndex(idx.Key, opts);
            }
            else
            {
                logger.Debug("{3} - {0}.{1} - Prepare index creation: {2}", sourceCollection.Database.Name, sourceCollection.Name, idx.Name, Thread.CurrentThread.ManagedThreadId);
                // removes the namespace to allow mongodb to generate the correct one...
                var doc = idx.RawDocument;
                doc.Remove("ns");
                if (options.Get("indexes-background", false))
                {
                    doc["background"] = true;
                }
                if (options.Get("indexes-sparse", false))
                {
                    doc["sparse"] = true;
                }
                indexList.Add(doc);
            }
        }
        catch (Exception ex)
        {
            // check for timeout exception that may occur if the collection is large...
            if (ex is System.IO.IOException || ex is System.Net.Sockets.SocketException ||
                (ex.InnerException != null && ex.InnerException is System.Net.Sockets.SocketException))
            {
                logger.Warn("{3} - {0}.{1} - Timeout creating index {2}, this may occur in large collections. You should check manually after a while.", sourceCollection.Database.Name, sourceCollection.Name, idx.Name, Thread.CurrentThread.ManagedThreadId);
                // wait for index creation....
                for (var i = 0; i < 30; i++)
                {
                    System.Threading.Thread.Sleep(10000);
                    try
                    {
                        if (targetCollection.IndexExists(idx.Name))
                        {
                            break;
                        }
                    }
                    catch { }
                }
            }
            else
            {
                logger.Error(ex, "{0}.{1} - Error creating index {2}", sourceCollection.Database.Name, sourceCollection.Name, idx.Name);
            }
            logger.Warn("{3} - {0}.{1} - Index details: {2}", sourceCollection.Database.Name, sourceCollection.Name, idx.RawDocument.ToJson(), Thread.CurrentThread.ManagedThreadId);
        }
    }

    if (indexList.Count > 0)
    {
        try
        {
            logger.Debug("{3} - {0}.{1} - Creating {2} indexes", sourceCollection.Database.Name, sourceCollection.Name, indexList.Count, Thread.CurrentThread.ManagedThreadId);
            targetCollection.Database.RunCommand(command);
        }
        catch (Exception ex)
        {
            // check for timeout exception that may occur if the collection is large...
            if (ex is System.IO.IOException || ex is System.Net.Sockets.SocketException ||
                (ex.InnerException != null && ex.InnerException is System.Net.Sockets.SocketException))
            {
                logger.Warn("{3} - {0}.{1} - Timeout creating {2} indexes, this may occur in large collections. You should check manually after a while.", sourceCollection.Database.Name, sourceCollection.Name, indexList.Count, Thread.CurrentThread.ManagedThreadId);
                logger.Warn("{3} - {0}.{1} - Index details: {2}", sourceCollection.Database.Name, sourceCollection.Name, command.ToJson(), Thread.CurrentThread.ManagedThreadId);
            }
            else
            {
                logger.Error(ex, "{2} - {0}.{1} - Error creating indexes", sourceCollection.Database.Name, sourceCollection.Name, Thread.CurrentThread.ManagedThreadId);
                logger.Error("{3} - {0}.{1} - Index details: {2}", sourceCollection.Database.Name, sourceCollection.Name, command.ToJson(), Thread.CurrentThread.ManagedThreadId);
            }
        }
    }

    logger.Debug("{2} - {0}.{1} - Index creation completed", sourceCollection.Database.Name, sourceCollection.Name, Thread.CurrentThread.ManagedThreadId);
}
/// <summary> /// Copies a certain collection from one database to the other, including Indexes /// </summary> /// <param name="sourceDatabase"></param> /// <param name="targetDatabase"></param> /// <param name="buffer"></param> /// <param name="sourceCollection"></param> /// <param name="insertBatchSize"></param> public static void CopyCollection(MongoDatabase sourceDatabase, MongoDatabase targetDatabase, string sourceCollectionName, string targetCollectionName = "", int insertBatchSize = -1, bool copyIndexes = false, bool dropCollections = false, bool skipCount = false, bool eraseObjectId = false, FlexibleOptions options = null) { var logger = NLog.LogManager.GetLogger("CopyCollection"); try { if (options == null) { options = new FlexibleOptions(); } BsonDocument last = null; // Resets Counter long count = 0, lastCount = 0; int loop = 0; // Reaching Collections var sourceCollection = sourceDatabase.GetCollection(sourceCollectionName); var targetCollection = targetDatabase.GetCollection(String.IsNullOrEmpty(targetCollectionName) ? sourceCollectionName : targetCollectionName); // Skipping System Collections - For Safety Reasons if (sourceCollection.FullName.IndexOf("system.", StringComparison.OrdinalIgnoreCase) >= 0 || sourceCollection.Database.Name.Equals("system", StringComparison.OrdinalIgnoreCase) || sourceCollection.Database.Name.Equals("local", StringComparison.OrdinalIgnoreCase)) { return; } if (!sourceCollection.Exists()) { logger.Warn("{0}.{1} - Collection not found ", sourceDatabase.Name, sourceCollectionName); return; } logger.Debug("{2} - {0}.{1} - Start collection copy.", sourceDatabase.Name, sourceCollectionName, Thread.CurrentThread.ManagedThreadId); // get total records in source long total = 1000000; if (!skipCount) { total = sourceCollection.Count(); } logger.Debug("{2} - {0}.{1} - Total Records Found: {3}", sourceDatabase.Name, sourceCollectionName, Thread.CurrentThread.ManagedThreadId, total); // check stats try { // check if collection is capped collection // since this is a special type of collection, we will skip it if (sourceCollection.IsCapped()) { logger.Warn("{0}.{1} - Found Capped collection (feature not implemented) - Make sure you manually run the ConverToCapped command on this collection after this copy ends", sourceDatabase.Name, sourceCollectionName); } // check if batch size is set to auto if (insertBatchSize < 1) { var stats = sourceCollection.GetStats(); // older mongodb vertions < 1.8, has a 4mb limit for batch insert insertBatchSize = ((4 * 1024 * 1024) / (int)stats.AverageObjectSize) + 1; // also benchmarks didn't show any benefit for batches larger than 100... if (insertBatchSize > 200) { insertBatchSize = 200; } logger.Debug("{0}.{1} - Insert batch size: {2}", sourceDatabase.Name, sourceCollection.Name, insertBatchSize); } } catch (Exception ex) { logger.Warn(ex, "{0}.{1} - Failed to get collection statistics... continuing any way...", sourceDatabase.Name, sourceCollection.Name); } // sanity check if (insertBatchSize <= 0) { insertBatchSize = 100; } // Checking for the need to drop the collection before adding data to it if (targetCollection.Exists()) { long targetCount = targetCollection.Count(); if (options.Get("skip-existing", false) && targetCount > 0) { logger.Info("{0}.{1} - Collection found in target database, skipping... 
[flag 'skip-existing']", sourceDatabase.Name, sourceCollectionName); return; } if (options.Get("resume", false)) { last = targetCollection.Find(null).SetSortOrder(SortBy.Descending("_id")).SetFields("_id").SetLimit(1).FirstOrDefault(); if (last != null) { logger.Debug("{0}.{1} - Resuming collection copy, last _id: {2}", sourceDatabase.Name, sourceCollectionName, last["_id"]); count = targetCount; } } if (options.Get("if-smaller", false) && targetCount >= total) { logger.Debug("{0}.{1} - Collection of same size or larger, skipping... [flag 'if-smaller']", sourceDatabase.Name, sourceCollectionName, last["_id"]); return; } // if the collection is empty and we have collection options, drop it if (HasCollectionCreationOptions(options) && targetCount == 0) { dropCollections = true; } // check if we should drop the collection if (dropCollections && last == null) { try { targetCollection.Drop(); logger.Debug("{0}.{1} - Target collection droped: {2}.{3}.", sourceDatabase.Name, sourceCollectionName, targetDatabase.Name, targetCollection.Name); } catch (Exception ex) { logger.Error(ex, "{0}.{1} - Failed to drop target collection {2}.{3}, aborting collection copy...", sourceDatabase.Name, sourceCollectionName, targetDatabase.Name, targetCollection.Name); return; } } } // try to create the collection CreateCollection(sourceCollection, targetCollection, options); // index creation if (options.Get("copy-indexes-before", false) && options.Get("copy-indexes", false)) { CreateIndexes(sourceCollection, targetCollection, options); } // check for lazy copy options int waitTime = options.Get("lazy-wait", -1); // Local Buffer List <BsonDocument> buffer = new List <BsonDocument> (insertBatchSize); var timer = System.Diagnostics.Stopwatch.StartNew(); // Running Copy foreach (BsonDocument i in SafeQuery(sourceCollection, "_id", null, last)) { // Feedback and Local Buffer count++; buffer.Add(i); // Dumping data to database every 'X' records if (buffer.Count >= insertBatchSize) { try { // Erasing the object ids of the buffer if (eraseObjectId) { buffer.ForEach(t => { t["_id"] = ObjectId.GenerateNewId(); }); } targetCollection.SafeInsertBatch(buffer, 3, true, true); if (loop++ % 150 == 0) { logger.Debug("{7} - {0}.{1} - batch size: {2}, progress: {3} / {4} ({5}), rate: {6}/h ", sourceDatabase.Name, sourceCollection.Name, insertBatchSize, count.ToString("N0"), total.ToString("N0"), ((double)count / total).ToString("0.0%"), ((count - lastCount) / timer.Elapsed.TotalHours).ToString("N1"), Thread.CurrentThread.ManagedThreadId); lastCount = count; timer.Restart(); } if (waitTime > -1) { System.Threading.Thread.Sleep(waitTime); } } catch (Exception ex) { logger.Error(ex); System.Threading.Thread.Sleep(1000); // try again, but whithout try catch to hide the exception this time... 
targetCollection.SafeInsertBatch(buffer, 3, true, true); } buffer.Clear(); } } // Copying Remaining of Local Buffer if (buffer.Count > 0) { try { targetCollection.SafeInsertBatch(buffer, 3, true, true); logger.Debug("{6} - {0}.{1} - batch size: {2}, progress: {3} / {4} ({5}) ", sourceDatabase.Name, sourceCollection.Name, insertBatchSize, count, total, ((double)count / total).ToString("0.0%"), Thread.CurrentThread.ManagedThreadId); } catch (Exception ex) { logger.Error(ex); } buffer.Clear(); } // Checkign for the need to copy indexes aswell if (copyIndexes && !options.Get("copy-indexes-before", false)) { CreateIndexes(sourceCollection, targetCollection, options); } logger.Info("{2} - {0}.{1} - Collection copy completed.", sourceDatabase.Name, sourceCollectionName, Thread.CurrentThread.ManagedThreadId); } catch (Exception ex) { logger.Error(ex, "{2} - {0}.{1} - Error copying collection ", sourceDatabase.Name, sourceCollectionName ?? "", Thread.CurrentThread.ManagedThreadId); return; } }
private static void CreateCollection(MongoCollection<BsonDocument> sourceCollection, MongoCollection<BsonDocument> targetCollection, FlexibleOptions options)
{
    if (targetCollection.Exists())
    {
        return;
    }

    List<string> config = new List<string>();

    // start from a user-supplied WiredTiger configString, if any
    if (!String.IsNullOrWhiteSpace(options.Get("collection-wt-configString")))
    {
        config.AddRange(options.Get("collection-wt-configString", "").Split(',').Select(i => i.Trim()).Where(i => !String.IsNullOrEmpty(i)));
    }

    // override the block compressor, if a valid one was requested
    if (options.HasOption("collection-wt-block-compressor") && valid_wt_compressors.Contains(options.Get("collection-wt-block-compressor", "invalid")))
    {
        config.RemoveAll(i => i.StartsWith("block_compressor=", StringComparison.OrdinalIgnoreCase));
        config.Add("block_compressor=" + options.Get("collection-wt-block-compressor", "").ToLowerInvariant());
    }

    if (!String.IsNullOrWhiteSpace(options.Get("collection-wt-allocation")))
    {
        // Mongodb version 3.0.4 defaults to: "allocation_size=4KB,internal_page_max=4KB,leaf_page_max=32KB,leaf_value_max=1MB"
        if (options.Get("collection-wt-allocation") == "2x")
        {
            config.RemoveAll(i => i.StartsWith("allocation_size=", StringComparison.OrdinalIgnoreCase) ||
                i.StartsWith("leaf_page_max=", StringComparison.OrdinalIgnoreCase) ||
                i.StartsWith("internal_page_max=", StringComparison.OrdinalIgnoreCase));
            config.Add("allocation_size=8KB");
            config.Add("leaf_page_max=64KB");
            config.Add("internal_page_max=8KB");
        }
        else if (options.Get("collection-wt-allocation") == "4x")
        {
            config.RemoveAll(i => i.StartsWith("allocation_size=", StringComparison.OrdinalIgnoreCase) ||
                i.StartsWith("leaf_page_max=", StringComparison.OrdinalIgnoreCase) ||
                i.StartsWith("internal_page_max=", StringComparison.OrdinalIgnoreCase));
            config.Add("allocation_size=16KB");
            config.Add("leaf_page_max=64KB");
            config.Add("internal_page_max=16KB");
        }
        else if (options.Get("collection-wt-allocation") == "8x")
        {
            config.RemoveAll(i => i.StartsWith("allocation_size=", StringComparison.OrdinalIgnoreCase) ||
                i.StartsWith("leaf_page_max=", StringComparison.OrdinalIgnoreCase) ||
                i.StartsWith("internal_page_max=", StringComparison.OrdinalIgnoreCase));
            config.Add("allocation_size=32KB");
            config.Add("leaf_page_max=128KB");
            config.Add("internal_page_max=32KB");
        }
    }

    // apply configuration
    if (config.Count > 0)
    {
        try
        {
            var storageEngineDoc = new BsonDocument("wiredTiger", new BsonDocument("configString", String.Join(",", config)));
            targetCollection.Database.CreateCollection(targetCollection.Name, CollectionOptions.SetStorageEngineOptions(storageEngineDoc));
        }
        catch (Exception ex)
        {
            NLog.LogManager.GetLogger("CreateCollection").Error(ex);
        }
    }
}
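For reference, a minimal sketch (hypothetical option values, not from the original code) of what the method assembles:

// hypothetical result - assuming options { collection-wt-block-compressor: "zlib", collection-wt-allocation: "4x" }
// the CreateCollection call above would create the target collection with a storage-engine document like:
// { storageEngine: { wiredTiger: { configString:
//     "block_compressor=zlib,allocation_size=16KB,leaf_page_max=64KB,internal_page_max=16KB" } } }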
private static bool HasCollectionCreationOptions(FlexibleOptions options)
{
    return (options.HasOption("collection-wt-block-compressor") && valid_wt_compressors.Contains(options.Get("collection-wt-block-compressor", "invalid"))) ||
        (!String.IsNullOrEmpty(options.Get("collection-wt-allocation")));
}
private static void CheckConnections(FlexibleOptions options)
{
    // source server
    if (!String.IsNullOrWhiteSpace(options.Get("source")))
    {
        if (options.Get("source").IndexOf("://") < 1)
        {
            options.Set("source", "mongodb://" + options.Get("source"));
        }
        var mongoUri = new MongoUrlBuilder(options.Get("source"));
        if (mongoUri.ConnectTimeout.TotalSeconds < 30)
        {
            mongoUri.ConnectTimeout = TimeSpan.FromSeconds(30);
        }
        if (mongoUri.SocketTimeout.TotalMinutes < 4)
        {
            mongoUri.SocketTimeout = TimeSpan.FromMinutes(4);
        }
        if (mongoUri.MaxConnectionIdleTime.TotalSeconds < 30)
        {
            mongoUri.MaxConnectionIdleTime = TimeSpan.FromSeconds(30);
        }
        // check for missing uri parameters
        if (!String.IsNullOrWhiteSpace(_sourceUsername) && String.IsNullOrWhiteSpace(mongoUri.Username))
        {
            mongoUri.Username = _sourceUsername;
        }
        if (!String.IsNullOrWhiteSpace(_sourcePassword) && String.IsNullOrWhiteSpace(mongoUri.Password))
        {
            mongoUri.Password = _sourcePassword;
        }
        if (!String.IsNullOrWhiteSpace(_sourceAuthDatabase) && String.IsNullOrWhiteSpace(mongoUri.AuthenticationSource))
        {
            mongoUri.AuthenticationSource = _sourceAuthDatabase;
        }
        options.Set("source", mongoUri.ToString());
    }
    else
    {
        options.Set("source", MongoDbContext.BuildConnectionString(_sourceUsername, _sourcePassword, true, true, _sourceServer, 30000, 4 * 60000, _sourceAuthDatabase));
    }

    // check source connection
    try
    {
        MongoDbContext.GetServer(options.Get("source")).Ping();
    }
    catch (Exception ex)
    {
        logger.Error("Failed to connect to source mongodb server. Uri: {0}. Details: {1}", options.Get("source"), ex.Message);
        ConsoleUtils.CloseApplication(-111, true);
    }

    // target server
    if (!String.IsNullOrWhiteSpace(options.Get("target")))
    {
        if (options.Get("target").IndexOf("://") < 1)
        {
            options.Set("target", "mongodb://" + options.Get("target"));
        }
        var mongoUri = new MongoUrlBuilder(options.Get("target"));
        if (mongoUri.ConnectTimeout.TotalSeconds < 30)
        {
            mongoUri.ConnectTimeout = TimeSpan.FromSeconds(30);
        }
        if (mongoUri.SocketTimeout.TotalMinutes < 4)
        {
            mongoUri.SocketTimeout = TimeSpan.FromMinutes(4);
        }
        if (mongoUri.MaxConnectionIdleTime.TotalSeconds < 30)
        {
            mongoUri.MaxConnectionIdleTime = TimeSpan.FromSeconds(30);
        }
        // check for missing uri parameters
        if (!String.IsNullOrWhiteSpace(_targetUsername) && String.IsNullOrWhiteSpace(mongoUri.Username))
        {
            mongoUri.Username = _targetUsername;
        }
        if (!String.IsNullOrWhiteSpace(_targetPassword) && String.IsNullOrWhiteSpace(mongoUri.Password))
        {
            mongoUri.Password = _targetPassword;
        }
        if (!String.IsNullOrWhiteSpace(_targetAuthDatabase) && String.IsNullOrWhiteSpace(mongoUri.AuthenticationSource))
        {
            mongoUri.AuthenticationSource = _targetAuthDatabase;
        }
        options.Set("target", mongoUri.ToString());
    }
    else
    {
        options.Set("target", MongoDbContext.BuildConnectionString(_targetUsername, _targetPassword, true, true, _targetServer, 30000, 4 * 60000, _targetAuthDatabase));
    }

    // check target connection
    try
    {
        MongoDbContext.GetServer(options.Get("target")).Ping();
    }
    catch (Exception ex)
    {
        logger.Error("Failed to connect to target mongodb server. Uri: {0}. Details: {1}", options.Get("target"), ex.Message);
        ConsoleUtils.CloseApplication(-112, true);
    }
}
private static void Execute(FlexibleOptions options) { Logger.Info("Start"); // Args Sanity Check if (options.Options == null || options.Options.Count == 0) { Console.WriteLine("No arguments received."); System.Environment.Exit(-101); } // Prompts for user Input Console.WriteLine("Is the configuration correct ? Y/N"); var key = Console.ReadKey().Key; // Checking Key if (key == ConsoleKey.N) // N = "NO" { Console.WriteLine(" => 'NO' : Aborting"); System.Environment.Exit(-102); } else if (key != ConsoleKey.Y) // Anything other than "N" and "Y" is an error. { Console.WriteLine(" => 'Wrong Key Pressed' : Expected either 'Y' or 'N'"); System.Environment.Exit(-102); } Console.WriteLine(" => Proceeding with Export."); // Sanity Check of Config and Arguments if (!ValidateConfig(options)) { Console.ForegroundColor = ConsoleColor.Red; Console.WriteLine("Missing MongoDB Configuration Parameter. (Server, Database, Collection and Credentials are Mandatory)"); Console.ForegroundColor = ConsoleColor.White; System.Environment.Exit(-103); } // Creating instance of MongoDB String sourceConnString = MongoDbContext.BuildConnectionString(options["sourceUsername"], options["sourcePassword"], options["sourceServer"], options["authDatabaseName"]); // Reaching Databases MongoDatabase sourceDatabase = MongoDbContext.GetServer(sourceConnString).GetDatabase(options["sourceDatabaseName"]); // Assembling "Query" to MongoDB, if any query text was provided QueryDocument query = String.IsNullOrWhiteSpace(options["mongoQuery"]) ? null : new QueryDocument(QueryDocument.Parse(options["mongoQuery"])); // Checking if the provided Collection Exists if (!sourceDatabase.CollectionExists(options["collection"])) { Console.ForegroundColor = ConsoleColor.Red; Console.WriteLine("Collection [ " + options["collection"] + " ] does not exists on the specified database"); Console.ForegroundColor = ConsoleColor.White; System.Environment.Exit(-104); } if (options["format"].ToUpper() == "CSV") { // Loading Export Configuration from XML File if (!JsonToCSV.LoadExportLayout(options["layoutFile"])) { // Error Checking Console.ForegroundColor = ConsoleColor.Red; Console.WriteLine("Error Loading Export Layout"); Console.WriteLine("Message : " + JsonToCSV.errorMessage); Console.ForegroundColor = ConsoleColor.White; System.Environment.Exit(-105); } } // Setting up MongoDB Cursor MongoCursor cursor = sourceDatabase.GetCollection <BsonDocument> (options["collection"]).Find(query); cursor.SetFlags(QueryFlags.NoCursorTimeout); // Checking for the need to apply limit int _limit = options.Get <int>("limit", -1); if (_limit != -1) { cursor.SetLimit(_limit); } // Counters int recordsProcessed = 0; // JSON Settings to keep the "JSON" output as "Strict" var jsonSettings = new JsonWriterSettings() { OutputMode = JsonOutputMode.Strict }; // File Writer using (StreamWriter fWriter = new StreamWriter(options["outputFile"], false, Encoding.UTF8)) { // Auto Flush fWriter.AutoFlush = true; // Output File Line string fileLine = String.Empty; // Should we add headers to the output CSV file? 
if (options["format"].ToUpper() == "CSV" && options.Get <bool>("addHeader", false)) { // Writing Headers fWriter.WriteLine(JsonToCSV.Fields); } // Iterating over documents found using the query foreach (BsonDocument document in cursor) { // Picking which export method will be used if (options["format"].ToUpper() == "CSV") { // Extracting data from it fileLine = JsonToCSV.BsonToCSV(document); } else { fileLine = document.ToJson(jsonSettings); } // Checking for errors if (String.IsNullOrWhiteSpace(fileLine)) { continue; } // Writing to output csv fWriter.WriteLine(fileLine.Replace(System.Environment.NewLine, "<br>")); // Counting if (recordsProcessed++ % 100 == 0) { Console.WriteLine("Processed : " + recordsProcessed); } } } Logger.Info("End"); }
/// <summary> /// Checks the command line params.<para/> /// arguments format: key=value or --key value /// </summary> /// <param name="args">The args.</param> internal static FlexibleOptions CheckCommandLineParams (string[] args, bool thrownOnError) { FlexibleOptions mergedOptions = null; FlexibleOptions argsOptions = null; FlexibleOptions localOptions = new FlexibleOptions (); FlexibleOptions externalLoadedOptions = null; try { // parse local configuration file // display the options listed in the configuration file try { var appSettings = System.Configuration.ConfigurationManager.AppSettings; foreach (var k in appSettings.AllKeys) { localOptions.Set (k, appSettings[k]); } } catch (Exception appSettingsEx) { if (thrownOnError) throw; GetLogger ().Warn (appSettingsEx); } // parse console arguments // parse arguments like: key=value argsOptions = ParseCommandLineArguments (args); // merge arguments with app.config options. Priority: arguments > app.config mergedOptions = FlexibleOptions.Merge (localOptions, argsOptions); // adjust alias for web hosted configuration file if (String.IsNullOrEmpty (mergedOptions.Get ("config"))) mergedOptions.Set ("config", mergedOptions.Get ("S3ConfigurationPath", mergedOptions.Get ("webConfigurationFile"))); // load and parse web hosted configuration file (priority order: argsOptions > localOptions) string externalConfigFile = mergedOptions.Get ("config", ""); bool configAbortOnError = mergedOptions.Get ("configAbortOnError", true); if (!String.IsNullOrWhiteSpace (externalConfigFile)) { foreach (var file in externalConfigFile.Trim(' ', '\'', '"', '[', ']').Split (',', ';')) { GetLogger ().Debug ("Loading configuration file from {0} ...", externalConfigFile); externalLoadedOptions = FlexibleOptions.Merge (externalLoadedOptions, LoadExtenalConfigurationFile (file.Trim (' ', '\'', '"'), configAbortOnError)); } } } catch (Exception ex) { // initialize log before dealing with exceptions if (mergedOptions != null) InitializeLog (mergedOptions.Get ("logFilename"), mergedOptions.Get ("logLevel", "Info"), InitOptions, mergedOptions); if (thrownOnError) throw; GetLogger ().Error (ex); } // merge options with the following priority: // 1. console arguments // 2. external file with json configuration object (local or web) // 3. local configuration file (app.config or web.config) mergedOptions = FlexibleOptions.Merge (mergedOptions, externalLoadedOptions, argsOptions); // reinitialize log options if different from local configuration file InitializeLog (mergedOptions.Get ("logFilename"), mergedOptions.Get ("logLevel", "Info"), InitOptions, mergedOptions); // return final merged options ProgramOptions = mergedOptions; return mergedOptions; }
private static FlexibleOptions ParseCommandLineArguments (string[] args)
{
    var argsOptions = new FlexibleOptions ();
    if (args != null)
    {
        string arg;
        string lastTag = null;
        for (int ix = 0; ix < args.Length; ix++)
        {
            arg = args[ix];
            // check for option with key=value syntax
            // also valid for --key:value
            int p = arg.IndexOf ('=');
            if (p > 0)
            {
                argsOptions.Set (arg.Substring (0, p).Trim ().TrimStart ('-', '/'), arg.Substring (p + 1).Trim ());
                lastTag = null;
                continue;
            }
            // search for a tag starting with a special character
            if (arg.StartsWith ("-", StringComparison.Ordinal) || arg.StartsWith ("/", StringComparison.Ordinal))
            {
                lastTag = arg.Trim ().TrimStart ('-', '/');
                argsOptions.Set (lastTag, "true");
                continue;
            }
            // set value of last tag
            if (lastTag != null)
            {
                argsOptions.Set (lastTag, arg.Trim ());
            }
        }
    }
    return argsOptions;
}
/// <summary> /// Migrates data and indexes of all collections of a certain database, to another /// </summary> /// <param name="sourceServer">Source mongodb server - Where the data will come from.</param> /// <param name="targetServer">Target mongodb server - Where the data will go to.</param> /// <param name="sourceDatabases">The source databases.</param> /// <param name="targetDatabases">The target databases.</param> /// <param name="collections">The collections.</param> /// <param name="insertBatchSize">Size (in records) of the chunk of data that will be inserted per batch.</param> /// <param name="copyIndexes">True if the indexes should be copied aswell, false otherwise.</param> /// <param name="dropCollections">The drop collections.</param> /// <param name="threads">The threads.</param> public static void DatabaseCopy(MongoServer sourceServer, MongoServer targetServer, List <string> sourceDatabases, List <string> targetDatabases, List <string> collections, int insertBatchSize = -1, bool copyIndexes = true, bool dropCollections = false, int threads = 1, FlexibleOptions options = null) { if (threads <= 1) { threads = 1; } // check if we are on the same server! bool sameServer = ServersAreEqual(sourceServer, targetServer); // create our thread manager and start producing tasks... using (var mgr = new MongoToolsLib.SimpleHelpers.ParallelTasks <CopyInfo> (0, threads, 1000, CollectionCopy)) { // list databases foreach (var db in ListDatabases(sourceServer, targetServer, sourceDatabases, targetDatabases)) { foreach (var col in ListCollections(db.Item1, collections)) { // sanity checks if (sameServer && db.Item1 == db.Item2 && col.Item1 == col.Item2) { NLog.LogManager.GetLogger("DatabaseCopy").Warn("Skiping collection, since it would be copied to itself! Database: {0}, Collection: {1}", db.Item1, col.Item1); continue; //throw new Exception ("Source and target servers and databases are the same!"); } // process task mgr.AddTask(new CopyInfo { SourceDatabase = db.Item1, TargetDatabase = db.Item2, SourceCollection = col.Item1, TargetCollection = col.Item2, BatchSize = insertBatchSize, CopyIndexes = copyIndexes, DropCollections = dropCollections, Options = options }); } } mgr.CloseAndWait(); } }
/// <summary> /// Parses out the Arguments received from the "CLI" /// </summary> /// <param name="args">Array of arguments received from the "CLI"</param> private static void ParseArguments(FlexibleOptions options) { // parse arguments _sourceUri = options["source"]; _sourceServer = options["source-server"]; _sourceUsername = options["source-username"]; _sourcePassword = options["source-password"]; _sourceAuthDatabase = options.Get("source-auth-database", options["auth-database-name-source"]); _targetUri = options["target"]; _targetServer = options["target-server"]; _targetUsername = options["target-username"]; _targetPassword = options["target-password"]; _targetAuthDatabase = options.Get("target-auth-database", options["auth-database-name-target"]); _insertBatchSize = options.Get("batch-size", options.Get("insert-batch-size", -1)); _threads = options.Get("threads", 1); _copyIndexes = options.Get("copy-indexes", false); _dropCollections = options.Get("drop-collections", false); _skipExisting = options.Get("skip-existing", false); _skipCount = options.Get("skip-count", false); _eraseObjectId = options.Get("refresh-object-id", false); // check parameter databases _sourceDatabases = ParseArgumentAsList(options, "databases").Concat(ParseArgumentAsList(options, "source-database")).Distinct().ToList(); _targetDatabases = ParseArgumentAsList(options, "target-database"); // check collections parameter _collections = ParseArgumentAsList(options, "collections"); _targetCollection = options.Get("target-collection"); // Sanity Checks if (String.IsNullOrEmpty(_sourceUri) && String.IsNullOrEmpty(_sourceServer)) { logger.Error("No source mongodb server connection information provided: use the argument 'source' to provide a mongodb uri with the connection information"); ConsoleUtils.CloseApplication(-101, true); } if (String.IsNullOrEmpty(_targetUri) && String.IsNullOrEmpty(_targetServer)) { logger.Error("No source mongodb server connection information provided: use the argument 'target' to provide a mongodb uri with the connection information"); ConsoleUtils.CloseApplication(-102, true); } if (_sourceDatabases.Count == 0) { logger.Error("No database selected: use the argument 'databases' to provide a list of databases"); ConsoleUtils.CloseApplication(-103, true); } // if no target database is provided, lets use the sourcedatabases if (_targetDatabases.Count == 0) { _targetDatabases = null; } else { // if we have target database names: // 1. sourceDatabase cannot contain wildcard if (_sourceDatabases.Any(i => SharedMethods.HasWildcard(i))) { logger.Error("Wildcard cannot be used in source database names if a list of target databases is provided!"); ConsoleUtils.CloseApplication(-104, true); } // 2. sourceDatabase cannot contain wildcard if (_sourceDatabases.Any(i => SharedMethods.HasWildcard(i))) { logger.Error("Wildcard cannot be used in target database names!"); ConsoleUtils.CloseApplication(-105, true); } // 3. 
check for database mapping discrepancy if (_sourceDatabases.Count != _targetDatabases.Count) { logger.Error("Different number of source and target databases detected: use the argument 'databases' and 'targetDatabases' to provide a list of databases"); ConsoleUtils.CloseApplication(-106, true); } if (_sourceDatabases.Any(i => i.IndexOf('=') > 0)) { logger.Error("Invalid use of target database parameter: if the argument 'databases' has a key=value format to indicate the target database name, the argument 'targetDatabases' cannot be used."); ConsoleUtils.CloseApplication(-107, true); } } // Collections Check // If we received a LIST of source collections but also received a TARGET collection, we should halt. // Only a collection-by-collection copy should be make when picking a target collection if (_collections != null && _collections.Count > 1 && !String.IsNullOrWhiteSpace(_targetCollection)) { logger.Error("Arguments 'collections' and 'targetCollection' are exclusive. When providing a 'targetCollection' you can only copy a SINGLE collection to it at a time."); } }
private FlexibleOptions ParseFileContent (string content)
{
    var options = new FlexibleOptions ();
    if (String.IsNullOrEmpty (content))
        return options;
    // prepare content
    content = content.Trim ();
    try
    {
        // detect xml
        if (content.StartsWith ("<"))
        {
            var xmlDoc = System.Xml.Linq.XDocument.Parse (content);
            var root = xmlDoc.Descendants ("config").FirstOrDefault ();
            if (root != null && root.HasElements)
            {
                foreach (var i in root.Elements ())
                {
                    options.Set (i.Name.ToString (), i.Value);
                }
            }
        }
        // parse as json
        else
        {
            var json = Newtonsoft.Json.Linq.JObject.Parse (content);
            foreach (var i in json)
            {
                options.Set (i.Key, i.Value.ToString (Newtonsoft.Json.Formatting.None));
            }
        }
    }
    catch (Exception ex)
    {
        if (ThrownOnError)
            throw;
        RaiseErrorEvent (ex);
        return null;
    }
    return options;
}
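A sketch of the two accepted file shapes (hypothetical keys and values; the integer conversion assumes FlexibleOptions.Get<T> converts stored strings on read):

// sketch of the two accepted configuration file shapes - hypothetical keys and values
// xml:  <config><logLevel>Debug</logLevel><threads>4</threads></config>
// json: { "logLevel": "Debug", "threads": 4 }
// both parse to options where Get ("logLevel") == "Debug" and Get<int> ("threads", 1) == 4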
/// <summary> /// Main program entry point. /// </summary> static void Main(string[] args) { // set error exit code System.Environment.ExitCode = -50; try { // load configurations ProgramOptions = ConsoleUtils.Initialize (args, true); // start execution Execute (ProgramOptions); // check before ending for waitForKeyBeforeExit option if (ProgramOptions.Get ("waitForKeyBeforeExit", false)) ConsoleUtils.WaitForAnyKey (); } catch (Exception ex) { LogManager.GetCurrentClassLogger ().Fatal (ex); // check before ending for waitForKeyBeforeExit option if (ProgramOptions.Get ("waitForKeyBeforeExit", false)) ConsoleUtils.WaitForAnyKey (); ConsoleUtils.CloseApplication (-60, true); } // set success exit code ConsoleUtils.CloseApplication (0, false); }
private static FlexibleOptions parseFile (string content)
{
    var options = new FlexibleOptions ();
    // detect xml
    if (content.TrimStart ().StartsWith ("<"))
    {
        var xmlDoc = System.Xml.Linq.XDocument.Parse (content);
        var root = xmlDoc.Descendants ("config").FirstOrDefault ();
        if (root != null && root.HasElements)
        {
            foreach (var i in root.Elements ())
            {
                options.Set (i.Name.ToString (), i.Value);
            }
        }
    }
    // else parse as json
    else
    {
        var json = Newtonsoft.Json.Linq.JObject.Parse (content);
        foreach (var i in json)
        {
            options.Set (i.Key, i.Value.ToString (Newtonsoft.Json.Formatting.None));
        }
    }
    return options;
}
private static FlexibleOptions ParseCommandLineArguments (string[] args)
{
    var argsOptions = new FlexibleOptions ();
    if (args != null)
    {
        string arg;
        bool openTag = false;
        string lastTag = null;
        for (int ix = 0; ix < args.Length; ix++)
        {
            arg = args[ix];
            // check for option with key=value syntax (restriction: the previous tag must not be an open tag)
            // also valid for --key:value
            bool hasStartingMarker = arg.StartsWith ("-", StringComparison.Ordinal) || arg.StartsWith ("/", StringComparison.Ordinal);
            int p = arg.IndexOf ('=');
            if (p > 0 && (hasStartingMarker || !openTag))
            {
                argsOptions.Set (arg.Substring (0, p).Trim ().TrimStart ('-', '/'), arg.Substring (p + 1).Trim ());
                lastTag = null;
                openTag = false;
            }
            // search for a tag starting with a special character
            // a linux path should still be valid: -path /home/file
            else if (hasStartingMarker && !(openTag && arg[0] == '/'))
            {
                lastTag = arg.Trim ().TrimStart ('-', '/');
                argsOptions.Set (lastTag, "true");
                openTag = true;
            }
            // set value of last tag
            else if (lastTag != null)
            {
                argsOptions.Set (lastTag, arg.Trim ());
                openTag = false;
            }
        }
    }
    return argsOptions;
}
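A usage sketch (hypothetical argument list) tracing the cases this version handles, including the open-tag rule that keeps linux paths intact:

// usage sketch - hypothetical arguments, traced against the logic above
var parsed = ParseCommandLineArguments (new[] { "--threads=4", "-verbose", "-path", "/home/file", "key=value" });
// parsed.Get ("threads") == "4"          (key=value with marker)
// parsed.Get ("verbose") == "true"       (bare flag)
// parsed.Get ("path")    == "/home/file" (open tag followed by a linux path)
// parsed.Get ("key")     == "value"      (plain key=value)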
/// <summary> /// Parses out the Arguments received from the "CLI" /// </summary> /// <param name="args">Array of arguments received from the "CLI"</param> private static void ParseArguments(FlexibleOptions options) { // parse arguments _sourceUri = options["source"]; _sourceServer = options["sourceServer"]; _sourceUsername = options["sourceUsername"]; _sourcePassword = options["sourcePassword"]; _sourceAuthDatabase = options.Get("sourceAuthDatabase", options["authDatabaseNameSource"]); _targetUri = options["target"]; _targetServer = options["targetServer"]; _targetUsername = options["targetUsername"]; _targetPassword = options["targetPassword"]; _targetAuthDatabase = options.Get("targetAuthDatabase", options["authDatabaseNameTarget"]); _insertBatchSize = options.Get("batch-size", options.Get("insertBatchSize", -1)); _threads = options.Get("threads", 1); _copyIndexes = options.Get("copy-indexes", false); _dropCollections = options.Get("drop-collections", false); _skipExisting = options.Get("skip-existing", false); // check parameter databases _sourceDatabases = ParseArgumentAsList(options, "databases").Concat(ParseArgumentAsList(options, "sourceDatabase")).Distinct().ToList(); _targetDatabases = ParseArgumentAsList(options, "targetDatabase"); // check collections parameter _collections = ParseArgumentAsList(options, "collections"); //******************* //** sanity checks ** //******************* if (String.IsNullOrEmpty(_sourceUri) && String.IsNullOrEmpty(_sourceServer)) { logger.Error("No source mongodb server connection information provided: use the argument 'source' to provide a mongodb uri with the connection information"); ConsoleUtils.CloseApplication(-101, true); } // TODO: target server should defaults to the source server if none is provided... if (String.IsNullOrEmpty(_targetUri) && String.IsNullOrEmpty(_targetServer)) { logger.Error("No source mongodb server connection information provided: use the argument 'source' to provide a mongodb uri with the connection information"); ConsoleUtils.CloseApplication(-102, true); } if (_sourceDatabases.Count == 0) { logger.Error("No database selected: use the argument 'databases' to provide a list of databases"); ConsoleUtils.CloseApplication(-103, true); } // if no target database is provided, lets use the sourcedatabases if (_targetDatabases.Count == 0) { _targetDatabases = null; // TODO: if same server... throw an error! // ... } else { // if we have target database names: // 1. sourceDatabase cannot contain wildcard if (_sourceDatabases.Any(i => SharedMethods.HasWildcard(i))) { logger.Error("Wildcard cannot be used in source database names if a list of target databases is provided!"); ConsoleUtils.CloseApplication(-104, true); } // 2. sourceDatabase cannot contain wildcard if (_sourceDatabases.Any(i => SharedMethods.HasWildcard(i))) { logger.Error("Wildcard cannot be used in target database names!"); ConsoleUtils.CloseApplication(-105, true); } // 2. check for database mapping discrepancy if (_sourceDatabases.Count != _targetDatabases.Count) { logger.Error("Different number of source and target databases detected: use the argument 'databases' and 'targetDatabases' to provide a list of databases"); ConsoleUtils.CloseApplication(-106, true); } if (_sourceDatabases.Any(i => i.IndexOf('=') > 0)) { logger.Error("Invalid use of target database parameter: if the argument 'databases' has a key=value format to indicate the target database name, the argument 'targetDatabases' cannot be used."); ConsoleUtils.CloseApplication(-106, true); } } }
/// <summary>
/// Log initialization.
/// </summary>
internal static void InitializeLog (string logFileName = null, string logLevel = null, InitializationOptions initOptions = null, FlexibleOptions appOptions = null)
{
    // default parameter initialization from the config file
    if (String.IsNullOrEmpty (logFileName))
        logFileName = _logFileName ?? System.Configuration.ConfigurationManager.AppSettings["logFilename"];
    if (String.IsNullOrEmpty (logFileName))
        logFileName = ("${basedir}/log/" + typeof (ConsoleUtils).Namespace.Replace (".SimpleHelpers", "") + ".log");
    if (String.IsNullOrEmpty (logLevel))
        logLevel = _logLevel ?? (System.Configuration.ConfigurationManager.AppSettings["logLevel"] ?? "Info");

    // check if the log was already initialized with the same options
    if (_logFileName == logFileName && _logLevel == logLevel)
        return;

    // try to parse the log level, falling back to Info
    LogLevel currentLogLevel;
    try { currentLogLevel = LogLevel.FromString (logLevel); }
    catch { currentLogLevel = LogLevel.Info; }

    // save the current log configuration
    _logFileName = logFileName;
    _logLevel = currentLogLevel.ToString ();

    // check initialization options
    var localOptions = initOptions != null ? initOptions.Clone () : new InitializationOptions ();

    // adjust options based on arguments
    if (appOptions != null)
    {
        if (!localOptions.DisableLogFile.HasValue && appOptions.HasOption ("DisableLogFile"))
            localOptions.DisableLogFile = appOptions.Get ("DisableLogFile", false);
        if (localOptions.EnableLogTargets == null && !String.IsNullOrEmpty (appOptions.Get ("EnableLogTargets")))
            localOptions.EnableLogTargets = appOptions.GetAsList ("EnableLogTargets").Where (i => !String.IsNullOrWhiteSpace (i)).Select (i => i.Trim ()).ToArray ();
        if (localOptions.DisableLogTargets == null && !String.IsNullOrEmpty (appOptions.Get ("DisableLogTargets")))
            localOptions.DisableLogTargets = appOptions.GetAsList ("DisableLogTargets").Where (i => !String.IsNullOrWhiteSpace (i)).Select (i => i.Trim ()).ToArray ();
    }

    // prepare the list of enabled targets
    HashSet<string> enabledTargets;
    // if an explicit list of enabled log targets was provided, use it
    if (localOptions.EnableLogTargets != null && localOptions.EnableLogTargets.Count > 0)
    {
        enabledTargets = new HashSet<string> (localOptions.EnableLogTargets, StringComparer.OrdinalIgnoreCase);
    }
    // else start with the defaults and remove disabled targets
    else
    {
        enabledTargets = new HashSet<string> (StringComparer.OrdinalIgnoreCase) { "console", "file" };
        // add targets from the initialization options
        if (localOptions.Targets != null)
        {
            foreach (var i in localOptions.Targets)
            {
                foreach (var n in GetNLogTargetName (i))
                    enabledTargets.Add (n);
            }
        }
        // remove disabled targets
        if (localOptions.DisableLogTargets != null)
            foreach (var i in localOptions.DisableLogTargets)
                enabledTargets.Remove (i);
        if (localOptions.DisableLogFile ?? false)
            enabledTargets.Remove ("file");
    }

    // prepare the log configuration
    var config = new NLog.Config.LoggingConfiguration ();

    // console output
    if (!Console.IsOutputRedirected && enabledTargets.Contains ("console"))
    {
        var consoleTarget = new NLog.Targets.ColoredConsoleTarget ();
        consoleTarget.Layout = "${longdate}\t${callsite}\t${level}\t${message}\t${onexception: \\:[Exception] ${exception:format=tostring}}";
        config.AddTarget ("console", consoleTarget);
        var rule1 = new NLog.Config.LoggingRule ("*", LogLevel.Trace, consoleTarget);
        config.LoggingRules.Add (rule1);
    }

    // file output
    if (enabledTargets.Contains ("file"))
    {
        var fileTarget = new NLog.Targets.FileTarget ();
        fileTarget.FileName = logFileName;
        fileTarget.Layout = "${longdate}\t${callsite}\t${level}\t\"${message}${onexception: \t [Exception] ${exception:format=tostring}}\"";
        fileTarget.ConcurrentWrites = true;
        fileTarget.ConcurrentWriteAttemptDelay = 10;
        fileTarget.ConcurrentWriteAttempts = 8;
        fileTarget.AutoFlush = true;
        fileTarget.KeepFileOpen = true;
        fileTarget.DeleteOldFileOnStartup = false;
        fileTarget.ArchiveAboveSize = (localOptions.MaxLogFileSize > 0) ? localOptions.MaxLogFileSize : 4 * 1024 * 1024; // 4 MB
        fileTarget.MaxArchiveFiles = (localOptions.MaxArchiveLogFiles > 0) ? localOptions.MaxArchiveLogFiles : 10;
        fileTarget.ArchiveNumbering = NLog.Targets.ArchiveNumberingMode.DateAndSequence;
        fileTarget.ArchiveDateFormat = "yyyyMMdd";
        fileTarget.ArchiveFileName = System.IO.Path.ChangeExtension (logFileName, ".{#}" + System.IO.Path.GetExtension (logFileName));
        // set file output to be async (commented out since it doesn't work well on mono)
        // var wrapper = new NLog.Targets.Wrappers.AsyncTargetWrapper (fileTarget);
        config.AddTarget ("file", fileTarget);
        // route all loggers at the configured level to the file target
        var rule2 = new NLog.Config.LoggingRule ("*", currentLogLevel, fileTarget);
        config.LoggingRules.Add (rule2);
    }

    // external log targets
    if (localOptions.Targets != null)
    {
        foreach (var t in localOptions.Targets)
        {
            if (GetNLogTargetName (t).Any (i => enabledTargets.Contains (i)))
            {
                config.AddTarget (t);
                config.LoggingRules.Add (new NLog.Config.LoggingRule ("*", currentLogLevel, t));
            }
        }
    }

    // apply the configuration
    LogManager.Configuration = config;
}
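For context, a sketch of how the target-toggling options read above could be supplied at runtime. The helper name and the idea of building the FlexibleOptions by hand are assumptions for illustration; InitializeLog itself only reads the "DisableLogFile", "EnableLogTargets" and "DisableLogTargets" keys.

// Illustrative sketch (assumed setup): toggling log targets via app options.
// ConfigureLoggingExample is a hypothetical helper, not part of the original code.
static void ConfigureLoggingExample ()
{
    var opts = new FlexibleOptions ();
    opts.Set ("DisableLogFile", "true");       // removes the "file" target
    opts.Set ("DisableLogTargets", "console"); // removes the "console" target
    // re-run initialization with the adjusted options
    InitializeLog (logFileName: null, logLevel: "Debug", initOptions: null, appOptions: opts);
}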
/// <summary>
/// Migrates data and indexes of all collections of a certain database to another.
/// </summary>
/// <param name="sourceServer">Source MongoDB server - where the data will come from.</param>
/// <param name="targetServer">Target MongoDB server - where the data will go to.</param>
/// <param name="sourceDatabases">The source databases.</param>
/// <param name="targetDatabases">The target databases.</param>
/// <param name="collections">The collections to copy (names or masks).</param>
/// <param name="targetCollection">Optional target collection name; requires exactly one matching source collection.</param>
/// <param name="insertBatchSize">Size (in records) of the chunk of data that will be inserted per batch.</param>
/// <param name="copyIndexes">True if the indexes should be copied as well, false otherwise.</param>
/// <param name="dropCollections">True to drop the target collections before copying.</param>
/// <param name="skipCount">True to skip counting documents in the source collections.</param>
/// <param name="eraseObjectId">True to erase the ObjectId of copied documents.</param>
/// <param name="threads">Number of worker threads.</param>
/// <param name="options">Additional options.</param>
public static void DatabaseCopy(MongoServer sourceServer, MongoServer targetServer, List<string> sourceDatabases, List<string> targetDatabases, List<string> collections, string targetCollection, int insertBatchSize = -1, bool copyIndexes = true, bool dropCollections = false, bool skipCount = false, bool eraseObjectId = false, int threads = 1, FlexibleOptions options = null)
{
    if (threads <= 1)
    {
        threads = 1;
    }

    // check if we are on the same server!
    bool sameServer = ServersAreEqual(sourceServer, targetServer);

    // validate that exactly one source collection matches when a "target collection" was provided
    var databases = ListDatabases(sourceServer, targetServer, sourceDatabases, targetDatabases);
    var matchingCollections = ListCollections(databases.First().Item1, collections, targetCollection).ToList();
    if (matchingCollections.Count > 1 && !String.IsNullOrWhiteSpace(targetCollection))
    {
        // error: a 'TargetCollection' can only be specified when a single collection matches the mask or was received as argument (as its source)
        NLog.LogManager.GetLogger("DatabaseCopy").Error("In order to specify a 'TargetCollection' there should be only one collection matching the mask or received as argument (as its source)");
        return;
    }

    // create our thread manager and start producing tasks...
    using (var mgr = new MongoToolsLib.SimpleHelpers.ParallelTasks<CopyInfo>(0, threads, 1000, CollectionCopy))
    {
        // list databases
        foreach (var db in ListDatabases(sourceServer, targetServer, sourceDatabases, targetDatabases))
        {
            foreach (var col in ListCollections(db.Item1, collections, targetCollection))
            {
                // sanity check: avoid copying a collection onto itself
                if (sameServer && db.Item1.ToString() == db.Item2.ToString() && col.Item1.ToString() == col.Item2.ToString())
                {
                    NLog.LogManager.GetLogger("DatabaseCopy").Warn("Skipping collection, since it would be copied to itself! Database: {0}, Collection: {1}", db.Item1, col.Item1);
                    continue;
                }
                // queue the copy task
                mgr.AddTask(new CopyInfo
                {
                    SourceDatabase = db.Item1,
                    TargetDatabase = db.Item2,
                    SourceCollection = col.Item1,
                    TargetCollection = col.Item2,
                    BatchSize = insertBatchSize,
                    CopyIndexes = copyIndexes,
                    DropCollections = dropCollections,
                    EraseObjectId = eraseObjectId,
                    Options = options,
                    SkipCount = skipCount
                });
            }
        }
        mgr.CloseAndWait();
    }
}
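A usage sketch for the server-level overload above. The connection strings and the MongoClient/GetServer() calls assume the legacy MongoDB C# driver; the helper name is hypothetical.

// Usage sketch (illustrative): copying two collections between servers with four worker threads.
// Hosts, database names and DatabaseCopyExample are assumptions, not original code.
static void DatabaseCopyExample()
{
    var source = new MongoClient("mongodb://source-host:27017").GetServer();
    var target = new MongoClient("mongodb://target-host:27017").GetServer();
    DatabaseCopy(source, target,
        sourceDatabases: new List<string> { "app_db" },
        targetDatabases: new List<string> { "app_db_copy" },
        collections: new List<string> { "users", "orders" },
        targetCollection: null, // more than one source collection, so no single target name
        insertBatchSize: 500,
        copyIndexes: true,
        dropCollections: false,
        threads: 4);
}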
/// <summary>
/// Migrates data and indexes of all collections of a certain database to another.
/// </summary>
/// <param name="sourceDatabase">Source database - where the data will come from.</param>
/// <param name="targetDatabase">Target database - where the data will go to.</param>
/// <param name="insertBatchSize">Size (in records) of the chunk of data that will be inserted per batch.</param>
/// <param name="copyIndexes">True if the indexes should be copied as well, false otherwise.</param>
/// <param name="dropCollections">True to drop the target collections before copying.</param>
/// <param name="threads">Number of worker threads.</param>
/// <param name="options">Additional options.</param>
public static void DatabaseCopy(MongoDatabase sourceDatabase, MongoDatabase targetDatabase, int insertBatchSize = -1, bool copyIndexes = true, bool dropCollections = false, int threads = 1, FlexibleOptions options = null)
{
    var collections = sourceDatabase.GetCollectionNames().ToList();
    if (threads <= 1)
    {
        // single-threaded: copy the collections one by one
        foreach (var collectionName in collections)
        {
            SharedMethods.CopyCollection(sourceDatabase, targetDatabase, collectionName, String.Empty, insertBatchSize, copyIndexes, dropCollections, options);
        }
    }
    else
    {
        // multi-threaded processing of each copy request
        MongoToolsLib.SimpleHelpers.ParallelTasks<string>.Process(collections, 0, threads, collectionName =>
        {
            // console feedback
            Console.WriteLine("Migrating Collection : " + collectionName);
            SharedMethods.CopyCollection(sourceDatabase, targetDatabase, collectionName, String.Empty, insertBatchSize, copyIndexes, dropCollections, options);
        });
    }
}
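A matching sketch for the database-level overload. The database names, the GetDatabase() calls (legacy MongoDB C# driver) and the helper name are assumptions for illustration.

// Usage sketch (illustrative): copying every collection of one database into another on the same server.
// DatabaseCopyByDatabaseExample and the database names are assumptions, not original code.
static void DatabaseCopyByDatabaseExample(MongoServer server)
{
    var source = server.GetDatabase("app_db");
    var target = server.GetDatabase("app_db_backup");
    // copy all collections with their indexes, using two worker threads
    DatabaseCopy(source, target, insertBatchSize: 1000, copyIndexes: true, dropCollections: false, threads: 2);
}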