private Dictionary <BreakerOperationType, int> GetBreakerOperationTypeLookup(DbAdapterContainer dbAdapterContainer)
        {
            MeterData.BreakerOperationTypeDataTable breakerOperationTypeTable = new MeterData.BreakerOperationTypeDataTable();
            BreakerOperationType breakerOperationType = default(BreakerOperationType);

            foreach (BreakerOperationType operationType in Enum.GetValues(typeof(BreakerOperationType)))
            {
                breakerOperationTypeTable.AddBreakerOperationTypeRow(operationType.ToString(), operationType.ToString());
            }

            BulkLoader bulkLoader = new BulkLoader();

            bulkLoader.Connection     = dbAdapterContainer.Connection;
            bulkLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;
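
            // Assumed behavior of BulkLoader.MergeTableFormat: the loader substitutes {0} with the
            // destination table name and {1} with the staging table it bulk-copies the rows into, so
            // the MERGE below only inserts enum names that are not already present in the table.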

            bulkLoader.MergeTableFormat = "MERGE INTO {0} AS Target " +
                                          "USING {1} AS Source " +
                                          "ON Source.Name = Target.Name " +
                                          "WHEN NOT MATCHED THEN " +
                                          "    INSERT (Name, Description) " +
                                          "    VALUES (Source.Name, Source.Description);";

            bulkLoader.Load(breakerOperationTypeTable);

            dbAdapterContainer.GetAdapter <BreakerOperationTypeTableAdapter>().Fill(breakerOperationTypeTable);
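
            // The returned dictionary maps each BreakerOperationType value to its ID in the database so
            // that operation records can reference the BreakerOperationType table. Illustrative use only
            // (the caller-side names below are hypothetical):
            //
            //     typeLookup = GetBreakerOperationTypeLookup(dbAdapterContainer);
            //     breakerOperationRow.BreakerOperationTypeID = typeLookup[breakerOperation.Type];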

            return(breakerOperationTypeTable
                   .Where(row => Enum.TryParse(row.Name, out breakerOperationType))
                   .Select(row => Tuple.Create(breakerOperationType, row.ID))
                   .ToDictionary(tuple => tuple.Item1, tuple => tuple.Item2));
        }
Example #2
        private static void LoadEmail(int eventID, List <Recipient> recipients, string subject, string body)
        {
            EventTableAdapter eventAdapter = s_dbAdapterContainer.GetAdapter <EventTableAdapter>();

            MeterData.EventRow       eventRow    = eventAdapter.GetDataByID(eventID)[0];
            MeterData.EventDataTable systemEvent = eventAdapter.GetSystemEvent(eventRow.StartTime, eventRow.EndTime, s_timeTolerance);

            FaultEmailTableAdapter faultEmailAdapter = s_dbAdapterContainer.GetAdapter <FaultEmailTableAdapter>();

            FaultLocationData.FaultEmailDataTable      faultEmailTable      = new FaultLocationData.FaultEmailDataTable();
            FaultLocationData.EventFaultEmailDataTable eventFaultEmailTable = new FaultLocationData.EventFaultEmailDataTable();

            DateTime now    = TimeZoneInfo.ConvertTimeFromUtc(DateTime.UtcNow, s_timeZone);
            string   toLine = string.Join("; ", recipients.Select(recipient => recipient.Email));

            BulkLoader bulkLoader;

            faultEmailTable.AddFaultEmailRow(now, toLine, subject, body);
            faultEmailAdapter.Update(faultEmailTable);
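
            // Link every event on the same line as the faulted event to the email just written;
            // assumes faultEmailAdapter.Update populated the identity value read from faultEmailTable[0].ID.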

            foreach (MeterData.EventRow evt in systemEvent)
            {
                if (eventRow.LineID == evt.LineID)
                {
                    eventFaultEmailTable.AddEventFaultEmailRow(evt.ID, faultEmailTable[0].ID);
                }
            }

            bulkLoader                = new BulkLoader();
            bulkLoader.Connection     = s_dbAdapterContainer.Connection;
            bulkLoader.CommandTimeout = s_dbAdapterContainer.CommandTimeout;
            bulkLoader.Load(eventFaultEmailTable);
        }
        public override void Load(DbAdapterContainer dbAdapterContainer)
        {
            MeterData.EventDataTable eventTable;
            Dictionary <EventKey, MeterData.EventRow> eventLookup;

            MeterData.EventRow eventRow;
            BulkLoader         bulkLoader;

            eventTable  = dbAdapterContainer.GetAdapter <EventTableAdapter>().GetDataByFileGroup(m_meterDataSet.FileGroup.ID);
            eventLookup = eventTable.Where(evt => evt.MeterID == m_meterDataSet.Meter.ID).ToDictionary(CreateEventKey);

            foreach (Tuple <EventKey, MeterData.BreakerOperationRow> breakerOperation in m_breakerOperations)
            {
                if (eventLookup.TryGetValue(breakerOperation.Item1, out eventRow))
                {
                    breakerOperation.Item2.EventID = eventRow.ID;
                    m_breakerOperationTable.AddBreakerOperationRow(breakerOperation.Item2);
                }
            }

            bulkLoader                = new BulkLoader();
            bulkLoader.Connection     = dbAdapterContainer.Connection;
            bulkLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;
            bulkLoader.Load(m_breakerOperationTable);
        }
Example #4
        private Dictionary <EventClassification, int> GetEventTypeLookup(DbAdapterContainer dbAdapterContainer)
        {
            MeterData.EventTypeDataTable eventTypeTable      = new MeterData.EventTypeDataTable();
            EventClassification          eventClassification = default(EventClassification);

            foreach (EventClassification classification in Enum.GetValues(typeof(EventClassification)))
            {
                eventTypeTable.AddEventTypeRow(classification.ToString(), classification.ToString());
            }

            BulkLoader bulkLoader = new BulkLoader();

            bulkLoader.Connection     = dbAdapterContainer.Connection;
            bulkLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;

            bulkLoader.MergeTableFormat = "MERGE INTO {0} AS Target " +
                                          "USING {1} AS Source " +
                                          "ON Source.Name = Target.Name " +
                                          "WHEN NOT MATCHED THEN " +
                                          "    INSERT (Name, Description) " +
                                          "    VALUES (Source.Name, Source.Description);";

            bulkLoader.Load(eventTypeTable);

            dbAdapterContainer.GetAdapter <EventTypeTableAdapter>().Fill(eventTypeTable);

            return(eventTypeTable
                   .Where(row => Enum.TryParse(row.Name, out eventClassification))
                   .Select(row => Tuple.Create(eventClassification, row.ID))
                   .ToDictionary(tuple => tuple.Item1, tuple => tuple.Item2));
        }
Example #5
        public static void Main(string[] args)
        {
            try
            {
                BulkLoader inserter;
                Console.WriteLine();
                Console.Title = "StackOverflow Data Dump Import v1.5";
                Console.WriteLine(Console.Title);
                Console.WriteLine();
                if (args.ToList().Find(a => a.ToLowerInvariant() == "help" || a.ToLowerInvariant() == "?") != null)
                {
                    RenderUsage();
                    return;
                }
                Configuration.Configuration config = new Configuration.Configuration(args);
                Console.WriteLine(config.ToString(false));

                if (config.GUI || args.Length == 0)
                {
                    Application.EnableVisualStyles();
                    Application.Run(new FrmUI(config)); // run the Windows Forms UI
                }
                else
                {
                    inserter = BulkLoader.Create(config);
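
                    // Surface bulk-copy errors on the console as rows are inserted.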

                    inserter.RowsInserted += (s, e) =>
                    {
                        if (e.Type == CopyEventType.Error)
                        {
                            Console.WriteLine(e.Message);
                        }
                    };

                    Stopwatch sw = new Stopwatch();

                    sw.Start();
                    inserter.ProcessJobs(config);
                    sw.Stop();

                    long count = inserter.Jobs.Select(j => j.Tasks.Sum(t => t.Count)).Sum();
                    Console.WriteLine(Resources.Rs_ImpComplete + "\r\n", count.ToString("#,##0"),
                                      sw.ElapsedMilliseconds / 1000f / 60f);
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine("\r\n{0}\r\n", ex.Message);
                Console.WriteLine(ex.StackTrace);
                var inner = ex.InnerException;
                while (inner != null)
                {
                    Console.WriteLine();
                    Console.WriteLine(inner.Message);
                    Console.WriteLine(inner.StackTrace);
                    inner = inner.InnerException;
                }
                RenderUsage();
            }
        }
Example #6
        protected virtual BulkLoadContext CreateBulkLoadContext(BulkLoader bulkLoader, string databaseName,
                                                                IConfiguration[] configurations, DataBlasterParameters parameters, ILogger logger)
        {
            var context = bulkLoader.NewBulkLoadContext(databaseName);

            context.Log = new SitecoreAndUnicornLog(LoggerFactory.GetLogger(GetType()), logger);

            context.AllowTemplateChanges = true;

            // In the initial release of Sitecore 9.3, field properties were changed (versioned to unversioned) in the core database, but the change was not done cleanly.
            // More than 3000 field records are still stored in the versioned table that should have been moved to the unversioned table.
            // Because we don't do bulk imports or many template changes against the core database during development, disable field cleanup for it by default to avoid deleting relevant content in the core database.
            // Sitecore's default deserialize operation does not perform this cleanup either.
            context.AllowCleanupOfFields       = !databaseName.Equals("core", StringComparison.InvariantCultureIgnoreCase);
            context.StageDataWithoutProcessing = parameters.StageDataWithoutProcessing;

            // Use the shotgun approach; removing items from caches one by one is too slow for a full deserialize.
            context.RemoveItemsFromCaches = false;

            context.UpdateHistory      = !SkipHistoryEngine;
            context.UpdatePublishQueue = !SkipPublishQueue;
            context.UpdateLinkDatabase = !SkipLinkDatabase &&
                                         configurations.Any(x => x.Resolve <ISyncConfiguration>().UpdateLinkDatabase);
            context.UpdateIndexes = !SkipIndexes &&
                                    configurations.Any(x => x.Resolve <ISyncConfiguration>().UpdateSearchIndex);

            return(context);
        }
        public override void Load(DbAdapterContainer dbAdapterContainer)
        {
            FaultLocationData.FaultGroupDataTable   faultGroupTable;
            FaultLocationData.FaultSegmentDataTable faultSegmentTable;
            FaultLocationData.FaultCurveDataTable   faultCurveTable;
            FaultLocationData.FaultSummaryDataTable faultSummaryTable;

            m_faultSummarizer.FillTables(dbAdapterContainer);

            faultGroupTable   = m_faultSummarizer.FaultGroupTable;
            faultSegmentTable = m_faultSummarizer.FaultSegmentTable;
            faultCurveTable   = m_faultSummarizer.FaultCurveTable;
            faultSummaryTable = m_faultSummarizer.FaultSummaryTable;

            if (faultSegmentTable.Count == 0 && faultCurveTable.Count == 0 && faultSummaryTable.Count == 0)
            {
                return;
            }

            Log.Info("Loading fault data into the database...");

            BulkLoader bulkLoader = new BulkLoader();

            bulkLoader.Connection     = dbAdapterContainer.Connection;
            bulkLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;
            bulkLoader.Load(faultGroupTable);
            bulkLoader.Load(faultSegmentTable);
            bulkLoader.Load(faultCurveTable);
            bulkLoader.Load(faultSummaryTable);

            Log.Info(string.Format("Loaded {0} faults into the database.", faultSummaryTable.Count));
        }
        /// <summary>
        /// Imports the file.
        /// </summary>
        /// <param name="usingInstance">using instance.</param>
        /// <param name="usingDatabase">using database.</param>
        /// <param name="dbUser">the database user.</param>
        /// <param name="dbPassword">the database password.</param>
        /// <param name="getFileName">get the name of the file.</param>
        /// <param name="andConfirmAnyChallenge">and confirm any challenge.</param>
        /// <returns>
        /// the current task
        /// </returns>
        public async Task Import(string usingInstance, string usingDatabase, string dbUser, string dbPassword, Func <string> getFileName, Func <string, bool> andConfirmAnyChallenge)
        {
            It.IsNull(getFileName)
            .AsGuard <ArgumentNullException>(nameof(getFileName));

            await Handler.RunAsyncOperation <Localised>(async() =>
            {
                Emitter.Publish("Selecting file...");

                var inputFilePath = getFileName();

                It.IsEmpty(inputFilePath)
                .AsGuard <OperationCanceledException, CommonLocalised>(CommonLocalised.CanceledOperation);

                //var candidateSourceName = Path.GetFileNameWithoutExtension(inputFilePath);


                // check for DB overwrite...
                Emitter.Publish("Checking for risk of overwrite...");

                var master = Provider.ConnectionToMaster(usingInstance, usingDatabase, dbUser, dbPassword);

                //if (Context.DataStoreExists(candidateSourceName, master))
                //{
                //    var format = Locals.GetString(Localised.AboutToOverwriteDBFormat);
                //    var msg = Format.String(format, candidateSourceName);
                //    (!andConfirmAnyChallenge(msg))
                //        .AsGuard<OperationCanceledException, CommonLocalised>(CommonLocalised.CanceledOperation);
                //}

                var source = Provider.ConnectionToSource(usingInstance, usingDatabase, dbUser, dbPassword);

                await BulkLoader.Load(source, master, inputFilePath);
            });
        }
        protected virtual void LoadItems(IConfiguration[] configurations, DataBlasterParameters parameters, ILogger logger)
        {
            var databaseNames = configurations
                                .SelectMany(c => c.Resolve <PredicateRootPathResolver>().GetRootPaths().Select(rp => rp.DatabaseName))
                                .Distinct();

            foreach (var databaseName in databaseNames)
            {
                logger.Info($"Syncing database '{databaseName}'...");

                var context   = CreateBulkLoadContext(BulkLoader, databaseName, configurations, parameters, logger);
                var bulkItems = ItemExtractor.ExtractBulkItems(context, configurations, databaseName);
                BulkLoader.LoadItems(context, bulkItems);

                if (context.AnyStageFailed)
                {
                    throw new Exception($"Stage failed during bulkload of database '{databaseName}': {context.FailureMessage}");
                }

                // Support publishing after sync.
                if (!IsUnicornPublishEnabled && !databaseName.Equals("core", StringComparison.OrdinalIgnoreCase))
                {
                    foreach (var itemChange in context.ItemChanges)
                    {
                        ManualPublishQueueHandler.AddItemToPublish(itemChange.ItemId);
                    }
                }
            }
        }
Example #10
        private static void LoadEventSentEmail(EventRow eventRow, EventDataTable systemEvent, int sentEmailID)
        {
            BulkLoader bulkLoader;
            DataTable  eventSentEmailTable;

            using (AdoDataConnection connection = new AdoDataConnection(s_dbAdapterContainer.Connection, typeof(SqlDataAdapter), false))
            {
                // Query an empty table with matching schema --
                // union table to itself to eliminate unique key constraints
                eventSentEmailTable           = connection.RetrieveData("SELECT * FROM EventSentEmail WHERE 1 IS NULL UNION ALL SELECT * FROM EventSentEmail WHERE 1 IS NULL");
                eventSentEmailTable.TableName = "EventSentEmail";
            }
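
            // The leading 0 is a placeholder for the identity column; the real ID is assigned when the
            // bulk loader writes the rows to the database (assumed).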

            foreach (MeterData.EventRow evt in systemEvent)
            {
                if (eventRow.LineID == evt.LineID)
                {
                    eventSentEmailTable.Rows.Add(0, evt.ID, sentEmailID);
                }
            }

            bulkLoader                = new BulkLoader();
            bulkLoader.Connection     = s_dbAdapterContainer.Connection;
            bulkLoader.CommandTimeout = s_dbAdapterContainer.CommandTimeout;
            bulkLoader.Load(eventSentEmailTable);
        }
        public override void Load(DbAdapterContainer dbAdapterContainer)
        {
            BulkLoader loader = new BulkLoader();

            loader.Connection     = dbAdapterContainer.Connection;
            loader.CommandTimeout = dbAdapterContainer.CommandTimeout;
            loader.Load(m_disturbanceSeverityTable);
        }
 public BulkLoader()
 {
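     // Only one BulkLoader may be active at a time; assumed: InBulkLoader reflects whether
     // currentBulkLoader has already been set by another instance.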
     if (InBulkLoader)
     {
         throw new InvalidOperationException();
     }
     currentBulkLoader = this;
 }
        public override void Load(DbAdapterContainer dbAdapterContainer)
        {
            BulkLoader loader = new BulkLoader();

            loader.Connection     = dbAdapterContainer.Connection;
            loader.CommandTimeout = dbAdapterContainer.CommandTimeout;
            loader.Load(m_doubleEndedFaultDistanceTable);
            loader.Load(m_faultCurveTable);
        }
Example #14
        public void RandomChanges()
        {
            const string Tag        = "TestWatcherRandomChanges_";
            string       folderName = Directory.GetCurrentDirectory();
            string       pattern    = Tag + "*";

            foreach (string file in Directory.EnumerateFiles(folderName, pattern))
            {
                File.Delete(file);
            }

            var ff = new FileFiller();

            var initialPopulation = new List <FileEntry>();

            for (int i = 1; i <= 99; i++)
            {
                string fName = Tag + i + ".txt";
                ff.FillFile(fName, numberOfLines: i, lineLength: FileFiller.RandomLength);
                initialPopulation.Add(new FileEntry(fName, DateTime.UtcNow, i, 0));
            }
            for (int i = 100; i <= 102400; i *= 2)
            {
                string fName = Tag + i + ".txt";
                ff.FillFile(fName, numberOfLines: i, lineLength: FileFiller.RandomLength);
                initialPopulation.Add(new FileEntry(fName, DateTime.UtcNow, i, 0));
            }

            _RandomCounted = 0;

            var model   = new FolderModel();
            var view    = new Log(toConsole: false, fileName: "out.txt");
            var counter = new LineCounter();
            var loader  = new BulkLoader();

            var controller = new Watcher(view, model, counter, loader, folderName, Tag + "*");

            controller.FolderChanged += Controller_FolderChanged_Random;
            controller.Status        += Controller_Status;
            System.Threading.Tasks.Task w = controller.StartAsync();

            int loops = 0;
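
            // Wait (up to roughly 100 seconds) for the watcher to report that it has reached the
            // Monitoring state.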

            while (_lastStatus != Watcher.Monitoring)
            {
                loops++;
                Thread.Sleep(1000);
                if (loops > 100)
                {
                    Assert.Fail("Watcher never reached Monitoring status."); break;
                }
            }
        }
        public override void Load(DbAdapterContainer dbAdapterContainer)
        {
            BulkLoader hourlySummaryLoader;
            BulkLoader channelNormalLoader;

            if (m_hourlySummaryTable.Count == 0 && m_channelNormalTable.Count == 0)
            {
                return;
            }

            Log.Info("Loading hourly summary data into the database...");

            hourlySummaryLoader = new BulkLoader();
            channelNormalLoader = new BulkLoader();

            hourlySummaryLoader.Connection     = dbAdapterContainer.Connection;
            hourlySummaryLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;
            channelNormalLoader.Connection     = dbAdapterContainer.Connection;
            channelNormalLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;
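
            // The hourly summary merge combines rows per (ChannelID, Time): Maximum/Minimum keep the extreme
            // of the two rows, Average becomes the ValidCount-weighted mean of both averages, the counts are
            // summed, and WITH (TABLOCK) requests a table-level lock on the target for the duration of the merge.
            // The channel normal merge below combines per-channel running statistics the same way: Average and
            // MeanSquare become Count-weighted means, and StandardDeviation is derived from the combined values.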

            hourlySummaryLoader.MergeTableFormat = "MERGE INTO {0} WITH (TABLOCK) AS Target " +
                                                   "USING {1} AS Source " +
                                                   "ON Source.ChannelID = Target.ChannelID AND Source.Time = Target.Time " +
                                                   "WHEN MATCHED THEN " +
                                                   "    UPDATE SET " +
                                                   "        Maximum = CASE WHEN Target.ValidCount = 0 OR Source.Maximum > Target.Maximum THEN Source.Maximum ELSE Target.Maximum END, " +
                                                   "        Minimum = CASE WHEN Target.ValidCount = 0 OR Source.Minimum < Target.Minimum THEN Source.Minimum ELSE Target.Minimum END, " +
                                                   "        Average = CASE WHEN Target.ValidCount = 0 THEN Source.Average ELSE Target.Average * (CAST(Target.ValidCount AS FLOAT) / (Target.ValidCount + Source.ValidCount)) + Source.Average * (CAST(Source.ValidCount AS FLOAT) / (Target.ValidCount + Source.ValidCount)) END, " +
                                                   "        ValidCount = Source.ValidCount + Target.ValidCount, " +
                                                   "        InvalidCount = Source.InvalidCount + Target.InvalidCount " +
                                                   "WHEN NOT MATCHED THEN " +
                                                   "    INSERT (ChannelID, Time, Maximum, Minimum, Average, ValidCount, InvalidCount) " +
                                                   "    VALUES (Source.ChannelID, Source.Time, Source.Maximum, Source.Minimum, Source.Average, Source.ValidCount, Source.InvalidCount);";

            channelNormalLoader.MergeTableFormat = "MERGE INTO {0} AS Target " +
                                                   "USING {1} AS Source " +
                                                   "ON Target.ChannelID = Source.ChannelID " +
                                                   "WHEN MATCHED THEN " +
                                                   "    UPDATE SET " +
                                                   "        Average = Target.Average * (CAST(Target.Count AS FLOAT) / (Target.Count + Source.Count)) + Source.Average * (CAST(Source.Count AS FLOAT) / (Target.Count + Source.Count)), " +
                                                   "        MeanSquare = Target.MeanSquare * (CAST(Target.Count AS FLOAT) / (Target.Count + Source.Count)) + Source.MeanSquare * (CAST(Source.Count AS FLOAT) / (Target.Count + Source.Count)), " +
                                                   "        StandardDeviation = SQRT(Target.MeanSquare * (CAST(Target.Count AS FLOAT) / (Target.Count + Source.Count)) + Source.MeanSquare * (CAST(Source.Count AS FLOAT) / (Target.Count + Source.Count)) - POWER(Target.Average * (CAST(Target.Count AS FLOAT) / (Target.Count + Source.Count)) + Source.Average * (CAST(Source.Count AS FLOAT) / (Target.Count + Source.Count)), 2)), " +
                                                   "        Count = Target.Count + Source.Count " +
                                                   "WHEN NOT MATCHED THEN " +
                                                   "    INSERT (ChannelID, Average, MeanSquare, StandardDeviation, Count) " +
                                                   "    VALUES (Source.ChannelID, Source.Average, Source.MeanSquare, SQRT(Source.MeanSquare - Source.Average * Source.Average), Source.Count);";

            hourlySummaryLoader.Load(m_hourlySummaryTable);
            channelNormalLoader.Load(m_channelNormalTable);

            Log.Info(string.Format("Loaded {0} hourly summary records into the database.", m_hourlySummaryTable.Count));
        }
Example #16
        public void Initialize()
        {
            const string Tag = "TestWatcherInitialize_";

            var x = new FileFiller();

            x.FillFile(Tag + "1" + ".txt", numberOfLines: 1, lineLength: FileFiller.RandomLength);
            x.FillFile(Tag + "2" + ".txt", numberOfLines: 2, lineLength: FileFiller.RandomLength);
            x.FillFile(Tag + "3" + ".txt", numberOfLines: 3, lineLength: FileFiller.RandomLength);
            x.FillFile(Tag + "4" + ".txt", numberOfLines: 4, lineLength: FileFiller.RandomLength);
            x.FillFile(Tag + "5" + ".txt", numberOfLines: 5, lineLength: FileFiller.RandomLength);
            x.FillFile(Tag + "6" + ".txt", numberOfLines: 6, lineLength: FileFiller.RandomLength);
            x.FillFile(Tag + "7" + ".txt", numberOfLines: 7, lineLength: FileFiller.RandomLength);
            x.FillFile(Tag + "8" + ".txt", numberOfLines: 8, lineLength: FileFiller.RandomLength);
            x.FillFile(Tag + "9" + ".txt", numberOfLines: 9, lineLength: FileFiller.RandomLength);
            x.FillFile(Tag + "10" + ".txt", numberOfLines: 10, lineLength: FileFiller.RandomLength);
            x.FillFile(Tag + "100" + ".txt", numberOfLines: 100, lineLength: FileFiller.RandomLength);
            x.FillFile(Tag + "1000" + ".txt", numberOfLines: 1000, lineLength: FileFiller.RandomLength);
            x.FillFile(Tag + "10000" + ".txt", numberOfLines: 10000, lineLength: FileFiller.RandomLength);

            var    model   = new FolderModel();
            var    view    = new Log(toConsole: false, fileName: "out.txt");
            var    counter = new LineCounter();
            var    loader  = new BulkLoader();
            string dir     = Directory.GetCurrentDirectory();

            var controller = new Watcher(view, model, counter, loader, dir, Tag + "*");

            System.Threading.Tasks.Task w = controller.StartAsync();
            w.Wait(10000);
            var q = model[Tag + "1" + ".txt"];

            Assert.AreEqual(model.Count(), 13);
            Assert.AreEqual(model[Tag + "1" + ".txt"].LineCount, 1);
            Assert.AreEqual(model[Tag + "2" + ".txt"].LineCount, 2);
            Assert.AreEqual(model[Tag + "3" + ".txt"].LineCount, 3);
            Assert.AreEqual(model[Tag + "4" + ".txt"].LineCount, 4);
            Assert.AreEqual(model[Tag + "5" + ".txt"].LineCount, 5);
            Assert.AreEqual(model[Tag + "6" + ".txt"].LineCount, 6);
            Assert.AreEqual(model[Tag + "7" + ".txt"].LineCount, 7);
            Assert.AreEqual(model[Tag + "8" + ".txt"].LineCount, 8);
            Assert.AreEqual(model[Tag + "9" + ".txt"].LineCount, 9);
            Assert.AreEqual(model[Tag + "10" + ".txt"].LineCount, 10);
            Assert.AreEqual(model[Tag + "100" + ".txt"].LineCount, 100);
            Assert.AreEqual(model[Tag + "1000" + ".txt"].LineCount, 1000);
            Assert.AreEqual(model[Tag + "10000" + ".txt"].LineCount, 10000);

            Console.WriteLine("DONE");
        }
Example #17
        public static async Task WriteToFile(Options options, DefinitionBuilder builder, IEnumerable source)
        {
            var filename  = Path.Combine(options.OutputDirectory, builder.Name + ".csv");
            var directory = new DirectoryInfo(options.OutputDirectory);

            if (!directory.Exists && directory.Parent != null)
            {
                directory.Create();
            }

            var dbLoader = new BulkLoader(builder.Name, source);
            await dbLoader.ExportToFile(filename);

            Console.WriteLine($"Exported {builder.Name} to CSV");
        }
Example #18
        public void AddRecordInFirstLoader()
        {
            var loader1 = Substitute.For <IRecordLoader>();

            loader1.Add(Arg.Any <IRecord>()).Returns(true);

            var loader2 = Substitute.For <IRecordLoader>();

            var loader = new BulkLoader(new [] { loader1, loader2 }, new Sequence(), Substitute.For <ILogger>());

            loader.Load(new IRecord[] { new Header() });

            loader1.ReceivedWithAnyArgs().Add(null);
            loader2.DidNotReceiveWithAnyArgs().Add(null);
        }
Example #19
        public override void Load(DbAdapterContainer dbAdapterContainer)
        {
            BulkLoader bulkLoader = new BulkLoader();

            if (m_alarmLogTable.Count == 0)
            {
                return;
            }

            Log.Info("Loading alarm data into the database...");

            bulkLoader.Connection     = m_dbAdapterContainer.Connection;
            bulkLoader.CommandTimeout = m_dbAdapterContainer.CommandTimeout;
            bulkLoader.Load(m_alarmLogTable);

            Log.Info(string.Format("Loaded {0} alarm log records into the database.", m_alarmLogTable.Count));
        }
Example #20
        public void ThrowsExceptionOnAnyError_TODO_CHANGE_THIS_BEHAVIOR()
        {
            var loader1 = Substitute.For <IRecordLoader>();

            loader1.Add(Arg.Any <IRecord>()).Returns(x => throw new Exception(), x => true);

            var loader2 = Substitute.For <IRecordLoader>();

            loader2.Add(Arg.Any <IRecord>()).Returns(true);

            var loader = new BulkLoader(new [] { loader1, loader2 }, new Sequence(), Substitute.For <ILogger>());

            Assert.Throws <Exception>(() => loader.Load(new IRecord[] { new Header(), new Trailer() }));

            loader1.ReceivedWithAnyArgs().Add(null);
            loader2.DidNotReceiveWithAnyArgs().Add(null);
        }
Example #21
        public void ProcessesAllRecords()
        {
            var loader1 = Substitute.For <IRecordLoader>();

            loader1.Add(Arg.Any <IRecord>()).Returns(true, false);

            var loader2 = Substitute.For <IRecordLoader>();

            loader2.Add(Arg.Any <IRecord>()).Returns(true);

            var loader = new BulkLoader(new [] { loader1, loader2 }, new Sequence(), Substitute.For <ILogger>());

            loader.Load(new IRecord[] { new Header(), new Trailer() });

            loader1.ReceivedWithAnyArgs().Add(null);
            loader2.ReceivedWithAnyArgs().Add(null);
        }
Example #22
        public void DropThroughLogsWarning()
        {
            var logger  = Substitute.For <ILogger>();
            var loader1 = Substitute.For <IRecordLoader>();

            loader1.Add(Arg.Any <IRecord>()).Returns(false);

            var loader2 = Substitute.For <IRecordLoader>();

            loader2.Add(Arg.Any <IRecord>()).Returns(false);

            var loader = new BulkLoader(new [] { loader1, loader2 }, new Sequence(), logger);

            loader.Load(new IRecord[] { new Header() });

            logger.ReceivedWithAnyArgs().Warning <Type, IRecord>(messageTemplate: null, propertyValue0: null, propertyValue1: null);
        }
Example #23
        static void Main(string[] args)
        {
            try
            {
                if (args.Length != 2)
                {
                    Console.WriteLine(Usage);
                    return;
                }

                string path   = args[0];
                string filter = args[1];

                if (!Directory.Exists(path))
                {
                    Console.WriteLine(InvalidPath, path);
                    return;
                }

                Console.WriteLine("UTAU Line Counter");
                Console.WriteLine("=================");
                Console.WriteLine("Folder: " + path);
                Console.WriteLine("Filter: " + filter);

                var model      = new FolderModel();
                var view       = new Log(toConsole: true, fileName: null);
                var counter    = new LineCounter();
                var loader     = new BulkLoader();
                var controller = new Watcher(view, model, counter, loader, path, filter);
                controller.FolderChanged += Controller_FolderChanged;
                controller.Status        += Controller_Status;
                Console.WriteLine("Starting watcher.");
                Task t = controller.StartAsync();

                Console.WriteLine("Running. Ctrl-C or close the console window to exit.");
                Console.WriteLine();
                t.Wait();
                Console.WriteLine("DONE");
            }
            finally
            {
                WaitForExit();
            }
        }
Example #24
        public void BadFilter()
        {
            var  loader = new BulkLoader();
            var  model  = new FolderModel();
            bool threwCorrectException = false;

            try
            {
                loader.Load(Directory.GetCurrentDirectory(), "\\x\\y", model);
            }
            catch (Exception ex)
            {
                if (ex is ArgumentException)
                {
                    threwCorrectException = true;
                }
            }

            Assert.IsTrue(threwCorrectException);
        }
Example #25
        public void BadDirectory()
        {
            var  loader = new BulkLoader();
            var  model  = new FolderModel();
            bool threwCorrectException = false;

            try
            {
                loader.Load(@"xxx\yyy\zzz\www\ttt", "*.txt", model);
            }
            catch (Exception ex)
            {
                if (ex is DirectoryNotFoundException)
                {
                    threwCorrectException = true;
                }
            }

            Assert.IsTrue(threwCorrectException);
        }
Example #26
        private void ItemCreator(PullItemModel args, CancellationToken cancellationToken)
        {
            var bulkLoader = new BulkLoader();

            try
            {
                var context = bulkLoader.NewBulkLoadContext("master");
                bulkLoader.LoadItems(context, GetAllItemsToCreate(context, cancellationToken));
                _checksumManager.RegenerateChecksum();
            }
            catch (OperationCanceledException e)
            {
                Log.Warn("Content migration operation was cancelled", e, this);
                Status.Cancelled = true;
            }
            catch (Exception e)
            {
                Log.Error("Catastrophic error when creating items", e, this);
            }
        }
        private Dictionary <BreakerOperationType, int> GetBreakerOperationTypeLookup(DbAdapterContainer dbAdapterContainer)
        {
            BreakerOperationTypeDataTable breakerOperationTypeTable = new BreakerOperationTypeDataTable();
            BreakerOperationType          breakerOperationType      = default(BreakerOperationType);

            foreach (BreakerOperationType operationType in Enum.GetValues(typeof(BreakerOperationType)))
            {
                breakerOperationTypeTable.AddBreakerOperationTypeRow(operationType.ToString(), operationType.ToString());
            }

            BulkLoader bulkLoader = new BulkLoader();

            bulkLoader.Connection     = dbAdapterContainer.Connection;
            bulkLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;

            bulkLoader.MergeTableFormat = "MERGE INTO {0} AS Target " +
                                          "USING {1} AS Source " +
                                          "ON Source.Name = Target.Name " +
                                          "WHEN NOT MATCHED THEN " +
                                          "    INSERT (Name, Description) " +
                                          "    VALUES (Source.Name, Source.Description);";

            bulkLoader.Load(breakerOperationTypeTable);

            dbAdapterContainer.GetAdapter <BreakerOperationTypeTableAdapter>().Fill(breakerOperationTypeTable);

            foreach (IGrouping <string, BreakerOperationTypeRow> grouping in breakerOperationTypeTable.GroupBy(row => row.Name))
            {
                if (grouping.Count() > 1)
                {
                    Log.Warn($"Found duplicate breaker operation type: {grouping.Key}");
                }
            }

            return(breakerOperationTypeTable
                   .Where(row => Enum.TryParse(row.Name, out breakerOperationType))
                   .Select(row => new { BreakerOperationType = breakerOperationType, row.ID })
                   .ToList()
                   .DistinctBy(obj => obj.BreakerOperationType)
                   .ToDictionary(obj => obj.BreakerOperationType, obj => obj.ID));
        }
Example #28
        private Dictionary <EventClassification, int> GetEventTypeLookup(DbAdapterContainer dbAdapterContainer)
        {
            MeterData.EventTypeDataTable eventTypeTable      = new MeterData.EventTypeDataTable();
            EventClassification          eventClassification = default(EventClassification);

            foreach (EventClassification classification in Enum.GetValues(typeof(EventClassification)))
            {
                eventTypeTable.AddEventTypeRow(classification.ToString(), classification.ToString());
            }

            BulkLoader bulkLoader = new BulkLoader();

            bulkLoader.Connection     = dbAdapterContainer.Connection;
            bulkLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;

            bulkLoader.MergeTableFormat = "MERGE INTO {0} AS Target " +
                                          "USING {1} AS Source " +
                                          "ON Source.Name = Target.Name " +
                                          "WHEN NOT MATCHED THEN " +
                                          "    INSERT (Name, Description) " +
                                          "    VALUES (Source.Name, Source.Description);";

            bulkLoader.Load(eventTypeTable);

            dbAdapterContainer.GetAdapter <EventTypeTableAdapter>().Fill(eventTypeTable);

            return(eventTypeTable
                   .Where(row => Enum.TryParse(row.Name, out eventClassification))
                   .Select(row => new { EventClassification = eventClassification, row.ID })
                   .ToList()
                   .GroupBy(obj => obj.EventClassification)
                   .ToDictionary(grouping => grouping.Key, grouping =>
            {
                if (grouping.Count() > 1)
                {
                    Log.Warn($"Found duplicate event type: {grouping.Key}");
                }

                return grouping.First().ID;
            }));
        }
Example #29
        protected virtual BulkLoadContext CreateBulkLoadContext(BulkLoader bulkLoader, string databaseName,
                                                                IConfiguration[] configurations, DataBlasterParameters parameters, ILogger logger)
        {
            var context = bulkLoader.NewBulkLoadContext(databaseName);

            context.Log = new SitecoreAndUnicornLog(LoggerFactory.GetLogger(GetType()), logger);

            context.AllowTemplateChanges       = true;
            context.StageDataWithoutProcessing = parameters.StageDataWithoutProcessing;

            // Use the shotgun approach; removing items from caches one by one is too slow for a full deserialize.
            context.RemoveItemsFromCaches = false;

            context.UpdateHistory      = !SkipHistoryEngine;
            context.UpdatePublishQueue = !SkipPublishQueue;
            context.UpdateLinkDatabase = !SkipLinkDatabase &&
                                         configurations.Any(x => x.Resolve <ISyncConfiguration>().UpdateLinkDatabase);
            context.UpdateIndexes = !SkipIndexes &&
                                    configurations.Any(x => x.Resolve <ISyncConfiguration>().UpdateSearchIndex);

            return(context);
        }
        public override void Load(DbAdapterContainer dbAdapterContainer)
        {
            EventDataTable eventTable;
            Dictionary <EventKey, EventRow> eventLookup;
            EventRow   eventRow;
            BulkLoader bulkLoader;

            eventTable = dbAdapterContainer.GetAdapter <EventTableAdapter>().GetDataByFileGroup(m_meterDataSet.FileGroup.ID);

            eventLookup = eventTable
                          .Where(evt => evt.MeterID == m_meterDataSet.Meter.ID)
                          .GroupBy(CreateEventKey)
                          .ToDictionary(grouping => grouping.Key, grouping =>
            {
                if (grouping.Count() > 1)
                {
                    Log.Warn($"Found duplicate events for meter {m_meterDataSet.Meter.AssetKey}: {string.Join(", ", grouping.Select(evt => evt.ID))}");
                }

                return(grouping.First());
            });

            foreach (Tuple <EventKey, BreakerOperationRow> breakerOperation in m_breakerOperations)
            {
                if (eventLookup.TryGetValue(breakerOperation.Item1, out eventRow))
                {
                    breakerOperation.Item2.EventID = eventRow.ID;
                    m_breakerOperationTable.AddBreakerOperationRow(breakerOperation.Item2);
                }
            }

            bulkLoader                = new BulkLoader();
            bulkLoader.Connection     = dbAdapterContainer.Connection;
            bulkLoader.CommandTimeout = dbAdapterContainer.CommandTimeout;
            bulkLoader.Load(m_breakerOperationTable);
        }
        private static void LoadEventSentEmail(EventRow eventRow, EventDataTable systemEvent, int sentEmailID)
        {
            BulkLoader bulkLoader;
            DataTable eventSentEmailTable;

            using (AdoDataConnection connection = new AdoDataConnection(s_dbAdapterContainer.Connection, typeof(SqlDataAdapter), false))
            {
                // Query an empty table with matching schema --
                // union table to itself to eliminate unique key constraints
                eventSentEmailTable = connection.RetrieveData("SELECT * FROM EventSentEmail WHERE 1 IS NULL UNION ALL SELECT * FROM EventSentEmail WHERE 1 IS NULL");
                eventSentEmailTable.TableName = "EventSentEmail";
            }

            foreach (MeterData.EventRow evt in systemEvent)
            {
                if (eventRow.LineID == evt.LineID)
                    eventSentEmailTable.Rows.Add(0, evt.ID, sentEmailID);
            }

            bulkLoader = new BulkLoader();
            bulkLoader.Connection = s_dbAdapterContainer.Connection;
            bulkLoader.CommandTimeout = s_dbAdapterContainer.CommandTimeout;
            bulkLoader.Load(eventSentEmailTable);
        }
 public bulk_loader_overwrite_sql_tests()
 {
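     // Note: unlike the ADO.NET loaders above, this BulkLoader<T> is Marten's generic document bulk
     // loader (assumed from the serializer and DocumentMapping arguments), constructed here for the
     // overwrite tests.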
     _bulkLoader = new BulkLoader<Issue>(theStore.Advanced.Serializer, DocumentMapping.For<Issue>(), null);
 }