private void AddToReferenceMap(InternalEntityEntry entry)
        {
            var mapKey     = entry.Entity ?? entry;
            var entityType = entry.EntityType;

            if (entityType.HasDefiningNavigation())
            {
                foreach (var otherType in _model.GetEntityTypes(entityType.Name)
                         .Where(et => et != entityType && TryGetEntry(mapKey, et) != null))
                {
                    UpdateLogger.DuplicateDependentEntityTypeInstanceWarning(entityType, otherType);
                }

                if (!_dependentTypeReferenceMap.TryGetValue(entityType, out var entries))
                {
                    entries = new Dictionary <object, InternalEntityEntry>(ReferenceEqualityComparer.Instance);
                    _dependentTypeReferenceMap[entityType] = entries;
                }

                entries[mapKey] = entry;
            }
            else
            {
                _entityReferenceMap[mapKey] = entry;
            }
        }
Example 2
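        // Reads every map in the maps0.d2p archive, decrypting each DLM stream with the configured
        // map key and reporting the completion percentage through an UpdateLogger instance.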
        private void LoadMaps()
        {
            var mapFile = MAPS_D2P_DIRECTORY + "maps0.d2p"; // put this in const field

            string eleFile = MAPS_D2P_DIRECTORY + "elements.ele";

            if (!File.Exists(mapFile))
            {
                return;
            }

            logger.White("Loading Maps from d2p...");

            EleReader eleReader = new EleReader(eleFile);

            Elements elements = eleReader.ReadElements();

            D2pFile d2p   = new D2pFile(mapFile);
            var     datas = d2p.ReadAllFiles();

            UpdateLogger m_logger = new UpdateLogger();

            int x = 0;

            foreach (var data in datas)
            {
                DlmReader mapReader = new DlmReader(new MemoryStream(data.Value));
                mapReader.DecryptionKey = SqlSyncConfiguration.Instance.MapKey;
                ReadMap(mapReader, elements); //elements
                x++;
                m_logger.Update(x.Percentage(datas.Count) + "%");
            }
        }
Example 3
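        // Builds a MapInstance for every map record, attaching interactive elements, map positions,
        // monster spawns and NPCs, while an UpdateLogger tracks overall progress.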
        public static void CreateInstances()
        {
            UpdateLogger updateLogger = new UpdateLogger();

            int num = 0;

            foreach (var record in Maps)
            {
                record.InteractiveElements = InteractiveElementRecord.GetActiveElementsOnMap(record.Id);

                record.Position = MapPositionRecord.GetMapPosition(record.Id);

                record.MonsterSpawnsSubArea = MonsterSpawnRecord.GetSpawns(record.SubAreaId).ToArray();

                record.NpcsRecord = NpcSpawnRecord.GetMapNpcs(record.Id).ToArray();

                record.Instance = new MapInstance(record);

                NpcSpawnsManager.Instance.SpawnAtStartup(record);

                if (record.AbleToSpawn)
                {
                    MonsterSpawnManager.Instance.SpawnMonsters(record);
                }

                updateLogger.Update(num.Percentage(Maps.Count));
                num++;
            }
        }
Example 4
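        // Application constructor that routes unhandled exceptions from both the dispatcher and
        // worker threads to UpdateLogger, including inner-exception details when available.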
        public App()
        {
            DispatcherUnhandledException += (sender, e) => {
                UpdateLogger.WriteLine("Main thread error: " + e.Exception.Message);
            };
            AppDomain.CurrentDomain.UnhandledException += (sender, e) => {
                UpdateLogger.WriteLine("Worker thread error: " + ((Exception)e.ExceptionObject).Message);
                UpdateLogger.WriteLine("Worker thread error: " + ((Exception)e.ExceptionObject).StackTrace);
                var ex = e.ExceptionObject as Exception;
                if (ex != null && ex.InnerException != null)
                {
                    UpdateLogger.WriteLine("Worker thread error: " + ex.InnerException.StackTrace);
                    UpdateLogger.WriteLine("Worker thread error: " + ex.InnerException.Message);
                }
                var nullex = e.ExceptionObject as NullReferenceException;
                if (nullex != null)
                {
                    UpdateLogger.WriteLine("Source:" + nullex.Source);

                    var enumerator = nullex.Data.GetEnumerator();
                    while (enumerator.MoveNext())
                    {
                        UpdateLogger.WriteLine("Object:" + enumerator.Current.ToString());
                    }
                    }
                }
            };
        }
Example 5
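        // Derives a rough power rating for each monster from its grade-5 level and life points,
        // capping the base value at 1000 and adding flat bonuses for mini-bosses and bosses,
        // with an UpdateLogger reporting progress.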
        //     [StartupInvoke("MonsterPowerCalculator", StartupInvokePriority.Modules)]
        public static void Initialize()
        {
            UpdateLogger logger = new UpdateLogger();
            int          i      = 0;

            foreach (var monster in MonsterRecord.Monsters)
            {
                i++;

                MonsterGrade grade = monster.GetGrade(5);

                monster.Power  = grade.Level * 2;
                monster.Power += (int)(grade.LifePoints / 10);

                if (monster.Power > 1000)
                {
                    monster.Power = 1000;
                }

                if (monster.IsMiniBoss)
                {
                    monster.Power += 300;
                }

                if (monster.IsBoss)
                {
                    monster.Power += 500;
                }

                monster.UpdateInstantElement();

                logger.Update(i.Percentage(MonsterRecord.Monsters.Count));
            }
        }
Example 6
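        // Backs up a data area that does not intersect any previously logged backup; the
        // UpdateLogger.LogLine call is wrapped in a delegate and only runs after the asynchronous
        // copy started by BeginRead has completed.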
        private void BackupDataWithNoIntersection(
            IEnumerable <KeyValuePair <BackupDataLogKey, BackupDataLogValue> > intersectingLogs,
            BackupDataLogKey logKey, KeyValuePair <long, int> area, OnDisk.File.IFile f, string fFilename,
            ConcurrentIOPoolManager readPool, ConcurrentIOPoolManager writePool,
            RecordKey key)
        {
            string systemBackupFilename = Server.Path + DataBackupFilename;
            int    size = area.Value;

            key.Address = area.Key;

            // no intersection nor mergeable logs, add new log! backup and log the data area
            ConcurrentIOData reader = f != null
                                          ? readPool.GetInstance(f, size)
                                          : readPool.GetInstance(fFilename, null, size);

            ConcurrentIOData writer = writePool.GetInstance(systemBackupFilename, (TransactionRoot)Root);

            if (reader == null || writer == null)
            {
                throw new SopException("This program has a bug! Didn't get reader or writer from Async IO Pool.");
            }

            LogTracer.Verbose("BackupDataWithNoIntersection: Start for Thread {0}.", Thread.CurrentThread.ManagedThreadId);


            var logValue = new BackupDataLogValue();

            logValue.DataSize      = size;
            logValue.TransactionId = Id;

            logValue.BackupFileHandle = GetLogBackupFileHandle(DataBackupFilename);

            // return the current backup file size and grow it to make room for data to be backed up...
            logValue.BackupDataAddress = GrowBackupFile(size, writer.FileStream);

            // save a record of the backed up data..
            LogCollection.Add(logKey, logValue);

            // log after data was backed up!!
            Sop.VoidFunc logBackedupData = () =>
            {
                UpdateLogger.LogLine("{0}{1}:{2} to {3}:{4} Size={5}",
                                     BackupFromToken, f != null ? f.Filename : fFilename, area.Key,
                                     DataBackupFilename, logValue.BackupDataAddress, size);
            };

            writer.FileStream.Seek(logValue.BackupDataAddress, SeekOrigin.Begin, true);
            reader.FileStream.Seek(area.Key, SeekOrigin.Begin, true);
            reader.FileStream.BeginRead(
                reader.Buffer, 0, size, ReadCallback,
                new object[] { new[] { reader, writer }, true, logKey, logBackedupData });
        }
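
        // Warns through the update logger when another entity type with a defining navigation is
        // already tracking the same instance, then forwards the state change to the reference map.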
        private void UpdateReferenceMaps(InternalEntityEntry entry, EntityState state, EntityState?oldState)
        {
            var entityType = entry.EntityType;
            var mapKey     = entry.Entity ?? entry;

            if (entityType.HasDefiningNavigation())
            {
                foreach (var otherType in this.model.GetEntityTypes(entityType.Name).Where(et => et != entityType && TryGetEntry(mapKey, et) != null))
                {
                    UpdateLogger.DuplicateDependentEntityTypeInstanceWarning(entityType, otherType);
                }
            }

            entityReferenceMap.Update(entry, state, oldState);
        }
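
        // Recursively cascades a delete to tracked dependents according to each foreign key's
        // DeleteBehavior, logging every cascade through UpdateLogger (with sensitive details when
        // enabled) and clearing foreign-key properties when the behavior is not Cascade or Restrict.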
        private void CascadeDelete(InternalEntityEntry entry)
        {
            foreach (var fk in entry.EntityType.GetReferencingForeignKeys())
            {
                foreach (var dependent in (GetDependentsFromNavigation(entry, fk)
                                           ?? GetDependents(entry, fk)).ToList())
                {
                    if (dependent.EntityState != EntityState.Deleted &&
                        dependent.EntityState != EntityState.Detached)
                    {
                        if (fk.DeleteBehavior == DeleteBehavior.Cascade)
                        {
                            var cascadeState = dependent.EntityState == EntityState.Added
                                ? EntityState.Detached
                                : EntityState.Deleted;

                            if (SensitiveLoggingEnabled)
                            {
                                UpdateLogger.CascadeDeleteSensitive(dependent, entry, cascadeState);
                            }
                            else
                            {
                                UpdateLogger.CascadeDelete(dependent, entry, cascadeState);
                            }

                            dependent.SetEntityState(cascadeState);

                            CascadeDelete(dependent);
                        }
                        else if (fk.DeleteBehavior != DeleteBehavior.Restrict)
                        {
                            foreach (var dependentProperty in fk.Properties)
                            {
                                dependent[dependentProperty] = null;
                            }

                            if (dependent.HasConceptualNull)
                            {
                                dependent.HandleConceptualNulls(SensitiveLoggingEnabled);
                            }
                        }
                    }
                }
            }
        }
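
        // Unit test: CustomModTranslationUpdater merges a new translation file into an old one while
        // the UpdateLogger counts how many entries were added, removed and changed.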
        public void Update_Correct()
        {
            var service = new CustomModTranslationUpdater();
            var logger  = new UpdateLogger();

            var oldFile = new CustomModTranslationFile()
            {
                Translations = new Dictionary <string, BaseEntry>()
                {
                    { "Key1", new BaseEntry()
                      {
                          Origin = "Origin1", Translation = "Translation1"
                      } },
                    { "Key2", new BaseEntry()
                      {
                          Origin = "Origin2", Translation = "Translation2"
                      } },
                    { "Key3", new BaseEntry()
                      {
                          Origin = "Origin3", Translation = "Translation3"
                      } },
                }
            };

            var newFile = new CustomModTranslationFile()
            {
                Translations = new Dictionary <string, BaseEntry>()
                {
                    { "Key1", new BaseEntry()
                      {
                          Origin = "AnotherOrigin1", Translation = "AnotherTranslation1"
                      } },
                    { "Key4", new BaseEntry()
                      {
                          Origin = "Origin4", Translation = "Translation4"
                      } },
                    { "Key5", new BaseEntry()
                      {
                          Origin = "Origin5", Translation = "Translation5"
                      } },
                }
            };

            service.Update(oldFile, newFile, logger);

            logger.Added.Count.Should().Be(2);
            logger.Removed.Count.Should().Be(2);
            logger.Changed.Count.Should().Be(1);

            oldFile.Translations.Count.Should().Be(5);
            oldFile.Translations["Key1"].Origin.Should().Be("AnotherOrigin1");
            oldFile.Translations["Key1"].Translation.Should().Be("AnotherTranslation1");
            oldFile.Translations["Key2"].Origin.Should().Be("Origin2");
            oldFile.Translations["Key2"].Translation.Should().Be("Translation2");
            oldFile.Translations["Key3"].Origin.Should().Be("Origin3");
            oldFile.Translations["Key3"].Translation.Should().Be("Translation3");
            oldFile.Translations["Key4"].Origin.Should().Be("Origin4");
            oldFile.Translations["Key4"].Translation.Should().Be("Translation4");
            oldFile.Translations["Key5"].Origin.Should().Be("Origin5");
            oldFile.Translations["Key5"].Translation.Should().Be("Translation5");
        }
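
        // Unit test: BasicFileUpdater.UpdateEntry replaces an entry's Origin when it differs in the
        // new file, keeps the existing Translation, and logs one change per modified field; identical
        // origins produce no log entries.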
        public void UpdateEntry_Correct()
        {
            var service = new BasicFileUpdater <BasicItemFile>();
            var logger1 = new UpdateLogger();

            var oldEntry1 = new ItemEntry()
            {
                Name = new BaseEntry()
                {
                    Origin      = "NameOrigin",
                    Translation = "NameTranslation"
                },
                Tooltip = new BaseEntry()
                {
                    Origin      = "TooltipOrigin",
                    Translation = "TooltipTranslation"
                },
            };

            var newEntry1 = new ItemEntry()
            {
                Name = new BaseEntry()
                {
                    Origin      = "AnotherNameOrigin",
                    Translation = "AnotherNameTranslation"
                },
                Tooltip = new BaseEntry()
                {
                    Origin      = "AnotherTooltipOrigin",
                    Translation = "AnotherTooltipTranslation"
                },
            };

            service.UpdateEntry("Key", oldEntry1, newEntry1, logger1);

            logger1.Changed.Count.Should().Be(2);
            oldEntry1.Name.Origin.Should().Be("AnotherNameOrigin");
            oldEntry1.Name.Translation.Should().Be("NameTranslation");
            oldEntry1.Tooltip.Origin.Should().Be("AnotherTooltipOrigin");
            oldEntry1.Tooltip.Translation.Should().Be("TooltipTranslation");


            var logger2 = new UpdateLogger();

            var oldEntry2 = new ItemEntry()
            {
                Name = new BaseEntry()
                {
                    Origin      = "NameOrigin",
                    Translation = "NameTranslation"
                },
                Tooltip = new BaseEntry()
                {
                    Origin      = "TooltipOrigin",
                    Translation = "TooltipTranslation"
                },
            };

            var newEntry2 = new ItemEntry()
            {
                Name = new BaseEntry()
                {
                    Origin      = "NameOrigin",
                    Translation = "AnotherNameTranslation"
                },
                Tooltip = new BaseEntry()
                {
                    Origin      = "TooltipOrigin",
                    Translation = "AnotherTooltipTranslation"
                },
            };

            service.UpdateEntry("Key", oldEntry2, newEntry2, logger2);

            logger2.Changed.Count.Should().Be(0);
            oldEntry2.Name.Origin.Should().Be("NameOrigin");
            oldEntry2.Name.Translation.Should().Be("NameTranslation");
            oldEntry2.Tooltip.Origin.Should().Be("TooltipOrigin");
            oldEntry2.Tooltip.Translation.Should().Be("TooltipTranslation");
        }
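
        // Unit test: the same updater at file level - new keys are added, entries whose Origin changed
        // are updated while their translations are preserved, and keys missing from the new file are
        // logged as removed but kept in the old file.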
        public void UpdateFile_Correct()
        {
            var service = new BasicFileUpdater <BasicItemFile>();

            var logger = new UpdateLogger();

            var oldFile = new BasicItemFile()
            {
                Items = new Dictionary <string, ItemEntry>()
                {
                    {
                        "Key1", new ItemEntry()
                        {
                            Name = new BaseEntry()
                            {
                                Origin      = "NameOrigin1",
                                Translation = "NameTranslation1"
                            },
                            Tooltip = new BaseEntry()
                            {
                                Origin      = "TooltipOrigin1",
                                Translation = "TooltipTranslation1"
                            },
                        }
                    },
                    {
                        "Key2", new ItemEntry()
                        {
                            Name = new BaseEntry()
                            {
                                Origin      = "NameOrigin2",
                                Translation = "NameTranslation2"
                            },
                            Tooltip = new BaseEntry()
                            {
                                Origin      = "TooltipOrigin2",
                                Translation = "TooltipTranslation2"
                            },
                        }
                    },
                    {
                        "Key3", new ItemEntry()
                        {
                            Name = new BaseEntry()
                            {
                                Origin      = "NameOrigin3",
                                Translation = "NameTranslation3"
                            },
                            Tooltip = new BaseEntry()
                            {
                                Origin      = "TooltipOrigin3",
                                Translation = "TooltipTranslation3"
                            },
                        }
                    },
                }
            };

            var newFile = new BasicItemFile()
            {
                Items = new Dictionary <string, ItemEntry>()
                {
                    {
                        "Key1", new ItemEntry()
                        {
                            Name = new BaseEntry()
                            {
                                Origin      = "AnotherNameOrigin1",
                                Translation = "AnotherNameTranslation1"
                            },
                            Tooltip = new BaseEntry()
                            {
                                Origin      = "AnotherTooltipOrigin1",
                                Translation = "AnotherTooltipTranslation1"
                            },
                        }
                    },
                    {
                        "Key3", new ItemEntry()
                        {
                            Name = new BaseEntry()
                            {
                                Origin      = "NameOrigin3",
                                Translation = "AnotherNameTranslation3"
                            },
                            Tooltip = new BaseEntry()
                            {
                                Origin      = "TooltipOrigin3",
                                Translation = "AnotherTooltipTranslation3"
                            },
                        }
                    },
                    {
                        "Key4", new ItemEntry()
                        {
                            Name = new BaseEntry()
                            {
                                Origin      = "NameOrigin4",
                                Translation = "NameTranslation4"
                            },
                            Tooltip = new BaseEntry()
                            {
                                Origin      = "TooltipOrigin4",
                                Translation = "TooltipTranslation4"
                            },
                        }
                    },
                }
            };

            service.Update(oldFile, newFile, logger);

            logger.Added.Count.Should().Be(1);
            logger.Changed.Count.Should().Be(2);
            logger.Removed.Count.Should().Be(1);

            oldFile.Items.Count.Should().Be(4);
            oldFile.Items["Key1"].Name.Origin.Should().Be("AnotherNameOrigin1");
            oldFile.Items["Key1"].Name.Translation.Should().Be("NameTranslation1");
            oldFile.Items["Key1"].Tooltip.Origin.Should().Be("AnotherTooltipOrigin1");
            oldFile.Items["Key1"].Tooltip.Translation.Should().Be("TooltipTranslation1");
            oldFile.Items["Key3"].Name.Origin.Should().Be("NameOrigin3");
            oldFile.Items["Key3"].Name.Translation.Should().Be("NameTranslation3");
            oldFile.Items["Key3"].Tooltip.Origin.Should().Be("TooltipOrigin3");
            oldFile.Items["Key3"].Tooltip.Translation.Should().Be("TooltipTranslation3");
            oldFile.Items["Key4"].Name.Origin.Should().Be("NameOrigin4");
            oldFile.Items["Key4"].Name.Translation.Should().Be("NameTranslation4");
            oldFile.Items["Key4"].Tooltip.Origin.Should().Be("TooltipOrigin4");
            oldFile.Items["Key4"].Tooltip.Translation.Should().Be("TooltipTranslation4");
        }
Example 12
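        // Streams an entire MySQL table through a data reader, materialising each row into an ITable
        // instance via reflection; when NOTIFY_PROGRESS is enabled an UpdateLogger shows the
        // percentage of rows loaded.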
        private void ReadTable(MySqlConnection connection, string parameter)
        {
            long rowCount = 0;

            if (NOTIFY_PROGRESS)
            {
                try
                {
                    rowCount = Count(connection);
                }
                catch (Exception ex)
                {
                    Logger.Write("Unable to read table " + TableName + " " + ex, MessageState.WARNING);
                    AskForStructureRebuild(connection, parameter);
                }
            }
            lock (DatabaseManager.SyncRoot)
            {
                using (var command = new MySqlCommand(parameter, connection))
                {
                    try
                    {
                        this.m_reader = command.ExecuteReader();
                    }
                    catch (Exception ex)
                    {
                        this.m_reader?.Close();
                        Logger.Write("Unable to read table " + TableName + " " + ex, MessageState.WARNING);
                        AskForStructureRebuild(connection, parameter);
                        return;
                    }

                    UpdateLogger updateLogger = null;

                    if (NOTIFY_PROGRESS)
                    {
                        updateLogger = new UpdateLogger();
                    }

                    double n = 0;

                    while (this.m_reader.Read())
                    {
                        var obj = new object[this.Properties.Length];
                        for (var i = 0; i < this.m_reader.FieldCount; i++)
                        {
                            obj[i] = ConvertObject(this.m_reader[i], Properties[i]);
                        }

                        var itable = (ITable)Activator.CreateInstance(Type); // expressions?

                        for (int i = 0; i < Properties.Length; i++)
                        {
                            Properties[i].SetValue(itable, obj[i]);
                        }

                        this.Elements.Add(itable.Id, itable);

                        if (NOTIFY_PROGRESS)
                        {
                            n++;
                            double ratio = (n / rowCount) * 100;
                            updateLogger.Update((int)ratio);
                        }
                    }
                    this.m_reader.Close();

                    updateLogger?.End();
                }
            }
        }
Example 13
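        // Unit test: LdstrFileUpdater merges instruction lists per key - existing instructions keep
        // their translations, new origins are appended with empty translations, and new keys are
        // added as whole entries.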
        public void Update_Correct()
        {
            var service = new LdstrFileUpdater();

            var logger = new UpdateLogger();

            var oldFile = new LdstrFile()
            {
                LdstrEntries = new Dictionary <string, LdstrEntry>()
                {
                    { "Key1", new LdstrEntry()
                      {
                          Instructions = new List <BaseEntry>()
                          {
                              new BaseEntry()
                              {
                                  Origin = "Origin1", Translation = "Translation1"
                              },
                              new BaseEntry()
                              {
                                  Origin = "Origin2", Translation = "Translation2"
                              },
                          }
                      } },
                    { "Key2", new LdstrEntry()
                      {
                          Instructions = new List <BaseEntry>()
                          {
                              new BaseEntry()
                              {
                                  Origin = "Origin3", Translation = "Translation3"
                              },
                              new BaseEntry()
                              {
                                  Origin = "Origin4", Translation = "Translation4"
                              },
                          }
                      } },
                }
            };

            var newFile = new LdstrFile()
            {
                LdstrEntries = new Dictionary <string, LdstrEntry>()
                {
                    { "Key1", new LdstrEntry()
                      {
                          Instructions = new List <BaseEntry>()
                          {
                              new BaseEntry()
                              {
                                  Origin = "Origin1", Translation = ""
                              },
                              new BaseEntry()
                              {
                                  Origin = "AnotherOrigin1", Translation = ""
                              },
                              new BaseEntry()
                              {
                                  Origin = "Origin5", Translation = ""
                              },
                          }
                      } },
                    { "Key3", new LdstrEntry()
                      {
                          Instructions = new List <BaseEntry>()
                          {
                              new BaseEntry()
                              {
                                  Origin = "Origin6", Translation = ""
                              }
                          }
                      } },
                }
            };

            service.Update(oldFile, newFile, logger);

            logger.Added.Count.Should().Be(1);
            logger.Changed.Count.Should().Be(2);
            logger.Added.Count.Should().Be(1);

            oldFile.LdstrEntries.Count.Should().Be(3);

            oldFile.LdstrEntries["Key1"].Instructions.Count.Should().Be(4);
            oldFile.LdstrEntries["Key1"].Instructions.Should().ContainSingle(i => i.Origin == "Origin1" && i.Translation == "Translation1");
            oldFile.LdstrEntries["Key1"].Instructions.Should().ContainSingle(i => i.Origin == "Origin2" && i.Translation == "Translation2");
            oldFile.LdstrEntries["Key1"].Instructions.Should().ContainSingle(i => i.Origin == "AnotherOrigin1" && i.Translation == "");
            oldFile.LdstrEntries["Key1"].Instructions.Should().ContainSingle(i => i.Origin == "Origin5" && i.Translation == "");

            oldFile.LdstrEntries["Key2"].Instructions.Count.Should().Be(2);
            oldFile.LdstrEntries["Key2"].Instructions.Should().ContainSingle(i => i.Origin == "Origin3" && i.Translation == "Translation3");
            oldFile.LdstrEntries["Key2"].Instructions.Should().ContainSingle(i => i.Origin == "Origin4" && i.Translation == "Translation4");

            oldFile.LdstrEntries["Key3"].Instructions.Count.Should().Be(1);
            oldFile.LdstrEntries["Key3"].Instructions.Should().ContainSingle(i => i.Origin == "Origin6" && i.Translation == "");
        }
Example 14
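        // A variant of BackupDataWithNoIntersection that returns silently when a reader or writer
        // cannot be obtained from the IO pools and writes the UpdateLogger line up front, before the
        // asynchronous copy is started.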
        private void BackupDataWithNoIntersection(
            IEnumerable <KeyValuePair <BackupDataLogKey, BackupDataLogValue> > intersectingLogs,
            BackupDataLogKey logKey, KeyValuePair <long, int> area, OnDisk.File.IFile f, string fFilename,
            ConcurrentIOPoolManager readPool, ConcurrentIOPoolManager writePool,
            RecordKey key)
        {
            string systemBackupFilename = Server.Path + DataBackupFilename;
            int    size = area.Value;

            key.Address = area.Key;
            //if (RegisterAdd(_addStore, null, null, key, size, false))
            //{
            //    Logger.LogLine("Extending, skipping Backup...");
            //    return;
            //}

            //** no intersection nor mergeable logs, add new log!
            //** backup and log the data area
            ConcurrentIOData reader = f != null
                                          ? readPool.GetInstance(f, size)
                                          : readPool.GetInstance(fFilename, null, size);

            ConcurrentIOData writer = writePool.GetInstance(systemBackupFilename, (TransactionRoot)Root, size);

            if (reader == null || writer == null)
            {
                return;
            }

            var logValue = new BackupDataLogValue();

            logValue.DataSize      = size;
            logValue.TransactionId = Id;

            //** todo: can we remove this block:
            //long readerFileSize = reader.FileStream.Length;
            //if (area.Key + size > readerFileSize)
            //{
            //    int appendSize = (int)(area.Key + size - readerFileSize);
            //    key.Address = readerFileSize;
            //    RegisterAdd(_addStore, null, null, key, appendSize, false);
            //    size = (int)(readerFileSize - area.Key);
            //    logValue.DataSize = size;
            //    reader.Buffer = new byte[size];
            //}
            //**

            reader.FileStream.Seek(area.Key, SeekOrigin.Begin);

            logValue.BackupFileHandle  = GetLogBackupFileHandle(DataBackupFilename);
            logValue.BackupDataAddress = writer.FileStream.Seek(0, SeekOrigin.End);

            UpdateLogger.LogLine("{0}{1}:{2} to {3}:{4} Size={5}",
                                 BackupFromToken, f != null ? f.Filename : fFilename, area.Key,
                                 DataBackupFilename, logValue.BackupDataAddress, size);

            // resize the target file to accommodate the data to be copied.
            writer.FileStream.Seek(size, SeekOrigin.End);
            writer.FileStream.Seek(logValue.BackupDataAddress, SeekOrigin.Begin);

            reader.FileStream.BeginRead(
                reader.Buffer, 0, size, ReadCallback,
                new object[] { new[] { reader, writer }, true, logKey }
                );

            //** save a record of the backed up data..
            LogCollection.Add(logKey, logValue);
        }
Example 15
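        // Backs up only the portions of the requested area that are not already covered by
        // intersecting backup log entries, writing an UpdateLogger line for each segment it copies.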
        private void BackupDataWithIntersection(
            IEnumerable <KeyValuePair <BackupDataLogKey, BackupDataLogValue> > intersectingLogs,
            BackupDataLogKey logKey, KeyValuePair <long, int> area, OnDisk.File.IFile f, string fFilename,
            ConcurrentIOPoolManager readPool, ConcurrentIOPoolManager writePool,
            RecordKey key
            )
        {
            if (intersectingLogs == null)
            {
                //** process conflicts with other trans...
                ProcessTransactionConflicts(logKey, area.Value);
                //** area is within an already backed up area (intersectingLogs == null), do nothing...
                return;
            }
            //** get area(s) outside each intersecting segment and back it up...
            var  newRegion      = new Region(area.Key, area.Value);
            bool wasIntersected = false;

            foreach (KeyValuePair <BackupDataLogKey, BackupDataLogValue> entry in intersectingLogs)
            {
                //** process conflicts with other trans...
                ProcessTransactionConflicts(entry.Key, entry.Value.DataSize);
                if (newRegion.Subtract(entry.Key.SourceDataAddress, entry.Value.DataSize))
                {
                    wasIntersected = true;
                }
            }
            //** copy
            if (!wasIntersected)
            {
                return;
            }
            foreach (KeyValuePair <long, int> newArea in newRegion)
            {
                var logKey2 = new BackupDataLogKey();
                logKey2.SourceFilename    = logKey.SourceFilename;
                logKey2.SourceDataAddress = newArea.Key;

                var logValue = new BackupDataLogValue();
                logValue.DataSize      = newArea.Value;
                logValue.TransactionId = Id;

                int newSize = newArea.Value;
                key.Address = newArea.Key;
                //if (RegisterAdd(_addStore, null, null, key, newArea.Value, false))
                //    return;

                logValue.BackupFileHandle = GetLogBackupFileHandle(DataBackupFilename);
                ConcurrentIOData reader = f != null
                                              ? readPool.GetInstance(f, newArea.Value)
                                              : readPool.GetInstance(fFilename, null, newArea.Value);

                if (reader == null)
                {
                    throw new InvalidOperationException("Can't get ConcurrentIOData from ReadPool");
                }
                string           systemBackupFilename = Server.Path + DataBackupFilename;
                ConcurrentIOData writer = writePool.GetInstance(systemBackupFilename, ((TransactionRoot)Root),
                                                                newArea.Value);
                if (writer == null)
                {
                    throw new InvalidOperationException("Can't get ConcurrentIOData from WritePool");
                }

                logValue.BackupDataAddress = writer.FileStream.Seek(0, SeekOrigin.End);


                //** todo: can we remove this block:
                //long readerFileSize = reader.FileStream.Length;
                //if (newArea.Key + newArea.Value > readerFileSize)
                //{
                //    int appendSize = (int)(newArea.Key + newArea.Value - readerFileSize);
                //    key.Address = readerFileSize;
                //    RegisterAdd(_addStore, null, null, key, appendSize, false);
                //    newSize = (int)(readerFileSize - newArea.Key);
                //    logValue.DataSize = newSize;
                //    reader.Buffer = new byte[newSize];
                //}
                //**


                reader.FileStream.Seek(newArea.Key, SeekOrigin.Begin);
                UpdateLogger.LogLine(
                    "{0}{1}:{2} to {3}:{4} Size={5}", BackupFromToken, logKey2.SourceFilename,
                    logKey2.SourceDataAddress,
                    DataBackupFilename, logValue.BackupDataAddress, newSize);

                // resize the target file to accommodate the data to be copied.
                writer.FileStream.Seek(newSize, SeekOrigin.End);
                writer.FileStream.Seek(logValue.BackupDataAddress, SeekOrigin.Begin);

                reader.FileStream.BeginRead(
                    reader.Buffer, 0, newSize, ReadCallback,
                    new object[] { new[] { reader, writer }, true, logKey2 }
                    );

                //** save a record of the backed up data..
                LogCollection.Add(logKey2, logValue);
            }
        }
Example 16
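        // Another variant of BackupDataWithIntersection: it rethrows pending exceptions from the
        // async IO pools, grows the backup file via GrowBackupFile, and defers the UpdateLogger.LogLine
        // call into a delegate passed to the read callback.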
        private void BackupDataWithIntersection(
            IEnumerable <KeyValuePair <BackupDataLogKey, BackupDataLogValue> > intersectingLogs,
            BackupDataLogKey logKey, KeyValuePair <long, int> area, OnDisk.File.IFile f, string fFilename,
            ConcurrentIOPoolManager readPool, ConcurrentIOPoolManager writePool,
            RecordKey key
            )
        {
            if (intersectingLogs == null)
            {
                // process conflicts with other trans...
                //ProcessTransactionConflicts(logKey, area.Value);
                // area is within an already backed up area (intersectingLogs == null), do nothing...
                return;
            }
            LogTracer.Verbose("BackupDataWithIntersection: Start for Thread {0}.", Thread.CurrentThread.ManagedThreadId);

            // get area(s) outside each intersecting segment and back it up...
            var newRegion = new Region(area.Key, area.Value);

            #region for future implements... ?
            //bool wasIntersected = false;
            //foreach (KeyValuePair<BackupDataLogKey, BackupDataLogValue> entry in intersectingLogs)
            //{
            //    // process conflicts with other trans...
            //    ProcessTransactionConflicts(entry.Key, entry.Value.DataSize);
            //    if (newRegion.Subtract(entry.Key.SourceDataAddress, entry.Value.DataSize))
            //        wasIntersected = true;
            //}
            //if (!wasIntersected) return;
            #endregion

            // copy modified blocks to the transaction backup file.
            foreach (KeyValuePair <long, int> newArea in newRegion)
            {
                if (readPool.AsyncThreadException != null)
                {
                    throw readPool.AsyncThreadException;
                }
                if (writePool.AsyncThreadException != null)
                {
                    throw writePool.AsyncThreadException;
                }

                var logKey2 = new BackupDataLogKey();
                logKey2.SourceFilename    = logKey.SourceFilename;
                logKey2.SourceDataAddress = newArea.Key;

                var logValue = new BackupDataLogValue();
                logValue.DataSize      = newArea.Value;
                logValue.TransactionId = Id;

                int newSize = newArea.Value;
                key.Address = newArea.Key;
                //if (RegisterAdd(_addBlocksStore, null, null, key, newArea.Value, false))
                //    return;

                logValue.BackupFileHandle = GetLogBackupFileHandle(DataBackupFilename);
                ConcurrentIOData reader = f != null
                                              ? readPool.GetInstance(f, newArea.Value)
                                              : readPool.GetInstance(fFilename, null, newArea.Value);

                if (reader == null)
                {
                    throw new InvalidOperationException("Can't get ConcurrentIOData from ReadPool");
                }
                string           systemBackupFilename = Server.Path + DataBackupFilename;
                ConcurrentIOData writer = writePool.GetInstance(systemBackupFilename, ((TransactionRoot)Root));
                if (writer == null)
                {
                    throw new InvalidOperationException("Can't get ConcurrentIOData from WritePool");
                }

                // return the current backup file size and grow it to make room for data to be backed up...
                logValue.BackupDataAddress = GrowBackupFile(newSize, writer.FileStream);

                // save a record of the backed up data..
                LogCollection.Add(logKey2, logValue);

                // prepare lambda expression to log after data was backed up!!
                Sop.VoidFunc logBackedupData = () =>
                {
                    UpdateLogger.LogLine(
                        "{0}{1}:{2} to {3}:{4} Size={5}", BackupFromToken, logKey2.SourceFilename,
                        logKey2.SourceDataAddress, DataBackupFilename, logValue.BackupDataAddress, newSize);
                };

                writer.FileStream.Seek(logValue.BackupDataAddress, SeekOrigin.Begin, true);
                reader.FileStream.Seek(newArea.Key, SeekOrigin.Begin, true);
                reader.FileStream.BeginRead(
                    reader.Buffer, 0, newSize, ReadCallback,
                    new object[] { new[] { reader, writer }, true, logKey2, logBackedupData });
            }
        }