Example #1
        public Task <Stream> OpenRead(BackupItem item)
        {
            string constring = $"server={Server};user={Username};pwd={Password};database={Database};SslMode=none;";

            // Important Additional Connection Options
            constring += "charset=utf8;convertzerodatetime=true;";

            memoryStream = new MemoryStream();

            using (MySqlConnection conn = new MySqlConnection(constring))
            {
                using (MySqlCommand cmd = new MySqlCommand {
                    Connection = conn
                })
                {
                    using (MySqlBackup mb = new MySqlBackup(cmd))
                    {
                        conn.Open();
                        mb.ExportToMemoryStream(memoryStream);
                    }
                }
            }

            memoryStream.Seek(0, SeekOrigin.Begin);

            return(Task.FromResult(memoryStream as Stream));
        }
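The stream returned above is the full MySqlBackup dump held in memory; the caller decides how to persist it. A minimal usage sketch follows (the provider class name MySqlSource and the single-argument BackupItem constructor are assumptions, not shown on this page):

        public async Task SaveDumpToDisk(string destinationFile)
        {
            var source = new MySqlSource();                     // hypothetical name of the class containing OpenRead
            BackupItem item = new BackupItem("mydatabase.sql"); // assumed constructor, mirroring Example #3

            using (Stream dump = await source.OpenRead(item))
            using (FileStream file = File.Create(destinationFile))
            {
                // Copy the in-memory dump to disk.
                await dump.CopyToAsync(file);
            }
        }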
Example #2
        /// <summary>
        /// Updates the HashCode for the current item if the origin and backup fields are populated.
        /// If either is invalid, reset the hashCode on the item.
        /// </summary>
        /// <param name="index"></param>
        private void UpdateItemHash(BackupItem backupItem)
        {
            if (!string.IsNullOrWhiteSpace(backupItem.OriginPath) &&
                !string.IsNullOrWhiteSpace(backupItem.BackupPath))
            {
                //Debug.WriteLine(string.Format("Hashing: '{0}' and '{1}'", backupItem.OriginPath, backupItem.BackupPath));

                // Remove the hashcode associated with the current hash before we get rid of it.
                TaskManager.DequeueBackupJob(backupItem);

                // Then calculate the new hash for the object.
                backupItem.HashCode = Hasher.StringHasher(
                    backupItem.OriginPath +
                    backupItem.BackupPath);

                // Check for duplicate origin and backup path items.
                if (!ReinitializeDuplicateBackups())
                {
                    // If there are no duplicates, then check if the item is enabled.
                    // If it is, we need to queue a new job with the new hash.
                    if (backupItem.BackupEnabled)
                    {
                        TaskManager.QueueBackupJob(backupItem);
                    }
                }
            }
            else
            {
                backupItem.HashCode = "";
            }
        }
Example #3
        public Task <IEnumerable <BackupItem> > GetItems(DateTime? lastRun)
        {
            BackupItem database            = new BackupItem($"{Database}.bak");
            IEnumerable <BackupItem> items = new BackupItem[] { database };

            return(Task.FromResult(items));
        }
Example #4
        /// <remarks>
        ///     Full backup action sequence, from https://technet.microsoft.com/en-us/magazine/2009.07.sqlbackup.aspx :
        ///         1. Force a database checkpoint and make a note of the log sequence number at this point. This flushes all updated-in-memory pages to disk
        ///             before anything is read by the backup to help minimize the amount of work the recovery part of restore has to do.
        ///         2. Start reading from the data files in the database.
        ///         3. Stop reading from the data files and make a note of the log sequence number of the start of the oldest active transaction at that point
        ///             (see my article "Understanding Logging and Recovery in SQL Server" for an explanation of these terms).
        ///         4. Read as much transaction log as is necessary.
        ///     ...
        ///         Backing up enough of the transaction log is required so that recovery can successfully run during the restore and so that all pages in
        ///         the database are at the same point in time—the time at which the data reading portion of the backup operation completed (Point 7).
        /// </remarks>
        public List <BackupItem> GetBackupItems(FileInfo file)
        {
            var fileInfo = FileNamingConvention.GetBackupFileInfo(file);

            List <BackupItem> result;

            if (fileInfo.IsLog)
            {
                // first backup in a day is diff or full

                result = new List <BackupItem>(LogBackupsPerDay);

                for (var pos = 1; pos <= LogBackupsPerDay; ++pos)
                {
                    var backupTime = fileInfo.StartTime.Date.Add(FirstBackupTime).AddMinutes(pos * BackupIntervalMinutes);

                    var item = new BackupItem()
                    {
                        BackupEndTime     = backupTime.AddSeconds(LogBackupDurationSeconds),
                        BackupStartTime   = backupTime,
                        BackupType        = BackupType.Log,
                        DatabaseName      = DatabaseName,
                        DatabaseBackupLsn = GetDatabaseBackupLsn(backupTime),
                        Position          = pos,
                        FirstLsn          = GetLogBackupFirstLsn(backupTime),
                        LastLsn           = GetLogBackupLastLsn(backupTime),
                        RecoveryModel     = Microsoft.SqlServer.Management.Smo.RecoveryModel.Full,
                        FileInfo          = file
                    };

                    item.CheckpointLsn = item.FirstLsn;
                    result.Add(item);
                }
            }
            else
            {
                var backupTime    = fileInfo.StartTime.Date.Add(FirstBackupTime);
                var backupEndTime = backupTime.AddSeconds(fileInfo.IsFull ? FullBackupDurationSeconds : DiffBackupDurationSeconds);

                var item = new BackupItem()
                {
                    BackupEndTime     = backupEndTime,
                    BackupStartTime   = backupTime,
                    BackupType        = fileInfo.IsFull ? BackupType.Full : BackupType.DifferentialDatabase,
                    DatabaseName      = DatabaseName,
                    DatabaseBackupLsn = GetDatabaseBackupLsn(backupTime),
                    Position          = 1,
                    FirstLsn          = GetLastLsn(backupTime),
                    LastLsn           = GetLastLsn(backupEndTime),
                    RecoveryModel     = Microsoft.SqlServer.Management.Smo.RecoveryModel.Full,
                    FileInfo          = file
                };
                item.CheckpointLsn = item.FirstLsn;

                result = new List <BackupItem>(1);
                result.Add(item);
            }

            return(result);
        }
Example #5
        /// <summary>
        /// Update the NextBackupDate of the BackupItem according to HashCode.
        /// Increments the NextBackupDate by one BackupInterval of time.
        /// </summary>
        public static void UpdateNextBackupDate(string hashCode)
        {
            BackupItem itemToUpdate = null;

            foreach (BackupItem backupItem in _ActiveViewModel.BackupInfo)
            {
                if (backupItem.HashCode == hashCode)
                {
                    itemToUpdate = backupItem;
                    break;
                }
            }

            if (itemToUpdate == null)
            {
                return;
            }

            // Calculate the number of days to add, then create the new DateTime object.
            int      daysToAdd      = itemToUpdate.BackupFrequency * itemToUpdate.BackupPeriod;
            DateTime newDateAndTime = new DateTime(itemToUpdate.NextBackupDate.Year,
                                                   itemToUpdate.NextBackupDate.Month,
                                                   itemToUpdate.NextBackupDate.Day,
                                                   itemToUpdate.BackupTime.Hour,
                                                   itemToUpdate.BackupTime.Minute,
                                                   00).AddDays(daysToAdd);

            // Update the NextBackupDate with the new object/value and notify the UI.
            itemToUpdate.NextBackupDate = newDateAndTime;
        }
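As a concrete illustration of the arithmetic above (assuming BackupPeriod is a number of days and BackupFrequency a multiplier, which is how the method treats them), an item with BackupFrequency = 2 and BackupPeriod = 7 is pushed forward by 14 days while keeping its configured time of day:

        // Illustrative values only; the property semantics are inferred from the method above.
        int daysToAdd = 2 * 7;                               // BackupFrequency * BackupPeriod = 14 days

        DateTime next = new DateTime(2024, 3, 1,             // NextBackupDate's date part
                                     22, 30, 00)             // BackupTime's hour and minute
                        .AddDays(daysToAdd);                 // => 2024-03-15 22:30:00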
Example #6
        public static AzureWebAppBackup BackupItemToAppBackup(BackupItem backup, string resourceGroupName, string name, string slot)
        {
            if (backup == null)
            {
                return(new AzureWebAppBackup());
            }
            var    dbs    = backup.Databases == null ? null : backup.Databases.ToArray();
            string status = backup.Status == null ? null : backup.Status.ToString();

            return(new AzureWebAppBackup
            {
                ResourceGroupName = resourceGroupName,
                Name = name,
                Slot = slot,
                StorageAccountUrl = backup.StorageAccountUrl,
                BlobName = backup.BlobName,
                Databases = dbs,
#if !NETSTANDARD
                BackupId = backup.BackupItemId,
#else
                BackupId = backup.BackupId,
#endif
                BackupName = backup.BackupItemName,
                BackupStatus = status,
                Scheduled = backup.Scheduled,
                BackupSizeInBytes = backup.SizeInBytes,
                WebsiteSizeInBytes = backup.WebsiteSizeInBytes,
                Created = backup.Created,
                LastRestored = backup.LastRestoreTimeStamp,
                Finished = backup.FinishedTimeStamp,
                Log = backup.Log,
                CorrelationId = backup.CorrelationId
            });
        }
Example #7
        /// <summary>
        /// Update the NextBackupDate of the current item if a new frequency, period, or time of day is specified.
        /// </summary>
        private void HandleIntervalChanged(BackupItem itemToUpdate)
        {
            // Remove any backup jobs associated with the old hash.
            TaskManager.DequeueBackupJob(itemToUpdate.HashCode);

            // Calculate the number of days to add, then create the new DateTime object.
            int daysToAdd = itemToUpdate.BackupFrequency * itemToUpdate.BackupPeriod;

            DateTime newDateAndTime = new DateTime(itemToUpdate.LastBackupDate.Year,
                                                   itemToUpdate.LastBackupDate.Month,
                                                   itemToUpdate.LastBackupDate.Day,
                                                   itemToUpdate.BackupTime.Hour,
                                                   itemToUpdate.BackupTime.Minute,
                                                   00).AddDays(daysToAdd);

            // Update the NextBackupDate with the new object/value and notify the UI.
            itemToUpdate.NextBackupDate = newDateAndTime;
            // Also update the BackupInterval.
            itemToUpdate.BackupInterval = new TimeSpan(
                daysToAdd,
                itemToUpdate.BackupTime.Hour,
                itemToUpdate.BackupTime.Minute,
                0
                );

            // Re-enable the job if BackupEnabled is set to true.
            QueueJobByBackupItem(itemToUpdate);
        }
Example #8
        /// <summary>
        /// Toggles the job associated with the given BackupItem.
        /// </summary>
        public static void QueueJobByBackupItem(BackupItem backupItem)
        {
            if (backupItem == null || string.IsNullOrWhiteSpace(backupItem.HashCode) || backupItem.HashCode.Length != 64)
            {
                return;
            }

            if (backupItem.BackupEnabled)
            {
                // We do not know how long ago the previous job was de-activated.
                // If the job is enabled and the date has passed, the copy job will run immediately.
                // To prevent this, recalculate the next backup date of the newly enabled item if its NextBackupDate is before the current date and time.
                // Keep going until the NextBackupDate is after the current date and time if necessary.
                while (backupItem.NextBackupDate < DateTime.Now)
                {
                    UpdateNextBackupDate(backupItem.HashCode);
                }
                TaskManager.QueueBackupJob(backupItem);

                return;
            }
            else
            {
                TaskManager.DequeueBackupJob(backupItem.HashCode);
            }
        }
Example #9
        public override void OnNavigatedTo()
        {
            base.OnNavigatedTo();

            _logger.LogDebug("Syncing Properties with {type}: '{name}'", BackupItem.TypeName(), BackupItem.Name);
            PropertySync.Sync(BackupItem, this);
        }
Example #10
        /// <summary>
        /// Delete the selected BackupInfo item via HashCode from the collection if applicable.
        /// If the code does not match anything in the list, then we have an orphan; stop the job.
        /// </summary>
        public static void DeleteBackupItemByHashCode(string hashCode)
        {
            Debug.WriteLine("Attempting to remove job with hash: " + hashCode);

            if (hashCode.Length != 64)
            {
                return;
            }

            BackupItem itemToRemove = null;

            foreach (BackupItem backupItem in _ActiveViewModel.BackupInfo)
            {
                Debug.WriteLine(string.Format("Checking against hash: ", backupItem.HashCode.Substring(0, 5)));
                if (backupItem.HashCode == hashCode)
                {
                    itemToRemove = backupItem;
                    break;
                }
            }

            // If the item exists, remove it.
            if (itemToRemove != null)
            {
                Debug.WriteLine("Removing item from list.");
                _ActiveViewModel.BackupInfo.Remove(itemToRemove);
                _ActiveViewModel.SelectedBackupItemIndex = _ActiveViewModel.BackupInfo.Count - 1;
            }
        }
Example #11
        public async Task TransformItem(BackupItem output, Stream outputStream, Dictionary <BackupItem, Stream> inputFiles)
        {
            byte[] password = Encoding.Unicode.GetBytes(EncyptionKey);
            byte[] salt     = GenerateRandomBytes();

            using (Rfc2898DeriveBytes encryptionKey = new Rfc2898DeriveBytes(password, salt, ITERATIONS))
            {
                var keyBytes = encryptionKey.GetBytes(KEYSIZE / 8);

                using (RijndaelManaged rmCrypto = new RijndaelManaged())
                {
                    rmCrypto.BlockSize = KEYSIZE;
                    rmCrypto.Mode      = CipherMode.CBC;
                    rmCrypto.Padding   = PaddingMode.PKCS7;

                    using (ICryptoTransform encryptor = rmCrypto.CreateEncryptor(keyBytes, salt))
                    {
                        outputStream.Write(salt, 0, salt.Length);

                        CryptoStream cs = new CryptoStream(outputStream, encryptor, CryptoStreamMode.Write);

                        foreach (var input in inputFiles)
                        {
                            await input.Value.CopyToAsync(cs);
                        }

                        cs.FlushFinalBlock();
                        streams.Add(cs);
                    }
                }
            }
        }
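A matching decryption routine is not among these examples. The sketch below is a hypothetical counterpart: it assumes the same EncyptionKey, KEYSIZE and ITERATIONS values as the transform above, and that GenerateRandomBytes() returns KEYSIZE / 8 bytes, since the salt written at the start of the stream doubles as the CBC IV.

        public async Task RestoreItem(Stream encryptedInput, Stream plainOutput)
        {
            byte[] password = Encoding.Unicode.GetBytes(EncyptionKey);

            // The salt/IV was written unencrypted at the start of the stream by TransformItem.
            byte[] salt = new byte[KEYSIZE / 8];
            int read = 0;
            while (read < salt.Length)
            {
                int n = encryptedInput.Read(salt, read, salt.Length - read);
                if (n == 0) throw new EndOfStreamException("Missing salt header.");
                read += n;
            }

            using (Rfc2898DeriveBytes encryptionKey = new Rfc2898DeriveBytes(password, salt, ITERATIONS))
            using (RijndaelManaged rmCrypto = new RijndaelManaged())
            {
                rmCrypto.BlockSize = KEYSIZE;
                rmCrypto.Mode      = CipherMode.CBC;
                rmCrypto.Padding   = PaddingMode.PKCS7;

                byte[] keyBytes = encryptionKey.GetBytes(KEYSIZE / 8);

                using (ICryptoTransform decryptor = rmCrypto.CreateDecryptor(keyBytes, salt))
                using (CryptoStream cs = new CryptoStream(encryptedInput, decryptor, CryptoStreamMode.Read))
                {
                    // Decrypt the remainder of the stream straight into the output.
                    await cs.CopyToAsync(plainOutput);
                }
            }
        }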
Example #12
        public Task <MappedBackupItemList> MapInput(IEnumerable <BackupItem> input)
        {
            MappedBackupItemList result = new MappedBackupItemList();

            if (CreateSingleFile)
            {
                BackupItem zippedBackupItem = new BackupItem();
                zippedBackupItem.Name = $"{OutputFile}.zip";
                zippedBackupItem.Path = "";

                result.Add(zippedBackupItem, input);
            }
            else
            {
                foreach (BackupItem file in input)
                {
                    BackupItem zippedBackupItem = new BackupItem();
                    zippedBackupItem.Name = $"{file.Name}.zip";
                    zippedBackupItem.Path = file.Path;

                    result.Add(zippedBackupItem, file);
                }
            }

            return(Task.FromResult(result));
        }
Example #13
        /// <summary>
        /// For testing purposes.
        /// </summary>
        /// <returns></returns>
        private async Task TestTasks()
        {
            TaskManager.InitScheduler();


            BackupItem testItem = new BackupItem
            {
                OriginPath     = @"E:\Test Origin\test 1 2 3\",
                BackupPath     = @"E:\Test Backup\",
                BackupInterval = TimeSpan.FromSeconds(10),
                NextBackupDate = DateTime.Now.AddSeconds(3)
            };

            testItem.HashCode = Hasher.StringHasher(testItem.OriginPath + testItem.BackupPath);

            _BackupInfo.Add(testItem);
            _BackupInfo[0].PropertyChanged += ModelPropertyChanged;
            await TaskManager.QueueBackupJob(testItem);


            testItem = new BackupItem()
            {
                OriginPath     = @"E:\Test Origin\The Viewer.exe",
                BackupPath     = @"E:\Test Backup\",
                BackupInterval = TimeSpan.FromSeconds(10),
                NextBackupDate = DateTime.Now.AddSeconds(4)
            };
            testItem.HashCode = Hasher.StringHasher(testItem.OriginPath + testItem.BackupPath);

            _BackupInfo.Add(testItem);
            _BackupInfo[1].PropertyChanged += ModelPropertyChanged;
            await TaskManager.QueueBackupJob(testItem);
        }
Example #14
        /// <summary>
        /// If the NextBackupDate itself is changed directly, then just reschedule the backup job.
        /// </summary>
        private void HandleNextBackupDateChanged(BackupItem itemToUpdate)
        {
            // Remove any backup jobs associated with the old hash.
            TaskManager.DequeueBackupJob(itemToUpdate);

            // Re-enable the job if BackupEnabled is set to true.
            QueueJobByBackupItem(itemToUpdate);
        }
Example #15
        public static async Task QueueBackupJob(BackupItem backupItem)
        {
            try
            {
                //Debug.WriteLine(backupItem.HashCode);
                // First, check the HashCode of the BackupItem to make sure we have the info to make a job.
                if (string.IsNullOrWhiteSpace(backupItem.HashCode) ||
                    backupItem.HashCode.Length != 64)
                {
                    return;
                }

                // Grab the Scheduler instance from the Factory
                IScheduler scheduler = await _SchedulerFactory.GetScheduler();

                // Check if the job exists.
                JobKey jobID = new JobKey(backupItem.HashCode, "ActiveBackups");

                if (await scheduler.CheckExists(jobID))
                {
                    await scheduler.DeleteJob(jobID);
                }

                // Define the CopyJob.
                IJobDetail job = JobBuilder.Create <BackupJob>()
                                 .WithIdentity(jobID)
                                 .UsingJobData("originPath", backupItem.OriginPath)
                                 .UsingJobData("backupPath", backupItem.BackupPath)
                                 .Build();

                // Setup the job trigger.
                ITrigger trigger = TriggerBuilder.Create()
                                   .WithIdentity(backupItem.HashCode, "ActiveBackups")
                                   .StartAt(backupItem.NextBackupDate)
                                   .WithSimpleSchedule(x => x
                                                       .WithIntervalInHours(backupItem.BackupInterval.Days * 24)
                                                       .RepeatForever()
                                                       .WithMisfireHandlingInstructionFireNow())
                                   .Build();

                // Tell quartz to schedule the job using our trigger.
                await scheduler.ScheduleJob(job, trigger);

                // Update the BackupItem to indicate that its BackupJob has been successfully queued.
                BackupInfoViewModel.SetBackupItemStatus(backupItem.HashCode, (int)StatusCodes.QUEUED);

                BackupInfoViewModel.SaveConfig();

                Debug.WriteLine(string.Format("'{0}' running at: {1} and ticking every {2} days(s)", backupItem.HashCode.Substring(0, 5), backupItem.NextBackupDate, backupItem.BackupInterval.Days));

                //Debug.WriteLine("Job queued, saving config...");
            }
            catch (Exception e)
            {
                Debug.WriteLine("QueueBackupJob: " + e.Message);
            }
        }
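The BackupJob type that JobBuilder.Create<BackupJob>() schedules is not shown on this page. A minimal sketch of what such a Quartz job could look like, assuming it only needs the two JobDataMap entries set above (the copy logic here is purely illustrative):

        public class BackupJob : IJob
        {
            public Task Execute(IJobExecutionContext context)
            {
                // Keys match the UsingJobData calls in QueueBackupJob above.
                string originPath = context.MergedJobDataMap.GetString("originPath");
                string backupPath = context.MergedJobDataMap.GetString("backupPath");

                // Illustrative copy: mirror the top-level files of the origin into the backup folder.
                foreach (string source in Directory.EnumerateFiles(originPath))
                {
                    string destination = Path.Combine(backupPath, Path.GetFileName(source));
                    File.Copy(source, destination, overwrite: true);
                }

                return Task.CompletedTask;
            }
        }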
Example #16
        private BackupItemViewModel GetBackupItemViewModel(BackupItem backupItem)
        {
            _logger.LogDebug("Creating {backupvm} for {backupitem}: '{backupitem}'", nameof(BackupItemViewModel), nameof(BackupItem), backupItem.Name);
            BackupItemViewModel backupItemVM = _container.Get <BackupItemViewModel>();

            backupItemVM.BackupItem = backupItem;
            backupItemVM.OnNavigatedTo();

            return(backupItemVM);
        }
Example #17
 public async Task TransformItem(BackupItem output, Stream outputStream, Dictionary <BackupItem, Stream> inputFiles)
 {
     using (ZipArchive zipArchive = new ZipArchive(outputStream, ZipArchiveMode.Create, true))
     {
         foreach (KeyValuePair <BackupItem, Stream> file in inputFiles)
         {
             ZipArchiveEntry entry = zipArchive.CreateEntry(file.Key.FullPath);
             using (var entryStream = entry.Open())
             {
                 await file.Value.CopyToAsync(entryStream, 4096);
             }
         }
     }
 }
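A hypothetical invocation of the zip transform above (the class name ZipTransform is assumed, and BackupItem is assumed to expose the settable Name/Path and derived FullPath members used elsewhere on this page):

 public async Task ZipSingleFile(string sourceFile, string zipFile)
 {
     var transform = new ZipTransform();   // assumed class name
     var entry     = new BackupItem { Name = Path.GetFileName(sourceFile), Path = "" };

     using (FileStream input = File.OpenRead(sourceFile))
     using (FileStream output = File.Create(zipFile))
     {
         var inputFiles = new Dictionary <BackupItem, Stream> { { entry, input } };

         // The output BackupItem is not used by this transform; only outputStream is written to.
         await transform.TransformItem(new BackupItem { Name = Path.GetFileName(zipFile) }, output, inputFiles);
     }
 }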
Example #18
        public Task <MappedBackupItemList> MapInput(IEnumerable <BackupItem> input)
        {
            MappedBackupItemList result = new MappedBackupItemList();

            foreach (var file in input)
            {
                BackupItem encryptedBackupItem = new BackupItem();
                encryptedBackupItem.Name = $"{file.Name}.dat";
                encryptedBackupItem.Path = file.Path;

                result.Add(encryptedBackupItem, file);
            }

            return(Task.FromResult(result));
        }
Example #19
        public async Task <bool> StoreItem(BackupItem item, Stream source)
        {
            string path = Path.Combine(Folder, item.FullPath);

            if (item.FullPath.StartsWith("/"))
            {
                path = Path.Combine(Folder, item.FullPath.Substring(1));
            }

            var client = await Client.GetClient();

            await client.Drive.Root.ItemWithPath(path).Content.Request().PutAsync <Item>(source);

            return(true);
        }
Example #20
        public async Task <bool> StoreItem(BackupItem item, Stream source)
        {
            string path = Path.Combine(TargetFolder, item.FullPath);

            if (item.FullPath.StartsWith("/"))
            {
                path = Path.Combine(TargetFolder, item.FullPath.Substring(1));
            }

            using (Stream target = File.OpenWrite(path))
            {
                await source.CopyToAsync(target);
            }
            return(true);
        }
Example #21
        public override void Process(ActionData actionData)
        {
            string asString = actionData.GetAsString("FileToRestore");

            if (!string.IsNullOrEmpty(asString))
            {
                try
                {
                    BackupItem backupItem = new BackupItem(asString);
                    this._backupHelper.RestoreBackup(backupItem.DatabaseType, DatabaseToolsSettings.Settings.DatabaseName, backupItem.FilePath, true);
                }
                catch (Exception exception)
                {
                    MessageBox.Show(exception.Message, "Database Backup Module", MessageBoxButton.OK, MessageBoxImage.Hand);
                }
            }
        }
Example #22
        public static void ExecuteBackup(BackupItem backupItem)
        {
            if (!backupItem.Backup)
            {
                LogHandler.WriteLog("BackUp", "disabled", 3);
                return;
            }
            else
            {
                LogHandler.WriteLog("BackUp", "enabled", 3);
            }

            if (!File.Exists(backupItem.SourcePath) && !Directory.Exists(backupItem.SourcePath))
            {
                LogHandler.WriteLog("Source path", backupItem.SourcePath + " - Directory(File) not found", 3);
                return;
            }
            else
            {
                LogHandler.WriteLog("Source path", backupItem.SourcePath, 3);
            }

            if (!Directory.Exists(Path.GetDirectoryName(backupItem.TargetPath)))
            {
                LogHandler.WriteLog("Target path", Path.GetDirectoryName(backupItem.SourcePath) + " - Directory not found", 3);
                return;
            }
            else
            {
                LogHandler.WriteLog("Target path", Path.GetDirectoryName(backupItem.SourcePath), 3);
            }

            if (backupItem.Archiving)
            {
                LogHandler.WriteLog("Archiving", "Enabled", 3);
                ZipFile.CreateFromDirectory(backupItem.SourcePath, HandleFileName(backupItem.TargetPath));
            }
            else
            {
                LogHandler.WriteLog("Archiving", "Disabled", 3);
                File.Copy(backupItem.SourcePath, HandleFileName(backupItem.TargetPath), true);
            }
        }
Example #23
        /// <summary>
        /// Toggles the job associated with the BackupItem in BackupInfo[SelectedIndex].
        /// </summary>
        public static void QueueJobBySelectedIndex()
        {
            BackupItem currentItem = _ActiveViewModel.BackupInfo[_ActiveViewModel.SelectedBackupItemIndex];

            if (currentItem.BackupEnabled)
            {
                // We do not know how long ago the previous job was de-activated.
                // If the job is enabled and the date has passed, the copy job will run immediately.
                // To prevent this, recalculate the next backup date of the newly enabled item if its NextBackupDate is before the current date and time.
                // Keep going until the NextBackupDate is after the current date and time if necessary.
                while (currentItem.NextBackupDate < DateTime.Now)
                {
                    UpdateNextBackupDate(currentItem.HashCode);
                }
                TaskManager.QueueBackupJob(currentItem);
            }
            else
            {
                TaskManager.DequeueBackupJob(currentItem.HashCode);
            }
        }
Example #24
        private List <BackupItem> GetBackups(ProjectItemViewModel item)
        {
            BackupManager manager = new BackupManager(this.Model.Path);

            List <BackupItem> result = new List <BackupItem>();

            if (item.Type == ProjectItemType.File)
            {
                BackupItem backup = manager.GetBackup(item.Path);
                if (backup != null)
                {
                    result.Add(backup);
                }
            }

            foreach (ProjectItemViewModel child in item.Children)
            {
                result.AddRange(GetBackups(child));
            }

            return(result);
        }
Example #25
        public async Task <bool> StoreItem(BackupItem item, Stream source)
        {
            string path = Path.Combine(TargetFolder, item.FullPath);

            if (item.FullPath.StartsWith("/"))
            {
                path = Path.Combine(TargetFolder, item.FullPath.Substring(1));
            }

            string folder = Path.GetDirectoryName(path);

            if (!Directory.Exists(folder))
            {
                Directory.CreateDirectory(folder);
            }

            using (Stream target = File.OpenWrite(path))
            {
                await source.CopyToAsync(target);
            }
            return(true);
        }
Example #26
 protected override TreeViewItem BuildRoot()
 {
     m_rootItem.children.Clear();
     if (m_asset != null)
     {
         var assetIcon =
             EditorGUIUtility
             .ObjectContent(m_asset, m_asset.GetType())
             .image as Texture2D;
         var backupFiles =
             AssetBackup
             .EnumerateBackupFiles(m_asset);
         foreach (var backupFile in backupFiles)
         {
             var id   = m_rootItem.children.Count;
             var item = new BackupItem(id, backupFile);
             item.icon = assetIcon;
             m_rootItem.AddChild(item);
         }
     }
     return(m_rootItem);
 }
Example #27
        public override bool PrepareRestore()
        {
            bool result = false;

            using (RestoreForm dlg = new RestoreForm(this))
            {
                if (dlg.ShowDialog() == System.Windows.Forms.DialogResult.OK)
                {
                    _selectedBackupItem = dlg.SelectedBackupItem;
                    _restorePath        = dlg.RestoreFolder;

                    string directoryName = _restorePath;
                    if (!Directory.Exists(_restorePath))
                    {
                        Directory.CreateDirectory(_restorePath);
                    }

                    if (_fileCollection != null)
                    {
                        _fileCollection.Dispose();
                        _fileCollection = null;
                    }

                    PluginSettings.Instance.ActiveDataFile = Path.Combine(_restorePath, Path.GetFileName(_selectedBackupItem.OriginalPath));
                    SetDataSourceName(PluginSettings.Instance.ActiveDataFile);

                    Core.Geocaches.Clear();
                    Core.Logs.Clear();
                    Core.Waypoints.Clear();
                    Core.LogImages.Clear();
                    Core.UserWaypoints.Clear();

                    result = true;
                }
            }
            return(result);
        }
Example #28
        public async Task <Stream> OpenRead(BackupItem item)
        {
            string connectionString = $"Server={Server};Database={Database};User Id={Username};Password={Password};";

            connection = new SqlConnection(connectionString);
            await connection.OpenAsync();

            command = new SqlCommand($"BACKUP DATABASE {Database} TO DISK = '{TemporaryBakFile}' WITH INIT, COPY_ONLY", connection);
            await command.ExecuteNonQueryAsync();

            command.CommandText = $"SELECT * FROM OPENROWSET(BULK N'{TemporaryBakFile}', SINGLE_BLOB) AS Contents";
            reader = await command.ExecuteReaderAsync(CommandBehavior.SequentialAccess);

            if (await reader.ReadAsync())
            {
                if (!(await reader.IsDBNullAsync(0)))
                {
                    stream = reader.GetStream(0);
                    return(stream);
                }
            }

            throw new ArgumentOutOfRangeException("Database failed to backup or database not found.");
        }
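Because the connection, command and reader here are kept in fields and left open so the returned stream stays readable, the caller eventually has to release them. A hypothetical cleanup helper (the field names simply mirror the ones used above):

        public void CloseRead()
        {
            // Dispose in reverse order of creation; all of these types implement IDisposable.
            stream?.Dispose();
            reader?.Dispose();
            command?.Dispose();
            connection?.Dispose();

            stream     = null;
            reader     = null;
            command    = null;
            connection = null;
        }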
Example #29
 public PostBackupEvent(DateTime time, BackupItem backupItem) : base(time)
 {
     BackupItem = backupItem;
     Name       = backupItem.Name;
 }
Example #30
 /// <summary>
 /// Remove a backup job from the jobs pool for the given BackupItem.
 /// </summary>
 /// <param name="backupItem"></param>
 public static async void DequeueBackupJob(BackupItem backupItem)
 {
     await DequeueBackupJob(backupItem.HashCode);
 }
Example #31
        public override bool Backup()
        {
            bool result = true;
            _fileCollection.StartReleaseForCopy();
            try
            {
                BackupItem bi = new BackupItem();
                bi.BackupFile = Path.Combine(Properties.Settings.Default.BackupFolder, string.Concat(Path.GetFileNameWithoutExtension(_fileCollection.BaseFilename), "_", DateTime.Now.ToString("s").Replace(" ", "").Replace("T", "").Replace(":", "").Replace("-", ""),".zip"));
                bi.BackupDate = DateTime.Now;
                bi.OriginalPath = _fileCollection.BaseFilename;
                //zip all files
                string[] files = Directory.GetFiles(Path.GetDirectoryName(_fileCollection.BaseFilename), string.Concat(Path.GetFileNameWithoutExtension(_fileCollection.BaseFilename), ".*"));
                List<FileInfo> fil = new List<FileInfo>();
                long totalBytes = 0;
                foreach (string f in files)
                {
                    FileInfo fi = new FileInfo(f);
                    fil.Add(fi);
                    totalBytes += fi.Length;
                }
                int max = (int)Math.Max(1, totalBytes / (1024 * 1024));
                long processed = 0;
                DateTime progUpdate = DateTime.Now.AddSeconds(2);
                byte[] buffer = new byte[4 * 1024 * 1024];
                using (Utils.ProgressBlock prog = new Utils.ProgressBlock(this, STR_BACKINGUPDATA, STR_BACKINGUPDATA, max, 0))
                {
                    using (ZipOutputStream s = new ZipOutputStream(System.IO.File.Create(bi.BackupFile)))
                    {
                        s.SetLevel(9); // 0-9, 9 being the highest compression

                        foreach (FileInfo fi in fil)
                        {
                            ZipEntry entry = new ZipEntry(Path.GetFileName(fi.FullName));
                            entry.DateTime = DateTime.Now;
                            entry.Size = fi.Length;
                            s.PutNextEntry(entry);

                            using (FileStream fs = fi.OpenRead())
                            {
                                int i;
                                do
                                {
                                    i = fs.Read(buffer, 0, buffer.Length);
                                    if (i > 0)
                                    {
                                        s.Write(buffer, 0, i);
                                        processed += i;
                                    }

                                    if (DateTime.Now >= progUpdate)
                                    {
                                        int pos = (int)(processed / (1024 * 1024));
                                        prog.UpdateProgress(STR_BACKINGUPDATA, STR_BACKINGUPDATA, max, pos);
                                        progUpdate = DateTime.Now.AddSeconds(2);
                                    }
                                } while (i == buffer.Length);
                            }

                            if (DateTime.Now >= progUpdate)
                            {
                                int pos = (int)(processed / (1024 * 1024));
                                prog.UpdateProgress(STR_BACKINGUPDATA, STR_BACKINGUPDATA, max, pos);
                                progUpdate = DateTime.Now.AddSeconds(2);
                            }

                        }

                        s.Finish();
                        s.Close();
                    }
                }


                //check backup(s) to remove
                try
                {
                    if (Properties.Settings.Default.BackupKeepMaxDays > 0)
                    {
                        DateTime dt = DateTime.Now.AddDays(-1 * Properties.Settings.Default.BackupKeepMaxDays).Date;
                        List<BackupItem> bil = (from b in _backupItemList.BackupItems where b.BackupDate.Date < dt select b).ToList();
                        foreach (BackupItem b in bil)
                        {
                            if (File.Exists(b.BackupFile))
                            {
                                File.Delete(b.BackupFile);
                            }
                            _backupItemList.RemoveBackupItem(b);
                        }
                    }
                    if (Properties.Settings.Default.BackupKeepMaxCount > 0)
                    {
                        List<BackupItem> bil = (from b in _backupItemList.BackupItems where b.OriginalPath == bi.OriginalPath select b).OrderByDescending(x => x.BackupDate).Skip(Properties.Settings.Default.BackupKeepMaxCount-1).ToList();
                        foreach (BackupItem b in bil)
                        {
                            if (File.Exists(b.BackupFile))
                            {
                                File.Delete(b.BackupFile);
                            }
                            _backupItemList.RemoveBackupItem(b);
                        }
                    }
                }
                catch
                {
                }

                //save new backup info
                _backupItemList.AddBackupItem(bi);
                SaveBackupItemList();
            }
            finally
            {
                _fileCollection.EndReleaseForCopy();
            }
            return result;
        }
Example #32
        public override bool PrepareRestore()
        {
            bool result = false;
            using (RestoreForm dlg = new RestoreForm(this))
            {
                if (dlg.ShowDialog() == System.Windows.Forms.DialogResult.OK)
                {
                    _selectedBackupItem = dlg.SelectedBackupItem;
                    _restorePath = dlg.RestoreFolder;

                    string directoryName = _restorePath;
                    if (!Directory.Exists(_restorePath)) Directory.CreateDirectory(_restorePath);

                    if (_fileCollection != null)
                    {
                        _fileCollection.Dispose();
                        _fileCollection = null;
                    }

                    Properties.Settings.Default.ActiveDataFile = Path.Combine(_restorePath, Path.GetFileName(_selectedBackupItem.OriginalPath));
                    Properties.Settings.Default.Save();
                    SetDataSourceName(Properties.Settings.Default.ActiveDataFile);

                    Core.Geocaches.Clear();
                    Core.Logs.Clear();
                    Core.Waypoints.Clear();
                    Core.LogImages.Clear();
                    Core.UserWaypoints.Clear();

                    result = true;
                }
            }
            return result;
        }
Example #33
 public void AddBackupItem(BackupItem item)
 {
     BackupItems.Add(item);
 }
Example #34
 public void RemoveBackupItem(BackupItem item)
 {
     BackupItems.Remove(item);
 }