// Demonstrates a full (non-incremental) smuggler export of documents, indexes,
// attachments and transformers from an embedded database to a local dump file.
// BUGFIX: declared as async Task instead of async void so exceptions thrown by
// the awaited export surface to the caller rather than crashing the process.
public async Task Sample3()
{
    #region smuggler_api_4
    using (var store = new EmbeddableDocumentStore { DefaultDatabase = "Northwind" })
    {
        store.Initialize();

        var dataDumper = new DataDumper(store.DocumentDatabase)
        {
            SmugglerOptions =
            {
                // Export every item type in a single full dump.
                OperateOnTypes = ItemType.Documents | ItemType.Indexes | ItemType.Attachments | ItemType.Transformers,
                Incremental = false
            }
        };

        var exportOptions = new SmugglerExportOptions
        {
            From = new EmbeddedRavenConnectionStringOptions(),
            ToFile = "dump.raven"
        };

        await dataDumper.ExportData(exportOptions);
    }
    #endregion
}
// Exports 2000 users with Limit = 1500 and an entity-name filter, then verifies
// the incremental dump contains exactly 1500 User documents.
public async Task CanPerformDumpWithLimit_Dumper()
{
    var backupPath = NewDataPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        InsertUsers(store, 0, 2000);

        var dumper = new DataDumper(store.SystemDatabase) { SmugglerOptions = { Limit = 1500, Incremental = true } };
        // Only export documents whose Raven-Entity-Name metadata matches "Users".
        dumper.SmugglerOptions.Filters.Add(
            new FilterSetting
            {
                Path = "@metadata.Raven-Entity-Name",
                Values = { "Users" },
                ShouldMatch = true,
            });
        await dumper.ExportData(new SmugglerExportOptions { ToFile = backupPath });
    }

    VerifyDump(backupPath, store =>
    {
        using (var session = store.OpenSession())
        {
            // The Limit option caps the export at 1500 of the 2000 inserted users.
            Assert.Equal(1500, session.Query <User>().Customize(x => x.WaitForNonStaleResultsAsOfNow()).Count());
        }
    });
    IOExtensions.DeleteDirectory(backupPath);
}
// Performs a full export of a brand-new (empty) remote database using the
// legacy ExportData(stream, lastEtagsFile, fullBackup, status) overload, then
// verifies the resulting dump contains zero documents.
public async Task CanDumpEmptyDatabase_Dumper()
{
    var backupPath = NewDataPath("BackupFolder");
    using (var server = GetNewServer())
    {
        // The store is only initialized so the server spins up a database;
        // no documents are written before the export.
        using (new DocumentStore { Url = "http://localhost:8079" }.Initialize())
        {
            // now perform full backup
            var options = new SmugglerOptions
            {
                BackupPath = backupPath,
            };
            var dumper = new DataDumper(server.Database, options);
            var backupStatus = new PeriodicBackupStatus();
            await dumper.ExportData(null, null, true, backupStatus);
        }
    }

    VerifyDump(backupPath, store => Assert.Equal(0, store.DocumentDatabase.GetDocuments(0, int.MaxValue, null, CancellationToken.None).Count()));

    IOExtensions.DeleteDirectory(backupPath);
}
// Verifies that two periodic backups taken at different times can be imported
// together: stores one user, waits for the first backup to run, stores a
// second user, waits for the next backup, then imports the backup folder into
// a fresh store and checks both users are present.
public void CanBackupToDirectory_MultipleBackups()
{
    var backupPath = NewDataPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        using (var session = store.OpenSession())
        {
            session.Store(new User { Name = "oren" });
            // Configure periodic backups to run every 25 ms into backupPath.
            var periodicBackupSetup = new PeriodicBackupSetup
            {
                LocalFolderName = backupPath,
                IntervalMilliseconds = 25
            };
            session.Store(periodicBackupSetup, PeriodicBackupSetup.RavenDocumentKey);

            session.SaveChanges();
        }

        // Wait until the backup-status document shows a real LastDocsEtag,
        // i.e. the first backup has actually completed.
        SpinWait.SpinUntil(() =>
        {
            var jsonDocument = store.DatabaseCommands.Get(PeriodicBackupStatus.RavenDocumentKey);
            if (jsonDocument == null)
            {
                return(false);
            }
            var periodicBackupStatus = jsonDocument.DataAsJson.JsonDeserialization <PeriodicBackupStatus>();
            return(periodicBackupStatus.LastDocsEtag != Etag.Empty && periodicBackupStatus.LastDocsEtag != null);
        });

        var etagForBackups = store.DatabaseCommands.Get(PeriodicBackupStatus.RavenDocumentKey).Etag;
        using (var session = store.OpenSession())
        {
            session.Store(new User { Name = "ayende" });
            session.SaveChanges();
        }
        // The status document's etag changes again when the next backup runs.
        SpinWait.SpinUntil(() => store.DatabaseCommands.Get(PeriodicBackupStatus.RavenDocumentKey).Etag != etagForBackups);
    }

    using (var store = NewDocumentStore())
    {
        var smugglerOptions = new SmugglerOptions
        {
            BackupPath = backupPath
        };
        var dataDumper = new DataDumper(store.DocumentDatabase, smugglerOptions);
        // Incremental import (second argument) applies all backups in order;
        // block until it completes before asserting.
        dataDumper.ImportData(smugglerOptions, true).Wait();

        using (var session = store.OpenSession())
        {
            Assert.Equal("oren", session.Load <User>(1).Name);
            Assert.Equal("ayende", session.Load <User>(2).Name);
        }
    }

    IOExtensions.DeleteDirectory(backupPath);
}
// Exports 2000 users as a full backup via the legacy
// ExportData(stream, lastEtagsFile, fullBackup, status) overload and verifies
// all 2000 round-trip through the dump.
public async Task CanPerformDump_Dumper()
{
    var backupPath = NewDataPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        InsertUsers(store, 0, 2000);

        var options = new SmugglerOptions
        {
            BackupPath = backupPath,
        };
        var dumper = new DataDumper(store.DocumentDatabase, options);
        var backupStatus = new PeriodicBackupStatus();
        await dumper.ExportData(null, null, true, backupStatus);
    }

    VerifyDump(backupPath, store =>
    {
        using (var session = store.OpenSession())
        {
            Assert.Equal(2000, session.Query <User>().Customize(x => x.WaitForNonStaleResultsAsOfNow()).Count());
        }
    });
    IOExtensions.DeleteDirectory(backupPath);
}
// Exports documents and attachments from a seeded store to a temp dump file,
// imports the dump into a fresh store and verifies the person document and
// the attachment both round-trip.
public async Task CanImportFromDumpFile()
{
    var file = Path.GetTempFileName();
    try
    {
        using (var store = NewDocumentStoreWithData())
        {
            var dumper = new DataDumper(store.SystemDatabase);
            await dumper.ExportData(new SmugglerExportOptions { ToFile = file });
        }

        using (var store = NewDocumentStore())
        {
            var dumper = new DataDumper(store.SystemDatabase);
            await dumper.ImportData(new SmugglerImportOptions { FromFile = file });

            using (var session = store.OpenSession())
            {
                // Person imported.
                Assert.Equal(1, session.Query <Person>().Customize(x => x.WaitForNonStaleResults()).Take(5).Count());

                // Attachment imported.
                var attachment = store.DatabaseCommands.GetAttachment("Attachments/1");
                var data = ReadFully(attachment.Data());
                Assert.Equal(new byte[] { 1, 2, 3 }, data);
            }
        }
    }
    finally
    {
        // BUGFIX: Path.GetTempFileName() creates a real file on disk that was
        // previously never cleaned up, leaking a temp file per test run.
        File.Delete(file);
    }
}
// Imports a known-problematic dump from an embedded resource, then verifies a
// scripted patch can be applied over the imported documents.
public void CanGetCorrectResult()
{
    using (var store = NewDocumentStore())
    {
        var smugglerOptions = new SmugglerOptions();
        var dataDumper = new DataDumper(store.DocumentDatabase, smugglerOptions);
        using (var stream = typeof(TroyMapReduceImport).Assembly.GetManifestResourceStream("Raven.Tests.Patching.failingdump11.ravendump"))
        {
            // BUGFIX: Wait(timeout) returns false when the timeout elapses; the
            // result was previously ignored, so a slow import would let the test
            // continue against a half-imported database. Fail fast instead.
            Assert.True(dataDumper.ImportData(stream, smugglerOptions).Wait(TimeSpan.FromSeconds(15)));
        }

        using (var s = store.OpenSession())
        {
            // Force index to be non-stale before running the set-based patch.
            s.Advanced.LuceneQuery <object>("Raven/DocumentsByEntityName").WaitForNonStaleResults().ToList();
            store.DatabaseCommands.UpdateByIndex("Raven/DocumentsByEntityName",
                                                 new IndexQuery { Query = "Tag:Regions" },
                                                 new ScriptedPatchRequest { Script = @"this.Test = 'test';" }
                                                 , true);
        }
    }
}
// Exports documents and attachments from a seeded store to a temp backup
// file, imports them into a fresh store and verifies the person document and
// the attachment both round-trip.
// NOTE(review): if ExportData/ImportData return Task in this API version (as
// the overloads used elsewhere in this suite do), their results are not
// awaited here and the assertions may race the import — confirm these
// overloads complete synchronously.
public void CanImportFromDumpFile()
{
    var options = new SmugglerOptions
    {
        BackupPath = Path.GetTempFileName()
    };
    using (var store = NewDocumentStoreWithData())
    {
        var dumper = new DataDumper(store.DocumentDatabase, options);
        dumper.ExportData(options);
    }

    using (var store = NewDocumentStore())
    {
        var dumper = new DataDumper(store.DocumentDatabase, options);
        dumper.ImportData(options);

        using (var session = store.OpenSession())
        {
            // Person imported.
            Assert.Equal(1, session.Query <Person>().Customize(x => x.WaitForNonStaleResults()).Take(5).Count());

            // Attachment imported.
            var attachment = store.DatabaseCommands.GetAttachment("Attachments/1");
            var data = ReadFully(attachment.Data());
            Assert.Equal(new byte[] { 1, 2, 3 }, data);
        }
    }
}
// Exports a database where 2000 users are hidden (not retrievable via GET —
// verified for users/1) plus 25 visible users; only the 25 visible users
// should appear in the incremental dump.
public async Task CanDumpWhenHiddenDocsWithLimit_Dumper()
{
    var backupPath = NewDataPath("BackupFolder");
    using (var server = GetNewServer())
    {
        using (var store = new DocumentStore { Url = "http://localhost:8079" }.Initialize())
        {
            InsertHidenUsers(store, 2000);

            // Hidden documents are not visible through normal document GETs.
            var user1 = store.DatabaseCommands.Get("users/1");
            Assert.Null(user1);

            InsertUsers(store, 1, 25);

            // now perform full backup
            var dumper = new DataDumper(server.SystemDatabase);
            await dumper.ExportData(new SmugglerExportOptions { ToFile = backupPath }, new SmugglerOptions { Incremental = true });
        }
    }

    VerifyDump(backupPath, store =>
    {
        using (var session = store.OpenSession())
        {
            // Only the 25 non-hidden users made it into the dump.
            Assert.Equal(25, session.Query <User>().Customize(x => x.WaitForNonStaleResultsAsOfNow()).Count());
        }
    });
    IOExtensions.DeleteDirectory(backupPath);
}
// Exports 2000 users as an incremental dump via the newer
// SmugglerExportOptions API and verifies all 2000 round-trip.
public async Task CanPerformDump_Dumper()
{
    var backupPath = NewDataPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        InsertUsers(store, 0, 2000);

        var dumper = new DataDumper(store.SystemDatabase) { SmugglerOptions = { Incremental = true } };
        await dumper.ExportData(
            new SmugglerExportOptions
            {
                ToFile = backupPath,
            });
    }

    VerifyDump(backupPath, store =>
    {
        using (var session = store.OpenSession())
        {
            Assert.Equal(2000, session.Query <User>().Customize(x => x.WaitForNonStaleResultsAsOfNow()).Count());
        }
    });
    IOExtensions.DeleteDirectory(backupPath);
}
////////////////

// Handles this mod's hotkeys each input tick: control panel toggle, debug
// data dump, and any registered custom hotkeys. Each hotkey is processed in
// its own try/catch so a failure in one handler does not break the others;
// any failure aborts the remaining handlers for this tick.
public override void ProcessTriggers(TriggersSet triggersSet)
{
    //DataStore.Add( DebugHelpers.GetCurrentContext()+"_"+this.player.name+":"+this.player.whoAmI+"_A", 1 );
    var mymod = (ModHelpersMod)this.mod;

    // Control panel hotkey: toggles the panel open/closed unless disabled in config.
    try
    {
        if (mymod.ControlPanelHotkey != null && mymod.ControlPanelHotkey.JustPressed)
        {
            if (mymod.Config.DisableControlPanelHotkey)
            {
                Main.NewText("Control panel hotkey disabled.", Color.Red);
            }
            else
            {
                if (mymod.ControlPanel != null)
                {
                    if (mymod.ControlPanel.IsOpen)
                    {
                        ControlPanelTabs.CloseDialog();
                    }
                    else
                    {
                        ControlPanelTabs.OpenTab(UIControlPanel.DefaultTabName);
                    }
                }
            }
        }
    }
    catch (Exception e)
    {
        LogHelpers.Warn("(1) - " + e.ToString());
        return;
    }

    // Data dump hotkey: writes the latest debug data to a log file and reports the file name in chat.
    try
    {
        if (mymod.DataDumpHotkey != null && mymod.DataDumpHotkey.JustPressed)
        {
            string fileName;
            if (DataDumper.DumpToFile(out fileName))
            {
                string msg = "Dumped latest debug data to log file " + fileName;
                Main.NewText(msg, Color.Azure);
                LogHelpers.Log(msg);
            }
        }
    }
    catch (Exception e)
    {
        LogHelpers.Warn("(2) - " + e.ToString());
        return;
    }

    // Delegate remaining input handling to any registered custom hotkeys.
    try
    {
        if (mymod.CustomHotkeys != null)
        {
            mymod.CustomHotkeys.ProcessTriggers(triggersSet);
        }
    }
    catch (Exception e)
    {
        LogHelpers.Warn("(3) - " + e.ToString());
        return;
    }
    //DataStore.Add( DebugHelpers.GetCurrentContext()+"_"+this.player.name+":"+this.player.whoAmI+"_B", 1 );
}
/// <summary>
/// Creates a garbage collector for the given cache trasher, stores its
/// configuration, and immediately kicks off the collection process.
/// </summary>
public TreeCacheGarbageCollector(ICacheTrasher trasher, int interval, DataDumper dumper)
{
    this.dumper = dumper;
    this.interval = interval;
    this.trasher = trasher;

    RunGarbageProcess();
}
/// <summary>
/// Builds a resource cache with the given entry lifetime (in milliseconds)
/// and attaches a garbage collector that dumps evicted data under
/// <paramref name="path"/>.
/// </summary>
public static ResourceCache CreateResourceCache(string path, int lifeTime, int gcInterval)
{
    var cache = new ResourceCache(TimeSpan.FromMilliseconds(lifeTime));
    // The collector wires itself to the cache and starts in its constructor;
    // the instance itself does not need to be retained here.
    new TreeCacheGarbageCollector(cache, gcInterval, new DataDumper(path, "resource"));
    return cache;
}
// Verifies that a document deletion is captured by periodic export: stores a
// user, waits for the export, deletes the user, waits for the deletion to be
// exported, then imports the backup folder incrementally into a fresh store
// and checks the user is gone.
public void CanBackupDocumentDeletion()
{
    var backupPath = NewDataPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        string userId;
        using (var session = store.OpenSession())
        {
            // Configure periodic exports to run every 100 ms into backupPath.
            var periodicBackupSetup = new PeriodicExportSetup
            {
                LocalFolderName = backupPath,
                IntervalMilliseconds = 100
            };
            session.Store(periodicBackupSetup, PeriodicExportSetup.RavenDocumentKey);
            session.SaveChanges();
        }

        var backupStatus = GetPerodicBackupStatus(store.DocumentDatabase);

        using (var session = store.OpenSession())
        {
            var user = new User { Name = "oren" };
            session.Store(user);
            userId = user.Id;
            session.SaveChanges();
        }

        // First wait for the stored user to be exported...
        WaitForPeriodicExport(store.DocumentDatabase, backupStatus);

        store.DatabaseCommands.Delete(userId, null);

        // ...then wait specifically for the deletion etag to advance.
        WaitForPeriodicExport(store.DocumentDatabase, backupStatus, x => x.LastDocsDeletionEtag);
    }

    using (var store = NewDocumentStore())
    {
        var dataDumper = new DataDumper(store.DocumentDatabase);
        dataDumper.ImportData(new SmugglerImportOptions
        {
            FromFile = backupPath,
        }, new SmugglerOptions
        {
            Incremental = true,
        }).Wait();

        using (var session = store.OpenSession())
        {
            // The deletion was replayed, so the user must not exist.
            Assert.Null(session.Load <User>(1));
        }
    }
    IOExtensions.DeleteDirectory(backupPath);
}
////////////////

/// <summary>
/// Server-side handler: dumps debug data to file when either the
/// dump-also-server debug flag is set or the requesting local player has
/// basic server privileges. Never sends a reply (always returns false).
/// </summary>
protected override bool ReceiveRequestWithServer(int fromWho)
{
    bool mayDump = ModHelpersMod.Config.DebugModeDumpAlsoServer
        || UserHelpers.HasBasicServerPrivilege(Main.LocalPlayer);

    if (mayDump)
    {
        string dumpFileName;
        DataDumper.DumpToFile(out dumpFileName);
    }

    return false;
}
// Verifies that two periodic backups taken at different times can be imported
// together: stores one user, waits for a backup to run, stores a second user,
// waits for the next backup, then imports the folder into a fresh store and
// checks both users are present.
public void CanBackupToDirectory_MultipleBackups()
{
    var backupPath = GetPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        Guid? etagForBackups;
        using (var session = store.OpenSession())
        {
            session.Store(new User { Name = "oren" });
            // Configure periodic backups to run every 25 ms into backupPath.
            var periodicBackupSetup = new PeriodicBackupSetup
            {
                LocalFolderName = backupPath,
                IntervalMilliseconds = 25
            };
            session.Store(periodicBackupSetup, PeriodicBackupSetup.RavenDocumentKey);
            session.SaveChanges();
            etagForBackups = session.Advanced.GetEtagFor(periodicBackupSetup);
        }

        // Wait for the setup document's etag to change, i.e. a backup has run.
        SpinWait.SpinUntil(() => store.DatabaseCommands.Get(PeriodicBackupSetup.RavenDocumentKey).Etag != etagForBackups);

        etagForBackups = store.DatabaseCommands.Get(PeriodicBackupSetup.RavenDocumentKey).Etag;
        using (var session = store.OpenSession())
        {
            session.Store(new User { Name = "ayende" });
            session.SaveChanges();
        }
        SpinWait.SpinUntil(() => store.DatabaseCommands.Get(PeriodicBackupSetup.RavenDocumentKey).Etag != etagForBackups);
    }

    using (var store = NewDocumentStore())
    {
        var smugglerOptions = new SmugglerOptions
        {
            BackupPath = backupPath
        };
        var dataDumper = new DataDumper(store.DocumentDatabase, smugglerOptions);
        // BUGFIX: ImportData returns a Task that was previously left
        // unobserved, so the assertions below could run before the import
        // finished (the identical call elsewhere in this suite uses .Wait()).
        dataDumper.ImportData(smugglerOptions, true).Wait();

        using (var session = store.OpenSession())
        {
            Assert.Equal("oren", session.Load <User>(1).Name);
            Assert.Equal("ayende", session.Load <User>(2).Name);
        }
    }

    IOExtensions.DeleteDirectory(backupPath);
}
/// <summary>
/// Step 2: reads the distinct title links collected by the previous step from
/// TitleNodes.txt and, in parallel (bounded by SiteParallelism), scrapes each
/// title page for episode links, appending them to EpisodeNodes.txt under the
/// per-site debug folder.
/// </summary>
/// <returns>Task that completes once every posted link has been processed.</returns>
private async Task Step2GetEpisodeLinks()
{
    // Worker: scrape one title link and persist any episode links found.
    // Exceptions are logged per link so one bad page does not stop the run.
    var workerBlock = new ActionBlock <string>
        (link =>
    {
        try
        {
            var(_, episodeLinkList) = GetEpisodeLinks(link, true);
            if (episodeLinkList == null)
            {
                return;
            }
            foreach (var episodeLink in episodeLinkList)
            {
                DataDumper.Write(episodeLink, $"{SavePath}\\Debug\\{SiteUri.Host}\\{CurrentTimeStamp:d-M-yy_HH-mm-ss}\\EpisodeNodes.txt");
            }
        }
        catch (Exception ex)
        {
            Logger.Log($@"Exception raised: {ex.ToString()}", LogType.Error);
        }
    },
        new ExecutionDataflowBlockOptions
    {
        MaxDegreeOfParallelism = SiteParallelism
    }
        );

    // Load Title Links
    try
    {
        var fileLines = File
                        .ReadAllLines(
            $"{SavePath}\\Debug\\{SiteUri.Host}\\{CurrentTimeStamp:d-M-yy_HH-mm-ss}\\TitleNodes.txt")
                        .Distinct().ToList();

        foreach (var line in fileLines)
        {
            workerBlock.Post(line);
        }
    }
    catch (Exception exception)
    {
        Logger.Log($"Exception raised: Title Links File Not Found | {exception.Source} - {exception.Message}",
                   LogType.Error);
    }
    finally
    {
        // Always signal completion — even when the input file is missing —
        // so the await below cannot hang.
        workerBlock.Complete();
    }

    await workerBlock.Completion;
}
// Verifies that a periodic FULL export captures both documents and
// attachments: waits for the first full backup to appear, then imports the
// oldest .ravendb-full-dump file (non-incrementally) into a fresh store and
// checks the document and the attachment are both restored.
public void CanFullBackupToDirectory()
{
    var backupPath = NewDataPath("BackupFolder", forceCreateDir: true);
    try
    {
        using (var store = NewDocumentStore())
        {
            store.DatabaseCommands.PutAttachment("attach/1", null, new MemoryStream(new byte[] { 1, 2, 3, 4, 5 }), new RavenJObject());

            using (var session = store.OpenSession())
            {
                session.Store(new User { Name = "oren" });
                // Configure a full backup every 500 ms into backupPath.
                var periodicBackupSetup = new PeriodicExportSetup
                {
                    LocalFolderName = backupPath,
                    FullBackupIntervalMilliseconds = 500
                };
                session.Store(periodicBackupSetup, PeriodicExportSetup.RavenDocumentKey);

                session.SaveChanges();
            }
            WaitForNextFullBackup(store);
        }
        using (var store = NewDocumentStore())
        {
            var dataDumper = new DataDumper(store.SystemDatabase) { SmugglerOptions = { Incremental = false } };

            // Pick the earliest full-dump file produced in the backup folder.
            dataDumper.ImportData(
                new SmugglerImportOptions
                {
                    FromFile = Directory.GetFiles(Path.GetFullPath(backupPath))
                               .Where(file => ".ravendb-full-dump".Equals(Path.GetExtension(file), StringComparison.InvariantCultureIgnoreCase))
                               .OrderBy(File.GetLastWriteTimeUtc).First()
                }).Wait();

            using (var session = store.OpenSession())
            {
                Assert.Equal("oren", session.Load <User>(1).Name);
                Assert.NotNull(store.DatabaseCommands.GetAttachment("attach/1"));
            }
        }
    }
    finally
    {
        IOExtensions.DeleteDirectory(backupPath);
    }
}
// Tray application context: builds the tray menu (unless configured to run
// hidden), then starts session tracking, app monitoring, and — when database
// configuration succeeds — the periodic DB dumper.
public AppTrayIconContext()
{
    //Initialize Objects
    AppSettings = new SettingsManager.Settings();

    // Check if Runs Hidden
    if (!AppSettings.RunHidden)
    {
        //Build Menu
        ContextMenu mnu = new ContextMenu();
        mnu.MenuItems.Add(new MenuItem(ResFiles.GlobalRes.traymenu_Activity, OpenStats));
        // Settings and Exit entries can be suppressed via configuration.
        if (!AppSettings.LockSettings)
        {
            mnu.MenuItems.Add(new MenuItem(ResFiles.GlobalRes.traymenu_Settings, OpenSettings));
        }
        if (!AppSettings.HideMenuExit)
        {
            mnu.MenuItems.Add(new MenuItem(ResFiles.GlobalRes.traymenu_Exit, Exit));
        }
        mnu.MenuItems.Add(new MenuItem(ResFiles.GlobalRes.traymenu_About, About));

        trayIcon = new NotifyIcon()
        {
            Icon = Resources.ClockIcon,
            ContextMenu = mnu,
            Visible = true
        };
        // Double-clicking the tray icon opens the activity/stats window.
        trayIcon.DoubleClick += new System.EventHandler(OpenStats);
        // Run the Exit handler on process shutdown for cleanup.
        AppDomain.CurrentDomain.ProcessExit += new System.EventHandler(Exit);
    }

    usrSession = new UserSession();
    appMon = new AppMonitor();
    Database = new DB();
    DBDumper = new DataDumper(appMon, Database, AppSettings.DBDumprate);

    appMon.Start();
    //TODO: Start DB dumper anyway
    // The dumper only starts when the database configuration is valid.
    if (setDBConfig())
    {
        DBDumper.Start();
    }
}
// Verifies import-over-existing-data semantics: exports a seeded store,
// mutates it (renames the person, adds a person, deletes one attachment,
// adds another), re-imports the original dump, and checks the original data
// is restored while data added after the export is left intact.
// NOTE(review): if ExportData/ImportData return Task in this API version (as
// the overloads used elsewhere in this suite do), their results are not
// awaited here and the assertions may race the import — confirm these
// overloads complete synchronously.
public void ImportReplacesAnExistingDatabase()
{
    var options = new SmugglerOptions
    {
        BackupPath = Path.GetTempFileName()
    };
    using (var store = NewDocumentStoreWithData())
    {
        var dumper = new DataDumper(store.DocumentDatabase, options);
        dumper.ExportData(options);

        using (var session = store.OpenSession())
        {
            // Mutate the database after the export so the import has
            // something to restore/merge against.
            var person = session.Load <Person>(1);
            person.Name = "Sean Kearon";

            session.Store(new Person { Name = "Gillian" });

            store.DatabaseCommands.DeleteAttachment("Attachments/1", null);

            store.DatabaseCommands.PutAttachment(
                "Attachments/2",
                null,
                new MemoryStream(new byte[] { 1, 2, 3, 4, 5, 6 }),
                new RavenJObject { { "Description", "This is another attachment." } });

            session.SaveChanges();
        }

        new DataDumper(store.DocumentDatabase, options).ImportData(options);

        using (var session = store.OpenSession())
        {
            // Original attachment has been restored.
            Assert.NotNull(store.DatabaseCommands.GetAttachment("Attachments/1"));

            // The newly added attachment is still there.
            Assert.NotNull(store.DatabaseCommands.GetAttachment("Attachments/2"));

            // Original person has been restored.
            Assert.NotNull(session.Query <Person, PeopleByName>().Customize(x => x.WaitForNonStaleResults()).Single(x => x.Name == "Sean"));

            // The newly added person has not been removed.
            Assert.True(session.Query <Person, PeopleByName>().Customize(x => x.WaitForNonStaleResults()).Any(x => x.Name == "Gillian"));
        }
    }
}
// Imports the dump found at backupPath into a fresh embedded store
// (incrementally) and runs the supplied assertions against it.
private void VerifyDump(string backupPath, Action <EmbeddableDocumentStore> action)
{
    using (var store = NewDocumentStore())
    {
        var options = new SmugglerOptions { BackupPath = backupPath };
        new DataDumper(store.DocumentDatabase, options).ImportData(options, true).Wait();

        action(store);
    }
}
// Registers the debug dump sources for the world UID (with seed and net
// mode) and the current player's UID.
private void LoadDataSources()
{
    DataDumper.SetDumpSource("WorldUidWithSeed", () =>
        " " + WorldHelpers.GetUniqueIdForCurrentWorld(true) + " (net mode: " + Main.netMode + ")"
    );

    DataDumper.SetDumpSource("PlayerUid", () => {
        bool noValidPlayer = Main.myPlayer < 0 || Main.myPlayer >= (Main.player.Length - 1);
        if (noValidPlayer) {
            return " Unobtainable";
        }
        return " " + PlayerIdentityHelpers.GetUniqueId();
    });
}
// Imports the incremental dump at backupPath into a fresh embedded store and
// runs the supplied assertions against it.
private void VerifyDump(string backupPath, Action <EmbeddableDocumentStore> action)
{
    using (var store = NewDocumentStore())
    {
        var importer = new DataDumper(store.SystemDatabase);
        importer.SmugglerOptions.Incremental = true;
        importer.ImportData(new SmugglerImportOptions { FromFile = backupPath }).Wait();

        action(store);
    }
}
// Exports 328 attachments with Limit = 206 (in batches of 100) and verifies
// the incremental dump contains exactly 206 attachments.
public async Task CanDumpAttachmentsWithLimit_Dumper()
{
    var backupPath = NewDataPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        InsertAttachments(store, 328);

        var dumper = new DataDumper(store.DocumentDatabase);
        await dumper.ExportData(new SmugglerExportOptions { ToFile = backupPath }, new SmugglerOptions { Incremental = true, BatchSize = 100, Limit = 206 });
    }

    VerifyDump(backupPath, store => Assert.Equal(206, store.DatabaseCommands.GetAttachmentHeadersStartingWith("user", 0, 500).Count()));
    IOExtensions.DeleteDirectory(backupPath);
}
// Accepts a multipart upload containing a smuggler dump file (part named
// "file") and imports it into the current database using the supplied batch
// size, type selection, pipe-delimited filters and transform script.
// NOTE(review): the parameter named includeExpiredDocuments is assigned
// directly to ShouldExcludeExpired without negation — the semantics look
// inverted; confirm against the client that posts to this endpoint.
public async Task <HttpResponseMessage> ImportDatabase(int batchSize, bool includeExpiredDocuments, ItemType operateOnTypes, string filtersPipeDelimited, string transformScript)
{
    if (!this.Request.Content.IsMimeMultipartContent())
    {
        throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
    }

    // Buffer the multipart body in memory and pull out the "file" part.
    var streamProvider = new MultipartMemoryStreamProvider();
    await Request.Content.ReadAsMultipartAsync(streamProvider);
    var fileStream = await streamProvider.Contents
                     .First(c => c.Headers.ContentDisposition.Name == "\"file\"")
                     .ReadAsStreamAsync();

    var dataDumper = new DataDumper(Database);
    var importOptions = new SmugglerImportOptions
    {
        FromStream = fileStream
    };
    var options = new SmugglerOptions
    {
        BatchSize = batchSize,
        ShouldExcludeExpired = includeExpiredDocuments,
        OperateOnTypes = operateOnTypes,
        TransformScript = transformScript
    };

    // Filters are passed in without the aid of the model binder. Instead, we pass in a list of FilterSettings using a string like this: pathHere;;;valueHere;;;true|||againPathHere;;;anotherValue;;;false
    // Why? Because I don't see a way to pass a list of a values to a WebAPI method that accepts a file upload, outside of passing in a simple string value and parsing it ourselves.
    if (filtersPipeDelimited != null)
    {
        options.Filters.AddRange(filtersPipeDelimited
                                 .Split(new string[] { "|||" }, StringSplitOptions.RemoveEmptyEntries)
                                 .Select(f => f.Split(new string[] { ";;;" }, StringSplitOptions.RemoveEmptyEntries))
                                 .Select(o => new FilterSetting {
            Path = o[0], Values = new List <string> {
                o[1]
            }, ShouldMatch = bool.Parse(o[2])
        }));
    }

    await dataDumper.ImportData(importOptions, options);

    return(GetEmptyMessage());
}
// Mod load hook: tracks the current net mode across world load/exit, starts
// the reset-mode session, and registers the session data as a debug dump source.
public override void Load()
{
    // Record the net mode whenever a world loads; reset to -1 on world exit.
    LoadHooks.AddWorldLoadEachHook(delegate {
        this.CurrentNetMode = Main.netMode;
    });
    CustomLoadHooks.AddHook(ResetModeMod.WorldExitValidator, (_) => {
        this.CurrentNetMode = -1;
        return(true);
    });

    this.Session.OnModLoad();

    // Expose the current session state in debug data dumps.
    DataDumper.SetDumpSource("ResetMode", () => {
        return(ResetModeMod.Instance.Session.Data.ToString());
    });
}
// Verifies that an attachment deletion is captured by periodic export: puts
// an attachment, waits for the export, deletes it, waits for the deletion to
// be exported, then imports the backup folder incrementally and checks the
// attachment is gone.
public void CanBackupAttachmentDeletion()
{
    var backupPath = NewDataPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        string userId;
        using (var session = store.OpenSession())
        {
            // Configure periodic exports to run every 250 ms into backupPath.
            var periodicBackupSetup = new PeriodicExportSetup
            {
                LocalFolderName = backupPath,
                IntervalMilliseconds = 250
            };
            session.Store(periodicBackupSetup, PeriodicExportSetup.RavenDocumentKey);
            session.SaveChanges();
        }

        var backupStatus = GetPerodicBackupStatus(store.DocumentDatabase);

        store.DatabaseCommands.PutAttachment("attach/1", null, new MemoryStream(new byte[] { 1, 2, 3, 4 }), new RavenJObject());

        // First wait for the put to be exported, then for the deletion.
        WaitForPeriodicExport(store.DocumentDatabase, backupStatus);

        store.DatabaseCommands.DeleteAttachment("attach/1", null);

        WaitForPeriodicExport(store.DocumentDatabase, backupStatus);
    }

    using (var store = NewDocumentStore())
    {
        var dataDumper = new DataDumper(store.DocumentDatabase);
        dataDumper.ImportData(new SmugglerImportOptions
        {
            FromFile = backupPath,
        }, new SmugglerOptions
        {
            Incremental = true,
        }).Wait();

        // The deletion was replayed, so the attachment must not exist.
        Assert.Null(store.DatabaseCommands.GetAttachment("attach/1"));
    }
    IOExtensions.DeleteDirectory(backupPath);
}
// Exports a mixed database (2000 users interleaved with 4 developers) using
// Limit = 5 and a filter matching only Developers; verifies all 4 developers
// (fewer than the limit) appear in the dump — i.e. the limit applies to
// matched documents, not raw documents scanned.
public async Task CanPerformDumpWithLimitAndFilter_Dumper()
{
    var backupPath = NewDataPath("BackupFolder");
    using (var store = NewDocumentStore())
    {
        var counter = 0;
        // Interleave developers between two large user batches so the filter
        // has to skip non-matching documents on both sides.
        counter = InsertUsers(store, counter, 1000);
        counter = InsertDevelopers(store, counter, 2);
        counter = InsertUsers(store, counter, 1000);
        InsertDevelopers(store, counter, 2);

        WaitForIndexing(store);

        var options = new SmugglerOptions
        {
            Limit = 5,
            Incremental = true,
            Filters =
            {
                new FilterSetting
                {
                    Path = "@metadata.Raven-Entity-Name",
                    Values = { "Developers" },
                    ShouldMatch = true,
                }
            }
        };
        var dumper = new DataDumper(store.DocumentDatabase);
        await dumper.ExportData(new SmugglerExportOptions { ToFile = backupPath }, options);
    }

    VerifyDump(backupPath, store =>
    {
        using (var session = store.OpenSession())
        {
            // Only 4 developers exist, so all of them fit under the limit of 5.
            Assert.Equal(4, session.Query <Developer>().Customize(x => x.WaitForNonStaleResultsAsOfNow()).Count());
        }
    });
    IOExtensions.DeleteDirectory(backupPath);
}
// Registers the debug dump sources for the world UID (with seed and net
// mode) and the current player's UID; the player source also reports the
// legacy UID for comparison.
private void LoadDataSources()
{
    DataDumper.SetDumpSource("WorldUidWithSeed", () =>
        " " + WorldHelpers.GetUniqueIdWithSeed() + " (net mode: " + Main.netMode + ")"
    );

    DataDumper.SetDumpSource("PlayerUid", () => {
        if (Main.myPlayer < 0 || Main.myPlayer >= (Main.player.Length - 1)) {
            return " Unobtainable";
        }

        bool hasUid;
        string legacyUid = PlayerIdentityHelpers._GetUniqueId(Main.LocalPlayer, out hasUid);

        return " " + PlayerIdentityHelpers.GetMyProperUniqueId() + " (old uid: " + legacyUid + ")";
    });
}
// Imports the given dump file into a fresh store (non-incrementally), waits
// for indexing, and asserts it contains exactly the expected number of users.
private void AssertUsersCountInBackup(int expectedNumberOfUsers, string file)
{
    using (var store = NewDocumentStore())
    {
        var importer = new DataDumper(store.SystemDatabase);
        importer.SmugglerOptions.Incremental = false;
        importer.ImportData(new SmugglerImportOptions { FromFile = file }).Wait();

        WaitForIndexing(store);

        using (var session = store.OpenSession())
        {
            Assert.Equal(expectedNumberOfUsers, session.Query <User>().Count());
        }
    }
}
// Runs the system-test stat dump command: parses the optional record limit,
// dumps RST stats for the configured Start/End interval and mode, and
// reports success or failure as a CmdResult.
public override CmdResult Execute()
{
    try
    {
        var x = 0;
        _Logger.LogInfo("query ST stat from {0} to {1} in mode {2}", Start, End, Mode);
        // RecordLimit falls back to 0 when Limit is absent or non-numeric
        // (presumably meaning "no limit" — TODO confirm DataDumper semantics).
        var dumper = new DataDumper
        {
            RecordLimit = (!string.IsNullOrEmpty(Limit) && Int32.TryParse(Limit, out x)) ? x : 0
        };
        // A null result indicates an error or that no stats were found.
        var result = dumper.DumpRSTStatInterval(_localSaveFolder, Start, End, Mode);
        return result != null
            ? CmdResult.Success("Dumping SystemTest complete." + result)
            : CmdResult.Failure("Dumping SystemTest error or no stats");
    }
    catch (Exception e)
    {
        _Logger.LogError("Dumping SystemTest command error" + e.Message);
        _Logger.LogException(e);
        return CmdResult.Failure("Dumping SystemTest command error" + e.Message);
    }
}
/// <summary>
/// Creates a resource cache whose entries live for <paramref name="lifeTime"/>
/// milliseconds, with a background garbage collector dumping evicted data
/// under <paramref name="path"/>.
/// </summary>
public static ResourceCache CreateResourceCache(string path, int lifeTime, int gcInterval)
{
    var cache = new ResourceCache(TimeSpan.FromMilliseconds(lifeTime));
    var dumper = new DataDumper(path, "resource");

    // The collector starts itself from its constructor; no need to keep a reference.
    new TreeCacheGarbageCollector(cache, gcInterval, dumper);

    return cache;
}