public SmugglerApi(SmugglerOptions smugglerOptions, IAsyncDatabaseCommands commands, Action<string> output) : base(smugglerOptions) { this.commands = commands; this.output = output; batch = new List<RavenJObject>(); }
public void CanSaveImplicitChangesToDocumentsFromAQuery_UsingDumpFile() { using (var store = NewDocumentStore()) { store.Conventions.FindTypeTagName = FindTypeByTagName; var options = new SmugglerOptions { BackupPath = @"Dump of test-concurrency-exception2, 21 May 2013 14-36.ravendump" }; var dumper = new DataDumper(store.DocumentDatabase, options); dumper.ImportData(options); using (var session = store.OpenSession()) { session.Advanced.UseOptimisticConcurrency = true; var foos = session.Query<SectionData>() .Customize(x => x.WaitForNonStaleResults()) .Take(1024) .ToList(); Assert.True(foos.Count > 200); session.SaveChanges(); } } }
private Guid ExportDocuments(SmugglerOptions options, JsonTextWriter jsonWriter, Guid lastEtag) { int totalCount = 0; while (true) { var watch = Stopwatch.StartNew(); var documents = GetDocuments(lastEtag); watch.Stop(); if (documents.Length == 0) { ShowProgress("Done with reading documents, total: {0}", totalCount); return lastEtag; } var currentProcessingTime = watch.Elapsed; ModifyBatchSize(options, currentProcessingTime); var final = documents.Where(options.MatchFilters).ToList(); final.ForEach(item => item.WriteTo(jsonWriter)); totalCount += final.Count; ShowProgress("Reading batch of {0,3} documents, read so far: {1,10:#,#;;0}", documents.Length, totalCount); lastEtag = new Guid(documents.Last().Value<RavenJObject>("@metadata").Value<string>("@etag")); } }
public void CanGetCorrectResult() { using (var store = NewDocumentStore()) { var smugglerOptions = new SmugglerOptions(); var dataDumper = new DataDumper(store.DocumentDatabase, smugglerOptions); using (var stream = typeof(TroyMapReduceImport).Assembly.GetManifestResourceStream("Raven.Tests.Patching.failingdump11.ravendump")) { dataDumper.ImportData(stream, smugglerOptions).Wait(TimeSpan.FromSeconds(15)); } using (var s = store.OpenSession()) { s.Advanced.LuceneQuery<object>("Raven/DocumentsByEntityName").WaitForNonStaleResults().ToList(); store.DatabaseCommands.UpdateByIndex("Raven/DocumentsByEntityName", new IndexQuery {Query = "Tag:Regions"}, new ScriptedPatchRequest { Script = @"this.Test = 'test';" } , true); } } }
public static void ReadLastEtagsFromFile(SmugglerOptions options) { var log = LogManager.GetCurrentClassLogger(); var etagFileLocation = Path.Combine(options.BackupPath, IncrementalExportStateFile); if (!File.Exists(etagFileLocation)) { return; } using (var streamReader = new StreamReader(new FileStream(etagFileLocation, FileMode.Open))) using (var jsonReader = new JsonTextReader(streamReader)) { RavenJObject ravenJObject; try { ravenJObject = RavenJObject.Load(jsonReader); } catch (Exception e) { log.WarnException("Could not parse etag document from file : " + etagFileLocation + ", ignoring, will start from scratch", e); return; } options.LastDocsEtag = Etag.Parse(ravenJObject.Value<string>("LastDocEtag")); options.LastAttachmentEtag = Etag.Parse(ravenJObject.Value<string>("LastAttachmentEtag")); } }
private async Task<int> ImportAttachments(JsonTextReader jsonReader, SmugglerOptions options) { var count = 0; while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray) { var item = RavenJToken.ReadFrom(jsonReader); if ((options.OperateOnTypes & ItemType.Attachments) != ItemType.Attachments) { continue; } var attachmentExportInfo = new JsonSerializer { Converters = { new JsonToJsonConverter(), new StreamFromJsonConverter() } }.Deserialize<AttachmentExportInfo>(new RavenJTokenReader(item)); ShowProgress("Importing attachment {0}", attachmentExportInfo.Key); await PutAttachment(attachmentExportInfo); count++; } await PutAttachment(null); // force flush return count; }
private async Task<int> ImportIndexes(JsonReader jsonReader, SmugglerOptions options) { var count = 0; while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray) { var index = (RavenJObject)RavenJToken.ReadFrom(jsonReader); if ((options.OperateOnTypes & ItemType.Indexes) != ItemType.Indexes) { continue; } var indexName = index.Value<string>("name"); if (indexName.StartsWith("Temp/")) { continue; } if (index.Value<RavenJObject>("definition").Value<bool>("IsCompiled")) { continue; // can't import compiled indexes } if ((options.OperateOnTypes & ItemType.RemoveAnalyzers) == ItemType.RemoveAnalyzers) { index.Value<RavenJObject>("definition").Remove("Analyzers"); } await PutIndex(indexName, index); count++; } await PutIndex(null, null); return count; }
private async Task<int> ImportDeletedAttachments(JsonReader jsonReader, SmugglerOptions options) { var count = 0; while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray) { var item = RavenJToken.ReadFrom(jsonReader); var deletedAttachmentInfo = new JsonSerializer { Converters = { new JsonToJsonConverter(), new StreamFromJsonConverter() } }.Deserialize<Tombstone>(new RavenJTokenReader(item)); ShowProgress("Importing deleted attachments {0}", deletedAttachmentInfo.Key); await DeleteAttachment(deletedAttachmentInfo.Key); count++; } return count; }
protected async override Task<DatabaseTaskOutcome> RunImplementation() { var statistics = await DatabaseCommands.GetStatisticsAsync(); if (statistics.CountOfDocuments > 0) { ReportError("Database already contains documents"); return DatabaseTaskOutcome.Error; } Report("Creating Sample Data, Please wait..."); // this code assumes a small enough dataset, and doesn't do any sort // of paging or batching whatsoever. using (var sampleData = typeof(CreateSampleDataTask).Assembly.GetManifestResourceStream("Raven.Studio.Assets.EmbeddedData.Northwind.dump")) { Report("Reading documents"); var smugglerOptions = new SmugglerOptions { OperateOnTypes = ItemType.Documents | ItemType.Indexes | ItemType.Transformers, ShouldExcludeExpired = false, }; var smuggler = new SmugglerApi(smugglerOptions, DatabaseCommands, s => Report(s)); await smuggler.ImportData(sampleData, smugglerOptions); } return DatabaseTaskOutcome.Succesful; }
public virtual async Task ImportData(SmugglerImportOptions importOptions, SmugglerOptions options) { if (options.Incremental == false) { Stream stream = importOptions.FromStream; bool ownStream = false; try { if (stream == null) { stream = File.OpenRead(importOptions.FromFile); ownStream = true; } await ImportData(importOptions, options, stream); } finally { if (stream != null && ownStream) { stream.Dispose(); } } return; } #if SILVERLIGHT throw new NotSupportedException("Silverlight doesn't support importing incremental dump files."); #else var files = Directory.GetFiles(Path.GetFullPath(importOptions.FromFile)) .Where(file => ".ravendb-incremental-dump".Equals(Path.GetExtension(file), StringComparison.CurrentCultureIgnoreCase)) .OrderBy(File.GetLastWriteTimeUtc) .ToArray(); if (files.Length == 0) { return; } var optionsWithoutIndexes = new SmugglerOptions { Filters = options.Filters, OperateOnTypes = options.OperateOnTypes & ~(ItemType.Indexes | ItemType.Transformers) }; for (var i = 0; i < files.Length - 1; i++) { using (var fileStream = File.OpenRead(Path.Combine(importOptions.FromFile, files[i]))) { await ImportData(importOptions, optionsWithoutIndexes, fileStream); } } using (var fileStream = File.OpenRead(Path.Combine(importOptions.FromFile, files.Last()))) { await ImportData(importOptions, options, fileStream); } #endif }
protected void SetSmugglerOptions(SmugglerOptions options) { if (options == null) { throw new ArgumentNullException("options"); } SmugglerOptions = options; }
/// <summary> /// /// </summary> /// <param name="jsonWriter"></param> /// <param name="options"></param> /// <param name="result"></param> /// <param name="maxEtags">Max etags are inclusive</param> protected async override void ExportDeletions(JsonTextWriter jsonWriter, SmugglerOptions options, ExportDataResult result, LastEtagsInfo maxEtags) { jsonWriter.WritePropertyName("DocsDeletions"); jsonWriter.WriteStartArray(); result.LastDocDeleteEtag = await ExportDocumentsDeletion(options, jsonWriter, result.LastDocDeleteEtag, maxEtags.LastDocDeleteEtag.IncrementBy(1)); jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("AttachmentsDeletions"); jsonWriter.WriteStartArray(); result.LastAttachmentsDeleteEtag = await ExportAttachmentsDeletion(options, jsonWriter, result.LastAttachmentsDeleteEtag, maxEtags.LastAttachmentsDeleteEtag.IncrementBy(1)); jsonWriter.WriteEndArray(); }
private async Task<int> ImportDocuments(JsonTextReader jsonReader, SmugglerOptions options) { var now = SystemTime.UtcNow; var count = 0; while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray) { var document = (RavenJObject)RavenJToken.ReadFrom(jsonReader); var size = DocumentHelpers.GetRoughSize(document); if (size > 1024 * 1024) { Console.WriteLine("Large document warning: {0:#,#.##;;0} kb - {1}", (double)size / 1024, document["@metadata"].Value<string>("@id")); } if ((options.OperateOnTypes & ItemType.Documents) != ItemType.Documents) { continue; } if (options.MatchFilters(document) == false) { continue; } if (options.ShouldExcludeExpired && options.ExcludeExpired(document, now)) { continue; } if (!string.IsNullOrEmpty(options.TransformScript)) { document = await TransformDocument(document, options.TransformScript); } if (document == null) { continue; } PutDocument(document, options, (int)size); count++; if (count % options.BatchSize == 0) { ShowProgress("Read {0} documents", count); } } PutDocument(null, options, -1); // force flush return count; }
private async Task<int> ImportAttachments(JsonTextReader jsonReader, SmugglerOptions options) { var count = 0; if (jsonReader.Read() == false || jsonReader.TokenType == JsonToken.EndObject) { return count; } if (jsonReader.TokenType != JsonToken.PropertyName) { throw new InvalidDataException("PropertyName was expected"); } if (Equals("Attachments", jsonReader.Value) == false) { throw new InvalidDataException("Attachment property was expected"); } if (jsonReader.Read() == false) { return count; } if (jsonReader.TokenType != JsonToken.StartArray) { throw new InvalidDataException("StartArray was expected"); } while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray) { var item = RavenJToken.ReadFrom(jsonReader); if ((options.OperateOnTypes & ItemType.Attachments) != ItemType.Attachments) { continue; } var attachmentExportInfo = new JsonSerializer { Converters = { new JsonToJsonConverter() } }.Deserialize<AttachmentExportInfo>(new RavenJTokenReader(item)); ShowProgress("Importing attachment {0}", attachmentExportInfo.Key); await PutAttachment(attachmentExportInfo); count++; } await PutAttachment(null); // force flush return count; }
private async Task<int> ImportIndexes(JsonReader jsonReader, SmugglerOptions options) { var count = 0; if (jsonReader.Read() == false) { return count; } if (jsonReader.TokenType != JsonToken.PropertyName) { throw new InvalidDataException("PropertyName was expected"); } if (Equals("Indexes", jsonReader.Value) == false) { throw new InvalidDataException("Indexes property was expected"); } if (jsonReader.Read() == false) { return count; } if (jsonReader.TokenType != JsonToken.StartArray) { throw new InvalidDataException("StartArray was expected"); } while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray) { var index = RavenJToken.ReadFrom(jsonReader); if ((options.OperateOnTypes & ItemType.Indexes) != ItemType.Indexes) { continue; } var indexName = index.Value<string>("name"); if (indexName.StartsWith("Temp/")) { continue; } if (index.Value<RavenJObject>("definition").Value<bool>("IsCompiled")) { continue; // can't import compiled indexes } await PutIndex(indexName, index); count++; } await PutIndex(null, null); return count; }
public static void WriteLastEtagsFromFile(SmugglerOptions options) { var etagFileLocation = Path.Combine(options.BackupPath, IncrementalExportStateFile); using (var streamWriter = new StreamWriter(File.Create(etagFileLocation))) { new RavenJObject { { "LastDocEtag", options.LastDocsEtag.ToString() }, { "LastAttachmentEtag", options.LastAttachmentEtag.ToString() } }.WriteTo(new JsonTextWriter(streamWriter)); streamWriter.Flush(); } }
public static void ReadLastEtagsFromFile(SmugglerOptions options) { var etagFileLocation = Path.Combine(options.BackupPath, IncrementalExportStateFile); if (File.Exists(etagFileLocation)) { using (var streamReader = new StreamReader(new FileStream(etagFileLocation, FileMode.Open))) using (var jsonReader = new JsonTextReader(streamReader)) { var ravenJObject = RavenJObject.Load(jsonReader); options.LastDocsEtag = new Guid(ravenJObject.Value<string>("LastDocEtag")); options.LastAttachmentEtag = new Guid(ravenJObject.Value<string>("LastAttachmentEtag")); } } }
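Both ReadLastEtagsFromFile variants above, together with WriteLastEtagsFromFile, agree on the shape of the incremental-export state file: one small JSON object holding the two etags. A sketch of its contents (the etag values here are hypothetical):
// IncrementalExportStateFile, written by WriteLastEtagsFromFile and read back by ReadLastEtagsFromFile:
// {
//     "LastDocEtag": "01000000-0000-0005-0000-00000000012A",
//     "LastAttachmentEtag": "01000000-0000-0005-0000-000000000007"
// }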
public void Initialize(SmugglerOptions options) { if (options == null || string.IsNullOrEmpty(options.TransformScript)) return; jint = new JintEngine() .AllowClr(false) .SetDebugMode(false) .SetMaxRecursions(50) .SetMaxSteps(options.MaxStepsForTransformScript); jint.Run(string.Format(@" function Transform(docInner){{ return ({0}).apply(this, [docInner]); }};", options.TransformScript)); }
private void ModifyBatchSize(SmugglerOptions options, TimeSpan currentProcessingTime) { var change = Math.Max(1, options.BatchSize / 3); int quarterTime = options.Timeout / 4; if (currentProcessingTime > TimeSpan.FromMilliseconds(quarterTime)) { options.BatchSize -= change; } else { options.BatchSize += change; } options.BatchSize = Math.Min(maximumBatchSize, Math.Max(minimumBatchSize, options.BatchSize)); }
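A quick numeric walk-through of this ModifyBatchSize variant, assuming (as the division by 4 suggests) that Timeout is in milliseconds; all values below are hypothetical:
// Suppose options.BatchSize = 512 and options.Timeout = 4000, so quarterTime = 1000 ms
// and change = Math.Max(1, 512 / 3) = 170.
// Batch took 1500 ms (over a quarter of the timeout): BatchSize = 512 - 170 = 342.
// Batch took 300 ms (fast enough):                    BatchSize = 512 + 170 = 682.
// Either result is then clamped into [minimumBatchSize, maximumBatchSize].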
public void Initialize(SmugglerOptions options) { if (options == null || string.IsNullOrEmpty(options.TransformScript)) return; jint = new Engine(cfg => { cfg.AllowDebuggerStatement(false); cfg.MaxStatements(options.MaxStepsForTransformScript); }); jint.Execute(string.Format(@" function Transform(docInner){{ return ({0}).apply(this, [docInner]); }};", options.TransformScript)); }
private void ModifyBatchSize(SmugglerOptions options, TimeSpan currentProcessingTime) { if (currentProcessingTime > TimeSpan.FromSeconds(options.Timeout / 0.5)) { return; } var change = Math.Max(1, options.BatchSize / 3); if (currentProcessingTime > TimeSpan.FromSeconds(options.Timeout / 0.7)) { options.BatchSize -= change; } else { options.BatchSize += change; } }
public static void Initialize(SmugglerOptions options) { if (options != null && !string.IsNullOrEmpty(options.TransformScript)) { jint = new JintEngine() .AllowClr(false) .SetDebugMode(false) .SetMaxRecursions(50) .SetMaxSteps(options.MaxStepsForTransformScript); jint.Run(string.Format(@" function Transform(docInner){{ return ({0}).apply(this, [docInner]); }};", options.TransformScript)); } propertiesTypeByName = new Dictionary<string, JTokenType>(); }
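All three Initialize variants wrap options.TransformScript in the same Transform(docInner) shim, so the script must be a function expression that takes the document and returns it; ImportDocuments above drops a document when the transform yields null. A minimal hypothetical script:
var options = new SmugglerOptions
{
    // Hypothetical transform: the Transform shim invokes this function once per document.
    TransformScript = @"function(doc) {
        doc.Imported = true;              // stamp every imported document
        if (doc.Obsolete) return null;    // returning null filters the document out
        return doc;
    }"
};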
public async Task<HttpResponseMessage> ImportDatabase(int batchSize, bool includeExpiredDocuments, ItemType operateOnTypes, string filtersPipeDelimited, string transformScript) { if (!this.Request.Content.IsMimeMultipartContent()) { throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType); } var streamProvider = new MultipartMemoryStreamProvider(); await Request.Content.ReadAsMultipartAsync(streamProvider); var fileStream = await streamProvider.Contents .First(c => c.Headers.ContentDisposition.Name == "\"file\"") .ReadAsStreamAsync(); var dataDumper = new DataDumper(Database); var importOptions = new SmugglerImportOptions { FromStream = fileStream }; var options = new SmugglerOptions { BatchSize = batchSize, ShouldExcludeExpired = includeExpiredDocuments, OperateOnTypes = operateOnTypes, TransformScript = transformScript }; // Filters are passed in without the aid of the model binder. Instead, we pass in a list of FilterSettings using a string like this: pathHere;;;valueHere;;;true|||againPathHere;;;anotherValue;;;false // Why? Because I don't see a way to pass a list of a values to a WebAPI method that accepts a file upload, outside of passing in a simple string value and parsing it ourselves. if (filtersPipeDelimited != null) { options.Filters.AddRange(filtersPipeDelimited .Split(new string[] { "|||" }, StringSplitOptions.RemoveEmptyEntries) .Select(f => f.Split(new string[] { ";;;" }, StringSplitOptions.RemoveEmptyEntries)) .Select(o => new FilterSetting { Path = o[0], Values = new List<string> { o[1] }, ShouldMatch = bool.Parse(o[2]) })); } await dataDumper.ImportData(importOptions, options); return GetEmptyMessage(); }
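Following the comment above, a concrete (hypothetical) filtersPipeDelimited value and the FilterSetting instances it parses into:
// "path;;;value;;;shouldMatch" triples joined by "|||":
var filtersPipeDelimited = "@metadata.Raven-Entity-Name;;;Users;;;true|||Status;;;Deleted;;;false";
// parses to:
//   new FilterSetting { Path = "@metadata.Raven-Entity-Name", Values = { "Users" }, ShouldMatch = true }
//   new FilterSetting { Path = "Status", Values = { "Deleted" }, ShouldMatch = false }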
public async Task<HttpResponseMessage> ExportDatabase(SmugglerOptionsDto dto) { var smugglerOptions = new SmugglerOptions(); // smugglerOptions.OperateOnTypes = ; var result = GetEmptyMessage(); result.Content = new PushStreamContent(async (outputStream, content, arg3) => { await new DataDumper(Database).ExportData(new SmugglerExportOptions { ToStream = outputStream }, smugglerOptions); }); return result; }
private async Task<int> ImportTransformers(JsonTextReader jsonReader, SmugglerOptions options) { var count = 0; if (jsonReader.Read() == false || jsonReader.TokenType == JsonToken.EndObject) { return count; } if (jsonReader.TokenType != JsonToken.PropertyName) { throw new InvalidDataException("PropertyName was expected"); } if (Equals("Transformers", jsonReader.Value) == false) { throw new InvalidDataException("Transformers property was expected"); } if (jsonReader.Read() == false) { return count; } if (jsonReader.TokenType != JsonToken.StartArray) { throw new InvalidDataException("StartArray was expected"); } while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray) { var transformer = RavenJToken.ReadFrom(jsonReader); if ((options.OperateOnTypes & ItemType.Transformers) != ItemType.Transformers) { continue; } var transformerName = transformer.Value<string>("name"); await PutTransformer(transformerName, transformer); count++; } await PutTransformer(null, null); // force flush return count; }
public async Task<HttpResponseMessage> ImportDatabase() { if (!this.Request.Content.IsMimeMultipartContent()) { throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType); } var streamProvider = new MultipartMemoryStreamProvider(); await Request.Content.ReadAsMultipartAsync(streamProvider); var fileStream = await streamProvider.Contents.First().ReadAsStreamAsync(); var dataDumper = new DataDumper(Database); var importOptions = new SmugglerImportOptions { FromStream = fileStream }; var options = new SmugglerOptions(); await dataDumper.ImportData(importOptions, options); return GetEmptyMessage(); }
public virtual async Task ImportData(SmugglerOptions options, bool incremental = false) { if (incremental == false) { using (FileStream fileStream = File.OpenRead(options.BackupPath)) { await ImportData(fileStream, options); } return; } var files = Directory.GetFiles(Path.GetFullPath(options.BackupPath)) .Where(file => ".ravendb-incremental-dump".Equals(Path.GetExtension(file), StringComparison.CurrentCultureIgnoreCase)) .OrderBy(File.GetLastWriteTimeUtc) .ToArray(); if (files.Length == 0) { return; } var optionsWithoutIndexes = new SmugglerOptions { BackupPath = options.BackupPath, Filters = options.Filters, OperateOnTypes = options.OperateOnTypes & ~ItemType.Indexes }; for (var i = 0; i < files.Length - 1; i++) { using (var fileStream = File.OpenRead(Path.Combine(options.BackupPath, files[i]))) { await ImportData(fileStream, optionsWithoutIndexes); } } using (var fileStream = File.OpenRead(Path.Combine(options.BackupPath, files.Last()))) { await ImportData(fileStream, options); } }
private Guid ExportDocuments(SmugglerOptions options, JsonTextWriter jsonWriter, Guid lastEtag) { int totalCount = 0; while (true) { var watch = Stopwatch.StartNew(); var documents = GetDocuments(lastEtag); watch.Stop(); if (documents.Length == 0) { var databaseStatistics = GetStats(); var lastEtagComparable = new ComparableByteArray(lastEtag); if (lastEtagComparable.CompareTo(databaseStatistics.LastDocEtag) < 0) { lastEtag = Etag.Increment(lastEtag, smugglerOptions.BatchSize); ShowProgress("Got no results but didn't get to the last doc etag, trying from: {0}", lastEtag); continue; } ShowProgress("Done with reading documents, total: {0}", totalCount); return lastEtag; } var currentProcessingTime = watch.Elapsed; ModifyBatchSize(options, currentProcessingTime); var final = documents.Where(options.MatchFilters).ToList(); if (options.ShouldExcludeExpired) { final = final.Where(options.ExcludeExpired).ToList(); // filter the already-matched set, not the raw batch } final.ForEach(item => item.WriteTo(jsonWriter)); totalCount += final.Count; ShowProgress("Reading batch of {0,3} documents, read so far: {1,10:#,#;;0}", documents.Length, totalCount); lastEtag = new Guid(documents.Last().Value<RavenJObject>("@metadata").Value<string>("@etag")); } }
public void ShouldTakeUnder30Minutes() { var sw = Stopwatch.StartNew(); var smugglerOptions = new SmugglerOptions(); using (var store = NewDocumentStore()) { using (var stream = typeof(LoadBigFile).Assembly.GetManifestResourceStream("Raven.StressTests.Load.LoadBigFile.dump")) { var dataDumper = new DataDumper(store.DocumentDatabase, smugglerOptions) { Progress = Console.WriteLine }; dataDumper.ImportData(stream, smugglerOptions).Wait(); } } sw.Stop(); Assert.True(sw.Elapsed < TimeSpan.FromMinutes(30), string.Format("Test should run under 30 minutes, but ran {0} minutes.", sw.Elapsed.TotalMinutes)); }
public void CanGetCorrectResult() { using (var store = NewDocumentStore()) { var smugglerOptions = new SmugglerOptions(); var dataDumper = new DataDumper(store.DocumentDatabase, smugglerOptions); using (var stream = typeof(TroyMapReduceImport).Assembly.GetManifestResourceStream("Raven.Tests.MailingList.Sandbox.ravendump")) { dataDumper.ImportData(stream, smugglerOptions).Wait(); } using(var s = store.OpenSession()) { var objects = s.Query<object>("LogEntry/CountByDate") .Customize(x => x.WaitForNonStaleResults()) .ToList(); Assert.Equal(4, objects.Count); } } }
private void HandleBatch(SmugglerOptions options, List<RavenJObject> batch, long sizeOfDisk) { var sw = Stopwatch.StartNew(); var actualBatchSize = batch.Count; Guid lastEtagInBatch = FlushBatch(batch); sw.Stop(); var currentProcessingTime = sw.Elapsed; batchRecording.AddLast(Tuple.Create(lastEtagInBatch, SystemTime.UtcNow)); if (sizeOfDisk >= MaxSizeOfUncomressedSizeToSendToDatabase) { options.BatchSize = actualBatchSize - actualBatchSize / 10; } else { ModifyBatchSize(options, currentProcessingTime); } }
public void CanBackupToDirectory() { var backupPath = GetPath("BackupFolder"); using (var store = NewDocumentStore()) { Etag etagForBackups; using (var session = store.OpenSession()) { session.Store(new User { Name = "oren" }); var periodicBackupSetup = new PeriodicBackupSetup { LocalFolderName = backupPath, IntervalMilliseconds = 25 }; session.Store(periodicBackupSetup, PeriodicBackupSetup.RavenDocumentKey); session.SaveChanges(); etagForBackups = session.Advanced.GetEtagFor(periodicBackupSetup); } SpinWait.SpinUntil(() => store.DatabaseCommands.Get(PeriodicBackupSetup.RavenDocumentKey).Etag != etagForBackups); } using (var store = NewDocumentStore()) { var smugglerOptions = new SmugglerOptions { BackupPath = backupPath }; var dataDumper = new DataDumper(store.DocumentDatabase, smugglerOptions); dataDumper.ImportData(smugglerOptions, true); using (var session = store.OpenSession()) { Assert.Equal("oren", session.Load<User>(1).Name); } } IOExtensions.DeleteDirectory(backupPath); }
public async Task<HttpResponseMessage> CreateSampleData() { var results = Database.Queries.Query(Constants.DocumentsByEntityNameIndex, new IndexQuery(), CancellationToken.None); if (results.Results.Count > 0) { return GetMessageWithString("You cannot create sample data in a database that already contains documents", HttpStatusCode.BadRequest); } using (var sampleData = typeof(StudioTasksController).Assembly.GetManifestResourceStream("Raven.Database.Server.Assets.EmbeddedData.Northwind.dump")) { var smugglerOptions = new SmugglerOptions { OperateOnTypes = ItemType.Documents | ItemType.Indexes | ItemType.Transformers, ShouldExcludeExpired = false, }; var dataDumper = new DataDumper(Database); await dataDumper.ImportData(new SmugglerImportOptions {FromStream = sampleData}, smugglerOptions); } return GetEmptyMessage(); }
public void WaitForIndexing(SmugglerOptions options) { var justIndexingWait = Stopwatch.StartNew(); int tries = 0; while (true) { var databaseStatistics = GetStats(); if (databaseStatistics.StaleIndexes.Length != 0) { if (tries++ % 10 == 0) { Console.Write("\rWaiting {0} for indexing ({1} total).", justIndexingWait.Elapsed, stopwatch.Elapsed); } Thread.Sleep(100); continue; } Console.WriteLine("\rWaited {0} for indexing ({1} total).", justIndexingWait.Elapsed, stopwatch.Elapsed); break; } }
private async Task<int> ImportTransformers(JsonTextReader jsonReader, SmugglerOptions options) { var count = 0; while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray) { var transformer = RavenJToken.ReadFrom(jsonReader); if ((options.OperateOnTypes & ItemType.Transformers) != ItemType.Transformers) { continue; } var transformerName = transformer.Value<string>("name"); await PutTransformer(transformerName, transformer); count++; } await PutTransformer(null, null); // force flush return count; }
public virtual Task<string> ExportData(Stream stream, SmugglerOptions options, bool incremental, PeriodicBackupStatus backupStatus = null) { return ExportData(stream, options, incremental, true, backupStatus); }
protected SmugglerApiBase(SmugglerOptions smugglerOptions) { SmugglerOptions = smugglerOptions; }
protected abstract void PutDocument(RavenJObject document, SmugglerOptions options, int size);
public async virtual Task ImportData(Stream stream, SmugglerOptions options) { options = options ?? SmugglerOptions; if (options == null) { throw new ArgumentNullException("options"); } await DetectServerSupportedFeatures(); await EnsureDatabaseExists(); Stream sizeStream; var sw = Stopwatch.StartNew(); // Try to read the stream compressed, otherwise continue uncompressed. JsonTextReader jsonReader; try { sizeStream = new CountingStream(new GZipStream(stream, CompressionMode.Decompress)); var streamReader = new StreamReader(sizeStream); jsonReader = new JsonTextReader(streamReader); if (jsonReader.Read() == false) { return; } } catch (Exception e) { if (e is InvalidDataException == false #if SILVERLIGHT && e is ZlibException == false #endif ) { throw; } stream.Seek(0, SeekOrigin.Begin); sizeStream = new CountingStream(stream); // fall back to reading the raw stream uncompressed var streamReader = new StreamReader(sizeStream); jsonReader = new JsonTextReader(streamReader); if (jsonReader.Read() == false) { return; } } if (jsonReader.TokenType != JsonToken.StartObject) { throw new InvalidDataException("StartObject was expected"); } ShowProgress("Begin reading indexes"); var indexCount = await ImportIndexes(jsonReader, options); ShowProgress(string.Format("Done with reading indexes, total: {0}", indexCount)); ShowProgress("Begin reading documents"); var documentCount = await ImportDocuments(jsonReader, options); ShowProgress(string.Format("Done with reading documents, total: {0}", documentCount)); ShowProgress("Begin reading attachments"); var attachmentCount = await ImportAttachments(jsonReader, options); ShowProgress(string.Format("Done with reading attachments, total: {0}", attachmentCount)); ShowProgress("Begin reading transformers"); var transformersCount = await ImportTransformers(jsonReader, options); ShowProgress(string.Format("Done with reading transformers, total: {0}", transformersCount)); sw.Stop(); ShowProgress("Imported {0:#,#;;0} documents and {1:#,#;;0} attachments in {2:#,#;;0} ms", documentCount, attachmentCount, sw.ElapsedMilliseconds); }
public virtual async Task<ExportDataResult> ExportData(SmugglerExportOptions exportOptions, SmugglerOptions options) { SetSmugglerOptions(options); var result = new ExportDataResult { FilePath = exportOptions.ToFile, LastAttachmentsEtag = options.StartAttachmentsEtag, LastDocsEtag = options.StartDocsEtag, LastDocDeleteEtag = options.StartDocsDeletionEtag, LastAttachmentsDeleteEtag = options.StartAttachmentsDeletionEtag }; if (options.Incremental) { if (Directory.Exists(result.FilePath) == false) { if (File.Exists(result.FilePath)) { result.FilePath = Path.GetDirectoryName(result.FilePath) ?? result.FilePath; } else { Directory.CreateDirectory(result.FilePath); } } if (options.StartDocsEtag == Etag.Empty && options.StartAttachmentsEtag == Etag.Empty) { ReadLastEtagsFromFile(result); } result.FilePath = Path.Combine(result.FilePath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + ".ravendb-incremental-dump"); if (File.Exists(result.FilePath)) { var counter = 1; while (true) { // ReSharper disable once AssignNullToNotNullAttribute result.FilePath = Path.Combine(Path.GetDirectoryName(result.FilePath), SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + " - " + counter + ".ravendb-incremental-dump"); if (File.Exists(result.FilePath) == false) { break; } counter++; } } } SmugglerExportException lastException = null; bool ownedStream = exportOptions.ToStream == null; var stream = exportOptions.ToStream ?? File.Create(result.FilePath); try { await DetectServerSupportedFeatures(exportOptions.From); } catch (WebException e) { ShowProgress("Failed to query server for supported features. Reason : " + e.Message); SetLegacyMode(); // could not detect supported features, so run in legacy mode // lastException = new SmugglerExportException // { // LastEtag = Etag.Empty, // File = ownedStream ? result.FilePath : null // }; } try { using (var gZipStream = new GZipStream(stream, CompressionMode.Compress, leaveOpen: true)) using (var streamWriter = new StreamWriter(gZipStream)) { var jsonWriter = new JsonTextWriter(streamWriter) { Formatting = Formatting.Indented }; jsonWriter.WriteStartObject(); jsonWriter.WritePropertyName("Indexes"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Indexes)) { await ExportIndexes(exportOptions.From, jsonWriter); } jsonWriter.WriteEndArray(); // used to synchronize max returned values for put/delete operations var maxEtags = FetchCurrentMaxEtags(); jsonWriter.WritePropertyName("Docs"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Documents)) { try { result.LastDocsEtag = await ExportDocuments(exportOptions.From, options, jsonWriter, result.LastDocsEtag, maxEtags.LastDocsEtag); } catch (SmugglerExportException e) { result.LastDocsEtag = e.LastEtag; e.File = ownedStream ? result.FilePath : null; lastException = e; } } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Attachments"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Attachments) && lastException == null) { try { result.LastAttachmentsEtag = await ExportAttachments(exportOptions.From, jsonWriter, result.LastAttachmentsEtag, maxEtags.LastAttachmentsEtag); } catch (SmugglerExportException e) { result.LastAttachmentsEtag = e.LastEtag; e.File = ownedStream ? result.FilePath : null; lastException = e; } } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Transformers"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Transformers) && lastException == null) { await ExportTransformers(exportOptions.From, jsonWriter); } jsonWriter.WriteEndArray(); if (options.ExportDeletions) { ExportDeletions(jsonWriter, options, result, maxEtags); } jsonWriter.WriteEndObject(); streamWriter.Flush(); } if (options.Incremental) { WriteLastEtagsToFile(result, result.FilePath); } if (options.ExportDeletions) { PurgeTombstones(result); } if (lastException != null) { throw lastException; } return result; } finally { if (ownedStream && stream != null) { stream.Dispose(); } } }
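Putting the writer calls together, ExportData produces a single gzipped JSON object whose sections appear in this order; the two deletion sections are written only when options.ExportDeletions is set. A sketch of the layout (the tombstone shape follows ExportAttachmentsDeletion below):
// {
//     "Indexes":              [ ...index definitions... ],
//     "Docs":                 [ ...documents, each carrying its @metadata... ],
//     "Attachments":          [ ...attachment export infos... ],
//     "Transformers":         [ ...transformer definitions... ],
//     "DocsDeletions":        [ { "Key": "..." }, ... ],
//     "AttachmentsDeletions": [ { "Key": "..." }, ... ]
// }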
private void TimerCallback(object state) { if (currentTask != null) return; lock (this) { if (currentTask != null) return; currentTask = Task.Factory.StartNew(() => { var documentDatabase = Database; if (documentDatabase == null) return; using (LogContext.WithDatabase(documentDatabase.Name)) { try { var localBackupConfigs = backupConfigs; var localBackupStatus = backupStatus; if (localBackupConfigs == null) return; var databaseStatistics = documentDatabase.Statistics; // No-op if nothing has changed if (databaseStatistics.LastDocEtag == localBackupStatus.LastDocsEtag && databaseStatistics.LastAttachmentEtag == localBackupStatus.LastAttachmentsEtag) { return; } var backupPath = localBackupConfigs.LocalFolderName ?? Path.Combine(documentDatabase.Configuration.DataDirectory, "PeriodicBackup-Temp"); var options = new SmugglerOptions { BackupPath = backupPath, LastDocsEtag = localBackupStatus.LastDocsEtag, LastAttachmentEtag = localBackupStatus.LastAttachmentsEtag }; var dd = new DataDumper(documentDatabase, options); var filePath = dd.ExportData(null, true); // No-op if nothing has changed if (options.LastDocsEtag == localBackupStatus.LastDocsEtag && options.LastAttachmentEtag == localBackupStatus.LastAttachmentsEtag) { logger.Info("Periodic backup returned prematurely, nothing has changed since last backup"); return; } UploadToServer(filePath, localBackupConfigs); localBackupStatus.LastAttachmentsEtag = options.LastAttachmentEtag; localBackupStatus.LastDocsEtag = options.LastDocsEtag; var ravenJObject = RavenJObject.FromObject(localBackupStatus); ravenJObject.Remove("Id"); var putResult = documentDatabase.Put(PeriodicBackupStatus.RavenDocumentKey, null, ravenJObject, new RavenJObject(), null); // this results in backupStatus being refreshed localBackupStatus = backupStatus; if (localBackupStatus != null) { if (Etag.Increment(localBackupStatus.LastDocsEtag, 1) == putResult.ETag) // the last etag write was ours alone localBackupStatus.LastDocsEtag = putResult.ETag; // so we can skip it for the next time } } catch (ObjectDisposedException) { // shutting down, probably } catch (Exception e) { logger.ErrorException("Error when performing periodic backup", e); Database.AddAlert(new Alert { AlertLevel = AlertLevel.Error, CreatedAt = SystemTime.UtcNow, Message = e.Message, Title = "Error in Periodic Backup", Exception = e.ToString(), UniqueKey = "Periodic Backup Error", }); } } }) .ContinueWith(_ => { currentTask = null; }); } }
private void TimerCallback(object state) { if (executing) return; executing = true; PeriodicBackupSetup backupConfigs; try { // Setup doc might be deleted or changed by the user var document = Database.Get(PeriodicBackupSetup.RavenDocumentKey, null); if (document == null) { timer.Dispose(); timer = null; return; } backupConfigs = document.DataAsJson.JsonDeserialization<PeriodicBackupSetup>(); if (backupConfigs.Interval <= 0) { timer.Dispose(); timer = null; return; } } catch (Exception ex) { logger.WarnException(ex.Message, ex); executing = false; return; } try { var options = new SmugglerOptions { BackupPath = Path.GetTempPath(), //TODO temp path in data folder instead LastDocsEtag = backupConfigs.LastDocsEtag, LastAttachmentEtag = backupConfigs.LastAttachmentsEtag }; var dd = new DataDumper(Database, options); var filePath = dd.ExportData(null, true); // No-op if nothing has changed if (options.LastDocsEtag == backupConfigs.LastDocsEtag && options.LastAttachmentEtag == backupConfigs.LastAttachmentsEtag) { logger.Info("Periodic backup returned prematurely, nothing has changed since last backup"); return; } DoUpload(filePath, backupConfigs); // Remember the current position only once we are successful, this allows for compensatory backups // in case of failures. We reload the setup document to make sure we don't override changes made by // the user. // Setup doc might be deleted or changed by the user var document = Database.Get(PeriodicBackupSetup.RavenDocumentKey, null); if (document == null) { timer.Dispose(); timer = null; return; } backupConfigs = document.DataAsJson.JsonDeserialization<PeriodicBackupSetup>(); backupConfigs.LastAttachmentsEtag = options.LastAttachmentEtag; backupConfigs.LastDocsEtag = options.LastDocsEtag; Database.Put(PeriodicBackupSetup.RavenDocumentKey, null, RavenJObject.FromObject(backupConfigs), new RavenJObject(), null); if (backupConfigs.Interval != interval) { if (backupConfigs.Interval <= 0) { timer.Dispose(); timer = null; } else { interval = backupConfigs.Interval; timer.Change(TimeSpan.FromMinutes(backupConfigs.Interval), TimeSpan.FromMinutes(backupConfigs.Interval)); } } } catch (Exception e) { logger.ErrorException("Error when performing periodic backup", e); } finally { executing = false; } }
private void ExecuteInternal() { includeAttachments = taskModel.IncludeAttachments.Value; includeDocuments = taskModel.IncludeDocuments.Value; includeIndexes = taskModel.IncludeIndexes.Value; includeTransformers = taskModel.IncludeTransforms.Value; if (includeDocuments == false && includeAttachments == false && includeIndexes == false && includeTransformers == false) return; var openFile = new OpenFileDialog { Filter = "Raven Dumps|*.ravendump;*.raven.dump", }; if (openFile.ShowDialog() != true) return; taskModel.TaskStatus = TaskStatus.Started; taskModel.CanExecute.Value = false; output(String.Format("Importing from {0}", openFile.File.Name)); var stream = openFile.File.OpenRead(); ItemType operateOnTypes = 0; if (includeDocuments) { operateOnTypes |= ItemType.Documents; } if (includeAttachments) { operateOnTypes |= ItemType.Attachments; } if (includeIndexes) { operateOnTypes |= ItemType.Indexes; } if (includeTransformers) { operateOnTypes |= ItemType.Transformers; } if (taskModel.UseCollections.Value) { foreach (var collection in taskModel.Collections.Where(collection => collection.Selected)) { taskModel.Filters.Add(new FilterSetting { Path = "@metadata.Raven-Entity-Name", Value = collection.Name, ShouldMatch = true }); } } var smugglerOptions = new SmugglerOptions { BatchSize = taskModel.Options.Value.BatchSize, Filters = taskModel.Filters.ToList(), TransformScript = taskModel.ScriptData, ShouldExcludeExpired = taskModel.Options.Value.ShouldExcludeExpired, OperateOnTypes = operateOnTypes }; smuggler = new SmugglerApi(smugglerOptions, DatabaseCommands, output); smuggler.ImportData(stream, smugglerOptions) .Catch(exception => Infrastructure.Execute.OnTheUI(() => taskModel.ReportError(exception))) .Finally(() => { taskModel.TaskStatus = TaskStatus.Ended; taskModel.CanExecute.Value = true; }); }
protected override void PutDocument(RavenJObject document, SmugglerOptions options) { if (document != null) { var metadata = document.Value<RavenJObject>("@metadata"); var key = metadata.Value<string>("@id"); document.Remove("@metadata"); bulkInsertBatch.Add(new JsonDocument { Key = key, Metadata = metadata, DataAsJson = document, }); return; } var batchToSave = new List<IEnumerable<JsonDocument>> { bulkInsertBatch }; bulkInsertBatch = new List<JsonDocument>(); database.Documents.BulkInsert(new BulkInsertOptions { BatchSize = options.BatchSize, OverwriteExisting = true }, batchToSave, Guid.NewGuid()); }
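Note the flushing convention this PutDocument shares with the PutAttachment, PutIndex and PutTransformer calls above: a null argument means "flush whatever is buffered". A hypothetical caller shape, matching this two-argument overload:
foreach (var document in documents)
{
    PutDocument(document, options);  // buffers into bulkInsertBatch
}
PutDocument(null, options);          // null forces the final partial batch to be flushed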
protected Task<Etag> ExportAttachmentsDeletion(SmugglerOptions options, JsonTextWriter jsonWriter, Etag startAttachmentsDeletionEtag, Etag maxAttachmentEtag) { var lastEtag = startAttachmentsDeletionEtag; database.TransactionalStorage.Batch(accessor => { foreach (var listItem in accessor.Lists.Read(Constants.RavenPeriodicBackupsAttachmentsTombstones, startAttachmentsDeletionEtag, maxAttachmentEtag, int.MaxValue)) { var o = new RavenJObject { {"Key", listItem.Key} }; o.WriteTo(jsonWriter); lastEtag = listItem.Etag; } }); return new CompletedTask<Etag>(lastEtag); }
public DataDumper(DocumentDatabase database, SmugglerOptions options) : base(options) { _database = database; }
private async Task<int> ImportIndexes(JsonReader jsonReader, SmugglerOptions options) { var count = 0; if (jsonReader.Read() == false) return count; if (jsonReader.TokenType != JsonToken.PropertyName) throw new InvalidDataException("PropertyName was expected"); if (Equals("Indexes", jsonReader.Value) == false) throw new InvalidDataException("Indexes property was expected"); if (jsonReader.Read() == false) return count; if (jsonReader.TokenType != JsonToken.StartArray) throw new InvalidDataException("StartArray was expected"); while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray) { var index = (RavenJObject)RavenJToken.ReadFrom(jsonReader); if ((options.OperateOnTypes & ItemType.Indexes) != ItemType.Indexes) continue; var indexName = index.Value<string>("name"); if (indexName.StartsWith("Temp/")) continue; if (index.Value<RavenJObject>("definition").Value<bool>("IsCompiled")) continue; // can't import compiled indexes if ((options.OperateOnTypes & ItemType.RemoveAnalyzers) == ItemType.RemoveAnalyzers) { index.Value<RavenJObject>("definition").Remove("Analyzers"); } await PutIndex(indexName, index); count++; } await PutIndex(null, null); return count; }
public virtual async Task<string> ExportData(Stream stream, SmugglerOptions options, bool incremental, bool lastEtagsFromFile, PeriodicBackupStatus backupStatus) { options = options ?? SmugglerOptions; if (options == null) throw new ArgumentNullException("options"); var file = options.BackupPath; #if !SILVERLIGHT if (incremental) { if (Directory.Exists(options.BackupPath) == false) { if (File.Exists(options.BackupPath)) options.BackupPath = Path.GetDirectoryName(options.BackupPath) ?? options.BackupPath; else Directory.CreateDirectory(options.BackupPath); } if (lastEtagsFromFile && backupStatus == null) ReadLastEtagsFromFile(options); if (backupStatus != null) ReadLastEtagsFromClass(options, backupStatus); file = Path.Combine(options.BackupPath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + ".ravendb-incremental-dump"); if (File.Exists(file)) { var counter = 1; while (true) { file = Path.Combine(options.BackupPath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + " - " + counter + ".ravendb-incremental-dump"); if (File.Exists(file) == false) break; counter++; } } } #else if(incremental) throw new NotSupportedException("Incremental exports are not supported in SL."); #endif await DetectServerSupportedFeatures(); SmugglerExportException lastException = null; bool ownedStream = stream == null; try { stream = stream ?? File.Create(file); using (var gZipStream = new GZipStream(stream, CompressionMode.Compress, #if SILVERLIGHT CompressionLevel.BestCompression, #endif leaveOpen: true)) using (var streamWriter = new StreamWriter(gZipStream)) { var jsonWriter = new JsonTextWriter(streamWriter) { Formatting = Formatting.Indented }; jsonWriter.WriteStartObject(); jsonWriter.WritePropertyName("Indexes"); jsonWriter.WriteStartArray(); if ((options.OperateOnTypes & ItemType.Indexes) == ItemType.Indexes) { await ExportIndexes(jsonWriter); } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Docs"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Documents)) { try { options.LastDocsEtag = await ExportDocuments(options, jsonWriter, options.LastDocsEtag); } catch (SmugglerExportException e) { options.LastDocsEtag = e.LastEtag; e.File = file; lastException = e; } } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Attachments"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Attachments) && lastException == null) { try { options.LastAttachmentEtag = await ExportAttachments(jsonWriter, options.LastAttachmentEtag); } catch (SmugglerExportException e) { options.LastAttachmentEtag = e.LastEtag; e.File = file; lastException = e; } } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Transformers"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Transformers) && lastException == null) { await ExportTransformers(jsonWriter); } jsonWriter.WriteEndArray(); jsonWriter.WriteEndObject(); streamWriter.Flush(); } #if !SILVERLIGHT if (incremental && lastEtagsFromFile) WriteLastEtagsFromFile(options); #endif if (lastException != null) throw lastException; return file; } finally { if (ownedStream && stream != null) stream.Dispose(); } }
private Guid ExportDocuments(SmugglerOptions options, JsonTextWriter jsonWriter, Guid lastEtag) { int totalCount = 0; while (true) { var documents = GetDocuments(lastEtag); if (documents.Length == 0) { ShowProgress("Done with reading documents, total: {0}", totalCount); return lastEtag; } var final = documents.Where(options.MatchFilters).ToList(); final.ForEach(item => item.WriteTo(jsonWriter)); totalCount += final.Count; ShowProgress("Reading batch of {0,3} documents, read so far: {1,10:#,#;;0}", documents.Length, totalCount); lastEtag = new Guid(documents.Last().Value<RavenJObject>("@metadata").Value<string>("@etag")); } }
public virtual async Task<string> ExportData(Stream stream, SmugglerOptions options, bool incremental, bool lastEtagsFromFile, PeriodicBackupStatus backupStatus) { options = options ?? SmugglerOptions; if (options == null) { throw new ArgumentNullException("options"); } var file = options.BackupPath; #if !SILVERLIGHT if (incremental) { if (Directory.Exists(options.BackupPath) == false) { if (File.Exists(options.BackupPath)) { options.BackupPath = Path.GetDirectoryName(options.BackupPath) ?? options.BackupPath; } else { Directory.CreateDirectory(options.BackupPath); } } if (lastEtagsFromFile && backupStatus == null) { ReadLastEtagsFromFile(options); } if (backupStatus != null) { ReadLastEtagsFromClass(options, backupStatus); } file = Path.Combine(options.BackupPath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + ".ravendb-incremental-dump"); if (File.Exists(file)) { var counter = 1; while (true) { file = Path.Combine(options.BackupPath, SystemTime.UtcNow.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + " - " + counter + ".ravendb-incremental-dump"); if (File.Exists(file) == false) { break; } counter++; } } } #else if (incremental) { throw new NotSupportedException("Incremental exports are not supported in SL."); } #endif try { await DetectServerSupportedFeatures(); } catch (WebException e) { ShowProgress("Failed to query server for supported features. Reason : " + e.Message); SetLegacyMode(); // could not detect supported features, so run in legacy mode } SmugglerExportException lastException = null; bool ownedStream = stream == null; try { stream = stream ?? File.Create(file); using (var gZipStream = new GZipStream(stream, CompressionMode.Compress, #if SILVERLIGHT CompressionLevel.BestCompression, #endif leaveOpen: true)) using (var streamWriter = new StreamWriter(gZipStream)) { var jsonWriter = new JsonTextWriter(streamWriter) { Formatting = Formatting.Indented }; jsonWriter.WriteStartObject(); jsonWriter.WritePropertyName("Indexes"); jsonWriter.WriteStartArray(); if ((options.OperateOnTypes & ItemType.Indexes) == ItemType.Indexes) { await ExportIndexes(jsonWriter); } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Docs"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Documents)) { try { options.LastDocsEtag = await ExportDocuments(options, jsonWriter, options.LastDocsEtag); } catch (SmugglerExportException e) { options.LastDocsEtag = e.LastEtag; e.File = file; lastException = e; } } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Attachments"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Attachments) && lastException == null) { try { options.LastAttachmentEtag = await ExportAttachments(jsonWriter, options.LastAttachmentEtag); } catch (SmugglerExportException e) { options.LastAttachmentEtag = e.LastEtag; e.File = file; lastException = e; } } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Transformers"); jsonWriter.WriteStartArray(); if (options.OperateOnTypes.HasFlag(ItemType.Transformers) && lastException == null) { await ExportTransformers(jsonWriter); } jsonWriter.WriteEndArray(); jsonWriter.WriteEndObject(); streamWriter.Flush(); } #if !SILVERLIGHT if (incremental && lastEtagsFromFile) { WriteLastEtagsFromFile(options); } #endif if (lastException != null) { throw lastException; } return file; } finally { if (ownedStream && stream != null) { stream.Dispose(); } } }
private void ReadLastEtagsFromClass(SmugglerOptions options, PeriodicBackupStatus backupStatus) { options.LastAttachmentEtag = backupStatus.LastAttachmentsEtag; options.LastDocsEtag = backupStatus.LastDocsEtag; }
private void TimerCallback(object state) { if (currentTask != null) return; lock (this) { if (currentTask != null) return; currentTask = Task.Factory.StartNew(() => { using (LogManager.OpenMappedContext("database", Database.Name ?? Constants.SystemDatabase)) using (new DisposableAction(() => LogContext.DatabaseName.Value = null)) { LogContext.DatabaseName.Value = Database.Name; try { var localBackupConfigs = backupConfigs; if (localBackupConfigs == null) return; var backupPath = localBackupConfigs.LocalFolderName ?? Path.Combine(Database.Configuration.DataDirectory, "PeriodicBackup-Temp"); var options = new SmugglerOptions { BackupPath = backupPath, LastDocsEtag = localBackupConfigs.LastDocsEtag, LastAttachmentEtag = localBackupConfigs.LastAttachmentsEtag }; var dd = new DataDumper(Database, options); var filePath = dd.ExportData(null, true); // No-op if nothing has changed if (options.LastDocsEtag == backupConfigs.LastDocsEtag && options.LastAttachmentEtag == backupConfigs.LastAttachmentsEtag) { logger.Info("Periodic backup returned prematurely, nothing has changed since last backup"); return; } UploadToServer(filePath, localBackupConfigs); localBackupConfigs.LastAttachmentsEtag = options.LastAttachmentEtag; localBackupConfigs.LastDocsEtag = options.LastDocsEtag; if (backupConfigs == null) // it was removed by the user? { localBackupConfigs.IntervalMilliseconds = -1; // this disables the periodic backup } var ravenJObject = RavenJObject.FromObject(localBackupConfigs); ravenJObject.Remove("Id"); var putResult = Database.Put(PeriodicBackupSetup.RavenDocumentKey, null, ravenJObject, new RavenJObject(), null); if (Etag.Increment(localBackupConfigs.LastDocsEtag, 1) == putResult.ETag) // the last etag write was ours alone localBackupConfigs.LastDocsEtag = putResult.ETag; // so we can skip it for the next time } catch (ObjectDisposedException) { // shutting down, probably } catch (Exception e) { Database.AddAlert(new Alert { AlertLevel = AlertLevel.Error, CreatedAt = SystemTime.UtcNow, Message = e.Message, Title = "Error in Periodic Backup", Exception = e }); logger.ErrorException("Error when performing periodic backup", e); } } }) .ContinueWith(_ => { currentTask = null; }); } }
private async Task<DatabaseTaskOutcome> ExecuteInternal() { if (includeDocuments == false && includeAttachments == false && includeIndexes == false && includeTransformers == false) return DatabaseTaskOutcome.Abandoned; var openFile = new OpenFileDialog { Filter = "Raven Dumps|*.ravendump;*.raven.dump", }; if (openFile.ShowDialog() != true) return DatabaseTaskOutcome.Abandoned; Report(String.Format("Importing from {0}", openFile.File.Name)); using (var stream = openFile.File.OpenRead()) { ItemType operateOnTypes = 0; if (includeDocuments) { operateOnTypes |= ItemType.Documents; } if (includeAttachments) { operateOnTypes |= ItemType.Attachments; } if (includeIndexes) { operateOnTypes |= ItemType.Indexes; } if (includeTransformers) { operateOnTypes |= ItemType.Transformers; } var smugglerOptions = new SmugglerOptions { BatchSize = batchSize, Filters = filterSettings, TransformScript = transformScript, ShouldExcludeExpired = shouldExcludeExpired, OperateOnTypes = operateOnTypes }; var smuggler = new SmugglerApi(smugglerOptions, DatabaseCommands, message => Report(message)); await smuggler.ImportData(stream, smugglerOptions); } return DatabaseTaskOutcome.Succesful; }
public void ImportData(SmugglerOptions options, bool incremental = false) { if (incremental == false) { using (FileStream fileStream = File.OpenRead(options.File)) { ImportData(fileStream, options); } return; } var files = Directory.GetFiles(Path.GetFullPath(options.File)) .Where(file => ".ravendb-incremental-dump".Equals(Path.GetExtension(file), StringComparison.CurrentCultureIgnoreCase)) .OrderBy(File.GetLastWriteTimeUtc) .ToArray(); if (files.Length == 0) return; var optionsWithoutIndexes = new SmugglerOptions { File = options.File, Filters = options.Filters, OperateOnTypes = options.OperateOnTypes & ~ItemType.Indexes }; for (var i = 0; i < files.Length - 1; i++) { using (var fileStream = File.OpenRead(Path.Combine(options.File, files[i]))) { ImportData(fileStream, optionsWithoutIndexes); } } using (var fileStream = File.OpenRead(Path.Combine(options.File, files.Last()))) { ImportData(fileStream, options); } }
public void ImportData(Stream stream, SmugglerOptions options, bool importIndexes = true) { EnsureDatabaseExists(); var sw = Stopwatch.StartNew(); // Try to read the stream compressed, otherwise continue uncompressed. JsonTextReader jsonReader; try { var streamReader = new StreamReader(new GZipStream(stream, CompressionMode.Decompress)); jsonReader = new JsonTextReader(streamReader); if (jsonReader.Read() == false) return; } catch (InvalidDataException) { stream.Seek(0, SeekOrigin.Begin); var streamReader = new StreamReader(stream); jsonReader = new JsonTextReader(streamReader); if (jsonReader.Read() == false) return; } if (jsonReader.TokenType != JsonToken.StartObject) throw new InvalidDataException("StartObject was expected"); // should read indexes now if (jsonReader.Read() == false) return; if (jsonReader.TokenType != JsonToken.PropertyName) throw new InvalidDataException("PropertyName was expected"); if (Equals("Indexes", jsonReader.Value) == false) throw new InvalidDataException("Indexes property was expected"); if (jsonReader.Read() == false) return; if (jsonReader.TokenType != JsonToken.StartArray) throw new InvalidDataException("StartArray was expected"); while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray) { var index = RavenJToken.ReadFrom(jsonReader); if ((options.OperateOnTypes & ItemType.Indexes) != ItemType.Indexes) continue; var indexName = index.Value<string>("name"); if (indexName.StartsWith("Raven/") || indexName.StartsWith("Temp/")) continue; PutIndex(indexName, index); } // should read documents now if (jsonReader.Read() == false) return; if (jsonReader.TokenType != JsonToken.PropertyName) throw new InvalidDataException("PropertyName was expected"); if (Equals("Docs", jsonReader.Value) == false) throw new InvalidDataException("Docs property was expected"); if (jsonReader.Read() == false) return; if (jsonReader.TokenType != JsonToken.StartArray) throw new InvalidDataException("StartArray was expected"); var batch = new List<RavenJObject>(); int totalCount = 0; while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray) { var document = (RavenJObject)RavenJToken.ReadFrom(jsonReader); if ((options.OperateOnTypes & ItemType.Documents) != ItemType.Documents) continue; if (options.MatchFilters(document) == false) continue; totalCount += 1; batch.Add(document); if (batch.Count >= 128) FlushBatch(batch); } FlushBatch(batch); var attachmentCount = 0; if (jsonReader.Read() == false || jsonReader.TokenType == JsonToken.EndObject) return; if (jsonReader.TokenType != JsonToken.PropertyName) throw new InvalidDataException("PropertyName was expected"); if (Equals("Attachments", jsonReader.Value) == false) throw new InvalidDataException("Attachment property was expected"); if (jsonReader.Read() == false) return; if (jsonReader.TokenType != JsonToken.StartArray) throw new InvalidDataException("StartArray was expected"); while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndArray) { attachmentCount += 1; var item = RavenJToken.ReadFrom(jsonReader); if ((options.OperateOnTypes & ItemType.Attachments) != ItemType.Attachments) continue; var attachmentExportInfo = new JsonSerializer { Converters = { new TrivialJsonToJsonJsonConverter() } }.Deserialize<AttachmentExportInfo>(new RavenJTokenReader(item)); ShowProgress("Importing attachment {0}", attachmentExportInfo.Key); PutAttachment(attachmentExportInfo); } ShowProgress("Imported {0:#,#;;0} documents and {1:#,#;;0} attachments in {2:#,#;;0} ms", totalCount, attachmentCount, sw.ElapsedMilliseconds); }
private async Task<Etag> ExportDocuments(SmugglerOptions options, JsonTextWriter jsonWriter, Etag lastEtag) { var now = SystemTime.UtcNow; var totalCount = 0; var lastReport = SystemTime.UtcNow; var reportInterval = TimeSpan.FromSeconds(2); var errorcount = 0; ShowProgress("Exporting Documents"); while (true) { bool hasDocs = false; try { var maxRecords = options.Limit - totalCount; if (maxRecords > 0) { using (var documents = await GetDocuments(lastEtag, Math.Min(maxRecords, options.BatchSize))) { var watch = Stopwatch.StartNew(); while (await documents.MoveNextAsync()) { hasDocs = true; var document = documents.Current; lastEtag = Etag.Parse(document.Value<RavenJObject>("@metadata").Value<string>("@etag")); if (!options.MatchFilters(document)) { continue; } if (options.ShouldExcludeExpired && options.ExcludeExpired(document, now)) { continue; } document.WriteTo(jsonWriter); totalCount++; if (totalCount % 1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval) { ShowProgress("Exported {0} documents", totalCount); lastReport = SystemTime.UtcNow; } if (watch.ElapsedMilliseconds > 100) { errorcount++; } watch.Start(); } } if (hasDocs) { continue; } // The server can filter all the results. In this case, we need to try to go over with the next batch. // Note that if the server's etag restart count is not the same, this won't guard against an infinite loop. // (This code provides support for legacy RavenDB version: 1.0) var databaseStatistics = await GetStats(); var lastEtagComparable = new ComparableByteArray(lastEtag); if (lastEtagComparable.CompareTo(databaseStatistics.LastDocEtag) < 0) { lastEtag = EtagUtil.Increment(lastEtag, maxRecords); ShowProgress("Got no results but didn't get to the last doc etag, trying from: {0}", lastEtag); continue; } } } catch (Exception e) { ShowProgress("Got Exception during smuggler export. Exception: {0}. ", e.Message); ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag); throw new SmugglerExportException(e.Message, e) { LastEtag = lastEtag, }; } ShowProgress("Done with reading documents, total: {0}, lastEtag: {1}", totalCount, lastEtag); return lastEtag; } }
public virtual async Task ImportData(SmugglerImportOptions importOptions, SmugglerOptions options, Stream stream) { SetSmugglerOptions(options); await DetectServerSupportedFeatures(importOptions.To); await EnsureDatabaseExists(importOptions.To); Stream sizeStream; var sw = Stopwatch.StartNew(); // Try to read the stream compressed, otherwise continue uncompressed. JsonTextReader jsonReader; try { sizeStream = new CountingStream(new GZipStream(stream, CompressionMode.Decompress)); var streamReader = new StreamReader(sizeStream); jsonReader = new JsonTextReader(streamReader); if (jsonReader.Read() == false) { return; } } catch (Exception e) { if (e is InvalidDataException == false) { throw; } // Fall back to reading the raw stream uncompressed. stream.Seek(0, SeekOrigin.Begin); sizeStream = new CountingStream(stream); var streamReader = new StreamReader(sizeStream); jsonReader = new JsonTextReader(streamReader); if (jsonReader.Read() == false) { return; } } if (jsonReader.TokenType != JsonToken.StartObject) { throw new InvalidDataException("StartObject was expected"); } var exportCounts = new Dictionary<string, int>(); var exportSectionRegistar = new Dictionary<string, Func<int>>(); exportSectionRegistar.Add("Indexes", () => { ShowProgress("Begin reading indexes"); var indexCount = ImportIndexes(jsonReader, options).Result; ShowProgress(string.Format("Done with reading indexes, total: {0}", indexCount)); return indexCount; }); exportSectionRegistar.Add("Docs", () => { ShowProgress("Begin reading documents"); var documentCount = ImportDocuments(jsonReader, options).Result; ShowProgress(string.Format("Done with reading documents, total: {0}", documentCount)); return documentCount; }); exportSectionRegistar.Add("Attachments", () => { ShowProgress("Begin reading attachments"); var attachmentCount = ImportAttachments(importOptions.To, jsonReader, options).Result; ShowProgress(string.Format("Done with reading attachments, total: {0}", attachmentCount)); return attachmentCount; }); exportSectionRegistar.Add("Transformers", () => { ShowProgress("Begin reading transformers"); var transformersCount = ImportTransformers(jsonReader, options).Result; ShowProgress(string.Format("Done with reading transformers, total: {0}", transformersCount)); return transformersCount; }); exportSectionRegistar.Add("DocsDeletions", () => { ShowProgress("Begin reading deleted documents"); var deletedDocumentsCount = ImportDeletedDocuments(jsonReader, options).Result; ShowProgress(string.Format("Done with reading deleted documents, total: {0}", deletedDocumentsCount)); return deletedDocumentsCount; }); exportSectionRegistar.Add("AttachmentsDeletions", () => { ShowProgress("Begin reading deleted attachments"); var deletedAttachmentsCount = ImportDeletedAttachments(jsonReader, options).Result; ShowProgress(string.Format("Done with reading deleted attachments, total: {0}", deletedAttachmentsCount)); return deletedAttachmentsCount; }); exportSectionRegistar.Keys.ForEach(k => exportCounts[k] = 0); while (jsonReader.Read() && jsonReader.TokenType != JsonToken.EndObject) { if (jsonReader.TokenType != JsonToken.PropertyName) { throw new InvalidDataException("PropertyName was expected"); } Func<int> currentAction; var currentSection = jsonReader.Value.ToString(); if (exportSectionRegistar.TryGetValue(currentSection, out currentAction) == false) { throw new InvalidDataException("Unexpected property found: " + jsonReader.Value); } if (jsonReader.Read() == false) { exportCounts[currentSection] = 0; continue; } if (jsonReader.TokenType != JsonToken.StartArray) { throw new InvalidDataException("StartArray was expected"); } exportCounts[currentSection] = currentAction(); } sw.Stop(); ShowProgress("Imported {0:#,#;;0} documents and {1:#,#;;0} attachments, deleted {2:#,#;;0} documents and {3:#,#;;0} attachments in {4:#,#;;0} ms", exportCounts["Docs"], exportCounts["Attachments"], exportCounts["DocsDeletions"], exportCounts["AttachmentsDeletions"], sw.ElapsedMilliseconds); }
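// Extension sketch (hypothetical): because ImportData dispatches on the section name through
// the exportSectionRegistar dictionary, one more dump section could be supported by registering
// one more handler before the read loop. "Identities" and ImportIdentities are illustrative
// names, not part of the original code.
exportSectionRegistar.Add("Identities", () =>
{
    ShowProgress("Begin reading identities");
    var identityCount = ImportIdentities(jsonReader, options).Result; // hypothetical helper
    ShowProgress(string.Format("Done with reading identities, total: {0}", identityCount));
    return identityCount;
});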
protected abstract void ExportDeletions(JsonTextWriter jsonWriter, SmugglerOptions options, ExportDataResult result, LastEtagsInfo maxEtagsToFetch);
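// Override sketch (assumption): a concrete smuggler would implement ExportDeletions by writing
// the "DocsDeletions" and "AttachmentsDeletions" arrays that the ImportData dispatcher above
// recognizes. GetDocumentDeletions is a hypothetical tombstone source; only the section names
// and the JsonTextWriter shape are grounded in the surrounding code.
protected override void ExportDeletions(JsonTextWriter jsonWriter, SmugglerOptions options, ExportDataResult result, LastEtagsInfo maxEtagsToFetch)
{
    jsonWriter.WritePropertyName("DocsDeletions");
    jsonWriter.WriteStartArray();
    foreach (RavenJObject deletion in GetDocumentDeletions(maxEtagsToFetch)) // hypothetical
        deletion.WriteTo(jsonWriter);
    jsonWriter.WriteEndArray();
    jsonWriter.WritePropertyName("AttachmentsDeletions");
    jsonWriter.WriteStartArray(); // left empty if attachment tombstones are not tracked (assumption)
    jsonWriter.WriteEndArray();
}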
public void ExportData(SmugglerOptions options, bool incremental = false) { var lastDocsEtag = Guid.Empty; var lastAttachmentEtag = Guid.Empty; var folder = options.File; var etagFileLocation = Path.Combine(folder, "IncrementalExport.state.json"); if (incremental) { if (Directory.Exists(folder) == false) { Directory.CreateDirectory(folder); } options.File = Path.Combine(folder, DateTime.Now.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + ".ravendb-incremental-dump"); if (File.Exists(options.File)) { var counter = 1; var found = false; while (found == false) { options.File = Path.Combine(folder, DateTime.Now.ToString("yyyy-MM-dd-HH-mm", CultureInfo.InvariantCulture) + " - " + counter + ".ravendb-incremental-dump"); if (File.Exists(options.File) == false) found = true; counter++; } } if (File.Exists(etagFileLocation)) { using (var streamReader = new StreamReader(new FileStream(etagFileLocation, FileMode.Open))) using (var jsonReader = new JsonTextReader(streamReader)) { var ravenJObject = RavenJObject.Load(jsonReader); lastDocsEtag = new Guid(ravenJObject.Value<string>("LastDocEtag")); lastAttachmentEtag = new Guid(ravenJObject.Value<string>("LastAttachmentEtag")); } } } using (var streamWriter = new StreamWriter(new GZipStream(File.Create(options.File), CompressionMode.Compress))) { var jsonWriter = new JsonTextWriter(streamWriter) { Formatting = Formatting.Indented }; jsonWriter.WriteStartObject(); jsonWriter.WritePropertyName("Indexes"); jsonWriter.WriteStartArray(); if ((options.OperateOnTypes & ItemType.Indexes) == ItemType.Indexes) { ExportIndexes(jsonWriter); } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Docs"); jsonWriter.WriteStartArray(); if ((options.OperateOnTypes & ItemType.Documents) == ItemType.Documents) { lastDocsEtag = ExportDocuments(options, jsonWriter, lastDocsEtag); } jsonWriter.WriteEndArray(); jsonWriter.WritePropertyName("Attachments"); jsonWriter.WriteStartArray(); if ((options.OperateOnTypes & ItemType.Attachments) == ItemType.Attachments) { lastAttachmentEtag = ExportAttachments(jsonWriter, lastAttachmentEtag); } jsonWriter.WriteEndArray(); jsonWriter.WriteEndObject(); streamWriter.Flush(); } if (incremental == false) return; using (var streamWriter = new StreamWriter(File.Create(etagFileLocation))) { new RavenJObject { {"LastDocEtag", lastDocsEtag.ToString()}, {"LastAttachmentEtag", lastAttachmentEtag.ToString()} }.WriteTo(new JsonTextWriter(streamWriter)); streamWriter.Flush(); } }
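// Usage sketch (illustrative): repeated incremental exports. In incremental mode options.File
// names a folder; each run writes a new timestamped ".ravendb-incremental-dump" there and
// persists LastDocEtag/LastAttachmentEtag in IncrementalExport.state.json, so the next run
// exports only what changed. The folder path and the 'smugglerApi' instance are assumptions.
// Note that ExportData mutates options.File to point at the dump file it creates, so a fresh
// SmugglerOptions is built for each run.
smugglerApi.ExportData(new SmugglerOptions { File = @"C:\backups\test-db" }, incremental: true); // first run exports everything
// ... after more writes to the database, a later run picks up from the persisted etags:
smugglerApi.ExportData(new SmugglerOptions { File = @"C:\backups\test-db" }, incremental: true);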