public async Task PostCreateSampleData()
{
    DocumentsOperationContext context;
    using (ContextPool.AllocateOperationContext(out context))
    {
        // Refuse to seed a database that already holds user documents; system documents are allowed.
        using (context.OpenReadTransaction())
        {
            foreach (var collection in Database.DocumentsStorage.GetCollections(context))
            {
                if (collection.Count > 0 && collection.Name != CollectionName.SystemCollection)
                    throw new InvalidOperationException("You cannot create sample data in a database that already contains documents");
            }
        }

        // The Northwind sample ships as a gzip-compressed .ravendbdump embedded resource.
        using (var sampleData = typeof(SampleDataHandler).GetTypeInfo().Assembly
            .GetManifestResourceStream("Raven.Server.Web.Studio.EmbeddedData.Northwind_3.5.35168.ravendbdump"))
        using (var stream = new GZipStream(sampleData, CompressionMode.Decompress))
        {
            var importer = new SmugglerImporter(Database);
            await importer.Import(context, stream);
        }
    }
}
private async Task<ImportResult> DoImportInternal(DocumentsOperationContext context, Stream stream,
    DatabaseSmugglerOptions smugglerOptions, Action<IOperationProgress> onProgress)
{
    try
    {
        var importer = new SmugglerImporter(Database, smugglerOptions);
        return await importer.Import(context, stream, onProgress);
    }
    finally
    {
        // This method takes ownership of the stream: dispose it even when the import throws.
        stream.Dispose();
    }
}
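// A minimal usage sketch, not from the source: how DoImportInternal might be called
// from a handler. GetUploadedStream and ReportProgress are hypothetical names used
// only for illustration; the options factory is the one PostImport below uses.
//
//     DocumentsOperationContext context;
//     using (ContextPool.AllocateOperationContext(out context))
//     {
//         var options = DatabaseSmugglerOptionsServerSide.Create(HttpContext, context);
//         var stream = new GZipStream(await GetUploadedStream(), CompressionMode.Decompress); // hypothetical source stream
//         // No using block here: DoImportInternal disposes the stream in its finally block.
//         var result = await DoImportInternal(context, stream, options, progress => ReportProgress(progress)); // hypothetical progress sink
//     }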
public async Task PostImport()
{
    DocumentsOperationContext context;
    using (ContextPool.AllocateOperationContext(out context))
    {
        var options = DatabaseSmugglerOptionsServerSide.Create(HttpContext, context);

        // Item1 is the raw import stream; Item2 is the disposable that owns it.
        var tuple = await GetImportStream();
        using (tuple.Item2)
        using (var stream = new GZipStream(tuple.Item1, CompressionMode.Decompress))
        {
            var sp = Stopwatch.StartNew();
            var importer = new SmugglerImporter(Database, options);
            var result = await importer.Import(context, stream);
            sp.Stop();
            WriteImportResult(context, sp, result, ResponseBodyStream());
        }
    }
}
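// Hypothetical client-side sketch of exercising PostImport with HttpClient. The route
// and file name are assumptions for illustration; the handler only requires the request
// body (obtained via GetImportStream) to be gzip-compressed.
//
//     using (var client = new HttpClient())
//     using (var dump = File.OpenRead("northwind.ravendbdump")) // assumed to be gzip-compressed already
//     {
//         var response = await client.PostAsync(
//             "http://localhost:8080/databases/Northwind/smuggler/import", // assumed route
//             new StreamContent(dump));
//         response.EnsureSuccessStatusCode(); // the body carries the serialized import result
//     }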
private async Task BulkImport(BlockingCollection<Func<Task<Stream>>> files, Stopwatch sp, string directory)
{
    var results = new ConcurrentQueue<ImportResult>();
    var tasks = new Task[Environment.ProcessorCount];

    // One worker per processor; each drains the shared queue until the producer marks it complete.
    for (int i = 0; i < tasks.Length; i++)
    {
        tasks[i] = Task.Run(async () =>
        {
            while (files.IsCompleted == false)
            {
                Func<Task<Stream>> getFile;
                DocumentsOperationContext context;
                try
                {
                    getFile = files.Take();
                }
                catch (Exception)
                {
                    // Take() throws once the collection is completed and empty; re-check the loop condition.
                    continue;
                }

                using (ContextPool.AllocateOperationContext(out context))
                using (Stream file = await getFile())
                using (var stream = new GZipStream(file, CompressionMode.Decompress))
                {
                    var importer = new SmugglerImporter(Database);
                    var result = await importer.Import(context, stream);
                    results.Enqueue(result);
                }
            }
        });
    }

    await Task.WhenAll(tasks);

    // Merge the per-file results into a single summary.
    var finalResult = new ImportResult();
    ImportResult importResult;
    while (results.TryDequeue(out importResult))
    {
        finalResult.DocumentsCount += importResult.DocumentsCount;
        finalResult.IdentitiesCount += importResult.IdentitiesCount;
        finalResult.IndexesCount += importResult.IndexesCount;
        finalResult.RevisionDocumentsCount += importResult.RevisionDocumentsCount;
        finalResult.TransformersCount += importResult.TransformersCount;
        finalResult.Warnings.AddRange(importResult.Warnings);
    }
    sp.Stop();

    DocumentsOperationContext finalContext;
    using (ContextPool.AllocateOperationContext(out finalContext))
    {
        // Buffer the summary so it can be written both to disk and to the HTTP response.
        var memoryStream = new MemoryStream();
        WriteImportResult(finalContext, sp, finalResult, memoryStream);

        memoryStream.Position = 0;
        try
        {
            using (var output = File.Create(Path.Combine(directory, "smuggler.results.txt")))
            {
                memoryStream.CopyTo(output);
            }
        }
        catch (Exception)
        {
            // Writing the summary file is best-effort; ignore any failure here.
        }

        memoryStream.Position = 0;
        memoryStream.CopyTo(ResponseBodyStream());
    }
}
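// A minimal sketch of the producer side BulkImport expects, with assumptions flagged
// inline: ImportDirectory and the *.ravendbdump search pattern are hypothetical. Each
// queue entry defers opening the file until a worker takes it, and CompleteAdding lets
// the Environment.ProcessorCount workers drain the queue and exit.
//
//     private async Task ImportDirectory(string directory) // hypothetical caller
//     {
//         var files = new BlockingCollection<Func<Task<Stream>>>();
//         var sp = Stopwatch.StartNew();
//         var consumer = BulkImport(files, sp, directory); // workers block in Take() until items arrive
//
//         foreach (var path in Directory.GetFiles(directory, "*.ravendbdump")) // assumed extension
//             files.Add(() => Task.FromResult<Stream>(File.OpenRead(path)));
//
//         files.CompleteAdding(); // Take() now throws once empty, so IsCompleted ends each worker loop
//         await consumer;
//     }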