        public void Store(object entity)
        {
            // Resolve the target shard for this entity and point the database commands at it.
            var shardId = shardResolutionStrategy.GenerateShardIdFor(entity, this);
            DatabaseCommands = Shards[shardId].AsyncDatabaseCommands;

            database = MultiDatabase.GetDatabaseName(Shards[shardId].Url);
            string id;

            if (generateEntityIdOnTheClient.TryGetIdFromInstance(entity, out id) == false)
            {
                id = generateEntityIdOnTheClient.GetOrGenerateDocumentKey(entity);
            }

            var modifyDocumentId = shardStrategy.ModifyDocumentId(shardedDocumentStore.Conventions, shardId, id);

            // Lazily create one BulkInsertOperation per shard and reuse it for later stores.
            BulkInsertOperation bulkInsertOperation;
            if (Bulks.TryGetValue(shardId, out bulkInsertOperation) == false)
            {
                var shard = Shards[shardId];
                bulkInsertOperation = new BulkInsertOperation(database, shard, shard.Listeners, options, shard.Changes());
                Bulks.Add(shardId, bulkInsertOperation);
            }

            bulkInsertOperation.Store(entity, modifyDocumentId);
        }
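
The sharded Store method above routes each entity into one BulkInsertOperation per shard. For context, a minimal sketch of driving a bulk insert against a single, non-sharded RavenDB 3.x store is shown below; the server URL, database name, and Person class are illustrative assumptions, not part of the example.

        // Sketch only: assumes a reachable RavenDB server and an existing "Example" database.
        using Raven.Client.Document;

        public class Person
        {
            public string Id { get; set; }
            public string Name { get; set; }
        }

        public static class BulkInsertSketch
        {
            public static void Main()
            {
                using (var store = new DocumentStore { Url = "http://localhost:8080", DefaultDatabase = "Example" })
                {
                    store.Initialize();

                    // A BulkInsertOperation buffers documents and sends them to the server in batches.
                    using (var bulkInsert = store.BulkInsert())
                    {
                        for (var i = 0; i < 10000; i++)
                        {
                            bulkInsert.Store(new Person { Name = "Person #" + i });
                        }
                    } // disposing the operation flushes the final batch
                }
            }
        }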
Example #2
		public override async Task ImportData(Stream stream, SmugglerOptions options)
		{
			SmugglerJintHelper.Initialize(options ?? SmugglerOptions);

			var batchSize = options != null ? options.BatchSize : SmugglerOptions.BatchSize;

			using (store = CreateStore())
			{
				Task disposeTask = null;

				try
				{
					operation = store.BulkInsert(options: new BulkInsertOptions
					{
						BatchSize = batchSize,
						CheckForUpdates = true
					});

					operation.Report += text => ShowProgress(text);

					await base.ImportData(stream, options);
				}
				finally
				{
					disposeTask = operation.DisposeAsync();
				}

				if (disposeTask != null)
				{
					await disposeTask;
				}
			}
		}
Example #3
        public void Store(object entity)
        {
            var shardId = shardResolutionStrategy.GenerateShardIdFor(entity, this);
            var shard   = shards[shardId];
            BulkInsertOperation bulkInsertOperation;

            if (Bulks.TryGetValue(shardId, out bulkInsertOperation) == false)
            {
                var actualDatabaseName = database ?? ((dynamic)shard).DefaultDatabase ?? MultiDatabase.GetDatabaseName(shard.Url);
                bulkInsertOperation = new BulkInsertOperation(actualDatabaseName, shard, shard.Listeners, options, shard.Changes());
                Bulks.Add(shardId, bulkInsertOperation);
            }

            DatabaseCommands = string.IsNullOrWhiteSpace(database)
                ? shard.AsyncDatabaseCommands
                : shard.AsyncDatabaseCommands.ForDatabase(database);

            string id;

            if (generateEntityIdOnTheClient.TryGetIdFromInstance(entity, out id) == false)
            {
                id = generateEntityIdOnTheClient.GetOrGenerateDocumentKey(entity);
            }
            var modifyDocumentId = shardStrategy.ModifyDocumentId(shardedDocumentStore.Conventions, shardId, id);

            bulkInsertOperation.Store(entity, modifyDocumentId);
        }
Example #4
        public void Store(object entity)
        {
            var shardId = shardResolutionStrategy.GenerateShardIdFor(entity, this);

            DatabaseCommands = Shards[shardId].AsyncDatabaseCommands;

            database = MultiDatabase.GetDatabaseName(Shards[shardId].Url);
            string id;

            if (generateEntityIdOnTheClient.TryGetIdFromInstance(entity, out id) == false)
            {
                id = generateEntityIdOnTheClient.GetOrGenerateDocumentKey(entity);
            }

            var modifyDocumentId = shardStrategy.ModifyDocumentId(shardedDocumentStore.Conventions, shardId, id);

            BulkInsertOperation bulkInsertOperation;

            if (Bulks.TryGetValue(shardId, out bulkInsertOperation) == false)
            {
                var shard = Shards[shardId];
                bulkInsertOperation = new BulkInsertOperation(database, shard, shard.Listeners, options, shard.Changes());
                Bulks.Add(shardId, bulkInsertOperation);
            }

            bulkInsertOperation.Store(entity, modifyDocumentId);
        }
        public static void LoadRestaurants(string csvFile, BulkInsertOperation bulkInsert)
        {
            var wktReader = new WktReader();
            using (var reader = new StreamReader(csvFile))
            using (var csv = new CsvReader(reader, new CsvConfiguration {UseInvariantCulture = true}))
            {
                var restaurantCsvRows = csv.GetRecords<RestaurantCsvRow>();
                foreach (var row in restaurantCsvRows)
                {
                    Polygon deliveryArea = null;

                    if (!string.IsNullOrEmpty(row.DeliveryArea))
                        deliveryArea = (Polygon)wktReader.Read(row.DeliveryArea);

                    var restaurant = new Restaurant
                    {
                        Name = row.Name,
                        Street = row.Street,
                        City = row.City,
                        PostCode = row.PostCode,
                        Phone = row.Phone,
                        Location = new Point(row.Latitude, row.Longitude),
                        DeliveryArea = deliveryArea,
                        DriveThruArea = string.IsNullOrEmpty(row.DriveThruArea) ? null : row.DriveThruArea
                    };

                    bulkInsert.Store(restaurant);

                }
            }
        }
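
A hypothetical call site for the CSV loader above could look like the following sketch; the server URL, database name, and CSV path are placeholder assumptions, and LoadRestaurants is assumed to be in scope (for example, on the same class).

        // Sketch only: all connection details and the file path are illustrative.
        using (var store = new DocumentStore { Url = "http://localhost:8080", DefaultDatabase = "Restaurants" })
        {
            store.Initialize();

            using (var bulkInsert = store.BulkInsert())
            {
                LoadRestaurants(@"C:\data\restaurants.csv", bulkInsert);
            } // disposing the operation flushes any buffered restaurant documents
        }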
Example #6
        static void Main(string[] args)
        {
            Console.WriteLine("Starting scan...");
            ravenDBConnector = new RavenDBConnector();
            Console.WriteLine("Initiating db connection...");
            ravenDBStore = ravenDBConnector.InitDBConnection();
            Console.WriteLine("Instantiating db session");
            session = ravenDBStore.OpenSession();
            Console.WriteLine("Getting the Bulk Insert object");
            bulkInsert = ravenDBStore.BulkInsert();
            string rootStartDrive = @"C:\anddev\";
            Console.WriteLine("Drive for scanning is " + rootStartDrive);
            DirectoryInfo directoryInfo = new DirectoryInfo(rootStartDrive);
            AnalyzeDirectory(directoryInfo);
            Console.WriteLine("Scanned {0} folders and {1} files under {2}", folderCount, fileCount, rootStartDrive);
            Console.ReadLine();
            ravenDBConnector.DisposeConnection();
        }
        public static void LoadRestaurants(string csvFile, BulkInsertOperation bulkInsert)
        {
            using (var reader = new StreamReader(csvFile))
            using (var csv = new CsvReader(reader))
            {
                var restaurants = csv.GetRecords<Restaurant>();
                foreach (var restaurant in restaurants)
                {
                    if (string.IsNullOrEmpty(restaurant.DeliveryArea))
                        restaurant.DeliveryArea = null;

                    if (string.IsNullOrEmpty(restaurant.DriveThruArea))
                        restaurant.DriveThruArea = null;

                    bulkInsert.Store(restaurant);
                }
            }
        }
Example #8
        private static void ParseDisks(BulkInsertOperation insert)
        {
            int i = 0;
            var parser = new Parser();
            var buffer = new byte[1024*1024];// more than big enough for all files

            using (var bz2 = new BZip2InputStream(File.Open(@"D:\Scratch\freedb-complete-20150101.tar.bz2", FileMode.Open)))
            using (var tar = new TarInputStream(bz2))
            {
                TarEntry entry;
                while((entry=tar.GetNextEntry()) != null)
                {
                    if(entry.Size == 0 || entry.Name == "README" || entry.Name == "COPYING")
                        continue;

                    var readSoFar = 0;
                    while(true)
                    {
                        var read = tar.Read(buffer, readSoFar, ((int) entry.Size) - readSoFar);
                        if (read == 0)
                            break;

                        readSoFar += read;
                    }
                    // we do it in this fashion to have the stream reader detect the BOM / unicode / other stuff
                    // so we can read the values properly
                    var fileText = new StreamReader(new MemoryStream(buffer,0, readSoFar)).ReadToEnd();
                    try
                    {
                        var disk = parser.Parse(fileText);
                        insert.Store(disk);
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine();
                        Console.WriteLine(entry.Name);
                        Console.WriteLine(e);
                    }
                }
            }
        }
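
ParseDisks only consumes an already-created BulkInsertOperation; a hypothetical caller might construct one as sketched below. The URL, database name, and batch size are assumptions; BulkInsertOptions.BatchSize is the same setting used by the import examples elsewhere on this page.

        // Sketch only: creates a bulk insert with an explicit batch size and hands it to ParseDisks.
        using (var store = new DocumentStore { Url = "http://localhost:8080", DefaultDatabase = "FreeDb" })
        {
            store.Initialize();

            using (var insert = store.BulkInsert(options: new BulkInsertOptions { BatchSize = 1024 }))
            {
                ParseDisks(insert);
            }
        }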
Example #9
        public async Task StoreAsync(object entity)
        {
            var shardId = shardResolutionStrategy.GenerateShardIdFor(entity, this);
            var shard   = shards[shardId];
            BulkInsertOperation bulkInsertOperation;

            if (Bulks.TryGetValue(shardId, out bulkInsertOperation) == false)
            {
                var actualDatabaseName = database ?? ((DocumentStore)shard).DefaultDatabase ?? MultiDatabase.GetDatabaseName(shard.Url);
                bulkInsertOperation = new BulkInsertOperation(actualDatabaseName, shard, shard.Listeners);
                Bulks.Add(shardId, bulkInsertOperation);
            }

            DatabaseCommands = shard.AsyncDatabaseCommands;
            string id;

            if (generateEntityIdOnTheClient.TryGetIdFromInstance(entity, out id) == false)
            {
                id = generateEntityIdOnTheClient.GetOrGenerateDocumentKey(entity);
            }
            var modifyDocumentId = shardStrategy.ModifyDocumentId(shardedDocumentStore.Conventions, shardId, id);
            await bulkInsertOperation.StoreAsync(entity, modifyDocumentId).ConfigureAwait(false);
        }
        private async Task CreateBulkInsertOperation()
        {
            if (operation != null)
                await operation.DisposeAsync();

            operation = new ChunkedBulkInsertOperation(store.DefaultDatabase, store, store.Listeners, new BulkInsertOptions
            {
                BatchSize = Options.BatchSize,
                OverwriteExisting = true
            }, store.Changes(), Options.ChunkSize, Options.TotalDocumentSizeInChunkLimitInBytes);

            operation.Report += text => Operations.ShowProgress(text);
        }
        private void GenerateWorkItem(SqlConnection connection, WorkItem curWorkItem, Dictionary<int, ClosureReasonItem> closureReasonDictionary,
            Dictionary<int, HandlingDepartment> handlingDepartmentsDictionary, Dictionary<int, ProductGroupItem> productGroupsDictionary, BulkInsertOperation workItemsSession,
            Dictionary<int, User> usersDictionary)
        {
            var comments =
                connection.Query<RavenFusion.Models.AtomSide.Comment>(@"SELECT * FROM COMMENT WHERE WORKITEM_ID=@ID",
                    new { ID = curWorkItem.Id });
            var history =
                connection.Query<RavenFusion.Models.AtomSide.WorkItemHistory>(
                    @"SELECT * FROM WORKITEMHISTORY WHERE WORKITEM_ID=@ID", new { ID = curWorkItem.Id });
            var insuranceCompanies =
                connection.Query<RavenFusion.Models.AtomSide.WorkItemInsuranceCompany>(
                    @"SELECT * FROM WorkItemInsuranceCompany WHERE WORKITEM_ID=@ID", new { ID = curWorkItem.Id });

            var workItemSignOff =
                connection.Query<RavenFusion.Models.AtomSide.WorkItemSignOff>(
                    @"SELECT * FROM WORKITEMSIGNOFF WHERE WORKITEM_ID=@ID", new { ID = curWorkItem.Id });
            var workItemSubscription =
                connection.Query<RavenFusion.Models.AtomSide.WorkItemSubscription>(
                    @"SELECT * FROM SUBSCRIPTION WHERE WORKITEM_ID=@ID", new { ID = curWorkItem.Id });
            var workItemSupplier =
                connection.Query<RavenFusion.Models.AtomSide.WorkItemSupplier>(
                    @"SELECT * FROM WORKITEMSUPPLIER WHERE WORKITEM_ID=@ID", new { ID = curWorkItem.Id });
            var workItemDocuments = connection.Query<RavenFusion.Models.RavenSide.Document>(
                @"SELECT * FROM DOCUMENT WHERE WORKITEM_ID=@ID", new { ID = curWorkItem.Id });
            RavenFusion.Models.AtomSide.ClosureReasonItem workItemClosureReason = null;
            closureReasonDictionary.TryGetValue(curWorkItem.ClosureReason_id, out workItemClosureReason);

            RavenFusion.Models.AtomSide.HandlingDepartment workItemHandlingDepartment = null;
            handlingDepartmentsDictionary.TryGetValue(curWorkItem.Department_id,
                out workItemHandlingDepartment);

            var workItemProductGroups = new List<RavenFusion.Models.RavenSide.ProductGroupDenormalized>();

            foreach (var productGroupLink in connection.Query<RavenFusion.Models.AtomSide.WorkItemProductGroupLink>(
                @"SELECT * FROM WorkItemProductGroup WHERE WORKITEM_ID=@ID",
                new { ID = curWorkItem.Id }))
            {
                RavenFusion.Models.RavenSide.ProductGroupItem curProductGroup = null;
                if (productGroupsDictionary.TryGetValue(productGroupLink.ProductGroup_id, out curProductGroup))
                {
                    workItemProductGroups.Add(new RavenFusion.Models.RavenSide.ProductGroupDenormalized()
                    {
                        Id = curProductGroup.Id,
                        Name = curProductGroup.Name
                    });
                }
            }


            workItemsSession.Store(
                new RavenFusion.Models.RavenSide.WorkItem()
                {
                    ActualUnitsOfWork = curWorkItem.ActualUnitsOfWork,
                    AssignedTo =
                        usersDictionary.ContainsKey(curWorkItem.AssignedTo_id)
                            ? new RavenFusion.Models.RavenSide.UserDenormalized()
                            {
                                Id = usersDictionary[curWorkItem.AssignedTo_id].id,
                                Name = usersDictionary[curWorkItem.AssignedTo_id].Name,
                                NickName = usersDictionary[curWorkItem.AssignedTo_id].NickName
                            }
                            : null,
                    ClientRequirement = curWorkItem.ClientRequirement,
                    ClosedBy =
                        usersDictionary.ContainsKey(curWorkItem.ClosedBy_id)
                            ? new RavenFusion.Models.RavenSide.UserDenormalized()
                            {
                                Id = usersDictionary[curWorkItem.ClosedBy_id].id,
                                Name = usersDictionary[curWorkItem.ClosedBy_id].Name,
                                NickName = usersDictionary[curWorkItem.ClosedBy_id].NickName
                            }
                            : null,
                    ClosedDate = curWorkItem.ClosedDate,
                    ClosureReason =
                        workItemClosureReason != null
                            ? new RavenFusion.Models.RavenSide.ClosureReasonDenormalized()
                            {
                                Description = workItemClosureReason.Description,
                                Id = workItemClosureReason.Id.ToString()
                            }
                            : null,
                    Comments = comments.Select(x => new RavenFusion.Models.RavenSide.Comment()
                    {
                        CommentText = x.CommentText,
                        Type = x.Type,
                        UnitsOfWork = x.UnitOfWork
                    }).ToList(),
                    CompletionComment = curWorkItem.CompletionComment,
                    Department = workItemHandlingDepartment != null
                        ? new RavenFusion.Models.RavenSide.HandlingDepartmentDenormalized()
                        {
                            Description = workItemHandlingDepartment.Description,
                            Id = workItemHandlingDepartment.id.ToString()
                        }
                        : null,
                    EstimatedStartDate = curWorkItem.EstimatedStartDate,
                    EstimatedUnitOfWork = curWorkItem.EstimatedUnitOfWork,
                    History = history.Select(x => x.Id).ToList(),
                    ImpactAnalysis = curWorkItem.ImpactAnalysis,
                    InHouseKeeping = curWorkItem.InHouseKeeping,
                    InsuranceCompanies = insuranceCompanies.Select(x => x.InsuranceCompany_id).ToList(),
                    InternalTesting = curWorkItem.InternalTesting,
                    ProductGroups = workItemProductGroups.Count > 0 ? workItemProductGroups : null,
                    Rejected = curWorkItem.Rejected,
                    RequestedCompletionDate = curWorkItem.RequestedCompletionDate,
                    Severity = curWorkItem.Severity,
                    SignOffs = workItemSignOff.Select(x => new RavenFusion.Models.RavenSide.WorkitemSignOff()
                    {
                        SignOffType = x.SignOffType,
                        SignedOff = x.SignedOff,
                        SignedOffBy =
                            usersDictionary.ContainsKey(x.SignedOffBy_id)
                                ? new RavenFusion.Models.RavenSide.UserDenormalized()
                                {
                                    Id = usersDictionary[x.SignedOffBy_id].id,
                                    Name = usersDictionary[x.SignedOffBy_id].Name,
                                    NickName = usersDictionary[x.SignedOffBy_id].NickName
                                }
                                : null,
                    }).Where(x => x.SignedOffBy != null).ToList(),
                    Subscriptions =
                        workItemSubscription.Select(
                            x =>
                                usersDictionary.ContainsKey(x.User_id)
                                    ? new RavenFusion.Models.RavenSide.UserDenormalized()
                                    {
                                        Id = usersDictionary[x.User_id].id,
                                        Name = usersDictionary[x.User_id].Name,
                                        NickName = usersDictionary[x.User_id].NickName
                                    }
                                    : null).Where(x => x != null).ToList(),
                    Summary = curWorkItem.Summary,
                    Suppliers = workItemSupplier.Select(x => x.Supplier_id).ToList(),
                    WorkItemType = curWorkItem.WorkItemType,
                    WorkStatus = curWorkItem.WorkStatus,
                    Documents = workItemDocuments.ToList()
                }, @"WorkItems/" + curWorkItem.Id);
        }
        private static async Task<Tuple<int, Etag, DateTime>> TransferStreamedDocuments(DocumentStore exportStore, 
            SmugglerDatabaseOptions databaseOptions, 
            DateTime now, 
            SmugglerJintHelper jintHelper, 
            BulkInsertOperation bulkInsertOperation, 
            TimeSpan reportInterval, 
            int totalCount, 
            string fromEtag, 
            DateTime lastReport)
        {
            Etag lastReadEtag = fromEtag;
            using (var documentsEnumerator = await exportStore.AsyncDatabaseCommands.StreamDocsAsync(fromEtag))
            {
                while (await documentsEnumerator.MoveNextAsync())
                {
                    var document = documentsEnumerator.Current;
                    var metadata = document.Value<RavenJObject>("@metadata");
                    var id = metadata.Value<string>("@id");
                    var etag = Etag.Parse(metadata.Value<string>("@etag"));

                    lastReadEtag = etag;

                    if (!databaseOptions.MatchFilters(document))
                        continue;
                    if (databaseOptions.ShouldExcludeExpired && databaseOptions.ExcludeExpired(document, now))
                        continue;

                    if (databaseOptions.StripReplicationInformation)
                        document["@metadata"] = StripReplicationInformationFromMetadata(document["@metadata"] as RavenJObject);

                    if (databaseOptions.ShouldDisableVersioningBundle)
                        document["@metadata"] = SmugglerHelper.DisableVersioning(document["@metadata"] as RavenJObject);

                    document["@metadata"] = SmugglerHelper.HandleConflictDocuments(document["@metadata"] as RavenJObject);

                    if (!string.IsNullOrEmpty(databaseOptions.TransformScript))
                    {
                        document = jintHelper.Transform(databaseOptions.TransformScript, document);
                        if (document == null)
                            continue;
                        metadata = document.Value<RavenJObject>("@metadata");
                    }

                    document.Remove("@metadata");
                    try
                    {
                        bulkInsertOperation.Store(document, metadata, id);
                    }
                    catch (Exception e)
                    {
                        if (databaseOptions.IgnoreErrorsAndContinue == false)
                            throw;

                        ShowProgress("IMPORT of a document {0} failed. Message: {1}", document, e.Message);
                    }

                    totalCount++;

                    if (totalCount%1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval)
                    {
                        ShowProgress("Exported {0} documents", totalCount);
                        lastReport = SystemTime.UtcNow;
                    }
                }
            }
            return Tuple.Create(totalCount, lastReadEtag, lastReport);
        }
        private static async Task<Tuple<int, string, DateTime>> TransferDocumentsWithoutStreaming(DocumentStore exportStore,
            SmugglerDatabaseOptions databaseOptions,
            int exportBatchSize,
            OperationMetadata operationMetadata,
            DateTime now,
            BulkInsertOperation bulkInsertOperation,
            TimeSpan reportInterval,
            int totalCount,
            string fromEtag,
            DateTime lastReport)
        {
            var documents = await ((AsyncServerClient) exportStore.AsyncDatabaseCommands).GetDocumentsInternalAsync(null, fromEtag, exportBatchSize, operationMetadata);
            foreach (var jToken in documents)
            {
                var document = (RavenJObject) jToken;
                var metadata = document.Value<RavenJObject>("@metadata");
                var id = metadata.Value<string>("@id");
                var etag = Etag.Parse(metadata.Value<string>("@etag"));
                fromEtag = etag;

                if (!databaseOptions.MatchFilters(document))
                    continue;
                if (databaseOptions.ShouldExcludeExpired && databaseOptions.ExcludeExpired(document, now))
                    continue;

                if (databaseOptions.StripReplicationInformation)
                    document["@metadata"] = StripReplicationInformationFromMetadata(document["@metadata"] as RavenJObject);

                if (databaseOptions.ShouldDisableVersioningBundle)
                    document["@metadata"] = SmugglerHelper.DisableVersioning(document["@metadata"] as RavenJObject);

                document["@metadata"] = SmugglerHelper.HandleConflictDocuments(document["@metadata"] as RavenJObject);

                document.Remove("@metadata");
                metadata.Remove("@id");
                metadata.Remove("@etag");

                try
                {
                    bulkInsertOperation.Store(document, metadata, id);
                }
                catch (Exception e)
                {
                    if (databaseOptions.IgnoreErrorsAndContinue == false)
                        throw;

                    ShowProgress("IMPORT of a document {0} failed. Message: {1}", document, e.Message);
                }

                totalCount++;

                if (totalCount%1000 == 0 || SystemTime.UtcNow - lastReport > reportInterval)
                {
                    ShowProgress("Exported {0} documents", totalCount);
                    lastReport = SystemTime.UtcNow;
                }
            }
            return Tuple.Create(totalCount, fromEtag, lastReport);
        }
Example #14
        public override async Task ImportData(SmugglerImportOptions importOptions, SmugglerOptions options, Stream stream)
        {
            SetSmugglerOptions(options);

            SmugglerJintHelper.Initialize(options);

            using (store = CreateStore(importOptions.To))
            {
                Task disposeTask;

                try
                {
                    operation = new ChunkedBulkInsertOperation(store.DefaultDatabase, store, store.Listeners, new BulkInsertOptions
                    {
                        BatchSize = options.BatchSize,
                        OverwriteExisting = true
                    }, store.Changes(), options.ChunkSize);

                    operation.Report += text => ShowProgress(text);

                    await base.ImportData(importOptions, options, stream);
                }
                finally
                {
                    disposeTask = operation.DisposeAsync();
                }

                if (disposeTask != null)
                {
                    await disposeTask;
                }
            }
        }
Example #15
		protected override async Task PutDocument(RavenJObject document)
		{
			if (document == null)
				return;

			var metadata = document.Value<RavenJObject>("@metadata");
			var id = metadata.Value<string>("@id");
			document.Remove("@metadata");

			operation.Store(document, metadata, id);
			storedDocumentCountInBatch++;
			if (storedDocumentCountInBatch >= currentBatchSize && currentBatchSize > 0)
			{
				storedDocumentCountInBatch = 0;
				await operation.DisposeAsync();

				operation = store.BulkInsert(options: new BulkInsertOptions
				{
					BatchSize = currentBatchSize,
					CheckForUpdates = true
				});

				operation.Report += text => ShowProgress(text);
			}
		}
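
The PutDocument override above keeps a running count and recycles its BulkInsertOperation once the batch size is reached. A stand-alone sketch of that same pattern follows; the method name, the document source, and the batch size are assumptions, while the Raven calls mirror those already used in the example (the usual System.Collections.Generic, System.Threading.Tasks, Raven.Client.Document, Raven.Abstractions.Data, and Raven.Json.Linq using directives are assumed).

        // Sketch only: stores raw documents through a bulk insert and re-opens it every `batchSize`
        // documents so each batch is flushed to the server before the next one starts.
        private static async Task ImportInBatchesAsync(DocumentStore store, IEnumerable<RavenJObject> docs, int batchSize)
        {
            var operation = store.BulkInsert(options: new BulkInsertOptions { BatchSize = batchSize, CheckForUpdates = true });
            var storedInBatch = 0;
            try
            {
                foreach (var document in docs)
                {
                    // Pull the id and metadata out of the document, exactly as PutDocument does above.
                    var metadata = document.Value<RavenJObject>("@metadata");
                    var id = metadata.Value<string>("@id");
                    document.Remove("@metadata");

                    operation.Store(document, metadata, id);

                    if (++storedInBatch >= batchSize)
                    {
                        storedInBatch = 0;
                        await operation.DisposeAsync(); // flush the current batch
                        operation = store.BulkInsert(options: new BulkInsertOptions { BatchSize = batchSize, CheckForUpdates = true });
                    }
                }
            }
            finally
            {
                await operation.DisposeAsync(); // flush whatever remains in the last batch
            }
        }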