/// <summary>
/// Renames the "Key" element to "Item" on every ListDto document in the
/// jobGraph collection and rewrites its "_t" discriminator hierarchy.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var jobGraphCollection = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.jobGraph");

    var listDtoFilter = new BsonDocument("_t", "ListDto");
    var listDtoUpdate = new BsonDocument
    {
        ["$rename"] = new BsonDocument("Key", "Item"),
        ["$set"] = new BsonDocument("_t", new BsonArray { "BaseJobDto", "ExpiringJobDto", "ListDto" })
    };

    jobGraphCollection.UpdateMany(listDtoFilter, listDtoUpdate);
    return true;
}
/// <summary>
/// Creates the lookup indexes for the jobGraph, locks and server collections.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var keys = Builders<BsonDocument>.IndexKeys;

    var jobGraph = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.jobGraph");
    jobGraph.TryCreateIndexes(keys.Descending, "StateName", "ExpireAt", "_t", "Queue", "FetchedAt", "Value");
    jobGraph.TryCreateIndexes(keys.Ascending, "Key");

    var locks = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.locks");
    locks.TryCreateIndexes(keys.Descending, "Resource", "ExpireAt");

    var servers = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.server");
    servers.TryCreateIndexes(keys.Descending, "LastHeartbeat");

    return true;
}
/// <summary>
/// Creates descending lookup indexes on the jobQueue, job, stateData, locks,
/// server and signal collections for this schema version.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var indexBuilder = Builders<BsonDocument>.IndexKeys;

    // Resolve collections by suffix so the prefix handling lives in one place.
    Func<string, IMongoCollection<BsonDocument>> getCollection =
        suffix => database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.{suffix}");

    TryCreateIndexes(getCollection("jobQueue"), indexBuilder.Descending, "Queue", "FetchedAt");
    TryCreateIndexes(getCollection("job"), indexBuilder.Descending, "StateName", "ExpireAt");
    TryCreateIndexes(getCollection("stateData"), indexBuilder.Descending, "ExpireAt", "_t");
    TryCreateIndexes(getCollection("locks"), indexBuilder.Descending, "Resource", "ExpireAt");
    TryCreateIndexes(getCollection("server"), indexBuilder.Descending, "LastHeartbeat");
    TryCreateIndexes(getCollection("signal"), indexBuilder.Descending, "Signaled");

    return true;
}
/// <summary>
/// Migrates legacy jobs (Int32 "_id") to string ObjectId ids: embeds each
/// job's parameters and state history on the job document, publishes the
/// old-&gt;new id mapping in the migration bag under "JobIdMapping" for later
/// steps, then removes the legacy Int32-keyed documents.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var jobCollection = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.job");
    var jobParametersCollection = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.jobParameter");
    var stateCollection = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.state");

    // Only documents still keyed by the legacy Int32 id need migration.
    var filter = Builders<BsonDocument>.Filter.Type("_id", BsonType.Int32);
    var jobs = jobCollection.Find(filter).ToList();

    // Pre-generate a new ObjectId (as string) for every legacy job id.
    var jobIdMapping = jobs
        .Select(j => j["_id"].AsInt32)
        .Distinct()
        .ToDictionary(jid => jid, jid => new BsonObjectId(ObjectId.GenerateNewId()).ToString());
    migrationBag.SetItem("JobIdMapping", jobIdMapping);

    var migratedJobs = jobs.Select(job =>
    {
        var id = job["_id"].AsInt32;
        var jobParameters = jobParametersCollection.Find(jp => jp["JobId"] == id).ToList();
        var jobStates = stateCollection.Find(s => s["JobId"] == id)
            .SortBy(s => s["CreatedAt"])
            .ToList();
        job["_id"] = jobIdMapping[id];
        // Fold the separate jobParameter documents into an embedded document.
        job["Parameters"] = new BsonDocument(jobParameters.ToDictionary(jp => jp["Name"].AsString, jp => jp["Value"].AsString));
        job["StateHistory"] = new BsonArray(jobStates.Select(s =>
        {
            s.Remove("_id");
            s.Remove("JobId");
            // BUGFIX: "Data" is expected to hold raw JSON, but has been observed
            // to already be a document (the sibling migration step handling the
            // same data documents this). Only parse when it is a string instead
            // of throwing InvalidCastException via AsString.
            if (s["Data"].IsString)
            {
                s["Data"] = new BsonDocument(JobHelper.FromJson<Dictionary<string, string>>(s["Data"].AsString));
            }
            return s;
        }));
        // StateId pointed into the old state collection; history is now embedded.
        job.Remove("StateId");
        return job;
    }).ToList();

    if (migratedJobs.Any())
    {
        jobCollection.InsertMany(migratedJobs);
    }
    // Remove the legacy Int32-keyed documents now that copies exist.
    jobCollection.DeleteMany(filter);
    return true;
}
/// <summary>
/// Recreates the signal collection as a capped collection
/// (bounded at 1,000,000 bytes / 1,000 documents).
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var signalCollectionName = $@"{storageOptions.Prefix}.signal";

    // Drop first: CreateCollection cannot convert an existing collection.
    database.DropCollection(signalCollectionName);
    database.CreateCollection(signalCollectionName, new CreateCollectionOptions
    {
        Capped = true,
        MaxSize = 1000000,
        MaxDocuments = 1000
    });
    return true;
}
public virtual bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag) { foreach (var previousCollectionName in ObsoleteCollectionNames(database, storageOptions)) { database.DropCollection(previousCollectionName); } return(true); }
/// <summary>
/// Applies the composite-key transformation to SetDto documents in jobGraph.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var jobGraphCollection = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.jobGraph");
    CreateCompositeKey(jobGraphCollection, typeof(SetDto));
    return true;
}
/// <summary>
/// Backfills a unique "Identifier" (a fresh GUID) on the schema document
/// if one is not already present.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var schemaCollection = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.schema");

    var filterBuilder = Builders<BsonDocument>.Filter;
    var missingIdentifier = filterBuilder.Not(filterBuilder.Exists("Identifier"));
    var setIdentifier = Builders<BsonDocument>.Update.Set("Identifier", Guid.NewGuid().ToString());

    schemaCollection.FindOneAndUpdate(missingIdentifier, setIdentifier);
    return true;
}
/// <summary>
/// Converts the job primary key and the jobQueue's job reference to ObjectId.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var jobs = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.job");
    SetFieldAsObjectId(jobs, "_id");

    var jobQueue = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.jobQueue");
    SetFieldAsObjectId(jobQueue, "JobId");

    return true;
}
/// <summary>
/// (Re)creates the jobGraph indexes: a unique ascending index on "Key" and
/// descending indexes on the fields used for lookups.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var jobGraph = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.jobGraph");
    var keys = Builders<BsonDocument>.IndexKeys;

    TryCreateUniqueIndexes(jobGraph, keys.Ascending, "Key");
    TryCreateIndexes(jobGraph, keys.Descending,
        "StateName", "ExpireAt", "_t", "Queue", "FetchedAt", "Value", "Item");

    return true;
}
/// <summary>
/// Deletes every lock document that still carries a "ClientId" element.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var locksCollection = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.locks");
    var hasClientId = new BsonDocument("ClientId", new BsonDocument("$exists", true));
    locksCollection.DeleteMany(hasClientId);
    return true;
}
/// <summary>
/// Merges CounterDto documents in the jobGraph collection: counters sharing a
/// "Key" are summed into one new aggregated document whose "ExpireAt" is the
/// latest non-null expiry found among the merged counters.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var jobGraph = database.GetCollection<BsonDocument>(storageOptions.Prefix + ".jobGraph");
    var counterFilter = new BsonDocument("_t", "CounterDto");
    var counters = jobGraph.FindSync(counterFilter).ToList();
    if (!counters.Any())
    {
        return true;
    }

    var countersToInsert = new List<BsonDocument>();
    foreach (var countersByKey in counters.GroupBy(c => c["Key"].AsString))
    {
        var key = countersByKey.Key;
        var sum = countersByKey.Sum(c =>
        {
            var value = c["Value"];
            return value.IsInt32 ? value.AsInt32 : value.AsInt64;
        });

        // BUGFIX: only take the max over counters that actually carry a
        // non-null "ExpireAt". The previous code selected "ExpireAt" from
        // every document containing the field, so a BsonNull value would
        // throw in ToUniversalTime().
        BsonValue expireAt = BsonNull.Value;
        var expiringCounters = countersByKey
            .Where(c => c.Contains("ExpireAt") && c["ExpireAt"] != BsonNull.Value)
            .ToList();
        if (expiringCounters.Any())
        {
            expireAt = expiringCounters
                .Select(c => c["ExpireAt"].ToUniversalTime())
                .Max();
        }

        countersToInsert.Add(new BsonDocument
        {
            ["Key"] = key,
            ["Value"] = sum,
            ["_id"] = ObjectId.GenerateNewId(),
            ["ExpireAt"] = expireAt,
            ["_t"] = new BsonArray(new[] { "BaseJobDto", "ExpiringJobDto", "KeyJobDto", "CounterDto" })
        });
    }

    // NOTE(review): the original per-key counter documents are not removed
    // here — presumably a later migration step de-duplicates them; confirm.
    jobGraph.InsertMany(countersToInsert);
    return true;
}
/// <summary>
/// Expands the legacy JSON "Data" blob on server documents into discrete
/// fields (WorkerCount, Queues, StartedAt, LastHeartbeat) and removes "Data".
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var serverCollection = database.GetCollection<BsonDocument>(storageOptions.Prefix + ".server");

    foreach (var server in serverCollection.Find(new BsonDocument()).ToList())
    {
        // Servers already migrated (or never legacy) have no "Data" blob.
        if (!server.Contains("Data"))
        {
            continue;
        }

        var data = JObject.Parse(server["Data"].AsString);
        var setFields = new BsonDocument
        {
            ["WorkerCount"] = int.Parse(data["WorkerCount"].Value<string>()),
            ["Queues"] = new BsonArray(data["Queues"].ToObject<string[]>()),
            ["StartedAt"] = data["StartedAt"]?.ToObject<DateTime?>(),
            ["LastHeartbeat"] = server["LastHeartbeat"]
        };
        var update = new BsonDocument
        {
            ["$set"] = setFields,
            ["$unset"] = new BsonDocument("Data", "")
        };

        serverCollection.UpdateOne(new BsonDocument("_id", server["_id"]), update);
    }
    return true;
}
/// <summary>
/// Adds an ascending index on "Key" to the statedata collection.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    // NOTE(review): collection name is lower-case "statedata" here; a later
    // step renames it to "stateData" — confirm this is intended for this
    // schema version.
    var stateDataCollection = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.statedata");
    var keyIndex = new BsonDocumentIndexKeysDefinition<BsonDocument>(new BsonDocument("Key", 1));
    stateDataCollection.Indexes.CreateOne(keyIndex);
    return true;
}
/// <summary>
/// Drops the signal collection.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    database.DropCollection($@"{storageOptions.Prefix}.signal");
    return true;
}
/// <summary>
/// Rewrites SetDto documents so the composite "key:value" string is split
/// into a "Value" element and a "Key" of the form "key&lt;value&gt;".
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var jobGraph = database.GetCollection<BsonDocument>(storageOptions.Prefix + ".jobGraph");
    var setDtoFilter = new BsonDocument("_t", nameof(SetDto));
    var bulkUpdates = new List<UpdateOneModel<BsonDocument>>();

    foreach (var document in jobGraph.FindSync(setDtoFilter).ToEnumerable())
    {
        var compositeKey = document["Key"].AsString;
        var separator = compositeKey.IndexOf(':');

        string key;
        string value;
        if (separator >= 0)
        {
            key = compositeKey.Substring(0, separator);
            value = compositeKey.Substring(separator + 1);
        }
        else
        {
            // No separator: the whole string is the key; keep any existing value.
            key = compositeKey;
            value = document.Contains("Value") ? document["Value"].AsString : string.Empty;
        }

        var idFilter = new BsonDocument("_id", document["_id"]);
        var setFields = new BsonDocument("$set", new BsonDocument
        {
            ["Value"] = value,
            ["Key"] = $"{key}<{value}>"
        });
        bulkUpdates.Add(new UpdateOneModel<BsonDocument>(idFilter, setFields));
    }

    if (bulkUpdates.Any())
    {
        jobGraph.BulkWrite(bulkUpdates);
    }
    return true;
}
/// <summary>
/// Adds an ascending "Key" index to the statedata collection.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    database
        .GetCollection<BsonDocument>($@"{storageOptions.Prefix}.statedata")
        .TryCreateIndexes(Builders<BsonDocument>.IndexKeys.Ascending, "Key");
    return true;
}
/// <summary>
/// Copies "schedule" entries from the set collection into statedata,
/// remapping their job-id values through the "JobIdMapping" produced by an
/// earlier migration step and rewriting the "_t" discriminator hierarchy.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var jobIdMapping = migrationBag.GetItem<Dictionary<int, string>>("JobIdMapping");

    var setCollection = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.set");
    var scheduleFilter = Builders<BsonDocument>.Filter.Eq("Key", "schedule");

    var migratedSets = new List<BsonDocument>();
    foreach (var set in setCollection.Find(scheduleFilter).ToList())
    {
        // "Value" holds the old integer job id as a string; swap in the new id.
        set["Value"] = jobIdMapping[int.Parse(set["Value"].AsString)];
        set["_t"] = new BsonArray(new[] { "KeyValueDto", "ExpiringKeyValueDto", "SetDto" });
        migratedSets.Add(set);
    }

    if (migratedSets.Any())
    {
        database
            .GetCollection<BsonDocument>($@"{storageOptions.Prefix}.statedata")
            .InsertMany(migratedSets);
    }
    return true;
}
// Migrates legacy jobs (Int32 "_id") to string ObjectId ids: embeds each
// job's parameters and state history on the job document, publishes the
// old->new id mapping in the migration bag under "JobIdMapping" for later
// steps, then removes the legacy Int32-keyed documents.
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var jobCollection = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.job");
    var jobParametersCollection = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.jobParameter");
    var stateCollection = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.state");

    // Only documents still keyed by the legacy Int32 id need migration.
    var filter = Builders<BsonDocument>.Filter.Type("_id", BsonType.Int32);
    var jobs = jobCollection.Find(filter).ToList();

    // Pre-generate a new ObjectId (as string) for every legacy job id.
    var jobIdMapping = jobs
        .Select(j => j["_id"].AsInt32)
        .Distinct()
        .ToDictionary(jid => jid, jid => new BsonObjectId(ObjectId.GenerateNewId()).ToString());

    // Published for subsequent migration steps that must remap job references.
    migrationBag.SetItem("JobIdMapping", jobIdMapping);

    var migratedJobs = jobs.Select(job =>
    {
        var id = job["_id"].AsInt32;
        var jobParameters = jobParametersCollection.Find(jp => jp["JobId"] == id)
            .ToList();
        var jobStates = stateCollection.Find(s => s["JobId"] == id)
            .SortBy(s => s["CreatedAt"])
            .ToList();
        job["_id"] = jobIdMapping[id];
        // Fold the separate jobParameter documents into an embedded document.
        job["Parameters"] = new BsonDocument(jobParameters.ToDictionary(jp => jp["Name"].AsString, jp => jp["Value"].AsString));
        job["StateHistory"] = new BsonArray(jobStates.Select(s =>
        {
            s.Remove("_id");
            s.Remove("JobId");
            // We expect "Data" to be a string of raw JSON
            // - but it has been experienced that it wasn't
            if (s["Data"].IsString)
            {
                s["Data"] = new BsonDocument(JobHelper.FromJson<Dictionary<string, string>>(s["Data"].AsString));
            }
            else
            {
                // Unexpected type: surface it for diagnostics before validating below.
                System.Diagnostics.Debug.WriteLine(s["Data"].BsonType);
            }
            // After the conversion attempt, "Data" must be a document; anything
            // else aborts the migration with a descriptive error.
            if (!s["Data"].IsBsonDocument)
            {
                throw new MongoMigrationException(this, "Expected JobState field 'Data' to be BsonDocument");
            }
            return(s);
        }));
        // StateId pointed into the old state collection; history is now embedded.
        job.Remove("StateId");
        return(job);
    }).ToList();

    if (migratedJobs.Any())
    {
        jobCollection.InsertMany(migratedJobs);
    }
    // Remove the legacy Int32-keyed documents now that copies exist.
    jobCollection.DeleteMany(filter);
    return(true);
}
/// <summary>
/// Moves all stateData, job and jobQueue documents into the unified jobGraph
/// collection, rewriting each document's "_t" discriminator hierarchy.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var stateDataFindTask = database
        .GetCollection<BsonDocument>(storageOptions.Prefix + ".stateData")
        .Find(new BsonDocument())
        .ToListAsync();

    var jobFindTask = database
        .GetCollection<BsonDocument>(storageOptions.Prefix + ".job")
        .Find(new BsonDocument())
        .ToListAsync();

    var jobQueueFindTask = database
        .GetCollection<BsonDocument>(storageOptions.Prefix + ".jobQueue")
        .Find(new BsonDocument())
        .ToListAsync();

    // run in parallel, make sure we dont deadlock if we have a synchronization context
    Task.Run(() => Task.WhenAll(stateDataFindTask, jobFindTask, jobQueueFindTask)).GetAwaiter().GetResult();

    var jobs = jobFindTask.Result;
    var stateData = stateDataFindTask.Result;
    var jobQueue = jobQueueFindTask.Result;

    foreach (var data in stateData)
    {
        // The concrete DTO name is the last (or only) "_t" entry.
        var typeName = "";
        if (data.TryGetValue("_t", out var typeValue))
        {
            typeName = typeValue is BsonArray
                ? data["_t"].AsBsonArray.Last().AsString
                : data["_t"].AsString;
        }
        else
        {
            throw new InvalidOperationException($"Expected '_t' element in stateData entity, got: {data.ToJson()}");
        }
        data["_t"] = new BsonArray(new[] { "BaseJobDto", "ExpiringJobDto", "KeyJobDto", typeName });
    }

    foreach (var job in jobs)
    {
        job["_t"] = new BsonArray(new[] { "BaseJobDto", "ExpiringJobDto", "JobDto" });
    }

    foreach (var jobQ in jobQueue)
    {
        jobQ["_t"] = new BsonArray { "BaseJobDto", "JobQueueDto" };
    }

    // FIX: materialize the concatenation once — Concat is lazy and was
    // previously enumerated twice (Any + InsertMany).
    var jobGraphEntities = jobs.Concat(stateData).Concat(jobQueue).ToList();
    if (jobGraphEntities.Any())
    {
        database
            .GetCollection<BsonDocument>(storageOptions.Prefix + ".jobGraph")
            .InsertMany(jobGraphEntities);
    }
    return true;
}
/// <summary>
/// Creates the job-reference index on the jobParameter, jobQueue and state
/// collections via <see cref="CreateJobIndex"/>.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    foreach (var suffix in new[] { "jobParameter", "jobQueue", "state" })
    {
        CreateJobIndex(database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.{suffix}"));
    }
    return true;
}
/// <summary>
/// Rewrites jobQueue entries so they reference the new job ids recorded in
/// the migration bag's "JobIdMapping", then deletes the entries that still
/// reference the old integer ids.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var jobIdMapping = migrationBag.GetItem<Dictionary<int, string>>("JobIdMapping");

    var jobQueueCollection = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.jobQueue");

    var migratedEntries = new List<BsonDocument>();
    foreach (var queuedJob in jobQueueCollection.Find(_ => true).ToList())
    {
        // NOTE: This is a "hack". We actually migrate to schema version 7,
        // where we should have migrated to schema version 6. But there is an
        // issue in schema version 6 that would cause us to lose information.
        // Since no version of Hangfire.Mongo running schema version 6 has
        // migration support, it is safe to do this.
        queuedJob["_id"] = new BsonObjectId(ObjectId.GenerateNewId());
        queuedJob["JobId"] = jobIdMapping[queuedJob["JobId"].AsInt32];
        migratedEntries.Add(queuedJob);
    }

    if (migratedEntries.Any())
    {
        jobQueueCollection.InsertMany(migratedEntries);
    }

    // Drop the originals that still carry old integer job ids.
    var oldEntriesFilter = Builders<BsonDocument>.Filter.In("JobId", jobIdMapping.Keys);
    jobQueueCollection.DeleteMany(oldEntriesFilter);
    return true;
}
/// <summary>
/// Indexes the "JobId" field on the collections queried by job reference.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var indexBuilder = Builders<BsonDocument>.IndexKeys;
    foreach (var suffix in new[] { "jobParameter", "jobQueue", "state" })
    {
        database
            .GetCollection<BsonDocument>($@"{storageOptions.Prefix}.{suffix}")
            .TryCreateIndexes(indexBuilder.Descending, "JobId");
    }
    return true;
}
/// <summary>
/// Deletes legacy per-field HashDto documents from jobGraph: those whose
/// fourth "_t" discriminator entry is "HashDto" and which still carry the
/// old "Field" element.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var jobGraph = database.GetCollection<BsonDocument>(storageOptions.Prefix + ".jobGraph");

    var legacyHashFilter = new BsonDocument("$and", new BsonArray
    {
        new BsonDocument("_t.3", "HashDto"),
        new BsonDocument("Field", new BsonDocument("$exists", true))
    });

    jobGraph.DeleteMany(legacyHashFilter);
    return true;
}
/// <summary>
/// Renames the lower-cased "statedata" collection to "stateData". If both
/// spellings exist, the old one is dropped instead of renamed.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var oldName = $@"{storageOptions.Prefix}.statedata";
    var newName = $@"{storageOptions.Prefix}.stateData";

    var listOptions = new ListCollectionsOptions
    {
        Filter = new FilterDefinitionBuilder<BsonDocument>().Eq("name", oldName)
    };
    if (!database.ListCollections(listOptions).Any())
    {
        // Nothing to rename.
        return true;
    }

    listOptions.Filter = new FilterDefinitionBuilder<BsonDocument>().Eq("name", newName);
    if (database.ListCollections(listOptions).Any())
    {
        // A situation can occur where both the old and the new name exist;
        // in that case the old collection is simply discarded.
        database.DropCollection(oldName);
    }
    else
    {
        RenameCollection(database, oldName, newName);
    }
    return true;
}
/// <summary>
/// Runs <see cref="FixCollection"/> over every collection of the target schema.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var collectionNames = TargetSchema.CollectionNames(storageOptions.Prefix);
    foreach (var name in collectionNames)
    {
        FixCollection(database, name);
    }
    return true;
}
/// <summary>
/// Backfills "JobId" on legacy jobQueue documents whose "_id" was itself the
/// job id: each legacy entry is re-inserted with a fresh ObjectId "_id" and
/// its old "_id" copied into "JobId", after which the legacy originals are
/// removed.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var jobQueueCollection = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.jobQueue");

    var filterBuilder = Builders<BsonDocument>.Filter;
    var missingJobIdFilter = filterBuilder.Not(filterBuilder.Exists("JobId"));

    var migratedJobQueueList = jobQueueCollection.Find(missingJobIdFilter).ToList().Select(jq =>
    {
        // "_id" is immutable in MongoDB, so the document is cloned under a
        // new ObjectId rather than updated in place.
        jq["JobId"] = jq["_id"];
        jq["_id"] = new BsonObjectId(ObjectId.GenerateNewId());
        return jq;
    }).ToList();

    if (migratedJobQueueList.Any())
    {
        jobQueueCollection.InsertMany(migratedJobQueueList);
    }

    // BUGFIX: remove the legacy originals. They were previously left behind,
    // duplicating every migrated queue entry. The re-inserted copies all have
    // "JobId" and therefore do not match this filter.
    jobQueueCollection.DeleteMany(missingJobIdFilter);
    return true;
}
/// <summary>
/// Recreates the lock-resource index on the locks collection as a sparse,
/// unique, descending index, dropping any existing index whose name contains
/// the configured lock-resource index name first.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var locksCollection = database.GetCollection<BsonDocument>($@"{storageOptions.Prefix}.locks");

    // drop existing indexes for the 'Resource' field if any exist
    using (var indexCursor = locksCollection.Indexes.List())
    {
        foreach (var index in indexCursor.ToList())
        {
            var existingName = index["name"].AsString;
            if (existingName.Contains(_lockResourceIndexName))
            {
                locksCollection.Indexes.DropOne(existingName);
            }
        }
    }

    // create new unique index for the 'Resource' field
    var createOptions = new CreateIndexOptions
    {
        Name = _lockResourceIndexName,
        Sparse = true,
        Unique = true
    };
    var keys = Builders<BsonDocument>.IndexKeys.Descending(_lockResourceIndexName);
    locksCollection.Indexes.CreateOne(new CreateIndexModel<BsonDocument>(keys, createOptions));
    return true;
}
/// <summary>
/// De-duplicates CounterDto documents in jobGraph that share the same "Key":
/// when all duplicates hold the same value only the newest survives; stray
/// value-1 counters are discarded; any remaining divergent counters are merged
/// into one summed document whose "ExpireAt" is the latest non-null expiry.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var jobGraph = database.GetCollection<BsonDocument>(storageOptions.Prefix + ".jobGraph");
    var counters = jobGraph.Find(new BsonDocument("_t", "CounterDto")).ToList();

    var idsToRemove = new BsonArray();
    foreach (var countersByKey in counters.GroupBy(c => c["Key"].AsString))
    {
        var key = countersByKey.Key;
        var groupedCounters = countersByKey.ToList();

        // if only one, nothing to do, continue...
        if (groupedCounters.Count == 1)
        {
            continue;
        }

        // if all have the same value take the newest
        var allSameValue = groupedCounters.Select(c => Convert.ToInt32(c["Value"])).Distinct().Count() == 1;
        if (allSameValue)
        {
            var newestObjectId = groupedCounters.Select(c => c["_id"].AsObjectId).Max();
            idsToRemove.AddRange(groupedCounters.Where(c => c["_id"].AsObjectId != newestObjectId).Select(c => c["_id"]));
            continue;
        }

        // if more with different values delete all with value = '1' and sum the rest, most likely there have been
        // created a new counterDto, which will have been counted instead of the aggregated one.
        idsToRemove.AddRange(groupedCounters.Where(c => Convert.ToInt32(c["Value"]) == 1).Select(c => c["_id"]));

        // verify there is only one counter left. if more, sum the results and put in a new document,
        // delete the existing
        groupedCounters.RemoveAll(c => idsToRemove.Contains(c["_id"].AsObjectId));
        if (groupedCounters.Count <= 1)
        {
            continue;
        }

        var sum = groupedCounters.Sum(c =>
        {
            var value = c["Value"];
            return value.IsInt32 ? value.AsInt32 : value.AsInt64;
        });

        // BUGFIX: take the max expiry only from counters that actually carry a
        // non-null "ExpireAt". The original selected "ExpireAt" from every
        // counter in the group, which throws when the field is missing or
        // holds BsonNull.
        BsonValue expireAt = BsonNull.Value;
        var expiringCounters = groupedCounters
            .Where(c => c.Contains("ExpireAt") && c["ExpireAt"] != BsonNull.Value)
            .ToList();
        if (expiringCounters.Any())
        {
            expireAt = expiringCounters.Select(c => c["ExpireAt"].ToUniversalTime()).Max();
        }

        var counterToInsert = new BsonDocument
        {
            ["Key"] = key,
            ["Value"] = sum,
            ["_id"] = ObjectId.GenerateNewId(),
            ["ExpireAt"] = expireAt,
            ["_t"] = new BsonArray(new[] { "BaseJobDto", "ExpiringJobDto", "KeyJobDto", "CounterDto" })
        };
        jobGraph.InsertOne(counterToInsert);

        // The merged-away counters are now redundant.
        idsToRemove.AddRange(groupedCounters.Select(c => c["_id"]));
    }

    if (!idsToRemove.Any())
    {
        return true;
    }

    jobGraph.DeleteMany(new BsonDocument("_id", new BsonDocument("$in", idsToRemove)));
    return true;
}
/// <summary>
/// Merges legacy per-field HashDto documents (one document per Key/Field pair)
/// into a single HashDto per "Key" carrying a "Fields" sub-document, with the
/// latest non-null "ExpireAt" found among the merged items.
/// </summary>
public bool Execute(IMongoDatabase database, MongoStorageOptions storageOptions, IMongoMigrationBag migrationBag)
{
    var jobGraph = database.GetCollection<BsonDocument>(storageOptions.Prefix + ".jobGraph");
    var hashFilter = new BsonDocument("_t", "HashDto");
    var hashItems = jobGraph.FindSync(hashFilter).ToList();
    if (!hashItems.Any())
    {
        return true;
    }

    var hashItemsToInsert = new List<BsonDocument>();
    foreach (var hashItemsByKey in hashItems.GroupBy(c => c["Key"].AsString))
    {
        var key = hashItemsByKey.Key;
        var fields = new BsonDocument();
        foreach (var hash in hashItemsByKey)
        {
            fields[hash["Field"].AsString] = hash["Value"].AsString;
        }

        // some fields don't have 'ExpireAt' field set from previous migrations.
        // BUGFIX: take the max only over items that actually carry a non-null
        // 'ExpireAt' — the original called ToUniversalTime() on every item in
        // the group, which throws when the field is missing or holds BsonNull.
        BsonValue expireAt = BsonNull.Value;
        var expiringItems = hashItemsByKey
            .Where(c => c.Contains("ExpireAt") && c["ExpireAt"] != BsonNull.Value)
            .ToList();
        if (expiringItems.Any())
        {
            expireAt = expiringItems.Max(c => c["ExpireAt"].ToUniversalTime());
        }

        hashItemsToInsert.Add(new BsonDocument
        {
            ["Key"] = key,
            ["Fields"] = fields,
            ["_id"] = ObjectId.GenerateNewId(),
            ["ExpireAt"] = expireAt,
            ["_t"] = new BsonArray(new[] { "BaseJobDto", "ExpiringJobDto", "KeyJobDto", "HashDto" })
        });
    }

    jobGraph.InsertMany(hashItemsToInsert);
    return true;
}