private static void PrintBulkCommand(string bulkCommand, string bulkJson, BulkResult bulkResult, string ObjectName)
{
    Console.WriteLine("Parsed Bulk Results for " + ObjectName + " \r\n *************************************************************************************");

    foreach (var item in bulkResult.items)
    {
        Console.WriteLine("    operation: " + item.ResultType);
        Console.WriteLine("    _index: " + item.Result._index);
        Console.WriteLine("    _type: " + item.Result._type);
        Console.WriteLine("    _id: " + item.Result._id);
        Console.WriteLine();
    }
}
public static void BulkType(List<Bic_Iban_Codes> ModelData, ElasticConnection connection, PlainElastic.Net.Serialization.JsonNetSerializer serializer, string _index)
{
    string bulkCommand = new BulkCommand(index: _index, type: "iban_bic").Refresh();

    string bulkJson = new BulkBuilder(serializer)
        .BuildCollection(ModelData,
            (builder, pro) => builder.Index(data: pro, id: pro.CodeID.ToString())
        );

    string result = connection.Post(bulkCommand, bulkJson);
    BulkResult bulkResult = serializer.ToBulkResult(result);

    connection.Post(_index + "/_refresh");
}
public static void BulkType<T>(List<T> Model, ElasticConnection connection, PlainElastic.Net.Serialization.JsonNetSerializer serializer, string _index, string _type) where T : IElasticMapper
{
    string bulkCommand = new BulkCommand(index: _index, type: _type).Refresh();

    string bulkJson = new BulkBuilder(serializer)
        .BuildCollection(Model,
            (builder, pro) => builder.Index(data: pro, id: pro.id)
        );

    string result = connection.Post(bulkCommand, bulkJson);
    BulkResult bulkResult = serializer.ToBulkResult(result);

    connection.Post(_index + "/_refresh");
}
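// A minimal caller sketch for the generic BulkType<T> helper above. The IbanCodeDocument model,
// the assumption that IElasticMapper only demands the lower-case "id" property the helper reads,
// and the localhost:9200 endpoint are all illustrative assumptions, not part of the snippets here.
public class IbanCodeDocument : IElasticMapper
{
    public string id { get; set; }
    public string Bic { get; set; }
    public string Iban { get; set; }
}

public static void IndexIbanDocuments(List<IbanCodeDocument> documents)
{
    var connection = new ElasticConnection("localhost", 9200);
    var serializer = new PlainElastic.Net.Serialization.JsonNetSerializer();

    // Bulk-indexes the collection into the "bank_codes" index under the "iban_bic" type,
    // then the helper refreshes the index so the documents are immediately searchable.
    BulkType(documents, connection, serializer, "bank_codes", "iban_bic");
}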
private static void PrintBulkCommand(string bulkCommand, string bulkJson, BulkResult bulkResult)
{
    Console.WriteLine("Executed: \r\nPOST {0} \r\n{1} \r\n".F(bulkCommand, bulkJson));
    Console.WriteLine("Parsed Bulk Results");

    foreach (var item in bulkResult.items)
    {
        Console.WriteLine("    operation: " + item.ResultType);
        Console.WriteLine("    _index: " + item.Result._index);
        Console.WriteLine("    _type: " + item.Result._type);
        Console.WriteLine("    _id: " + item.Result._id);
        Console.WriteLine();
    }
}
private static void BulkTweetIndex(IEnumerable<Tweet> tweets, ElasticConnection connection, JsonNetSerializer serializer)
{
    string bulkCommand = new BulkCommand(index: "twitter", type: "tweet").Refresh();

    int id = 10; // start adding tweets from id = 10

    string bulkJson = new BulkBuilder(serializer)
        .BuildCollection(tweets,
            (builder, tweet) => builder.Index(data: tweet, id: (id++).AsString())
        );

    string result = connection.Post(bulkCommand, bulkJson);

    // Parse bulk result.
    BulkResult bulkResult = serializer.ToBulkResult(result);

    PrintBulkCommand(bulkCommand, bulkJson, bulkResult);
}
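// A minimal wiring sketch for the tweet example above, following the usual PlainElastic.Net setup.
// The Tweet properties, the sample data, and the localhost:9200 endpoint are assumptions for
// illustration only.
public static void Main()
{
    var connection = new ElasticConnection("localhost", 9200);
    var serializer = new JsonNetSerializer();

    var tweets = new List<Tweet>
    {
        new Tweet { User = "kimchy", Message = "Trying out Elasticsearch, so far so good?" }
    };

    // Builds one _bulk request with ?refresh, POSTs it, parses the response and prints each item.
    BulkTweetIndex(tweets, connection, serializer);
}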
public static List<BulkResult> ToBulkResult(this NestBulkResponse response)
{
    var result = new List<BulkResult>();

    if (response.Items != null)
    {
        result.AddRange(response.Items
            .Where(i => i.IsValid)
            .Select(item => BulkResult.Create(item.Id, StatusCodes.Status200OK, string.Empty)));
    }

    if (response.ItemsWithErrors != null)
    {
        result.AddRange(response.ItemsWithErrors
            .Select(item => BulkResult.Create(item.Id, StatusCodes.Status406NotAcceptable, item.Error.Reason)));
    }

    return result;
}
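// A minimal sketch of consuming the ToBulkResult extension above. NestBulkResponse is whatever
// bulk-response type the surrounding project aliases; obtaining it (e.g. from a NEST bulk call)
// is outside this sketch, and the StatusCode property name on BulkResult is an assumption.
public static int CountFailures(NestBulkResponse response)
{
    var results = response.ToBulkResult();

    // The extension maps valid items to 200 OK and failed items to 406 plus the error reason,
    // so anything other than 200 is counted as a failure here.
    return results.Count(r => r.StatusCode != StatusCodes.Status200OK);
}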
public BulkResults Bulk(string dataSetName, IEnumerable<object> documents, long requestSize, int parallelLimit)
{
    var dataSet = DataSet(dataSetName).DataSet;
    var results = new BulkResults();

    var validatedDocuments = documents
        .Select((document, index) => new
        {
            Index = index,
            Result = ValidateDocument(dataSetName, document),
            Document = document
        })
        .ToList();

    var invalidDocumentResults = validatedDocuments
        .Where(document => document.Result.IsFailure)
        .Select(document => BulkResult.Create(
            DocumentHelper.GetValue(document.Document, dataSet.IdField)?.ToString(),
            StatusCodes.Status400BadRequest,
            document.Result.Error))
        .ToList();
    results.Results.AddRange(invalidDocumentResults);

    var documentElastics = validatedDocuments
        .Where(obj => obj.Result.IsSuccess)
        .Select(document => new DocumentElastic
        {
            Id = DocumentHelper.GetValue(document.Document, dataSet.IdField).ToString(),
            DocumentObject = document.Document,
            Text = DocumentHelper.GetConcatenatedText(document.Document, dataSet.InterpretedFields)
        })
        .ToList();

    var bulkResponseStruct = DocumentQuery(dataSetName).ParallelBulkIndex(documentElastics, parallelLimit, requestSize);
    results.Results.AddRange(bulkResponseStruct.ToBulkResult());

    return results;
}
/// <summary>
/// Parse the JSON and return the bulk request.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when a required parameter is null or empty.</exception>
/// <exception cref="FormatException">Thrown when the 'baseUrlPattern' parameter is not correctly formatted.</exception>
/// <param name="jObj">JSON that will be parsed.</param>
/// <param name="baseUrlPattern">Base URL pattern.</param>
/// <returns>Bulk request or null.</returns>
public async Task<BulkRequestResponse> Parse(JObject jObj, string baseUrlPattern)
{
    // 1. Check parameters.
    if (jObj == null)
    {
        throw new ArgumentNullException(nameof(jObj));
    }

    if (string.IsNullOrWhiteSpace(baseUrlPattern))
    {
        throw new ArgumentNullException(nameof(baseUrlPattern));
    }

    if (!baseUrlPattern.Contains("{rootPath}"))
    {
        throw new FormatException("the baseUrlPattern is not correctly formatted");
    }

    // 2. Parse the request.
    var obj = jObj.ToObject<BulkRequest>();
    if (!obj.Schemas.Contains(Common.Constants.Messages.Bulk))
    {
        return new BulkRequestResponse
        {
            IsParsed = false,
            ErrorResponse = _errorResponseFactory.CreateError(
                ErrorMessages.TheRequestIsNotABulkOperation,
                HttpStatusCode.BadRequest,
                Common.Constants.ScimTypeValues.InvalidSyntax)
        };
    }

    if (obj.Operations == null)
    {
        return new BulkRequestResponse
        {
            IsParsed = false,
            ErrorResponse = _errorResponseFactory.CreateError(
                ErrorMessages.TheOperationsParameterMustBeSpecified,
                HttpStatusCode.BadRequest,
                Common.Constants.ScimTypeValues.InvalidSyntax)
        };
    }

    var response = new BulkResult
    {
        FailOnErrors = obj.FailOnErrors
    };

    Func<string, ErrorResponse> getBulkMethodNotSupported = (method) =>
    {
        return _errorResponseFactory.CreateError(
            string.Format(ErrorMessages.TheBulkMethodIsNotSupported, method),
            HttpStatusCode.BadRequest,
            Common.Constants.ScimTypeValues.InvalidSyntax);
    };

    Func<string, IList<string>> splitPath = (path) =>
    {
        if (string.IsNullOrWhiteSpace(path))
        {
            return null;
        }

        path = path.TrimStart('/');
        var subPaths = path.Split('/');
        if (!subPaths.Any() || subPaths.Count() > 2)
        {
            return null;
        }

        return subPaths;
    };

    Func<IList<string>, string> extractRootPath = (subPaths) =>
    {
        if (subPaths == null)
        {
            return null;
        }

        return subPaths[0];
    };

    Func<IList<string>, string> extractId = (subPaths) =>
    {
        if (subPaths == null || subPaths.Count() < 2)
        {
            return null;
        }

        return subPaths[1];
    };

    Func<string, string> getResourceType = (subPath) =>
    {
        if (subPath == null)
        {
            return null;
        }

        if (!Constants.MappingRoutePathsToResourceTypes.ContainsKey(subPath))
        {
            return null;
        }

        return Constants.MappingRoutePathsToResourceTypes[subPath];
    };

    var schemas = await _schemaStore.GetSchemas();
    var resourceTypes = schemas.Select(s => s.Name);

    // 3. Check operation parameters are correct.
    var operations = new List<BulkOperationResult>();
    foreach (var operation in obj.Operations)
    {
        try
        {
            JObject data = operation.Data as JObject;

            // 3.1. Check data.
            if (data == null)
            {
                return new BulkRequestResponse
                {
                    IsParsed = false,
                    ErrorResponse = _errorResponseFactory.CreateError(
                        ErrorMessages.TheBulkDataParameterMustBeSpecified,
                        HttpStatusCode.BadRequest,
                        Common.Constants.ScimTypeValues.InvalidSyntax)
                };
            }

            // 3.2. Check method.
            var httpMethod = new HttpMethod(operation.Method);
            if (!new[] { HttpMethod.Post, HttpMethod.Put, HttpMethod.Delete, new HttpMethod("PATCH") }.Contains(httpMethod))
            {
                return new BulkRequestResponse
                {
                    IsParsed = false,
                    ErrorResponse = getBulkMethodNotSupported(operation.Method)
                };
            }

            // 3.3. Check path.
            if (string.IsNullOrWhiteSpace(operation.Path))
            {
                return new BulkRequestResponse
                {
                    IsParsed = false,
                    ErrorResponse = _errorResponseFactory.CreateError(
                        ErrorMessages.TheBulkOperationPathIsRequired,
                        HttpStatusCode.BadRequest,
                        Common.Constants.ScimTypeValues.InvalidSyntax)
                };
            }

            var subPaths = splitPath(operation.Path);
            var rootPath = extractRootPath(subPaths);
            var resourceId = extractId(subPaths);
            var resourceType = getResourceType(rootPath);
            if (string.IsNullOrWhiteSpace(resourceType)
                || !resourceTypes.Contains(resourceType)
                || (httpMethod != HttpMethod.Post && string.IsNullOrWhiteSpace(resourceId)))
            {
                return new BulkRequestResponse
                {
                    IsParsed = false,
                    ErrorResponse = _errorResponseFactory.CreateError(
                        string.Format(ErrorMessages.TheBulkOperationPathIsNotSupported, operation.Path),
                        HttpStatusCode.BadRequest,
                        Common.Constants.ScimTypeValues.InvalidSyntax)
                };
            }

            // 3.4. Check bulkId.
            if (httpMethod == HttpMethod.Post && string.IsNullOrWhiteSpace(operation.BulkId))
            {
                return new BulkRequestResponse
                {
                    IsParsed = false,
                    ErrorResponse = _errorResponseFactory.CreateError(
                        ErrorMessages.TheBulkIdParameterMustBeSpecified,
                        HttpStatusCode.BadRequest,
                        Common.Constants.ScimTypeValues.InvalidSyntax)
                };
            }

            var schema = schemas.First(s => s.Name == resourceType);
            operations.Add(new BulkOperationResult
            {
                Data = data,
                BulkId = operation.BulkId,
                Method = httpMethod,
                Version = operation.Version,
                ResourceId = resourceId,
                SchemaId = schema.Id,
                ResourceType = resourceType,
                LocationPattern = baseUrlPattern.Replace("{rootPath}", rootPath) + "/{id}",
                Path = operation.Path
            });
        }
        catch (Exception)
        {
            return new BulkRequestResponse
            {
                IsParsed = false,
                ErrorResponse = getBulkMethodNotSupported(operation.Method)
            };
        }
    }

    response.Operations = operations;
    return new BulkRequestResponse
    {
        IsParsed = true,
        BulkResult = response
    };
}
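// A minimal caller sketch for the SCIM bulk parser above. The IBulkRequestParser abstraction,
// the payload shape, and the schema URI (the standard SCIM 2.0 BulkRequest message URI) are
// assumptions; only the "{rootPath}" token in the base URL pattern is required by Parse itself.
public async Task HandleBulkAsync(IBulkRequestParser bulkRequestParser)
{
    var payload = JObject.Parse(@"{
        ""schemas"": [ ""urn:ietf:params:scim:api:messages:2.0:BulkRequest"" ],
        ""Operations"": [
            { ""method"": ""POST"", ""path"": ""/Users"", ""bulkId"": ""qwerty"", ""data"": { ""userName"": ""alice"" } }
        ]
    }");

    // The pattern must contain the literal {rootPath} placeholder or Parse throws FormatException.
    var result = await bulkRequestParser.Parse(payload, "https://localhost:5443/{rootPath}");
    if (!result.IsParsed)
    {
        // result.ErrorResponse carries the SCIM error to return to the client.
        return;
    }

    // result.BulkResult.Operations now holds one BulkOperationResult per validated operation.
}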
/// <summary>
/// Bulk tag creation.
/// </summary>
/// <param name="dataSetName">The data set name.</param>
/// <param name="tags">The tags to create.</param>
/// <param name="parallelLimit">The parallel request limit.</param>
/// <param name="requestSize">The bulk request size.</param>
/// <returns></returns>
public Result<BulkResults> BulkCreate(string dataSetName, List<Tag> tags, int parallelLimit, long requestSize)
{
    var results = new BulkResults();
    var tagIsInteger = DataSet(dataSetName).TagIsInteger;

    if (tagIsInteger && tags.Any(tag => tag.Id.Any(ch => !char.IsDigit(ch))))
    {
        results.Results = tags
            .Where(tag => tag.Id.Any(ch => !char.IsDigit(ch)))
            .Select(t => BulkResult.Create(
                t.Id,
                StatusCodes.Status400BadRequest,
                string.Format(TagResources.TagIdShouldBeIntegerType, t.ParentId, t.Id)))
            .ToList();
        return Result.Ok(results);
    }

    var tagIdsByLevel = TagHelper.GetTagIdsByLevel(tags, item => item.ParentId, item => item.Id);
    var validIds = tagIdsByLevel.SelectMany(l => l.Value).ToList();
    var invalidIds = tags.Select(t => t.Id).Except(validIds);
    if (invalidIds.Any())
    {
        results.Results = tags
            .Where(t => invalidIds.Contains(t.Id))
            .Select(t => BulkResult.Create(
                t.Id,
                StatusCodes.Status404NotFound,
                string.Format(TagResources.ParentId_0_NotFoundInTagWithId_1, t.ParentId, t.Id)))
            .ToList();

        // Returns with OK status; the individual items contain the error code.
        return Result.Ok(results);
    }

    var orderedTagElasticList = tagIdsByLevel
        .SelectMany(dic => dic.Value)
        .Select(id =>
        {
            var tag = tags.FirstOrDefault(t => t.Id == id);
            var tagElastic = new TagElastic
            {
                Id = tag.Id,
                Name = tag.Name,
                ParentIdList = new List<string>()
            };
            if (!string.IsNullOrWhiteSpace(tag.ParentId))
            {
                tagElastic.ParentIdList.Add(tag.ParentId);
            }
            return tagElastic;
        })
        .ToList();

    TagHelper.AdjustTagElastics(orderedTagElasticList);

    var tagQuery = TagQuery(dataSetName);
    tagQuery.DeleteAll();

    var bulkResponseStruct = tagQuery.ParallelBulkIndex(orderedTagElasticList, parallelLimit, requestSize);
    results.Results.AddRange(bulkResponseStruct.ToBulkResult());

    return Result.Ok(results);
}
public async Task<int> LoadFile(
    IEnumerable<FileInfo> files,
    string connectionName,
    ConnectionType connectionType,
    int skip = 0,
    int take = 0,
    int maxThreads = 8,
    CancellationToken ct = default)
{
    using (var results = new BlockingCollection<IOperationResult>(new ConcurrentQueue<IOperationResult>()))
    {
        var bulkTimer = Stopwatch.StartNew();
        var exec = await _connectionManager.Open(connectionName, connectionType, ct);
        if (!await exec.TestConnection(ct))
        {
            _console.WriteLine("Unable to connect.");
            return 1;
        }

        // Drain the result queue on a background task and report each outcome as it arrives.
        var reporter = Task.Run(() =>
        {
            while (results.TryTake(out var result, -1))
            {
                _console.WriteLine(result.Message);
            }
        });

        foreach (var file in files)
        {
            var fileTimer = Stopwatch.StartNew();
            _console.WriteLine($"Executing queries from {file} on {connectionName} using {connectionType} using {maxThreads} threads.");

            IEnumerable<string> queries = await File.ReadAllLinesAsync(file.FullName);
            if (skip > 0)
            {
                _console.WriteLine($"skipping {skip} lines.");
                queries = queries.Skip(skip);
            }

            if (take > 0)
            {
                _console.WriteLine($"taking {take} lines.");
                queries = queries.Take(take);
            }

            var queriesProcessed = 0;
            var totalQueries = queries.Count();

            // Alternatives considered in earlier drafts (left commented out in the original):
            // Parallel.ForEach over a Partitioner, and a custom ForEachAsync reporting through
            // IProgress<IOperationResult>; see
            // https://gist.github.com/0xced/94f6c50d620e582e19913742dbd76eb6

            await queries.ParallelForEachAsync(
                async (q, queryNumber) =>
                {
                    Interlocked.Increment(ref queriesProcessed);
                    IOperationResult result;
                    try
                    {
                        var qResult = await exec.ExecuteQuery<dynamic>(q, ct);
                        result = new BulkResult<dynamic>(
                            qResult,
                            Thread.CurrentThread.ManagedThreadId,
                            file.Name,
                            queryNumber + skip,
                            queriesProcessed,
                            totalQueries,
                            fileTimer.Elapsed,
                            bulkTimer.Elapsed);
                    }
                    catch (System.Exception ex)
                    {
                        result = new ErrorResult(ex);
                    }

                    results.Add(result);
                },
                maxDegreeOfParallelism: maxThreads,
                cancellationToken: ct);
        }

        results.CompleteAdding();
        await reporter;
    }

    return 0;
}