private async Task LogBatchRequest(BatchData batch, string message, BatchManager.BatchResult result, bool autoWrite)
{
    await LogBatchRequest(batch, message, result);
    if (autoWrite)
    {
        Write();
    }
}
private async Task LogBatchUpdate(BatchData batch, string message, bool autoWrite)
{
    await LogBatchUpdate(batch, message);
    if (autoWrite)
    {
        Write();
    }
}
public BatchReturn Auth(string BatchReference, string NotificationUrl, BatchData BatchData)
{
    object[] results = this.Invoke("Auth", new object[] { BatchReference, NotificationUrl, BatchData });
    return (BatchReturn)(results[0]);
}
/// <remarks/>
public void AuthAsync(string BatchReference, string NotificationUrl, BatchData BatchData, object userState)
{
    if (this.AuthOperationCompleted == null)
    {
        this.AuthOperationCompleted = new System.Threading.SendOrPostCallback(this.OnAuthOperationCompleted);
    }
    this.InvokeAsync("Auth", new object[] { BatchReference, NotificationUrl, BatchData }, this.AuthOperationCompleted, userState);
}
private List<BatchData> GetBatchData(BatchRequest batchRequest)
{
    lock (myLock)
    {
        List<BatchData> datas = new List<BatchData>();
        m_timer_1.Start();
        SendBatchMsg(batchRequest);

        if (this.batchList.Count() != 0)
        {
            if (this.batchList.First().NotOL == true)
            {
                m_timer_1.Stop();
                return this.GetBatchList;
            }
            else
            {
                datas = CheckBatchData(batchRequest, this.batchList);
                return datas;
            }
        }
        else
        {
            // Poll for data here until something arrives or the timer expires
            while (true)
            {
                datas = CheckBatchData(batchRequest, this.batchList);
                if (datas.Count() != 0)
                {
                    break;
                }
                Thread.Sleep(1000);
                if (timeDown_1)
                {
                    timeDown_1 = false;
                    break;
                }
            }
        }

        m_timer_1.Stop();
        if (datas.Count() == 0)
        {
            // Timed out: return an error marker followed by an end marker
            BatchData error = new BatchData();
            error.StationId = batchRequest.StationId;
            error.TimeOut = true;
            datas.Add(error);

            BatchData endInfo = new BatchData();
            endInfo.StationId = batchRequest.StationId; // was a self-assignment; use the request's station id
            endInfo.End = true;
            datas.Add(endInfo);
        }
        return datas;
    }
}
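A hedged consumer sketch for the list GetBatchData returns: data records stream until an End marker, and a TimeOut marker means nothing arrived before the timer expired. The request variable and the printed fields are illustrative:

// Illustrative consumer; "request" is an assumed BatchRequest instance.
foreach (var d in GetBatchData(request))
{
    if (d.TimeOut)
    {
        // Request timed out before any data arrived
        break;
    }
    if (d.End)
    {
        // End-of-batch marker
        break;
    }
    Console.WriteLine($"{d.Time}: {d.Data}");
}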
// Returns the matching item, or null when no item matches. The original
// signature assigned the match to a by-value BatchData parameter, so the
// caller never observed it; returning the item fixes that.
private Task<BatchData> listHasItem(int id)
{
    foreach (var item in mainList.Clone())
    {
        if (item.Id == id)
        {
            return Task.FromResult(item);
        }
    }
    return Task.FromResult<BatchData>(null);
}
async Task<BatchData> RunAsync(BatchDataHttpMethod type)
{
    BatchData batch = null;
    using (var client = new HttpClient())
    {
        client.BaseAddress = baseUrl;
        client.DefaultRequestHeaders.Accept.Clear();
        client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
        ServicePointManager.SecurityProtocol = SecurityProtocolType.Tls11;

        // GET
        if (type == BatchDataHttpMethod.GET)
        {
            HttpResponseMessage response = await client.GetAsync("api/batchdata/1");
            if (response.IsSuccessStatusCode)
            {
                Debug.WriteLine("Batch Request Received");
                // ReadAsAsync requires the Microsoft.AspNet.WebApi.Client package
                return batch = await response.Content.ReadAsAsync<BatchData>();
            }
            else
            {
                Debug.WriteLine("Batch Request Failed");
                return batch;
            }
        }

        // POST
        if (type == BatchDataHttpMethod.POST)
        {
            HttpResponseMessage response = await client.PostAsJsonAsync<BatchData>("api/incbatchdata", jsonBatch);
            if (response.IsSuccessStatusCode)
            {
                Debug.WriteLine("Batch Posted");
                return null;
            }
            else
            {
                Debug.WriteLine("Batch Request Failed");
                return null;
            }
        }

        return null;
    }
}
/// <summary>
/// Adds batch data to the batch builder.
/// Geometry data is batched by key.
/// </summary>
/// <param name="key">Key to batch against.</param>
/// <param name="batchData">Data to add.</param>
/// <param name="matrix">Geometry transform to apply before adding.</param>
public void AddToBuilder(uint key, BatchData batchData, Matrix matrix)
{
    // Do nothing if sealed
    if (isSealed)
    {
        return;
    }

    BatchData builder;
    if (builderDictionary.ContainsKey(key))
    {
        // Get current batch data
        builder = builderDictionary[key];
    }
    else
    {
        // Start a new batch
        builder.Vertices = new List<VertexPositionNormalTextureBump>();
        builder.Indices = new List<int>();
        builderDictionary.Add(key, builder);
    }

    // Transform vertices
    for (int i = 0; i < batchData.Vertices.Count; i++)
    {
        VertexPositionNormalTextureBump vertex = batchData.Vertices[i];
        vertex.Position = Vector3.Transform(vertex.Position, matrix);
        vertex.Normal = Vector3.TransformNormal(vertex.Normal, matrix);
        batchData.Vertices[i] = vertex;
    }

    // Add new vertices to builder
    int currentVertex = builder.Vertices.Count;
    builder.Vertices.AddRange(batchData.Vertices);

    // Update indices to new vertex base
    for (int i = 0; i < batchData.Indices.Count; i++)
    {
        batchData.Indices[i] += currentVertex;
    }

    // Add indices to builder
    builder.Indices.AddRange(batchData.Indices);

    // Update dictionary
    builderDictionary[key] = builder;
}
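A minimal usage sketch for AddToBuilder, assuming "combiner" is an instance of the containing class; the key, transform, and quad data are illustrative, and the vertex list would be filled elsewhere:

// Hypothetical caller: batch a quad under a texture-derived key,
// translated 10 units along X before being merged into the builder.
BatchData quad = new BatchData
{
    Vertices = new List<VertexPositionNormalTextureBump>(), // four corner vertices go here
    Indices = new List<int> { 0, 1, 2, 0, 2, 3 }
};
uint textureKey = 42; // illustrative key value
combiner.AddToBuilder(textureKey, quad, Matrix.CreateTranslation(10f, 0f, 0f));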
/// <summary>
/// Adds another ModelCombiner to this one.
/// </summary>
/// <param name="other">Other ModelCombiner.</param>
/// <param name="matrix">Transform to apply.</param>
public void Add(ModelCombiner other, Matrix4x4 matrix)
{
    // Do nothing if sealed
    if (isSealed || other.isSealed)
    {
        return;
    }

    // Add other items to this builder
    foreach (var item in other.builderDictionary)
    {
        BatchData batchData = item.Value;
        AddData(item.Key, ref batchData, matrix);
    }
}
private BatchData GetFormData(FormCollection batch)
{
    BatchData data = new BatchData();
    data.dataField = batch["batchField"];
    string dataValue = batch["batchValue"];

    if (data.dataField == "ContactMetadata_Organization"
        || data.dataField == "ContactPublisher_Organization"
        || data.dataField == "ContactOwner_Organization")
    {
        dataValue = batch["OrganizationContactMetadata"];
    }
    else if (data.dataField == "MaintenanceFrequency")
    {
        dataValue = batch["MaintenanceFrequency"];
    }
    data.dataValue = dataValue;

    if (string.IsNullOrWhiteSpace(data.dataField) || string.IsNullOrWhiteSpace(data.dataValue))
    {
        return null;
    }

    List<string> uuids = new List<string>();
    if (batch["uuids"] != null)
    {
        uuids = batch["uuids"].Split(',').ToList();
    }

    if (string.IsNullOrWhiteSpace(Request.Form["updateAll"]) && uuids.Count == 0)
    {
        return null;
    }

    List<MetaDataEntry> mdList = new List<MetaDataEntry>();
    foreach (var uuid in uuids)
    {
        MetaDataEntry md = new MetaDataEntry();
        md.Uuid = uuid;
        mdList.Add(md);
    }
    data.MetaData = mdList;

    return data;
}
private async Task UploadSamplePrintResponseFile()
{
    var samplePrintResponse = new BatchData()
    {
        BatchNumber = 1,
        BatchDate = DateTime.Parse("2020-01-31T13:30:00.0000000Z"),
        PostalContactCount = 22,
        TotalCertificateCount = 48,
        ProcessedDate = DateTime.Parse("2020-02-03T15:30:00.0000000Z")
    };

    var filename = "PrintBatchResponse-001-3101201330.json";
    var path = $"{_printResponseDirectory}/Samples/{filename}";

    await _blobFileTransferClient.UploadFile(JsonConvert.SerializeObject(samplePrintResponse), path);
}
public void TestThrottleWork1(int timeSec, int elementPerSec)
{
    var throttleWork = Sys.ActorOf(Props.Create(() => new ThrottleWork(elementPerSec, timeSec)));
    throttleWork.Tell(new SetTarget(probe));

    int totalBatchCount = 10;                                    // total number of test items
    int expectedTestSec = (totalBatchCount / elementPerSec) + 2; // maximum expected completion time

    // Prepare the test data
    List<object> batchDatas = new List<object>();
    for (int i = 0; i < totalBatchCount; i++)
    {
        batchDatas.Add(new BatchData() { Data = $"test-{i}" });
    }
    var batchList = new BatchList(batchDatas.ToImmutableList());

    Within(TimeSpan.FromSeconds(expectedTestSec), () =>
    {
        // Enqueue all of the data at once.
        throttleWork.Tell(batchList);

        BatchData lastMessage = null;
        for (int i = 0; i < totalBatchCount; i++)
        {
            // Confirm items are processed at the configured per-second rate
            var batchData = probe.ExpectMsg<BatchData>();
            string strResult = $"{DateTime.Now.ToString()} {batchData.Data as string}";
            Console.WriteLine(strResult);
            if (i == totalBatchCount - 1)
            {
                lastMessage = batchData;
            }
        }

        // Check the last message
        Assert.Equal($"test-{totalBatchCount - 1}", lastMessage.Data as string);
    });
}
private async Task ProcessEachFileToUploadThenDelete(string fileToProcess)
{
    var stringBatchResponse = _fileTransferClient.DownloadFile(fileToProcess);
    var batchResponse = JsonConvert.DeserializeObject<BatchResponse>(stringBatchResponse);

    if (batchResponse?.Batch == null || batchResponse.Batch.BatchDate == DateTime.MinValue)
    {
        _aggregateLogger.LogInfo($"Could not process downloaded file to correct format [{fileToProcess}]");
        return;
    }

    batchResponse.Batch.DateOfResponse = DateTime.UtcNow;
    var batchNumber = batchResponse.Batch.BatchNumber;

    var batchLogResponse = await _assessorServiceApi.GetGetBatchLogByBatchNumber(batchNumber);
    if (batchLogResponse?.Id == null)
    {
        _aggregateLogger.LogInfo($"Could not match an existing batch log for batch number [{batchNumber}]");
        return;
    }

    if (!int.TryParse(batchNumber, out int batchNumberToInt))
    {
        _aggregateLogger.LogInfo($"The batch number is not an integer [{batchNumber}]");
        return;
    }

    var batch = new BatchData
    {
        BatchNumber = batchNumberToInt,
        BatchDate = batchResponse.Batch.BatchDate,
        PostalContactCount = batchResponse.Batch.PostalContactCount,
        TotalCertificateCount = batchResponse.Batch.TotalCertificateCount,
        PrintedDate = batchResponse.Batch.PrintedDate,
        PostedDate = batchResponse.Batch.PostedDate,
        DateOfResponse = batchResponse.Batch.DateOfResponse
    };

    await _assessorServiceApi.UpdateBatchDataInBatchLog((Guid)batchLogResponse.Id, batch);
    _fileTransferClient.DeleteFile(fileToProcess);
}
/// <summary>
/// Add or Update Log w/ AutoWrite. Automatically handles Add vs Update.
/// </summary>
/// <param name="batch"></param>
/// <param name="message"></param>
/// <param name="autoWrite"></param>
/// <returns></returns>
public async Task Log(BatchData batch, string message, bool autoWrite)
{
    // Add and Update logs share the same call; anything else writes directly
    switch (type)
    {
        case LogType.Update:
        case LogType.Add:
            Log(null, batch, message, null, BatchManager.BatchResult.NullResult, false, autoWrite);
            break;
        default:
            logger.Write(message);
            break;
    }
}
/// <summary>
/// Show blind-closed shifts.
/// </summary>
/// <param name="transaction">The current transaction instance.</param>
public void ShowBlindClosedShifts(IPosTransaction transaction)
{
    if (transaction == null)
    {
        NetTracer.Warning("transaction parameter is null");
        throw new ArgumentNullException("transaction");
    }

    BatchData batchData = new BatchData(Application.Settings.Database.Connection, Application.Settings.Database.DataAreaID);
    string operatorId = ApplicationSettings.Terminal.TerminalOperator.OperatorId;
    bool multipleShiftLogOn = Application.BusinessLogic.UserAccessSystem.UserHasPermission(operatorId, AllowMultipleShiftLogOnPermission);
    IList<IPosBatchStaging> blindClosedShifts = batchData.GetPosBatchesWithStatus(PosBatchStatus.BlindClosed, multipleShiftLogOn ? null : operatorId);

    using (BlindClosedShiftsForm blindClosedShiftsForm = new BlindClosedShiftsForm(blindClosedShifts))
    {
        POSFormsManager.ShowPOSForm(blindClosedShiftsForm);
    }
}
public RenderSet(Mesh mesh, Material material, int maxBatches)
{
    this.mesh = mesh;
    this.material = material;
    this.propertyBlock = new MaterialPropertyBlock();

    batches = new BatchData[maxBatches];
    batchSizes = new int[maxBatches];
    for (int i = 0; i < maxBatches; i++)
    {
        batches[i] = new BatchData
        (
            matrices: new Matrix4x4[BATCH_SIZE],
            colors: new Vector4[BATCH_SIZE]
        );
        batchSizes[i] = 0;
    }
}
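A sketch of how such a RenderSet might be drawn each frame, assuming BatchData exposes its matrices and colors arrays, BATCH_SIZE is at most 1023 (Unity's per-call instancing limit), and the shader reads an instanced vector array; the Render method and the property name are assumptions:

public void Render()
{
    for (int i = 0; i < batches.Length; i++)
    {
        if (batchSizes[i] == 0)
        {
            continue; // skip empty batches
        }
        // The "_Colors" property name must match the instanced array in the shader.
        propertyBlock.SetVectorArray("_Colors", batches[i].colors);
        Graphics.DrawMeshInstanced(mesh, 0, material, batches[i].matrices, batchSizes[i], propertyBlock);
    }
}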
public async Task<List<BatchResult>> Add(BatchData b)
{
    bool success = master.TryAdd(b);
    List<BatchResult> results = new List<BatchResult>();
    if (success)
    {
        if (!b.isListed)
        {
            mainList.Add(b);
            b.isListed = true;
            try
            {
                await Save(mainListPath);
                results.Add(BatchResult.ListSuccess);
            }
            catch
            {
                results.Add(BatchResult.CouldNotSaveList);
            }
        }
        try
        {
            await Save(mainPath);
            results.Add(BatchResult.Success);
        }
        catch
        {
            results.Add(BatchResult.CouldNotSave);
        }
    }
    else
    {
        results.Add(BatchResult.Failure);
    }

    var resLogger = new Logging(Logging.LogType.Results);
    foreach (var res in results)
    {
        await resLogger.Log(typeof(BatchManager), b, null, res);
    }
    resLogger.Write();

    return results;
}
private void BatchForData_EventHandler(object sender, BatchEventArgs e)
{
    try
    {
        batchMsg = new BatchMsg() { Msg = e.RawData };
        CBatchStruct info = e.Value;
        if (info == null)
        {
            return;
        }

        BatchData batchInfo = new BatchData();
        batchInfo.StationId = info.StationID;
        batchInfo.StationType = info.StationType == EStationType.ERainFall;
        this.GetBatchList.Add(batchInfo);

        foreach (var v in info.Datas)
        {
            BatchData dataInfo = new BatchData();
            dataInfo.Time = v.Time.ToString();
            dataInfo.Data = v.Data;
            this.GetBatchList.Add(dataInfo);
        }

        BatchData endInfo = new BatchData();
        endInfo.StationId = info.StationID; // was a self-assignment; tag the end marker with the station id
        endInfo.End = true;
        this.GetBatchList.Add(endInfo);
    }
    catch (Exception exp)
    {
        Debug.WriteLine(exp.Message);
    }
}
public async Task<List<BatchResult>> UpdateInc(int id, bool isInc, int lastIndex)
{
    List<BatchResult> results = new List<BatchResult>();

    // listHasItem now returns the matching item (or null); the original passed
    // a by-value BatchData that stayed null and would have thrown below.
    BatchData b = await listHasItem(id);
    if (b != null)
    {
        b.isAvail = isInc;
        b.LastComplete = lastIndex;
        try
        {
            if (b.isAvail)
            {
                bool success = incomplete.TryAdd(b);
                if (success)
                {
                    try
                    {
                        await Save(mainPath);
                    }
                    catch
                    {
                        results.Add(BatchResult.CouldNotSave);
                    }
                    results.Add(BatchResult.Updated);
                }
                else
                {
                    results.Add(BatchResult.Failure);
                }
            }
            else
            {
                results.Add(BatchResult.Completed);
            }
        }
        catch
        {
            results.Add(BatchResult.NotAvail);
        }
    }
    return results;
}
private async Task LogBatchRequest(BatchData batch, string message, BatchManager.BatchResult result)
{
    if (type == LogType.Request)
    {
        if (message == null)
        {
            logger.AddLine(string.Format("Batch {0} Requested. Result: {1}", batch.Key, result));
        }
        else
        {
            // message is expected to carry a {0} placeholder for the batch key
            logger.AddLine(string.Format(message + " Result: {1}", batch.Key, result));
        }
    }
    else
    {
        var exLogger = new Logging(LogType.Exceptions);
        Exception ex = new Exception(exceptionMessage(LogType.Request));
        exLogger.LogException(typeof(Logging), ex, false, true);
        throw ex;
    }
}
static async Task Main()
{
    Console.Title = "UserApplication";

    var batchSubmitter = new BatchSubmitter(ServerInfo.UserApplicationWebBackendBaseAddress);

    using (var hubConnection = new HubConnection(ServerInfo.UserApplicationWebBackendBaseAddress))
    {
        var hubProxy = hubConnection.CreateHubProxy("UserApplicationHub");
        hubProxy.On<string>("PushStatusUpdate", Console.WriteLine);

        await hubConnection.Start().ConfigureAwait(false);

        Console.WriteLine("Press 'S' to send a batch request.");
        Console.WriteLine("Press the escape key to exit.");

        while (true)
        {
            var key = Console.ReadKey();
            Console.WriteLine();
            if (key.Key == ConsoleKey.S)
            {
                Console.WriteLine("Input a batch id and press enter.");
                Console.Write("Batch id: ");
                var batchId = Console.ReadLine().Replace("Batch id: ", "");
                await batchSubmitter.Submit(batchId, BatchData.Generate()).ConfigureAwait(false);
            }
            else if (key.Key == ConsoleKey.Escape)
            {
                break;
            }
        }

        hubConnection.Stop();
    }
}
/// <summary>
/// Suspend the current batch.
/// </summary>
/// <param name="transaction">The current transaction instance.</param>
public void SuspendShift(IPosTransaction transaction)
{
    if (transaction == null)
    {
        NetTracer.Warning("transaction parameter is null");
        throw new ArgumentNullException("transaction");
    }

    BatchData batchData = new BatchData(Application.Settings.Database.Connection, Application.Settings.Database.DataAreaID);

    transaction.Shift.OpenedAtTerminal = string.Empty;
    transaction.Shift.CashDrawer = string.Empty;
    transaction.Shift.Status = PosBatchStatus.Suspended;
    transaction.Shift.StatusDateTime = DateTime.Now;

    batchData.UpdateBatch(transaction.Shift);
    ShiftUsersCache.Remove(transaction.Shift);
    transaction.Shift.Print();

    this.Application.Services.Dialog.ShowMessage(51342);
}
static void DoStaticBatching()
{
    Dictionary<string, BatchData> CombineMap = new Dictionary<string, BatchData>();
    Renderer[] renderers = GameObject.FindObjectsOfType<Renderer>();
    for (int i = 0; i < renderers.Length; i++)
    {
        Renderer renderer = renderers[i];
        if (GameObjectUtility.AreStaticEditorFlagsSet(renderer.gameObject, StaticEditorFlags.BatchingStatic))
        {
            MeshFilter meshFilter = renderer.GetComponent<MeshFilter>();
            if (meshFilter != null)
            {
                Material[] materials = renderer.sharedMaterials;
                Mesh mesh = meshFilter.sharedMesh;
                for (int j = 0; j < materials.Length; j++)
                {
                    var key = BatchData.GetKey(materials[j], renderer.lightmapIndex);
                    BatchData batchData;
                    if (!CombineMap.TryGetValue(key, out batchData))
                    {
                        batchData = new BatchData(materials[j], renderer.lightmapIndex);
                        CombineMap.Add(key, batchData);
                    }
                    batchData.AddMesh(meshFilter, j);
                }
            }
        }
    }

    var itor = CombineMap.GetEnumerator();
    while (itor.MoveNext())
    {
        var batchData = itor.Current.Value;
        batchData.Batching();
    }
}
// POST: api/IncBatchData
public async Task Post([FromBody] BatchData value)
{
    List<BatchManager.BatchResult> results = null;
    if (value.isAvail)
    {
        results = await BatchManager.Instance.UpdateInc(value.Id, value.isAvail, value.LastComplete);
        if (!results.Contains(BatchManager.BatchResult.Updated))
        {
            var inc = new IncBatchData();
            bool success = await BatchManager.Instance.AddToQueue(inc.Convert(value));
            if (success)
            {
                BatchManager.Instance.LoadQueue();
                await logger.Log(value, "{0} Was not Updated. Adding to queue.");
            }
        }
    }
    else
    {
        // Log results
        await logger.Log(value, null);
    }

    if (results != null)
    {
        foreach (var r in results)
        {
            await resultsLogger.Log(typeof(IncBatchDataController), value, null, r);
        }
    }
    else
    {
        await resultsLogger.Log(typeof(IncBatchDataController), value, null, BatchManager.BatchResult.NullResult);
    }
}
/// <summary>
/// Print recently closed batch report (Z-Report).
/// </summary>
/// <param name="transaction"></param>
public void PrintZReport(IPosTransaction transaction)
{
    ApplicationLog.Log("EOD.PrintZReport", "Printing Z report.", LogTraceLevel.Trace);

    if (FiscalPrinter.FiscalPrinter.Instance.FiscalPrinterEnabled())
    {
        FiscalPrinter.FiscalPrinter.Instance.PrintZReport(transaction);
        return;
    }

    BatchData batchData = new BatchData(Application.Settings.Database.Connection, Application.Settings.Database.DataAreaID);
    Batch batch = batchData.ReadRecentlyClosedBatch(ApplicationSettings.Terminal.TerminalId);
    if (batch != null)
    {
        // Print batch.
        POSFormsManager.ShowPOSMessageWithBackgroundWorker(99, delegate { batch.Print(ReportType.ZReport); });
    }
    else
    {
        NetTracer.Information("EOD::PrintZReport: batch is null");
    }
}
private void BatchForData_EventHandler(object sender, BatchEventArgs e)
{
    try
    {
        CBatchStruct info = e.Value;
        if (info == null)
        {
            return;
        }
        if (BatchList.Bdata.Count != 0 && info.StationID == BatchList.StationId)
        {
            return;
        }

        BatchList.RawInfo = e.RawData;
        if (info.StationType == EStationType.ERainFall)
        {
            BatchList.SType = BatchList.Types.stationType.RainStation;
        }
        else if (info.StationType == EStationType.ERiverWater)
        {
            BatchList.SType = BatchList.Types.stationType.WaterStation;
        }

        foreach (var v in info.Datas)
        {
            BatchData dataInfo = new BatchData();
            dataInfo.BatchTime = v.Time.ToString();
            dataInfo.BatchValue = v.Data;
            BatchList.Bdata.Add(dataInfo);
        }
    }
    catch (Exception exp)
    {
        Debug.WriteLine(exp.Message);
    }
}
public ActionResult BatchUpdate(FormCollection batch)
{
    if (User.Identity.IsAuthenticated)
    {
        Log.Info("Starting batch update metadata.");
        BatchData data = GetFormData(batch);

        if (!string.IsNullOrWhiteSpace(Request.Form["updateAll"]))
        {
            if (data != null)
            {
                new Thread(() => _batchService.UpdateAll(data, GetUsername(), GetSecurityClaim("organization"))).Start();
                TempData["message"] = "Batch-oppdatering: " + data.dataField + " = " + data.dataValue + ", er startet og kjører i bakgrunnen!";
            }
            else
            {
                TempData["failure"] = "Ingen oppdatering valgt";
            }
        }
        else
        {
            if (data != null)
            {
                new Thread(() => _batchService.Update(data, GetUsername())).Start();
                TempData["message"] = "Batch-oppdatering: " + data.dataField + " = " + data.dataValue + ", er startet og kjører i bakgrunnen!";
            }
            else
            {
                TempData["failure"] = "Ingen oppdatering valgt";
            }
        }
    }
    return RedirectToAction("Index");
}
protected async void btnProcess_Click(object sender, EventArgs e)
{
    if (FileUpload1.HasFile)
    {
        var reader = new StreamReader(FileUpload1.FileContent);
        var document = reader.ReadToEnd();
        reader.Close();

        string fileName = tbFileName.Text;
        bool success = gen.saveDocument(document, path, fileName, fileName);
        if (success)
        {
            // Fall back to the default batch size when no valid override is
            // entered; the original TryParse overwrote the default with 0 on failure.
            int batchSize;
            if (int.TryParse(tbBatchSize.Text, out batchSize))
            {
                lblBatchSize.Text = string.Format("Batch Size {0}", batchSize);
            }
            else
            {
                batchSize = 1000;
            }

            var docList = gen.splitDocument(document, batchSize, true);
            int docCount = 1;
            List<BatchManager.BatchResult> results = null;
            foreach (var doc in docList)
            {
                var batch = new BatchData();
                batch.Id = docCount;
                var name = fileName + "_" + docCount.ToString();
                batch.Key = name;
                batch.Data = doc.Replace("\r", "");
                batch.LastComplete = 0;
                batch.isAvail = true;
                gen.saveDocument(doc, path, fileName, name);
                docCount++;
                results = await BatchManager.Instance.Add(batch);
            }

            if (results != null && !results.Contains(BatchManager.BatchResult.Success))
            {
                foreach (var result in results)
                {
                    await logger.Log(typeof(Admin), null, null, result);
                }
            }

            BatchManager.Instance.SetTotal(docCount);
            lblProcess.ForeColor = System.Drawing.Color.Green;
            lblProcess.Text = "Done: Success!";
        }
        else
        {
            lblFileUpload.ForeColor = System.Drawing.Color.Red;
            lblFileUpload.Text = "Upload Failed: File Already Exists";
        }
    }
    else
    {
        lblFileUpload.ForeColor = System.Drawing.Color.Red;
        lblFileUpload.Text = "Upload Failed: File not found";
    }
}
public async Task UpdateBatchDataInBatchLog(Guid batchId, BatchData batchData)
{
    await _httpClient.PutAsJsonAsync("/api/v1/batches/update-batch-data", new { Id = batchId, BatchData = batchData });
}
private static string GetOutputFileName(BatchData batchInfo)
{
    var suffix = batchInfo.HasLimit ? $"_Batch{batchInfo.Id}" : string.Empty;
    var fileName = $"{batchInfo.Prefix}_{DateTime.Now.ToString("yyyyMMddHHmmssfff")}{suffix}.txt";
    return Path.Combine(batchInfo.Location, fileName);
}
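For illustration, a hedged example of the name this produces (property values assumed, clock time 2020-01-31 13:30:00.123):

var info = new BatchData
{
    Prefix = "Pass",
    Location = @"C:\out",
    HasLimit = true,
    Id = 3
};
// GetOutputFileName(info) would return something like:
//   C:\out\Pass_20200131133000123_Batch3.txt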
/// <summary>
/// Writes the validated document to the specified output location.
/// Assumes all the regular IO permissions apply.
/// If not, an IOException will be thrown.
/// </summary>
/// <param name="batchInfo">The current batch information.</param>
/// <param name="records">The records in the buffer to be written.</param>
/// <param name="isFailOutput">Indicates if the records are being written to the fail file.</param>
/// <returns>Updated batch information.</returns>
/// <exception cref="InvalidOperationException">Thrown when the document's output type is not a recognized DocumentType.</exception>
private BatchData Write(BatchData batchInfo, IList<List<string>> records, bool isFailOutput)
{
    var bufferRecordCount = records.Count();
    var bufferRecordIndex = 0;
    var delimiter = isFailOutput && Document.Options.UseInputSettingsForErrors
        ? Document.InputDelimiter.DecodeDelimiter()
        : Document.OutputDelimiter.DecodeDelimiter();

    string headerLine;
    switch (Document.OutputType)
    {
        case DocumentType.FixedLength:
            headerLine = WriteFixedLengthLine(CreateHeaderRow(isFailOutput));
            break;
        case DocumentType.Delimited:
            headerLine = WriteDelimitedLine(CreateHeaderRow(isFailOutput), delimiter);
            break;
        default:
            throw new InvalidOperationException($"Schema for Egest document has invalid type: {Document.OutputType}");
    }

    while (bufferRecordIndex < bufferRecordCount)
    {
        using (var file = new StreamWriter(batchInfo.FileName, true))
        {
            if ((!isFailOutput && Document.OutputHasHeader && batchInfo.Size == 0)
                || (isFailOutput && Document.Options.UseInputSettingsForErrors && Document.InputHasHeader && batchInfo.Size == 0)
                || (isFailOutput && !Document.Options.UseInputSettingsForErrors && Document.OutputHasHeader && batchInfo.Size == 0))
            {
                file.WriteLine(headerLine);
            }

            while ((!batchInfo.HasLimit || batchInfo.Size < batchInfo.Limit) && bufferRecordIndex < bufferRecordCount)
            {
                var record = records[bufferRecordIndex];
                string line;
                switch (Document.OutputType)
                {
                    case DocumentType.FixedLength:
                        line = WriteFixedLengthLine(record);
                        break;
                    case DocumentType.Delimited:
                        line = WriteDelimitedLine(record, delimiter);
                        break;
                    default:
                        throw new InvalidOperationException($"Schema for Egest document has invalid type: {Document.OutputType}");
                }
                file.WriteLine(line);
                bufferRecordIndex++;
                batchInfo.Size++;

                if (!isFailOutput && batchInfo.HasLimit && batchInfo.Size >= batchInfo.Limit)
                {
                    batchInfo.IsFull = true;
                    break;
                }
            }
        }

        if (batchInfo.IsFull)
        {
            // Batch reached its row limit: promote the temp file to its final
            // name and start a fresh temp file for the next batch.
            var finalBatchName = GetOutputFileName(batchInfo);
            UpdateOutputFile(batchInfo.FileName, finalBatchName);
            batchInfo.FileName = GetTempFileName(batchInfo.Location);
            batchInfo.Size = 0;
            batchInfo.IsFull = false;
            batchInfo.Id++;
        }
    }

    records.Clear();
    return batchInfo;
}
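A minimal sketch of how Write might be driven from the buffering side; FlushPass is a hypothetical helper, while PassBatchData and PassRecords are the fields initialized in the FileSystemWriter constructor at the end of this section:

private void FlushPass()
{
    // Write buffered pass records, rolling over to a new batch file when the
    // limit is hit, and keep the updated batch state (size, id, temp file).
    PassBatchData = Write(PassBatchData, PassRecords, isFailOutput: false);
}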
/// <summary>
/// Adds model data to combiner.
/// </summary>
/// <param name="modelData">Model data to add.</param>
/// <param name="matrix">Transform to apply.</param>
public void Add(ref ModelData modelData, Matrix4x4 matrix)
{
    // Do nothing if sealed
    if (isSealed)
        return;

    // Iterate submeshes
    foreach (var sm in modelData.SubMeshes)
    {
        // Start new batch data for this submesh
        BatchData batchData = new BatchData();
        batchData.Vertices = new List<Vector3>();
        batchData.Normals = new List<Vector3>();
        batchData.UVs = new List<Vector2>();
        batchData.Indices = new List<int>();

        int counter = 0;
        int index = sm.StartIndex;
        for (int tri = 0; tri < sm.PrimitiveCount; tri++)
        {
            // Get indices
            int i1 = modelData.Indices[index++];
            int i2 = modelData.Indices[index++];
            int i3 = modelData.Indices[index++];

            // Get vertices
            Vector3 vert1 = modelData.Vertices[i1];
            Vector3 vert2 = modelData.Vertices[i2];
            Vector3 vert3 = modelData.Vertices[i3];

            // Get normals
            Vector3 norm1 = modelData.Normals[i1];
            Vector3 norm2 = modelData.Normals[i2];
            Vector3 norm3 = modelData.Normals[i3];

            // Get UVs (two-component texture coordinates)
            Vector2 uv1 = modelData.UVs[i1];
            Vector2 uv2 = modelData.UVs[i2];
            Vector2 uv3 = modelData.UVs[i3];

            // Add vertices
            batchData.Vertices.Add(vert1);
            batchData.Vertices.Add(vert2);
            batchData.Vertices.Add(vert3);

            // Add normals
            batchData.Normals.Add(norm1);
            batchData.Normals.Add(norm2);
            batchData.Normals.Add(norm3);

            // Add UVs
            batchData.UVs.Add(uv1);
            batchData.UVs.Add(uv2);
            batchData.UVs.Add(uv3);

            // Add indices
            batchData.Indices.Add(counter++);
            batchData.Indices.Add(counter++);
            batchData.Indices.Add(counter++);
        }

        // Add to builder
        int key = MaterialReader.MakeTextureKey((short)sm.TextureArchive, (byte)sm.TextureRecord, (byte)0);
        AddData(key, ref batchData, matrix);
    }
}
/// <summary>
/// Adds batch data to combiner.
/// </summary>
/// <param name="key">Key to combine against.</param>
/// <param name="batchData">Data to add.</param>
/// <param name="matrix">Transform to apply.</param>
private void AddData(int key, ref BatchData batchData, Matrix4x4 matrix)
{
    // Do nothing if sealed
    if (isSealed)
        return;

    BatchData builder;
    if (builderDictionary.ContainsKey(key))
    {
        // Get current batch data
        builder = builderDictionary[key];
    }
    else
    {
        // Start a new batch
        builder.Vertices = new List<Vector3>();
        builder.Normals = new List<Vector3>();
        builder.UVs = new List<Vector2>();
        builder.Indices = new List<int>();
        builderDictionary.Add(key, builder);
    }

    // Transform vertices and normals by matrix
    for (int i = 0; i < batchData.Vertices.Count; i++)
    {
        Vector3 position = matrix.MultiplyPoint3x4(batchData.Vertices[i]);
        Vector3 normal = matrix.MultiplyVector(batchData.Normals[i]);
        batchData.Vertices[i] = position;
        batchData.Normals[i] = normal;
    }

    // Add new vertices to builder
    int currentVertex = builder.Vertices.Count;
    builder.Vertices.AddRange(batchData.Vertices);
    builder.Normals.AddRange(batchData.Normals);
    builder.UVs.AddRange(batchData.UVs);

    // Update vertex count
    vertexCount += batchData.Vertices.Count;

    // Update indices to new vertex base
    for (int i = 0; i < batchData.Indices.Count; i++)
    {
        batchData.Indices[i] += currentVertex;
    }

    // Add indices to builder
    builder.Indices.AddRange(batchData.Indices);

    // Update dictionary
    builderDictionary[key] = builder;
}
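The "+= currentVertex" rebasing is the heart of the combine step: when one batch's vertices are appended after another's, each incoming index must shift by the number of vertices already in the builder. A tiny worked example with illustrative values:

// Builder already holds 4 vertices, so its existing indices are 0..3.
// An incoming triangle with indices { 0, 1, 2 } is rebased by
// currentVertex = 4 to { 4, 5, 6 }, still addressing the same vertices
// after they are appended to the builder's vertex list.
int currentVertex = 4;
int[] incoming = { 0, 1, 2 };
for (int i = 0; i < incoming.Length; i++)
{
    incoming[i] += currentVertex; // now { 4, 5, 6 }
}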
/// <summary>
/// Constructor of the file system writer class.
/// </summary>
/// <param name="argument">The argument.</param>
/// <exception cref="ArgumentNullException">No target folder has been specified.</exception>
public FileSystemWriter(PipelineArgument argument)
{
    PassRecords = new List<List<string>>();
    FailRecords = new List<List<string>>();
    PropertyBag = argument.PropertyBag;
    Document = argument.Document;

    var targetFolder = PropertyBag.GetProperty(PledgeGlobal.ExportFolderKey);
    if (string.IsNullOrWhiteSpace(targetFolder))
    {
        throw new ArgumentNullException(nameof(targetFolder), @"No target folder has been specified");
    }

    var passPrefix = PropertyBag.GetProperty(PledgeGlobal.PassPrefixKey);
    var failPrefix = PropertyBag.GetProperty(PledgeGlobal.FailPrefixKey);

    int maxRowsPerOutput;
    if (!int.TryParse(PropertyBag.GetProperty(PledgeGlobal.MaxRowsPerFileKey), out maxRowsPerOutput))
    {
        maxRowsPerOutput = 0;
    }

    PassBatchData = new BatchData
    {
        Id = maxRowsPerOutput > 0 ? 1 : 0,
        Limit = maxRowsPerOutput,
        HasLimit = maxRowsPerOutput > 0,
        Location = targetFolder,
        FileName = GetTempFileName(targetFolder),
        Prefix = string.IsNullOrWhiteSpace(passPrefix) ? _defaultPassPrefix : $"{passPrefix}_{_defaultPassPrefix}"
    };

    FailBatchData = new BatchData
    {
        Location = targetFolder,
        FileName = GetTempFileName(targetFolder),
        Prefix = string.IsNullOrWhiteSpace(failPrefix) ? _defaultFailPrefix : $"{failPrefix}_{_defaultFailPrefix}"
    };
}