public void UnmergeBatch(BatchKey key)
{
    if (GetBatchExists(key))
    {
        StaticBatches[key].Unmerge();
    }
}

private void PopulateComponentAmounts(BatchKey compositionId, List<BatchReportEntryBase> resultReport)
{
    var compositionPartials = m_batchRepository.GetBatches(compositionId);
    var usedAmounts = new Dictionary<string, Amount>(resultReport.Count);

    // Sum the used amount per component batch across all partial batches of the composition.
    foreach (var composition in compositionPartials)
    {
        foreach (var componentEntity in composition.Components)
        {
            var componentBatch = componentEntity.Component;
            var componentBatchKey = new BatchKey(componentBatch.MaterialId, componentBatch.BatchNumber).UnsafeToString();

            var usedAmount = new Amount(componentEntity.Volume, componentEntity.Unit);
            if (usedAmounts.TryGetValue(componentBatchKey, out var rollingSum))
            {
                usedAmount = m_amountProcessor.Add(rollingSum, usedAmount);
            }

            usedAmounts[componentBatchKey] = usedAmount;
        }
    }

    foreach (var row in resultReport.OfType<BatchReportEntry>())
    {
        var usageKey = new BatchKey(row.MaterialId, row.BatchNumber).UnsafeToString();
        if (usedAmounts.TryGetValue(usageKey, out var amount))
        {
            row.CustomField1 = amount.ToString();
        }
    }
}

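// A minimal, self-contained sketch of the per-key accumulation idiom used above
// (read the current total, add the new usage, write it back under the same key).
// The Amount record, the Add helper and the key strings here are hypothetical
// stand-ins; the real Amount/m_amountProcessor also handle unit conversion.
using System;
using System.Collections.Generic;

static class AmountAccumulationSketch
{
    // Hypothetical stand-in for the real Amount type (value + unit symbol only).
    sealed record Amount(decimal Value, string Unit)
    {
        public override string ToString() => $"{Value} {Unit}";
    }

    // Hypothetical stand-in for m_amountProcessor.Add; assumes both amounts share a unit.
    static Amount Add(Amount a, Amount b)
    {
        if (a == null) return b;
        if (a.Unit != b.Unit) throw new InvalidOperationException("Unit conversion omitted in this sketch.");
        return a with { Value = a.Value + b.Value };
    }

    static void Main()
    {
        var usedAmounts = new Dictionary<string, Amount>();
        var components = new[]
        {
            (Key: "12:BATCH-A", Used: new Amount(1.5m, "kg")),
            (Key: "12:BATCH-A", Used: new Amount(0.5m, "kg")),
            (Key: "13:BATCH-B", Used: new Amount(3m, "pcs"))
        };

        // Same rolling-sum idiom as PopulateComponentAmounts.
        foreach (var c in components)
        {
            var amount = c.Used;
            if (usedAmounts.TryGetValue(c.Key, out var rollingSum))
            {
                amount = Add(rollingSum, amount);
            }

            usedAmounts[c.Key] = amount;
        }

        foreach (var kv in usedAmounts)
        {
            Console.WriteLine($"{kv.Key}: {kv.Value}"); // 12:BATCH-A: 2.0 kg, 13:BATCH-B: 3 pcs
        }
    }
}
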
public void Register(KAnimConverter.IAnimConverter controller)
{
    if (!isReady)
    {
        Debug.LogError($"Batcher isn't finished setting up, controller [{controller.GetName()}] is registering too early.");
    }

    BatchKey batchKey = BatchKey.Create(controller);
    Vector2I vector2I = ControllerToChunkXY(controller);
    if (!batchSets.TryGetValue(batchKey, out BatchSet value))
    {
        value = new BatchSet(GetBatchGroup(new BatchGroupKey(batchKey.groupID)), batchKey, vector2I);
        batchSets[batchKey] = value;

        if (value.key.materialType == KAnimBatchGroup.MaterialType.UI)
        {
            uiBatchSets.Add(new BatchSetInfo
            {
                batchSet = value,
                isActive = false,
                spatialIdx = vector2I
            });
        }
        else
        {
            culledBatchSetInfos.Add(new BatchSetInfo
            {
                batchSet = value,
                isActive = false,
                spatialIdx = vector2I
            });
        }
    }

    value.Add(controller);
}

protected BatchReportEntryBase(BatchKey batchKey)
{
    // Ensure the key can be resolved before it is stored.
    batchKey.UnsafeToString();
    BatchKey = batchKey;
}

public IEnumerable<Tuple<IPurchaseOrder, decimal>> GetOrdersByUsedBatch(BatchKey batch, int pageSize, int pageNumber)
{
    var orderIds = new List<Tuple<long, decimal>>();

    m_database.Sql().Call("GetOrderIdsByUsedBatch")
        .WithParam("@projectId", m_session.Project.Id)
        .WithParam("@materialId", batch.GetMaterialId(m_batchFacade))
        .WithParam("@batchNumber", batch.GetBatchNumber(m_batchFacade))
        .WithParam("@skip", pageSize * pageNumber)
        .WithParam("@take", pageSize)
        .ReadRows<long, int, string, decimal>(
            (orderId, prio, orderNum, qty) => { orderIds.Add(new Tuple<long, decimal>(orderId, qty)); });

    var entities = m_database.SelectFrom<IPurchaseOrder>()
        .Where(o => o.Id.InCsv(orderIds.Select(i => i.Item1)))
        .Execute()
        .ToList();

    // Preserve the ordering returned by the stored procedure.
    foreach (var id in orderIds)
    {
        var entity = entities.FirstOrDefault(e => e.Id == id.Item1);
        if (entity != null)
        {
            yield return new Tuple<IPurchaseOrder, decimal>(entity, id.Item2);
        }
    }
}

public void SetBatchPreferrence(BatchKey batchKey)
{
    var materialId = batchKey.GetMaterialId(m_batchRepository);
    var batchNumber = batchKey.GetBatchNumber(m_batchRepository);

    var existingPreference = LoadPreferrences().FirstOrDefault(p => p.MaterialId == materialId)
                             ?? m_database.New<IPackingPreferredBatch>(p =>
                             {
                                 p.MaterialId = materialId;
                                 p.UserId = m_session.User.Id;
                             });

    if (existingPreference.BatchNumber?.Equals(batchNumber, StringComparison.InvariantCultureIgnoreCase) != true)
    {
        m_cache.Remove(GetPreferredBatchesCacheKey());
    }
    else if ((DateTime.Now - existingPreference.LastActivity).TotalMinutes < 5)
    {
        // The preference is unchanged and was touched recently; skip the write to reduce DB load.
        return;
    }

    existingPreference.BatchNumber = batchNumber;
    existingPreference.LastActivity = DateTime.Now;
    m_database.Save(existingPreference);
}

private BatchReportModel LoadSegments(BatchKey key, int pageNumber)
{
    var segments = m_batchRepository.GetBatches(key).OrderBy(b => b.Created).ToList();

    var entry = new BatchSegmentsReportEntry(key);
    foreach (var b in segments)
    {
        entry.Segments.Add(new BatchSegmentModel
        {
            SegmentId = b.Id,
            Amount = new Amount(b.Volume, m_unitRepository.GetUnit(b.UnitId)).ToString(),
            Author = m_userRepository.GetUserNick(b.AuthorId),
            Date = StringUtil.FormatDate(b.Created),
            Price = $"{StringUtil.FormatPrice(b.ProductionWorkPrice ?? 0m)} CZK",
            HasRecipe = b.RecipeId != null
        });
    }

    var result = new BatchReportModel();
    result.Report.Add(entry);

    return result;
}

public IEnumerable<StockEventViewModel> GetBatchEvents(string batchId, string eventTypeName)
{
    var eventType = m_eventRepository.GetAllEventTypes().FirstOrDefault(etp => etp.TabTitle == eventTypeName).Ensure();

    var rawEvents = m_eventRepository.GetBatchEvents(BatchKey.Parse(batchId))
        .Where(e => e.TypeId == eventType.Id)
        .Select(e => new StockEventViewModel(e));

    var result = new List<StockEventViewModel>();
    foreach (var rawEvent in rawEvents)
    {
        // Merge events that share a grouping key into a single row.
        var sameGroupEvent = result.FirstOrDefault(e => e.GroupingKey.Equals(rawEvent.GroupingKey));
        if (sameGroupEvent != null)
        {
            sameGroupEvent.Join(rawEvent, m_amountProcessor);
            continue;
        }

        result.Add(rawEvent);
    }

    return result;
}

public void AddToBatches(BatchKey key, RuntimeSurfaceGeometry surfaceGeometry)
{
    if (!separateLights)
    {
        key.sourceLight = null;
        key.layeredTransparentSideSourceLight = null;
    }

#if USE_TEXTURE_ARRAYS
    if (!separateTextures)
    {
        key.sourceShapeDescriptor = ShapeDescriptor.Empty;
        key.layeredTransparentSideShapeDescriptor = ShapeDescriptor.Empty;
    }
#endif

    if (!separateShaders)
    {
        key.sourceMaterial = null;
        key.layeredTransparentSideSourceMaterial = null;
    }

    if (!StaticBatches.ContainsKey(key))
    {
        StaticBatches[key] = new SurfaceBatch(GetUniqueMaterials(key), key.sourceMedia);
    }

    StaticBatches[key].AddSurface(surfaceGeometry);
}

private BatchReportModel LoadSaleEvents(BatchKey key, int pageNumber)
{
    var events = m_saleEventRepository.GetAllocationsByBatch(key).ToList();

    var aggregatedAllocations = new List<SaleEventAllocationModel>(events.Count);
    foreach (var evt in events)
    {
        // Populate is expected to return true when the event was folded into an existing aggregate.
        if (aggregatedAllocations.Any(ag => ag.Populate(evt, m_amountProcessor)))
        {
            continue;
        }

        var newRecord = new SaleEventAllocationModel();
        newRecord.Populate(evt, m_amountProcessor);
        aggregatedAllocations.Add(newRecord);
    }

    var entry = new BatchSaleEventsReportEntry(key);
    entry.SaleEvents.AddRange(aggregatedAllocations.OrderByDescending(a => a.SortDt));

    var result = new BatchReportModel();
    result.Report.Add(entry);

    return result;
}

private BatchReportModel LoadOrders(BatchKey key, int ordersPageNumber)
{
    var orders = m_ordersFacade.GetOrdersByUsedBatch(key, c_pageSize, ordersPageNumber).ToList();

    var entry = new BatchOrdersReportEntry(key)
    {
        CanLoadMoreOrders = orders.Count == c_pageSize,
        NextOrdersPage = ordersPageNumber + 1
    };

    foreach (var entity in orders)
    {
        entry.Orders.Add(new BatchOrderModel
        {
            OrderId = entity.Item1.Id,
            Customer = entity.Item1.CustomerEmail,
            OrderNumber = entity.Item1.OrderNumber,
            PurchaseDate = StringUtil.FormatDateTime(entity.Item1.PurchaseDate),
            Status = m_orderStatusRepository.Translate(entity.Item1.OrderStatusId),
            Quantity = StringUtil.FormatDecimal(entity.Item2),
            IsAllocation = !OrderStatus.IsSent(entity.Item1.OrderStatusId),
            // The allocation handle ("orderId|batchKey") is parsed back in CutOrderAllocation(string).
            AllocationHandle = OrderStatus.IsSent(entity.Item1.OrderStatusId)
                ? null
                : $"{entity.Item1.Id}|{key.ToString(m_batchFacade)}"
        });
    }

    var result = new BatchReportModel();
    result.Report.Add(entry);

    return result;
}

public BatchSet(KAnimBatchGroup batchGroup, BatchKey batchKey, Vector2I spatialIdx)
{
    idx = spatialIdx;
    key = batchKey;
    dirty = true;
    group = batchGroup;
    batches = new List<KAnimBatch>();
}

private BatchReportEntry MapEntry(DbDataReader row)
{
    #region Column ordinals
    const int batchId = 0;
    const int inventoryName = 1;
    const int batchNumber = 2;
    const int materialName = 3;
    const int materialId = 4;
    const int batchVolume = 5;
    const int unit = 6;
    const int batchCreateDt = 7;
    const int batchCloseDt = 8;
    const int batchLockDt = 9;
    const int batchAvailable = 10;
    const int batchProductionDt = 11;
    const int numberOfComponents = 13;
    const int numberOfCompositions = 14;
    const int numberOfOrders = 16;
    const int price = 17;
    const int invoiceNr = 18;
    const int numberOfStockEvents = 19;
    const int numberOfSaleEvents = 20;
    const int numberOfSegments = 21;
    const int availableAmountValue = 22;
    const int availableAmountUnitId = 23;
    #endregion

    var key = BatchKey.Parse(row.GetString(batchId));

    var entry = new BatchReportEntry(key)
    {
        InventoryName = row.GetString(inventoryName),
        BatchNumber = row.IsDBNull(batchNumber) ? "?" : row.GetString(batchNumber),
        MaterialName = row.GetString(materialName),
        MaterialId = row.GetInt32(materialId),
        CreateDt = StringUtil.FormatDateTime(row.GetDateTime(batchCreateDt)),
        IsClosed = !row.IsDBNull(batchCloseDt),
        IsLocked = !row.IsDBNull(batchLockDt),
        IsAvailable = row.GetBoolean(batchAvailable),
        NumberOfComponents = row.GetInt32(numberOfComponents),
        NumberOfCompositions = row.GetInt32(numberOfCompositions),
        NumberOfOrders = row.GetInt32(numberOfOrders),
        Price = row.IsDBNull(price)
            ? string.Empty
            : $"{StringUtil.FormatDecimal(row.GetDecimal(price))} CZK",
        InvoiceNumber = row.IsDBNull(invoiceNr)
            ? string.Empty
            : string.Join(", ", row.GetString(invoiceNr).Split(';').Distinct()),
        HasStockEvents = !row.IsDBNull(numberOfStockEvents) && row.GetInt32(numberOfStockEvents) > 0,
        NumberOfSaleEvents = row.GetInt32(numberOfSaleEvents),
        NumberOfSegments = row.GetInt32(numberOfSegments),
        AvailableAmountValue = row.GetDecimal(availableAmountValue),
        AvailableAmountUnitId = row.GetInt32(availableAmountUnitId),
        TotalAmountValue = row.GetDecimal(batchVolume),
        TotalAmountUnitName = row.GetString(unit)
    };

    return entry;
}

public IEnumerable<IMaterialBatch> GetBatches(BatchKey key)
{
    var materialId = key.GetMaterialId(this);
    var batchNumber = key.GetBatchNumber(this);

    var entities = m_database.SelectFrom<IMaterialBatch>()
        .Where(b => b.ProjectId == m_session.Project.Id)
        .Where(b => b.CloseDt == null)
        .Where(b => b.MaterialId == materialId && b.BatchNumber == batchNumber)
        .OrderBy(b => b.Created)
        .Execute()
        .ToList();

    return entities.Select(e => new MaterialBatchAdapter(e, m_serviceLocator));
}

private BatchReportModel LoadPriceComponents(BatchKey key, int queryLoadPriceComponentsPage)
{
    var entry = new PriceComponentsReportEntry(key);
    entry.PriceComponents.AddRange(m_batchFacade.GetPriceComponents(key));

    var result = new BatchReportModel();
    result.Report.Add(entry);

    return result;
}

public void DeleteBatch(BatchKey batchKey)
{
    using (var tx = m_database.OpenTransaction())
    {
        foreach (var b in m_batchRepository.GetBatches(batchKey))
        {
            DeleteBatch(b.Id);
        }

        tx.Commit();
    }
}

public IEnumerable<IMaterialStockEvent> GetBatchEvents(BatchKey key)
{
    var batchNumber = key.GetBatchNumber(m_batchRepository);
    var materialId = key.GetMaterialId(m_batchRepository);

    return m_database.SelectFrom<IMaterialStockEvent>()
        .Join(e => e.Batch)
        .Where(m => m.ProjectId == m_session.Project.Id)
        .Where(m => m.Batch.MaterialId == materialId && m.Batch.BatchNumber == batchNumber)
        .Execute()
        .Select(b => new MaterialStockEventAdapter(m_serviceLocator, b));
}

public void RemoveFromBatches(BatchKey key, RuntimeSurfaceGeometry surfaceGeometry)
{
    if (GetBatchExists(key))
    {
        var occupied = StaticBatches[key].RemoveSurface(surfaceGeometry);
        if (!occupied)
        {
            StaticBatches.Remove(key);
        }
    }
}

public BatchKey ToKey()
{
    if (!string.IsNullOrWhiteSpace(BatchId))
    {
        return BatchKey.Parse(BatchId);
    }

    if (string.IsNullOrWhiteSpace(BatchNumberQuery) || MaterialId == null)
    {
        throw new InvalidOperationException("Cannot complete batch key");
    }

    return new BatchKey(MaterialId.Value, BatchNumberQuery);
}

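// A minimal sketch of the BatchKey shape these snippets assume: a material id plus batch
// number, a Parse/UnsafeToString round trip, and case-insensitive equality. The type name,
// the ':' separator and the exact members are assumptions for illustration only; the real
// BatchKey also resolves lazily through a repository/facade (GetMaterialId(...),
// GetBatchNumber(...), Match(...), ToString(facade)), which is omitted here.
using System;

public readonly struct BatchKeySketch : IEquatable<BatchKeySketch>
{
    public int MaterialId { get; }
    public string BatchNumber { get; }

    public BatchKeySketch(int materialId, string batchNumber)
    {
        MaterialId = materialId;
        BatchNumber = batchNumber ?? throw new ArgumentNullException(nameof(batchNumber));
    }

    // Round-trips the assumed "materialId:batchNumber" text form.
    public static BatchKeySketch Parse(string text)
    {
        var separatorIndex = text.IndexOf(':');
        if (separatorIndex < 1)
        {
            throw new FormatException($"Invalid batch key: '{text}'");
        }

        return new BatchKeySketch(
            int.Parse(text.Substring(0, separatorIndex)),
            text.Substring(separatorIndex + 1));
    }

    public string UnsafeToString() => $"{MaterialId}:{BatchNumber}";

    public bool Equals(BatchKeySketch other) =>
        MaterialId == other.MaterialId
        && string.Equals(BatchNumber, other.BatchNumber, StringComparison.InvariantCultureIgnoreCase);

    public override bool Equals(object obj) => obj is BatchKeySketch other && Equals(other);

    public override int GetHashCode() =>
        HashCode.Combine(MaterialId, StringComparer.InvariantCultureIgnoreCase.GetHashCode(BatchNumber));
}
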
public IEnumerable<ISaleEventAllocation> GetAllocationsByBatch(BatchKey key)
{
    var batchNumber = key.GetBatchNumber(m_batchFacade);
    var materialId = key.GetMaterialId(m_batchFacade);

    var allocs = m_database.SelectFrom<ISaleEventAllocation>()
        .Join(a => a.Batch)
        .Where(b => b.Batch.MaterialId == materialId
                    && b.Batch.BatchNumber == batchNumber
                    && b.Batch.ProjectId == m_session.Project.Id)
        .Execute();

    return allocs.Select(a => new SaleEventAllocationAdapter(m_serviceLocator, a));
}

public void CutOrderAllocation(string handle)
{
    // The handle format is "orderId|batchKey"; a bare "orderId" cuts all allocations of the order.
    var parts = handle.Split('|');
    var orderId = long.Parse(parts[0]);

    if (parts.Length == 2)
    {
        var key = BatchKey.Parse(parts[1]);
        m_batchFacade.CutOrderAllocation(orderId, key);
    }
    else if (parts.Length == 1)
    {
        m_batchFacade.CutOrderAllocation(orderId, null);
    }
}

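// Illustrative round trip of the allocation handle used by LoadOrders and
// CutOrderAllocation(string) above. The order id and batch-key text are made-up
// placeholders; the real key text comes from key.ToString(m_batchFacade).
using System;

static class AllocationHandleExample
{
    static void Main()
    {
        // Building the handle the same way LoadOrders does.
        long orderId = 123456;
        string batchKeyText = "42:B-2019-07";
        string handle = $"{orderId}|{batchKeyText}";

        // Parsing it back the same way CutOrderAllocation(string) does.
        var parts = handle.Split('|');
        Console.WriteLine(long.Parse(parts[0]));                         // 123456
        Console.WriteLine(parts.Length == 2 ? parts[1] : "<all allocations>");
    }
}
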
public IEnumerable<PriceComponentModel> GetPriceComponents(BatchKey key)
{
    var batches = m_batchRepository.GetBatches(key).Select(b => b.Id);

    var result = new List<PriceComponentModel>();
    foreach (var bid in batches)
    {
        var segmentComponents = GetPriceComponents(bid, false);
        foreach (var sc in segmentComponents)
        {
            var existing = result.FirstOrDefault(c => c.Text.Equals(sc.Text, StringComparison.InvariantCultureIgnoreCase));
            if (existing != null)
            {
                existing.RawValue += sc.RawValue;
                existing.IsWarning = existing.IsWarning || sc.IsWarning;
                continue;
            }

            result.Add(sc);
        }
    }

    // Move warning components to the end while preserving their relative order.
    var warnings = new List<PriceComponentModel>();
    for (var i = result.Count - 1; i >= 0; i--)
    {
        if (result[i].IsWarning)
        {
            warnings.Insert(0, result[i]);
            result.RemoveAt(i);
        }
    }

    result.AddRange(warnings);
    AddSumPriceComponent(result);

    return result;
}

/// <summary>
/// Removes batch allocations from an order. Use key = null to delete all allocations from this order.
/// </summary>
/// <param name="orderId">Id of the order whose allocations should be cut.</param>
/// <param name="key">Batch to cut, or null to cut all allocated batches.</param>
public void CutOrderAllocation(long orderId, BatchKey key)
{
    var order = m_orderRepository.GetOrder(orderId).Ensure();
    if (OrderStatus.IsSent(order.OrderStatusId))
    {
        throw new InvalidOperationException("Tato objednávka již byla odeslána"); // "This order has already been sent"
    }

    var assignmentsToCut = new List<IOrderItemMaterialBatch>();
    foreach (var item in order.Items)
    {
        foreach (var orderItemMaterialBatch in item.AssignedBatches.Where(i => key?.Match(i.MaterialBatch, this) ?? true))
        {
            assignmentsToCut.Add(orderItemMaterialBatch);
        }

        foreach (var kitChild in item.KitChildren)
        {
            foreach (var orderItemMaterialBatch in kitChild.AssignedBatches.Where(i => key?.Match(i.MaterialBatch, this) ?? true))
            {
                assignmentsToCut.Add(orderItemMaterialBatch);
            }
        }
    }

    if (!assignmentsToCut.Any())
    {
        m_log.Error($"CutOrderAllocation - no allocations found orderId={orderId}, key={key?.ToString(this)}");
    }

    m_database.DeleteAll(assignmentsToCut);

    foreach (var cutBatchId in assignmentsToCut.Select(a => a.MaterialBatchId).Distinct())
    {
        ReleaseBatchAmountCache(cutBatchId);
    }
}

private static void GetGiftBatches(Int32 ADaySpan)
{
    DateTime GiftsSince = DateTime.Now.AddDays(0 - ADaySpan);

    // The date is concatenated into the SQL text; a parameterised variant is sketched below.
    String SqlQuery =
        "SELECT " +
        "a_batch_number_i AS BatchNumber, " +
        "a_ledger_number_i AS LedgerNumber " +
        "FROM PUB_a_batch " +
        "WHERE a_batch_description_c LIKE 'Gift Batch %' " +
        "AND a_date_of_entry_d > '" + GiftsSince.ToString("yyyy-MM-dd") + "' ";

    DataSet GiftBatchDS = DBAccess.GDBAccessObj.Select(SqlQuery, "GiftBatchTbl", FTransaction);

    GiftBatches.Clear();

    foreach (DataRow Row in GiftBatchDS.Tables["GiftBatchTbl"].Rows)
    {
        BatchKey NewKey = new BatchKey();
        NewKey.BatchNumber = Convert.ToInt32(Row["BatchNumber"]);
        NewKey.LedgerNumber = Convert.ToInt32(Row["LedgerNumber"]);
        GiftBatches.Add(NewKey);
    }
}

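// A parameterised variant of the query above, sketched with plain ADO.NET (OdbcCommand)
// rather than the project's DBAccess.GDBAccessObj wrapper, whose parameter overloads are
// not shown here. The table and column names are taken from the snippet; the connection
// string, method name and return shape are illustrative assumptions.
using System;
using System.Collections.Generic;
using System.Data.Odbc;

static class GiftBatchQuerySketch
{
    public static List<(int BatchNumber, int LedgerNumber)> LoadGiftBatches(string connectionString, int daySpan)
    {
        var result = new List<(int BatchNumber, int LedgerNumber)>();

        const string sql =
            "SELECT a_batch_number_i, a_ledger_number_i " +
            "FROM PUB_a_batch " +
            "WHERE a_batch_description_c LIKE 'Gift Batch %' " +
            "AND a_date_of_entry_d > ?";

        using (var connection = new OdbcConnection(connectionString))
        using (var command = new OdbcCommand(sql, connection))
        {
            // ODBC uses positional '?' placeholders; the cutoff date is passed as a typed parameter.
            command.Parameters.AddWithValue("@GiftsSince", DateTime.Now.AddDays(-daySpan).Date);

            connection.Open();
            using (var reader = command.ExecuteReader())
            {
                while (reader.Read())
                {
                    result.Add((reader.GetInt32(0), reader.GetInt32(1)));
                }
            }
        }

        return result;
    }
}
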
private void CreateReceivablesCreditMemo(int KeyCompany, DataTable tblVenta, string sCultureName, int RecordPos, string sChequera)
{
    Context Context = new Context();
    Context.OrganizationKey = (OrganizationKey)new CompanyKey { Id = KeyCompany };
    Context.CultureName = sCultureName;

    CustomerKey customerKey = new CustomerKey();
    customerKey.Id = Convert.ToString(tblVenta.Rows[RecordPos]["idCliente"]);

    BatchKey batchKey = new BatchKey();
    batchKey.Id = tblVenta.Rows[RecordPos]["IDLote"].ToString();

    MoneyAmount moneyAmount = new MoneyAmount();
    moneyAmount.Currency = Convert.ToString(tblVenta.Rows[RecordPos]["IDMoneda"]);
    moneyAmount.Value = Convert.ToDecimal(tblVenta.Rows[RecordPos]["MontoXCobrar"]);

    ReceivablesDocumentKey receivablesDocumentKey = new ReceivablesDocumentKey();
    receivablesDocumentKey.Id = tblVenta.Rows[RecordPos]["NoDocumento"].ToString();

    ReceivablesCreditMemo receivablesCreditMemo = new ReceivablesCreditMemo();
    receivablesCreditMemo.Key = receivablesDocumentKey;

    if (tblVenta.Rows[RecordPos]["Fecha"].ToString() != "")
    {
        receivablesCreditMemo.Date = Convert.ToDateTime(tblVenta.Rows[RecordPos]["Fecha"]);
    }

    receivablesCreditMemo.BatchKey = batchKey;
    receivablesCreditMemo.CustomerKey = customerKey;
    receivablesCreditMemo.SalesAmount = moneyAmount;

    Policy policyByOperation = this.wsDynamicsGP.GetPolicyByOperation("CreateReceivablesCreditMemo", Context);
    this.wsDynamicsGP.CreateReceivablesCreditMemo(receivablesCreditMemo, Context, policyByOperation);
}

private void PopulateCompositionAmounts(BatchKey componentBatchId, List<BatchReportEntryBase> report)
{
    // Resolve the component identity once; it does not change per report row.
    var componentBatches = m_batchRepository.GetBatches(componentBatchId).ToList();
    if (!componentBatches.Any())
    {
        return;
    }

    var componentMaterialId = componentBatchId.GetMaterialId(m_batchRepository);
    var componentBatchNumber = componentBatchId.GetBatchNumber(m_batchRepository);

    foreach (var reportRow in report.OfType<BatchReportEntry>())
    {
        Amount theAmount = null;

        // Sum how much of the component each composition row consumed.
        foreach (var compositionBatch in m_batchRepository.GetBatches(reportRow.BatchKey))
        {
            foreach (var componentEntity in compositionBatch.Components)
            {
                var componentBatch = componentEntity.Component;
                if (componentBatch.MaterialId != componentMaterialId
                    || !componentBatch.BatchNumber.Equals(componentBatchNumber, StringComparison.InvariantCultureIgnoreCase))
                {
                    continue;
                }

                theAmount = m_amountProcessor.Add(theAmount, new Amount(componentEntity.Volume, componentEntity.Unit));
            }
        }

        reportRow.CustomField1 = theAmount?.ToString();
    }
}

private bool GetBatchExists(BatchKey key)
{
    return StaticBatches.ContainsKey(key);
}

// Note: this is private, as it's best to just use MergeAllBatches;
// there's no real advantage to being precise with merging,
// since MergeAllBatches only merges unmerged ones.
private void MergeBatch(BatchKey key)
{
    StaticBatches[key].Merge(deleteOriginalObjects);
}

public bool GetBatchIsMerged(BatchKey key)
{
    return GetBatchExists(key) && StaticBatches[key].IsMerged;
}

public Material[] GetUniqueMaterials(BatchKey key)
{
    if (!separateLights)
    {
        key.sourceLight = null;
        key.layeredTransparentSideSourceLight = null;
    }

#if USE_TEXTURE_ARRAYS
    if (!separateTextures)
    {
        key.sourceShapeDescriptor = ShapeDescriptor.Empty;
        key.layeredTransparentSideShapeDescriptor = ShapeDescriptor.Empty;
    }
#endif

    var materialCount = key.layeredTransparentSideSourceMaterial ? 2 : 1;

    if (!separateShaders)
    {
        key.sourceMaterial = null;
        key.layeredTransparentSideSourceMaterial = null;
    }

    if (SurfaceMaterials.ContainsKey(key))
    {
        if (separateShaders)
        {
            return SurfaceMaterials[key];
        }

        return materialCount == 2
            ? new Material[] { SurfaceMaterials[key][0], SurfaceMaterials[key][0] }
            : SurfaceMaterials[key];
    }

    Material[] uniqueMaterials = new Material[materialCount];

#if USE_TEXTURE_ARRAYS
    if (!separateLights && !separateTextures && separateShaders)
    {
        // Everything should be consolidated, so use the source materials directly
        // TODO: !!! - This code becomes unnecessary once the arrays are properly reassigned (see notes below)
        uniqueMaterials[0] = key.sourceMaterial;
        if (materialCount == 2)
        {
            uniqueMaterials[1] = key.layeredTransparentSideSourceMaterial;
        }
    }
    else
#endif
    {
        uniqueMaterials[0] = new Material(key.sourceMaterial);
        if (key.sourceLight != null)
        {
            uniqueMaterials[0].name += $" Light({key.sourceLight.NativeIndex})";
        }

        if (key.layeredTransparentSideSourceMaterial)
        {
            uniqueMaterials[1] = new Material(key.layeredTransparentSideSourceMaterial);
            if (key.layeredTransparentSideSourceLight != null)
            {
                uniqueMaterials[1].name += $" Light({key.layeredTransparentSideSourceLight.NativeIndex})";
            }
        }

#if USE_TEXTURE_ARRAYS
        // TODO: !!! Need to make sure that the Texture2DArray gets reassigned when it changes
        // (such as on level load or if a new texture starts being used).
        // Doesn't matter if materials aren't being split (above), since the original material is used.
#endif
    }

    SurfaceMaterials[key] = uniqueMaterials;
    key.sourceMedia?.SubscribeMaterial(uniqueMaterials[0]);

    return uniqueMaterials;
}

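// A minimal, self-contained sketch of the key-normalisation idea used by AddToBatches and
// GetUniqueMaterials above: fields that should not split batches are cleared on the local
// copy of the key before it is used for dictionary lookups, so compatible surfaces collapse
// into the same batch. The types here (SurfaceKey, the integer ids) are illustrative only
// and unrelated to the engine types in the snippets.
using System;
using System.Collections.Generic;

struct SurfaceKey : IEquatable<SurfaceKey>
{
    public int MaterialId;
    public int LightId;

    public bool Equals(SurfaceKey other) => MaterialId == other.MaterialId && LightId == other.LightId;
    public override bool Equals(object obj) => obj is SurfaceKey other && Equals(other);
    public override int GetHashCode() => (MaterialId * 397) ^ LightId;
}

static class KeyNormalisationSketch
{
    static void Main()
    {
        bool separateLights = false; // mirrors the batcher's 'separateLights' switch
        var batches = new Dictionary<SurfaceKey, List<string>>();

        void Add(SurfaceKey key, string surface)
        {
            if (!separateLights)
            {
                key.LightId = 0; // normalise: lights do not split batches
            }

            if (!batches.TryGetValue(key, out var batch))
            {
                batches[key] = batch = new List<string>();
            }

            batch.Add(surface);
        }

        Add(new SurfaceKey { MaterialId = 1, LightId = 7 }, "wall");
        Add(new SurfaceKey { MaterialId = 1, LightId = 9 }, "floor");

        Console.WriteLine(batches.Count); // 1: both surfaces ended up in the same batch
    }
}
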